diff --git a/.editorconfig b/.editorconfig index 273ca35ba..34f0e2ee3 100644 --- a/.editorconfig +++ b/.editorconfig @@ -28,3 +28,5 @@ dotnet_diagnostic.CA1861.severity = none # SYSLIB1045: Use GeneratedRegexAttribute to generate the regular expression implementation at compile time. dotnet_diagnostic.SYSLIB1045.severity = none + +csharp_style_namespace_declarations = file_scoped:error diff --git a/src/SmiServices/Applications/DicomDirectoryProcessor/DicomDirectoryProcessor.cs b/src/SmiServices/Applications/DicomDirectoryProcessor/DicomDirectoryProcessor.cs index 90e6e8464..0dab6f183 100644 --- a/src/SmiServices/Applications/DicomDirectoryProcessor/DicomDirectoryProcessor.cs +++ b/src/SmiServices/Applications/DicomDirectoryProcessor/DicomDirectoryProcessor.cs @@ -3,39 +3,38 @@ using System.Collections.Generic; using System.Diagnostics.CodeAnalysis; -namespace SmiServices.Applications.DicomDirectoryProcessor +namespace SmiServices.Applications.DicomDirectoryProcessor; + +/// +/// Command line program to process a directory and write an Accession +/// Directory message to the message exchange for each directory found +/// that contains DICOM (*.dcm) files. +/// +public static class DicomDirectoryProcessor { /// - /// Command line program to process a directory and write an Accession - /// Directory message to the message exchange for each directory found - /// that contains DICOM (*.dcm) files. + /// Main program. /// - public static class DicomDirectoryProcessor + /// + /// Arguments. There should be exactly one argument that specified the + /// path to the top level directory that is be searched. + /// + [ExcludeFromCodeCoverage] + public static int Main(IEnumerable args) { - /// - /// Main program. - /// - /// - /// Arguments. There should be exactly one argument that specified the - /// path to the top level directory that is be searched. 
- /// - [ExcludeFromCodeCoverage] - public static int Main(IEnumerable args) - { - int ret = SmiCliInit - .ParseAndRun( - args, - nameof(DicomDirectoryProcessor), - OnParse - ); - return ret; - } + int ret = SmiCliInit + .ParseAndRun( + args, + nameof(DicomDirectoryProcessor), + OnParse + ); + return ret; + } - private static int OnParse(GlobalOptions globals, DicomDirectoryProcessorCliOptions parsedOptions) - { - var bootstrapper = new MicroserviceHostBootstrapper(() => new DicomDirectoryProcessorHost(globals, parsedOptions)); - int ret = bootstrapper.Main(); - return ret; - } + private static int OnParse(GlobalOptions globals, DicomDirectoryProcessorCliOptions parsedOptions) + { + var bootstrapper = new MicroserviceHostBootstrapper(() => new DicomDirectoryProcessorHost(globals, parsedOptions)); + int ret = bootstrapper.Main(); + return ret; } } diff --git a/src/SmiServices/Applications/DicomDirectoryProcessor/DicomDirectoryProcessorCliOptions.cs b/src/SmiServices/Applications/DicomDirectoryProcessor/DicomDirectoryProcessorCliOptions.cs index 52a7487c5..47905ac4f 100644 --- a/src/SmiServices/Applications/DicomDirectoryProcessor/DicomDirectoryProcessorCliOptions.cs +++ b/src/SmiServices/Applications/DicomDirectoryProcessor/DicomDirectoryProcessorCliOptions.cs @@ -5,45 +5,44 @@ using System.Collections.Generic; using System.IO; -namespace SmiServices.Applications.DicomDirectoryProcessor +namespace SmiServices.Applications.DicomDirectoryProcessor; + +public class DicomDirectoryProcessorCliOptions : CliOptions { - public class DicomDirectoryProcessorCliOptions : CliOptions - { - [Option('d', "to-process", Required = true, HelpText = "The directory to process")] - public string ToProcess { get; set; } = null!; + [Option('d', "to-process", Required = true, HelpText = "The directory to process")] + public string ToProcess { get; set; } = null!; - [Option('f', "directory-format", Required = false, HelpText = "The specific directory search format to use (case insensitive). 
Options include PACS,LIST,ZIPS and DEFAULT", Default = "Default")] - public string? DirectoryFormat { get; set; } + [Option('f', "directory-format", Required = false, HelpText = "The specific directory search format to use (case insensitive). Options include PACS,LIST,ZIPS and DEFAULT", Default = "Default")] + public string? DirectoryFormat { get; set; } - public DirectoryInfo? ToProcessDir + public DirectoryInfo? ToProcessDir + { + get { - get - { - return ToProcess == null - ? null - : new DirectoryInfo(ToProcess); - } - set => ToProcess = value?.FullName ?? throw new ArgumentNullException(nameof(value)); + return ToProcess == null + ? null + : new DirectoryInfo(ToProcess); } + set => ToProcess = value?.FullName ?? throw new ArgumentNullException(nameof(value)); + } - [Usage] - public static IEnumerable Examples + [Usage] + public static IEnumerable Examples + { + get { - get - { - yield return - new Example("Normal Scenario", new DicomDirectoryProcessorCliOptions { ToProcess = @"c:\temp\bob" }); - yield return - new Example("Override Yaml File", new DicomDirectoryProcessorCliOptions { ToProcess = @"c:\temp\bob", YamlFile = "myconfig.yaml" }); - yield return - new Example("Search using the PACS directory structure", new DicomDirectoryProcessorCliOptions { ToProcess = @"c:\temp\bob", DirectoryFormat = "PACS" }); - } + yield return + new Example("Normal Scenario", new DicomDirectoryProcessorCliOptions { ToProcess = @"c:\temp\bob" }); + yield return + new Example("Override Yaml File", new DicomDirectoryProcessorCliOptions { ToProcess = @"c:\temp\bob", YamlFile = "myconfig.yaml" }); + yield return + new Example("Search using the PACS directory structure", new DicomDirectoryProcessorCliOptions { ToProcess = @"c:\temp\bob", DirectoryFormat = "PACS" }); } + } - public override string ToString() - { - return base.ToString() + "ToProcess: \"" + ToProcess + ", DirectoryFormat" + DirectoryFormat + "\"\n"; - } + public override string ToString() + { + return base.ToString() 
+ "ToProcess: \"" + ToProcess + ", DirectoryFormat" + DirectoryFormat + "\"\n"; } } diff --git a/src/SmiServices/Applications/DicomDirectoryProcessor/DicomDirectoryProcessorHost.cs b/src/SmiServices/Applications/DicomDirectoryProcessor/DicomDirectoryProcessorHost.cs index 88d3feee1..14873fc8d 100644 --- a/src/SmiServices/Applications/DicomDirectoryProcessor/DicomDirectoryProcessorHost.cs +++ b/src/SmiServices/Applications/DicomDirectoryProcessor/DicomDirectoryProcessorHost.cs @@ -5,102 +5,101 @@ using System.Globalization; using System.IO; -namespace SmiServices.Applications.DicomDirectoryProcessor +namespace SmiServices.Applications.DicomDirectoryProcessor; + +/// +/// Processes directories to find those that contain DICOM files +/// +public class DicomDirectoryProcessorHost : MicroserviceHost { + private readonly DicomDirectoryProcessorCliOptions _cliOptions; + private readonly IDicomDirectoryFinder _ddf; + /// - /// Processes directories to find those that contain DICOM files + /// Constructor /// - public class DicomDirectoryProcessorHost : MicroserviceHost + /// Common microservices options. Must contain details for an message exchange labelled as "accessionDirectories" + /// Configuration settings for the program + public DicomDirectoryProcessorHost(GlobalOptions globals, DicomDirectoryProcessorCliOptions cliOptions) + : base(globals) { - private readonly DicomDirectoryProcessorCliOptions _cliOptions; - private readonly IDicomDirectoryFinder _ddf; + _cliOptions = cliOptions; - /// - /// Constructor - /// - /// Common microservices options. 
Must contain details for an message exchange labelled as "accessionDirectories" - /// Configuration settings for the program - public DicomDirectoryProcessorHost(GlobalOptions globals, DicomDirectoryProcessorCliOptions cliOptions) - : base(globals) + if (!cliOptions.DirectoryFormat!.ToLower().Equals("list")) { - _cliOptions = cliOptions; - - if (!cliOptions.DirectoryFormat!.ToLower().Equals("list")) - { - // TODO(rkm 2020-02-12) I think we want to check this regardless of the mode - // (bp 2020-02-13) By not doing this check on list means that the list of paths is not required to be in PACS and can be imported from anywhere - if (!Directory.Exists(globals.FileSystemOptions!.FileSystemRoot)) - throw new ArgumentException("Cannot find the FileSystemRoot specified in the given MicroservicesOptions (" + globals.FileSystemOptions.FileSystemRoot + ")"); + // TODO(rkm 2020-02-12) I think we want to check this regardless of the mode + // (bp 2020-02-13) By not doing this check on list means that the list of paths is not required to be in PACS and can be imported from anywhere + if (!Directory.Exists(globals.FileSystemOptions!.FileSystemRoot)) + throw new ArgumentException("Cannot find the FileSystemRoot specified in the given MicroservicesOptions (" + globals.FileSystemOptions.FileSystemRoot + ")"); - if (!cliOptions.ToProcessDir!.Exists) - throw new ArgumentException("Could not find directory " + cliOptions.ToProcessDir.FullName); + if (!cliOptions.ToProcessDir!.Exists) + throw new ArgumentException("Could not find directory " + cliOptions.ToProcessDir.FullName); - if (!cliOptions.ToProcessDir.FullName.StartsWith(globals.FileSystemOptions.FileSystemRoot, true, CultureInfo.CurrentCulture)) - throw new ArgumentException("Directory parameter (" + cliOptions.ToProcessDir.FullName + ") must be below the FileSystemRoot (" + globals.FileSystemOptions.FileSystemRoot + ")"); - } - else - { - if (!File.Exists(cliOptions.ToProcessDir!.FullName)) - throw new ArgumentException("Could 
not find accession directory list file (" + cliOptions.ToProcessDir.FullName + ")"); + if (!cliOptions.ToProcessDir.FullName.StartsWith(globals.FileSystemOptions.FileSystemRoot, true, CultureInfo.CurrentCulture)) + throw new ArgumentException("Directory parameter (" + cliOptions.ToProcessDir.FullName + ") must be below the FileSystemRoot (" + globals.FileSystemOptions.FileSystemRoot + ")"); + } + else + { + if (!File.Exists(cliOptions.ToProcessDir!.FullName)) + throw new ArgumentException("Could not find accession directory list file (" + cliOptions.ToProcessDir.FullName + ")"); - if (!Path.GetExtension(cliOptions.ToProcessDir.FullName).Equals(".csv")) - throw new ArgumentException("When in 'list' mode, path to accession directory file of format .csv expected (" + cliOptions.ToProcessDir.FullName + ")"); - } + if (!Path.GetExtension(cliOptions.ToProcessDir.FullName).Equals(".csv")) + throw new ArgumentException("When in 'list' mode, path to accession directory file of format .csv expected (" + cliOptions.ToProcessDir.FullName + ")"); + } - switch (cliOptions.DirectoryFormat.ToLower()) - { - case "pacs": - Logger.Info("Creating PACS directory finder"); + switch (cliOptions.DirectoryFormat.ToLower()) + { + case "pacs": + Logger.Info("Creating PACS directory finder"); - _ddf = new PacsDirectoryFinder(globals.FileSystemOptions!.FileSystemRoot!, - globals.FileSystemOptions.DicomSearchPattern!, MessageBroker.SetupProducer(globals.ProcessDirectoryOptions!.AccessionDirectoryProducerOptions!, isBatch: false)); - break; - case "list": - Logger.Info("Creating accession directory lister"); + _ddf = new PacsDirectoryFinder(globals.FileSystemOptions!.FileSystemRoot!, + globals.FileSystemOptions.DicomSearchPattern!, MessageBroker.SetupProducer(globals.ProcessDirectoryOptions!.AccessionDirectoryProducerOptions!, isBatch: false)); + break; + case "list": + Logger.Info("Creating accession directory lister"); - _ddf = new 
AccessionDirectoryLister(globals.FileSystemOptions!.FileSystemRoot!, - globals.FileSystemOptions.DicomSearchPattern!, MessageBroker.SetupProducer(globals.ProcessDirectoryOptions!.AccessionDirectoryProducerOptions!, isBatch: false)); - break; - case "default": - Logger.Info("Creating basic directory finder"); + _ddf = new AccessionDirectoryLister(globals.FileSystemOptions!.FileSystemRoot!, + globals.FileSystemOptions.DicomSearchPattern!, MessageBroker.SetupProducer(globals.ProcessDirectoryOptions!.AccessionDirectoryProducerOptions!, isBatch: false)); + break; + case "default": + Logger.Info("Creating basic directory finder"); - _ddf = new BasicDicomDirectoryFinder(globals.FileSystemOptions!.FileSystemRoot!, - globals.FileSystemOptions.DicomSearchPattern!, MessageBroker.SetupProducer(globals.ProcessDirectoryOptions!.AccessionDirectoryProducerOptions!, isBatch: false)); - break; - case "zips": - Logger.Info("Creating zip directory finder"); + _ddf = new BasicDicomDirectoryFinder(globals.FileSystemOptions!.FileSystemRoot!, + globals.FileSystemOptions.DicomSearchPattern!, MessageBroker.SetupProducer(globals.ProcessDirectoryOptions!.AccessionDirectoryProducerOptions!, isBatch: false)); + break; + case "zips": + Logger.Info("Creating zip directory finder"); - _ddf = new ZipDicomDirectoryFinder(globals.FileSystemOptions!.FileSystemRoot!, - globals.FileSystemOptions.DicomSearchPattern!, MessageBroker.SetupProducer(globals.ProcessDirectoryOptions!.AccessionDirectoryProducerOptions!, isBatch: false)); - break; - default: - throw new ArgumentException( - $"Could not match directory format {cliOptions.DirectoryFormat} to an directory scan implementation"); - } + _ddf = new ZipDicomDirectoryFinder(globals.FileSystemOptions!.FileSystemRoot!, + globals.FileSystemOptions.DicomSearchPattern!, MessageBroker.SetupProducer(globals.ProcessDirectoryOptions!.AccessionDirectoryProducerOptions!, isBatch: false)); + break; + default: + throw new ArgumentException( + $"Could not match 
directory format {cliOptions.DirectoryFormat} to an directory scan implementation"); } + } - /// - /// Searches from the given directory to look for DICOM files and writes AccessionDirectoryMessages to the message exchange - /// - public override void Start() + /// + /// Searches from the given directory to look for DICOM files and writes AccessionDirectoryMessages to the message exchange + /// + public override void Start() + { + try { - try - { - _ddf.SearchForDicomDirectories(_cliOptions.ToProcessDir!.FullName); - } - catch (Exception e) - { - Fatal(e.Message, e); - return; - } - - Stop("Directory scan completed"); + _ddf.SearchForDicomDirectories(_cliOptions.ToProcessDir!.FullName); } - - public override void Stop(string reason) + catch (Exception e) { - _ddf.Stop(); - base.Stop(reason); + Fatal(e.Message, e); + return; } + + Stop("Directory scan completed"); + } + + public override void Stop(string reason) + { + _ddf.Stop(); + base.Stop(reason); } } diff --git a/src/SmiServices/Applications/DicomDirectoryProcessor/DirectoryFinders/AccessionDirectoryLister.cs b/src/SmiServices/Applications/DicomDirectoryProcessor/DirectoryFinders/AccessionDirectoryLister.cs index a5043b5f1..a1320f7c8 100644 --- a/src/SmiServices/Applications/DicomDirectoryProcessor/DirectoryFinders/AccessionDirectoryLister.cs +++ b/src/SmiServices/Applications/DicomDirectoryProcessor/DirectoryFinders/AccessionDirectoryLister.cs @@ -5,72 +5,71 @@ using System.Linq; using System.Text.RegularExpressions; -namespace SmiServices.Applications.DicomDirectoryProcessor.DirectoryFinders +namespace SmiServices.Applications.DicomDirectoryProcessor.DirectoryFinders; + +public class AccessionDirectoryLister : DicomDirectoryFinder { - public class AccessionDirectoryLister : DicomDirectoryFinder - { - // Regex that matches when we are at the yyyy\mm\dd\xxxxx directory level - private static readonly Regex _accDirectoryRegex = new(@"(20\d{2}[\\\/]\d{2}[\\\/]\d{2}[\\\/][a-zA-Z0-9._-]+[\\\/]$)"); + // Regex that 
matches when we are at the yyyy\mm\dd\xxxxx directory level + private static readonly Regex _accDirectoryRegex = new(@"(20\d{2}[\\\/]\d{2}[\\\/]\d{2}[\\\/][a-zA-Z0-9._-]+[\\\/]$)"); - public AccessionDirectoryLister(string fileSystemRoot, IFileSystem fileSystem, string dicomSearchPattern, IProducerModel directoriesProducerModel) - : base(fileSystemRoot, fileSystem, dicomSearchPattern, directoriesProducerModel) { } + public AccessionDirectoryLister(string fileSystemRoot, IFileSystem fileSystem, string dicomSearchPattern, IProducerModel directoriesProducerModel) + : base(fileSystemRoot, fileSystem, dicomSearchPattern, directoriesProducerModel) { } - public AccessionDirectoryLister(string fileSystemRoot, string dicomSearchPattern, IProducerModel directoriesProducerModel) - : this(fileSystemRoot, new FileSystem(), dicomSearchPattern, directoriesProducerModel) { } + public AccessionDirectoryLister(string fileSystemRoot, string dicomSearchPattern, IProducerModel directoriesProducerModel) + : this(fileSystemRoot, new FileSystem(), dicomSearchPattern, directoriesProducerModel) { } - public override void SearchForDicomDirectories(string accessionsList) + public override void SearchForDicomDirectories(string accessionsList) + { + Logger.Info($"Starting accession directory path listing from: {accessionsList}"); + IsProcessing = true; + TotalSent = 0; + + using var reader = FileSystem.File.OpenText(accessionsList); + while (!reader.EndOfStream && !TokenSource.IsCancellationRequested) { - Logger.Info($"Starting accession directory path listing from: {accessionsList}"); - IsProcessing = true; - TotalSent = 0; - using var reader = FileSystem.File.OpenText(accessionsList); - while (!reader.EndOfStream && !TokenSource.IsCancellationRequested) + var accessionDirectory = reader.ReadLine()?.Replace(",", ""); + + if (accessionDirectory is null || !_accDirectoryRegex.IsMatch(accessionDirectory)) { + Logger.Warn($"This path does not point to an accession directory: 
({accessionDirectory}), continuing"); + continue; + } - var accessionDirectory = reader.ReadLine()?.Replace(",", ""); - - if (accessionDirectory is null || !_accDirectoryRegex.IsMatch(accessionDirectory)) - { - Logger.Warn($"This path does not point to an accession directory: ({accessionDirectory}), continuing"); - continue; - } - - if (!FileSystem.Directory.Exists(accessionDirectory)) - { - Logger.Warn($"Can not find {accessionDirectory}, continuing"); - continue; - } - - var dirInfo = FileSystem.DirectoryInfo.New(accessionDirectory); - IEnumerable fileEnumerator; - - try - { - fileEnumerator = dirInfo.EnumerateFiles(SearchPattern); - } - catch (Exception e) - { - Logger.Error($"Could not enumerate files: {e.Message}"); - continue; - } - - if (fileEnumerator.FirstOrDefault() == null) - { - Logger.Warn( - $"Could not find dicom files in the given accession directory ({accessionDirectory}), skipping"); - continue; - } - - Logger.Debug($"Sending message ({accessionDirectory})"); - FoundNewDicomDirectory(accessionDirectory.Remove(0, FileSystemRoot.Length)); + if (!FileSystem.Directory.Exists(accessionDirectory)) + { + Logger.Warn($"Can not find {accessionDirectory}, continuing"); + continue; } - IsProcessing = false; + var dirInfo = FileSystem.DirectoryInfo.New(accessionDirectory); + IEnumerable fileEnumerator; - Logger.Info("Reading from list finished"); - Logger.Info($"Total messages sent: {TotalSent}"); + try + { + fileEnumerator = dirInfo.EnumerateFiles(SearchPattern); + } + catch (Exception e) + { + Logger.Error($"Could not enumerate files: {e.Message}"); + continue; + } + + if (fileEnumerator.FirstOrDefault() == null) + { + Logger.Warn( + $"Could not find dicom files in the given accession directory ({accessionDirectory}), skipping"); + continue; + } + + Logger.Debug($"Sending message ({accessionDirectory})"); + FoundNewDicomDirectory(accessionDirectory.Remove(0, FileSystemRoot.Length)); } + + IsProcessing = false; + + Logger.Info("Reading from list finished"); 
+ Logger.Info($"Total messages sent: {TotalSent}"); } } diff --git a/src/SmiServices/Applications/DicomDirectoryProcessor/DirectoryFinders/BasicDicomDirectoryFinder.cs b/src/SmiServices/Applications/DicomDirectoryProcessor/DirectoryFinders/BasicDicomDirectoryFinder.cs index 95fc93a4e..75522b8e3 100644 --- a/src/SmiServices/Applications/DicomDirectoryProcessor/DirectoryFinders/BasicDicomDirectoryFinder.cs +++ b/src/SmiServices/Applications/DicomDirectoryProcessor/DirectoryFinders/BasicDicomDirectoryFinder.cs @@ -6,121 +6,120 @@ using System.Linq; using System.Text; -namespace SmiServices.Applications.DicomDirectoryProcessor.DirectoryFinders -{ - public class BasicDicomDirectoryFinder : DicomDirectoryFinder - { - /// - /// True - Always go to bottom of directory structure - /// False - If a directory contains dicom files do not enumerate it's subdirectories - /// - public bool AlwaysSearchSubdirectories { get; set; } +namespace SmiServices.Applications.DicomDirectoryProcessor.DirectoryFinders; - public BasicDicomDirectoryFinder(string fileSystemRoot, IFileSystem fileSystem, string dicomSearchPattern, IProducerModel directoriesProducerModel) - : base(fileSystemRoot, fileSystem, dicomSearchPattern, directoriesProducerModel) { } - - public BasicDicomDirectoryFinder(string fileSystemRoot, string dicomSearchPattern, IProducerModel directoriesProducerModel) - : this(fileSystemRoot, new FileSystem(), dicomSearchPattern, directoriesProducerModel) { } +public class BasicDicomDirectoryFinder : DicomDirectoryFinder +{ + /// + /// True - Always go to bottom of directory structure + /// False - If a directory contains dicom files do not enumerate it's subdirectories + /// + public bool AlwaysSearchSubdirectories { get; set; } + public BasicDicomDirectoryFinder(string fileSystemRoot, IFileSystem fileSystem, string dicomSearchPattern, IProducerModel directoriesProducerModel) + : base(fileSystemRoot, fileSystem, dicomSearchPattern, directoriesProducerModel) { } - public override 
void SearchForDicomDirectories(string topLevelDirectory) - { - Logger.Info("Starting directory scan of: " + topLevelDirectory); - IsProcessing = true; - TotalSent = 0; + public BasicDicomDirectoryFinder(string fileSystemRoot, string dicomSearchPattern, IProducerModel directoriesProducerModel) + : this(fileSystemRoot, new FileSystem(), dicomSearchPattern, directoriesProducerModel) { } - if (!FileSystem.Directory.Exists(topLevelDirectory)) - throw new DirectoryNotFoundException("Could not find the top level directory at the start of the scan \"" + topLevelDirectory + "\""); - Times = []; - for (var i = 0; i < 6; ++i) - Times.Add([]); + public override void SearchForDicomDirectories(string topLevelDirectory) + { + Logger.Info("Starting directory scan of: " + topLevelDirectory); + IsProcessing = true; + TotalSent = 0; - var dirStack = new Stack(); - dirStack.Push(topLevelDirectory); + if (!FileSystem.Directory.Exists(topLevelDirectory)) + throw new DirectoryNotFoundException("Could not find the top level directory at the start of the scan \"" + topLevelDirectory + "\""); - var largestStackSize = 1; + Times = []; + for (var i = 0; i < 6; ++i) + Times.Add([]); - while (dirStack.Count > 0 && !TokenSource.IsCancellationRequested) - { - Logger.Debug($"Start of loop, stack size is: {dirStack.Count}"); + var dirStack = new Stack(); + dirStack.Push(topLevelDirectory); - string dir = dirStack.Pop(); - Logger.Debug($"Scanning {dir}"); + var largestStackSize = 1; - if (!FileSystem.Directory.Exists(dir)) - { - // Occurs too often on the VM for us to throw and exit from here, just have to log & continue for now - //throw new DirectoryNotFoundException("A previously seen directory can no longer be found: " + dir); + while (dirStack.Count > 0 && !TokenSource.IsCancellationRequested) + { + Logger.Debug($"Start of loop, stack size is: {dirStack.Count}"); - Logger.Warn($"Can no longer find {dir}, continuing"); - continue; - } + string dir = dirStack.Pop(); + Logger.Debug($"Scanning 
{dir}"); - // Lazy-evaluate the contents of the directory so we don't overwhelm the filesystem - // and return on the first instance of a dicom file + if (!FileSystem.Directory.Exists(dir)) + { + // Occurs too often on the VM for us to throw and exit from here, just have to log & continue for now + //throw new DirectoryNotFoundException("A previously seen directory can no longer be found: " + dir); - Stopwatch.Restart(); - StringBuilder = new StringBuilder(); + Logger.Warn($"Can no longer find {dir}, continuing"); + continue; + } - IDirectoryInfo dirInfo = FileSystem.DirectoryInfo.New(dir); - LogTime(TimeLabel.NewDirInfo); + // Lazy-evaluate the contents of the directory so we don't overwhelm the filesystem + // and return on the first instance of a dicom file - IEnumerable fileEnumerator; - try - { - fileEnumerator = GetEnumerator(dirInfo); - LogTime(TimeLabel.EnumFiles); - } - catch (Exception e) - { - Logger.Error($"Couldn't enumerate files: {e.Message}"); - continue; - } + Stopwatch.Restart(); + StringBuilder = new StringBuilder(); - bool hasDicom = fileEnumerator.FirstOrDefault() != null; - LogTime(TimeLabel.FirstOrDef); + IDirectoryInfo dirInfo = FileSystem.DirectoryInfo.New(dir); + LogTime(TimeLabel.NewDirInfo); - // If directory contains any DICOM files report and don't go any further - if (hasDicom) - { - FoundNewDicomDirectory(dir); - LogTime(TimeLabel.FoundNewDir); - } + IEnumerable fileEnumerator; + try + { + fileEnumerator = GetEnumerator(dirInfo); + LogTime(TimeLabel.EnumFiles); + } + catch (Exception e) + { + Logger.Error($"Couldn't enumerate files: {e.Message}"); + continue; + } - if (!hasDicom || AlwaysSearchSubdirectories) - { - Logger.Debug($"Enumerating subdirectories of {dir}"); + bool hasDicom = fileEnumerator.FirstOrDefault() != null; + LogTime(TimeLabel.FirstOrDef); - IEnumerable dirEnumerable = FileSystem.Directory.EnumerateDirectories(dir); - LogTime(TimeLabel.EnumDirs); + // If directory contains any DICOM files report and don't go any 
further + if (hasDicom) + { + FoundNewDicomDirectory(dir); + LogTime(TimeLabel.FoundNewDir); + } - var totalSubDirs = 0; + if (!hasDicom || AlwaysSearchSubdirectories) + { + Logger.Debug($"Enumerating subdirectories of {dir}"); - foreach (string subDir in dirEnumerable) - { - dirStack.Push(subDir); - ++totalSubDirs; - } + IEnumerable dirEnumerable = FileSystem.Directory.EnumerateDirectories(dir); + LogTime(TimeLabel.EnumDirs); - if (dirStack.Count > largestStackSize) - largestStackSize = dirStack.Count; + var totalSubDirs = 0; - LogTime(TimeLabel.PushDirs); - Logger.Debug($"Found {totalSubDirs} subdirectories"); + foreach (string subDir in dirEnumerable) + { + dirStack.Push(subDir); + ++totalSubDirs; } - Logger.Debug(StringBuilder.ToString); + if (dirStack.Count > largestStackSize) + largestStackSize = dirStack.Count; + + LogTime(TimeLabel.PushDirs); + Logger.Debug($"Found {totalSubDirs} subdirectories"); } - IsProcessing = false; + Logger.Debug(StringBuilder.ToString); + } - Logger.Info("Directory scan finished"); - Logger.Info($"Total messages sent: {TotalSent}"); - Logger.Info($"Largest stack size was: {largestStackSize}"); + IsProcessing = false; - if (TotalSent > 0) - Logger.Info(CalcAverages()); - } + Logger.Info("Directory scan finished"); + Logger.Info($"Total messages sent: {TotalSent}"); + Logger.Info($"Largest stack size was: {largestStackSize}"); + + if (TotalSent > 0) + Logger.Info(CalcAverages()); } } diff --git a/src/SmiServices/Applications/DicomDirectoryProcessor/DirectoryFinders/DicomDirectoryFinder.cs b/src/SmiServices/Applications/DicomDirectoryProcessor/DirectoryFinders/DicomDirectoryFinder.cs index b8b7b8e49..1b13ddbc1 100644 --- a/src/SmiServices/Applications/DicomDirectoryProcessor/DirectoryFinders/DicomDirectoryFinder.cs +++ b/src/SmiServices/Applications/DicomDirectoryProcessor/DirectoryFinders/DicomDirectoryFinder.cs @@ -10,135 +10,134 @@ using System.Text; using System.Threading; -namespace 
SmiServices.Applications.DicomDirectoryProcessor.DirectoryFinders +namespace SmiServices.Applications.DicomDirectoryProcessor.DirectoryFinders; + +/// +/// Finds directories that contain DICOM files and outputs one AccessionDirectoryMessage for each directory it finds +/// that contains a *.dcm file (or ). It will search into subdirectories but will not search +/// into subdirectories below any directory that does contain a *.dcm file (or ). +/// +public abstract class DicomDirectoryFinder : IDicomDirectoryFinder { - /// - /// Finds directories that contain DICOM files and outputs one AccessionDirectoryMessage for each directory it finds - /// that contains a *.dcm file (or ). It will search into subdirectories but will not search - /// into subdirectories below any directory that does contain a *.dcm file (or ). - /// - public abstract class DicomDirectoryFinder : IDicomDirectoryFinder - { - protected readonly ILogger Logger; - - protected readonly string FileSystemRoot; - protected readonly IFileSystem FileSystem; - - private readonly IProducerModel _directoriesProducerModel; - protected int TotalSent; + protected readonly ILogger Logger; - protected bool IsProcessing; - protected readonly CancellationTokenSource TokenSource = new(); + protected readonly string FileSystemRoot; + protected readonly IFileSystem FileSystem; - protected readonly Stopwatch Stopwatch = new(); - protected StringBuilder? StringBuilder; - protected List>? Times; + private readonly IProducerModel _directoriesProducerModel; + protected int TotalSent; - /// - /// The filenames to look for in directories. Defaults to *.dcm - /// - protected readonly string SearchPattern; - - protected enum TimeLabel - { - NewDirInfo, - EnumFiles, - FirstOrDef, - FoundNewDir, - EnumDirs, - PushDirs - } + protected bool IsProcessing; + protected readonly CancellationTokenSource TokenSource = new(); + protected readonly Stopwatch Stopwatch = new(); + protected StringBuilder? StringBuilder; + protected List>? 
Times; - protected DicomDirectoryFinder( - string fileSystemRoot, - IFileSystem fileSystem, - string dicomSearchPattern, - IProducerModel directoriesProducerModel - ) - { - FileSystemRoot = fileSystemRoot; - FileSystem = fileSystem; - SearchPattern = dicomSearchPattern; - _directoriesProducerModel = directoriesProducerModel; - Logger = LogManager.GetLogger(GetType().Name); - } + /// + /// The filenames to look for in directories. Defaults to *.dcm + /// + protected readonly string SearchPattern; - public abstract void SearchForDicomDirectories(string rootDir); + protected enum TimeLabel + { + NewDirInfo, + EnumFiles, + FirstOrDef, + FoundNewDir, + EnumDirs, + PushDirs + } - public void Stop() - { - if (!IsProcessing) - return; - Logger.Info("Stop requested while still processing, attempting to kill"); - TokenSource.Cancel(); + protected DicomDirectoryFinder( + string fileSystemRoot, + IFileSystem fileSystem, + string dicomSearchPattern, + IProducerModel directoriesProducerModel + ) + { + FileSystemRoot = fileSystemRoot; + FileSystem = fileSystem; + SearchPattern = dicomSearchPattern; + _directoriesProducerModel = directoriesProducerModel; + Logger = LogManager.GetLogger(GetType().Name); + } - var timeout = 5000; - const int delta = 500; - while (IsProcessing && timeout > 0) - { - Thread.Sleep(delta); - timeout -= delta; - } + public abstract void SearchForDicomDirectories(string rootDir); - if (timeout <= 0) - throw new ApplicationException("SearchForDicomDirectories did not exit in time"); + public void Stop() + { + if (!IsProcessing) + return; - Logger.Info("Directory scan aborted, exiting"); - } + Logger.Info("Stop requested while still processing, attempting to kill"); + TokenSource.Cancel(); - /// - /// Handled when a new DICOM directory is found. 
Writes an AccessionDirectoryMessage to the message exchange - /// - /// Full path to a directory that has been found to contain a DICOM file - protected void FoundNewDicomDirectory(string dir) + var timeout = 5000; + const int delta = 500; + while (IsProcessing && timeout > 0) { - Logger.Debug("DicomDirectoryFinder: Found " + dir); + Thread.Sleep(delta); + timeout -= delta; + } - string dirPath = Path.GetFullPath(dir).TrimEnd(Path.DirectorySeparatorChar); + if (timeout <= 0) + throw new ApplicationException("SearchForDicomDirectories did not exit in time"); - if (dirPath.StartsWith(FileSystemRoot)) - dirPath = dirPath.Remove(0, FileSystemRoot.Length); + Logger.Info("Directory scan aborted, exiting"); + } - dirPath = dirPath.TrimStart(Path.DirectorySeparatorChar); + /// + /// Handled when a new DICOM directory is found. Writes an AccessionDirectoryMessage to the message exchange + /// + /// Full path to a directory that has been found to contain a DICOM file + protected void FoundNewDicomDirectory(string dir) + { + Logger.Debug("DicomDirectoryFinder: Found " + dir); - var message = new AccessionDirectoryMessage - { - DirectoryPath = dirPath, - }; + string dirPath = Path.GetFullPath(dir).TrimEnd(Path.DirectorySeparatorChar); - _directoriesProducerModel.SendMessage(message, isInResponseTo: null, routingKey: null); - ++TotalSent; - } + if (dirPath.StartsWith(FileSystemRoot)) + dirPath = dirPath.Remove(0, FileSystemRoot.Length); - protected void LogTime(TimeLabel tl) - { - long elapsed = Stopwatch.ElapsedMilliseconds; - StringBuilder!.Append(tl + "=" + elapsed + "ms "); - Times![(int)tl].Add(elapsed); - Stopwatch.Restart(); - } + dirPath = dirPath.TrimStart(Path.DirectorySeparatorChar); - protected string CalcAverages() + var message = new AccessionDirectoryMessage { - var sb = new StringBuilder(); - sb.AppendLine("Averages:"); + DirectoryPath = dirPath, + }; - foreach (TimeLabel label in (TimeLabel[])Enum.GetValues(typeof(TimeLabel))) - { - int count = 
Times![(int)label].Count; - long average = count == 0 ? 0 : Times[(int)label].Sum() / count; + _directoriesProducerModel.SendMessage(message, isInResponseTo: null, routingKey: null); + ++TotalSent; + } - sb.AppendLine(label + ":\t" + average + "ms"); - } + protected void LogTime(TimeLabel tl) + { + long elapsed = Stopwatch.ElapsedMilliseconds; + StringBuilder!.Append(tl + "=" + elapsed + "ms "); + Times![(int)tl].Add(elapsed); + Stopwatch.Restart(); + } - return sb.ToString(); - } + protected string CalcAverages() + { + var sb = new StringBuilder(); + sb.AppendLine("Averages:"); - protected virtual IEnumerable GetEnumerator(IDirectoryInfo dirInfo) + foreach (TimeLabel label in (TimeLabel[])Enum.GetValues(typeof(TimeLabel))) { - return dirInfo.EnumerateFiles(SearchPattern); + int count = Times![(int)label].Count; + long average = count == 0 ? 0 : Times[(int)label].Sum() / count; + + sb.AppendLine(label + ":\t" + average + "ms"); } + + return sb.ToString(); + } + + protected virtual IEnumerable GetEnumerator(IDirectoryInfo dirInfo) + { + return dirInfo.EnumerateFiles(SearchPattern); } } diff --git a/src/SmiServices/Applications/DicomDirectoryProcessor/DirectoryFinders/IDicomDirectoryFinder.cs b/src/SmiServices/Applications/DicomDirectoryProcessor/DirectoryFinders/IDicomDirectoryFinder.cs index 6d9301df0..04739647d 100644 --- a/src/SmiServices/Applications/DicomDirectoryProcessor/DirectoryFinders/IDicomDirectoryFinder.cs +++ b/src/SmiServices/Applications/DicomDirectoryProcessor/DirectoryFinders/IDicomDirectoryFinder.cs @@ -1,21 +1,20 @@ using SmiServices.Common.Messages; -namespace SmiServices.Applications.DicomDirectoryProcessor.DirectoryFinders +namespace SmiServices.Applications.DicomDirectoryProcessor.DirectoryFinders; + +/// +/// Interface for classes which scan a directory for dicom files +/// +public interface IDicomDirectoryFinder { /// - /// Interface for classes which scan a directory for dicom files + /// Performs the directory scan, sending s where it 
finds dicom files /// - public interface IDicomDirectoryFinder - { - /// - /// Performs the directory scan, sending s where it finds dicom files - /// - /// The full path to start the scan at - void SearchForDicomDirectories(string rootDir); + /// The full path to start the scan at + void SearchForDicomDirectories(string rootDir); - /// - /// Stops the scan if it is still running. Implementations must ensure they exit promptly when requested. - /// - void Stop(); - } + /// + /// Stops the scan if it is still running. Implementations must ensure they exit promptly when requested. + /// + void Stop(); } diff --git a/src/SmiServices/Applications/DicomDirectoryProcessor/DirectoryFinders/PacsDirectoryFinder.cs b/src/SmiServices/Applications/DicomDirectoryProcessor/DirectoryFinders/PacsDirectoryFinder.cs index ff25abba6..f720e2b0d 100644 --- a/src/SmiServices/Applications/DicomDirectoryProcessor/DirectoryFinders/PacsDirectoryFinder.cs +++ b/src/SmiServices/Applications/DicomDirectoryProcessor/DirectoryFinders/PacsDirectoryFinder.cs @@ -5,81 +5,80 @@ using System.Linq; using System.Text.RegularExpressions; -namespace SmiServices.Applications.DicomDirectoryProcessor.DirectoryFinders +namespace SmiServices.Applications.DicomDirectoryProcessor.DirectoryFinders; + +public class PacsDirectoryFinder : DicomDirectoryFinder { - public class PacsDirectoryFinder : DicomDirectoryFinder - { - // Regex that matches when we are at the yyyy\mm\dd\ directory level - private readonly Regex _dayDirectoryRegex = new(@"(20\d{2}[\\\/]\d{2}[\\\/]\d{2})([\\\/]|$)"); - // Regex that matches when we are at the yyyy\mm\dd\xxxxx directory level - private readonly Regex _accDirectoryRegex = new(@"(20\d{2}[\\\/]\d{2}[\\\/]\d{2}[\\\/][a-zA-Z0-9._-]+[\\\/]$)"); + // Regex that matches when we are at the yyyy\mm\dd\ directory level + private readonly Regex _dayDirectoryRegex = new(@"(20\d{2}[\\\/]\d{2}[\\\/]\d{2})([\\\/]|$)"); + // Regex that matches when we are at the yyyy\mm\dd\xxxxx directory level + 
private readonly Regex _accDirectoryRegex = new(@"(20\d{2}[\\\/]\d{2}[\\\/]\d{2}[\\\/][a-zA-Z0-9._-]+[\\\/]$)"); + + public PacsDirectoryFinder(string fileSystemRoot, IFileSystem fileSystem, string dicomSearchPattern, IProducerModel directoriesProducerModel) + : base(fileSystemRoot, fileSystem, dicomSearchPattern, directoriesProducerModel) { } - public PacsDirectoryFinder(string fileSystemRoot, IFileSystem fileSystem, string dicomSearchPattern, IProducerModel directoriesProducerModel) - : base(fileSystemRoot, fileSystem, dicomSearchPattern, directoriesProducerModel) { } + public PacsDirectoryFinder(string fileSystemRoot, string dicomSearchPattern, IProducerModel directoriesProducerModel) + : base(fileSystemRoot, new FileSystem(), dicomSearchPattern, directoriesProducerModel) { } - public PacsDirectoryFinder(string fileSystemRoot, string dicomSearchPattern, IProducerModel directoriesProducerModel) - : base(fileSystemRoot, new FileSystem(), dicomSearchPattern, directoriesProducerModel) { } + public override void SearchForDicomDirectories(string rootDir) + { + Logger.Info("Starting directory scan of: " + rootDir); + IsProcessing = true; + TotalSent = 0; + + if (!FileSystem.Directory.Exists(rootDir)) + throw new DirectoryNotFoundException("Could not find the root directory at the start of the scan \"" + rootDir + "\""); - public override void SearchForDicomDirectories(string rootDir) + // Check if we were given an accession directory + if (_accDirectoryRegex.IsMatch(rootDir)) { - Logger.Info("Starting directory scan of: " + rootDir); - IsProcessing = true; - TotalSent = 0; + Logger.Debug("Given an accession directory, sending single message"); + FoundNewDicomDirectory(rootDir.Remove(0, FileSystemRoot.Length)); + } + else + { + Times = []; + for (var i = 0; i < 6; ++i) + Times.Add([]); - if (!FileSystem.Directory.Exists(rootDir)) - throw new DirectoryNotFoundException("Could not find the root directory at the start of the scan \"" + rootDir + "\""); + var dirStack = new 
Stack(); + dirStack.Push(rootDir); - // Check if we were given an accession directory - if (_accDirectoryRegex.IsMatch(rootDir)) - { - Logger.Debug("Given an accession directory, sending single message"); - FoundNewDicomDirectory(rootDir.Remove(0, FileSystemRoot.Length)); - } - else + while (dirStack.Count > 0 && !TokenSource.IsCancellationRequested) { - Times = []; - for (var i = 0; i < 6; ++i) - Times.Add([]); + string dir = dirStack.Pop(); + Logger.Debug("Scanning " + dir); - var dirStack = new Stack(); - dirStack.Push(rootDir); + IDirectoryInfo dirInfo = FileSystem.DirectoryInfo.New(dir); - while (dirStack.Count > 0 && !TokenSource.IsCancellationRequested) + if (!dirInfo.Exists) { - string dir = dirStack.Pop(); - Logger.Debug("Scanning " + dir); - - IDirectoryInfo dirInfo = FileSystem.DirectoryInfo.New(dir); - - if (!dirInfo.Exists) - { - Logger.Warn("Can no longer find " + dir + ", continuing"); - continue; - } - - IEnumerable subDirs = dirInfo.EnumerateDirectories(); - - if (_dayDirectoryRegex.IsMatch(dir)) - { - Logger.Debug("At the day level, assuming all subdirs are accession directories"); - // At the day level, so each of the subdirectories will be accession directories - foreach (IDirectoryInfo accessionDir in subDirs) - FoundNewDicomDirectory(accessionDir.FullName); - } - else - { - Logger.Debug("Not at the day level, checking subdirectories"); - subDirs.ToList().ForEach(x => dirStack.Push(x.FullName)); - } + Logger.Warn("Can no longer find " + dir + ", continuing"); + continue; } - } - IsProcessing = false; + IEnumerable subDirs = dirInfo.EnumerateDirectories(); - Logger.Info("Directory scan finished"); - Logger.Info("Total messages sent: " + TotalSent); + if (_dayDirectoryRegex.IsMatch(dir)) + { + Logger.Debug("At the day level, assuming all subdirs are accession directories"); + // At the day level, so each of the subdirectories will be accession directories + foreach (IDirectoryInfo accessionDir in subDirs) + 
FoundNewDicomDirectory(accessionDir.FullName); + } + else + { + Logger.Debug("Not at the day level, checking subdirectories"); + subDirs.ToList().ForEach(x => dirStack.Push(x.FullName)); + } + } } + + IsProcessing = false; + + Logger.Info("Directory scan finished"); + Logger.Info("Total messages sent: " + TotalSent); } } diff --git a/src/SmiServices/Applications/DicomDirectoryProcessor/DirectoryFinders/ZipDicomDirectoryFinder.cs b/src/SmiServices/Applications/DicomDirectoryProcessor/DirectoryFinders/ZipDicomDirectoryFinder.cs index 4cf0f2703..f3aa5bc5f 100644 --- a/src/SmiServices/Applications/DicomDirectoryProcessor/DirectoryFinders/ZipDicomDirectoryFinder.cs +++ b/src/SmiServices/Applications/DicomDirectoryProcessor/DirectoryFinders/ZipDicomDirectoryFinder.cs @@ -4,27 +4,26 @@ using System.IO.Abstractions; using System.Linq; -namespace SmiServices.Applications.DicomDirectoryProcessor.DirectoryFinders +namespace SmiServices.Applications.DicomDirectoryProcessor.DirectoryFinders; + +/// +/// Finds directories that contain zip files or dicom files. Does not require files to be in a directory structure +/// that contains AccessionNumber +/// +public class ZipDicomDirectoryFinder : BasicDicomDirectoryFinder { - /// - /// Finds directories that contain zip files or dicom files. 
Does not require files to be in a directory structure - /// that contains AccessionNumber - /// - public class ZipDicomDirectoryFinder : BasicDicomDirectoryFinder + public ZipDicomDirectoryFinder(string fileSystemRoot, IFileSystem fileSystem, string dicomSearchPattern, IProducerModel directoriesProducerModel) + : base(fileSystemRoot, fileSystem, dicomSearchPattern, directoriesProducerModel) { - public ZipDicomDirectoryFinder(string fileSystemRoot, IFileSystem fileSystem, string dicomSearchPattern, IProducerModel directoriesProducerModel) - : base(fileSystemRoot, fileSystem, dicomSearchPattern, directoriesProducerModel) - { - AlwaysSearchSubdirectories = true; - } + AlwaysSearchSubdirectories = true; + } - public ZipDicomDirectoryFinder(string fileSystemRoot, string dicomSearchPattern, IProducerModel directoriesProducerModel) - : this(fileSystemRoot, new FileSystem(), dicomSearchPattern, directoriesProducerModel) { } + public ZipDicomDirectoryFinder(string fileSystemRoot, string dicomSearchPattern, IProducerModel directoriesProducerModel) + : this(fileSystemRoot, new FileSystem(), dicomSearchPattern, directoriesProducerModel) { } - protected override IEnumerable GetEnumerator(IDirectoryInfo dirInfo) - { - return dirInfo.EnumerateFiles().Where(f => f.Extension == ".dcm" || ZipHelper.IsZip(f)); - } + protected override IEnumerable GetEnumerator(IDirectoryInfo dirInfo) + { + return dirInfo.EnumerateFiles().Where(f => f.Extension == ".dcm" || ZipHelper.IsZip(f)); } } diff --git a/src/SmiServices/Applications/ExtractImages/CohortCsvParser.cs b/src/SmiServices/Applications/ExtractImages/CohortCsvParser.cs index 07d90710a..9e3ee444b 100644 --- a/src/SmiServices/Applications/ExtractImages/CohortCsvParser.cs +++ b/src/SmiServices/Applications/ExtractImages/CohortCsvParser.cs @@ -9,57 +9,56 @@ using System.Linq; -namespace SmiServices.Applications.ExtractImages +namespace SmiServices.Applications.ExtractImages; + +public class CohortCsvParser { - public class CohortCsvParser 
+ // NOTE(rkm 2021-04-01) Just do a simple line-by-line read through the CSV + private static readonly CsvConfiguration _csvConfiguration = new(CultureInfo.InvariantCulture) { - // NOTE(rkm 2021-04-01) Just do a simple line-by-line read through the CSV - private static readonly CsvConfiguration _csvConfiguration = new(CultureInfo.InvariantCulture) - { - HasHeaderRecord = false - }; + HasHeaderRecord = false + }; - private readonly IFileSystem _fileSystem; + private readonly IFileSystem _fileSystem; - public CohortCsvParser(IFileSystem fileSystem) - { - _fileSystem = fileSystem; - } + public CohortCsvParser(IFileSystem fileSystem) + { + _fileSystem = fileSystem; + } - public Tuple> Parse(string csvFilePath) - { - using var fileStream = _fileSystem.FileStream.New(csvFilePath, FileMode.Open, FileAccess.Read); - using var streamReader = new StreamReader(fileStream); - using var reader = new CsvReader(streamReader, _csvConfiguration); + public Tuple> Parse(string csvFilePath) + { + using var fileStream = _fileSystem.FileStream.New(csvFilePath, FileMode.Open, FileAccess.Read); + using var streamReader = new StreamReader(fileStream); + using var reader = new CsvReader(streamReader, _csvConfiguration); - reader.Read(); - var headerRecord = reader.Parser.Record ?? throw new ApplicationException(message: "CSV is empty"); + reader.Read(); + var headerRecord = reader.Parser.Record ?? 
throw new ApplicationException(message: "CSV is empty"); - if (headerRecord.Length != 1) - throw new ApplicationException(message: "CSV must have exactly 1 column"); + if (headerRecord.Length != 1) + throw new ApplicationException(message: "CSV must have exactly 1 column"); - if (!Enum.TryParse(headerRecord[0], out var extractionKey)) - { - var keys = string.Join(separator: ',', Enum.GetNames(typeof(ExtractionKey))); - throw new ApplicationException($"CSV header must be a valid ExtractionKey: {keys}"); - } + if (!Enum.TryParse(headerRecord[0], out var extractionKey)) + { + var keys = string.Join(separator: ',', Enum.GetNames(typeof(ExtractionKey))); + throw new ApplicationException($"CSV header must be a valid ExtractionKey: {keys}"); + } - var allIds = new List(); - while (reader.Read()) - { - var record = reader.Parser.Record; - if (record.Length != 1) - throw new ApplicationException(message: "CSV must have exactly 1 column"); + var allIds = new List(); + while (reader.Read()) + { + var record = reader.Parser.Record; + if (record.Length != 1) + throw new ApplicationException(message: "CSV must have exactly 1 column"); - var id = record[0]?.Trim(); - if (!string.IsNullOrWhiteSpace(id)) - allIds.Add(id); - } + var id = record[0]?.Trim(); + if (!string.IsNullOrWhiteSpace(id)) + allIds.Add(id); + } - if (allIds.Count == 0) - throw new ApplicationException(message: "No records in the cohort CSV"); + if (allIds.Count == 0) + throw new ApplicationException(message: "No records in the cohort CSV"); - return new Tuple>(extractionKey, allIds); - } + return new Tuple>(extractionKey, allIds); } } diff --git a/src/SmiServices/Applications/ExtractImages/ExtractImages.cs b/src/SmiServices/Applications/ExtractImages/ExtractImages.cs index f19fbf9bb..e7c355499 100644 --- a/src/SmiServices/Applications/ExtractImages/ExtractImages.cs +++ b/src/SmiServices/Applications/ExtractImages/ExtractImages.cs @@ -4,28 +4,27 @@ using System.Diagnostics.CodeAnalysis; -namespace 
SmiServices.Applications.ExtractImages +namespace SmiServices.Applications.ExtractImages; + +public static class ExtractImages { - public static class ExtractImages + [ExcludeFromCodeCoverage] + public static int Main(IEnumerable args) { - [ExcludeFromCodeCoverage] - public static int Main(IEnumerable args) - { - int ret = SmiCliInit - .ParseAndRun( - args, - nameof(ExtractImages), - OnParse - ); - return ret; - } + int ret = SmiCliInit + .ParseAndRun( + args, + nameof(ExtractImages), + OnParse + ); + return ret; + } - private static int OnParse(GlobalOptions globals, ExtractImagesCliOptions parsedOptions) - { - var bootstrapper = - new MicroserviceHostBootstrapper(() => new ExtractImagesHost(globals, parsedOptions)); - int ret = bootstrapper.Main(); - return ret; - } + private static int OnParse(GlobalOptions globals, ExtractImagesCliOptions parsedOptions) + { + var bootstrapper = + new MicroserviceHostBootstrapper(() => new ExtractImagesHost(globals, parsedOptions)); + int ret = bootstrapper.Main(); + return ret; } } diff --git a/src/SmiServices/Applications/ExtractImages/ExtractImagesCliOptions.cs b/src/SmiServices/Applications/ExtractImages/ExtractImagesCliOptions.cs index 1db2d78f8..cbbb05648 100644 --- a/src/SmiServices/Applications/ExtractImages/ExtractImagesCliOptions.cs +++ b/src/SmiServices/Applications/ExtractImages/ExtractImagesCliOptions.cs @@ -6,76 +6,75 @@ using System.Text; -namespace SmiServices.Applications.ExtractImages +namespace SmiServices.Applications.ExtractImages; + +public class ExtractImagesCliOptions : CliOptions { - public class ExtractImagesCliOptions : CliOptions - { - // Required + // Required - [Option(shortName: 'p', longName: "project-id", Required = true, HelpText = "The project identifier")] - public string ProjectId { get; set; } = null!; + [Option(shortName: 'p', longName: "project-id", Required = true, HelpText = "The project identifier")] + public string ProjectId { get; set; } = null!; - [Option(shortName: 'c', longName: 
"cohort-csv-file", Required = true, - HelpText = "The CSV file containing IDs of the cohort for extraction")] - public string CohortCsvFile { get; set; } = null!; + [Option(shortName: 'c', longName: "cohort-csv-file", Required = true, + HelpText = "The CSV file containing IDs of the cohort for extraction")] + public string CohortCsvFile { get; set; } = null!; - [Option(shortName: 'm', longName: "modality", Required = true, - HelpText = "The modality to extract. Any non-matching IDs from the input list are ignored")] - public string Modality { get; set; } = null!; + [Option(shortName: 'm', longName: "modality", Required = true, + HelpText = "The modality to extract. Any non-matching IDs from the input list are ignored")] + public string Modality { get; set; } = null!; - // Optional + // Optional - [Option(shortName: 'i', longName: "identifiable-extraction", Required = false, - HelpText = "Extract without performing anonymisation")] - public bool IsIdentifiableExtraction { get; set; } + [Option(shortName: 'i', longName: "identifiable-extraction", Required = false, + HelpText = "Extract without performing anonymisation")] + public bool IsIdentifiableExtraction { get; set; } - [Option(shortName: 'f', longName: "no-filters", Required = false, - HelpText = "Extract without applying any rejection filters")] - public bool IsNoFiltersExtraction { get; set; } + [Option(shortName: 'f', longName: "no-filters", Required = false, + HelpText = "Extract without applying any rejection filters")] + public bool IsNoFiltersExtraction { get; set; } - [Option(shortName: 'n', longName: "non-interactive", Required = false, - HelpText = "Don't pause for manual confirmation before sending messages")] - public bool NonInteractive { get; set; } + [Option(shortName: 'n', longName: "non-interactive", Required = false, + HelpText = "Don't pause for manual confirmation before sending messages")] + public bool NonInteractive { get; set; } - [Option(shortName: 'u', longName: "pooled-extraction", 
Required = false, Default = false, - HelpText = "True to use the global file pool for this extraction")] - public bool IsPooledExtraction { get; set; } + [Option(shortName: 'u', longName: "pooled-extraction", Required = false, Default = false, + HelpText = "True to use the global file pool for this extraction")] + public bool IsPooledExtraction { get; set; } - [Usage] - [ExcludeFromCodeCoverage] - public static IEnumerable Examples + [Usage] + [ExcludeFromCodeCoverage] + public static IEnumerable Examples + { + get { - get - { - yield return new Example(helpText: "Normal Scenario", - new ExtractImagesCliOptions { CohortCsvFile = "my.csv", ProjectId = "1234-5678" }); - yield return new Example(helpText: "Extract CTs without anonymisation", - new ExtractImagesCliOptions - { - CohortCsvFile = "my.csv", - ProjectId = "1234-5678", - Modality = "CT", - IsIdentifiableExtraction = true - }); - yield return new Example(helpText: "Extract without applying any rejection filters", - new ExtractImagesCliOptions - { CohortCsvFile = "my.csv", ProjectId = "1234-5678", IsNoFiltersExtraction = true }); - } + yield return new Example(helpText: "Normal Scenario", + new ExtractImagesCliOptions { CohortCsvFile = "my.csv", ProjectId = "1234-5678" }); + yield return new Example(helpText: "Extract CTs without anonymisation", + new ExtractImagesCliOptions + { + CohortCsvFile = "my.csv", + ProjectId = "1234-5678", + Modality = "CT", + IsIdentifiableExtraction = true + }); + yield return new Example(helpText: "Extract without applying any rejection filters", + new ExtractImagesCliOptions + { CohortCsvFile = "my.csv", ProjectId = "1234-5678", IsNoFiltersExtraction = true }); } + } - [ExcludeFromCodeCoverage] - public override string ToString() - { - var sb = new StringBuilder(); - sb.Append(base.ToString()); - sb.Append($"ProjectId={ProjectId},"); - sb.Append($"CohortCsvFile={CohortCsvFile},"); - sb.Append($"Modality={Modality},"); - 
sb.Append($"IdentifiableExtraction={IsIdentifiableExtraction},"); - sb.Append($"NoFiltersExtraction={IsNoFiltersExtraction},"); - sb.Append($"NonInteractive={NonInteractive},"); - return sb.ToString(); - } + [ExcludeFromCodeCoverage] + public override string ToString() + { + var sb = new StringBuilder(); + sb.Append(base.ToString()); + sb.Append($"ProjectId={ProjectId},"); + sb.Append($"CohortCsvFile={CohortCsvFile},"); + sb.Append($"Modality={Modality},"); + sb.Append($"IdentifiableExtraction={IsIdentifiableExtraction},"); + sb.Append($"NoFiltersExtraction={IsNoFiltersExtraction},"); + sb.Append($"NonInteractive={NonInteractive},"); + return sb.ToString(); } } diff --git a/src/SmiServices/Applications/ExtractImages/ExtractImagesHost.cs b/src/SmiServices/Applications/ExtractImages/ExtractImagesHost.cs index 82f90ac2d..fd5062813 100644 --- a/src/SmiServices/Applications/ExtractImages/ExtractImagesHost.cs +++ b/src/SmiServices/Applications/ExtractImages/ExtractImagesHost.cs @@ -11,103 +11,102 @@ using System.Linq; -namespace SmiServices.Applications.ExtractImages +namespace SmiServices.Applications.ExtractImages; + +public class ExtractImagesHost : MicroserviceHost { - public class ExtractImagesHost : MicroserviceHost - { - private readonly IFileSystem _fileSystem; + private readonly IFileSystem _fileSystem; + + private readonly string _csvFilePath; - private readonly string _csvFilePath; + private readonly IExtractionMessageSender _extractionMessageSender; - private readonly IExtractionMessageSender _extractionMessageSender; + private readonly string _absoluteExtractionDir; - private readonly string _absoluteExtractionDir; + private readonly ExtractionKey[]? _allowedKeys; + + public ExtractImagesHost( + GlobalOptions globals, + ExtractImagesCliOptions cliOptions, + IExtractionMessageSender? extractionMessageSender = null, + IMessageBroker? messageBroker = null, + IFileSystem? 
fileSystem = null, + bool threaded = false + ) + : base( + globals, + messageBroker) + { + ExtractImagesOptions? options = Globals.ExtractImagesOptions ?? throw new ArgumentException(nameof(Globals.ExtractImagesOptions)); + _allowedKeys = options.AllowedExtractionKeys; - private readonly ExtractionKey[]? _allowedKeys; + _fileSystem = fileSystem ?? new FileSystem(); - public ExtractImagesHost( - GlobalOptions globals, - ExtractImagesCliOptions cliOptions, - IExtractionMessageSender? extractionMessageSender = null, - IMessageBroker? messageBroker = null, - IFileSystem? fileSystem = null, - bool threaded = false - ) - : base( - globals, - messageBroker) + string extractRoot = Globals.FileSystemOptions?.ExtractRoot ?? throw new ArgumentException("Some part of Globals.FileSystemOptions.ExtractRoot was null"); + if (!_fileSystem.Directory.Exists(extractRoot)) + throw new DirectoryNotFoundException($"Could not find the extraction root '{extractRoot}'"); + + if (cliOptions.IsPooledExtraction) { - ExtractImagesOptions? options = Globals.ExtractImagesOptions ?? throw new ArgumentException(nameof(Globals.ExtractImagesOptions)); - _allowedKeys = options.AllowedExtractionKeys; - - _fileSystem = fileSystem ?? new FileSystem(); - - string extractRoot = Globals.FileSystemOptions?.ExtractRoot ?? 
throw new ArgumentException("Some part of Globals.FileSystemOptions.ExtractRoot was null"); - if (!_fileSystem.Directory.Exists(extractRoot)) - throw new DirectoryNotFoundException($"Could not find the extraction root '{extractRoot}'"); - - if (cliOptions.IsPooledExtraction) - { - if (!_fileSystem.Directory.Exists(Globals.FileSystemOptions.ExtractionPoolRoot)) - throw new InvalidOperationException($"{nameof(cliOptions.IsPooledExtraction)} can only be passed if {nameof(Globals.FileSystemOptions.ExtractionPoolRoot)} is a directory"); - - if (cliOptions.IsIdentifiableExtraction) - throw new InvalidOperationException($"{nameof(cliOptions.IsPooledExtraction)} is incompatible with {nameof(cliOptions.IsIdentifiableExtraction)}"); - - if (cliOptions.IsNoFiltersExtraction) - throw new InvalidOperationException($"{nameof(cliOptions.IsPooledExtraction)} is incompatible with {nameof(cliOptions.IsNoFiltersExtraction)}"); - } - - _csvFilePath = cliOptions.CohortCsvFile; - if (string.IsNullOrWhiteSpace(_csvFilePath)) - throw new ArgumentNullException(nameof(cliOptions.CohortCsvFile)); - if (!_fileSystem.File.Exists(_csvFilePath)) - throw new FileNotFoundException($"Could not find the cohort CSV file '{_csvFilePath}'"); - - // TODO(rkm 2021-04-01) Now that all the extraction path code is in C#, we would benefit from refactoring it all out - // to a helper class to support having multiple configurations (and probably prevent some bugs) - string extractionName = _fileSystem.Path.GetFileNameWithoutExtension(_csvFilePath); - string extractionDir = _fileSystem.Path.Join(cliOptions.ProjectId, "extractions", extractionName); - _absoluteExtractionDir = _fileSystem.Path.Join(extractRoot, extractionDir); - - if (_fileSystem.Directory.Exists(_absoluteExtractionDir)) - throw new DirectoryNotFoundException($"Extraction directory already exists '{_absoluteExtractionDir}'"); - - if (extractionMessageSender == null) - { - IProducerModel extractionRequestProducer = 
MessageBroker.SetupProducer(options.ExtractionRequestProducerOptions!, isBatch: false); - IProducerModel extractionRequestInfoProducer = MessageBroker.SetupProducer(options.ExtractionRequestInfoProducerOptions!, isBatch: false); - - _extractionMessageSender = new ExtractionMessageSender( - options, - cliOptions, - extractionRequestProducer, - extractionRequestInfoProducer, - _fileSystem, - extractRoot, - extractionDir, - new DateTimeProvider(), - new RealConsoleInput() - ); - } - else - { - Logger.Warn($"{nameof(Globals.ExtractImagesOptions.MaxIdentifiersPerMessage)} will be ignored here"); - _extractionMessageSender = extractionMessageSender; - } + if (!_fileSystem.Directory.Exists(Globals.FileSystemOptions.ExtractionPoolRoot)) + throw new InvalidOperationException($"{nameof(cliOptions.IsPooledExtraction)} can only be passed if {nameof(Globals.FileSystemOptions.ExtractionPoolRoot)} is a directory"); + + if (cliOptions.IsIdentifiableExtraction) + throw new InvalidOperationException($"{nameof(cliOptions.IsPooledExtraction)} is incompatible with {nameof(cliOptions.IsIdentifiableExtraction)}"); + + if (cliOptions.IsNoFiltersExtraction) + throw new InvalidOperationException($"{nameof(cliOptions.IsPooledExtraction)} is incompatible with {nameof(cliOptions.IsNoFiltersExtraction)}"); } - public override void Start() - { - var parser = new CohortCsvParser(_fileSystem); - (ExtractionKey extractionKey, List idList) = parser.Parse(_csvFilePath); + _csvFilePath = cliOptions.CohortCsvFile; + if (string.IsNullOrWhiteSpace(_csvFilePath)) + throw new ArgumentNullException(nameof(cliOptions.CohortCsvFile)); + if (!_fileSystem.File.Exists(_csvFilePath)) + throw new FileNotFoundException($"Could not find the cohort CSV file '{_csvFilePath}'"); - if (_allowedKeys?.Contains(extractionKey) == false) - throw new InvalidOperationException($"'{extractionKey}' from CSV not in list of supported extraction keys ({string.Join(',', _allowedKeys)})"); + // TODO(rkm 2021-04-01) Now that all the 
extraction path code is in C#, we would benefit from refactoring it all out + // to a helper class to support having multiple configurations (and probably prevent some bugs) + string extractionName = _fileSystem.Path.GetFileNameWithoutExtension(_csvFilePath); + string extractionDir = _fileSystem.Path.Join(cliOptions.ProjectId, "extractions", extractionName); + _absoluteExtractionDir = _fileSystem.Path.Join(extractRoot, extractionDir); - _extractionMessageSender.SendMessages(extractionKey, idList); + if (_fileSystem.Directory.Exists(_absoluteExtractionDir)) + throw new DirectoryNotFoundException($"Extraction directory already exists '{_absoluteExtractionDir}'"); - Stop("Completed"); + if (extractionMessageSender == null) + { + IProducerModel extractionRequestProducer = MessageBroker.SetupProducer(options.ExtractionRequestProducerOptions!, isBatch: false); + IProducerModel extractionRequestInfoProducer = MessageBroker.SetupProducer(options.ExtractionRequestInfoProducerOptions!, isBatch: false); + + _extractionMessageSender = new ExtractionMessageSender( + options, + cliOptions, + extractionRequestProducer, + extractionRequestInfoProducer, + _fileSystem, + extractRoot, + extractionDir, + new DateTimeProvider(), + new RealConsoleInput() + ); + } + else + { + Logger.Warn($"{nameof(Globals.ExtractImagesOptions.MaxIdentifiersPerMessage)} will be ignored here"); + _extractionMessageSender = extractionMessageSender; } } + + public override void Start() + { + var parser = new CohortCsvParser(_fileSystem); + (ExtractionKey extractionKey, List idList) = parser.Parse(_csvFilePath); + + if (_allowedKeys?.Contains(extractionKey) == false) + throw new InvalidOperationException($"'{extractionKey}' from CSV not in list of supported extraction keys ({string.Join(',', _allowedKeys)})"); + + _extractionMessageSender.SendMessages(extractionKey, idList); + + Stop("Completed"); + } } diff --git a/src/SmiServices/Applications/ExtractImages/ExtractionMessageSender.cs 
b/src/SmiServices/Applications/ExtractImages/ExtractionMessageSender.cs index 49b654a46..eaca477b3 100644 --- a/src/SmiServices/Applications/ExtractImages/ExtractionMessageSender.cs +++ b/src/SmiServices/Applications/ExtractImages/ExtractionMessageSender.cs @@ -9,189 +9,188 @@ using System.Linq; using System.Text; -namespace SmiServices.Applications.ExtractImages +namespace SmiServices.Applications.ExtractImages; + +public class ExtractionMessageSender : IExtractionMessageSender { - public class ExtractionMessageSender : IExtractionMessageSender + private readonly ILogger _logger = LogManager.GetCurrentClassLogger(); + + private readonly IProducerModel _extractionRequestProducer; + private readonly IProducerModel _extractionRequestInfoProducer; + private readonly IFileSystem _fileSystem; + private readonly string _extractionRoot; + private readonly string _extractionDir; + + private readonly DateTimeProvider _dateTimeProvider; + private readonly IConsoleInput _consoleInput; + + private readonly int _maxIdentifiersPerMessage; + + private readonly string _projectId; + private readonly string _modality; + private readonly bool _isIdentifiableExtraction; + private readonly bool _isNoFiltersExtraction; + private readonly bool _isPooledExtraction; + private readonly bool _nonInteractive; + + + public ExtractionMessageSender( + ExtractImagesOptions options, + ExtractImagesCliOptions cliOptions, + IProducerModel extractionRequestProducer, + IProducerModel extractionRequestInfoProducer, + IFileSystem fileSystem, + string extractionRoot, + string extractionDir, + DateTimeProvider dateTimeProvider, + IConsoleInput consoleInput + ) { - private readonly ILogger _logger = LogManager.GetCurrentClassLogger(); - - private readonly IProducerModel _extractionRequestProducer; - private readonly IProducerModel _extractionRequestInfoProducer; - private readonly IFileSystem _fileSystem; - private readonly string _extractionRoot; - private readonly string _extractionDir; - - private 
readonly DateTimeProvider _dateTimeProvider; - private readonly IConsoleInput _consoleInput; - - private readonly int _maxIdentifiersPerMessage; - - private readonly string _projectId; - private readonly string _modality; - private readonly bool _isIdentifiableExtraction; - private readonly bool _isNoFiltersExtraction; - private readonly bool _isPooledExtraction; - private readonly bool _nonInteractive; - - - public ExtractionMessageSender( - ExtractImagesOptions options, - ExtractImagesCliOptions cliOptions, - IProducerModel extractionRequestProducer, - IProducerModel extractionRequestInfoProducer, - IFileSystem fileSystem, - string extractionRoot, - string extractionDir, - DateTimeProvider dateTimeProvider, - IConsoleInput consoleInput - ) - { - _extractionRequestProducer = extractionRequestProducer; - _extractionRequestInfoProducer = extractionRequestInfoProducer; - - _fileSystem = fileSystem; - _extractionRoot = (!string.IsNullOrWhiteSpace(extractionRoot)) ? extractionRoot : throw new ArgumentOutOfRangeException(nameof(extractionRoot)); - _extractionDir = (!string.IsNullOrWhiteSpace(extractionDir)) ? extractionDir : throw new ArgumentOutOfRangeException(nameof(extractionDir)); - _dateTimeProvider = dateTimeProvider; - _consoleInput = consoleInput; - - _maxIdentifiersPerMessage = options.MaxIdentifiersPerMessage; - if (_maxIdentifiersPerMessage <= 0) - throw new ArgumentOutOfRangeException(nameof(options)); - - _projectId = (!string.IsNullOrWhiteSpace(cliOptions.ProjectId)) ? cliOptions.ProjectId : throw new ArgumentOutOfRangeException(nameof(cliOptions)); - _modality = (!string.IsNullOrWhiteSpace(cliOptions.Modality)) ? 
cliOptions.Modality : throw new ArgumentOutOfRangeException(nameof(cliOptions)); - _isIdentifiableExtraction = cliOptions.IsIdentifiableExtraction; - _isNoFiltersExtraction = cliOptions.IsNoFiltersExtraction; - _isPooledExtraction = cliOptions.IsPooledExtraction; - _nonInteractive = cliOptions.NonInteractive; - } + _extractionRequestProducer = extractionRequestProducer; + _extractionRequestInfoProducer = extractionRequestInfoProducer; + + _fileSystem = fileSystem; + _extractionRoot = (!string.IsNullOrWhiteSpace(extractionRoot)) ? extractionRoot : throw new ArgumentOutOfRangeException(nameof(extractionRoot)); + _extractionDir = (!string.IsNullOrWhiteSpace(extractionDir)) ? extractionDir : throw new ArgumentOutOfRangeException(nameof(extractionDir)); + _dateTimeProvider = dateTimeProvider; + _consoleInput = consoleInput; + + _maxIdentifiersPerMessage = options.MaxIdentifiersPerMessage; + if (_maxIdentifiersPerMessage <= 0) + throw new ArgumentOutOfRangeException(nameof(options)); + + _projectId = (!string.IsNullOrWhiteSpace(cliOptions.ProjectId)) ? cliOptions.ProjectId : throw new ArgumentOutOfRangeException(nameof(cliOptions)); + _modality = (!string.IsNullOrWhiteSpace(cliOptions.Modality)) ? 
cliOptions.Modality : throw new ArgumentOutOfRangeException(nameof(cliOptions)); + _isIdentifiableExtraction = cliOptions.IsIdentifiableExtraction; + _isNoFiltersExtraction = cliOptions.IsNoFiltersExtraction; + _isPooledExtraction = cliOptions.IsPooledExtraction; + _nonInteractive = cliOptions.NonInteractive; + } - public void SendMessages(ExtractionKey extractionKey, List idList) - { - if (idList.Count == 0) - throw new ArgumentException("ID list is empty"); + public void SendMessages(ExtractionKey extractionKey, List idList) + { + if (idList.Count == 0) + throw new ArgumentException("ID list is empty"); - var jobId = Guid.NewGuid(); - DateTime now = _dateTimeProvider.UtcNow(); + var jobId = Guid.NewGuid(); + DateTime now = _dateTimeProvider.UtcNow(); - string userName = Environment.UserName; + string userName = Environment.UserName; - var erm = new ExtractionRequestMessage - { - ExtractionJobIdentifier = jobId, - ProjectNumber = _projectId, - ExtractionDirectory = _extractionDir, - JobSubmittedAt = now, - IsIdentifiableExtraction = _isIdentifiableExtraction, - IsNoFilterExtraction = _isNoFiltersExtraction, - IsPooledExtraction = _isPooledExtraction, - - // TODO(rkm 2021-04-01) Change this to an ExtractionKey type - KeyTag = extractionKey.ToString(), - - Modality = _modality, - - // NOTE(rkm 2021-04-01) Set below - ExtractionIdentifiers = null!, - }; - - List ermList = - idList - .Chunk(_maxIdentifiersPerMessage) - .Select(x => - new ExtractionRequestMessage(erm) - { - ExtractionIdentifiers = [.. 
x] - } - ).ToList(); - - var erim = new ExtractionRequestInfoMessage + var erm = new ExtractionRequestMessage + { + ExtractionJobIdentifier = jobId, + ProjectNumber = _projectId, + ExtractionDirectory = _extractionDir, + JobSubmittedAt = now, + IsIdentifiableExtraction = _isIdentifiableExtraction, + IsNoFilterExtraction = _isNoFiltersExtraction, + IsPooledExtraction = _isPooledExtraction, + + // TODO(rkm 2021-04-01) Change this to an ExtractionKey type + KeyTag = extractionKey.ToString(), + + Modality = _modality, + + // NOTE(rkm 2021-04-01) Set below + ExtractionIdentifiers = null!, + }; + + List ermList = + idList + .Chunk(_maxIdentifiersPerMessage) + .Select(x => + new ExtractionRequestMessage(erm) + { + ExtractionIdentifiers = [.. x] + } + ).ToList(); + + var erim = new ExtractionRequestInfoMessage + { + ExtractionJobIdentifier = jobId, + ProjectNumber = _projectId, + ExtractionDirectory = _extractionDir, + Modality = _modality, + JobSubmittedAt = now, + IsIdentifiableExtraction = _isIdentifiableExtraction, + IsNoFilterExtraction = _isNoFiltersExtraction, + IsPooledExtraction = _isPooledExtraction, + + KeyTag = extractionKey.ToString(), + KeyValueCount = idList.Count, + UserName = userName, + }; + + if (_nonInteractive) + { + LaunchExtraction(jobId, ermList, erim); + } + else + { + var sb = new StringBuilder(); + sb.AppendLine(); + sb.AppendLine($"ExtractionJobIdentifier: {jobId}"); + sb.AppendLine($"Submitted: {now:u}"); + sb.AppendLine($"ProjectNumber: {_projectId}"); + sb.AppendLine($"ExtractionDirectory: {_extractionDir}"); + sb.AppendLine($"Modality: {_modality}"); + sb.AppendLine($"ExtractionKey: {extractionKey}"); + sb.AppendLine($"IsIdentifiableExtraction: {_isIdentifiableExtraction}"); + sb.AppendLine($"IsNoFilterExtraction: {_isNoFiltersExtraction}"); + sb.AppendLine($"IsPooledExtraction: {_isPooledExtraction}"); + sb.AppendLine($"UserName: {userName}"); + sb.AppendLine($"KeyValueCount: {idList.Count}"); + sb.AppendLine($"ExtractionRequestMessage 
count: {ermList.Count}"); + _logger.Info(sb.ToString()); + LogManager.Flush(); + Console.WriteLine("Confirm you want to start an extract job with the above information"); + + string? key; + do { - ExtractionJobIdentifier = jobId, - ProjectNumber = _projectId, - ExtractionDirectory = _extractionDir, - Modality = _modality, - JobSubmittedAt = now, - IsIdentifiableExtraction = _isIdentifiableExtraction, - IsNoFilterExtraction = _isNoFiltersExtraction, - IsPooledExtraction = _isPooledExtraction, - - KeyTag = extractionKey.ToString(), - KeyValueCount = idList.Count, - UserName = userName, - }; - - if (_nonInteractive) + Console.Write("[y/n]: "); + key = _consoleInput.GetNextLine()?.ToLower(); + } while (key != "y" && key != "n"); + + if (key == "y") { LaunchExtraction(jobId, ermList, erim); } else { - var sb = new StringBuilder(); - sb.AppendLine(); - sb.AppendLine($"ExtractionJobIdentifier: {jobId}"); - sb.AppendLine($"Submitted: {now:u}"); - sb.AppendLine($"ProjectNumber: {_projectId}"); - sb.AppendLine($"ExtractionDirectory: {_extractionDir}"); - sb.AppendLine($"Modality: {_modality}"); - sb.AppendLine($"ExtractionKey: {extractionKey}"); - sb.AppendLine($"IsIdentifiableExtraction: {_isIdentifiableExtraction}"); - sb.AppendLine($"IsNoFilterExtraction: {_isNoFiltersExtraction}"); - sb.AppendLine($"IsPooledExtraction: {_isPooledExtraction}"); - sb.AppendLine($"UserName: {userName}"); - sb.AppendLine($"KeyValueCount: {idList.Count}"); - sb.AppendLine($"ExtractionRequestMessage count: {ermList.Count}"); - _logger.Info(sb.ToString()); - LogManager.Flush(); - Console.WriteLine("Confirm you want to start an extract job with the above information"); - - string? 
key; - do - { - Console.Write("[y/n]: "); - key = _consoleInput.GetNextLine()?.ToLower(); - } while (key != "y" && key != "n"); - - if (key == "y") - { - LaunchExtraction(jobId, ermList, erim); - } - else - { - _logger.Info("Operation cancelled by user"); - } + _logger.Info("Operation cancelled by user"); } } + } - private void LaunchExtraction(Guid jobId, IEnumerable ermList, ExtractionRequestInfoMessage erim) - { - InitialiseExtractionDir(jobId); - SendMessagesImpl(ermList, erim); - } + private void LaunchExtraction(Guid jobId, IEnumerable ermList, ExtractionRequestInfoMessage erim) + { + InitialiseExtractionDir(jobId); + SendMessagesImpl(ermList, erim); + } - private void InitialiseExtractionDir(Guid jobId) - { - var absoluteExtractionDir = _fileSystem.Path.Combine(_extractionRoot, _extractionDir); - _fileSystem.Directory.CreateDirectory(absoluteExtractionDir); + private void InitialiseExtractionDir(Guid jobId) + { + var absoluteExtractionDir = _fileSystem.Path.Combine(_extractionRoot, _extractionDir); + _fileSystem.Directory.CreateDirectory(absoluteExtractionDir); - // Write the jobId to a file in the extraction dir to help identify the set of files if they are moved - string jobIdFile = _fileSystem.Path.Combine(_extractionRoot, _extractionDir, "jobId.txt"); - _fileSystem.File.WriteAllText(jobIdFile, $"{jobId}\n"); + // Write the jobId to a file in the extraction dir to help identify the set of files if they are moved + string jobIdFile = _fileSystem.Path.Combine(_extractionRoot, _extractionDir, "jobId.txt"); + _fileSystem.File.WriteAllText(jobIdFile, $"{jobId}\n"); - _logger.Info("Created extraction directory and jobId file"); - } + _logger.Info("Created extraction directory and jobId file"); + } - private void SendMessagesImpl(IEnumerable ermList, ExtractionRequestInfoMessage erim) - { - _logger.Info("Sending messages"); + private void SendMessagesImpl(IEnumerable ermList, ExtractionRequestInfoMessage erim) + { + _logger.Info("Sending messages"); - foreach 
(var msg in ermList) - _extractionRequestProducer.SendMessage(msg, isInResponseTo: null, routingKey: null); + foreach (var msg in ermList) + _extractionRequestProducer.SendMessage(msg, isInResponseTo: null, routingKey: null); - _extractionRequestInfoProducer.SendMessage(erim, isInResponseTo: null, routingKey: null); + _extractionRequestInfoProducer.SendMessage(erim, isInResponseTo: null, routingKey: null); - _logger.Info("All messages sent"); - } + _logger.Info("All messages sent"); } } diff --git a/src/SmiServices/Applications/ExtractImages/IExtractionMessageSender.cs b/src/SmiServices/Applications/ExtractImages/IExtractionMessageSender.cs index fc9325e3a..bd03d57ee 100644 --- a/src/SmiServices/Applications/ExtractImages/IExtractionMessageSender.cs +++ b/src/SmiServices/Applications/ExtractImages/IExtractionMessageSender.cs @@ -2,10 +2,9 @@ using System.Collections.Generic; -namespace SmiServices.Applications.ExtractImages +namespace SmiServices.Applications.ExtractImages; + +public interface IExtractionMessageSender { - public interface IExtractionMessageSender - { - void SendMessages(ExtractionKey extractionKey, List idList); - } + void SendMessages(ExtractionKey extractionKey, List idList); } diff --git a/src/SmiServices/Applications/Setup/EnvironmentProbe.cs b/src/SmiServices/Applications/Setup/EnvironmentProbe.cs index 68d8dca4a..81b3fb072 100644 --- a/src/SmiServices/Applications/Setup/EnvironmentProbe.cs +++ b/src/SmiServices/Applications/Setup/EnvironmentProbe.cs @@ -23,268 +23,267 @@ using System.Linq; using System.Text; -namespace SmiServices.Applications.Setup -{ +namespace SmiServices.Applications.Setup; - public class Probeable - { - public string Name { get; } - public Func Run { get; } - public string Category { get; } - public CheckEventArgs? 
Result { get; set; } - public Probeable(string name, Func run, string category) - { - Name = name; - this.Run = run; - Category = category; - } - } +public class Probeable +{ + public string Name { get; } + public Func Run { get; } + public string Category { get; } + public CheckEventArgs? Result { get; set; } - internal class EnvironmentProbe + public Probeable(string name, Func run, string category) { - public CheckEventArgs DeserializeYaml { get; } - public GlobalOptions? Options { get; } + Name = name; + this.Run = run; + Category = category; + } +} + +internal class EnvironmentProbe +{ + public CheckEventArgs DeserializeYaml { get; } + public GlobalOptions? Options { get; } - public const string CheckInfrastructureTaskName = "Checking Infrastructure"; - public const string CheckMicroservicesTaskName = "Checking Microservices"; + public const string CheckInfrastructureTaskName = "Checking Infrastructure"; + public const string CheckMicroservicesTaskName = "Checking Microservices"; - public const string Infrastructure = "Infrastructure"; - public const string Microservices = "Microservices"; + public const string Infrastructure = "Infrastructure"; + public const string Microservices = "Microservices"; - public Dictionary Probes = []; + public Dictionary Probes = []; - internal int GetExitCode() + internal int GetExitCode() + { + // get all things we can check + foreach (var prop in typeof(EnvironmentProbe).GetProperties()) { - // get all things we can check - foreach (var prop in typeof(EnvironmentProbe).GetProperties()) - { - var val = prop.GetValue(this); + var val = prop.GetValue(this); - // did any checks run - if (val is CheckEventArgs cea) + // did any checks run + if (val is CheckEventArgs cea) + { + // that failed + if (cea.Result == CheckResult.Fail) { - // that failed - if (cea.Result == CheckResult.Fail) - { - // something failed so exit code is failure (non zero) - return 100; - } + // something failed so exit code is failure (non zero) + return 100; 
} } - - return 0; } - public EnvironmentProbe(string? yamlFile) - { - Probes = []; - Add(Infrastructure, "RabbitMq", ProbeRabbitMq); - Add(Infrastructure, "MongoDb", ProbeMongoDb); - Add(Infrastructure, "Rdmp", ProbeRdmp); + return 0; + } + + public EnvironmentProbe(string? yamlFile) + { + Probes = []; + Add(Infrastructure, "RabbitMq", ProbeRabbitMq); + Add(Infrastructure, "MongoDb", ProbeMongoDb); + Add(Infrastructure, "Rdmp", ProbeRdmp); - Add(Microservices, "CohortExtractor", () => Probe(nameof(CohortExtractorHost), (o) => new CohortExtractorHost(o, null, null))); - Add(Microservices, "DicomAnonymiser", () => Probe(nameof(DicomAnonymiserHost), (o) => new DicomAnonymiserHost(o))); - Add(Microservices, "IsIdentifiable", () => Probe(nameof(IsIdentifiableHost), (o) => new IsIdentifiableHost(o))); - Add(Microservices, "CohortPackager", () => Probe(nameof(CohortPackagerHost), (o) => new CohortPackagerHost(o))); - Add(Microservices, "DicomRelationalMapper", () => Probe(nameof(DicomRelationalMapperHost), (o) => new DicomRelationalMapperHost(o))); - Add(Microservices, "IdentifierMapper", () => Probe(nameof(IdentifierMapperHost), (o) => new IdentifierMapperHost(o))); - Add(Microservices, "MongoDbPopulator", () => Probe(nameof(MongoDbPopulatorHost), (o) => new MongoDbPopulatorHost(o))); - Add(Microservices, "DicomTagReader", () => Probe(nameof(DicomTagReaderHost), (o) => new DicomTagReaderHost(o))); + Add(Microservices, "CohortExtractor", () => Probe(nameof(CohortExtractorHost), (o) => new CohortExtractorHost(o, null, null))); + Add(Microservices, "DicomAnonymiser", () => Probe(nameof(DicomAnonymiserHost), (o) => new DicomAnonymiserHost(o))); + Add(Microservices, "IsIdentifiable", () => Probe(nameof(IsIdentifiableHost), (o) => new IsIdentifiableHost(o))); + Add(Microservices, "CohortPackager", () => Probe(nameof(CohortPackagerHost), (o) => new CohortPackagerHost(o))); + Add(Microservices, "DicomRelationalMapper", () => Probe(nameof(DicomRelationalMapperHost), (o) => new 
DicomRelationalMapperHost(o))); + Add(Microservices, "IdentifierMapper", () => Probe(nameof(IdentifierMapperHost), (o) => new IdentifierMapperHost(o))); + Add(Microservices, "MongoDbPopulator", () => Probe(nameof(MongoDbPopulatorHost), (o) => new MongoDbPopulatorHost(o))); + Add(Microservices, "DicomTagReader", () => Probe(nameof(DicomTagReaderHost), (o) => new DicomTagReaderHost(o))); - /* - { - get + /* +{ + get DicomTagReader { - get; - MongoDbPopulator { + get; + MongoDbPopulator { + ge + IdentifierMapper { ge - IdentifierMapper { - ge - DicomRelationalMapper - DicomAnonymiser { - get - IsIdentifiable { + DicomRelationalMapper + DicomAnonymiser { + get + IsIdentifiable { + get; + CohortPackager { get; - CohortPackager { - get; - */ - try - { - if (string.IsNullOrWhiteSpace(yamlFile)) - throw new Exception("You have not yet entered a path for yaml file"); + */ + try + { + if (string.IsNullOrWhiteSpace(yamlFile)) + throw new Exception("You have not yet entered a path for yaml file"); - Options = new GlobalOptionsFactory().Load("Setup", yamlFile); - DeserializeYaml = new CheckEventArgs("Deserialized Yaml File", CheckResult.Success); - } - catch (Exception ex) - { - DeserializeYaml = new CheckEventArgs("Failed to Deserialize Yaml File", CheckResult.Fail, ex); - } + Options = new GlobalOptionsFactory().Load("Setup", yamlFile); + DeserializeYaml = new CheckEventArgs("Deserialized Yaml File", CheckResult.Success); } - - private void Add(string category, string name, Func probeMethod) + catch (Exception ex) { - Probes.Add(name, new Probeable(name, probeMethod, category)); + DeserializeYaml = new CheckEventArgs("Failed to Deserialize Yaml File", CheckResult.Fail, ex); } + } - internal void CheckInfrastructure(IDataLoadEventListener? 
listener = null) - { - var probes = Probes.Where(p => p.Value.Category == Infrastructure).ToArray(); + private void Add(string category, string name, Func probeMethod) + { + Probes.Add(name, new Probeable(name, probeMethod, category)); + } - var sw = Stopwatch.StartNew(); - var max = probes.Length; - var current = 0; - var task = CheckInfrastructureTaskName; + internal void CheckInfrastructure(IDataLoadEventListener? listener = null) + { + var probes = Probes.Where(p => p.Value.Category == Infrastructure).ToArray(); - listener?.OnProgress(this, new ProgressEventArgs(task, new ProgressMeasurement(current, ProgressType.Records, max), sw.Elapsed)); + var sw = Stopwatch.StartNew(); + var max = probes.Length; + var current = 0; + var task = CheckInfrastructureTaskName; - foreach (var p in probes) - { - // clear old result - p.Value.Result = null; - p.Value.Result = p.Value.Run(); + listener?.OnProgress(this, new ProgressEventArgs(task, new ProgressMeasurement(current, ProgressType.Records, max), sw.Elapsed)); - listener?.OnProgress(this, new ProgressEventArgs(task, new ProgressMeasurement(++current, ProgressType.Records, max), sw.Elapsed)); - } - } - internal void CheckMicroservices(IDataLoadEventListener? listener = null) + foreach (var p in probes) { - var probes = Probes.Where(p => p.Value.Category == Microservices).ToArray(); + // clear old result + p.Value.Result = null; + p.Value.Result = p.Value.Run(); - var sw = Stopwatch.StartNew(); - var max = probes.Length; - var current = 0; - var task = CheckMicroservicesTaskName; + listener?.OnProgress(this, new ProgressEventArgs(task, new ProgressMeasurement(++current, ProgressType.Records, max), sw.Elapsed)); + } + } + internal void CheckMicroservices(IDataLoadEventListener? 
listener = null) + { + var probes = Probes.Where(p => p.Value.Category == Microservices).ToArray(); - listener?.OnProgress(this, new ProgressEventArgs(task, new ProgressMeasurement(current, ProgressType.Records, max), sw.Elapsed)); + var sw = Stopwatch.StartNew(); + var max = probes.Length; + var current = 0; + var task = CheckMicroservicesTaskName; - foreach (var p in probes) - { - // clear old result - p.Value.Result = null; - p.Value.Result = p.Value.Run(); + listener?.OnProgress(this, new ProgressEventArgs(task, new ProgressMeasurement(current, ProgressType.Records, max), sw.Elapsed)); - listener?.OnProgress(this, new ProgressEventArgs(task, new ProgressMeasurement(++current, ProgressType.Records, max), sw.Elapsed)); - } + foreach (var p in probes) + { + // clear old result + p.Value.Result = null; + p.Value.Result = p.Value.Run(); + + listener?.OnProgress(this, new ProgressEventArgs(task, new ProgressMeasurement(++current, ProgressType.Records, max), sw.Elapsed)); } + } - public CheckEventArgs? ProbeRdmp() + public CheckEventArgs? 
ProbeRdmp() + { + try { - try - { - if (Options == null) - return null; + if (Options == null) + return null; - if (Options.RDMPOptions == null || + if (Options.RDMPOptions == null || - // Must specify either SqlServer or file system backend for RDMP platform metadata - (string.IsNullOrEmpty(Options.RDMPOptions.CatalogueConnectionString) && - string.IsNullOrWhiteSpace(Options.RDMPOptions.YamlDir))) - { - throw new Exception("No RDMP connection settings specified"); - } + // Must specify either SqlServer or file system backend for RDMP platform metadata + (string.IsNullOrEmpty(Options.RDMPOptions.CatalogueConnectionString) && + string.IsNullOrWhiteSpace(Options.RDMPOptions.YamlDir))) + { + throw new Exception("No RDMP connection settings specified"); + } - var provider = Options.RDMPOptions.GetRepositoryProvider(); + var provider = Options.RDMPOptions.GetRepositoryProvider(); - var startup = new Startup(provider); + var startup = new Startup(provider); - var failed = false; - var sb = new StringBuilder(); - var exceptions = new List(); + var failed = false; + var sb = new StringBuilder(); + var exceptions = new List(); - startup.DatabaseFound += (s, e) => - { - failed = !failed && e.Status != RDMPPlatformDatabaseStatus.Healthy || e.Exception != null; - sb.AppendLine($"{e.Patcher.Name} {e.Status}"); + startup.DatabaseFound += (s, e) => + { + failed = !failed && e.Status != RDMPPlatformDatabaseStatus.Healthy || e.Exception != null; + sb.AppendLine($"{e.Patcher.Name} {e.Status}"); - if (e.Exception != null) - { - sb.AppendLine(ExceptionHelper.ExceptionToListOfInnerMessages(e.Exception)); - exceptions.Add(e.Exception); - } - }; + if (e.Exception != null) + { + sb.AppendLine(ExceptionHelper.ExceptionToListOfInnerMessages(e.Exception)); + exceptions.Add(e.Exception); + } + }; - startup.DoStartup(ThrowImmediatelyCheckNotifier.Quiet); + startup.DoStartup(ThrowImmediatelyCheckNotifier.Quiet); - return new CheckEventArgs(sb.ToString(), failed ? 
CheckResult.Fail : CheckResult.Success); - } - catch (Exception ex) - { - return new CheckEventArgs("Failed to connect to RDMP", CheckResult.Fail, ex); - } + return new CheckEventArgs(sb.ToString(), failed ? CheckResult.Fail : CheckResult.Success); } - - public CheckEventArgs? ProbeRabbitMq() + catch (Exception ex) { - if (Options?.RabbitOptions == null) - return null; + return new CheckEventArgs("Failed to connect to RDMP", CheckResult.Fail, ex); + } + } - try - { - var adapter = new RabbitMQBroker(Options.RabbitOptions, "setup"); + public CheckEventArgs? ProbeRabbitMq() + { + if (Options?.RabbitOptions == null) + return null; - return new CheckEventArgs("Connected to RabbitMq", CheckResult.Success); - } - catch (Exception ex) - { - return new CheckEventArgs("Failed to connect to RabbitMq", CheckResult.Fail, ex); - } - } + try + { + var adapter = new RabbitMQBroker(Options.RabbitOptions, "setup"); - public CheckEventArgs? ProbeMongoDb() + return new CheckEventArgs("Connected to RabbitMq", CheckResult.Success); + } + catch (Exception ex) { - if (Options?.MongoDatabases?.DicomStoreOptions == null) - return null; + return new CheckEventArgs("Failed to connect to RabbitMq", CheckResult.Fail, ex); + } + } - try - { - // this opens connection to the server and tests for collection existing - _ = new MongoDbAdapter("Setup", Options.MongoDatabases.DicomStoreOptions, - Options.MongoDbPopulatorOptions?.ImageCollection ?? throw new InvalidOperationException()); + public CheckEventArgs? ProbeMongoDb() + { + if (Options?.MongoDatabases?.DicomStoreOptions == null) + return null; + try + { + // this opens connection to the server and tests for collection existing + _ = new MongoDbAdapter("Setup", Options.MongoDatabases.DicomStoreOptions, + Options.MongoDbPopulatorOptions?.ImageCollection ?? throw new InvalidOperationException()); - var mongoDbOptions = Options.MongoDatabases.ExtractionStoreOptions - ?? 
throw new ArgumentException($"ExtractionStoreOptions was null"); - _ = new MongoExtractJobStore( - MongoClientHelpers.GetMongoClient(mongoDbOptions, "Setup"), - mongoDbOptions.DatabaseName ?? throw new InvalidOperationException(), new DateTimeProvider() - ); + var mongoDbOptions = Options.MongoDatabases.ExtractionStoreOptions + ?? throw new ArgumentException($"ExtractionStoreOptions was null"); - return new CheckEventArgs("MongoDb Checking Succeeded", CheckResult.Success); - } - catch (Exception ex) - { - return new CheckEventArgs("MongoDb Checking Failed", CheckResult.Fail, ex); - } - } + _ = new MongoExtractJobStore( + MongoClientHelpers.GetMongoClient(mongoDbOptions, "Setup"), + mongoDbOptions.DatabaseName ?? throw new InvalidOperationException(), new DateTimeProvider() + ); - private CheckEventArgs? Probe(string probeName, Func hostConstructor) + return new CheckEventArgs("MongoDb Checking Succeeded", CheckResult.Success); + } + catch (Exception ex) { - if (Options == null) - return null; + return new CheckEventArgs("MongoDb Checking Failed", CheckResult.Fail, ex); + } + } - try - { - var host = hostConstructor(Options); + private CheckEventArgs? 
Probe(string probeName, Func hostConstructor) + { + if (Options == null) + return null; - host.StartAuxConnections(); - host.Start(); + try + { + var host = hostConstructor(Options); - host.Stop("Finished Testing"); + host.StartAuxConnections(); + host.Start(); - return new CheckEventArgs($"{probeName} Succeeded", CheckResult.Success); - } - catch (Exception ex) - { - return new CheckEventArgs($"{probeName} Failed", CheckResult.Fail, ex); - } - } + host.Stop("Finished Testing"); + return new CheckEventArgs($"{probeName} Succeeded", CheckResult.Success); + } + catch (Exception ex) + { + return new CheckEventArgs($"{probeName} Failed", CheckResult.Fail, ex); + } } + } diff --git a/src/SmiServices/Applications/Setup/SetupIsolatedStorage.cs b/src/SmiServices/Applications/Setup/SetupIsolatedStorage.cs index 9c2c59ca9..7903530eb 100644 --- a/src/SmiServices/Applications/Setup/SetupIsolatedStorage.cs +++ b/src/SmiServices/Applications/Setup/SetupIsolatedStorage.cs @@ -9,490 +9,489 @@ using System.IO.IsolatedStorage; using System.Linq; -namespace SmiServices.Applications.Setup +namespace SmiServices.Applications.Setup; + +internal class SetupIsolatedStorage { - internal class SetupIsolatedStorage - { - private readonly IsolatedStorageFile store; - private readonly object locker = new(); + private readonly IsolatedStorageFile store; + private readonly object locker = new(); - public SetupIsolatedStorage() + public SetupIsolatedStorage() + { + try { - try - { - store = IsolatedStorageFile.GetUserStoreForApplication(); - } - catch (Exception) - { - store = IsolatedStorageFile.GetUserStoreForAssembly(); - } + store = IsolatedStorageFile.GetUserStoreForApplication(); + } + catch (Exception) + { + store = IsolatedStorageFile.GetUserStoreForAssembly(); } + } - /// - /// Add or Update - /// - /// - /// - /// - /// - private bool AddOrUpdateValueInternal(string key, T value) + /// + /// Add or Update + /// + /// + /// + /// + /// + private bool AddOrUpdateValueInternal(string key, 
T value) + { + if (value == null) { - if (value == null) - { - Remove(key); + Remove(key); - return true; - } + return true; + } - var type = value.GetType(); + var type = value.GetType(); - if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Nullable<>)) - { - type = type.GenericTypeArguments.FirstOrDefault(); - } + if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Nullable<>)) + { + type = type.GenericTypeArguments.FirstOrDefault(); + } - if ((type == typeof(string)) || - (type == typeof(decimal)) || - (type == typeof(double)) || - (type == typeof(float)) || - (type == typeof(DateTime)) || - (type == typeof(Guid)) || - (type == typeof(bool)) || - (type == typeof(int)) || - (type == typeof(long)) || - (type == typeof(byte))) + if ((type == typeof(string)) || + (type == typeof(decimal)) || + (type == typeof(double)) || + (type == typeof(float)) || + (type == typeof(DateTime)) || + (type == typeof(Guid)) || + (type == typeof(bool)) || + (type == typeof(int)) || + (type == typeof(long)) || + (type == typeof(byte))) + { + lock (locker) { - lock (locker) - { - string? str; - - if (value is decimal) - { - return AddOrUpdateValue(key, - Convert.ToString(Convert.ToDecimal(value), System.Globalization.CultureInfo.InvariantCulture)); - } - else if (value is DateTime) - { - return AddOrUpdateValue(key, - Convert.ToString(-(Convert.ToDateTime(value)).ToUniversalTime().Ticks, - System.Globalization.CultureInfo.InvariantCulture)); - } - else - str = Convert.ToString(value, System.Globalization.CultureInfo.InvariantCulture); + string? str; - string? 
oldValue = null; + if (value is decimal) + { + return AddOrUpdateValue(key, + Convert.ToString(Convert.ToDecimal(value), System.Globalization.CultureInfo.InvariantCulture)); + } + else if (value is DateTime) + { + return AddOrUpdateValue(key, + Convert.ToString(-(Convert.ToDateTime(value)).ToUniversalTime().Ticks, + System.Globalization.CultureInfo.InvariantCulture)); + } + else + str = Convert.ToString(value, System.Globalization.CultureInfo.InvariantCulture); - if (store.FileExists(key)) - { - using var stream = store.OpenFile(key, FileMode.Open); - using var sr = new StreamReader(stream); - oldValue = sr.ReadToEnd(); - } + string? oldValue = null; - using (var stream = store.OpenFile(key, FileMode.Create, FileAccess.Write)) - { - using var sw = new StreamWriter(stream); - sw.Write(str); - } + if (store.FileExists(key)) + { + using var stream = store.OpenFile(key, FileMode.Open); + using var sr = new StreamReader(stream); + oldValue = sr.ReadToEnd(); + } - return oldValue != str; + using (var stream = store.OpenFile(key, FileMode.Create, FileAccess.Write)) + { + using var sw = new StreamWriter(stream); + sw.Write(str); } - } - throw new ArgumentException(string.Format("Value of type {0} is not supported.", type?.Name)); + return oldValue != str; + } } - /// - /// Get Value - /// - /// - /// - /// - /// - private T? GetValueOrDefaultInternal(string key, T? defaultValue = default) + throw new ArgumentException(string.Format("Value of type {0} is not supported.", type?.Name)); + } + + /// + /// Get Value + /// + /// + /// + /// + /// + private T? GetValueOrDefaultInternal(string key, T? defaultValue = default) + { + object? value = null; + lock (locker) { - object? value = null; - lock (locker) + try { - try - { - string? str = null; - - // If the key exists, retrieve the value. - if (store.FileExists(key)) - { - using var stream = store.OpenFile(key, FileMode.Open); - using var sr = new StreamReader(stream); - str = sr.ReadToEnd(); - } + string? 
str = null; - if (str == null) - return defaultValue; - - var type = typeof(T); + // If the key exists, retrieve the value. + if (store.FileExists(key)) + { + using var stream = store.OpenFile(key, FileMode.Open); + using var sr = new StreamReader(stream); + str = sr.ReadToEnd(); + } - if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Nullable<>)) - { - type = type.GenericTypeArguments.FirstOrDefault(); - } + if (str == null) + return defaultValue; - if (type == typeof(string)) - value = str; + var type = typeof(T); - else if (type == typeof(decimal)) - { + if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Nullable<>)) + { + type = type.GenericTypeArguments.FirstOrDefault(); + } - string savedDecimal = Convert.ToString(str); + if (type == typeof(string)) + value = str; + else if (type == typeof(decimal)) + { - value = Convert.ToDecimal(savedDecimal, System.Globalization.CultureInfo.InvariantCulture); + string savedDecimal = Convert.ToString(str); - return null != value ? (T)value : defaultValue; - } + value = Convert.ToDecimal(savedDecimal, System.Globalization.CultureInfo.InvariantCulture); - else if (type == typeof(double)) - { - value = Convert.ToDouble(str, System.Globalization.CultureInfo.InvariantCulture); - } + return null != value ? 
(T)value : defaultValue; - else if (type == typeof(Single)) - { - value = Convert.ToSingle(str, System.Globalization.CultureInfo.InvariantCulture); - } - - else if (type == typeof(DateTime)) - { - - var ticks = Convert.ToInt64(str, System.Globalization.CultureInfo.InvariantCulture); - if (ticks >= 0) - { - //Old value, stored before update to UTC values - value = new DateTime(ticks); - } - else - { - //New value, UTC - value = new DateTime(-ticks, DateTimeKind.Utc); - } + } + else if (type == typeof(double)) + { + value = Convert.ToDouble(str, System.Globalization.CultureInfo.InvariantCulture); + } - return (T)value; - } + else if (type == typeof(Single)) + { + value = Convert.ToSingle(str, System.Globalization.CultureInfo.InvariantCulture); + } - else if (type == typeof(Guid)) - { - if (Guid.TryParse(str, out Guid guid)) - value = guid; - } + else if (type == typeof(DateTime)) + { - else if (type == typeof(bool)) + var ticks = Convert.ToInt64(str, System.Globalization.CultureInfo.InvariantCulture); + if (ticks >= 0) { - value = Convert.ToBoolean(str, System.Globalization.CultureInfo.InvariantCulture); + //Old value, stored before update to UTC values + value = new DateTime(ticks); } - - else if (type == typeof(Int32)) + else { - value = Convert.ToInt32(str, System.Globalization.CultureInfo.InvariantCulture); + //New value, UTC + value = new DateTime(-ticks, DateTimeKind.Utc); } - else if (type == typeof(Int64)) - { - value = Convert.ToInt64(str, System.Globalization.CultureInfo.InvariantCulture); - } - else if (type == typeof(byte)) - { - value = Convert.ToByte(str, System.Globalization.CultureInfo.InvariantCulture); - } + return (T)value; + } - else - { - throw new ArgumentException("Value of type " + type + " is not supported."); - } + else if (type == typeof(Guid)) + { + if (Guid.TryParse(str, out Guid guid)) + value = guid; } - catch (FormatException) + + else if (type == typeof(bool)) { - return defaultValue; + value = Convert.ToBoolean(str, 
System.Globalization.CultureInfo.InvariantCulture); } - } + else if (type == typeof(Int32)) + { + value = Convert.ToInt32(str, System.Globalization.CultureInfo.InvariantCulture); + } - return null != value ? (T)value : defaultValue; - } + else if (type == typeof(Int64)) + { + value = Convert.ToInt64(str, System.Globalization.CultureInfo.InvariantCulture); + } - /// - /// Remove key - /// - /// Key to remove - public void Remove(string key) - { - if (store.FileExists(key)) - store.DeleteFile(key); - } + else if (type == typeof(byte)) + { + value = Convert.ToByte(str, System.Globalization.CultureInfo.InvariantCulture); + } - /// - /// Clear all keys from settings - /// - public void Clear() - { - try - { - foreach (var file in store.GetFileNames()) + else { - store.DeleteFile(file); + throw new ArgumentException("Value of type " + type + " is not supported."); } } - catch (Exception ex) + catch (FormatException) { - Console.WriteLine("Unable to clear all defaults. Message: " + ex.Message); + return defaultValue; } - } - /// - /// Checks to see if the key has been added. - /// - /// Key to check - /// True if contains key, else false - public bool Contains(string key) - { - return store.FileExists(key); } - #region GetValueOrDefault + return null != value ? (T)value : defaultValue; + } - /// - /// Gets the current value or the default that you specify. - /// - /// Key for settings - /// default value if not set - /// Value or default - public decimal GetValueOrDefault(string key, decimal defaultValue) - { - return - GetValueOrDefaultInternal(key, defaultValue); - } + /// + /// Remove key + /// + /// Key to remove + public void Remove(string key) + { + if (store.FileExists(key)) + store.DeleteFile(key); + } - /// - /// Gets the current value or the default that you specify. 
- /// - /// Key for settings - /// default value if not set - /// Value or default - public bool GetValueOrDefault(string key, bool defaultValue) + /// + /// Clear all keys from settings + /// + public void Clear() + { + try { - return - GetValueOrDefaultInternal(key, defaultValue); + foreach (var file in store.GetFileNames()) + { + store.DeleteFile(file); + } } - - /// - /// Gets the current value or the default that you specify. - /// - /// Key for settings - /// default value if not set - /// Value or default - public long GetValueOrDefault(string key, long defaultValue) + catch (Exception ex) { - return - GetValueOrDefaultInternal(key, defaultValue); + Console.WriteLine("Unable to clear all defaults. Message: " + ex.Message); } + } - /// - /// Gets the current value or the default that you specify. - /// - /// Key for settings - /// default value if not set - /// Value or default - public string? GetValueOrDefault(string key, string? defaultValue) - { - return GetValueOrDefaultInternal(key, defaultValue); + /// + /// Checks to see if the key has been added. + /// + /// Key to check + /// True if contains key, else false + public bool Contains(string key) + { + return store.FileExists(key); + } - } + #region GetValueOrDefault - /// - /// Gets the current value or the default that you specify. - /// - /// Key for settings - /// default value if not set - /// Value or default - public int GetValueOrDefault(string key, int defaultValue) - { - return + /// + /// Gets the current value or the default that you specify. + /// + /// Key for settings + /// default value if not set + /// Value or default + public decimal GetValueOrDefault(string key, decimal defaultValue) + { + return GetValueOrDefaultInternal(key, defaultValue); + } - } + /// + /// Gets the current value or the default that you specify. 
+ /// + /// Key for settings + /// default value if not set + /// Value or default + public bool GetValueOrDefault(string key, bool defaultValue) + { + return + GetValueOrDefaultInternal(key, defaultValue); + } - /// - /// Gets the current value or the default that you specify. - /// - /// Key for settings - /// default value if not set - /// Value or default - public float GetValueOrDefault(string key, float defaultValue) - { - return - GetValueOrDefaultInternal(key, defaultValue); + /// + /// Gets the current value or the default that you specify. + /// + /// Key for settings + /// default value if not set + /// Value or default + public long GetValueOrDefault(string key, long defaultValue) + { + return + GetValueOrDefaultInternal(key, defaultValue); + } - } + /// + /// Gets the current value or the default that you specify. + /// + /// Key for settings + /// default value if not set + /// Value or default + public string? GetValueOrDefault(string key, string? defaultValue) + { + return GetValueOrDefaultInternal(key, defaultValue); - /// - /// Gets the current value or the default that you specify. - /// - /// Key for settings - /// default value if not set - /// Value or default - public DateTime GetValueOrDefault(string key, DateTime defaultValue) - { - return - GetValueOrDefaultInternal(key, defaultValue); + } - } + /// + /// Gets the current value or the default that you specify. + /// + /// Key for settings + /// default value if not set + /// Value or default + public int GetValueOrDefault(string key, int defaultValue) + { + return + GetValueOrDefaultInternal(key, defaultValue); - /// - /// Gets the current value or the default that you specify. - /// - /// Key for settings - /// default value if not set - /// Value or default - public Guid GetValueOrDefault(string key, Guid defaultValue) - { - return - GetValueOrDefaultInternal(key, defaultValue); + } - } + /// + /// Gets the current value or the default that you specify. 
+ /// + /// Key for settings + /// default value if not set + /// Value or default + public float GetValueOrDefault(string key, float defaultValue) + { + return + GetValueOrDefaultInternal(key, defaultValue); - /// - /// Gets the current value or the default that you specify. - /// - /// Key for settings - /// default value if not set - /// Value or default - public double GetValueOrDefault(string key, double defaultValue) - { - return - GetValueOrDefaultInternal(key, defaultValue); - } + } + + /// + /// Gets the current value or the default that you specify. + /// + /// Key for settings + /// default value if not set + /// Value or default + public DateTime GetValueOrDefault(string key, DateTime defaultValue) + { + return + GetValueOrDefaultInternal(key, defaultValue); - #endregion + } - #region AddOrUpdateValue + /// + /// Gets the current value or the default that you specify. + /// + /// Key for settings + /// default value if not set + /// Value or default + public Guid GetValueOrDefault(string key, Guid defaultValue) + { + return + GetValueOrDefaultInternal(key, defaultValue); - /// - /// Adds or updates the value - /// - /// Key for setting - /// Value to set - /// True of was added or updated and you need to save it. - public bool AddOrUpdateValue(string key, decimal value) - { - return - AddOrUpdateValueInternal(key, value); + } - } + /// + /// Gets the current value or the default that you specify. + /// + /// Key for settings + /// default value if not set + /// Value or default + public double GetValueOrDefault(string key, double defaultValue) + { + return + GetValueOrDefaultInternal(key, defaultValue); + } - /// - /// Adds or updates the value - /// - /// Key for setting - /// Value to set - /// True of was added or updated and you need to save it. 
- public bool AddOrUpdateValue(string key, bool value) - { - return - AddOrUpdateValueInternal(key, value); + #endregion - } + #region AddOrUpdateValue - /// - /// Adds or updates the value - /// - /// Key for setting - /// Value to set - /// True of was added or updated and you need to save it. - public bool AddOrUpdateValue(string key, long value) - { - return AddOrUpdateValueInternal(key, value); - } + /// + /// Adds or updates the value + /// + /// Key for setting + /// Value to set + /// True of was added or updated and you need to save it. + public bool AddOrUpdateValue(string key, decimal value) + { + return + AddOrUpdateValueInternal(key, value); - /// - /// Adds or updates the value - /// - /// Key for setting - /// Value to set - /// True of was added or updated and you need to save it. - public bool AddOrUpdateValue(string key, string value) - { - return - AddOrUpdateValueInternal(key, value); + } - } + /// + /// Adds or updates the value + /// + /// Key for setting + /// Value to set + /// True of was added or updated and you need to save it. + public bool AddOrUpdateValue(string key, bool value) + { + return + AddOrUpdateValueInternal(key, value); - /// - /// Adds or updates the value - /// - /// Key for setting - /// Value to set - /// True of was added or updated and you need to save it. - public bool AddOrUpdateValue(string key, int value) - { - return AddOrUpdateValueInternal(key, value); - } + } - /// - /// Adds or updates the value - /// - /// Key for setting - /// Value to set - /// True of was added or updated and you need to save it. - public bool AddOrUpdateValue(string key, float value) - { - return AddOrUpdateValueInternal(key, value); - } + /// + /// Adds or updates the value + /// + /// Key for setting + /// Value to set + /// True of was added or updated and you need to save it. 
+ public bool AddOrUpdateValue(string key, long value) + { + return AddOrUpdateValueInternal(key, value); + } - /// - /// Adds or updates the value - /// - /// Key for setting - /// Value to set - /// - /// True of was added or updated and you need to save it. - public bool AddOrUpdateValue(string key, DateTime value) - { - return AddOrUpdateValueInternal(key, value); - } + /// + /// Adds or updates the value + /// + /// Key for setting + /// Value to set + /// True of was added or updated and you need to save it. + public bool AddOrUpdateValue(string key, string value) + { + return + AddOrUpdateValueInternal(key, value); - /// - /// Adds or updates the value - /// - /// Key for setting - /// Value to set - /// True of was added or updated and you need to save it. - public bool AddOrUpdateValue(string key, Guid value) - { - return AddOrUpdateValueInternal(key, value); - } + } - /// - /// Adds or updates the value - /// - /// Key for setting - /// Value to set - /// True of was added or updated and you need to save it. - public bool AddOrUpdateValue(string key, double value) - { - return AddOrUpdateValueInternal(key, value); - } + /// + /// Adds or updates the value + /// + /// Key for setting + /// Value to set + /// True of was added or updated and you need to save it. + public bool AddOrUpdateValue(string key, int value) + { + return AddOrUpdateValueInternal(key, value); + } - #endregion + /// + /// Adds or updates the value + /// + /// Key for setting + /// Value to set + /// True of was added or updated and you need to save it. + public bool AddOrUpdateValue(string key, float value) + { + return AddOrUpdateValueInternal(key, value); + } - /// - /// Attempts to open the app settings page. - /// - /// true if success, else false and not supported - public static bool OpenAppSettings() - { - return false; - } + /// + /// Adds or updates the value + /// + /// Key for setting + /// Value to set + /// + /// True of was added or updated and you need to save it. 
+ public bool AddOrUpdateValue(string key, DateTime value) + { + return AddOrUpdateValueInternal(key, value); + } + + /// + /// Adds or updates the value + /// + /// Key for setting + /// Value to set + /// True of was added or updated and you need to save it. + public bool AddOrUpdateValue(string key, Guid value) + { + return AddOrUpdateValueInternal(key, value); + } + + /// + /// Adds or updates the value + /// + /// Key for setting + /// Value to set + /// True of was added or updated and you need to save it. + public bool AddOrUpdateValue(string key, double value) + { + return AddOrUpdateValueInternal(key, value); + } + + #endregion + + /// + /// Attempts to open the app settings page. + /// + /// true if success, else false and not supported + public static bool OpenAppSettings() + { + return false; } } diff --git a/src/SmiServices/Applications/Setup/SetupSettings.cs b/src/SmiServices/Applications/Setup/SetupSettings.cs index 8cb140556..8fdeaa2c0 100644 --- a/src/SmiServices/Applications/Setup/SetupSettings.cs +++ b/src/SmiServices/Applications/Setup/SetupSettings.cs @@ -6,41 +6,39 @@ using System; -namespace SmiServices.Applications.Setup +namespace SmiServices.Applications.Setup; + +/// +/// This is the Settings static class that can be used in your Core solution or in any +/// of your client applications. All settings are laid out the same exact way with getters +/// and setters. +/// +public static class SetupSettings { - /// - /// This is the Settings static class that can be used in your Core solution or in any - /// of your client applications. All settings are laid out the same exact way with getters - /// and setters. - /// - public static class SetupSettings - { - static readonly Lazy _implementation = new(static () => CreateSettings(), System.Threading.LazyThreadSafetyMode.PublicationOnly); - - private static SetupIsolatedStorage AppSettings - { - get - { - SetupIsolatedStorage ret = _implementation.Value ?? 
throw new NotImplementedException("Isolated Storage does not work in this environment..."); - return ret; - } - } + static readonly Lazy _implementation = new(static () => CreateSettings(), System.Threading.LazyThreadSafetyMode.PublicationOnly); - /// - /// Last loaded/selected .yaml file - /// - internal static string YamlFile + private static SetupIsolatedStorage AppSettings + { + get { - get => AppSettings?.GetValueOrDefault("YamlFile", "") ?? throw new InvalidOperationException("AppSettings not yet initialised"); - set => AppSettings.AddOrUpdateValue("YamlFile", value); + SetupIsolatedStorage ret = _implementation.Value ?? throw new NotImplementedException("Isolated Storage does not work in this environment..."); + return ret; } + } + /// + /// Last loaded/selected .yaml file + /// + internal static string YamlFile + { + get => AppSettings?.GetValueOrDefault("YamlFile", "") ?? throw new InvalidOperationException("AppSettings not yet initialised"); + set => AppSettings.AddOrUpdateValue("YamlFile", value); + } - private static SetupIsolatedStorage CreateSettings() - { - return new SetupIsolatedStorage(); - } + private static SetupIsolatedStorage CreateSettings() + { + return new SetupIsolatedStorage(); } } diff --git a/src/SmiServices/Applications/TriggerUpdates/ITriggerUpdatesSource.cs b/src/SmiServices/Applications/TriggerUpdates/ITriggerUpdatesSource.cs index 6100c02d7..b2e7eb935 100644 --- a/src/SmiServices/Applications/TriggerUpdates/ITriggerUpdatesSource.cs +++ b/src/SmiServices/Applications/TriggerUpdates/ITriggerUpdatesSource.cs @@ -2,19 +2,18 @@ using System.Collections.Generic; -namespace SmiServices.Applications.TriggerUpdates +namespace SmiServices.Applications.TriggerUpdates; + +public interface ITriggerUpdatesSource { - public interface ITriggerUpdatesSource - { - /// - /// Returns updates to issue if any - /// - /// - IEnumerable GetUpdates(); + /// + /// Returns updates to issue if any + /// + /// + IEnumerable GetUpdates(); - /// - /// Notifies 
the source that it should cancel any ongoing queries and attempt to stop issuing updates - /// - void Stop(); - } + /// + /// Notifies the source that it should cancel any ongoing queries and attempt to stop issuing updates + /// + void Stop(); } diff --git a/src/SmiServices/Applications/TriggerUpdates/MapperSource.cs b/src/SmiServices/Applications/TriggerUpdates/MapperSource.cs index 2db41e823..b0f604f54 100644 --- a/src/SmiServices/Applications/TriggerUpdates/MapperSource.cs +++ b/src/SmiServices/Applications/TriggerUpdates/MapperSource.cs @@ -14,246 +14,245 @@ using System.Threading; -namespace SmiServices.Applications.TriggerUpdates -{ - public class MapperSource : ITriggerUpdatesSource - { - private readonly ISwapIdentifiers _swapper; - private readonly TriggerUpdatesFromMapperOptions _cliOptions; - private readonly GlobalOptions _globalOptions; +namespace SmiServices.Applications.TriggerUpdates; - protected readonly CancellationTokenSource TokenSource = new(); - protected readonly ILogger Logger = LogManager.GetCurrentClassLogger(); +public class MapperSource : ITriggerUpdatesSource +{ + private readonly ISwapIdentifiers _swapper; + private readonly TriggerUpdatesFromMapperOptions _cliOptions; + private readonly GlobalOptions _globalOptions; - /// - /// True if the database querying operation is currently executing - /// - public bool IsExecuting { get; private set; } = false; + protected readonly CancellationTokenSource TokenSource = new(); + protected readonly ILogger Logger = LogManager.GetCurrentClassLogger(); - private DbCommand? _currentCommandMainTable; - private DbCommand? _currentCommandOtherTables; + /// + /// True if the database querying operation is currently executing + /// + public bool IsExecuting { get; private set; } = false; - public MapperSource(GlobalOptions globalOptions, TriggerUpdatesFromMapperOptions cliOptions) - { - _cliOptions = cliOptions; - _globalOptions = globalOptions; + private DbCommand? 
_currentCommandMainTable; + private DbCommand? _currentCommandOtherTables; - FansiImplementations.Load(); + public MapperSource(GlobalOptions globalOptions, TriggerUpdatesFromMapperOptions cliOptions) + { + _cliOptions = cliOptions; + _globalOptions = globalOptions; - ISwapIdentifiers? swapper; - try - { - var objectFactory = new MicroserviceObjectFactory(); - swapper = objectFactory.CreateInstance(globalOptions.IdentifierMapperOptions!.SwapperType!, typeof(ISwapIdentifiers).Assembly); - } - catch (Exception ex) - { - throw new Exception($"Could not create IdentifierMapper Swapper with SwapperType:{globalOptions.IdentifierMapperOptions?.SwapperType ?? "Null"}", ex); - } + FansiImplementations.Load(); - _swapper = swapper ?? throw new ArgumentException("No SwapperType has been specified in GlobalOptions.IdentifierMapperOptions"); + ISwapIdentifiers? swapper; + try + { + var objectFactory = new MicroserviceObjectFactory(); + swapper = objectFactory.CreateInstance(globalOptions.IdentifierMapperOptions!.SwapperType!, typeof(ISwapIdentifiers).Assembly); } + catch (Exception ex) + { + throw new Exception($"Could not create IdentifierMapper Swapper with SwapperType:{globalOptions.IdentifierMapperOptions?.SwapperType ?? "Null"}", ex); + } + + _swapper = swapper ?? 
throw new ArgumentException("No SwapperType has been specified in GlobalOptions.IdentifierMapperOptions"); + } - public IEnumerable GetUpdates() + public IEnumerable GetUpdates() + { + IsExecuting = true; + + try { - IsExecuting = true; + var mappingTable = _globalOptions.IdentifierMapperOptions!.Discover(); - try - { - var mappingTable = _globalOptions.IdentifierMapperOptions!.Discover(); + if (!mappingTable.Exists()) + throw new Exception($"Mapping table {mappingTable.GetFullyQualifiedName()} did not exist"); - if (!mappingTable.Exists()) - throw new Exception($"Mapping table {mappingTable.GetFullyQualifiedName()} did not exist"); + var syntaxHelper = mappingTable.GetQuerySyntaxHelper(); - var syntaxHelper = mappingTable.GetQuerySyntaxHelper(); + var archiveTableName = mappingTable.GetRuntimeName() + "_Archive"; + var archiveTable = mappingTable.Database.ExpectTable(syntaxHelper.EnsureWrapped(archiveTableName), schema: mappingTable.Schema); - var archiveTableName = mappingTable.GetRuntimeName() + "_Archive"; - var archiveTable = mappingTable.Database.ExpectTable(syntaxHelper.EnsureWrapped(archiveTableName), schema: mappingTable.Schema); + //may be null! + var guidTable = _swapper.GetGuidTableIfAny(_globalOptions.IdentifierMapperOptions); - //may be null! - var guidTable = _swapper.GetGuidTableIfAny(_globalOptions.IdentifierMapperOptions); + if (!archiveTable.Exists()) + throw new Exception($"No Archive table exists for mapping table {mappingTable.GetFullyQualifiedName()}"); - if (!archiveTable.Exists()) - throw new Exception($"No Archive table exists for mapping table {mappingTable.GetFullyQualifiedName()}"); + var swapCol = _globalOptions.IdentifierMapperOptions.SwapColumnName!; + var forCol = _globalOptions.IdentifierMapperOptions.ReplacementColumnName!; - var swapCol = _globalOptions.IdentifierMapperOptions.SwapColumnName!; - var forCol = _globalOptions.IdentifierMapperOptions.ReplacementColumnName!; + // may be null! 
+ var liveDatabaseFieldName = _cliOptions.LiveDatabaseFieldName; - // may be null! - var liveDatabaseFieldName = _cliOptions.LiveDatabaseFieldName; + var archiveFetchSql = GetArchiveFetchSql(archiveTable, swapCol, forCol); - var archiveFetchSql = GetArchiveFetchSql(archiveTable, swapCol, forCol); + using var con = mappingTable.Database.Server.GetConnection(); + con.Open(); - using var con = mappingTable.Database.Server.GetConnection(); - con.Open(); + var dateOfLastUpdate = _cliOptions.DateOfLastUpdate; - var dateOfLastUpdate = _cliOptions.DateOfLastUpdate; + //find all records in the table that are new + var cmd = mappingTable.GetCommand($"SELECT {syntaxHelper.EnsureWrapped(swapCol)}, {syntaxHelper.EnsureWrapped(forCol)} FROM {mappingTable.GetFullyQualifiedName()} WHERE {syntaxHelper.EnsureWrapped(SpecialFieldNames.ValidFrom)} >= @dateOfLastUpdate", con); + cmd.CommandTimeout = _globalOptions.TriggerUpdatesOptions!.CommandTimeoutInSeconds; - //find all records in the table that are new - var cmd = mappingTable.GetCommand($"SELECT {syntaxHelper.EnsureWrapped(swapCol)}, {syntaxHelper.EnsureWrapped(forCol)} FROM {mappingTable.GetFullyQualifiedName()} WHERE {syntaxHelper.EnsureWrapped(SpecialFieldNames.ValidFrom)} >= @dateOfLastUpdate", con); - cmd.CommandTimeout = _globalOptions.TriggerUpdatesOptions!.CommandTimeoutInSeconds; + mappingTable.Database.Server.AddParameterWithValueToCommand("@dateOfLastUpdate", cmd, dateOfLastUpdate); - mappingTable.Database.Server.AddParameterWithValueToCommand("@dateOfLastUpdate", cmd, dateOfLastUpdate); + _currentCommandMainTable = cmd; - _currentCommandMainTable = cmd; + TokenSource.Token.ThrowIfCancellationRequested(); + using var r = cmd.ExecuteReader(); + while (r.Read()) + { TokenSource.Token.ThrowIfCancellationRequested(); - using var r = cmd.ExecuteReader(); - while (r.Read()) - { - TokenSource.Token.ThrowIfCancellationRequested(); - - var currentSwapColValue = r[swapCol]; - var currentForColValue = r[forCol]; + var 
currentSwapColValue = r[swapCol]; + var currentForColValue = r[forCol]; - //if there is a corresponding record in the archive table - using (var con2 = archiveTable.Database.Server.GetConnection()) - { - con2.Open(); - var cmd2 = archiveTable.GetCommand(archiveFetchSql, con2); - cmd2.CommandTimeout = _globalOptions.TriggerUpdatesOptions.CommandTimeoutInSeconds; - _currentCommandOtherTables = cmd2; + //if there is a corresponding record in the archive table + using (var con2 = archiveTable.Database.Server.GetConnection()) + { + con2.Open(); + var cmd2 = archiveTable.GetCommand(archiveFetchSql, con2); + cmd2.CommandTimeout = _globalOptions.TriggerUpdatesOptions.CommandTimeoutInSeconds; + _currentCommandOtherTables = cmd2; - archiveTable.Database.Server.AddParameterWithValueToCommand("@currentSwapColValue", cmd2, currentSwapColValue); + archiveTable.Database.Server.AddParameterWithValueToCommand("@currentSwapColValue", cmd2, currentSwapColValue); - var oldForColValue = cmd2.ExecuteScalar(); + var oldForColValue = cmd2.ExecuteScalar(); - TokenSource.Token.ThrowIfCancellationRequested(); + TokenSource.Token.ThrowIfCancellationRequested(); - //if there is an entry in the archive for this old one then it is not a brand new record i.e. it is an update - if (oldForColValue != null) + //if there is an entry in the archive for this old one then it is not a brand new record i.e. it is an update + if (oldForColValue != null) + { + //there is an entry in the archive so we need to issue a database update to update the live tables so the old archive + // table swap value (e.g. ECHI) is updated to the new one in the live table + yield return new UpdateValuesMessage { - //there is an entry in the archive so we need to issue a database update to update the live tables so the old archive - // table swap value (e.g. ECHI) is updated to the new one in the live table - yield return new UpdateValuesMessage - { - WhereFields = [liveDatabaseFieldName ?? 
forCol], - HaveValues = [Qualify(oldForColValue)], - - WriteIntoFields = [liveDatabaseFieldName ?? forCol], - Values = [Qualify(currentForColValue)] - }; - } + WhereFields = [liveDatabaseFieldName ?? forCol], + HaveValues = [Qualify(oldForColValue)], + + WriteIntoFields = [liveDatabaseFieldName ?? forCol], + Values = [Qualify(currentForColValue)] + }; } + } - TokenSource.Token.ThrowIfCancellationRequested(); + TokenSource.Token.ThrowIfCancellationRequested(); - // We should also look at guid mappings that are filled in now because of brand new records - if (guidTable != null) - { - string guidFetchSql = $"SELECT {syntaxHelper.EnsureWrapped(TableLookupWithGuidFallbackSwapper.GuidColumnName)} FROM {guidTable.GetFullyQualifiedName()} WHERE {syntaxHelper.EnsureWrapped(swapCol)}=@currentSwapColValue"; + // We should also look at guid mappings that are filled in now because of brand new records + if (guidTable != null) + { + string guidFetchSql = $"SELECT {syntaxHelper.EnsureWrapped(TableLookupWithGuidFallbackSwapper.GuidColumnName)} FROM {guidTable.GetFullyQualifiedName()} WHERE {syntaxHelper.EnsureWrapped(swapCol)}=@currentSwapColValue"; - using var con3 = guidTable.Database.Server.GetConnection(); - con3.Open(); - var cmd3 = guidTable.GetCommand(guidFetchSql, con3); - cmd3.CommandTimeout = _globalOptions.TriggerUpdatesOptions.CommandTimeoutInSeconds; - _currentCommandOtherTables = cmd3; + using var con3 = guidTable.Database.Server.GetConnection(); + con3.Open(); + var cmd3 = guidTable.GetCommand(guidFetchSql, con3); + cmd3.CommandTimeout = _globalOptions.TriggerUpdatesOptions.CommandTimeoutInSeconds; + _currentCommandOtherTables = cmd3; - guidTable.Database.Server.AddParameterWithValueToCommand("@currentSwapColValue", cmd3, currentSwapColValue); + guidTable.Database.Server.AddParameterWithValueToCommand("@currentSwapColValue", cmd3, currentSwapColValue); - var oldTemporaryMapping = cmd3.ExecuteScalar(); + var oldTemporaryMapping = cmd3.ExecuteScalar(); - 
TokenSource.Token.ThrowIfCancellationRequested(); + TokenSource.Token.ThrowIfCancellationRequested(); - //if this brand new mapping has a temporary guid assigned to it we need to issue an update of the temporary guid to the legit new mapping - if (oldTemporaryMapping != null) + //if this brand new mapping has a temporary guid assigned to it we need to issue an update of the temporary guid to the legit new mapping + if (oldTemporaryMapping != null) + { + yield return new UpdateValuesMessage { - yield return new UpdateValuesMessage - { - WhereFields = [liveDatabaseFieldName ?? forCol], - HaveValues = [Qualify(oldTemporaryMapping)], - - WriteIntoFields = [liveDatabaseFieldName ?? forCol], - Values = [Qualify(currentForColValue)] - }; - } + WhereFields = [liveDatabaseFieldName ?? forCol], + HaveValues = [Qualify(oldTemporaryMapping)], + + WriteIntoFields = [liveDatabaseFieldName ?? forCol], + Values = [Qualify(currentForColValue)] + }; } } } - finally - { - IsExecuting = false; - } } - - /// - /// Returns DBMS formatted representation for constant - /// - /// - /// - private string Qualify(object value) + finally { - if (value == DBNull.Value || string.IsNullOrWhiteSpace(value?.ToString())) - return "null"; - - if (_cliOptions.Qualifier != '\0') - return _cliOptions.Qualifier + value.ToString() + _cliOptions.Qualifier; - - return value.ToString() ?? throw new ArgumentException("Couldn't convert value to string"); + IsExecuting = false; } + } - /// - /// Returns a query for fetching the latest entry in the archive that matches a given private identifier (query contains parameter @currentSwapColValue) - /// - /// - /// The private identifier column name e.g. CHI - /// The public release identifier column name e.g. ECHI - /// SQL for fetching the latest release identifier value (e.g. 
ECHI value) from the archive - private static string GetArchiveFetchSql(DiscoveredTable archiveTable, string swapCol, string forCol) - { - // Work out how to get the latest entry in the _Archive table that corresponds to a given private identifier (e.g. CHI) - var syntax = archiveTable.Database.Server.GetQuerySyntaxHelper(); + /// + /// Returns DBMS formatted representation for constant + /// + /// + /// + private string Qualify(object value) + { + if (value == DBNull.Value || string.IsNullOrWhiteSpace(value?.ToString())) + return "null"; - var topX = syntax.HowDoWeAchieveTopX(1); + if (_cliOptions.Qualifier != '\0') + return _cliOptions.Qualifier + value.ToString() + _cliOptions.Qualifier; - StringBuilder sb = new(); - sb.AppendLine("SELECT "); + return value.ToString() ?? throw new ArgumentException("Couldn't convert value to string"); + } - if (topX.Location == QueryComponent.SELECT) - sb.AppendLine(topX.SQL); + /// + /// Returns a query for fetching the latest entry in the archive that matches a given private identifier (query contains parameter @currentSwapColValue) + /// + /// + /// The private identifier column name e.g. CHI + /// The public release identifier column name e.g. ECHI + /// SQL for fetching the latest release identifier value (e.g. ECHI value) from the archive + private static string GetArchiveFetchSql(DiscoveredTable archiveTable, string swapCol, string forCol) + { + // Work out how to get the latest entry in the _Archive table that corresponds to a given private identifier (e.g. 
CHI) + var syntax = archiveTable.Database.Server.GetQuerySyntaxHelper(); - sb.AppendLine(syntax.EnsureWrapped(forCol)); - sb.AppendLine("FROM " + archiveTable.GetFullyQualifiedName()); - sb.AppendLine("WHERE"); - sb.AppendLine($"{syntax.EnsureWrapped(swapCol)} = @currentSwapColValue"); + var topX = syntax.HowDoWeAchieveTopX(1); - if (topX.Location == QueryComponent.WHERE) - { - sb.AppendLine("AND"); - sb.AppendLine(topX.SQL); - } + StringBuilder sb = new(); + sb.AppendLine("SELECT "); - sb.AppendLine("ORDER BY"); - sb.AppendLine(syntax.EnsureWrapped(SpecialFieldNames.ValidFrom) + " desc"); + if (topX.Location == QueryComponent.SELECT) + sb.AppendLine(topX.SQL); - if (topX.Location == QueryComponent.Postfix) - sb.AppendLine(topX.SQL); + sb.AppendLine(syntax.EnsureWrapped(forCol)); + sb.AppendLine("FROM " + archiveTable.GetFullyQualifiedName()); + sb.AppendLine("WHERE"); + sb.AppendLine($"{syntax.EnsureWrapped(swapCol)} = @currentSwapColValue"); - return sb.ToString(); + if (topX.Location == QueryComponent.WHERE) + { + sb.AppendLine("AND"); + sb.AppendLine(topX.SQL); } - public void Stop() - { - TokenSource.Cancel(); + sb.AppendLine("ORDER BY"); + sb.AppendLine(syntax.EnsureWrapped(SpecialFieldNames.ValidFrom) + " desc"); + + if (topX.Location == QueryComponent.Postfix) + sb.AppendLine(topX.SQL); - _currentCommandMainTable?.Cancel(); + return sb.ToString(); + } - _currentCommandOtherTables?.Cancel(); + public void Stop() + { + TokenSource.Cancel(); - // give application 10 seconds to exit - var timeout = 10_000; - const int delta = 500; - while (IsExecuting && timeout > 0) - { - Thread.Sleep(delta); - timeout -= delta; - } + _currentCommandMainTable?.Cancel(); - if (timeout <= 0) - throw new ApplicationException("Query execution did not exit in time"); + _currentCommandOtherTables?.Cancel(); - Logger.Info("Query execution aborted, exiting"); + // give application 10 seconds to exit + var timeout = 10_000; + const int delta = 500; + while (IsExecuting && timeout > 0) 
+ { + Thread.Sleep(delta); + timeout -= delta; } + + if (timeout <= 0) + throw new ApplicationException("Query execution did not exit in time"); + + Logger.Info("Query execution aborted, exiting"); } } diff --git a/src/SmiServices/Applications/TriggerUpdates/TriggerUpdates.cs b/src/SmiServices/Applications/TriggerUpdates/TriggerUpdates.cs index acd187d16..b8d2a2823 100644 --- a/src/SmiServices/Applications/TriggerUpdates/TriggerUpdates.cs +++ b/src/SmiServices/Applications/TriggerUpdates/TriggerUpdates.cs @@ -5,38 +5,37 @@ using System.Diagnostics.CodeAnalysis; -namespace SmiServices.Applications.TriggerUpdates +namespace SmiServices.Applications.TriggerUpdates; + +public static class TriggerUpdates { - public static class TriggerUpdates + [ExcludeFromCodeCoverage] + public static int Main(IEnumerable args) { - [ExcludeFromCodeCoverage] - public static int Main(IEnumerable args) - { - int ret = SmiCliInit - .ParseAndRun( - args, - nameof(TriggerUpdates), - [ - typeof(TriggerUpdatesFromMapperOptions), - ], - OnParse - ); - return ret; - } + int ret = SmiCliInit + .ParseAndRun( + args, + nameof(TriggerUpdates), + [ + typeof(TriggerUpdatesFromMapperOptions), + ], + OnParse + ); + return ret; + } - private static int OnParse(GlobalOptions globals, object opts) - { - var parsedOptions = SmiCliInit.Verify(opts); + private static int OnParse(GlobalOptions globals, object opts) + { + var parsedOptions = SmiCliInit.Verify(opts); - ITriggerUpdatesSource source = parsedOptions switch - { - TriggerUpdatesFromMapperOptions o => new MapperSource(globals, o), - _ => throw new NotImplementedException($"No case for '{parsedOptions.GetType()}'") - }; + ITriggerUpdatesSource source = parsedOptions switch + { + TriggerUpdatesFromMapperOptions o => new MapperSource(globals, o), + _ => throw new NotImplementedException($"No case for '{parsedOptions.GetType()}'") + }; - var bootstrapper = new MicroserviceHostBootstrapper(() => new TriggerUpdatesHost(globals, source)); - int ret = 
bootstrapper.Main(); - return ret; - } + var bootstrapper = new MicroserviceHostBootstrapper(() => new TriggerUpdatesHost(globals, source)); + int ret = bootstrapper.Main(); + return ret; } } diff --git a/src/SmiServices/Applications/TriggerUpdates/TriggerUpdatesFromMapperOptions.cs b/src/SmiServices/Applications/TriggerUpdates/TriggerUpdatesFromMapperOptions.cs index 88a971ea5..18fcc979d 100644 --- a/src/SmiServices/Applications/TriggerUpdates/TriggerUpdatesFromMapperOptions.cs +++ b/src/SmiServices/Applications/TriggerUpdates/TriggerUpdatesFromMapperOptions.cs @@ -2,19 +2,18 @@ using System; -namespace SmiServices.Applications.TriggerUpdates +namespace SmiServices.Applications.TriggerUpdates; + +[Verb("mapper", HelpText = "Triggers updates based on new identifier mapping table updates")] +public class TriggerUpdatesFromMapperOptions : TriggerUpdatesCliOptions { - [Verb("mapper", HelpText = "Triggers updates based on new identifier mapping table updates")] - public class TriggerUpdatesFromMapperOptions : TriggerUpdatesCliOptions - { - [Option('d', "DateOfLastUpdate", Required = true, HelpText = "The last known date where live tables and mapping table were in sync. Updates will be issued for records changed after this date")] - public DateTime DateOfLastUpdate { get; set; } + [Option('d', "DateOfLastUpdate", Required = true, HelpText = "The last known date where live tables and mapping table were in sync. Updates will be issued for records changed after this date")] + public DateTime DateOfLastUpdate { get; set; } - [Option('f', "FieldName", HelpText = "The field name of the release identifier in your databases e.g. PatientID. Only needed if different from the mapping table swap column name e.g. ECHI")] - public string? LiveDatabaseFieldName { get; set; } + [Option('f', "FieldName", HelpText = "The field name of the release identifier in your databases e.g. PatientID. Only needed if different from the mapping table swap column name e.g. ECHI")] + public string? 
LiveDatabaseFieldName { get; set; } - [Option('q', "Qualifier", HelpText = "Qualifier for values e.g. '. This should be the DBMS qualifier needed for strings/dates. If patient identifiers are numerical then do not specify this option")] - public char? Qualifier { get; set; } - } + [Option('q', "Qualifier", HelpText = "Qualifier for values e.g. '. This should be the DBMS qualifier needed for strings/dates. If patient identifiers are numerical then do not specify this option")] + public char? Qualifier { get; set; } } diff --git a/src/SmiServices/Applications/TriggerUpdates/TriggerUpdatesHost.cs b/src/SmiServices/Applications/TriggerUpdates/TriggerUpdatesHost.cs index 95bfe035c..6c26be537 100644 --- a/src/SmiServices/Applications/TriggerUpdates/TriggerUpdatesHost.cs +++ b/src/SmiServices/Applications/TriggerUpdates/TriggerUpdatesHost.cs @@ -4,28 +4,27 @@ using SmiServices.Common.Options; -namespace SmiServices.Applications.TriggerUpdates +namespace SmiServices.Applications.TriggerUpdates; + +public class TriggerUpdatesHost : MicroserviceHost { - public class TriggerUpdatesHost : MicroserviceHost + private readonly ITriggerUpdatesSource _source; + private readonly IProducerModel _producer; + + public TriggerUpdatesHost(GlobalOptions options, ITriggerUpdatesSource source, IMessageBroker? messageBroker = null) + : base(options, messageBroker) { - private readonly ITriggerUpdatesSource _source; - private readonly IProducerModel _producer; + _source = source; + _producer = MessageBroker.SetupProducer(options.TriggerUpdatesOptions!, isBatch: false); + } - public TriggerUpdatesHost(GlobalOptions options, ITriggerUpdatesSource source, IMessageBroker? 
messageBroker = null) - : base(options, messageBroker) + public override void Start() + { + foreach (var upd in _source.GetUpdates()) { - _source = source; - _producer = MessageBroker.SetupProducer(options.TriggerUpdatesOptions!, isBatch: false); + _producer.SendMessage(upd, isInResponseTo: null, routingKey: null); } - public override void Start() - { - foreach (var upd in _source.GetUpdates()) - { - _producer.SendMessage(upd, isInResponseTo: null, routingKey: null); - } - - Stop("Update detection process finished"); - } + Stop("Update detection process finished"); } } diff --git a/src/SmiServices/Applications/TriggerUpdates/TriggerUpdatesOptions.cs b/src/SmiServices/Applications/TriggerUpdates/TriggerUpdatesOptions.cs index 60ed433f4..0595363c3 100644 --- a/src/SmiServices/Applications/TriggerUpdates/TriggerUpdatesOptions.cs +++ b/src/SmiServices/Applications/TriggerUpdates/TriggerUpdatesOptions.cs @@ -1,10 +1,9 @@ using SmiServices.Common.Options; -namespace SmiServices.Applications.TriggerUpdates +namespace SmiServices.Applications.TriggerUpdates; + +public abstract class TriggerUpdatesCliOptions : CliOptions { - public abstract class TriggerUpdatesCliOptions : CliOptions - { - } } diff --git a/src/SmiServices/Common/Events/ControlEventHandler.cs b/src/SmiServices/Common/Events/ControlEventHandler.cs index 070078f40..cd7149431 100644 --- a/src/SmiServices/Common/Events/ControlEventHandler.cs +++ b/src/SmiServices/Common/Events/ControlEventHandler.cs @@ -1,9 +1,8 @@ -namespace SmiServices.Common.Events -{ - /// - /// Event handler for hosts to implement if they wish to listen to specific control commands - /// - /// - /// - public delegate void ControlEventHandler(string routingKey, string? message = null); -} +namespace SmiServices.Common.Events; + +/// +/// Event handler for hosts to implement if they wish to listen to specific control commands +/// +/// +/// +public delegate void ControlEventHandler(string routingKey, string? 
message = null); diff --git a/src/SmiServices/Common/Events/FatalErrorEventArgs.cs b/src/SmiServices/Common/Events/FatalErrorEventArgs.cs index d6d3d3f3a..a0e11270f 100644 --- a/src/SmiServices/Common/Events/FatalErrorEventArgs.cs +++ b/src/SmiServices/Common/Events/FatalErrorEventArgs.cs @@ -2,28 +2,27 @@ using RabbitMQ.Client.Events; using System; -namespace SmiServices.Common.Events +namespace SmiServices.Common.Events; + +public class FatalErrorEventArgs : EventArgs { - public class FatalErrorEventArgs : EventArgs - { - public string Message { get; init; } - public Exception? Exception { get; init; } + public string Message { get; init; } + public Exception? Exception { get; init; } - public FatalErrorEventArgs(string msg, Exception exception) - { - Message = msg; - Exception = exception; - } + public FatalErrorEventArgs(string msg, Exception exception) + { + Message = msg; + Exception = exception; + } - public FatalErrorEventArgs(BasicReturnEventArgs ra) - { - Message = $"BasicReturnEventArgs: {ra.ReplyCode} - {ra.ReplyText}. (Exchange: {ra.Exchange}, RoutingKey: {ra.RoutingKey})"; - } + public FatalErrorEventArgs(BasicReturnEventArgs ra) + { + Message = $"BasicReturnEventArgs: {ra.ReplyCode} - {ra.ReplyText}. 
(Exchange: {ra.Exchange}, RoutingKey: {ra.RoutingKey})"; + } - public override string ToString() - { - return $"{base.ToString()}, Message={Message}, Exception={Exception}, "; - } + public override string ToString() + { + return $"{base.ToString()}, Message={Message}, Exception={Exception}, "; } } diff --git a/src/SmiServices/Common/Events/FatalErrorHandlers.cs b/src/SmiServices/Common/Events/FatalErrorHandlers.cs index 5fc830d8a..c5991d493 100644 --- a/src/SmiServices/Common/Events/FatalErrorHandlers.cs +++ b/src/SmiServices/Common/Events/FatalErrorHandlers.cs @@ -1,11 +1,10 @@ using RabbitMQ.Client.Events; -namespace SmiServices.Common.Events -{ - public delegate void HostFatalHandler(object sender, FatalErrorEventArgs e); +namespace SmiServices.Common.Events; - public delegate void ConsumerFatalHandler(object sender, FatalErrorEventArgs e); +public delegate void HostFatalHandler(object sender, FatalErrorEventArgs e); - public delegate void ProducerFatalHandler(object sender, BasicReturnEventArgs e); -} +public delegate void ConsumerFatalHandler(object sender, FatalErrorEventArgs e); + +public delegate void ProducerFatalHandler(object sender, BasicReturnEventArgs e); diff --git a/src/SmiServices/Common/Events/StopEventHandler.cs b/src/SmiServices/Common/Events/StopEventHandler.cs index 8d103f3a0..fa8b75e34 100644 --- a/src/SmiServices/Common/Events/StopEventHandler.cs +++ b/src/SmiServices/Common/Events/StopEventHandler.cs @@ -1,4 +1,3 @@ -namespace SmiServices.Common.Events -{ - public delegate void StopEventHandler(); -} +namespace SmiServices.Common.Events; + +public delegate void StopEventHandler(); diff --git a/src/SmiServices/Common/Execution/IMicroserviceHost.cs b/src/SmiServices/Common/Execution/IMicroserviceHost.cs index 4a8364fbc..e19f63612 100644 --- a/src/SmiServices/Common/Execution/IMicroserviceHost.cs +++ b/src/SmiServices/Common/Execution/IMicroserviceHost.cs @@ -1,12 +1,11 @@ using SmiServices.Common.Events; -namespace 
SmiServices.Common.Execution +namespace SmiServices.Common.Execution; + +public interface IMicroserviceHost { - public interface IMicroserviceHost - { - /// - /// - /// - event HostFatalHandler OnFatal; - } + /// + /// + /// + event HostFatalHandler OnFatal; } diff --git a/src/SmiServices/Common/Execution/MicroserviceHost.cs b/src/SmiServices/Common/Execution/MicroserviceHost.cs index 26a6b7c7f..eb0a9c20e 100644 --- a/src/SmiServices/Common/Execution/MicroserviceHost.cs +++ b/src/SmiServices/Common/Execution/MicroserviceHost.cs @@ -8,185 +8,184 @@ using SmiServices.Common.Options; using System; -namespace SmiServices.Common.Execution -{ - public abstract class MicroserviceHost : IMicroserviceHost - { - public event HostFatalHandler OnFatal; - - protected readonly string HostProcessName; - protected readonly int HostProcessID; +namespace SmiServices.Common.Execution; - protected readonly GlobalOptions Globals; - protected readonly ILogger Logger; +public abstract class MicroserviceHost : IMicroserviceHost +{ + public event HostFatalHandler OnFatal; - protected readonly IMessageBroker MessageBroker; + protected readonly string HostProcessName; + protected readonly int HostProcessID; + protected readonly GlobalOptions Globals; + protected readonly ILogger Logger; - private readonly object _oAdapterLock = new(); - private bool _auxConnectionsCreated; + protected readonly IMessageBroker MessageBroker; - private readonly ProducerOptions _fatalLoggingProducerOptions; - private IProducerModel? _fatalLoggingProducer; - private readonly ControlMessageConsumer _controlMessageConsumer = null!; + private readonly object _oAdapterLock = new(); + private bool _auxConnectionsCreated; - private bool _stopCalled; + private readonly ProducerOptions _fatalLoggingProducerOptions; + private IProducerModel? 
_fatalLoggingProducer; - protected readonly MicroserviceObjectFactory ObjectFactory; + private readonly ControlMessageConsumer _controlMessageConsumer = null!; - /// - /// Loads logging, sets up fatal behaviour, subscribes rabbit etc. - /// - /// Settings for the microservice (location of rabbit, queue names etc) - /// - protected MicroserviceHost( - GlobalOptions globals, - IMessageBroker? messageBroker = null) - { - if (globals == null || globals.FileSystemOptions == null || globals.RabbitOptions == null || globals.LoggingOptions == null) - throw new ArgumentException("All or part of the global options are null"); + private bool _stopCalled; - // Disable fo-dicom's DICOM validation globally from here - new DicomSetupBuilder().SkipValidation(); + protected readonly MicroserviceObjectFactory ObjectFactory; - HostProcessName = globals.HostProcessName; + /// + /// Loads logging, sets up fatal behaviour, subscribes rabbit etc. + /// + /// Settings for the microservice (location of rabbit, queue names etc) + /// + protected MicroserviceHost( + GlobalOptions globals, + IMessageBroker? 
messageBroker = null) + { + if (globals == null || globals.FileSystemOptions == null || globals.RabbitOptions == null || globals.LoggingOptions == null) + throw new ArgumentException("All or part of the global options are null"); - Logger = LogManager.GetLogger(GetType().Name); - Logger.Info("Host logger created"); + // Disable fo-dicom's DICOM validation globally from here + new DicomSetupBuilder().SkipValidation(); - HostProcessID = Environment.ProcessId; - Logger.Info($"Starting {HostProcessName} (Host={Environment.MachineName} PID={HostProcessID} User={Environment.UserName})"); + HostProcessName = globals.HostProcessName; - // log centrally - Globals = globals; - Logger.Debug($"Loaded global options:\n{globals}"); + Logger = LogManager.GetLogger(GetType().Name); + Logger.Info("Host logger created"); - // should also be centralized for non-host uses - // Ensure this is false in case the default changes - DicomTypeTranslater.SerializeBinaryData = false; + HostProcessID = Environment.ProcessId; + Logger.Info($"Starting {HostProcessName} (Host={Environment.MachineName} PID={HostProcessID} User={Environment.UserName})"); - _fatalLoggingProducerOptions = new ProducerOptions - { - ExchangeName = Globals.RabbitOptions.FatalLoggingExchange - }; + // log centrally + Globals = globals; + Logger.Debug($"Loaded global options:\n{globals}"); - //TODO This won't pass for testing with mocked filesystems - //if(!Directory.Exists(options.FileSystemRoot)) - // throw new ArgumentException("Could not locate the FileSystemRoot \"" + options.FileSystemRoot + "\""); + // should also be centralized for non-host uses + // Ensure this is false in case the default changes + DicomTypeTranslater.SerializeBinaryData = false; - OnFatal += (sender, args) => Fatal(args.Message, args.Exception); + _fatalLoggingProducerOptions = new ProducerOptions + { + ExchangeName = Globals.RabbitOptions.FatalLoggingExchange + }; - if (messageBroker == null) - { - messageBroker = new 
RabbitMQBroker(globals.RabbitOptions, HostProcessName + HostProcessID, OnFatal); - var controlExchangeName = globals.RabbitOptions.RabbitMqControlExchangeName ?? throw new ArgumentNullException(nameof(globals)); - _controlMessageConsumer = new ControlMessageConsumer(globals.RabbitOptions, HostProcessName, HostProcessID, controlExchangeName, Stop); - } - MessageBroker = messageBroker; + //TODO This won't pass for testing with mocked filesystems + //if(!Directory.Exists(options.FileSystemRoot)) + // throw new ArgumentException("Could not locate the FileSystemRoot \"" + options.FileSystemRoot + "\""); - ObjectFactory = new MicroserviceObjectFactory - { - FatalHandler = (s, e) => Fatal(e.Message, e.Exception) - }; - } + OnFatal += (sender, args) => Fatal(args.Message, args.Exception); - /// - /// Add an event handler to the control message consumer - /// - /// Method to call when invoked. Parameters are the action to perform, and the message body - protected void AddControlHandler(IControlMessageHandler handler) + if (messageBroker == null) { - //(a, m) => action, message content - _controlMessageConsumer.ControlEvent += handler.ControlMessageHandler; + messageBroker = new RabbitMQBroker(globals.RabbitOptions, HostProcessName + HostProcessID, OnFatal); + var controlExchangeName = globals.RabbitOptions.RabbitMqControlExchangeName ?? 
throw new ArgumentNullException(nameof(globals)); + _controlMessageConsumer = new ControlMessageConsumer(globals.RabbitOptions, HostProcessName, HostProcessID, controlExchangeName, Stop); } + MessageBroker = messageBroker; - /// - /// Start this separately so we don't block the thread if the host constructor throws an exception - /// - public void StartAuxConnections() + ObjectFactory = new MicroserviceObjectFactory { - lock (_oAdapterLock) - { - if (_auxConnectionsCreated) - return; - - _auxConnectionsCreated = true; + FatalHandler = (s, e) => Fatal(e.Message, e.Exception) + }; + } - // Ensures no consumers have been started until we explicitly call Start() - if (MessageBroker.HasConsumers) - throw new ApplicationException("Rabbit adapter has consumers before aux. connections created"); + /// + /// Add an event handler to the control message consumer + /// + /// Method to call when invoked. Parameters are the action to perform, and the message body + protected void AddControlHandler(IControlMessageHandler handler) + { + //(a, m) => action, message content + _controlMessageConsumer.ControlEvent += handler.ControlMessageHandler; + } - _fatalLoggingProducer = MessageBroker.SetupProducer(_fatalLoggingProducerOptions, isBatch: false); - MessageBroker.StartControlConsumer(_controlMessageConsumer); - } - } + /// + /// Start this separately so we don't block the thread if the host constructor throws an exception + /// + public void StartAuxConnections() + { + lock (_oAdapterLock) + { + if (_auxConnectionsCreated) + return; - /// - /// Per-host implementation. 
objects should not be started outside this method - /// - public abstract void Start(); + _auxConnectionsCreated = true; - //TODO Expose timeout here - public virtual void Stop(string reason) - { - Logger.Info($"Host Stop called: {reason}"); + // Ensures no consumers have been started until we explicitly call Start() + if (MessageBroker.HasConsumers) + throw new ApplicationException("Rabbit adapter has consumers before aux. connections created"); - if (_stopCalled) - Logger.Warn("Host stop called twice"); + _fatalLoggingProducer = MessageBroker.SetupProducer(_fatalLoggingProducerOptions, isBatch: false); + MessageBroker.StartControlConsumer(_controlMessageConsumer); + } + } - _stopCalled = true; + /// + /// Per-host implementation. objects should not be started outside this method + /// + public abstract void Start(); - Logger.Debug("Shutting down RabbitMQ connections"); + //TODO Expose timeout here + public virtual void Stop(string reason) + { + Logger.Info($"Host Stop called: {reason}"); - // Attempt to destroy the control queue + if (_stopCalled) + Logger.Warn("Host stop called twice"); - try - { - _controlMessageConsumer.Shutdown(); - } - catch (Exception e) - { - Logger.Warn($"Could not clean up control queues: {e.Message}"); - } + _stopCalled = true; - lock (_oAdapterLock) - { - MessageBroker.Shutdown(RabbitMQBroker.DefaultOperationTimeout); - } + Logger.Debug("Shutting down RabbitMQ connections"); - Logger.Info("Host stop completed"); + // Attempt to destroy the control queue - // Always remember to flush! - LogManager.Shutdown(); + try + { + _controlMessageConsumer.Shutdown(); + } + catch (Exception e) + { + Logger.Warn($"Could not clean up control queues: {e.Message}"); } - /// - /// Fatal essentially just calls , but attempts to send a FatalErrorMessage to RabbitMQ first - /// - /// - /// - public void Fatal(string msg, Exception? 
exception) + lock (_oAdapterLock) { - Logger.Fatal(exception, msg); - if (_stopCalled) - return; + MessageBroker.Shutdown(RabbitMQBroker.DefaultOperationTimeout); + } - try - { - _fatalLoggingProducer?.SendMessage(new FatalErrorMessage(msg, exception), null, null); - } - catch (Exception e) - { - Logger.Error(e, "Failed to log fatal error"); - } + Logger.Info("Host stop completed"); - Stop($"Fatal error in MicroserviceHost ({msg})"); - } + // Always remember to flush! + LogManager.Shutdown(); + } - public void Wait() + /// + /// Fatal essentially just calls , but attempts to send a FatalErrorMessage to RabbitMQ first + /// + /// + /// + public void Fatal(string msg, Exception? exception) + { + Logger.Fatal(exception, msg); + if (_stopCalled) + return; + + try + { + _fatalLoggingProducer?.SendMessage(new FatalErrorMessage(msg, exception), null, null); + } + catch (Exception e) { - MessageBroker.Wait(); + Logger.Error(e, "Failed to log fatal error"); } + + Stop($"Fatal error in MicroserviceHost ({msg})"); + } + + public void Wait() + { + MessageBroker.Wait(); } } diff --git a/src/SmiServices/Common/Execution/MicroserviceHostBootstrapper.cs b/src/SmiServices/Common/Execution/MicroserviceHostBootstrapper.cs index df1521ca8..ef66b7403 100644 --- a/src/SmiServices/Common/Execution/MicroserviceHostBootstrapper.cs +++ b/src/SmiServices/Common/Execution/MicroserviceHostBootstrapper.cs @@ -1,76 +1,75 @@ using System; -namespace SmiServices.Common.Execution +namespace SmiServices.Common.Execution; + +/// +/// Wraps construction and startup of your applications MicroserviceHost. Handles Exceptions thrown during construction / setup as well as Ctrl+C support in standardised way +/// +public class MicroserviceHostBootstrapper { + private readonly Func _func; + + /// - /// Wraps construction and startup of your applications MicroserviceHost. 
Handles Exceptions thrown during construction / setup as well as Ctrl+C support in standardised way + /// Default constructor /// - public class MicroserviceHostBootstrapper + /// Construct with your host constructor call and then 'return bootStrapper.Main();' + public MicroserviceHostBootstrapper(Func func) { - private readonly Func _func; + _func = func; + } + public int Main() + { + Console.WriteLine("Bootstrapper -> Main called, constructing host"); - /// - /// Default constructor - /// - /// Construct with your host constructor call and then 'return bootStrapper.Main();' - public MicroserviceHostBootstrapper(Func func) + // Set up a periodic forced GC to avoid wasting RAM on multi-service hosts: + new System.Timers.Timer { Interval = 3600000, AutoReset = true, Enabled = true }.Elapsed += // lgtm[cs/local-not-disposed] + (o, args) => + { + System.Runtime.GCSettings.LargeObjectHeapCompactionMode = System.Runtime.GCLargeObjectHeapCompactionMode.CompactOnce; + GC.Collect(2, GCCollectionMode.Forced, true, true); + }; + + MicroserviceHost host; + + try { - _func = func; + host = _func(); } - - public int Main() + catch (Exception e) { - Console.WriteLine("Bootstrapper -> Main called, constructing host"); - - // Set up a periodic forced GC to avoid wasting RAM on multi-service hosts: - new System.Timers.Timer { Interval = 3600000, AutoReset = true, Enabled = true }.Elapsed += // lgtm[cs/local-not-disposed] - (o, args) => - { - System.Runtime.GCSettings.LargeObjectHeapCompactionMode = System.Runtime.GCLargeObjectHeapCompactionMode.CompactOnce; - GC.Collect(2, GCCollectionMode.Forced, true, true); - }; - - MicroserviceHost host; - - try - { - host = _func(); - } - catch (Exception e) - { - string nl = Environment.NewLine; - Console.Error.WriteLine($"{e}{nl}{nl}Host constructor threw an exception:{nl}{e.Message}"); - return -1; - } + string nl = Environment.NewLine; + Console.Error.WriteLine($"{e}{nl}{nl}Host constructor threw an exception:{nl}{e.Message}"); + return 
-1; + } - Console.WriteLine("Bootstrapper -> Host constructed, starting aux connections"); + Console.WriteLine("Bootstrapper -> Host constructed, starting aux connections"); - Console.CancelKeyPress += delegate (object? _, ConsoleCancelEventArgs e) - { - e.Cancel = true; - host.Stop("Ctrl+C pressed"); - }; + Console.CancelKeyPress += delegate (object? _, ConsoleCancelEventArgs e) + { + e.Cancel = true; + host.Stop("Ctrl+C pressed"); + }; - try - { - host.StartAuxConnections(); - Console.WriteLine("Bootstrapper -> Host aux connections started, calling Start()"); + try + { + host.StartAuxConnections(); + Console.WriteLine("Bootstrapper -> Host aux connections started, calling Start()"); - host.Start(); - Console.WriteLine("Bootstrapper -> Host created and started..."); - } - catch (Exception e) - { - host.Fatal("Failed to start host", e); - return -2; - } + host.Start(); + Console.WriteLine("Bootstrapper -> Host created and started..."); + } + catch (Exception e) + { + host.Fatal("Failed to start host", e); + return -2; + } - // Wait until Rabbit tasks are finished: + // Wait until Rabbit tasks are finished: - host.Wait(); - Console.WriteLine("MicroserviceHostBootstrapper exiting. Service will exit when consumer threads are joined"); - return 0; - } + host.Wait(); + Console.WriteLine("MicroserviceHostBootstrapper exiting. 
Service will exit when consumer threads are joined"); + return 0; } } diff --git a/src/SmiServices/Common/FansiImplementations.cs b/src/SmiServices/Common/FansiImplementations.cs index 23636b23b..fdd53a775 100644 --- a/src/SmiServices/Common/FansiImplementations.cs +++ b/src/SmiServices/Common/FansiImplementations.cs @@ -4,15 +4,14 @@ using FAnsi.Implementations.PostgreSql; -namespace SmiServices.Common +namespace SmiServices.Common; + +public static class FansiImplementations { - public static class FansiImplementations + public static void Load() { - public static void Load() - { - ImplementationManager.Load(); - ImplementationManager.Load(); - ImplementationManager.Load(); - } + ImplementationManager.Load(); + ImplementationManager.Load(); + ImplementationManager.Load(); } } diff --git a/src/SmiServices/Common/Helpers/DateTimeProvider.cs b/src/SmiServices/Common/Helpers/DateTimeProvider.cs index 218b8995a..5dd7b94fb 100644 --- a/src/SmiServices/Common/Helpers/DateTimeProvider.cs +++ b/src/SmiServices/Common/Helpers/DateTimeProvider.cs @@ -1,10 +1,9 @@ using System; -namespace SmiServices.Common.Helpers +namespace SmiServices.Common.Helpers; + +public class DateTimeProvider { - public class DateTimeProvider - { - public virtual DateTime UtcNow() => DateTime.UtcNow; - } + public virtual DateTime UtcNow() => DateTime.UtcNow; } diff --git a/src/SmiServices/Common/Helpers/IConsoleInput.cs b/src/SmiServices/Common/Helpers/IConsoleInput.cs index 0ffd1a338..2f2fa2ca4 100644 --- a/src/SmiServices/Common/Helpers/IConsoleInput.cs +++ b/src/SmiServices/Common/Helpers/IConsoleInput.cs @@ -1,10 +1,9 @@ -namespace SmiServices.Common.Helpers +namespace SmiServices.Common.Helpers; + +/// +/// Interface useful when testing interactive console input +/// +public interface IConsoleInput { - /// - /// Interface useful when testing interactive console input - /// - public interface IConsoleInput - { - public string? GetNextLine(); - } + public string? 
GetNextLine(); } diff --git a/src/SmiServices/Common/Helpers/MicroserviceObjectFactory.cs b/src/SmiServices/Common/Helpers/MicroserviceObjectFactory.cs index 7aefec30e..956a7053a 100644 --- a/src/SmiServices/Common/Helpers/MicroserviceObjectFactory.cs +++ b/src/SmiServices/Common/Helpers/MicroserviceObjectFactory.cs @@ -4,74 +4,73 @@ using System; using System.Reflection; -namespace SmiServices.Common.Helpers -{ - public class MicroserviceObjectFactory - { - private readonly ILogger _logger = LogManager.GetCurrentClassLogger(); +namespace SmiServices.Common.Helpers; - /// - /// Method called when fails. If not set then the Exception is simply - /// thrown. - /// - public HostFatalHandler? FatalHandler; +public class MicroserviceObjectFactory +{ + private readonly ILogger _logger = LogManager.GetCurrentClassLogger(); - /// - /// Constructs an instance of the specified and casts it to Type T (e.g. an interface). You can pass any - /// required or optional objects required for invoking the class constructor in via . - /// - /// - /// - /// - /// - public T? CreateInstance(Type toCreate, params object[] optionalConstructorParameters) - { - T? toReturn = default; + /// + /// Method called when fails. If not set then the Exception is simply + /// thrown. + /// + public HostFatalHandler? FatalHandler; - try - { - toReturn = (T)ObjectConstructor.ConstructIfPossible(toCreate, optionalConstructorParameters); + /// + /// Constructs an instance of the specified and casts it to Type T (e.g. an interface). You can pass any + /// required or optional objects required for invoking the class constructor in via . + /// + /// + /// + /// + /// + public T? CreateInstance(Type toCreate, params object[] optionalConstructorParameters) + { + T? 
toReturn = default; - if (optionalConstructorParameters.Length > 0 && toReturn == null) - toReturn = (T)ObjectConstructor.Construct(toCreate); // Try blank constructor + try + { + toReturn = (T)ObjectConstructor.ConstructIfPossible(toCreate, optionalConstructorParameters); - if (toReturn == null) - throw new Exception("ConstructIfPossible returned null"); + if (optionalConstructorParameters.Length > 0 && toReturn == null) + toReturn = (T)ObjectConstructor.Construct(toCreate); // Try blank constructor - _logger.Info($"Successfully constructed Type '{toReturn.GetType()}'"); - } - catch (Exception e) - { - _logger.Error(e, $"Failed to construct Type '{typeof(T)}'"); + if (toReturn == null) + throw new Exception("ConstructIfPossible returned null"); - if (FatalHandler != null) - FatalHandler(this, new FatalErrorEventArgs($"Error constructing Type {toCreate}", e)); - else - throw; - } + _logger.Info($"Successfully constructed Type '{toReturn.GetType()}'"); + } + catch (Exception e) + { + _logger.Error(e, $"Failed to construct Type '{typeof(T)}'"); - return toReturn; + if (FatalHandler != null) + FatalHandler(this, new FatalErrorEventArgs($"Error constructing Type {toCreate}", e)); + else + throw; } - /// - /// Constructs an instance of the specified in the specified Assembly and casts it to Type T (e.g. an interface). - /// You can pass any required or optional objects required for invoking the class constructor in via . - /// - /// - /// - /// - /// - /// - public T? CreateInstance(string typeName, Assembly assembly, params object[] optionalConstructorParameters) - { - if (string.IsNullOrWhiteSpace(typeName)) - { - _logger.Warn($"No Type name specified for T {typeof(T).Name}"); - return default; - } + return toReturn; + } - Type toCreate = assembly.GetType(typeName, true) ?? 
throw new Exception($"Could not create type {typeName} from the given Assembly {assembly}"); - return CreateInstance(toCreate, optionalConstructorParameters); + /// + /// Constructs an instance of the specified in the specified Assembly and casts it to Type T (e.g. an interface). + /// You can pass any required or optional objects required for invoking the class constructor in via . + /// + /// + /// + /// + /// + /// + public T? CreateInstance(string typeName, Assembly assembly, params object[] optionalConstructorParameters) + { + if (string.IsNullOrWhiteSpace(typeName)) + { + _logger.Warn($"No Type name specified for T {typeof(T).Name}"); + return default; } + + Type toCreate = assembly.GetType(typeName, true) ?? throw new Exception($"Could not create type {typeName} from the given Assembly {assembly}"); + return CreateInstance(toCreate, optionalConstructorParameters); } } diff --git a/src/SmiServices/Common/Helpers/RealConsoleInput.cs b/src/SmiServices/Common/Helpers/RealConsoleInput.cs index d0c11f952..6a63c6dcc 100644 --- a/src/SmiServices/Common/Helpers/RealConsoleInput.cs +++ b/src/SmiServices/Common/Helpers/RealConsoleInput.cs @@ -1,12 +1,11 @@ using System; -namespace SmiServices.Common.Helpers +namespace SmiServices.Common.Helpers; + +/// +/// Returns the next line from the console +/// +public class RealConsoleInput : IConsoleInput { - /// - /// Returns the next line from the console - /// - public class RealConsoleInput : IConsoleInput - { - public string? GetNextLine() => Console.ReadLine()?.Trim(); - } + public string? 
GetNextLine() => Console.ReadLine()?.Trim(); } diff --git a/src/SmiServices/Common/IMessageBroker.cs b/src/SmiServices/Common/IMessageBroker.cs index 2adf349f5..dbccdf196 100644 --- a/src/SmiServices/Common/IMessageBroker.cs +++ b/src/SmiServices/Common/IMessageBroker.cs @@ -4,26 +4,25 @@ using SmiServices.Common.Options; using System; -namespace SmiServices.Common +namespace SmiServices.Common; + +public interface IMessageBroker { - public interface IMessageBroker - { - bool HasConsumers { get; } + bool HasConsumers { get; } - Guid StartConsumer(ConsumerOptions consumerOptions, IConsumer consumer, bool isSolo) where T : IMessage; + Guid StartConsumer(ConsumerOptions consumerOptions, IConsumer consumer, bool isSolo) where T : IMessage; - void StartControlConsumer(IControlMessageConsumer controlMessageConsumer); + void StartControlConsumer(IControlMessageConsumer controlMessageConsumer); - void StopConsumer(Guid taskId, TimeSpan timeout); + void StopConsumer(Guid taskId, TimeSpan timeout); - IProducerModel SetupProducer(ProducerOptions producerOptions, bool isBatch); + IProducerModel SetupProducer(ProducerOptions producerOptions, bool isBatch); - IModel GetModel(string connectionName); + IModel GetModel(string connectionName); - void Shutdown(TimeSpan timeout); - public void Wait(); + void Shutdown(TimeSpan timeout); + public void Wait(); - // Dreams of .NET Core 3.0... - // void Shutdown() => Shutdown(TimeSpan.FromSeconds(5)); - } + // Dreams of .NET Core 3.0... 
+ // void Shutdown() => Shutdown(TimeSpan.FromSeconds(5)); } diff --git a/src/SmiServices/Common/MessageSerialization/JsonCompatibleDictionary.cs b/src/SmiServices/Common/MessageSerialization/JsonCompatibleDictionary.cs index 65879644e..cdda40c01 100644 --- a/src/SmiServices/Common/MessageSerialization/JsonCompatibleDictionary.cs +++ b/src/SmiServices/Common/MessageSerialization/JsonCompatibleDictionary.cs @@ -3,60 +3,59 @@ using System.Collections.Generic; using System.Linq; -namespace SmiServices.Common.MessageSerialization +namespace SmiServices.Common.MessageSerialization; + +/// +/// Allows Json serialization of complex key Types. +/// +/// Out of the box Json serializes Dictionary keys using ToString and seems to ignore any custom JsonConverter specified on the key class. This class works around that behaviour +/// by only serializing an array of keys and an array of values. Once both are populated then the underlying Dictionary Key/Values are created. +/// +/// +/// +[JsonObject(MemberSerialization.OptIn)] +public class JsonCompatibleDictionary : Dictionary where TK : notnull { - /// - /// Allows Json serialization of complex key Types. - /// - /// Out of the box Json serializes Dictionary keys using ToString and seems to ignore any custom JsonConverter specified on the key class. This class works around that behaviour - /// by only serializing an array of keys and an array of values. Once both are populated then the underlying Dictionary Key/Values are created. - /// - /// - /// - [JsonObject(MemberSerialization.OptIn)] - public class JsonCompatibleDictionary : Dictionary where TK : notnull + [JsonProperty] + public TK[] SerializeableKeys { - [JsonProperty] - public TK[] SerializeableKeys - { - get { return [.. Keys]; } - set { Hydrate(value); } - } - - [JsonProperty] - public TV[] SerializeableValues - { - get { return [.. Values]; } - set { Hydrate(value); } - } - - private TK[]? _hydrateV1; - private TV[]? 
_hydrateV2; - - private void Hydrate(TK[] value) - { - _hydrateV1 = value; - Hydrate(_hydrateV1, _hydrateV2); - } - - private void Hydrate(TV[]? value) - { - _hydrateV2 = value; - Hydrate(_hydrateV1, _hydrateV2); - } - - private void Hydrate(TK[]? hydrateV1, TV[]? hydrateV2) - { - if (hydrateV1 == null || hydrateV2 == null) - return; - - if (_hydrateV1!.Length != hydrateV2.Length) - return; - - Clear(); - - for (int i = 0; i < _hydrateV1.Length; i++) - Add(_hydrateV1[i], _hydrateV2![i]); - } + get { return [.. Keys]; } + set { Hydrate(value); } + } + + [JsonProperty] + public TV[] SerializeableValues + { + get { return [.. Values]; } + set { Hydrate(value); } + } + + private TK[]? _hydrateV1; + private TV[]? _hydrateV2; + + private void Hydrate(TK[] value) + { + _hydrateV1 = value; + Hydrate(_hydrateV1, _hydrateV2); + } + + private void Hydrate(TV[]? value) + { + _hydrateV2 = value; + Hydrate(_hydrateV1, _hydrateV2); + } + + private void Hydrate(TK[]? hydrateV1, TV[]? hydrateV2) + { + if (hydrateV1 == null || hydrateV2 == null) + return; + + if (_hydrateV1!.Length != hydrateV2.Length) + return; + + Clear(); + + for (int i = 0; i < _hydrateV1.Length; i++) + Add(_hydrateV1[i], _hydrateV2![i]); } } diff --git a/src/SmiServices/Common/MessageSerialization/JsonConvert.cs b/src/SmiServices/Common/MessageSerialization/JsonConvert.cs index 716090905..299b8a683 100644 --- a/src/SmiServices/Common/MessageSerialization/JsonConvert.cs +++ b/src/SmiServices/Common/MessageSerialization/JsonConvert.cs @@ -5,71 +5,70 @@ using System.Collections.Generic; using System.Text; -namespace SmiServices.Common.MessageSerialization +namespace SmiServices.Common.MessageSerialization; + +/// +/// Helper class to (de)serialize objects from RabbitMQ messages. +/// +public static class JsonConvert { - /// - /// Helper class to (de)serialize objects from RabbitMQ messages. 
- /// - public static class JsonConvert - { - private static List _errors = []; + private static List _errors = []; - private static readonly JsonSerializerSettings _serializerSettings = new() + private static readonly JsonSerializerSettings _serializerSettings = new() + { + Error = delegate (object? sender, ErrorEventArgs args) { - Error = delegate (object? sender, ErrorEventArgs args) - { - _errors.Add(args.ErrorContext.Error.Message); - args.ErrorContext.Handled = true; - }, - MissingMemberHandling = MissingMemberHandling.Error - }; + _errors.Add(args.ErrorContext.Error.Message); + args.ErrorContext.Handled = true; + }, + MissingMemberHandling = MissingMemberHandling.Error + }; - /// - /// Deserialize a message from a string. - /// - /// The type of to deserialize into. - /// The message to deserialize. - /// - public static T DeserializeObject(string message) where T : IMessage - { - _errors = []; + /// + /// Deserialize a message from a string. + /// + /// The type of to deserialize into. + /// The message to deserialize. + /// + public static T DeserializeObject(string message) where T : IMessage + { + _errors = []; - var messageObj = Newtonsoft.Json.JsonConvert.DeserializeObject(message, _serializerSettings) - ?? throw new JsonSerializationException("Deserialized message object is null, message was empty."); + var messageObj = Newtonsoft.Json.JsonConvert.DeserializeObject(message, _serializerSettings) + ?? throw new JsonSerializationException("Deserialized message object is null, message was empty."); - if (_errors.Count == 0) - return messageObj; + if (_errors.Count == 0) + return messageObj; - var e = new JsonSerializationException("Couldn't deserialize message to " + typeof(T).FullName + ". See exception data."); + var e = new JsonSerializationException("Couldn't deserialize message to " + typeof(T).FullName + ". 
See exception data."); - for (var i = 0; i < _errors.Count; i++) - e.Data.Add(i, _errors[i]); + for (var i = 0; i < _errors.Count; i++) + e.Data.Add(i, _errors[i]); - throw e; - } + throw e; + } - /// - /// Deserialize a message straight from the . Encoding defaults to UTF8 if not set. - /// - /// The type of to deserialize into. - /// The message and all associated information. - /// - public static T DeserializeObject(BasicDeliverEventArgs deliverArgs) where T : IMessage - { - Encoding enc = Encoding.UTF8; + /// + /// Deserialize a message straight from the . Encoding defaults to UTF8 if not set. + /// + /// The type of to deserialize into. + /// The message and all associated information. + /// + public static T DeserializeObject(BasicDeliverEventArgs deliverArgs) where T : IMessage + { + Encoding enc = Encoding.UTF8; - if (deliverArgs.BasicProperties != null && deliverArgs.BasicProperties.ContentEncoding != null) - enc = Encoding.GetEncoding(deliverArgs.BasicProperties.ContentEncoding); + if (deliverArgs.BasicProperties != null && deliverArgs.BasicProperties.ContentEncoding != null) + enc = Encoding.GetEncoding(deliverArgs.BasicProperties.ContentEncoding); - //TODO This might crash if for some reason we have invalid Unicode points - return DeserializeObject(enc.GetString(deliverArgs.Body.Span)); - } + //TODO This might crash if for some reason we have invalid Unicode points + return DeserializeObject(enc.GetString(deliverArgs.Body.Span)); + } - public static T DeserializeObject(byte[] body) where T : IMessage - { - Encoding enc = Encoding.UTF8; - return DeserializeObject(enc.GetString(body)); - } + public static T DeserializeObject(byte[] body) where T : IMessage + { + Encoding enc = Encoding.UTF8; + return DeserializeObject(enc.GetString(body)); } } diff --git a/src/SmiServices/Common/Messages/AccessionDirectoryMessage.cs b/src/SmiServices/Common/Messages/AccessionDirectoryMessage.cs index e01876eee..45936bbd4 100644 --- 
a/src/SmiServices/Common/Messages/AccessionDirectoryMessage.cs +++ b/src/SmiServices/Common/Messages/AccessionDirectoryMessage.cs @@ -4,31 +4,30 @@ using System; using System.IO; -namespace SmiServices.Common.Messages +namespace SmiServices.Common.Messages; + +/// +/// Object representing an accession directory message. +/// +public sealed class AccessionDirectoryMessage : MemberwiseEquatable, IMessage { /// - /// Object representing an accession directory message. + /// Directory path relative to the root path. /// - public sealed class AccessionDirectoryMessage : MemberwiseEquatable, IMessage - { - /// - /// Directory path relative to the root path. - /// - [JsonProperty(Required = Required.Always)] - public string DirectoryPath { get; set; } = null!; + [JsonProperty(Required = Required.Always)] + public string DirectoryPath { get; set; } = null!; - public AccessionDirectoryMessage() { } + public AccessionDirectoryMessage() { } - public AccessionDirectoryMessage(string root, DirectoryInfo directory) - { - if (!directory.FullName.StartsWith(root, StringComparison.CurrentCultureIgnoreCase)) - throw new Exception("Directory '" + directory + "' did not share a common root with the root '" + root + "'"); + public AccessionDirectoryMessage(string root, DirectoryInfo directory) + { + if (!directory.FullName.StartsWith(root, StringComparison.CurrentCultureIgnoreCase)) + throw new Exception("Directory '" + directory + "' did not share a common root with the root '" + root + "'"); - DirectoryPath = directory.FullName[root.Length..].TrimStart(Path.DirectorySeparatorChar); - } + DirectoryPath = directory.FullName[root.Length..].TrimStart(Path.DirectorySeparatorChar); + } - public string GetAbsolutePath(string rootPath) => Path.Combine(rootPath, DirectoryPath); + public string GetAbsolutePath(string rootPath) => Path.Combine(rootPath, DirectoryPath); - public override string ToString() => $"AccessionDirectoryMessage[DirectoryPath={DirectoryPath}]"; - } + public override 
string ToString() => $"AccessionDirectoryMessage[DirectoryPath={DirectoryPath}]"; } diff --git a/src/SmiServices/Common/Messages/DicomFileMessage.cs b/src/SmiServices/Common/Messages/DicomFileMessage.cs index a500b6d5b..44b90613e 100644 --- a/src/SmiServices/Common/Messages/DicomFileMessage.cs +++ b/src/SmiServices/Common/Messages/DicomFileMessage.cs @@ -5,107 +5,106 @@ using System.IO; using System.Text; -namespace SmiServices.Common.Messages +namespace SmiServices.Common.Messages; + +/// +/// +/// Object representing a dicom file message. +/// https://github.com/HicServices/SMIPlugin/wiki/SMI-RabbitMQ-messages-and-queues#dicomfilemessage +/// +public sealed class DicomFileMessage : MemberwiseEquatable, IFileReferenceMessage { - /// /// - /// Object representing a dicom file message. - /// https://github.com/HicServices/SMIPlugin/wiki/SMI-RabbitMQ-messages-and-queues#dicomfilemessage + /// File path relative to the root path. /// - public sealed class DicomFileMessage : MemberwiseEquatable, IFileReferenceMessage - { - /// - /// File path relative to the root path. - /// - [JsonProperty(Required = Required.Always)] - public string DicomFilePath { get; set; } = null!; - - public long DicomFileSize { get; set; } = -1; + [JsonProperty(Required = Required.Always)] + public string DicomFilePath { get; set; } = null!; - /// - /// Dicom tag (0020,000D). - /// - [JsonProperty(Required = Required.Always)] - public string StudyInstanceUID { get; set; } = null!; + public long DicomFileSize { get; set; } = -1; - /// - /// Dicom tag (0020,000E). - /// - [JsonProperty(Required = Required.Always)] - public string SeriesInstanceUID { get; set; } = null!; + /// + /// Dicom tag (0020,000D). + /// + [JsonProperty(Required = Required.Always)] + public string StudyInstanceUID { get; set; } = null!; - /// - /// Dicom tag (0008,0018) - /// - [JsonProperty(Required = Required.Always)] - public string SOPInstanceUID { get; set; } = null!; + /// + /// Dicom tag (0020,000E). 
+ /// + [JsonProperty(Required = Required.Always)] + public string SeriesInstanceUID { get; set; } = null!; - /// - /// Key-value pairs of Dicom tags and their values. - /// - [JsonProperty(Required = Required.Always)] - public string DicomDataset { get; set; } = null!; + /// + /// Dicom tag (0008,0018) + /// + [JsonProperty(Required = Required.Always)] + public string SOPInstanceUID { get; set; } = null!; + /// + /// Key-value pairs of Dicom tags and their values. + /// + [JsonProperty(Required = Required.Always)] + public string DicomDataset { get; set; } = null!; - public DicomFileMessage() { } - public DicomFileMessage(string root, FileInfo file) - : this(root, file.FullName) { } + public DicomFileMessage() { } - public DicomFileMessage(string root, string file) - { - if (!file.StartsWith(root, StringComparison.CurrentCultureIgnoreCase)) - throw new Exception("File '" + file + "' did not share a common root with the root '" + root + "'"); + public DicomFileMessage(string root, FileInfo file) + : this(root, file.FullName) { } - DicomFilePath = file[root.Length..].TrimStart(Path.DirectorySeparatorChar); - } + public DicomFileMessage(string root, string file) + { + if (!file.StartsWith(root, StringComparison.CurrentCultureIgnoreCase)) + throw new Exception("File '" + file + "' did not share a common root with the root '" + root + "'"); - public string GetAbsolutePath(string rootPath) - { - return Path.Combine(rootPath, DicomFilePath); - } + DicomFilePath = file[root.Length..].TrimStart(Path.DirectorySeparatorChar); + } - public bool Validate(string fileSystemRoot) - { - var absolutePath = GetAbsolutePath(fileSystemRoot); + public string GetAbsolutePath(string rootPath) + { + return Path.Combine(rootPath, DicomFilePath); + } - if (string.IsNullOrWhiteSpace(absolutePath)) - return false; + public bool Validate(string fileSystemRoot) + { + var absolutePath = GetAbsolutePath(fileSystemRoot); - try - { - var dir = new FileInfo(absolutePath); + if 
(string.IsNullOrWhiteSpace(absolutePath)) + return false; - //There file referenced must exist - return dir.Exists; - } - catch (Exception) - { - return false; - } + try + { + var dir = new FileInfo(absolutePath); + //There file referenced must exist + return dir.Exists; } - - public bool VerifyPopulated() + catch (Exception) { - return !string.IsNullOrWhiteSpace(DicomFilePath) && - !string.IsNullOrWhiteSpace(StudyInstanceUID) && - !string.IsNullOrWhiteSpace(SeriesInstanceUID) && - !string.IsNullOrWhiteSpace(SOPInstanceUID) && - !string.IsNullOrWhiteSpace(DicomDataset); + return false; } - public override string ToString() - { - var sb = new StringBuilder(); + } - sb.AppendLine("DicomFilePath: " + DicomFilePath); - sb.AppendLine("StudyInstanceUID: " + StudyInstanceUID); - sb.AppendLine("SeriesInstanceUID: " + SeriesInstanceUID); - sb.AppendLine("SOPInstanceUID: " + SOPInstanceUID); - sb.AppendLine("=== DicomDataset ===\n" + DicomDataset + "\n===================="); + public bool VerifyPopulated() + { + return !string.IsNullOrWhiteSpace(DicomFilePath) && + !string.IsNullOrWhiteSpace(StudyInstanceUID) && + !string.IsNullOrWhiteSpace(SeriesInstanceUID) && + !string.IsNullOrWhiteSpace(SOPInstanceUID) && + !string.IsNullOrWhiteSpace(DicomDataset); + } - return sb.ToString(); - } + public override string ToString() + { + var sb = new StringBuilder(); + + sb.AppendLine("DicomFilePath: " + DicomFilePath); + sb.AppendLine("StudyInstanceUID: " + StudyInstanceUID); + sb.AppendLine("SeriesInstanceUID: " + SeriesInstanceUID); + sb.AppendLine("SOPInstanceUID: " + SOPInstanceUID); + sb.AppendLine("=== DicomDataset ===\n" + DicomDataset + "\n===================="); + + return sb.ToString(); } } diff --git a/src/SmiServices/Common/Messages/Extraction/ExtractFileCollectionInfoMessage.cs b/src/SmiServices/Common/Messages/Extraction/ExtractFileCollectionInfoMessage.cs index 35b2b4b3b..f2e723457 100644 --- a/src/SmiServices/Common/Messages/Extraction/ExtractFileCollectionInfoMessage.cs 
+++ b/src/SmiServices/Common/Messages/Extraction/ExtractFileCollectionInfoMessage.cs @@ -3,48 +3,47 @@ using System; using System.Collections.Generic; -namespace SmiServices.Common.Messages.Extraction +namespace SmiServices.Common.Messages.Extraction; + +/// +/// Describes all the sent for a single key Tag (e.g. SeriesInstanceUID) value provided by +/// (i.e. a single entry in ). +/// +public class ExtractFileCollectionInfoMessage : ExtractMessage { /// - /// Describes all the sent for a single key Tag (e.g. SeriesInstanceUID) value provided by - /// (i.e. a single entry in ). + /// Contains the value of the tag which is being extracted + /// + [JsonProperty(Required = Required.Always)] + public string KeyValue { get; set; } = null!; + + /// + /// Collection of all the messages sent out as the result of an (headers only) along with the file path extracted /// - public class ExtractFileCollectionInfoMessage : ExtractMessage + [JsonProperty(Required = Required.Always)] + public JsonCompatibleDictionary ExtractFileMessagesDispatched { get; set; } = null!; + + /// + /// All the reasons for message rejection and count of occurrences + /// + [JsonProperty(Required = Required.Default)] + public Dictionary RejectionReasons { get; set; } = new Dictionary(StringComparer.CurrentCultureIgnoreCase); + + + [JsonConstructor] + public ExtractFileCollectionInfoMessage() + { + ExtractFileMessagesDispatched = []; + } + + public ExtractFileCollectionInfoMessage(ExtractionRequestMessage request) + : base(request) + { + ExtractFileMessagesDispatched = []; + } + + public override string ToString() { - /// - /// Contains the value of the tag which is being extracted - /// - [JsonProperty(Required = Required.Always)] - public string KeyValue { get; set; } = null!; - - /// - /// Collection of all the messages sent out as the result of an (headers only) along with the file path extracted - /// - [JsonProperty(Required = Required.Always)] - public JsonCompatibleDictionary 
ExtractFileMessagesDispatched { get; set; } = null!; - - /// - /// All the reasons for message rejection and count of occurrences - /// - [JsonProperty(Required = Required.Default)] - public Dictionary RejectionReasons { get; set; } = new Dictionary(StringComparer.CurrentCultureIgnoreCase); - - - [JsonConstructor] - public ExtractFileCollectionInfoMessage() - { - ExtractFileMessagesDispatched = []; - } - - public ExtractFileCollectionInfoMessage(ExtractionRequestMessage request) - : base(request) - { - ExtractFileMessagesDispatched = []; - } - - public override string ToString() - { - return base.ToString() + $",KeyValue={KeyValue},ExtractFileMessagesDispatched={ExtractFileMessagesDispatched.Count},RejectionReasons={RejectionReasons.Count},"; - } + return base.ToString() + $",KeyValue={KeyValue},ExtractFileMessagesDispatched={ExtractFileMessagesDispatched.Count},RejectionReasons={RejectionReasons.Count},"; } } diff --git a/src/SmiServices/Common/Messages/Extraction/ExtractFileMessage.cs b/src/SmiServices/Common/Messages/Extraction/ExtractFileMessage.cs index 83f9e0b68..53ae7c9f0 100644 --- a/src/SmiServices/Common/Messages/Extraction/ExtractFileMessage.cs +++ b/src/SmiServices/Common/Messages/Extraction/ExtractFileMessage.cs @@ -1,31 +1,30 @@ using Newtonsoft.Json; -namespace SmiServices.Common.Messages.Extraction +namespace SmiServices.Common.Messages.Extraction; + +/// +/// Describes a single image which should be extracted and anonymised using the provided anonymisation script +/// +public class ExtractFileMessage : ExtractMessage, IFileReferenceMessage { /// - /// Describes a single image which should be extracted and anonymised using the provided anonymisation script + /// The file path where the original dicom file can be found, relative to the FileSystemRoot /// - public class ExtractFileMessage : ExtractMessage, IFileReferenceMessage - { - /// - /// The file path where the original dicom file can be found, relative to the FileSystemRoot - /// - 
[JsonProperty(Required = Required.Always)] - public string DicomFilePath { get; set; } = null!; + [JsonProperty(Required = Required.Always)] + public string DicomFilePath { get; set; } = null!; - /// - /// The subdirectory and dicom filename within the ExtractionDirectory to extract the identifiable image (specified by ) into. For example - /// "Series132\1234-an.dcm" - /// - [JsonProperty(Required = Required.Always)] - public string OutputPath { get; set; } = null!; + /// + /// The subdirectory and dicom filename within the ExtractionDirectory to extract the identifiable image (specified by ) into. For example + /// "Series132\1234-an.dcm" + /// + [JsonProperty(Required = Required.Always)] + public string OutputPath { get; set; } = null!; - [JsonConstructor] - public ExtractFileMessage() { } + [JsonConstructor] + public ExtractFileMessage() { } - public ExtractFileMessage(ExtractionRequestMessage request) - : base(request) { } - } + public ExtractFileMessage(ExtractionRequestMessage request) + : base(request) { } } diff --git a/src/SmiServices/Common/Messages/Extraction/ExtractMessage.cs b/src/SmiServices/Common/Messages/Extraction/ExtractMessage.cs index be037bd53..c7de2ee62 100644 --- a/src/SmiServices/Common/Messages/Extraction/ExtractMessage.cs +++ b/src/SmiServices/Common/Messages/Extraction/ExtractMessage.cs @@ -2,86 +2,85 @@ using Newtonsoft.Json; using System; -namespace SmiServices.Common.Messages.Extraction +namespace SmiServices.Common.Messages.Extraction; + +/// +/// Base class for all messages relating to the extract process +/// +public abstract class ExtractMessage : MemberwiseEquatable, IExtractMessage { - /// - /// Base class for all messages relating to the extract process - /// - public abstract class ExtractMessage : MemberwiseEquatable, IExtractMessage - { - [JsonProperty(Required = Required.Always)] - public Guid ExtractionJobIdentifier { get; set; } + [JsonProperty(Required = Required.Always)] + public Guid ExtractionJobIdentifier { get; 
set; } - [JsonProperty(Required = Required.Always)] - public string ProjectNumber { get; set; } = null!; + [JsonProperty(Required = Required.Always)] + public string ProjectNumber { get; set; } = null!; - [JsonProperty(Required = Required.Always)] - public string ExtractionDirectory { get; set; } = null!; + [JsonProperty(Required = Required.Always)] + public string ExtractionDirectory { get; set; } = null!; - [JsonProperty(Required = Required.Always)] - public string Modality { get; set; } = null!; + [JsonProperty(Required = Required.Always)] + public string Modality { get; set; } = null!; - [JsonProperty(Required = Required.Always)] - public DateTime JobSubmittedAt { get; set; } + [JsonProperty(Required = Required.Always)] + public DateTime JobSubmittedAt { get; set; } - [JsonProperty(Required = Required.Always)] - public bool IsIdentifiableExtraction { get; set; } + [JsonProperty(Required = Required.Always)] + public bool IsIdentifiableExtraction { get; set; } - [JsonProperty(Required = Required.Always)] - public bool IsNoFilterExtraction { get; set; } + [JsonProperty(Required = Required.Always)] + public bool IsNoFilterExtraction { get; set; } - [JsonProperty(Required = Required.Always)] - public bool IsPooledExtraction { get; set; } + [JsonProperty(Required = Required.Always)] + public bool IsPooledExtraction { get; set; } - [JsonConstructor] - protected ExtractMessage() { } + [JsonConstructor] + protected ExtractMessage() { } - protected ExtractMessage( - Guid extractionJobIdentifier, - string projectNumber, - string extractionDirectory, - string modality, - DateTime jobSubmittedAt, - bool isIdentifiableExtraction, - bool isNoFilterExtraction, - bool isPooledExtraction - ) - : this() - { - ExtractionJobIdentifier = extractionJobIdentifier; - ProjectNumber = projectNumber; - ExtractionDirectory = extractionDirectory; - Modality = modality; - JobSubmittedAt = jobSubmittedAt; - IsIdentifiableExtraction = isIdentifiableExtraction; - IsNoFilterExtraction = 
isNoFilterExtraction; - IsPooledExtraction = isPooledExtraction; - } + protected ExtractMessage( + Guid extractionJobIdentifier, + string projectNumber, + string extractionDirectory, + string modality, + DateTime jobSubmittedAt, + bool isIdentifiableExtraction, + bool isNoFilterExtraction, + bool isPooledExtraction + ) + : this() + { + ExtractionJobIdentifier = extractionJobIdentifier; + ProjectNumber = projectNumber; + ExtractionDirectory = extractionDirectory; + Modality = modality; + JobSubmittedAt = jobSubmittedAt; + IsIdentifiableExtraction = isIdentifiableExtraction; + IsNoFilterExtraction = isNoFilterExtraction; + IsPooledExtraction = isPooledExtraction; + } - protected ExtractMessage(IExtractMessage request) - : this( - request.ExtractionJobIdentifier, - request.ProjectNumber, - request.ExtractionDirectory, - request.Modality, - request.JobSubmittedAt, - request.IsIdentifiableExtraction, - request.IsNoFilterExtraction, - request.IsPooledExtraction - ) - { } + protected ExtractMessage(IExtractMessage request) + : this( + request.ExtractionJobIdentifier, + request.ProjectNumber, + request.ExtractionDirectory, + request.Modality, + request.JobSubmittedAt, + request.IsIdentifiableExtraction, + request.IsNoFilterExtraction, + request.IsPooledExtraction + ) + { } - public override string ToString() => - $"ExtractionJobIdentifier={ExtractionJobIdentifier}, " + - $"ProjectNumber={ProjectNumber}, " + - $"ExtractionDirectory={ExtractionDirectory}, " + - $"Modality={Modality}, " + - $"JobSubmittedAt={JobSubmittedAt:s}, " + - $"IsIdentifiableExtraction={IsIdentifiableExtraction}, " + - $"IsNoFilterExtraction={IsNoFilterExtraction}, " + - $"IsPooledExtraction={IsPooledExtraction}, " + - ""; - } + public override string ToString() => + $"ExtractionJobIdentifier={ExtractionJobIdentifier}, " + + $"ProjectNumber={ProjectNumber}, " + + $"ExtractionDirectory={ExtractionDirectory}, " + + $"Modality={Modality}, " + + $"JobSubmittedAt={JobSubmittedAt:s}, " + + 
$"IsIdentifiableExtraction={IsIdentifiableExtraction}, " + + $"IsNoFilterExtraction={IsNoFilterExtraction}, " + + $"IsPooledExtraction={IsPooledExtraction}, " + + ""; } diff --git a/src/SmiServices/Common/Messages/Extraction/ExtractedFileStatus.cs b/src/SmiServices/Common/Messages/Extraction/ExtractedFileStatus.cs index 2f6672949..57a001808 100644 --- a/src/SmiServices/Common/Messages/Extraction/ExtractedFileStatus.cs +++ b/src/SmiServices/Common/Messages/Extraction/ExtractedFileStatus.cs @@ -1,30 +1,29 @@ -namespace SmiServices.Common.Messages.Extraction +namespace SmiServices.Common.Messages.Extraction; + +public enum ExtractedFileStatus { - public enum ExtractedFileStatus - { - /// - /// Unused placeholder value - /// - None = 0, + /// + /// Unused placeholder value + /// + None = 0, - /// - /// The file has been anonymised successfully - /// - Anonymised, + /// + /// The file has been anonymised successfully + /// + Anonymised, - /// - /// The file could not be anonymised and will not be retired - /// - ErrorWontRetry, + /// + /// The file could not be anonymised and will not be retired + /// + ErrorWontRetry, - /// - /// The source file could not be found under the given filesystem root - /// - FileMissing, + /// + /// The source file could not be found under the given filesystem root + /// + FileMissing, - /// - /// The source file was successfully copied to the destination - /// - Copied, - } + /// + /// The source file was successfully copied to the destination + /// + Copied, } diff --git a/src/SmiServices/Common/Messages/Extraction/ExtractedFileStatusMessage.cs b/src/SmiServices/Common/Messages/Extraction/ExtractedFileStatusMessage.cs index 9f8b214a4..9e3e8e159 100644 --- a/src/SmiServices/Common/Messages/Extraction/ExtractedFileStatusMessage.cs +++ b/src/SmiServices/Common/Messages/Extraction/ExtractedFileStatusMessage.cs @@ -1,53 +1,52 @@ using Newtonsoft.Json; -namespace SmiServices.Common.Messages.Extraction +namespace 
SmiServices.Common.Messages.Extraction; + +/// +/// Status message sent by services which extract files (CTP, FileCopier) +/// +public class ExtractedFileStatusMessage : ExtractMessage, IFileReferenceMessage { /// - /// Status message sent by services which extract files (CTP, FileCopier) + /// Original file path + /// + [JsonProperty(Required = Required.Always)] + public string DicomFilePath { get; set; } = null!; + + /// + /// The for this file + /// + [JsonProperty(Required = Required.Always)] + public ExtractedFileStatus Status { get; set; } + + /// + /// Output file path, relative to the extraction directory. Only required if an output file has been produced /// - public class ExtractedFileStatusMessage : ExtractMessage, IFileReferenceMessage + [JsonProperty(Required = Required.AllowNull)] + public string? OutputFilePath { get; set; } + + /// + /// Message required if Status is not 0 + /// + [JsonProperty(Required = Required.AllowNull)] + public string? StatusMessage { get; set; } + + + [JsonConstructor] + public ExtractedFileStatusMessage() { } + + public ExtractedFileStatusMessage(ExtractFileMessage request) + : base(request) { - /// - /// Original file path - /// - [JsonProperty(Required = Required.Always)] - public string DicomFilePath { get; set; } = null!; - - /// - /// The for this file - /// - [JsonProperty(Required = Required.Always)] - public ExtractedFileStatus Status { get; set; } - - /// - /// Output file path, relative to the extraction directory. Only required if an output file has been produced - /// - [JsonProperty(Required = Required.AllowNull)] - public string? OutputFilePath { get; set; } - - /// - /// Message required if Status is not 0 - /// - [JsonProperty(Required = Required.AllowNull)] - public string? 
StatusMessage { get; set; } - - - [JsonConstructor] - public ExtractedFileStatusMessage() { } - - public ExtractedFileStatusMessage(ExtractFileMessage request) - : base(request) - { - DicomFilePath = request.DicomFilePath; - OutputFilePath = request.OutputPath; - } - - public override string ToString() => - $"{base.ToString()}," + - $"DicomFilePath={DicomFilePath}," + - $"ExtractedFileStatus={Status}," + - $"OutputFilePath={OutputFilePath}," + - $"StatusMessage={StatusMessage}," + - ""; + DicomFilePath = request.DicomFilePath; + OutputFilePath = request.OutputPath; } + + public override string ToString() => + $"{base.ToString()}," + + $"DicomFilePath={DicomFilePath}," + + $"ExtractedFileStatus={Status}," + + $"OutputFilePath={OutputFilePath}," + + $"StatusMessage={StatusMessage}," + + ""; } diff --git a/src/SmiServices/Common/Messages/Extraction/ExtractedFileVerificationMessage.cs b/src/SmiServices/Common/Messages/Extraction/ExtractedFileVerificationMessage.cs index efbd40b5c..a26e4f7c8 100644 --- a/src/SmiServices/Common/Messages/Extraction/ExtractedFileVerificationMessage.cs +++ b/src/SmiServices/Common/Messages/Extraction/ExtractedFileVerificationMessage.cs @@ -1,36 +1,35 @@ using Newtonsoft.Json; using System; -namespace SmiServices.Common.Messages.Extraction +namespace SmiServices.Common.Messages.Extraction; + +public class ExtractedFileVerificationMessage : ExtractMessage, IFileReferenceMessage { - public class ExtractedFileVerificationMessage : ExtractMessage, IFileReferenceMessage - { - [JsonProperty(Required = Required.Always)] - public VerifiedFileStatus Status { get; set; } + [JsonProperty(Required = Required.Always)] + public VerifiedFileStatus Status { get; set; } - [JsonProperty(Required = Required.Always)] - public string Report { get; set; } = null!; + [JsonProperty(Required = Required.Always)] + public string Report { get; set; } = null!; - /// - /// The originally sourced origin (identifiable file path). 
- /// - [JsonProperty(Required = Required.Always)] - public string DicomFilePath { get; set; } = null!; + /// + /// The originally sourced origin (identifiable file path). + /// + [JsonProperty(Required = Required.Always)] + public string DicomFilePath { get; set; } = null!; - /// - /// Output file path, relative to the extraction directory. Only required if an output file has been produced - /// - [JsonProperty(Required = Required.Always)] - public string OutputFilePath { get; set; } = null!; + /// + /// Output file path, relative to the extraction directory. Only required if an output file has been produced + /// + [JsonProperty(Required = Required.Always)] + public string OutputFilePath { get; set; } = null!; - [JsonConstructor] - public ExtractedFileVerificationMessage() { } + [JsonConstructor] + public ExtractedFileVerificationMessage() { } - public ExtractedFileVerificationMessage(ExtractedFileStatusMessage request) - : base(request) - { - DicomFilePath = request.DicomFilePath; - OutputFilePath = request.OutputFilePath ?? throw new ArgumentNullException(nameof(request)); - } + public ExtractedFileVerificationMessage(ExtractedFileStatusMessage request) + : base(request) + { + DicomFilePath = request.DicomFilePath; + OutputFilePath = request.OutputFilePath ?? 
throw new ArgumentNullException(nameof(request)); } } diff --git a/src/SmiServices/Common/Messages/Extraction/ExtractionKey.cs b/src/SmiServices/Common/Messages/Extraction/ExtractionKey.cs index c19b83fb4..8d9a9688d 100644 --- a/src/SmiServices/Common/Messages/Extraction/ExtractionKey.cs +++ b/src/SmiServices/Common/Messages/Extraction/ExtractionKey.cs @@ -1,21 +1,20 @@ -namespace SmiServices.Common.Messages.Extraction +namespace SmiServices.Common.Messages.Extraction; + +// ReSharper disable InconsistentNaming +public enum ExtractionKey { - // ReSharper disable InconsistentNaming - public enum ExtractionKey - { - /// - /// Dicom Tag (0008,0018) - /// - SOPInstanceUID, + /// + /// Dicom Tag (0008,0018) + /// + SOPInstanceUID, - /// - /// Dicom Tag (0020,000E) - /// - SeriesInstanceUID, + /// + /// Dicom Tag (0020,000E) + /// + SeriesInstanceUID, - /// - /// Dicom Tag (0020,000D) - /// - StudyInstanceUID, - } + /// + /// Dicom Tag (0020,000D) + /// + StudyInstanceUID, } diff --git a/src/SmiServices/Common/Messages/Extraction/ExtractionRequestInfoMessage.cs b/src/SmiServices/Common/Messages/Extraction/ExtractionRequestInfoMessage.cs index 4a446096c..b2e02dd3e 100644 --- a/src/SmiServices/Common/Messages/Extraction/ExtractionRequestInfoMessage.cs +++ b/src/SmiServices/Common/Messages/Extraction/ExtractionRequestInfoMessage.cs @@ -1,25 +1,24 @@ using Newtonsoft.Json; -namespace SmiServices.Common.Messages.Extraction +namespace SmiServices.Common.Messages.Extraction; + +public class ExtractionRequestInfoMessage : ExtractMessage { - public class ExtractionRequestInfoMessage : ExtractMessage - { - [JsonProperty(Required = Required.Always)] - public string KeyTag { get; set; } = null!; + [JsonProperty(Required = Required.Always)] + public string KeyTag { get; set; } = null!; - [JsonProperty(Required = Required.Always)] - public int KeyValueCount { get; set; } + [JsonProperty(Required = Required.Always)] + public int KeyValueCount { get; set; } - [JsonProperty(Required = 
Required.Always)] - public string UserName { get; set; } = null!; + [JsonProperty(Required = Required.Always)] + public string UserName { get; set; } = null!; - [JsonConstructor] - public ExtractionRequestInfoMessage() { } + [JsonConstructor] + public ExtractionRequestInfoMessage() { } - public override string ToString() - { - return base.ToString() + $",KeyTag={KeyTag},KeyValueCount={KeyValueCount},UserName={UserName}"; - } + public override string ToString() + { + return base.ToString() + $",KeyTag={KeyTag},KeyValueCount={KeyValueCount},UserName={UserName}"; } } diff --git a/src/SmiServices/Common/Messages/Extraction/ExtractionRequestMessage.cs b/src/SmiServices/Common/Messages/Extraction/ExtractionRequestMessage.cs index 21074fa3f..9f9d881db 100644 --- a/src/SmiServices/Common/Messages/Extraction/ExtractionRequestMessage.cs +++ b/src/SmiServices/Common/Messages/Extraction/ExtractionRequestMessage.cs @@ -1,45 +1,44 @@ using Newtonsoft.Json; using System.Collections.Generic; -namespace SmiServices.Common.Messages.Extraction +namespace SmiServices.Common.Messages.Extraction; + +/// +/// Describes a request to extract all images identified by a DicomTag e.g. SeriesInstanceUID with the specified project specific patient identifiers (PatientID) +/// +public class ExtractionRequestMessage : ExtractMessage { /// - /// Describes a request to extract all images identified by a DicomTag e.g. SeriesInstanceUID with the specified project specific patient identifiers (PatientID) + /// Contains the name of the identifier you want to extract based on (this should be a DicomTag e.g. 'SeriesInstanceUID') /// - public class ExtractionRequestMessage : ExtractMessage - { - /// - /// Contains the name of the identifier you want to extract based on (this should be a DicomTag e.g. 
'SeriesInstanceUID') - /// - [JsonProperty(Required = Required.Always)] - public string KeyTag { get; set; } = null!; - - /// - /// The unique set of identifiers of Type which should be extracted - /// - [JsonProperty(Required = Required.Always)] - public List ExtractionIdentifiers { get; set; } = null!; + [JsonProperty(Required = Required.Always)] + public string KeyTag { get; set; } = null!; - [JsonConstructor] - public ExtractionRequestMessage() - { - ExtractionIdentifiers = []; - } + /// + /// The unique set of identifiers of Type which should be extracted + /// + [JsonProperty(Required = Required.Always)] + public List ExtractionIdentifiers { get; set; } = null!; - /// - /// (Shallow) copy constructor - /// - /// - public ExtractionRequestMessage(ExtractionRequestMessage other) - : base(other) - { - KeyTag = other.KeyTag; - ExtractionIdentifiers = other.ExtractionIdentifiers; - } + [JsonConstructor] + public ExtractionRequestMessage() + { + ExtractionIdentifiers = []; + } - public override string ToString() - => base.ToString() + ", " + - $"KeyTag={KeyTag}, " + - $"nIdentifiers={ExtractionIdentifiers.Count}"; + /// + /// (Shallow) copy constructor + /// + /// + public ExtractionRequestMessage(ExtractionRequestMessage other) + : base(other) + { + KeyTag = other.KeyTag; + ExtractionIdentifiers = other.ExtractionIdentifiers; } + + public override string ToString() + => base.ToString() + ", " + + $"KeyTag={KeyTag}, " + + $"nIdentifiers={ExtractionIdentifiers.Count}"; } diff --git a/src/SmiServices/Common/Messages/Extraction/IExtractMessage.cs b/src/SmiServices/Common/Messages/Extraction/IExtractMessage.cs index d646faf1c..028e21a2c 100644 --- a/src/SmiServices/Common/Messages/Extraction/IExtractMessage.cs +++ b/src/SmiServices/Common/Messages/Extraction/IExtractMessage.cs @@ -1,51 +1,50 @@ using Newtonsoft.Json; using System; -namespace SmiServices.Common.Messages.Extraction +namespace SmiServices.Common.Messages.Extraction; + +/// +/// Interface for all messages 
relating to the extract process +/// +public interface IExtractMessage : IMessage { /// - /// Interface for all messages relating to the extract process - /// - public interface IExtractMessage : IMessage - { - /// - /// Unique identifier to link messages from different extract requests - /// - Guid ExtractionJobIdentifier { get; } - - /// - /// Project number used by eDRIS for reference, and for the base extraction output relative to the ExtractRoot - /// - string ProjectNumber { get; } - - /// - /// Directory relative to the ExtractRoot to place anonymised files into - /// - string ExtractionDirectory { get; } - - /// - /// The modality to extract - /// - public string Modality { get; } - - /// - /// DateTime the job was submitted at - /// - DateTime JobSubmittedAt { get; set; } - - /// - /// True if this is an identifiable extraction (i.e. files should not be anonymised) - /// - bool IsIdentifiableExtraction { get; } - - /// - /// True if this is a "no filters" (i.e. no file rejection filters should be applied) - /// - bool IsNoFilterExtraction { get; } - - /// - /// True if this extraction uses the global pool of DICOM files - /// - bool IsPooledExtraction { get; } - } + /// Unique identifier to link messages from different extract requests + /// + Guid ExtractionJobIdentifier { get; } + + /// + /// Project number used by eDRIS for reference, and for the base extraction output relative to the ExtractRoot + /// + string ProjectNumber { get; } + + /// + /// Directory relative to the ExtractRoot to place anonymised files into + /// + string ExtractionDirectory { get; } + + /// + /// The modality to extract + /// + public string Modality { get; } + + /// + /// DateTime the job was submitted at + /// + DateTime JobSubmittedAt { get; set; } + + /// + /// True if this is an identifiable extraction (i.e. files should not be anonymised) + /// + bool IsIdentifiableExtraction { get; } + + /// + /// True if this is a "no filters" (i.e. 
no file rejection filters should be applied) + /// + bool IsNoFilterExtraction { get; } + + /// + /// True if this extraction uses the global pool of DICOM files + /// + bool IsPooledExtraction { get; } } diff --git a/src/SmiServices/Common/Messages/Extraction/VerifiedFileStatus.cs b/src/SmiServices/Common/Messages/Extraction/VerifiedFileStatus.cs index 81118ac71..af9a70d3c 100644 --- a/src/SmiServices/Common/Messages/Extraction/VerifiedFileStatus.cs +++ b/src/SmiServices/Common/Messages/Extraction/VerifiedFileStatus.cs @@ -1,30 +1,29 @@ -namespace SmiServices.Common.Messages.Extraction +namespace SmiServices.Common.Messages.Extraction; + +public enum VerifiedFileStatus { - public enum VerifiedFileStatus - { - /// - /// Unused placeholder value - /// - None = 0, + /// + /// Unused placeholder value + /// + None = 0, - /// - /// The file has not (yet) been verified - /// - NotVerified, + /// + /// The file has not (yet) been verified + /// + NotVerified, - /// - /// The file was scanned and determined to not be identifiable - /// - NotIdentifiable, + /// + /// The file was scanned and determined to not be identifiable + /// + NotIdentifiable, - /// - /// The file was scanned and determined to be identifiable - /// - IsIdentifiable, + /// + /// The file was scanned and determined to be identifiable + /// + IsIdentifiable, - /// - /// There was an error processing the file. Identifiability could not be determined - /// - ErrorWontRetry, - } + /// + /// There was an error processing the file. 
Identifiability could not be determined + /// + ErrorWontRetry, } diff --git a/src/SmiServices/Common/Messages/FatalErrorMessage.cs b/src/SmiServices/Common/Messages/FatalErrorMessage.cs index 75cd102a9..691f1737e 100644 --- a/src/SmiServices/Common/Messages/FatalErrorMessage.cs +++ b/src/SmiServices/Common/Messages/FatalErrorMessage.cs @@ -3,21 +3,20 @@ using Newtonsoft.Json; using System; -namespace SmiServices.Common.Messages +namespace SmiServices.Common.Messages; + +public class FatalErrorMessage : MemberwiseEquatable, IMessage { - public class FatalErrorMessage : MemberwiseEquatable, IMessage - { - [JsonProperty(Required = Required.Always)] - public string Message { get; set; } = null!; + [JsonProperty(Required = Required.Always)] + public string Message { get; set; } = null!; - // TODO(rkm 2023-08-04) The nullability is confusing here. We should audit and remove all DisallowNull usages - [JsonProperty(Required = Required.DisallowNull)] - public Exception? Exception { get; set; } + // TODO(rkm 2023-08-04) The nullability is confusing here. We should audit and remove all DisallowNull usages + [JsonProperty(Required = Required.DisallowNull)] + public Exception? Exception { get; set; } - public FatalErrorMessage(string message, Exception? exception) - { - Message = message; - Exception = exception; - } + public FatalErrorMessage(string message, Exception? 
exception) + { + Message = message; + Exception = exception; } } diff --git a/src/SmiServices/Common/Messages/IFileReferenceMessage.cs b/src/SmiServices/Common/Messages/IFileReferenceMessage.cs index b0e1a3765..ae603d1c7 100644 --- a/src/SmiServices/Common/Messages/IFileReferenceMessage.cs +++ b/src/SmiServices/Common/Messages/IFileReferenceMessage.cs @@ -1,13 +1,12 @@ -namespace SmiServices.Common.Messages +namespace SmiServices.Common.Messages; + +/// +/// Describes an IMessage that references a dicom file in physical storage +/// +public interface IFileReferenceMessage : IMessage { /// - /// Describes an IMessage that references a dicom file in physical storage + /// File path relative to the FileSystemRoot /// - public interface IFileReferenceMessage : IMessage - { - /// - /// File path relative to the FileSystemRoot - /// - string DicomFilePath { get; set; } - } + string DicomFilePath { get; set; } } diff --git a/src/SmiServices/Common/Messages/IMessage.cs b/src/SmiServices/Common/Messages/IMessage.cs index d3b89720e..c4ca01499 100644 --- a/src/SmiServices/Common/Messages/IMessage.cs +++ b/src/SmiServices/Common/Messages/IMessage.cs @@ -1,7 +1,6 @@ -namespace SmiServices.Common.Messages -{ - /// - /// Interface for any SMI message. Used to allow a constraint on a generic argument. - /// - public interface IMessage { } -} +namespace SmiServices.Common.Messages; + +/// +/// Interface for any SMI message. Used to allow a constraint on a generic argument. 
+/// +public interface IMessage { } diff --git a/src/SmiServices/Common/Messages/IMessageHeader.cs b/src/SmiServices/Common/Messages/IMessageHeader.cs index 5c2b0df85..75a7a2947 100644 --- a/src/SmiServices/Common/Messages/IMessageHeader.cs +++ b/src/SmiServices/Common/Messages/IMessageHeader.cs @@ -2,26 +2,25 @@ using System; using System.Collections.Generic; -namespace SmiServices.Common.Messages +namespace SmiServices.Common.Messages; + +public interface IMessageHeader { - public interface IMessageHeader - { - Guid MessageGuid { get; init; } + Guid MessageGuid { get; init; } - int ProducerProcessID { get; init; } + int ProducerProcessID { get; init; } - string ProducerExecutableName { get; init; } + string ProducerExecutableName { get; init; } - long OriginalPublishTimestamp { get; init; } + long OriginalPublishTimestamp { get; init; } - /// - /// The full message chain from origin to here - /// - Guid[] Parents { get; } + /// + /// The full message chain from origin to here + /// + Guid[] Parents { get; } - void Populate(IDictionary props); - void Log(ILogger logger, LogLevel level, string message, Exception? ex = null); + void Populate(IDictionary props); + void Log(ILogger logger, LogLevel level, string message, Exception? 
ex = null); - bool IsDescendantOf(IMessageHeader other); - } + bool IsDescendantOf(IMessageHeader other); } diff --git a/src/SmiServices/Common/Messages/MessageHeader.cs b/src/SmiServices/Common/Messages/MessageHeader.cs index b5c74b52c..3a7975490 100644 --- a/src/SmiServices/Common/Messages/MessageHeader.cs +++ b/src/SmiServices/Common/Messages/MessageHeader.cs @@ -7,140 +7,139 @@ using System.Linq; using System.Text; -namespace SmiServices.Common.Messages +namespace SmiServices.Common.Messages; + +public class MessageHeader : MemberwiseEquatable, IMessageHeader { - public class MessageHeader : MemberwiseEquatable, IMessageHeader - { - public Guid MessageGuid { get; init; } + public Guid MessageGuid { get; init; } - public int ProducerProcessID { get; init; } + public int ProducerProcessID { get; init; } - public string ProducerExecutableName { get; init; } + public string ProducerExecutableName { get; init; } - public long OriginalPublishTimestamp { get; init; } + public long OriginalPublishTimestamp { get; init; } - public Guid[] Parents { get; init; } - public const string Splitter = "->"; + public Guid[] Parents { get; init; } + public const string Splitter = "->"; - private static readonly int _producerProcessID; + private static readonly int _producerProcessID; - private static string? _currentProgramName; - public static string CurrentProgramName + private static string? 
_currentProgramName; + public static string CurrentProgramName + { + get { - get - { - if (string.IsNullOrWhiteSpace(_currentProgramName)) - throw new Exception("Value must be set before use"); - return _currentProgramName; - } - set => _currentProgramName = value; + if (string.IsNullOrWhiteSpace(_currentProgramName)) + throw new Exception("Value must be set before use"); + return _currentProgramName; } + set => _currentProgramName = value; + } - static MessageHeader() - { - _producerProcessID = Environment.ProcessId; - } + static MessageHeader() + { + _producerProcessID = Environment.ProcessId; + } - [JsonConstructor] - public MessageHeader() - : this(parent: default) { } + [JsonConstructor] + public MessageHeader() + : this(parent: default) { } - /// - /// Declares that your process is about to send a message. Optionally as a result of processing another message (). - /// - /// The triggering message that caused you to want to send this message - public MessageHeader(IMessageHeader? parent = null) - { - ProducerProcessID = _producerProcessID; - ProducerExecutableName = CurrentProgramName; - MessageGuid = Guid.NewGuid(); - - if (parent == null) - { - Parents = []; - OriginalPublishTimestamp = UnixTimeNow(); - } - else - { - var p = new List(parent.Parents) { parent.MessageGuid }; - Parents = [.. p]; - OriginalPublishTimestamp = parent.OriginalPublishTimestamp; - } - } + /// + /// Declares that your process is about to send a message. Optionally as a result of processing another message (). + /// + /// The triggering message that caused you to want to send this message + public MessageHeader(IMessageHeader? 
parent = null) + { + ProducerProcessID = _producerProcessID; + ProducerExecutableName = CurrentProgramName; + MessageGuid = Guid.NewGuid(); - /// - /// Creates a out of a (byte-encoded) header field set from RabbitMQ - /// - /// - /// - public static MessageHeader FromDict(IDictionary encodedHeaders, Encoding enc) - => new() - { - MessageGuid = GetGuidArrayFromEncodedHeader(encodedHeaders["MessageGuid"], enc).Single(), - ProducerProcessID = (int)encodedHeaders["ProducerProcessID"], - ProducerExecutableName = enc.GetString((byte[])encodedHeaders["ProducerExecutableName"]), - Parents = GetGuidArrayFromEncodedHeader(encodedHeaders["Parents"], enc), - OriginalPublishTimestamp = Convert.ToInt64(encodedHeaders["OriginalPublishTimestamp"]), - }; - - /// - /// Populates RabbitMQ header properties with the current MessageHeader - /// - /// - public void Populate(IDictionary headers) + if (parent == null) { - headers.Add("MessageGuid", MessageGuid.ToString()); - headers.Add("ProducerProcessID", ProducerProcessID); - headers.Add("ProducerExecutableName", ProducerExecutableName); - headers.Add("OriginalPublishTimestamp", OriginalPublishTimestamp); - headers.Add("Parents", string.Join(Splitter, Parents)); + Parents = []; + OriginalPublishTimestamp = UnixTimeNow(); } - - public bool IsDescendantOf(IMessageHeader other) + else { - return Parents != null && Parents.Contains(other.MessageGuid); + var p = new List(parent.Parents) { parent.MessageGuid }; + Parents = [.. p]; + OriginalPublishTimestamp = parent.OriginalPublishTimestamp; } + } - public void Log(ILogger logger, LogLevel level, string message, Exception? 
ex = null) + /// + /// Creates a out of a (byte-encoded) header field set from RabbitMQ + /// + /// + /// + public static MessageHeader FromDict(IDictionary encodedHeaders, Encoding enc) + => new() { - //TODO This is massively over-logging - ProducerProcessID, ProducerExecutableName, OriginalPublishTimestamp are found in the logs anyway - var theEvent = new LogEventInfo(level, logger.Name, message); - theEvent.Properties["MessageGuid"] = MessageGuid.ToString(); - theEvent.Properties["ProducerProcessID"] = ProducerProcessID; - theEvent.Properties["ProducerExecutableName"] = ProducerExecutableName; - theEvent.Properties["OriginalPublishTimestamp"] = OriginalPublishTimestamp; - theEvent.Properties["Parents"] = string.Join(Splitter, Parents); - theEvent.Exception = ex; - - logger.Log(theEvent); - } + MessageGuid = GetGuidArrayFromEncodedHeader(encodedHeaders["MessageGuid"], enc).Single(), + ProducerProcessID = (int)encodedHeaders["ProducerProcessID"], + ProducerExecutableName = enc.GetString((byte[])encodedHeaders["ProducerExecutableName"]), + Parents = GetGuidArrayFromEncodedHeader(encodedHeaders["Parents"], enc), + OriginalPublishTimestamp = Convert.ToInt64(encodedHeaders["OriginalPublishTimestamp"]), + }; + + /// + /// Populates RabbitMQ header properties with the current MessageHeader + /// + /// + public void Populate(IDictionary headers) + { + headers.Add("MessageGuid", MessageGuid.ToString()); + headers.Add("ProducerProcessID", ProducerProcessID); + headers.Add("ProducerExecutableName", ProducerExecutableName); + headers.Add("OriginalPublishTimestamp", OriginalPublishTimestamp); + headers.Add("Parents", string.Join(Splitter, Parents)); + } - public override string ToString() - { - var sb = new StringBuilder(); - sb.Append("MessageGuid: " + MessageGuid); - sb.Append(", ProducerProcessID: " + ProducerProcessID); - sb.Append(", ProducerExecutableName: " + ProducerExecutableName); - sb.Append(", OriginalPublishTimestamp:" + OriginalPublishTimestamp); - sb.Append(", 
Parents: [" + string.Join(Splitter, Parents) + "]"); - return sb.ToString(); - } + public bool IsDescendantOf(IMessageHeader other) + { + return Parents != null && Parents.Contains(other.MessageGuid); + } - // TODO(rkm 2020-03-08) Can't we just use the DateTime.UnixEpoch value here? - public static long UnixTimeNow() => UnixTime(DateTime.UtcNow); - public static long UnixTime(DateTime dateTime) => (long)(dateTime - new DateTime(1970, 1, 1, 0, 0, 0)).TotalSeconds; - public static DateTime UnixTimeToDateTime(long unixTime) => new DateTime(1970, 1, 1, 0, 0, 0) + TimeSpan.FromSeconds(unixTime); + public void Log(ILogger logger, LogLevel level, string message, Exception? ex = null) + { + //TODO This is massively over-logging - ProducerProcessID, ProducerExecutableName, OriginalPublishTimestamp are found in the logs anyway + var theEvent = new LogEventInfo(level, logger.Name, message); + theEvent.Properties["MessageGuid"] = MessageGuid.ToString(); + theEvent.Properties["ProducerProcessID"] = ProducerProcessID; + theEvent.Properties["ProducerExecutableName"] = ProducerExecutableName; + theEvent.Properties["OriginalPublishTimestamp"] = OriginalPublishTimestamp; + theEvent.Properties["Parents"] = string.Join(Splitter, Parents); + theEvent.Exception = ex; + + logger.Log(theEvent); + } - public static Guid[] GetGuidArray(string str) - { - string[] strings = str.Split(new[] { Splitter }, StringSplitOptions.RemoveEmptyEntries); - return strings.Select(Guid.Parse).ToArray(); - } + public override string ToString() + { + var sb = new StringBuilder(); + sb.Append("MessageGuid: " + MessageGuid); + sb.Append(", ProducerProcessID: " + ProducerProcessID); + sb.Append(", ProducerExecutableName: " + ProducerExecutableName); + sb.Append(", OriginalPublishTimestamp:" + OriginalPublishTimestamp); + sb.Append(", Parents: [" + string.Join(Splitter, Parents) + "]"); + return sb.ToString(); + } - private static Guid[] GetGuidArrayFromEncodedHeader(object o, Encoding enc) - { - return 
GetGuidArray(enc.GetString((byte[])o)); - } + // TODO(rkm 2020-03-08) Can't we just use the DateTime.UnixEpoch value here? + public static long UnixTimeNow() => UnixTime(DateTime.UtcNow); + public static long UnixTime(DateTime dateTime) => (long)(dateTime - new DateTime(1970, 1, 1, 0, 0, 0)).TotalSeconds; + public static DateTime UnixTimeToDateTime(long unixTime) => new DateTime(1970, 1, 1, 0, 0, 0) + TimeSpan.FromSeconds(unixTime); + + public static Guid[] GetGuidArray(string str) + { + string[] strings = str.Split(new[] { Splitter }, StringSplitOptions.RemoveEmptyEntries); + return strings.Select(Guid.Parse).ToArray(); + } + + private static Guid[] GetGuidArrayFromEncodedHeader(object o, Encoding enc) + { + return GetGuidArray(enc.GetString((byte[])o)); } } diff --git a/src/SmiServices/Common/Messages/RabbitMqXDeathHeaders.cs b/src/SmiServices/Common/Messages/RabbitMqXDeathHeaders.cs index 6d892ba65..a471ae68f 100644 --- a/src/SmiServices/Common/Messages/RabbitMqXDeathHeaders.cs +++ b/src/SmiServices/Common/Messages/RabbitMqXDeathHeaders.cs @@ -7,173 +7,172 @@ using System.Linq; using System.Text; -namespace SmiServices.Common.Messages +namespace SmiServices.Common.Messages; + +public class RabbitMqXDeathHeaders : MemberwiseEquatable { - public class RabbitMqXDeathHeaders : MemberwiseEquatable - { - public List XDeaths { get; set; } + public List XDeaths { get; set; } - public string XFirstDeathExchange { get; set; } + public string XFirstDeathExchange { get; set; } - public string XFirstDeathQueue { get; set; } + public string XFirstDeathQueue { get; set; } - public string XFirstDeathReason { get; set; } + public string XFirstDeathReason { get; set; } - public const string XDeathKey = "x-death"; - public const string XFirstDeathExchangeKey = "x-first-death-exchange"; - public const string XFirstDeathQueueKey = "x-first-death-queue"; - public const string XFristDeathReasonKey = "x-first-death-reason"; + public const string XDeathKey = "x-death"; + public const 
string XFirstDeathExchangeKey = "x-first-death-exchange"; + public const string XFirstDeathQueueKey = "x-first-death-queue"; + public const string XFristDeathReasonKey = "x-first-death-reason"; - private static readonly List _requiredKeys; + private static readonly List _requiredKeys; - /// - /// Static constructor - /// - static RabbitMqXDeathHeaders() - { - _requiredKeys = - [ - XDeathKey, - XFirstDeathExchangeKey, - XFirstDeathQueueKey, - XFristDeathReasonKey - ]; - } + /// + /// Static constructor + /// + static RabbitMqXDeathHeaders() + { + _requiredKeys = + [ + XDeathKey, + XFirstDeathExchangeKey, + XFirstDeathQueueKey, + XFristDeathReasonKey + ]; + } - public RabbitMqXDeathHeaders() { } + public RabbitMqXDeathHeaders() { } - /// - /// Creates a out of a (byte-encoded) header field set from RabbitMQ - /// - /// - /// - public RabbitMqXDeathHeaders(IDictionary encodedHeaders, Encoding enc) - { - if (!(encodedHeaders.Any() && _requiredKeys.All(encodedHeaders.ContainsKey))) - throw new ArgumentException("xDeathEntry"); + /// + /// Creates a out of a (byte-encoded) header field set from RabbitMQ + /// + /// + /// + public RabbitMqXDeathHeaders(IDictionary encodedHeaders, Encoding enc) + { + if (!(encodedHeaders.Any() && _requiredKeys.All(encodedHeaders.ContainsKey))) + throw new ArgumentException("xDeathEntry"); - XDeaths = []; + XDeaths = []; - foreach (object xDeathEntry in (List)encodedHeaders[XDeathKey]) - XDeaths.Add(new RabbitMqXDeath((Dictionary)xDeathEntry, enc)); + foreach (object xDeathEntry in (List)encodedHeaders[XDeathKey]) + XDeaths.Add(new RabbitMqXDeath((Dictionary)xDeathEntry, enc)); - XFirstDeathExchange = enc.GetString((byte[])encodedHeaders[XFirstDeathExchangeKey]); - XFirstDeathQueue = enc.GetString((byte[])encodedHeaders[XFirstDeathQueueKey]); - XFirstDeathReason = enc.GetString((byte[])encodedHeaders[XFristDeathReasonKey]); - } + XFirstDeathExchange = enc.GetString((byte[])encodedHeaders[XFirstDeathExchangeKey]); + XFirstDeathQueue = 
enc.GetString((byte[])encodedHeaders[XFirstDeathQueueKey]); + XFirstDeathReason = enc.GetString((byte[])encodedHeaders[XFristDeathReasonKey]); + } - public void Populate(IDictionary headers) + public void Populate(IDictionary headers) + { + var xDeaths = new List(); + foreach (RabbitMqXDeath item in XDeaths) { - var xDeaths = new List(); - foreach (RabbitMqXDeath item in XDeaths) + xDeaths.Add(new Dictionary { - xDeaths.Add(new Dictionary - { - { RabbitMqXDeath.CountKey, item.Count }, - { RabbitMqXDeath.ExchangeKey, item.Exchange }, - { RabbitMqXDeath.QueueKey, item.Queue }, - { RabbitMqXDeath.ReasonKey, item.Reason }, - { RabbitMqXDeath.RoutingKeysKey, item.RoutingKeys }, - { RabbitMqXDeath.TimeKey, new AmqpTimestamp(item.Time) } - }); - } - - headers.Add(XDeathKey, xDeaths); - headers.Add(XFirstDeathExchangeKey, XFirstDeathExchange); - headers.Add(XFirstDeathQueueKey, XFirstDeathQueue); - headers.Add(XFristDeathReasonKey, XFirstDeathReason); + { RabbitMqXDeath.CountKey, item.Count }, + { RabbitMqXDeath.ExchangeKey, item.Exchange }, + { RabbitMqXDeath.QueueKey, item.Queue }, + { RabbitMqXDeath.ReasonKey, item.Reason }, + { RabbitMqXDeath.RoutingKeysKey, item.RoutingKeys }, + { RabbitMqXDeath.TimeKey, new AmqpTimestamp(item.Time) } + }); } - public static void CopyHeaders(IDictionary from, IDictionary to) - { - // Ensure that /from/ contains all the required headers, and /to/ contains none of them + headers.Add(XDeathKey, xDeaths); + headers.Add(XFirstDeathExchangeKey, XFirstDeathExchange); + headers.Add(XFirstDeathQueueKey, XFirstDeathQueue); + headers.Add(XFristDeathReasonKey, XFirstDeathReason); + } - if (from == null || !(from.Any() && _requiredKeys.All(from.ContainsKey))) - throw new ArgumentException(null, nameof(from)); + public static void CopyHeaders(IDictionary from, IDictionary to) + { + // Ensure that /from/ contains all the required headers, and /to/ contains none of them - if (to == null || _requiredKeys.Any(to.ContainsKey)) - throw new 
ArgumentException(null, nameof(to)); + if (from == null || !(from.Any() && _requiredKeys.All(from.ContainsKey))) + throw new ArgumentException(null, nameof(from)); - foreach (string key in _requiredKeys) - to.Add(key, from[key]); - } + if (to == null || _requiredKeys.Any(to.ContainsKey)) + throw new ArgumentException(null, nameof(to)); - public override string ToString() - { - var sb = new StringBuilder(); - sb.Append("XFirstDeathExchange: " + XFirstDeathExchange); - sb.Append(", XFirstDeathQueue: " + XFirstDeathQueue); - sb.Append(", XFirstDeathReason: " + XFirstDeathReason); - sb.Append(", XDeaths: {" + string.Join(", ", XDeaths) + "}"); - return sb.ToString(); - } + foreach (string key in _requiredKeys) + to.Add(key, from[key]); } - public class RabbitMqXDeath : MemberwiseEquatable + public override string ToString() { - public const string CountKey = "count"; - public const string ExchangeKey = "exchange"; - public const string QueueKey = "queue"; - public const string ReasonKey = "reason"; - public const string RoutingKeysKey = "routing-keys"; - public const string TimeKey = "time"; - private static readonly List _requiredKeys; + var sb = new StringBuilder(); + sb.Append("XFirstDeathExchange: " + XFirstDeathExchange); + sb.Append(", XFirstDeathQueue: " + XFirstDeathQueue); + sb.Append(", XFirstDeathReason: " + XFirstDeathReason); + sb.Append(", XDeaths: {" + string.Join(", ", XDeaths) + "}"); + return sb.ToString(); + } +} + +public class RabbitMqXDeath : MemberwiseEquatable +{ + public const string CountKey = "count"; + public const string ExchangeKey = "exchange"; + public const string QueueKey = "queue"; + public const string ReasonKey = "reason"; + public const string RoutingKeysKey = "routing-keys"; + public const string TimeKey = "time"; + private static readonly List _requiredKeys; - public long Count { get; set; } + public long Count { get; set; } - public string Exchange { get; set; } + public string Exchange { get; set; } - public string Queue { get; 
set; } + public string Queue { get; set; } - public string Reason { get; set; } + public string Reason { get; set; } - public List RoutingKeys { get; set; } + public List RoutingKeys { get; set; } - public long Time { get; set; } + public long Time { get; set; } - static RabbitMqXDeath() - { - _requiredKeys = - [ - CountKey, - ExchangeKey, - QueueKey, - ReasonKey, - RoutingKeysKey, - TimeKey, - ]; - } + static RabbitMqXDeath() + { + _requiredKeys = + [ + CountKey, + ExchangeKey, + QueueKey, + ReasonKey, + RoutingKeysKey, + TimeKey, + ]; + } - public RabbitMqXDeath() { } + public RabbitMqXDeath() { } - public RabbitMqXDeath(IDictionary xDeathEntry, Encoding enc) - { - if (xDeathEntry == null || - !(xDeathEntry.Any() && xDeathEntry.All(k => _requiredKeys.Contains(k.Key)))) - throw new ArgumentException(null, nameof(xDeathEntry)); - - Count = (long)xDeathEntry[CountKey]; - Exchange = enc.GetString((byte[])xDeathEntry[ExchangeKey]); - Queue = enc.GetString((byte[])xDeathEntry[QueueKey]); - Reason = enc.GetString((byte[])xDeathEntry[ReasonKey]); - RoutingKeys = ((List)xDeathEntry[RoutingKeysKey]).Select(x => enc.GetString((byte[])x)).ToList(); - Time = ((AmqpTimestamp)xDeathEntry[TimeKey]).UnixTime; - } + public RabbitMqXDeath(IDictionary xDeathEntry, Encoding enc) + { + if (xDeathEntry == null || + !(xDeathEntry.Any() && xDeathEntry.All(k => _requiredKeys.Contains(k.Key)))) + throw new ArgumentException(null, nameof(xDeathEntry)); + + Count = (long)xDeathEntry[CountKey]; + Exchange = enc.GetString((byte[])xDeathEntry[ExchangeKey]); + Queue = enc.GetString((byte[])xDeathEntry[QueueKey]); + Reason = enc.GetString((byte[])xDeathEntry[ReasonKey]); + RoutingKeys = ((List)xDeathEntry[RoutingKeysKey]).Select(x => enc.GetString((byte[])x)).ToList(); + Time = ((AmqpTimestamp)xDeathEntry[TimeKey]).UnixTime; + } - public override string ToString() - { - var sb = new StringBuilder(); - sb.Append("Count: " + Count); - sb.Append(", Exchange: " + Exchange); - sb.Append(", Queue: " + 
Queue); - sb.Append(", Reason: " + Reason); - sb.Append(", RoutingKeys: {" + string.Join(", ", RoutingKeys) + "}"); - return sb.ToString(); - } + public override string ToString() + { + var sb = new StringBuilder(); + sb.Append("Count: " + Count); + sb.Append(", Exchange: " + Exchange); + sb.Append(", Queue: " + Queue); + sb.Append(", Reason: " + Reason); + sb.Append(", RoutingKeys: {" + string.Join(", ", RoutingKeys) + "}"); + return sb.ToString(); } } diff --git a/src/SmiServices/Common/Messages/SeriesMessage.cs b/src/SmiServices/Common/Messages/SeriesMessage.cs index 6b25dfe45..fdbd10715 100644 --- a/src/SmiServices/Common/Messages/SeriesMessage.cs +++ b/src/SmiServices/Common/Messages/SeriesMessage.cs @@ -2,43 +2,42 @@ using Equ; using Newtonsoft.Json; -namespace SmiServices.Common.Messages +namespace SmiServices.Common.Messages; + +/// +/// +/// Object representing a series message. +/// https://github.com/HicServices/SMIPlugin/wiki/SMI-RabbitMQ-messages-and-queues#seriesmessage +/// +public sealed class SeriesMessage : MemberwiseEquatable, IMessage { - /// /// - /// Object representing a series message. - /// https://github.com/HicServices/SMIPlugin/wiki/SMI-RabbitMQ-messages-and-queues#seriesmessage + /// Directory path relative to the root path. /// - public sealed class SeriesMessage : MemberwiseEquatable, IMessage - { - /// - /// Directory path relative to the root path. - /// - [JsonProperty(Required = Required.Always)] - public string DirectoryPath { get; set; } = null!; + [JsonProperty(Required = Required.Always)] + public string DirectoryPath { get; set; } = null!; - /// - /// Dicom tag (0020,000D). - /// - [JsonProperty(Required = Required.Always)] - public string StudyInstanceUID { get; set; } = null!; + /// + /// Dicom tag (0020,000D). + /// + [JsonProperty(Required = Required.Always)] + public string StudyInstanceUID { get; set; } = null!; - /// - /// Dicom tag (0020,000E). 
- /// - [JsonProperty(Required = Required.Always)] - public string SeriesInstanceUID { get; set; } = null!; + /// + /// Dicom tag (0020,000E). + /// + [JsonProperty(Required = Required.Always)] + public string SeriesInstanceUID { get; set; } = null!; - /// - /// Number of images found in the series. - /// - [JsonProperty(Required = Required.Always)] - public int ImagesInSeries { get; set; } + /// + /// Number of images found in the series. + /// + [JsonProperty(Required = Required.Always)] + public int ImagesInSeries { get; set; } - /// - /// Key-value pairs of Dicom tags and their values. - /// - [JsonProperty(Required = Required.Always)] - public string DicomDataset { get; set; } = null!; - } + /// + /// Key-value pairs of Dicom tags and their values. + /// + [JsonProperty(Required = Required.Always)] + public string DicomDataset { get; set; } = null!; } diff --git a/src/SmiServices/Common/Messages/TagPromotionMessage.cs b/src/SmiServices/Common/Messages/TagPromotionMessage.cs index ecb620f12..ce6416e1c 100644 --- a/src/SmiServices/Common/Messages/TagPromotionMessage.cs +++ b/src/SmiServices/Common/Messages/TagPromotionMessage.cs @@ -3,32 +3,31 @@ using Newtonsoft.Json; using System.Collections.Generic; -namespace SmiServices.Common.Messages +namespace SmiServices.Common.Messages; + +public sealed class TagPromotionMessage : MemberwiseEquatable, IMessage { - public sealed class TagPromotionMessage : MemberwiseEquatable, IMessage - { - /// - /// Dicom tag (0020,000D) - /// - [JsonProperty(Required = Required.Always)] - public string StudyInstanceUID { get; set; } = null!; + /// + /// Dicom tag (0020,000D) + /// + [JsonProperty(Required = Required.Always)] + public string StudyInstanceUID { get; set; } = null!; - /// - /// Dicom tag (0020,000E) - /// - [JsonProperty(Required = Required.Always)] - public string SeriesInstanceUID { get; set; } = null!; + /// + /// Dicom tag (0020,000E) + /// + [JsonProperty(Required = Required.Always)] + public string 
SeriesInstanceUID { get; set; } = null!; - /// - /// Dicom tag (0008,0018) - /// - [JsonProperty(Required = Required.Always)] - public string SOPInstanceUID { get; set; } = null!; + /// + /// Dicom tag (0008,0018) + /// + [JsonProperty(Required = Required.Always)] + public string SOPInstanceUID { get; set; } = null!; - /// - /// The tags to promote. Key is the dictionary entry for the DicomTag - /// - [JsonProperty(Required = Required.Always)] - public Dictionary PromotedTags { get; set; } = null!; - } + /// + /// The tags to promote. Key is the dictionary entry for the DicomTag + /// + [JsonProperty(Required = Required.Always)] + public Dictionary PromotedTags { get; set; } = null!; } diff --git a/src/SmiServices/Common/Messages/Updating/UpdateValuesMessage.cs b/src/SmiServices/Common/Messages/Updating/UpdateValuesMessage.cs index 2d291056b..393c33a8a 100644 --- a/src/SmiServices/Common/Messages/Updating/UpdateValuesMessage.cs +++ b/src/SmiServices/Common/Messages/Updating/UpdateValuesMessage.cs @@ -1,72 +1,71 @@ using Equ; using System; -namespace SmiServices.Common.Messages.Updating +namespace SmiServices.Common.Messages.Updating; + +/// +/// Requests to update the values in the fields to where the value in match +/// +public class UpdateValuesMessage : MemberwiseEquatable, IMessage { /// - /// Requests to update the values in the fields to where the value in match + /// Optional Sql operators e.g. "=", "<" etc to use in WHERE Sql when looking for in . If null or empty "=" is assumed for all WHERE comparisons /// - public class UpdateValuesMessage : MemberwiseEquatable, IMessage - { - /// - /// Optional Sql operators e.g. "=", "<" etc to use in WHERE Sql when looking for in . If null or empty "=" is assumed for all WHERE comparisons - /// - public string[]? Operators { get; set; } = null; + public string[]? 
Operators { get; set; } = null; - /// - /// The field(s) to search the database for (this should be the human readable name without qualifiers as it may match multiple tables e.g. ECHI) - /// - public string?[] WhereFields { get; set; } = []; + /// + /// The field(s) to search the database for (this should be the human readable name without qualifiers as it may match multiple tables e.g. ECHI) + /// + public string?[] WhereFields { get; set; } = []; - /// - /// The values to search for when deciding which records to update - /// - public string?[] HaveValues { get; set; } = []; + /// + /// The values to search for when deciding which records to update + /// + public string?[] HaveValues { get; set; } = []; - /// - /// The field(s) which should be updated, may be the same as the - /// - public string[] WriteIntoFields { get; set; } = []; + /// + /// The field(s) which should be updated, may be the same as the + /// + public string[] WriteIntoFields { get; set; } = []; - /// - /// The values to write into matching records (see ). Null elements in this array should be treated as - /// - public string[] Values { get; set; } = []; + /// + /// The values to write into matching records (see ). Null elements in this array should be treated as + /// + public string[] Values { get; set; } = []; - /// - /// Optional. Where present indicates the tables which should be updated. If empty then all tables matching the fields should be updated - /// - public int[] ExplicitTableInfo { get; set; } = []; + /// + /// Optional. Where present indicates the tables which should be updated. 
If empty then all tables matching the fields should be updated + /// + public int[] ExplicitTableInfo { get; set; } = []; - public void Validate() - { - if (WhereFields.Length != HaveValues.Length) - throw new Exception($"{nameof(WhereFields)} length must match {nameof(HaveValues)} length"); + public void Validate() + { + if (WhereFields.Length != HaveValues.Length) + throw new Exception($"{nameof(WhereFields)} length must match {nameof(HaveValues)} length"); - if (WriteIntoFields.Length != Values.Length) - throw new Exception($"{nameof(WriteIntoFields)} length must match {nameof(Values)} length"); + if (WriteIntoFields.Length != Values.Length) + throw new Exception($"{nameof(WriteIntoFields)} length must match {nameof(Values)} length"); - // If operators are specified then the WHERE column count must match the operator count - if (Operators != null && Operators.Length != 0) - if (Operators.Length != WhereFields.Length) - throw new Exception($"{nameof(WhereFields)} length must match {nameof(Operators)} length"); + // If operators are specified then the WHERE column count must match the operator count + if (Operators != null && Operators.Length != 0) + if (Operators.Length != WhereFields.Length) + throw new Exception($"{nameof(WhereFields)} length must match {nameof(Operators)} length"); - if (WhereFields.Length == 0) - throw new Exception("There must be at least one search field for WHERE section. Otherwise this would update entire tables"); + if (WhereFields.Length == 0) + throw new Exception("There must be at least one search field for WHERE section. 
Otherwise this would update entire tables"); - if (WriteIntoFields.Length == 0) - throw new Exception("There must be at least one value to write"); + if (WriteIntoFields.Length == 0) + throw new Exception("There must be at least one value to write"); - } + } - /// - /// Describes the message in terms of fields that are updated and checked in WHERE logic (but not values) - /// - /// - public override string ToString() - { - return - $"{nameof(UpdateValuesMessage)}: {nameof(WhereFields)}={string.Join(",", WhereFields)} {nameof(WriteIntoFields)}={string.Join(",", WriteIntoFields)}"; - } + /// + /// Describes the message in terms of fields that are updated and checked in WHERE logic (but not values) + /// + /// + public override string ToString() + { + return + $"{nameof(UpdateValuesMessage)}: {nameof(WhereFields)}={string.Join(",", WhereFields)} {nameof(WriteIntoFields)}={string.Join(",", WriteIntoFields)}"; } } diff --git a/src/SmiServices/Common/Messaging/BatchProducerModel.cs b/src/SmiServices/Common/Messaging/BatchProducerModel.cs index 206b7457c..1daa8af0a 100644 --- a/src/SmiServices/Common/Messaging/BatchProducerModel.cs +++ b/src/SmiServices/Common/Messaging/BatchProducerModel.cs @@ -2,40 +2,39 @@ using SmiServices.Common.Messages; using System; -namespace SmiServices.Common.Messaging +namespace SmiServices.Common.Messaging; + +/// +/// Manual confirms - handle yourself +/// No logging of sent - handle yourself +/// Make sure to WaitForConfirms during host shutdown +/// +public class BatchProducerModel : ProducerModel { + public BatchProducerModel( + string exchangeName, + IModel model, + IBasicProperties properties, + int maxPublishAttempts = 1, + IBackoffProvider? backoffProvider = null, + string? probeQueueName = null, + int probeQueueLimit = 0, + TimeSpan? 
probeTimeout = null + ) + : base(exchangeName, model, properties, maxPublishAttempts, backoffProvider, probeQueueName, probeQueueLimit, probeTimeout) + { } + + /// - /// Manual confirms - handle yourself - /// No logging of sent - handle yourself - /// Make sure to WaitForConfirms during host shutdown + /// Sends a message but does not wait for the server to confirm the publish. Manually call ProducerModel.WaitForConfirms() + /// to check all previously unacknowledged messages have been sent. /// - public class BatchProducerModel : ProducerModel + /// + /// + /// + /// + public override IMessageHeader SendMessage(IMessage message, IMessageHeader? inResponseTo = null, string? routingKey = null) { - public BatchProducerModel( - string exchangeName, - IModel model, - IBasicProperties properties, - int maxPublishAttempts = 1, - IBackoffProvider? backoffProvider = null, - string? probeQueueName = null, - int probeQueueLimit = 0, - TimeSpan? probeTimeout = null - ) - : base(exchangeName, model, properties, maxPublishAttempts, backoffProvider, probeQueueName, probeQueueLimit, probeTimeout) - { } - - - /// - /// Sends a message but does not wait for the server to confirm the publish. Manually call ProducerModel.WaitForConfirms() - /// to check all previously unacknowledged messages have been sent. - /// - /// - /// - /// - /// - public override IMessageHeader SendMessage(IMessage message, IMessageHeader? inResponseTo = null, string? 
routingKey = null) - { - return SendMessageImpl(message, inResponseTo, routingKey); - } + return SendMessageImpl(message, inResponseTo, routingKey); } } diff --git a/src/SmiServices/Common/Messaging/Consumer.cs b/src/SmiServices/Common/Messaging/Consumer.cs index ee08fb9e2..a0ea3f4e2 100644 --- a/src/SmiServices/Common/Messaging/Consumer.cs +++ b/src/SmiServices/Common/Messaging/Consumer.cs @@ -8,163 +8,162 @@ using System.Linq; using System.Threading.Tasks; -namespace SmiServices.Common.Messaging +namespace SmiServices.Common.Messaging; + +public abstract class Consumer : IConsumer where T : IMessage { - public abstract class Consumer : IConsumer where T : IMessage - { - /// - /// Count of the messages Acknowledged by this Consumer, use to increment this - /// - public int AckCount { get; private set; } + /// + /// Count of the messages Acknowledged by this Consumer, use to increment this + /// + public int AckCount { get; private set; } - /// - /// Count of the messages Rejected by this Consumer, use to increment this - /// - public int NackCount { get; private set; } + /// + /// Count of the messages Rejected by this Consumer, use to increment this + /// + public int NackCount { get; private set; } - /// - public bool HoldUnprocessableMessages { get; set; } = false; + /// + public bool HoldUnprocessableMessages { get; set; } = false; - protected int _heldMessages = 0; + protected int _heldMessages = 0; - /// - public int QoSPrefetchCount { get; set; } + /// + public int QoSPrefetchCount { get; set; } - /// - /// Event raised when Fatal method called - /// - public event ConsumerFatalHandler? OnFatal; - public event AckEventHandler? OnAck; - public event NackEventHandler? OnNack; + /// + /// Event raised when Fatal method called + /// + public event ConsumerFatalHandler? OnFatal; + public event AckEventHandler? OnAck; + public event NackEventHandler? 
OnNack; - protected readonly ILogger Logger; + protected readonly ILogger Logger; - private readonly object _oConsumeLock = new(); - private bool _exiting; + private readonly object _oConsumeLock = new(); + private bool _exiting; - public virtual void Shutdown() - { + public virtual void Shutdown() + { - } + } - protected Consumer() + protected Consumer() + { + string loggerName; + Type consumerType = GetType(); + + if (consumerType.IsGenericType) + { + string namePrefix = consumerType.Name.Split(new[] { '`' }, StringSplitOptions.RemoveEmptyEntries)[0]; + IEnumerable genericParameters = consumerType.GetGenericArguments().Select(x => x.Name); + loggerName = $"{namePrefix}<{string.Join(",", genericParameters)}>"; + } + else { - string loggerName; - Type consumerType = GetType(); + loggerName = consumerType.Name; + } - if (consumerType.IsGenericType) - { - string namePrefix = consumerType.Name.Split(new[] { '`' }, StringSplitOptions.RemoveEmptyEntries)[0]; - IEnumerable genericParameters = consumerType.GetGenericArguments().Select(x => x.Name); - loggerName = $"{namePrefix}<{string.Join(",", genericParameters)}>"; - } - else - { - loggerName = consumerType.Name; - } + Logger = LogManager.GetLogger(loggerName); + } - Logger = LogManager.GetLogger(loggerName); + public void ProcessMessage(IMessageHeader header, T message, ulong tag) + { + lock (_oConsumeLock) + { + if (_exiting) + return; } - public void ProcessMessage(IMessageHeader header, T message, ulong tag) + try { - lock (_oConsumeLock) - { - if (_exiting) - return; - } + ProcessMessageImpl(header, message, tag); + } + catch (Exception e) + { + Logger.Error(e, $"Unhandled exception when processing message {header.MessageGuid}"); - try + if (HoldUnprocessableMessages) { - ProcessMessageImpl(header, message, tag); + ++_heldMessages; + string msg = $"Holding an unprocessable message ({_heldMessages} total message(s) currently held"; + if (_heldMessages >= QoSPrefetchCount) + msg += $". 
Have now exceeded the configured BasicQos value of {QoSPrefetchCount}. No further messages will be delivered to this consumer!"; + Logger.Warn(msg); } - catch (Exception e) + else { - Logger.Error(e, $"Unhandled exception when processing message {header.MessageGuid}"); - - if (HoldUnprocessableMessages) - { - ++_heldMessages; - string msg = $"Holding an unprocessable message ({_heldMessages} total message(s) currently held"; - if (_heldMessages >= QoSPrefetchCount) - msg += $". Have now exceeded the configured BasicQos value of {QoSPrefetchCount}. No further messages will be delivered to this consumer!"; - Logger.Warn(msg); - } - else - { - Fatal("ProcessMessageImpl threw unhandled exception", e); - } + Fatal("ProcessMessageImpl threw unhandled exception", e); } } + } - protected abstract void ProcessMessageImpl(IMessageHeader header, T message, ulong tag); + protected abstract void ProcessMessageImpl(IMessageHeader header, T message, ulong tag); - /// - /// Instructs RabbitMQ to discard a single message and not requeue it - /// - /// - private void DiscardSingleMessage(ulong tag) - { - OnNack?.Invoke(this, new BasicNackEventArgs { DeliveryTag = tag, Multiple = false, Requeue = false }); - NackCount++; - } + /// + /// Instructs RabbitMQ to discard a single message and not requeue it + /// + /// + private void DiscardSingleMessage(ulong tag) + { + OnNack?.Invoke(this, new BasicNackEventArgs { DeliveryTag = tag, Multiple = false, Requeue = false }); + NackCount++; + } - protected virtual void ErrorAndNack(IMessageHeader header, ulong tag, string message, Exception exception) - { - header.Log(Logger, LogLevel.Error, message, exception); - DiscardSingleMessage(tag); - } + protected virtual void ErrorAndNack(IMessageHeader header, ulong tag, string message, Exception exception) + { + header.Log(Logger, LogLevel.Error, message, exception); + DiscardSingleMessage(tag); + } - protected void Ack(IMessageHeader header, ulong deliveryTag) - { - OnAck?.Invoke(this, new 
BasicAckEventArgs { DeliveryTag = deliveryTag, Multiple = false }); - header.Log(Logger, LogLevel.Trace, $"Acknowledged {header.MessageGuid}"); - AckCount++; - } + protected void Ack(IMessageHeader header, ulong deliveryTag) + { + OnAck?.Invoke(this, new BasicAckEventArgs { DeliveryTag = deliveryTag, Multiple = false }); + header.Log(Logger, LogLevel.Trace, $"Acknowledged {header.MessageGuid}"); + AckCount++; + } - /// - /// Acknowledges all in batch, this uses multiple which means you are accepting all up to the last message in the batch (including any not in your list - /// for any reason) - /// - /// - /// - protected void Ack(IList batchHeaders, ulong latestDeliveryTag) - { - foreach (IMessageHeader header in batchHeaders) - header.Log(Logger, LogLevel.Trace, "Acknowledged"); + /// + /// Acknowledges all in batch, this uses multiple which means you are accepting all up to the last message in the batch (including any not in your list + /// for any reason) + /// + /// + /// + protected void Ack(IList batchHeaders, ulong latestDeliveryTag) + { + foreach (IMessageHeader header in batchHeaders) + header.Log(Logger, LogLevel.Trace, "Acknowledged"); - AckCount += batchHeaders.Count; + AckCount += batchHeaders.Count; - OnAck?.Invoke(this, new BasicAckEventArgs { DeliveryTag = latestDeliveryTag, Multiple = true }); - } + OnAck?.Invoke(this, new BasicAckEventArgs { DeliveryTag = latestDeliveryTag, Multiple = true }); + } - /// - /// Logs a Fatal in the Logger and triggers the FatalError event which should shutdown the MessageBroker - /// Do not do any further processing after triggering this method - /// - /// - /// - protected void Fatal(string msg, Exception exception) + /// + /// Logs a Fatal in the Logger and triggers the FatalError event which should shutdown the MessageBroker + /// Do not do any further processing after triggering this method + /// + /// + /// + protected void Fatal(string msg, Exception exception) + { + lock (_oConsumeLock) { - lock 
(_oConsumeLock) - { - if (_exiting) - return; + if (_exiting) + return; - _exiting = true; + _exiting = true; - Logger.Fatal(exception, msg); + Logger.Fatal(exception, msg); - ConsumerFatalHandler? onFatal = OnFatal; + ConsumerFatalHandler? onFatal = OnFatal; - if (onFatal != null) - { - Task.Run(() => onFatal.Invoke(this, new FatalErrorEventArgs(msg, exception))); - } - else - { - throw new Exception("No handlers when attempting to raise OnFatal for this exception", exception); - } + if (onFatal != null) + { + Task.Run(() => onFatal.Invoke(this, new FatalErrorEventArgs(msg, exception))); + } + else + { + throw new Exception("No handlers when attempting to raise OnFatal for this exception", exception); } } } diff --git a/src/SmiServices/Common/Messaging/ControlMessageConsumer.cs b/src/SmiServices/Common/Messaging/ControlMessageConsumer.cs index c32e5c21b..ac0601f5d 100644 --- a/src/SmiServices/Common/Messaging/ControlMessageConsumer.cs +++ b/src/SmiServices/Common/Messaging/ControlMessageConsumer.cs @@ -8,178 +8,177 @@ using System.Text.RegularExpressions; using System.Threading.Tasks; -namespace SmiServices.Common.Messaging +namespace SmiServices.Common.Messaging; + +public class ControlMessageConsumer : IControlMessageConsumer { - public class ControlMessageConsumer : IControlMessageConsumer - { - private readonly ILogger _logger = LogManager.GetCurrentClassLogger(); + private readonly ILogger _logger = LogManager.GetCurrentClassLogger(); - public ConsumerOptions ControlConsumerOptions { get => _controlConsumerOptions; } + public ConsumerOptions ControlConsumerOptions { get => _controlConsumerOptions; } - private readonly ConsumerOptions _controlConsumerOptions = new() - { - QoSPrefetchCount = 1, - AutoAck = true - }; + private readonly ConsumerOptions _controlConsumerOptions = new() + { + QoSPrefetchCount = 1, + AutoAck = true + }; - public event StopEventHandler StopHost; - public event ControlEventHandler? 
ControlEvent; + public event StopEventHandler StopHost; + public event ControlEventHandler? ControlEvent; - private readonly string _processName; - private readonly string _processId; - private readonly IConnection _connection; + private readonly string _processName; + private readonly string _processId; + private readonly IConnection _connection; - private const string ControlQueueBindingKey = "smi.control.all.*"; + private const string ControlQueueBindingKey = "smi.control.all.*"; - public ControlMessageConsumer( - RabbitOptions rabbitOptions, - string processName, - int processId, - string controlExchangeName, - Action stopEvent) - { - ArgumentNullException.ThrowIfNull(processName); - ArgumentNullException.ThrowIfNull(controlExchangeName); - ArgumentNullException.ThrowIfNull(stopEvent); + public ControlMessageConsumer( + RabbitOptions rabbitOptions, + string processName, + int processId, + string controlExchangeName, + Action stopEvent) + { + ArgumentNullException.ThrowIfNull(processName); + ArgumentNullException.ThrowIfNull(controlExchangeName); + ArgumentNullException.ThrowIfNull(stopEvent); - _connection = rabbitOptions.Connection; - _processName = processName.ToLower(); - _processId = processId.ToString(); + _connection = rabbitOptions.Connection; + _processName = processName.ToLower(); + _processId = processId.ToString(); - _controlConsumerOptions.QueueName = $"Control.{_processName}{_processId}"; + _controlConsumerOptions.QueueName = $"Control.{_processName}{_processId}"; - SetupControlQueueForHost(controlExchangeName); + SetupControlQueueForHost(controlExchangeName); - StopHost += () => stopEvent("Control message stop"); - } + StopHost += () => stopEvent("Control message stop"); + } - /// - /// Recreate ProcessMessage to specifically handle control messages which won't have headers, - /// and shouldn't be included in any Ack/Nack counts - /// - public void ProcessMessage(string body, string routingKey) + /// + /// Recreate ProcessMessage to specifically 
handle control messages which won't have headers, + /// and shouldn't be included in any Ack/Nack counts + /// + public void ProcessMessage(string body, string routingKey) + { + try { - try - { - // For now we only deal with the simple case of "smi.control..". Can expand later on depending on our needs - // Queues will be deleted when the connection is closed so don't need to worry about messages being leftover + // For now we only deal with the simple case of "smi.control..". Can expand later on depending on our needs + // Queues will be deleted when the connection is closed so don't need to worry about messages being leftover - _logger.Info("Control message received with routing key: " + routingKey); + _logger.Info("Control message received with routing key: " + routingKey); - string[] split = routingKey.ToLower().Split('.'); + string[] split = routingKey.ToLower().Split('.'); - if (split.Length < 4) - { - _logger.Debug("Control command shorter than the minimum format"); - return; - } + if (split.Length < 4) + { + _logger.Debug("Control command shorter than the minimum format"); + return; + } - // Who, what - string actor = string.Join(".", split.Skip(2).Take(split.Length - 3)); - string action = split[^1]; + // Who, what + string actor = string.Join(".", split.Skip(2).Take(split.Length - 3)); + string action = split[^1]; - // If action contains a numeric and it's not our PID, then ignore - if (action.Any(char.IsDigit) && !action.EndsWith(_processId)) - return; + // If action contains a numeric and it's not our PID, then ignore + if (action.Any(char.IsDigit) && !action.EndsWith(_processId)) + return; - // Ignore any messages not meant for us - if (!actor.Equals("all") && !actor.Equals(_processName)) - { - _logger.Debug("Control command did not match this service"); - return; - } + // Ignore any messages not meant for us + if (!actor.Equals("all") && !actor.Equals(_processName)) + { + _logger.Debug("Control command did not match this service"); + return; + } - // 
Handle any general actions - just stop and ping for now + // Handle any general actions - just stop and ping for now - if (action.StartsWith("stop")) + if (action.StartsWith("stop")) + { + if (StopHost == null) { - if (StopHost == null) - { - // This should never really happen - _logger.Info("Received stop command but no stop event registered"); - return; - } - - _logger.Info("Stop request received, raising StopHost event"); - Task.Run(() => StopHost.Invoke()); - + // This should never really happen + _logger.Info("Received stop command but no stop event registered"); return; } - if (action.StartsWith("ping")) - { - _logger.Info("Pong!"); - return; - } + _logger.Info("Stop request received, raising StopHost event"); + Task.Run(() => StopHost.Invoke()); - // Don't pass any unhandled broadcast (to "all") messages down to the hosts - if (actor.Equals("all")) - return; + return; + } - // Else raise the event if any hosts have specific control needs - if (ControlEvent != null) - { - _logger.Debug("Control message not handled, raising registered ControlEvent(s)"); - ControlEvent(Regex.Replace(action, @"[\d]", ""), body); + if (action.StartsWith("ping")) + { + _logger.Info("Pong!"); + return; + } - return; - } + // Don't pass any unhandled broadcast (to "all") messages down to the hosts + if (actor.Equals("all")) + return; - // Else we should ignore it? - _logger.Warn("Unhandled control message with routing key: " + routingKey); - } - catch (Exception exception) + // Else raise the event if any hosts have specific control needs + if (ControlEvent != null) { - _logger.Error(exception, "ProcessMessageImpl threw unhandled exception"); + _logger.Debug("Control message not handled, raising registered ControlEvent(s)"); + ControlEvent(Regex.Replace(action, @"[\d]", ""), body); + + return; } - } - /// - /// Ensures the control queue is cleaned up on exit. Should have been deleted already, but this ensures it - /// - public void Shutdown() + // Else we should ignore it? 
+ _logger.Warn("Unhandled control message with routing key: " + routingKey); + } + catch (Exception exception) { - using var model = _connection.CreateModel(); - _logger.Debug($"Deleting control queue: {_controlConsumerOptions.QueueName}"); - model.QueueDelete(_controlConsumerOptions.QueueName); + _logger.Error(exception, "ProcessMessageImpl threw unhandled exception"); } + } - /// - /// Creates a one-time connection to set up the required control queue and bindings on the RabbitMQ server. - /// The connection is disposed and StartConsumer(...) can then be called on the parent MessageBroker with ControlConsumerOptions - /// - /// - private void SetupControlQueueForHost(string controlExchangeName) + /// + /// Ensures the control queue is cleaned up on exit. Should have been deleted already, but this ensures it + /// + public void Shutdown() + { + using var model = _connection.CreateModel(); + _logger.Debug($"Deleting control queue: {_controlConsumerOptions.QueueName}"); + model.QueueDelete(_controlConsumerOptions.QueueName); + } + + /// + /// Creates a one-time connection to set up the required control queue and bindings on the RabbitMQ server. + /// The connection is disposed and StartConsumer(...) 
can then be called on the parent MessageBroker with ControlConsumerOptions + /// + /// + private void SetupControlQueueForHost(string controlExchangeName) + { + using var model = _connection.CreateModel(); + try { - using var model = _connection.CreateModel(); - try - { - model.ExchangeDeclarePassive(controlExchangeName); - } - catch (OperationInterruptedException e) - { - throw new ApplicationException($"The given control exchange was not found on the server: \"{controlExchangeName}\"", e); - } + model.ExchangeDeclarePassive(controlExchangeName); + } + catch (OperationInterruptedException e) + { + throw new ApplicationException($"The given control exchange was not found on the server: \"{controlExchangeName}\"", e); + } - _logger.Debug($"Creating control queue {_controlConsumerOptions.QueueName}"); + _logger.Debug($"Creating control queue {_controlConsumerOptions.QueueName}"); - // Declare our queue with: - // durable = false (queue will not persist over restarts of the RabbitMq server) - // exclusive = false (queue won't be deleted when THIS connection closes) - // autoDelete = true (queue will be deleted after a consumer connects and then disconnects) - model.QueueDeclare(_controlConsumerOptions.QueueName, durable: false, exclusive: false, autoDelete: true); + // Declare our queue with: + // durable = false (queue will not persist over restarts of the RabbitMq server) + // exclusive = false (queue won't be deleted when THIS connection closes) + // autoDelete = true (queue will be deleted after a consumer connects and then disconnects) + model.QueueDeclare(_controlConsumerOptions.QueueName, durable: false, exclusive: false, autoDelete: true); - // Binding for any control requests, i.e. "stop" - _logger.Debug($"Creating binding {controlExchangeName}->{_controlConsumerOptions.QueueName} with key {ControlQueueBindingKey}"); - model.QueueBind(_controlConsumerOptions.QueueName, controlExchangeName, ControlQueueBindingKey); + // Binding for any control requests, i.e. 
"stop" + _logger.Debug($"Creating binding {controlExchangeName}->{_controlConsumerOptions.QueueName} with key {ControlQueueBindingKey}"); + model.QueueBind(_controlConsumerOptions.QueueName, controlExchangeName, ControlQueueBindingKey); - // Specific microservice binding key, ignoring the id at the end of the process name - string bindingKey = $"smi.control.{_processName}.*"; + // Specific microservice binding key, ignoring the id at the end of the process name + string bindingKey = $"smi.control.{_processName}.*"; - _logger.Debug($"Creating binding {controlExchangeName}->{_controlConsumerOptions.QueueName} with key {bindingKey}"); - model.QueueBind(_controlConsumerOptions.QueueName, controlExchangeName, bindingKey); - } + _logger.Debug($"Creating binding {controlExchangeName}->{_controlConsumerOptions.QueueName} with key {bindingKey}"); + model.QueueBind(_controlConsumerOptions.QueueName, controlExchangeName, bindingKey); } } diff --git a/src/SmiServices/Common/Messaging/IConsumer.cs b/src/SmiServices/Common/Messaging/IConsumer.cs index 4f7e93a58..0283c12fb 100644 --- a/src/SmiServices/Common/Messaging/IConsumer.cs +++ b/src/SmiServices/Common/Messaging/IConsumer.cs @@ -3,46 +3,45 @@ using SmiServices.Common.Messages; using SmiServices.Common.Options; -namespace SmiServices.Common.Messaging +namespace SmiServices.Common.Messaging; + +/// +/// Interface for an object which handles messages obtained by a MessageBroker. +/// +public interface IConsumer where T : IMessage { /// - /// Interface for an object which handles messages obtained by a MessageBroker. + /// Process a message received by the adapter. + /// + void ProcessMessage(IMessageHeader header, T message, ulong tag); + + /// + /// Callback raised when Ack-ing a message + /// + event AckEventHandler OnAck; + + /// + /// Callback raised when Nack-ing a message + /// + event NackEventHandler OnNack; + + /// + /// + /// + event ConsumerFatalHandler? 
OnFatal; + + /// + /// Trigger a clean shutdown of worker threads etc + /// + void Shutdown(); + + /// + /// If set, consumer will not call Fatal when an unhandled exception occurs when processing a message. Requires to be false + /// + bool HoldUnprocessableMessages { get; set; } + + /// + /// The BasicQos value configured on the /// - public interface IConsumer where T : IMessage - { - /// - /// Process a message received by the adapter. - /// - void ProcessMessage(IMessageHeader header, T message, ulong tag); - - /// - /// Callback raised when Ack-ing a message - /// - event AckEventHandler OnAck; - - /// - /// Callback raised when Nack-ing a message - /// - event NackEventHandler OnNack; - - /// - /// - /// - event ConsumerFatalHandler? OnFatal; - - /// - /// Trigger a clean shutdown of worker threads etc - /// - void Shutdown(); - - /// - /// If set, consumer will not call Fatal when an unhandled exception occurs when processing a message. Requires to be false - /// - bool HoldUnprocessableMessages { get; set; } - - /// - /// The BasicQos value configured on the - /// - int QoSPrefetchCount { get; set; } - } + int QoSPrefetchCount { get; set; } } diff --git a/src/SmiServices/Common/Messaging/IControlMessageConsumer.cs b/src/SmiServices/Common/Messaging/IControlMessageConsumer.cs index 73b2dba11..e5555900d 100644 --- a/src/SmiServices/Common/Messaging/IControlMessageConsumer.cs +++ b/src/SmiServices/Common/Messaging/IControlMessageConsumer.cs @@ -1,11 +1,10 @@ using SmiServices.Common.Options; -namespace SmiServices.Common.Messaging +namespace SmiServices.Common.Messaging; + +public interface IControlMessageConsumer { - public interface IControlMessageConsumer - { - ConsumerOptions ControlConsumerOptions { get; } + ConsumerOptions ControlConsumerOptions { get; } - void ProcessMessage(string body, string routingKey); - } + void ProcessMessage(string body, string routingKey); } diff --git a/src/SmiServices/Common/Messaging/IControlMessageHandler.cs 
b/src/SmiServices/Common/Messaging/IControlMessageHandler.cs index 6c359c108..d071b7a0f 100644 --- a/src/SmiServices/Common/Messaging/IControlMessageHandler.cs +++ b/src/SmiServices/Common/Messaging/IControlMessageHandler.cs @@ -1,7 +1,6 @@ -namespace SmiServices.Common.Messaging +namespace SmiServices.Common.Messaging; + +public interface IControlMessageHandler { - public interface IControlMessageHandler - { - void ControlMessageHandler(string action, string? message); - } + void ControlMessageHandler(string action, string? message); } diff --git a/src/SmiServices/Common/Messaging/IProducerModel.cs b/src/SmiServices/Common/Messaging/IProducerModel.cs index bb0ced60c..afe57b627 100644 --- a/src/SmiServices/Common/Messaging/IProducerModel.cs +++ b/src/SmiServices/Common/Messaging/IProducerModel.cs @@ -1,30 +1,29 @@ using SmiServices.Common.Events; using SmiServices.Common.Messages; -namespace SmiServices.Common.Messaging +namespace SmiServices.Common.Messaging; + +// TODO(rkm 2021-04-01) Make this generic over the message type it's expected to send(?) +/// +/// Interface for an object which can send messages to RabbitMQ. +/// +public interface IProducerModel { - // TODO(rkm 2021-04-01) Make this generic over the message type it's expected to send(?) /// - /// Interface for an object which can send messages to RabbitMQ. + /// Sends a to a RabbitMQ exchange with the appropriate . /// - public interface IProducerModel - { - /// - /// Sends a to a RabbitMQ exchange with the appropriate . - /// - /// Message object to serialise and send. - /// If you are responding to a message, pass that messages header in here (otherwise pass null) - /// Routing key for the exchange to direct the message. - IMessageHeader SendMessage(IMessage message, IMessageHeader? isInResponseTo, string? routingKey); + /// Message object to serialise and send. 
+ /// If you are responding to a message, pass that messages header in here (otherwise pass null) + /// Routing key for the exchange to direct the message. + IMessageHeader SendMessage(IMessage message, IMessageHeader? isInResponseTo, string? routingKey); - /// - /// Waits until all sent messages are confirmed by RabbitMQ - /// - void WaitForConfirms(); + /// + /// Waits until all sent messages are confirmed by RabbitMQ + /// + void WaitForConfirms(); - /// - /// - /// - event ProducerFatalHandler OnFatal; - } + /// + /// + /// + event ProducerFatalHandler OnFatal; } diff --git a/src/SmiServices/Common/Messaging/ProducerModel.cs b/src/SmiServices/Common/Messaging/ProducerModel.cs index 7ff02710e..8876655e8 100644 --- a/src/SmiServices/Common/Messaging/ProducerModel.cs +++ b/src/SmiServices/Common/Messaging/ProducerModel.cs @@ -9,197 +9,196 @@ using System.Text; using System.Threading; -namespace SmiServices.Common.Messaging +namespace SmiServices.Common.Messaging; + +/// +/// Class to implement sending of messages to a RabbitMQ exchange. +/// +public class ProducerModel : IProducerModel { + public event ProducerFatalHandler? OnFatal; + + private readonly ILogger _logger; + + private readonly IModel _model; + private readonly IBasicProperties _messageBasicProperties; + + private readonly string _exchangeName; + + private readonly int _maxRetryAttempts; + private const int ConfirmTimeoutMs = 5000; + + // Used to stop messages being produced if we are in the process of crashing out + private readonly object _oSendLock = new(); + + private readonly IBackoffProvider? _backoffProvider; + + // TODO(rkm 2024-10-02) Make this configurable + private const int _probeCounterLimit = 1_000; + + private readonly TimeSpan _probeTimeout = TimeSpan.FromMinutes(1); + + // Start at the limit so an initial check is performed + private int _probeMessageCounter = _probeCounterLimit; + private readonly string? 
_probeQueueName; + private readonly int _probeQueueLimit; + /// - /// Class to implement sending of messages to a RabbitMQ exchange. - /// - public class ProducerModel : IProducerModel + /// + /// + /// + /// + /// + /// Max number of times to retry message confirmations + /// + /// + /// + /// + public ProducerModel( + string exchangeName, IModel model, + IBasicProperties properties, + int maxRetryAttempts = 1, + IBackoffProvider? backoffProvider = null, + string? probeQueueName = null, + int probeQueueLimit = 0, + TimeSpan? probeTimeout = null + ) { - public event ProducerFatalHandler? OnFatal; - - private readonly ILogger _logger; - - private readonly IModel _model; - private readonly IBasicProperties _messageBasicProperties; - - private readonly string _exchangeName; - - private readonly int _maxRetryAttempts; - private const int ConfirmTimeoutMs = 5000; - - // Used to stop messages being produced if we are in the process of crashing out - private readonly object _oSendLock = new(); - - private readonly IBackoffProvider? _backoffProvider; - - // TODO(rkm 2024-10-02) Make this configurable - private const int _probeCounterLimit = 1_000; - - private readonly TimeSpan _probeTimeout = TimeSpan.FromMinutes(1); - - // Start at the limit so an initial check is performed - private int _probeMessageCounter = _probeCounterLimit; - private readonly string? _probeQueueName; - private readonly int _probeQueueLimit; - - /// - /// - /// - /// - /// - /// - /// Max number of times to retry message confirmations - /// - /// - /// - /// - public ProducerModel( - string exchangeName, IModel model, - IBasicProperties properties, - int maxRetryAttempts = 1, - IBackoffProvider? backoffProvider = null, - string? probeQueueName = null, - int probeQueueLimit = 0, - TimeSpan? 
probeTimeout = null - ) - { - if (string.IsNullOrWhiteSpace(exchangeName)) - throw new ArgumentException("exchangeName parameter is invalid: \"" + exchangeName + "\""); + if (string.IsNullOrWhiteSpace(exchangeName)) + throw new ArgumentException("exchangeName parameter is invalid: \"" + exchangeName + "\""); - _exchangeName = exchangeName; + _exchangeName = exchangeName; - if (maxRetryAttempts < 0) - throw new ArgumentException("maxRetryAttempts must be greater than 0. Given: " + maxRetryAttempts); + if (maxRetryAttempts < 0) + throw new ArgumentException("maxRetryAttempts must be greater than 0. Given: " + maxRetryAttempts); - _maxRetryAttempts = maxRetryAttempts; + _maxRetryAttempts = maxRetryAttempts; - _logger = LogManager.GetLogger(GetType().Name); + _logger = LogManager.GetLogger(GetType().Name); - _model = model; - _messageBasicProperties = properties; + _model = model; + _messageBasicProperties = properties; - //TODO Understand this a bit better and investigate whether this also happens on consumer processes + //TODO Understand this a bit better and investigate whether this also happens on consumer processes - // Handle messages 'returned' by RabbitMQ - occurs when a messages published as persistent can't be routed to a queue - _model.BasicReturn += (s, a) => _logger.Warn("BasicReturn for Exchange '{0}' Routing Key '{1}' ReplyCode '{2}' ({3})", a.Exchange, a.RoutingKey, a.ReplyCode, a.ReplyText); - _model.BasicReturn += (s, a) => Fatal(a); + // Handle messages 'returned' by RabbitMQ - occurs when a messages published as persistent can't be routed to a queue + _model.BasicReturn += (s, a) => _logger.Warn("BasicReturn for Exchange '{0}' Routing Key '{1}' ReplyCode '{2}' ({3})", a.Exchange, a.RoutingKey, a.ReplyCode, a.ReplyText); + _model.BasicReturn += (s, a) => Fatal(a); - // Handle RabbitMQ putting the queue into flow control mode - _model.FlowControl += (s, a) => _logger.Warn("FlowControl for " + exchangeName); + // Handle RabbitMQ putting the queue into 
flow control mode + _model.FlowControl += (s, a) => _logger.Warn("FlowControl for " + exchangeName); - _backoffProvider = backoffProvider; + _backoffProvider = backoffProvider; - _probeQueueName = probeQueueName; - _probeQueueLimit = probeQueueLimit; - if (probeTimeout != null) - _probeTimeout = probeTimeout.Value; + _probeQueueName = probeQueueName; + _probeQueueLimit = probeQueueLimit; + if (probeTimeout != null) + _probeTimeout = probeTimeout.Value; - if (_probeQueueName != null) - { - var messageCount = model.MessageCount(_probeQueueName); - _logger.Debug($"Probe queue has {messageCount} message(s)"); - } + if (_probeQueueName != null) + { + var messageCount = model.MessageCount(_probeQueueName); + _logger.Debug($"Probe queue has {messageCount} message(s)"); } + } - /// - /// - /// - /// - /// - /// - /// - public virtual IMessageHeader SendMessage(IMessage message, IMessageHeader? inResponseTo = null, string? routingKey = null) - { - IMessageHeader header = SendMessageImpl(message, inResponseTo, routingKey); - WaitForConfirms(); - header.Log(_logger, LogLevel.Trace, "Sent " + header.MessageGuid + " to " + _exchangeName); + /// + /// + /// + /// + /// + /// + /// + public virtual IMessageHeader SendMessage(IMessage message, IMessageHeader? inResponseTo = null, string? 
routingKey = null) + { + IMessageHeader header = SendMessageImpl(message, inResponseTo, routingKey); + WaitForConfirms(); + header.Log(_logger, LogLevel.Trace, "Sent " + header.MessageGuid + " to " + _exchangeName); - return header; - } + return header; + } - public void WaitForConfirms() - { - // Attempt to get a publish confirmation from RabbitMQ, with some retry/timeout + public void WaitForConfirms() + { + // Attempt to get a publish confirmation from RabbitMQ, with some retry/timeout - var keepTrying = true; - var numAttempts = 0; + var keepTrying = true; + var numAttempts = 0; - while (keepTrying) + while (keepTrying) + { + if (_model.WaitForConfirms(TimeSpan.FromMilliseconds(ConfirmTimeoutMs), out var timedOut)) { - if (_model.WaitForConfirms(TimeSpan.FromMilliseconds(ConfirmTimeoutMs), out var timedOut)) - { - _backoffProvider?.Reset(); - return; - } - - if (timedOut) - { - keepTrying = (++numAttempts < _maxRetryAttempts); - _logger.Warn($"RabbitMQ WaitForConfirms timed out. numAttempts: {numAttempts}"); + _backoffProvider?.Reset(); + return; + } - TimeSpan? backoff = _backoffProvider?.GetNextBackoff(); - if (backoff.HasValue) - { - _logger.Warn($"Backing off for {backoff}"); - Thread.Sleep(backoff.Value); - } + if (timedOut) + { + keepTrying = (++numAttempts < _maxRetryAttempts); + _logger.Warn($"RabbitMQ WaitForConfirms timed out. numAttempts: {numAttempts}"); - continue; + TimeSpan? backoff = _backoffProvider?.GetNextBackoff(); + if (backoff.HasValue) + { + _logger.Warn($"Backing off for {backoff}"); + Thread.Sleep(backoff.Value); } - throw new ApplicationException("RabbitMQ got a Nack"); + continue; } - throw new ApplicationException("Could not confirm message published after timeout"); + throw new ApplicationException("RabbitMQ got a Nack"); } - /// - /// - /// - /// - /// - /// - /// - protected IMessageHeader SendMessageImpl(IMessage message, IMessageHeader? inResponseTo = null, string? 
routingKey = null) + throw new ApplicationException("Could not confirm message published after timeout"); + } + + /// + /// + /// + /// + /// + /// + /// + protected IMessageHeader SendMessageImpl(IMessage message, IMessageHeader? inResponseTo = null, string? routingKey = null) + { + lock (_oSendLock) { - lock (_oSendLock) - { - byte[] body = Encoding.UTF8.GetBytes(JsonConvert.SerializeObject(message)); + byte[] body = Encoding.UTF8.GetBytes(JsonConvert.SerializeObject(message)); - _messageBasicProperties.Timestamp = new AmqpTimestamp(MessageHeader.UnixTimeNow()); - _messageBasicProperties.Headers = new Dictionary(); + _messageBasicProperties.Timestamp = new AmqpTimestamp(MessageHeader.UnixTimeNow()); + _messageBasicProperties.Headers = new Dictionary(); - IMessageHeader header = inResponseTo != null ? new MessageHeader(inResponseTo) : new MessageHeader(); - header.Populate(_messageBasicProperties.Headers); + IMessageHeader header = inResponseTo != null ? new MessageHeader(inResponseTo) : new MessageHeader(); + header.Populate(_messageBasicProperties.Headers); - if (_probeQueueName != null && _probeMessageCounter >= _probeCounterLimit) + if (_probeQueueName != null && _probeMessageCounter >= _probeCounterLimit) + { + while (_model.MessageCount(_probeQueueName) >= _probeQueueLimit) { - while (_model.MessageCount(_probeQueueName) >= _probeQueueLimit) - { - _logger.Warn($"Probe queue ({_probeQueueName}) over message limit ({_probeCounterLimit}). Sleeping for {_probeTimeout}"); - Thread.Sleep(_probeTimeout); - } - _probeMessageCounter = 0; + _logger.Warn($"Probe queue ({_probeQueueName}) over message limit ({_probeCounterLimit}). Sleeping for {_probeTimeout}"); + Thread.Sleep(_probeTimeout); } + _probeMessageCounter = 0; + } - _model.BasicPublish(_exchangeName, routingKey ?? "", true, _messageBasicProperties, body); - ++_probeMessageCounter; + _model.BasicPublish(_exchangeName, routingKey ?? 
"", true, _messageBasicProperties, body); + ++_probeMessageCounter; - return header; - } + return header; } + } - private void Fatal(BasicReturnEventArgs a) + private void Fatal(BasicReturnEventArgs a) + { + lock (_oSendLock) { - lock (_oSendLock) - { - if (OnFatal != null) - OnFatal.Invoke(this, a); - else - throw new ApplicationException("No subscribers for fatal error event"); - } + if (OnFatal != null) + OnFatal.Invoke(this, a); + else + throw new ApplicationException("No subscribers for fatal error event"); } } } diff --git a/src/SmiServices/Common/Messaging/RabbitMQBroker.cs b/src/SmiServices/Common/Messaging/RabbitMQBroker.cs index e161faea4..628d6efb2 100644 --- a/src/SmiServices/Common/Messaging/RabbitMQBroker.cs +++ b/src/SmiServices/Common/Messaging/RabbitMQBroker.cs @@ -12,488 +12,487 @@ using System.Text; using System.Threading; -namespace SmiServices.Common.Messaging +namespace SmiServices.Common.Messaging; + +/// +/// Adapter for the RabbitMQ API. +/// +public class RabbitMQBroker : IMessageBroker { /// - /// Adapter for the RabbitMQ API. 
+ /// Used to ensure we can't create any new connections after we have called Shutdown() /// - public class RabbitMQBroker : IMessageBroker - { - /// - /// Used to ensure we can't create any new connections after we have called Shutdown() - /// - public bool ShutdownCalled { get; private set; } + public bool ShutdownCalled { get; private set; } - public bool HasConsumers + public bool HasConsumers + { + get { - get + lock (_oResourceLock) { - lock (_oResourceLock) - { - return _rabbitResources.Any(x => x.Value is ConsumerResources); - } + return _rabbitResources.Any(x => x.Value is ConsumerResources); } } + } - public const string RabbitMqRoutingKey_MatchAnything = "#"; - public const string RabbitMqRoutingKey_MatchOneWord = "*"; - - public static readonly TimeSpan DefaultOperationTimeout = TimeSpan.FromSeconds(5); - - private readonly ILogger _logger = LogManager.GetCurrentClassLogger(); - - private readonly HostFatalHandler? _hostFatalHandler; - - private readonly IConnection _connection; - private readonly Dictionary _rabbitResources = []; - private readonly object _oResourceLock = new(); - private readonly object _exitLock = new(); + public const string RabbitMqRoutingKey_MatchAnything = "#"; + public const string RabbitMqRoutingKey_MatchOneWord = "*"; - private const int MinRabbitServerVersionMajor = 3; - private const int MinRabbitServerVersionMinor = 7; - private const int MinRabbitServerVersionPatch = 0; + public static readonly TimeSpan DefaultOperationTimeout = TimeSpan.FromSeconds(5); - /// - /// - /// - /// - /// Identifier for this host instance - /// - public RabbitMQBroker(RabbitOptions rabbitOptions, string hostId, HostFatalHandler? 
hostFatalHandler = null) - { - if (string.IsNullOrWhiteSpace(hostId)) - throw new ArgumentException("RabbitMQ host ID required", nameof(hostId)); + private readonly ILogger _logger = LogManager.GetCurrentClassLogger(); - _connection = rabbitOptions.Connection; - _connection.ConnectionBlocked += (s, a) => _logger.Warn($"ConnectionBlocked (Reason: {a.Reason})"); - _connection.ConnectionUnblocked += (s, a) => _logger.Warn("ConnectionUnblocked"); + private readonly HostFatalHandler? _hostFatalHandler; - if (hostFatalHandler == null) - _logger.Warn("No handler given for fatal events"); + private readonly IConnection _connection; + private readonly Dictionary _rabbitResources = []; + private readonly object _oResourceLock = new(); + private readonly object _exitLock = new(); - _hostFatalHandler = hostFatalHandler; + private const int MinRabbitServerVersionMajor = 3; + private const int MinRabbitServerVersionMinor = 7; + private const int MinRabbitServerVersionPatch = 0; - CheckValidServerSettings(); - } + /// + /// + /// + /// + /// Identifier for this host instance + /// + public RabbitMQBroker(RabbitOptions rabbitOptions, string hostId, HostFatalHandler? hostFatalHandler = null) + { + if (string.IsNullOrWhiteSpace(hostId)) + throw new ArgumentException("RabbitMQ host ID required", nameof(hostId)); + _connection = rabbitOptions.Connection; + _connection.ConnectionBlocked += (s, a) => _logger.Warn($"ConnectionBlocked (Reason: {a.Reason})"); + _connection.ConnectionUnblocked += (s, a) => _logger.Warn("ConnectionUnblocked"); - /// - /// Setup a subscription to a queue which sends messages to the . - /// - /// The connection options. - /// Consumer that will be sent any received messages. 
- /// If specified, will ensure that it is the only consumer on the provided queue - /// Identifier for the consumer task, can be used to stop the consumer without shutting down the whole adapter - public Guid StartConsumer(ConsumerOptions consumerOptions, IConsumer consumer, bool isSolo = false) where T : IMessage - { - if (ShutdownCalled) - throw new ApplicationException("Adapter has been shut down"); + if (hostFatalHandler == null) + _logger.Warn("No handler given for fatal events"); - if (!consumerOptions.VerifyPopulated()) - throw new ArgumentException($"The given {nameof(consumerOptions)} has invalid values"); + _hostFatalHandler = hostFatalHandler; - var model = _connection.CreateModel(); - model.BasicQos(0, consumerOptions.QoSPrefetchCount, false); - consumer.QoSPrefetchCount = consumerOptions.QoSPrefetchCount; + CheckValidServerSettings(); + } - // Check queue exists - try - { - // Passively declare the queue (equivalent to checking the queue exists) - model.QueueDeclarePassive(consumerOptions.QueueName); - } - catch (OperationInterruptedException e) - { - model.Close(200, "StartConsumer - Queue missing"); - throw new ApplicationException($"Expected queue \"{consumerOptions.QueueName}\" to exist", e); - } - if (isSolo && model.ConsumerCount(consumerOptions.QueueName) > 0) - { - model.Close(200, "StartConsumer - Already a consumer on the queue"); - throw new ApplicationException($"Already a consumer on queue {consumerOptions.QueueName} and solo consumer was specified"); - } + /// + /// Setup a subscription to a queue which sends messages to the . + /// + /// The connection options. + /// Consumer that will be sent any received messages. 
+ /// If specified, will ensure that it is the only consumer on the provided queue + /// Identifier for the consumer task, can be used to stop the consumer without shutting down the whole adapter + public Guid StartConsumer(ConsumerOptions consumerOptions, IConsumer consumer, bool isSolo = false) where T : IMessage + { + if (ShutdownCalled) + throw new ApplicationException("Adapter has been shut down"); - EventingBasicConsumer ebc = new(model); - ebc.Received += (o, a) => HandleMessage(o, a, consumer); + if (!consumerOptions.VerifyPopulated()) + throw new ArgumentException($"The given {nameof(consumerOptions)} has invalid values"); - void shutdown(object? o, ShutdownEventArgs a) - { - var reason = "cancellation was requested"; - if (ebc.Model.IsClosed) - reason = "channel is closed"; - if (ShutdownCalled) - reason = "shutdown was called"; - _logger.Debug($"Consumer for {consumerOptions.QueueName} exiting ({reason})"); - } - model.ModelShutdown += shutdown; - ebc.Shutdown += shutdown; + var model = _connection.CreateModel(); + model.BasicQos(0, consumerOptions.QoSPrefetchCount, false); + consumer.QoSPrefetchCount = consumerOptions.QoSPrefetchCount; - var resources = new ConsumerResources(ebc, consumerOptions.QueueName!, model); - Guid taskId = Guid.NewGuid(); + // Check queue exists + try + { + // Passively declare the queue (equivalent to checking the queue exists) + model.QueueDeclarePassive(consumerOptions.QueueName); + } + catch (OperationInterruptedException e) + { + model.Close(200, "StartConsumer - Queue missing"); + throw new ApplicationException($"Expected queue \"{consumerOptions.QueueName}\" to exist", e); + } - lock (_oResourceLock) - { - _rabbitResources.Add(taskId, resources); - } + if (isSolo && model.ConsumerCount(consumerOptions.QueueName) > 0) + { + model.Close(200, "StartConsumer - Already a consumer on the queue"); + throw new ApplicationException($"Already a consumer on queue {consumerOptions.QueueName} and solo consumer was specified"); + } - 
consumer.OnFatal += (s, e) => - { - resources.Dispose(); - _hostFatalHandler?.Invoke(s, e); - }; + EventingBasicConsumer ebc = new(model); + ebc.Received += (o, a) => HandleMessage(o, a, consumer); - if (consumerOptions.HoldUnprocessableMessages && !consumerOptions.AutoAck) - consumer.HoldUnprocessableMessages = true; + void shutdown(object? o, ShutdownEventArgs a) + { + var reason = "cancellation was requested"; + if (ebc.Model.IsClosed) + reason = "channel is closed"; + if (ShutdownCalled) + reason = "shutdown was called"; + _logger.Debug($"Consumer for {consumerOptions.QueueName} exiting ({reason})"); + } + model.ModelShutdown += shutdown; + ebc.Shutdown += shutdown; - consumer.OnAck += (_, a) => { ebc.Model.BasicAck(a.DeliveryTag, a.Multiple); }; - consumer.OnNack += (_, a) => { ebc.Model.BasicNack(a.DeliveryTag, a.Multiple, a.Requeue); }; + var resources = new ConsumerResources(ebc, consumerOptions.QueueName!, model); + Guid taskId = Guid.NewGuid(); - model.BasicConsume(ebc, consumerOptions.QueueName, consumerOptions.AutoAck); - _logger.Debug($"Consumer task started [QueueName={consumerOptions?.QueueName}]"); - return taskId; + lock (_oResourceLock) + { + _rabbitResources.Add(taskId, resources); } - private void HandleMessage(object? 
sender, BasicDeliverEventArgs deliverArgs, IConsumer consumer) where T : IMessage + consumer.OnFatal += (s, e) => { - var model = ((EventingBasicConsumer)sender!).Model; + resources.Dispose(); + _hostFatalHandler?.Invoke(s, e); + }; - Encoding enc = Encoding.UTF8; - MessageHeader header; + if (consumerOptions.HoldUnprocessableMessages && !consumerOptions.AutoAck) + consumer.HoldUnprocessableMessages = true; - try - { - if (deliverArgs.BasicProperties?.ContentEncoding != null) - enc = Encoding.GetEncoding(deliverArgs.BasicProperties.ContentEncoding); + consumer.OnAck += (_, a) => { ebc.Model.BasicAck(a.DeliveryTag, a.Multiple); }; + consumer.OnNack += (_, a) => { ebc.Model.BasicNack(a.DeliveryTag, a.Multiple, a.Requeue); }; - var headers = deliverArgs.BasicProperties?.Headers - ?? throw new ArgumentNullException(nameof(deliverArgs), "A part of deliverArgs.BasicProperties.Headers was null"); + model.BasicConsume(ebc, consumerOptions.QueueName, consumerOptions.AutoAck); + _logger.Debug($"Consumer task started [QueueName={consumerOptions?.QueueName}]"); + return taskId; + } - header = MessageHeader.FromDict(headers, enc); - header.Log(_logger, LogLevel.Trace, "Received"); - } - catch (Exception e) - { - _logger.Error("Message header content was null, or could not be parsed into a MessageHeader object: " + e); - model.BasicNack(deliverArgs.DeliveryTag, multiple: false, requeue: false); - return; - } + private void HandleMessage(object? 
sender, BasicDeliverEventArgs deliverArgs, IConsumer consumer) where T : IMessage + { + var model = ((EventingBasicConsumer)sender!).Model; - T message; + Encoding enc = Encoding.UTF8; + MessageHeader header; - try - { - message = JsonConvert.DeserializeObject(deliverArgs); - } - catch (Newtonsoft.Json.JsonSerializationException e) - { - // Deserialization exception - Can never process this message + try + { + if (deliverArgs.BasicProperties?.ContentEncoding != null) + enc = Encoding.GetEncoding(deliverArgs.BasicProperties.ContentEncoding); - _logger.Debug($"JsonSerializationException, doing ErrorAndNack for message (DeliveryTag {deliverArgs.DeliveryTag})"); - var errorMessage = $"Could not deserialize message to {typeof(T).Name} object. Likely an issue with the message content"; - header.Log(_logger, LogLevel.Error, errorMessage, e); - model.BasicNack(deliverArgs.DeliveryTag, multiple: false, requeue: false); - return; - } + var headers = deliverArgs.BasicProperties?.Headers + ?? throw new ArgumentNullException(nameof(deliverArgs), "A part of deliverArgs.BasicProperties.Headers was null"); - consumer.ProcessMessage(header, message, deliverArgs.DeliveryTag); + header = MessageHeader.FromDict(headers, enc); + header.Log(_logger, LogLevel.Trace, "Received"); + } + catch (Exception e) + { + _logger.Error("Message header content was null, or could not be parsed into a MessageHeader object: " + e); + model.BasicNack(deliverArgs.DeliveryTag, multiple: false, requeue: false); + return; } - public void StartControlConsumer(IControlMessageConsumer controlMessageConsumer) + T message; + + try { - var consumerOptions = controlMessageConsumer.ControlConsumerOptions; + message = JsonConvert.DeserializeObject(deliverArgs); + } + catch (Newtonsoft.Json.JsonSerializationException e) + { + // Deserialization exception - Can never process this message - if (ShutdownCalled) - throw new ApplicationException("Adapter has been shut down"); + _logger.Debug($"JsonSerializationException, 
doing ErrorAndNack for message (DeliveryTag {deliverArgs.DeliveryTag})"); + var errorMessage = $"Could not deserialize message to {typeof(T).Name} object. Likely an issue with the message content"; + header.Log(_logger, LogLevel.Error, errorMessage, e); + model.BasicNack(deliverArgs.DeliveryTag, multiple: false, requeue: false); + return; + } - if (!consumerOptions.VerifyPopulated()) - throw new ArgumentException($"The given {nameof(controlMessageConsumer)} has invalid values"); + consumer.ProcessMessage(header, message, deliverArgs.DeliveryTag); + } - var model = _connection.CreateModel(); - model.BasicQos(0, consumerOptions.QoSPrefetchCount, false); + public void StartControlConsumer(IControlMessageConsumer controlMessageConsumer) + { + var consumerOptions = controlMessageConsumer.ControlConsumerOptions; - EventingBasicConsumer ebc = new(model); - ebc.Received += (o, a) => HandleControlMessage(o, a, controlMessageConsumer); + if (ShutdownCalled) + throw new ApplicationException("Adapter has been shut down"); - void shutdown(object? 
o, ShutdownEventArgs a) - { - var reason = "cancellation was requested"; - if (ebc.Model.IsClosed) - reason = "channel is closed"; - if (ShutdownCalled) - reason = "shutdown was called"; - _logger.Debug($"Consumer for {consumerOptions.QueueName} exiting ({reason})"); - } - model.ModelShutdown += shutdown; - ebc.Shutdown += shutdown; + if (!consumerOptions.VerifyPopulated()) + throw new ArgumentException($"The given {nameof(controlMessageConsumer)} has invalid values"); - var resources = new ConsumerResources(ebc, consumerOptions.QueueName!, model); - lock (_oResourceLock) - _rabbitResources.Add(Guid.NewGuid(), resources); + var model = _connection.CreateModel(); + model.BasicQos(0, consumerOptions.QoSPrefetchCount, false); - model.BasicConsume(ebc, consumerOptions.QueueName, consumerOptions.AutoAck); - _logger.Debug($"Consumer task started [QueueName={consumerOptions?.QueueName}]"); - } + EventingBasicConsumer ebc = new(model); + ebc.Received += (o, a) => HandleControlMessage(o, a, controlMessageConsumer); - private static void HandleControlMessage(object? _, BasicDeliverEventArgs deliverArgs, IControlMessageConsumer controlMessageConsumer) + void shutdown(object? o, ShutdownEventArgs a) { - if (deliverArgs.Body.Length == 0) - return; + var reason = "cancellation was requested"; + if (ebc.Model.IsClosed) + reason = "channel is closed"; + if (ShutdownCalled) + reason = "shutdown was called"; + _logger.Debug($"Consumer for {consumerOptions.QueueName} exiting ({reason})"); + } + model.ModelShutdown += shutdown; + ebc.Shutdown += shutdown; - Encoding? 
enc = null; + var resources = new ConsumerResources(ebc, consumerOptions.QueueName!, model); + lock (_oResourceLock) + _rabbitResources.Add(Guid.NewGuid(), resources); - if (!string.IsNullOrWhiteSpace(deliverArgs.BasicProperties.ContentEncoding)) - { - try - { - enc = Encoding.GetEncoding(deliverArgs.BasicProperties.ContentEncoding); - } - catch (ArgumentException) - { - /* Ignored */ - } - } + model.BasicConsume(ebc, consumerOptions.QueueName, consumerOptions.AutoAck); + _logger.Debug($"Consumer task started [QueueName={consumerOptions?.QueueName}]"); + } - enc ??= Encoding.UTF8; - var body = enc.GetString(deliverArgs.Body.Span); + private static void HandleControlMessage(object? _, BasicDeliverEventArgs deliverArgs, IControlMessageConsumer controlMessageConsumer) + { + if (deliverArgs.Body.Length == 0) + return; - controlMessageConsumer.ProcessMessage(body, deliverArgs.RoutingKey); - } + Encoding? enc = null; - /// - /// - /// - /// - /// - public void StopConsumer(Guid taskId, TimeSpan timeout) + if (!string.IsNullOrWhiteSpace(deliverArgs.BasicProperties.ContentEncoding)) { - if (ShutdownCalled) - return; - - lock (_oResourceLock) + try { - if (!_rabbitResources.TryGetValue(taskId, out RabbitResources? value)) - throw new ApplicationException("Guid was not found in the task register"); - value.Dispose(); - _rabbitResources.Remove(taskId); + enc = Encoding.GetEncoding(deliverArgs.BasicProperties.ContentEncoding); + } + catch (ArgumentException) + { + /* Ignored */ } } - /// - /// Setup a to send messages with. - /// - /// The producer options class to setup which must include the exchange name. - /// - /// Object which can send messages to a RabbitMQ exchange. 
- public IProducerModel SetupProducer(ProducerOptions producerOptions, bool isBatch = false) + enc ??= Encoding.UTF8; + var body = enc.GetString(deliverArgs.Body.Span); + + controlMessageConsumer.ProcessMessage(body, deliverArgs.RoutingKey); + } + + /// + /// + /// + /// + /// + public void StopConsumer(Guid taskId, TimeSpan timeout) + { + if (ShutdownCalled) + return; + + lock (_oResourceLock) { - if (ShutdownCalled) - throw new ApplicationException("Adapter has been shut down"); + if (!_rabbitResources.TryGetValue(taskId, out RabbitResources? value)) + throw new ApplicationException("Guid was not found in the task register"); + value.Dispose(); + _rabbitResources.Remove(taskId); + } + } - if (!producerOptions.VerifyPopulated()) - throw new ArgumentException("The given producer options have invalid values"); + /// + /// Setup a to send messages with. + /// + /// The producer options class to setup which must include the exchange name. + /// + /// Object which can send messages to a RabbitMQ exchange. 
+ public IProducerModel SetupProducer(ProducerOptions producerOptions, bool isBatch = false) + { + if (ShutdownCalled) + throw new ApplicationException("Adapter has been shut down"); - //NOTE: IModel objects are /not/ thread safe - var model = _connection.CreateModel(); - model.ConfirmSelect(); + if (!producerOptions.VerifyPopulated()) + throw new ArgumentException("The given producer options have invalid values"); - try - { - // Passively declare the exchange (equivalent to checking the exchange exists) - model.ExchangeDeclarePassive(producerOptions.ExchangeName); - } - catch (OperationInterruptedException e) - { - model.Close(200, "SetupProducer - Exchange missing"); - throw new ApplicationException($"Expected exchange \"{producerOptions.ExchangeName}\" to exist", e); - } + //NOTE: IModel objects are /not/ thread safe + var model = _connection.CreateModel(); + model.ConfirmSelect(); - var props = model.CreateBasicProperties(); - props.ContentEncoding = "UTF-8"; - props.ContentType = "application/json"; - props.Persistent = true; + try + { + // Passively declare the exchange (equivalent to checking the exchange exists) + model.ExchangeDeclarePassive(producerOptions.ExchangeName); + } + catch (OperationInterruptedException e) + { + model.Close(200, "SetupProducer - Exchange missing"); + throw new ApplicationException($"Expected exchange \"{producerOptions.ExchangeName}\" to exist", e); + } - IBackoffProvider? backoffProvider = null; - if (producerOptions.BackoffProviderType != null) - { - try - { - backoffProvider = BackoffProviderFactory.Create(producerOptions.BackoffProviderType); - } - catch (Exception) - { - model.Close(200, "SetupProducer - Couldn't create BackoffProvider"); - throw; - } - } + var props = model.CreateBasicProperties(); + props.ContentEncoding = "UTF-8"; + props.ContentType = "application/json"; + props.Persistent = true; - IProducerModel producerModel; + IBackoffProvider? 
backoffProvider = null; + if (producerOptions.BackoffProviderType != null) + { try { - producerModel = isBatch ? - new BatchProducerModel(producerOptions.ExchangeName!, model, props, producerOptions.MaxConfirmAttempts, backoffProvider, producerOptions.ProbeQueueName, producerOptions.ProbeQueueLimit, producerOptions.ProbeTimeout) : - new ProducerModel(producerOptions.ExchangeName!, model, props, producerOptions.MaxConfirmAttempts, backoffProvider, producerOptions.ProbeQueueName, producerOptions.ProbeQueueLimit, producerOptions.ProbeTimeout); + backoffProvider = BackoffProviderFactory.Create(producerOptions.BackoffProviderType); } catch (Exception) { - model.Close(200, "SetupProducer - Couldn't create ProducerModel"); + model.Close(200, "SetupProducer - Couldn't create BackoffProvider"); throw; } + } - var resources = new ProducerResources(model, producerModel); - lock (_oResourceLock) - { - _rabbitResources.Add(Guid.NewGuid(), resources); - } - - producerModel.OnFatal += (s, ra) => - { - resources.Dispose(); - _hostFatalHandler?.Invoke(s, new FatalErrorEventArgs(ra)); - }; + IProducerModel producerModel; + try + { + producerModel = isBatch ? 
+ new BatchProducerModel(producerOptions.ExchangeName!, model, props, producerOptions.MaxConfirmAttempts, backoffProvider, producerOptions.ProbeQueueName, producerOptions.ProbeQueueLimit, producerOptions.ProbeTimeout) : + new ProducerModel(producerOptions.ExchangeName!, model, props, producerOptions.MaxConfirmAttempts, backoffProvider, producerOptions.ProbeQueueName, producerOptions.ProbeQueueLimit, producerOptions.ProbeTimeout); + } + catch (Exception) + { + model.Close(200, "SetupProducer - Couldn't create ProducerModel"); + throw; + } - return producerModel; + var resources = new ProducerResources(model, producerModel); + lock (_oResourceLock) + { + _rabbitResources.Add(Guid.NewGuid(), resources); } - /// - /// Get a blank model with no options set - /// - /// - /// - public IModel GetModel(string connectionName) + producerModel.OnFatal += (s, ra) => { - //TODO This method has no callback available for fatal errors + resources.Dispose(); + _hostFatalHandler?.Invoke(s, new FatalErrorEventArgs(ra)); + }; - if (ShutdownCalled) - throw new ApplicationException("Adapter has been shut down"); + return producerModel; + } - var model = _connection.CreateModel(); + /// + /// Get a blank model with no options set + /// + /// + /// + public IModel GetModel(string connectionName) + { + //TODO This method has no callback available for fatal errors - lock (_oResourceLock) - { - _rabbitResources.Add(Guid.NewGuid(), new RabbitResources(model)); - } + if (ShutdownCalled) + throw new ApplicationException("Adapter has been shut down"); - return model; - } + var model = _connection.CreateModel(); - /// - /// Close all open connections and stop any consumers - /// - /// Max time given for each consumer to exit - public void Shutdown(TimeSpan timeout) + lock (_oResourceLock) { - if (ShutdownCalled) - return; - if (timeout.Equals(TimeSpan.Zero)) - throw new ApplicationException($"Invalid {nameof(timeout)} value"); + _rabbitResources.Add(Guid.NewGuid(), new RabbitResources(model)); + } 
- ShutdownCalled = true; + return model; + } - lock (_oResourceLock) - { - foreach (var res in _rabbitResources.Values) - { - res.Dispose(); - } - _rabbitResources.Clear(); - } - lock (_exitLock) - Monitor.PulseAll(_exitLock); - } + /// + /// Close all open connections and stop any consumers + /// + /// Max time given for each consumer to exit + public void Shutdown(TimeSpan timeout) + { + if (ShutdownCalled) + return; + if (timeout.Equals(TimeSpan.Zero)) + throw new ApplicationException($"Invalid {nameof(timeout)} value"); - /// - /// Checks that the minimum RabbitMQ server version is met - /// - private void CheckValidServerSettings() + ShutdownCalled = true; + + lock (_oResourceLock) { - if (!_connection.ServerProperties.TryGetValue("version", out object? value)) - throw new ApplicationException("Could not get RabbitMQ server version"); - - var version = Encoding.UTF8.GetString((byte[])value); - var split = version.Split('.'); - - if (int.Parse(split[0]) < MinRabbitServerVersionMajor || - (int.Parse(split[0]) == MinRabbitServerVersionMajor && - int.Parse(split[1]) < MinRabbitServerVersionMinor) || - (int.Parse(split[0]) == MinRabbitServerVersionMajor && - int.Parse(split[1]) == MinRabbitServerVersionMinor && - int.Parse(split[2]) < MinRabbitServerVersionPatch)) + foreach (var res in _rabbitResources.Values) { - throw new ApplicationException( - $"Connected to RabbitMQ server version {version}, but minimum required is {MinRabbitServerVersionMajor}.{MinRabbitServerVersionMinor}.{MinRabbitServerVersionPatch}"); + res.Dispose(); } + _rabbitResources.Clear(); + } + lock (_exitLock) + Monitor.PulseAll(_exitLock); + } - _logger.Debug($"Connected to RabbitMQ server version {version}"); + /// + /// Checks that the minimum RabbitMQ server version is met + /// + private void CheckValidServerSettings() + { + if (!_connection.ServerProperties.TryGetValue("version", out object? 
value)) + throw new ApplicationException("Could not get RabbitMQ server version"); + + var version = Encoding.UTF8.GetString((byte[])value); + var split = version.Split('.'); + + if (int.Parse(split[0]) < MinRabbitServerVersionMajor || + (int.Parse(split[0]) == MinRabbitServerVersionMajor && + int.Parse(split[1]) < MinRabbitServerVersionMinor) || + (int.Parse(split[0]) == MinRabbitServerVersionMajor && + int.Parse(split[1]) == MinRabbitServerVersionMinor && + int.Parse(split[2]) < MinRabbitServerVersionPatch)) + { + throw new ApplicationException( + $"Connected to RabbitMQ server version {version}, but minimum required is {MinRabbitServerVersionMajor}.{MinRabbitServerVersionMinor}.{MinRabbitServerVersionPatch}"); } - #region Resource Classes + _logger.Debug($"Connected to RabbitMQ server version {version}"); + } - private class RabbitResources : IDisposable - { - public IModel Model { get; } + #region Resource Classes - protected readonly object OResourceLock = new(); + private class RabbitResources : IDisposable + { + public IModel Model { get; } - protected readonly ILogger Logger; + protected readonly object OResourceLock = new(); - public RabbitResources(IModel model) - { - Logger = LogManager.GetLogger(GetType().Name); - Model = model; - } + protected readonly ILogger Logger; - public virtual void Dispose() - { - if (Model.IsOpen) - Model.Close(); - } + public RabbitResources(IModel model) + { + Logger = LogManager.GetLogger(GetType().Name); + Model = model; } - private class ProducerResources : RabbitResources + public virtual void Dispose() { - public IProducerModel? ProducerModel { get; set; } - - public ProducerResources(IModel model, IProducerModel ipm) : base(model) - { - ProducerModel = ipm; - } + if (Model.IsOpen) + Model.Close(); } + } + + private class ProducerResources : RabbitResources + { + public IProducerModel? 
ProducerModel { get; set; } - private class ConsumerResources : RabbitResources + public ProducerResources(IModel model, IProducerModel ipm) : base(model) { - internal readonly EventingBasicConsumer ebc; - internal readonly string QueueName; + ProducerModel = ipm; + } + } - public override void Dispose() - { - foreach (var tag in ebc.ConsumerTags) - { - Model.BasicCancel(tag); - } - if (!Model.IsOpen) - return; - Model.Close(); - Model.Dispose(); - } + private class ConsumerResources : RabbitResources + { + internal readonly EventingBasicConsumer ebc; + internal readonly string QueueName; - internal ConsumerResources(EventingBasicConsumer ebc, string q, IModel model) : base(model) + public override void Dispose() + { + foreach (var tag in ebc.ConsumerTags) { - this.ebc = ebc; - this.QueueName = q; + Model.BasicCancel(tag); } + if (!Model.IsOpen) + return; + Model.Close(); + Model.Dispose(); } - #endregion + internal ConsumerResources(EventingBasicConsumer ebc, string q, IModel model) : base(model) + { + this.ebc = ebc; + this.QueueName = q; + } + } - public void Wait() + #endregion + + public void Wait() + { + lock (_exitLock) { - lock (_exitLock) + while (!ShutdownCalled) { - while (!ShutdownCalled) - { - Monitor.Wait(_exitLock); - } + Monitor.Wait(_exitLock); } } } diff --git a/src/SmiServices/Common/Messaging/StaticBackoffProvider.cs b/src/SmiServices/Common/Messaging/StaticBackoffProvider.cs index 05b96c96c..0ab633c2a 100644 --- a/src/SmiServices/Common/Messaging/StaticBackoffProvider.cs +++ b/src/SmiServices/Common/Messaging/StaticBackoffProvider.cs @@ -1,18 +1,17 @@ using System; -namespace SmiServices.Common.Messaging +namespace SmiServices.Common.Messaging; + +public class StaticBackoffProvider : IBackoffProvider { - public class StaticBackoffProvider : IBackoffProvider - { - private readonly TimeSpan _initialBackoff; + private readonly TimeSpan _initialBackoff; - public StaticBackoffProvider(TimeSpan? 
initialBackoff = null) - { - _initialBackoff = initialBackoff ?? new TimeSpan(hours: 0, minutes: 1, seconds: 0); - } + public StaticBackoffProvider(TimeSpan? initialBackoff = null) + { + _initialBackoff = initialBackoff ?? new TimeSpan(hours: 0, minutes: 1, seconds: 0); + } - public TimeSpan GetNextBackoff() => _initialBackoff; + public TimeSpan GetNextBackoff() => _initialBackoff; - public void Reset() { } - } + public void Reset() { } } diff --git a/src/SmiServices/Common/MongoDB/MongoClientHelpers.cs b/src/SmiServices/Common/MongoDB/MongoClientHelpers.cs index 997156767..d083e687d 100644 --- a/src/SmiServices/Common/MongoDB/MongoClientHelpers.cs +++ b/src/SmiServices/Common/MongoDB/MongoClientHelpers.cs @@ -6,89 +6,88 @@ using System; using System.Linq; -namespace SmiServices.Common.MongoDB +namespace SmiServices.Common.MongoDB; + +public static class MongoClientHelpers { - public static class MongoClientHelpers - { - private const string AuthDatabase = "admin"; // Always authenticate against the admin database + private const string AuthDatabase = "admin"; // Always authenticate against the admin database - private static readonly ILogger _logger = LogManager.GetCurrentClassLogger(); + private static readonly ILogger _logger = LogManager.GetCurrentClassLogger(); - private static readonly ListCollectionNamesOptions _listOptions = new(); + private static readonly ListCollectionNamesOptions _listOptions = new(); - /// - /// Creates a from the given options, and checks that the user has the "readWrite" role for the given database - /// - /// - /// - /// - /// - /// - public static MongoClient GetMongoClient(MongoDbOptions options, string applicationName, bool skipAuthentication = false, bool skipJournal = false) - { - if (!options.AreValid(skipAuthentication)) - throw new ApplicationException($"Invalid MongoDB options: {options}"); - - var settings = new MongoClientSettings - { - ApplicationName = applicationName, - Server = new MongoServerAddress(options.HostName, 
options.Port), - WriteConcern = new WriteConcern(journal: !skipJournal) - }; + /// + /// Creates a from the given options, and checks that the user has the "readWrite" role for the given database + /// + /// + /// + /// + /// + /// + public static MongoClient GetMongoClient(MongoDbOptions options, string applicationName, bool skipAuthentication = false, bool skipJournal = false) + { + if (!options.AreValid(skipAuthentication)) + throw new ApplicationException($"Invalid MongoDB options: {options}"); - if (skipAuthentication || options.UserName == string.Empty) - return new MongoClient(settings); + var settings = new MongoClientSettings + { + ApplicationName = applicationName, + Server = new MongoServerAddress(options.HostName, options.Port), + WriteConcern = new WriteConcern(journal: !skipJournal) + }; - if (string.IsNullOrWhiteSpace(options.Password)) - throw new ApplicationException($"MongoDB password must be set"); + if (skipAuthentication || options.UserName == string.Empty) + return new MongoClient(settings); - settings.Credential = MongoCredential.CreateCredential(AuthDatabase, options.UserName, options.Password); - var client = new MongoClient(settings); + if (string.IsNullOrWhiteSpace(options.Password)) + throw new ApplicationException($"MongoDB password must be set"); - try - { - IMongoDatabase db = client.GetDatabase(AuthDatabase); - var queryResult = db.RunCommand(new BsonDocument("usersInfo", options.UserName)); + settings.Credential = MongoCredential.CreateCredential(AuthDatabase, options.UserName, options.Password); + var client = new MongoClient(settings); - if (!(queryResult["ok"] == 1)) - throw new ApplicationException($"Could not check authentication for user \"{options.UserName}\""); + try + { + IMongoDatabase db = client.GetDatabase(AuthDatabase); + var queryResult = db.RunCommand(new BsonDocument("usersInfo", options.UserName)); - var roles = (BsonArray)queryResult[0][0]["roles"]; + if (!(queryResult["ok"] == 1)) + throw new 
ApplicationException($"Could not check authentication for user \"{options.UserName}\""); - var hasReadWrite = false; - foreach (BsonDocument role in roles.Select(x => x.AsBsonDocument)) - if (role["db"].AsString == options.DatabaseName && role["role"].AsString == "readWrite") - hasReadWrite = true; + var roles = (BsonArray)queryResult[0][0]["roles"]; - if (!hasReadWrite) - throw new ApplicationException($"User \"{options.UserName}\" does not have readWrite permissions on database \"{options.DatabaseName}\""); + var hasReadWrite = false; + foreach (BsonDocument role in roles.Select(x => x.AsBsonDocument)) + if (role["db"].AsString == options.DatabaseName && role["role"].AsString == "readWrite") + hasReadWrite = true; - _logger.Debug($"User \"{options.UserName}\" successfully authenticated to MongoDB database \"{options.DatabaseName}\""); - } - catch (MongoAuthenticationException e) - { - throw new ApplicationException($"Could not verify authentication for user \"{options.UserName}\" on database \"{options.DatabaseName}\"", e); - } + if (!hasReadWrite) + throw new ApplicationException($"User \"{options.UserName}\" does not have readWrite permissions on database \"{options.DatabaseName}\""); - return client; + _logger.Debug($"User \"{options.UserName}\" successfully authenticated to MongoDB database \"{options.DatabaseName}\""); } - - public static IMongoDatabase TryGetDatabase(this MongoClient client, string dbName) + catch (MongoAuthenticationException e) { - if (!client.ListDatabaseNames().ToList().Contains(dbName)) - throw new MongoException("Database \'" + dbName + "\' does not exist on the server"); - - return client.GetDatabase(dbName); + throw new ApplicationException($"Could not verify authentication for user \"{options.UserName}\" on database \"{options.DatabaseName}\"", e); } - public static IMongoCollection TryGetCollection(this IMongoDatabase database, string collectionName) - { - _listOptions.Filter = new BsonDocument("name", collectionName); + return 
client; + } - if (!database.ListCollectionNames(_listOptions).Any()) - throw new MongoException("Collection \'" + collectionName + "\' does not exist in database " + database.DatabaseNamespace); + public static IMongoDatabase TryGetDatabase(this MongoClient client, string dbName) + { + if (!client.ListDatabaseNames().ToList().Contains(dbName)) + throw new MongoException("Database \'" + dbName + "\' does not exist on the server"); - return database.GetCollection(collectionName); - } + return client.GetDatabase(dbName); + } + + public static IMongoCollection TryGetCollection(this IMongoDatabase database, string collectionName) + { + _listOptions.Filter = new BsonDocument("name", collectionName); + + if (!database.ListCollectionNames(_listOptions).Any()) + throw new MongoException("Collection \'" + collectionName + "\' does not exist in database " + database.DatabaseNamespace); + + return database.GetCollection(collectionName); } } diff --git a/src/SmiServices/Common/MongoDB/MongoDocumentHeaders.cs b/src/SmiServices/Common/MongoDB/MongoDocumentHeaders.cs index f08a31dc6..eb5857f9b 100644 --- a/src/SmiServices/Common/MongoDB/MongoDocumentHeaders.cs +++ b/src/SmiServices/Common/MongoDB/MongoDocumentHeaders.cs @@ -4,57 +4,56 @@ using System; -namespace SmiServices.Common.MongoDB +namespace SmiServices.Common.MongoDB; + +public static class MongoDocumentHeaders { - public static class MongoDocumentHeaders + /// + /// Generate a header for an image document + /// + /// + /// + /// + public static BsonDocument ImageDocumentHeader(DicomFileMessage message, IMessageHeader header) { - /// - /// Generate a header for an image document - /// - /// - /// - /// - public static BsonDocument ImageDocumentHeader(DicomFileMessage message, IMessageHeader header) + return new BsonDocument { - return new BsonDocument - { - { "DicomFilePath", message.DicomFilePath }, - { "DicomFileSize", message.DicomFileSize }, - { "MessageHeader", new BsonDocument - { - { "MessageGuid", 
header.MessageGuid.ToString() }, - { "ProducerProcessID", header.ProducerProcessID }, - { "ProducerExecutableName", header.ProducerExecutableName }, - { "Parents", string.Join(MessageHeader.Splitter, header.Parents) }, - { "OriginalPublishTimestamp", header.OriginalPublishTimestamp } - }} - }; - } + { "DicomFilePath", message.DicomFilePath }, + { "DicomFileSize", message.DicomFileSize }, + { "MessageHeader", new BsonDocument + { + { "MessageGuid", header.MessageGuid.ToString() }, + { "ProducerProcessID", header.ProducerProcessID }, + { "ProducerExecutableName", header.ProducerExecutableName }, + { "Parents", string.Join(MessageHeader.Splitter, header.Parents) }, + { "OriginalPublishTimestamp", header.OriginalPublishTimestamp } + }} + }; + } - /// - /// Generate a header for a series document - /// - /// - /// - public static BsonDocument SeriesDocumentHeader(SeriesMessage message) + /// + /// Generate a header for a series document + /// + /// + /// + public static BsonDocument SeriesDocumentHeader(SeriesMessage message) + { + return new BsonDocument { - return new BsonDocument - { - { "DirectoryPath", message.DirectoryPath }, - { "ImagesInSeries", message.ImagesInSeries } - }; - } + { "DirectoryPath", message.DirectoryPath }, + { "ImagesInSeries", message.ImagesInSeries } + }; + } - public static IMessageHeader RebuildMessageHeader(BsonDocument bsonDoc) + public static IMessageHeader RebuildMessageHeader(BsonDocument bsonDoc) + { + return new MessageHeader { - return new MessageHeader - { - MessageGuid = Guid.Parse(bsonDoc["MessageGuid"].AsString), - ProducerProcessID = bsonDoc["ProducerProcessID"].AsInt32, - ProducerExecutableName = bsonDoc["ProducerExecutableName"].AsString, - Parents = MessageHeader.GetGuidArray(bsonDoc["Parents"].AsString), - OriginalPublishTimestamp = bsonDoc["OriginalPublishTimestamp"].AsInt64 - }; - } + MessageGuid = Guid.Parse(bsonDoc["MessageGuid"].AsString), + ProducerProcessID = bsonDoc["ProducerProcessID"].AsInt32, + 
ProducerExecutableName = bsonDoc["ProducerExecutableName"].AsString, + Parents = MessageHeader.GetGuidArray(bsonDoc["Parents"].AsString), + OriginalPublishTimestamp = bsonDoc["OriginalPublishTimestamp"].AsInt64 + }; } } diff --git a/src/SmiServices/Common/MongoDB/MongoModalityGroups.cs b/src/SmiServices/Common/MongoDB/MongoModalityGroups.cs index 08f22addc..db1da9834 100644 --- a/src/SmiServices/Common/MongoDB/MongoModalityGroups.cs +++ b/src/SmiServices/Common/MongoDB/MongoModalityGroups.cs @@ -5,55 +5,54 @@ using System.Linq; -namespace SmiServices.Common.MongoDB +namespace SmiServices.Common.MongoDB; + +public static class MongoModalityGroups { - public static class MongoModalityGroups + /// + /// Modalities which are grouped into their own collections in MongoDB + /// + public static readonly string[] MajorModalities = + [ + "CR", + "CT", + "DX", + "MG", + "MR", + "NM", + "OT", + "PR", + "PT", + "RF", + "SR", + "US", + "XA", + ]; + + /// + /// Splits a collection of Dicom Bson documents into groups determined by their Modality. + /// Groups are defined by the array. + /// + /// + /// + public static IEnumerable>> GetModalityChunks(IEnumerable toProcess) { - /// - /// Modalities which are grouped into their own collections in MongoDB - /// - public static readonly string[] MajorModalities = - [ - "CR", - "CT", - "DX", - "MG", - "MR", - "NM", - "OT", - "PR", - "PT", - "RF", - "SR", - "US", - "XA", - ]; - - /// - /// Splits a collection of Dicom Bson documents into groups determined by their Modality. - /// Groups are defined by the array. 
- /// - /// - /// - public static IEnumerable>> GetModalityChunks(IEnumerable toProcess) - { - ILookup areInvalid = toProcess.ToLookup(x => !x.Contains("Modality") || x["Modality"].IsBsonNull); - - // Pull out nulls first - List others = areInvalid[true].ToList(); + ILookup areInvalid = toProcess.ToLookup(x => !x.Contains("Modality") || x["Modality"].IsBsonNull); - foreach (IGrouping grouping in areInvalid[false].GroupBy(x => x["Modality"].AsString)) - { - List groupDocs = [.. grouping]; + // Pull out nulls first + List others = areInvalid[true].ToList(); - if (MajorModalities.Contains(grouping.Key)) - yield return new Tuple>(grouping.Key, groupDocs); - else - others.AddRange(groupDocs); - } + foreach (IGrouping grouping in areInvalid[false].GroupBy(x => x["Modality"].AsString)) + { + List groupDocs = [.. grouping]; - if (others.Count > 0) - yield return new Tuple>("OTHER", others); + if (MajorModalities.Contains(grouping.Key)) + yield return new Tuple>(grouping.Key, groupDocs); + else + others.AddRange(groupDocs); } + + if (others.Count > 0) + yield return new Tuple>("OTHER", others); } } diff --git a/src/SmiServices/Common/MongoDB/MongoQueryParser.cs b/src/SmiServices/Common/MongoDB/MongoQueryParser.cs index 3b425e5fc..1bc991d65 100644 --- a/src/SmiServices/Common/MongoDB/MongoQueryParser.cs +++ b/src/SmiServices/Common/MongoDB/MongoQueryParser.cs @@ -7,98 +7,97 @@ using System.Diagnostics.CodeAnalysis; using System.Threading.Tasks; -namespace SmiServices.Common.MongoDB +namespace SmiServices.Common.MongoDB; + +public static class MongoQueryParser { - public static class MongoQueryParser + private static readonly ILogger _logger = LogManager.GetCurrentClassLogger(); + + //TODO(RKM 05/07) Make Modality a required property of the query + //TODO(Ruairidh): Refactor out the IMongoCollection object + public static async Task> GetCursor(IMongoCollection coll, FindOptions findOptions, string? 
jsonQuery) { - private static readonly ILogger _logger = LogManager.GetCurrentClassLogger(); + if (string.IsNullOrWhiteSpace(jsonQuery)) + { + _logger.Info("No query specified, fetching all records in collection"); + return await coll.FindAsync(FilterDefinition.Empty, findOptions); + } + + BsonDocument docQuery; - //TODO(RKM 05/07) Make Modality a required property of the query - //TODO(Ruairidh): Refactor out the IMongoCollection object - public static async Task> GetCursor(IMongoCollection coll, FindOptions findOptions, string? jsonQuery) + try { - if (string.IsNullOrWhiteSpace(jsonQuery)) - { - _logger.Info("No query specified, fetching all records in collection"); - return await coll.FindAsync(FilterDefinition.Empty, findOptions); - } + docQuery = BsonSerializer.Deserialize(jsonQuery); + _logger.Info("Deserialized BsonDocument from string: " + docQuery); + } + catch (FormatException e) + { + throw new ApplicationException("Could not deserialize the string into a json object", e); + } - BsonDocument docQuery; + // Required - try - { - docQuery = BsonSerializer.Deserialize(jsonQuery); - _logger.Info("Deserialized BsonDocument from string: " + docQuery); - } - catch (FormatException e) - { - throw new ApplicationException("Could not deserialize the string into a json object", e); - } + if (!TryParseDocumentProperty(docQuery, "find", out BsonDocument? find)) + throw new ApplicationException("Parsed document did not contain a \"find\" node"); - // Required + // Optional - if (!TryParseDocumentProperty(docQuery, "find", out BsonDocument? find)) - throw new ApplicationException("Parsed document did not contain a \"find\" node"); + if (TryParseDocumentProperty(docQuery, "sort", out BsonDocument? sort)) + findOptions.Sort = sort; - // Optional + if (TryParseIntProperty(docQuery, "limit", out int limit)) + findOptions.Limit = limit; - if (TryParseDocumentProperty(docQuery, "sort", out BsonDocument? 
sort)) - findOptions.Sort = sort; + if (TryParseIntProperty(docQuery, "skip", out int skip)) + findOptions.Skip = skip; - if (TryParseIntProperty(docQuery, "limit", out int limit)) - findOptions.Limit = limit; - if (TryParseIntProperty(docQuery, "skip", out int skip)) - findOptions.Skip = skip; + return await coll.FindAsync(find, findOptions); + } + private static bool TryParseDocumentProperty(BsonDocument docQuery, string propertyName, [NotNullWhen(true)] out BsonDocument? propertyDocument) + { + if (docQuery.TryGetValue(propertyName, out BsonValue value)) + try + { + propertyDocument = value.AsBsonDocument; + _logger.Info("Parsed document " + propertyDocument + " for property " + propertyName); - return await coll.FindAsync(find, findOptions); - } + return true; + } + catch (InvalidCastException e) + { + throw new ApplicationException("Could not cast value " + value + " to a document for property " + propertyName, e); + } - private static bool TryParseDocumentProperty(BsonDocument docQuery, string propertyName, [NotNullWhen(true)] out BsonDocument? 
propertyDocument) - { - if (docQuery.TryGetValue(propertyName, out BsonValue value)) - try - { - propertyDocument = value.AsBsonDocument; - _logger.Info("Parsed document " + propertyDocument + " for property " + propertyName); - - return true; - } - catch (InvalidCastException e) - { - throw new ApplicationException("Could not cast value " + value + " to a document for property " + propertyName, e); - } - - _logger.Info("No document found for property " + propertyName); - propertyDocument = null; - return false; - } + _logger.Info("No document found for property " + propertyName); + propertyDocument = null; + return false; + } - private static bool TryParseIntProperty(BsonDocument docQuery, string propertyName, out int propertyValue) + private static bool TryParseIntProperty(BsonDocument docQuery, string propertyName, out int propertyValue) + { + if (docQuery.TryGetValue(propertyName, out BsonValue value)) { - if (docQuery.TryGetValue(propertyName, out BsonValue value)) + try { - try - { - propertyValue = value.AsInt32; - _logger.Info("Parsed value " + propertyValue + " for property " + propertyName); - } - catch (InvalidCastException e) - { - throw new ApplicationException("Could not cast value " + value + " to an int for property " + propertyName, e); - } - - if (propertyValue < 0) - throw new ApplicationException("Property value for " + propertyName + " must be greater than 0"); - - return true; + propertyValue = value.AsInt32; + _logger.Info("Parsed value " + propertyValue + " for property " + propertyName); + } + catch (InvalidCastException e) + { + throw new ApplicationException("Could not cast value " + value + " to an int for property " + propertyName, e); } - _logger.Info("No value found for property " + propertyName); - propertyValue = -1; + if (propertyValue < 0) + throw new ApplicationException("Property value for " + propertyName + " must be greater than 0"); - return false; + return true; } + + _logger.Info("No value found for property " + 
propertyName); + propertyValue = -1; + + return false; } } diff --git a/src/SmiServices/Common/Options/CliOptions.cs b/src/SmiServices/Common/Options/CliOptions.cs index 8d44ed25e..e866f748d 100644 --- a/src/SmiServices/Common/Options/CliOptions.cs +++ b/src/SmiServices/Common/Options/CliOptions.cs @@ -1,20 +1,19 @@ using CommandLine; -namespace SmiServices.Common.Options +namespace SmiServices.Common.Options; + +public class CliOptions { - public class CliOptions + [Option( + 'y', + "yaml-file", + Default = "default.yaml", + Required = false, + HelpText = "[Optional] Name of the yaml config file to load" + )] + public string YamlFile { get; set; } = null!; + public override string ToString() { - [Option( - 'y', - "yaml-file", - Default = "default.yaml", - Required = false, - HelpText = "[Optional] Name of the yaml config file to load" - )] - public string YamlFile { get; set; } = null!; - public override string ToString() - { - return $"YamlFile={YamlFile},"; - } + return $"YamlFile={YamlFile},"; } } diff --git a/src/SmiServices/Common/Options/ConsumerOptions.cs b/src/SmiServices/Common/Options/ConsumerOptions.cs index bf3b6fc14..0a3b1fa14 100644 --- a/src/SmiServices/Common/Options/ConsumerOptions.cs +++ b/src/SmiServices/Common/Options/ConsumerOptions.cs @@ -1,50 +1,49 @@ using System.Text; -namespace SmiServices.Common.Options +namespace SmiServices.Common.Options; + +/// +/// Configuration options needed to receive messages from a RabbitMQ queue. +/// +public class ConsumerOptions : IOptions { /// - /// Configuration options needed to receive messages from a RabbitMQ queue. + /// Name of the queue to consume from. /// - public class ConsumerOptions : IOptions + public string? QueueName { get; set; } + + /// + /// Max number of messages the queue will send the consumer before receiving an acknowledgement. + /// + public ushort QoSPrefetchCount { get; set; } = 1; + + /// + /// Automatically acknowledge any messages sent to the consumer. 
+ /// + public bool AutoAck { get; set; } + + /// + /// If set, consumer will not call Fatal when an unhandled exception occurs when processing a message, up to the limit. Requires to be false + /// + public bool HoldUnprocessableMessages { get; set; } + + + /// + /// Verifies that the individual options have been populated + /// + /// + public bool VerifyPopulated() + { + return !string.IsNullOrWhiteSpace(QueueName) && (QoSPrefetchCount != 0); + } + + public override string ToString() { - /// - /// Name of the queue to consume from. - /// - public string? QueueName { get; set; } - - /// - /// Max number of messages the queue will send the consumer before receiving an acknowledgement. - /// - public ushort QoSPrefetchCount { get; set; } = 1; - - /// - /// Automatically acknowledge any messages sent to the consumer. - /// - public bool AutoAck { get; set; } - - /// - /// If set, consumer will not call Fatal when an unhandled exception occurs when processing a message, up to the limit. Requires to be false - /// - public bool HoldUnprocessableMessages { get; set; } - - - /// - /// Verifies that the individual options have been populated - /// - /// - public bool VerifyPopulated() - { - return !string.IsNullOrWhiteSpace(QueueName) && (QoSPrefetchCount != 0); - } - - public override string ToString() - { - var sb = new StringBuilder(); - sb.Append("QueueName: " + QueueName); - sb.Append(", AutoAck: " + AutoAck); - sb.Append(", QoSPrefetchCount: " + QoSPrefetchCount); - sb.Append(", HoldUnprocessableMessages: " + HoldUnprocessableMessages); - return sb.ToString(); - } + var sb = new StringBuilder(); + sb.Append("QueueName: " + QueueName); + sb.Append(", AutoAck: " + AutoAck); + sb.Append(", QoSPrefetchCount: " + QoSPrefetchCount); + sb.Append(", HoldUnprocessableMessages: " + HoldUnprocessableMessages); + return sb.ToString(); } } diff --git a/src/SmiServices/Common/Options/EnvironmentVariableDecorator.cs b/src/SmiServices/Common/Options/EnvironmentVariableDecorator.cs 
index 9c12eda7c..8a5d2f73f 100644 --- a/src/SmiServices/Common/Options/EnvironmentVariableDecorator.cs +++ b/src/SmiServices/Common/Options/EnvironmentVariableDecorator.cs @@ -1,28 +1,27 @@ using System; -namespace SmiServices.Common.Options +namespace SmiServices.Common.Options; + +/// +/// Populates values in based on environment variables +/// +public class EnvironmentVariableDecorator : OptionsDecorator { - /// - /// Populates values in based on environment variables - /// - public class EnvironmentVariableDecorator : OptionsDecorator + public override GlobalOptions Decorate(GlobalOptions options) { - public override GlobalOptions Decorate(GlobalOptions options) - { - ForAll(options, SetMongoPassword); - return options; - } + ForAll(options, SetMongoPassword); + return options; + } - private static MongoDbOptions SetMongoPassword(MongoDbOptions opt) - { - //get the environment variables current value - var envVar = Environment.GetEnvironmentVariable("MONGO_SERVICE_PASSWORD"); + private static MongoDbOptions SetMongoPassword(MongoDbOptions opt) + { + //get the environment variables current value + var envVar = Environment.GetEnvironmentVariable("MONGO_SERVICE_PASSWORD"); - //if there's an env var for it and there are mongodb options being used - if (!string.IsNullOrWhiteSpace(envVar)) - opt.Password = envVar; + //if there's an env var for it and there are mongodb options being used + if (!string.IsNullOrWhiteSpace(envVar)) + opt.Password = envVar; - return opt; - } + return opt; } } diff --git a/src/SmiServices/Common/Options/GlobalOptions.cs b/src/SmiServices/Common/Options/GlobalOptions.cs index 48fbff343..efe96e72b 100644 --- a/src/SmiServices/Common/Options/GlobalOptions.cs +++ b/src/SmiServices/Common/Options/GlobalOptions.cs @@ -18,653 +18,652 @@ using YamlDotNet.Serialization; using DatabaseType = FAnsi.DatabaseType; -namespace SmiServices.Common.Options +namespace SmiServices.Common.Options; + +public interface IOptions { - public interface IOptions - { 
- } +} - public class GlobalOptions : IOptions - { - #region AllOptions +public class GlobalOptions : IOptions +{ + #region AllOptions - private string? _hostProcessName; + private string? _hostProcessName; - public string HostProcessName + public string HostProcessName + { + get { - get - { - if (string.IsNullOrWhiteSpace(_hostProcessName)) - throw new ArgumentException("HostProcessName not set"); - return _hostProcessName; - } - set - { - if (_hostProcessName != null) - throw new ArgumentException("HostProcessName already set"); - _hostProcessName = value; - } + if (string.IsNullOrWhiteSpace(_hostProcessName)) + throw new ArgumentException("HostProcessName not set"); + return _hostProcessName; } + set + { + if (_hostProcessName != null) + throw new ArgumentException("HostProcessName already set"); + _hostProcessName = value; + } + } - public MessageBrokerType? MessageBrokerType { get; set; } = Messaging.MessageBrokerType.RabbitMQ; + public MessageBrokerType? MessageBrokerType { get; set; } = Messaging.MessageBrokerType.RabbitMQ; - public LoggingOptions? LoggingOptions { get; set; } = new LoggingOptions(); - public RabbitOptions? RabbitOptions { get; set; } = new RabbitOptions(); - public FileSystemOptions? FileSystemOptions { get; set; } = new FileSystemOptions(); - public RDMPOptions? RDMPOptions { get; set; } = new RDMPOptions(); - public MongoDatabases? MongoDatabases { get; set; } = new MongoDatabases(); - public DicomRelationalMapperOptions? DicomRelationalMapperOptions { get; set; } = new DicomRelationalMapperOptions(); - public UpdateValuesOptions? UpdateValuesOptions { get; set; } = new UpdateValuesOptions(); - public CohortExtractorOptions? CohortExtractorOptions { get; set; } = new CohortExtractorOptions(); - public CohortPackagerOptions? CohortPackagerOptions { get; set; } = new CohortPackagerOptions(); - public DicomReprocessorOptions? DicomReprocessorOptions { get; set; } = new DicomReprocessorOptions(); - public DicomTagReaderOptions? 
DicomTagReaderOptions { get; set; } = new DicomTagReaderOptions(); - public FileCopierOptions? FileCopierOptions { get; set; } = new FileCopierOptions(); - public IdentifierMapperOptions? IdentifierMapperOptions { get; set; } = new IdentifierMapperOptions(); - public MongoDbPopulatorOptions? MongoDbPopulatorOptions { get; set; } = new MongoDbPopulatorOptions(); - public ProcessDirectoryOptions? ProcessDirectoryOptions { get; set; } = new ProcessDirectoryOptions(); + public LoggingOptions? LoggingOptions { get; set; } = new LoggingOptions(); + public RabbitOptions? RabbitOptions { get; set; } = new RabbitOptions(); + public FileSystemOptions? FileSystemOptions { get; set; } = new FileSystemOptions(); + public RDMPOptions? RDMPOptions { get; set; } = new RDMPOptions(); + public MongoDatabases? MongoDatabases { get; set; } = new MongoDatabases(); + public DicomRelationalMapperOptions? DicomRelationalMapperOptions { get; set; } = new DicomRelationalMapperOptions(); + public UpdateValuesOptions? UpdateValuesOptions { get; set; } = new UpdateValuesOptions(); + public CohortExtractorOptions? CohortExtractorOptions { get; set; } = new CohortExtractorOptions(); + public CohortPackagerOptions? CohortPackagerOptions { get; set; } = new CohortPackagerOptions(); + public DicomReprocessorOptions? DicomReprocessorOptions { get; set; } = new DicomReprocessorOptions(); + public DicomTagReaderOptions? DicomTagReaderOptions { get; set; } = new DicomTagReaderOptions(); + public FileCopierOptions? FileCopierOptions { get; set; } = new FileCopierOptions(); + public IdentifierMapperOptions? IdentifierMapperOptions { get; set; } = new IdentifierMapperOptions(); + public MongoDbPopulatorOptions? MongoDbPopulatorOptions { get; set; } = new MongoDbPopulatorOptions(); + public ProcessDirectoryOptions? ProcessDirectoryOptions { get; set; } = new ProcessDirectoryOptions(); - public TriggerUpdatesOptions? 
TriggerUpdatesOptions { get; set; } = new TriggerUpdatesOptions(); + public TriggerUpdatesOptions? TriggerUpdatesOptions { get; set; } = new TriggerUpdatesOptions(); - public IsIdentifiableServiceOptions? IsIdentifiableServiceOptions { get; set; } = new IsIdentifiableServiceOptions(); - public IsIdentifiableDicomFileOptions? IsIdentifiableOptions { get; set; } = new IsIdentifiableDicomFileOptions(); + public IsIdentifiableServiceOptions? IsIdentifiableServiceOptions { get; set; } = new IsIdentifiableServiceOptions(); + public IsIdentifiableDicomFileOptions? IsIdentifiableOptions { get; set; } = new IsIdentifiableDicomFileOptions(); - public ExtractImagesOptions? ExtractImagesOptions { get; set; } = new ExtractImagesOptions(); - public DicomAnonymiserOptions? DicomAnonymiserOptions { get; set; } = new DicomAnonymiserOptions(); + public ExtractImagesOptions? ExtractImagesOptions { get; set; } = new ExtractImagesOptions(); + public DicomAnonymiserOptions? DicomAnonymiserOptions { get; set; } = new DicomAnonymiserOptions(); - #endregion + #endregion - public static string GenerateToString(object o) - { - var sb = new StringBuilder(); + public static string GenerateToString(object o) + { + var sb = new StringBuilder(); - foreach (var prop in o.GetType().GetProperties().Where( - static prop => - !prop.Name.Contains("password", StringComparison.OrdinalIgnoreCase) && - !Attribute.IsDefined(prop, typeof(YamlIgnoreAttribute)) - ) + foreach (var prop in o.GetType().GetProperties().Where( + static prop => + !prop.Name.Contains("password", StringComparison.OrdinalIgnoreCase) && + !Attribute.IsDefined(prop, typeof(YamlIgnoreAttribute)) ) - { - sb.Append($"{prop.Name}: {prop.GetValue(o)}, "); - } - - return sb.ToString(); - } - - public override string ToString() + ) { - return GenerateToString(this); + sb.Append($"{prop.Name}: {prop.GetValue(o)}, "); } + + return sb.ToString(); } - public class LoggingOptions + public override string ToString() { - public string? 
LogConfigFile { get; set; } - public string? LogsRoot { get; set; } - public bool TraceLogging { get; set; } = true; - - public override string ToString() => GlobalOptions.GenerateToString(this); + return GenerateToString(this); } +} - public class IsIdentifiableServiceOptions : ConsumerOptions - { - /// - /// The full name of the classifier you want to run - /// - public string? ClassifierType { get; set; } +public class LoggingOptions +{ + public string? LogConfigFile { get; set; } + public string? LogsRoot { get; set; } + public bool TraceLogging { get; set; } = true; - public ProducerOptions? IsIdentifiableProducerOptions { get; set; } + public override string ToString() => GlobalOptions.GenerateToString(this); +} - public string? DataDirectory { get; set; } - } +public class IsIdentifiableServiceOptions : ConsumerOptions +{ + /// + /// The full name of the classifier you want to run + /// + public string? ClassifierType { get; set; } - public class ProcessDirectoryOptions : IOptions - { - public ProducerOptions? AccessionDirectoryProducerOptions { get; set; } + public ProducerOptions? IsIdentifiableProducerOptions { get; set; } - public override string ToString() - { - return GlobalOptions.GenerateToString(this); - } - } + public string? DataDirectory { get; set; } +} + +public class ProcessDirectoryOptions : IOptions +{ + public ProducerOptions? AccessionDirectoryProducerOptions { get; set; } - public class MongoDbPopulatorOptions : IOptions + public override string ToString() { - public ConsumerOptions? SeriesQueueConsumerOptions { get; set; } - public ConsumerOptions? ImageQueueConsumerOptions { get; set; } - public string? SeriesCollection { get; set; } = "series"; - public string? 
ImageCollection { get; set; } = "image"; - - /// - /// Seconds - /// - public int MongoDbFlushTime { get; set; } - public int FailedWriteLimit { get; set; } - - public override string ToString() - { - return GlobalOptions.GenerateToString(this); - } + return GlobalOptions.GenerateToString(this); } +} + +public class MongoDbPopulatorOptions : IOptions +{ + public ConsumerOptions? SeriesQueueConsumerOptions { get; set; } + public ConsumerOptions? ImageQueueConsumerOptions { get; set; } + public string? SeriesCollection { get; set; } = "series"; + public string? ImageCollection { get; set; } = "image"; + + /// + /// Seconds + /// + public int MongoDbFlushTime { get; set; } + public int FailedWriteLimit { get; set; } - public class IdentifierMapperOptions : ConsumerOptions, IMappingTableOptions + public override string ToString() { - public ProducerOptions? AnonImagesProducerOptions { get; set; } - public string? MappingConnectionString { get; set; } - public DatabaseType MappingDatabaseType { get; set; } - public int TimeoutInSeconds { get; set; } - public string? MappingTableName { get; set; } - public string? SwapColumnName { get; set; } - public string? ReplacementColumnName { get; set; } - public string? SwapperType { get; set; } - - /// - /// True - Changes behaviour of swapper host to pick up the PatientID tag using regex from the JSON string directly - /// rather than deserializing it to first. - /// - public bool AllowRegexMatching { get; set; } - - /// - /// Optional, if set then your will be wrapped and it's answers cached in this Redis database. - /// The Redis database will always be consulted for a known answer first and used - /// as a fallback. - /// See https://stackexchange.github.io/StackExchange.Redis/Configuration.html#basic-configuration-strings for the format. - /// - public string? 
RedisConnectionString { get; set; } - - public override string ToString() - { - return GlobalOptions.GenerateToString(this); - } + return GlobalOptions.GenerateToString(this); + } +} - public DiscoveredTable Discover() - { - var server = new DiscoveredServer(MappingConnectionString, MappingDatabaseType); +public class IdentifierMapperOptions : ConsumerOptions, IMappingTableOptions +{ + public ProducerOptions? AnonImagesProducerOptions { get; set; } + public string? MappingConnectionString { get; set; } + public DatabaseType MappingDatabaseType { get; set; } + public int TimeoutInSeconds { get; set; } + public string? MappingTableName { get; set; } + public string? SwapColumnName { get; set; } + public string? ReplacementColumnName { get; set; } + public string? SwapperType { get; set; } - if (string.IsNullOrWhiteSpace(MappingTableName)) - throw new ArgumentException($"MappingTableName must be set"); + /// + /// True - Changes behaviour of swapper host to pick up the PatientID tag using regex from the JSON string directly + /// rather than deserializing it to first. + /// + public bool AllowRegexMatching { get; set; } + + /// + /// Optional, if set then your will be wrapped and it's answers cached in this Redis database. + /// The Redis database will always be consulted for a known answer first and used + /// as a fallback. + /// See https://stackexchange.github.io/StackExchange.Redis/Configuration.html#basic-configuration-strings for the format. + /// + public string? 
RedisConnectionString { get; set; } - var idx = MappingTableName.LastIndexOf('.'); - var tableNameUnqualified = MappingTableName[(idx + 1)..]; + public override string ToString() + { + return GlobalOptions.GenerateToString(this); + } - idx = MappingTableName.IndexOf('.'); - if (idx == -1) - throw new ArgumentException($"MappingTableName did not contain the database/user section:'{MappingTableName}'"); + public DiscoveredTable Discover() + { + var server = new DiscoveredServer(MappingConnectionString, MappingDatabaseType); - // TODO This can definitely be simplified if we refactor code that calls this - if (server.DatabaseType == DatabaseType.PostgreSql) - { - var db = server.GetCurrentDatabase() ?? throw new ArgumentException("Database must be set in cnonection string"); - var split = MappingTableName.Split('.'); - return db.ExpectTable(split[1], schema: split[0]); - } + if (string.IsNullOrWhiteSpace(MappingTableName)) + throw new ArgumentException($"MappingTableName must be set"); - var databaseName = server.GetQuerySyntaxHelper().GetRuntimeName(MappingTableName[..idx]); - if (string.IsNullOrWhiteSpace(databaseName)) - throw new ArgumentException($"Could not get database/username from MappingTableName {MappingTableName}"); + var idx = MappingTableName.LastIndexOf('.'); + var tableNameUnqualified = MappingTableName[(idx + 1)..]; - return server.ExpectDatabase(databaseName).ExpectTable(tableNameUnqualified); - } + idx = MappingTableName.IndexOf('.'); + if (idx == -1) + throw new ArgumentException($"MappingTableName did not contain the database/user section:'{MappingTableName}'"); - public IMappingTableOptions Clone() + // TODO This can definitely be simplified if we refactor code that calls this + if (server.DatabaseType == DatabaseType.PostgreSql) { - return (IMappingTableOptions)this.MemberwiseClone(); + var db = server.GetCurrentDatabase() ?? 
throw new ArgumentException("Database must be set in connection string"); + var split = MappingTableName.Split('.'); + return db.ExpectTable(split[1], schema: split[0]); } + + var databaseName = server.GetQuerySyntaxHelper().GetRuntimeName(MappingTableName[..idx]); + if (string.IsNullOrWhiteSpace(databaseName)) + throw new ArgumentException($"Could not get database/username from MappingTableName {MappingTableName}"); + + return server.ExpectDatabase(databaseName).ExpectTable(tableNameUnqualified); } - public interface IMappingTableOptions : IOptions + public IMappingTableOptions Clone() { - string? MappingConnectionString { get; } - string? MappingTableName { get; set; } - string? SwapColumnName { get; set; } - string? ReplacementColumnName { get; set; } - DatabaseType MappingDatabaseType { get; } - int TimeoutInSeconds { get; } - - DiscoveredTable Discover(); - IMappingTableOptions Clone(); + return (IMappingTableOptions)this.MemberwiseClone(); } +} + +public interface IMappingTableOptions : IOptions +{ + string? MappingConnectionString { get; } + string? MappingTableName { get; set; } + string? SwapColumnName { get; set; } + string? ReplacementColumnName { get; set; } + DatabaseType MappingDatabaseType { get; } + int TimeoutInSeconds { get; } + + DiscoveredTable Discover(); + IMappingTableOptions Clone(); +} +/// +/// Contains names of the series and image exchanges that serialized image tag data will be written to +/// +public class DicomTagReaderOptions : ConsumerOptions +{ /// - /// Contains names of the series and image exchanges that serialized image tag data will be written to + /// If true, any errors processing a file will cause the entire to be NACK'd, + /// and no messages will be sent related to that directory. If false, file errors will be logged but any valid files + /// found will be processed as normal /// - public class DicomTagReaderOptions : ConsumerOptions + public bool NackIfAnyFileErrors { get; set; } + public ProducerOptions? 
ImageProducerOptions { get; set; } + public ProducerOptions? SeriesProducerOptions { get; set; } + public string? FileReadOption { get; set; } + public TagProcessorMode TagProcessorMode { get; set; } + public int MaxIoThreads { get; set; } = 1; + + public FileReadOption GetReadOption() { - /// - /// If true, any errors processing a file will cause the entire to be NACK'd, - /// and no messages will be sent related to that directory. If false, file errors will be logged but any valid files - /// found will be processed as normal - /// - public bool NackIfAnyFileErrors { get; set; } - public ProducerOptions? ImageProducerOptions { get; set; } - public ProducerOptions? SeriesProducerOptions { get; set; } - public string? FileReadOption { get; set; } - public TagProcessorMode TagProcessorMode { get; set; } - public int MaxIoThreads { get; set; } = 1; - - public FileReadOption GetReadOption() + try { - try - { - var opt = (FileReadOption)Enum.Parse(typeof(FileReadOption), FileReadOption!); - - if (opt == FellowOakDicom.FileReadOption.SkipLargeTags) - throw new ApplicationException("SkipLargeTags is disallowed here to ensure data consistency"); - - return opt; - } - catch (ArgumentNullException ex) - { - throw new ArgumentNullException("DicomTagReaderOptions.FileReadOption is not set in the config file", ex); - } - } + var opt = (FileReadOption)Enum.Parse(typeof(FileReadOption), FileReadOption!); + + if (opt == FellowOakDicom.FileReadOption.SkipLargeTags) + throw new ApplicationException("SkipLargeTags is disallowed here to ensure data consistency"); - public override string ToString() + return opt; + } + catch (ArgumentNullException ex) { - return GlobalOptions.GenerateToString(this); + throw new ArgumentNullException("DicomTagReaderOptions.FileReadOption is not set in the config file", ex); } } - public class FileCopierOptions : ConsumerOptions + public override string ToString() { - public ProducerOptions? CopyStatusProducerOptions { get; set; } - public string? 
NoVerifyRoutingKey { get; set; } - - public override string ToString() => GlobalOptions.GenerateToString(this); + return GlobalOptions.GenerateToString(this); } +} - public enum TagProcessorMode - { - Serial, - Parallel - } +public class FileCopierOptions : ConsumerOptions +{ + public ProducerOptions? CopyStatusProducerOptions { get; set; } + public string? NoVerifyRoutingKey { get; set; } - public class DicomReprocessorOptions : IOptions - { - public ProcessingMode? ProcessingMode { get; set; } + public override string ToString() => GlobalOptions.GenerateToString(this); +} - public ProducerOptions? ReprocessingProducerOptions { get; set; } +public enum TagProcessorMode +{ + Serial, + Parallel +} - public TimeSpan SleepTime { get; set; } +public class DicomReprocessorOptions : IOptions +{ + public ProcessingMode? ProcessingMode { get; set; } - public override string ToString() => GlobalOptions.GenerateToString(this); - } + public ProducerOptions? ReprocessingProducerOptions { get; set; } + + public TimeSpan SleepTime { get; set; } + + public override string ToString() => GlobalOptions.GenerateToString(this); +} +/// +/// Represents the different modes of operation of the reprocessor +/// +public enum ProcessingMode +{ /// - /// Represents the different modes of operation of the reprocessor + /// Unknown / Undefined. Used for null-checking /// - public enum ProcessingMode - { - /// - /// Unknown / Undefined. Used for null-checking - /// - Unknown = -1, - - /// - /// Reprocessing of entire image documents - /// - ImageReprocessing, - - /// - /// Promotion of one or more tags - /// - TagPromotion - } + Unknown = -1, - public class CohortPackagerOptions : IOptions - { - public ConsumerOptions? ExtractRequestInfoOptions { get; set; } - public ConsumerOptions? FileCollectionInfoOptions { get; set; } - public ConsumerOptions? NoVerifyStatusOptions { get; set; } - public ConsumerOptions? 
VerificationStatusOptions { get; set; } - public uint JobWatcherTimeoutInSeconds { get; set; } - public string? NotifierType { get; set; } + /// + /// Reprocessing of entire image documents + /// + ImageReprocessing, - public bool VerificationMessageQueueProcessBatches { get; set; } - public int? VerificationMessageQueueFlushTimeSeconds { get; set; } + /// + /// Promotion of one or more tags + /// + TagPromotion +} - public static readonly TimeSpan DefaultVerificationMessageQueueFlushTime = TimeSpan.FromSeconds(5); +public class CohortPackagerOptions : IOptions +{ + public ConsumerOptions? ExtractRequestInfoOptions { get; set; } + public ConsumerOptions? FileCollectionInfoOptions { get; set; } + public ConsumerOptions? NoVerifyStatusOptions { get; set; } + public ConsumerOptions? VerificationStatusOptions { get; set; } + public uint JobWatcherTimeoutInSeconds { get; set; } + public string? NotifierType { get; set; } + public bool VerificationMessageQueueProcessBatches { get; set; } + public int? VerificationMessageQueueFlushTimeSeconds { get; set; } - /// - /// The newline to use when writing extraction report files. Note that a "\r\n" string - /// in the YAML config will bee automatically escaped to "\\r\\n" in this string. - /// - public string? ReportNewLine { get; set; } + public static readonly TimeSpan DefaultVerificationMessageQueueFlushTime = TimeSpan.FromSeconds(5); - public override string ToString() - { - return GlobalOptions.GenerateToString(this); - } + + /// + /// The newline to use when writing extraction report files. Note that a "\r\n" string + /// in the YAML config will be automatically escaped to "\\r\\n" in this string. + /// + public string? ReportNewLine { get; set; } + + public override string ToString() + { + return GlobalOptions.GenerateToString(this); } +} + +public class CohortExtractorOptions : ConsumerOptions +{ + private string? 
_auditorType; - public class CohortExtractorOptions : ConsumerOptions + /// + /// The Type of a class implementing IAuditExtractions which is responsible for auditing the extraction process. If null then no auditing happens + /// + public string AuditorType { - private string? _auditorType; + get => string.IsNullOrWhiteSpace(_auditorType) + ? "SmiServices.Microservices.CohortExtractor.Audit.NullAuditExtractions" + : _auditorType; + set => _auditorType = value; + } - /// - /// The Type of a class implementing IAuditExtractions which is responsible for auditing the extraction process. If null then no auditing happens - /// - public string AuditorType - { - get => string.IsNullOrWhiteSpace(_auditorType) - ? "SmiServices.Microservices.CohortExtractor.Audit.NullAuditExtractions" - : _auditorType; - set => _auditorType = value; - } + /// + /// The Type of a class implementing IExtractionRequestFulfiller which is responsible for mapping requested image identifiers to image file paths. Mandatory + /// + public string? RequestFulfillerType { get; set; } - /// - /// The Type of a class implementing IExtractionRequestFulfiller which is responsible for mapping requested image identifiers to image file paths. Mandatory - /// - public string? RequestFulfillerType { get; set; } - - /// - /// The Type of a class implementing IProjectPathResolver which is responsible for deciding the folder hierarchy to output into - /// - public string? ProjectPathResolverType { get; set; } - - /// - /// Controls how modalities are matched to Catalogues. Must contain a single capture group which - /// returns a modality code (e.g. CT) when applies to a Catalogue name. E.g. ^([A-Z]+)_.*$ would result - /// in Modalities being routed based on the start of the table name e.g. CT => CT_MyTable and MR=> MR_MyTable - /// - public string? 
ModalityRoutingRegex { get; set; } - - /// - /// The Type of a class implementing IRejector which is responsible for deciding individual records/images are not extractable (after fetching from database) - /// - public string? RejectorType { get; set; } - - /// - /// Modality specific rejection rules that can either override the for specific Modalities or be applied in addition - /// - public ModalitySpecificRejectorOptions[]? ModalitySpecificRejectors { get; set; } - - /// - /// Path to the rules file to use for the - /// - public string? DynamicRulesPath { get; set; } = "DynamicRules.txt"; - - public bool AllCatalogues { get; private set; } - public List OnlyCatalogues { get; private set; } = null!; - - /// - /// Optional list of datasets which contain information about when NOT to extract an image. This should be a manually curated blacklist - not just general rules (for those use ). Referenced datasets must include one or more of the UID columns (StudyInstanceUID, SeriesInstanceUID or SOPInstanceUID) - /// - public List? Blacklists { get; set; } - - public string? ExtractAnonRoutingKey { get; set; } - public string? ExtractIdentRoutingKey { get; set; } - - public ProducerOptions? ExtractFilesProducerOptions { get; set; } - public ProducerOptions? ExtractFilesInfoProducerOptions { get; set; } - - /// - /// ID(s) of ColumnInfo that contains a list of values which should not have data extracted for them. e.g. opt out. The name of the column referenced must match a column in the extraction table - /// - public List? RejectColumnInfos { get; set; } - - public override string ToString() - { - return GlobalOptions.GenerateToString(this); - } + /// + /// The Type of a class implementing IProjectPathResolver which is responsible for deciding the folder hierarchy to output into + /// + public string? ProjectPathResolverType { get; set; } + + /// + /// Controls how modalities are matched to Catalogues. 
Must contain a single capture group which + /// returns a modality code (e.g. CT) when applies to a Catalogue name. E.g. ^([A-Z]+)_.*$ would result + /// in Modalities being routed based on the start of the table name e.g. CT => CT_MyTable and MR=> MR_MyTable + /// + public string? ModalityRoutingRegex { get; set; } - public void Validate() - { - if (ModalitySpecificRejectors != null && ModalitySpecificRejectors.Length != 0 && string.IsNullOrWhiteSpace(ModalityRoutingRegex)) - { - throw new Exception("ModalitySpecificRejectors requires providing a ModalityRoutingRegex"); - } + /// + /// The Type of a class implementing IRejector which is responsible for deciding individual records/images are not extractable (after fetching from database) + /// + public string? RejectorType { get; set; } - if (string.IsNullOrEmpty(RequestFulfillerType)) - throw new Exception("No RequestFulfillerType set on CohortExtractorOptions. This must be set to a class implementing IExtractionRequestFulfiller"); + /// + /// Modality specific rejection rules that can either override the for specific Modalities or be applied in addition + /// + public ModalitySpecificRejectorOptions[]? ModalitySpecificRejectors { get; set; } - } - } + /// + /// Path to the rules file to use for the + /// + public string? DynamicRulesPath { get; set; } = "DynamicRules.txt"; - public class UpdateValuesOptions : ConsumerOptions - { - /// - /// Number of seconds the updater will wait when running a single value UPDATE on the live table e.g. ECHI A needs to be replaced with ECHI B - /// - public int UpdateTimeout { get; set; } = 5000; + public bool AllCatalogues { get; private set; } + public List OnlyCatalogues { get; private set; } = null!; - /// - /// IDs of TableInfos that should be updated - /// - public int[] TableInfosToUpdate { get; set; } = []; + /// + /// Optional list of datasets which contain information about when NOT to extract an image. 
This should be a manually curated blacklist - not just general rules (for those use ). Referenced datasets must include one or more of the UID columns (StudyInstanceUID, SeriesInstanceUID or SOPInstanceUID) + /// + public List? Blacklists { get; set; } - } + public string? ExtractAnonRoutingKey { get; set; } + public string? ExtractIdentRoutingKey { get; set; } - public class TriggerUpdatesOptions : ProducerOptions + public ProducerOptions? ExtractFilesProducerOptions { get; set; } + public ProducerOptions? ExtractFilesInfoProducerOptions { get; set; } + + /// + /// ID(s) of ColumnInfo that contains a list of values which should not have data extracted for them. e.g. opt out. The name of the column referenced must match a column in the extraction table + /// + public List? RejectColumnInfos { get; set; } + + public override string ToString() { - /// - /// The number of seconds database commands should be allowed to execute for before timing out. - /// - public int CommandTimeoutInSeconds = 500; + return GlobalOptions.GenerateToString(this); } - public class DicomRelationalMapperOptions : ConsumerOptions + public void Validate() { - /// - /// The ID of the LoadMetadata load configuration to run. A load configuration is a sequence of steps to modify/clean data such that it is loadable into the final live - /// tables. The LoadMetadata is designed to be modified through the RMDP user interface and is persisted in the LoadMetadata table (and other related tables) of the - /// RDMP platform database. - /// - public int LoadMetadataId { get; set; } - public Guid Guid { get; set; } - public string? DatabaseNamerType { get; set; } - public int MinimumBatchSize { get; set; } - public bool UseInsertIntoForRAWMigration { get; set; } - public int RetryOnFailureCount { get; set; } - public int RetryDelayInSeconds { get; set; } - public int MaximumRunDelayInSeconds { get; set; } - - /// - /// True to run before the data load accepting all proposed fixes (e.g. 
dropping RAW) - /// Default is false - /// - public bool RunChecks { get; set; } - - - public override string ToString() + if (ModalitySpecificRejectors != null && ModalitySpecificRejectors.Length != 0 && string.IsNullOrWhiteSpace(ModalityRoutingRegex)) { - return GlobalOptions.GenerateToString(this); + throw new Exception("ModalitySpecificRejectors requires providing a ModalityRoutingRegex"); } + + if (string.IsNullOrEmpty(RequestFulfillerType)) + throw new Exception("No RequestFulfillerType set on CohortExtractorOptions. This must be set to a class implementing IExtractionRequestFulfiller"); + } +} - public class ExtractImagesOptions : IOptions - { - public const int MaxIdentifiersPerMessageDefault = 1000; +public class UpdateValuesOptions : ConsumerOptions +{ + /// + /// Number of seconds the updater will wait when running a single value UPDATE on the live table e.g. ECHI A needs to be replaced with ECHI B + /// + public int UpdateTimeout { get; set; } = 5000; - /// - /// The maximum number of identifiers in each - /// - public int MaxIdentifiersPerMessage { get; set; } = MaxIdentifiersPerMessageDefault; + /// + /// IDs of TableInfos that should be updated + /// + public int[] TableInfosToUpdate { get; set; } = []; - /// - /// Options for publishing s - /// - public ProducerOptions? ExtractionRequestProducerOptions { get; set; } +} - /// - /// Options for publishing s - /// - public ProducerOptions? ExtractionRequestInfoProducerOptions { get; set; } +public class TriggerUpdatesOptions : ProducerOptions +{ + /// + /// The number of seconds database commands should be allowed to execute for before timing out. + /// + public int CommandTimeoutInSeconds = 500; +} - /// - /// The list of possible s that are allowed for use - /// - public ExtractionKey[]? AllowedExtractionKeys { get; set; } +public class DicomRelationalMapperOptions : ConsumerOptions +{ + /// + /// The ID of the LoadMetadata load configuration to run. 
A load configuration is a sequence of steps to modify/clean data such that it is loadable into the final live + /// tables. The LoadMetadata is designed to be modified through the RMDP user interface and is persisted in the LoadMetadata table (and other related tables) of the + /// RDMP platform database. + /// + public int LoadMetadataId { get; set; } + public Guid Guid { get; set; } + public string? DatabaseNamerType { get; set; } + public int MinimumBatchSize { get; set; } + public bool UseInsertIntoForRAWMigration { get; set; } + public int RetryOnFailureCount { get; set; } + public int RetryDelayInSeconds { get; set; } + public int MaximumRunDelayInSeconds { get; set; } - public override string ToString() => GlobalOptions.GenerateToString(this); - } + /// + /// True to run before the data load accepting all proposed fixes (e.g. dropping RAW) + /// Default is false + /// + public bool RunChecks { get; set; } - public class DicomAnonymiserOptions : IOptions + + public override string ToString() { - public string? AnonymiserType { get; set; } - public ConsumerOptions? AnonFileConsumerOptions { get; set; } - public ProducerOptions? ExtractFileStatusProducerOptions { get; set; } - public string? RoutingKeySuccess { get; set; } - public string? RoutingKeyFailure { get; set; } - public bool FailIfSourceWriteable { get; set; } = true; - public string? VirtualEnvPath { get; set; } - public string? DicomPixelAnonPath { get; set; } - public string? SmiServicesPath { get; set; } - public string? CtpAnonCliJar { get; set; } - public string? CtpAllowlistScript { get; set; } - public string? SRAnonymiserToolPath { get; set; } - - public override string ToString() => GlobalOptions.GenerateToString(this); + return GlobalOptions.GenerateToString(this); } +} - public class MongoDatabases : IOptions - { - public MongoDbOptions? 
DicomStoreOptions { get; set; } +public class ExtractImagesOptions : IOptions +{ + public const int MaxIdentifiersPerMessageDefault = 1000; - public MongoDbOptions? ExtractionStoreOptions { get; set; } + /// + /// The maximum number of identifiers in each + /// + public int MaxIdentifiersPerMessage { get; set; } = MaxIdentifiersPerMessageDefault; - public override string ToString() - { - return GlobalOptions.GenerateToString(this); - } - } + /// + /// Options for publishing s + /// + public ProducerOptions? ExtractionRequestProducerOptions { get; set; } - public class MongoDbOptions : IOptions - { - public string? HostName { get; set; } = "127.0.0.1"; - public int Port { get; set; } = 27017; - /// - /// UserName for authentication. If empty, authentication will be skipped. - /// - public string? UserName { get; set; } + /// + /// Options for publishing s + /// + public ProducerOptions? ExtractionRequestInfoProducerOptions { get; set; } - public string? Password { get; set; } + /// + /// The list of possible s that are allowed for use + /// + public ExtractionKey[]? AllowedExtractionKeys { get; set; } - public string? DatabaseName { get; set; } + public override string ToString() => GlobalOptions.GenerateToString(this); +} - public bool AreValid(bool skipAuthentication) - { - return (skipAuthentication || UserName != null) - && Port > 0 - && !string.IsNullOrWhiteSpace(HostName) - && !string.IsNullOrWhiteSpace(DatabaseName); - } +public class DicomAnonymiserOptions : IOptions +{ + public string? AnonymiserType { get; set; } + public ConsumerOptions? AnonFileConsumerOptions { get; set; } + public ProducerOptions? ExtractFileStatusProducerOptions { get; set; } + public string? RoutingKeySuccess { get; set; } + public string? RoutingKeyFailure { get; set; } + public bool FailIfSourceWriteable { get; set; } = true; + public string? VirtualEnvPath { get; set; } + public string? DicomPixelAnonPath { get; set; } + public string? 
SmiServicesPath { get; set; } + public string? CtpAnonCliJar { get; set; } + public string? CtpAllowlistScript { get; set; } + public string? SRAnonymiserToolPath { get; set; } + + public override string ToString() => GlobalOptions.GenerateToString(this); +} - public override string ToString() - { - return GlobalOptions.GenerateToString(this); - } +public class MongoDatabases : IOptions +{ + public MongoDbOptions? DicomStoreOptions { get; set; } + + public MongoDbOptions? ExtractionStoreOptions { get; set; } + + public override string ToString() + { + return GlobalOptions.GenerateToString(this); } +} +public class MongoDbOptions : IOptions +{ + public string? HostName { get; set; } = "127.0.0.1"; + public int Port { get; set; } = 27017; /// - /// Describes the location of the Microsoft Sql Server RDMP platform databases which keep track of load configurations, available datasets (tables) etc + /// UserName for authentication. If empty, authentication will be skipped. /// - public class RDMPOptions : IOptions - { - public string? CatalogueConnectionString { get; set; } - public string? DataExportConnectionString { get; set; } + public string? UserName { get; set; } - /// - /// Alternative to connection strings for if you have RDMP running with a YAML file system backend. - /// If specified then this will override the connection strings - /// - public string? YamlDir { get; set; } + public string? Password { get; set; } - public IRDMPPlatformRepositoryServiceLocator GetRepositoryProvider() - { - CatalogueRepository.SuppressHelpLoading = true; + public string? 
DatabaseName { get; set; } + + public bool AreValid(bool skipAuthentication) + { + return (skipAuthentication || UserName != null) + && Port > 0 + && !string.IsNullOrWhiteSpace(HostName) + && !string.IsNullOrWhiteSpace(DatabaseName); + } + + public override string ToString() + { + return GlobalOptions.GenerateToString(this); + } +} - // if using file system backend for RDMP create that repo instead - if (!string.IsNullOrWhiteSpace(YamlDir)) - { - return new RepositoryProvider(new YamlRepository(new System.IO.DirectoryInfo(YamlDir))); - } +/// +/// Describes the location of the Microsoft Sql Server RDMP platform databases which keep track of load configurations, available datasets (tables) etc +/// +public class RDMPOptions : IOptions +{ + public string? CatalogueConnectionString { get; set; } + public string? DataExportConnectionString { get; set; } - // We are using database backend for RDMP (i.e. Sql Server) - var cata = new SqlConnectionStringBuilder(CatalogueConnectionString); - var dx = new SqlConnectionStringBuilder(DataExportConnectionString); + /// + /// Alternative to connection strings for if you have RDMP running with a YAML file system backend. + /// If specified then this will override the connection strings + /// + public string? YamlDir { get; set; } - return new LinkedRepositoryProvider(cata.ConnectionString, dx.ConnectionString); - } + public IRDMPPlatformRepositoryServiceLocator GetRepositoryProvider() + { + CatalogueRepository.SuppressHelpLoading = true; - public override string ToString() + // if using file system backend for RDMP create that repo instead + if (!string.IsNullOrWhiteSpace(YamlDir)) { - return GlobalOptions.GenerateToString(this); + return new RepositoryProvider(new YamlRepository(new System.IO.DirectoryInfo(YamlDir))); } + + // We are using database backend for RDMP (i.e. 
Sql Server) + var cata = new SqlConnectionStringBuilder(CatalogueConnectionString); + var dx = new SqlConnectionStringBuilder(DataExportConnectionString); + + return new LinkedRepositoryProvider(cata.ConnectionString, dx.ConnectionString); } - /// - /// Describes the root location of all images, file names should be expressed as relative paths (relative to this root). - /// - public class FileSystemOptions : IOptions + public override string ToString() { - public string? DicomSearchPattern { get; set; } = "*.dcm"; + return GlobalOptions.GenerateToString(this); + } +} - private string? _fileSystemRoot; - private string? _extractRoot; - private string? _extractionPoolRoot; +/// +/// Describes the root location of all images, file names should be expressed as relative paths (relative to this root). +/// +public class FileSystemOptions : IOptions +{ + public string? DicomSearchPattern { get; set; } = "*.dcm"; - public string? FileSystemRoot - { - get => _fileSystemRoot; - set => _fileSystemRoot = value?.Length > 1 ? value.TrimEnd('/', '\\') : value; - } + private string? _fileSystemRoot; + private string? _extractRoot; + private string? _extractionPoolRoot; - public string? ExtractRoot - { - get => _extractRoot; - set => _extractRoot = value?.Length > 1 ? value.TrimEnd('/', '\\') : value; - } + public string? FileSystemRoot + { + get => _fileSystemRoot; + set => _fileSystemRoot = value?.Length > 1 ? value.TrimEnd('/', '\\') : value; + } - public string? ExtractionPoolRoot - { - get => _extractionPoolRoot; - set => _extractionPoolRoot = value?.Length > 1 ? value.TrimEnd('/', '\\') : value; - } + public string? ExtractRoot + { + get => _extractRoot; + set => _extractRoot = value?.Length > 1 ? value.TrimEnd('/', '\\') : value; + } - public override string ToString() - { - return GlobalOptions.GenerateToString(this); - } + public string? ExtractionPoolRoot + { + get => _extractionPoolRoot; + set => _extractionPoolRoot = value?.Length > 1 ? 
value.TrimEnd('/', '\\') : value; } - /// - /// Describes the location of the rabbit server for sending messages to - /// - public class RabbitOptions : IOptions + public override string ToString() { - private IConnection CreateConnection() => - new ConnectionFactory - { - HostName = RabbitMqHostName, - Port = RabbitMqHostPort, - VirtualHost = RabbitMqVirtualHost, - UserName = RabbitMqUserName, - Password = RabbitMqPassword - }.CreateConnection(); - - private readonly Lazy _connectionCache; - - public RabbitOptions() - { - _connectionCache = new Lazy(CreateConnection); - } + return GlobalOptions.GenerateToString(this); + } +} - [YamlIgnore] - public IConnection Connection => _connectionCache.Value; - public string RabbitMqHostName { get; set; } = "localhost"; - public int RabbitMqHostPort { get; set; } = 5672; - public string? RabbitMqVirtualHost { get; set; } = "/"; - public string? RabbitMqUserName { get; set; } = "guest"; - public string? RabbitMqPassword { get; set; } = "guest"; - public string? FatalLoggingExchange { get; set; } - public string? RabbitMqControlExchangeName { get; set; } - - public override string ToString() +/// +/// Describes the location of the rabbit server for sending messages to +/// +public class RabbitOptions : IOptions +{ + private IConnection CreateConnection() => + new ConnectionFactory { - return GlobalOptions.GenerateToString(this); - } + HostName = RabbitMqHostName, + Port = RabbitMqHostPort, + VirtualHost = RabbitMqVirtualHost, + UserName = RabbitMqUserName, + Password = RabbitMqPassword + }.CreateConnection(); + + private readonly Lazy _connectionCache; + + public RabbitOptions() + { + _connectionCache = new Lazy(CreateConnection); + } + + [YamlIgnore] + public IConnection Connection => _connectionCache.Value; + public string RabbitMqHostName { get; set; } = "localhost"; + public int RabbitMqHostPort { get; set; } = 5672; + public string? RabbitMqVirtualHost { get; set; } = "/"; + public string? 
RabbitMqUserName { get; set; } = "guest"; + public string? RabbitMqPassword { get; set; } = "guest"; + public string? FatalLoggingExchange { get; set; } + public string? RabbitMqControlExchangeName { get; set; } + + public override string ToString() + { + return GlobalOptions.GenerateToString(this); } } diff --git a/src/SmiServices/Common/Options/GlobalOptionsFactory.cs b/src/SmiServices/Common/Options/GlobalOptionsFactory.cs index 431bf517f..27e1b3158 100644 --- a/src/SmiServices/Common/Options/GlobalOptionsFactory.cs +++ b/src/SmiServices/Common/Options/GlobalOptionsFactory.cs @@ -4,88 +4,87 @@ using System.IO.Abstractions; using YamlDotNet.Serialization; -namespace SmiServices.Common.Options +namespace SmiServices.Common.Options; + +public class GlobalOptionsFactory { - public class GlobalOptionsFactory - { - private readonly List _decorators = []; + private readonly List _decorators = []; - /// - /// Create a GlobalOptionsFactory with the given set of s. Adds a single by default if passed a null value. - /// - /// - public GlobalOptionsFactory( - ICollection? decorators = null - ) - { - if (decorators != null) - _decorators.AddRange(decorators); - else - _decorators.Add(new EnvironmentVariableDecorator()); - } + /// + /// Create a GlobalOptionsFactory with the given set of s. Adds a single by default if passed a null value. + /// + /// + public GlobalOptionsFactory( + ICollection? decorators = null + ) + { + if (decorators != null) + _decorators.AddRange(decorators); + else + _decorators.Add(new EnvironmentVariableDecorator()); + } - /// - /// Loads and decorates a GlobalOptions object from the specified YAML config file - /// - /// - /// - /// - /// - public GlobalOptions Load(string hostProcessName, string configFilePath = "default.yaml", IFileSystem? 
fileSystem = null) - { - fileSystem ??= new FileSystem(); + /// + /// Loads and decorates a GlobalOptions object from the specified YAML config file + /// + /// + /// + /// + /// + public GlobalOptions Load(string hostProcessName, string configFilePath = "default.yaml", IFileSystem? fileSystem = null) + { + fileSystem ??= new FileSystem(); - var deserializer = new DeserializerBuilder() - .WithObjectFactory(GetGlobalOption) - .IgnoreUnmatchedProperties() - .Build(); + var deserializer = new DeserializerBuilder() + .WithObjectFactory(GetGlobalOption) + .IgnoreUnmatchedProperties() + .Build(); - if (!fileSystem.File.Exists(configFilePath)) - throw new ArgumentException($"Could not find config file '{configFilePath}'"); + if (!fileSystem.File.Exists(configFilePath)) + throw new ArgumentException($"Could not find config file '{configFilePath}'"); - var yamlContents = fileSystem.File.ReadAllText(configFilePath); - var globals = deserializer.Deserialize(yamlContents) - ?? throw new Exception("Did not deserialize a GlobalOptions object from the provided YAML file. Does it contain at least one valid key?"); + var yamlContents = fileSystem.File.ReadAllText(configFilePath); + var globals = deserializer.Deserialize(yamlContents) + ?? throw new Exception("Did not deserialize a GlobalOptions object from the provided YAML file. 
Does it contain at least one valid key?"); - globals.HostProcessName = hostProcessName; + globals.HostProcessName = hostProcessName; - return Decorate(globals); - } + return Decorate(globals); + } - /// - /// Applies all to - /// - /// - /// - private GlobalOptions Decorate(GlobalOptions globals) - { - foreach (var d in _decorators) - globals = d.Decorate(globals); + /// + /// Applies all to + /// + /// + /// + private GlobalOptions Decorate(GlobalOptions globals) + { + foreach (var d in _decorators) + globals = d.Decorate(globals); - return globals; - } + return globals; + } - /// - /// Loads and decorates a GlobalOptions object from the YAML config file specified in the CliOptions - /// - /// - /// - /// - /// - public GlobalOptions Load(string hostProcessName, CliOptions cliOptions, IFileSystem? fileSystem = null) - { - GlobalOptions globalOptions = Load(hostProcessName, cliOptions.YamlFile, fileSystem); + /// + /// Loads and decorates a GlobalOptions object from the YAML config file specified in the CliOptions + /// + /// + /// + /// + /// + public GlobalOptions Load(string hostProcessName, CliOptions cliOptions, IFileSystem? fileSystem = null) + { + GlobalOptions globalOptions = Load(hostProcessName, cliOptions.YamlFile, fileSystem); - // The above Load call does the decoration - don't do it here. - return globalOptions; - } + // The above Load call does the decoration - don't do it here. + return globalOptions; + } - private object GetGlobalOption(Type arg) - { - var opts = arg == typeof(GlobalOptions) ? - new GlobalOptions() : - Activator.CreateInstance(arg); - return opts ?? throw new ArgumentException(null, nameof(arg)); - } + private object GetGlobalOption(Type arg) + { + var opts = arg == typeof(GlobalOptions) ? + new GlobalOptions() : + Activator.CreateInstance(arg); + return opts ?? 
throw new ArgumentException(null, nameof(arg)); } } diff --git a/src/SmiServices/Common/Options/IOptionsDecorator.cs b/src/SmiServices/Common/Options/IOptionsDecorator.cs index 7d94ccc3c..f33e8a522 100644 --- a/src/SmiServices/Common/Options/IOptionsDecorator.cs +++ b/src/SmiServices/Common/Options/IOptionsDecorator.cs @@ -1,10 +1,9 @@ -namespace SmiServices.Common.Options +namespace SmiServices.Common.Options; + +/// +/// For classes that modify e.g. populate passwords from a vault etc +/// +public interface IOptionsDecorator { - /// - /// For classes that modify e.g. populate passwords from a vault etc - /// - public interface IOptionsDecorator - { - GlobalOptions Decorate(GlobalOptions options); - } + GlobalOptions Decorate(GlobalOptions options); } diff --git a/src/SmiServices/Common/Options/ModalitySpecificRejectorOptions.cs b/src/SmiServices/Common/Options/ModalitySpecificRejectorOptions.cs index de7875dd2..202134140 100644 --- a/src/SmiServices/Common/Options/ModalitySpecificRejectorOptions.cs +++ b/src/SmiServices/Common/Options/ModalitySpecificRejectorOptions.cs @@ -1,31 +1,30 @@ using System; -namespace SmiServices.Common.Options +namespace SmiServices.Common.Options; + +/// +/// Modality specific rejectors +/// +public class ModalitySpecificRejectorOptions { /// - /// Modality specific rejectors + /// Comma separated list of modalities that this class applies to /// - public class ModalitySpecificRejectorOptions - { - /// - /// Comma separated list of modalities that this class applies to - /// - public string? Modalities { get; set; } - - /// - /// True to override base modalities. False to make both apply (i.e. this rejector should be used in addition to basic rejectors) - /// - public bool Overrides { get; set; } + public string? Modalities { get; set; } - /// - /// The Type of IRejector to use when evaluating the releaseability of dicom files of given - /// - public string? RejectorType { get; set; } + /// + /// True to override base modalities. 
False to make both apply (i.e. this rejector should be used in addition to basic rejectors) + /// + public bool Overrides { get; set; } - public string[] GetModalities() - { - return string.IsNullOrWhiteSpace(Modalities) ? [] : Modalities.Split(new[] { ',' }, StringSplitOptions.RemoveEmptyEntries); - } + /// + /// The Type of IRejector to use when evaluating the releaseability of dicom files of given + /// + public string? RejectorType { get; set; } + public string[] GetModalities() + { + return string.IsNullOrWhiteSpace(Modalities) ? [] : Modalities.Split(new[] { ',' }, StringSplitOptions.RemoveEmptyEntries); } + } diff --git a/src/SmiServices/Common/Options/OptionsDecorator.cs b/src/SmiServices/Common/Options/OptionsDecorator.cs index cafb62f83..90eb3ecad 100644 --- a/src/SmiServices/Common/Options/OptionsDecorator.cs +++ b/src/SmiServices/Common/Options/OptionsDecorator.cs @@ -2,36 +2,35 @@ using System.Reflection; using YamlDotNet.Serialization; -namespace SmiServices.Common.Options +namespace SmiServices.Common.Options; + +public abstract class OptionsDecorator : IOptionsDecorator { - public abstract class OptionsDecorator : IOptionsDecorator - { - public abstract GlobalOptions Decorate(GlobalOptions options); + public abstract GlobalOptions Decorate(GlobalOptions options); - protected static void ForAll(IOptions globals, Func setter) where T : IOptions + protected static void ForAll(IOptions globals, Func setter) where T : IOptions + { + //for each property on branch + foreach (var p in globals.GetType().GetProperties()) { - //for each property on branch - foreach (var p in globals.GetType().GetProperties()) - { - if (p.GetCustomAttribute(typeof(YamlIgnoreAttribute)) is not null) continue; + if (p.GetCustomAttribute(typeof(YamlIgnoreAttribute)) is not null) continue; - var currentValue = p.GetValue(globals)!; + var currentValue = p.GetValue(globals)!; - //if it's a T then call the action (note that we check the property Type because we are interested in the 
property even if it is null - if (p.PropertyType.IsAssignableFrom(typeof(T))) - { - //the delegate changes the value of the property of Type T (or creates a new instance from scratch) - var result = setter((T)currentValue); + //if it's a T then call the action (note that we check the property Type because we are interested in the property even if it is null + if (p.PropertyType.IsAssignableFrom(typeof(T))) + { + //the delegate changes the value of the property of Type T (or creates a new instance from scratch) + var result = setter((T)currentValue); - //store the result of the delegate for this property - p.SetValue(globals, result); - } + //store the result of the delegate for this property + p.SetValue(globals, result); + } - //process its children - if (currentValue is IOptions subOptions) - { - ForAll(subOptions, setter); - } + //process its children + if (currentValue is IOptions subOptions) + { + ForAll(subOptions, setter); } } } diff --git a/src/SmiServices/Common/Options/ProducerOptions.cs b/src/SmiServices/Common/Options/ProducerOptions.cs index 16afc167e..437ba4f78 100644 --- a/src/SmiServices/Common/Options/ProducerOptions.cs +++ b/src/SmiServices/Common/Options/ProducerOptions.cs @@ -2,52 +2,51 @@ using SmiServices.Common.Messaging; using System; -namespace SmiServices.Common.Options +namespace SmiServices.Common.Options; + +/// +/// Configuration options needed to send messages to a RabbitMQ exchange +/// +public class ProducerOptions : MemberwiseEquatable, IOptions { /// - /// Configuration options needed to send messages to a RabbitMQ exchange + /// Name of the RabbitMQ exchange to send messages to + /// + public string? ExchangeName { get; set; } + + /// + /// Maximum number of times to retry the publish confirmations + /// + public int MaxConfirmAttempts { get; set; } = 1; + + /// + /// Specify the to use when handling publish failures + /// + public string? 
BackoffProviderType { get; set; } + + /// + /// Downstream queue to monitor /// - public class ProducerOptions : MemberwiseEquatable, IOptions + public string? ProbeQueueName { get; set; } + + /// + /// Message limit of the downstream queue + /// + public int ProbeQueueLimit { get; set; } = 0; + + /// + /// Sleep time between each check of the probe queue when it is over the message limit + /// + public TimeSpan? ProbeTimeout { get; set; } + + /// + /// Verifies that the individual options have been populated + /// + /// + public bool VerifyPopulated() { - /// - /// Name of the RabbitMQ exchange to send messages to - /// - public string? ExchangeName { get; set; } - - /// - /// Maximum number of times to retry the publish confirmations - /// - public int MaxConfirmAttempts { get; set; } = 1; - - /// - /// Specify the to use when handling publish failures - /// - public string? BackoffProviderType { get; set; } - - /// - /// Downstream queue to monitor - /// - public string? ProbeQueueName { get; set; } - - /// - /// Message limit of the downstream queue - /// - public int ProbeQueueLimit { get; set; } = 0; - - /// - /// Sleep time between each check of the probe queue when it is over the message limit - /// - public TimeSpan? 
ProbeTimeout { get; set; } - - /// - /// Verifies that the individual options have been populated - /// - /// - public bool VerifyPopulated() - { - return !string.IsNullOrWhiteSpace(ExchangeName); - } - - public override string ToString() => $"ExchangeName={ExchangeName}, MaxConfirmAttempts={MaxConfirmAttempts}, BackoffProviderType={BackoffProviderType}"; + return !string.IsNullOrWhiteSpace(ExchangeName); } + + public override string ToString() => $"ExchangeName={ExchangeName}, MaxConfirmAttempts={MaxConfirmAttempts}, BackoffProviderType={BackoffProviderType}"; } diff --git a/src/SmiServices/Common/Options/SmiCliInit.cs b/src/SmiServices/Common/Options/SmiCliInit.cs index 23c7e5966..74ca4f99c 100644 --- a/src/SmiServices/Common/Options/SmiCliInit.cs +++ b/src/SmiServices/Common/Options/SmiCliInit.cs @@ -8,151 +8,150 @@ using System.Linq; -namespace SmiServices.Common.Options +namespace SmiServices.Common.Options; + +/// +/// Base class for all Program entry points. Parses Cli options and sets-up a standard logging configuration +/// +public static class SmiCliInit { + public static bool InitSmiLogging { get; set; } = true; + + private static readonly Parser _parser; + + static SmiCliInit() + { + _parser = GetDefaultParser(); + } + /// - /// Base class for all Program entry points. 
Parses Cli options and sets-up a standard logging configuration + /// Create an instance of the default Parser /// - public static class SmiCliInit + /// + public static Parser GetDefaultParser() { - public static bool InitSmiLogging { get; set; } = true; - - private static readonly Parser _parser; - - static SmiCliInit() - { - _parser = GetDefaultParser(); - } - - /// - /// Create an instance of the default Parser - /// - /// - public static Parser GetDefaultParser() + ParserSettings defaults = Parser.Default.Settings; + return new Parser(settings => { - ParserSettings defaults = Parser.Default.Settings; - return new Parser(settings => - { - settings.CaseInsensitiveEnumValues = true; - settings.CaseSensitive = false; - settings.EnableDashDash = defaults.EnableDashDash; - settings.HelpWriter = defaults.HelpWriter; - settings.IgnoreUnknownArguments = false; - settings.MaximumDisplayWidth = defaults.MaximumDisplayWidth; - settings.ParsingCulture = defaults.ParsingCulture; - }); - } - - /// - /// Parse CLI arguments to the specified type, and runs the provided function if parsing is successful - /// - /// Arguments passed to Main - /// - /// The function to call on a successful parse - /// The return code from the onParse function - public static int ParseAndRun(IEnumerable args, string programName, Func onParse) where T : CliOptions - { - int ret = _parser - .ParseArguments(args) - .MapResult( - parsed => + settings.CaseInsensitiveEnumValues = true; + settings.CaseSensitive = false; + settings.EnableDashDash = defaults.EnableDashDash; + settings.HelpWriter = defaults.HelpWriter; + settings.IgnoreUnknownArguments = false; + settings.MaximumDisplayWidth = defaults.MaximumDisplayWidth; + settings.ParsingCulture = defaults.ParsingCulture; + }); + } + + /// + /// Parse CLI arguments to the specified type, and runs the provided function if parsing is successful + /// + /// Arguments passed to Main + /// + /// The function to call on a successful parse + /// The return code 
from the onParse function + public static int ParseAndRun(IEnumerable args, string programName, Func onParse) where T : CliOptions + { + int ret = _parser + .ParseArguments(args) + .MapResult( + parsed => + { + GlobalOptions globals = new GlobalOptionsFactory().Load(programName, parsed); + + if (InitSmiLogging) { - GlobalOptions globals = new GlobalOptionsFactory().Load(programName, parsed); - - if (InitSmiLogging) - { - ArgumentNullException.ThrowIfNull(globals.LoggingOptions); - SmiLogging.Setup(globals.LoggingOptions, programName); - } - - MessageHeader.CurrentProgramName = programName; - - return onParse(globals, parsed); - }, - OnErrors - ); - return ret; - } - - /// - /// Parse CLI arguments to one of the specified types, and runs the provided function if parsing is successful - /// - /// Arguments passed to Main - /// - /// The list of possible target verb types to construct from the args - /// The function to call on a successful parse - /// The return code from the onParse function - public static int ParseAndRun(IEnumerable args, string programName, Type[] targetVerbTypes, Func onParse) - { - int ret = _parser - .ParseArguments( - args, - targetVerbTypes - ) - .MapResult( - parsed => + ArgumentNullException.ThrowIfNull(globals.LoggingOptions); + SmiLogging.Setup(globals.LoggingOptions, programName); + } + + MessageHeader.CurrentProgramName = programName; + + return onParse(globals, parsed); + }, + OnErrors + ); + return ret; + } + + /// + /// Parse CLI arguments to one of the specified types, and runs the provided function if parsing is successful + /// + /// Arguments passed to Main + /// + /// The list of possible target verb types to construct from the args + /// The function to call on a successful parse + /// The return code from the onParse function + public static int ParseAndRun(IEnumerable args, string programName, Type[] targetVerbTypes, Func onParse) + { + int ret = _parser + .ParseArguments( + args, + targetVerbTypes + ) + .MapResult( + parsed 
=> + { + var cliOptions = Verify(parsed); + GlobalOptions globals = new GlobalOptionsFactory().Load(programName, cliOptions); + + if (InitSmiLogging) { - var cliOptions = Verify(parsed); - GlobalOptions globals = new GlobalOptionsFactory().Load(programName, cliOptions); + ArgumentNullException.ThrowIfNull(globals.LoggingOptions); + SmiLogging.Setup(globals.LoggingOptions, programName); + } - if (InitSmiLogging) - { - ArgumentNullException.ThrowIfNull(globals.LoggingOptions); - SmiLogging.Setup(globals.LoggingOptions, programName); - } + MessageHeader.CurrentProgramName = programName; - MessageHeader.CurrentProgramName = programName; + return onParse(globals, parsed); + }, + OnErrors + ); + return ret; + } - return onParse(globals, parsed); - }, - OnErrors - ); - return ret; - } + public static int ParseServiceVerbAndRun(IEnumerable args, Type[] targetVerbTypes, Func onParse) + { + int ret = _parser + .ParseArguments( + args, + targetVerbTypes + ) + .MapResult( + parsed => onParse(parsed), + OnErrors + ); + return ret; + } - public static int ParseServiceVerbAndRun(IEnumerable args, Type[] targetVerbTypes, Func onParse) - { - int ret = _parser - .ParseArguments( - args, - targetVerbTypes - ) - .MapResult( - parsed => onParse(parsed), - OnErrors - ); - return ret; - } - - /// - /// Verify the parsedOptions is of the specified type, or throw an exception - /// - /// The type to check for - /// - /// - public static T Verify(object parsedOptions) - { - if (parsedOptions is not T asExpected) - throw new NotImplementedException($"Did not construct expected type '{typeof(T).Name}'"); - return asExpected; - } + /// + /// Verify the parsedOptions is of the specified type, or throw an exception + /// + /// The type to check for + /// + /// + public static T Verify(object parsedOptions) + { + if (parsedOptions is not T asExpected) + throw new NotImplementedException($"Did not construct expected type '{typeof(T).Name}'"); + return asExpected; + } - private static int 
OnErrors(IEnumerable errors) - { - // Create a default console logger - SMI one may not be available at this point - var config = new LoggingConfiguration(); - using var consoleTarget = new ConsoleTarget(nameof(SmiCliInit)); - config.AddRule(LogLevel.Trace, LogLevel.Fatal, consoleTarget); - Logger logger = LogManager.GetCurrentClassLogger(); + private static int OnErrors(IEnumerable errors) + { + // Create a default console logger - SMI one may not be available at this point + var config = new LoggingConfiguration(); + using var consoleTarget = new ConsoleTarget(nameof(SmiCliInit)); + config.AddRule(LogLevel.Trace, LogLevel.Fatal, consoleTarget); + Logger logger = LogManager.GetCurrentClassLogger(); - List errorsList = errors.ToList(); - if (errorsList.Count == 1 && errorsList.Single().Tag == ErrorType.HelpRequestedError) - return 0; + List errorsList = errors.ToList(); + if (errorsList.Count == 1 && errorsList.Single().Tag == ErrorType.HelpRequestedError) + return 0; - foreach (Error error in errorsList) - logger.Error(error); + foreach (Error error in errorsList) + logger.Error(error); - return 1; - } + return 1; } } diff --git a/src/SmiServices/Common/SmiLogging.cs b/src/SmiServices/Common/SmiLogging.cs index 9253a43b3..e8d91a453 100644 --- a/src/SmiServices/Common/SmiLogging.cs +++ b/src/SmiServices/Common/SmiLogging.cs @@ -4,72 +4,71 @@ using System.IO; -namespace SmiServices.Common +namespace SmiServices.Common; + +public static class SmiLogging { - public static class SmiLogging - { - private const string DefaultLogConfigName = "Smi.NLog.config"; + private const string DefaultLogConfigName = "Smi.NLog.config"; - private static bool _initialised; + private static bool _initialised; - public static void Setup(LoggingOptions loggingOptions, string hostProcessName) - { - if (_initialised) - throw new Exception("SmiLogging already initialised"); - _initialised = true; + public static void Setup(LoggingOptions loggingOptions, string hostProcessName) + { + if 
(_initialised) + throw new Exception("SmiLogging already initialised"); + _initialised = true; - string localConfig = Path.Combine(Directory.GetCurrentDirectory(), DefaultLogConfigName); - string configFilePathToLoad = !string.IsNullOrWhiteSpace(loggingOptions.LogConfigFile) - ? loggingOptions.LogConfigFile - : localConfig; + string localConfig = Path.Combine(Directory.GetCurrentDirectory(), DefaultLogConfigName); + string configFilePathToLoad = !string.IsNullOrWhiteSpace(loggingOptions.LogConfigFile) + ? loggingOptions.LogConfigFile + : localConfig; - if (!File.Exists(configFilePathToLoad)) - throw new FileNotFoundException($"Could not find the specified logging configuration '{configFilePathToLoad})'"); + if (!File.Exists(configFilePathToLoad)) + throw new FileNotFoundException($"Could not find the specified logging configuration '{configFilePathToLoad})'"); - LogManager.ThrowConfigExceptions = true; - LogManager.Configuration = new NLog.Config.XmlLoggingConfiguration(configFilePathToLoad); + LogManager.ThrowConfigExceptions = true; + LogManager.Configuration = new NLog.Config.XmlLoggingConfiguration(configFilePathToLoad); - if (!string.IsNullOrWhiteSpace(loggingOptions.LogsRoot)) - { - if (!Directory.Exists(loggingOptions.LogsRoot)) - throw new ApplicationException($"Invalid log root '{loggingOptions.LogsRoot}'"); + if (!string.IsNullOrWhiteSpace(loggingOptions.LogsRoot)) + { + if (!Directory.Exists(loggingOptions.LogsRoot)) + throw new ApplicationException($"Invalid log root '{loggingOptions.LogsRoot}'"); - VerifyCanWrite(loggingOptions.LogsRoot); + VerifyCanWrite(loggingOptions.LogsRoot); - LogManager.Configuration.Variables["baseFileName"] = - $"{loggingOptions.LogsRoot}/" + - $"{hostProcessName}/" + - $"${{cached:cached=true:clearCache=None:inner=${{date:format=yyyy-MM-dd-HH-mm-ss}}}}-${{processid}}"; - } - else - VerifyCanWrite(Directory.GetCurrentDirectory()); + LogManager.Configuration.Variables["baseFileName"] = + $"{loggingOptions.LogsRoot}/" + + 
$"{hostProcessName}/" + + $"${{cached:cached=true:clearCache=None:inner=${{date:format=yyyy-MM-dd-HH-mm-ss}}}}-${{processid}}"; + } + else + VerifyCanWrite(Directory.GetCurrentDirectory()); - Logger logger = LogManager.GetLogger(nameof(SmiLogging)); - LogManager.GlobalThreshold = LogLevel.Trace; + Logger logger = LogManager.GetLogger(nameof(SmiLogging)); + LogManager.GlobalThreshold = LogLevel.Trace; - if (!loggingOptions.TraceLogging) - LogManager.GlobalThreshold = LogLevel.Debug; - logger.Trace("Trace logging enabled!"); + if (!loggingOptions.TraceLogging) + LogManager.GlobalThreshold = LogLevel.Debug; + logger.Trace("Trace logging enabled!"); - logger.Info($"Logging config loaded from {configFilePathToLoad}"); - } + logger.Info($"Logging config loaded from {configFilePathToLoad}"); + } - private static void VerifyCanWrite(string logsRoot) + private static void VerifyCanWrite(string logsRoot) + { + string[] tmpFileParts = Path.GetTempFileName().Split(Path.DirectorySeparatorChar); + string tmpFileInLogsRoot = Path.Combine(logsRoot, tmpFileParts[^1]); + try + { + File.WriteAllText(tmpFileInLogsRoot, ""); + } + catch (UnauthorizedAccessException e) + { + throw new UnauthorizedAccessException($"Couldn't create a file in the logs root '{logsRoot}'; possible permissions error", e); + } + finally { - string[] tmpFileParts = Path.GetTempFileName().Split(Path.DirectorySeparatorChar); - string tmpFileInLogsRoot = Path.Combine(logsRoot, tmpFileParts[^1]); - try - { - File.WriteAllText(tmpFileInLogsRoot, ""); - } - catch (UnauthorizedAccessException e) - { - throw new UnauthorizedAccessException($"Couldn't create a file in the logs root '{logsRoot}'; possible permissions error", e); - } - finally - { - File.Delete(tmpFileInLogsRoot); - } + File.Delete(tmpFileInLogsRoot); } } } diff --git a/src/SmiServices/Common/TimeTracker.cs b/src/SmiServices/Common/TimeTracker.cs index 25290cd31..5736f85d1 100644 --- a/src/SmiServices/Common/TimeTracker.cs +++ 
b/src/SmiServices/Common/TimeTracker.cs @@ -1,32 +1,31 @@ using System; using System.Diagnostics; -namespace SmiServices.Common +namespace SmiServices.Common; + +/// +/// Runs a for the duration of the using statement (this class is ) +/// +public class TimeTracker : IDisposable { + private readonly Stopwatch _sw; + /// - /// Runs a for the duration of the using statement (this class is ) + /// Starts the and runs it until disposal (use this in a using statement) /// - public class TimeTracker : IDisposable + /// + public TimeTracker(Stopwatch sw) { - private readonly Stopwatch _sw; - - /// - /// Starts the and runs it until disposal (use this in a using statement) - /// - /// - public TimeTracker(Stopwatch sw) - { - _sw = sw; - _sw.Start(); - } + _sw = sw; + _sw.Start(); + } - /// - /// Stops the - /// - public void Dispose() - { - GC.SuppressFinalize(this); - _sw.Stop(); - } + /// + /// Stops the + /// + public void Dispose() + { + GC.SuppressFinalize(this); + _sw.Stop(); } } diff --git a/src/SmiServices/Common/ZipHelper.cs b/src/SmiServices/Common/ZipHelper.cs index 8c388f768..11916e2a6 100644 --- a/src/SmiServices/Common/ZipHelper.cs +++ b/src/SmiServices/Common/ZipHelper.cs @@ -2,35 +2,34 @@ using System.IO; using System.IO.Abstractions; -namespace SmiServices.Common -{ - public class ZipHelper - { - readonly static List SupportedExtensions = - [ - ".zip", - ".tar" - ]; +namespace SmiServices.Common; - /// - /// Returns true if looks like a compressed archive compatible with smi e.g. zip, tar etc - /// - /// - /// - public static bool IsZip(IFileInfo f) - { - return SupportedExtensions.Contains(f.Extension); - } +public class ZipHelper +{ + readonly static List SupportedExtensions = + [ + ".zip", + ".tar" + ]; - /// - /// Returns true if looks like a compressed archive compatible with smi e.g. 
zip, tar etc - /// - /// - /// - public static bool IsZip(string path) - { - return SupportedExtensions.Contains(Path.GetExtension(path)); - } + /// + /// Returns true if looks like a compressed archive compatible with smi e.g. zip, tar etc + /// + /// + /// + public static bool IsZip(IFileInfo f) + { + return SupportedExtensions.Contains(f.Extension); + } + /// + /// Returns true if looks like a compressed archive compatible with smi e.g. zip, tar etc + /// + /// + /// + public static bool IsZip(string path) + { + return SupportedExtensions.Contains(Path.GetExtension(path)); } + } diff --git a/src/SmiServices/Microservices/CohortExtractor/CohortExtractor.cs b/src/SmiServices/Microservices/CohortExtractor/CohortExtractor.cs index 62420a35f..ca6ee23be 100644 --- a/src/SmiServices/Microservices/CohortExtractor/CohortExtractor.cs +++ b/src/SmiServices/Microservices/CohortExtractor/CohortExtractor.cs @@ -3,29 +3,28 @@ using System.Collections.Generic; using System.Diagnostics.CodeAnalysis; -namespace SmiServices.Microservices.CohortExtractor +namespace SmiServices.Microservices.CohortExtractor; + +public static class CohortExtractor { - public static class CohortExtractor + [ExcludeFromCodeCoverage] + public static int Main(IEnumerable args) { - [ExcludeFromCodeCoverage] - public static int Main(IEnumerable args) - { - int ret = SmiCliInit.ParseAndRun(args, nameof(CohortExtractor), OnParse); - return ret; - } + int ret = SmiCliInit.ParseAndRun(args, nameof(CohortExtractor), OnParse); + return ret; + } - private static int OnParse(GlobalOptions globals, CliOptions opts) - { - //Use the auditor and request fullfilers specified in the yaml - var bootstrapper = new MicroserviceHostBootstrapper( - () => new CohortExtractorHost( - globals, - fulfiller: null - ) - ); + private static int OnParse(GlobalOptions globals, CliOptions opts) + { + //Use the auditor and request fullfilers specified in the yaml + var bootstrapper = new MicroserviceHostBootstrapper( + () => new 
CohortExtractorHost( + globals, + fulfiller: null + ) + ); - int ret = bootstrapper.Main(); - return ret; - } + int ret = bootstrapper.Main(); + return ret; } } diff --git a/src/SmiServices/Microservices/CohortExtractor/CohortExtractorHost.cs b/src/SmiServices/Microservices/CohortExtractor/CohortExtractorHost.cs index 1eae1605b..1d67bd590 100644 --- a/src/SmiServices/Microservices/CohortExtractor/CohortExtractorHost.cs +++ b/src/SmiServices/Microservices/CohortExtractor/CohortExtractorHost.cs @@ -19,144 +19,143 @@ using System.Linq; using System.Text.RegularExpressions; -namespace SmiServices.Microservices.CohortExtractor +namespace SmiServices.Microservices.CohortExtractor; + + +/// +/// Microservice for handling requests to extract images for specific UIDs. UIDs arrive as instances. These +/// requests are fed to an for database lookup and the resulting file matches sent for extraction in the +/// form of . +/// +/// This microservice may filter which images are sent for extraction e.g. only PRIMARY/ORIGINAL images (depending on system configuration). +/// +public class CohortExtractorHost : MicroserviceHost { + /// + /// RabbitMQ queue subscriber responsible for dequeuing messages and feeding them to the + /// + public ExtractionRequestQueueConsumer? Consumer { get; set; } + private readonly CohortExtractorOptions _consumerOptions; + + private IExtractionRequestFulfiller? _fulfiller; + private IProjectPathResolver? _pathResolver; + private IProducerModel? _fileMessageProducer; + + private readonly IFileSystem _fileSystem; /// - /// Microservice for handling requests to extract images for specific UIDs. UIDs arrive as instances. These - /// requests are fed to an for database lookup and the resulting file matches sent for extraction in the - /// form of . - /// - /// This microservice may filter which images are sent for extraction e.g. only PRIMARY/ORIGINAL images (depending on system configuration). 
+ /// Creates a new instance of the host with the given /// - public class CohortExtractorHost : MicroserviceHost + /// Settings for the microservice (location of rabbit, queue names etc) + /// Optional override for the value specified in + /// + public CohortExtractorHost(GlobalOptions options, IExtractionRequestFulfiller? fulfiller, IFileSystem? fileSystem = null) + : base(options) { - /// - /// RabbitMQ queue subscriber responsible for dequeuing messages and feeding them to the - /// - public ExtractionRequestQueueConsumer? Consumer { get; set; } - private readonly CohortExtractorOptions _consumerOptions; - - private IExtractionRequestFulfiller? _fulfiller; - private IProjectPathResolver? _pathResolver; - private IProducerModel? _fileMessageProducer; - - private readonly IFileSystem _fileSystem; - - /// - /// Creates a new instance of the host with the given - /// - /// Settings for the microservice (location of rabbit, queue names etc) - /// Optional override for the value specified in - /// - public CohortExtractorHost(GlobalOptions options, IExtractionRequestFulfiller? fulfiller, IFileSystem? fileSystem = null) - : base(options) - { - _consumerOptions = options.CohortExtractorOptions!; - _consumerOptions.Validate(); + _consumerOptions = options.CohortExtractorOptions!; + _consumerOptions.Validate(); - _fulfiller = fulfiller; - _fileSystem = fileSystem ?? new FileSystem(); - } + _fulfiller = fulfiller; + _fileSystem = fileSystem ?? new FileSystem(); + } - /// - /// Starts up service and begins listening with the - /// - public override void Start() - { - FansiImplementations.Load(); + /// + /// Starts up service and begins listening with the + /// + public override void Start() + { + FansiImplementations.Load(); - var repositoryLocator = Globals.RDMPOptions?.GetRepositoryProvider() ?? throw new ApplicationException("RDMPOptions missing"); + var repositoryLocator = Globals.RDMPOptions?.GetRepositoryProvider() ?? 
throw new ApplicationException("RDMPOptions missing"); - var startup = new Startup(repositoryLocator); + var startup = new Startup(repositoryLocator); - var toMemory = new ToMemoryCheckNotifier(); - startup.DoStartup(toMemory); + var toMemory = new ToMemoryCheckNotifier(); + startup.DoStartup(toMemory); - foreach (var args in toMemory.Messages.Where(static m => m.Result == CheckResult.Fail)) - Logger.Log(LogLevel.Warn, args.Ex, args.Message); + foreach (var args in toMemory.Messages.Where(static m => m.Result == CheckResult.Fail)) + Logger.Log(LogLevel.Warn, args.Ex, args.Message); - _fileMessageProducer = MessageBroker.SetupProducer(Globals.CohortExtractorOptions!.ExtractFilesProducerOptions!, isBatch: true); - var fileMessageInfoProducer = MessageBroker.SetupProducer(Globals.CohortExtractorOptions.ExtractFilesInfoProducerOptions!, isBatch: false); + _fileMessageProducer = MessageBroker.SetupProducer(Globals.CohortExtractorOptions!.ExtractFilesProducerOptions!, isBatch: true); + var fileMessageInfoProducer = MessageBroker.SetupProducer(Globals.CohortExtractorOptions.ExtractFilesInfoProducerOptions!, isBatch: false); - InitializeExtractionSources(repositoryLocator); + InitializeExtractionSources(repositoryLocator); - Consumer = new ExtractionRequestQueueConsumer(Globals.CohortExtractorOptions, _fulfiller!, _pathResolver!, _fileMessageProducer, fileMessageInfoProducer); + Consumer = new ExtractionRequestQueueConsumer(Globals.CohortExtractorOptions, _fulfiller!, _pathResolver!, _fileMessageProducer, fileMessageInfoProducer); - MessageBroker.StartConsumer(_consumerOptions, Consumer, isSolo: false); - } + MessageBroker.StartConsumer(_consumerOptions, Consumer, isSolo: false); + } - public override void Stop(string reason) - { - if (_fileMessageProducer != null) - try - { - _fileMessageProducer.WaitForConfirms(); - } - catch (AlreadyClosedException) { /* Ignored */ } - - base.Stop(reason); - } + public override void Stop(string reason) + { + if (_fileMessageProducer != 
null) + try + { + _fileMessageProducer.WaitForConfirms(); + } + catch (AlreadyClosedException) { /* Ignored */ } + + base.Stop(reason); + } - /// - /// Connects to RDMP platform databases to retrieve extractable catalogues and initializes the (if none - /// was specified in the constructor override). - /// - /// - private void InitializeExtractionSources(IRDMPPlatformRepositoryServiceLocator repositoryLocator) + /// + /// Connects to RDMP platform databases to retrieve extractable catalogues and initializes the (if none + /// was specified in the constructor override). + /// + /// + private void InitializeExtractionSources(IRDMPPlatformRepositoryServiceLocator repositoryLocator) + { + // Get all extractable catalogues + var catalogues = repositoryLocator + .DataExportRepository + .GetAllObjects() + .Select(eds => eds.Catalogue) + .Where(catalogue => catalogue != null) + .ToArray(); + + if (!_consumerOptions.AllCatalogues) + catalogues = catalogues.Where(c => _consumerOptions.OnlyCatalogues.Contains(c.ID)).ToArray(); + + if (_fulfiller == null) { - // Get all extractable catalogues - var catalogues = repositoryLocator - .DataExportRepository - .GetAllObjects() - .Select(eds => eds.Catalogue) - .Where(catalogue => catalogue != null) - .ToArray(); - - if (!_consumerOptions.AllCatalogues) - catalogues = catalogues.Where(c => _consumerOptions.OnlyCatalogues.Contains(c.ID)).ToArray(); - - if (_fulfiller == null) - { - var extractionRequestFulfillerTypeStr = _consumerOptions.RequestFulfillerType; - if (!Enum.TryParse(extractionRequestFulfillerTypeStr, out ExtractionRequestFulfillerType extractionRequestFulfillerType)) - throw new ArgumentException($"Could not parse '{extractionRequestFulfillerTypeStr}' to a valid {nameof(ExtractionRequestFulfillerType)}"); + var extractionRequestFulfillerTypeStr = _consumerOptions.RequestFulfillerType; + if (!Enum.TryParse(extractionRequestFulfillerTypeStr, out ExtractionRequestFulfillerType extractionRequestFulfillerType)) + throw new 
ArgumentException($"Could not parse '{extractionRequestFulfillerTypeStr}' to a valid {nameof(ExtractionRequestFulfillerType)}"); - Regex? modalityRoutingRegex = null; - if (!string.IsNullOrWhiteSpace(_consumerOptions.ModalityRoutingRegex)) - modalityRoutingRegex = new Regex(_consumerOptions.ModalityRoutingRegex, RegexOptions.Compiled); + Regex? modalityRoutingRegex = null; + if (!string.IsNullOrWhiteSpace(_consumerOptions.ModalityRoutingRegex)) + modalityRoutingRegex = new Regex(_consumerOptions.ModalityRoutingRegex, RegexOptions.Compiled); - _fulfiller = ExtractionRequestFulfillerFactory.Create(extractionRequestFulfillerType, catalogues, modalityRoutingRegex); - } + _fulfiller = ExtractionRequestFulfillerFactory.Create(extractionRequestFulfillerType, catalogues, modalityRoutingRegex); + } + + // Bit of a hack until we remove the ObjectFactory calls + if (!string.IsNullOrWhiteSpace(_consumerOptions.DynamicRulesPath)) + DynamicRejector.DefaultDynamicRulesPath = _consumerOptions.DynamicRulesPath; - // Bit of a hack until we remove the ObjectFactory calls - if (!string.IsNullOrWhiteSpace(_consumerOptions.DynamicRulesPath)) - DynamicRejector.DefaultDynamicRulesPath = _consumerOptions.DynamicRulesPath; + if (!string.IsNullOrWhiteSpace(_consumerOptions.RejectorType)) + _fulfiller.Rejectors.Add(ObjectFactory.CreateInstance(_consumerOptions.RejectorType, typeof(IRejector).Assembly)!); - if (!string.IsNullOrWhiteSpace(_consumerOptions.RejectorType)) - _fulfiller.Rejectors.Add(ObjectFactory.CreateInstance(_consumerOptions.RejectorType, typeof(IRejector).Assembly)!); + foreach (var modalitySpecific in _consumerOptions.ModalitySpecificRejectors ?? []) + { + var r = ObjectFactory.CreateInstance(modalitySpecific.RejectorType!, typeof(IRejector).Assembly)!; + _fulfiller.ModalitySpecificRejectors.Add(modalitySpecific, r); + } - foreach (var modalitySpecific in _consumerOptions.ModalitySpecificRejectors ?? 
[]) + if (_consumerOptions.RejectColumnInfos != null) + foreach (var id in _consumerOptions.RejectColumnInfos) + _fulfiller.Rejectors.Add(new ColumnInfoValuesRejector(repositoryLocator.CatalogueRepository.GetObjectByID(id))); + + if (_consumerOptions.Blacklists != null) + foreach (var id in _consumerOptions.Blacklists) { - var r = ObjectFactory.CreateInstance(modalitySpecific.RejectorType!, typeof(IRejector).Assembly)!; - _fulfiller.ModalitySpecificRejectors.Add(modalitySpecific, r); + var cata = repositoryLocator.CatalogueRepository.GetObjectByID(id); + var rejector = new BlacklistRejector(cata); + _fulfiller.Rejectors.Add(rejector); } - if (_consumerOptions.RejectColumnInfos != null) - foreach (var id in _consumerOptions.RejectColumnInfos) - _fulfiller.Rejectors.Add(new ColumnInfoValuesRejector(repositoryLocator.CatalogueRepository.GetObjectByID(id))); - - if (_consumerOptions.Blacklists != null) - foreach (var id in _consumerOptions.Blacklists) - { - var cata = repositoryLocator.CatalogueRepository.GetObjectByID(id); - var rejector = new BlacklistRejector(cata); - _fulfiller.Rejectors.Add(rejector); - } - - _pathResolver = string.IsNullOrWhiteSpace(_consumerOptions.ProjectPathResolverType) - ? new StudySeriesSOPProjectPathResolver(_fileSystem) - : ProjectPathResolverFactory.Create(_consumerOptions.ProjectPathResolverType, _fileSystem); - } + _pathResolver = string.IsNullOrWhiteSpace(_consumerOptions.ProjectPathResolverType) + ? 
new StudySeriesSOPProjectPathResolver(_fileSystem) + : ProjectPathResolverFactory.Create(_consumerOptions.ProjectPathResolverType, _fileSystem); } } diff --git a/src/SmiServices/Microservices/CohortExtractor/ExtractImageCollection.cs b/src/SmiServices/Microservices/CohortExtractor/ExtractImageCollection.cs index 807772e82..00ad9d3fc 100644 --- a/src/SmiServices/Microservices/CohortExtractor/ExtractImageCollection.cs +++ b/src/SmiServices/Microservices/CohortExtractor/ExtractImageCollection.cs @@ -3,46 +3,45 @@ using System.Collections.Generic; using System.Linq; -namespace SmiServices.Microservices.CohortExtractor +namespace SmiServices.Microservices.CohortExtractor; + +/// +/// Results object produced by an . +/// +public class ExtractImageCollection : Dictionary> { /// - /// Results object produced by an . + /// The value of a single for which we have + /// identified results /// - public class ExtractImageCollection : Dictionary> - { - /// - /// The value of a single for which we have - /// identified results - /// - public string KeyValue { get; private set; } + public string KeyValue { get; private set; } - /// - /// Unique SeriesInstanceUIDs amongst all results stored - /// - public string? SeriesInstanceUID => Values.SelectMany(v => v.Select(e => e.SeriesTagValue)).Distinct().Single(); //TODO: could be multiple series under a study + /// + /// Unique SeriesInstanceUIDs amongst all results stored + /// + public string? 
SeriesInstanceUID => Values.SelectMany(v => v.Select(e => e.SeriesTagValue)).Distinct().Single(); //TODO: could be multiple series under a study - public IReadOnlyCollection Accepted => GetWhereRejected(false); - public IReadOnlyCollection Rejected => GetWhereRejected(true); + public IReadOnlyCollection Accepted => GetWhereRejected(false); + public IReadOnlyCollection Rejected => GetWhereRejected(true); - private IReadOnlyCollection GetWhereRejected(bool isRejected) - { + private IReadOnlyCollection GetWhereRejected(bool isRejected) + { - HashSet result = []; + HashSet result = []; - foreach (HashSet v in Values) + foreach (HashSet v in Values) + { + foreach (QueryToExecuteResult queryToExecuteResult in v) { - foreach (QueryToExecuteResult queryToExecuteResult in v) - { - if (queryToExecuteResult.Reject == isRejected) - result.Add(queryToExecuteResult); - } + if (queryToExecuteResult.Reject == isRejected) + result.Add(queryToExecuteResult); } - return result; } + return result; + } - public ExtractImageCollection(string keyValue) - { - KeyValue = keyValue; - } + public ExtractImageCollection(string keyValue) + { + KeyValue = keyValue; } } diff --git a/src/SmiServices/Microservices/CohortExtractor/ExtractionRequestQueueConsumer.cs b/src/SmiServices/Microservices/CohortExtractor/ExtractionRequestQueueConsumer.cs index 4624d12f5..fa97f92fd 100644 --- a/src/SmiServices/Microservices/CohortExtractor/ExtractionRequestQueueConsumer.cs +++ b/src/SmiServices/Microservices/CohortExtractor/ExtractionRequestQueueConsumer.cs @@ -7,97 +7,96 @@ using System; using System.ComponentModel; -namespace SmiServices.Microservices.CohortExtractor +namespace SmiServices.Microservices.CohortExtractor; + +public class ExtractionRequestQueueConsumer : Consumer { - public class ExtractionRequestQueueConsumer : Consumer - { - private readonly CohortExtractorOptions _options; + private readonly CohortExtractorOptions _options; + + private readonly IExtractionRequestFulfiller _fulfiller; + 
private readonly IProducerModel _fileMessageProducer; + private readonly IProducerModel _fileMessageInfoProducer; - private readonly IExtractionRequestFulfiller _fulfiller; - private readonly IProducerModel _fileMessageProducer; - private readonly IProducerModel _fileMessageInfoProducer; + private readonly IProjectPathResolver _resolver; + + public ExtractionRequestQueueConsumer( + CohortExtractorOptions options, + IExtractionRequestFulfiller fulfiller, + IProjectPathResolver pathResolver, IProducerModel fileMessageProducer, + IProducerModel fileMessageInfoProducer) + { + _options = options; + _fulfiller = fulfiller; + _resolver = pathResolver; + _fileMessageProducer = fileMessageProducer; + _fileMessageInfoProducer = fileMessageInfoProducer; + } - private readonly IProjectPathResolver _resolver; + protected override void ProcessMessageImpl(IMessageHeader header, ExtractionRequestMessage request, ulong tag) + { + Logger.Info($"Received message {header.MessageGuid}: {request}"); - public ExtractionRequestQueueConsumer( - CohortExtractorOptions options, - IExtractionRequestFulfiller fulfiller, - IProjectPathResolver pathResolver, IProducerModel fileMessageProducer, - IProducerModel fileMessageInfoProducer) + if (!request.ExtractionDirectory.StartsWith(request.ProjectNumber)) { - _options = options; - _fulfiller = fulfiller; - _resolver = pathResolver; - _fileMessageProducer = fileMessageProducer; - _fileMessageInfoProducer = fileMessageInfoProducer; + Logger.Debug("ExtractionDirectory did not start with the project number, doing ErrorAndNack"); + ErrorAndNack(header, tag, "", new InvalidEnumArgumentException("ExtractionDirectory")); } - protected override void ProcessMessageImpl(IMessageHeader header, ExtractionRequestMessage request, ulong tag) - { - Logger.Info($"Received message {header.MessageGuid}: {request}"); + string extractionDirectory = request.ExtractionDirectory.TrimEnd('/', '\\'); + string? extractFileRoutingKey = request.IsIdentifiableExtraction ? 
_options.ExtractIdentRoutingKey : _options.ExtractAnonRoutingKey; - if (!request.ExtractionDirectory.StartsWith(request.ProjectNumber)) - { - Logger.Debug("ExtractionDirectory did not start with the project number, doing ErrorAndNack"); - ErrorAndNack(header, tag, "", new InvalidEnumArgumentException("ExtractionDirectory")); - } + foreach (ExtractImageCollection matchedFiles in _fulfiller.GetAllMatchingFiles(request)) + { + Logger.Info($"Accepted {matchedFiles.Accepted.Count} and rejected {matchedFiles.Rejected.Count} files for KeyValue {matchedFiles.KeyValue}"); - string extractionDirectory = request.ExtractionDirectory.TrimEnd('/', '\\'); - string? extractFileRoutingKey = request.IsIdentifiableExtraction ? _options.ExtractIdentRoutingKey : _options.ExtractAnonRoutingKey; + var infoMessage = new ExtractFileCollectionInfoMessage(request); - foreach (ExtractImageCollection matchedFiles in _fulfiller.GetAllMatchingFiles(request)) + foreach (QueryToExecuteResult accepted in matchedFiles.Accepted) { - Logger.Info($"Accepted {matchedFiles.Accepted.Count} and rejected {matchedFiles.Rejected.Count} files for KeyValue {matchedFiles.KeyValue}"); + var extractFileMessage = new ExtractFileMessage(request) + { + // Path to the original file + DicomFilePath = accepted.FilePathValue.TrimStart('/', '\\'), + // Extraction directory relative to the extract root + ExtractionDirectory = extractionDirectory, + // Output path for the anonymised file, relative to the extraction directory + OutputPath = _resolver.GetOutputPath(accepted, request).Replace('\\', '/') + }; - var infoMessage = new ExtractFileCollectionInfoMessage(request); + Logger.Debug($"DicomFilePath={extractFileMessage.DicomFilePath}, OutputPath={extractFileMessage.OutputPath}"); - foreach (QueryToExecuteResult accepted in matchedFiles.Accepted) - { - var extractFileMessage = new ExtractFileMessage(request) - { - // Path to the original file - DicomFilePath = accepted.FilePathValue.TrimStart('/', '\\'), - // Extraction 
directory relative to the extract root - ExtractionDirectory = extractionDirectory, - // Output path for the anonymised file, relative to the extraction directory - OutputPath = _resolver.GetOutputPath(accepted, request).Replace('\\', '/') - }; - - Logger.Debug($"DicomFilePath={extractFileMessage.DicomFilePath}, OutputPath={extractFileMessage.OutputPath}"); - - // Send the extract file message - var sentHeader = (MessageHeader)_fileMessageProducer.SendMessage(extractFileMessage, header, extractFileRoutingKey); - - // Record that we sent it - infoMessage.ExtractFileMessagesDispatched.Add(sentHeader, extractFileMessage.OutputPath); - } - - // Wait for confirms from the batched messages - Logger.Debug($"All ExtractFileMessage(s) sent for {matchedFiles.KeyValue}, calling WaitForConfirms"); - _fileMessageProducer.WaitForConfirms(); - - // For all the rejected messages log why (in the info message) - foreach (QueryToExecuteResult rejectedResults in matchedFiles.Rejected) - { - var rejectReason = rejectedResults.RejectReason - ?? 
throw new InvalidOperationException(nameof(rejectedResults.RejectReason)); + // Send the extract file message + var sentHeader = (MessageHeader)_fileMessageProducer.SendMessage(extractFileMessage, header, extractFileRoutingKey); - infoMessage.RejectionReasons.TryAdd(rejectReason, 0); - infoMessage.RejectionReasons[rejectReason]++; - } + // Record that we sent it + infoMessage.ExtractFileMessagesDispatched.Add(sentHeader, extractFileMessage.OutputPath); + } - infoMessage.KeyValue = matchedFiles.KeyValue; - _fileMessageInfoProducer.SendMessage(infoMessage, header, routingKey: null); + // Wait for confirms from the batched messages + Logger.Debug($"All ExtractFileMessage(s) sent for {matchedFiles.KeyValue}, calling WaitForConfirms"); + _fileMessageProducer.WaitForConfirms(); - if (_fileMessageInfoProducer.GetType() == typeof(BatchProducerModel)) - _fileMessageInfoProducer.WaitForConfirms(); + // For all the rejected messages log why (in the info message) + foreach (QueryToExecuteResult rejectedResults in matchedFiles.Rejected) + { + var rejectReason = rejectedResults.RejectReason + ?? 
throw new InvalidOperationException(nameof(rejectedResults.RejectReason)); - Logger.Info($"All ExtractFileCollectionInfoMessage(s) sent for {matchedFiles.KeyValue}"); + infoMessage.RejectionReasons.TryAdd(rejectReason, 0); + infoMessage.RejectionReasons[rejectReason]++; } - Logger.Info($"Finished processing message {header.MessageGuid}"); + infoMessage.KeyValue = matchedFiles.KeyValue; + _fileMessageInfoProducer.SendMessage(infoMessage, header, routingKey: null); + + if (_fileMessageInfoProducer.GetType() == typeof(BatchProducerModel)) + _fileMessageInfoProducer.WaitForConfirms(); - Ack(header, tag); + Logger.Info($"All ExtractFileCollectionInfoMessage(s) sent for {matchedFiles.KeyValue}"); } + + Logger.Info($"Finished processing message {header.MessageGuid}"); + + Ack(header, tag); } } diff --git a/src/SmiServices/Microservices/CohortExtractor/ProjectPathResolvers/IProjectPathResolver.cs b/src/SmiServices/Microservices/CohortExtractor/ProjectPathResolvers/IProjectPathResolver.cs index e40c6c22b..5d2ff6da1 100644 --- a/src/SmiServices/Microservices/CohortExtractor/ProjectPathResolvers/IProjectPathResolver.cs +++ b/src/SmiServices/Microservices/CohortExtractor/ProjectPathResolvers/IProjectPathResolver.cs @@ -1,19 +1,18 @@ using SmiServices.Common.Messages.Extraction; using SmiServices.Microservices.CohortExtractor.RequestFulfillers; -namespace SmiServices.Microservices.CohortExtractor.ProjectPathResolvers +namespace SmiServices.Microservices.CohortExtractor.ProjectPathResolvers; + +public interface IProjectPathResolver { - public interface IProjectPathResolver - { - /// - /// Get the output path, build from the original file path, plus any separating - /// directories (such as by SeriesInstanceUID) - /// - /// The file path (and UIDs) of the original dcm file in the identifiable repository. Some portions - /// of the may be null e.g. 
StudyInstanceUID if the corresponding column does not appear in the - /// extraction table which this result was fetched from - /// Contains information about the original request e.g. project number - /// - string GetOutputPath(QueryToExecuteResult result, ExtractionRequestMessage request); - } + /// + /// Get the output path, build from the original file path, plus any separating + /// directories (such as by SeriesInstanceUID) + /// + /// The file path (and UIDs) of the original dcm file in the identifiable repository. Some portions + /// of the may be null e.g. StudyInstanceUID if the corresponding column does not appear in the + /// extraction table which this result was fetched from + /// Contains information about the original request e.g. project number + /// + string GetOutputPath(QueryToExecuteResult result, ExtractionRequestMessage request); } diff --git a/src/SmiServices/Microservices/CohortExtractor/ProjectPathResolvers/NoSuffixProjectPathResolver.cs b/src/SmiServices/Microservices/CohortExtractor/ProjectPathResolvers/NoSuffixProjectPathResolver.cs index 01b16e948..078337951 100644 --- a/src/SmiServices/Microservices/CohortExtractor/ProjectPathResolvers/NoSuffixProjectPathResolver.cs +++ b/src/SmiServices/Microservices/CohortExtractor/ProjectPathResolvers/NoSuffixProjectPathResolver.cs @@ -2,18 +2,17 @@ using SmiServices.Microservices.CohortExtractor.RequestFulfillers; using System.IO.Abstractions; -namespace SmiServices.Microservices.CohortExtractor.ProjectPathResolvers +namespace SmiServices.Microservices.CohortExtractor.ProjectPathResolvers; + +/// +/// Acts like but with no "-an" component to indicate files have been anonymised. 
In most cases this results in an identical filename to the source file but can include the addition of a .dcm extension where it is missing +/// +public class NoSuffixProjectPathResolver : StudySeriesOriginalFilenameProjectPathResolver { - /// - /// Acts like but with no "-an" component to indicate files have been anonymised. In most cases this results in an identical filename to the source file but can include the addition of a .dcm extension where it is missing - /// - public class NoSuffixProjectPathResolver : StudySeriesOriginalFilenameProjectPathResolver - { - public NoSuffixProjectPathResolver(IFileSystem fileSystem) : base(fileSystem) { } + public NoSuffixProjectPathResolver(IFileSystem fileSystem) : base(fileSystem) { } - public override string GetOutputPath(QueryToExecuteResult result, ExtractionRequestMessage message) - { - return base.GetOutputPath(result, message).Replace("-an", ""); - } + public override string GetOutputPath(QueryToExecuteResult result, ExtractionRequestMessage message) + { + return base.GetOutputPath(result, message).Replace("-an", ""); } } diff --git a/src/SmiServices/Microservices/CohortExtractor/ProjectPathResolvers/StudySeriesOriginalFilenameProjectPathResolver.cs b/src/SmiServices/Microservices/CohortExtractor/ProjectPathResolvers/StudySeriesOriginalFilenameProjectPathResolver.cs index 67dcb3ed7..c0a529e4d 100644 --- a/src/SmiServices/Microservices/CohortExtractor/ProjectPathResolvers/StudySeriesOriginalFilenameProjectPathResolver.cs +++ b/src/SmiServices/Microservices/CohortExtractor/ProjectPathResolvers/StudySeriesOriginalFilenameProjectPathResolver.cs @@ -3,51 +3,50 @@ using System; using System.IO.Abstractions; -namespace SmiServices.Microservices.CohortExtractor.ProjectPathResolvers +namespace SmiServices.Microservices.CohortExtractor.ProjectPathResolvers; + +public class StudySeriesOriginalFilenameProjectPathResolver : IProjectPathResolver { - public class StudySeriesOriginalFilenameProjectPathResolver : 
IProjectPathResolver + private static readonly string[] _replaceableExtensions = [".dcm", ".dicom"]; + + private readonly IFileSystem _fileSystem; + + public StudySeriesOriginalFilenameProjectPathResolver(IFileSystem fileSystem) + { + _fileSystem = fileSystem; + } + + /// + /// Returns the output path for the anonymised file, relative to the ExtractionDirectory + /// + /// + /// + /// + public virtual string GetOutputPath(QueryToExecuteResult result, ExtractionRequestMessage message) { - private static readonly string[] _replaceableExtensions = [".dcm", ".dicom"]; - - private readonly IFileSystem _fileSystem; - - public StudySeriesOriginalFilenameProjectPathResolver(IFileSystem fileSystem) - { - _fileSystem = fileSystem; - } - - /// - /// Returns the output path for the anonymised file, relative to the ExtractionDirectory - /// - /// - /// - /// - public virtual string GetOutputPath(QueryToExecuteResult result, ExtractionRequestMessage message) - { - string extToUse = message.IsIdentifiableExtraction ? ProjectPathResolverConstants.IDENT_EXT : ProjectPathResolverConstants.ANON_EXT; - - // The extension of the input DICOM file can be anything (or nothing), but here we try to standardise the output file name to have the required extension - string fileName = _fileSystem.Path.GetFileName(result.FilePathValue); - if (string.IsNullOrWhiteSpace(fileName)) - throw new ArgumentNullException(nameof(result)); - - var replaced = false; - foreach (string ext in _replaceableExtensions) - if (fileName.EndsWith(ext)) - { - fileName = fileName.Replace(ext, extToUse); - replaced = true; - break; - } - - if (!replaced) - fileName += extToUse; - - return _fileSystem.Path.Combine( - result.StudyTagValue, - result.SeriesTagValue, - fileName - ); - } + string extToUse = message.IsIdentifiableExtraction ? 
ProjectPathResolverConstants.IDENT_EXT : ProjectPathResolverConstants.ANON_EXT; + + // The extension of the input DICOM file can be anything (or nothing), but here we try to standardise the output file name to have the required extension + string fileName = _fileSystem.Path.GetFileName(result.FilePathValue); + if (string.IsNullOrWhiteSpace(fileName)) + throw new ArgumentNullException(nameof(result)); + + var replaced = false; + foreach (string ext in _replaceableExtensions) + if (fileName.EndsWith(ext)) + { + fileName = fileName.Replace(ext, extToUse); + replaced = true; + break; + } + + if (!replaced) + fileName += extToUse; + + return _fileSystem.Path.Combine( + result.StudyTagValue, + result.SeriesTagValue, + fileName + ); } } diff --git a/src/SmiServices/Microservices/CohortExtractor/RequestFulfillers/BlacklistRejector.cs b/src/SmiServices/Microservices/CohortExtractor/RequestFulfillers/BlacklistRejector.cs index 1576d25be..1a7aca184 100644 --- a/src/SmiServices/Microservices/CohortExtractor/RequestFulfillers/BlacklistRejector.cs +++ b/src/SmiServices/Microservices/CohortExtractor/RequestFulfillers/BlacklistRejector.cs @@ -9,146 +9,145 @@ using System.Diagnostics.CodeAnalysis; using System.Linq; -namespace SmiServices.Microservices.CohortExtractor.RequestFulfillers +namespace SmiServices.Microservices.CohortExtractor.RequestFulfillers; + +public class BlacklistRejector : IRejector { - public class BlacklistRejector : IRejector + private readonly ICatalogue _catalogue; + private QueryToExecuteColumnSet? _columnSet; + private SpontaneouslyInventedFilter? _studyFilter; + private SpontaneouslyInventedFilter? _seriesFilter; + private SpontaneouslyInventedFilter? _instanceFilter; + private QueryBuilder? _queryBuilder; + private DiscoveredServer? _server; + + public BlacklistRejector(ICatalogue catalogue) { - private readonly ICatalogue _catalogue; - private QueryToExecuteColumnSet? _columnSet; - private SpontaneouslyInventedFilter? 
_studyFilter; - private SpontaneouslyInventedFilter? _seriesFilter; - private SpontaneouslyInventedFilter? _instanceFilter; - private QueryBuilder? _queryBuilder; - private DiscoveredServer? _server; - - public BlacklistRejector(ICatalogue catalogue) - { - _catalogue = catalogue; - - Initialize(); - } + _catalogue = catalogue; - private void Initialize() - { - //Figure out which UID columns exist in the Catalogue, do not require file path to be in Catalogue - _columnSet = QueryToExecuteColumnSet.Create(_catalogue, false); + Initialize(); + } - //Tells us the DBMS type - var syntax = _catalogue.GetQuerySyntaxHelper(); + private void Initialize() + { + //Figure out which UID columns exist in the Catalogue, do not require file path to be in Catalogue + _columnSet = QueryToExecuteColumnSet.Create(_catalogue, false); - //For storing the OR container and filter(s) - var memory = new MemoryCatalogueRepository(); + //Tells us the DBMS type + var syntax = _catalogue.GetQuerySyntaxHelper(); - //builds SQL we will run in lookup stage - _queryBuilder = new QueryBuilder(null, null) - { - //all we care about is if the uid appears if it does then we are rejecting it - TopX = 1 - }; + //For storing the OR container and filter(s) + var memory = new MemoryCatalogueRepository(); - //Filter is OR i.e. StudyInstanceUID = @StudyInstanceUID OR SeriesInstanceUID = @SeriesInstanceUID - var container = _queryBuilder.RootFilterContainer = new SpontaneouslyInventedFilterContainer(memory, null, null, FilterContainerOperation.OR); + //builds SQL we will run in lookup stage + _queryBuilder = new QueryBuilder(null, null) + { + //all we care about is if the uid appears if it does then we are rejecting it + TopX = 1 + }; - //Build SELECT and WHERE bits of the query - if (_columnSet?.StudyTagColumn != null) - { - _queryBuilder.AddColumn(_columnSet.StudyTagColumn); + //Filter is OR i.e. 
StudyInstanceUID = @StudyInstanceUID OR SeriesInstanceUID = @SeriesInstanceUID + var container = _queryBuilder.RootFilterContainer = new SpontaneouslyInventedFilterContainer(memory, null, null, FilterContainerOperation.OR); - string whereSql = - $"{_columnSet.StudyTagColumn.SelectSQL} = {syntax.ParameterSymbol}{QueryToExecuteColumnSet.DefaultStudyIdColumnName}"; + //Build SELECT and WHERE bits of the query + if (_columnSet?.StudyTagColumn != null) + { + _queryBuilder.AddColumn(_columnSet.StudyTagColumn); - _studyFilter = new SpontaneouslyInventedFilter(memory, container, whereSql, "Study UID Filter", "", null); - container.AddChild(_studyFilter); - } + string whereSql = + $"{_columnSet.StudyTagColumn.SelectSQL} = {syntax.ParameterSymbol}{QueryToExecuteColumnSet.DefaultStudyIdColumnName}"; + _studyFilter = new SpontaneouslyInventedFilter(memory, container, whereSql, "Study UID Filter", "", null); + container.AddChild(_studyFilter); + } - if (_columnSet?.SeriesTagColumn != null) - { - _queryBuilder.AddColumn(_columnSet.SeriesTagColumn); - string whereSql = - $"{_columnSet.SeriesTagColumn.SelectSQL} = {syntax.ParameterSymbol}{QueryToExecuteColumnSet.DefaultSeriesIdColumnName}"; + if (_columnSet?.SeriesTagColumn != null) + { + _queryBuilder.AddColumn(_columnSet.SeriesTagColumn); - _seriesFilter = new SpontaneouslyInventedFilter(memory, container, whereSql, "Series UID Filter", "", null); - container.AddChild(_seriesFilter); - } + string whereSql = + $"{_columnSet.SeriesTagColumn.SelectSQL} = {syntax.ParameterSymbol}{QueryToExecuteColumnSet.DefaultSeriesIdColumnName}"; - if (_columnSet?.InstanceTagColumn != null) - { - _queryBuilder.AddColumn(_columnSet.InstanceTagColumn); + _seriesFilter = new SpontaneouslyInventedFilter(memory, container, whereSql, "Series UID Filter", "", null); + container.AddChild(_seriesFilter); + } - string whereSql = - $"{_columnSet.InstanceTagColumn.SelectSQL} = {syntax.ParameterSymbol}{QueryToExecuteColumnSet.DefaultInstanceIdColumnName}"; + 
if (_columnSet?.InstanceTagColumn != null) + { + _queryBuilder.AddColumn(_columnSet.InstanceTagColumn); - _instanceFilter = new SpontaneouslyInventedFilter(memory, container, whereSql, "Instance UID Filter", "", null); - container.AddChild(_instanceFilter); - } + string whereSql = + $"{_columnSet.InstanceTagColumn.SelectSQL} = {syntax.ParameterSymbol}{QueryToExecuteColumnSet.DefaultInstanceIdColumnName}"; - // Make sure the query builder looks valid - if (_queryBuilder.SelectColumns.Count == 0) - throw new NotSupportedException($"Blacklist Catalogue {_catalogue} (ID={_catalogue.ID}) did not have any Core ExtractionInformation columns corresponding to any of the image UID tags (e.g. StudyInstanceUID, SeriesInstanceUID, SOPInstanceUID)."); + _instanceFilter = new SpontaneouslyInventedFilter(memory, container, whereSql, "Instance UID Filter", "", null); + container.AddChild(_instanceFilter); + } - try - { - // make sure we can connect to the server - _server = _catalogue.GetDistinctLiveDatabaseServer(DataAccessContext.DataExport, true); - _server.TestConnection(); - } - catch (Exception e) - { - throw new Exception($"Failed to test connection for Catalogue {_catalogue}", e); - } + // Make sure the query builder looks valid + if (_queryBuilder.SelectColumns.Count == 0) + throw new NotSupportedException($"Blacklist Catalogue {_catalogue} (ID={_catalogue.ID}) did not have any Core ExtractionInformation columns corresponding to any of the image UID tags (e.g. StudyInstanceUID, SeriesInstanceUID, SOPInstanceUID)."); - // run a test lookup query against the remote database - DoLookup("test1", "test2", "test3"); + try + { + // make sure we can connect to the server + _server = _catalogue.GetDistinctLiveDatabaseServer(DataAccessContext.DataExport, true); + _server.TestConnection(); } - - /// - /// Looks up data stored in the Catalogue with a query matching on any of the provided uids. All values must be supplied if the Catalogue has a column of the corresponding name (i.e. 
if Catalogue has SeriesInstanceUID you must supply ) - /// - /// - /// - /// - /// - public bool DoLookup(string studyuid, string seriesuid, string imageuid) + catch (Exception e) { - string sql = _queryBuilder!.SQL; + throw new Exception($"Failed to test connection for Catalogue {_catalogue}", e); + } - using var con = _server!.GetConnection(); - con.Open(); - using var cmd = _server.GetCommand(sql, con); - //Add the current row UIDs to the parameters of the command - if (_studyFilter != null) - _server.AddParameterWithValueToCommand(QueryToExecuteColumnSet.DefaultStudyIdColumnName, cmd, studyuid); + // run a test lookup query against the remote database + DoLookup("test1", "test2", "test3"); + } - if (_seriesFilter != null) - _server.AddParameterWithValueToCommand(QueryToExecuteColumnSet.DefaultSeriesIdColumnName, cmd, seriesuid); + /// + /// Looks up data stored in the Catalogue with a query matching on any of the provided uids. All values must be supplied if the Catalogue has a column of the corresponding name (i.e. 
if Catalogue has SeriesInstanceUID you must supply ) + /// + /// + /// + /// + /// + public bool DoLookup(string studyuid, string seriesuid, string imageuid) + { + string sql = _queryBuilder!.SQL; - if (_instanceFilter != null) - _server.AddParameterWithValueToCommand(QueryToExecuteColumnSet.DefaultInstanceIdColumnName, cmd, imageuid); + using var con = _server!.GetConnection(); + con.Open(); + using var cmd = _server.GetCommand(sql, con); + //Add the current row UIDs to the parameters of the command + if (_studyFilter != null) + _server.AddParameterWithValueToCommand(QueryToExecuteColumnSet.DefaultStudyIdColumnName, cmd, studyuid); - using var r = cmd.ExecuteReader(); - //if we can read a record then we have an entry in the blacklist - return r.Read(); - } + if (_seriesFilter != null) + _server.AddParameterWithValueToCommand(QueryToExecuteColumnSet.DefaultSeriesIdColumnName, cmd, seriesuid); - /// - /// Rejects the if it appears in the blacklisting Catalogue - /// - /// - /// - /// - public bool Reject(IDataRecord row, [NotNullWhen(true)] out string? reason) - { - //row is bad if the query matches any records (in the blacklist) - var bad = DoLookup( - row[QueryToExecuteColumnSet.DefaultStudyIdColumnName].ToString()!, - row[QueryToExecuteColumnSet.DefaultSeriesIdColumnName].ToString()!, - row[QueryToExecuteColumnSet.DefaultInstanceIdColumnName].ToString()! - ); - - reason = bad ? $"Blacklisted in {_catalogue.Name}" : null; - return bad; - } + if (_instanceFilter != null) + _server.AddParameterWithValueToCommand(QueryToExecuteColumnSet.DefaultInstanceIdColumnName, cmd, imageuid); + + using var r = cmd.ExecuteReader(); + //if we can read a record then we have an entry in the blacklist + return r.Read(); + } + + /// + /// Rejects the if it appears in the blacklisting Catalogue + /// + /// + /// + /// + public bool Reject(IDataRecord row, [NotNullWhen(true)] out string? 
reason) + { + //row is bad if the query matches any records (in the blacklist) + var bad = DoLookup( + row[QueryToExecuteColumnSet.DefaultStudyIdColumnName].ToString()!, + row[QueryToExecuteColumnSet.DefaultSeriesIdColumnName].ToString()!, + row[QueryToExecuteColumnSet.DefaultInstanceIdColumnName].ToString()! + ); + + reason = bad ? $"Blacklisted in {_catalogue.Name}" : null; + return bad; } } diff --git a/src/SmiServices/Microservices/CohortExtractor/RequestFulfillers/ColumnInfoValuesRejector.cs b/src/SmiServices/Microservices/CohortExtractor/RequestFulfillers/ColumnInfoValuesRejector.cs index fddece7c6..180714362 100644 --- a/src/SmiServices/Microservices/CohortExtractor/RequestFulfillers/ColumnInfoValuesRejector.cs +++ b/src/SmiServices/Microservices/CohortExtractor/RequestFulfillers/ColumnInfoValuesRejector.cs @@ -6,43 +6,42 @@ using System; using System.Collections.Generic; -namespace SmiServices.Microservices.CohortExtractor.RequestFulfillers +namespace SmiServices.Microservices.CohortExtractor.RequestFulfillers; + + +public class ColumnInfoValuesRejector : ColumnValuesRejector { - public class ColumnInfoValuesRejector : ColumnValuesRejector + public ColumnInfoValuesRejector(ColumnInfo columnInfo) : base(columnInfo.GetRuntimeName(), FetchTable(columnInfo)) { - public ColumnInfoValuesRejector(ColumnInfo columnInfo) : base(columnInfo.GetRuntimeName(), FetchTable(columnInfo)) - { - - } + } - private static HashSet FetchTable(ColumnInfo columnInfo) - { - var logger = LogManager.GetCurrentClassLogger(); - HashSet toReturn = new(StringComparer.CurrentCultureIgnoreCase); + private static HashSet FetchTable(ColumnInfo columnInfo) + { + var logger = LogManager.GetCurrentClassLogger(); + HashSet toReturn = new(StringComparer.CurrentCultureIgnoreCase); - var qb = new QueryBuilder(limitationSQL: null, hashingAlgorithm: null); - qb.AddColumn(new ColumnInfoToIColumn(new MemoryRepository(), columnInfo)); + var qb = new QueryBuilder(limitationSQL: null, hashingAlgorithm: 
null); + qb.AddColumn(new ColumnInfoToIColumn(new MemoryRepository(), columnInfo)); - var sql = qb.SQL; - logger.Info($"Running rejection-id fetch SQL:{sql}"); + var sql = qb.SQL; + logger.Info($"Running rejection-id fetch SQL:{sql}"); - var server = columnInfo.TableInfo.Discover(DataAccessContext.DataExport); + var server = columnInfo.TableInfo.Discover(DataAccessContext.DataExport); - using (var con = server.Database.Server.GetConnection()) - { - con.Open(); - var cmd = server.GetCommand(sql, con); - var reader = cmd.ExecuteReader(); + using (var con = server.Database.Server.GetConnection()) + { + con.Open(); + var cmd = server.GetCommand(sql, con); + var reader = cmd.ExecuteReader(); - while (reader.Read()) - toReturn.Add(reader[0].ToString()!); - } + while (reader.Read()) + toReturn.Add(reader[0].ToString()!); + } - logger.Info($"Found {toReturn.Count} identifiers in the reject list"); + logger.Info($"Found {toReturn.Count} identifiers in the reject list"); - return toReturn; - } + return toReturn; } } diff --git a/src/SmiServices/Microservices/CohortExtractor/RequestFulfillers/ColumnValuesRejector.cs b/src/SmiServices/Microservices/CohortExtractor/RequestFulfillers/ColumnValuesRejector.cs index 5726271c9..ae5b2658d 100644 --- a/src/SmiServices/Microservices/CohortExtractor/RequestFulfillers/ColumnValuesRejector.cs +++ b/src/SmiServices/Microservices/CohortExtractor/RequestFulfillers/ColumnValuesRejector.cs @@ -3,48 +3,47 @@ using System.Data; using System.Diagnostics.CodeAnalysis; -namespace SmiServices.Microservices.CohortExtractor.RequestFulfillers +namespace SmiServices.Microservices.CohortExtractor.RequestFulfillers; + +public class ColumnValuesRejector : IRejector { - public class ColumnValuesRejector : IRejector + private readonly HashSet _rejectPatients; + private readonly string _columnToCheck; + + public ColumnValuesRejector(string column, HashSet values) { - private readonly HashSet _rejectPatients; - private readonly string _columnToCheck; + 
_columnToCheck = column; + _rejectPatients = values; + } - public ColumnValuesRejector(string column, HashSet values) - { - _columnToCheck = column; - _rejectPatients = values; - } + public bool Reject(IDataRecord row, [NotNullWhen(true)] out string? reason) + { + string patientId; - public bool Reject(IDataRecord row, [NotNullWhen(true)] out string? reason) + try { - string patientId; - - try - { - // The patient ID is null - if (row[_columnToCheck] == DBNull.Value) - { - reason = null; - return false; - } - - patientId = (string)row[_columnToCheck]; - } - catch (IndexOutOfRangeException ex) + // The patient ID is null + if (row[_columnToCheck] == DBNull.Value) { - throw new IndexOutOfRangeException($"An error occurred determining the identifier of the record(s) being extracted. Expected a column called {_columnToCheck}", ex); + reason = null; + return false; } - if (_rejectPatients.Contains(patientId)) - { - reason = "Patient or Identifier was in reject list"; - return true; - } + patientId = (string)row[_columnToCheck]; + } + catch (IndexOutOfRangeException ex) + { + throw new IndexOutOfRangeException($"An error occurred determining the identifier of the record(s) being extracted. 
Expected a column called {_columnToCheck}", ex); + } - reason = null; - return false; + if (_rejectPatients.Contains(patientId)) + { + reason = "Patient or Identifier was in reject list"; + return true; } + reason = null; + return false; } + } diff --git a/src/SmiServices/Microservices/CohortExtractor/RequestFulfillers/Dynamic/DynamicRejector.cs b/src/SmiServices/Microservices/CohortExtractor/RequestFulfillers/Dynamic/DynamicRejector.cs index 293f5b2c7..afe64220b 100644 --- a/src/SmiServices/Microservices/CohortExtractor/RequestFulfillers/Dynamic/DynamicRejector.cs +++ b/src/SmiServices/Microservices/CohortExtractor/RequestFulfillers/Dynamic/DynamicRejector.cs @@ -5,62 +5,61 @@ using System.Diagnostics.CodeAnalysis; using System.IO.Abstractions; -namespace SmiServices.Microservices.CohortExtractor.RequestFulfillers.Dynamic +namespace SmiServices.Microservices.CohortExtractor.RequestFulfillers.Dynamic; + +public class DynamicRejector : IRejector { - public class DynamicRejector : IRejector - { - private readonly Script _script; + private readonly Script _script; #pragma warning disable CA2211 // Non-constant fields should not be visible - public static string DefaultDynamicRulesPath = "./DynamicRules.txt"; + public static string DefaultDynamicRulesPath = "./DynamicRules.txt"; #pragma warning restore CA2211 - public DynamicRejector() - : this(null) { } + public DynamicRejector() + : this(null) { } - public DynamicRejector(string? dynamicRulesPath, IFileSystem? fileSystem = null) - { - dynamicRulesPath ??= DefaultDynamicRulesPath; - fileSystem ??= new FileSystem(); + public DynamicRejector(string? dynamicRulesPath, IFileSystem? 
fileSystem = null) + { + dynamicRulesPath ??= DefaultDynamicRulesPath; + fileSystem ??= new FileSystem(); - if (!fileSystem.File.Exists(dynamicRulesPath)) - throw new System.IO.FileNotFoundException($"Could not find rules file '{dynamicRulesPath}'"); + if (!fileSystem.File.Exists(dynamicRulesPath)) + throw new System.IO.FileNotFoundException($"Could not find rules file '{dynamicRulesPath}'"); - var dynamicRules = fileSystem.File.ReadAllText(dynamicRulesPath); + var dynamicRules = fileSystem.File.ReadAllText(dynamicRulesPath); - if (string.IsNullOrWhiteSpace(dynamicRules)) - throw new ArgumentOutOfRangeException(nameof(dynamicRulesPath), "Rules file is empty"); + if (string.IsNullOrWhiteSpace(dynamicRules)) + throw new ArgumentOutOfRangeException(nameof(dynamicRulesPath), "Rules file is empty"); - _script = CSharpScript.Create( - dynamicRules, - ScriptOptions.Default.WithReferences(typeof(Convert).Assembly).WithWarningLevel(0), - typeof(Payload) - ); - } + _script = CSharpScript.Create( + dynamicRules, + ScriptOptions.Default.WithReferences(typeof(Convert).Assembly).WithWarningLevel(0), + typeof(Payload) + ); + } - public class Payload + public class Payload + { + public Payload(IDataRecord dbDataReader) { - public Payload(IDataRecord dbDataReader) - { - row = dbDataReader; - } + row = dbDataReader; + } #pragma warning disable IDE1006 // Naming Styles - public IDataRecord? row { get; set; } + public IDataRecord? row { get; set; } #pragma warning restore IDE1006 // Naming Styles - } + } - /// - public bool Reject(IDataRecord row, [NotNullWhen(true)] out string? reason) - { - var result = _script.RunAsync(globals: new Payload(row)).Result; + /// + public bool Reject(IDataRecord row, [NotNullWhen(true)] out string? 
reason) + { + var result = _script.RunAsync(globals: new Payload(row)).Result; - if (result.Exception != null) - throw result.Exception; + if (result.Exception != null) + throw result.Exception; - reason = result.ReturnValue; + reason = result.ReturnValue; - return !string.IsNullOrWhiteSpace(reason); - } + return !string.IsNullOrWhiteSpace(reason); } } diff --git a/src/SmiServices/Microservices/CohortExtractor/RequestFulfillers/FromCataloguesExtractionRequestFulfiller.cs b/src/SmiServices/Microservices/CohortExtractor/RequestFulfillers/FromCataloguesExtractionRequestFulfiller.cs index 8b030a92a..4330cca9b 100644 --- a/src/SmiServices/Microservices/CohortExtractor/RequestFulfillers/FromCataloguesExtractionRequestFulfiller.cs +++ b/src/SmiServices/Microservices/CohortExtractor/RequestFulfillers/FromCataloguesExtractionRequestFulfiller.cs @@ -7,118 +7,117 @@ using System.Linq; using System.Text.RegularExpressions; -namespace SmiServices.Microservices.CohortExtractor.RequestFulfillers +namespace SmiServices.Microservices.CohortExtractor.RequestFulfillers; + +public class FromCataloguesExtractionRequestFulfiller : IExtractionRequestFulfiller { - public class FromCataloguesExtractionRequestFulfiller : IExtractionRequestFulfiller + private readonly QueryToExecuteColumnSet[] _catalogues; + private readonly ILogger _logger; + + public List Rejectors { get; set; } = []; + public Dictionary ModalitySpecificRejectors { get; set; } = []; + + private readonly Regex _defaultModalityRoutingRegex = new("^([A-Z]+)_.*$", RegexOptions.Compiled); + private readonly Regex _modalityRoutingRegex; + + /// + /// + /// + /// + /// Controls how modalities are matched to Catalogues. Must contain a single capture group which + /// returns a modality code (e.g. CT) when applies to a Catalogue name. E.g. ^([A-Z]+)_.*$ would result + /// in Modalities being routed based on the start of the table name e.g. 
CT => CT_MyTable and MR=> MR_MyTable + /// + /// + public FromCataloguesExtractionRequestFulfiller(ICatalogue[] cataloguesToUseForImageLookup, Regex? modalityRoutingRegex = null) { - private readonly QueryToExecuteColumnSet[] _catalogues; - private readonly ILogger _logger; - - public List Rejectors { get; set; } = []; - public Dictionary ModalitySpecificRejectors { get; set; } = []; - - private readonly Regex _defaultModalityRoutingRegex = new("^([A-Z]+)_.*$", RegexOptions.Compiled); - private readonly Regex _modalityRoutingRegex; - - /// - /// - /// - /// - /// Controls how modalities are matched to Catalogues. Must contain a single capture group which - /// returns a modality code (e.g. CT) when applies to a Catalogue name. E.g. ^([A-Z]+)_.*$ would result - /// in Modalities being routed based on the start of the table name e.g. CT => CT_MyTable and MR=> MR_MyTable - /// - /// - public FromCataloguesExtractionRequestFulfiller(ICatalogue[] cataloguesToUseForImageLookup, Regex? modalityRoutingRegex = null) - { - _logger = LogManager.GetCurrentClassLogger(); + _logger = LogManager.GetCurrentClassLogger(); - _logger.Debug($"Preparing to filter {cataloguesToUseForImageLookup.Length} Catalogues to look for compatible ones"); + _logger.Debug($"Preparing to filter {cataloguesToUseForImageLookup.Length} Catalogues to look for compatible ones"); - _catalogues = cataloguesToUseForImageLookup.OrderBy(c => c.ID).Select(QueryToExecuteColumnSet.Create).Where(s => s != null).ToArray()!; + _catalogues = cataloguesToUseForImageLookup.OrderBy(c => c.ID).Select(QueryToExecuteColumnSet.Create).Where(s => s != null).ToArray()!; - _logger.Debug($"Found {_catalogues.Length} Catalogues matching filter criteria"); + _logger.Debug($"Found {_catalogues.Length} Catalogues matching filter criteria"); - if (_catalogues.Length == 0) - throw new ArgumentOutOfRangeException(nameof(cataloguesToUseForImageLookup), "There are no compatible Catalogues in the repository (See QueryToExecuteColumnSet 
for required columns)"); + if (_catalogues.Length == 0) + throw new ArgumentOutOfRangeException(nameof(cataloguesToUseForImageLookup), "There are no compatible Catalogues in the repository (See QueryToExecuteColumnSet for required columns)"); - _modalityRoutingRegex = modalityRoutingRegex ?? _defaultModalityRoutingRegex; - if (_modalityRoutingRegex.GetGroupNumbers().Length != 2) - throw new ArgumentOutOfRangeException(nameof(modalityRoutingRegex), $"Must have exactly one non-default capture group"); - } + _modalityRoutingRegex = modalityRoutingRegex ?? _defaultModalityRoutingRegex; + if (_modalityRoutingRegex.GetGroupNumbers().Length != 2) + throw new ArgumentOutOfRangeException(nameof(modalityRoutingRegex), $"Must have exactly one non-default capture group"); + } + + public IEnumerable GetAllMatchingFiles(ExtractionRequestMessage message) + { + var queries = new List(); + var rejectors = GetRejectorsFor(message); - public IEnumerable GetAllMatchingFiles(ExtractionRequestMessage message) + foreach (var c in _catalogues.Where(x => x.Contains(message.KeyTag))) { - var queries = new List(); - var rejectors = GetRejectorsFor(message); + var match = _modalityRoutingRegex.Match(c.Catalogue.Name); + if (!match.Success) + continue; - foreach (var c in _catalogues.Where(x => x.Contains(message.KeyTag))) - { - var match = _modalityRoutingRegex.Match(c.Catalogue.Name); - if (!match.Success) - continue; + // NOTE: Match will always have two groups as we check the regex in the constructor + if (match.Groups[1].Value != message.Modality) + continue; - // NOTE: Match will always have two groups as we check the regex in the constructor - if (match.Groups[1].Value != message.Modality) - continue; + var query = new QueryToExecute(c, message.KeyTag, rejectors); + queries.Add(query); + } - var query = new QueryToExecute(c, message.KeyTag, rejectors); - queries.Add(query); - } + _logger.Debug($"Found {queries.Count} Catalogues which support extracting based on '{message.KeyTag}'"); - 
_logger.Debug($"Found {queries.Count} Catalogues which support extracting based on '{message.KeyTag}'"); + if (queries.Count == 0) + throw new Exception($"Couldn't find any compatible Catalogues to run extraction queries against for query {message}"); - if (queries.Count == 0) - throw new Exception($"Couldn't find any compatible Catalogues to run extraction queries against for query {message}"); + foreach (string valueToLookup in message.ExtractionIdentifiers) + { + var results = new ExtractImageCollection(valueToLookup); - foreach (string valueToLookup in message.ExtractionIdentifiers) + foreach (QueryToExecute query in queries) { - var results = new ExtractImageCollection(valueToLookup); - - foreach (QueryToExecute query in queries) + foreach (QueryToExecuteResult result in query.Execute(valueToLookup)) { - foreach (QueryToExecuteResult result in query.Execute(valueToLookup)) - { - var seriesTagValue = result.SeriesTagValue - ?? throw new Exception(nameof(result.SeriesTagValue)); + var seriesTagValue = result.SeriesTagValue + ?? 
throw new Exception(nameof(result.SeriesTagValue)); - if (!results.ContainsKey(seriesTagValue)) - results.Add(seriesTagValue, []); + if (!results.ContainsKey(seriesTagValue)) + results.Add(seriesTagValue, []); - results[seriesTagValue].Add(result); - } + results[seriesTagValue].Add(result); } - - yield return results; } + + yield return results; } + } - private IEnumerable GetRejectorsFor(ExtractionRequestMessage message) + private IEnumerable GetRejectorsFor(ExtractionRequestMessage message) + { + if (message.IsNoFilterExtraction) + return []; + + var applicableRejectors = + ModalitySpecificRejectors + .Where( + // Do the modalities covered by this rejector apply to the images returned by the query + k => k.Key.GetModalities().Any(m => string.Equals(m, message.Modality, StringComparison.CurrentCultureIgnoreCase)) + ) + .ToArray(); + + // if modality specific rejectors override regular rejectors + if (applicableRejectors.Any(r => r.Key.Overrides)) { - if (message.IsNoFilterExtraction) - return []; - - var applicableRejectors = - ModalitySpecificRejectors - .Where( - // Do the modalities covered by this rejector apply to the images returned by the query - k => k.Key.GetModalities().Any(m => string.Equals(m, message.Modality, StringComparison.CurrentCultureIgnoreCase)) - ) - .ToArray(); - - // if modality specific rejectors override regular rejectors - if (applicableRejectors.Any(r => r.Key.Overrides)) - { - // they had better all override or none of them! - if (!applicableRejectors.All(r => r.Key.Overrides)) - throw new Exception($"You cannot mix Overriding and non Overriding ModalitySpecificRejectors. Bad Modality was '{message.Modality}'"); - - // yes we have custom rejection rules for this modality - return applicableRejectors.Select(r => r.Value); - } + // they had better all override or none of them! + if (!applicableRejectors.All(r => r.Key.Overrides)) + throw new Exception($"You cannot mix Overriding and non Overriding ModalitySpecificRejectors. 
Bad Modality was '{message.Modality}'"); - // The modality specific rejectors run in addition to the basic Rejectors so serve both - return applicableRejectors.Select(r => r.Value).Union(Rejectors); + // yes we have custom rejection rules for this modality + return applicableRejectors.Select(r => r.Value); } + + // The modality specific rejectors run in addition to the basic Rejectors so serve both + return applicableRejectors.Select(r => r.Value).Union(Rejectors); } } diff --git a/src/SmiServices/Microservices/CohortExtractor/RequestFulfillers/IExtractionRequestFulfiller.cs b/src/SmiServices/Microservices/CohortExtractor/RequestFulfillers/IExtractionRequestFulfiller.cs index 6e13407f6..d0f2aa786 100644 --- a/src/SmiServices/Microservices/CohortExtractor/RequestFulfillers/IExtractionRequestFulfiller.cs +++ b/src/SmiServices/Microservices/CohortExtractor/RequestFulfillers/IExtractionRequestFulfiller.cs @@ -3,27 +3,26 @@ using System.Collections.Generic; using System.Text.RegularExpressions; -namespace SmiServices.Microservices.CohortExtractor.RequestFulfillers +namespace SmiServices.Microservices.CohortExtractor.RequestFulfillers; + +public interface IExtractionRequestFulfiller { - public interface IExtractionRequestFulfiller - { - /// - /// When implemented in a derived class will connect to data sources and return all dicom files on disk which - /// correspond to the identifiers in the . - /// - /// The request you want answered (contains the list of UIDs to extract) - /// - IEnumerable GetAllMatchingFiles(ExtractionRequestMessage message); + /// + /// When implemented in a derived class will connect to data sources and return all dicom files on disk which + /// correspond to the identifiers in the . 
+ /// + /// The request you want answered (contains the list of UIDs to extract) + /// + IEnumerable GetAllMatchingFiles(ExtractionRequestMessage message); - /// - /// Controls what records that are fetched back should be reported as non extractable (including the reason why) - /// - List Rejectors { get; set; } + /// + /// Controls what records that are fetched back should be reported as non extractable (including the reason why) + /// + List Rejectors { get; set; } - /// - /// Collection of that are to be used only on specific Modality(s) either instead of or - /// in addition to the basic - /// - Dictionary ModalitySpecificRejectors { get; set; } - } + /// + /// Collection of that are to be used only on specific Modality(s) either instead of or + /// in addition to the basic + /// + Dictionary ModalitySpecificRejectors { get; set; } } diff --git a/src/SmiServices/Microservices/CohortExtractor/RequestFulfillers/IRejector.cs b/src/SmiServices/Microservices/CohortExtractor/RequestFulfillers/IRejector.cs index c80478c63..74c3f0671 100644 --- a/src/SmiServices/Microservices/CohortExtractor/RequestFulfillers/IRejector.cs +++ b/src/SmiServices/Microservices/CohortExtractor/RequestFulfillers/IRejector.cs @@ -1,16 +1,15 @@ using System.Data; using System.Diagnostics.CodeAnalysis; -namespace SmiServices.Microservices.CohortExtractor.RequestFulfillers +namespace SmiServices.Microservices.CohortExtractor.RequestFulfillers; + +public interface IRejector { - public interface IRejector - { - /// - /// Test whether the given should be rejected - /// - /// - /// The reason for rejection, if any - /// True if the record was rejected, else false - bool Reject(IDataRecord row, [NotNullWhen(true)] out string? reason); - } + /// + /// Test whether the given should be rejected + /// + /// + /// The reason for rejection, if any + /// True if the record was rejected, else false + bool Reject(IDataRecord row, [NotNullWhen(true)] out string? 
reason); } diff --git a/src/SmiServices/Microservices/CohortExtractor/RequestFulfillers/QueryToExecute.cs b/src/SmiServices/Microservices/CohortExtractor/RequestFulfillers/QueryToExecute.cs index 06abbe860..6c0f0fb96 100644 --- a/src/SmiServices/Microservices/CohortExtractor/RequestFulfillers/QueryToExecute.cs +++ b/src/SmiServices/Microservices/CohortExtractor/RequestFulfillers/QueryToExecute.cs @@ -9,162 +9,161 @@ using System.Collections.Generic; using System.Data.Common; -namespace SmiServices.Microservices.CohortExtractor.RequestFulfillers +namespace SmiServices.Microservices.CohortExtractor.RequestFulfillers; + +public class QueryToExecute { - public class QueryToExecute - { - private readonly ILogger _logger = LogManager.GetCurrentClassLogger(); + private readonly ILogger _logger = LogManager.GetCurrentClassLogger(); - private readonly QueryToExecuteColumnSet _columns; + private readonly QueryToExecuteColumnSet _columns; - /// - /// The column to search for in the WHERE logic - /// - private readonly string _keyTag; + /// + /// The column to search for in the WHERE logic + /// + private readonly string _keyTag; - private readonly DiscoveredServer _server; - private string? _sql; + private readonly DiscoveredServer _server; + private string? 
_sql; - /// - /// Lock to ensure we don't build multiple at once if someone decides to multi - /// thread the method - /// - private readonly object _oLockExecute = new(); + /// + /// Lock to ensure we don't build multiple at once if someone decides to multi + /// thread the method + /// + private readonly object _oLockExecute = new(); - private readonly IEnumerable _rejectors; + private readonly IEnumerable _rejectors; - public QueryToExecute(QueryToExecuteColumnSet columns, string keyTag, IEnumerable rejectors) - { - _columns = columns; - _server = columns.Catalogue.GetDistinctLiveDatabaseServer(DataAccessContext.DataExport, setInitialDatabase: true); - _keyTag = _server.GetQuerySyntaxHelper().EnsureWrapped(keyTag); - _rejectors = rejectors; - } + public QueryToExecute(QueryToExecuteColumnSet columns, string keyTag, IEnumerable rejectors) + { + _columns = columns; + _server = columns.Catalogue.GetDistinctLiveDatabaseServer(DataAccessContext.DataExport, setInitialDatabase: true); + _keyTag = _server.GetQuerySyntaxHelper().EnsureWrapped(keyTag); + _rejectors = rejectors; + } - /// - /// Creates a query builder with all the columns required to match rows on the - /// - /// - protected virtual QueryBuilder GetQueryBuilder() - { - var qb = new QueryBuilder("distinct", null); + /// + /// Creates a query builder with all the columns required to match rows on the + /// + /// + protected virtual QueryBuilder GetQueryBuilder() + { + var qb = new QueryBuilder("distinct", null); - foreach (var col in _columns.AllColumns) - qb.AddColumn(col); + foreach (var col in _columns.AllColumns) + qb.AddColumn(col); - qb.RootFilterContainer = GetWhereLogic(); + qb.RootFilterContainer = GetWhereLogic(); - return qb; - } + return qb; + } - /// - /// Generates the WHERE logic for the query. Adds a single root container with AND operation and then adds - /// all filters in . It is better to override unless you want - /// to create a nested container tree for the query. 
- /// - /// - protected virtual IContainer GetWhereLogic() - { - //make a root WHERE container in memory - var memory = new MemoryCatalogueRepository(); - var container = new SpontaneouslyInventedFilterContainer(memory, null, null, FilterContainerOperation.AND); + /// + /// Generates the WHERE logic for the query. Adds a single root container with AND operation and then adds + /// all filters in . It is better to override unless you want + /// to create a nested container tree for the query. + /// + /// + protected virtual IContainer GetWhereLogic() + { + //make a root WHERE container in memory + var memory = new MemoryCatalogueRepository(); + var container = new SpontaneouslyInventedFilterContainer(memory, null, null, FilterContainerOperation.AND); - //Get all filters that we are to add and add them to the root - foreach (IFilter filter in GetFilters(memory, container)) - container.AddChild(new SpontaneouslyInventedFilter(memory, container, filter.WhereSQL, filter.Name, filter.Description, filter.GetAllParameters())); + //Get all filters that we are to add and add them to the root + foreach (IFilter filter in GetFilters(memory, container)) + container.AddChild(new SpontaneouslyInventedFilter(memory, container, filter.WhereSQL, filter.Name, filter.Description, filter.GetAllParameters())); - return container; - } + return container; + } - /// - /// Override to change what filters are included in the WHERE Sql of your query. Default behaviour is to match on the - /// supplied keyTag and AND with all listed on the - /// - /// - /// - /// - protected virtual IEnumerable GetFilters(MemoryCatalogueRepository memoryRepo, IContainer rootContainer) - { - yield return new SpontaneouslyInventedFilter(memoryRepo, rootContainer, _keyTag + "= '{0}'", - "Filter Series", "Filters by series UID", null); + /// + /// Override to change what filters are included in the WHERE Sql of your query. 
Default behaviour is to match on the + /// supplied keyTag and AND with all listed on the + /// + /// + /// + /// + protected virtual IEnumerable GetFilters(MemoryCatalogueRepository memoryRepo, IContainer rootContainer) + { + yield return new SpontaneouslyInventedFilter(memoryRepo, rootContainer, _keyTag + "= '{0}'", + "Filter Series", "Filters by series UID", null); - foreach (var filter in _columns.Catalogue.GetAllMandatoryFilters()) - yield return filter; - } + foreach (var filter in _columns.Catalogue.GetAllMandatoryFilters()) + yield return filter; + } - private string GetSqlForKeyValue(string value) - { - return string.Format(_sql!, value); - } + private string GetSqlForKeyValue(string value) + { + return string.Format(_sql!, value); + } - /// - /// Returns the SeriesInstanceUID and a set of any file paths matching the query - /// - /// - /// - public IEnumerable Execute(string valueToLookup) - { - if (_sql == null) - lock (_oLockExecute) + /// + /// Returns the SeriesInstanceUID and a set of any file paths matching the query + /// + /// + /// + public IEnumerable Execute(string valueToLookup) + { + if (_sql == null) + lock (_oLockExecute) + { + if (_sql == null) { - if (_sql == null) - { - var qb = GetQueryBuilder(); - _sql = qb.SQL; - } + var qb = GetQueryBuilder(); + _sql = qb.SQL; } + } - string path = _columns.FilePathColumn?.GetRuntimeName() ?? throw new NullReferenceException("No FilePathColumn set"); - string study = _columns.StudyTagColumn?.GetRuntimeName() ?? throw new NullReferenceException("No StudyTagColumn set"); - string series = _columns.SeriesTagColumn?.GetRuntimeName() ?? throw new NullReferenceException("No SeriesTagColumn set"); - string instance = _columns.InstanceTagColumn?.GetRuntimeName() ?? throw new NullReferenceException("No InstanceTagColumn set"); + string path = _columns.FilePathColumn?.GetRuntimeName() ?? throw new NullReferenceException("No FilePathColumn set"); + string study = _columns.StudyTagColumn?.GetRuntimeName() ?? 
throw new NullReferenceException("No StudyTagColumn set"); + string series = _columns.SeriesTagColumn?.GetRuntimeName() ?? throw new NullReferenceException("No SeriesTagColumn set"); + string instance = _columns.InstanceTagColumn?.GetRuntimeName() ?? throw new NullReferenceException("No InstanceTagColumn set"); - using DbConnection con = _server!.GetConnection(); - con.Open(); + using DbConnection con = _server!.GetConnection(); + con.Open(); - string? sqlString = GetSqlForKeyValue(valueToLookup); + string? sqlString = GetSqlForKeyValue(valueToLookup); - DbDataReader reader; - try - { - reader = _server.GetCommand(sqlString, con).ExecuteReader(); - } - catch (DbException) - { - _logger.Error($"The following query resulted in an exception: {sqlString}"); - throw; - } + DbDataReader reader; + try + { + reader = _server.GetCommand(sqlString, con).ExecuteReader(); + } + catch (DbException) + { + _logger.Error($"The following query resulted in an exception: {sqlString}"); + throw; + } - while (reader.Read()) - { - object imagePath = reader[path]; + while (reader.Read()) + { + object imagePath = reader[path]; - if (imagePath == DBNull.Value) - continue; + if (imagePath == DBNull.Value) + continue; - bool reject = false; - string? rejectReason = null; + bool reject = false; + string? 
rejectReason = null; - //Ask the rejectors how good this record is - foreach (IRejector rejector in _rejectors) + //Ask the rejectors how good this record is + foreach (IRejector rejector in _rejectors) + { + if (rejector.Reject(reader, out rejectReason)) { - if (rejector.Reject(reader, out rejectReason)) - { - reject = true; - break; - } + reject = true; + break; } - - yield return new QueryToExecuteResult( - (string)imagePath, - (string)reader[study], - (string)reader[series], - (string)reader[instance], - reject, - rejectReason - ); } + + yield return new QueryToExecuteResult( + (string)imagePath, + (string)reader[study], + (string)reader[series], + (string)reader[instance], + reject, + rejectReason + ); } } } diff --git a/src/SmiServices/Microservices/CohortExtractor/RequestFulfillers/QueryToExecuteColumnSet.cs b/src/SmiServices/Microservices/CohortExtractor/RequestFulfillers/QueryToExecuteColumnSet.cs index 4f514871c..05edf7afb 100644 --- a/src/SmiServices/Microservices/CohortExtractor/RequestFulfillers/QueryToExecuteColumnSet.cs +++ b/src/SmiServices/Microservices/CohortExtractor/RequestFulfillers/QueryToExecuteColumnSet.cs @@ -3,111 +3,110 @@ using System.Collections.ObjectModel; using System.Linq; -namespace SmiServices.Microservices.CohortExtractor.RequestFulfillers +namespace SmiServices.Microservices.CohortExtractor.RequestFulfillers; + +public class QueryToExecuteColumnSet { - public class QueryToExecuteColumnSet + public const string DefaultImagePathColumnName = "RelativeFileArchiveURI"; + public const string DefaultStudyIdColumnName = "StudyInstanceUID"; + public const string DefaultSeriesIdColumnName = "SeriesInstanceUID"; + public const string DefaultInstanceIdColumnName = "SOPInstanceUID"; + + /// + /// The dataset to query + /// + public readonly ICatalogue Catalogue; + + /// + /// The column in the that stores the location on disk of the image + /// + public readonly ExtractionInformation? 
FilePathColumn; + + /// + /// The column in the that stores the StudyInstanceUID + /// + public readonly ExtractionInformation? StudyTagColumn; + + /// + /// The column in the that stores the SeriesInstanceUID + /// + public readonly ExtractionInformation? SeriesTagColumn; + + /// + /// The column in the that stores the SOPInstanceUID + /// + public readonly ExtractionInformation? InstanceTagColumn; + + /// + /// All the extractable columns in the (includes , etc) + /// + public readonly ReadOnlyCollection AllColumns; + + public bool HasAllUIDs => StudyTagColumn != null && SeriesTagColumn != null && InstanceTagColumn != null; + + public QueryToExecuteColumnSet(ICatalogue catalogue, + ExtractionInformation? filePathColumn, + ExtractionInformation? studyTagColumn, + ExtractionInformation? seriesTagColumn, + ExtractionInformation? instanceTagColumn, + bool requireFilePath = true) + { + Catalogue = catalogue ?? throw new ArgumentNullException(nameof(catalogue)); + + AllColumns = new ReadOnlyCollection(Catalogue.GetAllExtractionInformation(ExtractionCategory.Any)); Catalogue.GetAllExtractionInformation(ExtractionCategory.Any); + + if (filePathColumn == null && requireFilePath) + throw new ArgumentNullException(nameof(filePathColumn)); + + FilePathColumn = filePathColumn; + StudyTagColumn = studyTagColumn; + SeriesTagColumn = seriesTagColumn; + InstanceTagColumn = instanceTagColumn; + } + + /// + /// Generates a column set based on columns found in (using the default expected column names + /// e.g. ). Returns null if the does not have all the required + /// columns + /// + /// + public static QueryToExecuteColumnSet? Create(ICatalogue catalogue) + { + return Create(catalogue, true); + } + + /// + /// Generates a column set based on columns found in (using the default expected column names + /// e.g. ). Returns null if the does not have all the required + /// columns + /// + /// + /// + public static QueryToExecuteColumnSet? 
Create(ICatalogue catalogue, bool requireFilePath) + { + ArgumentNullException.ThrowIfNull(catalogue); + + var eis = catalogue.GetAllExtractionInformation(ExtractionCategory.Any); + + var filePathColumn = eis.SingleOrDefault(ei => ei.GetRuntimeName().Equals(DefaultImagePathColumnName, StringComparison.CurrentCultureIgnoreCase)); + var studyTagColumn = eis.SingleOrDefault(ei => ei.GetRuntimeName().Equals(DefaultStudyIdColumnName, StringComparison.CurrentCultureIgnoreCase)); + var seriesTagColumn = eis.SingleOrDefault(ei => ei.GetRuntimeName().Equals(DefaultSeriesIdColumnName, StringComparison.CurrentCultureIgnoreCase)); + var instanceTagColumn = eis.SingleOrDefault(ei => ei.GetRuntimeName().Equals(DefaultInstanceIdColumnName, StringComparison.CurrentCultureIgnoreCase)); + + if (filePathColumn == null && requireFilePath) + return null; + + return new QueryToExecuteColumnSet(catalogue, filePathColumn, studyTagColumn, seriesTagColumn, instanceTagColumn, requireFilePath); + + } + + /// + /// Returns true if the contains an extractable column with the given + /// + /// + /// + public bool Contains(string column) { - public const string DefaultImagePathColumnName = "RelativeFileArchiveURI"; - public const string DefaultStudyIdColumnName = "StudyInstanceUID"; - public const string DefaultSeriesIdColumnName = "SeriesInstanceUID"; - public const string DefaultInstanceIdColumnName = "SOPInstanceUID"; - - /// - /// The dataset to query - /// - public readonly ICatalogue Catalogue; - - /// - /// The column in the that stores the location on disk of the image - /// - public readonly ExtractionInformation? FilePathColumn; - - /// - /// The column in the that stores the StudyInstanceUID - /// - public readonly ExtractionInformation? StudyTagColumn; - - /// - /// The column in the that stores the SeriesInstanceUID - /// - public readonly ExtractionInformation? 
SeriesTagColumn; - - /// - /// The column in the that stores the SOPInstanceUID - /// - public readonly ExtractionInformation? InstanceTagColumn; - - /// - /// All the extractable columns in the (includes , etc) - /// - public readonly ReadOnlyCollection AllColumns; - - public bool HasAllUIDs => StudyTagColumn != null && SeriesTagColumn != null && InstanceTagColumn != null; - - public QueryToExecuteColumnSet(ICatalogue catalogue, - ExtractionInformation? filePathColumn, - ExtractionInformation? studyTagColumn, - ExtractionInformation? seriesTagColumn, - ExtractionInformation? instanceTagColumn, - bool requireFilePath = true) - { - Catalogue = catalogue ?? throw new ArgumentNullException(nameof(catalogue)); - - AllColumns = new ReadOnlyCollection(Catalogue.GetAllExtractionInformation(ExtractionCategory.Any)); Catalogue.GetAllExtractionInformation(ExtractionCategory.Any); - - if (filePathColumn == null && requireFilePath) - throw new ArgumentNullException(nameof(filePathColumn)); - - FilePathColumn = filePathColumn; - StudyTagColumn = studyTagColumn; - SeriesTagColumn = seriesTagColumn; - InstanceTagColumn = instanceTagColumn; - } - - /// - /// Generates a column set based on columns found in (using the default expected column names - /// e.g. ). Returns null if the does not have all the required - /// columns - /// - /// - public static QueryToExecuteColumnSet? Create(ICatalogue catalogue) - { - return Create(catalogue, true); - } - - /// - /// Generates a column set based on columns found in (using the default expected column names - /// e.g. ). Returns null if the does not have all the required - /// columns - /// - /// - /// - public static QueryToExecuteColumnSet? 
Create(ICatalogue catalogue, bool requireFilePath) - { - ArgumentNullException.ThrowIfNull(catalogue); - - var eis = catalogue.GetAllExtractionInformation(ExtractionCategory.Any); - - var filePathColumn = eis.SingleOrDefault(ei => ei.GetRuntimeName().Equals(DefaultImagePathColumnName, StringComparison.CurrentCultureIgnoreCase)); - var studyTagColumn = eis.SingleOrDefault(ei => ei.GetRuntimeName().Equals(DefaultStudyIdColumnName, StringComparison.CurrentCultureIgnoreCase)); - var seriesTagColumn = eis.SingleOrDefault(ei => ei.GetRuntimeName().Equals(DefaultSeriesIdColumnName, StringComparison.CurrentCultureIgnoreCase)); - var instanceTagColumn = eis.SingleOrDefault(ei => ei.GetRuntimeName().Equals(DefaultInstanceIdColumnName, StringComparison.CurrentCultureIgnoreCase)); - - if (filePathColumn == null && requireFilePath) - return null; - - return new QueryToExecuteColumnSet(catalogue, filePathColumn, studyTagColumn, seriesTagColumn, instanceTagColumn, requireFilePath); - - } - - /// - /// Returns true if the contains an extractable column with the given - /// - /// - /// - public bool Contains(string column) - { - return AllColumns.Any(c => c.GetRuntimeName().Equals(column, StringComparison.CurrentCultureIgnoreCase)); - } + return AllColumns.Any(c => c.GetRuntimeName().Equals(column, StringComparison.CurrentCultureIgnoreCase)); } } diff --git a/src/SmiServices/Microservices/CohortExtractor/RequestFulfillers/QueryToExecuteResult.cs b/src/SmiServices/Microservices/CohortExtractor/RequestFulfillers/QueryToExecuteResult.cs index 919e64935..d456022aa 100644 --- a/src/SmiServices/Microservices/CohortExtractor/RequestFulfillers/QueryToExecuteResult.cs +++ b/src/SmiServices/Microservices/CohortExtractor/RequestFulfillers/QueryToExecuteResult.cs @@ -2,41 +2,40 @@ using System; -namespace SmiServices.Microservices.CohortExtractor.RequestFulfillers +namespace SmiServices.Microservices.CohortExtractor.RequestFulfillers; + +public class QueryToExecuteResult : MemberwiseEquatable 
{ - public class QueryToExecuteResult : MemberwiseEquatable - { - public readonly string FilePathValue; - public readonly string StudyTagValue; - public readonly string SeriesTagValue; - public readonly string InstanceTagValue; + public readonly string FilePathValue; + public readonly string StudyTagValue; + public readonly string SeriesTagValue; + public readonly string InstanceTagValue; - public readonly bool Reject; - public readonly string? RejectReason; + public readonly bool Reject; + public readonly string? RejectReason; - public QueryToExecuteResult( - string filePathValue, - string studyTagValue, - string seriesTagValue, - string instanceTagValue, - bool rejection, - string? rejectionReason - ) - { - FilePathValue = filePathValue; - StudyTagValue = studyTagValue; - SeriesTagValue = seriesTagValue; - InstanceTagValue = instanceTagValue; - Reject = rejection; - RejectReason = rejectionReason; + public QueryToExecuteResult( + string filePathValue, + string studyTagValue, + string seriesTagValue, + string instanceTagValue, + bool rejection, + string? 
rejectionReason + ) + { + FilePathValue = filePathValue; + StudyTagValue = studyTagValue; + SeriesTagValue = seriesTagValue; + InstanceTagValue = instanceTagValue; + Reject = rejection; + RejectReason = rejectionReason; - if (Reject && string.IsNullOrWhiteSpace(RejectReason)) - throw new ArgumentException("RejectReason must be specified if Reject=true"); - } + if (Reject && string.IsNullOrWhiteSpace(RejectReason)) + throw new ArgumentException("RejectReason must be specified if Reject=true"); + } - public override string ToString() - { - return $"{FilePathValue}(Reject={Reject})"; - } + public override string ToString() + { + return $"{FilePathValue}(Reject={Reject})"; } } diff --git a/src/SmiServices/Microservices/CohortPackager/AnonFailedMessageConsumer.cs b/src/SmiServices/Microservices/CohortPackager/AnonFailedMessageConsumer.cs index 8a7e7ae32..05d65eb8b 100644 --- a/src/SmiServices/Microservices/CohortPackager/AnonFailedMessageConsumer.cs +++ b/src/SmiServices/Microservices/CohortPackager/AnonFailedMessageConsumer.cs @@ -5,36 +5,35 @@ using System; -namespace SmiServices.Microservices.CohortPackager +namespace SmiServices.Microservices.CohortPackager; + +// TODO Naming +/// +/// Consumer for (s) +/// +public class AnonFailedMessageConsumer : Consumer { - // TODO Naming - /// - /// Consumer for (s) - /// - public class AnonFailedMessageConsumer : Consumer - { - private readonly IExtractJobStore _store; + private readonly IExtractJobStore _store; + + public AnonFailedMessageConsumer(IExtractJobStore store) + { + _store = store; + } - public AnonFailedMessageConsumer(IExtractJobStore store) + protected override void ProcessMessageImpl(IMessageHeader header, ExtractedFileStatusMessage message, ulong tag) + { + try { - _store = store; + _store.PersistMessageToStore(message, header); } - - protected override void ProcessMessageImpl(IMessageHeader header, ExtractedFileStatusMessage message, ulong tag) + catch (ApplicationException e) { - try - { - 
_store.PersistMessageToStore(message, header); - } - catch (ApplicationException e) - { - // Catch specific exceptions we are aware of, any uncaught will bubble up to the wrapper in ProcessMessage - ErrorAndNack(header, tag, "Error while processing ExtractedFileStatusMessage", e); - return; - } - - Ack(header, tag); + // Catch specific exceptions we are aware of, any uncaught will bubble up to the wrapper in ProcessMessage + ErrorAndNack(header, tag, "Error while processing ExtractedFileStatusMessage", e); + return; } + + Ack(header, tag); } } diff --git a/src/SmiServices/Microservices/CohortPackager/AnonVerificationMessageConsumer.cs b/src/SmiServices/Microservices/CohortPackager/AnonVerificationMessageConsumer.cs index eb55bae87..1c7f71e8f 100644 --- a/src/SmiServices/Microservices/CohortPackager/AnonVerificationMessageConsumer.cs +++ b/src/SmiServices/Microservices/CohortPackager/AnonVerificationMessageConsumer.cs @@ -9,131 +9,130 @@ using System.Timers; -namespace SmiServices.Microservices.CohortPackager +namespace SmiServices.Microservices.CohortPackager; + +/// +/// Consumer for (s) +/// +public sealed class AnonVerificationMessageConsumer : Consumer, IDisposable { - /// - /// Consumer for (s) - /// - public sealed class AnonVerificationMessageConsumer : Consumer, IDisposable + private readonly IExtractJobStore _store; + private readonly bool _processBatches; + private readonly int _maxUnacknowledgedMessages; + private int _unacknowledgedMessages = 0; + private readonly Timer _verificationStatusQueueTimer; + private bool _ignoreNewMessages = false; + private bool _queueIsProcessing = false; + + public AnonVerificationMessageConsumer(IExtractJobStore store, bool processBatches, int maxUnacknowledgedMessages, TimeSpan verificationMessageQueueFlushTime) { - private readonly IExtractJobStore _store; - private readonly bool _processBatches; - private readonly int _maxUnacknowledgedMessages; - private int _unacknowledgedMessages = 0; - private readonly Timer 
_verificationStatusQueueTimer; - private bool _ignoreNewMessages = false; - private bool _queueIsProcessing = false; - - public AnonVerificationMessageConsumer(IExtractJobStore store, bool processBatches, int maxUnacknowledgedMessages, TimeSpan verificationMessageQueueFlushTime) - { - _store = store; - _maxUnacknowledgedMessages = maxUnacknowledgedMessages; - _processBatches = processBatches; + _store = store; + _maxUnacknowledgedMessages = maxUnacknowledgedMessages; + _processBatches = processBatches; - // NOTE: Timer rejects values larger than int.MaxValue - if (verificationMessageQueueFlushTime.TotalMilliseconds >= int.MaxValue) - verificationMessageQueueFlushTime = TimeSpan.FromMilliseconds(int.MaxValue); + // NOTE: Timer rejects values larger than int.MaxValue + if (verificationMessageQueueFlushTime.TotalMilliseconds >= int.MaxValue) + verificationMessageQueueFlushTime = TimeSpan.FromMilliseconds(int.MaxValue); - _verificationStatusQueueTimer = new Timer(verificationMessageQueueFlushTime); + _verificationStatusQueueTimer = new Timer(verificationMessageQueueFlushTime); - _verificationStatusQueueTimer.Elapsed += TimerHandler; + _verificationStatusQueueTimer.Elapsed += TimerHandler; - if (_processBatches) - { - Logger.Debug($"Starting {nameof(_verificationStatusQueueTimer)}"); - _verificationStatusQueueTimer.Start(); - } + if (_processBatches) + { + Logger.Debug($"Starting {nameof(_verificationStatusQueueTimer)}"); + _verificationStatusQueueTimer.Start(); } + } - private void TimerHandler(object? sender, ElapsedEventArgs args) - { - if (_queueIsProcessing) - return; + private void TimerHandler(object? 
sender, ElapsedEventArgs args) + { + if (_queueIsProcessing) + return; - _queueIsProcessing = true; + _queueIsProcessing = true; - try - { - _store.ProcessVerificationMessageQueue(); - AckAvailableMessages(); - } - catch (Exception e) - { - _ignoreNewMessages = true; - _verificationStatusQueueTimer.Stop(); - Logger.Error(e); - } - finally - { - _queueIsProcessing = false; - } + try + { + _store.ProcessVerificationMessageQueue(); + AckAvailableMessages(); + } + catch (Exception e) + { + _ignoreNewMessages = true; + _verificationStatusQueueTimer.Stop(); + Logger.Error(e); + } + finally + { + _queueIsProcessing = false; } + } + + protected override void ProcessMessageImpl(IMessageHeader header, ExtractedFileVerificationMessage message, ulong tag) + { + if (_ignoreNewMessages) + return; - protected override void ProcessMessageImpl(IMessageHeader header, ExtractedFileVerificationMessage message, ulong tag) + try + { + // Check the report contents are valid here, since we just treat it as a JSON string from now on + _ = JsonConvert.DeserializeObject>(message.Report); + } + catch (JsonException e) { - if (_ignoreNewMessages) - return; - - try - { - // Check the report contents are valid here, since we just treat it as a JSON string from now on - _ = JsonConvert.DeserializeObject>(message.Report); - } - catch (JsonException e) - { - ErrorAndNack(header, tag, "Could not deserialize message report to Failure object", e); - return; - } - - try - { - if (_processBatches) - _store.AddToWriteQueue(message, header, tag); - else - _store.PersistMessageToStore(message, header); - } - catch (ApplicationException e) - { - // Catch specific exceptions we are aware of, any uncaught will bubble up to the wrapper in ProcessMessage - ErrorAndNack(header, tag, "Error while processing ExtractedFileVerificationMessage", e); - return; - } + ErrorAndNack(header, tag, "Could not deserialize message report to Failure object", e); + return; + } + try + { if (_processBatches) - { - if 
(++_unacknowledgedMessages >= _maxUnacknowledgedMessages) - _store.ProcessVerificationMessageQueue(); - AckAvailableMessages(); - } + _store.AddToWriteQueue(message, header, tag); else - { - Ack(header, tag); - } + _store.PersistMessageToStore(message, header); + } + catch (ApplicationException e) + { + // Catch specific exceptions we are aware of, any uncaught will bubble up to the wrapper in ProcessMessage + ErrorAndNack(header, tag, "Error while processing ExtractedFileVerificationMessage", e); + return; } - private void AckAvailableMessages() + if (_processBatches) + { + if (++_unacknowledgedMessages >= _maxUnacknowledgedMessages) + _store.ProcessVerificationMessageQueue(); + AckAvailableMessages(); + } + else { - while (_store.ProcessedVerificationMessages.TryDequeue(out var processed)) - { - Ack(processed.Item1, processed.Item2); - _unacknowledgedMessages--; - } + Ack(header, tag); } + } - public void Dispose() + private void AckAvailableMessages() + { + while (_store.ProcessedVerificationMessages.TryDequeue(out var processed)) { - _ignoreNewMessages = true; - _verificationStatusQueueTimer.Stop(); + Ack(processed.Item1, processed.Item2); + _unacknowledgedMessages--; + } + } - try - { - _store.ProcessVerificationMessageQueue(); - AckAvailableMessages(); - } - catch (Exception e) - { - Logger.Error(e, $"Error when processing outstanding messages on Dispose. Some messages in the store may unacknowledged"); - } + public void Dispose() + { + _ignoreNewMessages = true; + _verificationStatusQueueTimer.Stop(); + + try + { + _store.ProcessVerificationMessageQueue(); + AckAvailableMessages(); + } + catch (Exception e) + { + Logger.Error(e, $"Error when processing outstanding messages on Dispose. 
Some messages in the store may unacknowledged"); } } } diff --git a/src/SmiServices/Microservices/CohortPackager/CohortPackager.cs b/src/SmiServices/Microservices/CohortPackager/CohortPackager.cs index 8ebeedbe6..4dae688d1 100644 --- a/src/SmiServices/Microservices/CohortPackager/CohortPackager.cs +++ b/src/SmiServices/Microservices/CohortPackager/CohortPackager.cs @@ -12,69 +12,68 @@ using System.Diagnostics.CodeAnalysis; -namespace SmiServices.Microservices.CohortPackager +namespace SmiServices.Microservices.CohortPackager; + +public static class CohortPackager { - public static class CohortPackager + [ExcludeFromCodeCoverage] + public static int Main(IEnumerable args) { - [ExcludeFromCodeCoverage] - public static int Main(IEnumerable args) - { - int ret = SmiCliInit.ParseAndRun(args, nameof(CohortPackager), OnParse); - return ret; - } - - private static int OnParse(GlobalOptions globals, CohortPackagerCliOptions opts) - { - if (opts.ExtractionId != default) - return RecreateReports(globals, opts); + int ret = SmiCliInit.ParseAndRun(args, nameof(CohortPackager), OnParse); + return ret; + } - var bootstrapper = new MicroserviceHostBootstrapper(() => new CohortPackagerHost(globals)); - int ret = bootstrapper.Main(); - return ret; - } + private static int OnParse(GlobalOptions globals, CohortPackagerCliOptions opts) + { + if (opts.ExtractionId != default) + return RecreateReports(globals, opts); - private static int RecreateReports(GlobalOptions globalOptions, CohortPackagerCliOptions cliOptions) - { - Logger logger = LogManager.GetCurrentClassLogger(); + var bootstrapper = new MicroserviceHostBootstrapper(() => new CohortPackagerHost(globals)); + int ret = bootstrapper.Main(); + return ret; + } - var mongoDbOptions = globalOptions.MongoDatabases?.ExtractionStoreOptions; - if (mongoDbOptions == null) - { - logger.Error($"{nameof(MongoDatabases.ExtractionStoreOptions)} must be set"); - return 1; - } + private static int RecreateReports(GlobalOptions globalOptions, 
CohortPackagerCliOptions cliOptions) + { + Logger logger = LogManager.GetCurrentClassLogger(); - var databaseName = mongoDbOptions.DatabaseName; - if (databaseName == null) - { - logger.Error($"{nameof(mongoDbOptions.DatabaseName)} must be set"); - return 1; - } + var mongoDbOptions = globalOptions.MongoDatabases?.ExtractionStoreOptions; + if (mongoDbOptions == null) + { + logger.Error($"{nameof(MongoDatabases.ExtractionStoreOptions)} must be set"); + return 1; + } - logger.Info($"Recreating report for job {cliOptions.ExtractionId}"); + var databaseName = mongoDbOptions.DatabaseName; + if (databaseName == null) + { + logger.Error($"{nameof(mongoDbOptions.DatabaseName)} must be set"); + return 1; + } - MongoClient client = MongoClientHelpers.GetMongoClient(mongoDbOptions, globalOptions.HostProcessName); - var jobStore = new MongoExtractJobStore(client, databaseName); + logger.Info($"Recreating report for job {cliOptions.ExtractionId}"); - // NOTE(rkm 2020-10-22) Sets the extraction root to the current directory - var reporter = new JobReporter( - jobStore, - new FileSystem(), - Directory.GetCurrentDirectory(), - cliOptions.OutputNewLine ?? globalOptions.CohortPackagerOptions?.ReportNewLine - ); + MongoClient client = MongoClientHelpers.GetMongoClient(mongoDbOptions, globalOptions.HostProcessName); + var jobStore = new MongoExtractJobStore(client, databaseName); - try - { - reporter.CreateReports(cliOptions.ExtractionId); - } - catch (Exception e) - { - logger.Error(e); - return 1; - } + // NOTE(rkm 2020-10-22) Sets the extraction root to the current directory + var reporter = new JobReporter( + jobStore, + new FileSystem(), + Directory.GetCurrentDirectory(), + cliOptions.OutputNewLine ?? 
globalOptions.CohortPackagerOptions?.ReportNewLine + ); - return 0; + try + { + reporter.CreateReports(cliOptions.ExtractionId); + } + catch (Exception e) + { + logger.Error(e); + return 1; } + + return 0; } } diff --git a/src/SmiServices/Microservices/CohortPackager/CohortPackagerCliOptions.cs b/src/SmiServices/Microservices/CohortPackager/CohortPackagerCliOptions.cs index 8da6b991e..134232226 100644 --- a/src/SmiServices/Microservices/CohortPackager/CohortPackagerCliOptions.cs +++ b/src/SmiServices/Microservices/CohortPackager/CohortPackagerCliOptions.cs @@ -6,36 +6,35 @@ using System.Diagnostics.CodeAnalysis; -namespace SmiServices.Microservices.CohortPackager +namespace SmiServices.Microservices.CohortPackager; + +[ExcludeFromCodeCoverage] +public class CohortPackagerCliOptions : CliOptions { - [ExcludeFromCodeCoverage] - public class CohortPackagerCliOptions : CliOptions - { - [Option( - 'r', - "recreate-report", - Required = false, - HelpText = "[Optional] Recreate the report for the specified extraction ID, and exit. The extraction root will be set to the current directory." - )] - public Guid ExtractionId { get; set; } + [Option( + 'r', + "recreate-report", + Required = false, + HelpText = "[Optional] Recreate the report for the specified extraction ID, and exit. The extraction root will be set to the current directory." + )] + public Guid ExtractionId { get; set; } - [Option( - 'o', - "output-newline", - Required = false, - HelpText = "[Optional] The newline string to use when creating the validation reports. Can be specified to create reports for a different platform. Defaults to Environment.NewLine if not set, and overrides any value in the YAML config." - )] - public string? OutputNewLine { get; set; } + [Option( + 'o', + "output-newline", + Required = false, + HelpText = "[Optional] The newline string to use when creating the validation reports. Can be specified to create reports for a different platform. 
Defaults to Environment.NewLine if not set, and overrides any value in the YAML config." + )] + public string? OutputNewLine { get; set; } - [Usage] - public static IEnumerable Examples + [Usage] + public static IEnumerable Examples + { + get { - get - { - yield return new Example("Run as a service", new CohortPackagerCliOptions { ExtractionId = Guid.Empty }); - yield return new Example("Recreate a single report", new CohortPackagerCliOptions { ExtractionId = Guid.NewGuid() }); - } + yield return new Example("Run as a service", new CohortPackagerCliOptions { ExtractionId = Guid.Empty }); + yield return new Example("Recreate a single report", new CohortPackagerCliOptions { ExtractionId = Guid.NewGuid() }); } } } diff --git a/src/SmiServices/Microservices/CohortPackager/CohortPackagerControlMessageHandler.cs b/src/SmiServices/Microservices/CohortPackager/CohortPackagerControlMessageHandler.cs index ea12e16c5..02c8442a8 100644 --- a/src/SmiServices/Microservices/CohortPackager/CohortPackagerControlMessageHandler.cs +++ b/src/SmiServices/Microservices/CohortPackager/CohortPackagerControlMessageHandler.cs @@ -4,48 +4,47 @@ using SmiServices.Microservices.CohortPackager.JobProcessing; using System; -namespace SmiServices.Microservices.CohortPackager -{ - public class CohortPackagerControlMessageHandler : IControlMessageHandler - { - private readonly ILogger _logger = LogManager.GetCurrentClassLogger(); +namespace SmiServices.Microservices.CohortPackager; - private readonly IExtractJobWatcher _jobWatcher; +public class CohortPackagerControlMessageHandler : IControlMessageHandler +{ + private readonly ILogger _logger = LogManager.GetCurrentClassLogger(); + private readonly IExtractJobWatcher _jobWatcher; - public CohortPackagerControlMessageHandler(IExtractJobWatcher jobWatcher) - { - _jobWatcher = jobWatcher; - } - public void ControlMessageHandler(string action, string? 
message = null) - { - _logger.Info("Received control event with action: " + action + " and message: " + (message ?? "")); + public CohortPackagerControlMessageHandler(IExtractJobWatcher jobWatcher) + { + _jobWatcher = jobWatcher; + } - // Only have 1 case to handle here - if (action != "processjobs") - return; + public void ControlMessageHandler(string action, string? message = null) + { + _logger.Info("Received control event with action: " + action + " and message: " + (message ?? "")); - _logger.Info("Received request to process jobs now"); + // Only have 1 case to handle here + if (action != "processjobs") + return; - Guid toProcess = default; + _logger.Info("Received request to process jobs now"); - if (message != null) - { - if (!Guid.TryParse(message, out toProcess)) - { - _logger.Warn("Could not parse \"" + message + "\" to a job GUID"); - return; - } + Guid toProcess = default; - _logger.Info("Calling process for job " + toProcess); - } - else + if (message != null) + { + if (!Guid.TryParse(message, out toProcess)) { - _logger.Info("No message content, doing process for all jobs"); + _logger.Warn("Could not parse \"" + message + "\" to a job GUID"); + return; } - _jobWatcher.ProcessJobs(toProcess); + _logger.Info("Calling process for job " + toProcess); + } + else + { + _logger.Info("No message content, doing process for all jobs"); } + + _jobWatcher.ProcessJobs(toProcess); } } diff --git a/src/SmiServices/Microservices/CohortPackager/CohortPackagerHost.cs b/src/SmiServices/Microservices/CohortPackager/CohortPackagerHost.cs index f132e05c9..f11b3569f 100644 --- a/src/SmiServices/Microservices/CohortPackager/CohortPackagerHost.cs +++ b/src/SmiServices/Microservices/CohortPackager/CohortPackagerHost.cs @@ -12,138 +12,137 @@ using System.IO.Abstractions; -namespace SmiServices.Microservices.CohortPackager +namespace SmiServices.Microservices.CohortPackager; + +public class CohortPackagerHost : MicroserviceHost { - public class CohortPackagerHost : 
MicroserviceHost + /// + /// The process which monitors for extract jobs being completed + /// + private readonly ExtractJobWatcher _jobWatcher; + + private readonly ExtractionRequestInfoMessageConsumer _requestInfoMessageConsumer; + private readonly ExtractFileCollectionMessageConsumer _fileCollectionMessageConsumer; + private readonly AnonVerificationMessageConsumer _anonVerificationMessageConsumer; + private readonly AnonFailedMessageConsumer _anonFailedMessageConsumer; + + + /// + /// Default constructor for CohortPackagerHost + /// + /// + /// + /// + /// + /// Pass to override the default IJobReporter that will be created from + /// Globals.CohortPackagerOptions.ReportFormat. That value should not be set if a reporter is passed. + /// + /// + /// + /// + public CohortPackagerHost( + GlobalOptions globals, + IExtractJobStore? jobStore = null, + IFileSystem? fileSystem = null, + IJobReporter? reporter = null, + IJobCompleteNotifier? notifier = null, + IMessageBroker? messageBroker = null, + DateTimeProvider? dateTimeProvider = null + ) + : base(globals, messageBroker) { - /// - /// The process which monitors for extract jobs being completed - /// - private readonly ExtractJobWatcher _jobWatcher; - - private readonly ExtractionRequestInfoMessageConsumer _requestInfoMessageConsumer; - private readonly ExtractFileCollectionMessageConsumer _fileCollectionMessageConsumer; - private readonly AnonVerificationMessageConsumer _anonVerificationMessageConsumer; - private readonly AnonFailedMessageConsumer _anonFailedMessageConsumer; - - - /// - /// Default constructor for CohortPackagerHost - /// - /// - /// - /// - /// - /// Pass to override the default IJobReporter that will be created from - /// Globals.CohortPackagerOptions.ReportFormat. That value should not be set if a reporter is passed. - /// - /// - /// - /// - public CohortPackagerHost( - GlobalOptions globals, - IExtractJobStore? jobStore = null, - IFileSystem? fileSystem = null, - IJobReporter? 
reporter = null, - IJobCompleteNotifier? notifier = null, - IMessageBroker? messageBroker = null, - DateTimeProvider? dateTimeProvider = null - ) - : base(globals, messageBroker) + var cohortPackagerOptions = globals.CohortPackagerOptions ?? + throw new ArgumentNullException(nameof(globals), "CohortPackagerOptions cannot be null"); + + if (jobStore == null) { - var cohortPackagerOptions = globals.CohortPackagerOptions ?? - throw new ArgumentNullException(nameof(globals), "CohortPackagerOptions cannot be null"); - - if (jobStore == null) - { - MongoDbOptions mongoDbOptions = Globals.MongoDatabases?.ExtractionStoreOptions - ?? throw new ArgumentException("Some part of Globals.MongoDatabases.ExtractionStoreOptions is null"); - - jobStore = new MongoExtractJobStore( - MongoClientHelpers.GetMongoClient(mongoDbOptions, HostProcessName), - mongoDbOptions.DatabaseName!, - dateTimeProvider - ); - } - else if (dateTimeProvider != null) - throw new ArgumentException("jobStore and dateTimeProvider are mutually exclusive arguments"); - - if (reporter == null) - { - // Globals.FileSystemOptions checked in base constructor - var extractRoot = Globals.FileSystemOptions!.ExtractRoot; - if (string.IsNullOrWhiteSpace(extractRoot)) - throw new ArgumentOutOfRangeException(nameof(globals)); - - reporter = new JobReporter( - jobStore, - fileSystem ?? new FileSystem(), - extractRoot, - cohortPackagerOptions.ReportNewLine - ); - } - - notifier ??= JobCompleteNotifierFactory.GetNotifier( - cohortPackagerOptions.NotifierType! + MongoDbOptions mongoDbOptions = Globals.MongoDatabases?.ExtractionStoreOptions + ?? 
throw new ArgumentException("Some part of Globals.MongoDatabases.ExtractionStoreOptions is null"); + + jobStore = new MongoExtractJobStore( + MongoClientHelpers.GetMongoClient(mongoDbOptions, HostProcessName), + mongoDbOptions.DatabaseName!, + dateTimeProvider ); + } + else if (dateTimeProvider != null) + throw new ArgumentException("jobStore and dateTimeProvider are mutually exclusive arguments"); + + if (reporter == null) + { + // Globals.FileSystemOptions checked in base constructor + var extractRoot = Globals.FileSystemOptions!.ExtractRoot; + if (string.IsNullOrWhiteSpace(extractRoot)) + throw new ArgumentOutOfRangeException(nameof(globals)); - _jobWatcher = new ExtractJobWatcher( - cohortPackagerOptions, + reporter = new JobReporter( jobStore, - ExceptionCallback, - notifier, - reporter + fileSystem ?? new FileSystem(), + extractRoot, + cohortPackagerOptions.ReportNewLine ); + } - AddControlHandler(new CohortPackagerControlMessageHandler(_jobWatcher)); + notifier ??= JobCompleteNotifierFactory.GetNotifier( + cohortPackagerOptions.NotifierType! + ); - var maxUnacknowledgedMessages = cohortPackagerOptions.VerificationStatusOptions?.QoSPrefetchCount ?? - throw new ArgumentNullException(nameof(globals), "CohortPackagerOptions.VerificationStatusOptions cannot be null"); + _jobWatcher = new ExtractJobWatcher( + cohortPackagerOptions, + jobStore, + ExceptionCallback, + notifier, + reporter + ); - // Setup our consumers + AddControlHandler(new CohortPackagerControlMessageHandler(_jobWatcher)); - _requestInfoMessageConsumer = new ExtractionRequestInfoMessageConsumer(jobStore); - _fileCollectionMessageConsumer = new ExtractFileCollectionMessageConsumer(jobStore); - _anonFailedMessageConsumer = new AnonFailedMessageConsumer(jobStore); + var maxUnacknowledgedMessages = cohortPackagerOptions.VerificationStatusOptions?.QoSPrefetchCount ?? 
+ throw new ArgumentNullException(nameof(globals), "CohortPackagerOptions.VerificationStatusOptions cannot be null"); - var verificationMessageQueueFlushTime = - cohortPackagerOptions.VerificationMessageQueueFlushTimeSeconds != null - ? TimeSpan.FromSeconds((double)cohortPackagerOptions.VerificationMessageQueueFlushTimeSeconds) - : CohortPackagerOptions.DefaultVerificationMessageQueueFlushTime; + // Setup our consumers - _anonVerificationMessageConsumer = new AnonVerificationMessageConsumer( - jobStore, - cohortPackagerOptions.VerificationMessageQueueProcessBatches, - maxUnacknowledgedMessages, - verificationMessageQueueFlushTime - ); - } + _requestInfoMessageConsumer = new ExtractionRequestInfoMessageConsumer(jobStore); + _fileCollectionMessageConsumer = new ExtractFileCollectionMessageConsumer(jobStore); + _anonFailedMessageConsumer = new AnonFailedMessageConsumer(jobStore); - public override void Start() - { - Logger.Debug("Starting host"); + var verificationMessageQueueFlushTime = + cohortPackagerOptions.VerificationMessageQueueFlushTimeSeconds != null + ? 
TimeSpan.FromSeconds((double)cohortPackagerOptions.VerificationMessageQueueFlushTimeSeconds) + : CohortPackagerOptions.DefaultVerificationMessageQueueFlushTime; + + _anonVerificationMessageConsumer = new AnonVerificationMessageConsumer( + jobStore, + cohortPackagerOptions.VerificationMessageQueueProcessBatches, + maxUnacknowledgedMessages, + verificationMessageQueueFlushTime + ); + } - _jobWatcher.Start(); + public override void Start() + { + Logger.Debug("Starting host"); - // TODO(rkm 2020-03-02) Once this is transactional, we can have one "master" service which actually does the job checking - MessageBroker.StartConsumer(Globals.CohortPackagerOptions!.ExtractRequestInfoOptions!, _requestInfoMessageConsumer, isSolo: true); - MessageBroker.StartConsumer(Globals.CohortPackagerOptions.FileCollectionInfoOptions!, _fileCollectionMessageConsumer, isSolo: true); - MessageBroker.StartConsumer(Globals.CohortPackagerOptions.NoVerifyStatusOptions!, _anonFailedMessageConsumer, isSolo: true); - MessageBroker.StartConsumer(Globals.CohortPackagerOptions.VerificationStatusOptions!, _anonVerificationMessageConsumer, isSolo: true); - } + _jobWatcher.Start(); - public override void Stop(string reason) - { - _jobWatcher.StopProcessing("Host - " + reason); + // TODO(rkm 2020-03-02) Once this is transactional, we can have one "master" service which actually does the job checking + MessageBroker.StartConsumer(Globals.CohortPackagerOptions!.ExtractRequestInfoOptions!, _requestInfoMessageConsumer, isSolo: true); + MessageBroker.StartConsumer(Globals.CohortPackagerOptions.FileCollectionInfoOptions!, _fileCollectionMessageConsumer, isSolo: true); + MessageBroker.StartConsumer(Globals.CohortPackagerOptions.NoVerifyStatusOptions!, _anonFailedMessageConsumer, isSolo: true); + MessageBroker.StartConsumer(Globals.CohortPackagerOptions.VerificationStatusOptions!, _anonVerificationMessageConsumer, isSolo: true); + } + + public override void Stop(string reason) + { + 
_jobWatcher.StopProcessing("Host - " + reason); - _anonVerificationMessageConsumer.Dispose(); + _anonVerificationMessageConsumer.Dispose(); - base.Stop(reason); - } + base.Stop(reason); + } - private void ExceptionCallback(Exception e) - { - Fatal("ExtractJobWatcher threw an exception", e); - } + private void ExceptionCallback(Exception e) + { + Fatal("ExtractJobWatcher threw an exception", e); } } diff --git a/src/SmiServices/Microservices/CohortPackager/ExtractFileCollectionMessageConsumer.cs b/src/SmiServices/Microservices/CohortPackager/ExtractFileCollectionMessageConsumer.cs index 899acfbf6..ad305a31a 100644 --- a/src/SmiServices/Microservices/CohortPackager/ExtractFileCollectionMessageConsumer.cs +++ b/src/SmiServices/Microservices/CohortPackager/ExtractFileCollectionMessageConsumer.cs @@ -4,35 +4,34 @@ using SmiServices.Microservices.CohortPackager.ExtractJobStorage; using System; -namespace SmiServices.Microservices.CohortPackager +namespace SmiServices.Microservices.CohortPackager; + +/// +/// Consumer for (s) +/// +public class ExtractFileCollectionMessageConsumer : Consumer { - /// - /// Consumer for (s) - /// - public class ExtractFileCollectionMessageConsumer : Consumer - { - private readonly IExtractJobStore _store; + private readonly IExtractJobStore _store; + + public ExtractFileCollectionMessageConsumer(IExtractJobStore store) + { + _store = store; + } - public ExtractFileCollectionMessageConsumer(IExtractJobStore store) + protected override void ProcessMessageImpl(IMessageHeader header, ExtractFileCollectionInfoMessage message, ulong tag) + { + try { - _store = store; + _store.PersistMessageToStore(message, header); } - - protected override void ProcessMessageImpl(IMessageHeader header, ExtractFileCollectionInfoMessage message, ulong tag) + catch (ApplicationException e) { - try - { - _store.PersistMessageToStore(message, header); - } - catch (ApplicationException e) - { - // Catch specific exceptions we are aware of, any uncaught will bubble up 
to the wrapper in ProcessMessage - ErrorAndNack(header, tag, "Error while processing ExtractFileCollectionInfoMessage", e); - return; - } - - Ack(header, tag); + // Catch specific exceptions we are aware of, any uncaught will bubble up to the wrapper in ProcessMessage + ErrorAndNack(header, tag, "Error while processing ExtractFileCollectionInfoMessage", e); + return; } + + Ack(header, tag); } } diff --git a/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/CompletedExtractJobInfo.cs b/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/CompletedExtractJobInfo.cs index a37e5453c..141ebe596 100644 --- a/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/CompletedExtractJobInfo.cs +++ b/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/CompletedExtractJobInfo.cs @@ -1,44 +1,43 @@ using System; -namespace SmiServices.Microservices.CohortPackager.ExtractJobStorage +namespace SmiServices.Microservices.CohortPackager.ExtractJobStorage; + +public class CompletedExtractJobInfo : ExtractJobInfo { - public class CompletedExtractJobInfo : ExtractJobInfo - { - /// - /// DateTime the job was completed at (the time when CohortPackager ran its final checks) - /// - public DateTime JobCompletedAt { get; } + /// + /// DateTime the job was completed at (the time when CohortPackager ran its final checks) + /// + public DateTime JobCompletedAt { get; } - public CompletedExtractJobInfo( - Guid extractionJobIdentifier, - DateTime jobSubmittedAt, - DateTime completedAt, - string projectNumber, - string extractionDirectory, - string keyTag, - uint keyCount, - string userName, - string? extractionModality, - bool isIdentifiableExtraction, - bool isNoFilterExtraction + public CompletedExtractJobInfo( + Guid extractionJobIdentifier, + DateTime jobSubmittedAt, + DateTime completedAt, + string projectNumber, + string extractionDirectory, + string keyTag, + uint keyCount, + string userName, + string? 
extractionModality, + bool isIdentifiableExtraction, + bool isNoFilterExtraction + ) + : base( + extractionJobIdentifier, + jobSubmittedAt, + projectNumber, + extractionDirectory, + keyTag, + keyCount, + userName, + extractionModality, + ExtractJobStatus.Completed, + isIdentifiableExtraction, + isNoFilterExtraction ) - : base( - extractionJobIdentifier, - jobSubmittedAt, - projectNumber, - extractionDirectory, - keyTag, - keyCount, - userName, - extractionModality, - ExtractJobStatus.Completed, - isIdentifiableExtraction, - isNoFilterExtraction - ) - { - JobCompletedAt = completedAt != default ? completedAt : throw new ArgumentException(null, nameof(completedAt)); - } + { + JobCompletedAt = completedAt != default ? completedAt : throw new ArgumentException(null, nameof(completedAt)); } } diff --git a/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/ExtractJobInfo.cs b/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/ExtractJobInfo.cs index 277ab2c26..52410631c 100644 --- a/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/ExtractJobInfo.cs +++ b/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/ExtractJobInfo.cs @@ -3,125 +3,124 @@ using System.Diagnostics.CodeAnalysis; using System.Text; -namespace SmiServices.Microservices.CohortPackager.ExtractJobStorage +namespace SmiServices.Microservices.CohortPackager.ExtractJobStorage; + +/// +/// Class to wrap up all information about an extract job. Built by the when loading job information. +/// +public class ExtractJobInfo : MemberwiseEquatable { /// - /// Class to wrap up all information about an extract job. Built by the when loading job information. + /// Unique identifier for this extract job. 
In the Mongo store implementation, this is also the _id of the document + /// + public Guid ExtractionJobIdentifier { get; } + + /// + /// DateTime the job was submitted at (the time the ExtractImages service was run) + /// + public DateTime JobSubmittedAt { get; } + + /// + /// Reference number for the project + /// + public string ProjectNumber { get; } + + /// + /// Directory to extract files into, relative to the extraction root. Should be of the format projName/extractions/extractName + /// + public string ExtractionDirectory { get; } + + /// + /// The DICOM tag of the identifier we are extracting (i.e. "SeriesInstanceUID") + /// + public string KeyTag { get; } + + /// + /// Total number of expected distinct identifiers (i.e. number of distinct SeriesInstanceUIDs that are expected to be extracted) + /// + public uint KeyValueCount { get; } + + /// + /// Username of the person who submitted the job /// - public class ExtractJobInfo : MemberwiseEquatable + public string UserName { get; } + + /// + /// The modality being extracted + /// + public string? ExtractionModality { get; } + + /// + /// Current status of the extract job + /// + public ExtractJobStatus JobStatus { get; } + + public bool IsIdentifiableExtraction { get; } + + public bool IsNoFilterExtraction { get; } + + + public ExtractJobInfo( + Guid extractionJobIdentifier, + DateTime jobSubmittedAt, + string projectNumber, + string extractionDirectory, + string keyTag, + uint keyValueCount, + string userName, + string? extractionModality, + ExtractJobStatus jobStatus, + bool isIdentifiableExtraction, + bool isNoFilterExtraction + ) + { + ExtractionJobIdentifier = extractionJobIdentifier != default ? extractionJobIdentifier : throw new ArgumentOutOfRangeException(nameof(extractionJobIdentifier), $"Must not be the default {nameof(Guid)}"); + JobSubmittedAt = jobSubmittedAt != default ? 
jobSubmittedAt : throw new ArgumentOutOfRangeException(nameof(jobSubmittedAt), $"Must not be the default {nameof(DateTime)}"); + ProjectNumber = !string.IsNullOrWhiteSpace(projectNumber) ? projectNumber : throw new ArgumentOutOfRangeException(nameof(projectNumber), "Must not be null or whitespace"); + ExtractionDirectory = !string.IsNullOrWhiteSpace(extractionDirectory) ? extractionDirectory : throw new ArgumentOutOfRangeException(nameof(extractionDirectory), "Must not be null or whitespace"); + KeyTag = !string.IsNullOrWhiteSpace(keyTag) ? keyTag : throw new ArgumentOutOfRangeException(nameof(keyTag), "Must not be null or whitespace"); + KeyValueCount = keyValueCount > 0 ? keyValueCount : throw new ArgumentOutOfRangeException(nameof(keyValueCount), "Must not be zero"); + UserName = !string.IsNullOrWhiteSpace(userName) ? userName : throw new ArgumentOutOfRangeException(nameof(userName), "Must not be null or whitespace"); + if (extractionModality != null) + ExtractionModality = !string.IsNullOrWhiteSpace(extractionModality) ? extractionModality : throw new ArgumentOutOfRangeException(nameof(extractionModality), "Must not be whitespace if passed"); + JobStatus = jobStatus != default ? 
jobStatus : throw new ArgumentOutOfRangeException(nameof(jobStatus), $"Must not be the default {nameof(ExtractJobStatus)}"); + IsIdentifiableExtraction = isIdentifiableExtraction; + IsNoFilterExtraction = isNoFilterExtraction; + } + + /// + /// Returns the extraction name (last part of projName/extractions/extractName) + /// + /// + public string ExtractionName() + { + string[] split = ExtractionDirectory.Split('/', '\\'); + return split[^1]; + } + + /// + /// Returns the project extraction directory (first two parts of projName/extractions/extractName) + /// + /// + public string ProjectExtractionDir() + { + int idx = ExtractionDirectory.LastIndexOfAny(['/', '\\']); + return ExtractionDirectory[..idx]; + } + + [ExcludeFromCodeCoverage] + public override string ToString() { - /// - /// Unique identifier for this extract job. In the Mongo store implementation, this is also the _id of the document - /// - public Guid ExtractionJobIdentifier { get; } - - /// - /// DateTime the job was submitted at (the time the ExtractImages service was run) - /// - public DateTime JobSubmittedAt { get; } - - /// - /// Reference number for the project - /// - public string ProjectNumber { get; } - - /// - /// Directory to extract files into, relative to the extraction root. Should be of the format projName/extractions/extractName - /// - public string ExtractionDirectory { get; } - - /// - /// The DICOM tag of the identifier we are extracting (i.e. "SeriesInstanceUID") - /// - public string KeyTag { get; } - - /// - /// Total number of expected distinct identifiers (i.e. number of distinct SeriesInstanceUIDs that are expected to be extracted) - /// - public uint KeyValueCount { get; } - - /// - /// Username of the person who submitted the job - /// - public string UserName { get; } - - /// - /// The modality being extracted - /// - public string? 
ExtractionModality { get; } - - /// - /// Current status of the extract job - /// - public ExtractJobStatus JobStatus { get; } - - public bool IsIdentifiableExtraction { get; } - - public bool IsNoFilterExtraction { get; } - - - public ExtractJobInfo( - Guid extractionJobIdentifier, - DateTime jobSubmittedAt, - string projectNumber, - string extractionDirectory, - string keyTag, - uint keyValueCount, - string userName, - string? extractionModality, - ExtractJobStatus jobStatus, - bool isIdentifiableExtraction, - bool isNoFilterExtraction - ) - { - ExtractionJobIdentifier = extractionJobIdentifier != default ? extractionJobIdentifier : throw new ArgumentOutOfRangeException(nameof(extractionJobIdentifier), $"Must not be the default {nameof(Guid)}"); - JobSubmittedAt = jobSubmittedAt != default ? jobSubmittedAt : throw new ArgumentOutOfRangeException(nameof(jobSubmittedAt), $"Must not be the default {nameof(DateTime)}"); - ProjectNumber = !string.IsNullOrWhiteSpace(projectNumber) ? projectNumber : throw new ArgumentOutOfRangeException(nameof(projectNumber), "Must not be null or whitespace"); - ExtractionDirectory = !string.IsNullOrWhiteSpace(extractionDirectory) ? extractionDirectory : throw new ArgumentOutOfRangeException(nameof(extractionDirectory), "Must not be null or whitespace"); - KeyTag = !string.IsNullOrWhiteSpace(keyTag) ? keyTag : throw new ArgumentOutOfRangeException(nameof(keyTag), "Must not be null or whitespace"); - KeyValueCount = keyValueCount > 0 ? keyValueCount : throw new ArgumentOutOfRangeException(nameof(keyValueCount), "Must not be zero"); - UserName = !string.IsNullOrWhiteSpace(userName) ? userName : throw new ArgumentOutOfRangeException(nameof(userName), "Must not be null or whitespace"); - if (extractionModality != null) - ExtractionModality = !string.IsNullOrWhiteSpace(extractionModality) ? 
extractionModality : throw new ArgumentOutOfRangeException(nameof(extractionModality), "Must not be whitespace if passed"); - JobStatus = jobStatus != default ? jobStatus : throw new ArgumentOutOfRangeException(nameof(jobStatus), $"Must not be the default {nameof(ExtractJobStatus)}"); - IsIdentifiableExtraction = isIdentifiableExtraction; - IsNoFilterExtraction = isNoFilterExtraction; - } - - /// - /// Returns the extraction name (last part of projName/extractions/extractName) - /// - /// - public string ExtractionName() - { - string[] split = ExtractionDirectory.Split('/', '\\'); - return split[^1]; - } - - /// - /// Returns the project extraction directory (first two parts of projName/extractions/extractName) - /// - /// - public string ProjectExtractionDir() - { - int idx = ExtractionDirectory.LastIndexOfAny(['/', '\\']); - return ExtractionDirectory[..idx]; - } - - [ExcludeFromCodeCoverage] - public override string ToString() - { - var sb = new StringBuilder(); - sb.AppendLine("ExtractionJobIdentifier: " + ExtractionJobIdentifier); - sb.AppendLine("JobStatus: " + JobStatus); - sb.AppendLine("ExtractionDirectory: " + ExtractionDirectory); - sb.AppendLine("KeyTag: " + KeyTag); - sb.AppendLine("KeyCount: " + KeyValueCount); - sb.AppendLine("UserName: " + UserName); - sb.AppendLine("ExtractionModality: " + ExtractionModality); - sb.AppendLine("IsIdentifiableExtraction: " + IsIdentifiableExtraction); - sb.AppendLine("IsNoFilterExtraction: " + IsNoFilterExtraction); - return sb.ToString(); - } + var sb = new StringBuilder(); + sb.AppendLine("ExtractionJobIdentifier: " + ExtractionJobIdentifier); + sb.AppendLine("JobStatus: " + JobStatus); + sb.AppendLine("ExtractionDirectory: " + ExtractionDirectory); + sb.AppendLine("KeyTag: " + KeyTag); + sb.AppendLine("KeyCount: " + KeyValueCount); + sb.AppendLine("UserName: " + UserName); + sb.AppendLine("ExtractionModality: " + ExtractionModality); + sb.AppendLine("IsIdentifiableExtraction: " + IsIdentifiableExtraction); + 
sb.AppendLine("IsNoFilterExtraction: " + IsNoFilterExtraction); + return sb.ToString(); } } diff --git a/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/ExtractJobStatus.cs b/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/ExtractJobStatus.cs index 0dd098227..8d05a16f6 100644 --- a/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/ExtractJobStatus.cs +++ b/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/ExtractJobStatus.cs @@ -1,15 +1,14 @@ -namespace SmiServices.Microservices.CohortPackager.ExtractJobStorage +namespace SmiServices.Microservices.CohortPackager.ExtractJobStorage; + +/// +/// Possible job statuses used by +/// +public enum ExtractJobStatus { - /// - /// Possible job statuses used by - /// - public enum ExtractJobStatus - { - Unknown, - WaitingForCollectionInfo, - WaitingForStatuses, - ReadyForChecks, - Completed, - Failed, - } + Unknown, + WaitingForCollectionInfo, + WaitingForStatuses, + ReadyForChecks, + Completed, + Failed, } diff --git a/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/ExtractJobStore.cs b/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/ExtractJobStore.cs index c9b31d4ba..3f9291c16 100644 --- a/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/ExtractJobStore.cs +++ b/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/ExtractJobStore.cs @@ -6,154 +6,153 @@ using System.Collections.Generic; -namespace SmiServices.Microservices.CohortPackager.ExtractJobStorage +namespace SmiServices.Microservices.CohortPackager.ExtractJobStorage; + +/// +/// Base class for any extract job store implementation +/// +public abstract class ExtractJobStore : IExtractJobStore { - /// - /// Base class for any extract job store implementation - /// - public abstract class ExtractJobStore : IExtractJobStore - { - protected readonly ILogger Logger; + protected readonly ILogger Logger; - protected ExtractJobStore() - { - Logger = 
LogManager.GetLogger(GetType().Name); - } + protected ExtractJobStore() + { + Logger = LogManager.GetLogger(GetType().Name); + } - public void PersistMessageToStore( - ExtractionRequestInfoMessage message, - IMessageHeader header) - { - Logger.Info($"Received new job info {message}"); - PersistMessageToStoreImpl(message, header); - } + public void PersistMessageToStore( + ExtractionRequestInfoMessage message, + IMessageHeader header) + { + Logger.Info($"Received new job info {message}"); + PersistMessageToStoreImpl(message, header); + } - public void PersistMessageToStore(ExtractFileCollectionInfoMessage message, IMessageHeader header) - { - PersistMessageToStoreImpl(message, header); - } + public void PersistMessageToStore(ExtractFileCollectionInfoMessage message, IMessageHeader header) + { + PersistMessageToStoreImpl(message, header); + } - public void PersistMessageToStore( - ExtractedFileStatusMessage message, - IMessageHeader header) + public void PersistMessageToStore( + ExtractedFileStatusMessage message, + IMessageHeader header) + { + switch (message.Status) { - switch (message.Status) - { - case ExtractedFileStatus.None: - throw new ApplicationException("ExtractedFileStatus was None"); - case ExtractedFileStatus.Anonymised: - throw new ApplicationException("Received an anonymisation successful message from the failure queue"); - default: - PersistMessageToStoreImpl(message, header); - break; - } + case ExtractedFileStatus.None: + throw new ApplicationException("ExtractedFileStatus was None"); + case ExtractedFileStatus.Anonymised: + throw new ApplicationException("Received an anonymisation successful message from the failure queue"); + default: + PersistMessageToStoreImpl(message, header); + break; } + } - public void PersistMessageToStore( - ExtractedFileVerificationMessage message, - IMessageHeader header) - { - ValidateMessage(message); - PersistMessageToStoreImpl(message, header); - } + public void PersistMessageToStore( + 
ExtractedFileVerificationMessage message, + IMessageHeader header) + { + ValidateMessage(message); + PersistMessageToStoreImpl(message, header); + } - public List GetReadyJobs(Guid jobId = new Guid()) - { - Logger.Debug("Getting job info for " + (jobId != Guid.Empty ? jobId.ToString() : "all active jobs")); - return GetReadyJobsImpl(jobId); - } + public List GetReadyJobs(Guid jobId = new Guid()) + { + Logger.Debug("Getting job info for " + (jobId != Guid.Empty ? jobId.ToString() : "all active jobs")); + return GetReadyJobsImpl(jobId); + } - public void MarkJobCompleted(Guid jobId) - { - if (jobId == default) - throw new ArgumentNullException(nameof(jobId)); + public void MarkJobCompleted(Guid jobId) + { + if (jobId == default) + throw new ArgumentNullException(nameof(jobId)); - CompleteJobImpl(jobId); - Logger.Debug($"Marked job {jobId} as completed"); - } + CompleteJobImpl(jobId); + Logger.Debug($"Marked job {jobId} as completed"); + } - public void MarkJobFailed( - Guid jobId, - Exception cause) - { - if (jobId == default) - throw new ArgumentNullException(nameof(jobId)); + public void MarkJobFailed( + Guid jobId, + Exception cause) + { + if (jobId == default) + throw new ArgumentNullException(nameof(jobId)); - MarkJobFailedImpl(jobId, cause); - Logger.Debug($"Marked job {jobId} as failed"); - } + MarkJobFailedImpl(jobId, cause); + Logger.Debug($"Marked job {jobId} as failed"); + } - public CompletedExtractJobInfo GetCompletedJobInfo(Guid jobId) - { - if (jobId == default) - throw new ArgumentNullException(nameof(jobId)); + public CompletedExtractJobInfo GetCompletedJobInfo(Guid jobId) + { + if (jobId == default) + throw new ArgumentNullException(nameof(jobId)); - return GetCompletedJobInfoImpl(jobId) ?? throw new ApplicationException("The job store implementation returned a null ExtractJobInfo object"); - } + return GetCompletedJobInfoImpl(jobId) ?? 
throw new ApplicationException("The job store implementation returned a null ExtractJobInfo object"); + } - public IEnumerable GetCompletedJobRejections(Guid jobId) - { - if (jobId == default) - throw new ArgumentNullException(nameof(jobId)); + public IEnumerable GetCompletedJobRejections(Guid jobId) + { + if (jobId == default) + throw new ArgumentNullException(nameof(jobId)); - return GetCompletedJobRejectionsImpl(jobId); - } + return GetCompletedJobRejectionsImpl(jobId); + } - public IEnumerable GetCompletedJobAnonymisationFailures(Guid jobId) - { - if (jobId == default) - throw new ArgumentNullException(nameof(jobId)); + public IEnumerable GetCompletedJobAnonymisationFailures(Guid jobId) + { + if (jobId == default) + throw new ArgumentNullException(nameof(jobId)); - return GetCompletedJobAnonymisationFailuresImpl(jobId); - } + return GetCompletedJobAnonymisationFailuresImpl(jobId); + } - public IEnumerable GetCompletedJobVerificationFailures(Guid jobId) - { - if (jobId == default) - throw new ArgumentNullException(nameof(jobId)); + public IEnumerable GetCompletedJobVerificationFailures(Guid jobId) + { + if (jobId == default) + throw new ArgumentNullException(nameof(jobId)); - return GetCompletedJobVerificationFailuresImpl(jobId); - } + return GetCompletedJobVerificationFailuresImpl(jobId); + } - public IEnumerable GetCompletedJobMissingFileList(Guid jobId) - { - if (jobId == default) - throw new ArgumentNullException(nameof(jobId)); + public IEnumerable GetCompletedJobMissingFileList(Guid jobId) + { + if (jobId == default) + throw new ArgumentNullException(nameof(jobId)); - return GetCompletedJobMissingFileListImpl(jobId); - } + return GetCompletedJobMissingFileListImpl(jobId); + } - public void AddToWriteQueue(ExtractedFileVerificationMessage message, IMessageHeader header, ulong tag) - { - ValidateMessage(message); - AddToWriteQueueImpl(message, header, tag); - } + public void AddToWriteQueue(ExtractedFileVerificationMessage message, IMessageHeader header, 
ulong tag) + { + ValidateMessage(message); + AddToWriteQueueImpl(message, header, tag); + } - protected abstract void PersistMessageToStoreImpl(ExtractionRequestInfoMessage message, IMessageHeader header); - protected abstract void PersistMessageToStoreImpl(ExtractFileCollectionInfoMessage collectionInfoMessage, IMessageHeader header); - protected abstract void PersistMessageToStoreImpl(ExtractedFileStatusMessage message, IMessageHeader header); - protected abstract void PersistMessageToStoreImpl(ExtractedFileVerificationMessage message, IMessageHeader header); - protected abstract List GetReadyJobsImpl(Guid specificJobId = new Guid()); - protected abstract void CompleteJobImpl(Guid jobId); - protected abstract void MarkJobFailedImpl(Guid jobId, Exception e); - protected abstract CompletedExtractJobInfo GetCompletedJobInfoImpl(Guid jobId); - protected abstract IEnumerable GetCompletedJobRejectionsImpl(Guid jobId); - protected abstract IEnumerable GetCompletedJobAnonymisationFailuresImpl(Guid jobId); - protected abstract IEnumerable GetCompletedJobVerificationFailuresImpl(Guid jobId); - protected abstract IEnumerable GetCompletedJobMissingFileListImpl(Guid jobId); - protected abstract void AddToWriteQueueImpl(ExtractedFileVerificationMessage message, IMessageHeader header, ulong tag); - public abstract void ProcessVerificationMessageQueue(); - public abstract ConcurrentQueue> ProcessedVerificationMessages { get; } - - private static void ValidateMessage(ExtractedFileVerificationMessage message) - { - if (string.IsNullOrWhiteSpace(message.OutputFilePath)) - throw new ApplicationException("Received a verification message without the AnonymisedFileName set"); - if (string.IsNullOrWhiteSpace(message.Report)) - throw new ApplicationException("Null or empty report data"); - if (message.Status == VerifiedFileStatus.IsIdentifiable && message.Report == "[]") - throw new ApplicationException("No report data for message marked as identifiable"); - } + protected abstract void 
PersistMessageToStoreImpl(ExtractionRequestInfoMessage message, IMessageHeader header); + protected abstract void PersistMessageToStoreImpl(ExtractFileCollectionInfoMessage collectionInfoMessage, IMessageHeader header); + protected abstract void PersistMessageToStoreImpl(ExtractedFileStatusMessage message, IMessageHeader header); + protected abstract void PersistMessageToStoreImpl(ExtractedFileVerificationMessage message, IMessageHeader header); + protected abstract List GetReadyJobsImpl(Guid specificJobId = new Guid()); + protected abstract void CompleteJobImpl(Guid jobId); + protected abstract void MarkJobFailedImpl(Guid jobId, Exception e); + protected abstract CompletedExtractJobInfo GetCompletedJobInfoImpl(Guid jobId); + protected abstract IEnumerable GetCompletedJobRejectionsImpl(Guid jobId); + protected abstract IEnumerable GetCompletedJobAnonymisationFailuresImpl(Guid jobId); + protected abstract IEnumerable GetCompletedJobVerificationFailuresImpl(Guid jobId); + protected abstract IEnumerable GetCompletedJobMissingFileListImpl(Guid jobId); + protected abstract void AddToWriteQueueImpl(ExtractedFileVerificationMessage message, IMessageHeader header, ulong tag); + public abstract void ProcessVerificationMessageQueue(); + public abstract ConcurrentQueue> ProcessedVerificationMessages { get; } + + private static void ValidateMessage(ExtractedFileVerificationMessage message) + { + if (string.IsNullOrWhiteSpace(message.OutputFilePath)) + throw new ApplicationException("Received a verification message without the AnonymisedFileName set"); + if (string.IsNullOrWhiteSpace(message.Report)) + throw new ApplicationException("Null or empty report data"); + if (message.Status == VerifiedFileStatus.IsIdentifiable && message.Report == "[]") + throw new ApplicationException("No report data for message marked as identifiable"); } } diff --git a/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/ExtractionIdentifierRejectionInfo.cs 
b/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/ExtractionIdentifierRejectionInfo.cs index 4901a2a57..05f86df1b 100644 --- a/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/ExtractionIdentifierRejectionInfo.cs +++ b/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/ExtractionIdentifierRejectionInfo.cs @@ -3,48 +3,47 @@ using System.Linq; -namespace SmiServices.Microservices.CohortPackager.ExtractJobStorage +namespace SmiServices.Microservices.CohortPackager.ExtractJobStorage; + +/// +/// Provides a list of all files which were rejected from the extraction for a given extraction identifier (e.g. SeriesInstanceUID), and a reason for each rejection +/// +public class ExtractionIdentifierRejectionInfo { /// - /// Provides a list of all files which were rejected from the extraction for a given extraction identifier (e.g. SeriesInstanceUID), and a reason for each rejection + /// The ID of the key which this file was matched from + /// + public readonly string ExtractionIdentifier; + + // TODO(rkm 2020-10-28) This API is a bit odd -- might be more useful to get a list of file,reason from CohortExtractor instead? + /// + /// The list of unique reasons for files being blocked, and a count of each reason /// - public class ExtractionIdentifierRejectionInfo + public readonly Dictionary RejectionItems; + + + public ExtractionIdentifierRejectionInfo( + string keyValue, + Dictionary rejectionItems + ) { - /// - /// The ID of the key which this file was matched from - /// - public readonly string ExtractionIdentifier; - - // TODO(rkm 2020-10-28) This API is a bit odd -- might be more useful to get a list of file,reason from CohortExtractor instead? 
- /// - /// The list of unique reasons for files being blocked, and a count of each reason - /// - public readonly Dictionary RejectionItems; - - - public ExtractionIdentifierRejectionInfo( - string keyValue, - Dictionary rejectionItems - ) - { - ExtractionIdentifier = string.IsNullOrWhiteSpace(keyValue) ? throw new ArgumentException(null, nameof(keyValue)) : keyValue; - - CheckRejectionDict(rejectionItems); - RejectionItems = rejectionItems; - } - - // NOTE(rkm 2020-10-27) A bit heavy-handed, but might help to track-down why some of the rejection reasons were empty in the final report - private static void CheckRejectionDict(Dictionary rejectionItems) - { - if (rejectionItems.Count == 0) - throw new ArgumentException("Null or empty dictionary"); - - if (rejectionItems.Any(x => string.IsNullOrWhiteSpace(x.Key))) - throw new ArgumentException("Dict contains a whitespace-only key"); - - List zeroKeys = rejectionItems.Where(x => x.Value == 0).Select(x => x.Key).ToList(); - if (zeroKeys.Count != 0) - throw new ArgumentException($"Dict contains key(s) with a zero count: {string.Join(',', zeroKeys)}"); - } + ExtractionIdentifier = string.IsNullOrWhiteSpace(keyValue) ? 
throw new ArgumentException(null, nameof(keyValue)) : keyValue; + + CheckRejectionDict(rejectionItems); + RejectionItems = rejectionItems; + } + + // NOTE(rkm 2020-10-27) A bit heavy-handed, but might help to track-down why some of the rejection reasons were empty in the final report + private static void CheckRejectionDict(Dictionary rejectionItems) + { + if (rejectionItems.Count == 0) + throw new ArgumentException("Null or empty dictionary"); + + if (rejectionItems.Any(x => string.IsNullOrWhiteSpace(x.Key))) + throw new ArgumentException("Dict contains a whitespace-only key"); + + List zeroKeys = rejectionItems.Where(x => x.Value == 0).Select(x => x.Key).ToList(); + if (zeroKeys.Count != 0) + throw new ArgumentException($"Dict contains key(s) with a zero count: {string.Join(',', zeroKeys)}"); } } diff --git a/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/FileAnonFailureInfo.cs b/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/FileAnonFailureInfo.cs index ab44ee526..c8b44c72d 100644 --- a/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/FileAnonFailureInfo.cs +++ b/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/FileAnonFailureInfo.cs @@ -1,30 +1,29 @@ using System; -namespace SmiServices.Microservices.CohortPackager.ExtractJobStorage +namespace SmiServices.Microservices.CohortPackager.ExtractJobStorage; + +/// +/// Provides information on why a requested file could not be anonymised +/// +public class FileAnonFailureInfo { /// - /// Provides information on why a requested file could not be anonymised + /// The path of the original DICOM file which could not be extracted /// - public class FileAnonFailureInfo - { - /// - /// The path of the original DICOM file which could not be extracted - /// - public readonly string DicomFilePath; + public readonly string DicomFilePath; - /// - /// The reason for the file not being extracted - /// - public readonly string Reason; + /// + /// The reason for the file 
not being extracted + /// + public readonly string Reason; - public FileAnonFailureInfo( - string dicomFilePath, - string reason - ) - { - DicomFilePath = string.IsNullOrWhiteSpace(dicomFilePath) ? throw new ArgumentException(null, nameof(dicomFilePath)) : dicomFilePath; - Reason = string.IsNullOrWhiteSpace(reason) ? throw new ArgumentException(null, nameof(reason)) : reason; - } + public FileAnonFailureInfo( + string dicomFilePath, + string reason + ) + { + DicomFilePath = string.IsNullOrWhiteSpace(dicomFilePath) ? throw new ArgumentException(null, nameof(dicomFilePath)) : dicomFilePath; + Reason = string.IsNullOrWhiteSpace(reason) ? throw new ArgumentException(null, nameof(reason)) : reason; } } diff --git a/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/FileVerificationFailureInfo.cs b/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/FileVerificationFailureInfo.cs index 7126f29ee..0f4e3207e 100644 --- a/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/FileVerificationFailureInfo.cs +++ b/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/FileVerificationFailureInfo.cs @@ -1,32 +1,31 @@ using System; -namespace SmiServices.Microservices.CohortPackager.ExtractJobStorage +namespace SmiServices.Microservices.CohortPackager.ExtractJobStorage; + +/// +/// Contains information for an anonymised file which failed the validation checks +/// +public class FileVerificationFailureInfo { /// - /// Contains information for an anonymised file which failed the validation checks + /// The anonymised file path which has failed, relative to the extraction directory /// - public class FileVerificationFailureInfo - { - /// - /// The anonymised file path which has failed, relative to the extraction directory - /// - public readonly string AnonFilePath; + public readonly string AnonFilePath; - // NOTE(rkm 2020-10-28) This is a JSON string for now, but might be worth deserializing it into a Failure object here (instead of in 
JobReporter) - /// - /// The failure data from the validation checks, as a JSON string - /// - public readonly string Data; + // NOTE(rkm 2020-10-28) This is a JSON string for now, but might be worth deserializing it into a Failure object here (instead of in JobReporter) + /// + /// The failure data from the validation checks, as a JSON string + /// + public readonly string Data; - public FileVerificationFailureInfo( - string anonFilePath, - string failureData - ) - { - AnonFilePath = string.IsNullOrWhiteSpace(anonFilePath) ? throw new ArgumentException(null, nameof(anonFilePath)) : anonFilePath; - Data = string.IsNullOrWhiteSpace(failureData) ? throw new ArgumentException(null, nameof(failureData)) : failureData; - } + public FileVerificationFailureInfo( + string anonFilePath, + string failureData + ) + { + AnonFilePath = string.IsNullOrWhiteSpace(anonFilePath) ? throw new ArgumentException(null, nameof(anonFilePath)) : anonFilePath; + Data = string.IsNullOrWhiteSpace(failureData) ? throw new ArgumentException(null, nameof(failureData)) : failureData; } } diff --git a/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/IExtractJobStore.cs b/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/IExtractJobStore.cs index 283185283..1e49f444a 100644 --- a/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/IExtractJobStore.cs +++ b/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/IExtractJobStore.cs @@ -4,113 +4,112 @@ using System.Collections.Concurrent; using System.Collections.Generic; -namespace SmiServices.Microservices.CohortPackager.ExtractJobStorage +namespace SmiServices.Microservices.CohortPackager.ExtractJobStorage; + +/// +/// Interface for objects which represent a store of extract job information +/// +public interface IExtractJobStore { /// - /// Interface for objects which represent a store of extract job information + /// Serializes a and it's and stores it + /// + /// + /// + void 
PersistMessageToStore(ExtractionRequestInfoMessage requestInfoMessage, IMessageHeader header); + + /// + /// Serializes a and it's and stores it + /// + /// + /// + void PersistMessageToStore(ExtractFileCollectionInfoMessage collectionInfoMessage, IMessageHeader header); + + /// + /// Serializes a and it's and stores it + /// + /// + /// + void PersistMessageToStore(ExtractedFileStatusMessage fileStatusMessage, IMessageHeader header); + + /// + /// Serializes a and it's and stores it + /// + /// + /// + void PersistMessageToStore(ExtractedFileVerificationMessage anonVerificationMessage, IMessageHeader header); + + /// + /// Returns a list of all jobs which are ready for final checks + /// + /// A specific job to get for. Empty returns all jobs in progress + /// + List GetReadyJobs(Guid extractionJobIdentifier = new Guid()); + + /// + /// Cleanup/archive any data in the database related to an extract job + /// + /// + void MarkJobCompleted(Guid extractionJobIdentifier); + + /// + /// Quarantines a job if there is some issue processing it + /// + /// + /// + void MarkJobFailed(Guid extractionJobIdentifier, Exception cause); + + /// + /// Returns the ExtractJobInfo for a completed job + /// + /// + /// + CompletedExtractJobInfo GetCompletedJobInfo(Guid jobId); + + /// + /// Returns the rejection reasons for a completed job + /// + /// + /// + IEnumerable GetCompletedJobRejections(Guid jobId); + + /// + /// Returns the anonymisation failures for a completed job + /// + /// + /// + IEnumerable GetCompletedJobAnonymisationFailures(Guid jobId); + + /// + /// Returns the verification failures for a completed job + /// + /// + /// + IEnumerable GetCompletedJobVerificationFailures(Guid jobId); + + /// + /// Returns the full list of files that were matched from an input identifier but could not be found + /// + /// + /// + IEnumerable GetCompletedJobMissingFileList(Guid jobId); + + /// + /// Add a to the write queue. 
 The message should not be acknowledged + /// until the corresponding tag is returned by + /// + /// + /// + /// + void AddToWriteQueue(ExtractedFileVerificationMessage message, IMessageHeader header, ulong tag); + + /// + /// Process all queued s into the store + /// + void ProcessVerificationMessageQueue(); + + /// + /// All processed status messages which can be acknowledged /// - public interface IExtractJobStore - { - /// - /// Serializes a and it's and stores it - /// - /// - /// - void PersistMessageToStore(ExtractionRequestInfoMessage requestInfoMessage, IMessageHeader header); - - /// - /// Serializes a and it's and stores it - /// - /// - /// - void PersistMessageToStore(ExtractFileCollectionInfoMessage collectionInfoMessage, IMessageHeader header); - - /// - /// Serializes a and it's and stores it - /// - /// - /// - void PersistMessageToStore(ExtractedFileStatusMessage fileStatusMessage, IMessageHeader header); - - /// - /// Serializes a and it's and stores it - /// - /// - /// - void PersistMessageToStore(ExtractedFileVerificationMessage anonVerificationMessage, IMessageHeader header); - - /// - /// Returns a list of all jobs which are ready for final checks - /// - /// A specific job to get for. 
Empty returns all jobs in progress - /// - List GetReadyJobs(Guid extractionJobIdentifier = new Guid()); - - /// - /// Cleanup/archive any data in the database related to an extract job - /// - /// - void MarkJobCompleted(Guid extractionJobIdentifier); - - /// - /// Quarantines a job if there is some issue processing it - /// - /// - /// - void MarkJobFailed(Guid extractionJobIdentifier, Exception cause); - - /// - /// Returns the ExtractJobInfo for a completed job - /// - /// - /// - CompletedExtractJobInfo GetCompletedJobInfo(Guid jobId); - - /// - /// Returns the rejection reasons for a completed job - /// - /// - /// - IEnumerable GetCompletedJobRejections(Guid jobId); - - /// - /// Returns the anonymisation failures for a completed job - /// - /// - /// - IEnumerable GetCompletedJobAnonymisationFailures(Guid jobId); - - /// - /// Returns the verification failures for a completed job - /// - /// - /// - IEnumerable GetCompletedJobVerificationFailures(Guid jobId); - - /// - /// Returns the full list of files that were matched from an input identifier but could not be found - /// - /// - /// - IEnumerable GetCompletedJobMissingFileList(Guid jobId); - - /// - /// Add a to the write queue. 
The message should not be acknowledged - /// until the corresponding tag is reutrned by - /// - /// - /// - /// - void AddToWriteQueue(ExtractedFileVerificationMessage message, IMessageHeader header, ulong tag); - - /// - /// Process all queued s into the store - /// - void ProcessVerificationMessageQueue(); - - /// - /// All processed status messages which can be acknowledged - /// - ConcurrentQueue> ProcessedVerificationMessages { get; } - } + ConcurrentQueue> ProcessedVerificationMessages { get; } } diff --git a/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/MongoDB/MongoExtractJobInfoExtensions.cs b/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/MongoDB/MongoExtractJobInfoExtensions.cs index e43d5bf0d..40a300457 100644 --- a/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/MongoDB/MongoExtractJobInfoExtensions.cs +++ b/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/MongoDB/MongoExtractJobInfoExtensions.cs @@ -1,37 +1,36 @@ using SmiServices.Microservices.CohortPackager.ExtractJobStorage.MongoDB.ObjectModel; -namespace SmiServices.Microservices.CohortPackager.ExtractJobStorage.MongoDB -{ - public static class MongoExtractJobInfoExtensions - { - public static ExtractJobInfo ToExtractJobInfo(this MongoExtractJobDoc mongoExtractJobDoc) - => new( - mongoExtractJobDoc.ExtractionJobIdentifier, - mongoExtractJobDoc.JobSubmittedAt, - mongoExtractJobDoc.ProjectNumber, - mongoExtractJobDoc.ExtractionDirectory, - mongoExtractJobDoc.KeyTag, - mongoExtractJobDoc.KeyCount, - mongoExtractJobDoc.UserName, - mongoExtractJobDoc.ExtractionModality, - mongoExtractJobDoc.JobStatus, - mongoExtractJobDoc.IsIdentifiableExtraction, - mongoExtractJobDoc.IsNoFilterExtraction - ); +namespace SmiServices.Microservices.CohortPackager.ExtractJobStorage.MongoDB; - public static CompletedExtractJobInfo ToExtractJobInfo(this MongoCompletedExtractJobDoc mongoCompletedExtractJobDoc) - => new( - 
mongoCompletedExtractJobDoc.ExtractionJobIdentifier, - mongoCompletedExtractJobDoc.JobSubmittedAt, - mongoCompletedExtractJobDoc.CompletedAt, - mongoCompletedExtractJobDoc.ProjectNumber, - mongoCompletedExtractJobDoc.ExtractionDirectory, - mongoCompletedExtractJobDoc.KeyTag, - mongoCompletedExtractJobDoc.KeyCount, - mongoCompletedExtractJobDoc.UserName, - mongoCompletedExtractJobDoc.ExtractionModality, - mongoCompletedExtractJobDoc.IsIdentifiableExtraction, - mongoCompletedExtractJobDoc.IsNoFilterExtraction +public static class MongoExtractJobInfoExtensions +{ + public static ExtractJobInfo ToExtractJobInfo(this MongoExtractJobDoc mongoExtractJobDoc) + => new( + mongoExtractJobDoc.ExtractionJobIdentifier, + mongoExtractJobDoc.JobSubmittedAt, + mongoExtractJobDoc.ProjectNumber, + mongoExtractJobDoc.ExtractionDirectory, + mongoExtractJobDoc.KeyTag, + mongoExtractJobDoc.KeyCount, + mongoExtractJobDoc.UserName, + mongoExtractJobDoc.ExtractionModality, + mongoExtractJobDoc.JobStatus, + mongoExtractJobDoc.IsIdentifiableExtraction, + mongoExtractJobDoc.IsNoFilterExtraction ); - } + + public static CompletedExtractJobInfo ToExtractJobInfo(this MongoCompletedExtractJobDoc mongoCompletedExtractJobDoc) + => new( + mongoCompletedExtractJobDoc.ExtractionJobIdentifier, + mongoCompletedExtractJobDoc.JobSubmittedAt, + mongoCompletedExtractJobDoc.CompletedAt, + mongoCompletedExtractJobDoc.ProjectNumber, + mongoCompletedExtractJobDoc.ExtractionDirectory, + mongoCompletedExtractJobDoc.KeyTag, + mongoCompletedExtractJobDoc.KeyCount, + mongoCompletedExtractJobDoc.UserName, + mongoCompletedExtractJobDoc.ExtractionModality, + mongoCompletedExtractJobDoc.IsIdentifiableExtraction, + mongoCompletedExtractJobDoc.IsNoFilterExtraction + ); } diff --git a/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/MongoDB/MongoExtractJobStore.cs b/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/MongoDB/MongoExtractJobStore.cs index 86290bd42..f2db915a9 100644 --- 
a/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/MongoDB/MongoExtractJobStore.cs +++ b/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/MongoDB/MongoExtractJobStore.cs @@ -10,436 +10,435 @@ using System.Linq; -namespace SmiServices.Microservices.CohortPackager.ExtractJobStorage.MongoDB +namespace SmiServices.Microservices.CohortPackager.ExtractJobStorage.MongoDB; + +// ReSharper disable InconsistentlySynchronizedField +public class MongoExtractJobStore : ExtractJobStore { - // ReSharper disable InconsistentlySynchronizedField - public class MongoExtractJobStore : ExtractJobStore - { - private const string ExtractJobCollectionName = "inProgressJobs"; - private const string ExpectedFilesCollectionPrefix = "expectedFiles"; - private const string StatusCollectionPrefix = "statuses"; - private const string CompletedCollectionName = "completedJobs"; + private const string ExtractJobCollectionName = "inProgressJobs"; + private const string ExpectedFilesCollectionPrefix = "expectedFiles"; + private const string StatusCollectionPrefix = "statuses"; + private const string CompletedCollectionName = "completedJobs"; - private readonly IMongoClient _client; - private readonly IMongoDatabase _database; + private readonly IMongoClient _client; + private readonly IMongoDatabase _database; - // NOTE(rkm 2020-03-08) The collections listed here are persistent. Job-specific collections for the files info and statuses are temporary and fetched when needed - private readonly IMongoCollection _inProgressJobCollection; - private readonly IMongoCollection _completedExpectedFilesCollection; - private readonly IMongoCollection _completedStatusCollection; - private readonly IMongoCollection _completedJobCollection; + // NOTE(rkm 2020-03-08) The collections listed here are persistent. 
Job-specific collections for the files info and statuses are temporary and fetched when needed + private readonly IMongoCollection _inProgressJobCollection; + private readonly IMongoCollection _completedExpectedFilesCollection; + private readonly IMongoCollection _completedStatusCollection; + private readonly IMongoCollection _completedJobCollection; - private readonly DateTimeProvider _dateTimeProvider; + private readonly DateTimeProvider _dateTimeProvider; - private readonly object _writeQueueLock = new(); - private readonly Dictionary> _verificationStatusWriteQueue = []; - private readonly ConcurrentQueue> _processedVerificationMessages = new(); - public override ConcurrentQueue> ProcessedVerificationMessages => _processedVerificationMessages; + private readonly object _writeQueueLock = new(); + private readonly Dictionary> _verificationStatusWriteQueue = []; + private readonly ConcurrentQueue> _processedVerificationMessages = new(); + public override ConcurrentQueue> ProcessedVerificationMessages => _processedVerificationMessages; - public MongoExtractJobStore( - IMongoClient client, string extractionDatabaseName, - DateTimeProvider? dateTimeProvider = null - ) - { - _client = client; - _database = _client.GetDatabase(extractionDatabaseName); + public MongoExtractJobStore( + IMongoClient client, string extractionDatabaseName, + DateTimeProvider? dateTimeProvider = null + ) + { + _client = client; + _database = _client.GetDatabase(extractionDatabaseName); - _dateTimeProvider = dateTimeProvider ?? new DateTimeProvider(); + _dateTimeProvider = dateTimeProvider ?? 
new DateTimeProvider(); - _inProgressJobCollection = _database.GetCollection(ExtractJobCollectionName); - _completedExpectedFilesCollection = _database.GetCollection(ExpectedFilesCollectionName("completed")); - _completedStatusCollection = _database.GetCollection(StatusCollectionName("completed")); - _completedJobCollection = _database.GetCollection(CompletedCollectionName); + _inProgressJobCollection = _database.GetCollection(ExtractJobCollectionName); + _completedExpectedFilesCollection = _database.GetCollection(ExpectedFilesCollectionName("completed")); + _completedStatusCollection = _database.GetCollection(StatusCollectionName("completed")); + _completedJobCollection = _database.GetCollection(CompletedCollectionName); - long count = CountExistingJobs(); - Logger.Info(count > 0 ? $"Connected to job store with {count} existing jobs" : "Empty job store created successfully"); - } + long count = CountExistingJobs(); + Logger.Info(count > 0 ? $"Connected to job store with {count} existing jobs" : "Empty job store created successfully"); + } - public override void ProcessVerificationMessageQueue() + public override void ProcessVerificationMessageQueue() + { + lock (_writeQueueLock) { - lock (_writeQueueLock) + foreach (var (collectionName, processItemList) in _verificationStatusWriteQueue) { - foreach (var (collectionName, processItemList) in _verificationStatusWriteQueue) - { - if (processItemList.Count == 0) - continue; + if (processItemList.Count == 0) + continue; - Logger.Debug($"InsertMany for {collectionName} with {processItemList.Count} item(s)"); - _database - .GetCollection(collectionName) - .InsertMany(processItemList.Select(x => x.StatusDoc)); + Logger.Debug($"InsertMany for {collectionName} with {processItemList.Count} item(s)"); + _database + .GetCollection(collectionName) + .InsertMany(processItemList.Select(x => x.StatusDoc)); - foreach (var processItem in processItemList) - _processedVerificationMessages.Enqueue(new Tuple(processItem.Header, 
processItem.Tag)); + foreach (var processItem in processItemList) + _processedVerificationMessages.Enqueue(new Tuple(processItem.Header, processItem.Tag)); - processItemList.Clear(); - } + processItemList.Clear(); } } + } - protected override void PersistMessageToStoreImpl(ExtractionRequestInfoMessage message, IMessageHeader header) - { - if (InCompletedJobCollection(message.ExtractionJobIdentifier)) - throw new ApplicationException("Received an ExtractionRequestInfoMessage for a job that is already completed"); - - MongoExtractJobDoc newJobInfo = MongoExtractJobDoc.FromMessage(message, header, _dateTimeProvider); - - _inProgressJobCollection.InsertOne(newJobInfo); - } - - protected override void PersistMessageToStoreImpl(ExtractFileCollectionInfoMessage message, IMessageHeader header) - { - if (InCompletedJobCollection(message.ExtractionJobIdentifier)) - throw new ApplicationException("Received an ExtractFileCollectionInfoMessage for a job that is already completed"); - - MongoExpectedFilesDoc expectedFilesForKey = MongoExpectedFilesDoc.FromMessage(message, header, _dateTimeProvider); + protected override void PersistMessageToStoreImpl(ExtractionRequestInfoMessage message, IMessageHeader header) + { + if (InCompletedJobCollection(message.ExtractionJobIdentifier)) + throw new ApplicationException("Received an ExtractionRequestInfoMessage for a job that is already completed"); - _database - .GetCollection(ExpectedFilesCollectionName(message.ExtractionJobIdentifier)) - .InsertOne(expectedFilesForKey); - } + MongoExtractJobDoc newJobInfo = MongoExtractJobDoc.FromMessage(message, header, _dateTimeProvider); - protected override void PersistMessageToStoreImpl(ExtractedFileStatusMessage message, IMessageHeader header) - { - if (InCompletedJobCollection(message.ExtractionJobIdentifier)) - throw new ApplicationException("Received an ExtractedFileStatusMessage for a job that is already completed"); - - var newStatus = new MongoFileStatusDoc( - 
MongoExtractionMessageHeaderDoc.FromMessageHeader(message.ExtractionJobIdentifier, header, _dateTimeProvider), - message.DicomFilePath, - message.OutputFilePath, - message.Status, - VerifiedFileStatus.NotVerified, - statusMessage: message.StatusMessage - ); - - _database - .GetCollection(StatusCollectionName(message.ExtractionJobIdentifier)) - .InsertOne(newStatus); - } - - protected override void PersistMessageToStoreImpl(ExtractedFileVerificationMessage message, IMessageHeader header) - { - var statusDoc = MongoFileStatusDocFor(message, header); - _database - .GetCollection(StatusCollectionName(message.ExtractionJobIdentifier)) - .InsertOne(statusDoc); - } + _inProgressJobCollection.InsertOne(newJobInfo); + } - //TODO(rkm 2020-03-09) Test this with a large volume of messages - protected override List GetReadyJobsImpl(Guid specificJobId = default) - { - //TODO Docs + protected override void PersistMessageToStoreImpl(ExtractFileCollectionInfoMessage message, IMessageHeader header) + { + if (InCompletedJobCollection(message.ExtractionJobIdentifier)) + throw new ApplicationException("Received an ExtractFileCollectionInfoMessage for a job that is already completed"); - FilterDefinition filter = FilterDefinition.Empty; + MongoExpectedFilesDoc expectedFilesForKey = MongoExpectedFilesDoc.FromMessage(message, header, _dateTimeProvider); - // If we have been passed a specific GUID, search for that job only - if (specificJobId != default) - filter &= Builders.Filter.Eq(x => x.ExtractionJobIdentifier, specificJobId); + _database + .GetCollection(ExpectedFilesCollectionName(message.ExtractionJobIdentifier)) + .InsertOne(expectedFilesForKey); + } - // NOTE(rkm 2020-03-03) Get all extract jobs that are not in the Failed state - var activeJobs = new List(); - using (IAsyncCursor cursor = _inProgressJobCollection.FindSync(filter)) - { - while (cursor.MoveNext()) - foreach (MongoExtractJobDoc job in cursor.Current) - { - if (job.JobStatus == ExtractJobStatus.Failed) - { - 
Logger.Warn($"Job {job.ExtractionJobIdentifier} is marked as Failed - ignoring"); - continue; - } - activeJobs.Add(job); - } - } + protected override void PersistMessageToStoreImpl(ExtractedFileStatusMessage message, IMessageHeader header) + { + if (InCompletedJobCollection(message.ExtractionJobIdentifier)) + throw new ApplicationException("Received an ExtractedFileStatusMessage for a job that is already completed"); + + var newStatus = new MongoFileStatusDoc( + MongoExtractionMessageHeaderDoc.FromMessageHeader(message.ExtractionJobIdentifier, header, _dateTimeProvider), + message.DicomFilePath, + message.OutputFilePath, + message.Status, + VerifiedFileStatus.NotVerified, + statusMessage: message.StatusMessage + ); + + _database + .GetCollection(StatusCollectionName(message.ExtractionJobIdentifier)) + .InsertOne(newStatus); + } - // Calculate the current status of each job and return those that are ready for completion - var readyJobs = new List(); - foreach (MongoExtractJobDoc job in activeJobs) - { - Guid jobId = job.ExtractionJobIdentifier; + protected override void PersistMessageToStoreImpl(ExtractedFileVerificationMessage message, IMessageHeader header) + { + var statusDoc = MongoFileStatusDocFor(message, header); + _database + .GetCollection(StatusCollectionName(message.ExtractionJobIdentifier)) + .InsertOne(statusDoc); + } - Logger.Debug($"Checking progress for {jobId}"); + //TODO(rkm 2020-03-09) Test this with a large volume of messages + protected override List GetReadyJobsImpl(Guid specificJobId = default) + { + //TODO Docs - // Check if the job has progressed - var changed = false; + FilterDefinition filter = FilterDefinition.Empty; - string expectedTempCollectionName = ExpectedFilesCollectionName(jobId); - IMongoCollection expectedTempCollection = _database.GetCollection(expectedTempCollectionName); + // If we have been passed a specific GUID, search for that job only + if (specificJobId != default) + filter &= Builders.Filter.Eq(x => 
x.ExtractionJobIdentifier, specificJobId); - if (job.JobStatus == ExtractJobStatus.WaitingForCollectionInfo) + // NOTE(rkm 2020-03-03) Get all extract jobs that are not in the Failed state + var activeJobs = new List(); + using (IAsyncCursor cursor = _inProgressJobCollection.FindSync(filter)) + { + while (cursor.MoveNext()) + foreach (MongoExtractJobDoc job in cursor.Current) { - long collectionInfoCount = expectedTempCollection.CountDocuments(FilterDefinition.Empty); - - if (job.KeyCount == collectionInfoCount) + if (job.JobStatus == ExtractJobStatus.Failed) { - Logger.Debug($"Have all collection messages for job {jobId}"); - job.JobStatus = ExtractJobStatus.WaitingForStatuses; - changed = true; + Logger.Warn($"Job {job.ExtractionJobIdentifier} is marked as Failed - ignoring"); + continue; } - else - Logger.Debug($"Job {jobId} is in state WaitingForCollectionInfo. Expected count is {job.KeyCount}, actual is {collectionInfoCount}"); + activeJobs.Add(job); } + } - if (job.JobStatus == ExtractJobStatus.WaitingForStatuses) - { - string statusTempCollectionName = StatusCollectionName(jobId); - IMongoCollection statusTempCollection = _database.GetCollection(statusTempCollectionName); + // Calculate the current status of each job and return those that are ready for completion + var readyJobs = new List(); + foreach (MongoExtractJobDoc job in activeJobs) + { + Guid jobId = job.ExtractionJobIdentifier; - // If we have (at least) one status message for each expected file, then we can continue - var expectedStatusesCount = 0; - IAsyncCursor cursor = expectedTempCollection.FindSync(FilterDefinition.Empty); - while (cursor.MoveNext()) - expectedStatusesCount += cursor.Current.Sum(doc => doc.ExpectedFiles.Count); + Logger.Debug($"Checking progress for {jobId}"); - long actualStatusCount = statusTempCollection.CountDocuments(FilterDefinition.Empty); + // Check if the job has progressed + var changed = false; - if (expectedStatusesCount == actualStatusCount) - { - 
Logger.Debug($"Have all status messages for job {jobId}"); - job.JobStatus = ExtractJobStatus.ReadyForChecks; - changed = true; - } - else - Logger.Debug($"Job {jobId} is in state WaitingForStatuses. Expected count is {expectedStatusesCount}, actual is {actualStatusCount}"); - } + string expectedTempCollectionName = ExpectedFilesCollectionName(jobId); + IMongoCollection expectedTempCollection = _database.GetCollection(expectedTempCollectionName); - // If the status has moved on then update the document in the database - if (changed) - { - ReplaceOneResult res = _inProgressJobCollection.ReplaceOne(GetFilterForSpecificJob(jobId), job); - if (!res.IsAcknowledged) - throw new ApplicationException($"Received invalid ReplaceOneResult: {res}"); - } + if (job.JobStatus == ExtractJobStatus.WaitingForCollectionInfo) + { + long collectionInfoCount = expectedTempCollection.CountDocuments(FilterDefinition.Empty); - if (job.JobStatus != ExtractJobStatus.ReadyForChecks) + if (job.KeyCount == collectionInfoCount) { - Logger.Debug($"Job {jobId} is not ready - currently in state {job.JobStatus} (changed={changed})"); - continue; + Logger.Debug($"Have all collection messages for job {jobId}"); + job.JobStatus = ExtractJobStatus.WaitingForStatuses; + changed = true; } - - readyJobs.Add(job.ToExtractJobInfo()); + else + Logger.Debug($"Job {jobId} is in state WaitingForCollectionInfo. 
Expected count is {job.KeyCount}, actual is {collectionInfoCount}"); } - return readyJobs; - } - - protected override void CompleteJobImpl(Guid jobId) - { - //TODO Docs - - using IClientSessionHandle session = _client.StartSession(); - session.StartTransaction(); - string expectedCollNameForJob = ExpectedFilesCollectionName(jobId); - string statusCollNameForJob = StatusCollectionName(jobId); - - try + if (job.JobStatus == ExtractJobStatus.WaitingForStatuses) { - if (!TryGetMongoExtractJobDoc(jobId, out MongoExtractJobDoc toComplete)) - throw new ApplicationException($"Could not find job {jobId} in the job store"); - - if (toComplete.JobStatus == ExtractJobStatus.Failed) - throw new ApplicationException($"Job {jobId} is marked as failed"); - - var completedJob = new MongoCompletedExtractJobDoc(toComplete, _dateTimeProvider.UtcNow()); - _completedJobCollection.InsertOne(completedJob); - - DeleteResult res = _inProgressJobCollection.DeleteOne(GetFilterForSpecificJob(jobId)); - if (!res.IsAcknowledged) - throw new ApplicationException("Job data was archived but could not delete original from job store"); + string statusTempCollectionName = StatusCollectionName(jobId); + IMongoCollection statusTempCollection = _database.GetCollection(statusTempCollectionName); - // Move the associated docs from each collection to the archives + // If we have (at least) one status message for each expected file, then we can continue + var expectedStatusesCount = 0; + IAsyncCursor cursor = expectedTempCollection.FindSync(FilterDefinition.Empty); + while (cursor.MoveNext()) + expectedStatusesCount += cursor.Current.Sum(doc => doc.ExpectedFiles.Count); - IMongoCollection expectedTempCollection = _database.GetCollection(expectedCollNameForJob); - if (expectedTempCollection.CountDocuments(FilterDefinition.Empty) == 0) - throw new ApplicationException($"Collection of MongoExpectedFilesDoc for job {jobId} was missing or empty"); - using (IAsyncCursor cursor = 
expectedTempCollection.FindSync(FilterDefinition.Empty)) - { - while (cursor.MoveNext()) - _completedExpectedFilesCollection.InsertMany(cursor.Current); - } + long actualStatusCount = statusTempCollection.CountDocuments(FilterDefinition.Empty); - IMongoCollection statusTemp = _database.GetCollection(statusCollNameForJob); - if (statusTemp.CountDocuments(FilterDefinition.Empty) == 0) - throw new ApplicationException($"Collection of MongoFileStatusDoc for job {jobId} was missing or empty"); - using (IAsyncCursor cursor = statusTemp.FindSync(FilterDefinition.Empty)) + if (expectedStatusesCount == actualStatusCount) { - while (cursor.MoveNext()) - _completedStatusCollection.InsertMany(cursor.Current); + Logger.Debug($"Have all status messages for job {jobId}"); + job.JobStatus = ExtractJobStatus.ReadyForChecks; + changed = true; } + else + Logger.Debug($"Job {jobId} is in state WaitingForStatuses. Expected count is {expectedStatusesCount}, actual is {actualStatusCount}"); } - catch (Exception) + + // If the status has moved on then update the document in the database + if (changed) { - Logger.Debug("Caught exception from transaction. 
Aborting before re-throwing"); - session.AbortTransaction(); - throw; + ReplaceOneResult res = _inProgressJobCollection.ReplaceOne(GetFilterForSpecificJob(jobId), job); + if (!res.IsAcknowledged) + throw new ApplicationException($"Received invalid ReplaceOneResult: {res}"); } - // TODO(rkm 2020-03-03) Can potentially add a retry here - session.CommitTransaction(); + if (job.JobStatus != ExtractJobStatus.ReadyForChecks) + { + Logger.Debug($"Job {jobId} is not ready - currently in state {job.JobStatus} (changed={changed})"); + continue; + } - // NOTE(rkm 2020-03-09) "Operations that affect the database catalog, such as creating or dropping a collection or an index, are not allowed in transactions" - _database.DropCollection(expectedCollNameForJob); - _database.DropCollection(statusCollNameForJob); + readyJobs.Add(job.ToExtractJobInfo()); } - protected override void MarkJobFailedImpl(Guid jobId, Exception cause) + return readyJobs; + } + + protected override void CompleteJobImpl(Guid jobId) + { + //TODO Docs + + using IClientSessionHandle session = _client.StartSession(); + session.StartTransaction(); + string expectedCollNameForJob = ExpectedFilesCollectionName(jobId); + string statusCollNameForJob = StatusCollectionName(jobId); + + try { - //TODO Docs + if (!TryGetMongoExtractJobDoc(jobId, out MongoExtractJobDoc toComplete)) + throw new ApplicationException($"Could not find job {jobId} in the job store"); - using IClientSessionHandle session = _client.StartSession(); - session.StartTransaction(); + if (toComplete.JobStatus == ExtractJobStatus.Failed) + throw new ApplicationException($"Job {jobId} is marked as failed"); - try - { - if (!TryGetMongoExtractJobDoc(jobId, out MongoExtractJobDoc toFail)) - throw new ApplicationException($"Could not find job {jobId} in the job store"); + var completedJob = new MongoCompletedExtractJobDoc(toComplete, _dateTimeProvider.UtcNow()); + _completedJobCollection.InsertOne(completedJob); - if (toFail.JobStatus == 
ExtractJobStatus.Failed || toFail.FailedJobInfoDoc != null) - throw new ApplicationException($"Job {jobId} is already marked as failed"); + DeleteResult res = _inProgressJobCollection.DeleteOne(GetFilterForSpecificJob(jobId)); + if (!res.IsAcknowledged) + throw new ApplicationException("Job data was archived but could not delete original from job store"); - toFail.JobStatus = ExtractJobStatus.Failed; - toFail.FailedJobInfoDoc = new MongoFailedJobInfoDoc(cause, _dateTimeProvider); + // Move the associated docs from each collection to the archives - ReplaceOneResult res = _inProgressJobCollection.ReplaceOne(GetFilterForSpecificJob(jobId), toFail); - if (!res.IsAcknowledged || res.ModifiedCount != 1) - throw new ApplicationException($"Received invalid ReplaceOneResult: {res}"); - } - catch (Exception) + IMongoCollection expectedTempCollection = _database.GetCollection(expectedCollNameForJob); + if (expectedTempCollection.CountDocuments(FilterDefinition.Empty) == 0) + throw new ApplicationException($"Collection of MongoExpectedFilesDoc for job {jobId} was missing or empty"); + using (IAsyncCursor cursor = expectedTempCollection.FindSync(FilterDefinition.Empty)) { - Logger.Debug("Caught exception from transaction. 
Aborting before re-throwing"); - session.AbortTransaction(); - throw; + while (cursor.MoveNext()) + _completedExpectedFilesCollection.InsertMany(cursor.Current); } - // TODO(rkm 2020-03-03) Can potentially add a retry here - session.CommitTransaction(); + IMongoCollection statusTemp = _database.GetCollection(statusCollNameForJob); + if (statusTemp.CountDocuments(FilterDefinition.Empty) == 0) + throw new ApplicationException($"Collection of MongoFileStatusDoc for job {jobId} was missing or empty"); + using (IAsyncCursor cursor = statusTemp.FindSync(FilterDefinition.Empty)) + { + while (cursor.MoveNext()) + _completedStatusCollection.InsertMany(cursor.Current); + } } - - protected override CompletedExtractJobInfo GetCompletedJobInfoImpl(Guid jobId) + catch (Exception) { - MongoCompletedExtractJobDoc jobDoc = - _completedJobCollection - .FindSync(Builders.Filter.Eq(x => x.ExtractionJobIdentifier, jobId)) - .SingleOrDefault() ?? throw new ApplicationException($"No completed document for job {jobId}"); - return jobDoc.ToExtractJobInfo(); + Logger.Debug("Caught exception from transaction. 
Aborting before re-throwing"); + session.AbortTransaction(); + throw; } - protected override IEnumerable GetCompletedJobRejectionsImpl(Guid jobId) - { - var filter = FilterDefinition.Empty; - filter &= Builders.Filter.Eq(x => x.Header.ExtractionJobIdentifier, jobId); - // TODO(rkm 2020-10-28) This doesn't work for some reason, so for now we're using the check inside the foreach loop - //filter &= Builders.Filter.Gt(x => x.RejectedKeys.RejectionInfo.Count, 0); - IAsyncCursor cursor = _completedExpectedFilesCollection.FindSync(filter); - while (cursor.MoveNext()) - foreach (MongoExpectedFilesDoc expectedFilesDoc in cursor.Current) - { - if (expectedFilesDoc.RejectedKeys.RejectionInfo.Count == 0) - continue; - yield return new ExtractionIdentifierRejectionInfo(expectedFilesDoc.Key, expectedFilesDoc.RejectedKeys.RejectionInfo); - } - } + // TODO(rkm 2020-03-03) Can potentially add a retry here + session.CommitTransaction(); - protected override IEnumerable GetCompletedJobAnonymisationFailuresImpl(Guid jobId) - { - var filter = FilterDefinition.Empty; - filter &= Builders.Filter.Eq(x => x.Header.ExtractionJobIdentifier, jobId); + // NOTE(rkm 2020-03-09) "Operations that affect the database catalog, such as creating or dropping a collection or an index, are not allowed in transactions" + _database.DropCollection(expectedCollNameForJob); + _database.DropCollection(statusCollNameForJob); + } - // Anonymisation failures have VerifiedFileStatus == NotVerified (they did not go through IsIdentifiable) and - // ExtractedFileStatus != Copied (as these are not anonymised) - filter &= Builders.Filter.Eq(x => x.VerifiedFileStatus, VerifiedFileStatus.NotVerified); - filter &= Builders.Filter.Ne(x => x.ExtractedFileStatus, ExtractedFileStatus.Copied); + protected override void MarkJobFailedImpl(Guid jobId, Exception cause) + { + //TODO Docs - IAsyncCursor cursor = _completedStatusCollection.FindSync(filter); - while (cursor.MoveNext()) - foreach (MongoFileStatusDoc doc in 
cursor.Current) - yield return new FileAnonFailureInfo(doc.DicomFilePath, doc.StatusMessage!); - } + using IClientSessionHandle session = _client.StartSession(); + session.StartTransaction(); - protected override IEnumerable GetCompletedJobVerificationFailuresImpl(Guid jobId) + try { - var filter = FilterDefinition.Empty; - filter &= Builders.Filter.Eq(x => x.Header.ExtractionJobIdentifier, jobId); - filter &= Builders.Filter.Eq(x => x.VerifiedFileStatus, VerifiedFileStatus.IsIdentifiable); + if (!TryGetMongoExtractJobDoc(jobId, out MongoExtractJobDoc toFail)) + throw new ApplicationException($"Could not find job {jobId} in the job store"); - IAsyncCursor cursor = _completedStatusCollection.FindSync(filter); - while (cursor.MoveNext()) - foreach (MongoFileStatusDoc doc in cursor.Current) - yield return new FileVerificationFailureInfo(doc.OutputFileName!, doc.StatusMessage!); - } + if (toFail.JobStatus == ExtractJobStatus.Failed || toFail.FailedJobInfoDoc != null) + throw new ApplicationException($"Job {jobId} is already marked as failed"); + + toFail.JobStatus = ExtractJobStatus.Failed; + toFail.FailedJobInfoDoc = new MongoFailedJobInfoDoc(cause, _dateTimeProvider); - protected override IEnumerable GetCompletedJobMissingFileListImpl(Guid jobId) + ReplaceOneResult res = _inProgressJobCollection.ReplaceOne(GetFilterForSpecificJob(jobId), toFail); + if (!res.IsAcknowledged || res.ModifiedCount != 1) + throw new ApplicationException($"Received invalid ReplaceOneResult: {res}"); + } + catch (Exception) { - FilterDefinition filter = FilterDefinition.Empty; - filter &= Builders.Filter.Eq(x => x.Header.ExtractionJobIdentifier, jobId); - filter &= Builders.Filter.Eq(x => x.ExtractedFileStatus, ExtractedFileStatus.FileMissing); - IAsyncCursor cursor = _completedStatusCollection.FindSync(filter); - while (cursor.MoveNext()) - foreach (MongoFileStatusDoc doc in cursor.Current) - yield return doc.DicomFilePath; + Logger.Debug("Caught exception from transaction. 
Aborting before re-throwing"); + session.AbortTransaction(); + throw; } - protected override void AddToWriteQueueImpl(ExtractedFileVerificationMessage message, IMessageHeader header, ulong tag) - { - var statusCollName = StatusCollectionName(message.ExtractionJobIdentifier); - var statusDoc = MongoFileStatusDocFor(message, header); + // TODO(rkm 2020-03-03) Can potentially add a retry here + session.CommitTransaction(); + } - lock (_writeQueueLock) - { - if (!_verificationStatusWriteQueue.ContainsKey(statusCollName)) - _verificationStatusWriteQueue.Add(statusCollName, []); + protected override CompletedExtractJobInfo GetCompletedJobInfoImpl(Guid jobId) + { + MongoCompletedExtractJobDoc jobDoc = + _completedJobCollection + .FindSync(Builders.Filter.Eq(x => x.ExtractionJobIdentifier, jobId)) + .SingleOrDefault() ?? throw new ApplicationException($"No completed document for job {jobId}"); + return jobDoc.ToExtractJobInfo(); + } - _verificationStatusWriteQueue[statusCollName].Add(new(statusDoc, header, tag)); + protected override IEnumerable GetCompletedJobRejectionsImpl(Guid jobId) + { + var filter = FilterDefinition.Empty; + filter &= Builders.Filter.Eq(x => x.Header.ExtractionJobIdentifier, jobId); + // TODO(rkm 2020-10-28) This doesn't work for some reason, so for now we're using the check inside the foreach loop + //filter &= Builders.Filter.Gt(x => x.RejectedKeys.RejectionInfo.Count, 0); + IAsyncCursor cursor = _completedExpectedFilesCollection.FindSync(filter); + while (cursor.MoveNext()) + foreach (MongoExpectedFilesDoc expectedFilesDoc in cursor.Current) + { + if (expectedFilesDoc.RejectedKeys.RejectionInfo.Count == 0) + continue; + yield return new ExtractionIdentifierRejectionInfo(expectedFilesDoc.Key, expectedFilesDoc.RejectedKeys.RejectionInfo); } - } + } - #region Helper Methods + protected override IEnumerable GetCompletedJobAnonymisationFailuresImpl(Guid jobId) + { + var filter = FilterDefinition.Empty; + filter &= Builders.Filter.Eq(x => 
x.Header.ExtractionJobIdentifier, jobId); + + // Anonymisation failures have VerifiedFileStatus == NotVerified (they did not go through IsIdentifiable) and + // ExtractedFileStatus != Copied (as these are not anonymised) + filter &= Builders.Filter.Eq(x => x.VerifiedFileStatus, VerifiedFileStatus.NotVerified); + filter &= Builders.Filter.Ne(x => x.ExtractedFileStatus, ExtractedFileStatus.Copied); + + IAsyncCursor cursor = _completedStatusCollection.FindSync(filter); + while (cursor.MoveNext()) + foreach (MongoFileStatusDoc doc in cursor.Current) + yield return new FileAnonFailureInfo(doc.DicomFilePath, doc.StatusMessage!); + } - private static string StatusCollectionName(string name) => $"{StatusCollectionPrefix}_{name}"; - private static string StatusCollectionName(Guid jobId) => StatusCollectionName(jobId.ToString()); - private static string ExpectedFilesCollectionName(string name) => $"{ExpectedFilesCollectionPrefix}_{name}"; - private static string ExpectedFilesCollectionName(Guid jobId) => ExpectedFilesCollectionName(jobId.ToString()); + protected override IEnumerable GetCompletedJobVerificationFailuresImpl(Guid jobId) + { + var filter = FilterDefinition.Empty; + filter &= Builders.Filter.Eq(x => x.Header.ExtractionJobIdentifier, jobId); + filter &= Builders.Filter.Eq(x => x.VerifiedFileStatus, VerifiedFileStatus.IsIdentifiable); + + IAsyncCursor cursor = _completedStatusCollection.FindSync(filter); + while (cursor.MoveNext()) + foreach (MongoFileStatusDoc doc in cursor.Current) + yield return new FileVerificationFailureInfo(doc.OutputFileName!, doc.StatusMessage!); + } - private static FilterDefinition GetFilterForSpecificJob(Guid extractionJobIdentifier) where T : MongoExtractJobDoc - => Builders.Filter.Eq(x => x.ExtractionJobIdentifier, extractionJobIdentifier); + protected override IEnumerable GetCompletedJobMissingFileListImpl(Guid jobId) + { + FilterDefinition filter = FilterDefinition.Empty; + filter &= Builders.Filter.Eq(x => 
x.Header.ExtractionJobIdentifier, jobId); + filter &= Builders.Filter.Eq(x => x.ExtractedFileStatus, ExtractedFileStatus.FileMissing); + IAsyncCursor cursor = _completedStatusCollection.FindSync(filter); + while (cursor.MoveNext()) + foreach (MongoFileStatusDoc doc in cursor.Current) + yield return doc.DicomFilePath; + } - private bool TryGetMongoExtractJobDoc(Guid extractionJobIdentifier, out MongoExtractJobDoc mongoExtractJobDoc) - { - mongoExtractJobDoc = _inProgressJobCollection - .Find(GetFilterForSpecificJob(extractionJobIdentifier)) - .SingleOrDefault(); - return mongoExtractJobDoc != null; - } + protected override void AddToWriteQueueImpl(ExtractedFileVerificationMessage message, IMessageHeader header, ulong tag) + { + var statusCollName = StatusCollectionName(message.ExtractionJobIdentifier); + var statusDoc = MongoFileStatusDocFor(message, header); - private bool InCompletedJobCollection(Guid extractionJobIdentifier) + lock (_writeQueueLock) { - return _completedJobCollection - .Find(GetFilterForSpecificJob(extractionJobIdentifier)) - .SingleOrDefault() != null; - } + if (!_verificationStatusWriteQueue.ContainsKey(statusCollName)) + _verificationStatusWriteQueue.Add(statusCollName, []); - private MongoFileStatusDoc MongoFileStatusDocFor(ExtractedFileVerificationMessage message, IMessageHeader header) - { - if (InCompletedJobCollection(message.ExtractionJobIdentifier)) - throw new ApplicationException($"Received an {nameof(ExtractedFileVerificationMessage)} for a job that is already completed"); - - return new( - MongoExtractionMessageHeaderDoc.FromMessageHeader(message.ExtractionJobIdentifier, header, _dateTimeProvider), - message.DicomFilePath, - message.OutputFilePath, - ExtractedFileStatus.Anonymised, - message.Status, - statusMessage: message.Report - ); + _verificationStatusWriteQueue[statusCollName].Add(new(statusDoc, header, tag)); } + } + + #region Helper Methods + + private static string StatusCollectionName(string name) => 
$"{StatusCollectionPrefix}_{name}"; + private static string StatusCollectionName(Guid jobId) => StatusCollectionName(jobId.ToString()); + private static string ExpectedFilesCollectionName(string name) => $"{ExpectedFilesCollectionPrefix}_{name}"; + private static string ExpectedFilesCollectionName(Guid jobId) => ExpectedFilesCollectionName(jobId.ToString()); + + private static FilterDefinition GetFilterForSpecificJob(Guid extractionJobIdentifier) where T : MongoExtractJobDoc + => Builders.Filter.Eq(x => x.ExtractionJobIdentifier, extractionJobIdentifier); - private record struct VerificationMessageProcessItem(MongoFileStatusDoc StatusDoc, IMessageHeader Header, ulong Tag); + private bool TryGetMongoExtractJobDoc(Guid extractionJobIdentifier, out MongoExtractJobDoc mongoExtractJobDoc) + { + mongoExtractJobDoc = _inProgressJobCollection + .Find(GetFilterForSpecificJob(extractionJobIdentifier)) + .SingleOrDefault(); + return mongoExtractJobDoc != null; + } - [ExcludeFromCodeCoverage] - private long CountExistingJobs() => _inProgressJobCollection.CountDocuments(FilterDefinition.Empty); + private bool InCompletedJobCollection(Guid extractionJobIdentifier) + { + return _completedJobCollection + .Find(GetFilterForSpecificJob(extractionJobIdentifier)) + .SingleOrDefault() != null; + } - #endregion + private MongoFileStatusDoc MongoFileStatusDocFor(ExtractedFileVerificationMessage message, IMessageHeader header) + { + if (InCompletedJobCollection(message.ExtractionJobIdentifier)) + throw new ApplicationException($"Received an {nameof(ExtractedFileVerificationMessage)} for a job that is already completed"); + + return new( + MongoExtractionMessageHeaderDoc.FromMessageHeader(message.ExtractionJobIdentifier, header, _dateTimeProvider), + message.DicomFilePath, + message.OutputFilePath, + ExtractedFileStatus.Anonymised, + message.Status, + statusMessage: message.Report + ); } + + private record struct VerificationMessageProcessItem(MongoFileStatusDoc StatusDoc, IMessageHeader 
Header, ulong Tag); + + [ExcludeFromCodeCoverage] + private long CountExistingJobs() => _inProgressJobCollection.CountDocuments(FilterDefinition.Empty); + + #endregion } diff --git a/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/MongoDB/ObjectModel/MongoCompletedExtractJobDoc.cs b/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/MongoDB/ObjectModel/MongoCompletedExtractJobDoc.cs index d10a5e9b0..68fa78e97 100644 --- a/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/MongoDB/ObjectModel/MongoCompletedExtractJobDoc.cs +++ b/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/MongoDB/ObjectModel/MongoCompletedExtractJobDoc.cs @@ -2,20 +2,19 @@ using System; -namespace SmiServices.Microservices.CohortPackager.ExtractJobStorage.MongoDB.ObjectModel +namespace SmiServices.Microservices.CohortPackager.ExtractJobStorage.MongoDB.ObjectModel; + +public class MongoCompletedExtractJobDoc : MongoExtractJobDoc { - public class MongoCompletedExtractJobDoc : MongoExtractJobDoc - { - [BsonElement("completedAt")] - public DateTime CompletedAt { get; set; } + [BsonElement("completedAt")] + public DateTime CompletedAt { get; set; } - public MongoCompletedExtractJobDoc( - MongoExtractJobDoc extractJobDoc, - DateTime completedAt - ) : base(extractJobDoc) - { - JobStatus = ExtractJobStatus.Completed; - CompletedAt = completedAt != default ? completedAt : throw new ArgumentException(null, nameof(completedAt)); - } + public MongoCompletedExtractJobDoc( + MongoExtractJobDoc extractJobDoc, + DateTime completedAt + ) : base(extractJobDoc) + { + JobStatus = ExtractJobStatus.Completed; + CompletedAt = completedAt != default ? 
completedAt : throw new ArgumentException(null, nameof(completedAt)); } } diff --git a/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/MongoDB/ObjectModel/MongoExpectedFilesDoc.cs b/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/MongoDB/ObjectModel/MongoExpectedFilesDoc.cs index c59cf0908..0e0b7aa9b 100644 --- a/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/MongoDB/ObjectModel/MongoExpectedFilesDoc.cs +++ b/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/MongoDB/ObjectModel/MongoExpectedFilesDoc.cs @@ -9,98 +9,97 @@ using System.Linq; -namespace SmiServices.Microservices.CohortPackager.ExtractJobStorage.MongoDB.ObjectModel +namespace SmiServices.Microservices.CohortPackager.ExtractJobStorage.MongoDB.ObjectModel; + +/// +/// MongoDB document model representing a set of files which are expected to be extracted +/// +[BsonIgnoreExtraElements] // NOTE(rkm 2020-08-28) Required for classes which don't contain a field marked with BsonId +public class MongoExpectedFilesDoc : MemberwiseEquatable +{ + [BsonElement("header")] + public MongoExtractionMessageHeaderDoc Header { get; set; } + + [BsonElement("key")] + public string Key { get; set; } + + [BsonElement("expectedFiles")] + public HashSet ExpectedFiles { get; set; } + + [BsonElement("rejectedKeys")] + public MongoRejectedKeyInfoDoc RejectedKeys { get; set; } + + + public MongoExpectedFilesDoc( + MongoExtractionMessageHeaderDoc header, + string key, + HashSet expectedFiles, + MongoRejectedKeyInfoDoc rejectedKeys) + { + Header = header ?? throw new ArgumentNullException(nameof(header)); + Key = !string.IsNullOrWhiteSpace(key) ? key : throw new ArgumentNullException(nameof(key)); + ExpectedFiles = expectedFiles ?? throw new ArgumentNullException(nameof(expectedFiles)); + RejectedKeys = rejectedKeys ?? 
throw new ArgumentNullException(nameof(rejectedKeys)); + } + + public static MongoExpectedFilesDoc FromMessage( + ExtractFileCollectionInfoMessage message, + IMessageHeader header, + DateTimeProvider dateTimeProvider) + { + return new MongoExpectedFilesDoc( + MongoExtractionMessageHeaderDoc.FromMessageHeader(message.ExtractionJobIdentifier, header, dateTimeProvider), + message.KeyValue, + new HashSet(message.ExtractFileMessagesDispatched.Select(x => new MongoExpectedFileInfoDoc(x.Key.MessageGuid, x.Value))), + MongoRejectedKeyInfoDoc.FromMessage(message, header, dateTimeProvider)); + } +} + +public class MongoExpectedFileInfoDoc : MemberwiseEquatable { - /// - /// MongoDB document model representing a set of files which are expected to be extracted - /// - [BsonIgnoreExtraElements] // NOTE(rkm 2020-08-28) Required for classes which don't contain a field marked with BsonId - public class MongoExpectedFilesDoc : MemberwiseEquatable + [BsonElement("extractFileMessageGuid")] + [BsonRepresentation(BsonType.String)] + public Guid ExtractFileMessageGuid { get; set; } + + [BsonElement("anonymisedFilePath")] + public string AnonymisedFilePath { get; set; } + + public MongoExpectedFileInfoDoc( + Guid extractFileMessageGuid, + string anonymisedFilePath) { - [BsonElement("header")] - public MongoExtractionMessageHeaderDoc Header { get; set; } - - [BsonElement("key")] - public string Key { get; set; } - - [BsonElement("expectedFiles")] - public HashSet ExpectedFiles { get; set; } - - [BsonElement("rejectedKeys")] - public MongoRejectedKeyInfoDoc RejectedKeys { get; set; } - - - public MongoExpectedFilesDoc( - MongoExtractionMessageHeaderDoc header, - string key, - HashSet expectedFiles, - MongoRejectedKeyInfoDoc rejectedKeys) - { - Header = header ?? throw new ArgumentNullException(nameof(header)); - Key = !string.IsNullOrWhiteSpace(key) ? key : throw new ArgumentNullException(nameof(key)); - ExpectedFiles = expectedFiles ?? 
throw new ArgumentNullException(nameof(expectedFiles)); - RejectedKeys = rejectedKeys ?? throw new ArgumentNullException(nameof(rejectedKeys)); - } - - public static MongoExpectedFilesDoc FromMessage( - ExtractFileCollectionInfoMessage message, - IMessageHeader header, - DateTimeProvider dateTimeProvider) - { - return new MongoExpectedFilesDoc( - MongoExtractionMessageHeaderDoc.FromMessageHeader(message.ExtractionJobIdentifier, header, dateTimeProvider), - message.KeyValue, - new HashSet(message.ExtractFileMessagesDispatched.Select(x => new MongoExpectedFileInfoDoc(x.Key.MessageGuid, x.Value))), - MongoRejectedKeyInfoDoc.FromMessage(message, header, dateTimeProvider)); - } + ExtractFileMessageGuid = extractFileMessageGuid != default ? extractFileMessageGuid : throw new ArgumentException(null, nameof(extractFileMessageGuid)); + AnonymisedFilePath = !string.IsNullOrWhiteSpace(anonymisedFilePath) ? anonymisedFilePath : throw new ArgumentException(null, nameof(anonymisedFilePath)); } +} + +/// +/// MongoDB document model representing the rejection reasons for a specific key +/// +public class MongoRejectedKeyInfoDoc : MemberwiseEquatable +{ + [BsonElement("header")] + public MongoExtractionMessageHeaderDoc Header { get; set; } + + [BsonElement("rejectionInfo")] + public Dictionary RejectionInfo { get; set; } - public class MongoExpectedFileInfoDoc : MemberwiseEquatable + public MongoRejectedKeyInfoDoc( + MongoExtractionMessageHeaderDoc header, + Dictionary rejectionInfo) { - [BsonElement("extractFileMessageGuid")] - [BsonRepresentation(BsonType.String)] - public Guid ExtractFileMessageGuid { get; set; } - - [BsonElement("anonymisedFilePath")] - public string AnonymisedFilePath { get; set; } - - public MongoExpectedFileInfoDoc( - Guid extractFileMessageGuid, - string anonymisedFilePath) - { - ExtractFileMessageGuid = extractFileMessageGuid != default ? 
extractFileMessageGuid : throw new ArgumentException(null, nameof(extractFileMessageGuid)); - AnonymisedFilePath = !string.IsNullOrWhiteSpace(anonymisedFilePath) ? anonymisedFilePath : throw new ArgumentException(null, nameof(anonymisedFilePath)); - } + Header = header ?? throw new ArgumentNullException(nameof(header)); + RejectionInfo = rejectionInfo ?? throw new ArgumentNullException(nameof(rejectionInfo)); } - /// - /// MongoDB document model representing the rejection reasons for a specific key - /// - public class MongoRejectedKeyInfoDoc : MemberwiseEquatable + public static MongoRejectedKeyInfoDoc FromMessage( + ExtractFileCollectionInfoMessage message, + IMessageHeader header, + DateTimeProvider dateTimeProvider) { - [BsonElement("header")] - public MongoExtractionMessageHeaderDoc Header { get; set; } - - [BsonElement("rejectionInfo")] - public Dictionary RejectionInfo { get; set; } - - public MongoRejectedKeyInfoDoc( - MongoExtractionMessageHeaderDoc header, - Dictionary rejectionInfo) - { - Header = header ?? throw new ArgumentNullException(nameof(header)); - RejectionInfo = rejectionInfo ?? 
throw new ArgumentNullException(nameof(rejectionInfo)); - } - - public static MongoRejectedKeyInfoDoc FromMessage( - ExtractFileCollectionInfoMessage message, - IMessageHeader header, - DateTimeProvider dateTimeProvider) - { - return new MongoRejectedKeyInfoDoc( - MongoExtractionMessageHeaderDoc.FromMessageHeader(message.ExtractionJobIdentifier, header, dateTimeProvider), - message.RejectionReasons - ); - } + return new MongoRejectedKeyInfoDoc( + MongoExtractionMessageHeaderDoc.FromMessageHeader(message.ExtractionJobIdentifier, header, dateTimeProvider), + message.RejectionReasons + ); } } diff --git a/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/MongoDB/ObjectModel/MongoExtractJobDoc.cs b/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/MongoDB/ObjectModel/MongoExtractJobDoc.cs index 1bb7f89f9..2a8633afb 100644 --- a/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/MongoDB/ObjectModel/MongoExtractJobDoc.cs +++ b/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/MongoDB/ObjectModel/MongoExtractJobDoc.cs @@ -7,150 +7,149 @@ using System; -namespace SmiServices.Microservices.CohortPackager.ExtractJobStorage.MongoDB.ObjectModel +namespace SmiServices.Microservices.CohortPackager.ExtractJobStorage.MongoDB.ObjectModel; + +public class MongoExtractJobDoc : MemberwiseEquatable { - public class MongoExtractJobDoc : MemberwiseEquatable + [BsonId] + [BsonRepresentation(BsonType.String)] + public Guid ExtractionJobIdentifier { get; set; } + + [BsonElement("header")] + public MongoExtractionMessageHeaderDoc Header { get; set; } + + [BsonElement("projectNumber")] + public string ProjectNumber { get; set; } + + [BsonElement("jobStatus")] + [BsonRepresentation(BsonType.String)] + public ExtractJobStatus JobStatus { get; set; } + + [BsonElement("extractionDirectory")] + public string ExtractionDirectory { get; set; } + + [BsonElement("jobSubmittedAt")] + public DateTime JobSubmittedAt { get; set; } + + 
[BsonElement("keyTag")] + public string KeyTag { get; set; } + + [BsonElement("keyCount")] + public uint KeyCount { get; set; } + + [BsonElement("userName")] + public string UserName { get; set; } + + [BsonElement("extractionModality")] + public string? ExtractionModality { get; set; } + + [BsonElement("isIdentifiableExtraction")] + public bool IsIdentifiableExtraction { get; set; } + + [BsonElement("IsNoFilterExtraction")] + public bool IsNoFilterExtraction { get; set; } + + [BsonElement("failedJobInfo")] + public MongoFailedJobInfoDoc? FailedJobInfoDoc { get; set; } + + + public MongoExtractJobDoc( + Guid extractionJobIdentifier, + MongoExtractionMessageHeaderDoc header, + string projectNumber, + ExtractJobStatus jobStatus, + string extractionDirectory, + DateTime jobSubmittedAt, + string keyTag, + uint keyCount, + string userName, + string? extractionModality, + bool isIdentifiableExtraction, + bool isNoFilterExtraction, + MongoFailedJobInfoDoc? failedJobInfoDoc) + { + ExtractionJobIdentifier = extractionJobIdentifier != default ? extractionJobIdentifier : throw new ArgumentException(null, nameof(extractionJobIdentifier)); + Header = header ?? throw new ArgumentNullException(nameof(header)); + ProjectNumber = !string.IsNullOrWhiteSpace(projectNumber) ? projectNumber : throw new ArgumentNullException(nameof(projectNumber)); + JobStatus = jobStatus != ExtractJobStatus.Unknown ? jobStatus : throw new ArgumentNullException(nameof(jobStatus)); + ExtractionDirectory = !string.IsNullOrWhiteSpace(extractionDirectory) ? extractionDirectory : throw new ArgumentNullException(nameof(extractionDirectory)); + JobSubmittedAt = jobSubmittedAt != default ? jobSubmittedAt : throw new ArgumentException(null, nameof(jobSubmittedAt)); + KeyTag = !string.IsNullOrWhiteSpace(keyTag) ? keyTag : throw new ArgumentNullException(nameof(keyTag)); + KeyCount = keyCount > 0 ? keyCount : throw new ArgumentNullException(nameof(keyCount)); + UserName = !string.IsNullOrWhiteSpace(userName) ? 
userName : throw new ArgumentNullException(nameof(userName)); + if (extractionModality != null) + ExtractionModality = !string.IsNullOrWhiteSpace(extractionModality) ? extractionModality : throw new ArgumentNullException(nameof(extractionModality)); + IsIdentifiableExtraction = isIdentifiableExtraction; + IsNoFilterExtraction = isNoFilterExtraction; + FailedJobInfoDoc = failedJobInfoDoc; + } + + /// + /// Copy constructor + /// + public MongoExtractJobDoc(MongoExtractJobDoc existing) + { + ExtractionJobIdentifier = existing.ExtractionJobIdentifier; + Header = existing.Header; + ProjectNumber = existing.ProjectNumber; + JobStatus = existing.JobStatus; + ExtractionDirectory = existing.ExtractionDirectory; + JobSubmittedAt = existing.JobSubmittedAt; + KeyTag = existing.KeyTag; + KeyCount = existing.KeyCount; + UserName = existing.UserName; + ExtractionModality = existing.ExtractionModality; + IsIdentifiableExtraction = existing.IsIdentifiableExtraction; + FailedJobInfoDoc = existing.FailedJobInfoDoc; + IsNoFilterExtraction = existing.IsNoFilterExtraction; + } + + public static MongoExtractJobDoc FromMessage( + ExtractionRequestInfoMessage message, + IMessageHeader header, + DateTimeProvider dateTimeProvider) { - [BsonId] - [BsonRepresentation(BsonType.String)] - public Guid ExtractionJobIdentifier { get; set; } - - [BsonElement("header")] - public MongoExtractionMessageHeaderDoc Header { get; set; } - - [BsonElement("projectNumber")] - public string ProjectNumber { get; set; } - - [BsonElement("jobStatus")] - [BsonRepresentation(BsonType.String)] - public ExtractJobStatus JobStatus { get; set; } - - [BsonElement("extractionDirectory")] - public string ExtractionDirectory { get; set; } - - [BsonElement("jobSubmittedAt")] - public DateTime JobSubmittedAt { get; set; } - - [BsonElement("keyTag")] - public string KeyTag { get; set; } - - [BsonElement("keyCount")] - public uint KeyCount { get; set; } - - [BsonElement("userName")] - public string UserName { get; set; } - - 
[BsonElement("extractionModality")] - public string? ExtractionModality { get; set; } - - [BsonElement("isIdentifiableExtraction")] - public bool IsIdentifiableExtraction { get; set; } - - [BsonElement("IsNoFilterExtraction")] - public bool IsNoFilterExtraction { get; set; } - - [BsonElement("failedJobInfo")] - public MongoFailedJobInfoDoc? FailedJobInfoDoc { get; set; } - - - public MongoExtractJobDoc( - Guid extractionJobIdentifier, - MongoExtractionMessageHeaderDoc header, - string projectNumber, - ExtractJobStatus jobStatus, - string extractionDirectory, - DateTime jobSubmittedAt, - string keyTag, - uint keyCount, - string userName, - string? extractionModality, - bool isIdentifiableExtraction, - bool isNoFilterExtraction, - MongoFailedJobInfoDoc? failedJobInfoDoc) - { - ExtractionJobIdentifier = extractionJobIdentifier != default ? extractionJobIdentifier : throw new ArgumentException(null, nameof(extractionJobIdentifier)); - Header = header ?? throw new ArgumentNullException(nameof(header)); - ProjectNumber = !string.IsNullOrWhiteSpace(projectNumber) ? projectNumber : throw new ArgumentNullException(nameof(projectNumber)); - JobStatus = jobStatus != ExtractJobStatus.Unknown ? jobStatus : throw new ArgumentNullException(nameof(jobStatus)); - ExtractionDirectory = !string.IsNullOrWhiteSpace(extractionDirectory) ? extractionDirectory : throw new ArgumentNullException(nameof(extractionDirectory)); - JobSubmittedAt = jobSubmittedAt != default ? jobSubmittedAt : throw new ArgumentException(null, nameof(jobSubmittedAt)); - KeyTag = !string.IsNullOrWhiteSpace(keyTag) ? keyTag : throw new ArgumentNullException(nameof(keyTag)); - KeyCount = keyCount > 0 ? keyCount : throw new ArgumentNullException(nameof(keyCount)); - UserName = !string.IsNullOrWhiteSpace(userName) ? userName : throw new ArgumentNullException(nameof(userName)); - if (extractionModality != null) - ExtractionModality = !string.IsNullOrWhiteSpace(extractionModality) ? 
extractionModality : throw new ArgumentNullException(nameof(extractionModality)); - IsIdentifiableExtraction = isIdentifiableExtraction; - IsNoFilterExtraction = isNoFilterExtraction; - FailedJobInfoDoc = failedJobInfoDoc; - } - - /// - /// Copy constructor - /// - public MongoExtractJobDoc(MongoExtractJobDoc existing) - { - ExtractionJobIdentifier = existing.ExtractionJobIdentifier; - Header = existing.Header; - ProjectNumber = existing.ProjectNumber; - JobStatus = existing.JobStatus; - ExtractionDirectory = existing.ExtractionDirectory; - JobSubmittedAt = existing.JobSubmittedAt; - KeyTag = existing.KeyTag; - KeyCount = existing.KeyCount; - UserName = existing.UserName; - ExtractionModality = existing.ExtractionModality; - IsIdentifiableExtraction = existing.IsIdentifiableExtraction; - FailedJobInfoDoc = existing.FailedJobInfoDoc; - IsNoFilterExtraction = existing.IsNoFilterExtraction; - } - - public static MongoExtractJobDoc FromMessage( - ExtractionRequestInfoMessage message, - IMessageHeader header, - DateTimeProvider dateTimeProvider) - { - return new MongoExtractJobDoc( - message.ExtractionJobIdentifier, - MongoExtractionMessageHeaderDoc.FromMessageHeader(message.ExtractionJobIdentifier, header, dateTimeProvider), - message.ProjectNumber, - ExtractJobStatus.WaitingForCollectionInfo, - message.ExtractionDirectory, - message.JobSubmittedAt, - message.KeyTag, - (uint)message.KeyValueCount, - message.UserName, - message.Modality, - message.IsIdentifiableExtraction, - message.IsNoFilterExtraction, - null - ); - } + return new MongoExtractJobDoc( + message.ExtractionJobIdentifier, + MongoExtractionMessageHeaderDoc.FromMessageHeader(message.ExtractionJobIdentifier, header, dateTimeProvider), + message.ProjectNumber, + ExtractJobStatus.WaitingForCollectionInfo, + message.ExtractionDirectory, + message.JobSubmittedAt, + message.KeyTag, + (uint)message.KeyValueCount, + message.UserName, + message.Modality, + message.IsIdentifiableExtraction, + 
message.IsNoFilterExtraction, + null + ); } +} + +public class MongoFailedJobInfoDoc : MemberwiseEquatable, IEquatable +{ + [BsonElement("failedAt")] + public DateTime FailedAt { get; set; } + + [BsonElement("exceptionMessage")] + public string ExceptionMessage { get; set; } + + [BsonElement("stackTrace")] + public string StackTrace { get; set; } + + [BsonElement("innerException")] + public string? InnerException { get; set; } - public class MongoFailedJobInfoDoc : MemberwiseEquatable, IEquatable + public MongoFailedJobInfoDoc( + Exception exception, + DateTimeProvider dateTimeProvider + ) { - [BsonElement("failedAt")] - public DateTime FailedAt { get; set; } - - [BsonElement("exceptionMessage")] - public string ExceptionMessage { get; set; } - - [BsonElement("stackTrace")] - public string StackTrace { get; set; } - - [BsonElement("innerException")] - public string? InnerException { get; set; } - - public MongoFailedJobInfoDoc( - Exception exception, - DateTimeProvider dateTimeProvider - ) - { - FailedAt = dateTimeProvider.UtcNow(); - ArgumentNullException.ThrowIfNull(exception); - ExceptionMessage = exception.Message; - StackTrace = exception.StackTrace!; - InnerException = exception.InnerException?.ToString(); - } + FailedAt = dateTimeProvider.UtcNow(); + ArgumentNullException.ThrowIfNull(exception); + ExceptionMessage = exception.Message; + StackTrace = exception.StackTrace!; + InnerException = exception.InnerException?.ToString(); } } diff --git a/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/MongoDB/ObjectModel/MongoExtractionMessageHeaderDoc.cs b/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/MongoDB/ObjectModel/MongoExtractionMessageHeaderDoc.cs index 67c935ad4..972e9f5f5 100644 --- a/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/MongoDB/ObjectModel/MongoExtractionMessageHeaderDoc.cs +++ b/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/MongoDB/ObjectModel/MongoExtractionMessageHeaderDoc.cs @@ 
-6,69 +6,68 @@ using System; -namespace SmiServices.Microservices.CohortPackager.ExtractJobStorage.MongoDB.ObjectModel +namespace SmiServices.Microservices.CohortPackager.ExtractJobStorage.MongoDB.ObjectModel; + +/// +/// Class which represents a document created from an extraction message header +/// +public class MongoExtractionMessageHeaderDoc : MemberwiseEquatable { - /// - /// Class which represents a document created from an extraction message header - /// - public class MongoExtractionMessageHeaderDoc : MemberwiseEquatable - { - [BsonElement("extractionJobIdentifier")] - [BsonRepresentation(BsonType.String)] - public Guid ExtractionJobIdentifier { get; set; } + [BsonElement("extractionJobIdentifier")] + [BsonRepresentation(BsonType.String)] + public Guid ExtractionJobIdentifier { get; set; } - [BsonElement("messageGuid")] - [BsonRepresentation(BsonType.String)] - public Guid MessageGuid { get; set; } + [BsonElement("messageGuid")] + [BsonRepresentation(BsonType.String)] + public Guid MessageGuid { get; set; } - [BsonElement("producerExecutableName")] - public string ProducerExecutableName { get; set; } + [BsonElement("producerExecutableName")] + public string ProducerExecutableName { get; set; } - [BsonElement("producerProcessID")] - public int ProducerProcessID { get; set; } + [BsonElement("producerProcessID")] + public int ProducerProcessID { get; set; } - [BsonElement("originalPublishTimestamp")] - public DateTime OriginalPublishTimestamp { get; set; } + [BsonElement("originalPublishTimestamp")] + public DateTime OriginalPublishTimestamp { get; set; } - [BsonElement("parents")] - public string? Parents { get; set; } + [BsonElement("parents")] + public string? 
Parents { get; set; } - [BsonElement("receivedAt")] - public DateTime ReceivedAt { get; set; } + [BsonElement("receivedAt")] + public DateTime ReceivedAt { get; set; } - public MongoExtractionMessageHeaderDoc( - Guid extractionJobIdentifier, - Guid messageGuid, - string producerExecutableName, - int producerProcessId, - DateTime originalPublishTimestamp, - string? parents, - DateTime receivedAt) - { - ExtractionJobIdentifier = extractionJobIdentifier != default ? extractionJobIdentifier : throw new ArgumentNullException(nameof(extractionJobIdentifier)); - MessageGuid = messageGuid != default ? messageGuid : throw new ArgumentNullException(nameof(messageGuid)); - ProducerExecutableName = !string.IsNullOrWhiteSpace(producerExecutableName) ? producerExecutableName : throw new ArgumentNullException(nameof(producerExecutableName)); - ProducerProcessID = producerProcessId > 0 ? producerProcessId : throw new ArgumentNullException(nameof(producerProcessId)); - OriginalPublishTimestamp = originalPublishTimestamp != default ? originalPublishTimestamp : throw new ArgumentNullException(nameof(originalPublishTimestamp)); - Parents = parents; - ReceivedAt = receivedAt != default ? receivedAt : throw new ArgumentNullException(nameof(receivedAt)); - } + public MongoExtractionMessageHeaderDoc( + Guid extractionJobIdentifier, + Guid messageGuid, + string producerExecutableName, + int producerProcessId, + DateTime originalPublishTimestamp, + string? parents, + DateTime receivedAt) + { + ExtractionJobIdentifier = extractionJobIdentifier != default ? extractionJobIdentifier : throw new ArgumentNullException(nameof(extractionJobIdentifier)); + MessageGuid = messageGuid != default ? messageGuid : throw new ArgumentNullException(nameof(messageGuid)); + ProducerExecutableName = !string.IsNullOrWhiteSpace(producerExecutableName) ? producerExecutableName : throw new ArgumentNullException(nameof(producerExecutableName)); + ProducerProcessID = producerProcessId > 0 ? 
producerProcessId : throw new ArgumentNullException(nameof(producerProcessId)); + OriginalPublishTimestamp = originalPublishTimestamp != default ? originalPublishTimestamp : throw new ArgumentNullException(nameof(originalPublishTimestamp)); + Parents = parents; + ReceivedAt = receivedAt != default ? receivedAt : throw new ArgumentNullException(nameof(receivedAt)); + } - public static MongoExtractionMessageHeaderDoc FromMessageHeader( - Guid extractionJobIdentifier, - IMessageHeader header, - DateTimeProvider dateTimeProvider) - { - return new MongoExtractionMessageHeaderDoc( - extractionJobIdentifier, - header.MessageGuid, - header.ProducerExecutableName, - header.ProducerProcessID, - MessageHeader.UnixTimeToDateTime(header.OriginalPublishTimestamp), - string.Join(MessageHeader.Splitter, header.Parents), - dateTimeProvider.UtcNow() - ); - } + public static MongoExtractionMessageHeaderDoc FromMessageHeader( + Guid extractionJobIdentifier, + IMessageHeader header, + DateTimeProvider dateTimeProvider) + { + return new MongoExtractionMessageHeaderDoc( + extractionJobIdentifier, + header.MessageGuid, + header.ProducerExecutableName, + header.ProducerProcessID, + MessageHeader.UnixTimeToDateTime(header.OriginalPublishTimestamp), + string.Join(MessageHeader.Splitter, header.Parents), + dateTimeProvider.UtcNow() + ); } } diff --git a/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/MongoDB/ObjectModel/MongoFileStatusDoc.cs b/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/MongoDB/ObjectModel/MongoFileStatusDoc.cs index 470098869..5ec5fb80f 100644 --- a/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/MongoDB/ObjectModel/MongoFileStatusDoc.cs +++ b/src/SmiServices/Microservices/CohortPackager/ExtractJobStorage/MongoDB/ObjectModel/MongoFileStatusDoc.cs @@ -7,86 +7,85 @@ using System.ComponentModel; -namespace SmiServices.Microservices.CohortPackager.ExtractJobStorage.MongoDB.ObjectModel +namespace 
SmiServices.Microservices.CohortPackager.ExtractJobStorage.MongoDB.ObjectModel; + +[BsonIgnoreExtraElements] // NOTE(rkm 2020-08-28) Required for classes which don't contain a field marked with BsonId +public class MongoFileStatusDoc : MemberwiseEquatable, ISupportInitialize { - [BsonIgnoreExtraElements] // NOTE(rkm 2020-08-28) Required for classes which don't contain a field marked with BsonId - public class MongoFileStatusDoc : MemberwiseEquatable, ISupportInitialize + [BsonElement("header")] + public MongoExtractionMessageHeaderDoc Header { get; set; } + + [BsonElement("dicomFilePath")] + public string DicomFilePath { get; set; } + + [BsonElement("outputFileName")] + public string? OutputFileName { get; set; } + + [BsonElement("extractedFileStatus")] + [BsonRepresentation(BsonType.String)] + public ExtractedFileStatus ExtractedFileStatus { get; set; } + + [BsonElement("verifiedFileStatus")] + [BsonRepresentation(BsonType.String)] + public VerifiedFileStatus VerifiedFileStatus { get; set; } + + /// + /// Should only be null for identifiable extractions where the file was successfully copied. Otherwise will be the failure reason from CTP or the report content from the IsIdentifiable verification + /// + [BsonElement("statusMessage")] + public string? StatusMessage { get; set; } + + /// + /// Used only to handle old-format documents when deserializing + /// + [BsonExtraElements] + public IDictionary? ExtraElements { get; set; } + + + public MongoFileStatusDoc( + MongoExtractionMessageHeaderDoc header, + string dicomFilePath, + string? outputFileName, + ExtractedFileStatus extractedFileStatus, + VerifiedFileStatus verifiedFileStatus, + string? statusMessage + ) + { + Header = header ?? throw new ArgumentNullException(nameof(header)); + DicomFilePath = dicomFilePath ?? throw new ArgumentNullException(nameof(dicomFilePath)); + OutputFileName = outputFileName; + ExtractedFileStatus = extractedFileStatus != ExtractedFileStatus.None ? 
extractedFileStatus : throw new ArgumentException("Cannot be None", nameof(extractedFileStatus)); + VerifiedFileStatus = verifiedFileStatus != VerifiedFileStatus.None ? verifiedFileStatus : throw new ArgumentException("Cannot be None", nameof(verifiedFileStatus)); + + StatusMessage = statusMessage; + if (string.IsNullOrWhiteSpace(StatusMessage) && ExtractedFileStatus != ExtractedFileStatus.Copied) + throw new ArgumentException("Cannot be null or whitespace except for successful file copies", nameof(statusMessage)); + } + + // ^ISupportInitialize + public void BeginInit() { } + + // ^ISupportInitialize + public void EndInit() { - [BsonElement("header")] - public MongoExtractionMessageHeaderDoc Header { get; set; } - - [BsonElement("dicomFilePath")] - public string DicomFilePath { get; set; } - - [BsonElement("outputFileName")] - public string? OutputFileName { get; set; } - - [BsonElement("extractedFileStatus")] - [BsonRepresentation(BsonType.String)] - public ExtractedFileStatus ExtractedFileStatus { get; set; } - - [BsonElement("verifiedFileStatus")] - [BsonRepresentation(BsonType.String)] - public VerifiedFileStatus VerifiedFileStatus { get; set; } - - /// - /// Should only be null for identifiable extractions where the file was successfully copied. Otherwise will be the failure reason from CTP or the report content from the IsIdentifiable verification - /// - [BsonElement("statusMessage")] - public string? StatusMessage { get; set; } - - /// - /// Used only to handle old-format documents when deserializing - /// - [BsonExtraElements] - public IDictionary? ExtraElements { get; set; } - - - public MongoFileStatusDoc( - MongoExtractionMessageHeaderDoc header, - string dicomFilePath, - string? outputFileName, - ExtractedFileStatus extractedFileStatus, - VerifiedFileStatus verifiedFileStatus, - string? 
statusMessage - ) + if (ExtraElements == null) + return; + + // NOTE(rkm 2022-07-28) Removed after v1.11.1 + if (ExtraElements.TryGetValue("anonymisedFileName", out object? anonFileNameValue)) { - Header = header ?? throw new ArgumentNullException(nameof(header)); - DicomFilePath = dicomFilePath ?? throw new ArgumentNullException(nameof(dicomFilePath)); - OutputFileName = outputFileName; - ExtractedFileStatus = extractedFileStatus != ExtractedFileStatus.None ? extractedFileStatus : throw new ArgumentException("Cannot be None", nameof(extractedFileStatus)); - VerifiedFileStatus = verifiedFileStatus != VerifiedFileStatus.None ? verifiedFileStatus : throw new ArgumentException("Cannot be None", nameof(verifiedFileStatus)); - - StatusMessage = statusMessage; - if (string.IsNullOrWhiteSpace(StatusMessage) && ExtractedFileStatus != ExtractedFileStatus.Copied) - throw new ArgumentException("Cannot be null or whitespace except for successful file copies", nameof(statusMessage)); + OutputFileName = (string)anonFileNameValue; + DicomFilePath = ""; + ExtractedFileStatus = OutputFileName == null ? ExtractedFileStatus.ErrorWontRetry : ExtractedFileStatus.Anonymised; } - // ^ISupportInitialize - public void BeginInit() { } - - // ^ISupportInitialize - public void EndInit() + // NOTE(rkm 2022-07-28) Removed after v5.1.3 + if (ExtraElements.TryGetValue("isIdentifiable", out object? isIdentifiableValue)) { - if (ExtraElements == null) - return; - - // NOTE(rkm 2022-07-28) Removed after v1.11.1 - if (ExtraElements.TryGetValue("anonymisedFileName", out object? anonFileNameValue)) - { - OutputFileName = (string)anonFileNameValue; - DicomFilePath = ""; - ExtractedFileStatus = OutputFileName == null ? ExtractedFileStatus.ErrorWontRetry : ExtractedFileStatus.Anonymised; - } - - // NOTE(rkm 2022-07-28) Removed after v5.1.3 - if (ExtraElements.TryGetValue("isIdentifiable", out object? 
isIdentifiableValue)) - { - if (OutputFileName == null) - VerifiedFileStatus = VerifiedFileStatus.NotVerified; - else - VerifiedFileStatus = (bool)isIdentifiableValue ? VerifiedFileStatus.IsIdentifiable : VerifiedFileStatus.NotIdentifiable; - } + if (OutputFileName == null) + VerifiedFileStatus = VerifiedFileStatus.NotVerified; + else + VerifiedFileStatus = (bool)isIdentifiableValue ? VerifiedFileStatus.IsIdentifiable : VerifiedFileStatus.NotIdentifiable; } } } diff --git a/src/SmiServices/Microservices/CohortPackager/ExtractionRequestInfoMessageConsumer.cs b/src/SmiServices/Microservices/CohortPackager/ExtractionRequestInfoMessageConsumer.cs index 4993a152b..bad9b0977 100644 --- a/src/SmiServices/Microservices/CohortPackager/ExtractionRequestInfoMessageConsumer.cs +++ b/src/SmiServices/Microservices/CohortPackager/ExtractionRequestInfoMessageConsumer.cs @@ -4,35 +4,34 @@ using SmiServices.Microservices.CohortPackager.ExtractJobStorage; using System; -namespace SmiServices.Microservices.CohortPackager +namespace SmiServices.Microservices.CohortPackager; + +/// + /// Consumer for (s) + /// +public class ExtractionRequestInfoMessageConsumer : Consumer +{ + private readonly IExtractJobStore _store; -{ /// - /// Consumer for (s) - /// - public class ExtractionRequestInfoMessageConsumer : Consumer - { - private readonly IExtractJobStore _store; + public ExtractionRequestInfoMessageConsumer(IExtractJobStore store) + { + _store = store; + } - public ExtractionRequestInfoMessageConsumer(IExtractJobStore store) + protected override void ProcessMessageImpl(IMessageHeader header, ExtractionRequestInfoMessage message, ulong tag) + { + try { - _store = store; + _store.PersistMessageToStore(message, header); } - - protected override void ProcessMessageImpl(IMessageHeader header, ExtractionRequestInfoMessage message, ulong tag) + catch (ApplicationException e) { - try - { - _store.PersistMessageToStore(message, header); - } - catch (ApplicationException e) - { - // Catch specific 
exceptions we are aware of, any uncaught will bubble up to the wrapper in ProcessMessage - ErrorAndNack(header, tag, "Error while processing ExtractionRequestInfoMessage", e); - return; - } - - Ack(header, tag); + // Catch specific exceptions we are aware of, any uncaught will bubble up to the wrapper in ProcessMessage + ErrorAndNack(header, tag, "Error while processing ExtractionRequestInfoMessage", e); + return; } + + Ack(header, tag); } } diff --git a/src/SmiServices/Microservices/CohortPackager/JobProcessing/ExtractJobWatcher.cs b/src/SmiServices/Microservices/CohortPackager/JobProcessing/ExtractJobWatcher.cs index 7c0d581bb..958c271f8 100644 --- a/src/SmiServices/Microservices/CohortPackager/JobProcessing/ExtractJobWatcher.cs +++ b/src/SmiServices/Microservices/CohortPackager/JobProcessing/ExtractJobWatcher.cs @@ -7,127 +7,126 @@ using System.Collections.Generic; using SysTimers = System.Timers; -namespace SmiServices.Microservices.CohortPackager.JobProcessing +namespace SmiServices.Microservices.CohortPackager.JobProcessing; + +/// +/// Class which periodically queries the job store for any ready jobs and performs any final checks +/// +public class ExtractJobWatcher : IExtractJobWatcher { - /// - /// Class which periodically queries the job store for any ready jobs and performs any final checks - /// - public class ExtractJobWatcher : IExtractJobWatcher - { - private readonly ILogger _logger = LogManager.GetCurrentClassLogger(); + private readonly ILogger _logger = LogManager.GetCurrentClassLogger(); - private readonly IExtractJobStore _jobStore; + private readonly IExtractJobStore _jobStore; - private readonly IJobReporter _reporter; - private readonly IJobCompleteNotifier _notifier; + private readonly IJobReporter _reporter; + private readonly IJobCompleteNotifier _notifier; - private readonly SysTimers.Timer _processTimer; - private readonly Action _exceptionCallback; - private readonly object _oProcessorLock = new(); + private readonly SysTimers.Timer 
_processTimer; + private readonly Action _exceptionCallback; + private readonly object _oProcessorLock = new(); - private bool _startCalled; + private bool _startCalled; - public ExtractJobWatcher( - CohortPackagerOptions options, - IExtractJobStore jobStore, - Action exceptionCallback, - IJobCompleteNotifier jobCompleteNotifier, - IJobReporter reporter) - { - _jobStore = jobStore; - _exceptionCallback = exceptionCallback; + public ExtractJobWatcher( + CohortPackagerOptions options, + IExtractJobStore jobStore, + Action exceptionCallback, + IJobCompleteNotifier jobCompleteNotifier, + IJobReporter reporter) + { + _jobStore = jobStore; + _exceptionCallback = exceptionCallback; - _reporter = reporter; - _notifier = jobCompleteNotifier; + _reporter = reporter; + _notifier = jobCompleteNotifier; - _processTimer = new SysTimers.Timer(TimeSpan.FromSeconds(options.JobWatcherTimeoutInSeconds).TotalMilliseconds); - _processTimer.Elapsed += TimerElapsedEvent; - } + _processTimer = new SysTimers.Timer(TimeSpan.FromSeconds(options.JobWatcherTimeoutInSeconds).TotalMilliseconds); + _processTimer.Elapsed += TimerElapsedEvent; + } - public void Start() - { - _logger.Debug("JobWatcher starting"); + public void Start() + { + _logger.Debug("JobWatcher starting"); - // Do an initial run - ProcessJobs(); + // Do an initial run + ProcessJobs(); - _processTimer.Start(); - _startCalled = true; - } + _processTimer.Start(); + _startCalled = true; + } - public void StopProcessing(string reason) - { - _logger.Info($"Stopping ({reason})"); + public void StopProcessing(string reason) + { + _logger.Info($"Stopping ({reason})"); - _processTimer.Stop(); + _processTimer.Stop(); - // Ensures any currently running process finishes - lock (_oProcessorLock) - { - _logger.Debug("Lock released, no more jobs will be processed"); - } + // Ensures any currently running process finishes + lock (_oProcessorLock) + { + _logger.Debug("Lock released, no more jobs will be processed"); } + } + + public void 
ProcessJobs(Guid specificJob = new Guid()) + { + _processTimer.Stop(); - public void ProcessJobs(Guid specificJob = new Guid()) + lock (_oProcessorLock) { - _processTimer.Stop(); + List jobs = _jobStore.GetReadyJobs(specificJob); - lock (_oProcessorLock) - { - List jobs = _jobStore.GetReadyJobs(specificJob); + if (jobs.Count == 0) + _logger.Debug("No jobs ready for checks"); - if (jobs.Count == 0) - _logger.Debug("No jobs ready for checks"); + foreach (ExtractJobInfo job in jobs) + { + Guid jobId = job.ExtractionJobIdentifier; - foreach (ExtractJobInfo job in jobs) + try + { + DoJobCompletionTasks(job); + } + catch (ApplicationException e) { - Guid jobId = job.ExtractionJobIdentifier; - - try - { - DoJobCompletionTasks(job); - } - catch (ApplicationException e) - { - _logger.Warn(e, $"Issue with job {jobId}, marking as failed"); - _jobStore.MarkJobFailed(jobId, e); - } - catch (Exception e) - { - StopProcessing("ProcessJob threw an unhandled exception"); - _exceptionCallback(e); - return; - } + _logger.Warn(e, $"Issue with job {jobId}, marking as failed"); + _jobStore.MarkJobFailed(jobId, e); + } + catch (Exception e) + { + StopProcessing("ProcessJob threw an unhandled exception"); + _exceptionCallback(e); + return; } } - - // Only restart the timer if it was initially running - if (_startCalled) - _processTimer.Start(); } - private void TimerElapsedEvent(object? source, SysTimers.ElapsedEventArgs ea) - { - _logger.Debug("Checking job statuses"); - ProcessJobs(); - } + // Only restart the timer if it was initially running + if (_startCalled) + _processTimer.Start(); + } - private void DoJobCompletionTasks(ExtractJobInfo jobInfo) - { - Guid jobId = jobInfo.ExtractionJobIdentifier; + private void TimerElapsedEvent(object? 
source, SysTimers.ElapsedEventArgs ea) + { + _logger.Debug("Checking job statuses"); + ProcessJobs(); + } + + private void DoJobCompletionTasks(ExtractJobInfo jobInfo) + { + Guid jobId = jobInfo.ExtractionJobIdentifier; - if (jobInfo.JobStatus != ExtractJobStatus.ReadyForChecks) - throw new ApplicationException($"Job {jobId} is not ready for checks"); + if (jobInfo.JobStatus != ExtractJobStatus.ReadyForChecks) + throw new ApplicationException($"Job {jobId} is not ready for checks"); - _logger.Info($"All files for job {jobId} present, running completion tasks"); + _logger.Info($"All files for job {jobId} present, running completion tasks"); - _jobStore.MarkJobCompleted(jobId); + _jobStore.MarkJobCompleted(jobId); - _reporter.CreateReports(jobId); - _logger.Info($"Report for {jobId} created"); + _reporter.CreateReports(jobId); + _logger.Info($"Report for {jobId} created"); - _notifier.NotifyJobCompleted(jobInfo); - } + _notifier.NotifyJobCompleted(jobInfo); } } diff --git a/src/SmiServices/Microservices/CohortPackager/JobProcessing/IExtractJobWatcher.cs b/src/SmiServices/Microservices/CohortPackager/JobProcessing/IExtractJobWatcher.cs index 8f1aeb835..4766e0357 100644 --- a/src/SmiServices/Microservices/CohortPackager/JobProcessing/IExtractJobWatcher.cs +++ b/src/SmiServices/Microservices/CohortPackager/JobProcessing/IExtractJobWatcher.cs @@ -1,9 +1,8 @@ using System; -namespace SmiServices.Microservices.CohortPackager.JobProcessing +namespace SmiServices.Microservices.CohortPackager.JobProcessing; + +public interface IExtractJobWatcher { - public interface IExtractJobWatcher - { - void ProcessJobs(Guid specificJob = new Guid()); - } + void ProcessJobs(Guid specificJob = new Guid()); } diff --git a/src/SmiServices/Microservices/CohortPackager/JobProcessing/Notifying/IJobCompleteNotifier.cs b/src/SmiServices/Microservices/CohortPackager/JobProcessing/Notifying/IJobCompleteNotifier.cs index ca22c597d..8dd96a961 100644 --- 
a/src/SmiServices/Microservices/CohortPackager/JobProcessing/Notifying/IJobCompleteNotifier.cs +++ b/src/SmiServices/Microservices/CohortPackager/JobProcessing/Notifying/IJobCompleteNotifier.cs @@ -1,10 +1,9 @@ using SmiServices.Microservices.CohortPackager.ExtractJobStorage; -namespace SmiServices.Microservices.CohortPackager.JobProcessing.Notifying +namespace SmiServices.Microservices.CohortPackager.JobProcessing.Notifying; + +public interface IJobCompleteNotifier { - public interface IJobCompleteNotifier - { - void NotifyJobCompleted(ExtractJobInfo jobInfo); - } + void NotifyJobCompleted(ExtractJobInfo jobInfo); } diff --git a/src/SmiServices/Microservices/CohortPackager/JobProcessing/Notifying/JobCompleteNotifierFactory.cs b/src/SmiServices/Microservices/CohortPackager/JobProcessing/Notifying/JobCompleteNotifierFactory.cs index a90cb26bf..f92a41292 100644 --- a/src/SmiServices/Microservices/CohortPackager/JobProcessing/Notifying/JobCompleteNotifierFactory.cs +++ b/src/SmiServices/Microservices/CohortPackager/JobProcessing/Notifying/JobCompleteNotifierFactory.cs @@ -1,19 +1,18 @@ using System; -namespace SmiServices.Microservices.CohortPackager.JobProcessing.Notifying +namespace SmiServices.Microservices.CohortPackager.JobProcessing.Notifying; + +public static class JobCompleteNotifierFactory { - public static class JobCompleteNotifierFactory + public static IJobCompleteNotifier GetNotifier( + string notifierTypeStr + ) { - public static IJobCompleteNotifier GetNotifier( - string notifierTypeStr - ) + return notifierTypeStr switch { - return notifierTypeStr switch - { - nameof(LoggingNotifier) => new LoggingNotifier(), - _ => throw new ArgumentException($"No case for type, or invalid type string '{notifierTypeStr}'") - }; - } + nameof(LoggingNotifier) => new LoggingNotifier(), + _ => throw new ArgumentException($"No case for type, or invalid type string '{notifierTypeStr}'") + }; } } diff --git 
a/src/SmiServices/Microservices/CohortPackager/JobProcessing/Notifying/LoggingNotifier.cs b/src/SmiServices/Microservices/CohortPackager/JobProcessing/Notifying/LoggingNotifier.cs index 4b54b113e..34a450c2a 100644 --- a/src/SmiServices/Microservices/CohortPackager/JobProcessing/Notifying/LoggingNotifier.cs +++ b/src/SmiServices/Microservices/CohortPackager/JobProcessing/Notifying/LoggingNotifier.cs @@ -1,18 +1,17 @@ using NLog; using SmiServices.Microservices.CohortPackager.ExtractJobStorage; -namespace SmiServices.Microservices.CohortPackager.JobProcessing.Notifying +namespace SmiServices.Microservices.CohortPackager.JobProcessing.Notifying; + +/// +/// Basic notifier which outputs to its logger. Should be used for testing only +/// +public class LoggingNotifier : IJobCompleteNotifier { - /// - /// Basic notifier which outputs to its logger. Should be used for testing only - /// - public class LoggingNotifier : IJobCompleteNotifier - { - private readonly ILogger _logger = LogManager.GetCurrentClassLogger(); + private readonly ILogger _logger = LogManager.GetCurrentClassLogger(); - public void NotifyJobCompleted(ExtractJobInfo jobInfo) - { - _logger.Info("Job " + jobInfo.ExtractionJobIdentifier + " completed!"); - } + public void NotifyJobCompleted(ExtractJobInfo jobInfo) + { + _logger.Info("Job " + jobInfo.ExtractionJobIdentifier + " completed!"); } } diff --git a/src/SmiServices/Microservices/CohortPackager/JobProcessing/Reporting/IJobReporter.cs b/src/SmiServices/Microservices/CohortPackager/JobProcessing/Reporting/IJobReporter.cs index 2c614b014..52e22d9b4 100644 --- a/src/SmiServices/Microservices/CohortPackager/JobProcessing/Reporting/IJobReporter.cs +++ b/src/SmiServices/Microservices/CohortPackager/JobProcessing/Reporting/IJobReporter.cs @@ -1,10 +1,9 @@ using System; -namespace SmiServices.Microservices.CohortPackager.JobProcessing.Reporting +namespace SmiServices.Microservices.CohortPackager.JobProcessing.Reporting; + +public interface IJobReporter { - 
public interface IJobReporter - { - void CreateReports(Guid jobId); - } + void CreateReports(Guid jobId); } diff --git a/src/SmiServices/Microservices/CohortPackager/JobProcessing/Reporting/JobReporter.cs b/src/SmiServices/Microservices/CohortPackager/JobProcessing/Reporting/JobReporter.cs index a030c1ebd..c17e9e9f8 100644 --- a/src/SmiServices/Microservices/CohortPackager/JobProcessing/Reporting/JobReporter.cs +++ b/src/SmiServices/Microservices/CohortPackager/JobProcessing/Reporting/JobReporter.cs @@ -15,188 +15,187 @@ using System.Linq; using System.Text.RegularExpressions; -namespace SmiServices.Microservices.CohortPackager.JobProcessing.Reporting +namespace SmiServices.Microservices.CohortPackager.JobProcessing.Reporting; + +public sealed class JobReporter : IJobReporter { - public sealed class JobReporter : IJobReporter + private readonly ILogger _logger = LogManager.GetCurrentClassLogger(); + private readonly string _reportNewLine; + private readonly IExtractJobStore _jobStore; + private readonly IFileSystem _fileSystem; + private readonly string _extractionRoot; + private readonly CsvConfiguration _csvConfiguration; + private const string PROCESSING_ERRORS_FILE_NAME = "processing_errors.csv"; + + + public JobReporter( + IExtractJobStore jobStore, + IFileSystem fileSystem, + string extractionRoot, + string? reportNewLine + ) { - private readonly ILogger _logger = LogManager.GetCurrentClassLogger(); - private readonly string _reportNewLine; - private readonly IExtractJobStore _jobStore; - private readonly IFileSystem _fileSystem; - private readonly string _extractionRoot; - private readonly CsvConfiguration _csvConfiguration; - private const string PROCESSING_ERRORS_FILE_NAME = "processing_errors.csv"; - - - public JobReporter( - IExtractJobStore jobStore, - IFileSystem fileSystem, - string extractionRoot, - string? 
reportNewLine - ) - { - _jobStore = jobStore; - _fileSystem = fileSystem; - _extractionRoot = extractionRoot; - - if (!_fileSystem.Path.IsPathRooted(extractionRoot)) - throw new ArgumentException("Path must be rooted", nameof(extractionRoot)); + _jobStore = jobStore; + _fileSystem = fileSystem; + _extractionRoot = extractionRoot; - // NOTE(rkm 2020-11-20) IsNullOrWhiteSpace returns true for newline characters! - if (!string.IsNullOrEmpty(reportNewLine)) - { - if (reportNewLine != "\n" && reportNewLine != "\r\n") - throw new ArgumentOutOfRangeException(nameof(reportNewLine), "Must be a Unix or Windows newline"); - _reportNewLine = reportNewLine; - } - else - { - // NOTE(rkm 2021-04-06) Escape the newline here so it prints correctly... - _logger.Warn($"Not passed a specific newline string for creating reports. Defaulting to Environment.NewLine ('{Regex.Escape(Environment.NewLine)}')"); - // ... and just use the (unescaped) value as-is - _reportNewLine = Environment.NewLine; - } + if (!_fileSystem.Path.IsPathRooted(extractionRoot)) + throw new ArgumentException("Path must be rooted", nameof(extractionRoot)); - _csvConfiguration = new CsvConfiguration(CultureInfo.InvariantCulture) - { - NewLine = _reportNewLine, - }; + // NOTE(rkm 2020-11-20) IsNullOrWhiteSpace returns true for newline characters! + if (!string.IsNullOrEmpty(reportNewLine)) + { + if (reportNewLine != "\n" && reportNewLine != "\r\n") + throw new ArgumentOutOfRangeException(nameof(reportNewLine), "Must be a Unix or Windows newline"); + _reportNewLine = reportNewLine; + } + else + { + // NOTE(rkm 2021-04-06) Escape the newline here so it prints correctly... + _logger.Warn($"Not passed a specific newline string for creating reports. Defaulting to Environment.NewLine ('{Regex.Escape(Environment.NewLine)}')"); + // ... 
and just use the (unescaped) value as-is + _reportNewLine = Environment.NewLine; } - public void CreateReports(Guid jobId) + _csvConfiguration = new CsvConfiguration(CultureInfo.InvariantCulture) { - if (jobId == default) - throw new ArgumentOutOfRangeException(nameof(jobId), "Must provide a non-zero jobId"); + NewLine = _reportNewLine, + }; + } - _logger.Info($"Creating reports for {jobId}"); + public void CreateReports(Guid jobId) + { + if (jobId == default) + throw new ArgumentOutOfRangeException(nameof(jobId), "Must provide a non-zero jobId"); - var completedJobInfo = _jobStore.GetCompletedJobInfo(jobId); + _logger.Info($"Creating reports for {jobId}"); - var jobReportsDirAbsolute = _fileSystem.Path.Combine( - _extractionRoot, - completedJobInfo.ProjectExtractionDir(), - "reports", - completedJobInfo.ExtractionName() - ); + var completedJobInfo = _jobStore.GetCompletedJobInfo(jobId); - if (_fileSystem.Directory.Exists(jobReportsDirAbsolute)) - throw new ApplicationException($"Job reports directory already exists: {jobReportsDirAbsolute}"); + var jobReportsDirAbsolute = _fileSystem.Path.Combine( + _extractionRoot, + completedJobInfo.ProjectExtractionDir(), + "reports", + completedJobInfo.ExtractionName() + ); - _fileSystem.Directory.CreateDirectory(jobReportsDirAbsolute); + if (_fileSystem.Directory.Exists(jobReportsDirAbsolute)) + throw new ApplicationException($"Job reports directory already exists: {jobReportsDirAbsolute}"); - WriteReadme(completedJobInfo, jobReportsDirAbsolute); + _fileSystem.Directory.CreateDirectory(jobReportsDirAbsolute); - WriteRejectedFilesCsv(completedJobInfo, jobReportsDirAbsolute); + WriteReadme(completedJobInfo, jobReportsDirAbsolute); - if (WriteProcessingErrorsCsv(completedJobInfo, jobReportsDirAbsolute)) - _logger.Warn($"Job {jobId} had errors during proecssing. 
Check {PROCESSING_ERRORS_FILE_NAME}"); + WriteRejectedFilesCsv(completedJobInfo, jobReportsDirAbsolute); - if (!completedJobInfo.IsIdentifiableExtraction) - WriteVerificationFailuresCsv(completedJobInfo, jobReportsDirAbsolute); + if (WriteProcessingErrorsCsv(completedJobInfo, jobReportsDirAbsolute)) + _logger.Warn($"Job {jobId} had errors during proecssing. Check {PROCESSING_ERRORS_FILE_NAME}"); - _logger.Info($"Reports for {jobId} written to {jobReportsDirAbsolute}"); - } + if (!completedJobInfo.IsIdentifiableExtraction) + WriteVerificationFailuresCsv(completedJobInfo, jobReportsDirAbsolute); - private void WriteReadme(CompletedExtractJobInfo jobInfo, string jobReportsDirAbsolute) - { - var lines = new List - { - $"# SMI extraction validation report for {jobInfo.ProjectNumber} {jobInfo.ExtractionName()}", - "", - "Job info:", - $"- Job submitted at: {jobInfo.JobSubmittedAt.ToString("s", CultureInfo.InvariantCulture)}", - $"- Job completed at: {jobInfo.JobCompletedAt.ToString("s", CultureInfo.InvariantCulture)}", - $"- Job duration: {jobInfo.JobCompletedAt - jobInfo.JobSubmittedAt}", - $"- Job extraction id: {jobInfo.ExtractionJobIdentifier}", - $"- Extraction tag: {jobInfo.KeyTag}", - $"- Extraction modality: {jobInfo.ExtractionModality ?? "Unspecified"}", - $"- Requested identifier count: {jobInfo.KeyValueCount}", - $"- User name: {jobInfo.UserName}", - $"- Identifiable extraction: {(jobInfo.IsIdentifiableExtraction ? "Yes" : "No")}", - $"- Filtered extraction: {(!jobInfo.IsNoFilterExtraction ? 
"Yes" : "No")}", - "" - }; - - var jobReadmePath = _fileSystem.Path.Combine(jobReportsDirAbsolute, "README.md"); - using var fileStream = _fileSystem.File.OpenWrite(jobReadmePath); - using var streamWriter = GetStreamWriter(fileStream); - streamWriter.Write(string.Join(_reportNewLine, lines)); - } + _logger.Info($"Reports for {jobId} written to {jobReportsDirAbsolute}"); + } - private readonly record struct DicomFileFailure(string DicomFilePath, string Reason); - private bool WriteProcessingErrorsCsv(CompletedExtractJobInfo jobInfo, string jobReportsDirAbsolute) + private void WriteReadme(CompletedExtractJobInfo jobInfo, string jobReportsDirAbsolute) + { + var lines = new List { - var errorsPath = _fileSystem.Path.Combine(jobReportsDirAbsolute, PROCESSING_ERRORS_FILE_NAME); - using var fileStream = _fileSystem.File.OpenWrite(errorsPath); - using var streamWriter = GetStreamWriter(fileStream); - using var csv = new CsvWriter(streamWriter, _csvConfiguration); + $"# SMI extraction validation report for {jobInfo.ProjectNumber} {jobInfo.ExtractionName()}", + "", + "Job info:", + $"- Job submitted at: {jobInfo.JobSubmittedAt.ToString("s", CultureInfo.InvariantCulture)}", + $"- Job completed at: {jobInfo.JobCompletedAt.ToString("s", CultureInfo.InvariantCulture)}", + $"- Job duration: {jobInfo.JobCompletedAt - jobInfo.JobSubmittedAt}", + $"- Job extraction id: {jobInfo.ExtractionJobIdentifier}", + $"- Extraction tag: {jobInfo.KeyTag}", + $"- Extraction modality: {jobInfo.ExtractionModality ?? "Unspecified"}", + $"- Requested identifier count: {jobInfo.KeyValueCount}", + $"- User name: {jobInfo.UserName}", + $"- Identifiable extraction: {(jobInfo.IsIdentifiableExtraction ? "Yes" : "No")}", + $"- Filtered extraction: {(!jobInfo.IsNoFilterExtraction ? 
"Yes" : "No")}", + "" + }; + + var jobReadmePath = _fileSystem.Path.Combine(jobReportsDirAbsolute, "README.md"); + using var fileStream = _fileSystem.File.OpenWrite(jobReadmePath); + using var streamWriter = GetStreamWriter(fileStream); + streamWriter.Write(string.Join(_reportNewLine, lines)); + } - var missing = _jobStore.GetCompletedJobMissingFileList(jobInfo.ExtractionJobIdentifier) - .Select(static f => new DicomFileFailure(f, "Missing")); - csv.WriteRecords(missing); + private readonly record struct DicomFileFailure(string DicomFilePath, string Reason); + private bool WriteProcessingErrorsCsv(CompletedExtractJobInfo jobInfo, string jobReportsDirAbsolute) + { + var errorsPath = _fileSystem.Path.Combine(jobReportsDirAbsolute, PROCESSING_ERRORS_FILE_NAME); + using var fileStream = _fileSystem.File.OpenWrite(errorsPath); + using var streamWriter = GetStreamWriter(fileStream); + using var csv = new CsvWriter(streamWriter, _csvConfiguration); - if (!jobInfo.IsIdentifiableExtraction) - csv.WriteRecords(_jobStore.GetCompletedJobAnonymisationFailures(jobInfo.ExtractionJobIdentifier) - .Select(static fi => new DicomFileFailure(fi.DicomFilePath, fi.Reason))); + var missing = _jobStore.GetCompletedJobMissingFileList(jobInfo.ExtractionJobIdentifier) + .Select(static f => new DicomFileFailure(f, "Missing")); + csv.WriteRecords(missing); - // Row == 2 => only header written => no failures recorded. 
- return csv.Row > 2; - } + if (!jobInfo.IsIdentifiableExtraction) + csv.WriteRecords(_jobStore.GetCompletedJobAnonymisationFailures(jobInfo.ExtractionJobIdentifier) + .Select(static fi => new DicomFileFailure(fi.DicomFilePath, fi.Reason))); - private readonly record struct Rejection(string ExtractionKey, int Count, string Reason); - private void WriteRejectedFilesCsv(CompletedExtractJobInfo jobInfo, string jobReportsDirAbsolute) - { - var rejectionsReportPath = _fileSystem.Path.Combine(jobReportsDirAbsolute, "rejected_files.csv"); - using var fileStream = _fileSystem.File.OpenWrite(rejectionsReportPath); - using var streamWriter = GetStreamWriter(fileStream); - using var csv = new CsvWriter(streamWriter, _csvConfiguration); - - var jobRejections = _jobStore - .GetCompletedJobRejections(jobInfo.ExtractionJobIdentifier) - .OrderByDescending(static x => x.RejectionItems.Sum(static y => y.Value)) - .SelectMany(static info => info.RejectionItems.OrderByDescending(static x => x.Value) - .Select(reason => new Rejection(info.ExtractionIdentifier, reason.Value, reason.Key))); - csv.WriteRecords(jobRejections); - } + // Row == 2 => only header written => no failures recorded. 
+ return csv.Row > 2; + } - private void WriteVerificationFailuresCsv(CompletedExtractJobInfo jobInfo, string jobReportsDirAbsolute) - { - var verificationFailuresReportName = "verification_failures"; - var report = new FailureStoreReport(targetName: "", maxSize: 1_000, _fileSystem); + private readonly record struct Rejection(string ExtractionKey, int Count, string Reason); + private void WriteRejectedFilesCsv(CompletedExtractJobInfo jobInfo, string jobReportsDirAbsolute) + { + var rejectionsReportPath = _fileSystem.Path.Combine(jobReportsDirAbsolute, "rejected_files.csv"); + using var fileStream = _fileSystem.File.OpenWrite(rejectionsReportPath); + using var streamWriter = GetStreamWriter(fileStream); + using var csv = new CsvWriter(streamWriter, _csvConfiguration); + + var jobRejections = _jobStore + .GetCompletedJobRejections(jobInfo.ExtractionJobIdentifier) + .OrderByDescending(static x => x.RejectionItems.Sum(static y => y.Value)) + .SelectMany(static info => info.RejectionItems.OrderByDescending(static x => x.Value) + .Select(reason => new Rejection(info.ExtractionIdentifier, reason.Value, reason.Key))); + csv.WriteRecords(jobRejections); + } - // TODO(rkm 2022-03-22) Can we pass this directly? - var isIdentOptions = new IsIdentifiableFileOptions - { - DestinationCsvFolder = jobReportsDirAbsolute - }; + private void WriteVerificationFailuresCsv(CompletedExtractJobInfo jobInfo, string jobReportsDirAbsolute) + { + var verificationFailuresReportName = "verification_failures"; + var report = new FailureStoreReport(targetName: "", maxSize: 1_000, _fileSystem); - using var csvDest = new CsvDestination(isIdentOptions, verificationFailuresReportName, _fileSystem, false, _csvConfiguration); - report.AddDestination(csvDest); + // TODO(rkm 2022-03-22) Can we pass this directly? 
+ var isIdentOptions = new IsIdentifiableFileOptions + { + DestinationCsvFolder = jobReportsDirAbsolute + }; + + using var csvDest = new CsvDestination(isIdentOptions, verificationFailuresReportName, _fileSystem, false, _csvConfiguration); + report.AddDestination(csvDest); - foreach (var fileVerificationFailureInfo in _jobStore.GetCompletedJobVerificationFailures(jobInfo.ExtractionJobIdentifier)) + foreach (var fileVerificationFailureInfo in _jobStore.GetCompletedJobVerificationFailures(jobInfo.ExtractionJobIdentifier)) + { + try { - try - { - // NOTE(rkm 2024-02-09) fileVerificationFailureInfo.Data can never be null, so neither can fileFailures - var fileFailures = JsonConvert.DeserializeObject>(fileVerificationFailureInfo.Data)!; - foreach (var failure in fileFailures) - { - // NOTE(rkm 2022-03-17) Updates the Resource to be the relative path in the output directory - failure.Resource = fileVerificationFailureInfo.AnonFilePath; - - report.Add(failure); - } - } - catch (JsonException e) + // NOTE(rkm 2024-02-09) fileVerificationFailureInfo.Data can never be null, so neither can fileFailures + var fileFailures = JsonConvert.DeserializeObject>(fileVerificationFailureInfo.Data)!; + foreach (var failure in fileFailures) { - throw new ApplicationException($"Could not deserialize report content for {fileVerificationFailureInfo.AnonFilePath}", e); - } + // NOTE(rkm 2022-03-17) Updates the Resource to be the relative path in the output directory + failure.Resource = fileVerificationFailureInfo.AnonFilePath; + report.Add(failure); + } + } + catch (JsonException e) + { + throw new ApplicationException($"Could not deserialize report content for {fileVerificationFailureInfo.AnonFilePath}", e); } - report.CloseReport(); } - private StreamWriter GetStreamWriter(Stream stream) => new(stream) { NewLine = _reportNewLine }; + report.CloseReport(); } + + private StreamWriter GetStreamWriter(Stream stream) => new(stream) { NewLine = _reportNewLine }; } diff --git 
a/src/SmiServices/Microservices/DicomAnonymiser/Anonymisers/AnonymiserFactory.cs b/src/SmiServices/Microservices/DicomAnonymiser/Anonymisers/AnonymiserFactory.cs index 62181966d..2a5ca308b 100644 --- a/src/SmiServices/Microservices/DicomAnonymiser/Anonymisers/AnonymiserFactory.cs +++ b/src/SmiServices/Microservices/DicomAnonymiser/Anonymisers/AnonymiserFactory.cs @@ -1,22 +1,21 @@ using SmiServices.Common.Options; using System; -namespace SmiServices.Microservices.DicomAnonymiser.Anonymisers +namespace SmiServices.Microservices.DicomAnonymiser.Anonymisers; + +public static class AnonymiserFactory { - public static class AnonymiserFactory + public static IDicomAnonymiser CreateAnonymiser(GlobalOptions options) { - public static IDicomAnonymiser CreateAnonymiser(GlobalOptions options) - { - var anonymiserTypeStr = options.DicomAnonymiserOptions!.AnonymiserType; - if (!Enum.TryParse(anonymiserTypeStr, ignoreCase: true, out AnonymiserType anonymiserType)) - throw new ArgumentException($"Could not parse '{anonymiserTypeStr}' to a valid AnonymiserType"); + var anonymiserTypeStr = options.DicomAnonymiserOptions!.AnonymiserType; + if (!Enum.TryParse(anonymiserTypeStr, ignoreCase: true, out AnonymiserType anonymiserType)) + throw new ArgumentException($"Could not parse '{anonymiserTypeStr}' to a valid AnonymiserType"); - return anonymiserType switch - { - AnonymiserType.DefaultAnonymiser => new DefaultAnonymiser(options), - // TODO(rkm 2021-12-07) Can remove the LGTM ignore once an AnonymiserType is implemented - _ => throw new NotImplementedException($"No case for AnonymiserType '{anonymiserType}'"), // lgtm[cs/constant-condition] - }; - } + return anonymiserType switch + { + AnonymiserType.DefaultAnonymiser => new DefaultAnonymiser(options), + // TODO(rkm 2021-12-07) Can remove the LGTM ignore once an AnonymiserType is implemented + _ => throw new NotImplementedException($"No case for AnonymiserType '{anonymiserType}'"), // lgtm[cs/constant-condition] + }; } } diff --git 
a/src/SmiServices/Microservices/DicomAnonymiser/Anonymisers/AnonymiserType.cs b/src/SmiServices/Microservices/DicomAnonymiser/Anonymisers/AnonymiserType.cs index 366205b0d..60f85e7da 100644 --- a/src/SmiServices/Microservices/DicomAnonymiser/Anonymisers/AnonymiserType.cs +++ b/src/SmiServices/Microservices/DicomAnonymiser/Anonymisers/AnonymiserType.cs @@ -1,11 +1,10 @@ -namespace SmiServices.Microservices.DicomAnonymiser.Anonymisers +namespace SmiServices.Microservices.DicomAnonymiser.Anonymisers; + +public enum AnonymiserType { - public enum AnonymiserType - { - /// - /// Unused placeholder value - /// - None = 0, - DefaultAnonymiser = 1, - } + /// + /// Unused placeholder value + /// + None = 0, + DefaultAnonymiser = 1, } diff --git a/src/SmiServices/Microservices/DicomAnonymiser/Anonymisers/DefaultAnonymiser.cs b/src/SmiServices/Microservices/DicomAnonymiser/Anonymisers/DefaultAnonymiser.cs index c7f700d0e..9f058d5d6 100644 --- a/src/SmiServices/Microservices/DicomAnonymiser/Anonymisers/DefaultAnonymiser.cs +++ b/src/SmiServices/Microservices/DicomAnonymiser/Anonymisers/DefaultAnonymiser.cs @@ -6,134 +6,133 @@ using System.Diagnostics; using System.IO.Abstractions; -namespace SmiServices.Microservices.DicomAnonymiser.Anonymisers +namespace SmiServices.Microservices.DicomAnonymiser.Anonymisers; + +public class DefaultAnonymiser : IDicomAnonymiser { - public class DefaultAnonymiser : IDicomAnonymiser - { - private readonly ILogger _logger = LogManager.GetCurrentClassLogger(); - private readonly DicomAnonymiserOptions _options; - private const string _bash = "/bin/bash"; + private readonly ILogger _logger = LogManager.GetCurrentClassLogger(); + private readonly DicomAnonymiserOptions _options; + private const string _bash = "/bin/bash"; - public DefaultAnonymiser(GlobalOptions globalOptions) - { - if (globalOptions.DicomAnonymiserOptions == null) - throw new ArgumentNullException(nameof(globalOptions)); + public DefaultAnonymiser(GlobalOptions globalOptions) + { + 
if (globalOptions.DicomAnonymiserOptions == null) + throw new ArgumentNullException(nameof(globalOptions)); - if (globalOptions.LoggingOptions == null) - throw new ArgumentNullException(nameof(globalOptions)); + if (globalOptions.LoggingOptions == null) + throw new ArgumentNullException(nameof(globalOptions)); - _options = globalOptions.DicomAnonymiserOptions; - } + _options = globalOptions.DicomAnonymiserOptions; + } - /// - /// Creates a process with the given parameters - /// - private static Process CreateProcess(string fileName, string arguments, string? workingDirectory = null, Dictionary? environmentVariables = null) + /// + /// Creates a process with the given parameters + /// + private static Process CreateProcess(string fileName, string arguments, string? workingDirectory = null, Dictionary? environmentVariables = null) + { + var process = new Process { - var process = new Process - { - StartInfo = new ProcessStartInfo - { - FileName = fileName, - Arguments = arguments, - UseShellExecute = false, - RedirectStandardOutput = true, - RedirectStandardError = true, - WorkingDirectory = workingDirectory ?? string.Empty - } - }; - - if (environmentVariables != null) + StartInfo = new ProcessStartInfo { - foreach (var variable in environmentVariables) - { - process.StartInfo.EnvironmentVariables[variable.Key] = variable.Value; - } + FileName = fileName, + Arguments = arguments, + UseShellExecute = false, + RedirectStandardOutput = true, + RedirectStandardError = true, + WorkingDirectory = workingDirectory ?? 
string.Empty } + }; - return process; + if (environmentVariables != null) + { + foreach (var variable in environmentVariables) + { + process.StartInfo.EnvironmentVariables[variable.Key] = variable.Value; + } } - private Process CreateCTPAnonProcess(IFileInfo sourceFile, IFileInfo destFile) - { - string arguments = $"-jar {_options.CtpAnonCliJar} -a {_options.CtpAllowlistScript} -s false {sourceFile} {destFile}"; + return process; + } - return CreateProcess("java", arguments); - } + private Process CreateCTPAnonProcess(IFileInfo sourceFile, IFileInfo destFile) + { + string arguments = $"-jar {_options.CtpAnonCliJar} -a {_options.CtpAllowlistScript} -s false {sourceFile} {destFile}"; - private Process CreatePixelAnonProcess(IFileInfo sourceFile, IFileInfo destFile) - { - string activateCommand = $"source {_options.VirtualEnvPath}/bin/activate"; - string arguments = $"-c \"{activateCommand} && {_options.DicomPixelAnonPath}/dicom_pixel_anon.sh -o {destFile} {sourceFile}\""; + return CreateProcess("java", arguments); + } - return CreateProcess(_bash, arguments, _options.DicomPixelAnonPath); - } + private Process CreatePixelAnonProcess(IFileInfo sourceFile, IFileInfo destFile) + { + string activateCommand = $"source {_options.VirtualEnvPath}/bin/activate"; + string arguments = $"-c \"{activateCommand} && {_options.DicomPixelAnonPath}/dicom_pixel_anon.sh -o {destFile} {sourceFile}\""; - // TODO (da 2024-02-23) Use StructuredReports repository to access SRAnonTool - private Process CreateSRAnonProcess(IFileInfo sourceFile, IFileInfo destFile) - { - string arguments = $"{_options.SRAnonymiserToolPath} -i {sourceFile} -o {destFile} -s /Users/daniyalarshad/EPCC/github/NationalSafeHaven/opt/semehr"; - _ = new Dictionary { { "SMI_ROOT", $"{_options.SmiServicesPath}" } }; + return CreateProcess(_bash, arguments, _options.DicomPixelAnonPath); + } - return CreateProcess(_bash, arguments); - } + // TODO (da 2024-02-23) Use StructuredReports repository to access SRAnonTool + private 
Process CreateSRAnonProcess(IFileInfo sourceFile, IFileInfo destFile) + { + string arguments = $"{_options.SRAnonymiserToolPath} -i {sourceFile} -o {destFile} -s /Users/daniyalarshad/EPCC/github/NationalSafeHaven/opt/semehr"; + _ = new Dictionary { { "SMI_ROOT", $"{_options.SmiServicesPath}" } }; - /// - /// Anonymises a DICOM file based on image modality - /// - public ExtractedFileStatus Anonymise(ExtractFileMessage message, IFileInfo sourceFile, IFileInfo destFile, out string anonymiserStatusMessage) + return CreateProcess(_bash, arguments); + } + + /// + /// Anonymises a DICOM file based on image modality + /// + public ExtractedFileStatus Anonymise(ExtractFileMessage message, IFileInfo sourceFile, IFileInfo destFile, out string anonymiserStatusMessage) + { + _logger.Info($"Anonymising {sourceFile} to {destFile}"); + + if (!RunProcessAndCheckSuccess(CreateCTPAnonProcess(sourceFile, destFile), "CTP Anonymiser")) { - _logger.Info($"Anonymising {sourceFile} to {destFile}"); + anonymiserStatusMessage = "Error running CTP anonymiser"; + return ExtractedFileStatus.ErrorWontRetry; + } - if (!RunProcessAndCheckSuccess(CreateCTPAnonProcess(sourceFile, destFile), "CTP Anonymiser")) + if (message.Modality == "SR") + { + if (!RunProcessAndCheckSuccess(CreateSRAnonProcess(sourceFile, destFile), "SR Anonymiser")) { - anonymiserStatusMessage = "Error running CTP anonymiser"; + anonymiserStatusMessage = "Error running SR anonymiser"; return ExtractedFileStatus.ErrorWontRetry; } - - if (message.Modality == "SR") - { - if (!RunProcessAndCheckSuccess(CreateSRAnonProcess(sourceFile, destFile), "SR Anonymiser")) - { - anonymiserStatusMessage = "Error running SR anonymiser"; - return ExtractedFileStatus.ErrorWontRetry; - } - } - else + } + else + { + if (!RunProcessAndCheckSuccess(CreatePixelAnonProcess(sourceFile, destFile), "Pixel Anonymiser")) { - if (!RunProcessAndCheckSuccess(CreatePixelAnonProcess(sourceFile, destFile), "Pixel Anonymiser")) - { - anonymiserStatusMessage = 
"Error running PIXEL anonymiser"; - return ExtractedFileStatus.ErrorWontRetry; - } + anonymiserStatusMessage = "Error running PIXEL anonymiser"; + return ExtractedFileStatus.ErrorWontRetry; } - - anonymiserStatusMessage = "Anonymisation successful"; - return ExtractedFileStatus.Anonymised; } - /// - /// Runs a process and logs the result - /// - private bool RunProcessAndCheckSuccess(Process process, string processName) - { - process.Start(); - process.WaitForExit(); + anonymiserStatusMessage = "Anonymisation successful"; + return ExtractedFileStatus.Anonymised; + } - var returnCode = process.ExitCode.ToString(); - LogProcessResult(processName, returnCode, process); + /// + /// Runs a process and logs the result + /// + private bool RunProcessAndCheckSuccess(Process process, string processName) + { + process.Start(); + process.WaitForExit(); - return returnCode == "0"; - } + var returnCode = process.ExitCode.ToString(); + LogProcessResult(processName, returnCode, process); - /// - /// Logs the result of a process - /// - private void LogProcessResult(string processName, string returnCode, Process process) - { - var output = returnCode == "0" ? process.StandardOutput.ReadToEnd() : process.StandardError.ReadToEnd(); - _logger.Info($"{(returnCode == "0" ? "SUCCESS" : "ERROR")} [{processName}]: Return Code {returnCode}\n{output}"); - } + return returnCode == "0"; + } + /// + /// Logs the result of a process + /// + private void LogProcessResult(string processName, string returnCode, Process process) + { + var output = returnCode == "0" ? process.StandardOutput.ReadToEnd() : process.StandardError.ReadToEnd(); + _logger.Info($"{(returnCode == "0" ? 
"SUCCESS" : "ERROR")} [{processName}]: Return Code {returnCode}\n{output}"); } + } diff --git a/src/SmiServices/Microservices/DicomAnonymiser/Anonymisers/IDicomAnonymiser.cs b/src/SmiServices/Microservices/DicomAnonymiser/Anonymisers/IDicomAnonymiser.cs index f8207b819..4b3e9b7e8 100644 --- a/src/SmiServices/Microservices/DicomAnonymiser/Anonymisers/IDicomAnonymiser.cs +++ b/src/SmiServices/Microservices/DicomAnonymiser/Anonymisers/IDicomAnonymiser.cs @@ -1,19 +1,18 @@ using SmiServices.Common.Messages.Extraction; using System.IO.Abstractions; -namespace SmiServices.Microservices.DicomAnonymiser.Anonymisers +namespace SmiServices.Microservices.DicomAnonymiser.Anonymisers; + +public interface IDicomAnonymiser { - public interface IDicomAnonymiser - { - /// - /// Anonymise the specified to based on the provided modality. - /// Implementations should assume that already exists, and does not exist. - /// - /// - /// - /// - /// - /// - ExtractedFileStatus Anonymise(ExtractFileMessage message, IFileInfo sourceFile, IFileInfo destFile, out string anonymiserStatusMessage); - } + /// + /// Anonymise the specified to based on the provided modality. + /// Implementations should assume that already exists, and does not exist. 
+ /// + /// + /// + /// + /// + /// + ExtractedFileStatus Anonymise(ExtractFileMessage message, IFileInfo sourceFile, IFileInfo destFile, out string anonymiserStatusMessage); } diff --git a/src/SmiServices/Microservices/DicomAnonymiser/DicomAnonymiser.cs b/src/SmiServices/Microservices/DicomAnonymiser/DicomAnonymiser.cs index 09c31b0a3..628ba321c 100644 --- a/src/SmiServices/Microservices/DicomAnonymiser/DicomAnonymiser.cs +++ b/src/SmiServices/Microservices/DicomAnonymiser/DicomAnonymiser.cs @@ -3,26 +3,25 @@ using System.Collections.Generic; using System.Diagnostics.CodeAnalysis; -namespace SmiServices.Microservices.DicomAnonymiser +namespace SmiServices.Microservices.DicomAnonymiser; + +public static class DicomAnonymiser { - public static class DicomAnonymiser + /// + /// Program entry point when run from the command line + /// + /// + [ExcludeFromCodeCoverage] + public static int Main(IEnumerable args) { - /// - /// Program entry point when run from the command line - /// - /// - [ExcludeFromCodeCoverage] - public static int Main(IEnumerable args) - { - int ret = SmiCliInit.ParseAndRun(args, nameof(DicomAnonymiser), OnParse); - return ret; - } + int ret = SmiCliInit.ParseAndRun(args, nameof(DicomAnonymiser), OnParse); + return ret; + } - private static int OnParse(GlobalOptions globals, CliOptions opts) - { - var bootstrapper = new MicroserviceHostBootstrapper(() => new DicomAnonymiserHost(globals)); - int ret = bootstrapper.Main(); - return ret; - } + private static int OnParse(GlobalOptions globals, CliOptions opts) + { + var bootstrapper = new MicroserviceHostBootstrapper(() => new DicomAnonymiserHost(globals)); + int ret = bootstrapper.Main(); + return ret; } } diff --git a/src/SmiServices/Microservices/DicomAnonymiser/DicomAnonymiserConsumer.cs b/src/SmiServices/Microservices/DicomAnonymiser/DicomAnonymiserConsumer.cs index 29e8e2ba3..232336cbb 100644 --- a/src/SmiServices/Microservices/DicomAnonymiser/DicomAnonymiserConsumer.cs +++ 
b/src/SmiServices/Microservices/DicomAnonymiser/DicomAnonymiserConsumer.cs @@ -9,139 +9,138 @@ using System.IO; using System.IO.Abstractions; -namespace SmiServices.Microservices.DicomAnonymiser +namespace SmiServices.Microservices.DicomAnonymiser; + +public class DicomAnonymiserConsumer : Consumer { - public class DicomAnonymiserConsumer : Consumer + // TODO (da 2024-02-23) Additional Requirement: Message Batching + // https://github.com/SMI/SmiServices/blob/main/src/microservices/Microservices.CohortPackager/Messaging/AnonVerificationMessageConsumer.cs#L72 + // https://github.com/SMI/SmiServices/blob/main/src/microservices/Microservices.MongoDbPopulator/Messaging/MongoDbPopulatorMessageConsumer.cs#L56 + + private readonly ILogger _logger = LogManager.GetCurrentClassLogger(); + private readonly DicomAnonymiserOptions _options; + private readonly IFileSystem _fileSystem; + private readonly string _fileSystemRoot; + private readonly string _extractRoot; + private readonly IDicomAnonymiser _anonymiser; + private readonly IProducerModel _statusMessageProducer; + + public DicomAnonymiserConsumer( + DicomAnonymiserOptions options, + string fileSystemRoot, + string extractRoot, + IDicomAnonymiser anonymiser, + IProducerModel statusMessageProducer, + IFileSystem? 
fileSystem = null + ) { - // TODO (da 2024-02-23) Additional Requirement: Message Batching - // https://github.com/SMI/SmiServices/blob/main/src/microservices/Microservices.CohortPackager/Messaging/AnonVerificationMessageConsumer.cs#L72 - // https://github.com/SMI/SmiServices/blob/main/src/microservices/Microservices.MongoDbPopulator/Messaging/MongoDbPopulatorMessageConsumer.cs#L56 - - private readonly ILogger _logger = LogManager.GetCurrentClassLogger(); - private readonly DicomAnonymiserOptions _options; - private readonly IFileSystem _fileSystem; - private readonly string _fileSystemRoot; - private readonly string _extractRoot; - private readonly IDicomAnonymiser _anonymiser; - private readonly IProducerModel _statusMessageProducer; - - public DicomAnonymiserConsumer( - DicomAnonymiserOptions options, - string fileSystemRoot, - string extractRoot, - IDicomAnonymiser anonymiser, - IProducerModel statusMessageProducer, - IFileSystem? fileSystem = null - ) + _options = options ?? throw new ArgumentNullException(nameof(options)); + _fileSystemRoot = fileSystemRoot ?? throw new ArgumentNullException(nameof(fileSystemRoot)); + _extractRoot = extractRoot ?? throw new ArgumentNullException(nameof(extractRoot)); + _anonymiser = anonymiser ?? throw new ArgumentNullException(nameof(anonymiser)); + _statusMessageProducer = statusMessageProducer ?? throw new ArgumentNullException(nameof(statusMessageProducer)); + _fileSystem = fileSystem ?? 
new FileSystem(); + + if (!_fileSystem.Directory.Exists(_fileSystemRoot)) + throw new Exception($"Filesystem root does not exist: '{fileSystemRoot}'"); + + if (!_fileSystem.Directory.Exists(_extractRoot)) + throw new Exception($"Extract root does not exist: '{extractRoot}'"); + } + + protected override void ProcessMessageImpl(IMessageHeader header, ExtractFileMessage message, ulong tag) + { + if (message.IsIdentifiableExtraction) + throw new Exception("DicomAnonymiserConsumer should not handle identifiable extraction messages"); + + var statusMessage = new ExtractedFileStatusMessage(message); + + var sourceFileAbs = _fileSystem.FileInfo.New(_fileSystem.Path.Combine(_fileSystemRoot, message.DicomFilePath)); + + if (!sourceFileAbs.Exists) { - _options = options ?? throw new ArgumentNullException(nameof(options)); - _fileSystemRoot = fileSystemRoot ?? throw new ArgumentNullException(nameof(fileSystemRoot)); - _extractRoot = extractRoot ?? throw new ArgumentNullException(nameof(extractRoot)); - _anonymiser = anonymiser ?? throw new ArgumentNullException(nameof(anonymiser)); - _statusMessageProducer = statusMessageProducer ?? throw new ArgumentNullException(nameof(statusMessageProducer)); - _fileSystem = fileSystem ?? 
new FileSystem(); - - if (!_fileSystem.Directory.Exists(_fileSystemRoot)) - throw new Exception($"Filesystem root does not exist: '{fileSystemRoot}'"); - - if (!_fileSystem.Directory.Exists(_extractRoot)) - throw new Exception($"Extract root does not exist: '{extractRoot}'"); + statusMessage.Status = ExtractedFileStatus.FileMissing; + statusMessage.StatusMessage = $"Could not find file to anonymise: '{sourceFileAbs}'"; + statusMessage.OutputFilePath = null; + _statusMessageProducer.SendMessage(statusMessage, header, _options.RoutingKeyFailure); + + Ack(header, tag); + return; } - protected override void ProcessMessageImpl(IMessageHeader header, ExtractFileMessage message, ulong tag) + if (_options.FailIfSourceWriteable && !sourceFileAbs.Attributes.HasFlag(FileAttributes.ReadOnly)) { - if (message.IsIdentifiableExtraction) - throw new Exception("DicomAnonymiserConsumer should not handle identifiable extraction messages"); - - var statusMessage = new ExtractedFileStatusMessage(message); - - var sourceFileAbs = _fileSystem.FileInfo.New(_fileSystem.Path.Combine(_fileSystemRoot, message.DicomFilePath)); - - if (!sourceFileAbs.Exists) - { - statusMessage.Status = ExtractedFileStatus.FileMissing; - statusMessage.StatusMessage = $"Could not find file to anonymise: '{sourceFileAbs}'"; - statusMessage.OutputFilePath = null; - _statusMessageProducer.SendMessage(statusMessage, header, _options.RoutingKeyFailure); - - Ack(header, tag); - return; - } - - if (_options.FailIfSourceWriteable && !sourceFileAbs.Attributes.HasFlag(FileAttributes.ReadOnly)) - { - statusMessage.Status = ExtractedFileStatus.ErrorWontRetry; - statusMessage.StatusMessage = $"Source file was writeable and FailIfSourceWriteable is set: '{sourceFileAbs}'"; - statusMessage.OutputFilePath = null; - _statusMessageProducer.SendMessage(statusMessage, header, _options.RoutingKeyFailure); - - Ack(header, tag); - return; - } - - var extractionDirAbs = _fileSystem.Path.Combine(_extractRoot, 
message.ExtractionDirectory); - - // NOTE(rkm 2021-12-07) Since this directory should have already been created, we treat this more like an assertion and throw if not found. - // This helps prevent a flood of messages if e.g. the filesystem is temporarily unavailable. - if (!_fileSystem.Directory.Exists(extractionDirAbs)) - throw new DirectoryNotFoundException($"Expected extraction directory to exist: '{extractionDirAbs}'"); - - var destFileAbs = _fileSystem.FileInfo.New(_fileSystem.Path.Combine(extractionDirAbs, message.OutputPath)); - - destFileAbs.Directory!.Create(); - - _logger.Debug($"Anonymising '{sourceFileAbs}' to '{destFileAbs}'"); - - // TODO (rkm 2024-02-09) Extract modality from cohort extractor instead of opening DICOM file - DicomFile dicomFile = DicomFile.Open(sourceFileAbs.FullName); - message.Modality = dicomFile.Dataset.GetSingleValue(DicomTag.Modality); - - Console.WriteLine("[DICOM] Modality: " + message.Modality); - Console.WriteLine("[DICOM] Source File: " + message.DicomFilePath); - Console.WriteLine("[DICOM] Dest File: " + message.OutputPath); - - ExtractedFileStatus anonymiserStatus = ExtractedFileStatus.None; - string anonymiserStatusMessage = ""; - - try - { - anonymiserStatus = _anonymiser.Anonymise(message, sourceFileAbs, destFileAbs, out anonymiserStatusMessage); - } - catch (Exception e) - { - var msg = $"Error anonymising '{sourceFileAbs}'"; - _logger.Error(e, msg); - - statusMessage.Status = ExtractedFileStatus.ErrorWontRetry; - statusMessage.StatusMessage = $"{msg}. 
Exception message: {e.Message}"; - statusMessage.OutputFilePath = null; - _statusMessageProducer.SendMessage(statusMessage, header, _options.RoutingKeyFailure); - - Ack(header, tag); - } - - string routingKey; - - if (anonymiserStatus == ExtractedFileStatus.Anonymised) - { - _logger.Info($"Anonymisation of '{sourceFileAbs}' successful"); - statusMessage.Status = anonymiserStatus; - statusMessage.StatusMessage = anonymiserStatusMessage; - routingKey = _options.RoutingKeySuccess ?? "verify"; - } - else - { - _logger.Info($"Anonymisation of '{sourceFileAbs}' failed"); - statusMessage.OutputFilePath = null; - statusMessage.Status = anonymiserStatus; - statusMessage.StatusMessage = anonymiserStatusMessage; - routingKey = _options.RoutingKeyFailure ?? "noverify"; - } - - _statusMessageProducer.SendMessage(statusMessage, header, routingKey); + statusMessage.Status = ExtractedFileStatus.ErrorWontRetry; + statusMessage.StatusMessage = $"Source file was writeable and FailIfSourceWriteable is set: '{sourceFileAbs}'"; + statusMessage.OutputFilePath = null; + _statusMessageProducer.SendMessage(statusMessage, header, _options.RoutingKeyFailure); Ack(header, tag); return; } + + var extractionDirAbs = _fileSystem.Path.Combine(_extractRoot, message.ExtractionDirectory); + + // NOTE(rkm 2021-12-07) Since this directory should have already been created, we treat this more like an assertion and throw if not found. + // This helps prevent a flood of messages if e.g. the filesystem is temporarily unavailable. 
+ if (!_fileSystem.Directory.Exists(extractionDirAbs)) + throw new DirectoryNotFoundException($"Expected extraction directory to exist: '{extractionDirAbs}'"); + + var destFileAbs = _fileSystem.FileInfo.New(_fileSystem.Path.Combine(extractionDirAbs, message.OutputPath)); + + destFileAbs.Directory!.Create(); + + _logger.Debug($"Anonymising '{sourceFileAbs}' to '{destFileAbs}'"); + + // TODO (rkm 2024-02-09) Extract modality from cohort extractor instead of opening DICOM file + DicomFile dicomFile = DicomFile.Open(sourceFileAbs.FullName); + message.Modality = dicomFile.Dataset.GetSingleValue(DicomTag.Modality); + + Console.WriteLine("[DICOM] Modality: " + message.Modality); + Console.WriteLine("[DICOM] Source File: " + message.DicomFilePath); + Console.WriteLine("[DICOM] Dest File: " + message.OutputPath); + + ExtractedFileStatus anonymiserStatus = ExtractedFileStatus.None; + string anonymiserStatusMessage = ""; + + try + { + anonymiserStatus = _anonymiser.Anonymise(message, sourceFileAbs, destFileAbs, out anonymiserStatusMessage); + } + catch (Exception e) + { + var msg = $"Error anonymising '{sourceFileAbs}'"; + _logger.Error(e, msg); + + statusMessage.Status = ExtractedFileStatus.ErrorWontRetry; + statusMessage.StatusMessage = $"{msg}. Exception message: {e.Message}"; + statusMessage.OutputFilePath = null; + _statusMessageProducer.SendMessage(statusMessage, header, _options.RoutingKeyFailure); + + Ack(header, tag); + } + + string routingKey; + + if (anonymiserStatus == ExtractedFileStatus.Anonymised) + { + _logger.Info($"Anonymisation of '{sourceFileAbs}' successful"); + statusMessage.Status = anonymiserStatus; + statusMessage.StatusMessage = anonymiserStatusMessage; + routingKey = _options.RoutingKeySuccess ?? 
"verify"; + } + else + { + _logger.Info($"Anonymisation of '{sourceFileAbs}' failed"); + statusMessage.OutputFilePath = null; + statusMessage.Status = anonymiserStatus; + statusMessage.StatusMessage = anonymiserStatusMessage; + routingKey = _options.RoutingKeyFailure ?? "noverify"; + } + + _statusMessageProducer.SendMessage(statusMessage, header, routingKey); + + Ack(header, tag); + return; } } diff --git a/src/SmiServices/Microservices/DicomAnonymiser/DicomAnonymiserHost.cs b/src/SmiServices/Microservices/DicomAnonymiser/DicomAnonymiserHost.cs index 08867db04..1dd5d81a9 100644 --- a/src/SmiServices/Microservices/DicomAnonymiser/DicomAnonymiserHost.cs +++ b/src/SmiServices/Microservices/DicomAnonymiser/DicomAnonymiserHost.cs @@ -4,48 +4,47 @@ using System; using System.IO.Abstractions; -namespace SmiServices.Microservices.DicomAnonymiser +namespace SmiServices.Microservices.DicomAnonymiser; + +public class DicomAnonymiserHost : MicroserviceHost { - public class DicomAnonymiserHost : MicroserviceHost + private readonly IDicomAnonymiser _anonymiser; + private readonly DicomAnonymiserConsumer _consumer; + + public DicomAnonymiserHost( + GlobalOptions options, + IDicomAnonymiser? anonymiser = null, + IFileSystem? fileSystem = null + ) + : base(options) { - private readonly IDicomAnonymiser _anonymiser; - private readonly DicomAnonymiserConsumer _consumer; - - public DicomAnonymiserHost( - GlobalOptions options, - IDicomAnonymiser? anonymiser = null, - IFileSystem? fileSystem = null - ) - : base(options) - { - _anonymiser = anonymiser ?? AnonymiserFactory.CreateAnonymiser(options!); - - var producerModel = MessageBroker.SetupProducer(options.DicomAnonymiserOptions!.ExtractFileStatusProducerOptions!, isBatch: false); - - _consumer = new DicomAnonymiserConsumer( - Globals.DicomAnonymiserOptions!, - Globals.FileSystemOptions!.FileSystemRoot!, - Globals.FileSystemOptions.ExtractRoot!, - _anonymiser, - producerModel, - fileSystem - ); - } + _anonymiser = anonymiser ?? 
AnonymiserFactory.CreateAnonymiser(options!); - public override void Start() - { - MessageBroker.StartConsumer(Globals.DicomAnonymiserOptions!.AnonFileConsumerOptions!, _consumer, isSolo: false); - } + var producerModel = MessageBroker.SetupProducer(options.DicomAnonymiserOptions!.ExtractFileStatusProducerOptions!, isBatch: false); - public override void Stop(string reason) - { - if (_anonymiser is IDisposable disposable) - { - Logger.Info("Disposing anonymiser"); - disposable.Dispose(); - } + _consumer = new DicomAnonymiserConsumer( + Globals.DicomAnonymiserOptions!, + Globals.FileSystemOptions!.FileSystemRoot!, + Globals.FileSystemOptions.ExtractRoot!, + _anonymiser, + producerModel, + fileSystem + ); + } + + public override void Start() + { + MessageBroker.StartConsumer(Globals.DicomAnonymiserOptions!.AnonFileConsumerOptions!, _consumer, isSolo: false); + } - base.Stop(reason); + public override void Stop(string reason) + { + if (_anonymiser is IDisposable disposable) + { + Logger.Info("Disposing anonymiser"); + disposable.Dispose(); } + + base.Stop(reason); } } diff --git a/src/SmiServices/Microservices/DicomRelationalMapper/DicomFileMessageToDatasetListProvider.cs b/src/SmiServices/Microservices/DicomRelationalMapper/DicomFileMessageToDatasetListProvider.cs index ad56a103e..7e2f56fae 100644 --- a/src/SmiServices/Microservices/DicomRelationalMapper/DicomFileMessageToDatasetListProvider.cs +++ b/src/SmiServices/Microservices/DicomRelationalMapper/DicomFileMessageToDatasetListProvider.cs @@ -3,53 +3,52 @@ using System.Collections.Generic; using System.Linq; -namespace SmiServices.Microservices.DicomRelationalMapper +namespace SmiServices.Microservices.DicomRelationalMapper; + +public class DicomFileMessageToDatasetListWorklist : IDicomDatasetWorklist { - public class DicomFileMessageToDatasetListWorklist : IDicomDatasetWorklist + private readonly List _messages; + private int _progress; + + public HashSet CorruptMessages = []; + + public 
DicomFileMessageToDatasetListWorklist(List messages) { - private readonly List _messages; - private int _progress; + _messages = messages; + } - public HashSet CorruptMessages = []; + /// + /// Resets the progress through the work list e.g. if half the list is consumed and you want to + /// start again. + /// + public void ResetProgress() + { + _progress = 0; + } - public DicomFileMessageToDatasetListWorklist(List messages) - { - _messages = messages; - } + public DicomDataset? GetNextDatasetToProcess(out string? filename, out Dictionary otherValuesToStoreInRow) + { + otherValuesToStoreInRow = []; - /// - /// Resets the progress through the work list e.g. if half the list is consumed and you want to - /// start again. - /// - public void ResetProgress() + if (_progress >= _messages.Count) { - _progress = 0; + filename = null; + return null; } - public DicomDataset? GetNextDatasetToProcess(out string? filename, out Dictionary otherValuesToStoreInRow) - { - otherValuesToStoreInRow = []; - - if (_progress >= _messages.Count) - { - filename = null; - return null; - } + QueuedImage toReturn = _messages[_progress]; + filename = toReturn.DicomFileMessage.DicomFilePath; - QueuedImage toReturn = _messages[_progress]; - filename = toReturn.DicomFileMessage.DicomFilePath; + otherValuesToStoreInRow.Add("MessageGuid", _messages[_progress].Header.MessageGuid.ToString()); + otherValuesToStoreInRow.Add("DicomFileSize", toReturn.DicomFileMessage.DicomFileSize.ToString()); //TN: It won't be a string when it hits the database but the API supports only string/string for this out Dictionary - otherValuesToStoreInRow.Add("MessageGuid", _messages[_progress].Header.MessageGuid.ToString()); - otherValuesToStoreInRow.Add("DicomFileSize", toReturn.DicomFileMessage.DicomFileSize.ToString()); //TN: It won't be a string when it hits the database but the API supports only string/string for this out Dictionary + _progress++; - _progress++; - - return toReturn.DicomDataset; - } + return 
toReturn.DicomDataset; + } - public void MarkCorrupt(DicomDataset ds) - { - CorruptMessages.Add(_messages.Single(m => m.DicomDataset == ds)); - } + public void MarkCorrupt(DicomDataset ds) + { + CorruptMessages.Add(_messages.Single(m => m.DicomDataset == ds)); } } diff --git a/src/SmiServices/Microservices/DicomRelationalMapper/DicomRelationalMapper.cs b/src/SmiServices/Microservices/DicomRelationalMapper/DicomRelationalMapper.cs index cdd9ae1df..22c7a1912 100644 --- a/src/SmiServices/Microservices/DicomRelationalMapper/DicomRelationalMapper.cs +++ b/src/SmiServices/Microservices/DicomRelationalMapper/DicomRelationalMapper.cs @@ -3,22 +3,21 @@ using System.Collections.Generic; using System.Diagnostics.CodeAnalysis; -namespace SmiServices.Microservices.DicomRelationalMapper +namespace SmiServices.Microservices.DicomRelationalMapper; + +public static class DicomRelationalMapper { - public static class DicomRelationalMapper + [ExcludeFromCodeCoverage] + public static int Main(IEnumerable args) { - [ExcludeFromCodeCoverage] - public static int Main(IEnumerable args) - { - int ret = SmiCliInit.ParseAndRun(args, nameof(DicomRelationalMapper), OnParse); - return ret; - } + int ret = SmiCliInit.ParseAndRun(args, nameof(DicomRelationalMapper), OnParse); + return ret; + } - private static int OnParse(GlobalOptions globals, CliOptions opts) - { - var bootstrapper = new MicroserviceHostBootstrapper(() => new DicomRelationalMapperHost(globals)); - int ret = bootstrapper.Main(); - return ret; - } + private static int OnParse(GlobalOptions globals, CliOptions opts) + { + var bootstrapper = new MicroserviceHostBootstrapper(() => new DicomRelationalMapperHost(globals)); + int ret = bootstrapper.Main(); + return ret; } } diff --git a/src/SmiServices/Microservices/DicomRelationalMapper/DicomRelationalMapperHost.cs b/src/SmiServices/Microservices/DicomRelationalMapper/DicomRelationalMapperHost.cs index 2922e9d26..f40152445 100644 --- 
a/src/SmiServices/Microservices/DicomRelationalMapper/DicomRelationalMapperHost.cs +++ b/src/SmiServices/Microservices/DicomRelationalMapper/DicomRelationalMapperHost.cs @@ -13,78 +13,77 @@ using System; -namespace SmiServices.Microservices.DicomRelationalMapper +namespace SmiServices.Microservices.DicomRelationalMapper; + +public class DicomRelationalMapperHost : MicroserviceHost, IDisposable { - public class DicomRelationalMapperHost : MicroserviceHost, IDisposable - { - public DicomRelationalMapperQueueConsumer? Consumer { get; private set; } + public DicomRelationalMapperQueueConsumer? Consumer { get; private set; } - public DicomRelationalMapperHost(GlobalOptions globals) - : base(globals) - { - FansiImplementations.Load(); - } + public DicomRelationalMapperHost(GlobalOptions globals) + : base(globals) + { + FansiImplementations.Load(); + } - //TODO Should most of this not be in the constructor? - public override void Start() - { - var repositoryLocator = Globals.RDMPOptions!.GetRepositoryProvider(); + //TODO Should most of this not be in the constructor? 
+ public override void Start() + { + var repositoryLocator = Globals.RDMPOptions!.GetRepositoryProvider(); - Logger.Info("About to run Startup"); + Logger.Info("About to run Startup"); - var startup = new Startup(repositoryLocator); - startup.DatabaseFound += Startup_DatabaseFound; + var startup = new Startup(repositoryLocator); + startup.DatabaseFound += Startup_DatabaseFound; - var toMemory = new ToMemoryCheckNotifier(); - startup.DoStartup(toMemory); + var toMemory = new ToMemoryCheckNotifier(); + startup.DoStartup(toMemory); - foreach (var args in toMemory.Messages) - Logger.Log(args.ToLogLevel(), args.Ex, args.Message); + foreach (var args in toMemory.Messages) + Logger.Log(args.ToLogLevel(), args.Ex, args.Message); - Logger.Info("Startup Completed"); + Logger.Info("Startup Completed"); - var lmd = repositoryLocator.CatalogueRepository.GetObjectByID(Globals.DicomRelationalMapperOptions!.LoadMetadataId); + var lmd = repositoryLocator.CatalogueRepository.GetObjectByID(Globals.DicomRelationalMapperOptions!.LoadMetadataId); - var databaseNamerType = MEF.GetType(Globals.DicomRelationalMapperOptions.DatabaseNamerType) ?? throw new Exception($"Could not find Type '{Globals.DicomRelationalMapperOptions.DatabaseNamerType}'"); + var databaseNamerType = MEF.GetType(Globals.DicomRelationalMapperOptions.DatabaseNamerType) ?? throw new Exception($"Could not find Type '{Globals.DicomRelationalMapperOptions.DatabaseNamerType}'"); - var liveDatabaseName = lmd.GetDistinctLiveDatabaseServer().GetCurrentDatabase()?.GetRuntimeName() ?? throw new Exception("Unable to find database name"); + var liveDatabaseName = lmd.GetDistinctLiveDatabaseServer().GetCurrentDatabase()?.GetRuntimeName() ?? throw new Exception("Unable to find database name"); - var instance = ObjectFactory.CreateInstance(databaseNamerType, liveDatabaseName, Globals.DicomRelationalMapperOptions.Guid) - ?? 
throw new Exception("Could not create an INameDatabasesAndTablesDuringLoads"); + var instance = ObjectFactory.CreateInstance(databaseNamerType, liveDatabaseName, Globals.DicomRelationalMapperOptions.Guid) + ?? throw new Exception("Could not create an INameDatabasesAndTablesDuringLoads"); - Consumer = new DicomRelationalMapperQueueConsumer(repositoryLocator, - lmd, - instance, - Globals.DicomRelationalMapperOptions) - { - RunChecks = Globals.DicomRelationalMapperOptions.RunChecks - }; + Consumer = new DicomRelationalMapperQueueConsumer(repositoryLocator, + lmd, + instance, + Globals.DicomRelationalMapperOptions) + { + RunChecks = Globals.DicomRelationalMapperOptions.RunChecks + }; - MessageBroker.StartConsumer(Globals.DicomRelationalMapperOptions, Consumer, isSolo: false); - } + MessageBroker.StartConsumer(Globals.DicomRelationalMapperOptions, Consumer, isSolo: false); + } - private void Startup_DatabaseFound(object sender, PlatformDatabaseFoundEventArgs e) - { + private void Startup_DatabaseFound(object sender, PlatformDatabaseFoundEventArgs e) + { - var msg = "RDMPPlatformDatabaseStatus is " + e.Status + " for tier " + e.Patcher.Tier + - (e.Exception == null - ? "No exception" - : ExceptionHelper.ExceptionToListOfInnerMessages(e.Exception)); + var msg = "RDMPPlatformDatabaseStatus is " + e.Status + " for tier " + e.Patcher.Tier + + (e.Exception == null + ? "No exception" + : ExceptionHelper.ExceptionToListOfInnerMessages(e.Exception)); - Logger.Log(e.Status == RDMPPlatformDatabaseStatus.Healthy ? LogLevel.Info : LogLevel.Error, e.Exception, msg); - } + Logger.Log(e.Status == RDMPPlatformDatabaseStatus.Healthy ? 
LogLevel.Info : LogLevel.Error, e.Exception, msg); + } - public override void Stop(string reason) - { - Consumer?.Stop(reason); + public override void Stop(string reason) + { + Consumer?.Stop(reason); - base.Stop(reason); - } + base.Stop(reason); + } - public void Dispose() - { - Consumer?.Dispose(); - GC.SuppressFinalize(this); - } + public void Dispose() + { + Consumer?.Dispose(); + GC.SuppressFinalize(this); } } diff --git a/src/SmiServices/Microservices/DicomRelationalMapper/DicomRelationalMapperQueueConsumer.cs b/src/SmiServices/Microservices/DicomRelationalMapper/DicomRelationalMapperQueueConsumer.cs index d9a3896fa..56fdb07fa 100644 --- a/src/SmiServices/Microservices/DicomRelationalMapper/DicomRelationalMapperQueueConsumer.cs +++ b/src/SmiServices/Microservices/DicomRelationalMapper/DicomRelationalMapperQueueConsumer.cs @@ -19,294 +19,293 @@ using System.Threading; using System.Threading.Tasks; -namespace SmiServices.Microservices.DicomRelationalMapper +namespace SmiServices.Microservices.DicomRelationalMapper; + +public class DicomRelationalMapperQueueConsumer : Consumer, IDisposable { - public class DicomRelationalMapperQueueConsumer : Consumer, IDisposable - { - //TODO This is literally only public for testing purposes - public INameDatabasesAndTablesDuringLoads DatabaseNamer { get; private set; } + //TODO This is literally only public for testing purposes + public INameDatabasesAndTablesDuringLoads DatabaseNamer { get; private set; } - public int MessagesProcessed { get { return NackCount + AckCount; } } + public int MessagesProcessed { get { return NackCount + AckCount; } } - /// - /// Collection of all DLE crash messages (including those where successful restart runs were performed). - /// - public IReadOnlyCollection DleErrors => new ReadOnlyCollection(_dleExceptions); + /// + /// Collection of all DLE crash messages (including those where successful restart runs were performed). 
+ /// + public IReadOnlyCollection DleErrors => new ReadOnlyCollection(_dleExceptions); - private readonly List _dleExceptions = []; + private readonly List _dleExceptions = []; - private readonly LoadMetadata _lmd; - private readonly IRDMPPlatformRepositoryServiceLocator _repositoryLocator; + private readonly LoadMetadata _lmd; + private readonly IRDMPPlatformRepositoryServiceLocator _repositoryLocator; - private DateTime _lastRanDle = DateTime.Now; + private DateTime _lastRanDle = DateTime.Now; - // Unprocessed messages awaiting an opportunity to be run - private readonly Queue _imageQueue = new(); - private readonly object _oQueueLock = new(); + // Unprocessed messages awaiting an opportunity to be run + private readonly Queue _imageQueue = new(); + private readonly object _oQueueLock = new(); - private bool _stopCalled; + private bool _stopCalled; - private readonly int _minimumBatchSize; - private readonly bool _useInsertIntoForRawMigration; - private readonly int _retryOnFailureCount; - private readonly int _retryDelayInSeconds; + private readonly int _minimumBatchSize; + private readonly bool _useInsertIntoForRawMigration; + private readonly int _retryOnFailureCount; + private readonly int _retryDelayInSeconds; - /// - /// The maximum number of seconds to wait for MinimumBatchSize to be reached before emptying the Queue anyway - /// - private readonly TimeSpan _maximumRunDelayInSeconds; + /// + /// The maximum number of seconds to wait for MinimumBatchSize to be reached before emptying the Queue anyway + /// + private readonly TimeSpan _maximumRunDelayInSeconds; - private Task? _dleTask; - private readonly CancellationTokenSource _stopTokenSource = new(); + private Task? _dleTask; + private readonly CancellationTokenSource _stopTokenSource = new(); - /// - /// True to run before the data load accepting all proposed fixes (e.g. 
dropping RAW) - /// Default is false - /// - public bool RunChecks { get; set; } + /// + /// True to run before the data load accepting all proposed fixes (e.g. dropping RAW) + /// Default is false + /// + public bool RunChecks { get; set; } - public DicomRelationalMapperQueueConsumer( - IRDMPPlatformRepositoryServiceLocator repositoryLocator, - LoadMetadata lmd, - INameDatabasesAndTablesDuringLoads namer, - DicomRelationalMapperOptions options - ) - { - _lmd = lmd; - _repositoryLocator = repositoryLocator; - DatabaseNamer = namer; + public DicomRelationalMapperQueueConsumer( + IRDMPPlatformRepositoryServiceLocator repositoryLocator, + LoadMetadata lmd, + INameDatabasesAndTablesDuringLoads namer, + DicomRelationalMapperOptions options + ) + { + _lmd = lmd; + _repositoryLocator = repositoryLocator; + DatabaseNamer = namer; - _minimumBatchSize = options.MinimumBatchSize; - _useInsertIntoForRawMigration = options.UseInsertIntoForRAWMigration; - _retryOnFailureCount = options.RetryOnFailureCount; - _retryDelayInSeconds = Math.Max(10, options.RetryDelayInSeconds); - _maximumRunDelayInSeconds = new TimeSpan(0, 0, 0, options.MaximumRunDelayInSeconds <= 0 ? 15 : 0); + _minimumBatchSize = options.MinimumBatchSize; + _useInsertIntoForRawMigration = options.UseInsertIntoForRAWMigration; + _retryOnFailureCount = options.RetryOnFailureCount; + _retryDelayInSeconds = Math.Max(10, options.RetryDelayInSeconds); + _maximumRunDelayInSeconds = new TimeSpan(0, 0, 0, options.MaximumRunDelayInSeconds <= 0 ? 
15 : 0); + + StartDleRunnerTask(); + } - StartDleRunnerTask(); - } + protected override void ProcessMessageImpl(IMessageHeader header, DicomFileMessage message, ulong tag) + { + DicomDataset dataset; - protected override void ProcessMessageImpl(IMessageHeader header, DicomFileMessage message, ulong tag) + try + { + dataset = DicomTypeTranslater.DeserializeJsonToDataset(message.DicomDataset); + } + catch (Exception e) { - DicomDataset dataset; + ErrorAndNack(header, tag, "Could not rebuild DicomDataset from message", e); + return; + } - try - { - dataset = DicomTypeTranslater.DeserializeJsonToDataset(message.DicomDataset); - } - catch (Exception e) - { - ErrorAndNack(header, tag, "Could not rebuild DicomDataset from message", e); - return; - } + var toQueue = new QueuedImage(header, tag, message, dataset); - var toQueue = new QueuedImage(header, tag, message, dataset); + lock (_oQueueLock) + _imageQueue.Enqueue(toQueue); + } - lock (_oQueueLock) - _imageQueue.Enqueue(toQueue); - } + public void Stop(string reason) + { + Logger.Debug("Stop called: {0}", reason); - public void Stop(string reason) + if (_stopCalled) { - Logger.Debug("Stop called: {0}", reason); - - if (_stopCalled) - { - Logger.Warn("Stop called twice"); - return; - } + Logger.Warn("Stop called twice"); + return; + } - _stopCalled = true; + _stopCalled = true; - // Cancel the DLE runner task and wait for it to exit. This will deadlock if the DLE task ever calls Stop directly - _stopTokenSource.Cancel(); - _dleTask!.Wait(); + // Cancel the DLE runner task and wait for it to exit. 
This will deadlock if the DLE task ever calls Stop directly + _stopTokenSource.Cancel(); + _dleTask!.Wait(); - if (DatabaseNamer is ICreateAndDestroyStagingDuringLoads createAndDestroyStaging) - createAndDestroyStaging.DestroyStagingIfExists(); - } + if (DatabaseNamer is ICreateAndDestroyStagingDuringLoads createAndDestroyStaging) + createAndDestroyStaging.DestroyStagingIfExists(); + } - private void StartDleRunnerTask() + private void StartDleRunnerTask() + { + _dleTask = Task.Factory.StartNew(() => { - _dleTask = Task.Factory.StartNew(() => - { - Exception? faultCause = null; + Exception? faultCause = null; - while (!_stopTokenSource.IsCancellationRequested) + while (!_stopTokenSource.IsCancellationRequested) + { + try { - try - { - RunDleIfRequired(); - } - catch (Exception e) - { - // Handles any exceptions not caused by the DLE returning an error code - _stopTokenSource.Cancel(); - faultCause = e; - _dleExceptions.Add(e); - Logger.Log(LogLevel.Error, e, "DLE crashed during RunDleIfRequired"); - } + RunDleIfRequired(); + } + catch (Exception e) + { + // Handles any exceptions not caused by the DLE returning an error code + _stopTokenSource.Cancel(); + faultCause = e; + _dleExceptions.Add(e); + Logger.Log(LogLevel.Error, e, "DLE crashed during RunDleIfRequired"); } + } - if (faultCause != null) - Fatal("Unhandled exception in DLE runner task", faultCause); + if (faultCause != null) + Fatal("Unhandled exception in DLE runner task", faultCause); - Logger.Debug("DLE runner task exiting"); - }); + Logger.Debug("DLE runner task exiting"); + }); - Logger.Debug("DLE task started"); - } + Logger.Debug("DLE task started"); + } - private void RunDleIfRequired() - { - //if there are a decent number ready to go or we haven't run in a while (and there is at least 1) - if (GetQueueCount() < (DateTime.Now.Subtract(_lastRanDle) > _maximumRunDelayInSeconds ? 
1 : _minimumBatchSize)) - return; + private void RunDleIfRequired() + { + //if there are a decent number ready to go or we haven't run in a while (and there is at least 1) + if (GetQueueCount() < (DateTime.Now.Subtract(_lastRanDle) > _maximumRunDelayInSeconds ? 1 : _minimumBatchSize)) + return; - var toProcess = new List(); - var duplicates = new List(); - var seenSoFar = new HashSet(); + var toProcess = new List(); + var duplicates = new List(); + var seenSoFar = new HashSet(); - // Get the messages we will start this DLE with, accounting for duplicates - lock (_oQueueLock) + // Get the messages we will start this DLE with, accounting for duplicates + lock (_oQueueLock) + { + while (_imageQueue.Count > 0) { - while (_imageQueue.Count > 0) - { - QueuedImage queuedImage = _imageQueue.Dequeue(); + QueuedImage queuedImage = _imageQueue.Dequeue(); - if (seenSoFar.Contains(queuedImage.DicomFileMessage.DicomDataset)) - { - duplicates.Add(queuedImage); - } - else - { - toProcess.Add(queuedImage); - seenSoFar.Add(queuedImage.DicomFileMessage.DicomDataset); - } + if (seenSoFar.Contains(queuedImage.DicomFileMessage.DicomDataset)) + { + duplicates.Add(queuedImage); + } + else + { + toProcess.Add(queuedImage); + seenSoFar.Add(queuedImage.DicomFileMessage.DicomDataset); } } + } - //All messages were rejected - if (toProcess.Count == 0) - return; + //All messages were rejected + if (toProcess.Count == 0) + return; - if (duplicates.Count != 0) - { - Logger.Log(LogLevel.Warn, "Acking " + duplicates.Count + " duplicate Datasets"); - duplicates.ForEach(x => Ack(x.Header, x.Tag)); - } + if (duplicates.Count != 0) + { + Logger.Log(LogLevel.Warn, "Acking " + duplicates.Count + " duplicate Datasets"); + duplicates.ForEach(x => Ack(x.Header, x.Tag)); + } - var parallelDleHost = new ParallelDLEHost(_repositoryLocator, DatabaseNamer, _useInsertIntoForRawMigration); - Logger.Info("Starting DLE with " + toProcess.Count + " messages"); + var parallelDleHost = new 
ParallelDLEHost(_repositoryLocator, DatabaseNamer, _useInsertIntoForRawMigration); + Logger.Info("Starting DLE with " + toProcess.Count + " messages"); - if (RunChecks) - RunDleChecks(); + if (RunChecks) + RunDleChecks(); - int remainingRetries = _retryOnFailureCount; - Exception? firstException = null; + int remainingRetries = _retryOnFailureCount; + Exception? firstException = null; - ExitCodeType exitCode; + ExitCodeType exitCode; - var datasetProvider = new DicomFileMessageToDatasetListWorklist(toProcess); + var datasetProvider = new DicomFileMessageToDatasetListWorklist(toProcess); - do - { - Logger.Debug("Starting a ParallelDLEHost"); + do + { + Logger.Debug("Starting a ParallelDLEHost"); - // We last ran now! - _lastRanDle = DateTime.Now; + // We last ran now! + _lastRanDle = DateTime.Now; - //reset the progress e.g. if we crashed later on in the load - datasetProvider.ResetProgress(); + //reset the progress e.g. if we crashed later on in the load + datasetProvider.ResetProgress(); - try - { - exitCode = parallelDleHost.RunDLE(_lmd, datasetProvider); - } - catch (Exception e) - { - Logger.Debug(e, "ParallelDLEHost threw exception of type " + e.GetType()); - _dleExceptions.Add(e); - exitCode = ExitCodeType.Error; + try + { + exitCode = parallelDleHost.RunDLE(_lmd, datasetProvider); + } + catch (Exception e) + { + Logger.Debug(e, "ParallelDLEHost threw exception of type " + e.GetType()); + _dleExceptions.Add(e); + exitCode = ExitCodeType.Error; - if (remainingRetries > 0) - { - //wait a random length of time averaging the _retryDelayInSeconds to avoid retrying at the same time as other processes - //where there is resource contention that results in simultaneous failures. - var r = new Random(); + if (remainingRetries > 0) + { + //wait a random length of time averaging the _retryDelayInSeconds to avoid retrying at the same time as other processes + //where there is resource contention that results in simultaneous failures. 
+ var r = new Random(); - var wait = r.Next(_retryDelayInSeconds * 2); + var wait = r.Next(_retryDelayInSeconds * 2); - Logger.Info("Sleeping " + wait + "s after failure"); - Task.Delay(new TimeSpan(0, 0, 0, wait)).Wait(); + Logger.Info("Sleeping " + wait + "s after failure"); + Task.Delay(new TimeSpan(0, 0, 0, wait)).Wait(); - if (RunChecks) - { - Logger.Warn(e, "Running checks before we retry"); - RunDleChecks(); - } + if (RunChecks) + { + Logger.Warn(e, "Running checks before we retry"); + RunDleChecks(); } - - firstException ??= e; } + + firstException ??= e; } - while (remainingRetries-- > 0 && (exitCode == ExitCodeType.Error || exitCode == ExitCodeType.Abort)); + } + while (remainingRetries-- > 0 && (exitCode == ExitCodeType.Error || exitCode == ExitCodeType.Abort)); - Logger.Info("DLE exited with code " + exitCode); + Logger.Info("DLE exited with code " + exitCode); - switch (exitCode) - { - case ExitCodeType.Success: - case ExitCodeType.OperationNotRequired: - { - foreach (QueuedImage corrupt in datasetProvider.CorruptMessages) - ErrorAndNack(corrupt.Header, corrupt.Tag, "Nacking Corrupt image", new Exception()); + switch (exitCode) + { + case ExitCodeType.Success: + case ExitCodeType.OperationNotRequired: + { + foreach (QueuedImage corrupt in datasetProvider.CorruptMessages) + ErrorAndNack(corrupt.Header, corrupt.Tag, "Nacking Corrupt image", new Exception()); - QueuedImage[] successes = toProcess.Except(datasetProvider.CorruptMessages).ToArray(); + QueuedImage[] successes = toProcess.Except(datasetProvider.CorruptMessages).ToArray(); - Ack(successes.Select(x => x.Header).ToList(), - successes.Select(x => x.Tag).Max(x => x)); + Ack(successes.Select(x => x.Header).ToList(), + successes.Select(x => x.Tag).Max(x => x)); - break; - } - case ExitCodeType.Error: - case ExitCodeType.Abort: - { - _stopTokenSource.Cancel(); - Fatal("DLE Crashed " + (_retryOnFailureCount + 1) + " time(s) on the same batch", firstException!); - break; - } - default: - { - 
_stopTokenSource.Cancel(); - Fatal("No case for DLE exit code " + exitCode, new Exception()); - break; - } - } + break; + } + case ExitCodeType.Error: + case ExitCodeType.Abort: + { + _stopTokenSource.Cancel(); + Fatal("DLE Crashed " + (_retryOnFailureCount + 1) + " time(s) on the same batch", firstException!); + break; + } + default: + { + _stopTokenSource.Cancel(); + Fatal("No case for DLE exit code " + exitCode, new Exception()); + break; + } } + } - private void RunDleChecks() - { - var preChecker = new PreExecutionChecker(_lmd, new HICDatabaseConfiguration(_lmd, DatabaseNamer)); - preChecker.Check(new AcceptAllCheckNotifier()); - } + private void RunDleChecks() + { + var preChecker = new PreExecutionChecker(_lmd, new HICDatabaseConfiguration(_lmd, DatabaseNamer)); + preChecker.Check(new AcceptAllCheckNotifier()); + } - private int GetQueueCount() + private int GetQueueCount() + { + lock (_oQueueLock) { - lock (_oQueueLock) - { - return _imageQueue.Count; - } + return _imageQueue.Count; } + } - public void Dispose() - { - //make sure we stop the consume loop if it hasn't already stopped - if (_stopTokenSource != null && !_stopTokenSource.IsCancellationRequested) - _stopTokenSource.Cancel(); - GC.SuppressFinalize(this); - } + public void Dispose() + { + //make sure we stop the consume loop if it hasn't already stopped + if (_stopTokenSource != null && !_stopTokenSource.IsCancellationRequested) + _stopTokenSource.Cancel(); + GC.SuppressFinalize(this); } } diff --git a/src/SmiServices/Microservices/DicomRelationalMapper/ExplicitListDicomProcessListProvider.cs b/src/SmiServices/Microservices/DicomRelationalMapper/ExplicitListDicomProcessListProvider.cs index 27735d607..752a64d06 100644 --- a/src/SmiServices/Microservices/DicomRelationalMapper/ExplicitListDicomProcessListProvider.cs +++ b/src/SmiServices/Microservices/DicomRelationalMapper/ExplicitListDicomProcessListProvider.cs @@ -3,43 +3,42 @@ using System; using System.IO; -namespace 
SmiServices.Microservices.DicomRelationalMapper +namespace SmiServices.Microservices.DicomRelationalMapper; + +public class ExplicitListDicomFileWorklist : IDicomFileWorklist { - public class ExplicitListDicomFileWorklist : IDicomFileWorklist + private readonly string[] _filesAndOrDirectories; + private int index = 0; + + public ExplicitListDicomFileWorklist(string[] filesAndOrDirectories) + { + _filesAndOrDirectories = filesAndOrDirectories; + } + + public bool GetNextFileOrDirectoryToProcess(out DirectoryInfo? directory, out AmbiguousFilePath? file) { - private readonly string[] _filesAndOrDirectories; - private int index = 0; + directory = null; + file = null; - public ExplicitListDicomFileWorklist(string[] filesAndOrDirectories) + if (index >= _filesAndOrDirectories.Length) { - _filesAndOrDirectories = filesAndOrDirectories; + return false; } - public bool GetNextFileOrDirectoryToProcess(out DirectoryInfo? directory, out AmbiguousFilePath? file) + if (File.Exists(_filesAndOrDirectories[index])) { - directory = null; - file = null; - - if (index >= _filesAndOrDirectories.Length) - { - return false; - } - - if (File.Exists(_filesAndOrDirectories[index])) - { - file = new AmbiguousFilePath(_filesAndOrDirectories[index]); - index++; - return true; - } - - if (Directory.Exists(_filesAndOrDirectories[index])) - { - directory = new DirectoryInfo(_filesAndOrDirectories[index]); - index++; - return true; - } - - throw new Exception("Array element " + index + " of filesAndOrDirectories was not a File or Directory (or the referenced file did not exist). 
Array element is '" + _filesAndOrDirectories[index] + "'"); + file = new AmbiguousFilePath(_filesAndOrDirectories[index]); + index++; + return true; } + + if (Directory.Exists(_filesAndOrDirectories[index])) + { + directory = new DirectoryInfo(_filesAndOrDirectories[index]); + index++; + return true; + } + + throw new Exception("Array element " + index + " of filesAndOrDirectories was not a File or Directory (or the referenced file did not exist). Array element is '" + _filesAndOrDirectories[index] + "'"); } } diff --git a/src/SmiServices/Microservices/DicomRelationalMapper/MigrateRawToStagingWithSelectIntoStatements.cs b/src/SmiServices/Microservices/DicomRelationalMapper/MigrateRawToStagingWithSelectIntoStatements.cs index bfb74849b..d32febf9b 100644 --- a/src/SmiServices/Microservices/DicomRelationalMapper/MigrateRawToStagingWithSelectIntoStatements.cs +++ b/src/SmiServices/Microservices/DicomRelationalMapper/MigrateRawToStagingWithSelectIntoStatements.cs @@ -10,87 +10,86 @@ using System.Diagnostics; using System.Linq; -namespace SmiServices.Microservices.DicomRelationalMapper +namespace SmiServices.Microservices.DicomRelationalMapper; + +internal class MigrateRawToStagingWithSelectIntoStatements : DataLoadComponent { - internal class MigrateRawToStagingWithSelectIntoStatements : DataLoadComponent - { - public override ExitCodeType Run(IDataLoadJob job, GracefulCancellationToken cancellationToken) - { - if (Skip(job)) - return ExitCodeType.Error; + public override ExitCodeType Run(IDataLoadJob job, GracefulCancellationToken cancellationToken) + { + if (Skip(job)) + return ExitCodeType.Error; - var configuration = job.Configuration; - var namer = configuration.DatabaseNamer; + var configuration = job.Configuration; + var namer = configuration.DatabaseNamer; - var server = job.LoadMetadata.GetDistinctLiveDatabaseServer(); + var server = job.LoadMetadata.GetDistinctLiveDatabaseServer(); - //Drop any STAGING tables that already exist - foreach (var table in 
job.RegularTablesToLoad) - { - var stagingDbName = table.GetDatabaseRuntimeName(LoadStage.AdjustStaging, namer); - var stagingTableName = table.GetRuntimeName(LoadStage.AdjustStaging, namer); + //Drop any STAGING tables that already exist + foreach (var table in job.RegularTablesToLoad) + { + var stagingDbName = table.GetDatabaseRuntimeName(LoadStage.AdjustStaging, namer); + var stagingTableName = table.GetRuntimeName(LoadStage.AdjustStaging, namer); - var stagingDb = server.ExpectDatabase(stagingDbName); - var stagingTable = stagingDb.ExpectTable(stagingTableName); + var stagingDb = server.ExpectDatabase(stagingDbName); + var stagingTable = stagingDb.ExpectTable(stagingTableName); - if (stagingDb.Exists() && stagingTable.Exists()) - { - job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, $"Dropping existing STAGING table remnant {stagingTable.GetFullyQualifiedName()}")); - stagingTable.Drop(); - } + if (stagingDb.Exists() && stagingTable.Exists()) + { + job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, $"Dropping existing STAGING table remnant {stagingTable.GetFullyQualifiedName()}")); + stagingTable.Drop(); } + } - //Now create STAGING tables (empty) - var cloner = new DatabaseCloner(configuration); - job.CreateTablesInStage(cloner, LoadBubble.Staging); + //Now create STAGING tables (empty) + var cloner = new DatabaseCloner(configuration); + job.CreateTablesInStage(cloner, LoadBubble.Staging); - using var con = server.GetConnection(); - con.Open(); + using var con = server.GetConnection(); + con.Open(); - var sw = Stopwatch.StartNew(); + var sw = Stopwatch.StartNew(); - foreach (TableInfo table in job.RegularTablesToLoad.Cast()) - { - var fromDb = table.GetDatabaseRuntimeName(LoadStage.AdjustRaw, namer); - var toDb = table.GetDatabaseRuntimeName(LoadStage.AdjustStaging, namer); + foreach (TableInfo table in job.RegularTablesToLoad.Cast()) + { + var fromDb = table.GetDatabaseRuntimeName(LoadStage.AdjustRaw, namer); + var toDb 
= table.GetDatabaseRuntimeName(LoadStage.AdjustStaging, namer); - var fromTable = table.GetRuntimeName(LoadStage.AdjustRaw, namer); - var toTable = table.GetRuntimeName(LoadStage.AdjustStaging, namer); + var fromTable = table.GetRuntimeName(LoadStage.AdjustRaw, namer); + var toTable = table.GetRuntimeName(LoadStage.AdjustStaging, namer); - var syntaxHelper = table.GetQuerySyntaxHelper(); + var syntaxHelper = table.GetQuerySyntaxHelper(); - var fromCols = server.ExpectDatabase(fromDb).ExpectTable(fromTable).DiscoverColumns(); - var toCols = server.ExpectDatabase(toDb).ExpectTable(toTable).DiscoverColumns(); + var fromCols = server.ExpectDatabase(fromDb).ExpectTable(fromTable).DiscoverColumns(); + var toCols = server.ExpectDatabase(toDb).ExpectTable(toTable).DiscoverColumns(); - //Migrate only columns that appear in both tables - var commonColumns = fromCols.Select(f => f.GetRuntimeName()).Intersect(toCols.Select(t => t.GetRuntimeName())).ToArray(); + //Migrate only columns that appear in both tables + var commonColumns = fromCols.Select(f => f.GetRuntimeName()).Intersect(toCols.Select(t => t.GetRuntimeName())).ToArray(); - var sql = string.Format(@"INSERT INTO {1}({2}) SELECT DISTINCT {2} FROM {0}", - syntaxHelper.EnsureFullyQualified(fromDb, null, fromTable), - syntaxHelper.EnsureFullyQualified(toDb, null, toTable), - string.Join(",", commonColumns.Select(c => syntaxHelper.EnsureWrapped(c)))); + var sql = string.Format(@"INSERT INTO {1}({2}) SELECT DISTINCT {2} FROM {0}", + syntaxHelper.EnsureFullyQualified(fromDb, null, fromTable), + syntaxHelper.EnsureFullyQualified(toDb, null, toTable), + string.Join(",", commonColumns.Select(c => syntaxHelper.EnsureWrapped(c)))); - job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "About to send SQL:" + sql)); + job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "About to send SQL:" + sql)); - var cmd = server.GetCommand(sql, con); + var cmd = server.GetCommand(sql, con); - try - { - 
cmd.ExecuteNonQuery(); - } - catch (Exception ex) - { - job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Error, "Failed to migrate rows", ex)); - throw; - } + try + { + cmd.ExecuteNonQuery(); } + catch (Exception ex) + { + job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Error, "Failed to migrate rows", ex)); + throw; + } + } - sw.Stop(); + sw.Stop(); - job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Migrated all rows using INSERT INTO in " + sw.ElapsedMilliseconds + "ms")); - return ExitCodeType.Success; - } + job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Migrated all rows using INSERT INTO in " + sw.ElapsedMilliseconds + "ms")); + return ExitCodeType.Success; } } diff --git a/src/SmiServices/Microservices/DicomRelationalMapper/NLogThrowerDataLoadEventListener.cs b/src/SmiServices/Microservices/DicomRelationalMapper/NLogThrowerDataLoadEventListener.cs index ece1c073e..a14c8a571 100644 --- a/src/SmiServices/Microservices/DicomRelationalMapper/NLogThrowerDataLoadEventListener.cs +++ b/src/SmiServices/Microservices/DicomRelationalMapper/NLogThrowerDataLoadEventListener.cs @@ -2,38 +2,37 @@ using Rdmp.Core.ReusableLibraryCode.Progress; using System; -namespace SmiServices.Microservices.DicomRelationalMapper +namespace SmiServices.Microservices.DicomRelationalMapper; + +internal sealed class NLogThrowerDataLoadEventListener : IDataLoadEventListener { - internal sealed class NLogThrowerDataLoadEventListener : IDataLoadEventListener + private readonly Logger _logger; + private static readonly ThrowImmediatelyDataLoadEventListener _thrower = ThrowImmediatelyDataLoadEventListener.Quiet; + + public NLogThrowerDataLoadEventListener(Logger logger) { - private readonly Logger _logger; - private static readonly ThrowImmediatelyDataLoadEventListener _thrower = ThrowImmediatelyDataLoadEventListener.Quiet; + _logger = logger; + } - public NLogThrowerDataLoadEventListener(Logger logger) - { - _logger = logger; - } + 
public void OnNotify(object sender, NotifyEventArgs e) + { + _logger.Log(ToLogLevel(e.ProgressEventType), e.Exception, e.Message); + _thrower.OnNotify(sender, e); + } - public void OnNotify(object sender, NotifyEventArgs e) + private static LogLevel ToLogLevel(ProgressEventType t) => + t switch { - _logger.Log(ToLogLevel(e.ProgressEventType), e.Exception, e.Message); - _thrower.OnNotify(sender, e); - } - - private static LogLevel ToLogLevel(ProgressEventType t) => - t switch - { - ProgressEventType.Trace => LogLevel.Trace, - ProgressEventType.Debug => LogLevel.Debug, - ProgressEventType.Information => LogLevel.Info, - ProgressEventType.Warning => LogLevel.Warn, - ProgressEventType.Error => LogLevel.Error, - _ => throw new ArgumentOutOfRangeException(nameof(t)) - }; + ProgressEventType.Trace => LogLevel.Trace, + ProgressEventType.Debug => LogLevel.Debug, + ProgressEventType.Information => LogLevel.Info, + ProgressEventType.Warning => LogLevel.Warn, + ProgressEventType.Error => LogLevel.Error, + _ => throw new ArgumentOutOfRangeException(nameof(t)) + }; - public void OnProgress(object sender, ProgressEventArgs e) - { - _thrower.OnProgress(sender, e); - } + public void OnProgress(object sender, ProgressEventArgs e) + { + _thrower.OnProgress(sender, e); } } diff --git a/src/SmiServices/Microservices/DicomRelationalMapper/Namers/GuidDatabaseNamer.cs b/src/SmiServices/Microservices/DicomRelationalMapper/Namers/GuidDatabaseNamer.cs index 3c202074f..9cb1ec00e 100644 --- a/src/SmiServices/Microservices/DicomRelationalMapper/Namers/GuidDatabaseNamer.cs +++ b/src/SmiServices/Microservices/DicomRelationalMapper/Namers/GuidDatabaseNamer.cs @@ -3,68 +3,67 @@ using Rdmp.Core.Curation.Data.EntityNaming; using System; -namespace SmiServices.Microservices.DicomRelationalMapper.Namers +namespace SmiServices.Microservices.DicomRelationalMapper.Namers; + +/// +/// Handles naming RAW/STAGING databases in a data load with unique names (by prefixing a GUID to the name). 
+/// +/// Since the RDMP currently expects STAGING to always be there and won't automatically create it we have to also support creating +/// it externally and destroying it after successful loads. +/// +public class GuidDatabaseNamer : FixedStagingDatabaseNamer, ICreateAndDestroyStagingDuringLoads { + private readonly string _guid; + private DiscoveredDatabase? _stagingDatabase; + /// - /// Handles naming RAW/STAGING databases in a data load with unique names (by prefixing a GUID to the name). - /// - /// Since the RDMP currently expects STAGING to always be there and won't automatically create it we have to also support creating - /// it externally and destroying it after successful loads. + /// Defines how to name Staging databases by appending a Guid. You can pass a specific guid if you want or pass Guid.Empty to + /// assign a new random one /// - public class GuidDatabaseNamer : FixedStagingDatabaseNamer, ICreateAndDestroyStagingDuringLoads + /// + /// + public GuidDatabaseNamer(string databaseName, Guid explicitGuid) + : base(databaseName) { - private readonly string _guid; - private DiscoveredDatabase? _stagingDatabase; - - /// - /// Defines how to name Staging databases by appending a Guid. You can pass a specific guid if you want or pass Guid.Empty to - /// assign a new random one - /// - /// - /// - public GuidDatabaseNamer(string databaseName, Guid explicitGuid) - : base(databaseName) - { - _guid = explicitGuid == Guid.Empty ? Guid.NewGuid().ToString("N") : explicitGuid.ToString(); - } + _guid = explicitGuid == Guid.Empty ? Guid.NewGuid().ToString("N") : explicitGuid.ToString(); + } - /// - /// Prefixes STAGING and RAW databases with the guid - /// - /// - /// - /// - public override string GetDatabaseName(string? rootDatabaseName, LoadBubble stage) - { - var basic = base.GetDatabaseName(rootDatabaseName, stage); + /// + /// Prefixes STAGING and RAW databases with the guid + /// + /// + /// + /// + public override string GetDatabaseName(string? 
rootDatabaseName, LoadBubble stage) + { + var basic = base.GetDatabaseName(rootDatabaseName, stage); - if (stage == LoadBubble.Live || stage == LoadBubble.Archive) - return basic; + if (stage == LoadBubble.Live || stage == LoadBubble.Archive) + return basic; - return "t" + _guid.Replace("-", "") + basic; - } + return "t" + _guid.Replace("-", "") + basic; + } - public void CreateStaging(DiscoveredServer liveServer) - { - _stagingDatabase = liveServer.ExpectDatabase(GetDatabaseName(null, LoadBubble.Staging)); + public void CreateStaging(DiscoveredServer liveServer) + { + _stagingDatabase = liveServer.ExpectDatabase(GetDatabaseName(null, LoadBubble.Staging)); - if (!_stagingDatabase.Exists()) - _stagingDatabase.Create(); + if (!_stagingDatabase.Exists()) + _stagingDatabase.Create(); - //get rid of any old data from previous load - foreach (var t in _stagingDatabase.DiscoverTables(false)) - t.Truncate(); - } + //get rid of any old data from previous load + foreach (var t in _stagingDatabase.DiscoverTables(false)) + t.Truncate(); + } - public void DestroyStagingIfExists() - { - if (_stagingDatabase != null && _stagingDatabase.Exists()) - _stagingDatabase.Drop(); - } + public void DestroyStagingIfExists() + { + if (_stagingDatabase != null && _stagingDatabase.Exists()) + _stagingDatabase.Drop(); + } - public override string ToString() - { - return base.ToString() + "(GUID:" + _guid + ")"; - } + public override string ToString() + { + return base.ToString() + "(GUID:" + _guid + ")"; } } diff --git a/src/SmiServices/Microservices/DicomRelationalMapper/Namers/GuidTableNamer.cs b/src/SmiServices/Microservices/DicomRelationalMapper/Namers/GuidTableNamer.cs index 44621a986..533f6b20d 100644 --- a/src/SmiServices/Microservices/DicomRelationalMapper/Namers/GuidTableNamer.cs +++ b/src/SmiServices/Microservices/DicomRelationalMapper/Namers/GuidTableNamer.cs @@ -2,45 +2,44 @@ using Rdmp.Core.Curation.Data.EntityNaming; using System; -namespace 
SmiServices.Microservices.DicomRelationalMapper.Namers +namespace SmiServices.Microservices.DicomRelationalMapper.Namers; + +/// +/// Handles naming RAW/STAGING databases in a data load with unique names. +/// +/// Database names stay the same but table names get the prefix +/// +public class GuidTableNamer : FixedStagingDatabaseNamer { + private readonly string _guid; + /// - /// Handles naming RAW/STAGING databases in a data load with unique names. - /// - /// Database names stay the same but table names get the prefix + /// Defines how to name RAW and Staging tables by appending a Guid. You can pass a specific guid if you want or pass Guid.Empty to + /// assign a new random one /// - public class GuidTableNamer : FixedStagingDatabaseNamer + /// + /// + public GuidTableNamer(string databaseName, Guid explicitGuid) : base(databaseName) { - private readonly string _guid; - - /// - /// Defines how to name RAW and Staging tables by appending a Guid. You can pass a specific guid if you want or pass Guid.Empty to - /// assign a new random one - /// - /// - /// - public GuidTableNamer(string databaseName, Guid explicitGuid) : base(databaseName) - { - _guid = explicitGuid == Guid.Empty ? Guid.NewGuid().ToString("N") : explicitGuid.ToString(); + _guid = explicitGuid == Guid.Empty ? 
Guid.NewGuid().ToString("N") : explicitGuid.ToString(); - //MySql can't handle long table names - _guid = _guid[..8]; - } + //MySql can't handle long table names + _guid = _guid[..8]; + } - public override string GetName(string tableName, LoadBubble convention) - { + public override string GetName(string tableName, LoadBubble convention) + { - var basic = base.GetName(tableName, convention); + var basic = base.GetName(tableName, convention); - if (convention == LoadBubble.Live || convention == LoadBubble.Archive) - return basic; + if (convention == LoadBubble.Live || convention == LoadBubble.Archive) + return basic; - return "t" + _guid.Replace("-", "") + basic; - } + return "t" + _guid.Replace("-", "") + basic; + } - public override string ToString() - { - return base.ToString() + "(GUID:" + _guid + ")"; - } + public override string ToString() + { + return base.ToString() + "(GUID:" + _guid + ")"; } } diff --git a/src/SmiServices/Microservices/DicomRelationalMapper/Namers/ICanCreateStagingMyself.cs b/src/SmiServices/Microservices/DicomRelationalMapper/Namers/ICanCreateStagingMyself.cs index f253feef7..3204e9eb9 100644 --- a/src/SmiServices/Microservices/DicomRelationalMapper/Namers/ICanCreateStagingMyself.cs +++ b/src/SmiServices/Microservices/DicomRelationalMapper/Namers/ICanCreateStagingMyself.cs @@ -1,15 +1,14 @@ using FAnsi.Discovery; using Rdmp.Core.Curation.Data.EntityNaming; -namespace SmiServices.Microservices.DicomRelationalMapper.Namers +namespace SmiServices.Microservices.DicomRelationalMapper.Namers; + +/// +/// interface for implementations which can on demand create the STAGING database +/// (which must be on the same server as LIVE). +/// +public interface ICreateAndDestroyStagingDuringLoads : INameDatabasesAndTablesDuringLoads { - /// - /// interface for implementations which can on demand create the STAGING database - /// (which must be on the same server as LIVE). 
- /// - public interface ICreateAndDestroyStagingDuringLoads : INameDatabasesAndTablesDuringLoads - { - void CreateStaging(DiscoveredServer liveServer); - void DestroyStagingIfExists(); - } + void CreateStaging(DiscoveredServer liveServer); + void DestroyStagingIfExists(); } diff --git a/src/SmiServices/Microservices/DicomRelationalMapper/Namers/MyFixedStagingDatabaseNamer.cs b/src/SmiServices/Microservices/DicomRelationalMapper/Namers/MyFixedStagingDatabaseNamer.cs index 46d7eadb3..eef6c8e4f 100644 --- a/src/SmiServices/Microservices/DicomRelationalMapper/Namers/MyFixedStagingDatabaseNamer.cs +++ b/src/SmiServices/Microservices/DicomRelationalMapper/Namers/MyFixedStagingDatabaseNamer.cs @@ -1,13 +1,12 @@ using Rdmp.Core.Curation.Data.EntityNaming; using System; -namespace SmiServices.Microservices.DicomRelationalMapper.Namers +namespace SmiServices.Microservices.DicomRelationalMapper.Namers; + +public class MyFixedStagingDatabaseNamer : FixedStagingDatabaseNamer { - public class MyFixedStagingDatabaseNamer : FixedStagingDatabaseNamer + //all injectable constructors must match + public MyFixedStagingDatabaseNamer(string databaseName, Guid _) : base(databaseName) { - //all injectable constructors must match - public MyFixedStagingDatabaseNamer(string databaseName, Guid _) : base(databaseName) - { - } } } diff --git a/src/SmiServices/Microservices/DicomRelationalMapper/ParallelDLEHost.cs b/src/SmiServices/Microservices/DicomRelationalMapper/ParallelDLEHost.cs index 3f30e0db1..3496fe59a 100644 --- a/src/SmiServices/Microservices/DicomRelationalMapper/ParallelDLEHost.cs +++ b/src/SmiServices/Microservices/DicomRelationalMapper/ParallelDLEHost.cs @@ -18,106 +18,105 @@ using System.Collections.Generic; using System.Text.RegularExpressions; -namespace SmiServices.Microservices.DicomRelationalMapper +namespace SmiServices.Microservices.DicomRelationalMapper; + +/// +/// Sets up an ad-hoc data load in which the namer is changed to the specified and +/// an 
ExplicitListDicomProcessListProvider is injected into the loads AutoRoutingAttacher component (Payload field) +/// +public class ParallelDLEHost { - /// - /// Sets up an ad-hoc data load in which the namer is changed to the specified and - /// an ExplicitListDicomProcessListProvider is injected into the loads AutoRoutingAttacher component (Payload field) - /// - public class ParallelDLEHost + private readonly IRDMPPlatformRepositoryServiceLocator _repositoryLocator; + private readonly INameDatabasesAndTablesDuringLoads _namer; + private readonly bool _useInsertIntoForRawMigration; + private HICDatabaseConfiguration? _configuration; + + public ParallelDLEHost(IRDMPPlatformRepositoryServiceLocator repositoryLocator, INameDatabasesAndTablesDuringLoads namer, bool useInsertIntoForRawMigration) { - private readonly IRDMPPlatformRepositoryServiceLocator _repositoryLocator; - private readonly INameDatabasesAndTablesDuringLoads _namer; - private readonly bool _useInsertIntoForRawMigration; - private HICDatabaseConfiguration? 
_configuration; + _repositoryLocator = repositoryLocator; + _namer = namer; + _useInsertIntoForRawMigration = useInsertIntoForRawMigration; + } - public ParallelDLEHost(IRDMPPlatformRepositoryServiceLocator repositoryLocator, INameDatabasesAndTablesDuringLoads namer, bool useInsertIntoForRawMigration) - { - _repositoryLocator = repositoryLocator; - _namer = namer; - _useInsertIntoForRawMigration = useInsertIntoForRawMigration; - } + public ExitCodeType RunDLE(LoadMetadata lmd, IDicomFileWorklist payload) + { + return RunDLE(lmd, (object)payload); + } + public ExitCodeType RunDLE(LoadMetadata lmd, IDicomDatasetWorklist payload) + { + return RunDLE(lmd, (object)payload); + } - public ExitCodeType RunDLE(LoadMetadata lmd, IDicomFileWorklist payload) - { - return RunDLE(lmd, (object)payload); - } - public ExitCodeType RunDLE(LoadMetadata lmd, IDicomDatasetWorklist payload) - { - return RunDLE(lmd, (object)payload); - } + /// + /// Runs the DLE using a custom names for RAW/STAGING. Pass in the load to execute and the files/directories to process + /// in the batch. + /// + /// + /// + /// The exit code of the data load after it completes + private ExitCodeType RunDLE(LoadMetadata lmd, object payload) + { + var catalogueRepository = lmd.CatalogueRepository; - /// - /// Runs the DLE using a custom names for RAW/STAGING. Pass in the load to execute and the files/directories to process - /// in the batch. 
- /// - /// - /// - /// The exit code of the data load after it completes - private ExitCodeType RunDLE(LoadMetadata lmd, object payload) + //ensures that RAW/STAGING always have unique names + _configuration = new HICDatabaseConfiguration(lmd, _namer) { - var catalogueRepository = lmd.CatalogueRepository; - - //ensures that RAW/STAGING always have unique names - _configuration = new HICDatabaseConfiguration(lmd, _namer) - { - UpdateButDoNotDiff = new Regex("^MessageGuid") - }; + UpdateButDoNotDiff = new Regex("^MessageGuid") + }; - var logManager = catalogueRepository.GetDefaultLogManager(); + var logManager = catalogueRepository.GetDefaultLogManager(); - logManager.CreateNewLoggingTaskIfNotExists(lmd.GetDistinctLoggingTask()); + logManager.CreateNewLoggingTaskIfNotExists(lmd.GetDistinctLoggingTask()); - // Create the pipeline to pass into the DataLoadProcess object - var dataLoadFactory = new HICDataLoadFactory(lmd, _configuration, new HICLoadConfigurationFlags(), - catalogueRepository, logManager); + // Create the pipeline to pass into the DataLoadProcess object + var dataLoadFactory = new HICDataLoadFactory(lmd, _configuration, new HICLoadConfigurationFlags(), + catalogueRepository, logManager); - if (_namer is ICreateAndDestroyStagingDuringLoads stagingCreator) - stagingCreator.CreateStaging(lmd.GetDistinctLiveDatabaseServer()); + if (_namer is ICreateAndDestroyStagingDuringLoads stagingCreator) + stagingCreator.CreateStaging(lmd.GetDistinctLiveDatabaseServer()); - var listener = new NLogThrowerDataLoadEventListener(NLog.LogManager.GetCurrentClassLogger()); + var listener = new NLogThrowerDataLoadEventListener(NLog.LogManager.GetCurrentClassLogger()); - IDataLoadExecution execution = dataLoadFactory.Create(listener); + IDataLoadExecution execution = dataLoadFactory.Create(listener); - IExternalDatabaseServer raw = catalogueRepository.GetDefaultFor(PermissableDefaults.RAWDataLoadServer); + IExternalDatabaseServer raw = 
catalogueRepository.GetDefaultFor(PermissableDefaults.RAWDataLoadServer); - DiscoveredServer liveDb = lmd.GetDistinctLiveDatabaseServer(); + DiscoveredServer liveDb = lmd.GetDistinctLiveDatabaseServer(); - //do we want to try to cut down the time it takes to do RAW=>STAGING by using INSERT INTO instead of running anonymisation/migration pipeline - if (_useInsertIntoForRawMigration) - //if it is on the same server swap out the migration engine for INSERT INTO - if (raw == null || raw.Server != null && raw.Server.Equals(liveDb.Name) && raw.DatabaseType == liveDb.DatabaseType) - { - listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "SWAPPING RAW=>STAGING migration strategy to INSERT INTO")); - SwapMigrateRAWToStagingComponent(execution.Components); - } - else - { - //Cannot use because different servers / DatabaseTypes. - listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "CANNOT SWAP RAW=>STAGING migration strategy to INSERT INTO because RAW is on '" + raw.Server + "' (" + raw.DatabaseType + ") and STAGING is on '" + liveDb.Name + "' (" + liveDb.DatabaseType + ")")); - } + //do we want to try to cut down the time it takes to do RAW=>STAGING by using INSERT INTO instead of running anonymisation/migration pipeline + if (_useInsertIntoForRawMigration) + //if it is on the same server swap out the migration engine for INSERT INTO + if (raw == null || raw.Server != null && raw.Server.Equals(liveDb.Name) && raw.DatabaseType == liveDb.DatabaseType) + { + listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "SWAPPING RAW=>STAGING migration strategy to INSERT INTO")); + SwapMigrateRAWToStagingComponent(execution.Components); + } else - listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Flag is false for SWAP RAW=>STAGING migration strategy to INSERT INTO So won't do it")); + { + //Cannot use because different servers / DatabaseTypes. 
+ listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Warning, "CANNOT SWAP RAW=>STAGING migration strategy to INSERT INTO because RAW is on '" + raw.Server + "' (" + raw.DatabaseType + ") and STAGING is on '" + liveDb.Name + "' (" + liveDb.DatabaseType + ")")); + } + else + listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Flag is false for SWAP RAW=>STAGING migration strategy to INSERT INTO So won't do it")); - var procedure = new DataLoadProcess(_repositoryLocator, lmd, null, logManager, listener, execution, _configuration); + var procedure = new DataLoadProcess(_repositoryLocator, lmd, null, logManager, listener, execution, _configuration); - ExitCodeType exitCode = procedure.Run(new GracefulCancellationToken(), payload); + ExitCodeType exitCode = procedure.Run(new GracefulCancellationToken(), payload); - return exitCode; - } + return exitCode; + } - private static void SwapMigrateRAWToStagingComponent(IList components) + private static void SwapMigrateRAWToStagingComponent(IList components) + { + for (var i = 0; i < components.Count; i++) { - for (var i = 0; i < components.Count; i++) - { - if (components[i] is CompositeDataLoadComponent composite) - SwapMigrateRAWToStagingComponent(composite.Components); + if (components[i] is CompositeDataLoadComponent composite) + SwapMigrateRAWToStagingComponent(composite.Components); - if (components[i] is MigrateRAWToStaging) - components[i] = new MigrateRawToStagingWithSelectIntoStatements(); - } + if (components[i] is MigrateRAWToStaging) + components[i] = new MigrateRawToStagingWithSelectIntoStatements(); + } - } } } diff --git a/src/SmiServices/Microservices/DicomRelationalMapper/QueuedImage.cs b/src/SmiServices/Microservices/DicomRelationalMapper/QueuedImage.cs index 7e3a33dfb..4e969f3df 100644 --- a/src/SmiServices/Microservices/DicomRelationalMapper/QueuedImage.cs +++ b/src/SmiServices/Microservices/DicomRelationalMapper/QueuedImage.cs @@ -2,27 +2,26 @@ using FellowOakDicom; 
using SmiServices.Common.Messages; -namespace SmiServices.Microservices.DicomRelationalMapper +namespace SmiServices.Microservices.DicomRelationalMapper; + +/// +/// +/// +public class QueuedImage { - /// - /// - /// - public class QueuedImage - { - public IMessageHeader Header { get; init; } + public IMessageHeader Header { get; init; } - public ulong Tag { get; init; } + public ulong Tag { get; init; } - public DicomFileMessage DicomFileMessage { get; init; } + public DicomFileMessage DicomFileMessage { get; init; } - public DicomDataset DicomDataset { get; init; } + public DicomDataset DicomDataset { get; init; } - public QueuedImage(IMessageHeader header, ulong tag, DicomFileMessage dicomFileMessage, DicomDataset dataset) - { - Header = header; - Tag = tag; - DicomFileMessage = dicomFileMessage; - DicomDataset = dataset; - } + public QueuedImage(IMessageHeader header, ulong tag, DicomFileMessage dicomFileMessage, DicomDataset dataset) + { + Header = header; + Tag = tag; + DicomFileMessage = dicomFileMessage; + DicomDataset = dataset; } } diff --git a/src/SmiServices/Microservices/DicomReprocessor/DicomFileProcessor.cs b/src/SmiServices/Microservices/DicomReprocessor/DicomFileProcessor.cs index 552252cae..b4843cae2 100644 --- a/src/SmiServices/Microservices/DicomReprocessor/DicomFileProcessor.cs +++ b/src/SmiServices/Microservices/DicomReprocessor/DicomFileProcessor.cs @@ -11,123 +11,122 @@ using System.Collections.Generic; using System.Threading; -namespace SmiServices.Microservices.DicomReprocessor +namespace SmiServices.Microservices.DicomReprocessor; + +/// +/// Processes whole BsonDocuments (whole dicom files) into DicomFileMessage(s) and publishes them to config-defined image exchange +/// +public class DicomFileProcessor : IDocumentProcessor { - /// - /// Processes whole BsonDocuments (whole dicom files) into DicomFileMessage(s) and publishes them to config-defined image exchange - /// - public class DicomFileProcessor : IDocumentProcessor - { - public 
long TotalProcessed { get; private set; } + public long TotalProcessed { get; private set; } - public long TotalFailed + public long TotalFailed + { + get { - get - { - // Simple way of getting value of _totalFailed in an atomic op. - return Interlocked.CompareExchange(ref _totalFailed, 0, 0); - } + // Simple way of getting value of _totalFailed in an atomic op. + return Interlocked.CompareExchange(ref _totalFailed, 0, 0); } + } + + + /// + /// Backing field for TotalFailed. Only incremented in an atomic context + /// + private long _totalFailed; + + private readonly ILogger _logger; + + private readonly IProducerModel _producerModel; + private readonly string _reprocessingRoutingKey; + private readonly List> _messageBuffer = []; + private readonly object _oBufferLock = new(); - /// - /// Backing field for TotalFailed. Only incremented in an atomic context - /// - private long _totalFailed; - private readonly ILogger _logger; + public DicomFileProcessor(IProducerModel producerModel, string reprocessingRoutingKey) + { + _logger = LogManager.GetLogger(GetType().Name); - private readonly IProducerModel _producerModel; - private readonly string _reprocessingRoutingKey; + _producerModel = producerModel; + _reprocessingRoutingKey = reprocessingRoutingKey; + } - private readonly List> _messageBuffer = []; - private readonly object _oBufferLock = new(); + public void ProcessDocument(BsonDocument document) + { + string documentId = document["_id"].ToString()!; - public DicomFileProcessor(IProducerModel producerModel, string reprocessingRoutingKey) - { - _logger = LogManager.GetLogger(GetType().Name); + var headerDoc = document["header"] as BsonDocument; - _producerModel = producerModel; - _reprocessingRoutingKey = reprocessingRoutingKey; + if (headerDoc == null) + { + LogUnprocessedDocument(documentId, new ApplicationException("Document did not contain a header field")); + return; } + var message = new DicomFileMessage + { + DicomFilePath = 
(string)headerDoc["DicomFilePath"], + DicomFileSize = headerDoc.Contains("DicomFileSize") ? (long)headerDoc["DicomFileSize"] : -1 + }; - public void ProcessDocument(BsonDocument document) + try + { + // Rebuild the dataset from the document, then serialize it to JSON to send + DicomDataset ds = DicomTypeTranslaterWriter.BuildDicomDataset(document); + message.DicomDataset = DicomTypeTranslater.SerializeDatasetToJson(ds); + + // Add the header information + message.StudyInstanceUID = ds.GetValue(DicomTag.StudyInstanceUID, 0); + message.SeriesInstanceUID = ds.GetValue(DicomTag.SeriesInstanceUID, 0); + message.SOPInstanceUID = ds.GetValue(DicomTag.SOPInstanceUID, 0); + } + catch (Exception e) { - string documentId = document["_id"].ToString()!; - - var headerDoc = document["header"] as BsonDocument; - - if (headerDoc == null) - { - LogUnprocessedDocument(documentId, new ApplicationException("Document did not contain a header field")); - return; - } - - var message = new DicomFileMessage - { - DicomFilePath = (string)headerDoc["DicomFilePath"], - DicomFileSize = headerDoc.Contains("DicomFileSize") ? 
(long)headerDoc["DicomFileSize"] : -1 - }; - - try - { - // Rebuild the dataset from the document, then serialize it to JSON to send - DicomDataset ds = DicomTypeTranslaterWriter.BuildDicomDataset(document); - message.DicomDataset = DicomTypeTranslater.SerializeDatasetToJson(ds); - - // Add the header information - message.StudyInstanceUID = ds.GetValue(DicomTag.StudyInstanceUID, 0); - message.SeriesInstanceUID = ds.GetValue(DicomTag.SeriesInstanceUID, 0); - message.SOPInstanceUID = ds.GetValue(DicomTag.SOPInstanceUID, 0); - } - catch (Exception e) - { - LogUnprocessedDocument(documentId, e); - return; - } - - if (!message.VerifyPopulated()) - { - LogUnprocessedDocument(documentId, new ApplicationException("Message was not valid")); - return; - } - - IMessageHeader header = MongoDocumentHeaders.RebuildMessageHeader(headerDoc["MessageHeader"].AsBsonDocument); - - lock (_oBufferLock) - _messageBuffer.Add(new Tuple(message, header)); + LogUnprocessedDocument(documentId, e); + return; } - public void SendMessages() + if (!message.VerifyPopulated()) { - _logger.Debug("Sending messages in buffer"); + LogUnprocessedDocument(documentId, new ApplicationException("Message was not valid")); + return; + } - lock (_oBufferLock) - { - var newBatchHeaders = new List(); - foreach ((DicomFileMessage message, IMessageHeader header) in _messageBuffer) - newBatchHeaders.Add(_producerModel.SendMessage(message, header, _reprocessingRoutingKey)); + IMessageHeader header = MongoDocumentHeaders.RebuildMessageHeader(headerDoc["MessageHeader"].AsBsonDocument); - // Confirm all messages in the batch - _producerModel.WaitForConfirms(); + lock (_oBufferLock) + _messageBuffer.Add(new Tuple(message, header)); + } - TotalProcessed += _messageBuffer.Count; + public void SendMessages() + { + _logger.Debug("Sending messages in buffer"); - foreach (IMessageHeader newHeader in newBatchHeaders) - newHeader.Log(_logger, LogLevel.Trace, "Sent"); + lock (_oBufferLock) + { + var newBatchHeaders = new 
List(); + foreach ((DicomFileMessage message, IMessageHeader header) in _messageBuffer) + newBatchHeaders.Add(_producerModel.SendMessage(message, header, _reprocessingRoutingKey)); - _messageBuffer.Clear(); - } - } + // Confirm all messages in the batch + _producerModel.WaitForConfirms(); - public void LogProgress() => _logger.Info($"Total messages sent: {TotalProcessed}. Total failed to reprocess: {TotalFailed}"); + TotalProcessed += _messageBuffer.Count; - private void LogUnprocessedDocument(string documentId, Exception e) - { - _logger.Error(e, "Error when processing document with _id " + documentId); - Interlocked.Increment(ref _totalFailed); + foreach (IMessageHeader newHeader in newBatchHeaders) + newHeader.Log(_logger, LogLevel.Trace, "Sent"); + + _messageBuffer.Clear(); } } + + public void LogProgress() => _logger.Info($"Total messages sent: {TotalProcessed}. Total failed to reprocess: {TotalFailed}"); + + private void LogUnprocessedDocument(string documentId, Exception e) + { + _logger.Error(e, "Error when processing document with _id " + documentId); + Interlocked.Increment(ref _totalFailed); + } } diff --git a/src/SmiServices/Microservices/DicomReprocessor/DicomReprocessor.cs b/src/SmiServices/Microservices/DicomReprocessor/DicomReprocessor.cs index 11f15015c..87ea79d58 100644 --- a/src/SmiServices/Microservices/DicomReprocessor/DicomReprocessor.cs +++ b/src/SmiServices/Microservices/DicomReprocessor/DicomReprocessor.cs @@ -3,22 +3,21 @@ using System.Collections.Generic; using System.Diagnostics.CodeAnalysis; -namespace SmiServices.Microservices.DicomReprocessor +namespace SmiServices.Microservices.DicomReprocessor; + +public static class DicomReprocessor { - public static class DicomReprocessor + [ExcludeFromCodeCoverage] + public static int Main(IEnumerable args) { - [ExcludeFromCodeCoverage] - public static int Main(IEnumerable args) - { - int ret = SmiCliInit.ParseAndRun(args, nameof(DicomReprocessor), OnParse); - return ret; - } + int ret = 
SmiCliInit.ParseAndRun(args, nameof(DicomReprocessor), OnParse); + return ret; + } - private static int OnParse(GlobalOptions globals, DicomReprocessorCliOptions opts) - { - var bootstrapper = new MicroserviceHostBootstrapper(() => new DicomReprocessorHost(globals, opts)); - int ret = bootstrapper.Main(); - return ret; - } + private static int OnParse(GlobalOptions globals, DicomReprocessorCliOptions opts) + { + var bootstrapper = new MicroserviceHostBootstrapper(() => new DicomReprocessorHost(globals, opts)); + int ret = bootstrapper.Main(); + return ret; } } diff --git a/src/SmiServices/Microservices/DicomReprocessor/DicomReprocessorCliOptions.cs b/src/SmiServices/Microservices/DicomReprocessor/DicomReprocessorCliOptions.cs index 662fb039a..c45cf1e8c 100644 --- a/src/SmiServices/Microservices/DicomReprocessor/DicomReprocessorCliOptions.cs +++ b/src/SmiServices/Microservices/DicomReprocessor/DicomReprocessorCliOptions.cs @@ -6,105 +6,104 @@ using System.Collections.Generic; using System.Text; -namespace SmiServices.Microservices.DicomReprocessor +namespace SmiServices.Microservices.DicomReprocessor; + +public class DicomReprocessorCliOptions : CliOptions { - public class DicomReprocessorCliOptions : CliOptions + private string _sourceCollection = null!; + [Option( + 'c', + "collection-name", + Required = true, + HelpText = "The collection to reprocess documents from. This is the collection (table) only not the database which is determined by \"MongoDatabases.DicomStoreOptions\" in the YAML config" + )] + public string SourceCollection { - private string _sourceCollection = null!; - [Option( - 'c', - "collection-name", - Required = true, - HelpText = "The collection to reprocess documents from. 
This is the collection (table) only not the database which is determined by \"MongoDatabases.DicomStoreOptions\" in the YAML config" - )] - public string SourceCollection + get => _sourceCollection; + set { - get => _sourceCollection; - set - { - if (value.Contains('.')) - throw new ArgumentException(null, nameof(value)); - _sourceCollection = value; - } + if (value.Contains('.')) + throw new ArgumentException(null, nameof(value)); + _sourceCollection = value; } + } - [Option( - 'q', - "query-file", - Required = false, - HelpText = "[Optional] The file to build the reprocessing query from (if you only want a subset of the collection)" - )] - public string? QueryFile { get; set; } + [Option( + 'q', + "query-file", + Required = false, + HelpText = "[Optional] The file to build the reprocessing query from (if you only want a subset of the collection)" + )] + public string? QueryFile { get; set; } - [Option( - "batch-size", - Default = 0, - Required = false, - HelpText = "[Optional] The batch size to set for queries executed on MongoDB. If not set, MongoDB will adjust the batch size automatically" - )] - public int MongoDbBatchSize { get; set; } + [Option( + "batch-size", + Default = 0, + Required = false, + HelpText = "[Optional] The batch size to set for queries executed on MongoDB. If not set, MongoDB will adjust the batch size automatically" + )] + public int MongoDbBatchSize { get; set; } - [Option( - "sleep-time-ms", - Default = 0, - Required = false, - HelpText = "[Optional] Sleep this number of ms between batches" - )] - public int SleepTimeMs { get; set; } + [Option( + "sleep-time-ms", + Default = 0, + Required = false, + HelpText = "[Optional] Sleep this number of ms between batches" + )] + public int SleepTimeMs { get; set; } - /// - /// Routing key to republish messages with. Must not be null, otherwise the messages will end up back in MongoDB. - /// Must match the routing key of the binding to the queue you wish the messages to end up in. 
- /// - [Option( - "reprocessing-key", - Default = "reprocessed", - Required = false, - HelpText = "[Optional] Routing key for output messages sent to the RabbitMQ exchange, which may depend on your RabbitMQ configuration. The exchange must have a valid route mapped for this routing key" - )] - public string? ReprocessingRoutingKey { get; set; } + /// + /// Routing key to republish messages with. Must not be null, otherwise the messages will end up back in MongoDB. + /// Must match the routing key of the binding to the queue you wish the messages to end up in. + /// + [Option( + "reprocessing-key", + Default = "reprocessed", + Required = false, + HelpText = "[Optional] Routing key for output messages sent to the RabbitMQ exchange, which may depend on your RabbitMQ configuration. The exchange must have a valid route mapped for this routing key" + )] + public string? ReprocessingRoutingKey { get; set; } - [Option( - "auto-run", - Default = false, - Required = false, - HelpText = "[Optional] False (default) waits for user confirmation that the query is correct before continuing" - )] - public bool AutoRun { get; set; } + [Option( + "auto-run", + Default = false, + Required = false, + HelpText = "[Optional] False (default) waits for user confirmation that the query is correct before continuing" + )] + public bool AutoRun { get; set; } - [Usage] - public static IEnumerable Examples + [Usage] + public static IEnumerable Examples + { + get { - get - { - yield return - new Example("Normal Scenario", new DicomReprocessorCliOptions { SourceCollection = "image_CT" }); - yield return - new Example("Unattended with non-default parameters", new DicomReprocessorCliOptions - { - SourceCollection = "image_CT", - QueryFile = "test", - MongoDbBatchSize = 123, - SleepTimeMs = 1000, - ReprocessingRoutingKey = "test", - AutoRun = true - }); - } + yield return + new Example("Normal Scenario", new DicomReprocessorCliOptions { SourceCollection = "image_CT" }); + yield return + new 
Example("Unattended with non-default parameters", new DicomReprocessorCliOptions + { + SourceCollection = "image_CT", + QueryFile = "test", + MongoDbBatchSize = 123, + SleepTimeMs = 1000, + ReprocessingRoutingKey = "test", + AutoRun = true + }); } + } - public override string ToString() - { - var sb = new StringBuilder(); + public override string ToString() + { + var sb = new StringBuilder(); - sb.Append("SourceCollection: " + SourceCollection); - sb.Append(", QueryFile: " + QueryFile); - sb.Append(", MongoDbBatchSize: " + MongoDbBatchSize); - sb.Append(", ReprocessingRoutingKey: " + ReprocessingRoutingKey); - sb.Append(", AutoRun: " + AutoRun); + sb.Append("SourceCollection: " + SourceCollection); + sb.Append(", QueryFile: " + QueryFile); + sb.Append(", MongoDbBatchSize: " + MongoDbBatchSize); + sb.Append(", ReprocessingRoutingKey: " + ReprocessingRoutingKey); + sb.Append(", AutoRun: " + AutoRun); - return base.ToString() + ", " + sb; - } + return base.ToString() + ", " + sb; } } diff --git a/src/SmiServices/Microservices/DicomReprocessor/DicomReprocessorControlMessageHandler.cs b/src/SmiServices/Microservices/DicomReprocessor/DicomReprocessorControlMessageHandler.cs index 8e742eaea..36f95b075 100644 --- a/src/SmiServices/Microservices/DicomReprocessor/DicomReprocessorControlMessageHandler.cs +++ b/src/SmiServices/Microservices/DicomReprocessor/DicomReprocessorControlMessageHandler.cs @@ -5,43 +5,42 @@ using System; -namespace SmiServices.Microservices.DicomReprocessor +namespace SmiServices.Microservices.DicomReprocessor; + +public class DicomReprocessorControlMessageHandler : IControlMessageHandler { - public class DicomReprocessorControlMessageHandler : IControlMessageHandler - { - private readonly ILogger _logger = LogManager.GetCurrentClassLogger(); + private readonly ILogger _logger = LogManager.GetCurrentClassLogger(); - private readonly DicomReprocessorOptions _options; + private readonly DicomReprocessorOptions _options; - private const string Key = 
"set-sleep-time-ms"; + private const string Key = "set-sleep-time-ms"; - public DicomReprocessorControlMessageHandler(DicomReprocessorOptions options) - { - _options = options; - } + public DicomReprocessorControlMessageHandler(DicomReprocessorOptions options) + { + _options = options; + } - public void ControlMessageHandler(string action, string? message = null) - { - _logger.Info($"Received control event with action \"{action}\" and message \"{message}\""); + public void ControlMessageHandler(string action, string? message = null) + { + _logger.Info($"Received control event with action \"{action}\" and message \"{message}\""); - if (!action.StartsWith(Key)) - { - _logger.Info("Ignoring unknown action"); - return; - } + if (!action.StartsWith(Key)) + { + _logger.Info("Ignoring unknown action"); + return; + } - if (!int.TryParse(message, out int intTimeMs)) - { - _logger.Error($"Couldn't parse a valid int from \"{message}\""); - return; - } + if (!int.TryParse(message, out int intTimeMs)) + { + _logger.Error($"Couldn't parse a valid int from \"{message}\""); + return; + } - TimeSpan newTime = TimeSpan.FromMilliseconds(intTimeMs); + TimeSpan newTime = TimeSpan.FromMilliseconds(intTimeMs); - _logger.Info($"Setting batch sleep time to {newTime.TotalMilliseconds}ms"); - _options.SleepTime = newTime; - } + _logger.Info($"Setting batch sleep time to {newTime.TotalMilliseconds}ms"); + _options.SleepTime = newTime; } } diff --git a/src/SmiServices/Microservices/DicomReprocessor/DicomReprocessorHost.cs b/src/SmiServices/Microservices/DicomReprocessor/DicomReprocessorHost.cs index 657734438..93ed0b359 100644 --- a/src/SmiServices/Microservices/DicomReprocessor/DicomReprocessorHost.cs +++ b/src/SmiServices/Microservices/DicomReprocessor/DicomReprocessorHost.cs @@ -5,82 +5,81 @@ using System.IO; using System.Threading.Tasks; -namespace SmiServices.Microservices.DicomReprocessor +namespace SmiServices.Microservices.DicomReprocessor; + +public class DicomReprocessorHost : 
MicroserviceHost { - public class DicomReprocessorHost : MicroserviceHost + private readonly MongoDbReader _mongoReader; + private readonly IDocumentProcessor _processor; + private Task? _processorTask; + + private readonly string? _queryString; + + public DicomReprocessorHost(GlobalOptions options, DicomReprocessorCliOptions cliOptions) + : base(options) { - private readonly MongoDbReader _mongoReader; - private readonly IDocumentProcessor _processor; - private Task? _processorTask; + string? key = cliOptions.ReprocessingRoutingKey; - private readonly string? _queryString; + if (string.IsNullOrWhiteSpace(key)) + throw new ArgumentException("ReprocessingRoutingKey"); - public DicomReprocessorHost(GlobalOptions options, DicomReprocessorCliOptions cliOptions) - : base(options) - { - string? key = cliOptions.ReprocessingRoutingKey; + // Set the initial sleep time + Globals.DicomReprocessorOptions!.SleepTime = TimeSpan.FromMilliseconds(cliOptions.SleepTimeMs); - if (string.IsNullOrWhiteSpace(key)) - throw new ArgumentException("ReprocessingRoutingKey"); + IProducerModel reprocessingProducerModel = MessageBroker.SetupProducer(options.DicomReprocessorOptions!.ReprocessingProducerOptions!, true); - // Set the initial sleep time - Globals.DicomReprocessorOptions!.SleepTime = TimeSpan.FromMilliseconds(cliOptions.SleepTimeMs); + Logger.Info("Documents will be reprocessed to " + + options.DicomReprocessorOptions.ReprocessingProducerOptions!.ExchangeName + " on vhost " + + options.RabbitOptions!.RabbitMqVirtualHost + " with routing key \"" + key + "\""); - IProducerModel reprocessingProducerModel = MessageBroker.SetupProducer(options.DicomReprocessorOptions!.ReprocessingProducerOptions!, true); + if (!string.IsNullOrWhiteSpace(cliOptions.QueryFile)) + _queryString = File.ReadAllText(cliOptions.QueryFile); - Logger.Info("Documents will be reprocessed to " + - options.DicomReprocessorOptions.ReprocessingProducerOptions!.ExchangeName + " on vhost " + - 
options.RabbitOptions!.RabbitMqVirtualHost + " with routing key \"" + key + "\""); + //TODO Make this into a CreateInstance<> call + _processor = options.DicomReprocessorOptions.ProcessingMode switch + { + ProcessingMode.TagPromotion => new TagPromotionProcessor(reprocessingProducerModel), + ProcessingMode.ImageReprocessing => new DicomFileProcessor(reprocessingProducerModel, key), + _ => throw new ArgumentException("ProcessingMode " + options.DicomReprocessorOptions.ProcessingMode + " not supported"), + }; + _mongoReader = new MongoDbReader(options.MongoDatabases!.DicomStoreOptions!, cliOptions, HostProcessName + "-" + HostProcessID); - if (!string.IsNullOrWhiteSpace(cliOptions.QueryFile)) - _queryString = File.ReadAllText(cliOptions.QueryFile); + AddControlHandler(new DicomReprocessorControlMessageHandler(Globals.DicomReprocessorOptions)); + } - //TODO Make this into a CreateInstance<> call - _processor = options.DicomReprocessorOptions.ProcessingMode switch - { - ProcessingMode.TagPromotion => new TagPromotionProcessor(reprocessingProducerModel), - ProcessingMode.ImageReprocessing => new DicomFileProcessor(reprocessingProducerModel, key), - _ => throw new ArgumentException("ProcessingMode " + options.DicomReprocessorOptions.ProcessingMode + " not supported"), - }; - _mongoReader = new MongoDbReader(options.MongoDatabases!.DicomStoreOptions!, cliOptions, HostProcessName + "-" + HostProcessID); + public override void Start() + { + _processorTask = _mongoReader.RunQuery(_queryString, _processor, Globals.DicomReprocessorOptions!); + TimeSpan queryTime = _processorTask.Result; - AddControlHandler(new DicomReprocessorControlMessageHandler(Globals.DicomReprocessorOptions)); - } + if (_processor.TotalProcessed == 0) + Logger.Warn("Nothing reprocessed"); + else + _processor.LogProgress(); - public override void Start() - { - _processorTask = _mongoReader.RunQuery(_queryString, _processor, Globals.DicomReprocessorOptions!); - TimeSpan queryTime = _processorTask.Result; + 
if (queryTime != default) + Logger.Info("Average documents processed per second: " + Convert.ToInt32(_processor.TotalProcessed / queryTime.TotalSeconds)); - if (_processor.TotalProcessed == 0) - Logger.Warn("Nothing reprocessed"); - else - _processor.LogProgress(); + // Only call stop if we exited normally + if (_mongoReader.WasCancelled) + return; - if (queryTime != default) - Logger.Info("Average documents processed per second: " + Convert.ToInt32(_processor.TotalProcessed / queryTime.TotalSeconds)); + Stop("Reprocessing completed"); + } - // Only call stop if we exited normally - if (_mongoReader.WasCancelled) - return; + public override void Stop(string reason) + { + _mongoReader.Stop(); - Stop("Reprocessing completed"); + try + { + _processorTask!.Wait(); } - - public override void Stop(string reason) + catch (AggregateException e) { - _mongoReader.Stop(); - - try - { - _processorTask!.Wait(); - } - catch (AggregateException e) - { - Logger.Error(e, "Exceptions thrown by ProcessorTask during Stop (Stop Reason Was {0})", reason); - } - - base.Stop(reason); + Logger.Error(e, "Exceptions thrown by ProcessorTask during Stop (Stop Reason Was {0})", reason); } + + base.Stop(reason); } } diff --git a/src/SmiServices/Microservices/DicomReprocessor/IDocumentProcessor.cs b/src/SmiServices/Microservices/DicomReprocessor/IDocumentProcessor.cs index f6d9dab7b..e0a9b6891 100644 --- a/src/SmiServices/Microservices/DicomReprocessor/IDocumentProcessor.cs +++ b/src/SmiServices/Microservices/DicomReprocessor/IDocumentProcessor.cs @@ -1,34 +1,33 @@ using MongoDB.Bson; -namespace SmiServices.Microservices.DicomReprocessor +namespace SmiServices.Microservices.DicomReprocessor; + +/// +/// Interface for classes which process documents from MongoDb into a specific message type +/// +public interface IDocumentProcessor { /// - /// Interface for classes which process documents from MongoDb into a specific message type + /// Total number of documents successfully processed /// - public 
interface IDocumentProcessor - { - /// - /// Total number of documents successfully processed - /// - long TotalProcessed { get; } + long TotalProcessed { get; } - /// - /// Total number of documents not successfully processed - /// - long TotalFailed { get; } + /// + /// Total number of documents not successfully processed + /// + long TotalFailed { get; } - /// - /// Method called by the MongoDbReader for every document. Will be run in parallel, so be careful with updating state variables. - /// - /// - void ProcessDocument(BsonDocument document); + /// + /// Method called by the MongoDbReader for every document. Will be run in parallel, so be careful with updating state variables. + /// + /// + void ProcessDocument(BsonDocument document); - /// - /// - /// - void SendMessages(); + /// + /// + /// + void SendMessages(); - void LogProgress(); - } + void LogProgress(); } diff --git a/src/SmiServices/Microservices/DicomReprocessor/MongoDbReader.cs b/src/SmiServices/Microservices/DicomReprocessor/MongoDbReader.cs index 78bcb196e..b73af23a8 100644 --- a/src/SmiServices/Microservices/DicomReprocessor/MongoDbReader.cs +++ b/src/SmiServices/Microservices/DicomReprocessor/MongoDbReader.cs @@ -10,132 +10,131 @@ using System.Threading.Tasks; -namespace SmiServices.Microservices.DicomReprocessor +namespace SmiServices.Microservices.DicomReprocessor; + +public class MongoDbReader { - public class MongoDbReader + public bool WasCancelled { - public bool WasCancelled - { - get { return _tokenSource.IsCancellationRequested; } - } + get { return _tokenSource.IsCancellationRequested; } + } - private readonly ILogger _logger; + private readonly ILogger _logger; - private readonly string _collNamespace; - private readonly IMongoCollection _collection; + private readonly string _collNamespace; + private readonly IMongoCollection _collection; - private readonly FindOptions _findOptionsBase = new() - { - NoCursorTimeout = true - }; + private readonly FindOptions _findOptionsBase = 
new() + { + NoCursorTimeout = true + }; - private readonly ParallelOptions _parallelOptions = new() - { - MaxDegreeOfParallelism = Environment.ProcessorCount > 1 ? Environment.ProcessorCount / 2 : 1 - }; + private readonly ParallelOptions _parallelOptions = new() + { + MaxDegreeOfParallelism = Environment.ProcessorCount > 1 ? Environment.ProcessorCount / 2 : 1 + }; - private readonly CancellationTokenSource _tokenSource = new(); - private bool _stopping; + private readonly CancellationTokenSource _tokenSource = new(); + private bool _stopping; - private readonly bool _autoRun; + private readonly bool _autoRun; - public MongoDbReader(MongoDbOptions mongoOptions, DicomReprocessorCliOptions reprocessorOptions, string appId) - { - _logger = LogManager.GetLogger(GetType().Name); + public MongoDbReader(MongoDbOptions mongoOptions, DicomReprocessorCliOptions reprocessorOptions, string appId) + { + _logger = LogManager.GetLogger(GetType().Name); - MongoClient mongoClient = MongoClientHelpers.GetMongoClient(mongoOptions, appId); + MongoClient mongoClient = MongoClientHelpers.GetMongoClient(mongoOptions, appId); - if (string.IsNullOrWhiteSpace(reprocessorOptions.SourceCollection)) - throw new ArgumentException("SourceCollection"); + if (string.IsNullOrWhiteSpace(reprocessorOptions.SourceCollection)) + throw new ArgumentException("SourceCollection"); - _collection = mongoClient.GetDatabase(mongoOptions.DatabaseName).GetCollection(reprocessorOptions.SourceCollection); - _collNamespace = mongoOptions.DatabaseName + "." + reprocessorOptions.SourceCollection; + _collection = mongoClient.GetDatabase(mongoOptions.DatabaseName).GetCollection(reprocessorOptions.SourceCollection); + _collNamespace = mongoOptions.DatabaseName + "." 
+ reprocessorOptions.SourceCollection; - // if specified, batch size must be gt 1: - // https://docs.mongodb.com/manual/reference/method/cursor.batchSize/ - if (reprocessorOptions.MongoDbBatchSize > 1) - _findOptionsBase.BatchSize = reprocessorOptions.MongoDbBatchSize; + // if specified, batch size must be gt 1: + // https://docs.mongodb.com/manual/reference/method/cursor.batchSize/ + if (reprocessorOptions.MongoDbBatchSize > 1) + _findOptionsBase.BatchSize = reprocessorOptions.MongoDbBatchSize; - _autoRun = reprocessorOptions.AutoRun; - } + _autoRun = reprocessorOptions.AutoRun; + } - public async Task RunQuery(string? query, IDocumentProcessor processor, DicomReprocessorOptions options) - { - DateTime start; + public async Task RunQuery(string? query, IDocumentProcessor processor, DicomReprocessorOptions options) + { + DateTime start; + + _logger.Info($"Performing query on {_collNamespace}"); - _logger.Info($"Performing query on {_collNamespace}"); + using (IAsyncCursor cursor = await MongoQueryParser.GetCursor(_collection, _findOptionsBase, query)) + { + _logger.Info($"Using MaxDegreeOfParallelism: {_parallelOptions.MaxDegreeOfParallelism}"); + _logger.Info($"Batch size is: {(_findOptionsBase.BatchSize.HasValue ? _findOptionsBase.BatchSize.ToString() : "unspecified")}"); + _logger.Info($"Sleeping for {options.SleepTime.TotalMilliseconds}ms between batches"); - using (IAsyncCursor cursor = await MongoQueryParser.GetCursor(_collection, _findOptionsBase, query)) + if (!_autoRun) { - _logger.Info($"Using MaxDegreeOfParallelism: {_parallelOptions.MaxDegreeOfParallelism}"); - _logger.Info($"Batch size is: {(_findOptionsBase.BatchSize.HasValue ? 
_findOptionsBase.BatchSize.ToString() : "unspecified")}"); - _logger.Info($"Sleeping for {options.SleepTime.TotalMilliseconds}ms between batches"); + LogManager.Flush(); + + Console.Write($"Confirm you want to reprocess documents using the above query in {_collNamespace} [y/N]: "); - if (!_autoRun) + // Anything other than y/Y cancels the operation + string? key = Console.ReadLine(); + if (key == null || !key.Equals("y", StringComparison.CurrentCultureIgnoreCase)) { - LogManager.Flush(); + _logger.Warn("User cancelled reprocessing by not answering 'y', exiting"); + return default; + } - Console.Write($"Confirm you want to reprocess documents using the above query in {_collNamespace} [y/N]: "); + _logger.Info("User chose to continue with query execution"); + } - // Anything other than y/Y cancels the operation - string? key = Console.ReadLine(); - if (key == null || !key.Equals("y", StringComparison.CurrentCultureIgnoreCase)) - { - _logger.Warn("User cancelled reprocessing by not answering 'y', exiting"); - return default; - } + _logger.Info("Starting reprocess operation"); + start = DateTime.Now; + var totalBatches = 0; - _logger.Info("User chose to continue with query execution"); - } + //Note: Can only check for the cancellation request every time we start to process a new batch + while (await cursor.MoveNextAsync() && !_tokenSource.IsCancellationRequested) + { + _logger.Debug("Received new batch"); - _logger.Info("Starting reprocess operation"); - start = DateTime.Now; - var totalBatches = 0; + IEnumerable batch = cursor.Current; + var batchCount = 0; - //Note: Can only check for the cancellation request every time we start to process a new batch - while (await cursor.MoveNextAsync() && !_tokenSource.IsCancellationRequested) + Parallel.ForEach(batch, _parallelOptions, document => { - _logger.Debug("Received new batch"); - - IEnumerable batch = cursor.Current; - var batchCount = 0; - - Parallel.ForEach(batch, _parallelOptions, document => - { - 
processor.ProcessDocument(document); + processor.ProcessDocument(document); - Interlocked.Increment(ref batchCount); - }); + Interlocked.Increment(ref batchCount); + }); - _logger.Debug("Batch converted to messages, count was: " + batchCount); + _logger.Debug("Batch converted to messages, count was: " + batchCount); - processor.SendMessages(); + processor.SendMessages(); - if (++totalBatches % 100 == 0) - processor.LogProgress(); + if (++totalBatches % 100 == 0) + processor.LogProgress(); - _logger.Debug($"Batch processed, sleeping for {options.SleepTime.TotalMilliseconds}ms"); - Thread.Sleep(options.SleepTime); - } + _logger.Debug($"Batch processed, sleeping for {options.SleepTime.TotalMilliseconds}ms"); + Thread.Sleep(options.SleepTime); } + } - TimeSpan queryTime = DateTime.Now - start; - _logger.Info("Reprocessing finished or cancelled, time elapsed: " + queryTime.ToString("g")); + TimeSpan queryTime = DateTime.Now - start; + _logger.Info("Reprocessing finished or cancelled, time elapsed: " + queryTime.ToString("g")); - return queryTime; - } + return queryTime; + } - public void Stop() - { - if (_stopping) - return; + public void Stop() + { + if (_stopping) + return; - _stopping = true; + _stopping = true; - _logger.Info("Cancelling the running query"); - _tokenSource.Cancel(); - } + _logger.Info("Cancelling the running query"); + _tokenSource.Cancel(); } } diff --git a/src/SmiServices/Microservices/DicomReprocessor/TagPromotionProcessor.cs b/src/SmiServices/Microservices/DicomReprocessor/TagPromotionProcessor.cs index af4f064dd..6b8cf547b 100644 --- a/src/SmiServices/Microservices/DicomReprocessor/TagPromotionProcessor.cs +++ b/src/SmiServices/Microservices/DicomReprocessor/TagPromotionProcessor.cs @@ -4,36 +4,35 @@ using SmiServices.Common.Options; using System; -namespace SmiServices.Microservices.DicomReprocessor +namespace SmiServices.Microservices.DicomReprocessor; + +/// +/// Processes tags from documents into TagPromotionMessage(s) and publishes them to 
the config-defined tag promotion exchange +/// +public class TagPromotionProcessor : IDocumentProcessor { - /// - /// Processes tags from documents into TagPromotionMessage(s) and publishes them to the config-defined tag promotion exchange - /// - public class TagPromotionProcessor : IDocumentProcessor - { - public long TotalProcessed { get; private set; } - public long TotalFailed { get; private set; } + public long TotalProcessed { get; private set; } + public long TotalFailed { get; private set; } - public TagPromotionProcessor(IProducerModel producerModel) - { - if (producerModel is not BatchProducerModel) - throw new ArgumentException("producerModel must be a batch producer"); - } + public TagPromotionProcessor(IProducerModel producerModel) + { + if (producerModel is not BatchProducerModel) + throw new ArgumentException("producerModel must be a batch producer"); + } - public void ProcessDocument(BsonDocument document) - { - throw new NotImplementedException(); - } + public void ProcessDocument(BsonDocument document) + { + throw new NotImplementedException(); + } - public void SendMessages() - { - throw new NotImplementedException(); - } + public void SendMessages() + { + throw new NotImplementedException(); + } - public void LogProgress() - { - throw new NotImplementedException(); - } + public void LogProgress() + { + throw new NotImplementedException(); } } diff --git a/src/SmiServices/Microservices/DicomTagReader/DicomTagReader.cs b/src/SmiServices/Microservices/DicomTagReader/DicomTagReader.cs index 0114d5e71..0e0d05c3b 100644 --- a/src/SmiServices/Microservices/DicomTagReader/DicomTagReader.cs +++ b/src/SmiServices/Microservices/DicomTagReader/DicomTagReader.cs @@ -6,41 +6,40 @@ using System.Collections.Generic; using System.Diagnostics.CodeAnalysis; -namespace SmiServices.Microservices.DicomTagReader +namespace SmiServices.Microservices.DicomTagReader; + +public static class DicomTagReader { - public static class DicomTagReader + /// + /// Program entry 
point when run from the command line + /// + /// + [ExcludeFromCodeCoverage] + public static int Main(IEnumerable args) { - /// - /// Program entry point when run from the command line - /// - /// - [ExcludeFromCodeCoverage] - public static int Main(IEnumerable args) - { - int ret = SmiCliInit.ParseAndRun(args, nameof(DicomTagReader), OnParse); - return ret; - } + int ret = SmiCliInit.ParseAndRun(args, nameof(DicomTagReader), OnParse); + return ret; + } - private static int OnParse(GlobalOptions globals, DicomTagReaderCliOptions opts) + private static int OnParse(GlobalOptions globals, DicomTagReaderCliOptions opts) + { + if (opts.File != null) { - if (opts.File != null) + try { - try - { - var host = new DicomTagReaderHost(globals); - host.AccessionDirectoryMessageConsumer.RunSingleFile(opts.File); - return 0; - } - catch (Exception ex) - { - LogManager.GetCurrentClassLogger().Error(ex); - return 1; - } + var host = new DicomTagReaderHost(globals); + host.AccessionDirectoryMessageConsumer.RunSingleFile(opts.File); + return 0; + } + catch (Exception ex) + { + LogManager.GetCurrentClassLogger().Error(ex); + return 1; } - - var bootstrapper = new MicroserviceHostBootstrapper(() => new DicomTagReaderHost(globals)); - int ret = bootstrapper.Main(); - return ret; } + + var bootstrapper = new MicroserviceHostBootstrapper(() => new DicomTagReaderHost(globals)); + int ret = bootstrapper.Main(); + return ret; } } diff --git a/src/SmiServices/Microservices/DicomTagReader/DicomTagReaderCliOptions.cs b/src/SmiServices/Microservices/DicomTagReader/DicomTagReaderCliOptions.cs index 75551f5ec..bc1d744a1 100644 --- a/src/SmiServices/Microservices/DicomTagReader/DicomTagReaderCliOptions.cs +++ b/src/SmiServices/Microservices/DicomTagReader/DicomTagReaderCliOptions.cs @@ -2,19 +2,18 @@ using SmiServices.Common.Options; using System.IO; -namespace SmiServices.Microservices.DicomTagReader +namespace SmiServices.Microservices.DicomTagReader; + +public class DicomTagReaderCliOptions : 
CliOptions { - public class DicomTagReaderCliOptions : CliOptions - { - /// - /// When not null this is the single file that should be considered instead of subscribing to RabbitMQ input queue - /// - [Option( - 'f', - "file", - Required = false, - HelpText = "[Optional] Name of a specific dicom or zip file to process instead of subscribing to rabbit" - )] - public FileInfo? File { get; set; } - } + /// + /// When not null this is the single file that should be considered instead of subscribing to RabbitMQ input queue + /// + [Option( + 'f', + "file", + Required = false, + HelpText = "[Optional] Name of a specific dicom or zip file to process instead of subscribing to rabbit" + )] + public FileInfo? File { get; set; } } diff --git a/src/SmiServices/Microservices/DicomTagReader/Execution/DicomTagReaderHost.cs b/src/SmiServices/Microservices/DicomTagReader/Execution/DicomTagReaderHost.cs index bda3f364c..648576f87 100644 --- a/src/SmiServices/Microservices/DicomTagReader/Execution/DicomTagReaderHost.cs +++ b/src/SmiServices/Microservices/DicomTagReader/Execution/DicomTagReaderHost.cs @@ -7,66 +7,65 @@ using System.IO; using System.IO.Abstractions; -namespace SmiServices.Microservices.DicomTagReader.Execution -{ - public class DicomTagReaderHost : MicroserviceHost - { - public readonly DicomTagReaderConsumer AccessionDirectoryMessageConsumer; - private readonly TagReaderBase _tagReader; - - - public DicomTagReaderHost(GlobalOptions options) - : base(options) - { - if (!Directory.Exists(options.FileSystemOptions!.FileSystemRoot)) - throw new ArgumentException( - $"Cannot find the FileSystemRoot specified in the given MicroservicesOptions ({options.FileSystemOptions.FileSystemRoot})"); +namespace SmiServices.Microservices.DicomTagReader.Execution; - Logger.Debug($"Creating DicomTagReaderHost with FileSystemRoot: {options.FileSystemOptions.FileSystemRoot}"); - Logger.Debug($"NackIfAnyFileErrors option set to {options.DicomTagReaderOptions!.NackIfAnyFileErrors}"); +public 
class DicomTagReaderHost : MicroserviceHost +{ + public readonly DicomTagReaderConsumer AccessionDirectoryMessageConsumer; + private readonly TagReaderBase _tagReader; - IProducerModel seriesProducerModel; - IProducerModel imageProducerModel; - try - { - Logger.Debug( - $"Creating seriesProducerModel with ExchangeName: {options.DicomTagReaderOptions.SeriesProducerOptions!.ExchangeName}"); - seriesProducerModel = MessageBroker.SetupProducer(options.DicomTagReaderOptions.SeriesProducerOptions, true); + public DicomTagReaderHost(GlobalOptions options) + : base(options) + { + if (!Directory.Exists(options.FileSystemOptions!.FileSystemRoot)) + throw new ArgumentException( + $"Cannot find the FileSystemRoot specified in the given MicroservicesOptions ({options.FileSystemOptions.FileSystemRoot})"); - Logger.Debug( - $"Creating imageProducerModel with ExchangeName: {options.DicomTagReaderOptions.ImageProducerOptions!.ExchangeName}"); - imageProducerModel = MessageBroker.SetupProducer(options.DicomTagReaderOptions.ImageProducerOptions, true); - } - catch (Exception e) - { - throw new ApplicationException("Couldn't create series producer model on startup", e); - } + Logger.Debug($"Creating DicomTagReaderHost with FileSystemRoot: {options.FileSystemOptions.FileSystemRoot}"); + Logger.Debug($"NackIfAnyFileErrors option set to {options.DicomTagReaderOptions!.NackIfAnyFileErrors}"); - Logger.Debug("Creating AccessionDirectoryMessageConsumer"); + IProducerModel seriesProducerModel; + IProducerModel imageProducerModel; - _tagReader = options.DicomTagReaderOptions.TagProcessorMode switch - { - TagProcessorMode.Serial => new SerialTagReader(options.DicomTagReaderOptions, options.FileSystemOptions, seriesProducerModel, imageProducerModel, new FileSystem()), - TagProcessorMode.Parallel => new ParallelTagReader(options.DicomTagReaderOptions, options.FileSystemOptions, seriesProducerModel, imageProducerModel, new FileSystem()), - _ => throw new ArgumentException($"No case for mode 
{options.DicomTagReaderOptions.TagProcessorMode}"), - }; + try + { + Logger.Debug( + $"Creating seriesProducerModel with ExchangeName: {options.DicomTagReaderOptions.SeriesProducerOptions!.ExchangeName}"); + seriesProducerModel = MessageBroker.SetupProducer(options.DicomTagReaderOptions.SeriesProducerOptions, true); - // Setup our consumer - AccessionDirectoryMessageConsumer = new DicomTagReaderConsumer(_tagReader, options); + Logger.Debug( + $"Creating imageProducerModel with ExchangeName: {options.DicomTagReaderOptions.ImageProducerOptions!.ExchangeName}"); + imageProducerModel = MessageBroker.SetupProducer(options.DicomTagReaderOptions.ImageProducerOptions, true); } - - public override void Start() + catch (Exception e) { - // Start the consumer to await callbacks when messages arrive - MessageBroker.StartConsumer(Globals.DicomTagReaderOptions!, AccessionDirectoryMessageConsumer, isSolo: false); - Logger.Debug("Consumer started"); + throw new ApplicationException("Couldn't create series producer model on startup", e); } - public override void Stop(string reason) + Logger.Debug("Creating AccessionDirectoryMessageConsumer"); + + _tagReader = options.DicomTagReaderOptions.TagProcessorMode switch { - _tagReader.Stop(); - base.Stop(reason); - } + TagProcessorMode.Serial => new SerialTagReader(options.DicomTagReaderOptions, options.FileSystemOptions, seriesProducerModel, imageProducerModel, new FileSystem()), + TagProcessorMode.Parallel => new ParallelTagReader(options.DicomTagReaderOptions, options.FileSystemOptions, seriesProducerModel, imageProducerModel, new FileSystem()), + _ => throw new ArgumentException($"No case for mode {options.DicomTagReaderOptions.TagProcessorMode}"), + }; + + // Setup our consumer + AccessionDirectoryMessageConsumer = new DicomTagReaderConsumer(_tagReader, options); + } + + public override void Start() + { + // Start the consumer to await callbacks when messages arrive + MessageBroker.StartConsumer(Globals.DicomTagReaderOptions!, 
AccessionDirectoryMessageConsumer, isSolo: false); + Logger.Debug("Consumer started"); + } + + public override void Stop(string reason) + { + _tagReader.Stop(); + base.Stop(reason); } } diff --git a/src/SmiServices/Microservices/DicomTagReader/Execution/ParallelTagReader.cs b/src/SmiServices/Microservices/DicomTagReader/Execution/ParallelTagReader.cs index 5c3762863..5cefe76a3 100644 --- a/src/SmiServices/Microservices/DicomTagReader/Execution/ParallelTagReader.cs +++ b/src/SmiServices/Microservices/DicomTagReader/Execution/ParallelTagReader.cs @@ -10,61 +10,60 @@ using System.Threading.Tasks; -namespace SmiServices.Microservices.DicomTagReader.Execution +namespace SmiServices.Microservices.DicomTagReader.Execution; + +public class ParallelTagReader : TagReaderBase { - public class ParallelTagReader : TagReaderBase - { - private readonly ParallelOptions _parallelOptions; + private readonly ParallelOptions _parallelOptions; - public ParallelTagReader(DicomTagReaderOptions options, FileSystemOptions fileSystemOptions, - IProducerModel seriesMessageProducerModel, IProducerModel fileMessageProducerModel, IFileSystem fs) - : base(options, fileSystemOptions, seriesMessageProducerModel, fileMessageProducerModel, fs) + public ParallelTagReader(DicomTagReaderOptions options, FileSystemOptions fileSystemOptions, + IProducerModel seriesMessageProducerModel, IProducerModel fileMessageProducerModel, IFileSystem fs) + : base(options, fileSystemOptions, seriesMessageProducerModel, fileMessageProducerModel, fs) + { + _parallelOptions = new ParallelOptions { - _parallelOptions = new ParallelOptions - { - MaxDegreeOfParallelism = options.MaxIoThreads - }; + MaxDegreeOfParallelism = options.MaxIoThreads + }; - Logger.Info($"Using MaxDegreeOfParallelism={_parallelOptions.MaxDegreeOfParallelism} for parallel IO operations"); - } + Logger.Info($"Using MaxDegreeOfParallelism={_parallelOptions.MaxDegreeOfParallelism} for parallel IO operations"); + } - protected override List 
ReadTagsImpl(IEnumerable dicomFilePaths, - AccessionDirectoryMessage accMessage) - { - var fileMessages = new List(); - var fileMessagesLock = new object(); + protected override List ReadTagsImpl(IEnumerable dicomFilePaths, + AccessionDirectoryMessage accMessage) + { + var fileMessages = new List(); + var fileMessagesLock = new object(); - Parallel.ForEach(dicomFilePaths, _parallelOptions, dicomFilePath => - { - Logger.Trace("TagReader: Processing " + dicomFilePath); + Parallel.ForEach(dicomFilePaths, _parallelOptions, dicomFilePath => + { + Logger.Trace("TagReader: Processing " + dicomFilePath); - DicomFileMessage fileMessage; + DicomFileMessage fileMessage; - try - { - fileMessage = ReadTagsFromFile(dicomFilePath); - } - catch (Exception e) - { - if (NackIfAnyFileErrors) - throw new ApplicationException( - "Exception processing file and NackIfAnyFileErrors option set. File was: " + dicomFilePath, - e); + try + { + fileMessage = ReadTagsFromFile(dicomFilePath); + } + catch (Exception e) + { + if (NackIfAnyFileErrors) + throw new ApplicationException( + "Exception processing file and NackIfAnyFileErrors option set. File was: " + dicomFilePath, + e); - Logger.Error(e, - "Error processing file " + dicomFilePath + - ". Ignoring and moving on since NackIfAnyFileErrors is false"); + Logger.Error(e, + "Error processing file " + dicomFilePath + + ". 
Ignoring and moving on since NackIfAnyFileErrors is false"); - return; - } + return; + } - lock (fileMessagesLock) - fileMessages.Add(fileMessage); + lock (fileMessagesLock) + fileMessages.Add(fileMessage); - Interlocked.Increment(ref NFilesProcessed); - }); + Interlocked.Increment(ref NFilesProcessed); + }); - return fileMessages; - } + return fileMessages; } } diff --git a/src/SmiServices/Microservices/DicomTagReader/Execution/SerialTagReader.cs b/src/SmiServices/Microservices/DicomTagReader/Execution/SerialTagReader.cs index 513c6a4ea..df8e2b5ec 100644 --- a/src/SmiServices/Microservices/DicomTagReader/Execution/SerialTagReader.cs +++ b/src/SmiServices/Microservices/DicomTagReader/Execution/SerialTagReader.cs @@ -8,47 +8,46 @@ using System.IO.Abstractions; -namespace SmiServices.Microservices.DicomTagReader.Execution +namespace SmiServices.Microservices.DicomTagReader.Execution; + +public class SerialTagReader : TagReaderBase { - public class SerialTagReader : TagReaderBase + public SerialTagReader(DicomTagReaderOptions options, FileSystemOptions fileSystemOptions, + IProducerModel seriesMessageProducerModel, IProducerModel fileMessageProducerModel, IFileSystem fs) + : base(options, fileSystemOptions, seriesMessageProducerModel, fileMessageProducerModel, fs) { } + + protected override List ReadTagsImpl(IEnumerable dicomFilePaths, AccessionDirectoryMessage accMessage) { - public SerialTagReader(DicomTagReaderOptions options, FileSystemOptions fileSystemOptions, - IProducerModel seriesMessageProducerModel, IProducerModel fileMessageProducerModel, IFileSystem fs) - : base(options, fileSystemOptions, seriesMessageProducerModel, fileMessageProducerModel, fs) { } + var fileMessages = new List(); - protected override List ReadTagsImpl(IEnumerable dicomFilePaths, AccessionDirectoryMessage accMessage) + foreach (FileInfo dicomFilePath in dicomFilePaths) { - var fileMessages = new List(); + Logger.Trace("TagReader: Processing " + dicomFilePath); - foreach (FileInfo 
dicomFilePath in dicomFilePaths) + DicomFileMessage fileMessage; + + try + { + fileMessage = ReadTagsFromFile(dicomFilePath); + } + catch (Exception e) { - Logger.Trace("TagReader: Processing " + dicomFilePath); - - DicomFileMessage fileMessage; - - try - { - fileMessage = ReadTagsFromFile(dicomFilePath); - } - catch (Exception e) - { - if (NackIfAnyFileErrors) - throw new ApplicationException( - "Exception processing file and NackIfAnyFileErrors option set. File was: " + dicomFilePath, - e); - - Logger.Error(e, - "Error processing file " + dicomFilePath + - ". Ignoring and moving on since NackIfAnyFileErrors is false"); - - continue; - } - - fileMessages.Add(fileMessage); - ++NFilesProcessed; + if (NackIfAnyFileErrors) + throw new ApplicationException( + "Exception processing file and NackIfAnyFileErrors option set. File was: " + dicomFilePath, + e); + + Logger.Error(e, + "Error processing file " + dicomFilePath + + ". Ignoring and moving on since NackIfAnyFileErrors is false"); + + continue; } - return fileMessages; + fileMessages.Add(fileMessage); + ++NFilesProcessed; } + + return fileMessages; } } diff --git a/src/SmiServices/Microservices/DicomTagReader/Execution/TagReaderBase.cs b/src/SmiServices/Microservices/DicomTagReader/Execution/TagReaderBase.cs index b3c0b60a9..2d1b4836a 100644 --- a/src/SmiServices/Microservices/DicomTagReader/Execution/TagReaderBase.cs +++ b/src/SmiServices/Microservices/DicomTagReader/Execution/TagReaderBase.cs @@ -15,321 +15,320 @@ using System.Linq; using System.Text; -namespace SmiServices.Microservices.DicomTagReader.Execution +namespace SmiServices.Microservices.DicomTagReader.Execution; + +public abstract class TagReaderBase { - public abstract class TagReaderBase + private readonly string _filesystemRoot; + private readonly IFileSystem _fs; + + private readonly IProducerModel _seriesMessageProducerModel; + private readonly IProducerModel _fileMessageProducerModel; + protected readonly ILogger Logger; + + protected readonly 
bool NackIfAnyFileErrors; + + private readonly string _searchPattern; + + private static FileReadOption _fileReadOption; + + private readonly Stopwatch _stopwatch = new(); + private int _nAccMessagesProcessed; + protected int NFilesProcessed; + private int _nMessagesSent; + private readonly long[] _swTotals = new long[4]; // Enumerate, Read, Send, Total + + public bool IsExiting; + public readonly object TagReaderProcessLock = new(); + + /// + /// Optional function for last minute filtering of which files in an folder get processed + /// + public Func? IncludeFile { get; set; } + + /// + /// Interrogates directory tree for dicom files and produces series info and individual file info + /// + /// + /// + /// + /// + /// File system to use + public TagReaderBase(DicomTagReaderOptions options, FileSystemOptions fileSystemOptions, IProducerModel seriesMessageProducerModel, IProducerModel fileMessageProducerModel, IFileSystem fs) { - private readonly string _filesystemRoot; - private readonly IFileSystem _fs; - - private readonly IProducerModel _seriesMessageProducerModel; - private readonly IProducerModel _fileMessageProducerModel; - protected readonly ILogger Logger; - - protected readonly bool NackIfAnyFileErrors; - - private readonly string _searchPattern; - - private static FileReadOption _fileReadOption; - - private readonly Stopwatch _stopwatch = new(); - private int _nAccMessagesProcessed; - protected int NFilesProcessed; - private int _nMessagesSent; - private readonly long[] _swTotals = new long[4]; // Enumerate, Read, Send, Total - - public bool IsExiting; - public readonly object TagReaderProcessLock = new(); - - /// - /// Optional function for last minute filtering of which files in an folder get processed - /// - public Func? 
IncludeFile { get; set; } - - /// - /// Interrogates directory tree for dicom files and produces series info and individual file info - /// - /// - /// - /// - /// - /// File system to use - public TagReaderBase(DicomTagReaderOptions options, FileSystemOptions fileSystemOptions, IProducerModel seriesMessageProducerModel, IProducerModel fileMessageProducerModel, IFileSystem fs) - { - Logger = LogManager.GetLogger(GetType().Name); + Logger = LogManager.GetLogger(GetType().Name); - _filesystemRoot = fileSystemOptions.FileSystemRoot ?? throw new ArgumentNullException(nameof(fileSystemOptions)); - NackIfAnyFileErrors = options.NackIfAnyFileErrors; - _searchPattern = fileSystemOptions.DicomSearchPattern ?? throw new ArgumentNullException(nameof(fileSystemOptions)); + _filesystemRoot = fileSystemOptions.FileSystemRoot ?? throw new ArgumentNullException(nameof(fileSystemOptions)); + NackIfAnyFileErrors = options.NackIfAnyFileErrors; + _searchPattern = fileSystemOptions.DicomSearchPattern ?? throw new ArgumentNullException(nameof(fileSystemOptions)); - _fileReadOption = options.GetReadOption(); + _fileReadOption = options.GetReadOption(); - Logger.Debug($"FileReadOption is: {_fileReadOption}"); + Logger.Debug($"FileReadOption is: {_fileReadOption}"); - _seriesMessageProducerModel = seriesMessageProducerModel; - _fileMessageProducerModel = fileMessageProducerModel; - _fs = fs; + _seriesMessageProducerModel = seriesMessageProducerModel; + _fileMessageProducerModel = fileMessageProducerModel; + _fs = fs; - Logger.Info($"Stopwatch implementation - IsHighResolution: {Stopwatch.IsHighResolution}. Frequency: {Stopwatch.Frequency} ticks/s"); - } + Logger.Info($"Stopwatch implementation - IsHighResolution: {Stopwatch.IsHighResolution}. Frequency: {Stopwatch.Frequency} ticks/s"); + } - /// - /// Process files from the directory referenced in the message - /// - /// - /// - public void ReadTags(IMessageHeader? 
header, AccessionDirectoryMessage message) - { - _stopwatch.Restart(); + /// + /// Process files from the directory referenced in the message + /// + /// + /// + public void ReadTags(IMessageHeader? header, AccessionDirectoryMessage message) + { + _stopwatch.Restart(); - string dirPath = message.GetAbsolutePath(_filesystemRoot); - Logger.Debug("TagReader: About to process files in " + dirPath); + string dirPath = message.GetAbsolutePath(_filesystemRoot); + Logger.Debug("TagReader: About to process files in " + dirPath); - if (!_fs.Directory.Exists(dirPath)) - throw new ApplicationException("Directory not found: " + dirPath); + if (!_fs.Directory.Exists(dirPath)) + throw new ApplicationException("Directory not found: " + dirPath); - if (!dirPath.StartsWith(_filesystemRoot, StringComparison.CurrentCultureIgnoreCase)) - throw new ApplicationException("Directory " + dirPath + " is not below the given FileSystemRoot (" + - _filesystemRoot + ")"); - long beginEnumerate = _stopwatch.ElapsedTicks; - string[] dicomFilePaths = _fs.Directory.EnumerateFiles(dirPath, _searchPattern).Where(Include).ToArray(); - string[] zipFilePaths = _fs.Directory.EnumerateFiles(dirPath).Where(ZipHelper.IsZip).Where(Include).ToArray(); + if (!dirPath.StartsWith(_filesystemRoot, StringComparison.CurrentCultureIgnoreCase)) + throw new ApplicationException("Directory " + dirPath + " is not below the given FileSystemRoot (" + + _filesystemRoot + ")"); + long beginEnumerate = _stopwatch.ElapsedTicks; + string[] dicomFilePaths = _fs.Directory.EnumerateFiles(dirPath, _searchPattern).Where(Include).ToArray(); + string[] zipFilePaths = _fs.Directory.EnumerateFiles(dirPath).Where(ZipHelper.IsZip).Where(Include).ToArray(); - _swTotals[0] += _stopwatch.ElapsedTicks - beginEnumerate; - Logger.Debug("TagReader: Found " + dicomFilePaths.Length + " dicom files to process"); - Logger.Debug("TagReader: Found " + zipFilePaths.Length + " zip files to process"); + _swTotals[0] += _stopwatch.ElapsedTicks - 
beginEnumerate; + Logger.Debug("TagReader: Found " + dicomFilePaths.Length + " dicom files to process"); + Logger.Debug("TagReader: Found " + zipFilePaths.Length + " zip files to process"); - int toProcess = dicomFilePaths.Length + zipFilePaths.Length; + int toProcess = dicomFilePaths.Length + zipFilePaths.Length; - if (toProcess == 0) - throw new ApplicationException("No dicom/zip files found in " + dirPath); + if (toProcess == 0) + throw new ApplicationException("No dicom/zip files found in " + dirPath); - // We have files to process, let's do it! + // We have files to process, let's do it! - long beginRead = _stopwatch.ElapsedTicks; + long beginRead = _stopwatch.ElapsedTicks; - List fileMessages = ReadTagsImpl(dicomFilePaths.Select(p => new FileInfo(p)), message); - fileMessages.AddRange(ReadZipFilesImpl(zipFilePaths.Select(p => new FileInfo(p)), message)); + List fileMessages = ReadTagsImpl(dicomFilePaths.Select(p => new FileInfo(p)), message); + fileMessages.AddRange(ReadZipFilesImpl(zipFilePaths.Select(p => new FileInfo(p)), message)); - _swTotals[1] += (_stopwatch.ElapsedTicks - beginRead) / toProcess; + _swTotals[1] += (_stopwatch.ElapsedTicks - beginRead) / toProcess; - var seriesMessages = new Dictionary(); + var seriesMessages = new Dictionary(); - foreach (DicomFileMessage fileMessage in fileMessages) - { - string seriesUID = fileMessage.SeriesInstanceUID; + foreach (DicomFileMessage fileMessage in fileMessages) + { + string seriesUID = fileMessage.SeriesInstanceUID; - // If we've already seen this seriesUID, just update the image count - if (seriesMessages.TryGetValue(seriesUID, out SeriesMessage? value)) - { - value.ImagesInSeries++; - continue; - } + // If we've already seen this seriesUID, just update the image count + if (seriesMessages.TryGetValue(seriesUID, out SeriesMessage? 
value)) + { + value.ImagesInSeries++; + continue; + } - // Else create a new SeriesMessage - var seriesMessage = new SeriesMessage - { - DirectoryPath = message.DirectoryPath, + // Else create a new SeriesMessage + var seriesMessage = new SeriesMessage + { + DirectoryPath = message.DirectoryPath, - StudyInstanceUID = fileMessage.StudyInstanceUID, - SeriesInstanceUID = seriesUID, + StudyInstanceUID = fileMessage.StudyInstanceUID, + SeriesInstanceUID = seriesUID, - ImagesInSeries = 1, + ImagesInSeries = 1, - DicomDataset = fileMessage.DicomDataset - }; + DicomDataset = fileMessage.DicomDataset + }; - seriesMessages.Add(seriesUID, seriesMessage); - } + seriesMessages.Add(seriesUID, seriesMessage); + } - Logger.Debug("TagReader: Finished processing directory, sending messages"); + Logger.Debug("TagReader: Finished processing directory, sending messages"); - // Only send if have processed all files in the directory ok + // Only send if have processed all files in the directory ok - if (fileMessages.Count == 0) - throw new ApplicationException("No DicomFileMessage(s) to send after processing the directory"); + if (fileMessages.Count == 0) + throw new ApplicationException("No DicomFileMessage(s) to send after processing the directory"); - if (seriesMessages.Count == 0) - throw new ApplicationException("No SeriesMessage(s) to send but we have file messages"); + if (seriesMessages.Count == 0) + throw new ApplicationException("No SeriesMessage(s) to send but we have file messages"); - Logger.Info($"Sending {fileMessages.Count} DicomFileMessage(s)"); + Logger.Info($"Sending {fileMessages.Count} DicomFileMessage(s)"); - long beginSend = _stopwatch.ElapsedTicks; - var headers = new List(); - foreach (DicomFileMessage fileMessage in fileMessages) - headers.Add(_fileMessageProducerModel.SendMessage(fileMessage, header, routingKey: null)); + long beginSend = _stopwatch.ElapsedTicks; + var headers = new List(); + foreach (DicomFileMessage fileMessage in fileMessages) + 
headers.Add(_fileMessageProducerModel.SendMessage(fileMessage, header, routingKey: null)); - _fileMessageProducerModel.WaitForConfirms(); + _fileMessageProducerModel.WaitForConfirms(); - headers.ForEach(x => x.Log(Logger, LogLevel.Trace, $"Sent {header?.MessageGuid}")); + headers.ForEach(x => x.Log(Logger, LogLevel.Trace, $"Sent {header?.MessageGuid}")); - Logger.Info($"Sending {seriesMessages.Count} SeriesMessage(s)"); + Logger.Info($"Sending {seriesMessages.Count} SeriesMessage(s)"); - headers.Clear(); - foreach (KeyValuePair kvp in seriesMessages) - headers.Add(_seriesMessageProducerModel.SendMessage(kvp.Value, header, routingKey: null)); + headers.Clear(); + foreach (KeyValuePair kvp in seriesMessages) + headers.Add(_seriesMessageProducerModel.SendMessage(kvp.Value, header, routingKey: null)); - _seriesMessageProducerModel.WaitForConfirms(); - headers.ForEach(x => x.Log(Logger, LogLevel.Trace, $"Sent {x.MessageGuid}")); + _seriesMessageProducerModel.WaitForConfirms(); + headers.ForEach(x => x.Log(Logger, LogLevel.Trace, $"Sent {x.MessageGuid}")); - _swTotals[2] += _stopwatch.ElapsedTicks - beginSend; - _swTotals[3] += _stopwatch.ElapsedTicks; - _nMessagesSent += fileMessages.Count + seriesMessages.Count; + _swTotals[2] += _stopwatch.ElapsedTicks - beginSend; + _swTotals[3] += _stopwatch.ElapsedTicks; + _nMessagesSent += fileMessages.Count + seriesMessages.Count; - if (++_nAccMessagesProcessed % 10 == 0) - LogRates(); - } + if (++_nAccMessagesProcessed % 10 == 0) + LogRates(); + } - public bool Include(string filePath) - { - return IncludeFile?.Invoke(filePath) ?? true; - } + public bool Include(string filePath) + { + return IncludeFile?.Invoke(filePath) ?? 
true; + } - /// - /// Opens all zip files and generates a for each dcm file in the archive - /// - /// All the zip files that must be explored for dcm files - /// The upstream message that suggested we look for dicom files in a given directory - /// - protected virtual IEnumerable ReadZipFilesImpl(IEnumerable zipFilePaths, AccessionDirectoryMessage accMessage) + /// + /// Opens all zip files and generates a for each dcm file in the archive + /// + /// All the zip files that must be explored for dcm files + /// The upstream message that suggested we look for dicom files in a given directory + /// + protected virtual IEnumerable ReadZipFilesImpl(IEnumerable zipFilePaths, AccessionDirectoryMessage accMessage) + { + foreach (FileInfo zipFilePath in zipFilePaths) { - foreach (FileInfo zipFilePath in zipFilePaths) + using var archive = ZipFile.Open(zipFilePath.FullName, ZipArchiveMode.Read); + foreach (var entry in archive.Entries) { - using var archive = ZipFile.Open(zipFilePath.FullName, ZipArchiveMode.Read); - foreach (var entry in archive.Entries) - { - if (!entry.FullName.EndsWith(".dcm", StringComparison.CurrentCultureIgnoreCase)) continue; - byte[]? buffer = null; + if (!entry.FullName.EndsWith(".dcm", StringComparison.CurrentCultureIgnoreCase)) continue; + byte[]? 
buffer = null; - buffer = ReadFully(entry.Open()); + buffer = ReadFully(entry.Open()); - using var memoryStream = new MemoryStream(buffer); - var dicom = DicomFile.Open(memoryStream); + using var memoryStream = new MemoryStream(buffer); + var dicom = DicomFile.Open(memoryStream); - yield return DicomFileToMessage(dicom.Dataset, $"{zipFilePath.FullName}!{entry.FullName}", null); - } + yield return DicomFileToMessage(dicom.Dataset, $"{zipFilePath.FullName}!{entry.FullName}", null); } } + } - /// - /// Creates a new by reading tags - /// - /// - /// The full path that was read from - /// File size if known otherwise null - /// - /// If is missing required UIDS or serializing the dataset went wrong - protected DicomFileMessage DicomFileToMessage(DicomDataset ds, string dicomFilePath, long? fileSize) + /// + /// Creates a new by reading tags + /// + /// + /// The full path that was read from + /// File size if known otherwise null + /// + /// If is missing required UIDS or serializing the dataset went wrong + protected DicomFileMessage DicomFileToMessage(DicomDataset ds, string dicomFilePath, long? 
fileSize) + { + var IDs = new string[3]; + + try { - var IDs = new string[3]; + // Pre-fetch these to ensure they exist before we go further + IDs[0] = ds.GetValue(DicomTag.StudyInstanceUID, 0); + IDs[1] = ds.GetValue(DicomTag.SeriesInstanceUID, 0); + IDs[2] = ds.GetValue(DicomTag.SOPInstanceUID, 0); - try - { - // Pre-fetch these to ensure they exist before we go further - IDs[0] = ds.GetValue(DicomTag.StudyInstanceUID, 0); - IDs[1] = ds.GetValue(DicomTag.SeriesInstanceUID, 0); - IDs[2] = ds.GetValue(DicomTag.SOPInstanceUID, 0); + if (IDs.Any(string.IsNullOrWhiteSpace)) + throw new DicomDataException("A required ID tag existed but its value was invalid"); + } + catch (DicomDataException dde) + { + throw new ApplicationException("File opened but had a missing ID", dde); + } - if (IDs.Any(string.IsNullOrWhiteSpace)) - throw new DicomDataException("A required ID tag existed but its value was invalid"); - } - catch (DicomDataException dde) - { - throw new ApplicationException("File opened but had a missing ID", dde); - } + string serializedDataset; - string serializedDataset; + try + { + DicomDataset filtered = new(ds.Where(i => i is not DicomOtherByteFragment).ToArray()); + serializedDataset = DicomTypeTranslater.SerializeDatasetToJson(filtered); + } + catch (Exception e) + { + throw new ApplicationException("Failed to serialize dataset", e); + } - try - { - DicomDataset filtered = new(ds.Where(i => i is not DicomOtherByteFragment).ToArray()); - serializedDataset = DicomTypeTranslater.SerializeDatasetToJson(filtered); - } - catch (Exception e) - { - throw new ApplicationException("Failed to serialize dataset", e); - } + return new DicomFileMessage(_filesystemRoot, dicomFilePath) + { + //TODO(Ruairidh 04/07) Where are these used? + StudyInstanceUID = IDs[0], + SeriesInstanceUID = IDs[1], + SOPInstanceUID = IDs[2], - return new DicomFileMessage(_filesystemRoot, dicomFilePath) - { - //TODO(Ruairidh 04/07) Where are these used? 
- StudyInstanceUID = IDs[0], - SeriesInstanceUID = IDs[1], - SOPInstanceUID = IDs[2], + DicomDataset = serializedDataset, + DicomFileSize = fileSize ?? -1 + }; - DicomDataset = serializedDataset, - DicomFileSize = fileSize ?? -1 - }; + } + private static byte[] ReadFully(Stream stream) + { + var buffer = new byte[32768]; + int len; + try + { + len = (int)stream.Length; } - - private static byte[] ReadFully(Stream stream) + catch { - var buffer = new byte[32768]; - int len; - try - { - len = (int)stream.Length; - } - catch - { - len = 32768; - } - using var ms = new MemoryStream(len); - while (true) - { - var read = stream.Read(buffer, 0, buffer.Length); - if (read <= 0) - return ms.ToArray(); - ms.Write(buffer, 0, read); - } + len = 32768; } - protected abstract List ReadTagsImpl(IEnumerable dicomFilePaths, - AccessionDirectoryMessage accMessage); - - /// - /// Builds a from a single dicom file - /// - /// - /// - protected DicomFileMessage ReadTagsFromFile(FileInfo dicomFilePath) + using var ms = new MemoryStream(len); + while (true) { - try - { - return DicomFileToMessage(DicomFile.Open(dicomFilePath.FullName, _fileReadOption).Dataset, dicomFilePath.FullName, dicomFilePath.Length); - } - catch (DicomFileException dfe) - { - throw new ApplicationException($"Could not open dicom file: {dicomFilePath}", dfe); - } + var read = stream.Read(buffer, 0, buffer.Length); + if (read <= 0) + return ms.ToArray(); + ms.Write(buffer, 0, read); } - - private void LogRates() + } + protected abstract List ReadTagsImpl(IEnumerable dicomFilePaths, + AccessionDirectoryMessage accMessage); + + /// + /// Builds a from a single dicom file + /// + /// + /// + protected DicomFileMessage ReadTagsFromFile(FileInfo dicomFilePath) + { + try { - if (_nAccMessagesProcessed == 0) - { - Logger.Info("No messages processed - can't calculate averages"); - return; - } - - long freq = Stopwatch.Frequency; - var sb = new StringBuilder("Average rates - "); - sb.Append($"enumerate dir (per acc. 
message): {_swTotals[0] * 1.0 / (freq * _nAccMessagesProcessed):f6}s, "); - sb.Append($"file process: {_swTotals[1] * 1.0 / (freq * NFilesProcessed):f6}s, "); - sb.Append($"send messages: {_swTotals[2] * 1.0 / (freq * _nMessagesSent):f6}s, "); - sb.Append($"overall: {_swTotals[3] * 1.0 / (freq * _nAccMessagesProcessed):f6}s"); - Logger.Info(sb.ToString); + return DicomFileToMessage(DicomFile.Open(dicomFilePath.FullName, _fileReadOption).Dataset, dicomFilePath.FullName, dicomFilePath.Length); + } + catch (DicomFileException dfe) + { + throw new ApplicationException($"Could not open dicom file: {dicomFilePath}", dfe); } + } - public void Stop() + private void LogRates() + { + if (_nAccMessagesProcessed == 0) { - lock (TagReaderProcessLock) - IsExiting = true; + Logger.Info("No messages processed - can't calculate averages"); + return; + } - Logger.Info("Lock released, no more messages will be processed"); + long freq = Stopwatch.Frequency; + var sb = new StringBuilder("Average rates - "); + sb.Append($"enumerate dir (per acc. 
message): {_swTotals[0] * 1.0 / (freq * _nAccMessagesProcessed):f6}s, "); + sb.Append($"file process: {_swTotals[1] * 1.0 / (freq * NFilesProcessed):f6}s, "); + sb.Append($"send messages: {_swTotals[2] * 1.0 / (freq * _nMessagesSent):f6}s, "); + sb.Append($"overall: {_swTotals[3] * 1.0 / (freq * _nAccMessagesProcessed):f6}s"); + Logger.Info(sb.ToString); + } - LogRates(); - } + public void Stop() + { + lock (TagReaderProcessLock) + IsExiting = true; + + Logger.Info("Lock released, no more messages will be processed"); + + LogRates(); } } diff --git a/src/SmiServices/Microservices/DicomTagReader/Messaging/DicomTagReaderConsumer.cs b/src/SmiServices/Microservices/DicomTagReader/Messaging/DicomTagReaderConsumer.cs index 5ec1ebc88..cee7e77a3 100644 --- a/src/SmiServices/Microservices/DicomTagReader/Messaging/DicomTagReaderConsumer.cs +++ b/src/SmiServices/Microservices/DicomTagReader/Messaging/DicomTagReaderConsumer.cs @@ -6,69 +6,68 @@ using System; using System.IO; -namespace SmiServices.Microservices.DicomTagReader.Messaging +namespace SmiServices.Microservices.DicomTagReader.Messaging; + +/// +/// Consumer class for AccessionDirectoryMessage(s) +/// +public class DicomTagReaderConsumer : Consumer { + private readonly TagReaderBase _reader; + private readonly GlobalOptions _opts; + + /// - /// Consumer class for AccessionDirectoryMessage(s) + /// Default constructor /// - public class DicomTagReaderConsumer : Consumer + /// + /// > + public DicomTagReaderConsumer(TagReaderBase reader, GlobalOptions dicomTagReaderOptions) { - private readonly TagReaderBase _reader; - private readonly GlobalOptions _opts; - + _reader = reader; + _opts = dicomTagReaderOptions; + } - /// - /// Default constructor - /// - /// - /// > - public DicomTagReaderConsumer(TagReaderBase reader, GlobalOptions dicomTagReaderOptions) + /// + /// Callback method for received messages + /// + /// The audit trail and origin of the IMessage contained in deliverArgs + /// The message and associated 
information + /// + protected override void ProcessMessageImpl(IMessageHeader header, AccessionDirectoryMessage message, ulong tag) + { + lock (_reader.TagReaderProcessLock) { - _reader = reader; - _opts = dicomTagReaderOptions; - } + if (_reader.IsExiting) + return; - /// - /// Callback method for received messages - /// - /// The audit trail and origin of the IMessage contained in deliverArgs - /// The message and associated information - /// - protected override void ProcessMessageImpl(IMessageHeader header, AccessionDirectoryMessage message, ulong tag) - { - lock (_reader.TagReaderProcessLock) + try { - if (_reader.IsExiting) - return; - - try - { - _reader.ReadTags(header, message); - } - catch (ApplicationException e) - { - // Catch specific exceptions we are aware of, any uncaught will bubble up to the wrapper in ProcessMessage - - ErrorAndNack(header, tag, "Error while processing AccessionDirectoryMessage", e); - return; - } + _reader.ReadTags(header, message); } + catch (ApplicationException e) + { + // Catch specific exceptions we are aware of, any uncaught will bubble up to the wrapper in ProcessMessage - Ack(header, tag); + ErrorAndNack(header, tag, "Error while processing AccessionDirectoryMessage", e); + return; + } } - /// - /// Runs a single file (dicom or zip) through tag reading process - /// - /// - public void RunSingleFile(FileInfo file) - { - // tell reader only to consider our specific file - _reader.IncludeFile = f => new FileInfo(f).FullName.Equals(file.FullName, StringComparison.CurrentCultureIgnoreCase); - _reader.ReadTags(null, new AccessionDirectoryMessage(_opts.FileSystemOptions!.FileSystemRoot!, file.Directory!)); + Ack(header, tag); + } - // good practice to clear this afterwards - _reader.IncludeFile = null; - } + /// + /// Runs a single file (dicom or zip) through tag reading process + /// + /// + public void RunSingleFile(FileInfo file) + { + // tell reader only to consider our specific file + _reader.IncludeFile = f => new 
FileInfo(f).FullName.Equals(file.FullName, StringComparison.CurrentCultureIgnoreCase); + _reader.ReadTags(null, new AccessionDirectoryMessage(_opts.FileSystemOptions!.FileSystemRoot!, file.Directory!)); + + // good practice to clear this afterwards + _reader.IncludeFile = null; } } diff --git a/src/SmiServices/Microservices/FileCopier/ExtractionFileCopier.cs b/src/SmiServices/Microservices/FileCopier/ExtractionFileCopier.cs index e541cb331..9984dc33e 100644 --- a/src/SmiServices/Microservices/FileCopier/ExtractionFileCopier.cs +++ b/src/SmiServices/Microservices/FileCopier/ExtractionFileCopier.cs @@ -7,87 +7,86 @@ using System.IO.Abstractions; -namespace SmiServices.Microservices.FileCopier +namespace SmiServices.Microservices.FileCopier; + +public class ExtractionFileCopier : IFileCopier { - public class ExtractionFileCopier : IFileCopier - { - private readonly FileCopierOptions _options; + private readonly FileCopierOptions _options; - private readonly IProducerModel _copyStatusProducerModel; + private readonly IProducerModel _copyStatusProducerModel; - private readonly string _fileSystemRoot; - private readonly string _extractionRoot; - private readonly IFileSystem _fileSystem; + private readonly string _fileSystemRoot; + private readonly string _extractionRoot; + private readonly IFileSystem _fileSystem; - private readonly ILogger _logger; + private readonly ILogger _logger; - public ExtractionFileCopier( - FileCopierOptions options, - IProducerModel copyStatusCopyStatusProducerModel, - string fileSystemRoot, - string extractionRoot, - IFileSystem? fileSystem = null) - { - _options = options; - _copyStatusProducerModel = copyStatusCopyStatusProducerModel; - _fileSystemRoot = fileSystemRoot; - _extractionRoot = extractionRoot; - _fileSystem = fileSystem ?? 
new FileSystem(); - - if (!_fileSystem.Directory.Exists(_fileSystemRoot)) - throw new ArgumentException($"Cannot find the specified fileSystemRoot: '{_fileSystemRoot}'"); - if (!_fileSystem.Directory.Exists(_extractionRoot)) - throw new ArgumentException($"Cannot find the specified extractionRoot: '{_extractionRoot}'"); - - _logger = LogManager.GetLogger(GetType().Name); - _logger.Info($"fileSystemRoot={_fileSystemRoot}, extractionRoot={_extractionRoot}"); - } + public ExtractionFileCopier( + FileCopierOptions options, + IProducerModel copyStatusCopyStatusProducerModel, + string fileSystemRoot, + string extractionRoot, + IFileSystem? fileSystem = null) + { + _options = options; + _copyStatusProducerModel = copyStatusCopyStatusProducerModel; + _fileSystemRoot = fileSystemRoot; + _extractionRoot = extractionRoot; + _fileSystem = fileSystem ?? new FileSystem(); + + if (!_fileSystem.Directory.Exists(_fileSystemRoot)) + throw new ArgumentException($"Cannot find the specified fileSystemRoot: '{_fileSystemRoot}'"); + if (!_fileSystem.Directory.Exists(_extractionRoot)) + throw new ArgumentException($"Cannot find the specified extractionRoot: '{_extractionRoot}'"); + + _logger = LogManager.GetLogger(GetType().Name); + _logger.Info($"fileSystemRoot={_fileSystemRoot}, extractionRoot={_extractionRoot}"); + } - public void ProcessMessage( - ExtractFileMessage message, - IMessageHeader header) - { - string fullSrc = _fileSystem.Path.Combine(_fileSystemRoot, message.DicomFilePath); + public void ProcessMessage( + ExtractFileMessage message, + IMessageHeader header) + { + string fullSrc = _fileSystem.Path.Combine(_fileSystemRoot, message.DicomFilePath); - ExtractedFileStatusMessage statusMessage; + ExtractedFileStatusMessage statusMessage; - if (!_fileSystem.File.Exists(fullSrc)) + if (!_fileSystem.File.Exists(fullSrc)) + { + statusMessage = new ExtractedFileStatusMessage(message) { - statusMessage = new ExtractedFileStatusMessage(message) - { - DicomFilePath = 
message.DicomFilePath, - Status = ExtractedFileStatus.FileMissing, - StatusMessage = $"Could not find '{fullSrc}'" - }; - _ = _copyStatusProducerModel.SendMessage(statusMessage, header, _options.NoVerifyRoutingKey); - return; - } + DicomFilePath = message.DicomFilePath, + Status = ExtractedFileStatus.FileMissing, + StatusMessage = $"Could not find '{fullSrc}'" + }; + _ = _copyStatusProducerModel.SendMessage(statusMessage, header, _options.NoVerifyRoutingKey); + return; + } - string fullDest = _fileSystem.Path.Combine(_extractionRoot, message.ExtractionDirectory, message.OutputPath); + string fullDest = _fileSystem.Path.Combine(_extractionRoot, message.ExtractionDirectory, message.OutputPath); - if (_fileSystem.File.Exists(fullDest)) - _logger.Warn($"Output file '{fullDest}' already exists. Will overwrite."); + if (_fileSystem.File.Exists(fullDest)) + _logger.Warn($"Output file '{fullDest}' already exists. Will overwrite."); - IDirectoryInfo parent = _fileSystem.Directory.GetParent(fullDest) - ?? throw new ArgumentException($"Parameter {fullDest} is the filesystem root"); + IDirectoryInfo parent = _fileSystem.Directory.GetParent(fullDest) + ?? 
throw new ArgumentException($"Parameter {fullDest} is the filesystem root"); - if (!parent.Exists) - { - _logger.Debug($"Creating directory '{parent}'"); - parent.Create(); - } + if (!parent.Exists) + { + _logger.Debug($"Creating directory '{parent}'"); + parent.Create(); + } - _logger.Debug($"Copying source file to '{message.OutputPath}'"); - _fileSystem.File.Copy(fullSrc, fullDest, overwrite: true); + _logger.Debug($"Copying source file to '{message.OutputPath}'"); + _fileSystem.File.Copy(fullSrc, fullDest, overwrite: true); - statusMessage = new ExtractedFileStatusMessage(message) - { - DicomFilePath = message.DicomFilePath, - Status = ExtractedFileStatus.Copied, - OutputFilePath = message.OutputPath, - }; - _ = _copyStatusProducerModel.SendMessage(statusMessage, header, _options.NoVerifyRoutingKey); - } + statusMessage = new ExtractedFileStatusMessage(message) + { + DicomFilePath = message.DicomFilePath, + Status = ExtractedFileStatus.Copied, + OutputFilePath = message.OutputPath, + }; + _ = _copyStatusProducerModel.SendMessage(statusMessage, header, _options.NoVerifyRoutingKey); } } diff --git a/src/SmiServices/Microservices/FileCopier/FileCopier.cs b/src/SmiServices/Microservices/FileCopier/FileCopier.cs index e1a365fd4..d6f9c18a8 100644 --- a/src/SmiServices/Microservices/FileCopier/FileCopier.cs +++ b/src/SmiServices/Microservices/FileCopier/FileCopier.cs @@ -3,26 +3,25 @@ using System.Collections.Generic; using System.Diagnostics.CodeAnalysis; -namespace SmiServices.Microservices.FileCopier +namespace SmiServices.Microservices.FileCopier; + +public static class FileCopier { - public static class FileCopier + /// + /// Program entry point when run from the command line + /// + /// + [ExcludeFromCodeCoverage] + public static int Main(IEnumerable args) { - /// - /// Program entry point when run from the command line - /// - /// - [ExcludeFromCodeCoverage] - public static int Main(IEnumerable args) - { - int ret = SmiCliInit.ParseAndRun(args, 
nameof(FileCopier), OnParse); - return ret; - } + int ret = SmiCliInit.ParseAndRun(args, nameof(FileCopier), OnParse); + return ret; + } - private static int OnParse(GlobalOptions globals, CliOptions opts) - { - var bootstrapper = new MicroserviceHostBootstrapper(() => new FileCopierHost(globals)); - int ret = bootstrapper.Main(); - return ret; - } + private static int OnParse(GlobalOptions globals, CliOptions opts) + { + var bootstrapper = new MicroserviceHostBootstrapper(() => new FileCopierHost(globals)); + int ret = bootstrapper.Main(); + return ret; } } diff --git a/src/SmiServices/Microservices/FileCopier/FileCopierHost.cs b/src/SmiServices/Microservices/FileCopier/FileCopierHost.cs index c8f2fd3d0..bef9b4101 100644 --- a/src/SmiServices/Microservices/FileCopier/FileCopierHost.cs +++ b/src/SmiServices/Microservices/FileCopier/FileCopierHost.cs @@ -4,37 +4,36 @@ using System.IO.Abstractions; -namespace SmiServices.Microservices.FileCopier +namespace SmiServices.Microservices.FileCopier; + +public class FileCopierHost : MicroserviceHost { - public class FileCopierHost : MicroserviceHost - { - private readonly FileCopyQueueConsumer _consumer; + private readonly FileCopyQueueConsumer _consumer; - public FileCopierHost( - GlobalOptions options, - IFileSystem? fileSystem = null - ) - : base( - options - ) - { - Logger.Debug("Creating FileCopierHost with FileSystemRoot: " + Globals.FileSystemOptions!.FileSystemRoot); + public FileCopierHost( + GlobalOptions options, + IFileSystem? 
fileSystem = null + ) + : base( + options + ) + { + Logger.Debug("Creating FileCopierHost with FileSystemRoot: " + Globals.FileSystemOptions!.FileSystemRoot); - IProducerModel copyStatusProducerModel = MessageBroker.SetupProducer(Globals.FileCopierOptions!.CopyStatusProducerOptions!, isBatch: false); + IProducerModel copyStatusProducerModel = MessageBroker.SetupProducer(Globals.FileCopierOptions!.CopyStatusProducerOptions!, isBatch: false); - var fileCopier = new ExtractionFileCopier( - Globals.FileCopierOptions, - copyStatusProducerModel, - Globals.FileSystemOptions.FileSystemRoot!, - Globals.FileSystemOptions.ExtractRoot!, - fileSystem - ); - _consumer = new FileCopyQueueConsumer(fileCopier); - } + var fileCopier = new ExtractionFileCopier( + Globals.FileCopierOptions, + copyStatusProducerModel, + Globals.FileSystemOptions.FileSystemRoot!, + Globals.FileSystemOptions.ExtractRoot!, + fileSystem + ); + _consumer = new FileCopyQueueConsumer(fileCopier); + } - public override void Start() - { - MessageBroker.StartConsumer(Globals.FileCopierOptions!, _consumer, isSolo: false); - } + public override void Start() + { + MessageBroker.StartConsumer(Globals.FileCopierOptions!, _consumer, isSolo: false); } } diff --git a/src/SmiServices/Microservices/FileCopier/FileCopyQueueConsumer.cs b/src/SmiServices/Microservices/FileCopier/FileCopyQueueConsumer.cs index e92c79f0a..e41221a35 100644 --- a/src/SmiServices/Microservices/FileCopier/FileCopyQueueConsumer.cs +++ b/src/SmiServices/Microservices/FileCopier/FileCopyQueueConsumer.cs @@ -3,38 +3,37 @@ using SmiServices.Common.Messaging; using System; -namespace SmiServices.Microservices.FileCopier +namespace SmiServices.Microservices.FileCopier; + +public class FileCopyQueueConsumer : Consumer { - public class FileCopyQueueConsumer : Consumer + private readonly IFileCopier _fileCopier; + + public FileCopyQueueConsumer( + IFileCopier fileCopier) + { + _fileCopier = fileCopier; + } + + protected override void ProcessMessageImpl( + 
IMessageHeader header, + ExtractFileMessage message, + ulong tag) { - private readonly IFileCopier _fileCopier; + if (!message.IsIdentifiableExtraction) + throw new ArgumentException("Received a message with IsIdentifiableExtraction not set"); - public FileCopyQueueConsumer( - IFileCopier fileCopier) + try { - _fileCopier = fileCopier; + _fileCopier.ProcessMessage(message, header); } - - protected override void ProcessMessageImpl( - IMessageHeader header, - ExtractFileMessage message, - ulong tag) + catch (ApplicationException e) { - if (!message.IsIdentifiableExtraction) - throw new ArgumentException("Received a message with IsIdentifiableExtraction not set"); - - try - { - _fileCopier.ProcessMessage(message, header); - } - catch (ApplicationException e) - { - // Catch specific exceptions we are aware of, any uncaught will bubble up to the wrapper in ProcessMessage - ErrorAndNack(header, tag, "Error while processing ExtractedFileStatusMessage", e); - return; - } - - Ack(header, tag); + // Catch specific exceptions we are aware of, any uncaught will bubble up to the wrapper in ProcessMessage + ErrorAndNack(header, tag, "Error while processing ExtractedFileStatusMessage", e); + return; } + + Ack(header, tag); } } diff --git a/src/SmiServices/Microservices/FileCopier/IFileCopier.cs b/src/SmiServices/Microservices/FileCopier/IFileCopier.cs index 754881582..f0e1021ec 100644 --- a/src/SmiServices/Microservices/FileCopier/IFileCopier.cs +++ b/src/SmiServices/Microservices/FileCopier/IFileCopier.cs @@ -2,10 +2,9 @@ using SmiServices.Common.Messages.Extraction; -namespace SmiServices.Microservices.FileCopier +namespace SmiServices.Microservices.FileCopier; + +public interface IFileCopier { - public interface IFileCopier - { - void ProcessMessage(ExtractFileMessage message, IMessageHeader header); - } + void ProcessMessage(ExtractFileMessage message, IMessageHeader header); } diff --git a/src/SmiServices/Microservices/IdentifierMapper/BadPatientIDException.cs 
b/src/SmiServices/Microservices/IdentifierMapper/BadPatientIDException.cs index 95669c9db..1d027b8d4 100644 --- a/src/SmiServices/Microservices/IdentifierMapper/BadPatientIDException.cs +++ b/src/SmiServices/Microservices/IdentifierMapper/BadPatientIDException.cs @@ -1,22 +1,21 @@ using System; -namespace SmiServices.Microservices.IdentifierMapper +namespace SmiServices.Microservices.IdentifierMapper; + +/// +/// Exception thrown when the PatientID tag of a dicom file contains invalid/corrupt data +/// +public class BadPatientIDException : Exception { - /// - /// Exception thrown when the PatientID tag of a dicom file contains invalid/corrupt data - /// - public class BadPatientIDException : Exception + public BadPatientIDException() { - public BadPatientIDException() - { - } + } - public BadPatientIDException(string message) : base(message) - { - } + public BadPatientIDException(string message) : base(message) + { + } - public BadPatientIDException(string message, Exception innerException) : base(message, innerException) - { - } + public BadPatientIDException(string message, Exception innerException) : base(message, innerException) + { } } diff --git a/src/SmiServices/Microservices/IdentifierMapper/IdentifierMapper.cs b/src/SmiServices/Microservices/IdentifierMapper/IdentifierMapper.cs index 65113ada8..f6fd2b54a 100644 --- a/src/SmiServices/Microservices/IdentifierMapper/IdentifierMapper.cs +++ b/src/SmiServices/Microservices/IdentifierMapper/IdentifierMapper.cs @@ -3,22 +3,21 @@ using System.Collections.Generic; using System.Diagnostics.CodeAnalysis; -namespace SmiServices.Microservices.IdentifierMapper +namespace SmiServices.Microservices.IdentifierMapper; + +public static class IdentifierMapper { - public static class IdentifierMapper + [ExcludeFromCodeCoverage] + public static int Main(IEnumerable args) { - [ExcludeFromCodeCoverage] - public static int Main(IEnumerable args) - { - int ret = SmiCliInit.ParseAndRun(args, nameof(IdentifierMapper), OnParse); - 
return ret; - } + int ret = SmiCliInit.ParseAndRun(args, nameof(IdentifierMapper), OnParse); + return ret; + } - private static int OnParse(GlobalOptions globals, CliOptions opts) - { - var bootstrapper = new MicroserviceHostBootstrapper(() => new IdentifierMapperHost(globals)); - int ret = bootstrapper.Main(); - return ret; - } + private static int OnParse(GlobalOptions globals, CliOptions opts) + { + var bootstrapper = new MicroserviceHostBootstrapper(() => new IdentifierMapperHost(globals)); + int ret = bootstrapper.Main(); + return ret; } } diff --git a/src/SmiServices/Microservices/IdentifierMapper/IdentifierMapperControlMessageHandler.cs b/src/SmiServices/Microservices/IdentifierMapper/IdentifierMapperControlMessageHandler.cs index bc3a2f9da..73d87a937 100644 --- a/src/SmiServices/Microservices/IdentifierMapper/IdentifierMapperControlMessageHandler.cs +++ b/src/SmiServices/Microservices/IdentifierMapper/IdentifierMapperControlMessageHandler.cs @@ -2,33 +2,32 @@ using SmiServices.Common.Messaging; using SmiServices.Microservices.IdentifierMapper.Swappers; -namespace SmiServices.Microservices.IdentifierMapper +namespace SmiServices.Microservices.IdentifierMapper; + +public class IdentifierMapperControlMessageHandler : IControlMessageHandler { - public class IdentifierMapperControlMessageHandler : IControlMessageHandler - { - private readonly ILogger _logger; + private readonly ILogger _logger; - private readonly ISwapIdentifiers _swapper; + private readonly ISwapIdentifiers _swapper; - public IdentifierMapperControlMessageHandler(ISwapIdentifiers swapper) - { - _swapper = swapper; - _logger = LogManager.GetCurrentClassLogger(); - } + public IdentifierMapperControlMessageHandler(ISwapIdentifiers swapper) + { + _swapper = swapper; + _logger = LogManager.GetCurrentClassLogger(); + } - public void ControlMessageHandler(string action, string? 
message = null) - { - _logger.Info("Received control event with action " + action); + public void ControlMessageHandler(string action, string? message = null) + { + _logger.Info("Received control event with action " + action); - // Only 1 event to handle - cache refresh + // Only 1 event to handle - cache refresh - if (action != "refresh") - return; + if (action != "refresh") + return; - _logger.Info("Refreshing cached swapper dictionary"); + _logger.Info("Refreshing cached swapper dictionary"); - _swapper.ClearCache(); - } + _swapper.ClearCache(); } } diff --git a/src/SmiServices/Microservices/IdentifierMapper/IdentifierMapperHost.cs b/src/SmiServices/Microservices/IdentifierMapper/IdentifierMapperHost.cs index 0530ca9d9..32c55e1c8 100644 --- a/src/SmiServices/Microservices/IdentifierMapper/IdentifierMapperHost.cs +++ b/src/SmiServices/Microservices/IdentifierMapper/IdentifierMapperHost.cs @@ -9,91 +9,90 @@ using System; -namespace SmiServices.Microservices.IdentifierMapper -{ - public class IdentifierMapperHost : MicroserviceHost - { - public readonly IdentifierMapperQueueConsumer Consumer; +namespace SmiServices.Microservices.IdentifierMapper; - private Guid _consumerId; - private readonly IdentifierMapperOptions _consumerOptions; +public class IdentifierMapperHost : MicroserviceHost +{ + public readonly IdentifierMapperQueueConsumer Consumer; - private readonly IProducerModel _producerModel; + private Guid _consumerId; + private readonly IdentifierMapperOptions _consumerOptions; - private readonly ISwapIdentifiers _swapper; + private readonly IProducerModel _producerModel; + private readonly ISwapIdentifiers _swapper; - public IdentifierMapperHost(GlobalOptions options, ISwapIdentifiers? 
swapper = null) - : base(options) - { - _consumerOptions = options.IdentifierMapperOptions!; - FansiImplementations.Load(); - - if (swapper == null) - { - Logger.Info("Not passed a swapper, creating one of type " + options.IdentifierMapperOptions!.SwapperType); - _swapper = ObjectFactory.CreateInstance(options.IdentifierMapperOptions.SwapperType!, typeof(ISwapIdentifiers).Assembly) - ?? throw new Exception("Could not create a swapper"); - } - else - { - _swapper = swapper; - } + public IdentifierMapperHost(GlobalOptions options, ISwapIdentifiers? swapper = null) + : base(options) + { + _consumerOptions = options.IdentifierMapperOptions!; - // If we want to use a Redis server to cache answers then wrap the mapper in a Redis caching swapper - if (!string.IsNullOrWhiteSpace(options.IdentifierMapperOptions!.RedisConnectionString)) - try - { - _swapper = new RedisSwapper(options.IdentifierMapperOptions.RedisConnectionString, _swapper); - } - catch (RedisConnectionException e) - { - // NOTE(rkm 2020-03-30) Log & throw! 
I hate this, but if we don't log here using NLog, then the exception will bubble-up - // and only be printed to STDERR instead of to the log file and may be lost - Logger.Error(e, "Could not connect to Redis"); - throw; - } - - _swapper.Setup(_consumerOptions); - Logger.Info($"Swapper of type {_swapper.GetType()} created"); - - // Batching now handled implicitly as backlog demands - _producerModel = MessageBroker.SetupProducer(options.IdentifierMapperOptions.AnonImagesProducerOptions!, isBatch: true); - - Consumer = new IdentifierMapperQueueConsumer(_producerModel, _swapper) - { - AllowRegexMatching = options.IdentifierMapperOptions.AllowRegexMatching - }; + FansiImplementations.Load(); - // Add our event handler for control messages - AddControlHandler(new IdentifierMapperControlMessageHandler(_swapper)); + if (swapper == null) + { + Logger.Info("Not passed a swapper, creating one of type " + options.IdentifierMapperOptions!.SwapperType); + _swapper = ObjectFactory.CreateInstance(options.IdentifierMapperOptions.SwapperType!, typeof(ISwapIdentifiers).Assembly) + ?? 
throw new Exception("Could not create a swapper"); } - - public override void Start() + else { - _consumerId = MessageBroker.StartConsumer(_consumerOptions, Consumer, isSolo: false); + _swapper = swapper; } - public override void Stop(string reason) - { - if (_consumerId != Guid.Empty) - MessageBroker.StopConsumer(_consumerId, RabbitMQBroker.DefaultOperationTimeout); + // If we want to use a Redis server to cache answers then wrap the mapper in a Redis caching swapper + if (!string.IsNullOrWhiteSpace(options.IdentifierMapperOptions!.RedisConnectionString)) try { - // Wait for any unconfirmed messages before calling stop - _producerModel.WaitForConfirms(); + _swapper = new RedisSwapper(options.IdentifierMapperOptions.RedisConnectionString, _swapper); } - catch (AlreadyClosedException) + catch (RedisConnectionException e) { - // TODO(rkm 2021-04-09) This might be a genuine error if we are not exiting due to a connection loss - Logger.Warn("Got AlreadyClosedException when waiting for confirmations"); + // NOTE(rkm 2020-03-30) Log & throw! 
I hate this, but if we don't log here using NLog, then the exception will bubble-up + // and only be printed to STDERR instead of to the log file and may be lost + Logger.Error(e, "Could not connect to Redis"); + throw; } - _swapper?.LogProgress(Logger, LogLevel.Info); + _swapper.Setup(_consumerOptions); + Logger.Info($"Swapper of type {_swapper.GetType()} created"); - base.Stop(reason); + // Batching now handled implicitly as backlog demands + _producerModel = MessageBroker.SetupProducer(options.IdentifierMapperOptions.AnonImagesProducerOptions!, isBatch: true); + + Consumer = new IdentifierMapperQueueConsumer(_producerModel, _swapper) + { + AllowRegexMatching = options.IdentifierMapperOptions.AllowRegexMatching + }; + + // Add our event handler for control messages + AddControlHandler(new IdentifierMapperControlMessageHandler(_swapper)); + } + + public override void Start() + { + _consumerId = MessageBroker.StartConsumer(_consumerOptions, Consumer, isSolo: false); + } + + public override void Stop(string reason) + { + if (_consumerId != Guid.Empty) + MessageBroker.StopConsumer(_consumerId, RabbitMQBroker.DefaultOperationTimeout); + try + { + // Wait for any unconfirmed messages before calling stop + _producerModel.WaitForConfirms(); } + catch (AlreadyClosedException) + { + // TODO(rkm 2021-04-09) This might be a genuine error if we are not exiting due to a connection loss + Logger.Warn("Got AlreadyClosedException when waiting for confirmations"); + } + + _swapper?.LogProgress(Logger, LogLevel.Info); + + base.Stop(reason); } } diff --git a/src/SmiServices/Microservices/IdentifierMapper/IdentifierMapperQueueConsumer.cs b/src/SmiServices/Microservices/IdentifierMapper/IdentifierMapperQueueConsumer.cs index 881a25732..dd3e5bc21 100644 --- a/src/SmiServices/Microservices/IdentifierMapper/IdentifierMapperQueueConsumer.cs +++ b/src/SmiServices/Microservices/IdentifierMapper/IdentifierMapperQueueConsumer.cs @@ -11,228 +11,227 @@ using System.Text.RegularExpressions; 
using System.Threading; -namespace SmiServices.Microservices.IdentifierMapper +namespace SmiServices.Microservices.IdentifierMapper; + +public class IdentifierMapperQueueConsumer : Consumer { - public class IdentifierMapperQueueConsumer : Consumer - { - public bool AllowRegexMatching { get; set; } + public bool AllowRegexMatching { get; set; } - private readonly IProducerModel _producer; - private readonly ISwapIdentifiers _swapper; + private readonly IProducerModel _producer; + private readonly ISwapIdentifiers _swapper; - private readonly Regex _patientIdRegex = new("\"00100020\":{\"vr\":\"LO\",\"Value\":\\[\"(\\d*)\"]", RegexOptions.IgnoreCase); + private readonly Regex _patientIdRegex = new("\"00100020\":{\"vr\":\"LO\",\"Value\":\\[\"(\\d*)\"]", RegexOptions.IgnoreCase); - private readonly BlockingCollection> msgq = []; - private readonly Thread acker; + private readonly BlockingCollection> msgq = []; + private readonly Thread acker; - public IdentifierMapperQueueConsumer(IProducerModel producer, ISwapIdentifiers swapper) - { - _producer = producer; - _swapper = swapper; - acker = new Thread(() => + public IdentifierMapperQueueConsumer(IProducerModel producer, ISwapIdentifiers swapper) + { + _producer = producer; + _swapper = swapper; + acker = new Thread(() => + { + try { - try + while (true) { - while (true) - { - List> done = []; - Tuple t; - t = msgq.Take(); + List> done = []; + Tuple t; + t = msgq.Take(); - lock (_producer) + lock (_producer) + { + _producer.SendMessage(t.Item1, t.Item2, ""); + done.Add(new Tuple(t.Item2, t.Item3)); + while (msgq.TryTake(out t!)) { _producer.SendMessage(t.Item1, t.Item2, ""); done.Add(new Tuple(t.Item2, t.Item3)); - while (msgq.TryTake(out t!)) - { - _producer.SendMessage(t.Item1, t.Item2, ""); - done.Add(new Tuple(t.Item2, t.Item3)); - } - _producer.WaitForConfirms(); - foreach (var ack in done) - { - Ack(ack.Item1, ack.Item2); - } + } + _producer.WaitForConfirms(); + foreach (var ack in done) + { + Ack(ack.Item1, 
ack.Item2); } } } - catch (InvalidOperationException) - { - // The BlockingCollection will throw this exception when closed by Shutdown() - return; - } - }) - { - IsBackground = true - }; - acker.Start(); - } - - /// - /// Cleanly shut this process down, draining the Ack queue and ending that thread - /// - public override void Shutdown() + } + catch (InvalidOperationException) + { + // The BlockingCollection will throw this exception when closed by Shutdown() + return; + } + }) { - msgq.CompleteAdding(); - acker.Join(); - } + IsBackground = true + }; + acker.Start(); + } - protected override void ProcessMessageImpl(IMessageHeader header, DicomFileMessage msg, ulong tag) - { - string? errorReason = null; - var success = false; + /// + /// Cleanly shut this process down, draining the Ack queue and ending that thread + /// + public override void Shutdown() + { + msgq.CompleteAdding(); + acker.Join(); + } - try - { - if (AllowRegexMatching) - { - Match match = _patientIdRegex.Match(msg.DicomDataset); - - //Try to do swap using regex looking for a chi (10 digits in length) - if (match.Success) - { - string patId = match.Groups[1].Value; - if (!string.IsNullOrEmpty(patId) && patId.Trim().Length == 10) - success = SwapIdentifier(msg, patId.Trim(), out errorReason); - } - } + protected override void ProcessMessageImpl(IMessageHeader header, DicomFileMessage msg, ulong tag) + { + string? 
errorReason = null; + var success = false; - if (!success) - success = SwapIdentifier(msg, out errorReason); - } - catch (BadPatientIDException e) - { - ErrorAndNack(header, tag, "Error while processing DicomFileMessage", e); - return; - } - catch (ApplicationException e) + try + { + if (AllowRegexMatching) { - // Catch specific exceptions we are aware of, any uncaught will bubble up to the wrapper in ProcessMessage + Match match = _patientIdRegex.Match(msg.DicomDataset); - ErrorAndNack(header, tag, "Error while processing DicomFileMessage", e); - return; + //Try to do swap using regex looking for a chi (10 digits in length) + if (match.Success) + { + string patId = match.Groups[1].Value; + if (!string.IsNullOrEmpty(patId) && patId.Trim().Length == 10) + success = SwapIdentifier(msg, patId.Trim(), out errorReason); + } } if (!success) - { - Logger.Info($"Could not swap identifiers for message {header.MessageGuid}. Reason was: {errorReason}"); - ErrorAndNack(header, tag, errorReason!, new Exception()); - } - else - { - // Enqueue the outgoing message. Request will be acked by the queue handling thread above. - msgq.Add(new Tuple(msg, header, tag)); - } + success = SwapIdentifier(msg, out errorReason); } - - private bool SwapIdentifier(DicomFileMessage msg, string patientId, out string? errorReason) + catch (BadPatientIDException e) { - string? 
to = _swapper.GetSubstitutionFor(patientId, out errorReason); - - if (to == null) - { - errorReason = $"Swapper {_swapper} returned null"; - return false; - } + ErrorAndNack(header, tag, "Error while processing DicomFileMessage", e); + return; + } + catch (ApplicationException e) + { + // Catch specific exceptions we are aware of, any uncaught will bubble up to the wrapper in ProcessMessage - msg.DicomDataset = msg.DicomDataset.Replace($":[\"{patientId}\"]", $":[\"{to}\"]"); + ErrorAndNack(header, tag, "Error while processing DicomFileMessage", e); + return; + } - return true; + if (!success) + { + Logger.Info($"Could not swap identifiers for message {header.MessageGuid}. Reason was: {errorReason}"); + ErrorAndNack(header, tag, errorReason!, new Exception()); + } + else + { + // Enqueue the outgoing message. Request will be acked by the queue handling thread above. + msgq.Add(new Tuple(msg, header, tag)); } + } - /// - /// Swaps the patient ID in the for its anonymous mapping. Returns true if a mapping - /// was found or false if it was not possible to get a mapping for some reason (e.g. tag is missing or no mapping - /// was found). - /// - /// - /// - /// - /// Thrown if PatientID tag is corrupt - public bool SwapIdentifier(DicomFileMessage msg, [NotNullWhen(false)] out string? reason) - { - DicomDataset ds; - try - { - ds = DicomTypeTranslater.DeserializeJsonToDataset(msg.DicomDataset); - } - catch (Exception e) - { - throw new ApplicationException("Failed to deserialize dataset", e); - } + private bool SwapIdentifier(DicomFileMessage msg, string patientId, out string? errorReason) + { + string? 
to = _swapper.GetSubstitutionFor(patientId, out errorReason); - if (!ds.Contains(DicomTag.PatientID)) - { - reason = "Dataset did not contain PatientID"; - return false; - } + if (to == null) + { + errorReason = $"Swapper {_swapper} returned null"; + return false; + } - var from = GetPatientID(ds); + msg.DicomDataset = msg.DicomDataset.Replace($":[\"{patientId}\"]", $":[\"{to}\"]"); - if (string.IsNullOrWhiteSpace(from)) - { - reason = "PatientID was blank"; - return false; - } + return true; + } - string? to = _swapper.GetSubstitutionFor(from, out reason); + /// + /// Swaps the patient ID in the for its anonymous mapping. Returns true if a mapping + /// was found or false if it was not possible to get a mapping for some reason (e.g. tag is missing or no mapping + /// was found). + /// + /// + /// + /// + /// Thrown if PatientID tag is corrupt + public bool SwapIdentifier(DicomFileMessage msg, [NotNullWhen(false)] out string? reason) + { + DicomDataset ds; + try + { + ds = DicomTypeTranslater.DeserializeJsonToDataset(msg.DicomDataset); + } + catch (Exception e) + { + throw new ApplicationException("Failed to deserialize dataset", e); + } - if (to == null) - { - reason = $"Swapper {_swapper} returned null"; - return false; - } + if (!ds.Contains(DicomTag.PatientID)) + { + reason = "Dataset did not contain PatientID"; + return false; + } - // Update the JSON deserialized dicom dataset - ds.AddOrUpdate(DicomTag.PatientID, to); + var from = GetPatientID(ds); - string updatedJson; + if (string.IsNullOrWhiteSpace(from)) + { + reason = "PatientID was blank"; + return false; + } - try - { - updatedJson = DicomTypeTranslater.SerializeDatasetToJson(ds); - } - catch (Exception e) - { - throw new ApplicationException("Failed to serialize dataset", e); - } + string? 
to = _swapper.GetSubstitutionFor(from, out reason); + if (to == null) + { + reason = $"Swapper {_swapper} returned null"; + return false; + } - // Unlikely, but should still check - if (updatedJson == null) - { - reason = "Updated json string was null"; - return false; - } + // Update the JSON deserialized dicom dataset + ds.AddOrUpdate(DicomTag.PatientID, to); - // Override the message DicomDataset with the new serialized dataset - msg.DicomDataset = updatedJson; + string updatedJson; - return true; + try + { + updatedJson = DicomTypeTranslater.SerializeDatasetToJson(ds); } + catch (Exception e) + { + throw new ApplicationException("Failed to serialize dataset", e); + } + - private static string? GetPatientID(DicomDataset ds) + // Unlikely, but should still check + if (updatedJson == null) { - var val = DicomTypeTranslaterReader.GetCSharpValue(ds, DicomTag.PatientID); + reason = "Updated json string was null"; + return false; + } - switch (val) - { - case null: - return null; - case string s: - return s; - case string[] arr: - { - var unique = arr.Where(a => !string.IsNullOrWhiteSpace(a)).Distinct().ToArray(); - if (unique.Length == 0) - return null; - if (unique.Length == 1) - return unique[0]; - throw new BadPatientIDException($"DicomDataset had multiple values for PatientID:{string.Join("\\", arr)}"); - } - default: - throw new BadPatientIDException($"DicomDataset had bad Type for PatientID:{val.GetType()}"); - } + // Override the message DicomDataset with the new serialized dataset + msg.DicomDataset = updatedJson; + + return true; + } + + private static string? 
GetPatientID(DicomDataset ds) + { + var val = DicomTypeTranslaterReader.GetCSharpValue(ds, DicomTag.PatientID); + + switch (val) + { + case null: + return null; + case string s: + return s; + case string[] arr: + { + var unique = arr.Where(a => !string.IsNullOrWhiteSpace(a)).Distinct().ToArray(); + if (unique.Length == 0) + return null; + if (unique.Length == 1) + return unique[0]; + throw new BadPatientIDException($"DicomDataset had multiple values for PatientID:{string.Join("\\", arr)}"); + } + default: + throw new BadPatientIDException($"DicomDataset had bad Type for PatientID:{val.GetType()}"); } } } diff --git a/src/SmiServices/Microservices/IdentifierMapper/Swappers/ForGuidIdentifierSwapper.cs b/src/SmiServices/Microservices/IdentifierMapper/Swappers/ForGuidIdentifierSwapper.cs index 109b5fda7..091613db6 100644 --- a/src/SmiServices/Microservices/IdentifierMapper/Swappers/ForGuidIdentifierSwapper.cs +++ b/src/SmiServices/Microservices/IdentifierMapper/Swappers/ForGuidIdentifierSwapper.cs @@ -8,180 +8,179 @@ using System.Text; using TypeGuesser; -namespace SmiServices.Microservices.IdentifierMapper.Swappers +namespace SmiServices.Microservices.IdentifierMapper.Swappers; + +/// +/// Connects to a (possibly empty) database containing values to swap identifiers with. If no valid replacement found for a value, +/// we create a new , insert it into the database, and return it as the swapped value. Keeps a cache of swap values +/// +public class ForGuidIdentifierSwapper : SwapIdentifiers { + private readonly ILogger _logger; + + private IMappingTableOptions? _options; + + private DiscoveredTable? _table; + + private readonly Dictionary _cachedAnswers = []; + private readonly object _oCacheLock = new(); + + private int _swapColumnLength; + + public ForGuidIdentifierSwapper() + { + _logger = LogManager.GetCurrentClassLogger(); + } + /// - /// Connects to a (possibly empty) database containing values to swap identifiers with. 
If no valid replacement found for a value, - /// we create a new , insert it into the database, and return it as the swapped value. Keeps a cache of swap values + /// Connects to the specified swapping table if it exists, or creates it /// - public class ForGuidIdentifierSwapper : SwapIdentifiers + /// + public override void Setup(IMappingTableOptions mappingTableOptions) { - private readonly ILogger _logger; - - private IMappingTableOptions? _options; - - private DiscoveredTable? _table; + // TODO(rkm 2021-04-09) Check if this can be in a constructor instead? + _options = mappingTableOptions; + _table = _options.Discover(); - private readonly Dictionary _cachedAnswers = []; - private readonly object _oCacheLock = new(); + using (new TimeTracker(DatabaseStopwatch)) + CreateTableIfNotExists(); + } - private int _swapColumnLength; + public override string? GetSubstitutionFor(string toSwap, out string? reason) + { + reason = null; - public ForGuidIdentifierSwapper() + if (_swapColumnLength > 0 && toSwap.Length > _swapColumnLength) { - _logger = LogManager.GetCurrentClassLogger(); + reason = $"Supplied value was too long ({toSwap.Length}) - max allowed is ({_swapColumnLength})"; + Invalid++; + return null; } - /// - /// Connects to the specified swapping table if it exists, or creates it - /// - /// - public override void Setup(IMappingTableOptions mappingTableOptions) + string insertSql; + lock (_oCacheLock) { - // TODO(rkm 2021-04-09) Check if this can be in a constructor instead? - _options = mappingTableOptions; - _table = _options.Discover(); + if (_cachedAnswers.TryGetValue(toSwap, out string? value)) + { + CacheHit++; + Success++; + return value; + } - using (new TimeTracker(DatabaseStopwatch)) - CreateTableIfNotExists(); - } - public override string? GetSubstitutionFor(string toSwap, out string? 
reason) - { - reason = null; + var guid = Guid.NewGuid().ToString(); - if (_swapColumnLength > 0 && toSwap.Length > _swapColumnLength) + insertSql = _options!.MappingDatabaseType switch { - reason = $"Supplied value was too long ({toSwap.Length}) - max allowed is ({_swapColumnLength})"; - Invalid++; - return null; - } + FAnsi.DatabaseType.MicrosoftSQLServer => string.Format( + "if not exists( select 1 from {0} where {1} = '{3}') insert into {0}({1},{2}) values ('{3}','{4}')", + _table!.GetRuntimeName(), _options.SwapColumnName, _options.ReplacementColumnName, toSwap, + guid), + FAnsi.DatabaseType.MySql => + $"INSERT IGNORE INTO {_table!.GetFullyQualifiedName()} SET {_options.SwapColumnName} = '{toSwap}', {_options.ReplacementColumnName} = '{guid}';", + FAnsi.DatabaseType.PostgreSql => + $"INSERT INTO {_table!.GetFullyQualifiedName()} ({_options.SwapColumnName},{_options.ReplacementColumnName}) VALUES ('{toSwap}','{guid}') ON CONFLICT DO NOTHING;", + _ => throw new ArgumentOutOfRangeException(_options.MappingConnectionString) + }; - string insertSql; - lock (_oCacheLock) + using (new TimeTracker(DatabaseStopwatch)) + using (var con = _table.Database.Server.BeginNewTransactedConnection()) { - if (_cachedAnswers.TryGetValue(toSwap, out string? value)) + DbCommand cmd = _table.Database.Server.GetCommand(insertSql, con); + + try { - CacheHit++; - Success++; - return value; + cmd.ExecuteNonQuery(); + } + catch (Exception e) + { + Invalid++; + throw new Exception("Failed to perform lookup of toSwap with SQL:" + insertSql, e); } + //guid may not have been inserted. Just because we don't have it in our cache doesn't mean that other people might + //not have allocated that one at the same time. - var guid = Guid.NewGuid().ToString(); + DbCommand cmd2 = _table.Database.Server.GetCommand($"SELECT {_options.ReplacementColumnName} FROM {_table.GetFullyQualifiedName()} WHERE {_options.SwapColumnName} = '{toSwap}' ", con); + var syncAnswer = (string?)cmd2.ExecuteScalar() ?? 
throw new Exception("Replacement value was null"); - insertSql = _options!.MappingDatabaseType switch - { - FAnsi.DatabaseType.MicrosoftSQLServer => string.Format( - "if not exists( select 1 from {0} where {1} = '{3}') insert into {0}({1},{2}) values ('{3}','{4}')", - _table!.GetRuntimeName(), _options.SwapColumnName, _options.ReplacementColumnName, toSwap, - guid), - FAnsi.DatabaseType.MySql => - $"INSERT IGNORE INTO {_table!.GetFullyQualifiedName()} SET {_options.SwapColumnName} = '{toSwap}', {_options.ReplacementColumnName} = '{guid}';", - FAnsi.DatabaseType.PostgreSql => - $"INSERT INTO {_table!.GetFullyQualifiedName()} ({_options.SwapColumnName},{_options.ReplacementColumnName}) VALUES ('{toSwap}','{guid}') ON CONFLICT DO NOTHING;", - _ => throw new ArgumentOutOfRangeException(_options.MappingConnectionString) - }; - - using (new TimeTracker(DatabaseStopwatch)) - using (var con = _table.Database.Server.BeginNewTransactedConnection()) - { - DbCommand cmd = _table.Database.Server.GetCommand(insertSql, con); - - try - { - cmd.ExecuteNonQuery(); - } - catch (Exception e) - { - Invalid++; - throw new Exception("Failed to perform lookup of toSwap with SQL:" + insertSql, e); - } - - //guid may not have been inserted. Just because we don't have it in our cache doesn't mean that other people might - //not have allocated that one at the same time. - - DbCommand cmd2 = _table.Database.Server.GetCommand($"SELECT {_options.ReplacementColumnName} FROM {_table.GetFullyQualifiedName()} WHERE {_options.SwapColumnName} = '{toSwap}' ", con); - var syncAnswer = (string?)cmd2.ExecuteScalar() ?? 
throw new Exception("Replacement value was null"); - - _cachedAnswers.Add(toSwap, syncAnswer); - - con.ManagedTransaction?.CommitAndCloseConnection(); - Success++; - CacheMiss++; - return syncAnswer; - } + _cachedAnswers.Add(toSwap, syncAnswer); + + con.ManagedTransaction?.CommitAndCloseConnection(); + Success++; + CacheMiss++; + return syncAnswer; } } + } - /// - /// Clears the in-memory cache of swap pairs - /// - public override void ClearCache() + /// + /// Clears the in-memory cache of swap pairs + /// + public override void ClearCache() + { + lock (_oCacheLock) { - lock (_oCacheLock) - { - _cachedAnswers.Clear(); - _logger.Info("Cache cleared"); - } + _cachedAnswers.Clear(); + _logger.Info("Cache cleared"); } + } - private void CreateTableIfNotExists() + private void CreateTableIfNotExists() + { + try { - try - { - ArgumentNullException.ThrowIfNull(_table, nameof(_table)); - ArgumentNullException.ThrowIfNull(_options, nameof(_options)); - ArgumentNullException.ThrowIfNull(_options.SwapColumnName, nameof(_options.SwapColumnName)); - ArgumentNullException.ThrowIfNull(_options.ReplacementColumnName, nameof(_options.ReplacementColumnName)); - - //create the database if it doesn't exist - if (!_table.Database.Exists()) - _table.Database.Create(); + ArgumentNullException.ThrowIfNull(_table, nameof(_table)); + ArgumentNullException.ThrowIfNull(_options, nameof(_options)); + ArgumentNullException.ThrowIfNull(_options.SwapColumnName, nameof(_options.SwapColumnName)); + ArgumentNullException.ThrowIfNull(_options.ReplacementColumnName, nameof(_options.ReplacementColumnName)); - //create the table if it doesn't exist - if (!_table.Exists()) - { - _logger.Info("Guid mapping table does not exist, creating it now"); + //create the database if it doesn't exist + if (!_table.Database.Exists()) + _table.Database.Create(); - _table.Database.CreateTable(_table.GetRuntimeName(), - [ - new DatabaseColumnRequest(_options.SwapColumnName, new DatabaseTypeRequest(typeof(string), 10), 
false){ IsPrimaryKey = true }, - new DatabaseColumnRequest(_options.ReplacementColumnName,new DatabaseTypeRequest(typeof(string), 255), false)] - ); - } + //create the table if it doesn't exist + if (!_table.Exists()) + { + _logger.Info("Guid mapping table does not exist, creating it now"); - if (_table.Exists()) - _logger.Info("Guid mapping table exist (" + _table + ")"); - else - throw new Exception("Table creation did not result in table existing!"); + _table.Database.CreateTable(_table.GetRuntimeName(), + [ + new DatabaseColumnRequest(_options.SwapColumnName, new DatabaseTypeRequest(typeof(string), 10), false){ IsPrimaryKey = true }, + new DatabaseColumnRequest(_options.ReplacementColumnName,new DatabaseTypeRequest(typeof(string), 255), false)] + ); + } - _logger.Info("Checking for column " + _options!.SwapColumnName); - _swapColumnLength = _table.DiscoverColumn(_options.SwapColumnName).DataType?.GetLengthIfString() ?? -1; + if (_table.Exists()) + _logger.Info("Guid mapping table exist (" + _table + ")"); + else + throw new Exception("Table creation did not result in table existing!"); - _logger.Info("Checking for column " + _options.ReplacementColumnName); - _table.DiscoverColumn(_options.ReplacementColumnName); - } - catch (Exception e) - { - var sb = new StringBuilder(); + _logger.Info("Checking for column " + _options!.SwapColumnName); + _swapColumnLength = _table.DiscoverColumn(_options.SwapColumnName).DataType?.GetLengthIfString() ?? 
-1; - if (_table != null) - { - sb.AppendLine("Server:" + _table.Database.Server.Name); - sb.AppendLine("Database:" + _table.Database.GetRuntimeName()); - sb.AppendLine("Username:" + _table.Database.Server.ExplicitUsernameIfAny); - sb.AppendLine("Table:" + _table.GetFullyQualifiedName()); - } + _logger.Info("Checking for column " + _options.ReplacementColumnName); + _table.DiscoverColumn(_options.ReplacementColumnName); + } + catch (Exception e) + { + var sb = new StringBuilder(); - throw new Exception("Error creating/checking Guid substitution table on:" + Environment.NewLine + sb, e); + if (_table != null) + { + sb.AppendLine("Server:" + _table.Database.Server.Name); + sb.AppendLine("Database:" + _table.Database.GetRuntimeName()); + sb.AppendLine("Username:" + _table.Database.Server.ExplicitUsernameIfAny); + sb.AppendLine("Table:" + _table.GetFullyQualifiedName()); } - } - public override DiscoveredTable? GetGuidTableIfAny(IMappingTableOptions options) - { - return options.Discover(); + throw new Exception("Error creating/checking Guid substitution table on:" + Environment.NewLine + sb, e); } } + + public override DiscoveredTable? 
GetGuidTableIfAny(IMappingTableOptions options) + { + return options.Discover(); + } } diff --git a/src/SmiServices/Microservices/IdentifierMapper/Swappers/ISwapIdentifiers.cs b/src/SmiServices/Microservices/IdentifierMapper/Swappers/ISwapIdentifiers.cs index 5a2c90534..e0846d7cd 100644 --- a/src/SmiServices/Microservices/IdentifierMapper/Swappers/ISwapIdentifiers.cs +++ b/src/SmiServices/Microservices/IdentifierMapper/Swappers/ISwapIdentifiers.cs @@ -2,40 +2,39 @@ using NLog; using SmiServices.Common.Options; -namespace SmiServices.Microservices.IdentifierMapper.Swappers +namespace SmiServices.Microservices.IdentifierMapper.Swappers; + +public interface ISwapIdentifiers { - public interface ISwapIdentifiers - { - /// - /// Setup the swapper - /// - /// - void Setup(IMappingTableOptions mappingTableOptions); + /// + /// Setup the swapper + /// + /// + void Setup(IMappingTableOptions mappingTableOptions); - /// - /// Returns the substitution identifier for toSwap or the reason why no substitution is possible - /// - /// - /// - /// - string? GetSubstitutionFor(string toSwap, out string? reason); + /// + /// Returns the substitution identifier for toSwap or the reason why no substitution is possible + /// + /// + /// + /// + string? GetSubstitutionFor(string toSwap, out string? reason); - /// - /// Clear the mapping cache (if exists) and reload - /// - void ClearCache(); + /// + /// Clear the mapping cache (if exists) and reload + /// + void ClearCache(); - /// - /// Report on the current number of swapped identifiers - /// - /// - /// - void LogProgress(ILogger logger, LogLevel level); + /// + /// Report on the current number of swapped identifiers + /// + /// + /// + void LogProgress(ILogger logger, LogLevel level); - /// - /// If there is a map table based on schema then this should return the pointer to that table. Otherwise should return null - /// - /// - DiscoveredTable? 
GetGuidTableIfAny(IMappingTableOptions options); - } + /// + /// If there is a map table based on schema then this should return the pointer to that table. Otherwise should return null + /// + /// + DiscoveredTable? GetGuidTableIfAny(IMappingTableOptions options); } diff --git a/src/SmiServices/Microservices/IdentifierMapper/Swappers/PreloadTableSwapper.cs b/src/SmiServices/Microservices/IdentifierMapper/Swappers/PreloadTableSwapper.cs index 2ccf03e34..13c59b971 100644 --- a/src/SmiServices/Microservices/IdentifierMapper/Swappers/PreloadTableSwapper.cs +++ b/src/SmiServices/Microservices/IdentifierMapper/Swappers/PreloadTableSwapper.cs @@ -8,102 +8,101 @@ using System.Diagnostics; using System.Diagnostics.CodeAnalysis; -namespace SmiServices.Microservices.IdentifierMapper.Swappers +namespace SmiServices.Microservices.IdentifierMapper.Swappers; + +/// +/// Connects to a database containing values to swap identifiers with, and loads it entirely into memory +/// +public class PreloadTableSwapper : SwapIdentifiers { - /// - /// Connects to a database containing values to swap identifiers with, and loads it entirely into memory - /// - public class PreloadTableSwapper : SwapIdentifiers - { - private readonly ILogger _logger; + private readonly ILogger _logger; - private IMappingTableOptions? _options; + private IMappingTableOptions? _options; - private Dictionary? _mapping; - private readonly object _oDictionaryLock = new(); + private Dictionary? 
_mapping; + private readonly object _oDictionaryLock = new(); - public PreloadTableSwapper() - { - _logger = LogManager.GetCurrentClassLogger(); - } + public PreloadTableSwapper() + { + _logger = LogManager.GetCurrentClassLogger(); + } - /// - /// Preloads the swap table into memory - /// - /// - [MemberNotNull(nameof(_mapping))] - public override void Setup(IMappingTableOptions options) - { - _logger.Info("Setting up mapping dictionary"); + /// + /// Preloads the swap table into memory + /// + /// + [MemberNotNull(nameof(_mapping))] + public override void Setup(IMappingTableOptions options) + { + _logger.Info("Setting up mapping dictionary"); - using (new TimeTracker(DatabaseStopwatch)) - lock (_oDictionaryLock) - { - _options = options; + using (new TimeTracker(DatabaseStopwatch)) + lock (_oDictionaryLock) + { + _options = options; - DiscoveredTable tbl = options.Discover(); + DiscoveredTable tbl = options.Discover(); - using DbConnection con = tbl.Database.Server.GetConnection(); - con.Open(); + using DbConnection con = tbl.Database.Server.GetConnection(); + con.Open(); - string sql = - $"SELECT {options.SwapColumnName}, {options.ReplacementColumnName} FROM {tbl.GetFullyQualifiedName()}"; - _logger.Debug($"SQL: {sql}"); + string sql = + $"SELECT {options.SwapColumnName}, {options.ReplacementColumnName} FROM {tbl.GetFullyQualifiedName()}"; + _logger.Debug($"SQL: {sql}"); - DbCommand cmd = tbl.Database.Server.GetCommand(sql, con); - cmd.CommandTimeout = _options.TimeoutInSeconds; + DbCommand cmd = tbl.Database.Server.GetCommand(sql, con); + cmd.CommandTimeout = _options.TimeoutInSeconds; - DbDataReader dataReader = cmd.ExecuteReader(); + DbDataReader dataReader = cmd.ExecuteReader(); - _mapping = []; + _mapping = []; - _logger.Debug("Populating dictionary from mapping table..."); - Stopwatch sw = Stopwatch.StartNew(); + _logger.Debug("Populating dictionary from mapping table..."); + Stopwatch sw = Stopwatch.StartNew(); - while (dataReader.Read()) - 
_mapping.Add(dataReader[_options.SwapColumnName!].ToString()!, dataReader[_options.ReplacementColumnName!].ToString()!); + while (dataReader.Read()) + _mapping.Add(dataReader[_options.SwapColumnName!].ToString()!, dataReader[_options.ReplacementColumnName!].ToString()!); - _logger.Debug("Mapping dictionary populated with " + _mapping.Count + " entries in " + sw.Elapsed.ToString("g")); - } - } + _logger.Debug("Mapping dictionary populated with " + _mapping.Count + " entries in " + sw.Elapsed.ToString("g")); + } + } - public override string? GetSubstitutionFor(string toSwap, out string? reason) + public override string? GetSubstitutionFor(string toSwap, out string? reason) + { + lock (_oDictionaryLock) { - lock (_oDictionaryLock) + if (!_mapping!.ContainsKey(toSwap)) { - if (!_mapping!.ContainsKey(toSwap)) - { - reason = "PatientID was not in mapping table"; - Fail++; - CacheMiss++; - return null; - } - - reason = null; + reason = "PatientID was not in mapping table"; + Fail++; + CacheMiss++; + return null; } - Success++; - CacheHit++; - return _mapping[toSwap]; + reason = null; } - /// - /// Clears the cached table and reloads it from the database - /// - public override void ClearCache() - { - _logger.Debug("Clearing cache and reloading"); + Success++; + CacheHit++; + return _mapping[toSwap]; + } - if (_options == null) - throw new ApplicationException("ClearCache called before mapping options set"); + /// + /// Clears the cached table and reloads it from the database + /// + public override void ClearCache() + { + _logger.Debug("Clearing cache and reloading"); - Setup(_options); - } + if (_options == null) + throw new ApplicationException("ClearCache called before mapping options set"); - public override DiscoveredTable? GetGuidTableIfAny(IMappingTableOptions options) - { - return null; - } + Setup(_options); + } + + public override DiscoveredTable? 
GetGuidTableIfAny(IMappingTableOptions options) + { + return null; } } diff --git a/src/SmiServices/Microservices/IdentifierMapper/Swappers/RedisSwapper.cs b/src/SmiServices/Microservices/IdentifierMapper/Swappers/RedisSwapper.cs index 55cc882f6..498f2c494 100644 --- a/src/SmiServices/Microservices/IdentifierMapper/Swappers/RedisSwapper.cs +++ b/src/SmiServices/Microservices/IdentifierMapper/Swappers/RedisSwapper.cs @@ -7,135 +7,134 @@ using System.Collections.Concurrent; using System.Threading; -namespace SmiServices.Microservices.IdentifierMapper.Swappers +namespace SmiServices.Microservices.IdentifierMapper.Swappers; + +/// +/// A swapper that wraps and uses a Redis database (on localhost) +/// to store cached values +/// +public class RedisSwapper : SwapIdentifiers, IDisposable { - /// - /// A swapper that wraps and uses a Redis database (on localhost) - /// to store cached values - /// - public class RedisSwapper : SwapIdentifiers, IDisposable - { - private readonly ConnectionMultiplexer _redis; - private readonly ISwapIdentifiers _hostedSwapper; + private readonly ConnectionMultiplexer _redis; + private readonly ISwapIdentifiers _hostedSwapper; - private const string NullString = "NO MATCH"; + private const string NullString = "NO MATCH"; - private readonly MemoryCache _cache = new(new MemoryCacheOptions - { - SizeLimit = 1024 - }); + private readonly MemoryCache _cache = new(new MemoryCacheOptions + { + SizeLimit = 1024 + }); - private readonly ConcurrentDictionary _locks = new(); + private readonly ConcurrentDictionary _locks = new(); - private readonly ILogger _logger; + private readonly ILogger _logger; - public RedisSwapper(string redisHost, ISwapIdentifiers wrappedSwapper) - { - _logger = LogManager.GetCurrentClassLogger(); - _redis = ConnectionMultiplexer.Connect(redisHost); - _hostedSwapper = wrappedSwapper; - } + public RedisSwapper(string redisHost, ISwapIdentifiers wrappedSwapper) + { + _logger = LogManager.GetCurrentClassLogger(); + _redis = 
ConnectionMultiplexer.Connect(redisHost); + _hostedSwapper = wrappedSwapper; + } - public override void Setup(IMappingTableOptions mappingTableOptions) - { - _hostedSwapper.Setup(mappingTableOptions); - } + public override void Setup(IMappingTableOptions mappingTableOptions) + { + _hostedSwapper.Setup(mappingTableOptions); + } - public override string? GetSubstitutionFor(string toSwap, out string? reason) - { - reason = null; + public override string? GetSubstitutionFor(string toSwap, out string? reason) + { + reason = null; - //lookup in memory - if (!_cache.TryGetValue(toSwap, out string? result)) + //lookup in memory + if (!_cache.TryGetValue(toSwap, out string? result)) + { + SemaphoreSlim locket = _locks.GetOrAdd(toSwap, k => new SemaphoreSlim(1, 1)); + locket.Wait(); + try { - SemaphoreSlim locket = _locks.GetOrAdd(toSwap, k => new SemaphoreSlim(1, 1)); - locket.Wait(); - try + if (!_cache.TryGetValue(toSwap, out result)) { - if (!_cache.TryGetValue(toSwap, out result)) + // Now try Redis cache + IDatabase db = _redis.GetDatabase(); + var val = db.StringGet(toSwap); + //we have a cached answer (which might be null) + if (val.HasValue) { - // Now try Redis cache - IDatabase db = _redis.GetDatabase(); - var val = db.StringGet(toSwap); - //we have a cached answer (which might be null) - if (val.HasValue) - { - result = val.ToString(); - Interlocked.Increment(ref CacheHit); - } - else - { - //we have no cached answer from Redis - Interlocked.Increment(ref CacheMiss); - - //Go to the hosted swapper - lock (_hostedSwapper) - { - result = _hostedSwapper.GetSubstitutionFor(toSwap, out reason); - } + result = val.ToString(); + Interlocked.Increment(ref CacheHit); + } + else + { + //we have no cached answer from Redis + Interlocked.Increment(ref CacheMiss); - //and cache the result (even if it is null - no lookup match found) - db.StringSet(toSwap, result ?? 
NullString); + //Go to the hosted swapper + lock (_hostedSwapper) + { + result = _hostedSwapper.GetSubstitutionFor(toSwap, out reason); } - _cache.Set(toSwap, result ?? NullString, new MemoryCacheEntryOptions - { - Size = 1 - }); + //and cache the result (even if it is null - no lookup match found) + db.StringSet(toSwap, result ?? NullString); } - } - finally - { - locket.Release(); - } - } - else - { - Interlocked.Increment(ref CacheHit); - } - if (string.Equals(NullString, result)) - { - result = null; - reason = $"Value '{toSwap}' was cached in Redis as missing (i.e. no mapping was found)"; + _cache.Set(toSwap, result ?? NullString, new MemoryCacheEntryOptions + { + Size = 1 + }); + } } - - if (result == null) - Interlocked.Increment(ref Fail); - else + finally { - int res = Interlocked.Increment(ref Success); - if (res % 1000 == 0) - LogProgress(_logger, LogLevel.Info); + locket.Release(); } - - return result; } - - - public override void ClearCache() + else { - _hostedSwapper.ClearCache(); + Interlocked.Increment(ref CacheHit); } - public void Dispose() + if (string.Equals(NullString, result)) { - GC.SuppressFinalize(this); - _redis?.Dispose(); + result = null; + reason = $"Value '{toSwap}' was cached in Redis as missing (i.e. no mapping was found)"; } - public override void LogProgress(ILogger logger, LogLevel level) + if (result == null) + Interlocked.Increment(ref Fail); + else { - //output the Redis stats - base.LogProgress(logger, level); - - //output the hosted mapper stats - _hostedSwapper.LogProgress(logger, level); + int res = Interlocked.Increment(ref Success); + if (res % 1000 == 0) + LogProgress(_logger, LogLevel.Info); } - public override DiscoveredTable? 
GetGuidTableIfAny(IMappingTableOptions options) - { - return _hostedSwapper.GetGuidTableIfAny(options); - } + return result; + } + + + public override void ClearCache() + { + _hostedSwapper.ClearCache(); + } + + public void Dispose() + { + GC.SuppressFinalize(this); + _redis?.Dispose(); + } + + public override void LogProgress(ILogger logger, LogLevel level) + { + //output the Redis stats + base.LogProgress(logger, level); + + //output the hosted mapper stats + _hostedSwapper.LogProgress(logger, level); + } + + public override DiscoveredTable? GetGuidTableIfAny(IMappingTableOptions options) + { + return _hostedSwapper.GetGuidTableIfAny(options); } } diff --git a/src/SmiServices/Microservices/IdentifierMapper/Swappers/SwapIdentifiers.cs b/src/SmiServices/Microservices/IdentifierMapper/Swappers/SwapIdentifiers.cs index 410d11470..df3e9985e 100644 --- a/src/SmiServices/Microservices/IdentifierMapper/Swappers/SwapIdentifiers.cs +++ b/src/SmiServices/Microservices/IdentifierMapper/Swappers/SwapIdentifiers.cs @@ -3,30 +3,29 @@ using SmiServices.Common.Options; using System.Diagnostics; -namespace SmiServices.Microservices.IdentifierMapper.Swappers -{ - public abstract class SwapIdentifiers : ISwapIdentifiers - { - public int CacheHit; - public int CacheMiss; +namespace SmiServices.Microservices.IdentifierMapper.Swappers; - public int Success; - public int Fail; - public int Invalid { get; protected set; } +public abstract class SwapIdentifiers : ISwapIdentifiers +{ + public int CacheHit; + public int CacheMiss; - public Stopwatch DatabaseStopwatch { get; } = new Stopwatch(); + public int Success; + public int Fail; + public int Invalid { get; protected set; } - public abstract void Setup(IMappingTableOptions mappingTableOptions); + public Stopwatch DatabaseStopwatch { get; } = new Stopwatch(); - public abstract string? GetSubstitutionFor(string toSwap, out string? 
reason); + public abstract void Setup(IMappingTableOptions mappingTableOptions); - public abstract void ClearCache(); + public abstract string? GetSubstitutionFor(string toSwap, out string? reason); - public virtual void LogProgress(ILogger logger, LogLevel level) - { - logger.Log(level, $"{GetType().Name}: CacheRatio={CacheHit}:{CacheMiss} SuccessRatio={Success}:{Fail}:{Invalid} DatabaseTime:{DatabaseStopwatch.Elapsed}"); - } + public abstract void ClearCache(); - public abstract DiscoveredTable? GetGuidTableIfAny(IMappingTableOptions options); + public virtual void LogProgress(ILogger logger, LogLevel level) + { + logger.Log(level, $"{GetType().Name}: CacheRatio={CacheHit}:{CacheMiss} SuccessRatio={Success}:{Fail}:{Invalid} DatabaseTime:{DatabaseStopwatch.Elapsed}"); } + + public abstract DiscoveredTable? GetGuidTableIfAny(IMappingTableOptions options); } diff --git a/src/SmiServices/Microservices/IdentifierMapper/Swappers/TableLookupSwapper.cs b/src/SmiServices/Microservices/IdentifierMapper/Swappers/TableLookupSwapper.cs index 3839f81fc..46cb41dc9 100644 --- a/src/SmiServices/Microservices/IdentifierMapper/Swappers/TableLookupSwapper.cs +++ b/src/SmiServices/Microservices/IdentifierMapper/Swappers/TableLookupSwapper.cs @@ -5,91 +5,90 @@ using System; using System.Data.Common; -namespace SmiServices.Microservices.IdentifierMapper.Swappers +namespace SmiServices.Microservices.IdentifierMapper.Swappers; + +/// +/// Connects to a database containing values to swap identifiers with. Keeps a single cache of the last seen value +/// +public class TableLookupSwapper : SwapIdentifiers { - /// - /// Connects to a database containing values to swap identifiers with. Keeps a single cache of the last seen value - /// - public class TableLookupSwapper : SwapIdentifiers - { - private readonly ILogger _logger = LogManager.GetCurrentClassLogger(); + private readonly ILogger _logger = LogManager.GetCurrentClassLogger(); - private DiscoveredServer? 
_server; - private IMappingTableOptions? _options; - private DiscoveredTable? _swapTable; + private DiscoveredServer? _server; + private IMappingTableOptions? _options; + private DiscoveredTable? _swapTable; - // Simple cache of the last swap pair - private string? _lastKey; - private string? _lastVal; + // Simple cache of the last swap pair + private string? _lastKey; + private string? _lastVal; - public override void Setup(IMappingTableOptions options) - { - _options = options; - _swapTable = options.Discover(); - _server = _swapTable.Database.Server; + public override void Setup(IMappingTableOptions options) + { + _options = options; + _swapTable = options.Discover(); + _server = _swapTable.Database.Server; - if (!_swapTable.Exists()) - throw new ArgumentException($"Swap table '{_swapTable.GetFullyQualifiedName()}' did not exist on server '{_server}'"); - } + if (!_swapTable.Exists()) + throw new ArgumentException($"Swap table '{_swapTable.GetFullyQualifiedName()}' did not exist on server '{_server}'"); + } - public override string? GetSubstitutionFor(string toSwap, out string? reason) - { - reason = null; + public override string? GetSubstitutionFor(string toSwap, out string? 
reason) + { + reason = null; - // If the cached key matches, return the last value - if (string.Equals(toSwap, _lastKey) && _lastVal != null) - { - _logger.Debug("Using cached swap value"); + // If the cached key matches, return the last value + if (string.Equals(toSwap, _lastKey) && _lastVal != null) + { + _logger.Debug("Using cached swap value"); - CacheHit++; - Success++; + CacheHit++; + Success++; - return _lastVal; - } + return _lastVal; + } - CacheMiss++; + CacheMiss++; - // Else fall through to the database lookup - using (new TimeTracker(DatabaseStopwatch)) - using (DbConnection con = _server!.GetConnection()) - { - con.Open(); + // Else fall through to the database lookup + using (new TimeTracker(DatabaseStopwatch)) + using (DbConnection con = _server!.GetConnection()) + { + con.Open(); - string sql = - $"SELECT {_options!.ReplacementColumnName} FROM {_swapTable!.GetFullyQualifiedName()} WHERE {_options.SwapColumnName}=@val"; + string sql = + $"SELECT {_options!.ReplacementColumnName} FROM {_swapTable!.GetFullyQualifiedName()} WHERE {_options.SwapColumnName}=@val"; - DbCommand cmd = _server.GetCommand(sql, con); - _server.AddParameterWithValueToCommand("@val", cmd, toSwap); + DbCommand cmd = _server.GetCommand(sql, con); + _server.AddParameterWithValueToCommand("@val", cmd, toSwap); - object? result = cmd.ExecuteScalar(); + object? 
result = cmd.ExecuteScalar(); - if (result == DBNull.Value || result == null) - { - reason = $"No match found for '{toSwap}'"; - Fail++; - return null; - } + if (result == DBNull.Value || result == null) + { + reason = $"No match found for '{toSwap}'"; + Fail++; + return null; + } - _lastKey = toSwap; - _lastVal = result.ToString(); + _lastKey = toSwap; + _lastVal = result.ToString(); - ++Success; + ++Success; - return _lastVal; - } + return _lastVal; } + } - public override void ClearCache() - { - _lastVal = null; - _logger.Debug("ClearCache called, single value cache cleared"); - } + public override void ClearCache() + { + _lastVal = null; + _logger.Debug("ClearCache called, single value cache cleared"); + } - public override DiscoveredTable? GetGuidTableIfAny(IMappingTableOptions options) - { - return null; - } + public override DiscoveredTable? GetGuidTableIfAny(IMappingTableOptions options) + { + return null; } } diff --git a/src/SmiServices/Microservices/IdentifierMapper/Swappers/TableLookupWithGuidFallbackSwapper.cs b/src/SmiServices/Microservices/IdentifierMapper/Swappers/TableLookupWithGuidFallbackSwapper.cs index 19ef72311..e92e6675d 100644 --- a/src/SmiServices/Microservices/IdentifierMapper/Swappers/TableLookupWithGuidFallbackSwapper.cs +++ b/src/SmiServices/Microservices/IdentifierMapper/Swappers/TableLookupWithGuidFallbackSwapper.cs @@ -2,100 +2,99 @@ using NLog; using SmiServices.Common.Options; -namespace SmiServices.Microservices.IdentifierMapper.Swappers +namespace SmiServices.Microservices.IdentifierMapper.Swappers; + +/// +/// Identifier swapper which connects to lookup table (wrapping ) but if no mapping is +/// found then generates a guid substitution value (which is stored in the same database - using ) +/// +/// The Guid mapping table will be the mapping table name with the suffix (i.e. 
+ suffix) +/// +public class TableLookupWithGuidFallbackSwapper : SwapIdentifiers { /// - /// Identifier swapper which connects to lookup table (wrapping ) but if no mapping is - /// found then generates a guid substitution value (which is stored in the same database - using ) - /// - /// The Guid mapping table will be the mapping table name with the suffix (i.e. + suffix) + /// Determines the name to give/expect for the substitution table when the lookup misses. The name will be + /// the name of the lookup table + this suffix /// - public class TableLookupWithGuidFallbackSwapper : SwapIdentifiers - { - /// - /// Determines the name to give/expect for the substitution table when the lookup misses. The name will be - /// the name of the lookup table + this suffix - /// - public const string GuidTableSuffix = "_guid"; + public const string GuidTableSuffix = "_guid"; - /// - /// The name to give/expect for the in the guid swap table - /// (that stores lookup misses) - /// - public const string GuidColumnName = "guid"; + /// + /// The name to give/expect for the in the guid swap table + /// (that stores lookup misses) + /// + public const string GuidColumnName = "guid"; - private readonly TableLookupSwapper _tableSwapper; - private readonly ForGuidIdentifierSwapper _guidSwapper; + private readonly TableLookupSwapper _tableSwapper; + private readonly ForGuidIdentifierSwapper _guidSwapper; - public TableLookupWithGuidFallbackSwapper() - { - _tableSwapper = new TableLookupSwapper(); - _guidSwapper = new ForGuidIdentifierSwapper(); - } + public TableLookupWithGuidFallbackSwapper() + { + _tableSwapper = new TableLookupSwapper(); + _guidSwapper = new ForGuidIdentifierSwapper(); + } - /// - public override void Setup(IMappingTableOptions mappingTableOptions) - { - _tableSwapper.Setup(mappingTableOptions); + /// + public override void Setup(IMappingTableOptions mappingTableOptions) + { + _tableSwapper.Setup(mappingTableOptions); - var guidOptions = 
mappingTableOptions.Clone(); - guidOptions.MappingTableName = GetGuidTableIfAny(guidOptions)?.GetFullyQualifiedName(); - guidOptions.ReplacementColumnName = GuidColumnName; - _guidSwapper.Setup(guidOptions); - } + var guidOptions = mappingTableOptions.Clone(); + guidOptions.MappingTableName = GetGuidTableIfAny(guidOptions)?.GetFullyQualifiedName(); + guidOptions.ReplacementColumnName = GuidColumnName; + _guidSwapper.Setup(guidOptions); + } - /// - /// Returns the main lookup table, for the temporary guid allocations use - /// - /// - public static DiscoveredTable GetMappingTable(IMappingTableOptions options) - { - return options.Discover(); - } + /// + /// Returns the main lookup table, for the temporary guid allocations use + /// + /// + public static DiscoveredTable GetMappingTable(IMappingTableOptions options) + { + return options.Discover(); + } - /// - /// Returns a table in which guids would be stored for mapping table lookup misses. This will be in - /// the same database and schema as but the table name will have the - /// - /// - /// - public override DiscoveredTable? GetGuidTableIfAny(IMappingTableOptions options) - { - var mappingTable = options.Discover(); - var guidTableName = mappingTable.GetRuntimeName() + GuidTableSuffix; + /// + /// Returns a table in which guids would be stored for mapping table lookup misses. This will be in + /// the same database and schema as but the table name will have the + /// + /// + /// + public override DiscoveredTable? GetGuidTableIfAny(IMappingTableOptions options) + { + var mappingTable = options.Discover(); + var guidTableName = mappingTable.GetRuntimeName() + GuidTableSuffix; - return mappingTable.Database.ExpectTable(guidTableName, mappingTable.Schema, TableType.Table); - } + return mappingTable.Database.ExpectTable(guidTableName, mappingTable.Schema, TableType.Table); + } - /// - /// Returns a substitution from the wrapped . If no match is found then a guid is allocated - /// and stored using a wrapped . 
- /// - /// - /// - /// - public override string? GetSubstitutionFor(string toSwap, out string? reason) - { - //get answer from lookup table - var answer = _tableSwapper.GetSubstitutionFor(toSwap, out reason); + /// + /// Returns a substitution from the wrapped . If no match is found then a guid is allocated + /// and stored using a wrapped . + /// + /// + /// + /// + public override string? GetSubstitutionFor(string toSwap, out string? reason) + { + //get answer from lookup table + var answer = _tableSwapper.GetSubstitutionFor(toSwap, out reason); - //if we didn't get a hit in the lookup table use the guid allocation swapper - return !string.IsNullOrWhiteSpace(answer) ? answer : _guidSwapper.GetSubstitutionFor(toSwap, out reason); - } + //if we didn't get a hit in the lookup table use the guid allocation swapper + return !string.IsNullOrWhiteSpace(answer) ? answer : _guidSwapper.GetSubstitutionFor(toSwap, out reason); + } - /// - /// Calls on both wrapped swappers (guid and lookup) - /// - public override void ClearCache() - { - _tableSwapper.ClearCache(); - _guidSwapper.ClearCache(); - } + /// + /// Calls on both wrapped swappers (guid and lookup) + /// + public override void ClearCache() + { + _tableSwapper.ClearCache(); + _guidSwapper.ClearCache(); + } - public override void LogProgress(ILogger logger, LogLevel level) - { - _tableSwapper.LogProgress(logger, level); - _guidSwapper.LogProgress(logger, level); - } + public override void LogProgress(ILogger logger, LogLevel level) + { + _tableSwapper.LogProgress(logger, level); + _guidSwapper.LogProgress(logger, level); } } diff --git a/src/SmiServices/Microservices/IsIdentifiable/Classifier.cs b/src/SmiServices/Microservices/IsIdentifiable/Classifier.cs index e7a1a81c5..2b90909a6 100644 --- a/src/SmiServices/Microservices/IsIdentifiable/Classifier.cs +++ b/src/SmiServices/Microservices/IsIdentifiable/Classifier.cs @@ -5,55 +5,54 @@ using System.IO.Abstractions; using System.Linq; -namespace 
SmiServices.Microservices.IsIdentifiable +namespace SmiServices.Microservices.IsIdentifiable; + +public abstract class Classifier : IClassifier { - public abstract class Classifier : IClassifier + public DirectoryInfo? DataDirectory { get; set; } + + + protected Classifier(DirectoryInfo dataDirectory) { - public DirectoryInfo? DataDirectory { get; set; } + DataDirectory = dataDirectory; + if (!DataDirectory.Exists) + throw new DirectoryNotFoundException($"Could not find directory {DataDirectory.FullName}"); + } - protected Classifier(DirectoryInfo dataDirectory) - { - DataDirectory = dataDirectory; + public abstract IEnumerable Classify(IFileInfo dcm); + + /// + /// Finds a single directory of a given name in the and asserts that it exists + /// + /// + /// + protected DirectoryInfo GetSubdirectory(string toFind) + { + var stanfordNerDir = DataDirectory!.GetDirectories(toFind).SingleOrDefault() ?? throw new DirectoryNotFoundException($"Expected sub-directory called '{toFind}' to exist in '{DataDirectory}'"); + return stanfordNerDir; + } - if (!DataDirectory.Exists) - throw new DirectoryNotFoundException($"Could not find directory {DataDirectory.FullName}"); - } - public abstract IEnumerable Classify(IFileInfo dcm); + /// + /// Finds (including in subdirectories) files that match the . If exactly 1 match is + /// found then it is returned otherwise a + /// + /// + /// + /// + protected static FileInfo FindOneFile(string searchPattern, DirectoryInfo directory) + { + var files = directory.GetFiles(searchPattern, SearchOption.AllDirectories).ToArray(); - /// - /// Finds a single directory of a given name in the and asserts that it exists - /// - /// - /// - protected DirectoryInfo GetSubdirectory(string toFind) + return files.Length switch { - var stanfordNerDir = DataDirectory!.GetDirectories(toFind).SingleOrDefault() ?? 
throw new DirectoryNotFoundException($"Expected sub-directory called '{toFind}' to exist in '{DataDirectory}'"); - return stanfordNerDir; - } - - - /// - /// Finds (including in subdirectories) files that match the . If exactly 1 match is - /// found then it is returned otherwise a - /// - /// - /// - /// - protected static FileInfo FindOneFile(string searchPattern, DirectoryInfo directory) - { - var files = directory.GetFiles(searchPattern, SearchOption.AllDirectories).ToArray(); - - return files.Length switch - { - 0 => throw new FileNotFoundException( - $"Expected 1 file matching '{searchPattern}' to exist in {directory}"), - > 1 => throw new Exception( - $"Found '{files.Length}' file matching '{searchPattern}' in {directory} (expected 1)"), - _ => files[0] - }; - } - + 0 => throw new FileNotFoundException( + $"Expected 1 file matching '{searchPattern}' to exist in {directory}"), + > 1 => throw new Exception( + $"Found '{files.Length}' file matching '{searchPattern}' in {directory} (expected 1)"), + _ => files[0] + }; } + } diff --git a/src/SmiServices/Microservices/IsIdentifiable/IClassifier.cs b/src/SmiServices/Microservices/IsIdentifiable/IClassifier.cs index df3ee7ec8..a79219179 100644 --- a/src/SmiServices/Microservices/IsIdentifiable/IClassifier.cs +++ b/src/SmiServices/Microservices/IsIdentifiable/IClassifier.cs @@ -3,15 +3,14 @@ using System.IO; using System.IO.Abstractions; -namespace SmiServices.Microservices.IsIdentifiable +namespace SmiServices.Microservices.IsIdentifiable; + +public interface IClassifier { - public interface IClassifier - { - /// - /// The location in which you can get your required data files - /// - DirectoryInfo? DataDirectory { get; set; } + /// + /// The location in which you can get your required data files + /// + DirectoryInfo? 
DataDirectory { get; set; } - IEnumerable Classify(IFileInfo dcm); - } + IEnumerable Classify(IFileInfo dcm); } diff --git a/src/SmiServices/Microservices/IsIdentifiable/IsIdentifiable.cs b/src/SmiServices/Microservices/IsIdentifiable/IsIdentifiable.cs index b4e4b6476..412631802 100644 --- a/src/SmiServices/Microservices/IsIdentifiable/IsIdentifiable.cs +++ b/src/SmiServices/Microservices/IsIdentifiable/IsIdentifiable.cs @@ -3,23 +3,22 @@ using System.Collections.Generic; using System.Diagnostics.CodeAnalysis; -namespace SmiServices.Microservices.IsIdentifiable +namespace SmiServices.Microservices.IsIdentifiable; + +public static class IsIdentifiable { - public static class IsIdentifiable + [ExcludeFromCodeCoverage] + public static int Main(IEnumerable args) { - [ExcludeFromCodeCoverage] - public static int Main(IEnumerable args) - { - - int ret = SmiCliInit.ParseAndRun(args, nameof(IsIdentifiable), OnParse); - return ret; - } - private static int OnParse(GlobalOptions globals, CliOptions opts) - { - var bootstrapper = new MicroserviceHostBootstrapper( - () => new IsIdentifiableHost(globals)); - return bootstrapper.Main(); - } + int ret = SmiCliInit.ParseAndRun(args, nameof(IsIdentifiable), OnParse); + return ret; } + private static int OnParse(GlobalOptions globals, CliOptions opts) + { + var bootstrapper = new MicroserviceHostBootstrapper( + () => new IsIdentifiableHost(globals)); + return bootstrapper.Main(); + } + } diff --git a/src/SmiServices/Microservices/IsIdentifiable/IsIdentifiableHost.cs b/src/SmiServices/Microservices/IsIdentifiable/IsIdentifiableHost.cs index 4b5ce59c3..0da3856fb 100644 --- a/src/SmiServices/Microservices/IsIdentifiable/IsIdentifiableHost.cs +++ b/src/SmiServices/Microservices/IsIdentifiable/IsIdentifiableHost.cs @@ -5,48 +5,47 @@ using System; using System.IO; -namespace SmiServices.Microservices.IsIdentifiable +namespace SmiServices.Microservices.IsIdentifiable; + +public class IsIdentifiableHost : MicroserviceHost { - public class 
IsIdentifiableHost : MicroserviceHost - { - private readonly ConsumerOptions _consumerOptions; - public IsIdentifiableQueueConsumer Consumer { get; } + private readonly ConsumerOptions _consumerOptions; + public IsIdentifiableQueueConsumer Consumer { get; } - private readonly IProducerModel _producerModel; + private readonly IProducerModel _producerModel; - public IsIdentifiableHost( - GlobalOptions globals - ) - : base(globals) - { - _consumerOptions = globals.IsIdentifiableServiceOptions ?? throw new ArgumentNullException(nameof(globals)); + public IsIdentifiableHost( + GlobalOptions globals + ) + : base(globals) + { + _consumerOptions = globals.IsIdentifiableServiceOptions ?? throw new ArgumentNullException(nameof(globals)); - string? classifierTypename = globals.IsIdentifiableServiceOptions.ClassifierType; - string? dataDirectory = globals.IsIdentifiableServiceOptions.DataDirectory; + string? classifierTypename = globals.IsIdentifiableServiceOptions.ClassifierType; + string? dataDirectory = globals.IsIdentifiableServiceOptions.DataDirectory; - if (string.IsNullOrWhiteSpace(classifierTypename)) - throw new ArgumentException("No IClassifier has been set in options. Enter a value for ClassifierType", nameof(globals)); - if (string.IsNullOrWhiteSpace(dataDirectory)) - throw new ArgumentException("A DataDirectory must be set", nameof(globals)); + if (string.IsNullOrWhiteSpace(classifierTypename)) + throw new ArgumentException("No IClassifier has been set in options. Enter a value for ClassifierType", nameof(globals)); + if (string.IsNullOrWhiteSpace(dataDirectory)) + throw new ArgumentException("A DataDirectory must be set", nameof(globals)); - var objectFactory = new MicroserviceObjectFactory(); - var classifier = objectFactory.CreateInstance(classifierTypename, typeof(IClassifier).Assembly, new DirectoryInfo(dataDirectory), globals.IsIdentifiableOptions!) - ?? 
throw new TypeLoadException($"Could not find IClassifier Type {classifierTypename}"); - _producerModel = MessageBroker.SetupProducer(globals.IsIdentifiableServiceOptions.IsIdentifiableProducerOptions!, isBatch: false); + var objectFactory = new MicroserviceObjectFactory(); + var classifier = objectFactory.CreateInstance(classifierTypename, typeof(IClassifier).Assembly, new DirectoryInfo(dataDirectory), globals.IsIdentifiableOptions!) + ?? throw new TypeLoadException($"Could not find IClassifier Type {classifierTypename}"); + _producerModel = MessageBroker.SetupProducer(globals.IsIdentifiableServiceOptions.IsIdentifiableProducerOptions!, isBatch: false); - Consumer = new IsIdentifiableQueueConsumer(_producerModel, globals.FileSystemOptions!.ExtractRoot!, classifier); - } + Consumer = new IsIdentifiableQueueConsumer(_producerModel, globals.FileSystemOptions!.ExtractRoot!, classifier); + } - public override void Start() - { - MessageBroker.StartConsumer(_consumerOptions, Consumer, isSolo: false); - } + public override void Start() + { + MessageBroker.StartConsumer(_consumerOptions, Consumer, isSolo: false); + } - public override void Stop(string reason) - { - base.Stop(reason); + public override void Stop(string reason) + { + base.Stop(reason); - Consumer?.Dispose(); - } + Consumer?.Dispose(); } } diff --git a/src/SmiServices/Microservices/IsIdentifiable/IsIdentifiableQueueConsumer.cs b/src/SmiServices/Microservices/IsIdentifiable/IsIdentifiableQueueConsumer.cs index e05c54ebb..12bb383a1 100644 --- a/src/SmiServices/Microservices/IsIdentifiable/IsIdentifiableQueueConsumer.cs +++ b/src/SmiServices/Microservices/IsIdentifiable/IsIdentifiableQueueConsumer.cs @@ -9,99 +9,98 @@ using System.IO.Abstractions; using System.Linq; -namespace SmiServices.Microservices.IsIdentifiable +namespace SmiServices.Microservices.IsIdentifiable; + +public class IsIdentifiableQueueConsumer : Consumer, IDisposable { - public class IsIdentifiableQueueConsumer : Consumer, IDisposable + private 
readonly IProducerModel _producer; + private readonly IFileSystem _fileSystem; + private readonly string _extractionRoot; + private readonly IClassifier _classifier; + + public IsIdentifiableQueueConsumer( + IProducerModel producer, + string extractionRoot, + IClassifier classifier, + IFileSystem? fileSystem = null + ) { - private readonly IProducerModel _producer; - private readonly IFileSystem _fileSystem; - private readonly string _extractionRoot; - private readonly IClassifier _classifier; - - public IsIdentifiableQueueConsumer( - IProducerModel producer, - string extractionRoot, - IClassifier classifier, - IFileSystem? fileSystem = null - ) - { - _producer = producer; - _extractionRoot = string.IsNullOrWhiteSpace(extractionRoot) ? throw new ArgumentException($"Argument cannot be null or whitespace", nameof(extractionRoot)) : extractionRoot; - _classifier = classifier; - _fileSystem = fileSystem ?? new FileSystem(); + _producer = producer; + _extractionRoot = string.IsNullOrWhiteSpace(extractionRoot) ? throw new ArgumentException($"Argument cannot be null or whitespace", nameof(extractionRoot)) : extractionRoot; + _classifier = classifier; + _fileSystem = fileSystem ?? 
new FileSystem(); - if (!_fileSystem.Directory.Exists(_extractionRoot)) - throw new DirectoryNotFoundException($"Could not find the extraction root '{_extractionRoot}' in the filesystem"); - } + if (!_fileSystem.Directory.Exists(_extractionRoot)) + throw new DirectoryNotFoundException($"Could not find the extraction root '{_extractionRoot}' in the filesystem"); + } - protected override void ProcessMessageImpl(IMessageHeader header, ExtractedFileStatusMessage statusMessage, ulong tag) + protected override void ProcessMessageImpl(IMessageHeader header, ExtractedFileStatusMessage statusMessage, ulong tag) + { + // We should only ever receive messages regarding anonymised images + if (statusMessage.Status != ExtractedFileStatus.Anonymised) + throw new ApplicationException($"Received an {statusMessage.GetType().Name} message with Status '{statusMessage.Status}' and StatusMessage '{statusMessage.StatusMessage}'"); + + if (statusMessage.OutputFilePath == null) + throw new ApplicationException($"Received an {statusMessage.GetType().Name} message with a null OutputPath"); + + IFileInfo toProcess = _fileSystem.FileInfo.New( + _fileSystem.Path.Combine( + _extractionRoot, + statusMessage.ExtractionDirectory, + statusMessage.OutputFilePath + ) + ); + + if (!toProcess.Exists) { - // We should only ever receive messages regarding anonymised images - if (statusMessage.Status != ExtractedFileStatus.Anonymised) - throw new ApplicationException($"Received an {statusMessage.GetType().Name} message with Status '{statusMessage.Status}' and StatusMessage '{statusMessage.StatusMessage}'"); - - if (statusMessage.OutputFilePath == null) - throw new ApplicationException($"Received an {statusMessage.GetType().Name} message with a null OutputPath"); - - IFileInfo toProcess = _fileSystem.FileInfo.New( - _fileSystem.Path.Combine( - _extractionRoot, - statusMessage.ExtractionDirectory, - statusMessage.OutputFilePath - ) - ); - - if (!toProcess.Exists) - { - SendVerificationMessage(statusMessage, 
header, tag, VerifiedFileStatus.ErrorWontRetry, $"Exception while processing {statusMessage.GetType().Name}: Could not find file to process '{toProcess.FullName}'"); - return; - } - - IEnumerable failures; - - try - { - failures = _classifier.Classify(toProcess); - } - catch (Exception e) - { - SendVerificationMessage(statusMessage, header, tag, VerifiedFileStatus.ErrorWontRetry, $"Exception while classifying {statusMessage.GetType().Name}:\n{e}. File could not be scanned."); - return; - } - - foreach (Failure f in failures) - Logger.Info($"Validation failed for {f.Resource} Problem Value:{f.ProblemValue}"); - - - var status = failures.Any() ? VerifiedFileStatus.IsIdentifiable : VerifiedFileStatus.NotIdentifiable; - var report = JsonConvert.SerializeObject(failures); - - SendVerificationMessage(statusMessage, header, tag, status, report); + SendVerificationMessage(statusMessage, header, tag, VerifiedFileStatus.ErrorWontRetry, $"Exception while processing {statusMessage.GetType().Name}: Could not find file to process '{toProcess.FullName}'"); + return; } - private void SendVerificationMessage( - ExtractedFileStatusMessage statusMessage, - IMessageHeader header, - ulong tag, - VerifiedFileStatus status, - string report - ) + IEnumerable failures; + + try { - var response = new ExtractedFileVerificationMessage(statusMessage) - { - Status = status, - Report = report, - }; - _producer.SendMessage(response, header, routingKey: null); - - Ack(header, tag); + failures = _classifier.Classify(toProcess); } - - public void Dispose() + catch (Exception e) { - if (_classifier is IDisposable d) - d.Dispose(); - GC.SuppressFinalize(this); + SendVerificationMessage(statusMessage, header, tag, VerifiedFileStatus.ErrorWontRetry, $"Exception while classifying {statusMessage.GetType().Name}:\n{e}. 
File could not be scanned."); + return; } + + foreach (Failure f in failures) + Logger.Info($"Validation failed for {f.Resource} Problem Value:{f.ProblemValue}"); + + + var status = failures.Any() ? VerifiedFileStatus.IsIdentifiable : VerifiedFileStatus.NotIdentifiable; + var report = JsonConvert.SerializeObject(failures); + + SendVerificationMessage(statusMessage, header, tag, status, report); + } + + private void SendVerificationMessage( + ExtractedFileStatusMessage statusMessage, + IMessageHeader header, + ulong tag, + VerifiedFileStatus status, + string report + ) + { + var response = new ExtractedFileVerificationMessage(statusMessage) + { + Status = status, + Report = report, + }; + _producer.SendMessage(response, header, routingKey: null); + + Ack(header, tag); + } + + public void Dispose() + { + if (_classifier is IDisposable d) + d.Dispose(); + GC.SuppressFinalize(this); } } diff --git a/src/SmiServices/Microservices/IsIdentifiable/NoChisInAnyColumnsConstraint.cs b/src/SmiServices/Microservices/IsIdentifiable/NoChisInAnyColumnsConstraint.cs index 821d684e5..efcce4060 100644 --- a/src/SmiServices/Microservices/IsIdentifiable/NoChisInAnyColumnsConstraint.cs +++ b/src/SmiServices/Microservices/IsIdentifiable/NoChisInAnyColumnsConstraint.cs @@ -1,32 +1,31 @@ using System.Text.RegularExpressions; -namespace SmiServices.Microservices.IsIdentifiable +namespace SmiServices.Microservices.IsIdentifiable; + +public class NoChisInAnyColumnsConstraint { - public class NoChisInAnyColumnsConstraint + // DDMMYY + 4 digits + // \b bounded i.e. not more than 10 digits + static readonly Regex _chiRegex = new(@"\b[0-3][0-9][0-1][0-9][0-9]{6}\b"); + + public static string GetHumanReadableDescriptionOfValidation() { - // DDMMYY + 4 digits - // \b bounded i.e. 
not more than 10 digits - static readonly Regex _chiRegex = new(@"\b[0-3][0-9][0-1][0-9][0-9]{6}\b"); + return "Checks all cells in the current row for any fields containing chis"; + } - public static string GetHumanReadableDescriptionOfValidation() + public static string? Validate(object[] otherColumns, string[] otherColumnNames) + { + for (var i = 0; i < otherColumnNames.Length; i++) { - return "Checks all cells in the current row for any fields containing chis"; + if (otherColumns[i] is string s && ContainsChi(s)) + return $"Found chi in field {otherColumnNames[i]}"; } - public static string? Validate(object[] otherColumns, string[] otherColumnNames) - { - for (var i = 0; i < otherColumnNames.Length; i++) - { - if (otherColumns[i] is string s && ContainsChi(s)) - return $"Found chi in field {otherColumnNames[i]}"; - } - - return null; - } + return null; + } - private static bool ContainsChi(string value) - { - return _chiRegex.IsMatch(value); - } + private static bool ContainsChi(string value) + { + return _chiRegex.IsMatch(value); } } diff --git a/src/SmiServices/Microservices/IsIdentifiable/RejectAllClassifier.cs b/src/SmiServices/Microservices/IsIdentifiable/RejectAllClassifier.cs index e2869e019..90f9bf03f 100644 --- a/src/SmiServices/Microservices/IsIdentifiable/RejectAllClassifier.cs +++ b/src/SmiServices/Microservices/IsIdentifiable/RejectAllClassifier.cs @@ -4,17 +4,16 @@ using System.IO; using System.IO.Abstractions; -namespace SmiServices.Microservices.IsIdentifiable +namespace SmiServices.Microservices.IsIdentifiable; + +public class RejectAllClassifier : Classifier { - public class RejectAllClassifier : Classifier + public RejectAllClassifier(DirectoryInfo dataDirectory, IsIdentifiableDicomFileOptions _) : base(dataDirectory) { - public RejectAllClassifier(DirectoryInfo dataDirectory, IsIdentifiableDicomFileOptions _) : base(dataDirectory) - { - } + } - public override IEnumerable Classify(IFileInfo dcm) - { - yield return new Failure([new 
FailurePart("Reject All classifier rejected all content", FailureClassification.Person)]); - } + public override IEnumerable Classify(IFileInfo dcm) + { + yield return new Failure([new FailurePart("Reject All classifier rejected all content", FailureClassification.Person)]); } } diff --git a/src/SmiServices/Microservices/IsIdentifiable/TesseractStanfordDicomFileClassifier.cs b/src/SmiServices/Microservices/IsIdentifiable/TesseractStanfordDicomFileClassifier.cs index 35ebebe35..6617823db 100644 --- a/src/SmiServices/Microservices/IsIdentifiable/TesseractStanfordDicomFileClassifier.cs +++ b/src/SmiServices/Microservices/IsIdentifiable/TesseractStanfordDicomFileClassifier.cs @@ -7,43 +7,42 @@ using System.IO; using System.IO.Abstractions; -namespace SmiServices.Microservices.IsIdentifiable +namespace SmiServices.Microservices.IsIdentifiable; + +public class TesseractStanfordDicomFileClassifier : Classifier, IDisposable { - public class TesseractStanfordDicomFileClassifier : Classifier, IDisposable - { - private readonly DicomFileRunner _runner; + private readonly DicomFileRunner _runner; - //public TesseractStanfordDicomFileClassifier(DirectoryInfo dataDirectory) : base(dataDirectory) - public TesseractStanfordDicomFileClassifier(DirectoryInfo dataDirectory, IsIdentifiableDicomFileOptions fileOptions) : base(dataDirectory) - { - //need to pass this so that the runner doesn't get unhappy about there being no reports (even though we clear it below) - fileOptions.ColumnReport = true; - fileOptions.TessDirectory = dataDirectory.FullName; + //public TesseractStanfordDicomFileClassifier(DirectoryInfo dataDirectory) : base(dataDirectory) + public TesseractStanfordDicomFileClassifier(DirectoryInfo dataDirectory, IsIdentifiableDicomFileOptions fileOptions) : base(dataDirectory) + { + //need to pass this so that the runner doesn't get unhappy about there being no reports (even though we clear it below) + fileOptions.ColumnReport = true; + fileOptions.TessDirectory = 
dataDirectory.FullName; - // The Rules directory is always called "IsIdentifiableRules" - DirectoryInfo[] subDirs = dataDirectory.GetDirectories("IsIdentifiableRules"); - foreach (DirectoryInfo subDir in subDirs) - fileOptions.RulesDirectory = subDir.FullName; + // The Rules directory is always called "IsIdentifiableRules" + DirectoryInfo[] subDirs = dataDirectory.GetDirectories("IsIdentifiableRules"); + foreach (DirectoryInfo subDir in subDirs) + fileOptions.RulesDirectory = subDir.FullName; - _runner = new DicomFileRunner(fileOptions, new FileSystem()); - } + _runner = new DicomFileRunner(fileOptions, new FileSystem()); + } - public override IEnumerable Classify(IFileInfo dcm) - { - _runner.Reports.Clear(); - var toMemory = new ToMemoryFailureReport(); - _runner.Reports.Add(toMemory); - _runner.ValidateDicomFile(dcm); + public override IEnumerable Classify(IFileInfo dcm) + { + _runner.Reports.Clear(); + var toMemory = new ToMemoryFailureReport(); + _runner.Reports.Add(toMemory); + _runner.ValidateDicomFile(dcm); - return toMemory.Failures; - } + return toMemory.Failures; + } - public void Dispose() - { - _runner?.Dispose(); - GC.SuppressFinalize(this); - } + public void Dispose() + { + _runner?.Dispose(); + GC.SuppressFinalize(this); } } diff --git a/src/SmiServices/Microservices/MongoDBPopulator/IMongoDbAdapter.cs b/src/SmiServices/Microservices/MongoDBPopulator/IMongoDbAdapter.cs index e81c2bbf0..4f0c99e92 100644 --- a/src/SmiServices/Microservices/MongoDBPopulator/IMongoDbAdapter.cs +++ b/src/SmiServices/Microservices/MongoDBPopulator/IMongoDbAdapter.cs @@ -2,15 +2,14 @@ using MongoDB.Bson; using System.Collections.Generic; -namespace SmiServices.Microservices.MongoDBPopulator -{ - /// - /// Possible return statuses of a write operation - /// - public enum WriteResult { Success, Failure, Unknown } +namespace SmiServices.Microservices.MongoDBPopulator; + +/// +/// Possible return statuses of a write operation +/// +public enum WriteResult { Success, Failure, 
Unknown } - public interface IMongoDbAdapter - { - WriteResult WriteMany(IList toWrite, string? collectionNamePostfix = null); - } +public interface IMongoDbAdapter +{ + WriteResult WriteMany(IList toWrite, string? collectionNamePostfix = null); } diff --git a/src/SmiServices/Microservices/MongoDBPopulator/IMongoDbPopulatorMessageConsumer.cs b/src/SmiServices/Microservices/MongoDBPopulator/IMongoDbPopulatorMessageConsumer.cs index 4dc78d562..650a13026 100644 --- a/src/SmiServices/Microservices/MongoDBPopulator/IMongoDbPopulatorMessageConsumer.cs +++ b/src/SmiServices/Microservices/MongoDBPopulator/IMongoDbPopulatorMessageConsumer.cs @@ -2,12 +2,11 @@ using SmiServices.Common.Options; using SmiServices.Microservices.MongoDBPopulator.Processing; -namespace SmiServices.Microservices.MongoDBPopulator +namespace SmiServices.Microservices.MongoDBPopulator; + +public interface IMongoDbPopulatorMessageConsumer { - public interface IMongoDbPopulatorMessageConsumer - { - ConsumerOptions ConsumerOptions { get; } + ConsumerOptions ConsumerOptions { get; } - IMessageProcessor Processor { get; } - } + IMessageProcessor Processor { get; } } diff --git a/src/SmiServices/Microservices/MongoDBPopulator/MongoDBPopulator.cs b/src/SmiServices/Microservices/MongoDBPopulator/MongoDBPopulator.cs index 803719c25..460dc0621 100644 --- a/src/SmiServices/Microservices/MongoDBPopulator/MongoDBPopulator.cs +++ b/src/SmiServices/Microservices/MongoDBPopulator/MongoDBPopulator.cs @@ -3,25 +3,24 @@ using System.Collections.Generic; using System.Diagnostics.CodeAnalysis; -namespace SmiServices.Microservices.MongoDBPopulator +namespace SmiServices.Microservices.MongoDBPopulator; + +public static class MongoDBPopulator { - public static class MongoDBPopulator + /// + /// Program entry point when run from command line + /// + [ExcludeFromCodeCoverage] + public static int Main(IEnumerable args) { - /// - /// Program entry point when run from command line - /// - [ExcludeFromCodeCoverage] - public 
static int Main(IEnumerable args) - { - int ret = SmiCliInit.ParseAndRun(args, nameof(MongoDBPopulator), OnParse); - return ret; - } + int ret = SmiCliInit.ParseAndRun(args, nameof(MongoDBPopulator), OnParse); + return ret; + } - private static int OnParse(GlobalOptions globals, CliOptions _) - { - var bootstrapper = new MicroserviceHostBootstrapper(() => new MongoDbPopulatorHost(globals)); - int ret = bootstrapper.Main(); - return ret; - } + private static int OnParse(GlobalOptions globals, CliOptions _) + { + var bootstrapper = new MicroserviceHostBootstrapper(() => new MongoDbPopulatorHost(globals)); + int ret = bootstrapper.Main(); + return ret; } } diff --git a/src/SmiServices/Microservices/MongoDBPopulator/MongoDbAdapter.cs b/src/SmiServices/Microservices/MongoDBPopulator/MongoDbAdapter.cs index 16f2e6012..c34e798cc 100644 --- a/src/SmiServices/Microservices/MongoDBPopulator/MongoDbAdapter.cs +++ b/src/SmiServices/Microservices/MongoDBPopulator/MongoDbAdapter.cs @@ -8,86 +8,85 @@ using System.Linq; -namespace SmiServices.Microservices.MongoDBPopulator +namespace SmiServices.Microservices.MongoDBPopulator; + +/// +/// Class to handle the MongoDb connection +/// +public class MongoDbAdapter : IMongoDbAdapter { + private readonly ILogger _logger = LogManager.GetCurrentClassLogger(); + + private readonly IMongoDatabase _database; + private readonly string _defaultCollectionName; + private readonly IMongoCollection _defaultCollection; + + /// - /// Class to handle the MongoDb connection + /// Constructor /// - public class MongoDbAdapter : IMongoDbAdapter + /// Name to identify the connection to MongoDb with + /// + /// Default collectionNamePostfix to write to unless overridden + public MongoDbAdapter(string applicationName, MongoDbOptions mongoDbOptions, string defaultCollectionName) { - private readonly ILogger _logger = LogManager.GetCurrentClassLogger(); + if (string.IsNullOrWhiteSpace(defaultCollectionName)) + throw new ArgumentException(null, 
nameof(defaultCollectionName)); - private readonly IMongoDatabase _database; - private readonly string _defaultCollectionName; - private readonly IMongoCollection _defaultCollection; + _logger.Debug("MongoDbAdapter: Creating connection to MongoDb on " + mongoDbOptions.HostName + ":" + mongoDbOptions.Port); + //TODO Standardise AppId + MongoClient mongoClient = MongoClientHelpers.GetMongoClient(mongoDbOptions, "MongoDbPopulator::" + applicationName, string.IsNullOrWhiteSpace(mongoDbOptions.UserName)); - /// - /// Constructor - /// - /// Name to identify the connection to MongoDb with - /// - /// Default collectionNamePostfix to write to unless overridden - public MongoDbAdapter(string applicationName, MongoDbOptions mongoDbOptions, string defaultCollectionName) - { - if (string.IsNullOrWhiteSpace(defaultCollectionName)) - throw new ArgumentException(null, nameof(defaultCollectionName)); + _logger.Debug("MongoDbAdapter: Getting reference to database " + mongoDbOptions.DatabaseName); + _database = mongoClient.GetDatabase(mongoDbOptions.DatabaseName); - _logger.Debug("MongoDbAdapter: Creating connection to MongoDb on " + mongoDbOptions.HostName + ":" + mongoDbOptions.Port); + _logger.Debug("MongoDbAdapter: Getting reference to collection " + defaultCollectionName); + _defaultCollectionName = defaultCollectionName; + _defaultCollection = _database.GetCollection(defaultCollectionName); - //TODO Standardise AppId - MongoClient mongoClient = MongoClientHelpers.GetMongoClient(mongoDbOptions, "MongoDbPopulator::" + applicationName, string.IsNullOrWhiteSpace(mongoDbOptions.UserName)); + _logger.Debug("MongoDbAdapter: Checking initial collection"); - _logger.Debug("MongoDbAdapter: Getting reference to database " + mongoDbOptions.DatabaseName); - _database = mongoClient.GetDatabase(mongoDbOptions.DatabaseName); + bool isLive = _database.RunCommandAsync((Command)"{ping:1}").Wait(1000); - _logger.Debug("MongoDbAdapter: Getting reference to collection " + defaultCollectionName); - 
_defaultCollectionName = defaultCollectionName; - _defaultCollection = _database.GetCollection(defaultCollectionName); + if (!isLive) + throw new ArgumentException($"Could not connect to the MongoDB server/database on startup at {mongoDbOptions.HostName}:{mongoDbOptions.Port}"); - _logger.Debug("MongoDbAdapter: Checking initial collection"); + _logger.Debug("MongoDbAdapter: Connection setup successfully"); + } - bool isLive = _database.RunCommandAsync((Command)"{ping:1}").Wait(1000); - if (!isLive) - throw new ArgumentException($"Could not connect to the MongoDB server/database on startup at {mongoDbOptions.HostName}:{mongoDbOptions.Port}"); + /// + /// Writes one or more (s) to MongoDb + /// + /// + /// Optional argument to write to a different collection with the specified tag + /// + public WriteResult WriteMany(IList toWrite, string? collectionNamePostfix = null) + { + if (!toWrite.Any()) + return WriteResult.Success; - _logger.Debug("MongoDbAdapter: Connection setup successfully"); - } + //TODO Test whether pre-fetching references to all the image_* collections results in any speedup + IMongoCollection collectionForWrite = + collectionNamePostfix == null + ? _defaultCollection + : _database.GetCollection($"{_defaultCollectionName}_{collectionNamePostfix}"); + _logger.Info($"Attempting bulk write of {toWrite.Count} documents to {collectionForWrite.CollectionNamespace}"); - /// - /// Writes one or more (s) to MongoDb - /// - /// - /// Optional argument to write to a different collection with the specified tag - /// - public WriteResult WriteMany(IList toWrite, string? collectionNamePostfix = null) + try + { + //TODO Try and determine if any write errors are to do with a document in the batch or not + BulkWriteResult res = collectionForWrite.BulkWrite(toWrite.Select(d => new InsertOneModel(d))); + _logger.Debug(" Write to {0} acknowledged: {1}", collectionForWrite.CollectionNamespace, res.IsAcknowledged); + return res.IsAcknowledged ? 
WriteResult.Success : WriteResult.Failure; + } + catch (MongoBulkWriteException e) { - if (!toWrite.Any()) - return WriteResult.Success; - - //TODO Test whether pre-fetching references to all the image_* collections results in any speedup - IMongoCollection collectionForWrite = - collectionNamePostfix == null - ? _defaultCollection - : _database.GetCollection($"{_defaultCollectionName}_{collectionNamePostfix}"); - - _logger.Info($"Attempting bulk write of {toWrite.Count} documents to {collectionForWrite.CollectionNamespace}"); - - try - { - //TODO Try and determine if any write errors are to do with a document in the batch or not - BulkWriteResult res = collectionForWrite.BulkWrite(toWrite.Select(d => new InsertOneModel(d))); - _logger.Debug(" Write to {0} acknowledged: {1}", collectionForWrite.CollectionNamespace, res.IsAcknowledged); - return res.IsAcknowledged ? WriteResult.Success : WriteResult.Failure; - } - catch (MongoBulkWriteException e) - { - //TODO Determine possible causes of MongoBulkWriteException - _logger.Error("Exception when writing to MongoDb: " + e); - return WriteResult.Unknown; - } + //TODO Determine possible causes of MongoBulkWriteException + _logger.Error("Exception when writing to MongoDb: " + e); + return WriteResult.Unknown; } } } diff --git a/src/SmiServices/Microservices/MongoDBPopulator/MongoDbPopulatorHost.cs b/src/SmiServices/Microservices/MongoDBPopulator/MongoDbPopulatorHost.cs index 38e2e7e3f..6fe695e5d 100644 --- a/src/SmiServices/Microservices/MongoDBPopulator/MongoDbPopulatorHost.cs +++ b/src/SmiServices/Microservices/MongoDBPopulator/MongoDbPopulatorHost.cs @@ -2,46 +2,45 @@ using SmiServices.Common.Messages; using SmiServices.Common.Options; -namespace SmiServices.Microservices.MongoDBPopulator +namespace SmiServices.Microservices.MongoDBPopulator; + +/// +/// Main class to setup and manage the microservice +/// +public class MongoDbPopulatorHost : MicroserviceHost { + public readonly MongoDbPopulatorMessageConsumer 
SeriesConsumer; + public readonly MongoDbPopulatorMessageConsumer ImageConsumer; + + public MongoDbPopulatorHost(GlobalOptions options) + : base(options) + { + SeriesConsumer = new MongoDbPopulatorMessageConsumer(options.MongoDatabases!.DicomStoreOptions!, options.MongoDbPopulatorOptions!, options.MongoDbPopulatorOptions!.SeriesQueueConsumerOptions!); + ImageConsumer = new MongoDbPopulatorMessageConsumer(options.MongoDatabases.DicomStoreOptions!, options.MongoDbPopulatorOptions, options.MongoDbPopulatorOptions.ImageQueueConsumerOptions!); + } + /// - /// Main class to setup and manage the microservice + /// Start processing messages /// - public class MongoDbPopulatorHost : MicroserviceHost + public override void Start() { - public readonly MongoDbPopulatorMessageConsumer SeriesConsumer; - public readonly MongoDbPopulatorMessageConsumer ImageConsumer; - - public MongoDbPopulatorHost(GlobalOptions options) - : base(options) - { - SeriesConsumer = new MongoDbPopulatorMessageConsumer(options.MongoDatabases!.DicomStoreOptions!, options.MongoDbPopulatorOptions!, options.MongoDbPopulatorOptions!.SeriesQueueConsumerOptions!); - ImageConsumer = new MongoDbPopulatorMessageConsumer(options.MongoDatabases.DicomStoreOptions!, options.MongoDbPopulatorOptions, options.MongoDbPopulatorOptions.ImageQueueConsumerOptions!); - } - - /// - /// Start processing messages - /// - public override void Start() - { - Logger.Info("Starting consumers"); - - MessageBroker.StartConsumer(SeriesConsumer.ConsumerOptions, SeriesConsumer, isSolo: false); - MessageBroker.StartConsumer(ImageConsumer.ConsumerOptions, ImageConsumer, isSolo: false); - - Logger.Info("Consumers successfully started"); - } - - /// - /// Stop processing messages and shut down - /// - /// - public override void Stop(string reason) - { - SeriesConsumer.Processor.StopProcessing("Host - " + reason); - ImageConsumer.Processor.StopProcessing("Host - " + reason); - - base.Stop(reason); - } + Logger.Info("Starting consumers"); + + 
MessageBroker.StartConsumer(SeriesConsumer.ConsumerOptions, SeriesConsumer, isSolo: false); + MessageBroker.StartConsumer(ImageConsumer.ConsumerOptions, ImageConsumer, isSolo: false); + + Logger.Info("Consumers successfully started"); + } + + /// + /// Stop processing messages and shut down + /// + /// + public override void Stop(string reason) + { + SeriesConsumer.Processor.StopProcessing("Host - " + reason); + ImageConsumer.Processor.StopProcessing("Host - " + reason); + + base.Stop(reason); } } diff --git a/src/SmiServices/Microservices/MongoDBPopulator/MongoDbPopulatorMessageConsumer.cs b/src/SmiServices/Microservices/MongoDBPopulator/MongoDbPopulatorMessageConsumer.cs index ca5e868a4..b0c05c61f 100644 --- a/src/SmiServices/Microservices/MongoDBPopulator/MongoDbPopulatorMessageConsumer.cs +++ b/src/SmiServices/Microservices/MongoDBPopulator/MongoDbPopulatorMessageConsumer.cs @@ -4,75 +4,74 @@ using SmiServices.Microservices.MongoDBPopulator.Processing; using System; -namespace SmiServices.Microservices.MongoDBPopulator +namespace SmiServices.Microservices.MongoDBPopulator; + +public class MongoDbPopulatorMessageConsumer : Consumer, IMongoDbPopulatorMessageConsumer + where T : IMessage { - public class MongoDbPopulatorMessageConsumer : Consumer, IMongoDbPopulatorMessageConsumer - where T : IMessage - { - public ConsumerOptions ConsumerOptions { get; } + public ConsumerOptions ConsumerOptions { get; } - public IMessageProcessor Processor { get; } + public IMessageProcessor Processor { get; } - private static readonly string _messageTypePrefix = typeof(T).Name + "Consumer: "; + private static readonly string _messageTypePrefix = typeof(T).Name + "Consumer: "; - private bool _hasThrown; + private bool _hasThrown; - public MongoDbPopulatorMessageConsumer(MongoDbOptions mongoDbOptions, MongoDbPopulatorOptions populatorOptions, ConsumerOptions consumerOptions) + public MongoDbPopulatorMessageConsumer(MongoDbOptions mongoDbOptions, MongoDbPopulatorOptions 
populatorOptions, ConsumerOptions consumerOptions) + { + if (typeof(T) == typeof(DicomFileMessage)) { - if (typeof(T) == typeof(DicomFileMessage)) - { - var mongoImageAdapter = new MongoDbAdapter("ImageMessageProcessor", mongoDbOptions, populatorOptions.ImageCollection!); - Processor = (IMessageProcessor)new ImageMessageProcessor(populatorOptions, mongoImageAdapter, consumerOptions.QoSPrefetchCount, ExceptionCallback); - } + var mongoImageAdapter = new MongoDbAdapter("ImageMessageProcessor", mongoDbOptions, populatorOptions.ImageCollection!); + Processor = (IMessageProcessor)new ImageMessageProcessor(populatorOptions, mongoImageAdapter, consumerOptions.QoSPrefetchCount, ExceptionCallback); + } - else if (typeof(T) == typeof(SeriesMessage)) - { - var mongoSeriesAdapter = new MongoDbAdapter("SeriesMessageProcessor", mongoDbOptions, populatorOptions.SeriesCollection!); - Processor = (IMessageProcessor)new SeriesMessageProcessor(populatorOptions, mongoSeriesAdapter, consumerOptions.QoSPrefetchCount, ExceptionCallback); - } + else if (typeof(T) == typeof(SeriesMessage)) + { + var mongoSeriesAdapter = new MongoDbAdapter("SeriesMessageProcessor", mongoDbOptions, populatorOptions.SeriesCollection!); + Processor = (IMessageProcessor)new SeriesMessageProcessor(populatorOptions, mongoSeriesAdapter, consumerOptions.QoSPrefetchCount, ExceptionCallback); + } - else - throw new ArgumentException("Message type " + typeof(T).Name + " not supported here"); + else + throw new ArgumentException("Message type " + typeof(T).Name + " not supported here"); - ConsumerOptions = consumerOptions; - Logger.Debug(_messageTypePrefix + "Constructed for " + typeof(T).Name); + ConsumerOptions = consumerOptions; + Logger.Debug(_messageTypePrefix + "Constructed for " + typeof(T).Name); - Processor.OnAck += (o, a) => { Ack(a.Header, a.DeliveryTag); }; - } + Processor.OnAck += (o, a) => { Ack(a.Header, a.DeliveryTag); }; + } - private void ExceptionCallback(Exception e) - { - //TODO Make this thread 
safe - // Prevent both consumers throwing for the same reason (e.g. timeout) - if (_hasThrown) - return; + private void ExceptionCallback(Exception e) + { + //TODO Make this thread safe + // Prevent both consumers throwing for the same reason (e.g. timeout) + if (_hasThrown) + return; - _hasThrown = true; + _hasThrown = true; - Fatal("Processor threw an exception", e); - } + Fatal("Processor threw an exception", e); + } + + protected override void ProcessMessageImpl(IMessageHeader header, T message, ulong tag) + { + // We are shutting down anyway + if (Processor.IsStopping) + return; - protected override void ProcessMessageImpl(IMessageHeader header, T message, ulong tag) + try { - // We are shutting down anyway - if (Processor.IsStopping) - return; - - try - { - ((IMessageProcessor)Processor).AddToWriteQueue(message, header, tag); - } - catch (ApplicationException e) - { - // Catch specific exceptions we are aware of, any uncaught will bubble up to the wrapper in ProcessMessage - - ErrorAndNack(header, tag, "Error while processing " + typeof(T).Name, e); - - // ReSharper disable once RedundantJumpStatement - return; - } + ((IMessageProcessor)Processor).AddToWriteQueue(message, header, tag); + } + catch (ApplicationException e) + { + // Catch specific exceptions we are aware of, any uncaught will bubble up to the wrapper in ProcessMessage + + ErrorAndNack(header, tag, "Error while processing " + typeof(T).Name, e); + + // ReSharper disable once RedundantJumpStatement + return; } } } diff --git a/src/SmiServices/Microservices/MongoDBPopulator/Processing/IMessageProcessor.cs b/src/SmiServices/Microservices/MongoDBPopulator/Processing/IMessageProcessor.cs index 7adf122fc..af142a5fd 100644 --- a/src/SmiServices/Microservices/MongoDBPopulator/Processing/IMessageProcessor.cs +++ b/src/SmiServices/Microservices/MongoDBPopulator/Processing/IMessageProcessor.cs @@ -2,40 +2,39 @@ using SmiServices.Common.Events; using SmiServices.Common.Messages; -namespace 
SmiServices.Microservices.MongoDBPopulator.Processing +namespace SmiServices.Microservices.MongoDBPopulator.Processing; + +/// +/// Interface for classes which process (s) into MongoDb +/// +/// +public interface IMessageProcessor : IMessageProcessor where T : IMessage { /// - /// Interface for classes which process (s) into MongoDb + /// Add a message to the write queue /// - /// - public interface IMessageProcessor : IMessageProcessor where T : IMessage - { - /// - /// Add a message to the write queue - /// - /// Message to write - /// - /// Delivery tag for the message acknowledgement - void AddToWriteQueue(T message, IMessageHeader header, ulong deliveryTag); - } + /// Message to write + /// + /// Delivery tag for the message acknowledgement + void AddToWriteQueue(T message, IMessageHeader header, ulong deliveryTag); +} - public interface IMessageProcessor - { - /// - /// Indicates if the processor is stopping and no more messages should be queued for processing - /// - bool IsStopping { get; } +public interface IMessageProcessor +{ + /// + /// Indicates if the processor is stopping and no more messages should be queued for processing + /// + bool IsStopping { get; } - event SmiAckEventHandler? OnAck; + event SmiAckEventHandler? 
OnAck; - /// - /// Count of the total number of acknowledged messages during this processors lifetime - /// - int AckCount { get; } + /// + /// Count of the total number of acknowledged messages during this processors lifetime + /// + int AckCount { get; } - /// - /// Stops the continuous processing of messages - /// - void StopProcessing(string reason); - } + /// + /// Stops the continuous processing of messages + /// + void StopProcessing(string reason); } diff --git a/src/SmiServices/Microservices/MongoDBPopulator/Processing/ImageMessageProcessor.cs b/src/SmiServices/Microservices/MongoDBPopulator/Processing/ImageMessageProcessor.cs index 387476e74..63e24c1f1 100644 --- a/src/SmiServices/Microservices/MongoDBPopulator/Processing/ImageMessageProcessor.cs +++ b/src/SmiServices/Microservices/MongoDBPopulator/Processing/ImageMessageProcessor.cs @@ -10,143 +10,142 @@ using System.Collections.Generic; using System.Linq; -namespace SmiServices.Microservices.MongoDBPopulator.Processing +namespace SmiServices.Microservices.MongoDBPopulator.Processing; + +/// +/// Delegate class used to perform the actual processing of messages +/// +public class ImageMessageProcessor : MessageProcessor { + private const string MongoLogMessage = "Added to write queue"; + /// - /// Delegate class used to perform the actual processing of messages + /// Constructor /// - public class ImageMessageProcessor : MessageProcessor - { - private const string MongoLogMessage = "Added to write queue"; + /// + /// + /// + /// + public ImageMessageProcessor(MongoDbPopulatorOptions options, IMongoDbAdapter mongoDbAdapter, int maxQueueSize, Action exceptionCallback) + : base(options, mongoDbAdapter, maxQueueSize, exceptionCallback) { } - /// - /// Constructor - /// - /// - /// - /// - /// - public ImageMessageProcessor(MongoDbPopulatorOptions options, IMongoDbAdapter mongoDbAdapter, int maxQueueSize, Action exceptionCallback) - : base(options, mongoDbAdapter, maxQueueSize, exceptionCallback) { } + public 
override void AddToWriteQueue(DicomFileMessage message, IMessageHeader header, ulong deliveryTag) + { + // Only time we are not processing is if we are shutting down anyway + if (IsStopping) + return; + + DicomDataset dataset; - public override void AddToWriteQueue(DicomFileMessage message, IMessageHeader header, ulong deliveryTag) + try { - // Only time we are not processing is if we are shutting down anyway - if (IsStopping) - return; + dataset = DicomTypeTranslater.DeserializeJsonToDataset(message.DicomDataset); + } + catch (Exception e) + { + throw new ApplicationException("Could not deserialize json to dataset", e); + } - DicomDataset dataset; + BsonDocument datasetDoc; - try - { - dataset = DicomTypeTranslater.DeserializeJsonToDataset(message.DicomDataset); - } - catch (Exception e) - { - throw new ApplicationException("Could not deserialize json to dataset", e); - } + try + { + datasetDoc = DicomTypeTranslaterReader.BuildBsonDocument(dataset); + } + catch (Exception e) + { + throw new ApplicationException("Exception converting dataset to BsonDocument", e); + } - BsonDocument datasetDoc; + // Generate a new header to record the current service before storing in MongoDB + var newHeader = new MessageHeader(header); + newHeader.Log(Logger, LogLevel.Trace, MongoLogMessage); - try - { - datasetDoc = DicomTypeTranslaterReader.BuildBsonDocument(dataset); - } - catch (Exception e) - { - throw new ApplicationException("Exception converting dataset to BsonDocument", e); - } + BsonDocument bsonHeader = MongoDocumentHeaders.ImageDocumentHeader(message, newHeader); - // Generate a new header to record the current service before storing in MongoDB - var newHeader = new MessageHeader(header); - newHeader.Log(Logger, LogLevel.Trace, MongoLogMessage); + BsonDocument document = new BsonDocument() + .Add("header", bsonHeader) + .AddRange(datasetDoc); - BsonDocument bsonHeader = MongoDocumentHeaders.ImageDocumentHeader(message, newHeader); + int docByteLength = 
document.ToBson().Length; - BsonDocument document = new BsonDocument() - .Add("header", bsonHeader) - .AddRange(datasetDoc); + if (docByteLength > MaxDocumentSize) + throw new ApplicationException("BsonDocument was larger than the max allowed size (have " + docByteLength + ", max is " + MaxDocumentSize + ")"); - int docByteLength = document.ToBson().Length; + var forceProcess = false; - if (docByteLength > MaxDocumentSize) - throw new ApplicationException("BsonDocument was larger than the max allowed size (have " + docByteLength + ", max is " + MaxDocumentSize + ")"); + lock (LockObj) + { + ToProcess.Enqueue(new Tuple(document, header, deliveryTag)); - var forceProcess = false; + if (ToProcess.Count >= MaxQueueSize) + forceProcess = true; + } - lock (LockObj) - { - ToProcess.Enqueue(new Tuple(document, header, deliveryTag)); + if (!forceProcess) + return; - if (ToProcess.Count >= MaxQueueSize) - forceProcess = true; - } + Logger.Debug("ImageMessageProcessor: Max queue size reached, calling ProcessQueue"); + ProcessQueue(); + } - if (!forceProcess) + /// + /// Writes all messages currently in the queue to MongoDb and acknowledges + /// + protected override void ProcessQueue() + { + lock (LockObj) + { + if (ToProcess.Count == 0) return; - Logger.Debug("ImageMessageProcessor: Max queue size reached, calling ProcessQueue"); - ProcessQueue(); - } + Logger.Info($"Queue contains {ToProcess.Count} message to write"); - /// - /// Writes all messages currently in the queue to MongoDb and acknowledges - /// - protected override void ProcessQueue() - { - lock (LockObj) + foreach ((string modality, List modalityDocs) in + MongoModalityGroups.GetModalityChunks(ToProcess.Select(x => x.Item1).ToList())) { - if (ToProcess.Count == 0) - return; + Logger.Debug($"Attempting to write {modalityDocs.Count} documents of modality {modality}"); - Logger.Info($"Queue contains {ToProcess.Count} message to write"); - - foreach ((string modality, List modalityDocs) in - 
MongoModalityGroups.GetModalityChunks(ToProcess.Select(x => x.Item1).ToList())) + while (FailedWriteAttempts < FailedWriteLimit) { - Logger.Debug($"Attempting to write {modalityDocs.Count} documents of modality {modality}"); + WriteResult imageWriteResult = MongoDbAdapter.WriteMany(modalityDocs, modality); - while (FailedWriteAttempts < FailedWriteLimit) + if (imageWriteResult == WriteResult.Success) { - WriteResult imageWriteResult = MongoDbAdapter.WriteMany(modalityDocs, modality); + Logger.Debug($"Wrote {modalityDocs.Count} documents successfully, sending ACKs"); - if (imageWriteResult == WriteResult.Success) + // Hopefully this uses ReferenceEquals, otherwise will be slow... + foreach ( + var (_, header, deliveryTag) in + ToProcess.Where(x => modalityDocs.Contains(x.Item1)) + ) { - Logger.Debug($"Wrote {modalityDocs.Count} documents successfully, sending ACKs"); - - // Hopefully this uses ReferenceEquals, otherwise will be slow... - foreach ( - var (_, header, deliveryTag) in - ToProcess.Where(x => modalityDocs.Contains(x.Item1)) - ) - { - Ack(header, deliveryTag); - } - - AckCount += modalityDocs.Count; - FailedWriteAttempts = 0; - break; + Ack(header, deliveryTag); } - Logger.Warn($"Failed to write {FailedWriteAttempts + 1} time(s) in a row"); + AckCount += modalityDocs.Count; + FailedWriteAttempts = 0; + break; + } - if (++FailedWriteAttempts < FailedWriteLimit) - continue; + Logger.Warn($"Failed to write {FailedWriteAttempts + 1} time(s) in a row"); - throw new ApplicationException("Failed write attempts exceeded"); - } - } + if (++FailedWriteAttempts < FailedWriteLimit) + continue; - Logger.Debug("Wrote and acknowledged all documents in queue. 
Clearing and continutig"); - ToProcess.Clear(); + throw new ApplicationException("Failed write attempts exceeded"); + } } - } - public override void StopProcessing(string reason) - { - Logger.Debug("ImageMessageProcessor: Stopping (" + reason + ")"); - StopProcessing(); + Logger.Debug("Wrote and acknowledged all documents in queue. Clearing and continutig"); + ToProcess.Clear(); } } + + public override void StopProcessing(string reason) + { + Logger.Debug("ImageMessageProcessor: Stopping (" + reason + ")"); + StopProcessing(); + } } diff --git a/src/SmiServices/Microservices/MongoDBPopulator/Processing/MessageProcessor.cs b/src/SmiServices/Microservices/MongoDBPopulator/Processing/MessageProcessor.cs index f425c12ed..5f561b05d 100644 --- a/src/SmiServices/Microservices/MongoDBPopulator/Processing/MessageProcessor.cs +++ b/src/SmiServices/Microservices/MongoDBPopulator/Processing/MessageProcessor.cs @@ -8,120 +8,119 @@ using System.Collections.Generic; using SysTimers = System.Timers; -namespace SmiServices.Microservices.MongoDBPopulator.Processing +namespace SmiServices.Microservices.MongoDBPopulator.Processing; + +/// +/// +/// Abstract class containing the common functionality of both the processor classes +/// +/// +public abstract class MessageProcessor : IMessageProcessor where T : IMessage { + #region Abstract Fields/Properties + + /// + public abstract void AddToWriteQueue(T message, IMessageHeader header, ulong deliveryTag); + /// /// - /// Abstract class containing the common functionality of both the processor classes + /// Stop the processing of further messages /// - /// - public abstract class MessageProcessor : IMessageProcessor where T : IMessage - { - #region Abstract Fields/Properties + /// + public abstract void StopProcessing(string reason); - /// - public abstract void AddToWriteQueue(T message, IMessageHeader header, ulong deliveryTag); + protected abstract void ProcessQueue(); - /// - /// - /// Stop the processing of further messages - /// - 
/// - public abstract void StopProcessing(string reason); + #endregion - protected abstract void ProcessQueue(); + #region Concrete Fields/Properties - #endregion + public event SmiAckEventHandler? OnAck; - #region Concrete Fields/Properties - - public event SmiAckEventHandler? OnAck; - - /// - /// - /// Indicates if the object is actively processing messages - /// - public bool IsStopping { get; private set; } + /// + /// + /// Indicates if the object is actively processing messages + /// + public bool IsStopping { get; private set; } - /// - /// - /// Total number of messages acknowledged during the processor's lifetime - /// - public int AckCount { get; protected set; } + /// + /// + /// Total number of messages acknowledged during the processor's lifetime + /// + public int AckCount { get; protected set; } - //TODO Check this is named properly in subclasses - protected readonly ILogger Logger; + //TODO Check this is named properly in subclasses + protected readonly ILogger Logger; - protected readonly IMongoDbAdapter MongoDbAdapter; + protected readonly IMongoDbAdapter MongoDbAdapter; - // Keeps track of the number of consecutive times we have failed a write attempt - protected int FailedWriteAttempts; - protected readonly int FailedWriteLimit; + // Keeps track of the number of consecutive times we have failed a write attempt + protected int FailedWriteAttempts; + protected readonly int FailedWriteLimit; - protected readonly Queue> ToProcess = new(); - protected readonly int MaxQueueSize; - protected readonly object LockObj = new(); - private readonly SysTimers.Timer _processTimer; + protected readonly Queue> ToProcess = new(); + protected readonly int MaxQueueSize; + protected readonly object LockObj = new(); + private readonly SysTimers.Timer _processTimer; - private readonly Action _exceptionCallback; + private readonly Action _exceptionCallback; - // Set the max size to 16MB minus some overhead - protected const int MaxDocumentSize = 16 * 1024 * 1024 - 512; 
+ // Set the max size to 16MB minus some overhead + protected const int MaxDocumentSize = 16 * 1024 * 1024 - 512; - /// - /// Constructor - /// - /// - /// - /// - /// - protected MessageProcessor(MongoDbPopulatorOptions options, IMongoDbAdapter mongoDbAdapter, int maxQueueSize, Action exceptionCallback) - { - Logger = LogManager.GetLogger(GetType().Name); + /// + /// Constructor + /// + /// + /// + /// + /// + protected MessageProcessor(MongoDbPopulatorOptions options, IMongoDbAdapter mongoDbAdapter, int maxQueueSize, Action exceptionCallback) + { + Logger = LogManager.GetLogger(GetType().Name); - _exceptionCallback = exceptionCallback; + _exceptionCallback = exceptionCallback; - MongoDbAdapter = mongoDbAdapter; - FailedWriteLimit = options.FailedWriteLimit; + MongoDbAdapter = mongoDbAdapter; + FailedWriteLimit = options.FailedWriteLimit; - MaxQueueSize = maxQueueSize; + MaxQueueSize = maxQueueSize; - _processTimer = new SysTimers.Timer(Math.Min(int.MaxValue, (double)options.MongoDbFlushTime * 1000)); - _processTimer.Elapsed += TimerElapsedEvent; - _processTimer.Start(); + _processTimer = new SysTimers.Timer(Math.Min(int.MaxValue, (double)options.MongoDbFlushTime * 1000)); + _processTimer.Elapsed += TimerElapsedEvent; + _processTimer.Start(); - IsStopping = false; - } + IsStopping = false; + } - private void TimerElapsedEvent(object? source, SysTimers.ElapsedEventArgs e) + private void TimerElapsedEvent(object? 
source, SysTimers.ElapsedEventArgs e) + { + try { - try - { - ProcessQueue(); - } - catch (Exception ex) - { - StopProcessing("Timed ProcessQueue threw an exception"); - _exceptionCallback(ex); - } + ProcessQueue(); } - - protected void StopProcessing() + catch (Exception ex) { - // Ensures no more messages are added to the queue - _processTimer.Stop(); - IsStopping = true; - - // Forces process to wait until any current processing is finished - lock (LockObj) - Logger.Debug("Lock released, no more messages will be processed"); + StopProcessing("Timed ProcessQueue threw an exception"); + _exceptionCallback(ex); } + } - protected void Ack(IMessageHeader header, ulong deliveryTag) - { - OnAck?.Invoke(this, new SmiAckEventArgs(header) { DeliveryTag = deliveryTag, Multiple = false }); - } + protected void StopProcessing() + { + // Ensures no more messages are added to the queue + _processTimer.Stop(); + IsStopping = true; - #endregion + // Forces process to wait until any current processing is finished + lock (LockObj) + Logger.Debug("Lock released, no more messages will be processed"); } + + protected void Ack(IMessageHeader header, ulong deliveryTag) + { + OnAck?.Invoke(this, new SmiAckEventArgs(header) { DeliveryTag = deliveryTag, Multiple = false }); + } + + #endregion } diff --git a/src/SmiServices/Microservices/MongoDBPopulator/Processing/SeriesMessageProcessor.cs b/src/SmiServices/Microservices/MongoDBPopulator/Processing/SeriesMessageProcessor.cs index ccefd6d0a..97041e644 100644 --- a/src/SmiServices/Microservices/MongoDBPopulator/Processing/SeriesMessageProcessor.cs +++ b/src/SmiServices/Microservices/MongoDBPopulator/Processing/SeriesMessageProcessor.cs @@ -10,126 +10,125 @@ using System.Linq; -namespace SmiServices.Microservices.MongoDBPopulator.Processing +namespace SmiServices.Microservices.MongoDBPopulator.Processing; + +/// +/// Delegate class used to perform the actual processing of messages +/// +public class SeriesMessageProcessor : MessageProcessor 
{ /// - /// Delegate class used to perform the actual processing of messages + /// Constructor /// - public class SeriesMessageProcessor : MessageProcessor + /// + /// + /// + /// + public SeriesMessageProcessor(MongoDbPopulatorOptions options, IMongoDbAdapter mongoDbAdapter, int maxQueueSize, Action exceptionCallback) + : base(options, mongoDbAdapter, maxQueueSize, exceptionCallback) { } + + public override void AddToWriteQueue(SeriesMessage message, IMessageHeader header, ulong deliveryTag) { - /// - /// Constructor - /// - /// - /// - /// - /// - public SeriesMessageProcessor(MongoDbPopulatorOptions options, IMongoDbAdapter mongoDbAdapter, int maxQueueSize, Action exceptionCallback) - : base(options, mongoDbAdapter, maxQueueSize, exceptionCallback) { } - - public override void AddToWriteQueue(SeriesMessage message, IMessageHeader header, ulong deliveryTag) - { - // Only time we are not processing is if we are shutting down anyway - if (IsStopping) - return; + // Only time we are not processing is if we are shutting down anyway + if (IsStopping) + return; - DicomDataset dataset; + DicomDataset dataset; - try - { - dataset = DicomTypeTranslater.DeserializeJsonToDataset(message.DicomDataset); + try + { + dataset = DicomTypeTranslater.DeserializeJsonToDataset(message.DicomDataset); - } - catch (Exception e) - { - throw new ApplicationException("Could not deserialize json to dataset", e); - } + } + catch (Exception e) + { + throw new ApplicationException("Could not deserialize json to dataset", e); + } - BsonDocument datasetDoc; + BsonDocument datasetDoc; - try - { - datasetDoc = DicomTypeTranslaterReader.BuildBsonDocument(dataset); - } - catch (Exception e) - { - throw new ApplicationException("Exception converting dataset to BsonDocument", e); - } + try + { + datasetDoc = DicomTypeTranslaterReader.BuildBsonDocument(dataset); + } + catch (Exception e) + { + throw new ApplicationException("Exception converting dataset to BsonDocument", e); + } - BsonDocument 
bsonHeader = MongoDocumentHeaders.SeriesDocumentHeader(message); + BsonDocument bsonHeader = MongoDocumentHeaders.SeriesDocumentHeader(message); - BsonDocument document = new BsonDocument() - .Add("header", bsonHeader) - .AddRange(datasetDoc); + BsonDocument document = new BsonDocument() + .Add("header", bsonHeader) + .AddRange(datasetDoc); - int docByteLength = document.ToBson().Length; - if (docByteLength > MaxDocumentSize) - throw new ApplicationException($"BsonDocument was larger than the max allowed size (have {docByteLength}, max is {MaxDocumentSize})"); + int docByteLength = document.ToBson().Length; + if (docByteLength > MaxDocumentSize) + throw new ApplicationException($"BsonDocument was larger than the max allowed size (have {docByteLength}, max is {MaxDocumentSize})"); - var forceProcess = false; + var forceProcess = false; - lock (LockObj) - { - ToProcess.Enqueue(new Tuple(document, header, deliveryTag)); + lock (LockObj) + { + ToProcess.Enqueue(new Tuple(document, header, deliveryTag)); - if (ToProcess.Count >= MaxQueueSize) - forceProcess = true; - } + if (ToProcess.Count >= MaxQueueSize) + forceProcess = true; + } - if (!forceProcess) - return; + if (!forceProcess) + return; - Logger.Debug("SeriesMessageProcessor: Max queue size reached, calling ProcessQueue"); - ProcessQueue(); - } + Logger.Debug("SeriesMessageProcessor: Max queue size reached, calling ProcessQueue"); + ProcessQueue(); + } - /// - /// Writes all messages currently in the queue to MongoDb and acknowledges - /// - protected override void ProcessQueue() + /// + /// Writes all messages currently in the queue to MongoDb and acknowledges + /// + protected override void ProcessQueue() + { + lock (LockObj) { - lock (LockObj) - { - if (ToProcess.Count == 0) - return; + if (ToProcess.Count == 0) + return; - Logger.Debug("SeriesMessageProcessor: Queue contains " + ToProcess.Count + " message to write"); + Logger.Debug("SeriesMessageProcessor: Queue contains " + ToProcess.Count + " message to 
write"); - IEnumerable batchDirectories = ToProcess.Select(t => t.Item1.GetValue("header")["DirectoryPath"].AsString).Distinct(); - Logger.Trace($"Writing series from directories: {string.Join(", ", batchDirectories)}"); + IEnumerable batchDirectories = ToProcess.Select(t => t.Item1.GetValue("header")["DirectoryPath"].AsString).Distinct(); + Logger.Trace($"Writing series from directories: {string.Join(", ", batchDirectories)}"); - WriteResult seriesWriteResult = MongoDbAdapter.WriteMany(ToProcess.Select(t => t.Item1).ToList()); + WriteResult seriesWriteResult = MongoDbAdapter.WriteMany(ToProcess.Select(t => t.Item1).ToList()); - // Result => Need to differentiate between connection loss and error in the data to be written - // As well as making sure either all are written or none + // Result => Need to differentiate between connection loss and error in the data to be written + // As well as making sure either all are written or none - if (seriesWriteResult == WriteResult.Success) - { - Logger.Debug("SeriesMessageProcessor: Wrote " + ToProcess.Count + " messages successfully, sending ACKs"); + if (seriesWriteResult == WriteResult.Success) + { + Logger.Debug("SeriesMessageProcessor: Wrote " + ToProcess.Count + " messages successfully, sending ACKs"); - foreach (var (_, header, deliveryTag) in ToProcess) - Ack(header, deliveryTag); + foreach (var (_, header, deliveryTag) in ToProcess) + Ack(header, deliveryTag); - AckCount += ToProcess.Count; - ToProcess.Clear(); - FailedWriteAttempts = 0; - } - else - { - Logger.Warn($"SeriesMessageProcessor: Failed to write {FailedWriteAttempts + 1} time(s) in a row"); + AckCount += ToProcess.Count; + ToProcess.Clear(); + FailedWriteAttempts = 0; + } + else + { + Logger.Warn($"SeriesMessageProcessor: Failed to write {FailedWriteAttempts + 1} time(s) in a row"); - if (++FailedWriteAttempts < FailedWriteLimit) - return; + if (++FailedWriteAttempts < FailedWriteLimit) + return; - throw new ApplicationException("Failed write attempts 
exceeded"); - } + throw new ApplicationException("Failed write attempts exceeded"); } } + } - public override void StopProcessing(string reason) - { - Logger.Debug("SeriesMessageProcessor: Stopping (" + reason + ")"); - StopProcessing(); - } + public override void StopProcessing(string reason) + { + Logger.Debug("SeriesMessageProcessor: Stopping (" + reason + ")"); + StopProcessing(); } } diff --git a/src/SmiServices/Microservices/UpdateValues/IUpdater.cs b/src/SmiServices/Microservices/UpdateValues/IUpdater.cs index b91fe7617..2ab13fc8d 100644 --- a/src/SmiServices/Microservices/UpdateValues/IUpdater.cs +++ b/src/SmiServices/Microservices/UpdateValues/IUpdater.cs @@ -1,14 +1,13 @@ using SmiServices.Common.Messages.Updating; -namespace SmiServices.Microservices.UpdateValues +namespace SmiServices.Microservices.UpdateValues; + +public interface IUpdater { - public interface IUpdater - { - /// - /// Update one or more database tables to fully propagate to all relevant tables - /// - /// What should be updated - /// total number of rows updated in the database(s) - int HandleUpdate(UpdateValuesMessage message); - } + /// + /// Update one or more database tables to fully propagate to all relevant tables + /// + /// What should be updated + /// total number of rows updated in the database(s) + int HandleUpdate(UpdateValuesMessage message); } diff --git a/src/SmiServices/Microservices/UpdateValues/UpdateTableAudit.cs b/src/SmiServices/Microservices/UpdateValues/UpdateTableAudit.cs index dc2dcfe70..ea1f6d9d6 100644 --- a/src/SmiServices/Microservices/UpdateValues/UpdateTableAudit.cs +++ b/src/SmiServices/Microservices/UpdateValues/UpdateTableAudit.cs @@ -2,73 +2,72 @@ using System.Diagnostics; using System.Threading; -namespace SmiServices.Microservices.UpdateValues +namespace SmiServices.Microservices.UpdateValues; + +public class UpdateTableAudit { - public class UpdateTableAudit - { - /// - /// The number of update queries that have been sent to the table so far - /// 
- public int Queries; + /// + /// The number of update queries that have been sent to the table so far + /// + public int Queries; - /// - /// The total amount of affected rows returned from the DBMS across all queries sent - /// + /// + /// The total amount of affected rows returned from the DBMS across all queries sent + /// - public int AffectedRows; + public int AffectedRows; - /// - /// The total length of time spent running queries on this - /// - public Stopwatch Stopwatch { get; } = new Stopwatch(); + /// + /// The total length of time spent running queries on this + /// + public Stopwatch Stopwatch { get; } = new Stopwatch(); - /// - /// The number of queries currently executing - /// - public int ExecutingQueries = 0; + /// + /// The number of queries currently executing + /// + public int ExecutingQueries = 0; - /// - /// Lock for - /// - private readonly object lockWatch = new(); + /// + /// Lock for + /// + private readonly object lockWatch = new(); - /// - /// The table that is being updated - /// - public DiscoveredTable? Table { get; } + /// + /// The table that is being updated + /// + public DiscoveredTable? Table { get; } - public UpdateTableAudit(DiscoveredTable? t) - { - Table = t; - } + public UpdateTableAudit(DiscoveredTable? 
t) + { + Table = t; + } - public void StartOne() - { - Interlocked.Increment(ref ExecutingQueries); - Interlocked.Increment(ref Queries); + public void StartOne() + { + Interlocked.Increment(ref ExecutingQueries); + Interlocked.Increment(ref Queries); - Stopwatch.Start(); - } + Stopwatch.Start(); + } - public void EndOne(int affectedRows) - { - Interlocked.Add(ref AffectedRows, affectedRows); - Interlocked.Decrement(ref ExecutingQueries); + public void EndOne(int affectedRows) + { + Interlocked.Add(ref AffectedRows, affectedRows); + Interlocked.Decrement(ref ExecutingQueries); - lock (lockWatch) + lock (lockWatch) + { + if (ExecutingQueries == 0) { - if (ExecutingQueries == 0) - { - Stopwatch.Stop(); - } + Stopwatch.Stop(); } } + } - public override string ToString() - { - return $"Table:{Table?.GetFullyQualifiedName()} Queries:{Queries} Time:{Stopwatch.Elapsed:c} AffectedRows:{AffectedRows:N0} ExecutingQueries:{ExecutingQueries}"; - } + public override string ToString() + { + return $"Table:{Table?.GetFullyQualifiedName()} Queries:{Queries} Time:{Stopwatch.Elapsed:c} AffectedRows:{AffectedRows:N0} ExecutingQueries:{ExecutingQueries}"; } } diff --git a/src/SmiServices/Microservices/UpdateValues/UpdateValues.cs b/src/SmiServices/Microservices/UpdateValues/UpdateValues.cs index 1c1b6b6f9..4b1ba74c1 100644 --- a/src/SmiServices/Microservices/UpdateValues/UpdateValues.cs +++ b/src/SmiServices/Microservices/UpdateValues/UpdateValues.cs @@ -4,26 +4,25 @@ using System.Diagnostics.CodeAnalysis; -namespace SmiServices.Microservices.UpdateValues +namespace SmiServices.Microservices.UpdateValues; + +public static class UpdateValues { - public static class UpdateValues + [ExcludeFromCodeCoverage] + public static int Main(IEnumerable args) { - [ExcludeFromCodeCoverage] - public static int Main(IEnumerable args) - { - return SmiCliInit - .ParseAndRun( - args, - nameof(UpdateValues), - OnParse - ); - } + return SmiCliInit + .ParseAndRun( + args, + nameof(UpdateValues), + OnParse 
+ ); + } - private static int OnParse(GlobalOptions globals, UpdateValuesCliOptions opts) - { - var bootstrapper = new MicroserviceHostBootstrapper(() => new UpdateValuesHost(globals)); - int ret = bootstrapper.Main(); - return ret; - } + private static int OnParse(GlobalOptions globals, UpdateValuesCliOptions opts) + { + var bootstrapper = new MicroserviceHostBootstrapper(() => new UpdateValuesHost(globals)); + int ret = bootstrapper.Main(); + return ret; } } diff --git a/src/SmiServices/Microservices/UpdateValues/UpdateValuesCliOptions.cs b/src/SmiServices/Microservices/UpdateValues/UpdateValuesCliOptions.cs index b9bb018a1..db2bda632 100644 --- a/src/SmiServices/Microservices/UpdateValues/UpdateValuesCliOptions.cs +++ b/src/SmiServices/Microservices/UpdateValues/UpdateValuesCliOptions.cs @@ -1,9 +1,8 @@ using SmiServices.Common.Options; -namespace SmiServices.Microservices.UpdateValues +namespace SmiServices.Microservices.UpdateValues; + +public class UpdateValuesCliOptions : CliOptions { - public class UpdateValuesCliOptions : CliOptions - { - } } diff --git a/src/SmiServices/Microservices/UpdateValues/UpdateValuesHost.cs b/src/SmiServices/Microservices/UpdateValues/UpdateValuesHost.cs index fd7e1d82a..53e04fc4a 100644 --- a/src/SmiServices/Microservices/UpdateValues/UpdateValuesHost.cs +++ b/src/SmiServices/Microservices/UpdateValues/UpdateValuesHost.cs @@ -3,25 +3,24 @@ using SmiServices.Common.Execution; using SmiServices.Common.Options; -namespace SmiServices.Microservices.UpdateValues +namespace SmiServices.Microservices.UpdateValues; + +public class UpdateValuesHost : MicroserviceHost { - public class UpdateValuesHost : MicroserviceHost - { - public UpdateValuesQueueConsumer? Consumer { get; set; } + public UpdateValuesQueueConsumer? Consumer { get; set; } - public UpdateValuesHost(GlobalOptions globals, IMessageBroker? 
messageBroker = null) - : base(globals, messageBroker) - { - FansiImplementations.Load(); - } + public UpdateValuesHost(GlobalOptions globals, IMessageBroker? messageBroker = null) + : base(globals, messageBroker) + { + FansiImplementations.Load(); + } - public override void Start() - { + public override void Start() + { - IRDMPPlatformRepositoryServiceLocator repositoryLocator = Globals.RDMPOptions!.GetRepositoryProvider(); - Consumer = new UpdateValuesQueueConsumer(Globals.UpdateValuesOptions!, repositoryLocator.CatalogueRepository); + IRDMPPlatformRepositoryServiceLocator repositoryLocator = Globals.RDMPOptions!.GetRepositoryProvider(); + Consumer = new UpdateValuesQueueConsumer(Globals.UpdateValuesOptions!, repositoryLocator.CatalogueRepository); - MessageBroker.StartConsumer(Globals.UpdateValuesOptions!, Consumer, isSolo: false); - } + MessageBroker.StartConsumer(Globals.UpdateValuesOptions!, Consumer, isSolo: false); } } diff --git a/src/SmiServices/Microservices/UpdateValues/UpdateValuesQueueConsumer.cs b/src/SmiServices/Microservices/UpdateValues/UpdateValuesQueueConsumer.cs index 0d6ee885d..ef41b2edd 100644 --- a/src/SmiServices/Microservices/UpdateValues/UpdateValuesQueueConsumer.cs +++ b/src/SmiServices/Microservices/UpdateValues/UpdateValuesQueueConsumer.cs @@ -5,35 +5,34 @@ using SmiServices.Common.Options; using System; -namespace SmiServices.Microservices.UpdateValues +namespace SmiServices.Microservices.UpdateValues; + +public class UpdateValuesQueueConsumer : Consumer { - public class UpdateValuesQueueConsumer : Consumer - { - private readonly Updater _updater; + private readonly Updater _updater; - public UpdateValuesQueueConsumer(UpdateValuesOptions opts, ICatalogueRepository repo) + public UpdateValuesQueueConsumer(UpdateValuesOptions opts, ICatalogueRepository repo) + { + _updater = new Updater(repo) { - _updater = new Updater(repo) - { - UpdateTimeout = opts.UpdateTimeout, - TableInfosToUpdate = opts.TableInfosToUpdate - }; - } + UpdateTimeout 
= opts.UpdateTimeout, + TableInfosToUpdate = opts.TableInfosToUpdate + }; + } - DateTime lastPerformanceAudit = new(2001, 1, 1); - readonly TimeSpan auditEvery = TimeSpan.FromSeconds(60); + DateTime lastPerformanceAudit = new(2001, 1, 1); + readonly TimeSpan auditEvery = TimeSpan.FromSeconds(60); - protected override void ProcessMessageImpl(IMessageHeader header, UpdateValuesMessage message, ulong tag) - { - _updater.HandleUpdate(message); + protected override void ProcessMessageImpl(IMessageHeader header, UpdateValuesMessage message, ulong tag) + { + _updater.HandleUpdate(message); - Ack(header, tag); + Ack(header, tag); - if (DateTime.Now.Subtract(lastPerformanceAudit) > auditEvery) - { - _updater.LogProgress(Logger, NLog.LogLevel.Trace); - lastPerformanceAudit = DateTime.Now; - } + if (DateTime.Now.Subtract(lastPerformanceAudit) > auditEvery) + { + _updater.LogProgress(Logger, NLog.LogLevel.Trace); + lastPerformanceAudit = DateTime.Now; } } } diff --git a/src/SmiServices/Microservices/UpdateValues/Updater.cs b/src/SmiServices/Microservices/UpdateValues/Updater.cs index 2ff0c5a38..c7d6ec532 100644 --- a/src/SmiServices/Microservices/UpdateValues/Updater.cs +++ b/src/SmiServices/Microservices/UpdateValues/Updater.cs @@ -9,160 +9,159 @@ using System.Linq; using System.Text; -namespace SmiServices.Microservices.UpdateValues +namespace SmiServices.Microservices.UpdateValues; + +public class Updater : IUpdater { - public class Updater : IUpdater - { - private readonly ICatalogueRepository _repository; + private readonly ICatalogueRepository _repository; - /// - /// Number of seconds the updater will wait when running a single value UPDATE on the live table e.g. ECHI A needs to be replaced with ECHI B - /// - public int UpdateTimeout { get; set; } = 1000000; + /// + /// Number of seconds the updater will wait when running a single value UPDATE on the live table e.g. 
ECHI A needs to be replaced with ECHI B + /// + public int UpdateTimeout { get; set; } = 1000000; - /// - /// List of IDs of that should be examined for update potential. If blank/empty then all tables will be considered. - /// - public int[] TableInfosToUpdate { get; internal set; } = []; + /// + /// List of IDs of that should be examined for update potential. If blank/empty then all tables will be considered. + /// + public int[] TableInfosToUpdate { get; internal set; } = []; - private readonly ConcurrentDictionary _audits = new(); + private readonly ConcurrentDictionary _audits = new(); - public Updater(ICatalogueRepository repository) - { - _repository = repository; - } + public Updater(ICatalogueRepository repository) + { + _repository = repository; + } - public int HandleUpdate(UpdateValuesMessage message) - { - message.Validate(); + public int HandleUpdate(UpdateValuesMessage message) + { + message.Validate(); - TableInfo[] tables; - var affectedRows = 0; + TableInfo[] tables; + var affectedRows = 0; - if (message.ExplicitTableInfo.Length != 0) - { - tables = _repository.GetAllObjectsInIDList(message.ExplicitTableInfo).ToArray(); + if (message.ExplicitTableInfo.Length != 0) + { + tables = _repository.GetAllObjectsInIDList(message.ExplicitTableInfo).ToArray(); - if (tables.Length != message.ExplicitTableInfo.Length) - { - throw new Exception($"Could not find all TableInfos IDs={string.Join(",", message.ExplicitTableInfo)}. Found {tables.Length}:{string.Join(",", tables.Select(t => t.ID))}"); - } - } - else + if (tables.Length != message.ExplicitTableInfo.Length) { - tables = GetAllTables(message.WhereFields.Union(message.WriteIntoFields).ToArray()).ToArray(); - - if (tables.Length == 0) - throw new Exception($"Could not find any tables to update that matched the field set {message}"); + throw new Exception($"Could not find all TableInfos IDs={string.Join(",", message.ExplicitTableInfo)}. 
Found {tables.Length}:{string.Join(",", tables.Select(t => t.ID))}"); } + } + else + { + tables = GetAllTables(message.WhereFields.Union(message.WriteIntoFields).ToArray()).ToArray(); - //don't try to update views - foreach (var tbl in tables.Where(static t => !t.IsView).Select(static t => - t.Discover(Rdmp.Core.ReusableLibraryCode.DataAccess.DataAccessContext.DataLoad))) - { - if (!tbl.Exists()) - throw new Exception($"Table {tbl} did not exist"); + if (tables.Length == 0) + throw new Exception($"Could not find any tables to update that matched the field set {message}"); + } - affectedRows += UpdateTable(tbl, message); - } + //don't try to update views + foreach (var tbl in tables.Where(static t => !t.IsView).Select(static t => + t.Discover(Rdmp.Core.ReusableLibraryCode.DataAccess.DataAccessContext.DataLoad))) + { + if (!tbl.Exists()) + throw new Exception($"Table {tbl} did not exist"); - return affectedRows; + affectedRows += UpdateTable(tbl, message); } - /// - /// Generates and runs an SQL command on - /// - /// - /// - protected virtual int UpdateTable(DiscoveredTable t, UpdateValuesMessage message) - { - var audit = _audits.GetOrAdd(t, static k => new UpdateTableAudit(k)); + return affectedRows; + } - var builder = new StringBuilder($"UPDATE {t.GetFullyQualifiedName()} SET "); - builder.AppendJoin(',', - message.WriteIntoFields.Select((field, i) => - GetFieldEqualsValueExpression(t.DiscoverColumn(message.WriteIntoFields[i]), message.Values[i], - "="))); + /// + /// Generates and runs an SQL command on + /// + /// + /// + protected virtual int UpdateTable(DiscoveredTable t, UpdateValuesMessage message) + { + var audit = _audits.GetOrAdd(t, static k => new UpdateTableAudit(k)); + + var builder = new StringBuilder($"UPDATE {t.GetFullyQualifiedName()} SET "); + builder.AppendJoin(',', + message.WriteIntoFields.Select((field, i) => + GetFieldEqualsValueExpression(t.DiscoverColumn(message.WriteIntoFields[i]), message.Values[i], + "="))); + + builder.AppendLine(" 
WHERE "); - builder.AppendLine(" WHERE "); + builder.AppendJoin(" AND ", message.WhereFields.Select((field, i) => + GetFieldEqualsValueExpression(t.DiscoverColumn(field ?? throw new ArgumentNullException(nameof(field))), + message.HaveValues[i]!, + message.Operators?[i]))); - builder.AppendJoin(" AND ", message.WhereFields.Select((field, i) => - GetFieldEqualsValueExpression(t.DiscoverColumn(field ?? throw new ArgumentNullException(nameof(field))), - message.HaveValues[i]!, - message.Operators?[i]))); + var sql = builder.ToString(); + var affectedRows = 0; + + audit.StartOne(); + try + { + using var con = t.Database.Server.GetConnection(); + con.Open(); - var sql = builder.ToString(); - var affectedRows = 0; + using var cmd = t.Database.Server.GetCommand(sql, con); + cmd.CommandTimeout = UpdateTimeout; - audit.StartOne(); try { - using var con = t.Database.Server.GetConnection(); - con.Open(); - - using var cmd = t.Database.Server.GetCommand(sql, con); - cmd.CommandTimeout = UpdateTimeout; - - try - { - return affectedRows = cmd.ExecuteNonQuery(); - } - catch (Exception ex) - { - throw new Exception($"Failed to execute query {sql} ", ex); - } + return affectedRows = cmd.ExecuteNonQuery(); } - finally + catch (Exception ex) { - audit.EndOne(affectedRows < 0 ? 0 : affectedRows); + throw new Exception($"Failed to execute query {sql} ", ex); } } - - /// - /// Returns the SQL string = - /// - /// LHS argument - /// RHS argument, if null then string literal "null" is used - /// The SQL operator to use, if null "=" is used - /// - protected static string GetFieldEqualsValueExpression(DiscoveredColumn col, string value, string? op) + finally { - StringBuilder builder = new(); + audit.EndOne(affectedRows < 0 ? 0 : affectedRows); + } + } - builder.Append(col.GetWrappedName()); - builder.Append(' '); - builder.Append(op ?? 
"="); - builder.Append(' '); + /// + /// Returns the SQL string = + /// + /// LHS argument + /// RHS argument, if null then string literal "null" is used + /// The SQL operator to use, if null "=" is used + /// + protected static string GetFieldEqualsValueExpression(DiscoveredColumn col, string value, string? op) + { + StringBuilder builder = new(); - builder.Append(string.IsNullOrWhiteSpace(value) ? "null" : value); + builder.Append(col.GetWrappedName()); + builder.Append(' '); + builder.Append(op ?? "="); + builder.Append(' '); - return builder.ToString(); - } + builder.Append(string.IsNullOrWhiteSpace(value) ? "null" : value); - /// - /// Returns all which have all the listed - /// - /// - /// - protected virtual IEnumerable GetAllTables(string?[] fields) - { - //the tables we should consider - var tables = TableInfosToUpdate.Length != 0 ? - _repository.GetAllObjectsInIDList(TableInfosToUpdate) : - _repository.GetAllObjects(); + return builder.ToString(); + } - // get only those that have all the WHERE/SET columns in them - return tables.Where(t => fields.All(f => t.ColumnInfos.Select(static c => c.GetRuntimeName()).Contains(f))); - } + /// + /// Returns all which have all the listed + /// + /// + /// + protected virtual IEnumerable GetAllTables(string?[] fields) + { + //the tables we should consider + var tables = TableInfosToUpdate.Length != 0 ? 
+ _repository.GetAllObjectsInIDList(TableInfosToUpdate) : + _repository.GetAllObjects(); + // get only those that have all the WHERE/SET columns in them + return tables.Where(t => fields.All(f => t.ColumnInfos.Select(static c => c.GetRuntimeName()).Contains(f))); + } - internal void LogProgress(ILogger logger, LogLevel level) + + internal void LogProgress(ILogger logger, LogLevel level) + { + // ToArray prevents modification during enumeration possibility + foreach (var audit in _audits.Values.ToArray()) { - // ToArray prevents modification during enumeration possibility - foreach (var audit in _audits.Values.ToArray()) - { - logger.Log(level, audit.ToString()); - } + logger.Log(level, audit.ToString()); } } } diff --git a/src/SmiServices/Program.cs b/src/SmiServices/Program.cs index 704258569..ffa2e6bd8 100644 --- a/src/SmiServices/Program.cs +++ b/src/SmiServices/Program.cs @@ -5,95 +5,94 @@ using System.Linq; -namespace SmiServices +namespace SmiServices; + +[ExcludeFromCodeCoverage] +public static class Program { - [ExcludeFromCodeCoverage] - public static class Program - { - public static readonly Type[] AllApplications = - [ - typeof(DicomLoaderVerb), - typeof(DicomDirectoryProcessorVerb), - typeof(ExtractImagesVerb), - typeof(TriggerUpdatesVerb), - typeof(SetupVerb), - typeof(DynamicRulesTesterVerb), - ]; + public static readonly Type[] AllApplications = + [ + typeof(DicomLoaderVerb), + typeof(DicomDirectoryProcessorVerb), + typeof(ExtractImagesVerb), + typeof(TriggerUpdatesVerb), + typeof(SetupVerb), + typeof(DynamicRulesTesterVerb), + ]; - public static readonly Type[] AllServices = - [ - typeof(CohortExtractorVerb), - typeof(DicomAnonymiserVerb), - typeof(CohortPackagerVerb), - typeof(DicomRelationalMapperVerb), - typeof(DicomReprocessorVerb), - typeof(DicomTagReaderVerb), - typeof(FileCopierVerb), - typeof(IdentifierMapperVerb), - typeof(IsIdentifiableVerb), - typeof(MongoDbPopulatorVerb), - typeof(UpdateValuesVerb), - ]; + public static readonly Type[] 
AllServices = + [ + typeof(CohortExtractorVerb), + typeof(DicomAnonymiserVerb), + typeof(CohortPackagerVerb), + typeof(DicomRelationalMapperVerb), + typeof(DicomReprocessorVerb), + typeof(DicomTagReaderVerb), + typeof(FileCopierVerb), + typeof(IdentifierMapperVerb), + typeof(IsIdentifiableVerb), + typeof(MongoDbPopulatorVerb), + typeof(UpdateValuesVerb), + ]; - public static int Main(string[] args) - { - var rest = args.Skip(1); + public static int Main(string[] args) + { + var rest = args.Skip(1); - var allTypes = new List(AllApplications); - allTypes.AddRange(AllServices); + var allTypes = new List(AllApplications); + allTypes.AddRange(AllServices); - int res; + int res; - try - { - res = SmiCliInit.ParseServiceVerbAndRun( - args.Take(1), - [.. allTypes], - service => + try + { + res = SmiCliInit.ParseServiceVerbAndRun( + args.Take(1), + [.. allTypes], + service => + { + // TODO(rkm 2021-02-26) Probably want to test that every case is covered here + return service switch { - // TODO(rkm 2021-02-26) Probably want to test that every case is covered here - return service switch - { - // Applications - DicomLoaderVerb _ => Applications.DicomLoader.DicomLoader.Main(rest), - DynamicRulesTesterVerb _ => Applications.DynamicRulesTester.DynamicRulesTester.Main(rest), - TriggerUpdatesVerb _ => Applications.TriggerUpdates.TriggerUpdates.Main(rest), - DicomDirectoryProcessorVerb _ => Applications.DicomDirectoryProcessor.DicomDirectoryProcessor.Main(rest), - ExtractImagesVerb _ => Applications.ExtractImages.ExtractImages.Main(rest), - SetupVerb _ => Applications.Setup.Setup.Main(rest), - - // Microservices - CohortExtractorVerb _ => Microservices.CohortExtractor.CohortExtractor.Main(rest), - CohortPackagerVerb _ => Microservices.CohortPackager.CohortPackager.Main(rest), - DicomAnonymiserVerb _ => Microservices.DicomAnonymiser.DicomAnonymiser.Main(rest), - DicomRelationalMapperVerb _ => Microservices.DicomRelationalMapper.DicomRelationalMapper.Main(rest), - 
DicomReprocessorVerb _ => Microservices.DicomReprocessor.DicomReprocessor.Main(rest), - DicomTagReaderVerb _ => Microservices.DicomTagReader.DicomTagReader.Main(rest), - FileCopierVerb _ => Microservices.FileCopier.FileCopier.Main(rest), - IdentifierMapperVerb _ => Microservices.IdentifierMapper.IdentifierMapper.Main(rest), - IsIdentifiableVerb _ => Microservices.IsIdentifiable.IsIdentifiable.Main(rest), - MongoDbPopulatorVerb _ => Microservices.MongoDBPopulator.MongoDBPopulator.Main(rest), - UpdateValuesVerb _ => Microservices.UpdateValues.UpdateValues.Main(rest), - _ => throw new ArgumentException($"No case for {nameof(service)}") - }; - } - ); - } - catch (Exception e) - { - Console.Error.WriteLine(e); - const int rc = 1; - Console.Error.WriteLine($"\nError (exit code {rc}): {e.Message}"); - return rc; - } + // Applications + DicomLoaderVerb _ => Applications.DicomLoader.DicomLoader.Main(rest), + DynamicRulesTesterVerb _ => Applications.DynamicRulesTester.DynamicRulesTester.Main(rest), + TriggerUpdatesVerb _ => Applications.TriggerUpdates.TriggerUpdates.Main(rest), + DicomDirectoryProcessorVerb _ => Applications.DicomDirectoryProcessor.DicomDirectoryProcessor.Main(rest), + ExtractImagesVerb _ => Applications.ExtractImages.ExtractImages.Main(rest), + SetupVerb _ => Applications.Setup.Setup.Main(rest), - if (args.Any(a => a.Equals("--help"))) - { - Console.WriteLine("Read more at:"); - Console.WriteLine("https://github.com/SMI/SmiServices/tree/main/"); - } + // Microservices + CohortExtractorVerb _ => Microservices.CohortExtractor.CohortExtractor.Main(rest), + CohortPackagerVerb _ => Microservices.CohortPackager.CohortPackager.Main(rest), + DicomAnonymiserVerb _ => Microservices.DicomAnonymiser.DicomAnonymiser.Main(rest), + DicomRelationalMapperVerb _ => Microservices.DicomRelationalMapper.DicomRelationalMapper.Main(rest), + DicomReprocessorVerb _ => Microservices.DicomReprocessor.DicomReprocessor.Main(rest), + DicomTagReaderVerb _ => 
Microservices.DicomTagReader.DicomTagReader.Main(rest), + FileCopierVerb _ => Microservices.FileCopier.FileCopier.Main(rest), + IdentifierMapperVerb _ => Microservices.IdentifierMapper.IdentifierMapper.Main(rest), + IsIdentifiableVerb _ => Microservices.IsIdentifiable.IsIdentifiable.Main(rest), + MongoDbPopulatorVerb _ => Microservices.MongoDBPopulator.MongoDBPopulator.Main(rest), + UpdateValuesVerb _ => Microservices.UpdateValues.UpdateValues.Main(rest), + _ => throw new ArgumentException($"No case for {nameof(service)}") + }; + } + ); + } + catch (Exception e) + { + Console.Error.WriteLine(e); + const int rc = 1; + Console.Error.WriteLine($"\nError (exit code {rc}): {e.Message}"); + return rc; + } - return res; + if (args.Any(a => a.Equals("--help"))) + { + Console.WriteLine("Read more at:"); + Console.WriteLine("https://github.com/SMI/SmiServices/tree/main/"); } + + return res; } } diff --git a/src/SmiServices/ServiceVerbs.cs b/src/SmiServices/ServiceVerbs.cs index 206c89644..8d5ce7cf1 100644 --- a/src/SmiServices/ServiceVerbs.cs +++ b/src/SmiServices/ServiceVerbs.cs @@ -1,78 +1,77 @@ using CommandLine; -namespace SmiServices +namespace SmiServices; + +public abstract class VerbBase { - public abstract class VerbBase - { - protected const string BaseHelpText = "See here at your release version: https://github.com/SMI/SmiServices/tree/main/"; - } + protected const string BaseHelpText = "See here at your release version: https://github.com/SMI/SmiServices/tree/main/"; +} - #region Applications +#region Applications - public abstract class ApplicationVerbBase : VerbBase - { - protected new const string BaseHelpText = VerbBase.BaseHelpText + "src/applications/Applications."; - } +public abstract class ApplicationVerbBase : VerbBase +{ + protected new const string BaseHelpText = VerbBase.BaseHelpText + "src/applications/Applications."; +} - [Verb("dicom-loader", HelpText = "Load DICOM files into MongoDB")] - public sealed class DicomLoaderVerb : ApplicationVerbBase { 
} +[Verb("dicom-loader", HelpText = "Load DICOM files into MongoDB")] +public sealed class DicomLoaderVerb : ApplicationVerbBase { } - [Verb("dynamic-rules-tester", HelpText = "Tester for DynamicRules configurations")] - public sealed class DynamicRulesTesterVerb : ApplicationVerbBase { } +[Verb("dynamic-rules-tester", HelpText = "Tester for DynamicRules configurations")] +public sealed class DynamicRulesTesterVerb : ApplicationVerbBase { } - [Verb("dicom-directory-processor", HelpText = "Queue dicom files on disk for ETL")] - public sealed class DicomDirectoryProcessorVerb : ApplicationVerbBase { } +[Verb("dicom-directory-processor", HelpText = "Queue dicom files on disk for ETL")] +public sealed class DicomDirectoryProcessorVerb : ApplicationVerbBase { } - [Verb("extract-images", HelpText = "Launch an image extraction job")] - public sealed class ExtractImagesVerb : ApplicationVerbBase { } +[Verb("extract-images", HelpText = "Launch an image extraction job")] +public sealed class ExtractImagesVerb : ApplicationVerbBase { } - [Verb("trigger-updates", HelpText = "Queue system wide database updates to specific fields e.g. changes in PatientID, Tag Promotion etc")] - public sealed class TriggerUpdatesVerb : ApplicationVerbBase { } +[Verb("trigger-updates", HelpText = "Queue system wide database updates to specific fields e.g. 
changes in PatientID, Tag Promotion etc")] +public sealed class TriggerUpdatesVerb : ApplicationVerbBase { } - [Verb("setup", HelpText = "Tool for assisting in the setup and checking of SmiServices configurations")] - public sealed class SetupVerb : ApplicationVerbBase { } - #endregion +[Verb("setup", HelpText = "Tool for assisting in the setup and checking of SmiServices configurations")] +public sealed class SetupVerb : ApplicationVerbBase { } +#endregion - #region Microservices +#region Microservices - public abstract class MicroservicesVerbBase : VerbBase - { - protected new const string BaseHelpText = VerbBase.BaseHelpText + "src/microservices/Microservices."; - } +public abstract class MicroservicesVerbBase : VerbBase +{ + protected new const string BaseHelpText = VerbBase.BaseHelpText + "src/microservices/Microservices."; +} - [Verb("cohort-extractor", HelpText = "Microservice for queuing images for extraction")] - public sealed class CohortExtractorVerb : MicroservicesVerbBase { } +[Verb("cohort-extractor", HelpText = "Microservice for queuing images for extraction")] +public sealed class CohortExtractorVerb : MicroservicesVerbBase { } - [Verb("cohort-packager", HelpText = "Microservice for detecting when all images in an extraction have been produced/validated")] - public sealed class CohortPackagerVerb : MicroservicesVerbBase { } +[Verb("cohort-packager", HelpText = "Microservice for detecting when all images in an extraction have been produced/validated")] +public sealed class CohortPackagerVerb : MicroservicesVerbBase { } - [Verb("dicom-anonymiser", HelpText = "Microservice for anonymising DICOM files")] - public sealed class DicomAnonymiserVerb : MicroservicesVerbBase { } +[Verb("dicom-anonymiser", HelpText = "Microservice for anonymising DICOM files")] +public sealed class DicomAnonymiserVerb : MicroservicesVerbBase { } - [Verb("dicom-relational-mapper", HelpText = "Microservice for loading relational database with images queued by 
dicom-reprocessor")] - public sealed class DicomRelationalMapperVerb : MicroservicesVerbBase { } +[Verb("dicom-relational-mapper", HelpText = "Microservice for loading relational database with images queued by dicom-reprocessor")] +public sealed class DicomRelationalMapperVerb : MicroservicesVerbBase { } - [Verb("dicom-reprocessor", HelpText = "Queue images stored in a MongoDb unstructured database for ETL to a relational database")] - public sealed class DicomReprocessorVerb : MicroservicesVerbBase { } +[Verb("dicom-reprocessor", HelpText = "Queue images stored in a MongoDb unstructured database for ETL to a relational database")] +public sealed class DicomReprocessorVerb : MicroservicesVerbBase { } - [Verb("dicom-tag-reader", HelpText = "Microservice for loading dicom images (file path + tags) off disk and into a RabbitMQ queue for downstream microservices e.g. for loading into a database")] - public sealed class DicomTagReaderVerb : MicroservicesVerbBase { } +[Verb("dicom-tag-reader", HelpText = "Microservice for loading dicom images (file path + tags) off disk and into a RabbitMQ queue for downstream microservices e.g. for loading into a database")] +public sealed class DicomTagReaderVerb : MicroservicesVerbBase { } - [Verb("file-copier", HelpText = "Extraction microservice that copies requested images directly to an output location (without any anonymisation). Runs down stream from cohort-extractor")] - public sealed class FileCopierVerb : MicroservicesVerbBase { } +[Verb("file-copier", HelpText = "Extraction microservice that copies requested images directly to an output location (without any anonymisation). Runs down stream from cohort-extractor")] +public sealed class FileCopierVerb : MicroservicesVerbBase { } - [Verb("identifier-mapper", HelpText = "Microservice for substituting PatientID for an anonymous representation e.g. 
before loading to a relational database")] - public sealed class IdentifierMapperVerb : MicroservicesVerbBase { } +[Verb("identifier-mapper", HelpText = "Microservice for substituting PatientID for an anonymous representation e.g. before loading to a relational database")] +public sealed class IdentifierMapperVerb : MicroservicesVerbBase { } - [Verb("is-identifiable", HelpText = "Evaluates database table(s), flat files or dicom files for identifiable data.")] - public sealed class IsIdentifiableVerb : MicroservicesVerbBase { } +[Verb("is-identifiable", HelpText = "Evaluates database table(s), flat files or dicom files for identifiable data.")] +public sealed class IsIdentifiableVerb : MicroservicesVerbBase { } - [Verb("mongodb-populator", HelpText = "Microservice for loading queued dicom images into a MongoDb unstructured database")] - public sealed class MongoDbPopulatorVerb : MicroservicesVerbBase { } +[Verb("mongodb-populator", HelpText = "Microservice for loading queued dicom images into a MongoDb unstructured database")] +public sealed class MongoDbPopulatorVerb : MicroservicesVerbBase { } - [Verb("update-values", HelpText = "Microservice for applying system wide SQL UPDATE commands for updates queued by trigger-updates")] - public sealed class UpdateValuesVerb : MicroservicesVerbBase { } +[Verb("update-values", HelpText = "Microservice for applying system wide SQL UPDATE commands for updates queued by trigger-updates")] +public sealed class UpdateValuesVerb : MicroservicesVerbBase { } - #endregion -} +#endregion diff --git a/tests/SmiServices.IntegrationTests/Applications/ExtractImages/ExtractImagesHostTests.cs b/tests/SmiServices.IntegrationTests/Applications/ExtractImages/ExtractImagesHostTests.cs index ddb23a7ac..f8ebd24e6 100644 --- a/tests/SmiServices.IntegrationTests/Applications/ExtractImages/ExtractImagesHostTests.cs +++ b/tests/SmiServices.IntegrationTests/Applications/ExtractImages/ExtractImagesHostTests.cs @@ -13,252 +13,251 @@ using 
System.Threading; -namespace SmiServices.IntegrationTests.Applications.ExtractImages +namespace SmiServices.IntegrationTests.Applications.ExtractImages; + +[RequiresRabbit] +public class ExtractImagesHostTests { - [RequiresRabbit] - public class ExtractImagesHostTests - { - #region Fixture Methods + #region Fixture Methods - [OneTimeSetUp] - public void OneTimeSetUp() - { - } + [OneTimeSetUp] + public void OneTimeSetUp() + { + } - [OneTimeTearDown] - public void OneTimeTearDown() { } + [OneTimeTearDown] + public void OneTimeTearDown() { } - #endregion + #endregion - #region Test Methods + #region Test Methods - [SetUp] - public void SetUp() { } + [SetUp] + public void SetUp() { } - [TearDown] - public void TearDown() { } + [TearDown] + public void TearDown() { } - #endregion + #endregion - #region Tests + #region Tests - [Test] - public void HappyPath() - { - GlobalOptions globals = new GlobalOptionsFactory().Load(nameof(HappyPath)); - globals.ExtractImagesOptions!.MaxIdentifiersPerMessage = 1; + [Test] + public void HappyPath() + { + GlobalOptions globals = new GlobalOptionsFactory().Load(nameof(HappyPath)); + globals.ExtractImagesOptions!.MaxIdentifiersPerMessage = 1; - var cliOptions = new ExtractImagesCliOptions { CohortCsvFile = "foo.csv", ProjectId = "1234-5678", NonInteractive = true }; + var cliOptions = new ExtractImagesCliOptions { CohortCsvFile = "foo.csv", ProjectId = "1234-5678", NonInteractive = true }; - var fs = new MockFileSystem( - new Dictionary - { - {"foo.csv", "SeriesInstanceUID\n1.2.3.4"}, - } - ); - var extractRoot = fs.Path.Join(fs.Path.GetTempPath(), "extract-root"); - fs.Directory.CreateDirectory(extractRoot); - globals.FileSystemOptions!.ExtractRoot = extractRoot; + var fs = new MockFileSystem( + new Dictionary + { + {"foo.csv", "SeriesInstanceUID\n1.2.3.4"}, + } + ); + var extractRoot = fs.Path.Join(fs.Path.GetTempPath(), "extract-root"); + fs.Directory.CreateDirectory(extractRoot); + globals.FileSystemOptions!.ExtractRoot = 
extractRoot; - Expression> expr = x => x.SendMessages(ExtractionKey.SeriesInstanceUID, new List { "1.2.3.4" }); - var mockExtractionMessageSender = new Mock(MockBehavior.Strict); - mockExtractionMessageSender.Setup(expr); + Expression> expr = x => x.SendMessages(ExtractionKey.SeriesInstanceUID, new List { "1.2.3.4" }); + var mockExtractionMessageSender = new Mock(MockBehavior.Strict); + mockExtractionMessageSender.Setup(expr); - using var _ = new MicroserviceTester(globals.RabbitOptions!); + using var _ = new MicroserviceTester(globals.RabbitOptions!); - var host = new ExtractImagesHost(globals, cliOptions, mockExtractionMessageSender.Object, fileSystem: fs); - host.Start(); + var host = new ExtractImagesHost(globals, cliOptions, mockExtractionMessageSender.Object, fileSystem: fs); + host.Start(); - mockExtractionMessageSender.Verify(expr, Times.Once); - } + mockExtractionMessageSender.Verify(expr, Times.Once); + } - [Test] - public void HappyPath_Integration() - { - GlobalOptions globals = new GlobalOptionsFactory().Load(nameof(HappyPath_Integration)); + [Test] + public void HappyPath_Integration() + { + GlobalOptions globals = new GlobalOptionsFactory().Load(nameof(HappyPath_Integration)); - string extractRoot = Path.GetTempPath(); - globals.FileSystemOptions!.ExtractRoot = extractRoot; + string extractRoot = Path.GetTempPath(); + globals.FileSystemOptions!.ExtractRoot = extractRoot; - ExtractImagesOptions options = globals.ExtractImagesOptions!; + ExtractImagesOptions options = globals.ExtractImagesOptions!; - string tmpFile = Path.GetTempFileName(); - File.WriteAllText(tmpFile, "SeriesInstanceUID\n1.2.3.4"); + string tmpFile = Path.GetTempFileName(); + File.WriteAllText(tmpFile, "SeriesInstanceUID\n1.2.3.4"); - var cliOptions = new ExtractImagesCliOptions - { - CohortCsvFile = tmpFile, - ProjectId = "1234-5678", - NonInteractive = true, - Modality = "CT", - IsIdentifiableExtraction = true, - IsNoFiltersExtraction = true, - }; - - var extReqExchName = 
options.ExtractionRequestProducerOptions!.ExchangeName!; - var extReqInfoExchName = options.ExtractionRequestInfoProducerOptions!.ExchangeName!; + var cliOptions = new ExtractImagesCliOptions + { + CohortCsvFile = tmpFile, + ProjectId = "1234-5678", + NonInteractive = true, + Modality = "CT", + IsIdentifiableExtraction = true, + IsNoFiltersExtraction = true, + }; - var consumedExtReqMsgs = new List>(); - var consumedExtReqInfoMsgs = new List>(); + var extReqExchName = options.ExtractionRequestProducerOptions!.ExchangeName!; + var extReqInfoExchName = options.ExtractionRequestInfoProducerOptions!.ExchangeName!; - using (var tester = new MicroserviceTester(globals.RabbitOptions!)) - { - tester.CreateExchange(extReqExchName); - tester.CreateExchange(extReqInfoExchName); + var consumedExtReqMsgs = new List>(); + var consumedExtReqInfoMsgs = new List>(); - var host = new ExtractImagesHost(globals, cliOptions); - host.Start(); + using (var tester = new MicroserviceTester(globals.RabbitOptions!)) + { + tester.CreateExchange(extReqExchName); + tester.CreateExchange(extReqInfoExchName); - var timeoutSecs = 10.0; - const double delta = 0.1; - while ((consumedExtReqMsgs.Count == 0 || consumedExtReqInfoMsgs.Count == 0) && timeoutSecs >= 0) - { - consumedExtReqMsgs.AddRange(tester.ConsumeMessages(extReqExchName.Replace("Exchange", "Queue"))); - consumedExtReqInfoMsgs.AddRange(tester.ConsumeMessages(extReqInfoExchName.Replace("Exchange", "Queue"))); + var host = new ExtractImagesHost(globals, cliOptions); + host.Start(); - timeoutSecs -= delta; - Thread.Sleep(TimeSpan.FromSeconds(delta)); - } + var timeoutSecs = 10.0; + const double delta = 0.1; + while ((consumedExtReqMsgs.Count == 0 || consumedExtReqInfoMsgs.Count == 0) && timeoutSecs >= 0) + { + consumedExtReqMsgs.AddRange(tester.ConsumeMessages(extReqExchName.Replace("Exchange", "Queue"))); + consumedExtReqInfoMsgs.AddRange(tester.ConsumeMessages(extReqInfoExchName.Replace("Exchange", "Queue"))); - Assert.That(timeoutSecs, 
Is.GreaterThan(0)); + timeoutSecs -= delta; + Thread.Sleep(TimeSpan.FromSeconds(delta)); } - File.Delete(tmpFile); + Assert.That(timeoutSecs, Is.GreaterThan(0)); + } - Assert.That(consumedExtReqMsgs, Has.Count.EqualTo(1)); - ExtractionRequestMessage receivedRequestMessage = consumedExtReqMsgs[0].Item2; - Assert.Multiple(() => - { - Assert.That(receivedRequestMessage.KeyTag, Is.EqualTo("SeriesInstanceUID")); - Assert.That(receivedRequestMessage.Modality, Is.EqualTo("CT")); - Assert.That(receivedRequestMessage.ExtractionIdentifiers, Is.EqualTo(new List { "1.2.3.4" })); + File.Delete(tmpFile); - Assert.That(consumedExtReqInfoMsgs, Has.Count.EqualTo(1)); - }); - ExtractionRequestInfoMessage receivedRequestInfoMessage = consumedExtReqInfoMsgs[0].Item2; + Assert.That(consumedExtReqMsgs, Has.Count.EqualTo(1)); + ExtractionRequestMessage receivedRequestMessage = consumedExtReqMsgs[0].Item2; + Assert.Multiple(() => + { + Assert.That(receivedRequestMessage.KeyTag, Is.EqualTo("SeriesInstanceUID")); + Assert.That(receivedRequestMessage.Modality, Is.EqualTo("CT")); + Assert.That(receivedRequestMessage.ExtractionIdentifiers, Is.EqualTo(new List { "1.2.3.4" })); + + Assert.That(consumedExtReqInfoMsgs, Has.Count.EqualTo(1)); + }); + ExtractionRequestInfoMessage receivedRequestInfoMessage = consumedExtReqInfoMsgs[0].Item2; + Assert.Multiple(() => + { + Assert.That(receivedRequestInfoMessage.KeyTag, Is.EqualTo("SeriesInstanceUID")); + Assert.That(receivedRequestInfoMessage.Modality, Is.EqualTo("CT")); + Assert.That(receivedRequestInfoMessage.KeyValueCount, Is.EqualTo(1)); + }); + + foreach (IExtractMessage msg in new List { receivedRequestMessage, receivedRequestInfoMessage }) + { Assert.Multiple(() => { - Assert.That(receivedRequestInfoMessage.KeyTag, Is.EqualTo("SeriesInstanceUID")); - Assert.That(receivedRequestInfoMessage.Modality, Is.EqualTo("CT")); - Assert.That(receivedRequestInfoMessage.KeyValueCount, Is.EqualTo(1)); + Assert.That(msg.ProjectNumber, Is.EqualTo("1234-5678")); 
+ Assert.That(msg.ExtractionDirectory, Is.EqualTo(Path.Join("1234-5678", "extractions", Path.GetFileNameWithoutExtension(tmpFile)))); + Assert.That(msg.IsIdentifiableExtraction, Is.True); + Assert.That(msg.IsNoFilterExtraction, Is.True); }); - - foreach (IExtractMessage msg in new List { receivedRequestMessage, receivedRequestInfoMessage }) - { - Assert.Multiple(() => - { - Assert.That(msg.ProjectNumber, Is.EqualTo("1234-5678")); - Assert.That(msg.ExtractionDirectory, Is.EqualTo(Path.Join("1234-5678", "extractions", Path.GetFileNameWithoutExtension(tmpFile)))); - Assert.That(msg.IsIdentifiableExtraction, Is.True); - Assert.That(msg.IsNoFilterExtraction, Is.True); - }); - } } + } - [Test] - public void ExtractImagesOptions_AreValid() - { - GlobalOptions globals = new GlobalOptionsFactory().Load(nameof(ExtractImagesOptions_AreValid)); - globals.ExtractImagesOptions = null; + [Test] + public void ExtractImagesOptions_AreValid() + { + GlobalOptions globals = new GlobalOptionsFactory().Load(nameof(ExtractImagesOptions_AreValid)); + globals.ExtractImagesOptions = null; - using var _ = new MicroserviceTester(globals.RabbitOptions!); + using var _ = new MicroserviceTester(globals.RabbitOptions!); - var exc = Assert.Throws(() => - { - var _ = new ExtractImagesHost(globals, new ExtractImagesCliOptions()); - }); - Assert.That(exc?.Message, Is.EqualTo("ExtractImagesOptions")); - } - - [Test] - public void ExtractionRoot_VerifyPresent() + var exc = Assert.Throws(() => { - GlobalOptions globals = new GlobalOptionsFactory().Load(nameof(ExtractionRoot_VerifyPresent)); - globals.FileSystemOptions!.ExtractRoot = "nope"; + var _ = new ExtractImagesHost(globals, new ExtractImagesCliOptions()); + }); + Assert.That(exc?.Message, Is.EqualTo("ExtractImagesOptions")); + } - using var _ = new MicroserviceTester(globals.RabbitOptions!); + [Test] + public void ExtractionRoot_VerifyPresent() + { + GlobalOptions globals = new GlobalOptionsFactory().Load(nameof(ExtractionRoot_VerifyPresent)); + 
globals.FileSystemOptions!.ExtractRoot = "nope"; - var exc = Assert.Throws(() => - { - var _ = new ExtractImagesHost(globals, new ExtractImagesCliOptions()); - }); - Assert.That(exc?.Message, Is.EqualTo("Could not find the extraction root 'nope'")); - } + using var _ = new MicroserviceTester(globals.RabbitOptions!); - [Test] - public void CsvFile_VerifyPresent() + var exc = Assert.Throws(() => { - GlobalOptions globals = new GlobalOptionsFactory().Load(nameof(CsvFile_VerifyPresent)); - globals.FileSystemOptions!.ExtractRoot = "extract-root"; + var _ = new ExtractImagesHost(globals, new ExtractImagesCliOptions()); + }); + Assert.That(exc?.Message, Is.EqualTo("Could not find the extraction root 'nope'")); + } - using var _ = new MicroserviceTester(globals.RabbitOptions!); + [Test] + public void CsvFile_VerifyPresent() + { + GlobalOptions globals = new GlobalOptionsFactory().Load(nameof(CsvFile_VerifyPresent)); + globals.FileSystemOptions!.ExtractRoot = "extract-root"; - var fs = new MockFileSystem(); - fs.Directory.CreateDirectory(globals.FileSystemOptions.ExtractRoot); + using var _ = new MicroserviceTester(globals.RabbitOptions!); - var cliOptions = new ExtractImagesCliOptions { CohortCsvFile = "missing.csv" }; - var exc = Assert.Throws(() => - { - var _ = new ExtractImagesHost(globals, cliOptions, fileSystem: fs); - }); - Assert.That(exc?.Message, Is.EqualTo("Could not find the cohort CSV file 'missing.csv'")); - } + var fs = new MockFileSystem(); + fs.Directory.CreateDirectory(globals.FileSystemOptions.ExtractRoot); - [Test] - public void ExtractionDirectory_VerifyAbsent() + var cliOptions = new ExtractImagesCliOptions { CohortCsvFile = "missing.csv" }; + var exc = Assert.Throws(() => { - GlobalOptions globals = new GlobalOptionsFactory().Load(nameof(ExtractionDirectory_VerifyAbsent)); - globals.FileSystemOptions!.ExtractRoot = "extract-root"; + var _ = new ExtractImagesHost(globals, cliOptions, fileSystem: fs); + }); + Assert.That(exc?.Message, Is.EqualTo("Could 
not find the cohort CSV file 'missing.csv'")); + } - using var _ = new MicroserviceTester(globals.RabbitOptions!); + [Test] + public void ExtractionDirectory_VerifyAbsent() + { + GlobalOptions globals = new GlobalOptionsFactory().Load(nameof(ExtractionDirectory_VerifyAbsent)); + globals.FileSystemOptions!.ExtractRoot = "extract-root"; - var cliOptions = new ExtractImagesCliOptions { CohortCsvFile = "test.csv", ProjectId = "foo" }; + using var _ = new MicroserviceTester(globals.RabbitOptions!); - var fs = new MockFileSystem(); - fs.Directory.CreateDirectory(globals.FileSystemOptions.ExtractRoot); - fs.File.Create("test.csv", 0); - fs.Directory.CreateDirectory("extract-root/foo/extractions/test"); + var cliOptions = new ExtractImagesCliOptions { CohortCsvFile = "test.csv", ProjectId = "foo" }; - var exc = Assert.Throws(() => - { - var _ = new ExtractImagesHost(globals, cliOptions, fileSystem: fs); - }); - Assert.That(exc?.Message.StartsWith("Extraction directory already exists"), Is.True); - } + var fs = new MockFileSystem(); + fs.Directory.CreateDirectory(globals.FileSystemOptions.ExtractRoot); + fs.File.Create("test.csv", 0); + fs.Directory.CreateDirectory("extract-root/foo/extractions/test"); - [Test] - public void Start_DisallowedExtractionKey_ThrowsException() + var exc = Assert.Throws(() => { - // Arrange + var _ = new ExtractImagesHost(globals, cliOptions, fileSystem: fs); + }); + Assert.That(exc?.Message.StartsWith("Extraction directory already exists"), Is.True); + } - GlobalOptions globals = new GlobalOptionsFactory().Load(nameof(Start_DisallowedExtractionKey_ThrowsException)); - globals.ExtractImagesOptions!.AllowedExtractionKeys = [ExtractionKey.SeriesInstanceUID, ExtractionKey.SOPInstanceUID]; + [Test] + public void Start_DisallowedExtractionKey_ThrowsException() + { + // Arrange - var cliOptions = new ExtractImagesCliOptions { CohortCsvFile = "foo.csv", ProjectId = "1234-5678", NonInteractive = true }; + GlobalOptions globals = new 
GlobalOptionsFactory().Load(nameof(Start_DisallowedExtractionKey_ThrowsException)); + globals.ExtractImagesOptions!.AllowedExtractionKeys = [ExtractionKey.SeriesInstanceUID, ExtractionKey.SOPInstanceUID]; - var fs = new MockFileSystem( - new Dictionary - { - {"foo.csv", "StudyInstanceUID\n1.2.3.4"}, - } - ); - var extractRoot = fs.Path.Join(fs.Path.GetTempPath(), "extract-root"); - fs.Directory.CreateDirectory(extractRoot); - globals.FileSystemOptions!.ExtractRoot = extractRoot; + var cliOptions = new ExtractImagesCliOptions { CohortCsvFile = "foo.csv", ProjectId = "1234-5678", NonInteractive = true }; - var mockExtractionMessageSender = new Mock(MockBehavior.Strict); - var host = new ExtractImagesHost(globals, cliOptions, mockExtractionMessageSender.Object, fileSystem: fs); + var fs = new MockFileSystem( + new Dictionary + { + {"foo.csv", "StudyInstanceUID\n1.2.3.4"}, + } + ); + var extractRoot = fs.Path.Join(fs.Path.GetTempPath(), "extract-root"); + fs.Directory.CreateDirectory(extractRoot); + globals.FileSystemOptions!.ExtractRoot = extractRoot; - // Act + var mockExtractionMessageSender = new Mock(MockBehavior.Strict); + var host = new ExtractImagesHost(globals, cliOptions, mockExtractionMessageSender.Object, fileSystem: fs); - void act() => host.Start(); + // Act - // Assert + void act() => host.Start(); - var exc = Assert.Throws(act); - Assert.That(exc?.Message, Is.EqualTo("'StudyInstanceUID' from CSV not in list of supported extraction keys (SeriesInstanceUID,SOPInstanceUID)")); - } + // Assert - #endregion + var exc = Assert.Throws(act); + Assert.That(exc?.Message, Is.EqualTo("'StudyInstanceUID' from CSV not in list of supported extraction keys (SeriesInstanceUID,SOPInstanceUID)")); } + + #endregion } diff --git a/tests/SmiServices.IntegrationTests/Applications/TriggerUpdates/MapperSourceIntegrationTest.cs b/tests/SmiServices.IntegrationTests/Applications/TriggerUpdates/MapperSourceIntegrationTest.cs index 41aec09ac..4abe17856 100644 --- 
a/tests/SmiServices.IntegrationTests/Applications/TriggerUpdates/MapperSourceIntegrationTest.cs +++ b/tests/SmiServices.IntegrationTests/Applications/TriggerUpdates/MapperSourceIntegrationTest.cs @@ -21,150 +21,149 @@ using Tests.Common; -namespace SmiServices.IntegrationTests.Applications.TriggerUpdates -{ - [RequiresRabbit] - class MapperSourceIntegrationTest : DatabaseTests - { +namespace SmiServices.IntegrationTests.Applications.TriggerUpdates; - [TestCase(DatabaseType.MicrosoftSQLServer)] - [TestCase(DatabaseType.MySql)] - public void MapperSource_IntegrationTest(DatabaseType dbType) - { - var db = GetCleanedServer(dbType); +[RequiresRabbit] +class MapperSourceIntegrationTest : DatabaseTests +{ - DataTable dt = new(); - dt.Columns.Add("PatientID", typeof(string)); - dt.Columns.Add("StudyDescription", typeof(string)); - dt.SetDoNotReType(true); + [TestCase(DatabaseType.MicrosoftSQLServer)] + [TestCase(DatabaseType.MySql)] + public void MapperSource_IntegrationTest(DatabaseType dbType) + { + var db = GetCleanedServer(dbType); - // We have a live table with anonymised data. There is one person with a known ECHI 0101010101=0A0A0A0A0A - dt.Rows.Add("0A0A0A0A0A", "CT Head"); + DataTable dt = new(); + dt.Columns.Add("PatientID", typeof(string)); + dt.Columns.Add("StudyDescription", typeof(string)); + dt.SetDoNotReType(true); - //There are 2 people for whome we have added temporary identifiers - dt.Rows.Add("bbb-bbb-bbb", "CT Tail"); - dt.Rows.Add("ccc-ccc-ccc", "CT Wings"); + // We have a live table with anonymised data. 
There is one person with a known ECHI 0101010101=0A0A0A0A0A + dt.Rows.Add("0A0A0A0A0A", "CT Head"); - var liveTable = db.CreateTable("MyLiveTable", dt); + //There are 2 people for whome we have added temporary identifiers + dt.Rows.Add("bbb-bbb-bbb", "CT Tail"); + dt.Rows.Add("ccc-ccc-ccc", "CT Wings"); - DiscoveredTable map; + var liveTable = db.CreateTable("MyLiveTable", dt); - using (var dtMap = new DataTable()) - { - dtMap.Columns.Add("CHI"); - dtMap.Columns.Add("ECHI"); + DiscoveredTable map; - dtMap.PrimaryKey = [dtMap.Columns["CHI"]!]; + using (var dtMap = new DataTable()) + { + dtMap.Columns.Add("CHI"); + dtMap.Columns.Add("ECHI"); - dtMap.Rows.Add("0101010101", "0A0A0A0A0A"); - map = db.CreateTable("Map", dtMap); - } + dtMap.PrimaryKey = [dtMap.Columns["CHI"]!]; - // Import into RDMP the live table so we have a TableInfo pointer to it floating around - Import(liveTable); + dtMap.Rows.Add("0101010101", "0A0A0A0A0A"); + map = db.CreateTable("Map", dtMap); + } - var mapperOptions = new IdentifierMapperOptions - { - MappingTableName = map.GetFullyQualifiedName(), - MappingConnectionString = db.Server.Builder.ConnectionString, - SwapColumnName = "CHI", - ReplacementColumnName = "ECHI", - MappingDatabaseType = db.Server.DatabaseType, - SwapperType = typeof(TableLookupWithGuidFallbackSwapper).FullName - }; + // Import into RDMP the live table so we have a TableInfo pointer to it floating around + Import(liveTable); - var swapper = new TableLookupWithGuidFallbackSwapper(); - swapper.Setup(mapperOptions); + var mapperOptions = new IdentifierMapperOptions + { + MappingTableName = map.GetFullyQualifiedName(), + MappingConnectionString = db.Server.Builder.ConnectionString, + SwapColumnName = "CHI", + ReplacementColumnName = "ECHI", + MappingDatabaseType = db.Server.DatabaseType, + SwapperType = typeof(TableLookupWithGuidFallbackSwapper).FullName + }; + + var swapper = new TableLookupWithGuidFallbackSwapper(); + swapper.Setup(mapperOptions); + + var guidTable = 
swapper.GetGuidTableIfAny(mapperOptions); + Assert.Multiple(() => + { + Assert.That(guidTable, Is.Not.Null); + Assert.That(guidTable?.GetRowCount(), Is.EqualTo(0), "No temporary guids should exist yet"); + Assert.That(map.GetRowCount(), Is.EqualTo(1), "We should have a mapping table with 1 entry"); + }); + if (guidTable is null) return; // We've actually failed and aborted already at this point, but the compiler doesn't know that - var guidTable = swapper.GetGuidTableIfAny(mapperOptions); - Assert.Multiple(() => - { - Assert.That(guidTable, Is.Not.Null); - Assert.That(guidTable?.GetRowCount(), Is.EqualTo(0), "No temporary guids should exist yet"); - Assert.That(map.GetRowCount(), Is.EqualTo(1), "We should have a mapping table with 1 entry"); + guidTable.Insert(new Dictionary + { + { "CHI","0202020202" }, + { TableLookupWithGuidFallbackSwapper.GuidColumnName,"bbb-bbb-bbb"} }); - if (guidTable is null) return; // We've actually failed and aborted already at this point, but the compiler doesn't know that - - guidTable.Insert(new Dictionary - { - { "CHI","0202020202" }, - { TableLookupWithGuidFallbackSwapper.GuidColumnName,"bbb-bbb-bbb"} - }); - guidTable.Insert(new Dictionary - { - { "CHI","0303030303" }, - { TableLookupWithGuidFallbackSwapper.GuidColumnName,"ccc-ccc-ccc"} - }); - - Assert.Multiple(() => - { - Assert.That(map.GetRowCount(), Is.EqualTo(1), "We should have a mapping table with 1 entry"); - Assert.That(guidTable.GetRowCount(), Is.EqualTo(2), "We should have a temporary guid for 0202020202"); + guidTable.Insert(new Dictionary + { + { "CHI","0303030303" }, + { TableLookupWithGuidFallbackSwapper.GuidColumnName,"ccc-ccc-ccc"} }); - // make a fake data load into this table (create trigger and insert/update) - var triggerImplementer = new TriggerImplementerFactory(dbType).Create(map); - triggerImplementer.CreateTrigger(ThrowImmediatelyCheckNotifier.Quiet); - - //create a brand new mapping - map.Insert(new Dictionary - { - {"CHI","0303030303" }, - 
{"ECHI","0C0C0C0C0C" }, - {SpecialFieldNames.ValidFrom,DateTime.Now }, - {SpecialFieldNames.DataLoadRunID,55}, - }); + Assert.Multiple(() => + { + Assert.That(map.GetRowCount(), Is.EqualTo(1), "We should have a mapping table with 1 entry"); + Assert.That(guidTable.GetRowCount(), Is.EqualTo(2), "We should have a temporary guid for 0202020202"); + }); - var globals = new GlobalOptionsFactory().Load(nameof(MapperSource_IntegrationTest)); + // make a fake data load into this table (create trigger and insert/update) + var triggerImplementer = new TriggerImplementerFactory(dbType).Create(map); + triggerImplementer.CreateTrigger(ThrowImmediatelyCheckNotifier.Quiet); - var cliOptions = new TriggerUpdatesFromMapperOptions - { - DateOfLastUpdate = new DateTime(2020, 01, 01), - LiveDatabaseFieldName = "PatientID", - Qualifier = '\'', + //create a brand new mapping + map.Insert(new Dictionary + { + {"CHI","0303030303" }, + {"ECHI","0C0C0C0C0C" }, + {SpecialFieldNames.ValidFrom,DateTime.Now }, + {SpecialFieldNames.DataLoadRunID,55}, + }); - }; + var globals = new GlobalOptionsFactory().Load(nameof(MapperSource_IntegrationTest)); - globals.UseTestValues( - RequiresRabbit.Connection.Value, - RequiresMongoDb.GetMongoClientSettings(), - RequiresRelationalDb.GetRelationalDatabaseConnectionStrings(), - ((TableRepository)RepositoryLocator.CatalogueRepository).ConnectionStringBuilder, - ((TableRepository)RepositoryLocator.DataExportRepository).ConnectionStringBuilder); + var cliOptions = new TriggerUpdatesFromMapperOptions + { + DateOfLastUpdate = new DateTime(2020, 01, 01), + LiveDatabaseFieldName = "PatientID", + Qualifier = '\'', + }; - //make sure the identifier mapper goes to the right table - globals.IdentifierMapperOptions!.MappingConnectionString = db.Server.Builder.ConnectionString; - globals.IdentifierMapperOptions.MappingDatabaseType = dbType; - globals.IdentifierMapperOptions.MappingTableName = map.GetFullyQualifiedName(); - globals.IdentifierMapperOptions.SwapperType = 
typeof(TableLookupWithGuidFallbackSwapper).FullName; + globals.UseTestValues( + RequiresRabbit.Connection.Value, + RequiresMongoDb.GetMongoClientSettings(), + RequiresRelationalDb.GetRelationalDatabaseConnectionStrings(), + ((TableRepository)RepositoryLocator.CatalogueRepository).ConnectionStringBuilder, + ((TableRepository)RepositoryLocator.DataExportRepository).ConnectionStringBuilder); - using (var tester = new MicroserviceTester(globals.RabbitOptions!, globals.CohortExtractorOptions!)) - { - tester.CreateExchange(globals.TriggerUpdatesOptions!.ExchangeName!, globals.UpdateValuesOptions!.QueueName); - var sourceHost = new TriggerUpdatesHost(globals, new MapperSource(globals, cliOptions)); - var destHost = new UpdateValuesHost(globals); + //make sure the identifier mapper goes to the right table + globals.IdentifierMapperOptions!.MappingConnectionString = db.Server.Builder.ConnectionString; + globals.IdentifierMapperOptions.MappingDatabaseType = dbType; + globals.IdentifierMapperOptions.MappingTableName = map.GetFullyQualifiedName(); + globals.IdentifierMapperOptions.SwapperType = typeof(TableLookupWithGuidFallbackSwapper).FullName; - sourceHost.Start(); - tester.StopOnDispose.Add(sourceHost); + using (var tester = new MicroserviceTester(globals.RabbitOptions!, globals.CohortExtractorOptions!)) + { + tester.CreateExchange(globals.TriggerUpdatesOptions!.ExchangeName!, globals.UpdateValuesOptions!.QueueName); - destHost.Start(); - tester.StopOnDispose.Add(destHost); + var sourceHost = new TriggerUpdatesHost(globals, new MapperSource(globals, cliOptions)); + var destHost = new UpdateValuesHost(globals); + sourceHost.Start(); + tester.StopOnDispose.Add(sourceHost); - //wait till updater is done updating the live table - TestTimelineAwaiter.Await(() => destHost.Consumer!.AckCount == 1); - } + destHost.Start(); + tester.StopOnDispose.Add(destHost); - var liveDtAfter = liveTable.GetDataTable(); - Assert.Multiple(() => - { - Assert.That(liveDtAfter.Rows.Cast().Count(r => 
(string)r["PatientID"] == "0A0A0A0A0A"), Is.EqualTo(1), "Expected original data to still be intact"); - Assert.That(liveDtAfter.Rows.Cast().Count(r => (string)r["PatientID"] == "bbb-bbb-bbb"), Is.EqualTo(1), "Expected unknown CHI with guid bbb to still be unknown"); - Assert.That(liveDtAfter.Rows.Cast().Count(r => (string)r["PatientID"] == "0C0C0C0C0C"), Is.EqualTo(1), "Expected the unknown CHI ccc to be now known as 0C0C0C0C0C"); - }); + //wait till updater is done updating the live table + TestTimelineAwaiter.Await(() => destHost.Consumer!.AckCount == 1); } + + var liveDtAfter = liveTable.GetDataTable(); + + Assert.Multiple(() => + { + Assert.That(liveDtAfter.Rows.Cast().Count(r => (string)r["PatientID"] == "0A0A0A0A0A"), Is.EqualTo(1), "Expected original data to still be intact"); + Assert.That(liveDtAfter.Rows.Cast().Count(r => (string)r["PatientID"] == "bbb-bbb-bbb"), Is.EqualTo(1), "Expected unknown CHI with guid bbb to still be unknown"); + Assert.That(liveDtAfter.Rows.Cast().Count(r => (string)r["PatientID"] == "0C0C0C0C0C"), Is.EqualTo(1), "Expected the unknown CHI ccc to be now known as 0C0C0C0C0C"); + }); } } diff --git a/tests/SmiServices.IntegrationTests/Applications/TriggerUpdates/MapperSourceTests.cs b/tests/SmiServices.IntegrationTests/Applications/TriggerUpdates/MapperSourceTests.cs index daf88a21f..eca9da8bb 100644 --- a/tests/SmiServices.IntegrationTests/Applications/TriggerUpdates/MapperSourceTests.cs +++ b/tests/SmiServices.IntegrationTests/Applications/TriggerUpdates/MapperSourceTests.cs @@ -13,319 +13,318 @@ using System.Linq; using Tests.Common; -namespace SmiServices.IntegrationTests.Applications.TriggerUpdates +namespace SmiServices.IntegrationTests.Applications.TriggerUpdates; + +[RequiresRelationalDb(DatabaseType.MicrosoftSQLServer)] +[RequiresRelationalDb(DatabaseType.MySql)] +class MapperSourceTests : DatabaseTests { - [RequiresRelationalDb(DatabaseType.MicrosoftSQLServer)] - [RequiresRelationalDb(DatabaseType.MySql)] - class 
MapperSourceTests : DatabaseTests + /// + /// Sets up a CHI/ECHI mapping table with fallback guid and populates each table with a single record. + /// 0101010101 is a known CHI and 0202020202 is an known one (which was assigned a temporary guid mapping). + /// Also prepares the main map table for DLE loading () + /// + /// + /// + /// + /// + /// true to create a otherwise creates a + private void SetupMappers(DatabaseType dbType, out DiscoveredTable map, out DiscoveredTable? guidTable, out IdentifierMapperOptions mapperOptions, bool guids = true) { - /// - /// Sets up a CHI/ECHI mapping table with fallback guid and populates each table with a single record. - /// 0101010101 is a known CHI and 0202020202 is an known one (which was assigned a temporary guid mapping). - /// Also prepares the main map table for DLE loading () - /// - /// - /// - /// - /// - /// true to create a otherwise creates a - private void SetupMappers(DatabaseType dbType, out DiscoveredTable map, out DiscoveredTable? guidTable, out IdentifierMapperOptions mapperOptions, bool guids = true) - { - var db = GetCleanedServer(dbType); - - using (var dt = new DataTable()) - { - dt.Columns.Add("CHI"); - dt.Columns.Add("ECHI"); - - dt.PrimaryKey = [dt.Columns["CHI"]!]; - - dt.Rows.Add("0101010101", "0A0A0A0A0A"); - map = db.CreateTable("Map", dt); - } - - mapperOptions = new IdentifierMapperOptions - { - MappingTableName = map.GetFullyQualifiedName(), - MappingConnectionString = db.Server.Builder.ConnectionString, - SwapColumnName = "CHI", - ReplacementColumnName = "ECHI", - MappingDatabaseType = db.Server.DatabaseType, - SwapperType = (guids ? 
typeof(TableLookupWithGuidFallbackSwapper) : typeof(TableLookupSwapper)).FullName - }; - - if (guids) - { - var swapper = new TableLookupWithGuidFallbackSwapper(); - swapper.Setup(mapperOptions); - - guidTable = swapper.GetGuidTableIfAny(mapperOptions); -#pragma warning disable NUnit2045 // Use Assert.Multiple - Assert.That(guidTable?.GetRowCount(), Is.EqualTo(0), "No temporary guids should exist yet"); - Assert.That(map.GetRowCount(), Is.EqualTo(1), "We should have a mapping table with 1 entry"); + var db = GetCleanedServer(dbType); - //lookup an as yet unknown value - swapper.GetSubstitutionFor("0202020202", out _); + using (var dt = new DataTable()) + { + dt.Columns.Add("CHI"); + dt.Columns.Add("ECHI"); - Assert.That(map.GetRowCount(), Is.EqualTo(1), "We should have a mapping table with 1 entry"); - Assert.That(guidTable?.GetRowCount(), Is.EqualTo(1), "We should have a temporary guid for 0202020202"); -#pragma warning restore NUnit2045 // Use Assert.Multiple - } - else - guidTable = null; + dt.PrimaryKey = [dt.Columns["CHI"]!]; - // make a fake data load into this table (create trigger and insert/update) - var triggerImplementer = new TriggerImplementerFactory(dbType).Create(map); - triggerImplementer.CreateTrigger(ThrowImmediatelyCheckNotifier.Quiet); + dt.Rows.Add("0101010101", "0A0A0A0A0A"); + map = db.CreateTable("Map", dt); } - [TestCase(DatabaseType.MySql)] - [TestCase(DatabaseType.MicrosoftSQLServer)] - public void TestMapperSource_BrandNewMapping(DatabaseType dbType) + mapperOptions = new IdentifierMapperOptions { - SetupMappers(dbType, out DiscoveredTable map, out DiscoveredTable? _, out IdentifierMapperOptions mapperOptions); + MappingTableName = map.GetFullyQualifiedName(), + MappingConnectionString = db.Server.Builder.ConnectionString, + SwapColumnName = "CHI", + ReplacementColumnName = "ECHI", + MappingDatabaseType = db.Server.DatabaseType, + SwapperType = (guids ? 
typeof(TableLookupWithGuidFallbackSwapper) : typeof(TableLookupSwapper)).FullName + }; + + if (guids) + { + var swapper = new TableLookupWithGuidFallbackSwapper(); + swapper.Setup(mapperOptions); - //create a brand new mapping - map.Insert(new Dictionary - { - {"CHI","0303030303" }, - {"ECHI","0C0C0C0C0C" }, - {SpecialFieldNames.ValidFrom,DateTime.Now }, - {SpecialFieldNames.DataLoadRunID,55}, - }); + guidTable = swapper.GetGuidTableIfAny(mapperOptions); +#pragma warning disable NUnit2045 // Use Assert.Multiple + Assert.That(guidTable?.GetRowCount(), Is.EqualTo(0), "No temporary guids should exist yet"); + Assert.That(map.GetRowCount(), Is.EqualTo(1), "We should have a mapping table with 1 entry"); - var source = new MapperSource(new GlobalOptions { IdentifierMapperOptions = mapperOptions, TriggerUpdatesOptions = new TriggerUpdatesOptions() }, new TriggerUpdatesFromMapperOptions { DateOfLastUpdate = new DateTime(2020, 01, 01) }); + //lookup an as yet unknown value + swapper.GetSubstitutionFor("0202020202", out _); - Assert.That(source.GetUpdates(), Is.Empty, "Since 0303030303 has never before been seen (not in guid table) we don't have any existing mappings to update"); + Assert.That(map.GetRowCount(), Is.EqualTo(1), "We should have a mapping table with 1 entry"); + Assert.That(guidTable?.GetRowCount(), Is.EqualTo(1), "We should have a temporary guid for 0202020202"); +#pragma warning restore NUnit2045 // Use Assert.Multiple } + else + guidTable = null; - [TestCase(DatabaseType.MySql)] - [TestCase(DatabaseType.MicrosoftSQLServer)] - public void TestMapperSource_NoArchiveTable(DatabaseType dbType) + // make a fake data load into this table (create trigger and insert/update) + var triggerImplementer = new TriggerImplementerFactory(dbType).Create(map); + triggerImplementer.CreateTrigger(ThrowImmediatelyCheckNotifier.Quiet); + } + + [TestCase(DatabaseType.MySql)] + [TestCase(DatabaseType.MicrosoftSQLServer)] + public void TestMapperSource_BrandNewMapping(DatabaseType 
dbType) + { + SetupMappers(dbType, out DiscoveredTable map, out DiscoveredTable? _, out IdentifierMapperOptions mapperOptions); + + //create a brand new mapping + map.Insert(new Dictionary { - SetupMappers(dbType, out DiscoveredTable map, out DiscoveredTable? guidTable, out IdentifierMapperOptions mapperOptions); + {"CHI","0303030303" }, + {"ECHI","0C0C0C0C0C" }, + {SpecialFieldNames.ValidFrom,DateTime.Now }, + {SpecialFieldNames.DataLoadRunID,55}, + }); - var archive = map.Database.ExpectTable(map.GetRuntimeName() + "_Archive"); - archive.Drop(); + var source = new MapperSource(new GlobalOptions { IdentifierMapperOptions = mapperOptions, TriggerUpdatesOptions = new TriggerUpdatesOptions() }, new TriggerUpdatesFromMapperOptions { DateOfLastUpdate = new DateTime(2020, 01, 01) }); - var source = new MapperSource(new GlobalOptions { IdentifierMapperOptions = mapperOptions, TriggerUpdatesOptions = new TriggerUpdatesOptions() }, new TriggerUpdatesFromMapperOptions { DateOfLastUpdate = new DateTime(2020, 01, 01) }); - var ex = Assert.Throws(() => source.GetUpdates().ToArray()); + Assert.That(source.GetUpdates(), Is.Empty, "Since 0303030303 has never before been seen (not in guid table) we don't have any existing mappings to update"); + } - Assert.That(ex!.Message, Does.StartWith("No Archive table exists for mapping table")); - } + [TestCase(DatabaseType.MySql)] + [TestCase(DatabaseType.MicrosoftSQLServer)] + public void TestMapperSource_NoArchiveTable(DatabaseType dbType) + { + SetupMappers(dbType, out DiscoveredTable map, out DiscoveredTable? 
guidTable, out IdentifierMapperOptions mapperOptions); + + var archive = map.Database.ExpectTable(map.GetRuntimeName() + "_Archive"); + archive.Drop(); - [TestCase(DatabaseType.MySql)] - [TestCase(DatabaseType.MicrosoftSQLServer)] - public void TestMapperSource_UpdatedMapping(DatabaseType dbType) + var source = new MapperSource(new GlobalOptions { IdentifierMapperOptions = mapperOptions, TriggerUpdatesOptions = new TriggerUpdatesOptions() }, new TriggerUpdatesFromMapperOptions { DateOfLastUpdate = new DateTime(2020, 01, 01) }); + var ex = Assert.Throws(() => source.GetUpdates().ToArray()); + + Assert.That(ex!.Message, Does.StartWith("No Archive table exists for mapping table")); + } + + [TestCase(DatabaseType.MySql)] + [TestCase(DatabaseType.MicrosoftSQLServer)] + public void TestMapperSource_UpdatedMapping(DatabaseType dbType) + { + SetupMappers(dbType, out DiscoveredTable map, out DiscoveredTable? _, out IdentifierMapperOptions mapperOptions); + + // Simulate a data load that changes the mapping of CHI 0101010101 from 0A0A0A0A0A to 0Z0Z0Z0Z0Z + using (var con = map.Database.Server.GetConnection()) { - SetupMappers(dbType, out DiscoveredTable map, out DiscoveredTable? 
_, out IdentifierMapperOptions mapperOptions); + con.Open(); + Assert.That(map.GetCommand($"UPDATE {map.GetFullyQualifiedName()} SET ECHI = '0Z0Z0Z0Z0Z' WHERE CHI = '0101010101'", con).ExecuteNonQuery(), Is.EqualTo(1)); - // Simulate a data load that changes the mapping of CHI 0101010101 from 0A0A0A0A0A to 0Z0Z0Z0Z0Z - using (var con = map.Database.Server.GetConnection()) - { - con.Open(); - Assert.That(map.GetCommand($"UPDATE {map.GetFullyQualifiedName()} SET ECHI = '0Z0Z0Z0Z0Z' WHERE CHI = '0101010101'", con).ExecuteNonQuery(), Is.EqualTo(1)); + } - } + var archive = map.Database.ExpectTable(map.GetRuntimeName() + "_Archive"); + Assert.Multiple(() => + { + Assert.That(archive.Exists(), Is.True, "Archive table should definitely be there, we created the trigger after all"); + Assert.That(archive.GetRowCount(), Is.EqualTo(1), "Expected the old ECHI to have an entry in the archive when it was updated"); - var archive = map.Database.ExpectTable(map.GetRuntimeName() + "_Archive"); - Assert.Multiple(() => - { - Assert.That(archive.Exists(), Is.True, "Archive table should definitely be there, we created the trigger after all"); - Assert.That(archive.GetRowCount(), Is.EqualTo(1), "Expected the old ECHI to have an entry in the archive when it was updated"); + Assert.That(map.GetRowCount(), Is.EqualTo(1), "We should have a mapping table with 1 entry"); + }); - Assert.That(map.GetRowCount(), Is.EqualTo(1), "We should have a mapping table with 1 entry"); - }); + var source = new MapperSource(new GlobalOptions { IdentifierMapperOptions = mapperOptions, TriggerUpdatesOptions = new TriggerUpdatesOptions() }, new TriggerUpdatesFromMapperOptions { DateOfLastUpdate = new DateTime(2020, 01, 01) }); - var source = new MapperSource(new GlobalOptions { IdentifierMapperOptions = mapperOptions, TriggerUpdatesOptions = new TriggerUpdatesOptions() }, new TriggerUpdatesFromMapperOptions { DateOfLastUpdate = new DateTime(2020, 01, 01) }); + var msg = source.GetUpdates().ToArray(); + 
Assert.That(msg, Is.Not.Null); - var msg = source.GetUpdates().ToArray(); - Assert.That(msg, Is.Not.Null); + Assert.Multiple(() => + { + Assert.That(msg[0].WhereFields.Single(), Is.EqualTo("ECHI")); + Assert.That(msg[0].WriteIntoFields.Single(), Is.EqualTo("ECHI")); - Assert.Multiple(() => - { - Assert.That(msg[0].WhereFields.Single(), Is.EqualTo("ECHI")); - Assert.That(msg[0].WriteIntoFields.Single(), Is.EqualTo("ECHI")); + Assert.That(msg[0].HaveValues.Single(), Is.EqualTo("0A0A0A0A0A")); + Assert.That(msg[0].Values.Single(), Is.EqualTo("0Z0Z0Z0Z0Z")); - Assert.That(msg[0].HaveValues.Single(), Is.EqualTo("0A0A0A0A0A")); - Assert.That(msg[0].Values.Single(), Is.EqualTo("0Z0Z0Z0Z0Z")); + Assert.That(msg, Has.Length.EqualTo(1), "We expected only one update"); + }); + } - Assert.That(msg, Has.Length.EqualTo(1), "We expected only one update"); - }); + [TestCase(DatabaseType.MySql)] + [TestCase(DatabaseType.MicrosoftSQLServer)] + public void TestMapperSource_UpdatedMapping_WithExplicitDifferentColumnName(DatabaseType dbType) + { + SetupMappers(dbType, out DiscoveredTable map, out DiscoveredTable? _, out IdentifierMapperOptions mapperOptions); + + // Simulate a data load that changes the mapping of CHI 0101010101 from 0A0A0A0A0A to 0Z0Z0Z0Z0Z + using (var con = map.Database.Server.GetConnection()) + { + con.Open(); + Assert.That(map.GetCommand($"UPDATE {map.GetFullyQualifiedName()} SET ECHI = '0Z0Z0Z0Z0Z' WHERE CHI = '0101010101'", con).ExecuteNonQuery(), Is.EqualTo(1)); } - [TestCase(DatabaseType.MySql)] - [TestCase(DatabaseType.MicrosoftSQLServer)] - public void TestMapperSource_UpdatedMapping_WithExplicitDifferentColumnName(DatabaseType dbType) + var archive = map.Database.ExpectTable(map.GetRuntimeName() + "_Archive"); + Assert.Multiple(() => { - SetupMappers(dbType, out DiscoveredTable map, out DiscoveredTable? 
_, out IdentifierMapperOptions mapperOptions); - - // Simulate a data load that changes the mapping of CHI 0101010101 from 0A0A0A0A0A to 0Z0Z0Z0Z0Z - using (var con = map.Database.Server.GetConnection()) - { - con.Open(); - Assert.That(map.GetCommand($"UPDATE {map.GetFullyQualifiedName()} SET ECHI = '0Z0Z0Z0Z0Z' WHERE CHI = '0101010101'", con).ExecuteNonQuery(), Is.EqualTo(1)); - } - - var archive = map.Database.ExpectTable(map.GetRuntimeName() + "_Archive"); - Assert.Multiple(() => - { - Assert.That(archive.Exists(), Is.True, "Archive table should definitely be there, we created the trigger after all"); - Assert.That(archive.GetRowCount(), Is.EqualTo(1), "Expected the old ECHI to have an entry in the archive when it was updated"); - - Assert.That(map.GetRowCount(), Is.EqualTo(1), "We should have a mapping table with 1 entry"); - }); + Assert.That(archive.Exists(), Is.True, "Archive table should definitely be there, we created the trigger after all"); + Assert.That(archive.GetRowCount(), Is.EqualTo(1), "Expected the old ECHI to have an entry in the archive when it was updated"); - var source = new MapperSource(new GlobalOptions { IdentifierMapperOptions = mapperOptions, TriggerUpdatesOptions = new TriggerUpdatesOptions() }, new TriggerUpdatesFromMapperOptions - { - DateOfLastUpdate = new DateTime(2020, 01, 01), + Assert.That(map.GetRowCount(), Is.EqualTo(1), "We should have a mapping table with 1 entry"); + }); - // This is the thing we are testing - LiveDatabaseFieldName = "PatientID" - }); + var source = new MapperSource(new GlobalOptions { IdentifierMapperOptions = mapperOptions, TriggerUpdatesOptions = new TriggerUpdatesOptions() }, new TriggerUpdatesFromMapperOptions + { + DateOfLastUpdate = new DateTime(2020, 01, 01), - var msg = source.GetUpdates().ToArray(); - Assert.That(msg, Is.Not.Null); + // This is the thing we are testing + LiveDatabaseFieldName = "PatientID" + }); - Assert.Multiple(() => - { - Assert.That(msg[0].WhereFields.Single(), 
Is.EqualTo("PatientID"), "Expected the column in the live database to be updated to be the explicit column name we provided on the command line"); - Assert.That(msg[0].WriteIntoFields.Single(), Is.EqualTo("PatientID"), "Expected the column in the live database to be updated to be the explicit column name we provided on the command line"); + var msg = source.GetUpdates().ToArray(); + Assert.That(msg, Is.Not.Null); - Assert.That(msg[0].HaveValues.Single(), Is.EqualTo("0A0A0A0A0A")); - Assert.That(msg[0].Values.Single(), Is.EqualTo("0Z0Z0Z0Z0Z")); + Assert.Multiple(() => + { + Assert.That(msg[0].WhereFields.Single(), Is.EqualTo("PatientID"), "Expected the column in the live database to be updated to be the explicit column name we provided on the command line"); + Assert.That(msg[0].WriteIntoFields.Single(), Is.EqualTo("PatientID"), "Expected the column in the live database to be updated to be the explicit column name we provided on the command line"); - Assert.That(msg, Has.Length.EqualTo(1), "We expected only one update"); - }); + Assert.That(msg[0].HaveValues.Single(), Is.EqualTo("0A0A0A0A0A")); + Assert.That(msg[0].Values.Single(), Is.EqualTo("0Z0Z0Z0Z0Z")); + + Assert.That(msg, Has.Length.EqualTo(1), "We expected only one update"); + }); + } + + + [TestCase(DatabaseType.MySql)] + [TestCase(DatabaseType.MicrosoftSQLServer)] + public void TestMapperSource_UpdatedMapping_Qualifier(DatabaseType dbType) + { + SetupMappers(dbType, out DiscoveredTable map, out DiscoveredTable? 
_, out IdentifierMapperOptions mapperOptions); + + // Simulate a data load that changes the mapping of CHI 0101010101 from 0A0A0A0A0A to 0Z0Z0Z0Z0Z + using (var con = map.Database.Server.GetConnection()) + { + con.Open(); + Assert.That(map.GetCommand($"UPDATE {map.GetFullyQualifiedName()} SET ECHI = null WHERE CHI = '0101010101'", con).ExecuteNonQuery(), Is.EqualTo(1)); } + var archive = map.Database.ExpectTable(map.GetRuntimeName() + "_Archive"); + Assert.Multiple(() => + { + Assert.That(archive.Exists(), Is.True, "Archive table should definitely be there, we created the trigger after all"); + Assert.That(archive.GetRowCount(), Is.EqualTo(1), "Expected the old ECHI to have an entry in the archive when it was updated"); + + Assert.That(map.GetRowCount(), Is.EqualTo(1), "We should have a mapping table with 1 entry"); + }); - [TestCase(DatabaseType.MySql)] - [TestCase(DatabaseType.MicrosoftSQLServer)] - public void TestMapperSource_UpdatedMapping_Qualifier(DatabaseType dbType) + var source = new MapperSource(new GlobalOptions { IdentifierMapperOptions = mapperOptions, TriggerUpdatesOptions = new TriggerUpdatesOptions() }, new TriggerUpdatesFromMapperOptions { - SetupMappers(dbType, out DiscoveredTable map, out DiscoveredTable? 
_, out IdentifierMapperOptions mapperOptions); - - // Simulate a data load that changes the mapping of CHI 0101010101 from 0A0A0A0A0A to 0Z0Z0Z0Z0Z - using (var con = map.Database.Server.GetConnection()) - { - con.Open(); - Assert.That(map.GetCommand($"UPDATE {map.GetFullyQualifiedName()} SET ECHI = null WHERE CHI = '0101010101'", con).ExecuteNonQuery(), Is.EqualTo(1)); - } - - var archive = map.Database.ExpectTable(map.GetRuntimeName() + "_Archive"); - Assert.Multiple(() => - { - Assert.That(archive.Exists(), Is.True, "Archive table should definitely be there, we created the trigger after all"); - Assert.That(archive.GetRowCount(), Is.EqualTo(1), "Expected the old ECHI to have an entry in the archive when it was updated"); - - Assert.That(map.GetRowCount(), Is.EqualTo(1), "We should have a mapping table with 1 entry"); - }); + DateOfLastUpdate = new DateTime(2020, 01, 01), - var source = new MapperSource(new GlobalOptions { IdentifierMapperOptions = mapperOptions, TriggerUpdatesOptions = new TriggerUpdatesOptions() }, new TriggerUpdatesFromMapperOptions - { - DateOfLastUpdate = new DateTime(2020, 01, 01), + // This is the thing we are testing + Qualifier = '\'', + LiveDatabaseFieldName = "PatientID" + }); - // This is the thing we are testing - Qualifier = '\'', - LiveDatabaseFieldName = "PatientID" - }); + var msg = source.GetUpdates().ToArray(); + Assert.That(msg, Is.Not.Null); - var msg = source.GetUpdates().ToArray(); - Assert.That(msg, Is.Not.Null); + Assert.Multiple(() => + { + Assert.That(msg[0].WhereFields.Single(), Is.EqualTo("PatientID"), "Expected the column in the live database to be updated to be the explicit column name we provided on the command line"); + Assert.That(msg[0].WriteIntoFields.Single(), Is.EqualTo("PatientID"), "Expected the column in the live database to be updated to be the explicit column name we provided on the command line"); - Assert.Multiple(() => - { - Assert.That(msg[0].WhereFields.Single(), Is.EqualTo("PatientID"), "Expected 
the column in the live database to be updated to be the explicit column name we provided on the command line"); - Assert.That(msg[0].WriteIntoFields.Single(), Is.EqualTo("PatientID"), "Expected the column in the live database to be updated to be the explicit column name we provided on the command line"); + Assert.That(msg[0].HaveValues.Single(), Is.EqualTo("'0A0A0A0A0A'")); + Assert.That(msg[0].Values.Single(), Is.EqualTo("null")); - Assert.That(msg[0].HaveValues.Single(), Is.EqualTo("'0A0A0A0A0A'")); - Assert.That(msg[0].Values.Single(), Is.EqualTo("null")); + Assert.That(msg, Has.Length.EqualTo(1), "We expected only one update"); + }); + } - Assert.That(msg, Has.Length.EqualTo(1), "We expected only one update"); - }); - } + [TestCase(DatabaseType.MySql)] + [TestCase(DatabaseType.MicrosoftSQLServer)] + public void TestMapperSource_GuidMappingNowExists(DatabaseType dbType) + { + SetupMappers(dbType, out DiscoveredTable map, out DiscoveredTable? guidTable, out IdentifierMapperOptions mapperOptions); - [TestCase(DatabaseType.MySql)] - [TestCase(DatabaseType.MicrosoftSQLServer)] - public void TestMapperSource_GuidMappingNowExists(DatabaseType dbType) + // Simulate a data load that inserts the previously unknown value 0202020202 into the mapping as 0X0X0X0X0X + // The value 0202020202 is in the guid mapping table! so we would expect a global system update to be issued for the temporary guid mapping to the new legit mapping + map.Insert(new Dictionary { - SetupMappers(dbType, out DiscoveredTable map, out DiscoveredTable? guidTable, out IdentifierMapperOptions mapperOptions); - - // Simulate a data load that inserts the previously unknown value 0202020202 into the mapping as 0X0X0X0X0X - // The value 0202020202 is in the guid mapping table! 
so we would expect a global system update to be issued for the temporary guid mapping to the new legit mapping - map.Insert(new Dictionary - { - {"CHI","0202020202" }, - {"ECHI","0X0X0X0X0X" }, - {SpecialFieldNames.ValidFrom,DateTime.Now }, - {SpecialFieldNames.DataLoadRunID,55}, - }); - - var oldTempGuid = guidTable!.GetDataTable().Rows[0][TableLookupWithGuidFallbackSwapper.GuidColumnName]; - Assert.Multiple(() => - { - Assert.That(oldTempGuid, Is.Not.Null); - - Assert.That(map.GetRowCount(), Is.EqualTo(2), "We should have a mapping table with 2 entries, the old existing one 0101010101 and a new one 0202020202"); + {"CHI","0202020202" }, + {"ECHI","0X0X0X0X0X" }, + {SpecialFieldNames.ValidFrom,DateTime.Now }, + {SpecialFieldNames.DataLoadRunID,55}, }); - var source = new MapperSource(new GlobalOptions { IdentifierMapperOptions = mapperOptions, TriggerUpdatesOptions = new TriggerUpdatesOptions() }, new TriggerUpdatesFromMapperOptions { DateOfLastUpdate = new DateTime(2020, 01, 01) }); + var oldTempGuid = guidTable!.GetDataTable().Rows[0][TableLookupWithGuidFallbackSwapper.GuidColumnName]; + Assert.Multiple(() => + { + Assert.That(oldTempGuid, Is.Not.Null); - var msg = source.GetUpdates().ToArray(); - Assert.That(msg, Is.Not.Null); + Assert.That(map.GetRowCount(), Is.EqualTo(2), "We should have a mapping table with 2 entries, the old existing one 0101010101 and a new one 0202020202"); + }); - Assert.Multiple(() => - { - Assert.That(msg[0].WhereFields.Single(), Is.EqualTo("ECHI")); - Assert.That(msg[0].WriteIntoFields.Single(), Is.EqualTo("ECHI")); + var source = new MapperSource(new GlobalOptions { IdentifierMapperOptions = mapperOptions, TriggerUpdatesOptions = new TriggerUpdatesOptions() }, new TriggerUpdatesFromMapperOptions { DateOfLastUpdate = new DateTime(2020, 01, 01) }); - Assert.That(msg[0].HaveValues.Single(), Is.EqualTo(oldTempGuid), "Expected the temporary guid to be the thing we are searching for to replace"); - Assert.That(msg[0].Values.Single(), 
Is.EqualTo("0X0X0X0X0X"), "Expected the replacement value to be the new legit mapping"); + var msg = source.GetUpdates().ToArray(); + Assert.That(msg, Is.Not.Null); - Assert.That(msg, Has.Length.EqualTo(1), "We expected only one update"); - }); + Assert.Multiple(() => + { + Assert.That(msg[0].WhereFields.Single(), Is.EqualTo("ECHI")); + Assert.That(msg[0].WriteIntoFields.Single(), Is.EqualTo("ECHI")); + + Assert.That(msg[0].HaveValues.Single(), Is.EqualTo(oldTempGuid), "Expected the temporary guid to be the thing we are searching for to replace"); + Assert.That(msg[0].Values.Single(), Is.EqualTo("0X0X0X0X0X"), "Expected the replacement value to be the new legit mapping"); + + Assert.That(msg, Has.Length.EqualTo(1), "We expected only one update"); + }); + } + + [TestCase(DatabaseType.MySql)] + [TestCase(DatabaseType.MicrosoftSQLServer)] + public void Test_MapperSource_NoGuids(DatabaseType dbType) + { + SetupMappers(dbType, out DiscoveredTable map, out DiscoveredTable? _, out IdentifierMapperOptions mapperOptions, false); + + // Simulate a data load that changes the mapping of CHI 0101010101 from 0A0A0A0A0A to 0Z0Z0Z0Z0Z + using (var con = map.Database.Server.GetConnection()) + { + con.Open(); + Assert.That(map.GetCommand($"UPDATE {map.GetFullyQualifiedName()} SET ECHI = '0Z0Z0Z0Z0Z' WHERE CHI = '0101010101'", con).ExecuteNonQuery(), Is.EqualTo(1)); } - [TestCase(DatabaseType.MySql)] - [TestCase(DatabaseType.MicrosoftSQLServer)] - public void Test_MapperSource_NoGuids(DatabaseType dbType) + var archive = map.Database.ExpectTable(map.GetRuntimeName() + "_Archive"); + Assert.Multiple(() => { - SetupMappers(dbType, out DiscoveredTable map, out DiscoveredTable? 
_, out IdentifierMapperOptions mapperOptions, false); - - // Simulate a data load that changes the mapping of CHI 0101010101 from 0A0A0A0A0A to 0Z0Z0Z0Z0Z - using (var con = map.Database.Server.GetConnection()) - { - con.Open(); - Assert.That(map.GetCommand($"UPDATE {map.GetFullyQualifiedName()} SET ECHI = '0Z0Z0Z0Z0Z' WHERE CHI = '0101010101'", con).ExecuteNonQuery(), Is.EqualTo(1)); - } - - var archive = map.Database.ExpectTable(map.GetRuntimeName() + "_Archive"); - Assert.Multiple(() => - { - Assert.That(archive.Exists(), Is.True, "Archive table should definitely be there, we created the trigger after all"); - Assert.That(archive.GetRowCount(), Is.EqualTo(1), "Expected the old ECHI to have an entry in the archive when it was updated"); - - Assert.That(map.GetRowCount(), Is.EqualTo(1), "We should have a mapping table with 1 entry"); - }); + Assert.That(archive.Exists(), Is.True, "Archive table should definitely be there, we created the trigger after all"); + Assert.That(archive.GetRowCount(), Is.EqualTo(1), "Expected the old ECHI to have an entry in the archive when it was updated"); - var source = new MapperSource(new GlobalOptions { IdentifierMapperOptions = mapperOptions, TriggerUpdatesOptions = new TriggerUpdatesOptions() }, new TriggerUpdatesFromMapperOptions { DateOfLastUpdate = new DateTime(2020, 01, 01) }); + Assert.That(map.GetRowCount(), Is.EqualTo(1), "We should have a mapping table with 1 entry"); + }); - var msg = source.GetUpdates().ToArray(); - Assert.That(msg, Is.Not.Null); + var source = new MapperSource(new GlobalOptions { IdentifierMapperOptions = mapperOptions, TriggerUpdatesOptions = new TriggerUpdatesOptions() }, new TriggerUpdatesFromMapperOptions { DateOfLastUpdate = new DateTime(2020, 01, 01) }); - Assert.Multiple(() => - { - Assert.That(msg[0].WhereFields.Single(), Is.EqualTo("ECHI")); - Assert.That(msg[0].WriteIntoFields.Single(), Is.EqualTo("ECHI")); + var msg = source.GetUpdates().ToArray(); + Assert.That(msg, Is.Not.Null); - 
Assert.That(msg[0].HaveValues.Single(), Is.EqualTo("0A0A0A0A0A")); - Assert.That(msg[0].Values.Single(), Is.EqualTo("0Z0Z0Z0Z0Z")); + Assert.Multiple(() => + { + Assert.That(msg[0].WhereFields.Single(), Is.EqualTo("ECHI")); + Assert.That(msg[0].WriteIntoFields.Single(), Is.EqualTo("ECHI")); - Assert.That(msg, Has.Length.EqualTo(1), "We expected only one update"); - }); - } + Assert.That(msg[0].HaveValues.Single(), Is.EqualTo("0A0A0A0A0A")); + Assert.That(msg[0].Values.Single(), Is.EqualTo("0Z0Z0Z0Z0Z")); + + Assert.That(msg, Has.Length.EqualTo(1), "We expected only one update"); + }); } } diff --git a/tests/SmiServices.IntegrationTests/Common/GlobalOptionsExtensions.cs b/tests/SmiServices.IntegrationTests/Common/GlobalOptionsExtensions.cs index d53a3ec93..1ad80d1ff 100644 --- a/tests/SmiServices.IntegrationTests/Common/GlobalOptionsExtensions.cs +++ b/tests/SmiServices.IntegrationTests/Common/GlobalOptionsExtensions.cs @@ -4,65 +4,64 @@ using SmiServices.Common.Options; using System.Data.Common; -namespace SmiServices.IntegrationTests.Common +namespace SmiServices.IntegrationTests.Common; + +public static class GlobalOptionsExtensions { - public static class GlobalOptionsExtensions + /// + /// Updates the to reference the provided arguments. Passing null for arguments results + /// in the associated settings being set to null. + /// + /// + /// + /// + /// + /// Connection string to RDMP catalogue database e.g. TEST_Catalogue + /// Connection string to RDMP data export database e.g. TEST_DataExport + public static void UseTestValues( + this GlobalOptions g, + IConnection? rabbit, + MongoClientSettings? mongo, + RequiresRelationalDb.ConStrs? relational, + DbConnectionStringBuilder? catalogueConnectionString, + DbConnectionStringBuilder? dataExportConnectionStringBuilder + ) { - /// - /// Updates the to reference the provided arguments. Passing null for arguments results - /// in the associated settings being set to null. 
- /// - /// - /// - /// - /// - /// Connection string to RDMP catalogue database e.g. TEST_Catalogue - /// Connection string to RDMP data export database e.g. TEST_DataExport - public static void UseTestValues( - this GlobalOptions g, - IConnection? rabbit, - MongoClientSettings? mongo, - RequiresRelationalDb.ConStrs? relational, - DbConnectionStringBuilder? catalogueConnectionString, - DbConnectionStringBuilder? dataExportConnectionStringBuilder - ) - { - //Rabbit - g.RabbitOptions!.RabbitMqHostName = rabbit?.Endpoint.HostName!; - g.RabbitOptions.RabbitMqHostPort = rabbit?.Endpoint.Port ?? -1; + //Rabbit + g.RabbitOptions!.RabbitMqHostName = rabbit?.Endpoint.HostName!; + g.RabbitOptions.RabbitMqHostPort = rabbit?.Endpoint.Port ?? -1; - //RDMP - g.RDMPOptions!.CatalogueConnectionString = catalogueConnectionString?.ConnectionString; - g.RDMPOptions.DataExportConnectionString = dataExportConnectionStringBuilder?.ConnectionString; + //RDMP + g.RDMPOptions!.CatalogueConnectionString = catalogueConnectionString?.ConnectionString; + g.RDMPOptions.DataExportConnectionString = dataExportConnectionStringBuilder?.ConnectionString; - //Mongo Db - g.MongoDatabases!.DicomStoreOptions!.HostName = mongo?.Server?.Host; - g.MongoDatabases.ExtractionStoreOptions!.HostName = mongo?.Server?.Host; + //Mongo Db + g.MongoDatabases!.DicomStoreOptions!.HostName = mongo?.Server?.Host; + g.MongoDatabases.ExtractionStoreOptions!.HostName = mongo?.Server?.Host; - g.MongoDatabases.DicomStoreOptions.Port = mongo?.Server?.Port ?? -1; - g.MongoDatabases.ExtractionStoreOptions.Port = mongo?.Server?.Port ?? -1; + g.MongoDatabases.DicomStoreOptions.Port = mongo?.Server?.Port ?? -1; + g.MongoDatabases.ExtractionStoreOptions.Port = mongo?.Server?.Port ?? 
-1; - g.MongoDatabases.DicomStoreOptions.UserName = mongo?.Credential?.Username; - g.MongoDatabases.ExtractionStoreOptions.UserName = mongo?.Credential?.Username; + g.MongoDatabases.DicomStoreOptions.UserName = mongo?.Credential?.Username; + g.MongoDatabases.ExtractionStoreOptions.UserName = mongo?.Credential?.Username; - //Relational Databases - var mappingDb = relational?.GetServer(DatabaseType.MicrosoftSQLServer)?.ExpectDatabase("TEST_MappingDatabase"); + //Relational Databases + var mappingDb = relational?.GetServer(DatabaseType.MicrosoftSQLServer)?.ExpectDatabase("TEST_MappingDatabase"); - g.IdentifierMapperOptions!.MappingConnectionString = mappingDb?.Server?.Builder?.ConnectionString; - g.IdentifierMapperOptions.MappingDatabaseType = mappingDb?.Server?.DatabaseType ?? DatabaseType.MicrosoftSQLServer; - g.IdentifierMapperOptions.MappingTableName = mappingDb?.ExpectTable("MappingTable").GetFullyQualifiedName(); + g.IdentifierMapperOptions!.MappingConnectionString = mappingDb?.Server?.Builder?.ConnectionString; + g.IdentifierMapperOptions.MappingDatabaseType = mappingDb?.Server?.DatabaseType ?? 
DatabaseType.MicrosoftSQLServer; + g.IdentifierMapperOptions.MappingTableName = mappingDb?.ExpectTable("MappingTable").GetFullyQualifiedName(); - g.DicomRelationalMapperOptions!.QoSPrefetchCount = 1; - g.CohortExtractorOptions!.QoSPrefetchCount = 1; - g.CohortPackagerOptions!.ExtractRequestInfoOptions!.QoSPrefetchCount = 1; - g.CohortPackagerOptions.FileCollectionInfoOptions!.QoSPrefetchCount = 1; - g.CohortPackagerOptions.NoVerifyStatusOptions!.QoSPrefetchCount = 1; - g.CohortPackagerOptions.VerificationStatusOptions!.QoSPrefetchCount = 1; - g.DicomTagReaderOptions!.QoSPrefetchCount = 1; - g.IdentifierMapperOptions.QoSPrefetchCount = 1; - g.MongoDbPopulatorOptions!.SeriesQueueConsumerOptions!.QoSPrefetchCount = 1; - g.MongoDbPopulatorOptions.ImageQueueConsumerOptions!.QoSPrefetchCount = 1; - } + g.DicomRelationalMapperOptions!.QoSPrefetchCount = 1; + g.CohortExtractorOptions!.QoSPrefetchCount = 1; + g.CohortPackagerOptions!.ExtractRequestInfoOptions!.QoSPrefetchCount = 1; + g.CohortPackagerOptions.FileCollectionInfoOptions!.QoSPrefetchCount = 1; + g.CohortPackagerOptions.NoVerifyStatusOptions!.QoSPrefetchCount = 1; + g.CohortPackagerOptions.VerificationStatusOptions!.QoSPrefetchCount = 1; + g.DicomTagReaderOptions!.QoSPrefetchCount = 1; + g.IdentifierMapperOptions.QoSPrefetchCount = 1; + g.MongoDbPopulatorOptions!.SeriesQueueConsumerOptions!.QoSPrefetchCount = 1; + g.MongoDbPopulatorOptions.ImageQueueConsumerOptions!.QoSPrefetchCount = 1; } } diff --git a/tests/SmiServices.IntegrationTests/Common/HeaderPreservationTest.cs b/tests/SmiServices.IntegrationTests/Common/HeaderPreservationTest.cs index 4961aa5da..292cee1a8 100644 --- a/tests/SmiServices.IntegrationTests/Common/HeaderPreservationTest.cs +++ b/tests/SmiServices.IntegrationTests/Common/HeaderPreservationTest.cs @@ -7,79 +7,77 @@ using SmiServices.UnitTests.TestCommon; using System; -namespace SmiServices.IntegrationTests.Common +namespace SmiServices.IntegrationTests.Common; + +[RequiresRabbit] +public 
class HeaderPreservationTest { - [RequiresRabbit] - public class HeaderPreservationTest + [OneTimeSetUp] + public void OneTimeSetUp() { - [OneTimeSetUp] - public void OneTimeSetUp() - { - } + } - [Test] - public void SendHeader() - { - var o = new GlobalOptionsFactory().Load(nameof(SendHeader)); + [Test] + public void SendHeader() + { + var o = new GlobalOptionsFactory().Load(nameof(SendHeader)); - var consumerOptions = new ConsumerOptions - { - QueueName = "TEST.HeaderPreservationTest_Read1", - AutoAck = false, - QoSPrefetchCount = 1 - }; + var consumerOptions = new ConsumerOptions + { + QueueName = "TEST.HeaderPreservationTest_Read1", + AutoAck = false, + QoSPrefetchCount = 1 + }; - TestConsumer consumer; + TestConsumer consumer; - using var tester = new MicroserviceTester(o.RabbitOptions!, consumerOptions); + using var tester = new MicroserviceTester(o.RabbitOptions!, consumerOptions); - var header = new MessageHeader - { - MessageGuid = Guid.Parse("5afce68f-c270-4bf3-b327-756f6038bb76"), - Parents = [Guid.Parse("12345678-c270-4bf3-b327-756f6038bb76"), Guid.Parse("87654321-c270-4bf3-b327-756f6038bb76")], - }; + var header = new MessageHeader + { + MessageGuid = Guid.Parse("5afce68f-c270-4bf3-b327-756f6038bb76"), + Parents = [Guid.Parse("12345678-c270-4bf3-b327-756f6038bb76"), Guid.Parse("87654321-c270-4bf3-b327-756f6038bb76")], + }; - tester.SendMessage(consumerOptions, header, new TestMessage { Message = "hi" }); + tester.SendMessage(consumerOptions, header, new TestMessage { Message = "hi" }); - consumer = new TestConsumer(); - tester.Broker.StartConsumer(consumerOptions, consumer); + consumer = new TestConsumer(); + tester.Broker.StartConsumer(consumerOptions, consumer); - TestTimelineAwaiter.Await(() => consumer.Failed || consumer.Passed, "timed out", 5000); + TestTimelineAwaiter.Await(() => consumer.Failed || consumer.Passed, "timed out", 5000); - Assert.That(consumer.Passed, Is.True); - } + Assert.That(consumer.Passed, Is.True); + } - private class 
TestConsumer : Consumer - { - public bool Passed { get; private set; } - public bool Failed { get; private set; } + private class TestConsumer : Consumer + { + public bool Passed { get; private set; } + public bool Failed { get; private set; } - protected override void ProcessMessageImpl(IMessageHeader header, TestMessage message, ulong tag) + protected override void ProcessMessageImpl(IMessageHeader header, TestMessage message, ulong tag) + { + try { - try + Assert.Multiple(() => { - Assert.Multiple(() => - { - Assert.That(header.Parents[0].ToString(), Is.EqualTo("12345678-c270-4bf3-b327-756f6038bb76")); - Assert.That(header.Parents[1].ToString(), Is.EqualTo("87654321-c270-4bf3-b327-756f6038bb76")); - Assert.That(header.Parents[2].ToString(), Is.EqualTo("5afce68f-c270-4bf3-b327-756f6038bb76")); - }); - - Passed = true; - Ack(header, tag); - } - catch (Exception) - { - Failed = true; - } - } - } + Assert.That(header.Parents[0].ToString(), Is.EqualTo("12345678-c270-4bf3-b327-756f6038bb76")); + Assert.That(header.Parents[1].ToString(), Is.EqualTo("87654321-c270-4bf3-b327-756f6038bb76")); + Assert.That(header.Parents[2].ToString(), Is.EqualTo("5afce68f-c270-4bf3-b327-756f6038bb76")); + }); - private class TestMessage : IMessage - { - public string? Message { get; set; } + Passed = true; + Ack(header, tag); + } + catch (Exception) + { + Failed = true; + } } } + private class TestMessage : IMessage + { + public string? 
Message { get; set; } + } } diff --git a/tests/SmiServices.IntegrationTests/Common/Messaging/RabbitMQBrokerTests.cs b/tests/SmiServices.IntegrationTests/Common/Messaging/RabbitMQBrokerTests.cs index b1b2248c0..a44dab9de 100644 --- a/tests/SmiServices.IntegrationTests/Common/Messaging/RabbitMQBrokerTests.cs +++ b/tests/SmiServices.IntegrationTests/Common/Messaging/RabbitMQBrokerTests.cs @@ -10,611 +10,610 @@ using System.Collections.Generic; using System.Text; -namespace SmiServices.IntegrationTests.Common.Messaging +namespace SmiServices.IntegrationTests.Common.Messaging; + +[RequiresRabbit] +public class RabbitMQBrokerTests { - [RequiresRabbit] - public class RabbitMQBrokerTests + private static ConsumerOptions TestConsumerOptions() + => new() + { + QueueName = "TEST.TestQueue", + QoSPrefetchCount = 1, + AutoAck = false + }; + + private static GlobalOptions GlobalOptionsForTest() + => new GlobalOptionsFactory().Load(TestContext.CurrentContext.Test.Name); + + [TestCase(null)] + [TestCase(" ")] + public void SetupProducer_InvalidExchangeName_Throws(string? exchangeName) { - private static ConsumerOptions TestConsumerOptions() - => new() - { - QueueName = "TEST.TestQueue", - QoSPrefetchCount = 1, - AutoAck = false - }; - - private static GlobalOptions GlobalOptionsForTest() - => new GlobalOptionsFactory().Load(TestContext.CurrentContext.Test.Name); - - [TestCase(null)] - [TestCase(" ")] - public void SetupProducer_InvalidExchangeName_Throws(string? 
exchangeName) + // Arrange + + var producerOptions = new ProducerOptions { - // Arrange + ExchangeName = exchangeName + }; + var globalOptions = GlobalOptionsForTest(); + using var tester = new MicroserviceTester(globalOptions.RabbitOptions!, TestConsumerOptions()); - var producerOptions = new ProducerOptions - { - ExchangeName = exchangeName - }; - var globalOptions = GlobalOptionsForTest(); - using var tester = new MicroserviceTester(globalOptions.RabbitOptions!, TestConsumerOptions()); + // Act - // Act + void call() => tester.Broker.SetupProducer(producerOptions); - void call() => tester.Broker.SetupProducer(producerOptions); + // Assert - // Assert + var exc = Assert.Throws(call); + Assert.That(exc.Message, Is.EqualTo("The given producer options have invalid values")); + } - var exc = Assert.Throws(call); - Assert.That(exc.Message, Is.EqualTo("The given producer options have invalid values")); - } + [Test] + public void SetupProducer_MissingExchange_Throws() + { + // Arrange - [Test] - public void SetupProducer_MissingExchange_Throws() + var producerOptions = new ProducerOptions { - // Arrange + ExchangeName = "TEST.DoesNotExistExchange" + }; - var producerOptions = new ProducerOptions - { - ExchangeName = "TEST.DoesNotExistExchange" - }; + var globalOptions = GlobalOptionsForTest(); + using var tester = new MicroserviceTester(globalOptions.RabbitOptions!, TestConsumerOptions()); - var globalOptions = GlobalOptionsForTest(); - using var tester = new MicroserviceTester(globalOptions.RabbitOptions!, TestConsumerOptions()); + // Act - // Act + void call() => tester.Broker.SetupProducer(producerOptions); - void call() => tester.Broker.SetupProducer(producerOptions); + // Assert - // Assert + var exc = Assert.Throws(call); + Assert.That(exc.Message, Is.EqualTo("Expected exchange \"TEST.DoesNotExistExchange\" to exist")); + } - var exc = Assert.Throws(call); - Assert.That(exc.Message, Is.EqualTo("Expected exchange \"TEST.DoesNotExistExchange\" to exist")); - } + 
[Test] + public void StartConsumer_SetsQoSPrefetchCount() + { + // Arrange - [Test] - public void StartConsumer_SetsQoSPrefetchCount() - { - // Arrange + var globalOptions = GlobalOptionsForTest(); + var consumerOptions = TestConsumerOptions(); + using var tester = new MicroserviceTester(globalOptions.RabbitOptions!, consumerOptions); + var mockConsumer = new Mock>(MockBehavior.Strict); + mockConsumer.SetupProperty(x => x.QoSPrefetchCount); + consumerOptions.QoSPrefetchCount = 123; - var globalOptions = GlobalOptionsForTest(); - var consumerOptions = TestConsumerOptions(); - using var tester = new MicroserviceTester(globalOptions.RabbitOptions!, consumerOptions); - var mockConsumer = new Mock>(MockBehavior.Strict); - mockConsumer.SetupProperty(x => x.QoSPrefetchCount); - consumerOptions.QoSPrefetchCount = 123; + // Act - // Act + tester.Broker.StartConsumer(consumerOptions, mockConsumer.Object); - tester.Broker.StartConsumer(consumerOptions, mockConsumer.Object); + // Assert - // Assert + Assert.That(mockConsumer.Object.QoSPrefetchCount, Is.EqualTo(123)); + } - Assert.That(mockConsumer.Object.QoSPrefetchCount, Is.EqualTo(123)); - } + [Test] + public void StartConsumer_MissingQueue_Throws() + { + // Arrange - [Test] - public void StartConsumer_MissingQueue_Throws() - { - // Arrange + var consumerOptions = TestConsumerOptions(); + var queueName = $"TEST.WrongQueue{new Random().NextInt64()}"; + consumerOptions.QueueName = queueName; - var consumerOptions = TestConsumerOptions(); - var queueName = $"TEST.WrongQueue{new Random().NextInt64()}"; - consumerOptions.QueueName = queueName; + var globalOptions = GlobalOptionsForTest(); + using var tester = new MicroserviceTester(globalOptions.RabbitOptions!); - var globalOptions = GlobalOptionsForTest(); - using var tester = new MicroserviceTester(globalOptions.RabbitOptions!); + var mockConsumer = new Mock>(MockBehavior.Strict); + mockConsumer.SetupProperty(x => x.QoSPrefetchCount); - var mockConsumer = new 
Mock>(MockBehavior.Strict); - mockConsumer.SetupProperty(x => x.QoSPrefetchCount); + // Act - // Act + void call() => tester.Broker.StartConsumer(consumerOptions, mockConsumer.Object); - void call() => tester.Broker.StartConsumer(consumerOptions, mockConsumer.Object); + // Assert - // Assert + var exc = Assert.Throws(call); + Assert.That(exc.Message, Is.EqualTo($"Expected queue \"{queueName}\" to exist")); + } - var exc = Assert.Throws(call); - Assert.That(exc.Message, Is.EqualTo($"Expected queue \"{queueName}\" to exist")); - } + [Test] + public void Shutdown_NoTimeout_Throws() + { + // Arrange - [Test] - public void Shutdown_NoTimeout_Throws() - { - // Arrange + var globalOptions = GlobalOptionsForTest(); + using var tester = new MicroserviceTester(globalOptions.RabbitOptions!, TestConsumerOptions()); + var broker = new RabbitMQBroker(globalOptions.RabbitOptions!, "RabbitMQBrokerTests"); + var mockConsumer = new Mock>(MockBehavior.Strict); + mockConsumer.SetupProperty(x => x.QoSPrefetchCount); + broker.StartConsumer(TestConsumerOptions(), mockConsumer.Object); - var globalOptions = GlobalOptionsForTest(); - using var tester = new MicroserviceTester(globalOptions.RabbitOptions!, TestConsumerOptions()); - var broker = new RabbitMQBroker(globalOptions.RabbitOptions!, "RabbitMQBrokerTests"); - var mockConsumer = new Mock>(MockBehavior.Strict); - mockConsumer.SetupProperty(x => x.QoSPrefetchCount); - broker.StartConsumer(TestConsumerOptions(), mockConsumer.Object); + // Act - // Act + void call() => broker.Shutdown(TimeSpan.Zero); - void call() => broker.Shutdown(TimeSpan.Zero); + // Assert - // Assert + var exc = Assert.Throws(call); + Assert.That(exc.Message, Is.EqualTo("Invalid timeout value")); + } - var exc = Assert.Throws(call); - Assert.That(exc.Message, Is.EqualTo("Invalid timeout value")); - } + [Test] + public void StartConsumer_AfterShutdown_Throws() + { + // Arrange - [Test] - public void StartConsumer_AfterShutdown_Throws() - { - // Arrange + var 
globalOptions = GlobalOptionsForTest(); + var broker = new RabbitMQBroker(globalOptions.RabbitOptions!, "RabbitMQBrokerTests"); + var mockConsumer = new Mock>(MockBehavior.Strict); + mockConsumer.SetupProperty(x => x.QoSPrefetchCount); + + // Act - var globalOptions = GlobalOptionsForTest(); - var broker = new RabbitMQBroker(globalOptions.RabbitOptions!, "RabbitMQBrokerTests"); - var mockConsumer = new Mock>(MockBehavior.Strict); - mockConsumer.SetupProperty(x => x.QoSPrefetchCount); + broker.Shutdown(RabbitMQBroker.DefaultOperationTimeout); - // Act + // Assert - broker.Shutdown(RabbitMQBroker.DefaultOperationTimeout); + Assert.That(broker.ShutdownCalled, Is.EqualTo(true)); + var exc = Assert.Throws(() => broker.StartConsumer(TestConsumerOptions(), mockConsumer.Object)); + Assert.That(exc.Message, Is.EqualTo("Adapter has been shut down")); + } - // Assert + [Test] + public void StartConsumer_InvalidOptions_Throws() + { + // Arrange - Assert.That(broker.ShutdownCalled, Is.EqualTo(true)); - var exc = Assert.Throws(() => broker.StartConsumer(TestConsumerOptions(), mockConsumer.Object)); - Assert.That(exc.Message, Is.EqualTo("Adapter has been shut down")); - } + var globalOptions = GlobalOptionsForTest(); + var broker = new RabbitMQBroker(globalOptions.RabbitOptions!, "RabbitMQBrokerTests"); + var mockConsumer = new Mock>(MockBehavior.Strict); - [Test] - public void StartConsumer_InvalidOptions_Throws() + var consumerOptions = new ConsumerOptions() { - // Arrange + QueueName = null, + }; - var globalOptions = GlobalOptionsForTest(); - var broker = new RabbitMQBroker(globalOptions.RabbitOptions!, "RabbitMQBrokerTests"); - var mockConsumer = new Mock>(MockBehavior.Strict); + // Act - var consumerOptions = new ConsumerOptions() - { - QueueName = null, - }; + void call() => broker.StartConsumer(consumerOptions, mockConsumer.Object); - // Act + // Assert - void call() => broker.StartConsumer(consumerOptions, mockConsumer.Object); + var exc = Assert.Throws(call); + 
Assert.That(exc.Message, Is.EqualTo("The given consumerOptions has invalid values")); + } - // Assert + [Test] + public void StartConsumer_SoloWithMultipleConsumers_Throws() + { + // Arrange - var exc = Assert.Throws(call); - Assert.That(exc.Message, Is.EqualTo("The given consumerOptions has invalid values")); - } + var globalOptions = GlobalOptionsForTest(); + var broker = new RabbitMQBroker(globalOptions.RabbitOptions!, "RabbitMQBrokerTests"); + var mockConsumer = new Mock>(MockBehavior.Strict); + mockConsumer.SetupProperty(x => x.QoSPrefetchCount); + var consumerOptions = TestConsumerOptions(); + using var tester = new MicroserviceTester(globalOptions.RabbitOptions!, consumerOptions); - [Test] - public void StartConsumer_SoloWithMultipleConsumers_Throws() - { - // Arrange + broker.StartConsumer(consumerOptions, mockConsumer.Object); - var globalOptions = GlobalOptionsForTest(); - var broker = new RabbitMQBroker(globalOptions.RabbitOptions!, "RabbitMQBrokerTests"); - var mockConsumer = new Mock>(MockBehavior.Strict); - mockConsumer.SetupProperty(x => x.QoSPrefetchCount); - var consumerOptions = TestConsumerOptions(); - using var tester = new MicroserviceTester(globalOptions.RabbitOptions!, consumerOptions); + // Act - broker.StartConsumer(consumerOptions, mockConsumer.Object); + void call() => broker.StartConsumer(consumerOptions, mockConsumer.Object, isSolo: true); - // Act + // Assert - void call() => broker.StartConsumer(consumerOptions, mockConsumer.Object, isSolo: true); + var exc = Assert.Throws(call); + Assert.That(exc.Message, Is.EqualTo("Already a consumer on queue TEST.TestQueue and solo consumer was specified")); + } - // Assert + [Test] + public void HandleMessage_MissingHeader_IsDiscarded() + { + // Arrange - var exc = Assert.Throws(call); - Assert.That(exc.Message, Is.EqualTo("Already a consumer on queue TEST.TestQueue and solo consumer was specified")); - } + var globalOptions = GlobalOptionsForTest(); + var consumerOptions = TestConsumerOptions(); 
+ using var tester = new MicroserviceTester(globalOptions.RabbitOptions!, consumerOptions); + using var model = tester.Broker.GetModel(TestContext.CurrentContext.Test.Name); + model.ConfirmSelect(); + var properties = model.CreateBasicProperties(); - [Test] - public void HandleMessage_MissingHeader_IsDiscarded() - { - // Arrange + var mockConsumer = new Mock>(MockBehavior.Strict); + mockConsumer.SetupProperty(x => x.QoSPrefetchCount); + var fatalCalled = false; + mockConsumer.Object.OnFatal += (sender, args) => fatalCalled = true; - var globalOptions = GlobalOptionsForTest(); - var consumerOptions = TestConsumerOptions(); - using var tester = new MicroserviceTester(globalOptions.RabbitOptions!, consumerOptions); - using var model = tester.Broker.GetModel(TestContext.CurrentContext.Test.Name); - model.ConfirmSelect(); - var properties = model.CreateBasicProperties(); + var broker = new RabbitMQBroker(globalOptions.RabbitOptions!, "RabbitMQBrokerTests"); + broker.StartConsumer(consumerOptions, mockConsumer.Object); - var mockConsumer = new Mock>(MockBehavior.Strict); - mockConsumer.SetupProperty(x => x.QoSPrefetchCount); - var fatalCalled = false; - mockConsumer.Object.OnFatal += (sender, args) => fatalCalled = true; + // Act - var broker = new RabbitMQBroker(globalOptions.RabbitOptions!, "RabbitMQBrokerTests"); - broker.StartConsumer(consumerOptions, mockConsumer.Object); + model.BasicPublish("TEST.TestExchange", "", mandatory: true, properties, Array.Empty()); + model.WaitForConfirms(); + TestTimelineAwaiter.Await(() => model.MessageCount(consumerOptions.QueueName) == 0); - // Act + // Assert - model.BasicPublish("TEST.TestExchange", "", mandatory: true, properties, Array.Empty()); - model.WaitForConfirms(); - TestTimelineAwaiter.Await(() => model.MessageCount(consumerOptions.QueueName) == 0); + Assert.That(fatalCalled, Is.False); + } - // Assert + [Test] + public void HandleMessage_InvalidMessage_IsDiscarded() + { + // Arrange - Assert.That(fatalCalled, Is.False); 
- } + var globalOptions = GlobalOptionsForTest(); + var consumerOptions = TestConsumerOptions(); + using var tester = new MicroserviceTester(globalOptions.RabbitOptions!, consumerOptions); + using var model = tester.Broker.GetModel(TestContext.CurrentContext.Test.Name); + model.ConfirmSelect(); + var properties = model.CreateBasicProperties(); + properties.Headers = new Dictionary(); + var header = new MessageHeader(); + header.Populate(properties.Headers); - [Test] - public void HandleMessage_InvalidMessage_IsDiscarded() - { - // Arrange + var mockConsumer = new Mock>(MockBehavior.Strict); + mockConsumer.SetupProperty(x => x.QoSPrefetchCount); + var fatalCalled = false; + mockConsumer.Object.OnFatal += (sender, args) => fatalCalled = true; - var globalOptions = GlobalOptionsForTest(); - var consumerOptions = TestConsumerOptions(); - using var tester = new MicroserviceTester(globalOptions.RabbitOptions!, consumerOptions); - using var model = tester.Broker.GetModel(TestContext.CurrentContext.Test.Name); - model.ConfirmSelect(); - var properties = model.CreateBasicProperties(); - properties.Headers = new Dictionary(); - var header = new MessageHeader(); - header.Populate(properties.Headers); + var broker = new RabbitMQBroker(globalOptions.RabbitOptions!, "RabbitMQBrokerTests"); + broker.StartConsumer(consumerOptions, mockConsumer.Object); - var mockConsumer = new Mock>(MockBehavior.Strict); - mockConsumer.SetupProperty(x => x.QoSPrefetchCount); - var fatalCalled = false; - mockConsumer.Object.OnFatal += (sender, args) => fatalCalled = true; + // Act - var broker = new RabbitMQBroker(globalOptions.RabbitOptions!, "RabbitMQBrokerTests"); - broker.StartConsumer(consumerOptions, mockConsumer.Object); + model.BasicPublish("TEST.TestExchange", "", mandatory: true, properties, Encoding.UTF8.GetBytes("Hello")); + model.WaitForConfirms(); + TestTimelineAwaiter.Await(() => model.MessageCount(consumerOptions.QueueName) == 0); - // Act + // Assert - 
model.BasicPublish("TEST.TestExchange", "", mandatory: true, properties, Encoding.UTF8.GetBytes("Hello")); - model.WaitForConfirms(); - TestTimelineAwaiter.Await(() => model.MessageCount(consumerOptions.QueueName) == 0); + Assert.That(fatalCalled, Is.False); + } - // Assert + [Test] + public void HandleControlMessage_HappyPath_IsOk() + { + // Arrange - Assert.That(fatalCalled, Is.False); - } + var globalOptions = GlobalOptionsForTest(); - [Test] - public void HandleControlMessage_HappyPath_IsOk() + var mockControlConsumer = new Mock(MockBehavior.Strict); + var consumerOptions = new ConsumerOptions { - // Arrange + QueueName = $"Control.{TestContext.CurrentContext.Test.Name}", + AutoAck = true, + }; + mockControlConsumer.Setup(x => x.ControlConsumerOptions).Returns(consumerOptions); + var called = false; + mockControlConsumer.Setup(x => x.ProcessMessage("hello", "")).Callback(() => called = true); - var globalOptions = GlobalOptionsForTest(); + using var tester = new MicroserviceTester(globalOptions.RabbitOptions!); + using var model = tester.Broker.GetModel(TestContext.CurrentContext.Test.Name); + model.ConfirmSelect(); + model.QueueDeclare(consumerOptions.QueueName, false, true, true, null); + model.QueueBind(consumerOptions.QueueName, globalOptions.RabbitOptions!.RabbitMqControlExchangeName, "", null); - var mockControlConsumer = new Mock(MockBehavior.Strict); - var consumerOptions = new ConsumerOptions - { - QueueName = $"Control.{TestContext.CurrentContext.Test.Name}", - AutoAck = true, - }; - mockControlConsumer.Setup(x => x.ControlConsumerOptions).Returns(consumerOptions); - var called = false; - mockControlConsumer.Setup(x => x.ProcessMessage("hello", "")).Callback(() => called = true); + var properties = model.CreateBasicProperties(); + var body = Encoding.UTF8.GetBytes("hello"); - using var tester = new MicroserviceTester(globalOptions.RabbitOptions!); - using var model = tester.Broker.GetModel(TestContext.CurrentContext.Test.Name); - 
model.ConfirmSelect(); - model.QueueDeclare(consumerOptions.QueueName, false, true, true, null); - model.QueueBind(consumerOptions.QueueName, globalOptions.RabbitOptions!.RabbitMqControlExchangeName, "", null); + var broker = new RabbitMQBroker(globalOptions.RabbitOptions!, "RabbitMQBrokerTests"); + broker.StartControlConsumer(mockControlConsumer.Object); - var properties = model.CreateBasicProperties(); - var body = Encoding.UTF8.GetBytes("hello"); + // Act - var broker = new RabbitMQBroker(globalOptions.RabbitOptions!, "RabbitMQBrokerTests"); - broker.StartControlConsumer(mockControlConsumer.Object); + model.BasicPublish(globalOptions.RabbitOptions!.RabbitMqControlExchangeName!, "", mandatory: true, properties, body); + model.WaitForConfirms(); - // Act + // Assert - model.BasicPublish(globalOptions.RabbitOptions!.RabbitMqControlExchangeName!, "", mandatory: true, properties, body); - model.WaitForConfirms(); + TestTimelineAwaiter.Await(() => called == true); + } - // Assert + [Test] + public void HandleControlMessage_HappyPathWithValidContentEncoding_IsOk() + { + // Arrange - TestTimelineAwaiter.Await(() => called == true); - } + var globalOptions = GlobalOptionsForTest(); - [Test] - public void HandleControlMessage_HappyPathWithValidContentEncoding_IsOk() + var mockControlConsumer = new Mock(MockBehavior.Strict); + var consumerOptions = new ConsumerOptions { - // Arrange + QueueName = $"Control.{TestContext.CurrentContext.Test.Name}", + AutoAck = true, + }; + mockControlConsumer.Setup(x => x.ControlConsumerOptions).Returns(consumerOptions); + var called = false; + mockControlConsumer.Setup(x => x.ProcessMessage("hello", "")).Callback(() => called = true); - var globalOptions = GlobalOptionsForTest(); + using var tester = new MicroserviceTester(globalOptions.RabbitOptions!); + using var model = tester.Broker.GetModel(TestContext.CurrentContext.Test.Name); + model.ConfirmSelect(); + model.QueueDeclare(consumerOptions.QueueName, false, true, true, null); + 
model.QueueBind(consumerOptions.QueueName, globalOptions.RabbitOptions!.RabbitMqControlExchangeName, "", null); - var mockControlConsumer = new Mock(MockBehavior.Strict); - var consumerOptions = new ConsumerOptions - { - QueueName = $"Control.{TestContext.CurrentContext.Test.Name}", - AutoAck = true, - }; - mockControlConsumer.Setup(x => x.ControlConsumerOptions).Returns(consumerOptions); - var called = false; - mockControlConsumer.Setup(x => x.ProcessMessage("hello", "")).Callback(() => called = true); + var properties = model.CreateBasicProperties(); + properties.ContentEncoding = "UTF-8"; + var body = Encoding.UTF8.GetBytes("hello"); - using var tester = new MicroserviceTester(globalOptions.RabbitOptions!); - using var model = tester.Broker.GetModel(TestContext.CurrentContext.Test.Name); - model.ConfirmSelect(); - model.QueueDeclare(consumerOptions.QueueName, false, true, true, null); - model.QueueBind(consumerOptions.QueueName, globalOptions.RabbitOptions!.RabbitMqControlExchangeName, "", null); + var broker = new RabbitMQBroker(globalOptions.RabbitOptions!, "RabbitMQBrokerTests"); + broker.StartControlConsumer(mockControlConsumer.Object); - var properties = model.CreateBasicProperties(); - properties.ContentEncoding = "UTF-8"; - var body = Encoding.UTF8.GetBytes("hello"); + // Act - var broker = new RabbitMQBroker(globalOptions.RabbitOptions!, "RabbitMQBrokerTests"); - broker.StartControlConsumer(mockControlConsumer.Object); + model.BasicPublish(globalOptions.RabbitOptions!.RabbitMqControlExchangeName!, "", mandatory: true, properties, body); + model.WaitForConfirms(); - // Act + // Assert - model.BasicPublish(globalOptions.RabbitOptions!.RabbitMqControlExchangeName!, "", mandatory: true, properties, body); - model.WaitForConfirms(); + TestTimelineAwaiter.Await(() => called == true); + } - // Assert + [Test] + public void HandleControlMessage_HappyPathWithInalidContentEncoding_IsIgnored() + { + // Arrange - TestTimelineAwaiter.Await(() => called == true); - } 
+ var globalOptions = GlobalOptionsForTest(); - [Test] - public void HandleControlMessage_HappyPathWithInalidContentEncoding_IsIgnored() + var mockControlConsumer = new Mock(MockBehavior.Strict); + var consumerOptions = new ConsumerOptions { - // Arrange + QueueName = $"Control.{TestContext.CurrentContext.Test.Name}", + AutoAck = true, + }; + mockControlConsumer.Setup(x => x.ControlConsumerOptions).Returns(consumerOptions); + var called = false; + mockControlConsumer.Setup(x => x.ProcessMessage("hello", "")).Callback(() => called = true); - var globalOptions = GlobalOptionsForTest(); + using var tester = new MicroserviceTester(globalOptions.RabbitOptions!); + using var model = tester.Broker.GetModel(TestContext.CurrentContext.Test.Name); + model.ConfirmSelect(); + model.QueueDeclare(consumerOptions.QueueName, false, true, true, null); + model.QueueBind(consumerOptions.QueueName, globalOptions.RabbitOptions!.RabbitMqControlExchangeName, "", null); - var mockControlConsumer = new Mock(MockBehavior.Strict); - var consumerOptions = new ConsumerOptions - { - QueueName = $"Control.{TestContext.CurrentContext.Test.Name}", - AutoAck = true, - }; - mockControlConsumer.Setup(x => x.ControlConsumerOptions).Returns(consumerOptions); - var called = false; - mockControlConsumer.Setup(x => x.ProcessMessage("hello", "")).Callback(() => called = true); + var properties = model.CreateBasicProperties(); + properties.ContentEncoding = "invalid"; + var body = Encoding.UTF8.GetBytes("hello"); - using var tester = new MicroserviceTester(globalOptions.RabbitOptions!); - using var model = tester.Broker.GetModel(TestContext.CurrentContext.Test.Name); - model.ConfirmSelect(); - model.QueueDeclare(consumerOptions.QueueName, false, true, true, null); - model.QueueBind(consumerOptions.QueueName, globalOptions.RabbitOptions!.RabbitMqControlExchangeName, "", null); + var broker = new RabbitMQBroker(globalOptions.RabbitOptions!, "RabbitMQBrokerTests"); + 
broker.StartControlConsumer(mockControlConsumer.Object); - var properties = model.CreateBasicProperties(); - properties.ContentEncoding = "invalid"; - var body = Encoding.UTF8.GetBytes("hello"); + // Act - var broker = new RabbitMQBroker(globalOptions.RabbitOptions!, "RabbitMQBrokerTests"); - broker.StartControlConsumer(mockControlConsumer.Object); + model.BasicPublish(globalOptions.RabbitOptions!.RabbitMqControlExchangeName!, "", mandatory: true, properties, body); + model.WaitForConfirms(); - // Act + // Assert - model.BasicPublish(globalOptions.RabbitOptions!.RabbitMqControlExchangeName!, "", mandatory: true, properties, body); - model.WaitForConfirms(); + TestTimelineAwaiter.Await(() => called == true); + } - // Assert + [Test] + public void HandleControlMessage_EmptyBody_IsDiscarded() + { + // Arrange - TestTimelineAwaiter.Await(() => called == true); - } + var globalOptions = GlobalOptionsForTest(); - [Test] - public void HandleControlMessage_EmptyBody_IsDiscarded() + var mockControlConsumer = new Mock(MockBehavior.Strict); + var consumerOptions = new ConsumerOptions { - // Arrange + QueueName = "Control.Test123" + }; + mockControlConsumer.Setup(x => x.ControlConsumerOptions).Returns(consumerOptions); - var globalOptions = GlobalOptionsForTest(); + using var tester = new MicroserviceTester(globalOptions.RabbitOptions!); + using var model = tester.Broker.GetModel(TestContext.CurrentContext.Test.Name); + model.ConfirmSelect(); + model.QueueDeclare(consumerOptions.QueueName, false, true, true, null); + model.QueueBind(consumerOptions.QueueName, globalOptions.RabbitOptions!.RabbitMqControlExchangeName, "", null); + var properties = model.CreateBasicProperties(); - var mockControlConsumer = new Mock(MockBehavior.Strict); - var consumerOptions = new ConsumerOptions - { - QueueName = "Control.Test123" - }; - mockControlConsumer.Setup(x => x.ControlConsumerOptions).Returns(consumerOptions); + var broker = new RabbitMQBroker(globalOptions.RabbitOptions!, 
"RabbitMQBrokerTests"); + broker.StartControlConsumer(mockControlConsumer.Object); - using var tester = new MicroserviceTester(globalOptions.RabbitOptions!); - using var model = tester.Broker.GetModel(TestContext.CurrentContext.Test.Name); - model.ConfirmSelect(); - model.QueueDeclare(consumerOptions.QueueName, false, true, true, null); - model.QueueBind(consumerOptions.QueueName, globalOptions.RabbitOptions!.RabbitMqControlExchangeName, "", null); - var properties = model.CreateBasicProperties(); + // Act - var broker = new RabbitMQBroker(globalOptions.RabbitOptions!, "RabbitMQBrokerTests"); - broker.StartControlConsumer(mockControlConsumer.Object); + model.BasicPublish(globalOptions.RabbitOptions!.RabbitMqControlExchangeName!, "", mandatory: true, properties, Array.Empty()); + model.WaitForConfirms(); - // Act + // Assert - model.BasicPublish(globalOptions.RabbitOptions!.RabbitMqControlExchangeName!, "", mandatory: true, properties, Array.Empty()); - model.WaitForConfirms(); + TestTimelineAwaiter.Await(() => model.MessageCount(mockControlConsumer.Object.ControlConsumerOptions.QueueName) == 0); + } - // Assert + [Test] + public void StartControlConsumer_HappyPath_IsOk() + { + // Arrange - TestTimelineAwaiter.Await(() => model.MessageCount(mockControlConsumer.Object.ControlConsumerOptions.QueueName) == 0); - } + var globalOptions = GlobalOptionsForTest(); - [Test] - public void StartControlConsumer_HappyPath_IsOk() - { - // Arrange + var controlConsumer = new ControlMessageConsumer(globalOptions.RabbitOptions!, TestContext.CurrentContext.Test.Name, 123, globalOptions.RabbitOptions!.RabbitMqControlExchangeName!, (_) => { }); + var broker = new RabbitMQBroker(globalOptions.RabbitOptions!, "RabbitMQBrokerTests"); - var globalOptions = GlobalOptionsForTest(); + // Act - var controlConsumer = new ControlMessageConsumer(globalOptions.RabbitOptions!, TestContext.CurrentContext.Test.Name, 123, globalOptions.RabbitOptions!.RabbitMqControlExchangeName!, (_) => { }); - var 
broker = new RabbitMQBroker(globalOptions.RabbitOptions!, "RabbitMQBrokerTests"); + void call() => broker.StartControlConsumer(controlConsumer); - // Act + // Assert - void call() => broker.StartControlConsumer(controlConsumer); + Assert.DoesNotThrow(call); + } - // Assert + [Test] + public void SetupProducer_AfterShutdown_Throws() + { + // Arrange - Assert.DoesNotThrow(call); - } + var globalOptions = GlobalOptionsForTest(); + var broker = new RabbitMQBroker(globalOptions.RabbitOptions!, "RabbitMQBrokerTests"); - [Test] - public void SetupProducer_AfterShutdown_Throws() - { - // Arrange + // Act - var globalOptions = GlobalOptionsForTest(); - var broker = new RabbitMQBroker(globalOptions.RabbitOptions!, "RabbitMQBrokerTests"); + broker.Shutdown(RabbitMQBroker.DefaultOperationTimeout); - // Act + // Assert - broker.Shutdown(RabbitMQBroker.DefaultOperationTimeout); + Assert.That(broker.ShutdownCalled, Is.EqualTo(true)); + var exc = Assert.Throws(() => broker.SetupProducer(new ProducerOptions())); + Assert.That(exc.Message, Is.EqualTo("Adapter has been shut down")); + } - // Assert + [Test] + public void StopConsumer_CalledTwice_Throws() + { + // Arrange - Assert.That(broker.ShutdownCalled, Is.EqualTo(true)); - var exc = Assert.Throws(() => broker.SetupProducer(new ProducerOptions())); - Assert.That(exc.Message, Is.EqualTo("Adapter has been shut down")); - } + var consumerOptions = TestConsumerOptions(); + var globalOptions = GlobalOptionsForTest(); + using var tester = new MicroserviceTester(globalOptions.RabbitOptions!, consumerOptions); + var mockConsumer = new Mock>(MockBehavior.Strict); + mockConsumer.SetupProperty(x => x.QoSPrefetchCount); + var consumerId = tester.Broker.StartConsumer(consumerOptions, mockConsumer.Object); - [Test] - public void StopConsumer_CalledTwice_Throws() - { - // Arrange + // Act - var consumerOptions = TestConsumerOptions(); - var globalOptions = GlobalOptionsForTest(); - using var tester = new 
MicroserviceTester(globalOptions.RabbitOptions!, consumerOptions); - var mockConsumer = new Mock>(MockBehavior.Strict); - mockConsumer.SetupProperty(x => x.QoSPrefetchCount); - var consumerId = tester.Broker.StartConsumer(consumerOptions, mockConsumer.Object); + tester.Broker.StopConsumer(consumerId, RabbitMQBroker.DefaultOperationTimeout); - // Act + // Assert - tester.Broker.StopConsumer(consumerId, RabbitMQBroker.DefaultOperationTimeout); + var exc = Assert.Throws(() => tester.Broker.StopConsumer(consumerId, RabbitMQBroker.DefaultOperationTimeout)); + Assert.That(exc.Message, Is.EqualTo("Guid was not found in the task register")); + } - // Assert + [Test] + public void WaitForConfirms_Repeated_IsOk() + { + // Arrange - var exc = Assert.Throws(() => tester.Broker.StopConsumer(consumerId, RabbitMQBroker.DefaultOperationTimeout)); - Assert.That(exc.Message, Is.EqualTo("Guid was not found in the task register")); - } + var globalOptions = GlobalOptionsForTest(); + using var tester = new MicroserviceTester(globalOptions.RabbitOptions!, TestConsumerOptions()); - [Test] - public void WaitForConfirms_Repeated_IsOk() + var producerOptions = new ProducerOptions { - // Arrange + ExchangeName = "TEST.TestExchange" + }; + var producerModel = tester.Broker.SetupProducer(producerOptions, true); - var globalOptions = GlobalOptionsForTest(); - using var tester = new MicroserviceTester(globalOptions.RabbitOptions!, TestConsumerOptions()); + producerModel.SendMessage(new TestMessage(), isInResponseTo: null, routingKey: null); - var producerOptions = new ProducerOptions - { - ExchangeName = "TEST.TestExchange" - }; - var producerModel = tester.Broker.SetupProducer(producerOptions, true); + // Act - producerModel.SendMessage(new TestMessage(), isInResponseTo: null, routingKey: null); + void call() + { + producerModel.WaitForConfirms(); + producerModel.WaitForConfirms(); + } - // Act + // Assert - void call() - { - producerModel.WaitForConfirms(); - producerModel.WaitForConfirms(); - 
} + Assert.DoesNotThrow(call); + } - // Assert + [Test] + public void SetupProducer_NullBackoffProvider_DoesNotThrow() + { + // Arrange - Assert.DoesNotThrow(call); - } + var globalOptions = GlobalOptionsForTest(); + using var tester = new MicroserviceTester(globalOptions.RabbitOptions!, TestConsumerOptions()); + var exchangeName = $"TEST.{nameof(SetupProducer_NullBackoffProvider_DoesNotThrow)}Exchange"; + tester.CreateExchange(exchangeName); - [Test] - public void SetupProducer_NullBackoffProvider_DoesNotThrow() + var producerOptions = new ProducerOptions { - // Arrange + ExchangeName = exchangeName, + BackoffProviderType = null, + }; - var globalOptions = GlobalOptionsForTest(); - using var tester = new MicroserviceTester(globalOptions.RabbitOptions!, TestConsumerOptions()); - var exchangeName = $"TEST.{nameof(SetupProducer_NullBackoffProvider_DoesNotThrow)}Exchange"; - tester.CreateExchange(exchangeName); + var broker = new RabbitMQBroker(globalOptions.RabbitOptions!, "RabbitMqAdapterTests"); - var producerOptions = new ProducerOptions - { - ExchangeName = exchangeName, - BackoffProviderType = null, - }; + // Act - var broker = new RabbitMQBroker(globalOptions.RabbitOptions!, "RabbitMqAdapterTests"); + void call() => broker.SetupProducer(producerOptions); - // Act + // Assert - void call() => broker.SetupProducer(producerOptions); + Assert.DoesNotThrow(call); + } - // Assert + [Test] + public void SetupProducer_InvalidBackoffProvider_Throws() + { + // Arrange - Assert.DoesNotThrow(call); - } + var globalOptions = GlobalOptionsForTest(); + using var tester = new MicroserviceTester(globalOptions.RabbitOptions!, TestConsumerOptions()); + var exchangeName = $"TEST.{nameof(SetupProducer_InvalidBackoffProvider_Throws)}Exchange"; + tester.CreateExchange(exchangeName); - [Test] - public void SetupProducer_InvalidBackoffProvider_Throws() + var producerOptions = new ProducerOptions { - // Arrange + ExchangeName = exchangeName, + BackoffProviderType = "Foo", + }; - var 
globalOptions = GlobalOptionsForTest(); - using var tester = new MicroserviceTester(globalOptions.RabbitOptions!, TestConsumerOptions()); - var exchangeName = $"TEST.{nameof(SetupProducer_InvalidBackoffProvider_Throws)}Exchange"; - tester.CreateExchange(exchangeName); + var broker = new RabbitMQBroker(globalOptions.RabbitOptions!, "RabbitMqAdapterTests"); - var producerOptions = new ProducerOptions - { - ExchangeName = exchangeName, - BackoffProviderType = "Foo", - }; + // Act - var broker = new RabbitMQBroker(globalOptions.RabbitOptions!, "RabbitMqAdapterTests"); + void call() => broker.SetupProducer(producerOptions); - // Act + // Assert - void call() => broker.SetupProducer(producerOptions); + var exc = Assert.Throws(call); + Assert.That(exc.Message, Is.EqualTo("Could not parse 'Foo' to a valid BackoffProviderType")); + } - // Assert + [Test] + public void SetupProducer_ValidBackoffProvider_IsOk() + { + // Arrange - var exc = Assert.Throws(call); - Assert.That(exc.Message, Is.EqualTo("Could not parse 'Foo' to a valid BackoffProviderType")); - } + var globalOptions = GlobalOptionsForTest(); + using var tester = new MicroserviceTester(globalOptions.RabbitOptions!, TestConsumerOptions()); + var exchangeName = $"TEST.{nameof(SetupProducer_InvalidBackoffProvider_Throws)}Exchange"; + tester.CreateExchange(exchangeName); - [Test] - public void SetupProducer_ValidBackoffProvider_IsOk() + var producerOptions = new ProducerOptions { - // Arrange - - var globalOptions = GlobalOptionsForTest(); - using var tester = new MicroserviceTester(globalOptions.RabbitOptions!, TestConsumerOptions()); - var exchangeName = $"TEST.{nameof(SetupProducer_InvalidBackoffProvider_Throws)}Exchange"; - tester.CreateExchange(exchangeName); - - var producerOptions = new ProducerOptions - { - ExchangeName = exchangeName, - BackoffProviderType = "StaticBackoffProvider", - }; + ExchangeName = exchangeName, + BackoffProviderType = "StaticBackoffProvider", + }; - var broker = new 
RabbitMQBroker(globalOptions.RabbitOptions!, "RabbitMqAdapterTests"); + var broker = new RabbitMQBroker(globalOptions.RabbitOptions!, "RabbitMqAdapterTests"); - // Act + // Act - void call() => broker.SetupProducer(producerOptions); + void call() => broker.SetupProducer(producerOptions); - // Assert - - Assert.DoesNotThrow(call); - } + // Assert - [Test] - public void Constructor_InvalidHostName_Throws() - { - // Arrange + Assert.DoesNotThrow(call); + } - var globalOptions = GlobalOptionsForTest(); + [Test] + public void Constructor_InvalidHostName_Throws() + { + // Arrange - // Act + var globalOptions = GlobalOptionsForTest(); - void call() => _ = new RabbitMQBroker(globalOptions.RabbitOptions!, " "); + // Act - // Assert + void call() => _ = new RabbitMQBroker(globalOptions.RabbitOptions!, " "); - var exc = Assert.Throws(call); - Assert.That(exc.Message, Is.EqualTo("RabbitMQ host ID required (Parameter 'hostId')")); - } + // Assert - private class TestMessage : IMessage { } + var exc = Assert.Throws(call); + Assert.That(exc.Message, Is.EqualTo("RabbitMQ host ID required (Parameter 'hostId')")); } + + private class TestMessage : IMessage { } } diff --git a/tests/SmiServices.IntegrationTests/Common/MongoDB/MongoQueryParserTests.cs b/tests/SmiServices.IntegrationTests/Common/MongoDB/MongoQueryParserTests.cs index bba6798e5..24259940b 100644 --- a/tests/SmiServices.IntegrationTests/Common/MongoDB/MongoQueryParserTests.cs +++ b/tests/SmiServices.IntegrationTests/Common/MongoDB/MongoQueryParserTests.cs @@ -8,76 +8,75 @@ using SmiServices.UnitTests.Common; using System.Threading.Tasks; -namespace SmiServices.IntegrationTests.Common.MongoDB +namespace SmiServices.IntegrationTests.Common.MongoDB; + +[TestFixture, RequiresMongoDb] +public class MongoQueryParserTests { - [TestFixture, RequiresMongoDb] - public class MongoQueryParserTests - { - private readonly ILogger _logger = LogManager.GetCurrentClassLogger(); + private readonly ILogger _logger = 
LogManager.GetCurrentClassLogger(); - private MongoDbOptions _mongoOptions = null!; + private MongoDbOptions _mongoOptions = null!; - #region Fixture Methods + #region Fixture Methods - [OneTimeSetUp] - public void OneTimeSetUp() - { + [OneTimeSetUp] + public void OneTimeSetUp() + { - GlobalOptions globalOptions = new GlobalOptionsFactory().Load(nameof(MongoQueryParserTests)); - _mongoOptions = globalOptions.MongoDatabases!.DicomStoreOptions!; - } + GlobalOptions globalOptions = new GlobalOptionsFactory().Load(nameof(MongoQueryParserTests)); + _mongoOptions = globalOptions.MongoDatabases!.DicomStoreOptions!; + } - [OneTimeTearDown] - public void OneTimeTearDown() { } + [OneTimeTearDown] + public void OneTimeTearDown() { } - #endregion + #endregion - #region Test Methods + #region Test Methods - [SetUp] - public void SetUp() { } + [SetUp] + public void SetUp() { } - [TearDown] - public void TearDown() { } + [TearDown] + public void TearDown() { } - #endregion + #endregion - #region Tests + #region Tests - private const string QUERY_BASIC = "{\"find\":{\"SeriesDate\":{\"$regex\":\"^2007\"}}}"; - private const string QUERY_ADVANCED = "{\"find\":{\"SeriesDate\":\"20170201\"},\"sort\":{\"_id\":-1},\"skip\":123,\"limit\":456}"; + private const string QUERY_BASIC = "{\"find\":{\"SeriesDate\":{\"$regex\":\"^2007\"}}}"; + private const string QUERY_ADVANCED = "{\"find\":{\"SeriesDate\":\"20170201\"},\"sort\":{\"_id\":-1},\"skip\":123,\"limit\":456}"; - [Test] - [TestCase(QUERY_BASIC, null, null)] - [TestCase(QUERY_ADVANCED, 123, 456)] - public void TestParseQuery(string jsonQuery, int? expectedSkip, int? 
expectedLimit) - { - MongoClient mongoClient = MongoClientHelpers.GetMongoClient(_mongoOptions, "MongoQueryParserTests"); - - IMongoDatabase database = mongoClient.GetDatabase("test"); - IMongoCollection coll = database.GetCollection("test"); + [Test] + [TestCase(QUERY_BASIC, null, null)] + [TestCase(QUERY_ADVANCED, 123, 456)] + public void TestParseQuery(string jsonQuery, int? expectedSkip, int? expectedLimit) + { + MongoClient mongoClient = MongoClientHelpers.GetMongoClient(_mongoOptions, "MongoQueryParserTests"); - var findOptions = new FindOptions { BatchSize = 1 }; + IMongoDatabase database = mongoClient.GetDatabase("test"); + IMongoCollection coll = database.GetCollection("test"); - Task> t = MongoQueryParser.GetCursor(coll, findOptions, jsonQuery); + var findOptions = new FindOptions { BatchSize = 1 }; - t.Wait(1_000); - Assert.Multiple(() => - { - Assert.That(t.IsCompleted, Is.True); - Assert.That(t.IsFaulted, Is.False); - }); + Task> t = MongoQueryParser.GetCursor(coll, findOptions, jsonQuery); - using IAsyncCursor _ = t.Result; - _logger.Info("Received new batch"); + t.Wait(1_000); + Assert.Multiple(() => + { + Assert.That(t.IsCompleted, Is.True); + Assert.That(t.IsFaulted, Is.False); + }); - Assert.Multiple(() => - { - Assert.That(findOptions.Skip, Is.EqualTo(expectedSkip)); - Assert.That(findOptions.Limit, Is.EqualTo(expectedLimit)); - }); - } + using IAsyncCursor _ = t.Result; + _logger.Info("Received new batch"); - #endregion + Assert.Multiple(() => + { + Assert.That(findOptions.Skip, Is.EqualTo(expectedSkip)); + Assert.That(findOptions.Limit, Is.EqualTo(expectedLimit)); + }); } + + #endregion } diff --git a/tests/SmiServices.IntegrationTests/Common/OptionsTests.cs b/tests/SmiServices.IntegrationTests/Common/OptionsTests.cs index 3960aa574..51ac1dda2 100644 --- a/tests/SmiServices.IntegrationTests/Common/OptionsTests.cs +++ b/tests/SmiServices.IntegrationTests/Common/OptionsTests.cs @@ -3,106 +3,105 @@ using System; using 
System.IO.Abstractions.TestingHelpers; -namespace SmiServices.IntegrationTests.Common +namespace SmiServices.IntegrationTests.Common; + +//TODO: Rework these tests. We should assert that every option in GlobalOptions has an entry in default.yaml. Non-required options should be present with a comment +[TestFixture] +public class OptionsTests { - //TODO: Rework these tests. We should assert that every option in GlobalOptions has an entry in default.yaml. Non-required options should be present with a comment - [TestFixture] - public class OptionsTests + [TestCase] + public void GlobalOptions_Test() { - [TestCase] - public void GlobalOptions_Test() + GlobalOptions globals = new GlobalOptionsFactory().Load(nameof(GlobalOptions_Test)); + Assert.Multiple(() => { - GlobalOptions globals = new GlobalOptionsFactory().Load(nameof(GlobalOptions_Test)); - Assert.Multiple(() => - { - Assert.That(string.IsNullOrWhiteSpace(globals.RabbitOptions!.RabbitMqHostName), Is.False); - Assert.That(string.IsNullOrWhiteSpace(globals.FileSystemOptions!.FileSystemRoot), Is.False); - Assert.That(string.IsNullOrWhiteSpace(globals.RDMPOptions!.CatalogueConnectionString), Is.False); - Assert.That(string.IsNullOrWhiteSpace(globals.RDMPOptions.DataExportConnectionString), Is.False); - }); - } + Assert.That(string.IsNullOrWhiteSpace(globals.RabbitOptions!.RabbitMqHostName), Is.False); + Assert.That(string.IsNullOrWhiteSpace(globals.FileSystemOptions!.FileSystemRoot), Is.False); + Assert.That(string.IsNullOrWhiteSpace(globals.RDMPOptions!.CatalogueConnectionString), Is.False); + Assert.That(string.IsNullOrWhiteSpace(globals.RDMPOptions.DataExportConnectionString), Is.False); + }); + } - [Test] - public void TestVerifyPopulatedChecks() - { - var producerOptions = new ProducerOptions(); + [Test] + public void TestVerifyPopulatedChecks() + { + var producerOptions = new ProducerOptions(); - Assert.That(producerOptions.VerifyPopulated(), Is.False); + Assert.That(producerOptions.VerifyPopulated(), 
Is.False); - producerOptions.ExchangeName = ""; - Assert.That(producerOptions.VerifyPopulated(), Is.False); + producerOptions.ExchangeName = ""; + Assert.That(producerOptions.VerifyPopulated(), Is.False); - producerOptions.ExchangeName = "Test.ExchangeName"; - Assert.That(producerOptions.VerifyPopulated(), Is.True); + producerOptions.ExchangeName = "Test.ExchangeName"; + Assert.That(producerOptions.VerifyPopulated(), Is.True); - var consumerOptions = new ConsumerOptions(); + var consumerOptions = new ConsumerOptions(); - Assert.That(consumerOptions.VerifyPopulated(), Is.False); + Assert.That(consumerOptions.VerifyPopulated(), Is.False); - consumerOptions.QueueName = "Test.QueueName"; - Assert.That(consumerOptions.VerifyPopulated(), Is.True); - } + consumerOptions.QueueName = "Test.QueueName"; + Assert.That(consumerOptions.VerifyPopulated(), Is.True); + } - [Test] - public void Test_GlobalOptionsUseTestValues_Nulls() - { - GlobalOptions g = new GlobalOptionsFactory().Load(nameof(Test_GlobalOptionsUseTestValues_Nulls)); + [Test] + public void Test_GlobalOptionsUseTestValues_Nulls() + { + GlobalOptions g = new GlobalOptionsFactory().Load(nameof(Test_GlobalOptionsUseTestValues_Nulls)); - Assert.That(g.RabbitOptions!.RabbitMqHostName, Is.Not.Null); - g.UseTestValues(null, null, null, null, null); - Assert.That(g.RabbitOptions.RabbitMqHostName, Is.Null); - } + Assert.That(g.RabbitOptions!.RabbitMqHostName, Is.Not.Null); + g.UseTestValues(null, null, null, null, null); + Assert.That(g.RabbitOptions.RabbitMqHostName, Is.Null); + } - [Test] - public void Test_GlobalOptions_FileReadOption_ThrowsException() - { - GlobalOptions g = new GlobalOptionsFactory().Load(nameof(Test_GlobalOptions_FileReadOption_ThrowsException)); - g.DicomTagReaderOptions!.FileReadOption = "SkipLargeTags"; + [Test] + public void Test_GlobalOptions_FileReadOption_ThrowsException() + { + GlobalOptions g = new GlobalOptionsFactory().Load(nameof(Test_GlobalOptions_FileReadOption_ThrowsException)); + 
g.DicomTagReaderOptions!.FileReadOption = "SkipLargeTags"; - Assert.Throws(() => g.DicomTagReaderOptions.GetReadOption()); - } + Assert.Throws(() => g.DicomTagReaderOptions.GetReadOption()); + } - private class TestDecorator : OptionsDecorator + private class TestDecorator : OptionsDecorator + { + public override GlobalOptions Decorate(GlobalOptions options) { - public override GlobalOptions Decorate(GlobalOptions options) - { - ForAll(options, (o) => new MongoDbOptions { DatabaseName = "FFFFF" }); - return options; - } + ForAll(options, (o) => new MongoDbOptions { DatabaseName = "FFFFF" }); + return options; } + } - [Test] - public void TestDecorators() + [Test] + public void TestDecorators() + { + var factory = new GlobalOptionsFactory([new TestDecorator()]); + var g = factory.Load(nameof(TestDecorators)); + Assert.Multiple(() => { - var factory = new GlobalOptionsFactory([new TestDecorator()]); - var g = factory.Load(nameof(TestDecorators)); - Assert.Multiple(() => - { - Assert.That(g.MongoDatabases!.DicomStoreOptions!.DatabaseName, Is.EqualTo("FFFFF")); - Assert.That(g.MongoDatabases.ExtractionStoreOptions!.DatabaseName, Is.EqualTo("FFFFF")); - }); - } + Assert.That(g.MongoDatabases!.DicomStoreOptions!.DatabaseName, Is.EqualTo("FFFFF")); + Assert.That(g.MongoDatabases.ExtractionStoreOptions!.DatabaseName, Is.EqualTo("FFFFF")); + }); + } - [Test] - public void GlobalOptionsFactory_Load_EmptyFile_ThrowsWithUsefulMessage() - { - var fileSystem = new MockFileSystem(); - fileSystem.File.Create("foo.yaml"); - var globalOptionsFactory = new GlobalOptionsFactory(); + [Test] + public void GlobalOptionsFactory_Load_EmptyFile_ThrowsWithUsefulMessage() + { + var fileSystem = new MockFileSystem(); + fileSystem.File.Create("foo.yaml"); + var globalOptionsFactory = new GlobalOptionsFactory(); - var exc = Assert.Throws(() => globalOptionsFactory.Load(nameof(GlobalOptionsFactory_Load_EmptyFile_ThrowsWithUsefulMessage), "foo.yaml", fileSystem)); - Assert.That(exc?.Message, 
Is.EqualTo("Did not deserialize a GlobalOptions object from the provided YAML file. Does it contain at least one valid key?")); - } + var exc = Assert.Throws(() => globalOptionsFactory.Load(nameof(GlobalOptionsFactory_Load_EmptyFile_ThrowsWithUsefulMessage), "foo.yaml", fileSystem)); + Assert.That(exc?.Message, Is.EqualTo("Did not deserialize a GlobalOptions object from the provided YAML file. Does it contain at least one valid key?")); + } - [Test] - public void GlobalOptionsFactory_Load_MissingFile_ThrowsWithUsefulMessage() - { - var fileSystem = new MockFileSystem(); - var globalOptionsFactory = new GlobalOptionsFactory(); + [Test] + public void GlobalOptionsFactory_Load_MissingFile_ThrowsWithUsefulMessage() + { + var fileSystem = new MockFileSystem(); + var globalOptionsFactory = new GlobalOptionsFactory(); - var exc = Assert.Throws(() => globalOptionsFactory.Load(nameof(GlobalOptionsFactory_Load_EmptyFile_ThrowsWithUsefulMessage), "foo.yaml", fileSystem)); - Assert.That(exc?.Message, Is.EqualTo("Could not find config file 'foo.yaml'")); - } + var exc = Assert.Throws(() => globalOptionsFactory.Load(nameof(GlobalOptionsFactory_Load_EmptyFile_ThrowsWithUsefulMessage), "foo.yaml", fileSystem)); + Assert.That(exc?.Message, Is.EqualTo("Could not find config file 'foo.yaml'")); } } diff --git a/tests/SmiServices.IntegrationTests/Microservices/CohortExtractor/BlacklistRejectorTests.cs b/tests/SmiServices.IntegrationTests/Microservices/CohortExtractor/BlacklistRejectorTests.cs index a5f59e2e4..fc6220b78 100644 --- a/tests/SmiServices.IntegrationTests/Microservices/CohortExtractor/BlacklistRejectorTests.cs +++ b/tests/SmiServices.IntegrationTests/Microservices/CohortExtractor/BlacklistRejectorTests.cs @@ -4,125 +4,124 @@ using System.Data; using Tests.Common; -namespace SmiServices.IntegrationTests.Microservices.CohortExtractor +namespace SmiServices.IntegrationTests.Microservices.CohortExtractor; + +[RequiresRelationalDb(DatabaseType.MicrosoftSQLServer)] 
+[RequiresRelationalDb(DatabaseType.MySql)] +[RequiresRelationalDb(DatabaseType.PostgreSql)] +class BlacklistRejectorTests : DatabaseTests { - [RequiresRelationalDb(DatabaseType.MicrosoftSQLServer)] - [RequiresRelationalDb(DatabaseType.MySql)] - [RequiresRelationalDb(DatabaseType.PostgreSql)] - class BlacklistRejectorTests : DatabaseTests + [TestCase(DatabaseType.MicrosoftSQLServer)] + [TestCase(DatabaseType.MySql)] + [TestCase(DatabaseType.PostgreSql)] + public void TestBlacklistOn_Study(DatabaseType dbType) { - [TestCase(DatabaseType.MicrosoftSQLServer)] - [TestCase(DatabaseType.MySql)] - [TestCase(DatabaseType.PostgreSql)] - public void TestBlacklistOn_Study(DatabaseType dbType) + var db = GetCleanedServer(dbType); + + using var dt = new DataTable(); + dt.Columns.Add("StudyInstanceUID"); + dt.Rows.Add("fff"); + + var tbl = db.CreateTable("SomeTbl", dt); + + var cata = Import(tbl); + + var rejector = new BlacklistRejector(cata); + Assert.Multiple(() => { - var db = GetCleanedServer(dbType); + Assert.That(rejector.DoLookup("fff", "aaa", "bbb"), Is.True); + Assert.That(rejector.DoLookup("aaa", "fff", "bbb"), Is.False); + Assert.That(rejector.DoLookup("aaa", "bbb", "fff"), Is.False); + }); + } - using var dt = new DataTable(); - dt.Columns.Add("StudyInstanceUID"); - dt.Rows.Add("fff"); - var tbl = db.CreateTable("SomeTbl", dt); + [TestCase(DatabaseType.MicrosoftSQLServer)] + [TestCase(DatabaseType.MySql)] + [TestCase(DatabaseType.PostgreSql)] + public void TestBlacklistOn_Series(DatabaseType dbType) + { + var db = GetCleanedServer(dbType); - var cata = Import(tbl); + using var dt = new DataTable(); + dt.Columns.Add("SeriesInstanceUID"); + dt.Rows.Add("fff"); - var rejector = new BlacklistRejector(cata); - Assert.Multiple(() => - { - Assert.That(rejector.DoLookup("fff", "aaa", "bbb"), Is.True); - Assert.That(rejector.DoLookup("aaa", "fff", "bbb"), Is.False); - Assert.That(rejector.DoLookup("aaa", "bbb", "fff"), Is.False); - }); - } + var tbl = 
db.CreateTable("SomeTbl", dt); + var cata = Import(tbl); - [TestCase(DatabaseType.MicrosoftSQLServer)] - [TestCase(DatabaseType.MySql)] - [TestCase(DatabaseType.PostgreSql)] - public void TestBlacklistOn_Series(DatabaseType dbType) + var rejector = new BlacklistRejector(cata); + Assert.Multiple(() => { - var db = GetCleanedServer(dbType); + Assert.That(rejector.DoLookup("fff", "aaa", "bbb"), Is.False); + Assert.That(rejector.DoLookup("aaa", "fff", "bbb"), Is.True); + Assert.That(rejector.DoLookup("aaa", "bbb", "fff"), Is.False); + }); + } - using var dt = new DataTable(); - dt.Columns.Add("SeriesInstanceUID"); - dt.Rows.Add("fff"); - var tbl = db.CreateTable("SomeTbl", dt); + [TestCase(DatabaseType.MicrosoftSQLServer)] + [TestCase(DatabaseType.MySql)] + [TestCase(DatabaseType.PostgreSql)] + public void TestBlacklistOn_SOPInstanceUID(DatabaseType dbType) + { + var db = GetCleanedServer(dbType); - var cata = Import(tbl); + using var dt = new DataTable(); + dt.Columns.Add("SOPInstanceUID"); + dt.Rows.Add("fff"); - var rejector = new BlacklistRejector(cata); - Assert.Multiple(() => - { - Assert.That(rejector.DoLookup("fff", "aaa", "bbb"), Is.False); - Assert.That(rejector.DoLookup("aaa", "fff", "bbb"), Is.True); - Assert.That(rejector.DoLookup("aaa", "bbb", "fff"), Is.False); - }); - } + var tbl = db.CreateTable("SomeTbl", dt); + var cata = Import(tbl); - [TestCase(DatabaseType.MicrosoftSQLServer)] - [TestCase(DatabaseType.MySql)] - [TestCase(DatabaseType.PostgreSql)] - public void TestBlacklistOn_SOPInstanceUID(DatabaseType dbType) + var rejector = new BlacklistRejector(cata); + Assert.Multiple(() => { - var db = GetCleanedServer(dbType); + Assert.That(rejector.DoLookup("fff", "aaa", "bbb"), Is.False); + Assert.That(rejector.DoLookup("aaa", "fff", "bbb"), Is.False); + Assert.That(rejector.DoLookup("aaa", "bbb", "fff"), Is.True); + }); + } + - using var dt = new DataTable(); - dt.Columns.Add("SOPInstanceUID"); - dt.Rows.Add("fff"); + 
[TestCase(DatabaseType.MicrosoftSQLServer)] + [TestCase(DatabaseType.MySql)] + [TestCase(DatabaseType.PostgreSql)] + public void TestBlacklistOn_AllThree(DatabaseType dbType) + { + var db = GetCleanedServer(dbType); - var tbl = db.CreateTable("SomeTbl", dt); + using var dt = new DataTable(); + dt.Columns.Add("StudyInstanceUID"); + dt.Columns.Add("SeriesInstanceUID"); + dt.Columns.Add("SOPInstanceUID"); + dt.Columns.Add("SomeOtherCol"); - var cata = Import(tbl); + dt.Rows.Add("aaa", "bbb", "ccc", "ffff"); - var rejector = new BlacklistRejector(cata); - Assert.Multiple(() => - { - Assert.That(rejector.DoLookup("fff", "aaa", "bbb"), Is.False); - Assert.That(rejector.DoLookup("aaa", "fff", "bbb"), Is.False); - Assert.That(rejector.DoLookup("aaa", "bbb", "fff"), Is.True); - }); - } + var tbl = db.CreateTable("SomeTbl", dt); + var cata = Import(tbl); - [TestCase(DatabaseType.MicrosoftSQLServer)] - [TestCase(DatabaseType.MySql)] - [TestCase(DatabaseType.PostgreSql)] - public void TestBlacklistOn_AllThree(DatabaseType dbType) + var rejector = new BlacklistRejector(cata); + Assert.Multiple(() => { - var db = GetCleanedServer(dbType); - - using var dt = new DataTable(); - dt.Columns.Add("StudyInstanceUID"); - dt.Columns.Add("SeriesInstanceUID"); - dt.Columns.Add("SOPInstanceUID"); - dt.Columns.Add("SomeOtherCol"); - - dt.Rows.Add("aaa", "bbb", "ccc", "ffff"); - - var tbl = db.CreateTable("SomeTbl", dt); - - var cata = Import(tbl); - - var rejector = new BlacklistRejector(cata); - Assert.Multiple(() => - { - Assert.That(rejector.DoLookup("aaa", "bbb", "ccc"), Is.True); - Assert.That(rejector.DoLookup("---", "bbb", "ccc"), Is.True); - Assert.That(rejector.DoLookup("aaa", "bbb", "---"), Is.True); - Assert.That(rejector.DoLookup("---", "bbb", "---"), Is.True); - Assert.That(rejector.DoLookup("---", "---", "ccc"), Is.True); - Assert.That(rejector.DoLookup("aaa", "---", "---"), Is.True); - - Assert.That(rejector.DoLookup("---", "---", "---"), Is.False); - 
Assert.That(rejector.DoLookup("bbb", "ccc", "aaa"), Is.False); - Assert.That(rejector.DoLookup("---", "ccc", "bbb"), Is.False); - Assert.That(rejector.DoLookup("bbb", "aaa", "---"), Is.False); - Assert.That(rejector.DoLookup("---", "aaa", "---"), Is.False); - Assert.That(rejector.DoLookup("---", "---", "bbb"), Is.False); - Assert.That(rejector.DoLookup("bbb", "---", "---"), Is.False); - }); - } + Assert.That(rejector.DoLookup("aaa", "bbb", "ccc"), Is.True); + Assert.That(rejector.DoLookup("---", "bbb", "ccc"), Is.True); + Assert.That(rejector.DoLookup("aaa", "bbb", "---"), Is.True); + Assert.That(rejector.DoLookup("---", "bbb", "---"), Is.True); + Assert.That(rejector.DoLookup("---", "---", "ccc"), Is.True); + Assert.That(rejector.DoLookup("aaa", "---", "---"), Is.True); + + Assert.That(rejector.DoLookup("---", "---", "---"), Is.False); + Assert.That(rejector.DoLookup("bbb", "ccc", "aaa"), Is.False); + Assert.That(rejector.DoLookup("---", "ccc", "bbb"), Is.False); + Assert.That(rejector.DoLookup("bbb", "aaa", "---"), Is.False); + Assert.That(rejector.DoLookup("---", "aaa", "---"), Is.False); + Assert.That(rejector.DoLookup("---", "---", "bbb"), Is.False); + Assert.That(rejector.DoLookup("bbb", "---", "---"), Is.False); + }); } } diff --git a/tests/SmiServices.IntegrationTests/Microservices/CohortExtractor/ColumnInfoValuesRejectorTests.cs b/tests/SmiServices.IntegrationTests/Microservices/CohortExtractor/ColumnInfoValuesRejectorTests.cs index bf3a58057..e255abf95 100644 --- a/tests/SmiServices.IntegrationTests/Microservices/CohortExtractor/ColumnInfoValuesRejectorTests.cs +++ b/tests/SmiServices.IntegrationTests/Microservices/CohortExtractor/ColumnInfoValuesRejectorTests.cs @@ -9,61 +9,60 @@ using System.Data.Common; using Tests.Common; -namespace SmiServices.IntegrationTests.Microservices.CohortExtractor -{ +namespace SmiServices.IntegrationTests.Microservices.CohortExtractor; - [RequiresRelationalDb(DatabaseType.MicrosoftSQLServer)] - 
[RequiresRelationalDb(DatabaseType.MySql)] - public class ColumnInfoValuesRejectorTests : DatabaseTests - { - private const string PatColName = "PatientID"; - [TestCase(DatabaseType.MicrosoftSQLServer)] - [TestCase(DatabaseType.MySql)] - public void Test_ColumnInfoValuesRejectorTests(DatabaseType type) - { - DiscoveredDatabase server = GetCleanedServer(type); - DiscoveredTable tbl = server.CreateTable("BadPatients", [new DatabaseColumnRequest(PatColName, "varchar(100)")]); +[RequiresRelationalDb(DatabaseType.MicrosoftSQLServer)] +[RequiresRelationalDb(DatabaseType.MySql)] +public class ColumnInfoValuesRejectorTests : DatabaseTests +{ + private const string PatColName = "PatientID"; - tbl.Insert(new Dictionary { { PatColName, "Frank" } }); - tbl.Insert(new Dictionary { { PatColName, "Peter" } }); - tbl.Insert(new Dictionary { { PatColName, "Frank" } }); //duplication for the lols - tbl.Insert(new Dictionary { { PatColName, "David" } }); + [TestCase(DatabaseType.MicrosoftSQLServer)] + [TestCase(DatabaseType.MySql)] + public void Test_ColumnInfoValuesRejectorTests(DatabaseType type) + { + DiscoveredDatabase server = GetCleanedServer(type); + DiscoveredTable tbl = server.CreateTable("BadPatients", [new DatabaseColumnRequest(PatColName, "varchar(100)")]); - new TableInfoImporter(CatalogueRepository, tbl).DoImport(out var _, out ColumnInfo[] cols); + tbl.Insert(new Dictionary { { PatColName, "Frank" } }); + tbl.Insert(new Dictionary { { PatColName, "Peter" } }); + tbl.Insert(new Dictionary { { PatColName, "Frank" } }); //duplication for the lols + tbl.Insert(new Dictionary { { PatColName, "David" } }); - var rejector = new ColumnInfoValuesRejector(cols[0]); + new TableInfoImporter(CatalogueRepository, tbl).DoImport(out var _, out ColumnInfo[] cols); - var moqDave = new Mock(); - moqDave.Setup(x => x[PatColName]) - .Returns("Dave"); + var rejector = new ColumnInfoValuesRejector(cols[0]); - Assert.Multiple(() => - { - Assert.That(rejector.Reject(moqDave.Object, out string? 
reason), Is.False); - Assert.That(reason, Is.Null); - }); + var moqDave = new Mock(); + moqDave.Setup(x => x[PatColName]) + .Returns("Dave"); - var moqFrank = new Mock(); - moqFrank.Setup(x => x[PatColName]) - .Returns("Frank"); + Assert.Multiple(() => + { + Assert.That(rejector.Reject(moqDave.Object, out string? reason), Is.False); + Assert.That(reason, Is.Null); + }); - Assert.Multiple(() => - { - Assert.That(rejector.Reject(moqFrank.Object, out var reason), Is.True); - Assert.That(reason, Is.EqualTo("Patient or Identifier was in reject list")); - }); + var moqFrank = new Mock(); + moqFrank.Setup(x => x[PatColName]) + .Returns("Frank"); - var moqLowerCaseFrank = new Mock(); - moqLowerCaseFrank.Setup(x => x[PatColName]) - .Returns("frank"); + Assert.Multiple(() => + { + Assert.That(rejector.Reject(moqFrank.Object, out var reason), Is.True); + Assert.That(reason, Is.EqualTo("Patient or Identifier was in reject list")); + }); - Assert.Multiple(() => - { - Assert.That(rejector.Reject(moqLowerCaseFrank.Object, out var reason), Is.True); - Assert.That(reason, Is.EqualTo("Patient or Identifier was in reject list")); - }); - } + var moqLowerCaseFrank = new Mock(); + moqLowerCaseFrank.Setup(x => x[PatColName]) + .Returns("frank"); + Assert.Multiple(() => + { + Assert.That(rejector.Reject(moqLowerCaseFrank.Object, out var reason), Is.True); + Assert.That(reason, Is.EqualTo("Patient or Identifier was in reject list")); + }); } + } diff --git a/tests/SmiServices.IntegrationTests/Microservices/CohortExtractor/ExtractionSuperTableTests.cs b/tests/SmiServices.IntegrationTests/Microservices/CohortExtractor/ExtractionSuperTableTests.cs index 477db143a..badf2a596 100644 --- a/tests/SmiServices.IntegrationTests/Microservices/CohortExtractor/ExtractionSuperTableTests.cs +++ b/tests/SmiServices.IntegrationTests/Microservices/CohortExtractor/ExtractionSuperTableTests.cs @@ -17,234 +17,233 @@ using TypeGuesser; using DatabaseType = FAnsi.DatabaseType; -namespace 
SmiServices.IntegrationTests.Microservices.CohortExtractor +namespace SmiServices.IntegrationTests.Microservices.CohortExtractor; + +[RequiresRelationalDb(DatabaseType.MicrosoftSQLServer)] +[RequiresRelationalDb(DatabaseType.MySql)] +class ExtractionSuperTableTests : DatabaseTests { - [RequiresRelationalDb(DatabaseType.MicrosoftSQLServer)] - [RequiresRelationalDb(DatabaseType.MySql)] - class ExtractionSuperTableTests : DatabaseTests - { - private static DiscoveredTable BuildExampleExtractionTable(DiscoveredDatabase db, string modality, int recordCount, bool useDcmFileExtension) + private static DiscoveredTable BuildExampleExtractionTable(DiscoveredDatabase db, string modality, int recordCount, bool useDcmFileExtension) + { + var tbl = db.CreateTable(modality + "_IsExtractable", + [ + new DatabaseColumnRequest("StudyInstanceUID", new DatabaseTypeRequest(typeof(string), 64), false), + new DatabaseColumnRequest("SeriesInstanceUID", new DatabaseTypeRequest(typeof(string), 64), false), + new DatabaseColumnRequest("SOPInstanceUID", new DatabaseTypeRequest(typeof(string), 64), false){IsPrimaryKey = true}, + new DatabaseColumnRequest("IsExtractableToDisk", new DatabaseTypeRequest(typeof(bool))), + new DatabaseColumnRequest("IsExtractableToDisk_Reason", new DatabaseTypeRequest(typeof(string), 512)), + new DatabaseColumnRequest("RelativeFileArchiveURI", new DatabaseTypeRequest(typeof(string), 512), false), + new DatabaseColumnRequest("IsOriginal", new DatabaseTypeRequest(typeof(bool)), false), + new DatabaseColumnRequest("IsPrimary", new DatabaseTypeRequest(typeof(bool)), false), + new DatabaseColumnRequest(SpecialFieldNames.DataLoadRunID, new DatabaseTypeRequest(typeof(int))), + new DatabaseColumnRequest(SpecialFieldNames.ValidFrom, new DatabaseTypeRequest(typeof(DateTime))), + ]); + + if (recordCount > 0) { - var tbl = db.CreateTable(modality + "_IsExtractable", - [ - new DatabaseColumnRequest("StudyInstanceUID", new DatabaseTypeRequest(typeof(string), 64), false), - new 
DatabaseColumnRequest("SeriesInstanceUID", new DatabaseTypeRequest(typeof(string), 64), false), - new DatabaseColumnRequest("SOPInstanceUID", new DatabaseTypeRequest(typeof(string), 64), false){IsPrimaryKey = true}, - new DatabaseColumnRequest("IsExtractableToDisk", new DatabaseTypeRequest(typeof(bool))), - new DatabaseColumnRequest("IsExtractableToDisk_Reason", new DatabaseTypeRequest(typeof(string), 512)), - new DatabaseColumnRequest("RelativeFileArchiveURI", new DatabaseTypeRequest(typeof(string), 512), false), - new DatabaseColumnRequest("IsOriginal", new DatabaseTypeRequest(typeof(bool)), false), - new DatabaseColumnRequest("IsPrimary", new DatabaseTypeRequest(typeof(bool)), false), - new DatabaseColumnRequest(SpecialFieldNames.DataLoadRunID, new DatabaseTypeRequest(typeof(int))), - new DatabaseColumnRequest(SpecialFieldNames.ValidFrom, new DatabaseTypeRequest(typeof(DateTime))), - ]); - - if (recordCount > 0) + var r = new Random(500); + + DicomDataGenerator g = new(r, null) { - var r = new Random(500); + MaximumImages = recordCount + }; - DicomDataGenerator g = new(r, null) + var persons = new PersonCollection(); + persons.GeneratePeople(500, r); + while (recordCount > 0) + foreach (var image in g.GenerateStudyImages(persons.People[r.Next(persons.People.Length)], out _)) { - MaximumImages = recordCount - }; - - var persons = new PersonCollection(); - persons.GeneratePeople(500, r); - while (recordCount > 0) - foreach (var image in g.GenerateStudyImages(persons.People[r.Next(persons.People.Length)], out _)) + tbl.Insert(new Dictionary { - tbl.Insert(new Dictionary - { - {"StudyInstanceUID", image.GetSingleValue(DicomTag.StudyInstanceUID)}, - {"SeriesInstanceUID", image.GetSingleValue(DicomTag.SeriesInstanceUID)}, - {"SOPInstanceUID", image.GetSingleValue(DicomTag.SOPInstanceUID)}, + {"StudyInstanceUID", image.GetSingleValue(DicomTag.StudyInstanceUID)}, + {"SeriesInstanceUID", image.GetSingleValue(DicomTag.SeriesInstanceUID)}, + {"SOPInstanceUID", 
image.GetSingleValue(DicomTag.SOPInstanceUID)}, - {"IsExtractableToDisk", true}, - {"IsExtractableToDisk_Reason", DBNull.Value}, - {"RelativeFileArchiveURI", image.GetSingleValue(DicomTag.SOPInstanceUID) + (useDcmFileExtension ? ".dcm" :"")}, - {"IsOriginal", image.GetValues(DicomTag.ImageType)[0] == "ORIGINAL"}, - {"IsPrimary", image.GetValues(DicomTag.ImageType)[1] == "PRIMARY"}, + {"IsExtractableToDisk", true}, + {"IsExtractableToDisk_Reason", DBNull.Value}, + {"RelativeFileArchiveURI", image.GetSingleValue(DicomTag.SOPInstanceUID) + (useDcmFileExtension ? ".dcm" :"")}, + {"IsOriginal", image.GetValues(DicomTag.ImageType)[0] == "ORIGINAL"}, + {"IsPrimary", image.GetValues(DicomTag.ImageType)[1] == "PRIMARY"}, - {SpecialFieldNames.DataLoadRunID, 1}, - {SpecialFieldNames.ValidFrom, DateTime.Now}, + {SpecialFieldNames.DataLoadRunID, 1}, + {SpecialFieldNames.ValidFrom, DateTime.Now}, - }); + }); - recordCount--; + recordCount--; - if (recordCount <= 0) - break; - } - } - - return tbl; + if (recordCount <= 0) + break; + } } + return tbl; + } - [TestCase(DatabaseType.MicrosoftSQLServer, true)] - [TestCase(DatabaseType.MySql, true)] - [TestCase(DatabaseType.MicrosoftSQLServer, false)] - [TestCase(DatabaseType.MySql, false)] - public void Test_OnlyExtractableImages(DatabaseType dbType, bool useDynamic) - { - var db = GetCleanedServer(dbType); - - //create table with 300 rows to ensure at least two studies - const int testrows = 300; - var tbl = BuildExampleExtractionTable(db, "CT", testrows, true); - Assert.That(tbl.GetRowCount(), Is.EqualTo(testrows)); + [TestCase(DatabaseType.MicrosoftSQLServer, true)] + [TestCase(DatabaseType.MySql, true)] + [TestCase(DatabaseType.MicrosoftSQLServer, false)] + [TestCase(DatabaseType.MySql, false)] + public void Test_OnlyExtractableImages(DatabaseType dbType, bool useDynamic) + { + var db = GetCleanedServer(dbType); - var cata = Import(tbl); + //create table with 300 rows to ensure at least two studies + const int testrows = 300; + 
var tbl = BuildExampleExtractionTable(db, "CT", testrows, true); - List studies; + Assert.That(tbl.GetRowCount(), Is.EqualTo(testrows)); - //fetch all unique studies from the database - using (var dt = tbl.GetDataTable()) - studies = dt.Rows.Cast().Select(r => r["StudyInstanceUID"]).Cast().Distinct().ToList(); + var cata = Import(tbl); - Assert.That(studies, Has.Count.GreaterThanOrEqualTo(2), "Expected at least 2 studies to be randomly generated in database"); + List studies; - //Create message to extract all the studies by StudyInstanceUID - var msgIn = new ExtractionRequestMessage - { - KeyTag = DicomTag.StudyInstanceUID.DictionaryEntry.Keyword, - Modality = "CT", - ExtractionIdentifiers = studies - }; + //fetch all unique studies from the database + using (var dt = tbl.GetDataTable()) + studies = dt.Rows.Cast().Select(r => r["StudyInstanceUID"]).Cast().Distinct().ToList(); - int matches = 0; + Assert.That(studies, Has.Count.GreaterThanOrEqualTo(2), "Expected at least 2 studies to be randomly generated in database"); - //The strategy pattern implementation that goes to the database but also considers reason - var fulfiller = new FromCataloguesExtractionRequestFulfiller([cata]); - fulfiller.Rejectors.Add(useDynamic ? 
(IRejector)new DynamicRejector(null) : new TestRejector()); + //Create message to extract all the studies by StudyInstanceUID + var msgIn = new ExtractionRequestMessage + { + KeyTag = DicomTag.StudyInstanceUID.DictionaryEntry.Keyword, + Modality = "CT", + ExtractionIdentifiers = studies + }; - foreach (ExtractImageCollection msgOut in fulfiller.GetAllMatchingFiles(msgIn)) - { - matches += msgOut.Accepted.Count; - Assert.That(msgOut.Rejected, Is.Empty); - } + int matches = 0; - //currently all images are extractable - Assert.That(matches, Is.EqualTo(testrows)); + //The strategy pattern implementation that goes to the database but also considers reason + var fulfiller = new FromCataloguesExtractionRequestFulfiller([cata]); + fulfiller.Rejectors.Add(useDynamic ? (IRejector)new DynamicRejector(null) : new TestRejector()); - //now make 10 not extractable - using (var con = tbl.Database.Server.GetConnection()) - { - con.Open(); + foreach (ExtractImageCollection msgOut in fulfiller.GetAllMatchingFiles(msgIn)) + { + matches += msgOut.Accepted.Count; + Assert.That(msgOut.Rejected, Is.Empty); + } - string sql = GetUpdateTopXSql(tbl, 10, "Set IsExtractableToDisk=0, IsExtractableToDisk_Reason = 'We decided NO!'"); + //currently all images are extractable + Assert.That(matches, Is.EqualTo(testrows)); - //make the top 10 not extractable - using var cmd = tbl.Database.Server.GetCommand(sql, con); - cmd.ExecuteNonQuery(); - } + //now make 10 not extractable + using (var con = tbl.Database.Server.GetConnection()) + { + con.Open(); - matches = 0; - int rejections = 0; + string sql = GetUpdateTopXSql(tbl, 10, "Set IsExtractableToDisk=0, IsExtractableToDisk_Reason = 'We decided NO!'"); - foreach (ExtractImageCollection msgOut in fulfiller.GetAllMatchingFiles(msgIn)) - { - matches += msgOut.Accepted.Count; - rejections += msgOut.Rejected.Count; + //make the top 10 not extractable + using var cmd = tbl.Database.Server.GetCommand(sql, con); + cmd.ExecuteNonQuery(); + } - 
Assert.That(msgOut.Rejected.All(v => v.RejectReason!.Equals("We decided NO!")), Is.True); - } + matches = 0; + int rejections = 0; - Assert.Multiple(() => - { - Assert.That(matches, Is.EqualTo(testrows - 10)); - Assert.That(rejections, Is.EqualTo(10)); - }); + foreach (ExtractImageCollection msgOut in fulfiller.GetAllMatchingFiles(msgIn)) + { + matches += msgOut.Accepted.Count; + rejections += msgOut.Rejected.Count; + Assert.That(msgOut.Rejected.All(v => v.RejectReason!.Equals("We decided NO!")), Is.True); } - - [TestCase(DatabaseType.MicrosoftSQLServer)] - [TestCase(DatabaseType.MySql)] - public void Test_OnlyListedModalities(DatabaseType dbType) + Assert.Multiple(() => { - var db = GetCleanedServer(dbType); + Assert.That(matches, Is.EqualTo(testrows - 10)); + Assert.That(rejections, Is.EqualTo(10)); + }); - //create table with 100 rows - var tblCT = BuildExampleExtractionTable(db, "CT", 70, true); - var tblMR = BuildExampleExtractionTable(db, "MR", 30, true); + } - var cataCT = Import(tblCT); - var cataMR = Import(tblMR); - List studies = []; + [TestCase(DatabaseType.MicrosoftSQLServer)] + [TestCase(DatabaseType.MySql)] + public void Test_OnlyListedModalities(DatabaseType dbType) + { + var db = GetCleanedServer(dbType); - //fetch all unique studies from the database - using (var dt = tblCT.GetDataTable()) - studies.AddRange(dt.Rows.Cast().Select(r => r["StudyInstanceUID"]).Cast().Distinct()); - using (var dt = tblMR.GetDataTable()) - studies.AddRange(dt.Rows.Cast().Select(r => r["StudyInstanceUID"]).Cast().Distinct()); + //create table with 100 rows + var tblCT = BuildExampleExtractionTable(db, "CT", 70, true); + var tblMR = BuildExampleExtractionTable(db, "MR", 30, true); - //Create message to extract all the series by StudyInstanceUID - var msgIn = new ExtractionRequestMessage - { - KeyTag = DicomTag.StudyInstanceUID.DictionaryEntry.Keyword, + var cataCT = Import(tblCT); + var cataMR = Import(tblMR); - //extract only MR (this is what we are actually testing). 
- Modality = "MR", - ExtractionIdentifiers = studies - }; + List studies = []; - int matches = 0; + //fetch all unique studies from the database + using (var dt = tblCT.GetDataTable()) + studies.AddRange(dt.Rows.Cast().Select(r => r["StudyInstanceUID"]).Cast().Distinct()); + using (var dt = tblMR.GetDataTable()) + studies.AddRange(dt.Rows.Cast().Select(r => r["StudyInstanceUID"]).Cast().Distinct()); - //The strategy pattern implementation that goes to the database but also considers reason + //Create message to extract all the series by StudyInstanceUID + var msgIn = new ExtractionRequestMessage + { + KeyTag = DicomTag.StudyInstanceUID.DictionaryEntry.Keyword, - var fulfiller = new FromCataloguesExtractionRequestFulfiller([cataCT, cataMR]); + //extract only MR (this is what we are actually testing). + Modality = "MR", + ExtractionIdentifiers = studies + }; - foreach (ExtractImageCollection msgOut in fulfiller.GetAllMatchingFiles(msgIn)) - { - matches += msgOut.Accepted.Count; - Assert.That(msgOut.Rejected, Is.Empty); - } + int matches = 0; - //expect only the MR images to be returned - Assert.That(matches, Is.EqualTo(30)); + //The strategy pattern implementation that goes to the database but also considers reason - // Ask for something that doesn't exist - msgIn.Modality = "Hello"; - var ex = Assert.Throws(() => fulfiller.GetAllMatchingFiles(msgIn).ToArray()); - Assert.That(ex!.Message, Does.Contain("Modality=Hello")); - } + var fulfiller = new FromCataloguesExtractionRequestFulfiller([cataCT, cataMR]); - /// - /// Returns SQL to update the with the provided SET string - /// - /// Table to update - /// Number of rows to change - /// Set SQL e.g. 
"Set Col1='fish'" - /// - private static string GetUpdateTopXSql(DiscoveredTable tbl, int topXRows, string setSql) + foreach (ExtractImageCollection msgOut in fulfiller.GetAllMatchingFiles(msgIn)) { - return tbl.Database.Server.DatabaseType switch - { - DatabaseType.MicrosoftSQLServer => $"UPDATE TOP ({topXRows}) {tbl.GetFullyQualifiedName()} {setSql}", - DatabaseType.MySql => $"UPDATE {tbl.GetFullyQualifiedName()} {setSql} LIMIT {topXRows}", - _ => throw new ArgumentOutOfRangeException(nameof(tbl)), - }; + matches += msgOut.Accepted.Count; + Assert.That(msgOut.Rejected, Is.Empty); } - public class TestRejector : IRejector + //expect only the MR images to be returned + Assert.That(matches, Is.EqualTo(30)); + + // Ask for something that doesn't exist + msgIn.Modality = "Hello"; + var ex = Assert.Throws(() => fulfiller.GetAllMatchingFiles(msgIn).ToArray()); + Assert.That(ex!.Message, Does.Contain("Modality=Hello")); + } + + /// + /// Returns SQL to update the with the provided SET string + /// + /// Table to update + /// Number of rows to change + /// Set SQL e.g. "Set Col1='fish'" + /// + private static string GetUpdateTopXSql(DiscoveredTable tbl, int topXRows, string setSql) + { + return tbl.Database.Server.DatabaseType switch { - public bool Reject(IDataRecord row, [NotNullWhen(true)] out string? reason) - { - //if the image is not extractable - if (!Convert.ToBoolean(row["IsExtractableToDisk"])) - { - //tell them why and reject it - reason = (row["IsExtractableToDisk_Reason"] as string)!; - return true; - } + DatabaseType.MicrosoftSQLServer => $"UPDATE TOP ({topXRows}) {tbl.GetFullyQualifiedName()} {setSql}", + DatabaseType.MySql => $"UPDATE {tbl.GetFullyQualifiedName()} {setSql} LIMIT {topXRows}", + _ => throw new ArgumentOutOfRangeException(nameof(tbl)), + }; + } - reason = null; - return false; + public class TestRejector : IRejector + { + public bool Reject(IDataRecord row, [NotNullWhen(true)] out string? 
reason) + { + //if the image is not extractable + if (!Convert.ToBoolean(row["IsExtractableToDisk"])) + { + //tell them why and reject it + reason = (row["IsExtractableToDisk_Reason"] as string)!; + return true; } + + reason = null; + return false; } } } diff --git a/tests/SmiServices.IntegrationTests/Microservices/CohortExtractor/FromCataloguesExtractionRequestFulfillerTests.cs b/tests/SmiServices.IntegrationTests/Microservices/CohortExtractor/FromCataloguesExtractionRequestFulfillerTests.cs index 93f0115f1..63de2df4b 100644 --- a/tests/SmiServices.IntegrationTests/Microservices/CohortExtractor/FromCataloguesExtractionRequestFulfillerTests.cs +++ b/tests/SmiServices.IntegrationTests/Microservices/CohortExtractor/FromCataloguesExtractionRequestFulfillerTests.cs @@ -14,160 +14,159 @@ using System.Linq; using Tests.Common; -namespace SmiServices.IntegrationTests.Microservices.CohortExtractor +namespace SmiServices.IntegrationTests.Microservices.CohortExtractor; + +/// +/// Tests the ability of to connect to a database +/// (described in a ) and fetch matching image urls out of the database (creating +/// ExtractImageCollection results). +/// +[RequiresRelationalDb(DatabaseType.MySql)] +[RequiresRelationalDb(DatabaseType.MicrosoftSQLServer)] +public class FromCataloguesExtractionRequestFulfillerTests : DatabaseTests { - /// - /// Tests the ability of to connect to a database - /// (described in a ) and fetch matching image urls out of the database (creating - /// ExtractImageCollection results). 
- /// - [RequiresRelationalDb(DatabaseType.MySql)] - [RequiresRelationalDb(DatabaseType.MicrosoftSQLServer)] - public class FromCataloguesExtractionRequestFulfillerTests : DatabaseTests + [SetUp] + protected override void SetUp() { - [SetUp] - protected override void SetUp() + base.SetUp(); + + } + + + [TestCase(DatabaseType.MicrosoftSQLServer)] + [TestCase(DatabaseType.MySql)] + public void FromCataloguesExtractionRequestFulfiller_NormalMatching(DatabaseType databaseType) + { + var db = GetCleanedServer(databaseType); + + var dt = new DataTable(); + dt.Columns.Add("StudyInstanceUID"); + dt.Columns.Add("SeriesInstanceUID"); + dt.Columns.Add("SOPInstanceUID"); + dt.Columns.Add("Extractable"); + dt.Columns.Add(QueryToExecuteColumnSet.DefaultImagePathColumnName); + + dt.Rows.Add("1.1", "123.1", "1.1", true, "/images/1.dcm"); + dt.Rows.Add("1.1", "123.1", "2.1", false, "/images/2.dcm"); + dt.Rows.Add("1.1", "1234.1", "3.1", false, "/images/3.dcm"); + dt.Rows.Add("1.1", "1234.1", "4.1", true, "/images/4.dcm"); + + dt.SetDoNotReType(true); + + var tbl = db.CreateTable("CT_ImageTable", dt); + var catalogue = Import(tbl); + + var fulfiller = new FromCataloguesExtractionRequestFulfiller([catalogue]); + + var matching = fulfiller.GetAllMatchingFiles(new ExtractionRequestMessage + { + KeyTag = "SeriesInstanceUID", + Modality = "CT", + ExtractionIdentifiers = new List(["123.1"]), + }).ToArray(); + + Assert.That(matching, Has.Length.EqualTo(1)); + Assert.That(matching[0].Accepted, Has.Count.EqualTo(2)); + Assert.Multiple(() => { - base.SetUp(); + Assert.That(matching[0].Accepted.Count(f => f.FilePathValue.Equals("/images/1.dcm")), Is.EqualTo(1)); + Assert.That(matching[0].Accepted.Count(f => f.FilePathValue.Equals("/images/2.dcm")), Is.EqualTo(1)); + }); + } - } + [TestCase(DatabaseType.MicrosoftSQLServer)] + [TestCase(DatabaseType.MySql)] + public void FromCataloguesExtractionRequestFulfiller_MandatoryFilter(DatabaseType databaseType) + { + var db = 
GetCleanedServer(databaseType); + + var dt = new DataTable(); + dt.Columns.Add("StudyInstanceUID"); + dt.Columns.Add("SeriesInstanceUID"); + dt.Columns.Add("SOPInstanceUID"); + dt.Columns.Add("Extractable", typeof(bool)); + dt.Columns.Add(QueryToExecuteColumnSet.DefaultImagePathColumnName); + + dt.Rows.Add("1.1", "123.1", "1.1", true, "/images/1.dcm"); + dt.Rows.Add("1.1", "123.1", "2.1", false, "/images/2.dcm"); + dt.Rows.Add("1.1", "1234.1", "3.1", false, "/images/3.dcm"); + dt.Rows.Add("1.1", "1234.1", "4.1", true, "/images/4.dcm"); + + dt.SetDoNotReType(true); + var tbl = db.CreateTable("CT_ImageTable", dt); + var catalogue = Import(tbl); - [TestCase(DatabaseType.MicrosoftSQLServer)] - [TestCase(DatabaseType.MySql)] - public void FromCataloguesExtractionRequestFulfiller_NormalMatching(DatabaseType databaseType) + var ei = catalogue.GetAllExtractionInformation(ExtractionCategory.Any).First(); + var filter = new ExtractionFilter(CatalogueRepository, "Extractable only", ei) { - var db = GetCleanedServer(databaseType); - - var dt = new DataTable(); - dt.Columns.Add("StudyInstanceUID"); - dt.Columns.Add("SeriesInstanceUID"); - dt.Columns.Add("SOPInstanceUID"); - dt.Columns.Add("Extractable"); - dt.Columns.Add(QueryToExecuteColumnSet.DefaultImagePathColumnName); - - dt.Rows.Add("1.1", "123.1", "1.1", true, "/images/1.dcm"); - dt.Rows.Add("1.1", "123.1", "2.1", false, "/images/2.dcm"); - dt.Rows.Add("1.1", "1234.1", "3.1", false, "/images/3.dcm"); - dt.Rows.Add("1.1", "1234.1", "4.1", true, "/images/4.dcm"); - - dt.SetDoNotReType(true); - - var tbl = db.CreateTable("CT_ImageTable", dt); - var catalogue = Import(tbl); - - var fulfiller = new FromCataloguesExtractionRequestFulfiller([catalogue]); - - var matching = fulfiller.GetAllMatchingFiles(new ExtractionRequestMessage - { - KeyTag = "SeriesInstanceUID", - Modality = "CT", - ExtractionIdentifiers = new List(["123.1"]), - }).ToArray(); - - Assert.That(matching, Has.Length.EqualTo(1)); - 
Assert.That(matching[0].Accepted, Has.Count.EqualTo(2)); - Assert.Multiple(() => - { - Assert.That(matching[0].Accepted.Count(f => f.FilePathValue.Equals("/images/1.dcm")), Is.EqualTo(1)); - Assert.That(matching[0].Accepted.Count(f => f.FilePathValue.Equals("/images/2.dcm")), Is.EqualTo(1)); - }); - } + IsMandatory = true, + WhereSQL = "Extractable = 1" + }; + filter.SaveToDatabase(); + var fulfiller = new FromCataloguesExtractionRequestFulfiller([catalogue]); - [TestCase(DatabaseType.MicrosoftSQLServer)] - [TestCase(DatabaseType.MySql)] - public void FromCataloguesExtractionRequestFulfiller_MandatoryFilter(DatabaseType databaseType) + var matching = fulfiller.GetAllMatchingFiles(new ExtractionRequestMessage { - var db = GetCleanedServer(databaseType); - - var dt = new DataTable(); - dt.Columns.Add("StudyInstanceUID"); - dt.Columns.Add("SeriesInstanceUID"); - dt.Columns.Add("SOPInstanceUID"); - dt.Columns.Add("Extractable", typeof(bool)); - dt.Columns.Add(QueryToExecuteColumnSet.DefaultImagePathColumnName); - - dt.Rows.Add("1.1", "123.1", "1.1", true, "/images/1.dcm"); - dt.Rows.Add("1.1", "123.1", "2.1", false, "/images/2.dcm"); - dt.Rows.Add("1.1", "1234.1", "3.1", false, "/images/3.dcm"); - dt.Rows.Add("1.1", "1234.1", "4.1", true, "/images/4.dcm"); - - dt.SetDoNotReType(true); - - var tbl = db.CreateTable("CT_ImageTable", dt); - var catalogue = Import(tbl); - - var ei = catalogue.GetAllExtractionInformation(ExtractionCategory.Any).First(); - var filter = new ExtractionFilter(CatalogueRepository, "Extractable only", ei) - { - IsMandatory = true, - WhereSQL = "Extractable = 1" - }; - filter.SaveToDatabase(); - var fulfiller = new FromCataloguesExtractionRequestFulfiller([catalogue]); - - var matching = fulfiller.GetAllMatchingFiles(new ExtractionRequestMessage - { - KeyTag = "SeriesInstanceUID", - Modality = "CT", - ExtractionIdentifiers = new List(["123.1"]), - }).ToArray(); - - Assert.That(matching, Has.Length.EqualTo(1)); - Assert.That(matching[0].Accepted, 
Has.Count.EqualTo(1)); - Assert.That(matching[0].Accepted.Count(f => f.FilePathValue.Equals("/images/1.dcm")), Is.EqualTo(1)); - } + KeyTag = "SeriesInstanceUID", + Modality = "CT", + ExtractionIdentifiers = new List(["123.1"]), + }).ToArray(); + + Assert.That(matching, Has.Length.EqualTo(1)); + Assert.That(matching[0].Accepted, Has.Count.EqualTo(1)); + Assert.That(matching[0].Accepted.Count(f => f.FilePathValue.Equals("/images/1.dcm")), Is.EqualTo(1)); + } - [TestCase(DatabaseType.MicrosoftSQLServer, true)] - [TestCase(DatabaseType.MicrosoftSQLServer, false)] - public void Test_FromCataloguesExtractionRequestFulfiller_NoFilterExtraction(DatabaseType databaseType, bool isNoFiltersExtraction) + [TestCase(DatabaseType.MicrosoftSQLServer, true)] + [TestCase(DatabaseType.MicrosoftSQLServer, false)] + public void Test_FromCataloguesExtractionRequestFulfiller_NoFilterExtraction(DatabaseType databaseType, bool isNoFiltersExtraction) + { + DiscoveredDatabase db = GetCleanedServer(databaseType); + + var dt = new DataTable(); + dt.Columns.Add("StudyInstanceUID"); + dt.Columns.Add("SeriesInstanceUID"); + dt.Columns.Add("SOPInstanceUID"); + dt.Columns.Add("Extractable", typeof(bool)); + dt.Columns.Add(QueryToExecuteColumnSet.DefaultImagePathColumnName); + dt.Rows.Add("1.1", "123.1", "1.1", true, "/images/1.dcm"); + dt.SetDoNotReType(true); + + DiscoveredTable tbl = db.CreateTable("CT_ImageTable", dt); + ICatalogue catalogue = Import(tbl); + + ExtractionInformation ei = catalogue.GetAllExtractionInformation(ExtractionCategory.Any).First(); + var filter = new ExtractionFilter(CatalogueRepository, "Extractable only", ei) { - DiscoveredDatabase db = GetCleanedServer(databaseType); - - var dt = new DataTable(); - dt.Columns.Add("StudyInstanceUID"); - dt.Columns.Add("SeriesInstanceUID"); - dt.Columns.Add("SOPInstanceUID"); - dt.Columns.Add("Extractable", typeof(bool)); - dt.Columns.Add(QueryToExecuteColumnSet.DefaultImagePathColumnName); - dt.Rows.Add("1.1", "123.1", "1.1", true, 
"/images/1.dcm"); - dt.SetDoNotReType(true); - - DiscoveredTable tbl = db.CreateTable("CT_ImageTable", dt); - ICatalogue catalogue = Import(tbl); - - ExtractionInformation ei = catalogue.GetAllExtractionInformation(ExtractionCategory.Any).First(); - var filter = new ExtractionFilter(CatalogueRepository, "Extractable only", ei) - { - IsMandatory = true, - WhereSQL = "Extractable = 1" - }; - filter.SaveToDatabase(); - var fulfiller = new FromCataloguesExtractionRequestFulfiller([catalogue]); - fulfiller.Rejectors.Add(new RejectAll()); - - var message = new ExtractionRequestMessage - { - KeyTag = "SeriesInstanceUID", - Modality = "CT", - ExtractionIdentifiers = new List(["123.1"]), - IsNoFilterExtraction = isNoFiltersExtraction, - }; - - ExtractImageCollection[] matching = fulfiller.GetAllMatchingFiles(message).ToArray(); - - int expected = isNoFiltersExtraction ? 1 : 0; - Assert.That(matching, Has.Length.EqualTo(1)); - Assert.That(matching[0].Accepted, Has.Count.EqualTo(expected)); - } + IsMandatory = true, + WhereSQL = "Extractable = 1" + }; + filter.SaveToDatabase(); + var fulfiller = new FromCataloguesExtractionRequestFulfiller([catalogue]); + fulfiller.Rejectors.Add(new RejectAll()); + + var message = new ExtractionRequestMessage + { + KeyTag = "SeriesInstanceUID", + Modality = "CT", + ExtractionIdentifiers = new List(["123.1"]), + IsNoFilterExtraction = isNoFiltersExtraction, + }; + + ExtractImageCollection[] matching = fulfiller.GetAllMatchingFiles(message).ToArray(); - private class RejectAll : IRejector + int expected = isNoFiltersExtraction ? 1 : 0; + Assert.That(matching, Has.Length.EqualTo(1)); + Assert.That(matching[0].Accepted, Has.Count.EqualTo(expected)); + } + + private class RejectAll : IRejector + { + public bool Reject(IDataRecord row, [NotNullWhen(true)] out string? reason) { - public bool Reject(IDataRecord row, [NotNullWhen(true)] out string? 
reason) - { - reason = "Rejector is " + nameof(RejectAll); - return true; - } + reason = "Rejector is " + nameof(RejectAll); + return true; } } } diff --git a/tests/SmiServices.IntegrationTests/Microservices/CohortPackager/CohortPackagerHostTest.cs b/tests/SmiServices.IntegrationTests/Microservices/CohortPackager/CohortPackagerHostTest.cs index 03a44d51f..bd22bd018 100644 --- a/tests/SmiServices.IntegrationTests/Microservices/CohortPackager/CohortPackagerHostTest.cs +++ b/tests/SmiServices.IntegrationTests/Microservices/CohortPackager/CohortPackagerHostTest.cs @@ -19,234 +19,234 @@ using System.Threading; -namespace SmiServices.IntegrationTests.Microservices.CohortPackager +namespace SmiServices.IntegrationTests.Microservices.CohortPackager; + +[TestFixture, RequiresMongoDb, RequiresRabbit] +public class CohortPackagerHostTest { - [TestFixture, RequiresMongoDb, RequiresRabbit] - public class CohortPackagerHostTest - { - private readonly TestDateTimeProvider _dateTimeProvider = new(); + private readonly TestDateTimeProvider _dateTimeProvider = new(); - #region Fixture Methods + #region Fixture Methods - [OneTimeSetUp] - public void OneTimeSetUp() - { - } + [OneTimeSetUp] + public void OneTimeSetUp() + { + } - [OneTimeTearDown] - public void OneTimeTearDown() { } + [OneTimeTearDown] + public void OneTimeTearDown() { } - #endregion + #endregion - #region Test Methods + #region Test Methods - [SetUp] - public void SetUp() { } + [SetUp] + public void SetUp() { } - [TearDown] - public void TearDown() { } + [TearDown] + public void TearDown() { } - #endregion + #endregion - #region Fixtures + #region Fixtures - #endregion + #endregion - #region Tests + #region Tests - private static bool HaveFiles(PathFixtures pf) => Directory.Exists(pf.ProjReportsDirAbsolute) && Directory.EnumerateFiles(pf.ProjExtractDirAbsolute).Any(); + private static bool HaveFiles(PathFixtures pf) => Directory.Exists(pf.ProjReportsDirAbsolute) && 
Directory.EnumerateFiles(pf.ProjExtractDirAbsolute).Any(); - private static void VerifyReports(GlobalOptions globals, PathFixtures pf, IEnumerable> toSend, bool isIdentifiableExtraction) + private static void VerifyReports(GlobalOptions globals, PathFixtures pf, IEnumerable> toSend, bool isIdentifiableExtraction) + { + globals.FileSystemOptions!.ExtractRoot = pf.ExtractRootAbsolute; + globals.CohortPackagerOptions!.JobWatcherTimeoutInSeconds = 5; + globals.CohortPackagerOptions.VerificationMessageQueueProcessBatches = false; + + MongoClient client = MongoClientHelpers.GetMongoClient(globals.MongoDatabases!.ExtractionStoreOptions!, "test", true); + globals.MongoDatabases.ExtractionStoreOptions!.DatabaseName += "-" + Guid.NewGuid().ToString().Split('-')[0]; + client.DropDatabase(globals.MongoDatabases.ExtractionStoreOptions.DatabaseName); + + using (var tester = new MicroserviceTester( + globals.RabbitOptions!, + globals.CohortPackagerOptions.ExtractRequestInfoOptions!, + globals.CohortPackagerOptions.FileCollectionInfoOptions!, + globals.CohortPackagerOptions.NoVerifyStatusOptions!, + globals.CohortPackagerOptions.VerificationStatusOptions!)) { - globals.FileSystemOptions!.ExtractRoot = pf.ExtractRootAbsolute; - globals.CohortPackagerOptions!.JobWatcherTimeoutInSeconds = 5; - globals.CohortPackagerOptions.VerificationMessageQueueProcessBatches = false; - - MongoClient client = MongoClientHelpers.GetMongoClient(globals.MongoDatabases!.ExtractionStoreOptions!, "test", true); - globals.MongoDatabases.ExtractionStoreOptions!.DatabaseName += "-" + Guid.NewGuid().ToString().Split('-')[0]; - client.DropDatabase(globals.MongoDatabases.ExtractionStoreOptions.DatabaseName); - - using (var tester = new MicroserviceTester( - globals.RabbitOptions!, - globals.CohortPackagerOptions.ExtractRequestInfoOptions!, - globals.CohortPackagerOptions.FileCollectionInfoOptions!, - globals.CohortPackagerOptions.NoVerifyStatusOptions!, - 
globals.CohortPackagerOptions.VerificationStatusOptions!)) - { - foreach ((ConsumerOptions consumerOptions, IMessage message) in toSend) - tester.SendMessage(consumerOptions, new MessageHeader(), message); - - var host = new CohortPackagerHost(globals); + foreach ((ConsumerOptions consumerOptions, IMessage message) in toSend) + tester.SendMessage(consumerOptions, new MessageHeader(), message); - host.Start(); + var host = new CohortPackagerHost(globals); - var timeoutSecs = 10; + host.Start(); - while (!HaveFiles(pf) && timeoutSecs > 0) - { - --timeoutSecs; - Thread.Sleep(TimeSpan.FromSeconds(1)); - } + var timeoutSecs = 10; - host.Stop("Test end"); + while (!HaveFiles(pf) && timeoutSecs > 0) + { + --timeoutSecs; + Thread.Sleep(TimeSpan.FromSeconds(1)); } - var firstLine = $"# SMI extraction validation report for testProj1 {pf.ExtractName}{globals.CohortPackagerOptions.ReportNewLine}"; - - string extractReportsDirAbsolute = Path.Combine(pf.ProjReportsDirAbsolute, pf.ExtractName); - var expectedReports = isIdentifiableExtraction ? 3 : 4; - Assert.That(Directory.GetFiles(extractReportsDirAbsolute), Has.Length.EqualTo(expectedReports)); - string[] reportContent = File.ReadAllLines(Path.Combine(extractReportsDirAbsolute, "README.md")); - Assert.That(reportContent[0], Is.EqualTo(firstLine)); + host.Stop("Test end"); } - [Test] - public void Integration_HappyPath() - { - // Test messages: - // - series-1 - // - series-1-anon-1.dcm -> valid + var firstLine = $"# SMI extraction validation report for testProj1 {pf.ExtractName}{globals.CohortPackagerOptions.ReportNewLine}"; - using var pf = new PathFixtures(nameof(Integration_HappyPath)); + string extractReportsDirAbsolute = Path.Combine(pf.ProjReportsDirAbsolute, pf.ExtractName); + var expectedReports = isIdentifiableExtraction ? 
3 : 4; + Assert.That(Directory.GetFiles(extractReportsDirAbsolute), Has.Length.EqualTo(expectedReports)); + string[] reportContent = File.ReadAllLines(Path.Combine(extractReportsDirAbsolute, "README.md")); + Assert.That(reportContent[0], Is.EqualTo(firstLine)); + } - var jobId = Guid.NewGuid(); - var testExtractionRequestInfoMessage = new ExtractionRequestInfoMessage - { - JobSubmittedAt = _dateTimeProvider.UtcNow(), - ProjectNumber = "testProj1", - ExtractionJobIdentifier = jobId, - ExtractionDirectory = pf.ProjExtractDirRelative, - Modality = "CT", - KeyTag = "SeriesInstanceUID", - KeyValueCount = 1, - UserName = "testUser", - }; - var testExtractFileCollectionInfoMessage = new ExtractFileCollectionInfoMessage - { - JobSubmittedAt = _dateTimeProvider.UtcNow(), - ProjectNumber = "testProj1", - ExtractionJobIdentifier = jobId, - ExtractionDirectory = pf.ProjExtractDirRelative, - Modality = "CT", - ExtractFileMessagesDispatched = new JsonCompatibleDictionary - { - { new MessageHeader(), "series-1-anon-1.dcm" }, - }, - RejectionReasons = new Dictionary - { - {"rejected - blah", 1 }, - }, - KeyValue = "series-1", - }; - var testIsIdentifiableMessage = new ExtractedFileVerificationMessage - { - JobSubmittedAt = _dateTimeProvider.UtcNow(), - OutputFilePath = "series-1-anon-1.dcm", - ProjectNumber = "testProj1", - ExtractionJobIdentifier = jobId, - ExtractionDirectory = pf.ProjExtractDirRelative, - Modality = "CT", - Status = VerifiedFileStatus.NotIdentifiable, - Report = "[]", - DicomFilePath = "series-1-orig-1.dcm", - }; - - GlobalOptions globals = new GlobalOptionsFactory().Load(nameof(Integration_HappyPath)); - - VerifyReports( - globals, - pf, - [ - new Tuple(globals.CohortPackagerOptions!.ExtractRequestInfoOptions!, testExtractionRequestInfoMessage), - new Tuple(globals.CohortPackagerOptions.FileCollectionInfoOptions!, testExtractFileCollectionInfoMessage), - new Tuple(globals.CohortPackagerOptions.VerificationStatusOptions!, testIsIdentifiableMessage), - ], - 
isIdentifiableExtraction: false - ); - } + [Test] + public void Integration_HappyPath() + { + // Test messages: + // - series-1 + // - series-1-anon-1.dcm -> valid + + using var pf = new PathFixtures(nameof(Integration_HappyPath)); - [Test] - public void Integration_BumpyRoad() + var jobId = Guid.NewGuid(); + var testExtractionRequestInfoMessage = new ExtractionRequestInfoMessage { - // Test messages: - // - series-1 - // - series-1-anon-1.dcm -> valid - // - series-1-anon-2.dcm -> rejected - // - series-2 - // - series-2-anon-1.dcm -> fails anonymisation - // - series-2-anon-2.dcm -> fails validation - - using var pf = new PathFixtures(nameof(Integration_BumpyRoad)); - - var jobId = Guid.NewGuid(); - var testExtractionRequestInfoMessage = new ExtractionRequestInfoMessage + JobSubmittedAt = _dateTimeProvider.UtcNow(), + ProjectNumber = "testProj1", + ExtractionJobIdentifier = jobId, + ExtractionDirectory = pf.ProjExtractDirRelative, + Modality = "CT", + KeyTag = "SeriesInstanceUID", + KeyValueCount = 1, + UserName = "testUser", + }; + var testExtractFileCollectionInfoMessage = new ExtractFileCollectionInfoMessage + { + JobSubmittedAt = _dateTimeProvider.UtcNow(), + ProjectNumber = "testProj1", + ExtractionJobIdentifier = jobId, + ExtractionDirectory = pf.ProjExtractDirRelative, + Modality = "CT", + ExtractFileMessagesDispatched = new JsonCompatibleDictionary { - JobSubmittedAt = _dateTimeProvider.UtcNow(), - ProjectNumber = "testProj1", - ExtractionJobIdentifier = jobId, - ExtractionDirectory = pf.ProjExtractDirRelative, - Modality = "CT", - KeyTag = "SeriesInstanceUID", - KeyValueCount = 2, - UserName = "testUser", - }; - var testExtractFileCollectionInfoMessage1 = new ExtractFileCollectionInfoMessage + { new MessageHeader(), "series-1-anon-1.dcm" }, + }, + RejectionReasons = new Dictionary { - JobSubmittedAt = _dateTimeProvider.UtcNow(), - ProjectNumber = "testProj1", - ExtractionJobIdentifier = jobId, - ExtractionDirectory = pf.ProjExtractDirRelative, - Modality 
= "CT", - ExtractFileMessagesDispatched = new JsonCompatibleDictionary - { - { new MessageHeader(), "series-1-anon-1.dcm" }, - }, - RejectionReasons = new Dictionary - { - {"rejected - blah", 1 }, - }, - KeyValue = "series-1", - }; - var testExtractFileCollectionInfoMessage2 = new ExtractFileCollectionInfoMessage + {"rejected - blah", 1 }, + }, + KeyValue = "series-1", + }; + var testIsIdentifiableMessage = new ExtractedFileVerificationMessage + { + JobSubmittedAt = _dateTimeProvider.UtcNow(), + OutputFilePath = "series-1-anon-1.dcm", + ProjectNumber = "testProj1", + ExtractionJobIdentifier = jobId, + ExtractionDirectory = pf.ProjExtractDirRelative, + Modality = "CT", + Status = VerifiedFileStatus.NotIdentifiable, + Report = "[]", + DicomFilePath = "series-1-orig-1.dcm", + }; + + GlobalOptions globals = new GlobalOptionsFactory().Load(nameof(Integration_HappyPath)); + + VerifyReports( + globals, + pf, + [ + new Tuple(globals.CohortPackagerOptions!.ExtractRequestInfoOptions!, testExtractionRequestInfoMessage), + new Tuple(globals.CohortPackagerOptions.FileCollectionInfoOptions!, testExtractFileCollectionInfoMessage), + new Tuple(globals.CohortPackagerOptions.VerificationStatusOptions!, testIsIdentifiableMessage), + ], + isIdentifiableExtraction: false + ); + } + + [Test] + public void Integration_BumpyRoad() + { + // Test messages: + // - series-1 + // - series-1-anon-1.dcm -> valid + // - series-1-anon-2.dcm -> rejected + // - series-2 + // - series-2-anon-1.dcm -> fails anonymisation + // - series-2-anon-2.dcm -> fails validation + + using var pf = new PathFixtures(nameof(Integration_BumpyRoad)); + + var jobId = Guid.NewGuid(); + var testExtractionRequestInfoMessage = new ExtractionRequestInfoMessage + { + JobSubmittedAt = _dateTimeProvider.UtcNow(), + ProjectNumber = "testProj1", + ExtractionJobIdentifier = jobId, + ExtractionDirectory = pf.ProjExtractDirRelative, + Modality = "CT", + KeyTag = "SeriesInstanceUID", + KeyValueCount = 2, + UserName = "testUser", + 
}; + var testExtractFileCollectionInfoMessage1 = new ExtractFileCollectionInfoMessage + { + JobSubmittedAt = _dateTimeProvider.UtcNow(), + ProjectNumber = "testProj1", + ExtractionJobIdentifier = jobId, + ExtractionDirectory = pf.ProjExtractDirRelative, + Modality = "CT", + ExtractFileMessagesDispatched = new JsonCompatibleDictionary { - JobSubmittedAt = _dateTimeProvider.UtcNow(), - ProjectNumber = "testProj1", - ExtractionJobIdentifier = jobId, - ExtractionDirectory = pf.ProjExtractDirRelative, - Modality = "CT", - ExtractFileMessagesDispatched = new JsonCompatibleDictionary - { - { new MessageHeader(), "series-2-anon-1.dcm" }, - { new MessageHeader(), "series-2-anon-2.dcm" }, - }, - RejectionReasons = [], - KeyValue = "series-2", - }; - var testExtractFileStatusMessage = new ExtractedFileStatusMessage + { new MessageHeader(), "series-1-anon-1.dcm" }, + }, + RejectionReasons = new Dictionary { - JobSubmittedAt = _dateTimeProvider.UtcNow(), - OutputFilePath = "series-2-anon-1.dcm", - ProjectNumber = "testProj1", - ExtractionJobIdentifier = jobId, - ExtractionDirectory = pf.ProjExtractDirRelative, - Modality = "CT", - Status = ExtractedFileStatus.ErrorWontRetry, - StatusMessage = "Couldn't anonymise", - DicomFilePath = "series-2-orig-1.dcm", - }; - var testIsIdentifiableMessage1 = new ExtractedFileVerificationMessage + {"rejected - blah", 1 }, + }, + KeyValue = "series-1", + }; + var testExtractFileCollectionInfoMessage2 = new ExtractFileCollectionInfoMessage + { + JobSubmittedAt = _dateTimeProvider.UtcNow(), + ProjectNumber = "testProj1", + ExtractionJobIdentifier = jobId, + ExtractionDirectory = pf.ProjExtractDirRelative, + Modality = "CT", + ExtractFileMessagesDispatched = new JsonCompatibleDictionary { - JobSubmittedAt = _dateTimeProvider.UtcNow(), - OutputFilePath = "series-1-anon-1.dcm", - ProjectNumber = "testProj1", - ExtractionJobIdentifier = jobId, - ExtractionDirectory = pf.ProjExtractDirRelative, - Modality = "CT", - Status = 
VerifiedFileStatus.NotIdentifiable, - Report = "[]", - DicomFilePath = "series-1-orig-1.dcm", - }; - const string failureReport = @" + { new MessageHeader(), "series-2-anon-1.dcm" }, + { new MessageHeader(), "series-2-anon-2.dcm" }, + }, + RejectionReasons = [], + KeyValue = "series-2", + }; + var testExtractFileStatusMessage = new ExtractedFileStatusMessage + { + JobSubmittedAt = _dateTimeProvider.UtcNow(), + OutputFilePath = "series-2-anon-1.dcm", + ProjectNumber = "testProj1", + ExtractionJobIdentifier = jobId, + ExtractionDirectory = pf.ProjExtractDirRelative, + Modality = "CT", + Status = ExtractedFileStatus.ErrorWontRetry, + StatusMessage = "Couldn't anonymise", + DicomFilePath = "series-2-orig-1.dcm", + }; + var testIsIdentifiableMessage1 = new ExtractedFileVerificationMessage + { + JobSubmittedAt = _dateTimeProvider.UtcNow(), + OutputFilePath = "series-1-anon-1.dcm", + ProjectNumber = "testProj1", + ExtractionJobIdentifier = jobId, + ExtractionDirectory = pf.ProjExtractDirRelative, + Modality = "CT", + Status = VerifiedFileStatus.NotIdentifiable, + Report = "[]", + DicomFilePath = "series-1-orig-1.dcm", + }; + const string failureReport = @" [ { 'Parts': [], @@ -256,150 +256,149 @@ public void Integration_BumpyRoad() 'ProblemValue': 'FOO' } ]"; - var testIsIdentifiableMessage2 = new ExtractedFileVerificationMessage - { - JobSubmittedAt = _dateTimeProvider.UtcNow(), - OutputFilePath = "series-2-anon-2.dcm", - ProjectNumber = "testProj1", - ExtractionJobIdentifier = jobId, - ExtractionDirectory = pf.ProjExtractDirRelative, - Modality = "CT", - Status = VerifiedFileStatus.IsIdentifiable, - Report = failureReport, - DicomFilePath = "series-2-orig-2.dcm", - }; - - GlobalOptions globals = new GlobalOptionsFactory().Load(nameof(Integration_BumpyRoad)); - - VerifyReports( - globals, - pf, - [ - new Tuple(globals.CohortPackagerOptions!.ExtractRequestInfoOptions!, testExtractionRequestInfoMessage), - new 
Tuple(globals.CohortPackagerOptions.FileCollectionInfoOptions!,testExtractFileCollectionInfoMessage1), - new Tuple(globals.CohortPackagerOptions.FileCollectionInfoOptions!, testExtractFileCollectionInfoMessage2), - new Tuple(globals.CohortPackagerOptions.NoVerifyStatusOptions!, testExtractFileStatusMessage), - new Tuple(globals.CohortPackagerOptions.VerificationStatusOptions!, testIsIdentifiableMessage1), - new Tuple(globals.CohortPackagerOptions.VerificationStatusOptions!, testIsIdentifiableMessage2), - ], - isIdentifiableExtraction: false - ); - } - - [Test] - public void Integration_IdentifiableExtraction_HappyPath() + var testIsIdentifiableMessage2 = new ExtractedFileVerificationMessage { - using var pf = new PathFixtures(nameof(Integration_IdentifiableExtraction_HappyPath)); + JobSubmittedAt = _dateTimeProvider.UtcNow(), + OutputFilePath = "series-2-anon-2.dcm", + ProjectNumber = "testProj1", + ExtractionJobIdentifier = jobId, + ExtractionDirectory = pf.ProjExtractDirRelative, + Modality = "CT", + Status = VerifiedFileStatus.IsIdentifiable, + Report = failureReport, + DicomFilePath = "series-2-orig-2.dcm", + }; + + GlobalOptions globals = new GlobalOptionsFactory().Load(nameof(Integration_BumpyRoad)); + + VerifyReports( + globals, + pf, + [ + new Tuple(globals.CohortPackagerOptions!.ExtractRequestInfoOptions!, testExtractionRequestInfoMessage), + new Tuple(globals.CohortPackagerOptions.FileCollectionInfoOptions!,testExtractFileCollectionInfoMessage1), + new Tuple(globals.CohortPackagerOptions.FileCollectionInfoOptions!, testExtractFileCollectionInfoMessage2), + new Tuple(globals.CohortPackagerOptions.NoVerifyStatusOptions!, testExtractFileStatusMessage), + new Tuple(globals.CohortPackagerOptions.VerificationStatusOptions!, testIsIdentifiableMessage1), + new Tuple(globals.CohortPackagerOptions.VerificationStatusOptions!, testIsIdentifiableMessage2), + ], + isIdentifiableExtraction: false + ); + } - var jobId = Guid.NewGuid(); - var 
testExtractionRequestInfoMessage = new ExtractionRequestInfoMessage - { - Modality = "MR", - JobSubmittedAt = _dateTimeProvider.UtcNow(), - ProjectNumber = "testProj1", - ExtractionJobIdentifier = jobId, - ExtractionDirectory = pf.ProjExtractDirRelative, - KeyTag = "StudyInstanceUID", - KeyValueCount = 1, - UserName = "testUser", - IsIdentifiableExtraction = true, - }; - var testExtractFileCollectionInfoMessage = new ExtractFileCollectionInfoMessage - { - JobSubmittedAt = _dateTimeProvider.UtcNow(), - ProjectNumber = "testProj1", - ExtractionJobIdentifier = jobId, - ExtractionDirectory = pf.ProjExtractDirRelative, - Modality = "MR", - ExtractFileMessagesDispatched = new JsonCompatibleDictionary - { - { new MessageHeader(), "out1.dcm" }, - { new MessageHeader(), "out2.dcm" }, - }, - RejectionReasons = new Dictionary - { - {"rejected - blah", 1 }, - }, - KeyValue = "study-1", - IsIdentifiableExtraction = true, - }; - var testExtractFileStatusMessage1 = new ExtractedFileStatusMessage + [Test] + public void Integration_IdentifiableExtraction_HappyPath() + { + using var pf = new PathFixtures(nameof(Integration_IdentifiableExtraction_HappyPath)); + + var jobId = Guid.NewGuid(); + var testExtractionRequestInfoMessage = new ExtractionRequestInfoMessage + { + Modality = "MR", + JobSubmittedAt = _dateTimeProvider.UtcNow(), + ProjectNumber = "testProj1", + ExtractionJobIdentifier = jobId, + ExtractionDirectory = pf.ProjExtractDirRelative, + KeyTag = "StudyInstanceUID", + KeyValueCount = 1, + UserName = "testUser", + IsIdentifiableExtraction = true, + }; + var testExtractFileCollectionInfoMessage = new ExtractFileCollectionInfoMessage + { + JobSubmittedAt = _dateTimeProvider.UtcNow(), + ProjectNumber = "testProj1", + ExtractionJobIdentifier = jobId, + ExtractionDirectory = pf.ProjExtractDirRelative, + Modality = "MR", + ExtractFileMessagesDispatched = new JsonCompatibleDictionary { - JobSubmittedAt = _dateTimeProvider.UtcNow(), - OutputFilePath = "src.dcm", - ProjectNumber = 
"testProj1", - ExtractionJobIdentifier = jobId, - ExtractionDirectory = pf.ProjExtractDirRelative, - Modality = "MR", - Status = ExtractedFileStatus.Copied, - StatusMessage = null, - DicomFilePath = "study-1-orig-1.dcm", - IsIdentifiableExtraction = true, - }; - var testExtractFileStatusMessage2 = new ExtractedFileStatusMessage + { new MessageHeader(), "out1.dcm" }, + { new MessageHeader(), "out2.dcm" }, + }, + RejectionReasons = new Dictionary { - JobSubmittedAt = _dateTimeProvider.UtcNow(), - OutputFilePath = null, - ProjectNumber = "testProj1", - ExtractionJobIdentifier = jobId, - ExtractionDirectory = pf.ProjExtractDirRelative, - Modality = "MR", - Status = ExtractedFileStatus.FileMissing, - StatusMessage = "Couldn't find src_missing.dcm", - DicomFilePath = "src_missing.dcm", - IsIdentifiableExtraction = true, - }; - - GlobalOptions globals = new GlobalOptionsFactory().Load(nameof(Integration_IdentifiableExtraction_HappyPath)); - - VerifyReports( - globals, - pf, - [ - new Tuple(globals.CohortPackagerOptions!.ExtractRequestInfoOptions!, testExtractionRequestInfoMessage), - new Tuple(globals.CohortPackagerOptions.FileCollectionInfoOptions!,testExtractFileCollectionInfoMessage), - new Tuple(globals.CohortPackagerOptions.NoVerifyStatusOptions!,testExtractFileStatusMessage1), - new Tuple(globals.CohortPackagerOptions.NoVerifyStatusOptions!, testExtractFileStatusMessage2), - ], - isIdentifiableExtraction: true - ); - } - - [Test] - public void Constructor_JobStoreDateProvider_ThrowsException() + {"rejected - blah", 1 }, + }, + KeyValue = "study-1", + IsIdentifiableExtraction = true, + }; + var testExtractFileStatusMessage1 = new ExtractedFileStatusMessage { - // Arrange + JobSubmittedAt = _dateTimeProvider.UtcNow(), + OutputFilePath = "src.dcm", + ProjectNumber = "testProj1", + ExtractionJobIdentifier = jobId, + ExtractionDirectory = pf.ProjExtractDirRelative, + Modality = "MR", + Status = ExtractedFileStatus.Copied, + StatusMessage = null, + DicomFilePath = 
"study-1-orig-1.dcm", + IsIdentifiableExtraction = true, + }; + var testExtractFileStatusMessage2 = new ExtractedFileStatusMessage + { + JobSubmittedAt = _dateTimeProvider.UtcNow(), + OutputFilePath = null, + ProjectNumber = "testProj1", + ExtractionJobIdentifier = jobId, + ExtractionDirectory = pf.ProjExtractDirRelative, + Modality = "MR", + Status = ExtractedFileStatus.FileMissing, + StatusMessage = "Couldn't find src_missing.dcm", + DicomFilePath = "src_missing.dcm", + IsIdentifiableExtraction = true, + }; + + GlobalOptions globals = new GlobalOptionsFactory().Load(nameof(Integration_IdentifiableExtraction_HappyPath)); + + VerifyReports( + globals, + pf, + [ + new Tuple(globals.CohortPackagerOptions!.ExtractRequestInfoOptions!, testExtractionRequestInfoMessage), + new Tuple(globals.CohortPackagerOptions.FileCollectionInfoOptions!,testExtractFileCollectionInfoMessage), + new Tuple(globals.CohortPackagerOptions.NoVerifyStatusOptions!,testExtractFileStatusMessage1), + new Tuple(globals.CohortPackagerOptions.NoVerifyStatusOptions!, testExtractFileStatusMessage2), + ], + isIdentifiableExtraction: true + ); + } - var globals = new GlobalOptionsFactory().Load(nameof(Constructor_JobStoreDateProvider_ThrowsException)); + [Test] + public void Constructor_JobStoreDateProvider_ThrowsException() + { + // Arrange - // Act + var globals = new GlobalOptionsFactory().Load(nameof(Constructor_JobStoreDateProvider_ThrowsException)); - CohortPackagerHost constructor() => new(globals, new Mock().Object, null, null, null, new Mock().Object, new DateTimeProvider()); + // Act - // Assert + CohortPackagerHost constructor() => new(globals, new Mock().Object, null, null, null, new Mock().Object, new DateTimeProvider()); - var exc = Assert.Throws(() => constructor()); - Assert.That(exc!.Message, Is.EqualTo("jobStore and dateTimeProvider are mutually exclusive arguments")); - } + // Assert - [Test] - public void Constructor_InvalidExtractRoot_ThrowsException() - { - // Arrange + var exc = 
Assert.Throws(() => constructor()); + Assert.That(exc!.Message, Is.EqualTo("jobStore and dateTimeProvider are mutually exclusive arguments")); + } - var globals = new GlobalOptionsFactory().Load(nameof(Constructor_InvalidExtractRoot_ThrowsException)); - globals.FileSystemOptions!.ExtractRoot = " "; + [Test] + public void Constructor_InvalidExtractRoot_ThrowsException() + { + // Arrange - // Act + var globals = new GlobalOptionsFactory().Load(nameof(Constructor_InvalidExtractRoot_ThrowsException)); + globals.FileSystemOptions!.ExtractRoot = " "; - CohortPackagerHost constructor() => new(globals, new Mock().Object, null, null, null, new Mock().Object, null); + // Act - // Assert + CohortPackagerHost constructor() => new(globals, new Mock().Object, null, null, null, new Mock().Object, null); - var exc = Assert.Throws(() => constructor()); - Assert.That(exc!.Message, Is.EqualTo("Specified argument was out of the range of valid values. (Parameter 'globals')")); - } + // Assert - #endregion + var exc = Assert.Throws(() => constructor()); + Assert.That(exc!.Message, Is.EqualTo("Specified argument was out of the range of valid values. 
(Parameter 'globals')")); } + + #endregion } diff --git a/tests/SmiServices.IntegrationTests/Microservices/DicomAnonymiser/DicomAnonymiserHostTests.cs b/tests/SmiServices.IntegrationTests/Microservices/DicomAnonymiser/DicomAnonymiserHostTests.cs index 8d9798c21..d7e22570e 100644 --- a/tests/SmiServices.IntegrationTests/Microservices/DicomAnonymiser/DicomAnonymiserHostTests.cs +++ b/tests/SmiServices.IntegrationTests/Microservices/DicomAnonymiser/DicomAnonymiserHostTests.cs @@ -14,185 +14,184 @@ using System.Linq; using System.Threading; -namespace SmiServices.IntegrationTests.Microservices.DicomAnonymiser +namespace SmiServices.IntegrationTests.Microservices.DicomAnonymiser; + +[RequiresRabbit] +public class DicomAnonymiserHostTests { - [RequiresRabbit] - public class DicomAnonymiserHostTests - { - #region Fixture Methods + #region Fixture Methods - // Private fields (_tempTestDir, _dicomRoot, _fakeDicom) that - // are used in the setup and teardown of each test. - private DirectoryInfo _tempTestDir = null!; - private DirectoryInfo _dicomRoot = null!; - private string _fakeDicom = null!; + // Private fields (_tempTestDir, _dicomRoot, _fakeDicom) that + // are used in the setup and teardown of each test. + private DirectoryInfo _tempTestDir = null!; + private DirectoryInfo _dicomRoot = null!; + private string _fakeDicom = null!; - // [OneTimeSetUp] and [OneTimeTearDown] methods are run once - // before and after all the tests in the class, respectively. - // In this case, the setup method is used to set up a logger. - [OneTimeSetUp] - public void OneTimeSetUp() - { - } + // [OneTimeSetUp] and [OneTimeTearDown] methods are run once + // before and after all the tests in the class, respectively. + // In this case, the setup method is used to set up a logger. 
+ [OneTimeSetUp] + public void OneTimeSetUp() + { + } - [OneTimeTearDown] - public void OneTimeTearDown() { } + [OneTimeTearDown] + public void OneTimeTearDown() { } - #endregion + #endregion - #region Test Methods + #region Test Methods - // [SetUp] and [TearDown] methods are run before and after each - // test, respectively. In this case, the setup method creates a - // temporary directory and a fake DICOM file, and the teardown - // method deletes the temporary directory. - [SetUp] - public void SetUp() - { - // TODO(rkm 2022-12-08) check this is set properly for each test - var tempTestDirPath = TestFileSystemHelpers.GetTemporaryTestDirectory(); - _tempTestDir = Directory.CreateDirectory(tempTestDirPath); - _dicomRoot = Directory.CreateDirectory(Path.Combine(_tempTestDir.FullName, "dicom")); - _fakeDicom = Path.Combine(_dicomRoot.FullName, "foo.dcm"); - } + // [SetUp] and [TearDown] methods are run before and after each + // test, respectively. In this case, the setup method creates a + // temporary directory and a fake DICOM file, and the teardown + // method deletes the temporary directory. + [SetUp] + public void SetUp() + { + // TODO(rkm 2022-12-08) check this is set properly for each test + var tempTestDirPath = TestFileSystemHelpers.GetTemporaryTestDirectory(); + _tempTestDir = Directory.CreateDirectory(tempTestDirPath); + _dicomRoot = Directory.CreateDirectory(Path.Combine(_tempTestDir.FullName, "dicom")); + _fakeDicom = Path.Combine(_dicomRoot.FullName, "foo.dcm"); + } - [TearDown] - public void TearDown() - { - File.SetAttributes(_fakeDicom, FileAttributes.Normal); - _tempTestDir.Delete(recursive: true); - } + [TearDown] + public void TearDown() + { + File.SetAttributes(_fakeDicom, FileAttributes.Normal); + _tempTestDir.Delete(recursive: true); + } - #endregion + #endregion - #region Tests + #region Tests - // The Integration_HappyPath_MockAnonymiser method is a test case - // structured in the Arrange-Act-Assert pattern. 
It tests for the - // scenario where the DICOM Anonymiser successfully anonymises a - // DICOM file. - [Test] - public void Integration_HappyPath_MockAnonymiser() + // The Integration_HappyPath_MockAnonymiser method is a test case + // structured in the Arrange-Act-Assert pattern. It tests for the + // scenario where the DICOM Anonymiser successfully anonymises a + // DICOM file. + [Test] + public void Integration_HappyPath_MockAnonymiser() + { + // Arrange + // It sets up the necessary objects and state for the test. This + // includes creating a mock DICOM Anonymiser, setting file paths, + // and creating a DicomAnonymiserHost. + + GlobalOptions globals = new GlobalOptionsFactory().Load(nameof(Integration_HappyPath_MockAnonymiser)); + globals.FileSystemOptions!.FileSystemRoot = _dicomRoot.FullName; + + var extractRoot = Directory.CreateDirectory(Path.Combine(_tempTestDir.FullName, "extractRoot")); + globals.FileSystemOptions.ExtractRoot = extractRoot.FullName; + + // NOTE: The commented out code below is an alternative way to create + // a fake DICOM file, however, it is not used in this test. 
+ // File.Create(_fakeDicom).Dispose(); + // File.SetAttributes(_fakeDicom, File.GetAttributes(_fakeDicom) | FileAttributes.ReadOnly); + + var dicomFile = new DicomFile(); + dicomFile.Dataset.Add(DicomTag.PatientID, "12345678"); + dicomFile.Dataset.Add(DicomTag.Modality, "CT"); + dicomFile.Dataset.Add(DicomTag.StudyInstanceUID, DicomUIDGenerator.GenerateDerivedFromUUID()); + dicomFile.Dataset.Add(DicomTag.SeriesInstanceUID, DicomUIDGenerator.GenerateDerivedFromUUID()); + dicomFile.Dataset.Add(DicomTag.SOPInstanceUID, DicomUIDGenerator.GenerateDerivedFromUUID()); + dicomFile.FileMetaInfo.MediaStorageSOPClassUID = DicomUID.SecondaryCaptureImageStorage; + dicomFile.FileMetaInfo.MediaStorageSOPInstanceUID = DicomUIDGenerator.GenerateDerivedFromUUID(); + dicomFile.FileMetaInfo.ImplementationClassUID = DicomUIDGenerator.GenerateDerivedFromUUID(); + dicomFile.FileMetaInfo.TransferSyntax = DicomTransferSyntax.ExplicitVRLittleEndian; + dicomFile.Save(_fakeDicom); + + File.SetAttributes(_fakeDicom, File.GetAttributes(_fakeDicom) | FileAttributes.ReadOnly); + + var extractDirAbs = Directory.CreateDirectory(Path.Combine(extractRoot.FullName, "extractDir")); + var expectedAnonPathAbs = Path.Combine(extractDirAbs.FullName, "foo-an.dcm"); + + var testExtractFileMessage = new ExtractFileMessage { - // Arrange - // It sets up the necessary objects and state for the test. This - // includes creating a mock DICOM Anonymiser, setting file paths, - // and creating a DicomAnonymiserHost. - - GlobalOptions globals = new GlobalOptionsFactory().Load(nameof(Integration_HappyPath_MockAnonymiser)); - globals.FileSystemOptions!.FileSystemRoot = _dicomRoot.FullName; - - var extractRoot = Directory.CreateDirectory(Path.Combine(_tempTestDir.FullName, "extractRoot")); - globals.FileSystemOptions.ExtractRoot = extractRoot.FullName; - - // NOTE: The commented out code below is an alternative way to create - // a fake DICOM file, however, it is not used in this test. 
- // File.Create(_fakeDicom).Dispose(); - // File.SetAttributes(_fakeDicom, File.GetAttributes(_fakeDicom) | FileAttributes.ReadOnly); - - var dicomFile = new DicomFile(); - dicomFile.Dataset.Add(DicomTag.PatientID, "12345678"); - dicomFile.Dataset.Add(DicomTag.Modality, "CT"); - dicomFile.Dataset.Add(DicomTag.StudyInstanceUID, DicomUIDGenerator.GenerateDerivedFromUUID()); - dicomFile.Dataset.Add(DicomTag.SeriesInstanceUID, DicomUIDGenerator.GenerateDerivedFromUUID()); - dicomFile.Dataset.Add(DicomTag.SOPInstanceUID, DicomUIDGenerator.GenerateDerivedFromUUID()); - dicomFile.FileMetaInfo.MediaStorageSOPClassUID = DicomUID.SecondaryCaptureImageStorage; - dicomFile.FileMetaInfo.MediaStorageSOPInstanceUID = DicomUIDGenerator.GenerateDerivedFromUUID(); - dicomFile.FileMetaInfo.ImplementationClassUID = DicomUIDGenerator.GenerateDerivedFromUUID(); - dicomFile.FileMetaInfo.TransferSyntax = DicomTransferSyntax.ExplicitVRLittleEndian; - dicomFile.Save(_fakeDicom); - - File.SetAttributes(_fakeDicom, File.GetAttributes(_fakeDicom) | FileAttributes.ReadOnly); - - var extractDirAbs = Directory.CreateDirectory(Path.Combine(extractRoot.FullName, "extractDir")); - var expectedAnonPathAbs = Path.Combine(extractDirAbs.FullName, "foo-an.dcm"); - - var testExtractFileMessage = new ExtractFileMessage - { - ExtractionJobIdentifier = Guid.NewGuid(), - ProjectNumber = "1234", - ExtractionDirectory = extractDirAbs.Name, - Modality = "CT", - JobSubmittedAt = DateTime.UtcNow, - IsIdentifiableExtraction = false, - IsNoFilterExtraction = false, - - DicomFilePath = "foo.dcm", - OutputPath = "foo-an.dcm", - }; - - // The test uses the Moq library to create a mock implementation of - // the IDicomAnonymiser interface. This allows the test to control the - // behavior of the DICOM Anonymiser and verify that it is called with - // the correct arguments. 
- var mockAnonymiser = new Mock(MockBehavior.Strict); - mockAnonymiser - .Setup( - x => x.Anonymise( - It.Is(x => x.ExtractionJobIdentifier == testExtractFileMessage.ExtractionJobIdentifier), - It.Is(x => x.FullName == _fakeDicom), - It.Is(x => x.FullName == Path.Combine(extractDirAbs.FullName, "foo-an.dcm")), - out It.Ref.IsAny - ) + ExtractionJobIdentifier = Guid.NewGuid(), + ProjectNumber = "1234", + ExtractionDirectory = extractDirAbs.Name, + Modality = "CT", + JobSubmittedAt = DateTime.UtcNow, + IsIdentifiableExtraction = false, + IsNoFilterExtraction = false, + + DicomFilePath = "foo.dcm", + OutputPath = "foo-an.dcm", + }; + + // The test uses the Moq library to create a mock implementation of + // the IDicomAnonymiser interface. This allows the test to control the + // behavior of the DICOM Anonymiser and verify that it is called with + // the correct arguments. + var mockAnonymiser = new Mock(MockBehavior.Strict); + mockAnonymiser + .Setup( + x => x.Anonymise( + It.Is(x => x.ExtractionJobIdentifier == testExtractFileMessage.ExtractionJobIdentifier), + It.Is(x => x.FullName == _fakeDicom), + It.Is(x => x.FullName == Path.Combine(extractDirAbs.FullName, "foo-an.dcm")), + out It.Ref.IsAny ) - .Callback(() => File.Create(expectedAnonPathAbs).Dispose()) - .Returns(ExtractedFileStatus.Anonymised); + ) + .Callback(() => File.Create(expectedAnonPathAbs).Dispose()) + .Returns(ExtractedFileStatus.Anonymised); - var statusExchange = globals.DicomAnonymiserOptions!.ExtractFileStatusProducerOptions!.ExchangeName!; - var successQueue = globals.IsIdentifiableServiceOptions!.QueueName!; - var failureQueue = globals.CohortPackagerOptions!.NoVerifyStatusOptions!.QueueName!; + var statusExchange = globals.DicomAnonymiserOptions!.ExtractFileStatusProducerOptions!.ExchangeName!; + var successQueue = globals.IsIdentifiableServiceOptions!.QueueName!; + var failureQueue = globals.CohortPackagerOptions!.NoVerifyStatusOptions!.QueueName!; - List statusMessages = []; + List 
statusMessages = []; - using ( - var tester = new MicroserviceTester( - globals.RabbitOptions!, - globals.DicomAnonymiserOptions.AnonFileConsumerOptions! - ) + using ( + var tester = new MicroserviceTester( + globals.RabbitOptions!, + globals.DicomAnonymiserOptions.AnonFileConsumerOptions! ) - { - tester.CreateExchange(statusExchange, successQueue, isSecondaryBinding: false, routingKey: "verify"); - tester.CreateExchange(statusExchange, failureQueue, isSecondaryBinding: true, routingKey: "noverify"); - - tester.SendMessage(globals.DicomAnonymiserOptions.AnonFileConsumerOptions!, new MessageHeader(), testExtractFileMessage); + ) + { + tester.CreateExchange(statusExchange, successQueue, isSecondaryBinding: false, routingKey: "verify"); + tester.CreateExchange(statusExchange, failureQueue, isSecondaryBinding: true, routingKey: "noverify"); - var host = new DicomAnonymiserHost(globals, mockAnonymiser.Object); + tester.SendMessage(globals.DicomAnonymiserOptions.AnonFileConsumerOptions!, new MessageHeader(), testExtractFileMessage); - // Act - // It starts the DicomAnonymiserHost and waits for it to process - //a message. + var host = new DicomAnonymiserHost(globals, mockAnonymiser.Object); - host.Start(); + // Act + // It starts the DicomAnonymiserHost and waits for it to process + //a message. 
- var timeoutSecs = 10; + host.Start(); - while (statusMessages.Count == 0 && timeoutSecs > 0) - { - statusMessages.AddRange(tester.ConsumeMessages(successQueue).Select(x => x.Item2)); - statusMessages.AddRange(tester.ConsumeMessages(failureQueue).Select(x => x.Item2)); + var timeoutSecs = 10; - --timeoutSecs; - if (statusMessages.Count == 0) - Thread.Sleep(TimeSpan.FromSeconds(1)); - } + while (statusMessages.Count == 0 && timeoutSecs > 0) + { + statusMessages.AddRange(tester.ConsumeMessages(successQueue).Select(x => x.Item2)); + statusMessages.AddRange(tester.ConsumeMessages(failureQueue).Select(x => x.Item2)); - host.Stop("Test end"); + --timeoutSecs; + if (statusMessages.Count == 0) + Thread.Sleep(TimeSpan.FromSeconds(1)); } - // Assert - // It checks that the expected outcome has occurred. In this case, it - // checks that the status message indicates that the file was anonymised - // and that the anonymised file exists. - - var statusMessage = statusMessages.Single(); - Assert.Multiple(() => - { - Assert.That(statusMessage.Status, Is.EqualTo(ExtractedFileStatus.Anonymised), statusMessage.StatusMessage); - Assert.That(File.Exists(expectedAnonPathAbs), Is.True); - }); + host.Stop("Test end"); } - #endregion + // Assert + // It checks that the expected outcome has occurred. In this case, it + // checks that the status message indicates that the file was anonymised + // and that the anonymised file exists. 
+ + var statusMessage = statusMessages.Single(); + Assert.Multiple(() => + { + Assert.That(statusMessage.Status, Is.EqualTo(ExtractedFileStatus.Anonymised), statusMessage.StatusMessage); + Assert.That(File.Exists(expectedAnonPathAbs), Is.True); + }); } + + #endregion } diff --git a/tests/SmiServices.IntegrationTests/Microservices/DicomRelationalMapper/DLEBenchmarkingTests/HowFastIsDLETest.cs b/tests/SmiServices.IntegrationTests/Microservices/DicomRelationalMapper/DLEBenchmarkingTests/HowFastIsDLETest.cs index 4e0b756b8..efdf4334e 100644 --- a/tests/SmiServices.IntegrationTests/Microservices/DicomRelationalMapper/DLEBenchmarkingTests/HowFastIsDLETest.cs +++ b/tests/SmiServices.IntegrationTests/Microservices/DicomRelationalMapper/DLEBenchmarkingTests/HowFastIsDLETest.cs @@ -35,237 +35,235 @@ using Tests.Common; using DatabaseType = FAnsi.DatabaseType; -namespace SmiServices.IntegrationTests.Microservices.DicomRelationalMapper.DLEBenchmarkingTests -{ - [RequiresRelationalDb(DatabaseType.MicrosoftSQLServer)] - public class HowFastIsDLETest : DatabaseTests - { - private GlobalOptions _globals = null!; - private DicomRelationalMapperTestHelper _helper = null!; - private IDataLoadInfo _dli = null!; +namespace SmiServices.IntegrationTests.Microservices.DicomRelationalMapper.DLEBenchmarkingTests; - readonly string _templateXml = File.ReadAllText(Path.Combine(TestContext.CurrentContext.TestDirectory, @"CT.it")); +[RequiresRelationalDb(DatabaseType.MicrosoftSQLServer)] +public class HowFastIsDLETest : DatabaseTests +{ + private GlobalOptions _globals = null!; + private DicomRelationalMapperTestHelper _helper = null!; + private IDataLoadInfo _dli = null!; - [OneTimeSetUp] - public void SetupLogging() - { - var lm = CatalogueRepository.GetDefaultLogManager(); - lm.CreateNewLoggingTaskIfNotExists("aaa"); - _dli = lm.CreateDataLoadInfo("aaa", "HowFastIsDLETest", "Test", "", true); - } + readonly string _templateXml = 
File.ReadAllText(Path.Combine(TestContext.CurrentContext.TestDirectory, @"CT.it")); - [Test] - public void Test_NullRoot() - { - var _ = new DicomDatasetCollectionSource - { - ArchiveRoot = null - }; - } + [OneTimeSetUp] + public void SetupLogging() + { + var lm = CatalogueRepository.GetDefaultLogManager(); + lm.CreateNewLoggingTaskIfNotExists("aaa"); + _dli = lm.CreateDataLoadInfo("aaa", "HowFastIsDLETest", "Test", "", true); + } - [TestCase(DatabaseType.MySql, 600), RequiresRabbit] - [TestCase(DatabaseType.MicrosoftSQLServer, 600), RequiresRabbit] - public void TestLargeImageDatasets(DatabaseType databaseType, int numberOfImages) + [Test] + public void Test_NullRoot() + { + var _ = new DicomDatasetCollectionSource { - foreach (Pipeline p in CatalogueRepository.GetAllObjects()) - p.DeleteInDatabase(); + ArchiveRoot = null + }; + } - var db = GetCleanedServer(databaseType); + [TestCase(DatabaseType.MySql, 600), RequiresRabbit] + [TestCase(DatabaseType.MicrosoftSQLServer, 600), RequiresRabbit] + public void TestLargeImageDatasets(DatabaseType databaseType, int numberOfImages) + { + foreach (Pipeline p in CatalogueRepository.GetAllObjects()) + p.DeleteInDatabase(); - if (CatalogueRepository.GetDefaultFor(PermissableDefaults.RAWDataLoadServer) is not null) - CatalogueRepository.ClearDefault(PermissableDefaults.RAWDataLoadServer); + var db = GetCleanedServer(databaseType); - var template = ImageTableTemplateCollection.LoadFrom(_templateXml); + if (CatalogueRepository.GetDefaultFor(PermissableDefaults.RAWDataLoadServer) is not null) + CatalogueRepository.ClearDefault(PermissableDefaults.RAWDataLoadServer); - _globals = new GlobalOptionsFactory().Load(nameof(TestLargeImageDatasets)); + var template = ImageTableTemplateCollection.LoadFrom(_templateXml); - _globals.DicomRelationalMapperOptions!.DatabaseNamerType = typeof(MyFixedStagingDatabaseNamer).FullName; - _globals.DicomRelationalMapperOptions.QoSPrefetchCount = ushort.MaxValue; - 
_globals.DicomRelationalMapperOptions.MinimumBatchSize = numberOfImages; - _globals.DicomRelationalMapperOptions.UseInsertIntoForRAWMigration = true; + _globals = new GlobalOptionsFactory().Load(nameof(TestLargeImageDatasets)); - _helper = new DicomRelationalMapperTestHelper(); - _helper.SetupSuite(db, RepositoryLocator, _globals, typeof(DicomDatasetCollectionSource), root: null, template: template, persistentRaw: true); + _globals.DicomRelationalMapperOptions!.DatabaseNamerType = typeof(MyFixedStagingDatabaseNamer).FullName; + _globals.DicomRelationalMapperOptions.QoSPrefetchCount = ushort.MaxValue; + _globals.DicomRelationalMapperOptions.MinimumBatchSize = numberOfImages; + _globals.DicomRelationalMapperOptions.UseInsertIntoForRAWMigration = true; - //do not use an explicit RAW data load server - if (CatalogueRepository.GetDefaultFor(PermissableDefaults.RAWDataLoadServer) is not null) - CatalogueRepository.ClearDefault(PermissableDefaults.RAWDataLoadServer); + _helper = new DicomRelationalMapperTestHelper(); + _helper.SetupSuite(db, RepositoryLocator, _globals, typeof(DicomDatasetCollectionSource), root: null, template: template, persistentRaw: true); - Random r = new(123); + //do not use an explicit RAW data load server + if (CatalogueRepository.GetDefaultFor(PermissableDefaults.RAWDataLoadServer) is not null) + CatalogueRepository.ClearDefault(PermissableDefaults.RAWDataLoadServer); - List allImages; + Random r = new(123); - using (var generator = new DicomDataGenerator(r, TestContext.CurrentContext.TestDirectory, "CT") { NoPixels = true }) - allImages = generator.GenerateImages(numberOfImages, r); + List allImages; - Assert.That(allImages, Has.Count.EqualTo(numberOfImages)); + using (var generator = new DicomDataGenerator(r, TestContext.CurrentContext.TestDirectory, "CT") { NoPixels = true }) + allImages = generator.GenerateImages(numberOfImages, r); - using (var tester = new MicroserviceTester(_globals.RabbitOptions!, _globals.DicomRelationalMapperOptions)) - 
{ - using var host = new DicomRelationalMapperHost(_globals); - tester.SendMessages(_globals.DicomRelationalMapperOptions, allImages.Select(GetFileMessageForDataset), true); + Assert.That(allImages, Has.Count.EqualTo(numberOfImages)); - Console.WriteLine("Starting Host"); - host.Start(); + using (var tester = new MicroserviceTester(_globals.RabbitOptions!, _globals.DicomRelationalMapperOptions)) + { + using var host = new DicomRelationalMapperHost(_globals); + tester.SendMessages(_globals.DicomRelationalMapperOptions, allImages.Select(GetFileMessageForDataset), true); - Stopwatch sw = Stopwatch.StartNew(); - TestTimelineAwaiter.Await(() => host.Consumer!.AckCount == numberOfImages, null, 20 * 60 * 100); //1 minute + Console.WriteLine("Starting Host"); + host.Start(); - Console.Write($"Time For DLE:{sw.Elapsed.TotalSeconds}s"); - host.Stop("Test finished"); - } + Stopwatch sw = Stopwatch.StartNew(); + TestTimelineAwaiter.Await(() => host.Consumer!.AckCount == numberOfImages, null, 20 * 60 * 100); //1 minute - foreach (Pipeline allObject in CatalogueRepository.GetAllObjects()) - allObject.DeleteInDatabase(); + Console.Write($"Time For DLE:{sw.Elapsed.TotalSeconds}s"); + host.Stop("Test finished"); } + foreach (Pipeline allObject in CatalogueRepository.GetAllObjects()) + allObject.DeleteInDatabase(); + } - [TestCase(500)] - public void TestGetChunkOnly(int numberOfImages) - { - Random r = new(123); - List allImages; + [TestCase(500)] + public void TestGetChunkOnly(int numberOfImages) + { + Random r = new(123); - using (DicomDataGenerator g = new(r, null, "CT", "MR")) - allImages = g.GenerateImages(numberOfImages, r); + List allImages; - DicomDatasetCollectionSource source = new(); - source.PreInitialize( - new ExplicitListDicomDatasetWorklist([.. 
allImages], "amagad.dcm", - new Dictionary { { "MessageGuid", "0x123" } }), - ThrowImmediatelyDataLoadEventListener.Quiet); - source.FilenameField = "gggg"; + using (DicomDataGenerator g = new(r, null, "CT", "MR")) + allImages = g.GenerateImages(numberOfImages, r); - Stopwatch sw = new(); - sw.Start(); + DicomDatasetCollectionSource source = new(); + source.PreInitialize( + new ExplicitListDicomDatasetWorklist([.. allImages], "amagad.dcm", + new Dictionary { { "MessageGuid", "0x123" } }), + ThrowImmediatelyDataLoadEventListener.Quiet); + source.FilenameField = "gggg"; - var dt = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); - source.Dispose(ThrowImmediatelyDataLoadEventListener.Noisy, null); + Stopwatch sw = new(); + sw.Start(); - sw.Stop(); - Console.WriteLine($"GetChunk took {sw.ElapsedMilliseconds}"); + var dt = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); + source.Dispose(ThrowImmediatelyDataLoadEventListener.Noisy, null); - Assert.That(dt.Rows, Has.Count.EqualTo(numberOfImages)); - Assert.That(dt.Rows.Cast().Select(static w => w["SOPInstanceUID"]).Distinct().Count(), Is.EqualTo(numberOfImages)); - } + sw.Stop(); + Console.WriteLine($"GetChunk took {sw.ElapsedMilliseconds}"); - [TestCase(DatabaseType.MySql, 500)] - [TestCase(DatabaseType.MicrosoftSQLServer, 500)] - public void TestBulkInsertOnly(DatabaseType databaseType, int numberOfImages) - { - foreach (Pipeline p in CatalogueRepository.GetAllObjects()) - p.DeleteInDatabase(); + Assert.That(dt.Rows, Has.Count.EqualTo(numberOfImages)); + Assert.That(dt.Rows.Cast().Select(static w => w["SOPInstanceUID"]).Distinct().Count(), Is.EqualTo(numberOfImages)); + } + + [TestCase(DatabaseType.MySql, 500)] + [TestCase(DatabaseType.MicrosoftSQLServer, 500)] + public void TestBulkInsertOnly(DatabaseType databaseType, int numberOfImages) + { + foreach (Pipeline p in CatalogueRepository.GetAllObjects()) + p.DeleteInDatabase(); 
- var db = GetCleanedServer(databaseType); + var db = GetCleanedServer(databaseType); - var template = ImageTableTemplateCollection.LoadFrom(_templateXml); + var template = ImageTableTemplateCollection.LoadFrom(_templateXml); - _globals = new GlobalOptionsFactory().Load(nameof(TestBulkInsertOnly)); + _globals = new GlobalOptionsFactory().Load(nameof(TestBulkInsertOnly)); - _globals.DicomRelationalMapperOptions!.DatabaseNamerType = typeof(MyFixedStagingDatabaseNamer).FullName; - _globals.DicomRelationalMapperOptions.QoSPrefetchCount = ushort.MaxValue; - _globals.DicomRelationalMapperOptions.MinimumBatchSize = numberOfImages; - _globals.DicomRelationalMapperOptions.UseInsertIntoForRAWMigration = true; + _globals.DicomRelationalMapperOptions!.DatabaseNamerType = typeof(MyFixedStagingDatabaseNamer).FullName; + _globals.DicomRelationalMapperOptions.QoSPrefetchCount = ushort.MaxValue; + _globals.DicomRelationalMapperOptions.MinimumBatchSize = numberOfImages; + _globals.DicomRelationalMapperOptions.UseInsertIntoForRAWMigration = true; - _helper = new DicomRelationalMapperTestHelper(); - _helper.SetupSuite(db, RepositoryLocator, _globals, typeof(DicomDatasetCollectionSource), root: null, template: template, persistentRaw: true); + _helper = new DicomRelationalMapperTestHelper(); + _helper.SetupSuite(db, RepositoryLocator, _globals, typeof(DicomDatasetCollectionSource), root: null, template: template, persistentRaw: true); - //do not use an explicit RAW data load server - if (CatalogueRepository.GetDefaultFor(PermissableDefaults.RAWDataLoadServer) is not null) - CatalogueRepository.ClearDefault(PermissableDefaults.RAWDataLoadServer); + //do not use an explicit RAW data load server + if (CatalogueRepository.GetDefaultFor(PermissableDefaults.RAWDataLoadServer) is not null) + CatalogueRepository.ClearDefault(PermissableDefaults.RAWDataLoadServer); - Random r = new(123); + Random r = new(123); - List allImages; + List allImages; - using (var generator = new 
DicomDataGenerator(r, null, "CT")) - allImages = generator.GenerateImages(numberOfImages, r); + using (var generator = new DicomDataGenerator(r, null, "CT")) + allImages = generator.GenerateImages(numberOfImages, r); - DicomDatasetCollectionSource source = new(); - source.PreInitialize( - new ExplicitListDicomDatasetWorklist([.. allImages], "amagad.dcm", - new Dictionary { { "MessageGuid", "0x123" } }), - ThrowImmediatelyDataLoadEventListener.Quiet); - source.FilenameField = "gggg"; + DicomDatasetCollectionSource source = new(); + source.PreInitialize( + new ExplicitListDicomDatasetWorklist([.. allImages], "amagad.dcm", + new Dictionary { { "MessageGuid", "0x123" } }), + ThrowImmediatelyDataLoadEventListener.Quiet); + source.FilenameField = "gggg"; - var dt = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); - source.Dispose(ThrowImmediatelyDataLoadEventListener.Noisy, null); + var dt = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); + source.Dispose(ThrowImmediatelyDataLoadEventListener.Noisy, null); - Assert.Multiple(() => - { - Assert.That(allImages, Has.Count.EqualTo(numberOfImages)); - Assert.That(dt.Rows, Has.Count.EqualTo(numberOfImages)); - }); + Assert.Multiple(() => + { + Assert.That(allImages, Has.Count.EqualTo(numberOfImages)); + Assert.That(dt.Rows, Has.Count.EqualTo(numberOfImages)); + }); - var tables = _helper.LoadMetadata!.GetDistinctTableInfoList(false); + var tables = _helper.LoadMetadata!.GetDistinctTableInfoList(false); - var config = new HICDatabaseConfiguration(_helper.LoadMetadata, new SuffixBasedNamer()); + var config = new HICDatabaseConfiguration(_helper.LoadMetadata, new SuffixBasedNamer()); - var job = Mock.Of( - j => j.RegularTablesToLoad == tables.Cast().ToList() && - j.DataLoadInfo == _dli && - j.Configuration == config); + var job = Mock.Of( + j => j.RegularTablesToLoad == tables.Cast().ToList() && + j.DataLoadInfo == _dli && + 
j.Configuration == config); - var attacher = new AutoRoutingAttacher - { - Job = job - }; + var attacher = new AutoRoutingAttacher + { + Job = job + }; - //Drop Primary Keys - using (var con = db.Server.GetConnection()) - { - con.Open(); + //Drop Primary Keys + using (var con = db.Server.GetConnection()) + { + con.Open(); - var cmd = db.Server.GetCommand( - databaseType == DatabaseType.MicrosoftSQLServer ? - @"ALTER TABLE ImageTable DROP CONSTRAINT PK_ImageTable + var cmd = db.Server.GetCommand( + databaseType == DatabaseType.MicrosoftSQLServer ? + @"ALTER TABLE ImageTable DROP CONSTRAINT PK_ImageTable ALTER TABLE SeriesTable DROP CONSTRAINT PK_SeriesTable ALTER TABLE StudyTable DROP CONSTRAINT PK_StudyTable" : - @"ALTER TABLE ImageTable DROP PRIMARY KEY; + @"ALTER TABLE ImageTable DROP PRIMARY KEY; ALTER TABLE SeriesTable DROP PRIMARY KEY; ALTER TABLE StudyTable DROP PRIMARY KEY;" - , con); - - cmd.ExecuteNonQuery(); - } - - attacher.Initialize(LoadDirectory.CreateDirectoryStructure(new DirectoryInfo(TestContext.CurrentContext.TestDirectory), "IgnoreMe", true), db); - try - { - attacher.ProcessPipelineData(dt, ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); - attacher.Dispose(ThrowImmediatelyDataLoadEventListener.Quiet, null); - } - catch (Exception e) - { - attacher.Dispose(ThrowImmediatelyDataLoadEventListener.Quiet, e); - throw; - } - - foreach (var tableInfo in tables) - Assert.That(tableInfo.Discover(DataAccessContext.InternalDataProcessing).GetRowCount(), Is.EqualTo(numberOfImages), - "Row count was wrong for " + tableInfo); - - foreach (Pipeline allObject in CatalogueRepository.GetAllObjects()) - allObject.DeleteInDatabase(); + , con); + + cmd.ExecuteNonQuery(); } - private DicomFileMessage GetFileMessageForDataset(DicomDataset dicomDataset) + attacher.Initialize(LoadDirectory.CreateDirectoryStructure(new DirectoryInfo(TestContext.CurrentContext.TestDirectory), "IgnoreMe", true), db); + try { - - var root = 
TestContext.CurrentContext.TestDirectory; - - var f = Path.GetRandomFileName(); - var msg = new DicomFileMessage(root, Path.Combine(root, $"{f}.dcm")) - { - SeriesInstanceUID = dicomDataset.GetString(DicomTag.SeriesInstanceUID), - StudyInstanceUID = dicomDataset.GetString(DicomTag.StudyInstanceUID), - SOPInstanceUID = dicomDataset.GetString(DicomTag.SOPInstanceUID), - DicomDataset = DicomTypeTranslater.SerializeDatasetToJson(dicomDataset) - }; - return msg; + attacher.ProcessPipelineData(dt, ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); + attacher.Dispose(ThrowImmediatelyDataLoadEventListener.Quiet, null); } + catch (Exception e) + { + attacher.Dispose(ThrowImmediatelyDataLoadEventListener.Quiet, e); + throw; + } + + foreach (var tableInfo in tables) + Assert.That(tableInfo.Discover(DataAccessContext.InternalDataProcessing).GetRowCount(), Is.EqualTo(numberOfImages), + "Row count was wrong for " + tableInfo); + + foreach (Pipeline allObject in CatalogueRepository.GetAllObjects()) + allObject.DeleteInDatabase(); } + private DicomFileMessage GetFileMessageForDataset(DicomDataset dicomDataset) + { + + var root = TestContext.CurrentContext.TestDirectory; + + var f = Path.GetRandomFileName(); + var msg = new DicomFileMessage(root, Path.Combine(root, $"{f}.dcm")) + { + SeriesInstanceUID = dicomDataset.GetString(DicomTag.SeriesInstanceUID), + StudyInstanceUID = dicomDataset.GetString(DicomTag.StudyInstanceUID), + SOPInstanceUID = dicomDataset.GetString(DicomTag.SOPInstanceUID), + DicomDataset = DicomTypeTranslater.SerializeDatasetToJson(dicomDataset) + }; + return msg; + } } diff --git a/tests/SmiServices.IntegrationTests/Microservices/DicomRelationalMapper/DicomRelationalMapperHostTests.cs b/tests/SmiServices.IntegrationTests/Microservices/DicomRelationalMapper/DicomRelationalMapperHostTests.cs index 08bdc8ab5..6d528ecfe 100644 --- a/tests/SmiServices.IntegrationTests/Microservices/DicomRelationalMapper/DicomRelationalMapperHostTests.cs 
+++ b/tests/SmiServices.IntegrationTests/Microservices/DicomRelationalMapper/DicomRelationalMapperHostTests.cs @@ -10,59 +10,58 @@ using System.Data; using Tests.Common; -namespace SmiServices.IntegrationTests.Microservices.DicomRelationalMapper +namespace SmiServices.IntegrationTests.Microservices.DicomRelationalMapper; + +[RequiresRabbit, RequiresRelationalDb(DatabaseType.MicrosoftSQLServer)] +public class DicomRelationalMapperHostTests : DatabaseTests { - [RequiresRabbit, RequiresRelationalDb(DatabaseType.MicrosoftSQLServer)] - public class DicomRelationalMapperHostTests : DatabaseTests + [TestCase(DatabaseType.MySql, "GuidDatabaseNamer", typeof(GuidDatabaseNamer))] + [TestCase(DatabaseType.MicrosoftSQLServer, "GuidDatabaseNamer", typeof(GuidDatabaseNamer))] + [TestCase(DatabaseType.MySql, "MyFixedStagingDatabaseNamer", typeof(MyFixedStagingDatabaseNamer))] + [TestCase(DatabaseType.MicrosoftSQLServer, "MyFixedStagingDatabaseNamer", typeof(MyFixedStagingDatabaseNamer))] + public void TestCreatingNamer_CorrectType(DatabaseType dbType, string typeName, Type expectedType) { - [TestCase(DatabaseType.MySql, "GuidDatabaseNamer", typeof(GuidDatabaseNamer))] - [TestCase(DatabaseType.MicrosoftSQLServer, "GuidDatabaseNamer", typeof(GuidDatabaseNamer))] - [TestCase(DatabaseType.MySql, "MyFixedStagingDatabaseNamer", typeof(MyFixedStagingDatabaseNamer))] - [TestCase(DatabaseType.MicrosoftSQLServer, "MyFixedStagingDatabaseNamer", typeof(MyFixedStagingDatabaseNamer))] - public void TestCreatingNamer_CorrectType(DatabaseType dbType, string typeName, Type expectedType) - { - var db = GetCleanedServer(dbType); + var db = GetCleanedServer(dbType); - var dt = new DataTable(); - dt.Columns.Add("Hi"); - dt.Rows.Add("There"); + var dt = new DataTable(); + dt.Columns.Add("Hi"); + dt.Rows.Add("There"); - var tbl = db.CreateTable("DicomRelationalMapperHostTests", dt); + var tbl = db.CreateTable("DicomRelationalMapperHostTests", dt); - var cata = Import(tbl); + var cata = Import(tbl); - 
var globals = new GlobalOptionsFactory().Load(nameof(TestCreatingNamer_CorrectType)); - var consumerOptions = globals.DicomRelationalMapperOptions; + var globals = new GlobalOptionsFactory().Load(nameof(TestCreatingNamer_CorrectType)); + var consumerOptions = globals.DicomRelationalMapperOptions; - var lmd = new LoadMetadata(CatalogueRepository, "MyLoad"); - lmd.LinkToCatalogue(cata); - cata.SaveToDatabase(); + var lmd = new LoadMetadata(CatalogueRepository, "MyLoad"); + lmd.LinkToCatalogue(cata); + cata.SaveToDatabase(); - consumerOptions!.LoadMetadataId = lmd.ID; - consumerOptions.DatabaseNamerType = typeName; - consumerOptions.Guid = Guid.Empty; + consumerOptions!.LoadMetadataId = lmd.ID; + consumerOptions.DatabaseNamerType = typeName; + consumerOptions.Guid = Guid.Empty; - if (globals.RDMPOptions is null) - throw new ApplicationException("RDMPOptions null"); + if (globals.RDMPOptions is null) + throw new ApplicationException("RDMPOptions null"); - if (CatalogueRepository is ITableRepository crtr) - globals.RDMPOptions.CatalogueConnectionString = crtr.DiscoveredServer.Builder.ConnectionString; - if (DataExportRepository is ITableRepository dertr) - globals.RDMPOptions.DataExportConnectionString = dertr.DiscoveredServer.Builder.ConnectionString; + if (CatalogueRepository is ITableRepository crtr) + globals.RDMPOptions.CatalogueConnectionString = crtr.DiscoveredServer.Builder.ConnectionString; + if (DataExportRepository is ITableRepository dertr) + globals.RDMPOptions.DataExportConnectionString = dertr.DiscoveredServer.Builder.ConnectionString; - using (new MicroserviceTester(globals.RabbitOptions ?? throw new InvalidOperationException(), globals.DicomRelationalMapperOptions!)) - { - using var host = new DicomRelationalMapperHost(globals); - host.Start(); + using (new MicroserviceTester(globals.RabbitOptions ?? 
throw new InvalidOperationException(), globals.DicomRelationalMapperOptions!)) + { + using var host = new DicomRelationalMapperHost(globals); + host.Start(); - Assert.Multiple(() => - { - Assert.That(host.Consumer?.DatabaseNamer.GetType(), Is.EqualTo(expectedType)); - Assert.That(host, Is.Not.Null); - }); + Assert.Multiple(() => + { + Assert.That(host.Consumer?.DatabaseNamer.GetType(), Is.EqualTo(expectedType)); + Assert.That(host, Is.Not.Null); + }); - host.Stop("Test finished"); - } + host.Stop("Test finished"); } } } diff --git a/tests/SmiServices.IntegrationTests/Microservices/DicomRelationalMapper/DicomRelationalMapperTests.cs b/tests/SmiServices.IntegrationTests/Microservices/DicomRelationalMapper/DicomRelationalMapperTests.cs index d938a7192..c630e3a9f 100644 --- a/tests/SmiServices.IntegrationTests/Microservices/DicomRelationalMapper/DicomRelationalMapperTests.cs +++ b/tests/SmiServices.IntegrationTests/Microservices/DicomRelationalMapper/DicomRelationalMapperTests.cs @@ -19,264 +19,263 @@ using Tests.Common; using DatabaseType = FAnsi.DatabaseType; -namespace SmiServices.IntegrationTests.Microservices.DicomRelationalMapper +namespace SmiServices.IntegrationTests.Microservices.DicomRelationalMapper; + +[RequiresRabbit, RequiresRelationalDb(DatabaseType.MicrosoftSQLServer)] +public class DicomRelationalMapperTests : DatabaseTests { - [RequiresRabbit, RequiresRelationalDb(DatabaseType.MicrosoftSQLServer)] - public class DicomRelationalMapperTests : DatabaseTests + private DicomRelationalMapperTestHelper _helper = null!; + private GlobalOptions _globals = null!; + + [SetUp] + public void Setup() { - private DicomRelationalMapperTestHelper _helper = null!; - private GlobalOptions _globals = null!; + BlitzMainDataTables(); - [SetUp] - public void Setup() - { - BlitzMainDataTables(); + _globals = new GlobalOptionsFactory().Load(nameof(DicomRelationalMapperTests)); + var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); + _helper = new 
DicomRelationalMapperTestHelper(); + _helper.SetupSuite(db, RepositoryLocator, _globals, typeof(DicomDatasetCollectionSource)); - _globals = new GlobalOptionsFactory().Load(nameof(DicomRelationalMapperTests)); - var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); - _helper = new DicomRelationalMapperTestHelper(); - _helper.SetupSuite(db, RepositoryLocator, _globals, typeof(DicomDatasetCollectionSource)); + } - } + [Test] + public void Test_DodgyTagNames() + { + _helper.TruncateTablesIfExists(); + + DirectoryInfo d = new(Path.Combine(TestContext.CurrentContext.TestDirectory, nameof(Test_DodgyTagNames))); + d.Create(); - [Test] - public void Test_DodgyTagNames() + var td = new TestData(); + var fi = td.Create(new FileInfo(Path.Combine(d.FullName, "MyTestFile.dcm"))); + var fi2 = td.Create(new FileInfo(Path.Combine(d.FullName, "MyTestFile2.dcm"))); + + DicomFile dcm; + + using (var stream = File.OpenRead(fi.FullName)) { - _helper.TruncateTablesIfExists(); + dcm = DicomFile.Open(stream); + // JS 2022-04-27 fo-dicom 4 version of this test used .Print, which is a group 0 tag disallowed in metadata in fo-dicom 5 + dcm.Dataset.AddOrUpdate(DicomTag.PrintPriority, "FISH"); + dcm.Dataset.AddOrUpdate(DicomTag.Date, new DateTime(2001, 01, 01)); + dcm.Save(fi2.FullName); + } - DirectoryInfo d = new(Path.Combine(TestContext.CurrentContext.TestDirectory, nameof(Test_DodgyTagNames))); - d.Create(); + var adder = new TagColumnAdder(DicomTypeTranslaterReader.GetColumnNameForTag(DicomTag.Date, false), "datetime2", _helper.ImageTableInfo, new AcceptAllCheckNotifier()); + adder.Execute(); - var td = new TestData(); - var fi = td.Create(new FileInfo(Path.Combine(d.FullName, "MyTestFile.dcm"))); - var fi2 = td.Create(new FileInfo(Path.Combine(d.FullName, "MyTestFile2.dcm"))); + adder = new TagColumnAdder(DicomTypeTranslaterReader.GetColumnNameForTag(DicomTag.PrintPriority, false), "varchar(max)", _helper.ImageTableInfo, new AcceptAllCheckNotifier()); + adder.Execute(); - DicomFile 
dcm; + fi.Delete(); + File.Move(fi2.FullName, fi.FullName); - using (var stream = File.OpenRead(fi.FullName)) - { - dcm = DicomFile.Open(stream); - // JS 2022-04-27 fo-dicom 4 version of this test used .Print, which is a group 0 tag disallowed in metadata in fo-dicom 5 - dcm.Dataset.AddOrUpdate(DicomTag.PrintPriority, "FISH"); - dcm.Dataset.AddOrUpdate(DicomTag.Date, new DateTime(2001, 01, 01)); - dcm.Save(fi2.FullName); - } + //creates the queues, exchanges and bindings + var tester = new MicroserviceTester(_globals.RabbitOptions!, _globals.DicomRelationalMapperOptions!); + tester.CreateExchange(_globals.RabbitOptions!.FatalLoggingExchange!, null); - var adder = new TagColumnAdder(DicomTypeTranslaterReader.GetColumnNameForTag(DicomTag.Date, false), "datetime2", _helper.ImageTableInfo, new AcceptAllCheckNotifier()); - adder.Execute(); + using (var host = new DicomRelationalMapperHost(_globals)) + { + host.Start(); - adder = new TagColumnAdder(DicomTypeTranslaterReader.GetColumnNameForTag(DicomTag.PrintPriority, false), "varchar(max)", _helper.ImageTableInfo, new AcceptAllCheckNotifier()); - adder.Execute(); + using var timeline = new TestTimeline(tester); + timeline.SendMessage(_globals.DicomRelationalMapperOptions!, DicomRelationalMapperTestHelper.GetDicomFileMessage(_globals.FileSystemOptions!.FileSystemRoot!, fi)); - fi.Delete(); - File.Move(fi2.FullName, fi.FullName); + //start the timeline + timeline.StartTimeline(); - //creates the queues, exchanges and bindings - var tester = new MicroserviceTester(_globals.RabbitOptions!, _globals.DicomRelationalMapperOptions!); - tester.CreateExchange(_globals.RabbitOptions!.FatalLoggingExchange!, null); + Thread.Sleep(TimeSpan.FromSeconds(10)); + TestTimelineAwaiter.Await(() => host.Consumer!.AckCount >= 1, null, 30000, () => host.Consumer!.DleErrors); - using (var host = new DicomRelationalMapperHost(_globals)) + Assert.Multiple(() => { - host.Start(); + Assert.That(_helper.SeriesTable!.GetRowCount(), Is.EqualTo(1), 
"SeriesTable did not have the expected number of rows in LIVE"); + Assert.That(_helper.StudyTable!.GetRowCount(), Is.EqualTo(1), "StudyTable did not have the expected number of rows in LIVE"); + Assert.That(_helper.ImageTable!.GetRowCount(), Is.EqualTo(1), "ImageTable did not have the expected number of rows in LIVE"); + }); - using var timeline = new TestTimeline(tester); - timeline.SendMessage(_globals.DicomRelationalMapperOptions!, DicomRelationalMapperTestHelper.GetDicomFileMessage(_globals.FileSystemOptions!.FileSystemRoot!, fi)); + host.Stop("Test end"); - //start the timeline - timeline.StartTimeline(); + } - Thread.Sleep(TimeSpan.FromSeconds(10)); - TestTimelineAwaiter.Await(() => host.Consumer!.AckCount >= 1, null, 30000, () => host.Consumer!.DleErrors); + tester.Shutdown(); + } - Assert.Multiple(() => - { - Assert.That(_helper.SeriesTable!.GetRowCount(), Is.EqualTo(1), "SeriesTable did not have the expected number of rows in LIVE"); - Assert.That(_helper.StudyTable!.GetRowCount(), Is.EqualTo(1), "StudyTable did not have the expected number of rows in LIVE"); - Assert.That(_helper.ImageTable!.GetRowCount(), Is.EqualTo(1), "ImageTable did not have the expected number of rows in LIVE"); - }); - host.Stop("Test end"); + [TestCase(1, false)] + [TestCase(1, true)] + [TestCase(10, false)] + public void TestLoadingOneImage_SingleFileMessage(int numberOfMessagesToSend, bool mixInATextFile) + { + _helper.TruncateTablesIfExists(); - } + DirectoryInfo d = new(Path.Combine(TestContext.CurrentContext.TestDirectory, nameof(TestLoadingOneImage_SingleFileMessage))); + d.Create(); - tester.Shutdown(); - } + var fi = new TestData().Create(new FileInfo(Path.Combine(d.FullName, "MyTestFile.dcm"))); - - [TestCase(1, false)] - [TestCase(1, true)] - [TestCase(10, false)] - public void TestLoadingOneImage_SingleFileMessage(int numberOfMessagesToSend, bool mixInATextFile) + if (mixInATextFile) { - _helper.TruncateTablesIfExists(); + var randomText = new 
FileInfo(Path.Combine(d.FullName, "RandomTextFile.dcm")); + File.WriteAllLines(randomText.FullName, ["I love dancing", "all around the world", "boy the world is a big place eh?"]); + } - DirectoryInfo d = new(Path.Combine(TestContext.CurrentContext.TestDirectory, nameof(TestLoadingOneImage_SingleFileMessage))); - d.Create(); + //creates the queues, exchanges and bindings + var tester = new MicroserviceTester(_globals.RabbitOptions!, _globals.DicomRelationalMapperOptions!); + tester.CreateExchange(_globals.RabbitOptions!.FatalLoggingExchange!, null); - var fi = new TestData().Create(new FileInfo(Path.Combine(d.FullName, "MyTestFile.dcm"))); + using (var host = new DicomRelationalMapperHost(_globals)) + { + host.Start(); - if (mixInATextFile) + using var timeline = new TestTimeline(tester); + //send the message 10 times over a 10 second period + for (int i = 0; i < numberOfMessagesToSend; i++) { - var randomText = new FileInfo(Path.Combine(d.FullName, "RandomTextFile.dcm")); - File.WriteAllLines(randomText.FullName, ["I love dancing", "all around the world", "boy the world is a big place eh?"]); + timeline + .SendMessage(_globals.DicomRelationalMapperOptions!, DicomRelationalMapperTestHelper.GetDicomFileMessage(_globals.FileSystemOptions!.FileSystemRoot!, fi)) + .Wait(1000); } - //creates the queues, exchanges and bindings - var tester = new MicroserviceTester(_globals.RabbitOptions!, _globals.DicomRelationalMapperOptions!); - tester.CreateExchange(_globals.RabbitOptions!.FatalLoggingExchange!, null); + //start the timeline + timeline.StartTimeline(); - using (var host = new DicomRelationalMapperHost(_globals)) - { - host.Start(); - - using var timeline = new TestTimeline(tester); - //send the message 10 times over a 10 second period - for (int i = 0; i < numberOfMessagesToSend; i++) - { - timeline - .SendMessage(_globals.DicomRelationalMapperOptions!, DicomRelationalMapperTestHelper.GetDicomFileMessage(_globals.FileSystemOptions!.FileSystemRoot!, fi)) - .Wait(1000); 
- } + Thread.Sleep(TimeSpan.FromSeconds(10)); + TestTimelineAwaiter.Await(() => host.Consumer!.AckCount >= numberOfMessagesToSend, null, 30000, () => host.Consumer!.DleErrors); - //start the timeline - timeline.StartTimeline(); + Assert.Multiple(() => + { + Assert.That(_helper.SeriesTable!.GetRowCount(), Is.EqualTo(1), "SeriesTable did not have the expected number of rows in LIVE"); + Assert.That(_helper.StudyTable!.GetRowCount(), Is.EqualTo(1), "StudyTable did not have the expected number of rows in LIVE"); + Assert.That(_helper.ImageTable!.GetRowCount(), Is.EqualTo(1), "ImageTable did not have the expected number of rows in LIVE"); + }); - Thread.Sleep(TimeSpan.FromSeconds(10)); - TestTimelineAwaiter.Await(() => host.Consumer!.AckCount >= numberOfMessagesToSend, null, 30000, () => host.Consumer!.DleErrors); + host.Stop("Test end"); - Assert.Multiple(() => - { - Assert.That(_helper.SeriesTable!.GetRowCount(), Is.EqualTo(1), "SeriesTable did not have the expected number of rows in LIVE"); - Assert.That(_helper.StudyTable!.GetRowCount(), Is.EqualTo(1), "StudyTable did not have the expected number of rows in LIVE"); - Assert.That(_helper.ImageTable!.GetRowCount(), Is.EqualTo(1), "ImageTable did not have the expected number of rows in LIVE"); - }); + } - host.Stop("Test end"); + tester.Shutdown(); + } - } + [Test] + public void TestLoadingOneImage_MileWideTest() + { + _helper.TruncateTablesIfExists(); - tester.Shutdown(); - } + DirectoryInfo d = new(Path.Combine(TestContext.CurrentContext.TestDirectory, nameof(TestLoadingOneImage_MileWideTest))); + d.Create(); - [Test] - public void TestLoadingOneImage_MileWideTest() - { - _helper.TruncateTablesIfExists(); + var r = new Random(5000); + FileInfo[] files; - DirectoryInfo d = new(Path.Combine(TestContext.CurrentContext.TestDirectory, nameof(TestLoadingOneImage_MileWideTest))); - d.Create(); + using (var g = new DicomDataGenerator(r, d.FullName, "CT")) + files = g.GenerateImageFiles(1, r).ToArray(); - var r = new 
Random(5000); - FileInfo[] files; + Assert.That(files, Has.Length.EqualTo(1)); - using (var g = new DicomDataGenerator(r, d.FullName, "CT")) - files = g.GenerateImageFiles(1, r).ToArray(); + var existingColumns = _helper.ImageTable!.DiscoverColumns(); - Assert.That(files, Has.Length.EqualTo(1)); + //Add 200 random tags + foreach (string tag in TagColumnAdder.GetAvailableTags().OrderBy(a => r.Next()).Take(200)) + { + string dataType; - var existingColumns = _helper.ImageTable!.DiscoverColumns(); + try + { + dataType = TagColumnAdder.GetDataTypeForTag(tag, MicrosoftSQLTypeTranslater.Instance); - //Add 200 random tags - foreach (string tag in TagColumnAdder.GetAvailableTags().OrderBy(a => r.Next()).Take(200)) + } + catch (Exception) { - string dataType; - - try - { - dataType = TagColumnAdder.GetDataTypeForTag(tag, MicrosoftSQLTypeTranslater.Instance); - - } - catch (Exception) - { - continue; - } - - if (existingColumns.Any(c => c.GetRuntimeName().Equals(tag))) - continue; - - var adder = new TagColumnAdder(tag, dataType, _helper.ImageTableInfo, new AcceptAllCheckNotifier()) - { - SkipChecksAndSynchronization = true - }; - adder.Execute(); + continue; } - new TableInfoSynchronizer(_helper.ImageTableInfo).Synchronize(new AcceptAllCheckNotifier()); - - //creates the queues, exchanges and bindings - var tester = new MicroserviceTester(_globals.RabbitOptions!, _globals.DicomRelationalMapperOptions!); - tester.CreateExchange(_globals.RabbitOptions!.FatalLoggingExchange!, null); + if (existingColumns.Any(c => c.GetRuntimeName().Equals(tag))) + continue; - using (var host = new DicomRelationalMapperHost(_globals)) + var adder = new TagColumnAdder(tag, dataType, _helper.ImageTableInfo, new AcceptAllCheckNotifier()) { - host.Start(); + SkipChecksAndSynchronization = true + }; + adder.Execute(); + } - using var timeline = new TestTimeline(tester); - foreach (var f in files) - timeline.SendMessage(_globals.DicomRelationalMapperOptions!, - 
DicomRelationalMapperTestHelper.GetDicomFileMessage(_globals.FileSystemOptions!.FileSystemRoot!, f)); + new TableInfoSynchronizer(_helper.ImageTableInfo).Synchronize(new AcceptAllCheckNotifier()); - //start the timeline - timeline.StartTimeline(); + //creates the queues, exchanges and bindings + var tester = new MicroserviceTester(_globals.RabbitOptions!, _globals.DicomRelationalMapperOptions!); + tester.CreateExchange(_globals.RabbitOptions!.FatalLoggingExchange!, null); - TestTimelineAwaiter.Await(() => host.Consumer!.MessagesProcessed == 1, null, 30000, () => host.Consumer!.DleErrors); + using (var host = new DicomRelationalMapperHost(_globals)) + { + host.Start(); - Assert.Multiple(() => - { - Assert.That(_helper.SeriesTable!.GetRowCount(), Is.GreaterThanOrEqualTo(1), "SeriesTable did not have the expected number of rows in LIVE"); - Assert.That(_helper.StudyTable!.GetRowCount(), Is.GreaterThanOrEqualTo(1), "StudyTable did not have the expected number of rows in LIVE"); - Assert.That(_helper.ImageTable.GetRowCount(), Is.EqualTo(1), "ImageTable did not have the expected number of rows in LIVE"); - }); + using var timeline = new TestTimeline(tester); + foreach (var f in files) + timeline.SendMessage(_globals.DicomRelationalMapperOptions!, + DicomRelationalMapperTestHelper.GetDicomFileMessage(_globals.FileSystemOptions!.FileSystemRoot!, f)); - host.Stop("Test end"); - } + //start the timeline + timeline.StartTimeline(); + + TestTimelineAwaiter.Await(() => host.Consumer!.MessagesProcessed == 1, null, 30000, () => host.Consumer!.DleErrors); - tester.Shutdown(); + Assert.Multiple(() => + { + Assert.That(_helper.SeriesTable!.GetRowCount(), Is.GreaterThanOrEqualTo(1), "SeriesTable did not have the expected number of rows in LIVE"); + Assert.That(_helper.StudyTable!.GetRowCount(), Is.GreaterThanOrEqualTo(1), "StudyTable did not have the expected number of rows in LIVE"); + Assert.That(_helper.ImageTable.GetRowCount(), Is.EqualTo(1), "ImageTable did not have the expected 
number of rows in LIVE"); + }); + host.Stop("Test end"); } - /// - /// Tests the abilities of the DLE to not load identical FileMessage - /// - [Test] - public void IdenticalDatasetsTest() - { - _helper.TruncateTablesIfExists(); + tester.Shutdown(); + + } - var ds = new DicomDataset(); - ds.AddOrUpdate(DicomTag.SeriesInstanceUID, "123"); - ds.AddOrUpdate(DicomTag.SOPInstanceUID, "123"); - ds.AddOrUpdate(DicomTag.StudyInstanceUID, "123"); - ds.AddOrUpdate(DicomTag.PatientID, "123"); + /// + /// Tests the abilities of the DLE to not load identical FileMessage + /// + [Test] + public void IdenticalDatasetsTest() + { + _helper.TruncateTablesIfExists(); - var msg1 = DicomRelationalMapperTestHelper.GetDicomFileMessage(ds, _globals.FileSystemOptions!.FileSystemRoot!, Path.Combine(_globals.FileSystemOptions.FileSystemRoot!, "mydicom.dcm")); - var msg2 = DicomRelationalMapperTestHelper.GetDicomFileMessage(ds, _globals.FileSystemOptions.FileSystemRoot!, Path.Combine(_globals.FileSystemOptions.FileSystemRoot!, "mydicom.dcm")); + var ds = new DicomDataset(); + ds.AddOrUpdate(DicomTag.SeriesInstanceUID, "123"); + ds.AddOrUpdate(DicomTag.SOPInstanceUID, "123"); + ds.AddOrUpdate(DicomTag.StudyInstanceUID, "123"); + ds.AddOrUpdate(DicomTag.PatientID, "123"); + var msg1 = DicomRelationalMapperTestHelper.GetDicomFileMessage(ds, _globals.FileSystemOptions!.FileSystemRoot!, Path.Combine(_globals.FileSystemOptions.FileSystemRoot!, "mydicom.dcm")); + var msg2 = DicomRelationalMapperTestHelper.GetDicomFileMessage(ds, _globals.FileSystemOptions.FileSystemRoot!, Path.Combine(_globals.FileSystemOptions.FileSystemRoot!, "mydicom.dcm")); - //creates the queues, exchanges and bindings - using var tester = new MicroserviceTester(_globals.RabbitOptions!, _globals.DicomRelationalMapperOptions!); - tester.CreateExchange(_globals.RabbitOptions!.FatalLoggingExchange!, null); - tester.SendMessage(_globals.DicomRelationalMapperOptions!, msg1); - 
tester.SendMessage(_globals.DicomRelationalMapperOptions!, msg2); - _globals.DicomRelationalMapperOptions!.RunChecks = true; + //creates the queues, exchanges and bindings + using var tester = new MicroserviceTester(_globals.RabbitOptions!, _globals.DicomRelationalMapperOptions!); + tester.CreateExchange(_globals.RabbitOptions!.FatalLoggingExchange!, null); + tester.SendMessage(_globals.DicomRelationalMapperOptions!, msg1); + tester.SendMessage(_globals.DicomRelationalMapperOptions!, msg2); - using (var host = new DicomRelationalMapperHost(_globals)) - { - host.Start(); + _globals.DicomRelationalMapperOptions!.RunChecks = true; + + using (var host = new DicomRelationalMapperHost(_globals)) + { + host.Start(); - TestTimelineAwaiter.Await(() => host.Consumer!.MessagesProcessed == 2, null, 30000, () => host.Consumer!.DleErrors); + TestTimelineAwaiter.Await(() => host.Consumer!.MessagesProcessed == 2, null, 30000, () => host.Consumer!.DleErrors); - Assert.Multiple(() => - { - Assert.That(_helper.SeriesTable!.GetRowCount(), Is.GreaterThanOrEqualTo(1), "SeriesTable did not have the expected number of rows in LIVE"); - Assert.That(_helper.StudyTable!.GetRowCount(), Is.GreaterThanOrEqualTo(1), "StudyTable did not have the expected number of rows in LIVE"); - Assert.That(_helper.ImageTable!.GetRowCount(), Is.EqualTo(1), "ImageTable did not have the expected number of rows in LIVE"); - }); + Assert.Multiple(() => + { + Assert.That(_helper.SeriesTable!.GetRowCount(), Is.GreaterThanOrEqualTo(1), "SeriesTable did not have the expected number of rows in LIVE"); + Assert.That(_helper.StudyTable!.GetRowCount(), Is.GreaterThanOrEqualTo(1), "StudyTable did not have the expected number of rows in LIVE"); + Assert.That(_helper.ImageTable!.GetRowCount(), Is.EqualTo(1), "ImageTable did not have the expected number of rows in LIVE"); + }); - host.Stop("Test end"); - } - tester.Shutdown(); + host.Stop("Test end"); } + tester.Shutdown(); } } diff --git 
a/tests/SmiServices.IntegrationTests/Microservices/DicomRelationalMapper/MicroservicesIntegrationTest.cs b/tests/SmiServices.IntegrationTests/Microservices/DicomRelationalMapper/MicroservicesIntegrationTest.cs index 8fde8bfb3..c160ae3c3 100644 --- a/tests/SmiServices.IntegrationTests/Microservices/DicomRelationalMapper/MicroservicesIntegrationTest.cs +++ b/tests/SmiServices.IntegrationTests/Microservices/DicomRelationalMapper/MicroservicesIntegrationTest.cs @@ -40,314 +40,314 @@ using Tests.Common; using DatabaseType = FAnsi.DatabaseType; -namespace SmiServices.IntegrationTests.Microservices.DicomRelationalMapper +namespace SmiServices.IntegrationTests.Microservices.DicomRelationalMapper; + +[RequiresRabbit, RequiresMongoDb, RequiresRelationalDb(DatabaseType.MicrosoftSQLServer)] +public class MicroservicesIntegrationTest : DatabaseTests { - [RequiresRabbit, RequiresMongoDb, RequiresRelationalDb(DatabaseType.MicrosoftSQLServer)] - public class MicroservicesIntegrationTest : DatabaseTests - { - public const string ScratchDatabaseName = "RDMPTests_ScratchArea"; + public const string ScratchDatabaseName = "RDMPTests_ScratchArea"; - private DicomRelationalMapperTestHelper _helper = null!; - private GlobalOptions _globals = null!; - private const string MongoTestDbName = "nUnitTestDb"; + private DicomRelationalMapperTestHelper _helper = null!; + private GlobalOptions _globals = null!; + private const string MongoTestDbName = "nUnitTestDb"; - private void SetupSuite(DiscoveredDatabase server, bool persistentRaw, string? modalityPrefix) - { - _globals = new GlobalOptionsFactory().Load(nameof(MicroservicesIntegrationTest)); + private void SetupSuite(DiscoveredDatabase server, bool persistentRaw, string? 
modalityPrefix) + { + _globals = new GlobalOptionsFactory().Load(nameof(MicroservicesIntegrationTest)); - _globals.UseTestValues( - RequiresRabbit.Connection.Value, - RequiresMongoDb.GetMongoClientSettings(), - RequiresRelationalDb.GetRelationalDatabaseConnectionStrings(), - ((TableRepository)RepositoryLocator.CatalogueRepository).ConnectionStringBuilder, - ((TableRepository)RepositoryLocator.DataExportRepository).ConnectionStringBuilder); + _globals.UseTestValues( + RequiresRabbit.Connection.Value, + RequiresMongoDb.GetMongoClientSettings(), + RequiresRelationalDb.GetRelationalDatabaseConnectionStrings(), + ((TableRepository)RepositoryLocator.CatalogueRepository).ConnectionStringBuilder, + ((TableRepository)RepositoryLocator.DataExportRepository).ConnectionStringBuilder); - _helper = new DicomRelationalMapperTestHelper(); - _helper.SetupSuite(server, RepositoryLocator, _globals, typeof(DicomDatasetCollectionSource), null, null, persistentRaw, modalityPrefix); + _helper = new DicomRelationalMapperTestHelper(); + _helper.SetupSuite(server, RepositoryLocator, _globals, typeof(DicomDatasetCollectionSource), null, null, persistentRaw, modalityPrefix); - _globals.DicomRelationalMapperOptions!.RetryOnFailureCount = 0; - _globals.DicomRelationalMapperOptions.RetryDelayInSeconds = 0; + _globals.DicomRelationalMapperOptions!.RetryOnFailureCount = 0; + _globals.DicomRelationalMapperOptions.RetryDelayInSeconds = 0; - //do not use an explicit RAW data load server - if (CatalogueRepository.GetDefaultFor(PermissableDefaults.RAWDataLoadServer) is not null) - CatalogueRepository.ClearDefault(PermissableDefaults.RAWDataLoadServer); - } + //do not use an explicit RAW data load server + if (CatalogueRepository.GetDefaultFor(PermissableDefaults.RAWDataLoadServer) is not null) + CatalogueRepository.ClearDefault(PermissableDefaults.RAWDataLoadServer); + } - [TearDown] - public new void TearDown() - { - //delete all joins - foreach (var j in CatalogueRepository.GetAllObjects()) - 
j.DeleteInDatabase(); + [TearDown] + public new void TearDown() + { + //delete all joins + foreach (var j in CatalogueRepository.GetAllObjects()) + j.DeleteInDatabase(); - //delete everything from data export - foreach (var o in new Type[] { typeof(ExtractionConfiguration), typeof(ExternalCohortTable), typeof(ExtractableDataSet) }.SelectMany(t => DataExportRepository.GetAllObjects(t))) - o.DeleteInDatabase(); + //delete everything from data export + foreach (var o in new Type[] { typeof(ExtractionConfiguration), typeof(ExternalCohortTable), typeof(ExtractableDataSet) }.SelectMany(t => DataExportRepository.GetAllObjects(t))) + o.DeleteInDatabase(); - //delete everything from catalogue - foreach (var o in new Type[] { typeof(Catalogue), typeof(TableInfo), typeof(LoadMetadata), typeof(Pipeline) }.SelectMany(t => CatalogueRepository.GetAllObjects(t))) - o.DeleteInDatabase(); - } + //delete everything from catalogue + foreach (var o in new Type[] { typeof(Catalogue), typeof(TableInfo), typeof(LoadMetadata), typeof(Pipeline) }.SelectMany(t => CatalogueRepository.GetAllObjects(t))) + o.DeleteInDatabase(); + } - [TestCase(DatabaseType.MicrosoftSQLServer, typeof(GuidDatabaseNamer))] - [TestCase(DatabaseType.MicrosoftSQLServer, typeof(GuidTableNamer))] - [TestCase(DatabaseType.MySql, typeof(GuidDatabaseNamer))] - [TestCase(DatabaseType.MySql, typeof(GuidTableNamer))] - public void IntegrationTest_HappyPath(DatabaseType databaseType, Type namerType) - { - var server = GetCleanedServer(databaseType, ScratchDatabaseName); - SetupSuite(server, false, "MR_"); + [TestCase(DatabaseType.MicrosoftSQLServer, typeof(GuidDatabaseNamer))] + [TestCase(DatabaseType.MicrosoftSQLServer, typeof(GuidTableNamer))] + [TestCase(DatabaseType.MySql, typeof(GuidDatabaseNamer))] + [TestCase(DatabaseType.MySql, typeof(GuidTableNamer))] + public void IntegrationTest_HappyPath(DatabaseType databaseType, Type namerType) + { + var server = GetCleanedServer(databaseType, ScratchDatabaseName); + 
SetupSuite(server, false, "MR_"); - //this ensures that the ExtractionConfiguration.ID and Project.ID properties are out of sync (they are automnums). Its just a precaution since we are using both IDs in places if we - //had any bugs where we used the wrong one but they were the same then it would be obscured until production - var p = new Project(DataExportRepository, "delme"); - p.DeleteInDatabase(); + //this ensures that the ExtractionConfiguration.ID and Project.ID properties are out of sync (they are automnums). Its just a precaution since we are using both IDs in places if we + //had any bugs where we used the wrong one but they were the same then it would be obscured until production + var p = new Project(DataExportRepository, "delme"); + p.DeleteInDatabase(); - _globals.DicomRelationalMapperOptions!.Guid = new Guid("fc229fc3-f700-4515-86e8-e3d38b3d1823"); - _globals.DicomRelationalMapperOptions.QoSPrefetchCount = 5000; - _globals.DicomRelationalMapperOptions.DatabaseNamerType = namerType.FullName; + _globals.DicomRelationalMapperOptions!.Guid = new Guid("fc229fc3-f700-4515-86e8-e3d38b3d1823"); + _globals.DicomRelationalMapperOptions.QoSPrefetchCount = 5000; + _globals.DicomRelationalMapperOptions.DatabaseNamerType = namerType.FullName; - _helper.TruncateTablesIfExists(); + _helper.TruncateTablesIfExists(); - //Create test directory with a single image - var dir = new DirectoryInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, nameof(IntegrationTest_HappyPath))); - dir.Create(); + //Create test directory with a single image + var dir = new DirectoryInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, nameof(IntegrationTest_HappyPath))); + dir.Create(); - var arg = _helper.LoadMetadata!.ProcessTasks.SelectMany(a => a.ProcessTaskArguments).Single(a => a.Name.Equals("ModalityMatchingRegex")); - arg.SetValue(new Regex("([A-z]*)_.*$")); - arg.SaveToDatabase(); + var arg = _helper.LoadMetadata!.ProcessTasks.SelectMany(a => 
a.ProcessTaskArguments).Single(a => a.Name.Equals("ModalityMatchingRegex")); + arg.SetValue(new Regex("([A-z]*)_.*$")); + arg.SaveToDatabase(); - //clean up the directory - foreach (var f in dir.GetFiles()) - f.Delete(); + //clean up the directory + foreach (var f in dir.GetFiles()) + f.Delete(); - new TestData().Create(new FileInfo(Path.Combine(dir.FullName, "MyTestFile.dcm"))); + new TestData().Create(new FileInfo(Path.Combine(dir.FullName, "MyTestFile.dcm"))); - RunTest(dir, 1); - } + RunTest(dir, 1); + } - [TestCase(DatabaseType.MicrosoftSQLServer, typeof(GuidDatabaseNamer))] - [TestCase(DatabaseType.MySql, typeof(GuidDatabaseNamer))] - public void IntegrationTest_NoFileExtensions(DatabaseType databaseType, Type namerType) - { - var server = GetCleanedServer(databaseType, ScratchDatabaseName); - SetupSuite(server, false, "MR_"); + [TestCase(DatabaseType.MicrosoftSQLServer, typeof(GuidDatabaseNamer))] + [TestCase(DatabaseType.MySql, typeof(GuidDatabaseNamer))] + public void IntegrationTest_NoFileExtensions(DatabaseType databaseType, Type namerType) + { + var server = GetCleanedServer(databaseType, ScratchDatabaseName); + SetupSuite(server, false, "MR_"); - //this ensures that the ExtractionConfiguration.ID and Project.ID properties are out of sync (they are automnums). Its just a precaution since we are using both IDs in places if we - //had any bugs where we used the wrong one but they were the same then it would be obscured until production - var p = new Project(DataExportRepository, "delme"); - p.DeleteInDatabase(); + //this ensures that the ExtractionConfiguration.ID and Project.ID properties are out of sync (they are automnums). 
Its just a precaution since we are using both IDs in places if we + //had any bugs where we used the wrong one but they were the same then it would be obscured until production + var p = new Project(DataExportRepository, "delme"); + p.DeleteInDatabase(); - _globals.DicomRelationalMapperOptions!.Guid = new Guid("fc229fc3-f700-4515-86e8-e3d38b3d1823"); - _globals.DicomRelationalMapperOptions.QoSPrefetchCount = 5000; - _globals.DicomRelationalMapperOptions.DatabaseNamerType = namerType.FullName; + _globals.DicomRelationalMapperOptions!.Guid = new Guid("fc229fc3-f700-4515-86e8-e3d38b3d1823"); + _globals.DicomRelationalMapperOptions.QoSPrefetchCount = 5000; + _globals.DicomRelationalMapperOptions.DatabaseNamerType = namerType.FullName; - _globals.FileSystemOptions!.DicomSearchPattern = "*"; + _globals.FileSystemOptions!.DicomSearchPattern = "*"; - _helper.TruncateTablesIfExists(); + _helper.TruncateTablesIfExists(); - //Create test directory with a single image - var dir = new DirectoryInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, nameof(IntegrationTest_NoFileExtensions))); - dir.Create(); + //Create test directory with a single image + var dir = new DirectoryInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, nameof(IntegrationTest_NoFileExtensions))); + dir.Create(); - var arg = _helper.LoadMetadata!.ProcessTasks.SelectMany(a => a.ProcessTaskArguments).Single(a => a.Name.Equals("ModalityMatchingRegex")); - arg.SetValue(new Regex("([A-z]*)_.*$")); - arg.SaveToDatabase(); + var arg = _helper.LoadMetadata!.ProcessTasks.SelectMany(a => a.ProcessTaskArguments).Single(a => a.Name.Equals("ModalityMatchingRegex")); + arg.SetValue(new Regex("([A-z]*)_.*$")); + arg.SaveToDatabase(); - //clean up the directory - foreach (var f in dir.GetFiles()) - f.Delete(); + //clean up the directory + foreach (var f in dir.GetFiles()) + f.Delete(); - new TestData().Create(new FileInfo(Path.Combine(dir.FullName, "Mr.010101"))); //this is legit a dicom file + new 
TestData().Create(new FileInfo(Path.Combine(dir.FullName, "Mr.010101"))); //this is legit a dicom file - try - { - RunTest(dir, 1, (o) => o.DicomSearchPattern = "*"); - } - finally - { - // Reset this in case it breaks other tests - _globals.FileSystemOptions.DicomSearchPattern = "*.dcm"; - } + try + { + RunTest(dir, 1, (o) => o.DicomSearchPattern = "*"); } - - [TestCase(DatabaseType.MicrosoftSQLServer, null)] - [TestCase(DatabaseType.MySql, typeof(RejectAll))] - public void IntegrationTest_Rejector(DatabaseType databaseType, Type? rejector) + finally { - var server = GetCleanedServer(databaseType, ScratchDatabaseName); - SetupSuite(server, false, "MR_"); + // Reset this in case it breaks other tests + _globals.FileSystemOptions.DicomSearchPattern = "*.dcm"; + } + } - //this ensures that the ExtractionConfiguration.ID and Project.ID properties are out of sync (they are automnums). Its just a precaution since we are using both IDs in places if we - //had any bugs where we used the wrong one but they were the same then it would be obscured until production - var p = new Project(DataExportRepository, "delme"); - p.DeleteInDatabase(); + [TestCase(DatabaseType.MicrosoftSQLServer, null)] + [TestCase(DatabaseType.MySql, typeof(RejectAll))] + public void IntegrationTest_Rejector(DatabaseType databaseType, Type? rejector) + { + var server = GetCleanedServer(databaseType, ScratchDatabaseName); + SetupSuite(server, false, "MR_"); - _globals.DicomRelationalMapperOptions!.Guid = new Guid("fc229fc3-f700-4515-86e8-e3d38b3d1823"); - _globals.DicomRelationalMapperOptions.QoSPrefetchCount = 5000; - _globals.DicomRelationalMapperOptions.DatabaseNamerType = typeof(GuidDatabaseNamer).FullName; + //this ensures that the ExtractionConfiguration.ID and Project.ID properties are out of sync (they are automnums). 
Its just a precaution since we are using both IDs in places if we + //had any bugs where we used the wrong one but they were the same then it would be obscured until production + var p = new Project(DataExportRepository, "delme"); + p.DeleteInDatabase(); - _globals.CohortExtractorOptions!.RejectorType = rejector?.FullName; + _globals.DicomRelationalMapperOptions!.Guid = new Guid("fc229fc3-f700-4515-86e8-e3d38b3d1823"); + _globals.DicomRelationalMapperOptions.QoSPrefetchCount = 5000; + _globals.DicomRelationalMapperOptions.DatabaseNamerType = typeof(GuidDatabaseNamer).FullName; - _globals.FileSystemOptions!.DicomSearchPattern = "*"; + _globals.CohortExtractorOptions!.RejectorType = rejector?.FullName; - _helper.TruncateTablesIfExists(); + _globals.FileSystemOptions!.DicomSearchPattern = "*"; - //Create test directory with a single image - var dir = new DirectoryInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, nameof(IntegrationTest_NoFileExtensions))); - dir.Create(); + _helper.TruncateTablesIfExists(); - var arg = _helper.LoadMetadata!.ProcessTasks.SelectMany(a => a.ProcessTaskArguments).Single(a => a.Name.Equals("ModalityMatchingRegex")); - arg.SetValue(new Regex("([A-z]*)_.*$")); - arg.SaveToDatabase(); + //Create test directory with a single image + var dir = new DirectoryInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, nameof(IntegrationTest_NoFileExtensions))); + dir.Create(); - //clean up the directory - foreach (var f in dir.GetFiles()) - f.Delete(); + var arg = _helper.LoadMetadata!.ProcessTasks.SelectMany(a => a.ProcessTaskArguments).Single(a => a.Name.Equals("ModalityMatchingRegex")); + arg.SetValue(new Regex("([A-z]*)_.*$")); + arg.SaveToDatabase(); - new TestData().Create(new FileInfo(Path.Combine(dir.FullName, "Mr.010101"))); //this is legit a dicom file + //clean up the directory + foreach (var f in dir.GetFiles()) + f.Delete(); - try - { - RunTest(dir, 1, (o) => o.DicomSearchPattern = "*"); - } - finally - { - // Reset this in 
case it breaks other tests - _globals.FileSystemOptions.DicomSearchPattern = "*.dcm"; - } - } + new TestData().Create(new FileInfo(Path.Combine(dir.FullName, "Mr.010101"))); //this is legit a dicom file - [TestCase(DatabaseType.MicrosoftSQLServer, typeof(GuidDatabaseNamer))] - [TestCase(DatabaseType.MicrosoftSQLServer, typeof(GuidTableNamer))] - [TestCase(DatabaseType.MySql, typeof(GuidDatabaseNamer))] - [TestCase(DatabaseType.MySql, typeof(GuidTableNamer))] - public void IntegrationTest_HappyPath_WithIsolation(DatabaseType databaseType, Type namerType) + try { - var server = GetCleanedServer(databaseType, ScratchDatabaseName); - SetupSuite(server, false, "MR_"); + RunTest(dir, 1, (o) => o.DicomSearchPattern = "*"); + } + finally + { + // Reset this in case it breaks other tests + _globals.FileSystemOptions.DicomSearchPattern = "*.dcm"; + } + } - //this ensures that the ExtractionConfiguration.ID and Project.ID properties are out of sync (they are automnums). Its just a precaution since we are using both IDs in places if we - //had any bugs where we used the wrong one but they were the same then it would be obscured until production - var p = new Project(DataExportRepository, "delme"); - p.DeleteInDatabase(); + [TestCase(DatabaseType.MicrosoftSQLServer, typeof(GuidDatabaseNamer))] + [TestCase(DatabaseType.MicrosoftSQLServer, typeof(GuidTableNamer))] + [TestCase(DatabaseType.MySql, typeof(GuidDatabaseNamer))] + [TestCase(DatabaseType.MySql, typeof(GuidTableNamer))] + public void IntegrationTest_HappyPath_WithIsolation(DatabaseType databaseType, Type namerType) + { + var server = GetCleanedServer(databaseType, ScratchDatabaseName); + SetupSuite(server, false, "MR_"); - _globals.DicomRelationalMapperOptions!.Guid = new Guid("fc229fc3-f700-4515-86e8-e3d38b3d1823"); - _globals.DicomRelationalMapperOptions.QoSPrefetchCount = 5000; - _globals.DicomRelationalMapperOptions.MinimumBatchSize = 3; - _globals.DicomRelationalMapperOptions.DatabaseNamerType = namerType.FullName; 
+ //this ensures that the ExtractionConfiguration.ID and Project.ID properties are out of sync (they are automnums). Its just a precaution since we are using both IDs in places if we + //had any bugs where we used the wrong one but they were the same then it would be obscured until production + var p = new Project(DataExportRepository, "delme"); + p.DeleteInDatabase(); - _helper.TruncateTablesIfExists(); + _globals.DicomRelationalMapperOptions!.Guid = new Guid("fc229fc3-f700-4515-86e8-e3d38b3d1823"); + _globals.DicomRelationalMapperOptions.QoSPrefetchCount = 5000; + _globals.DicomRelationalMapperOptions.MinimumBatchSize = 3; + _globals.DicomRelationalMapperOptions.DatabaseNamerType = namerType.FullName; - //Create test directory with a single image - var dir = new DirectoryInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, nameof(IntegrationTest_HappyPath_WithIsolation))); - dir.Create(); + _helper.TruncateTablesIfExists(); + //Create test directory with a single image + var dir = new DirectoryInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, nameof(IntegrationTest_HappyPath_WithIsolation))); + dir.Create(); - var ptIsolation = new ProcessTask(CatalogueRepository, _helper.LoadMetadata, LoadStage.AdjustRaw); - ptIsolation.CreateArgumentsForClassIfNotExists(); - ptIsolation.Path = typeof(PrimaryKeyCollisionIsolationMutilation).FullName; - ptIsolation.ProcessTaskType = ProcessTaskType.MutilateDataTable; - ptIsolation.SaveToDatabase(); - var arg1 = _helper.LoadMetadata!.ProcessTasks.SelectMany(a => a.ProcessTaskArguments).Single(a => a.Name.Equals("TablesToIsolate")); - arg1.SetValue(new[] { _helper.StudyTableInfo, _helper.SeriesTableInfo, _helper.ImageTableInfo }); - arg1.SaveToDatabase(); + var ptIsolation = new ProcessTask(CatalogueRepository, _helper.LoadMetadata, LoadStage.AdjustRaw); + ptIsolation.CreateArgumentsForClassIfNotExists(); + ptIsolation.Path = typeof(PrimaryKeyCollisionIsolationMutilation).FullName; + ptIsolation.ProcessTaskType = 
ProcessTaskType.MutilateDataTable; + ptIsolation.SaveToDatabase(); - var db = new ExternalDatabaseServer(CatalogueRepository, "IsolationDatabase(live)", null); - db.SetProperties(server); + var arg1 = _helper.LoadMetadata!.ProcessTasks.SelectMany(a => a.ProcessTaskArguments).Single(a => a.Name.Equals("TablesToIsolate")); + arg1.SetValue(new[] { _helper.StudyTableInfo, _helper.SeriesTableInfo, _helper.ImageTableInfo }); + arg1.SaveToDatabase(); - var arg2 = _helper.LoadMetadata.ProcessTasks.SelectMany(a => a.ProcessTaskArguments).Single(a => a.Name.Equals("IsolationDatabase")); - arg2.SetValue(db); - arg2.SaveToDatabase(); + var db = new ExternalDatabaseServer(CatalogueRepository, "IsolationDatabase(live)", null); + db.SetProperties(server); - var arg3 = _helper.LoadMetadata.ProcessTasks.SelectMany(a => a.ProcessTaskArguments).Single(a => a.Name.Equals("ModalityMatchingRegex")); - arg3.SetValue(new Regex("([A-z]*)_.*$")); - arg3.SaveToDatabase(); + var arg2 = _helper.LoadMetadata.ProcessTasks.SelectMany(a => a.ProcessTaskArguments).Single(a => a.Name.Equals("IsolationDatabase")); + arg2.SetValue(db); + arg2.SaveToDatabase(); - //build the joins - _ = new JoinInfo(CatalogueRepository, - _helper.ImageTableInfo!.ColumnInfos.Single(c => c.GetRuntimeName().Equals("SeriesInstanceUID")), - _helper.SeriesTableInfo!.ColumnInfos.Single(c => c.GetRuntimeName().Equals("SeriesInstanceUID")), - ExtractionJoinType.Right, null); + var arg3 = _helper.LoadMetadata.ProcessTasks.SelectMany(a => a.ProcessTaskArguments).Single(a => a.Name.Equals("ModalityMatchingRegex")); + arg3.SetValue(new Regex("([A-z]*)_.*$")); + arg3.SaveToDatabase(); - _ = new JoinInfo(CatalogueRepository, - _helper.SeriesTableInfo.ColumnInfos.Single(c => c.GetRuntimeName().Equals("StudyInstanceUID")), - _helper.StudyTableInfo!.ColumnInfos.Single(c => c.GetRuntimeName().Equals("StudyInstanceUID")), - ExtractionJoinType.Right, null); + //build the joins + _ = new JoinInfo(CatalogueRepository, + 
_helper.ImageTableInfo!.ColumnInfos.Single(c => c.GetRuntimeName().Equals("SeriesInstanceUID")), + _helper.SeriesTableInfo!.ColumnInfos.Single(c => c.GetRuntimeName().Equals("SeriesInstanceUID")), + ExtractionJoinType.Right, null); - //start with Study table - _helper.StudyTableInfo.IsPrimaryExtractionTable = true; - _helper.StudyTableInfo.SaveToDatabase(); + _ = new JoinInfo(CatalogueRepository, + _helper.SeriesTableInfo.ColumnInfos.Single(c => c.GetRuntimeName().Equals("StudyInstanceUID")), + _helper.StudyTableInfo!.ColumnInfos.Single(c => c.GetRuntimeName().Equals("StudyInstanceUID")), + ExtractionJoinType.Right, null); - //clean up the directory - foreach (var f in dir.GetFiles()) - f.Delete(); + //start with Study table + _helper.StudyTableInfo.IsPrimaryExtractionTable = true; + _helper.StudyTableInfo.SaveToDatabase(); - new TestData().Create(new FileInfo(Path.Combine(dir.FullName, "MyTestFile.dcm"))); + //clean up the directory + foreach (var f in dir.GetFiles()) + f.Delete(); - var ds1 = new DicomDataset - { - { DicomTag.StudyInstanceUID, "1.2.3" }, - { DicomTag.SeriesInstanceUID, "1.2.2" }, - { DicomTag.SOPInstanceUID, "1.2.3" }, - { DicomTag.PatientAge, "030Y" }, - { DicomTag.PatientID, "123" }, - { DicomTag.SOPClassUID, "1" }, - { DicomTag.Modality, "MR" } - }; - - new DicomFile(ds1).Save(Path.Combine(dir.FullName, "abc.dcm")); - - var ds2 = new DicomDataset - { - { DicomTag.StudyInstanceUID, "1.2.3" }, - { DicomTag.SeriesInstanceUID, "1.2.4" }, - { DicomTag.SOPInstanceUID, "1.2.7" }, - { DicomTag.PatientAge, "040Y" }, //age is replicated but should be unique at study level so gets isolated - { DicomTag.PatientID, "123" }, - { DicomTag.SOPClassUID, "1" }, - { DicomTag.Modality, "MR" } - }; + new TestData().Create(new FileInfo(Path.Combine(dir.FullName, "MyTestFile.dcm"))); + + var ds1 = new DicomDataset + { + { DicomTag.StudyInstanceUID, "1.2.3" }, + { DicomTag.SeriesInstanceUID, "1.2.2" }, + { DicomTag.SOPInstanceUID, "1.2.3" }, + { DicomTag.PatientAge, 
"030Y" }, + { DicomTag.PatientID, "123" }, + { DicomTag.SOPClassUID, "1" }, + { DicomTag.Modality, "MR" } + }; + + new DicomFile(ds1).Save(Path.Combine(dir.FullName, "abc.dcm")); + + var ds2 = new DicomDataset + { + { DicomTag.StudyInstanceUID, "1.2.3" }, + { DicomTag.SeriesInstanceUID, "1.2.4" }, + { DicomTag.SOPInstanceUID, "1.2.7" }, + { DicomTag.PatientAge, "040Y" }, //age is replicated but should be unique at study level so gets isolated + { DicomTag.PatientID, "123" }, + { DicomTag.SOPClassUID, "1" }, + { DicomTag.Modality, "MR" } + }; - new DicomFile(ds2).Save(Path.Combine(dir.FullName, "def.dcm")); + new DicomFile(ds2).Save(Path.Combine(dir.FullName, "def.dcm")); - var checks = new ProcessTaskChecks(_helper.LoadMetadata, new ThrowImmediatelyActivator(RepositoryLocator)); - checks.Check(new AcceptAllCheckNotifier()); + var checks = new ProcessTaskChecks(_helper.LoadMetadata, new ThrowImmediatelyActivator(RepositoryLocator)); + checks.Check(new AcceptAllCheckNotifier()); - RunTest(dir, 1); + RunTest(dir, 1); - Assert.That(_helper.ImageTable!.GetRowCount(), Is.EqualTo(1)); + Assert.That(_helper.ImageTable!.GetRowCount(), Is.EqualTo(1)); - var isoTable = server.ExpectTable($"{_helper.ImageTable.GetRuntimeName()}_Isolation"); - Assert.That(isoTable.GetRowCount(), Is.EqualTo(2)); - } + var isoTable = server.ExpectTable($"{_helper.ImageTable.GetRuntimeName()}_Isolation"); + Assert.That(isoTable.GetRowCount(), Is.EqualTo(2)); + } - [TestCase(DatabaseType.MicrosoftSQLServer, true)] - [TestCase(DatabaseType.MicrosoftSQLServer, false)] - [TestCase(DatabaseType.MySql, true)] - [TestCase(DatabaseType.MySql, false)] - public void IntegrationTest_HappyPath_WithElevation(DatabaseType databaseType, bool persistentRaw) - { - var server = GetCleanedServer(databaseType, ScratchDatabaseName); - SetupSuite(server, persistentRaw: persistentRaw, "MR_"); + [TestCase(DatabaseType.MicrosoftSQLServer, true)] + [TestCase(DatabaseType.MicrosoftSQLServer, false)] + 
[TestCase(DatabaseType.MySql, true)] + [TestCase(DatabaseType.MySql, false)] + public void IntegrationTest_HappyPath_WithElevation(DatabaseType databaseType, bool persistentRaw) + { + var server = GetCleanedServer(databaseType, ScratchDatabaseName); + SetupSuite(server, persistentRaw: persistentRaw, "MR_"); - _globals.DicomRelationalMapperOptions!.Guid = new Guid("fc229fc3-f888-4515-86e8-e3d38b3d1823"); - _globals.DicomRelationalMapperOptions.QoSPrefetchCount = 5000; + _globals.DicomRelationalMapperOptions!.Guid = new Guid("fc229fc3-f888-4515-86e8-e3d38b3d1823"); + _globals.DicomRelationalMapperOptions.QoSPrefetchCount = 5000; - _helper.TruncateTablesIfExists(); + _helper.TruncateTablesIfExists(); - //add the column to the table - _helper.ImageTable!.AddColumn("d_DerivationCodeMeaning", "varchar(100)", true, 5); + //add the column to the table + _helper.ImageTable!.AddColumn("d_DerivationCodeMeaning", "varchar(100)", true, 5); - var archiveTable = _helper.ImageTable.Database.ExpectTable($"{_helper.ImageTable.GetRuntimeName()}_Archive"); - if (archiveTable.Exists()) - archiveTable.AddColumn("d_DerivationCodeMeaning", "varchar(100)", true, 5); + var archiveTable = _helper.ImageTable.Database.ExpectTable($"{_helper.ImageTable.GetRuntimeName()}_Archive"); + if (archiveTable.Exists()) + archiveTable.AddColumn("d_DerivationCodeMeaning", "varchar(100)", true, 5); - new TableInfoSynchronizer(_helper.ImageTableInfo).Synchronize(new AcceptAllCheckNotifier()); + new TableInfoSynchronizer(_helper.ImageTableInfo).Synchronize(new AcceptAllCheckNotifier()); - var elevationRules = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, "ElevationConfig.xml")); - File.WriteAllText(elevationRules.FullName, + var elevationRules = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, "ElevationConfig.xml")); + File.WriteAllText(elevationRules.FullName, @" @@ -366,262 +366,261 @@ public void IntegrationTest_HappyPath_WithElevation(DatabaseType databaseType, b 
"); - var arg = _helper.DicomSourcePipelineComponent!.PipelineComponentArguments.Single(a => a.Name.Equals("TagElevationConfigurationFile")); - arg.SetValue(elevationRules); - arg.SaveToDatabase(); + var arg = _helper.DicomSourcePipelineComponent!.PipelineComponentArguments.Single(a => a.Name.Equals("TagElevationConfigurationFile")); + arg.SetValue(elevationRules); + arg.SaveToDatabase(); - //Create test directory with a single image - var dir = new DirectoryInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, nameof(IntegrationTest_HappyPath_WithElevation))); - dir.Create(); + //Create test directory with a single image + var dir = new DirectoryInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, nameof(IntegrationTest_HappyPath_WithElevation))); + dir.Create(); - //clean up the directory - foreach (var f in dir.GetFiles()) - f.Delete(); + //clean up the directory + foreach (var f in dir.GetFiles()) + f.Delete(); - new TestData().Create(new FileInfo(Path.Combine(dir.FullName, "MyTestFile.dcm"))); + new TestData().Create(new FileInfo(Path.Combine(dir.FullName, "MyTestFile.dcm"))); - RunTest(dir, 1); + RunTest(dir, 1); - var tbl = _helper.ImageTable.GetDataTable(); - Assert.That(tbl.Rows, Has.Count.EqualTo(1)); - Assert.That(tbl.Rows[0]["d_DerivationCodeMeaning"], Is.EqualTo("Full fidelity image, uncompressed or lossless compressed")); + var tbl = _helper.ImageTable.GetDataTable(); + Assert.That(tbl.Rows, Has.Count.EqualTo(1)); + Assert.That(tbl.Rows[0]["d_DerivationCodeMeaning"], Is.EqualTo("Full fidelity image, uncompressed or lossless compressed")); - _helper.ImageTable.DropColumn(_helper.ImageTable.DiscoverColumn("d_DerivationCodeMeaning")); + _helper.ImageTable.DropColumn(_helper.ImageTable.DiscoverColumn("d_DerivationCodeMeaning")); - if (archiveTable.Exists()) - archiveTable.DropColumn(archiveTable.DiscoverColumn("d_DerivationCodeMeaning")); - } + if (archiveTable.Exists()) + 
archiveTable.DropColumn(archiveTable.DiscoverColumn("d_DerivationCodeMeaning")); + } - [TestCase(DatabaseType.MicrosoftSQLServer)] - [TestCase(DatabaseType.MySql)] - public void IntegrationTest_BumpyRide(DatabaseType databaseType) - { - var server = GetCleanedServer(databaseType, ScratchDatabaseName); - SetupSuite(server, false, "MR_"); + [TestCase(DatabaseType.MicrosoftSQLServer)] + [TestCase(DatabaseType.MySql)] + public void IntegrationTest_BumpyRide(DatabaseType databaseType) + { + var server = GetCleanedServer(databaseType, ScratchDatabaseName); + SetupSuite(server, false, "MR_"); - _globals.DicomRelationalMapperOptions!.Guid = new Guid("6c7cfbce-1af6-4101-ade7-6537eea72e03"); - _globals.DicomRelationalMapperOptions.QoSPrefetchCount = 5000; - _globals.IdentifierMapperOptions!.QoSPrefetchCount = 50; - _globals.DicomTagReaderOptions!.NackIfAnyFileErrors = false; + _globals.DicomRelationalMapperOptions!.Guid = new Guid("6c7cfbce-1af6-4101-ade7-6537eea72e03"); + _globals.DicomRelationalMapperOptions.QoSPrefetchCount = 5000; + _globals.IdentifierMapperOptions!.QoSPrefetchCount = 50; + _globals.DicomTagReaderOptions!.NackIfAnyFileErrors = false; - _helper.TruncateTablesIfExists(); + _helper.TruncateTablesIfExists(); - //Create test directory - var dir = new DirectoryInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, nameof(IntegrationTest_BumpyRide))); + //Create test directory + var dir = new DirectoryInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, nameof(IntegrationTest_BumpyRide))); - var r = new Random(500); + var r = new Random(500); - //create a generator - using var generator = new DicomDataGenerator(r, dir.FullName, "MR"); - generator.GenerateImageFiles(40, r); - RunTest(dir, 40); - } - private void RunTest(DirectoryInfo dir, int numberOfExpectedRows, Action? 
adjustFileSystemOptions = null) + //create a generator + using var generator = new DicomDataGenerator(r, dir.FullName, "MR"); + generator.GenerateImageFiles(40, r); + RunTest(dir, 40); + } + private void RunTest(DirectoryInfo dir, int numberOfExpectedRows, Action? adjustFileSystemOptions = null) + { + var logger = LogManager.GetLogger("MicroservicesIntegrationTest"); + + _globals.FileSystemOptions!.FileSystemRoot = TestContext.CurrentContext.TestDirectory; + + var readFromFatalErrors = new ConsumerOptions { - var logger = LogManager.GetLogger("MicroservicesIntegrationTest"); + QueueName = "TEST.FatalLoggingQueue" + }; - _globals.FileSystemOptions!.FileSystemRoot = TestContext.CurrentContext.TestDirectory; + ///////////////////////////////////// Directory ////////////////////////// + var processDirectoryOptions = new DicomDirectoryProcessorCliOptions + { + ToProcessDir = dir, + DirectoryFormat = "Default" + }; - var readFromFatalErrors = new ConsumerOptions - { - QueueName = "TEST.FatalLoggingQueue" - }; + adjustFileSystemOptions?.Invoke(_globals.FileSystemOptions); - ///////////////////////////////////// Directory ////////////////////////// - var processDirectoryOptions = new DicomDirectoryProcessorCliOptions - { - ToProcessDir = dir, - DirectoryFormat = "Default" - }; + //////////////////////////////////////////////// Mongo Db Populator //////////////////////// + // Make this a GUID or something, should be unique per test + var currentSeriesCollectionName = $"Integration_HappyPath_Series{DateTime.Now.Ticks}"; + var currentImageCollectionName = $"Integration_HappyPath_Image{DateTime.Now.Ticks}"; + + _globals.MongoDbPopulatorOptions!.SeriesCollection = currentSeriesCollectionName; + _globals.MongoDbPopulatorOptions.ImageCollection = currentImageCollectionName; - adjustFileSystemOptions?.Invoke(_globals.FileSystemOptions); + //use the test catalogue not the one in the combined app.config - //////////////////////////////////////////////// Mongo Db Populator 
//////////////////////// - // Make this a GUID or something, should be unique per test - var currentSeriesCollectionName = $"Integration_HappyPath_Series{DateTime.Now.Ticks}"; - var currentImageCollectionName = $"Integration_HappyPath_Image{DateTime.Now.Ticks}"; + _globals.RDMPOptions!.CatalogueConnectionString = (RepositoryLocator.CatalogueRepository as TableRepository)?.DiscoveredServer.Builder.ConnectionString; + _globals.RDMPOptions.DataExportConnectionString = (RepositoryLocator.DataExportRepository as TableRepository)?.DiscoveredServer.Builder.ConnectionString; + _globals.DicomRelationalMapperOptions!.RunChecks = true; - _globals.MongoDbPopulatorOptions!.SeriesCollection = currentSeriesCollectionName; - _globals.MongoDbPopulatorOptions.ImageCollection = currentImageCollectionName; + if (_globals.DicomRelationalMapperOptions.MinimumBatchSize < 1) + _globals.DicomRelationalMapperOptions.MinimumBatchSize = 1; - //use the test catalogue not the one in the combined app.config + using var tester = new MicroserviceTester(_globals.RabbitOptions!, _globals.CohortExtractorOptions!); + tester.CreateExchange(_globals.ProcessDirectoryOptions!.AccessionDirectoryProducerOptions!.ExchangeName!, _globals.DicomTagReaderOptions!.QueueName); + tester.CreateExchange(_globals.DicomTagReaderOptions!.SeriesProducerOptions!.ExchangeName!, _globals.MongoDbPopulatorOptions.SeriesQueueConsumerOptions!.QueueName); + tester.CreateExchange(_globals.DicomTagReaderOptions!.ImageProducerOptions!.ExchangeName!, _globals.IdentifierMapperOptions!.QueueName); + tester.CreateExchange(_globals.DicomTagReaderOptions.ImageProducerOptions.ExchangeName!, _globals.MongoDbPopulatorOptions.ImageQueueConsumerOptions!.QueueName, true); + tester.CreateExchange(_globals.IdentifierMapperOptions.AnonImagesProducerOptions!.ExchangeName!, _globals.DicomRelationalMapperOptions.QueueName); + tester.CreateExchange(_globals.RabbitOptions!.FatalLoggingExchange!, readFromFatalErrors.QueueName); - 
_globals.RDMPOptions!.CatalogueConnectionString = (RepositoryLocator.CatalogueRepository as TableRepository)?.DiscoveredServer.Builder.ConnectionString; - _globals.RDMPOptions.DataExportConnectionString = (RepositoryLocator.DataExportRepository as TableRepository)?.DiscoveredServer.Builder.ConnectionString; - _globals.DicomRelationalMapperOptions!.RunChecks = true; + tester.CreateExchange(_globals.CohortExtractorOptions!.ExtractFilesProducerOptions!.ExchangeName!, null, false, _globals.CohortExtractorOptions.ExtractIdentRoutingKey!); + tester.CreateExchange(_globals.CohortExtractorOptions.ExtractFilesProducerOptions.ExchangeName!, null, true, _globals.CohortExtractorOptions.ExtractAnonRoutingKey!); + tester.CreateExchange(_globals.CohortExtractorOptions.ExtractFilesInfoProducerOptions!.ExchangeName!, null); - if (_globals.DicomRelationalMapperOptions.MinimumBatchSize < 1) - _globals.DicomRelationalMapperOptions.MinimumBatchSize = 1; + #region Running Microservices - using var tester = new MicroserviceTester(_globals.RabbitOptions!, _globals.CohortExtractorOptions!); - tester.CreateExchange(_globals.ProcessDirectoryOptions!.AccessionDirectoryProducerOptions!.ExchangeName!, _globals.DicomTagReaderOptions!.QueueName); - tester.CreateExchange(_globals.DicomTagReaderOptions!.SeriesProducerOptions!.ExchangeName!, _globals.MongoDbPopulatorOptions.SeriesQueueConsumerOptions!.QueueName); - tester.CreateExchange(_globals.DicomTagReaderOptions!.ImageProducerOptions!.ExchangeName!, _globals.IdentifierMapperOptions!.QueueName); - tester.CreateExchange(_globals.DicomTagReaderOptions.ImageProducerOptions.ExchangeName!, _globals.MongoDbPopulatorOptions.ImageQueueConsumerOptions!.QueueName, true); - tester.CreateExchange(_globals.IdentifierMapperOptions.AnonImagesProducerOptions!.ExchangeName!, _globals.DicomRelationalMapperOptions.QueueName); - tester.CreateExchange(_globals.RabbitOptions!.FatalLoggingExchange!, readFromFatalErrors.QueueName); + var processDirectory = new 
DicomDirectoryProcessorHost(_globals, processDirectoryOptions); + processDirectory.Start(); + tester.StopOnDispose.Add(processDirectory); - tester.CreateExchange(_globals.CohortExtractorOptions!.ExtractFilesProducerOptions!.ExchangeName!, null, false, _globals.CohortExtractorOptions.ExtractIdentRoutingKey!); - tester.CreateExchange(_globals.CohortExtractorOptions.ExtractFilesProducerOptions.ExchangeName!, null, true, _globals.CohortExtractorOptions.ExtractAnonRoutingKey!); - tester.CreateExchange(_globals.CohortExtractorOptions.ExtractFilesInfoProducerOptions!.ExchangeName!, null); + var dicomTagReaderHost = new DicomTagReaderHost(_globals); + dicomTagReaderHost.Start(); + tester.StopOnDispose.Add(dicomTagReaderHost); - #region Running Microservices + var mongoDbPopulatorHost = new MongoDbPopulatorHost(_globals); + mongoDbPopulatorHost.Start(); + tester.StopOnDispose.Add(mongoDbPopulatorHost); - var processDirectory = new DicomDirectoryProcessorHost(_globals, processDirectoryOptions); - processDirectory.Start(); - tester.StopOnDispose.Add(processDirectory); + var identifierMapperHost = new IdentifierMapperHost(_globals, new SwapForFixedValueTester("FISHFISH")); + identifierMapperHost.Start(); + tester.StopOnDispose.Add(identifierMapperHost); - var dicomTagReaderHost = new DicomTagReaderHost(_globals); - dicomTagReaderHost.Start(); - tester.StopOnDispose.Add(dicomTagReaderHost); + TestTimelineAwaiter.Await(() => dicomTagReaderHost.AccessionDirectoryMessageConsumer.AckCount >= 1); + logger.Info("\n### DicomTagReader has processed its messages ###\n"); - var mongoDbPopulatorHost = new MongoDbPopulatorHost(_globals); - mongoDbPopulatorHost.Start(); - tester.StopOnDispose.Add(mongoDbPopulatorHost); + // FIXME: This isn't exactly how the pipeline runs + TestTimelineAwaiter.Await(() => identifierMapperHost.Consumer.AckCount >= 1); + logger.Info("\n### IdentifierMapper has processed its messages ###\n"); - var identifierMapperHost = new IdentifierMapperHost(_globals, new 
SwapForFixedValueTester("FISHFISH")); - identifierMapperHost.Start(); - tester.StopOnDispose.Add(identifierMapperHost); + using (var relationalMapperHost = new DicomRelationalMapperHost(_globals)) + { + var start = DateTime.Now; - TestTimelineAwaiter.Await(() => dicomTagReaderHost.AccessionDirectoryMessageConsumer.AckCount >= 1); - logger.Info("\n### DicomTagReader has processed its messages ###\n"); + relationalMapperHost.Start(); + tester.StopOnDispose.Add(relationalMapperHost); - // FIXME: This isn't exactly how the pipeline runs - TestTimelineAwaiter.Await(() => identifierMapperHost.Consumer.AckCount >= 1); + TestTimelineAwaiter.Await(() => mongoDbPopulatorHost.SeriesConsumer.Processor.AckCount >= 1); + TestTimelineAwaiter.Await(() => mongoDbPopulatorHost.ImageConsumer.Processor.AckCount >= 1); + logger.Info("\n### MongoDbPopulator has processed its messages ###\n"); + + TestTimelineAwaiter.Await(() => identifierMapperHost.Consumer.AckCount >= 1);//number of series logger.Info("\n### IdentifierMapper has processed its messages ###\n"); - using (var relationalMapperHost = new DicomRelationalMapperHost(_globals)) + Assert.Multiple(() => { - var start = DateTime.Now; - - relationalMapperHost.Start(); - tester.StopOnDispose.Add(relationalMapperHost); - - TestTimelineAwaiter.Await(() => mongoDbPopulatorHost.SeriesConsumer.Processor.AckCount >= 1); - TestTimelineAwaiter.Await(() => mongoDbPopulatorHost.ImageConsumer.Processor.AckCount >= 1); - logger.Info("\n### MongoDbPopulator has processed its messages ###\n"); - - TestTimelineAwaiter.Await(() => identifierMapperHost.Consumer.AckCount >= 1);//number of series - logger.Info("\n### IdentifierMapper has processed its messages ###\n"); - - Assert.Multiple(() => - { - Assert.That(dicomTagReaderHost.AccessionDirectoryMessageConsumer.NackCount, Is.EqualTo(0)); - Assert.That(identifierMapperHost.Consumer.NackCount, Is.EqualTo(0)); - Assert.That(mongoDbPopulatorHost.SeriesConsumer.NackCount, Is.EqualTo(0)); - 
Assert.That(mongoDbPopulatorHost.ImageConsumer.NackCount, Is.EqualTo(0)); - }); - - - try - { - Thread.Sleep(TimeSpan.FromSeconds(10)); - TestTimelineAwaiter.Await(() => relationalMapperHost.Consumer!.AckCount >= numberOfExpectedRows, null, 30000, () => relationalMapperHost.Consumer!.DleErrors); //number of image files - logger.Info("\n### DicomRelationalMapper has processed its messages ###\n"); - } - finally - { - //find out what happens from the logging database - var rdmpLogging = new Rdmp.Core.Logging.LogManager(_helper.LoadMetadata!.GetDistinctLoggingDatabase()); - - //if error was reported during the dicom relational mapper run - foreach (var dli in rdmpLogging.GetArchivalDataLoadInfos(_helper.LoadMetadata.GetDistinctLoggingTask(), null, null)) - if (dli.StartTime > start) - foreach (var e in dli.Errors) - logger.Error($"{e.Date.TimeOfDay}:{e.Source}:{e.Description}"); - } - - Assert.Multiple(() => - { - Assert.That(_helper.ImageTable!.GetRowCount(), Is.EqualTo(numberOfExpectedRows), "All images should appear in the image table"); - Assert.That(_helper.SeriesTable!.GetRowCount(), Is.LessThanOrEqualTo(numberOfExpectedRows), "Only unique series data should appear in series table, there should be less unique series than images (or equal)"); - Assert.That(_helper.StudyTable!.GetRowCount(), Is.LessThanOrEqualTo(numberOfExpectedRows), "Only unique study data should appear in study table, there should be less unique studies than images (or equal)"); - Assert.That(_helper.StudyTable.GetRowCount(), Is.LessThanOrEqualTo(_helper.SeriesTable.GetRowCount()), "There should be less studies than series (or equal)"); - - //make sure that the substitution identifier (that replaces old the PatientId) is the correct substitution (FISHFISH)/ - Assert.That(_helper.StudyTable.GetDataTable().Rows.OfType().First()["PatientId"], Is.EqualTo("FISHFISH")); - - //The file size in the final table should be more than 0 - 
Assert.That((long)_helper.ImageTable.GetDataTable().Rows.OfType().First()["DicomFileSize"], Is.GreaterThan(0)); - }); - - dicomTagReaderHost.Stop("TestIsFinished"); - - mongoDbPopulatorHost.Stop("TestIsFinished"); - DropMongoTestDb(_globals.MongoDatabases!.DicomStoreOptions!.HostName!, _globals.MongoDatabases.DicomStoreOptions.Port); - - identifierMapperHost.Stop("TestIsFinished"); - - relationalMapperHost.Stop("Test end"); - } - - //Now do extraction - var extractorHost = new CohortExtractorHost(_globals, null, null); + Assert.That(dicomTagReaderHost.AccessionDirectoryMessageConsumer.NackCount, Is.EqualTo(0)); + Assert.That(identifierMapperHost.Consumer.NackCount, Is.EqualTo(0)); + Assert.That(mongoDbPopulatorHost.SeriesConsumer.NackCount, Is.EqualTo(0)); + Assert.That(mongoDbPopulatorHost.ImageConsumer.NackCount, Is.EqualTo(0)); + }); - extractorHost.Start(); - var extract = new ExtractionRequestMessage + try { - ExtractionJobIdentifier = Guid.NewGuid(), - ProjectNumber = "1234-5678", - ExtractionDirectory = "1234-5678_P1", - Modality = "MR", - KeyTag = "SeriesInstanceUID", - }; - - foreach (var row in _helper.ImageTable?.GetDataTable().Rows.Cast() ?? 
[]) + Thread.Sleep(TimeSpan.FromSeconds(10)); + TestTimelineAwaiter.Await(() => relationalMapperHost.Consumer!.AckCount >= numberOfExpectedRows, null, 30000, () => relationalMapperHost.Consumer!.DleErrors); //number of image files + logger.Info("\n### DicomRelationalMapper has processed its messages ###\n"); + } + finally { - var seriesId = (string)row["SeriesInstanceUID"]; - - if (!extract.ExtractionIdentifiers.Contains(seriesId)) - extract.ExtractionIdentifiers.Add(seriesId); + //find out what happens from the logging database + var rdmpLogging = new Rdmp.Core.Logging.LogManager(_helper.LoadMetadata!.GetDistinctLoggingDatabase()); + + //if error was reported during the dicom relational mapper run + foreach (var dli in rdmpLogging.GetArchivalDataLoadInfos(_helper.LoadMetadata.GetDistinctLoggingTask(), null, null)) + if (dli.StartTime > start) + foreach (var e in dli.Errors) + logger.Error($"{e.Date.TimeOfDay}:{e.Source}:{e.Description}"); } - tester.SendMessage(_globals.CohortExtractorOptions, extract); + Assert.Multiple(() => + { + Assert.That(_helper.ImageTable!.GetRowCount(), Is.EqualTo(numberOfExpectedRows), "All images should appear in the image table"); + Assert.That(_helper.SeriesTable!.GetRowCount(), Is.LessThanOrEqualTo(numberOfExpectedRows), "Only unique series data should appear in series table, there should be less unique series than images (or equal)"); + Assert.That(_helper.StudyTable!.GetRowCount(), Is.LessThanOrEqualTo(numberOfExpectedRows), "Only unique study data should appear in study table, there should be less unique studies than images (or equal)"); + Assert.That(_helper.StudyTable.GetRowCount(), Is.LessThanOrEqualTo(_helper.SeriesTable.GetRowCount()), "There should be less studies than series (or equal)"); + + //make sure that the substitution identifier (that replaces old the PatientId) is the correct substitution (FISHFISH)/ + Assert.That(_helper.StudyTable.GetDataTable().Rows.OfType().First()["PatientId"], Is.EqualTo("FISHFISH")); - 
//wait till extractor picked up the messages and dispatched the responses - TestTimelineAwaiter.Await(() => extractorHost.Consumer!.AckCount == 1); + //The file size in the final table should be more than 0 + Assert.That((long)_helper.ImageTable.GetDataTable().Rows.OfType().First()["DicomFileSize"], Is.GreaterThan(0)); + }); - extractorHost.Stop("TestIsFinished"); + dicomTagReaderHost.Stop("TestIsFinished"); - tester.Shutdown(); + mongoDbPopulatorHost.Stop("TestIsFinished"); + DropMongoTestDb(_globals.MongoDatabases!.DicomStoreOptions!.HostName!, _globals.MongoDatabases.DicomStoreOptions.Port); + identifierMapperHost.Stop("TestIsFinished"); - #endregion + relationalMapperHost.Stop("Test end"); } - private static void DropMongoTestDb(string mongoDbHostName, int mongoDbHostPort) + //Now do extraction + var extractorHost = new CohortExtractorHost(_globals, null, null); + + extractorHost.Start(); + + var extract = new ExtractionRequestMessage + { + ExtractionJobIdentifier = Guid.NewGuid(), + ProjectNumber = "1234-5678", + ExtractionDirectory = "1234-5678_P1", + Modality = "MR", + KeyTag = "SeriesInstanceUID", + }; + + foreach (var row in _helper.ImageTable?.GetDataTable().Rows.Cast() ?? 
[]) { - new MongoClient(new MongoClientSettings { Server = new MongoServerAddress(mongoDbHostName, mongoDbHostPort) }).DropDatabase(MongoTestDbName); + var seriesId = (string)row["SeriesInstanceUID"]; + + if (!extract.ExtractionIdentifiers.Contains(seriesId)) + extract.ExtractionIdentifiers.Add(seriesId); } - private class SwapForFixedValueTester : SwapIdentifiers - { - private readonly string _swapForString; + tester.SendMessage(_globals.CohortExtractorOptions, extract); + //wait till extractor picked up the messages and dispatched the responses + TestTimelineAwaiter.Await(() => extractorHost.Consumer!.AckCount == 1); - public SwapForFixedValueTester(string swapForString) - { - _swapForString = swapForString; - } + extractorHost.Stop("TestIsFinished"); + tester.Shutdown(); - public override void Setup(IMappingTableOptions mappingTableOptions) { } - public override string? GetSubstitutionFor(string toSwap, out string? reason) - { - reason = null; - Success++; - CacheHit++; - return _swapForString; - } + #endregion + } - public override void ClearCache() { } + private static void DropMongoTestDb(string mongoDbHostName, int mongoDbHostPort) + { + new MongoClient(new MongoClientSettings { Server = new MongoServerAddress(mongoDbHostName, mongoDbHostPort) }).DropDatabase(MongoTestDbName); + } - public override DiscoveredTable? GetGuidTableIfAny(IMappingTableOptions options) - { - return null; - } + private class SwapForFixedValueTester : SwapIdentifiers + { + private readonly string _swapForString; + + + public SwapForFixedValueTester(string swapForString) + { + _swapForString = swapForString; + } + + + public override void Setup(IMappingTableOptions mappingTableOptions) { } + + public override string? GetSubstitutionFor(string toSwap, out string? reason) + { + reason = null; + Success++; + CacheHit++; + return _swapForString; + } + + public override void ClearCache() { } + + public override DiscoveredTable? 
GetGuidTableIfAny(IMappingTableOptions options) + { + return null; } } } diff --git a/tests/SmiServices.IntegrationTests/Microservices/DicomRelationalMapper/RunMeFirstTests/RunMeFirstMongoServers.cs b/tests/SmiServices.IntegrationTests/Microservices/DicomRelationalMapper/RunMeFirstTests/RunMeFirstMongoServers.cs index c2395f2f1..65d5b02f3 100644 --- a/tests/SmiServices.IntegrationTests/Microservices/DicomRelationalMapper/RunMeFirstTests/RunMeFirstMongoServers.cs +++ b/tests/SmiServices.IntegrationTests/Microservices/DicomRelationalMapper/RunMeFirstTests/RunMeFirstMongoServers.cs @@ -3,25 +3,24 @@ using SmiServices.Common.Options; using System; -namespace SmiServices.IntegrationTests.Microservices.DicomRelationalMapper.RunMeFirstTests +namespace SmiServices.IntegrationTests.Microservices.DicomRelationalMapper.RunMeFirstTests; + +[Category("RunMeFirst")] +public class RunMeFirstMongoServers { - [Category("RunMeFirst")] - public class RunMeFirstMongoServers + [Test, RequiresMongoDb] + public void TestMongoAvailable() { - [Test, RequiresMongoDb] - public void TestMongoAvailable() - { - Assert.Pass(); - } + Assert.Pass(); + } - [Test, RequiresRabbit] - public void RabbitAvailable() - { - var options = new GlobalOptionsFactory().Load(nameof(RabbitAvailable)); - var rabbitOptions = options.RabbitOptions!; + [Test, RequiresRabbit] + public void RabbitAvailable() + { + var options = new GlobalOptionsFactory().Load(nameof(RabbitAvailable)); + var rabbitOptions = options.RabbitOptions!; - Assert.DoesNotThrow(() => _ = new RabbitMQBroker(rabbitOptions, nameof(RabbitAvailable)), $"Rabbit failed with the following configuration:{Environment.NewLine}{rabbitOptions}"); - } + Assert.DoesNotThrow(() => _ = new RabbitMQBroker(rabbitOptions, nameof(RabbitAvailable)), $"Rabbit failed with the following configuration:{Environment.NewLine}{rabbitOptions}"); } } diff --git a/tests/SmiServices.IntegrationTests/Microservices/DicomRelationalMapper/RunMeFirstTests/RunMeFirstRdmpTests.cs 
b/tests/SmiServices.IntegrationTests/Microservices/DicomRelationalMapper/RunMeFirstTests/RunMeFirstRdmpTests.cs index 9916a5e08..bbcdc066f 100644 --- a/tests/SmiServices.IntegrationTests/Microservices/DicomRelationalMapper/RunMeFirstTests/RunMeFirstRdmpTests.cs +++ b/tests/SmiServices.IntegrationTests/Microservices/DicomRelationalMapper/RunMeFirstTests/RunMeFirstRdmpTests.cs @@ -4,24 +4,23 @@ using System.IO; using Tests.Common; -namespace SmiServices.IntegrationTests.Microservices.DicomRelationalMapper.RunMeFirstTests +namespace SmiServices.IntegrationTests.Microservices.DicomRelationalMapper.RunMeFirstTests; + +[RequiresRelationalDb(DatabaseType.MicrosoftSQLServer)] +[Category("RunMeFirst")] +public class RunMeFirstRdmpTests : DatabaseTests { - [RequiresRelationalDb(DatabaseType.MicrosoftSQLServer)] - [Category("RunMeFirst")] - public class RunMeFirstRdmpTests : DatabaseTests + [Test] + public void PlatformDatabasesAvailable() { - [Test] - public void PlatformDatabasesAvailable() - { - var f = Path.Combine(TestContext.CurrentContext.TestDirectory, "TestDatabases.txt"); + var f = Path.Combine(TestContext.CurrentContext.TestDirectory, "TestDatabases.txt"); - if (!File.Exists(f)) - Assert.Fail("TestDatabases.txt was not found in the bin directory, check the project includes a reference to HIC.RDMP.Plugin.Tests nuget package and that the file is set to CopyAlways"); + if (!File.Exists(f)) + Assert.Fail("TestDatabases.txt was not found in the bin directory, check the project includes a reference to HIC.RDMP.Plugin.Tests nuget package and that the file is set to CopyAlways"); - if (CatalogueRepository is ITableRepository crtr && !crtr.DiscoveredServer.RespondsWithinTime(5, out _)) - Assert.Fail("Catalogue database was unreachable"); - if (DataExportRepository is ITableRepository dertr && !dertr.DiscoveredServer.RespondsWithinTime(5, out _)) - Assert.Fail("DataExport database was unreachable"); - } + if (CatalogueRepository is ITableRepository crtr && 
!crtr.DiscoveredServer.RespondsWithinTime(5, out _)) + Assert.Fail("Catalogue database was unreachable"); + if (DataExportRepository is ITableRepository dertr && !dertr.DiscoveredServer.RespondsWithinTime(5, out _)) + Assert.Fail("DataExport database was unreachable"); } } diff --git a/tests/SmiServices.IntegrationTests/Microservices/DicomTagReader/DicomTagReaderHostTests.cs b/tests/SmiServices.IntegrationTests/Microservices/DicomTagReader/DicomTagReaderHostTests.cs index 275026f5a..2cf8ffb1d 100644 --- a/tests/SmiServices.IntegrationTests/Microservices/DicomTagReader/DicomTagReaderHostTests.cs +++ b/tests/SmiServices.IntegrationTests/Microservices/DicomTagReader/DicomTagReaderHostTests.cs @@ -11,109 +11,108 @@ using System.Threading; -namespace SmiServices.IntegrationTests.Microservices.DicomTagReader +namespace SmiServices.IntegrationTests.Microservices.DicomTagReader; + +[TestFixture, RequiresRabbit] +public class DicomTagReaderHostTests { - [TestFixture, RequiresRabbit] - public class DicomTagReaderHostTests - { - private readonly DicomTagReaderTestHelper _helper = new(); + private readonly DicomTagReaderTestHelper _helper = new(); - [OneTimeSetUp] - public void OneTimeSetUp() - { - _helper.SetUpSuite(); - } + [OneTimeSetUp] + public void OneTimeSetUp() + { + _helper.SetUpSuite(); + } - [OneTimeTearDown] - public void OneTimeTearDown() - { - _helper.Dispose(); - } + [OneTimeTearDown] + public void OneTimeTearDown() + { + _helper.Dispose(); + } - [SetUp] - public void SetUp() - { - _helper.ResetSuite(); - } + [SetUp] + public void SetUp() + { + _helper.ResetSuite(); + } - [TearDown] - public void TearDown() - { + [TearDown] + public void TearDown() + { - } + } - /// - /// Tests basic operation of the tag reader when receiving a single message - /// - [Test] - public void TestBasicOperation() - { - _helper.Options.FileSystemOptions!.FileSystemRoot = _helper.TestDir.FullName; - _helper.TestAccessionDirectoryMessage.DirectoryPath = _helper.TestDir.FullName; + /// + 
/// Tests basic operation of the tag reader when receiving a single message + /// + [Test] + public void TestBasicOperation() + { + _helper.Options.FileSystemOptions!.FileSystemRoot = _helper.TestDir.FullName; + _helper.TestAccessionDirectoryMessage.DirectoryPath = _helper.TestDir.FullName; - var tester = new MicroserviceTester(_helper.Options.RabbitOptions!, _helper.AccessionConsumerOptions); + var tester = new MicroserviceTester(_helper.Options.RabbitOptions!, _helper.AccessionConsumerOptions); - var host = new DicomTagReaderHost(_helper.Options); - host.Start(); + var host = new DicomTagReaderHost(_helper.Options); + host.Start(); - tester.SendMessage(_helper.AccessionConsumerOptions, new MessageHeader(), _helper.TestAccessionDirectoryMessage); + tester.SendMessage(_helper.AccessionConsumerOptions, new MessageHeader(), _helper.TestAccessionDirectoryMessage); - var timeout = 30000; - const int stepSize = 500; + var timeout = 30000; + const int stepSize = 500; - while (!_helper.CheckQueues(1, 1) && timeout > 0) - { - timeout -= 500; - Thread.Sleep(stepSize); - } + while (!_helper.CheckQueues(1, 1) && timeout > 0) + { + timeout -= 500; + Thread.Sleep(stepSize); + } - host.Stop("Test end"); - tester.Dispose(); + host.Stop("Test end"); + tester.Dispose(); - if (timeout <= 0) - Assert.Fail("Failed to process expected number of messages within the timeout"); - } + if (timeout <= 0) + Assert.Fail("Failed to process expected number of messages within the timeout"); + } - [Test] - public void TestTagReader_SingleFileMode() - { - var dirRoot = new DirectoryInfo(Path.Combine(TestContext.CurrentContext.WorkDirectory, "TestTagReader_SingleFileMode")); + [Test] + public void TestTagReader_SingleFileMode() + { + var dirRoot = new DirectoryInfo(Path.Combine(TestContext.CurrentContext.WorkDirectory, "TestTagReader_SingleFileMode")); - if (dirRoot.Exists) - dirRoot.Delete(true); + if (dirRoot.Exists) + dirRoot.Delete(true); - dirRoot.Create(); - var julyFolder = 
dirRoot.CreateSubdirectory("July"); + dirRoot.Create(); + var julyFolder = dirRoot.CreateSubdirectory("July"); - _helper.Options.FileSystemOptions!.FileSystemRoot = dirRoot.FullName; + _helper.Options.FileSystemOptions!.FileSystemRoot = dirRoot.FullName; - var host = new DicomTagReaderHost(_helper.Options); + var host = new DicomTagReaderHost(_helper.Options); - var r = new Random(5); - var generator = new DicomDataGenerator(r, julyFolder.FullName, "CT"); - var files = generator.GenerateImageFiles(10, r).ToArray(); + var r = new Random(5); + var generator = new DicomDataGenerator(r, julyFolder.FullName, "CT"); + var files = generator.GenerateImageFiles(10, r).ToArray(); - host.AccessionDirectoryMessageConsumer.RunSingleFile(files[2]); + host.AccessionDirectoryMessageConsumer.RunSingleFile(files[2]); - Assert.Multiple(() => - { - Assert.That(_helper.ImageCount, Is.EqualTo(1)); - Assert.That(_helper.SeriesCount, Is.EqualTo(1)); - }); + Assert.Multiple(() => + { + Assert.That(_helper.ImageCount, Is.EqualTo(1)); + Assert.That(_helper.SeriesCount, Is.EqualTo(1)); + }); - var julyZip = Path.Combine(dirRoot.FullName, "july.zip"); + var julyZip = Path.Combine(dirRoot.FullName, "july.zip"); - ZipFile.CreateFromDirectory(julyFolder.FullName, julyZip); + ZipFile.CreateFromDirectory(julyFolder.FullName, julyZip); - host.AccessionDirectoryMessageConsumer.RunSingleFile(new FileInfo(julyZip)); + host.AccessionDirectoryMessageConsumer.RunSingleFile(new FileInfo(julyZip)); - Assert.Multiple(() => - { - Assert.That(_helper.ImageCount, Is.EqualTo(11)); - Assert.That(_helper.SeriesCount, Is.GreaterThanOrEqualTo(1)); - }); - } + Assert.Multiple(() => + { + Assert.That(_helper.ImageCount, Is.EqualTo(11)); + Assert.That(_helper.SeriesCount, Is.GreaterThanOrEqualTo(1)); + }); } } diff --git a/tests/SmiServices.IntegrationTests/Microservices/DicomTagReader/Messaging/DicomTagReaderConsumerTests.cs 
b/tests/SmiServices.IntegrationTests/Microservices/DicomTagReader/Messaging/DicomTagReaderConsumerTests.cs index 215615367..2a20cb866 100644 --- a/tests/SmiServices.IntegrationTests/Microservices/DicomTagReader/Messaging/DicomTagReaderConsumerTests.cs +++ b/tests/SmiServices.IntegrationTests/Microservices/DicomTagReader/Messaging/DicomTagReaderConsumerTests.cs @@ -9,94 +9,93 @@ using System.IO.Abstractions; using System.IO.Abstractions.TestingHelpers; -namespace SmiServices.IntegrationTests.Microservices.DicomTagReader.Messaging +namespace SmiServices.IntegrationTests.Microservices.DicomTagReader.Messaging; + +[TestFixture, RequiresRabbit] +public class DicomTagReaderConsumerTests { - [TestFixture, RequiresRabbit] - public class DicomTagReaderConsumerTests + private readonly DicomTagReaderTestHelper _helper = new(); + + [OneTimeSetUp] + public void OneTimeSetUp() { - private readonly DicomTagReaderTestHelper _helper = new(); + _helper.SetUpSuite(); + } - [OneTimeSetUp] - public void OneTimeSetUp() - { - _helper.SetUpSuite(); - } + [OneTimeTearDown] + public void OneTimeTearDown() + { + _helper.Dispose(); + } - [OneTimeTearDown] - public void OneTimeTearDown() - { - _helper.Dispose(); - } + [SetUp] + public void SetUp() + { + _helper.ResetSuite(); + } + + [TearDown] + public void TearDown() { } - [SetUp] - public void SetUp() - { - _helper.ResetSuite(); - } - [TearDown] - public void TearDown() { } + private TagReaderBase GetMockTagReader(IFileSystem? fileSystem = null) + { + fileSystem ??= _helper.MockFileSystem; + return new SerialTagReader(_helper.Options.DicomTagReaderOptions!, _helper.Options.FileSystemOptions!, _helper.TestSeriesModel.Object, _helper.TestImageModel.Object, fileSystem); + } - private TagReaderBase GetMockTagReader(IFileSystem? 
fileSystem = null) - { - fileSystem ??= _helper.MockFileSystem; + private void CheckAckNackCounts(DicomTagReaderConsumer consumer, int desiredAckCount, int desiredNackCount) + { + var fatalCalled = false; + consumer.OnFatal += (sender, args) => fatalCalled = true; - return new SerialTagReader(_helper.Options.DicomTagReaderOptions!, _helper.Options.FileSystemOptions!, _helper.TestSeriesModel.Object, _helper.TestImageModel.Object, fileSystem); - } + consumer.ProcessMessage(new MessageHeader(), _helper.TestAccessionDirectoryMessage, 1); - private void CheckAckNackCounts(DicomTagReaderConsumer consumer, int desiredAckCount, int desiredNackCount) - { - var fatalCalled = false; - consumer.OnFatal += (sender, args) => fatalCalled = true; - - consumer.ProcessMessage(new MessageHeader(), _helper.TestAccessionDirectoryMessage, 1); - - Assert.Multiple(() => - { - Assert.That(consumer.AckCount, Is.EqualTo(desiredAckCount)); - Assert.That(consumer.NackCount, Is.EqualTo(desiredNackCount)); - Assert.That(fatalCalled, Is.False); - }); - } - - /// - /// Tests that a valid message is acknowledged - /// - [Test] - public void TestValidMessageAck() + Assert.Multiple(() => { - _helper.TestAccessionDirectoryMessage.DirectoryPath = _helper.TestDir.FullName; - _helper.Options.FileSystemOptions!.FileSystemRoot = _helper.TestDir.FullName; + Assert.That(consumer.AckCount, Is.EqualTo(desiredAckCount)); + Assert.That(consumer.NackCount, Is.EqualTo(desiredNackCount)); + Assert.That(fatalCalled, Is.False); + }); + } - _helper.TestImageModel - .Setup(x => x.SendMessage(It.IsAny(), It.IsAny(), It.IsAny())) - .Returns(new MessageHeader()); + /// + /// Tests that a valid message is acknowledged + /// + [Test] + public void TestValidMessageAck() + { + _helper.TestAccessionDirectoryMessage.DirectoryPath = _helper.TestDir.FullName; + _helper.Options.FileSystemOptions!.FileSystemRoot = _helper.TestDir.FullName; - _helper.TestSeriesModel - .Setup(x => x.SendMessage(It.IsAny(), It.IsAny(), It.IsAny())) - 
.Returns(new MessageHeader()); + _helper.TestImageModel + .Setup(x => x.SendMessage(It.IsAny(), It.IsAny(), It.IsAny())) + .Returns(new MessageHeader()); - CheckAckNackCounts(new DicomTagReaderConsumer(GetMockTagReader(new FileSystem()), Mock.Of()), 1, 0); - } + _helper.TestSeriesModel + .Setup(x => x.SendMessage(It.IsAny(), It.IsAny(), It.IsAny())) + .Returns(new MessageHeader()); - /// - /// Tests that messages are NACKd if an exception is thrown - /// - [Test] - public void TestInvalidMessageNack() - { - _helper.MockFileSystem.AddFile(@"C:\Temp\invalidDicomFile.dcm", new MockFileData([0x12, 0x34, 0x56, 0x78])); + CheckAckNackCounts(new DicomTagReaderConsumer(GetMockTagReader(new FileSystem()), Mock.Of()), 1, 0); + } + + /// + /// Tests that messages are NACKd if an exception is thrown + /// + [Test] + public void TestInvalidMessageNack() + { + _helper.MockFileSystem.AddFile(@"C:\Temp\invalidDicomFile.dcm", new MockFileData([0x12, 0x34, 0x56, 0x78])); - _helper.TestImageModel - .Setup(x => x.SendMessage(It.IsAny(), It.IsAny(), It.IsAny())) - .Returns(new MessageHeader()); + _helper.TestImageModel + .Setup(x => x.SendMessage(It.IsAny(), It.IsAny(), It.IsAny())) + .Returns(new MessageHeader()); - _helper.TestSeriesModel - .Setup(x => x.SendMessage(It.IsAny(), It.IsAny(), It.IsAny())) - .Returns(new MessageHeader()); + _helper.TestSeriesModel + .Setup(x => x.SendMessage(It.IsAny(), It.IsAny(), It.IsAny())) + .Returns(new MessageHeader()); - CheckAckNackCounts(new DicomTagReaderConsumer(GetMockTagReader(), Mock.Of()), 0, 1); - } + CheckAckNackCounts(new DicomTagReaderConsumer(GetMockTagReader(), Mock.Of()), 0, 1); } } diff --git a/tests/SmiServices.IntegrationTests/Microservices/DicomTagReader/TagReaderTests.cs b/tests/SmiServices.IntegrationTests/Microservices/DicomTagReader/TagReaderTests.cs index b6c95a229..7a5668654 100644 --- a/tests/SmiServices.IntegrationTests/Microservices/DicomTagReader/TagReaderTests.cs +++ 
b/tests/SmiServices.IntegrationTests/Microservices/DicomTagReader/TagReaderTests.cs @@ -11,232 +11,231 @@ using System.IO.Compression; using System.Linq; -namespace SmiServices.IntegrationTests.Microservices.DicomTagReader +namespace SmiServices.IntegrationTests.Microservices.DicomTagReader; + +//TODO Some of these can be tested without RabbitMQ +[TestFixture, RequiresRabbit] +public class TagReaderTests { - //TODO Some of these can be tested without RabbitMQ - [TestFixture, RequiresRabbit] - public class TagReaderTests + private readonly DicomTagReaderTestHelper _helper = new(); + + [OneTimeSetUp] + public void OneTimeSetUp() { - private readonly DicomTagReaderTestHelper _helper = new(); + _helper.SetUpSuite(); + } - [OneTimeSetUp] - public void OneTimeSetUp() - { - _helper.SetUpSuite(); - } + [OneTimeTearDown] + public void OneTimeTearDown() + { + _helper.Dispose(); + } - [OneTimeTearDown] - public void OneTimeTearDown() - { - _helper.Dispose(); - } + [SetUp] + public void SetUp() + { + _helper.ResetSuite(); + } - [SetUp] - public void SetUp() - { - _helper.ResetSuite(); - } + [TearDown] + public void TearDown() + { - [TearDown] - public void TearDown() - { + } - } + /// + /// Test that the TagReader behaves properly depending on the NackIfAnyFileErrors option + /// + /// + [Test] + [TestCase(true)] + [TestCase(false)] + public void TestNackIfAnyFileErrorsOption(bool nackIfAnyFileErrors) + { + var messagesSent = 0; - /// - /// Test that the TagReader behaves properly depending on the NackIfAnyFileErrors option - /// - /// - [Test] - [TestCase(true)] - [TestCase(false)] - public void TestNackIfAnyFileErrorsOption(bool nackIfAnyFileErrors) - { - var messagesSent = 0; + var fi = new FileInfo(Path.Combine(_helper.TestDir.FullName, "InvalidFile.dcm")); + File.WriteAllBytes(fi.FullName, [0x12, 0x34, 0x56, 0x78]); - var fi = new FileInfo(Path.Combine(_helper.TestDir.FullName, "InvalidFile.dcm")); - File.WriteAllBytes(fi.FullName, [0x12, 0x34, 0x56, 0x78]); + 
Assert.That(_helper.TestDir.EnumerateFiles("*.dcm").Count(), Is.EqualTo(2)); - Assert.That(_helper.TestDir.EnumerateFiles("*.dcm").Count(), Is.EqualTo(2)); + _helper.Options.DicomTagReaderOptions!.NackIfAnyFileErrors = nackIfAnyFileErrors; + _helper.Options.FileSystemOptions!.FileSystemRoot = _helper.TestDir.FullName; + _helper.TestAccessionDirectoryMessage.DirectoryPath = _helper.TestDir.FullName; - _helper.Options.DicomTagReaderOptions!.NackIfAnyFileErrors = nackIfAnyFileErrors; - _helper.Options.FileSystemOptions!.FileSystemRoot = _helper.TestDir.FullName; - _helper.TestAccessionDirectoryMessage.DirectoryPath = _helper.TestDir.FullName; - - _helper.TestImageModel - .Setup(x => x.SendMessage(It.IsAny(), It.IsAny(), It.IsAny())) - .Returns(new MessageHeader()); - - _helper.TestSeriesModel - .Setup(x => x.SendMessage(It.IsAny(), It.IsAny(), It.IsAny())) - .Returns(new MessageHeader()) - .Callback(() => ++messagesSent); - - var tagReader = new SerialTagReader(_helper.Options.DicomTagReaderOptions, _helper.Options.FileSystemOptions, _helper.TestSeriesModel.Object, _helper.TestImageModel.Object, new FileSystem()); - - if (nackIfAnyFileErrors) - { - Assert.Throws(() => tagReader.ReadTags(new MessageHeader(), _helper.TestAccessionDirectoryMessage)); - Assert.That(messagesSent, Is.EqualTo(0)); - } - else - { - tagReader.ReadTags(new MessageHeader(), _helper.TestAccessionDirectoryMessage); - Assert.That(messagesSent, Is.EqualTo(1)); - } - } + _helper.TestImageModel + .Setup(x => x.SendMessage(It.IsAny(), It.IsAny(), It.IsAny())) + .Returns(new MessageHeader()); - /// - /// Tests that a directory path to search outside of the FileSystemRoot is rejected - /// - [Test] - public void TestPathNotBelowRootThrowsException() - { - _helper.Options.FileSystemOptions!.FileSystemRoot = "C:\\Temp"; - _helper.TestAccessionDirectoryMessage.DirectoryPath = _helper.TestDir.FullName; + _helper.TestSeriesModel + .Setup(x => x.SendMessage(It.IsAny(), It.IsAny(), It.IsAny())) + .Returns(new 
MessageHeader()) + .Callback(() => ++messagesSent); - var tagReader = new SerialTagReader(_helper.Options.DicomTagReaderOptions!, _helper.Options.FileSystemOptions, _helper.TestSeriesModel.Object, _helper.TestImageModel.Object, new FileSystem()); + var tagReader = new SerialTagReader(_helper.Options.DicomTagReaderOptions, _helper.Options.FileSystemOptions, _helper.TestSeriesModel.Object, _helper.TestImageModel.Object, new FileSystem()); - Assert.Throws(() => tagReader.ReadTags(null, _helper.TestAccessionDirectoryMessage)); + if (nackIfAnyFileErrors) + { + Assert.Throws(() => tagReader.ReadTags(new MessageHeader(), _helper.TestAccessionDirectoryMessage)); + Assert.That(messagesSent, Is.EqualTo(0)); } - - /// - /// Tests that a directory containing no dicom files is rejected - /// - [Test] - public void TestEmptyDirectoryThrowsException() + else { - foreach (FileInfo file in _helper.TestDir.EnumerateFiles("*.dcm")) - file.Delete(); + tagReader.ReadTags(new MessageHeader(), _helper.TestAccessionDirectoryMessage); + Assert.That(messagesSent, Is.EqualTo(1)); + } + } - Assert.That(!_helper.TestDir.EnumerateFiles("*.dcm").Any(), Is.True); + /// + /// Tests that a directory path to search outside of the FileSystemRoot is rejected + /// + [Test] + public void TestPathNotBelowRootThrowsException() + { + _helper.Options.FileSystemOptions!.FileSystemRoot = "C:\\Temp"; + _helper.TestAccessionDirectoryMessage.DirectoryPath = _helper.TestDir.FullName; - _helper.Options.FileSystemOptions!.FileSystemRoot = _helper.TestDir.FullName; - _helper.TestAccessionDirectoryMessage.DirectoryPath = _helper.TestDir.FullName; + var tagReader = new SerialTagReader(_helper.Options.DicomTagReaderOptions!, _helper.Options.FileSystemOptions, _helper.TestSeriesModel.Object, _helper.TestImageModel.Object, new FileSystem()); - var tagReader = new SerialTagReader(_helper.Options.DicomTagReaderOptions!, _helper.Options.FileSystemOptions, _helper.TestSeriesModel.Object, _helper.TestImageModel.Object, new 
FileSystem()); + Assert.Throws(() => tagReader.ReadTags(null, _helper.TestAccessionDirectoryMessage)); + } - Assert.Throws(() => tagReader.ReadTags(null, _helper.TestAccessionDirectoryMessage)); - } + /// + /// Tests that a directory containing no dicom files is rejected + /// + [Test] + public void TestEmptyDirectoryThrowsException() + { + foreach (FileInfo file in _helper.TestDir.EnumerateFiles("*.dcm")) + file.Delete(); - /// - /// Tests that the field ImagesInSeries of the SeriesMessage is set properly - /// - [Test] - public void TestSeriesMessageImagesInSeriesCorrect() - { - _helper.Options.FileSystemOptions!.FileSystemRoot = _helper.TestDir.FullName; - _helper.TestAccessionDirectoryMessage.DirectoryPath = _helper.TestDir.FullName; + Assert.That(!_helper.TestDir.EnumerateFiles("*.dcm").Any(), Is.True); - File.Copy($"{_helper.TestDir.FullName}/MyTestFile.dcm", $"{_helper.TestDir.FullName}/MyTestFile2.dcm"); - Assert.That(_helper.TestDir.EnumerateFiles("*.dcm").Count(), Is.EqualTo(2)); + _helper.Options.FileSystemOptions!.FileSystemRoot = _helper.TestDir.FullName; + _helper.TestAccessionDirectoryMessage.DirectoryPath = _helper.TestDir.FullName; - IMessage? 
message = null; + var tagReader = new SerialTagReader(_helper.Options.DicomTagReaderOptions!, _helper.Options.FileSystemOptions, _helper.TestSeriesModel.Object, _helper.TestImageModel.Object, new FileSystem()); - _helper.TestImageModel - .Setup(x => x.SendMessage(It.IsAny(), It.IsAny(), It.IsAny())) - .Returns(new MessageHeader()); + Assert.Throws(() => tagReader.ReadTags(null, _helper.TestAccessionDirectoryMessage)); + } - _helper.TestSeriesModel - .Setup(x => x.SendMessage(It.IsAny(), It.IsAny(), It.IsAny())) - .Callback((m, h, s) => message = m) - .Returns(new MessageHeader()); + /// + /// Tests that the field ImagesInSeries of the SeriesMessage is set properly + /// + [Test] + public void TestSeriesMessageImagesInSeriesCorrect() + { + _helper.Options.FileSystemOptions!.FileSystemRoot = _helper.TestDir.FullName; + _helper.TestAccessionDirectoryMessage.DirectoryPath = _helper.TestDir.FullName; - var tagReader = new SerialTagReader(_helper.Options.DicomTagReaderOptions!, _helper.Options.FileSystemOptions, _helper.TestSeriesModel.Object, _helper.TestImageModel.Object, new FileSystem()); - tagReader.ReadTags(new MessageHeader(), _helper.TestAccessionDirectoryMessage); + File.Copy($"{_helper.TestDir.FullName}/MyTestFile.dcm", $"{_helper.TestDir.FullName}/MyTestFile2.dcm"); + Assert.That(_helper.TestDir.EnumerateFiles("*.dcm").Count(), Is.EqualTo(2)); - Assert.That(message, Is.Not.EqualTo(null)); + IMessage? 
message = null; - var seriesMessage = message as SeriesMessage; - Assert.That(seriesMessage, Is.Not.EqualTo(null)); - Assert.That(seriesMessage!.ImagesInSeries, Is.EqualTo(2)); - } + _helper.TestImageModel + .Setup(x => x.SendMessage(It.IsAny(), It.IsAny(), It.IsAny())) + .Returns(new MessageHeader()); - /// - /// Tests that we can read a mixture of zip files and dcm files using - /// - [Test] - public void TestSeriesMessageImagesInSeriesCorrect_WhenUsingZips() - { - _helper.Options.FileSystemOptions!.FileSystemRoot = _helper.TestDir.FullName; - _helper.TestAccessionDirectoryMessage.DirectoryPath = _helper.TestDir.FullName; + _helper.TestSeriesModel + .Setup(x => x.SendMessage(It.IsAny(), It.IsAny(), It.IsAny())) + .Callback((m, h, s) => message = m) + .Returns(new MessageHeader()); - File.Copy($"{_helper.TestDir.FullName}/MyTestFile.dcm", $"{_helper.TestDir.FullName}/MyTestFile2.dcm"); - Assert.That(_helper.TestDir.EnumerateFiles("*.dcm").Count(), Is.EqualTo(2)); + var tagReader = new SerialTagReader(_helper.Options.DicomTagReaderOptions!, _helper.Options.FileSystemOptions, _helper.TestSeriesModel.Object, _helper.TestImageModel.Object, new FileSystem()); + tagReader.ReadTags(new MessageHeader(), _helper.TestAccessionDirectoryMessage); + + Assert.That(message, Is.Not.EqualTo(null)); - // Where we want to put it - var zipFilePath = Path.Combine(_helper.TestDir.FullName, "my.zip"); + var seriesMessage = message as SeriesMessage; + Assert.That(seriesMessage, Is.Not.EqualTo(null)); + Assert.That(seriesMessage!.ImagesInSeries, Is.EqualTo(2)); + } - //create the zip file in a temporary directory outside of the working directory to avoid file access errors - var tempDir = _helper.TestDir.Parent!.CreateSubdirectory("temppp"); - var tempPath = Path.Combine(tempDir.FullName, "my.zip"); + /// + /// Tests that we can read a mixture of zip files and dcm files using + /// + [Test] + public void TestSeriesMessageImagesInSeriesCorrect_WhenUsingZips() + { + 
_helper.Options.FileSystemOptions!.FileSystemRoot = _helper.TestDir.FullName; + _helper.TestAccessionDirectoryMessage.DirectoryPath = _helper.TestDir.FullName; - //zip everything in the working dir to the temp path zip file - ZipFile.CreateFromDirectory(_helper.TestDir.FullName, tempPath); + File.Copy($"{_helper.TestDir.FullName}/MyTestFile.dcm", $"{_helper.TestDir.FullName}/MyTestFile2.dcm"); + Assert.That(_helper.TestDir.EnumerateFiles("*.dcm").Count(), Is.EqualTo(2)); - //then move the zip file where we actually want it (in the working path) - File.Move(tempPath, zipFilePath); + // Where we want to put it + var zipFilePath = Path.Combine(_helper.TestDir.FullName, "my.zip"); - Assert.Multiple(() => - { - Assert.That(_helper.TestDir.EnumerateFiles("*.dcm").Count(), Is.EqualTo(2)); - Assert.That(_helper.TestDir.EnumerateFiles("*.zip").Count(), Is.EqualTo(1)); - }); + //create the zip file in a temporary directory outside of the working directory to avoid file access errors + var tempDir = _helper.TestDir.Parent!.CreateSubdirectory("temppp"); + var tempPath = Path.Combine(tempDir.FullName, "my.zip"); - IMessage? 
message = null; - List fileImages = []; + //zip everything in the working dir to the temp path zip file + ZipFile.CreateFromDirectory(_helper.TestDir.FullName, tempPath); - _helper.TestImageModel - .Setup(x => x.SendMessage(It.IsAny(), It.IsAny(), It.IsAny())) - .Callback((m, h, s) => fileImages.Add(m)) - .Returns(new MessageHeader()); + //then move the zip file where we actually want it (in the working path) + File.Move(tempPath, zipFilePath); - _helper.TestSeriesModel - .Setup(x => x.SendMessage(It.IsAny(), It.IsAny(), It.IsAny())) - .Callback((m, h, s) => message = m) - .Returns(new MessageHeader()); + Assert.Multiple(() => + { + Assert.That(_helper.TestDir.EnumerateFiles("*.dcm").Count(), Is.EqualTo(2)); + Assert.That(_helper.TestDir.EnumerateFiles("*.zip").Count(), Is.EqualTo(1)); + }); - var tagReader = new SerialTagReader(_helper.Options.DicomTagReaderOptions!, _helper.Options.FileSystemOptions, _helper.TestSeriesModel.Object, _helper.TestImageModel.Object, new FileSystem()); - tagReader.ReadTags(new MessageHeader(), _helper.TestAccessionDirectoryMessage); + IMessage? 
message = null; + List fileImages = []; - Assert.That(message, Is.Not.EqualTo(null)); + _helper.TestImageModel + .Setup(x => x.SendMessage(It.IsAny(), It.IsAny(), It.IsAny())) + .Callback((m, h, s) => fileImages.Add(m)) + .Returns(new MessageHeader()); - var seriesMessage = message as SeriesMessage; - Assert.That(seriesMessage, Is.Not.EqualTo(null)); + _helper.TestSeriesModel + .Setup(x => x.SendMessage(It.IsAny(), It.IsAny(), It.IsAny())) + .Callback((m, h, s) => message = m) + .Returns(new MessageHeader()); - Assert.Multiple(() => - { - Assert.That(seriesMessage!.ImagesInSeries, Is.EqualTo(4), "Expected 4, 2 in the zip archive and 2 in the root"); + var tagReader = new SerialTagReader(_helper.Options.DicomTagReaderOptions!, _helper.Options.FileSystemOptions, _helper.TestSeriesModel.Object, _helper.TestImageModel.Object, new FileSystem()); + tagReader.ReadTags(new MessageHeader(), _helper.TestAccessionDirectoryMessage); - Assert.That(fileImages, Has.Count.EqualTo(4), "Expected 4 file messages to be sent and recorded by TestImageModel Callback"); - }); + Assert.That(message, Is.Not.EqualTo(null)); - Assert.That(fileImages.Select(m => ((DicomFileMessage)m).DicomFilePath).ToArray(), Does.Contain("MyTestFile.dcm")); - Assert.That(fileImages.Select(m => ((DicomFileMessage)m).DicomFilePath).ToArray(), Does.Contain("MyTestFile2.dcm")); - Assert.That(fileImages.Select(m => ((DicomFileMessage)m).DicomFilePath).ToArray(), Does.Contain("my.zip!MyTestFile.dcm")); - Assert.That(fileImages.Select(m => ((DicomFileMessage)m).DicomFilePath).ToArray(), Does.Contain("my.zip!MyTestFile2.dcm")); - } + var seriesMessage = message as SeriesMessage; + Assert.That(seriesMessage, Is.Not.EqualTo(null)); - /// - /// Tests that the correct exception is thrown if we try and open a corrupt dicom file - /// - [Test] - public void TestInvalidFileThrowsApplicationException() + Assert.Multiple(() => { - _helper.Options.DicomTagReaderOptions!.NackIfAnyFileErrors = true; - 
_helper.Options.FileSystemOptions!.FileSystemRoot = _helper.TestDir.FullName; - _helper.TestAccessionDirectoryMessage.DirectoryPath = _helper.TestDir.FullName; + Assert.That(seriesMessage!.ImagesInSeries, Is.EqualTo(4), "Expected 4, 2 in the zip archive and 2 in the root"); - var fi = new FileInfo(Path.Combine(_helper.TestDir.FullName, "InvalidFile.dcm")); - File.WriteAllBytes(fi.FullName, [0x12, 0x34, 0x56, 0x78]); + Assert.That(fileImages, Has.Count.EqualTo(4), "Expected 4 file messages to be sent and recorded by TestImageModel Callback"); + }); - // One valid, one invalid - Assert.That(_helper.TestDir.EnumerateFiles("*.dcm").Count(), Is.EqualTo(2)); + Assert.That(fileImages.Select(m => ((DicomFileMessage)m).DicomFilePath).ToArray(), Does.Contain("MyTestFile.dcm")); + Assert.That(fileImages.Select(m => ((DicomFileMessage)m).DicomFilePath).ToArray(), Does.Contain("MyTestFile2.dcm")); + Assert.That(fileImages.Select(m => ((DicomFileMessage)m).DicomFilePath).ToArray(), Does.Contain("my.zip!MyTestFile.dcm")); + Assert.That(fileImages.Select(m => ((DicomFileMessage)m).DicomFilePath).ToArray(), Does.Contain("my.zip!MyTestFile2.dcm")); + } - var tagReader = new SerialTagReader(_helper.Options.DicomTagReaderOptions, _helper.Options.FileSystemOptions, _helper.TestSeriesModel.Object, _helper.TestImageModel.Object, new FileSystem()); + /// + /// Tests that the correct exception is thrown if we try and open a corrupt dicom file + /// + [Test] + public void TestInvalidFileThrowsApplicationException() + { + _helper.Options.DicomTagReaderOptions!.NackIfAnyFileErrors = true; + _helper.Options.FileSystemOptions!.FileSystemRoot = _helper.TestDir.FullName; + _helper.TestAccessionDirectoryMessage.DirectoryPath = _helper.TestDir.FullName; - Assert.Throws(() => tagReader.ReadTags(null, _helper.TestAccessionDirectoryMessage)); - } + var fi = new FileInfo(Path.Combine(_helper.TestDir.FullName, "InvalidFile.dcm")); + File.WriteAllBytes(fi.FullName, [0x12, 0x34, 0x56, 0x78]); + + // One 
valid, one invalid + Assert.That(_helper.TestDir.EnumerateFiles("*.dcm").Count(), Is.EqualTo(2)); + + var tagReader = new SerialTagReader(_helper.Options.DicomTagReaderOptions, _helper.Options.FileSystemOptions, _helper.TestSeriesModel.Object, _helper.TestImageModel.Object, new FileSystem()); + + Assert.Throws(() => tagReader.ReadTags(null, _helper.TestAccessionDirectoryMessage)); } } diff --git a/tests/SmiServices.IntegrationTests/Microservices/FileCopier/FileCopierHostTest.cs b/tests/SmiServices.IntegrationTests/Microservices/FileCopier/FileCopierHostTest.cs index 5cf75a217..9d6dd0dcf 100644 --- a/tests/SmiServices.IntegrationTests/Microservices/FileCopier/FileCopierHostTest.cs +++ b/tests/SmiServices.IntegrationTests/Microservices/FileCopier/FileCopierHostTest.cs @@ -11,83 +11,82 @@ using System.IO.Abstractions.TestingHelpers; using System.Text; -namespace SmiServices.IntegrationTests.Microservices.FileCopier +namespace SmiServices.IntegrationTests.Microservices.FileCopier; + +[RequiresRabbit] +public class FileCopierHostTest { - [RequiresRabbit] - public class FileCopierHostTest + #region Fixture Methods + + [OneTimeSetUp] + public void OneTimeSetUp() { - #region Fixture Methods + } - [OneTimeSetUp] - public void OneTimeSetUp() - { - } + [OneTimeTearDown] + public void OneTimeTearDown() { } + + #endregion - [OneTimeTearDown] - public void OneTimeTearDown() { } + #region Test Methods - #endregion + [SetUp] + public void SetUp() { } - #region Test Methods + [TearDown] + public void TearDown() { } - [SetUp] - public void SetUp() { } + #endregion - [TearDown] - public void TearDown() { } + #region Tests - #endregion + [Test] + public void Test_FileCopierHost_HappyPath() + { + var globals = new GlobalOptionsFactory().Load(nameof(Test_FileCopierHost_HappyPath)); + globals.FileSystemOptions!.FileSystemRoot = "root"; + globals.FileSystemOptions.ExtractRoot = "exroot"; + + using var tester = new MicroserviceTester(globals.RabbitOptions!, globals.FileCopierOptions!); + + 
var outputQueueName = globals.FileCopierOptions!.CopyStatusProducerOptions!.ExchangeName!.Replace("Exchange", "Queue"); + tester.CreateExchange( + globals.FileCopierOptions.CopyStatusProducerOptions.ExchangeName, + outputQueueName, + false, + globals.FileCopierOptions.NoVerifyRoutingKey!); - #region Tests + var mockFileSystem = new MockFileSystem(); + mockFileSystem.AddDirectory(globals.FileSystemOptions.FileSystemRoot); + mockFileSystem.AddDirectory(globals.FileSystemOptions.ExtractRoot); + mockFileSystem.AddFile(mockFileSystem.Path.Combine(globals.FileSystemOptions.FileSystemRoot, "file.dcm"), null); - [Test] - public void Test_FileCopierHost_HappyPath() + var host = new FileCopierHost(globals, mockFileSystem); + tester.StopOnDispose.Add(host); + host.Start(); + + var message = new ExtractFileMessage { - var globals = new GlobalOptionsFactory().Load(nameof(Test_FileCopierHost_HappyPath)); - globals.FileSystemOptions!.FileSystemRoot = "root"; - globals.FileSystemOptions.ExtractRoot = "exroot"; - - using var tester = new MicroserviceTester(globals.RabbitOptions!, globals.FileCopierOptions!); - - var outputQueueName = globals.FileCopierOptions!.CopyStatusProducerOptions!.ExchangeName!.Replace("Exchange", "Queue"); - tester.CreateExchange( - globals.FileCopierOptions.CopyStatusProducerOptions.ExchangeName, - outputQueueName, - false, - globals.FileCopierOptions.NoVerifyRoutingKey!); - - var mockFileSystem = new MockFileSystem(); - mockFileSystem.AddDirectory(globals.FileSystemOptions.FileSystemRoot); - mockFileSystem.AddDirectory(globals.FileSystemOptions.ExtractRoot); - mockFileSystem.AddFile(mockFileSystem.Path.Combine(globals.FileSystemOptions.FileSystemRoot, "file.dcm"), null); - - var host = new FileCopierHost(globals, mockFileSystem); - tester.StopOnDispose.Add(host); - host.Start(); - - var message = new ExtractFileMessage - { - ExtractionJobIdentifier = Guid.NewGuid(), - JobSubmittedAt = DateTime.UtcNow, - ProjectNumber = "1234", - ExtractionDirectory = 
"1234/foo", - Modality = "CT", - DicomFilePath = "file.dcm", - IsIdentifiableExtraction = true, - OutputPath = "output.dcm", - }; - tester.SendMessage(globals.FileCopierOptions, message); - - using var model = tester.Broker.GetModel(nameof(FileCopierHostTest)); - var consumer = new EventingBasicConsumer(model); - ExtractedFileStatusMessage? statusMessage = null; - consumer.Received += (_, ea) => statusMessage = JsonConvert.DeserializeObject(Encoding.UTF8.GetString(ea.Body.ToArray())); - model.BasicConsume(outputQueueName, true, "", consumer); - - TestTimelineAwaiter.Await(() => statusMessage != null); - Assert.That(statusMessage!.Status, Is.EqualTo(ExtractedFileStatus.Copied)); - } - - #endregion + ExtractionJobIdentifier = Guid.NewGuid(), + JobSubmittedAt = DateTime.UtcNow, + ProjectNumber = "1234", + ExtractionDirectory = "1234/foo", + Modality = "CT", + DicomFilePath = "file.dcm", + IsIdentifiableExtraction = true, + OutputPath = "output.dcm", + }; + tester.SendMessage(globals.FileCopierOptions, message); + + using var model = tester.Broker.GetModel(nameof(FileCopierHostTest)); + var consumer = new EventingBasicConsumer(model); + ExtractedFileStatusMessage? 
statusMessage = null; + consumer.Received += (_, ea) => statusMessage = JsonConvert.DeserializeObject(Encoding.UTF8.GetString(ea.Body.ToArray())); + model.BasicConsume(outputQueueName, true, "", consumer); + + TestTimelineAwaiter.Await(() => statusMessage != null); + Assert.That(statusMessage!.Status, Is.EqualTo(ExtractedFileStatus.Copied)); } + + #endregion } diff --git a/tests/SmiServices.IntegrationTests/Microservices/IdentifierMapper/IdentifierMapperTests.cs b/tests/SmiServices.IntegrationTests/Microservices/IdentifierMapper/IdentifierMapperTests.cs index bb540a0bf..99fca1670 100644 --- a/tests/SmiServices.IntegrationTests/Microservices/IdentifierMapper/IdentifierMapperTests.cs +++ b/tests/SmiServices.IntegrationTests/Microservices/IdentifierMapper/IdentifierMapperTests.cs @@ -22,642 +22,641 @@ using Tests.Common; using DatabaseType = FAnsi.DatabaseType; -namespace SmiServices.IntegrationTests.Microservices.IdentifierMapper +namespace SmiServices.IntegrationTests.Microservices.IdentifierMapper; + +[TestFixture] +[RequiresRelationalDb(DatabaseType.MicrosoftSQLServer)] +public class IdentifierMapperTests : DatabaseTests { - [TestFixture] - [RequiresRelationalDb(DatabaseType.MicrosoftSQLServer)] - public class IdentifierMapperTests : DatabaseTests + [OneTimeSetUp] + public void DisableFoDicomValidation() { - [OneTimeSetUp] - public void DisableFoDicomValidation() - { - new DicomSetupBuilder().SkipValidation(); - } + new DicomSetupBuilder().SkipValidation(); + } - [TestCase(DatabaseType.MicrosoftSQLServer)] - [TestCase(DatabaseType.MySql)] - public void TestIdentifierSwap(DatabaseType type) - { - var mappingDataTable = new DataTable("IdMap"); - mappingDataTable.Columns.Add("priv"); - mappingDataTable.Columns.Add("pub"); - mappingDataTable.Rows.Add("010101", "020202"); + [TestCase(DatabaseType.MicrosoftSQLServer)] + [TestCase(DatabaseType.MySql)] + public void TestIdentifierSwap(DatabaseType type) + { + var mappingDataTable = new DataTable("IdMap"); + 
mappingDataTable.Columns.Add("priv"); + mappingDataTable.Columns.Add("pub"); + mappingDataTable.Rows.Add("010101", "020202"); - var db = GetCleanedServer(type); + var db = GetCleanedServer(type); - var options = new IdentifierMapperOptions - { - MappingConnectionString = db.Server.Builder.ConnectionString, - MappingTableName = db.CreateTable("IdMap", mappingDataTable).GetFullyQualifiedName(), - SwapColumnName = "priv", - ReplacementColumnName = "pub", - MappingDatabaseType = type, - TimeoutInSeconds = 500 - }; + var options = new IdentifierMapperOptions + { + MappingConnectionString = db.Server.Builder.ConnectionString, + MappingTableName = db.CreateTable("IdMap", mappingDataTable).GetFullyQualifiedName(), + SwapColumnName = "priv", + ReplacementColumnName = "pub", + MappingDatabaseType = type, + TimeoutInSeconds = 500 + }; - var swapper = new PreloadTableSwapper(); - swapper.Setup(options); + var swapper = new PreloadTableSwapper(); + swapper.Setup(options); - var consumer = new IdentifierMapperQueueConsumer(Mock.Of(), swapper); + var consumer = new IdentifierMapperQueueConsumer(Mock.Of(), swapper); - var msg = GetTestDicomFileMessage(); + var msg = GetTestDicomFileMessage(); - consumer.SwapIdentifier(msg, out var _); + consumer.SwapIdentifier(msg, out var _); - AssertDicomFileMessageHasPatientID(msg, "020202"); - } + AssertDicomFileMessageHasPatientID(msg, "020202"); + } - [TestCase(DatabaseType.MicrosoftSQLServer, Test.Normal)] - [TestCase(DatabaseType.MicrosoftSQLServer, Test.ProperlyFormatedChi)] - [TestCase(DatabaseType.MySql, Test.Normal)] - [TestCase(DatabaseType.MySql, Test.ProperlyFormatedChi)] - public void TestIdentifierSwap_NoCache(DatabaseType type, Test test) - { - var mappingDataTable = new DataTable("IdMap"); - mappingDataTable.Columns.Add("priv"); - mappingDataTable.Columns.Add("pub"); - mappingDataTable.Rows.Add("010101", "020202"); - mappingDataTable.Rows.Add("0101010101", "0202020202"); + [TestCase(DatabaseType.MicrosoftSQLServer, Test.Normal)] 
+ [TestCase(DatabaseType.MicrosoftSQLServer, Test.ProperlyFormatedChi)] + [TestCase(DatabaseType.MySql, Test.Normal)] + [TestCase(DatabaseType.MySql, Test.ProperlyFormatedChi)] + public void TestIdentifierSwap_NoCache(DatabaseType type, Test test) + { + var mappingDataTable = new DataTable("IdMap"); + mappingDataTable.Columns.Add("priv"); + mappingDataTable.Columns.Add("pub"); + mappingDataTable.Rows.Add("010101", "020202"); + mappingDataTable.Rows.Add("0101010101", "0202020202"); - var db = GetCleanedServer(type); + var db = GetCleanedServer(type); - var options = new IdentifierMapperOptions - { - MappingConnectionString = db.Server.Builder.ConnectionString, - MappingTableName = db.CreateTable("IdMap", mappingDataTable).GetFullyQualifiedName(), - SwapColumnName = "priv", - ReplacementColumnName = "pub", - MappingDatabaseType = type, - TimeoutInSeconds = 500 - }; - - var swapper = new TableLookupSwapper(); - swapper.Setup(options); - - var consumer = new IdentifierMapperQueueConsumer(Mock.Of(), swapper) - { - AllowRegexMatching = true - }; + var options = new IdentifierMapperOptions + { + MappingConnectionString = db.Server.Builder.ConnectionString, + MappingTableName = db.CreateTable("IdMap", mappingDataTable).GetFullyQualifiedName(), + SwapColumnName = "priv", + ReplacementColumnName = "pub", + MappingDatabaseType = type, + TimeoutInSeconds = 500 + }; + + var swapper = new TableLookupSwapper(); + swapper.Setup(options); + + var consumer = new IdentifierMapperQueueConsumer(Mock.Of(), swapper) + { + AllowRegexMatching = true + }; - var msg = GetTestDicomFileMessage(test); - consumer.SwapIdentifier(msg, out _); + var msg = GetTestDicomFileMessage(test); + consumer.SwapIdentifier(msg, out _); - switch (test) - { - case Test.Normal: - AssertDicomFileMessageHasPatientID(msg, "020202"); - break; - case Test.ProperlyFormatedChi: - AssertDicomFileMessageHasPatientID(msg, "0202020202"); - break; - default: - Assert.Fail("Wrong test case?"); - break; - } + switch (test) + { 
+ case Test.Normal: + AssertDicomFileMessageHasPatientID(msg, "020202"); + break; + case Test.ProperlyFormatedChi: + AssertDicomFileMessageHasPatientID(msg, "0202020202"); + break; + default: + Assert.Fail("Wrong test case?"); + break; } + } - [TestCase(DatabaseType.MicrosoftSQLServer, 8), RequiresRabbit] - [TestCase(DatabaseType.MySql, 8), RequiresRabbit] - public void TestIdentifierSwap_RegexVsDeserialize(DatabaseType type, int batchSize) - { + [TestCase(DatabaseType.MicrosoftSQLServer, 8), RequiresRabbit] + [TestCase(DatabaseType.MySql, 8), RequiresRabbit] + public void TestIdentifierSwap_RegexVsDeserialize(DatabaseType type, int batchSize) + { - var options = new GlobalOptionsFactory().Load(nameof(TestIdentifierSwap_RegexVsDeserialize)); + var options = new GlobalOptionsFactory().Load(nameof(TestIdentifierSwap_RegexVsDeserialize)); - var mappingDataTable = new DataTable("IdMap"); - mappingDataTable.Columns.Add("priv"); - mappingDataTable.Columns.Add("pub"); - mappingDataTable.Rows.Add("010101", "020202"); - mappingDataTable.Rows.Add("0101010101", "0202020202"); + var mappingDataTable = new DataTable("IdMap"); + mappingDataTable.Columns.Add("priv"); + mappingDataTable.Columns.Add("pub"); + mappingDataTable.Rows.Add("010101", "020202"); + mappingDataTable.Rows.Add("0101010101", "0202020202"); - var db = GetCleanedServer(type); + var db = GetCleanedServer(type); - options.IdentifierMapperOptions!.MappingConnectionString = db.Server.Builder.ConnectionString; - options.IdentifierMapperOptions.MappingTableName = db.CreateTable("IdMap", mappingDataTable).GetFullyQualifiedName(); - options.IdentifierMapperOptions.SwapColumnName = "priv"; - options.IdentifierMapperOptions.ReplacementColumnName = "pub"; - options.IdentifierMapperOptions.MappingDatabaseType = type; - options.IdentifierMapperOptions.TimeoutInSeconds = 500; + options.IdentifierMapperOptions!.MappingConnectionString = db.Server.Builder.ConnectionString; + options.IdentifierMapperOptions.MappingTableName = 
db.CreateTable("IdMap", mappingDataTable).GetFullyQualifiedName(); + options.IdentifierMapperOptions.SwapColumnName = "priv"; + options.IdentifierMapperOptions.ReplacementColumnName = "pub"; + options.IdentifierMapperOptions.MappingDatabaseType = type; + options.IdentifierMapperOptions.TimeoutInSeconds = 500; - var swapper = new PreloadTableSwapper(); - swapper.Setup(options.IdentifierMapperOptions); + var swapper = new PreloadTableSwapper(); + swapper.Setup(options.IdentifierMapperOptions); - var goodChis = new List(); - var badChis = new List(); + var goodChis = new List(); + var badChis = new List(); - Console.WriteLine("Generating Test data..."); + Console.WriteLine("Generating Test data..."); - List tasks = []; - object oTaskLock = new(); + List tasks = []; + object oTaskLock = new(); - for (int i = 0; i < batchSize; i++) + for (int i = 0; i < batchSize; i++) + { + var t = new Task(() => { - var t = new Task(() => + var a = GetTestDicomFileMessage(Test.ProperlyFormatedChi); + var b = GetTestDicomFileMessage(Test.ProperlyFormatedChi); + lock (oTaskLock) { - var a = GetTestDicomFileMessage(Test.ProperlyFormatedChi); - var b = GetTestDicomFileMessage(Test.ProperlyFormatedChi); - lock (oTaskLock) - { - goodChis.Add(a); - badChis.Add(b); - } - }); + goodChis.Add(a); + badChis.Add(b); + } + }); - t.Start(); - tasks.Add(t); + t.Start(); + tasks.Add(t); - if (i % Environment.ProcessorCount == 0) - { - Task.WaitAll([.. tasks]); - tasks.Clear(); - } - - if (i % 100 == 0) - Console.WriteLine(i + " pairs done"); + if (i % Environment.ProcessorCount == 0) + { + Task.WaitAll([.. tasks]); + tasks.Clear(); } - Task.WaitAll([.. tasks]); + if (i % 100 == 0) + Console.WriteLine(i + " pairs done"); + } - options.IdentifierMapperOptions.AllowRegexMatching = true; + Task.WaitAll([.. 
tasks]); - using (var tester = new MicroserviceTester(options.RabbitOptions!, options.IdentifierMapperOptions)) - { - tester.CreateExchange(options.IdentifierMapperOptions.AnonImagesProducerOptions!.ExchangeName!, null); + options.IdentifierMapperOptions.AllowRegexMatching = true; - Console.WriteLine("Pushing good messages to Rabbit..."); - tester.SendMessages(options.IdentifierMapperOptions, goodChis, true); + using (var tester = new MicroserviceTester(options.RabbitOptions!, options.IdentifierMapperOptions)) + { + tester.CreateExchange(options.IdentifierMapperOptions.AnonImagesProducerOptions!.ExchangeName!, null); - var host = new IdentifierMapperHost(options, swapper); - tester.StopOnDispose.Add(host); + Console.WriteLine("Pushing good messages to Rabbit..."); + tester.SendMessages(options.IdentifierMapperOptions, goodChis, true); - Console.WriteLine("Starting host"); + var host = new IdentifierMapperHost(options, swapper); + tester.StopOnDispose.Add(host); - Stopwatch sw = Stopwatch.StartNew(); - host.Start(); + Console.WriteLine("Starting host"); - TestTimelineAwaiter.Await(() => host.Consumer.AckCount == batchSize); + Stopwatch sw = Stopwatch.StartNew(); + host.Start(); - Console.WriteLine("Good message processing (" + batchSize + ") took:" + sw.ElapsedMilliseconds + "ms"); - host.Stop("Test finished"); - } + TestTimelineAwaiter.Await(() => host.Consumer.AckCount == batchSize); - options.IdentifierMapperOptions.AllowRegexMatching = false; + Console.WriteLine("Good message processing (" + batchSize + ") took:" + sw.ElapsedMilliseconds + "ms"); + host.Stop("Test finished"); + } - using (var tester = new MicroserviceTester(options.RabbitOptions!, options.IdentifierMapperOptions)) - { - tester.CreateExchange(options.IdentifierMapperOptions.AnonImagesProducerOptions.ExchangeName!, null); + options.IdentifierMapperOptions.AllowRegexMatching = false; - Console.WriteLine("Pushing bad messages to Rabbit..."); - tester.SendMessages(options.IdentifierMapperOptions, 
badChis, true); + using (var tester = new MicroserviceTester(options.RabbitOptions!, options.IdentifierMapperOptions)) + { + tester.CreateExchange(options.IdentifierMapperOptions.AnonImagesProducerOptions.ExchangeName!, null); - var host = new IdentifierMapperHost(options, swapper); - tester.StopOnDispose.Add(host); + Console.WriteLine("Pushing bad messages to Rabbit..."); + tester.SendMessages(options.IdentifierMapperOptions, badChis, true); - Console.WriteLine("Starting host"); + var host = new IdentifierMapperHost(options, swapper); + tester.StopOnDispose.Add(host); - Stopwatch sw = Stopwatch.StartNew(); - host.Start(); + Console.WriteLine("Starting host"); - TestTimelineAwaiter.Await(() => host.Consumer.AckCount == batchSize); + Stopwatch sw = Stopwatch.StartNew(); + host.Start(); - Console.WriteLine("Bad message processing (" + batchSize + ") took:" + sw.ElapsedMilliseconds + "ms"); + TestTimelineAwaiter.Await(() => host.Consumer.AckCount == batchSize); - host.Stop("Test finished"); - } + Console.WriteLine("Bad message processing (" + batchSize + ") took:" + sw.ElapsedMilliseconds + "ms"); + + host.Stop("Test finished"); } + } - [Explicit("Slow, tests lookup scalability")] - [TestCase(DatabaseType.MicrosoftSQLServer)] - [TestCase(DatabaseType.MySql)] - public void TestIdentifierSwap_MillionsOfRows(DatabaseType type) - { - Console.WriteLine("DatabaseType:" + type); + [Explicit("Slow, tests lookup scalability")] + [TestCase(DatabaseType.MicrosoftSQLServer)] + [TestCase(DatabaseType.MySql)] + public void TestIdentifierSwap_MillionsOfRows(DatabaseType type) + { + Console.WriteLine("DatabaseType:" + type); - var mappingDataTable = new DataTable("IdMap"); - mappingDataTable.Columns.Add("priv"); - mappingDataTable.Columns.Add("pub"); - mappingDataTable.Rows.Add("abclkjlkjdefghijiklaskdf", Guid.NewGuid().ToString()); - var db = GetCleanedServer(type); + var mappingDataTable = new DataTable("IdMap"); + mappingDataTable.Columns.Add("priv"); + 
mappingDataTable.Columns.Add("pub"); + mappingDataTable.Rows.Add("abclkjlkjdefghijiklaskdf", Guid.NewGuid().ToString()); + var db = GetCleanedServer(type); - DiscoveredTable tbl; + DiscoveredTable tbl; - var options = new IdentifierMapperOptions + var options = new IdentifierMapperOptions + { + MappingConnectionString = db.Server.Builder.ConnectionString, + MappingTableName = (tbl = db.CreateTable("IdMap", mappingDataTable)).GetFullyQualifiedName(), + SwapColumnName = "priv", + ReplacementColumnName = "pub", + MappingDatabaseType = type + }; + + Stopwatch sw = new(); + sw.Start(); + + mappingDataTable.Rows.Clear(); + using (var blk = tbl.BeginBulkInsert()) + for (int i = 0; i < 9999999; i++) //9 million { - MappingConnectionString = db.Server.Builder.ConnectionString, - MappingTableName = (tbl = db.CreateTable("IdMap", mappingDataTable)).GetFullyQualifiedName(), - SwapColumnName = "priv", - ReplacementColumnName = "pub", - MappingDatabaseType = type - }; - - Stopwatch sw = new(); - sw.Start(); - - mappingDataTable.Rows.Clear(); - using (var blk = tbl.BeginBulkInsert()) - for (int i = 0; i < 9999999; i++) //9 million + mappingDataTable.Rows.Add(i.ToString(), Guid.NewGuid().ToString()); + + if (i % 100000 == 0) { - mappingDataTable.Rows.Add(i.ToString(), Guid.NewGuid().ToString()); - - if (i % 100000 == 0) - { - blk.Upload(mappingDataTable); - mappingDataTable.Rows.Clear(); - Console.WriteLine("Upload Table " + i + " rows " + sw.ElapsedMilliseconds); - } + blk.Upload(mappingDataTable); + mappingDataTable.Rows.Clear(); + Console.WriteLine("Upload Table " + i + " rows " + sw.ElapsedMilliseconds); } + } - sw.Stop(); - sw.Reset(); + sw.Stop(); + sw.Reset(); - sw.Start(); - var swapper = new PreloadTableSwapper(); - swapper.Setup(options); + sw.Start(); + var swapper = new PreloadTableSwapper(); + swapper.Setup(options); - sw.Stop(); - Console.WriteLine("PreloadTableSwapper.Setup:" + sw.ElapsedMilliseconds); - sw.Reset(); + sw.Stop(); + 
Console.WriteLine("PreloadTableSwapper.Setup:" + sw.ElapsedMilliseconds); + sw.Reset(); - sw.Start(); - var answer = swapper.GetSubstitutionFor("12325", out _); - sw.Stop(); - Console.WriteLine("Lookup Key:" + sw.ElapsedMilliseconds); - sw.Reset(); + sw.Start(); + var answer = swapper.GetSubstitutionFor("12325", out _); + sw.Stop(); + Console.WriteLine("Lookup Key:" + sw.ElapsedMilliseconds); + sw.Reset(); - Assert.That(answer, Is.Not.Null); - Assert.That(answer!, Has.Length.GreaterThan(20)); - } + Assert.That(answer, Is.Not.Null); + Assert.That(answer!, Has.Length.GreaterThan(20)); + } + + [TestCase(DatabaseType.MicrosoftSQLServer)] + [TestCase(DatabaseType.MySql)] + [TestCase(DatabaseType.PostgreSql)] + public void TestIdentifierSwapForGuid(DatabaseType dbType) + { + var db = GetCleanedServer(dbType); + var mapTbl = db.ExpectTable("Map"); - [TestCase(DatabaseType.MicrosoftSQLServer)] - [TestCase(DatabaseType.MySql)] - [TestCase(DatabaseType.PostgreSql)] - public void TestIdentifierSwapForGuid(DatabaseType dbType) + //the declaration of what the guid namer table should be + var options = new IdentifierMapperOptions { - var db = GetCleanedServer(dbType); - var mapTbl = db.ExpectTable("Map"); + MappingConnectionString = db.Server.Builder.ConnectionString, + MappingTableName = mapTbl.GetFullyQualifiedName(), + SwapColumnName = "priv", + ReplacementColumnName = "pub", + MappingDatabaseType = dbType + }; - //the declaration of what the guid namer table should be - var options = new IdentifierMapperOptions - { - MappingConnectionString = db.Server.Builder.ConnectionString, - MappingTableName = mapTbl.GetFullyQualifiedName(), - SwapColumnName = "priv", - ReplacementColumnName = "pub", - MappingDatabaseType = dbType - }; + var swapper = new ForGuidIdentifierSwapper(); + swapper.Setup(options); + swapper.Setup(options); + swapper.Setup(options); //this isn't just for the lols, this will test both the 'create it mode' and the 'discover it mode' - var swapper = new 
ForGuidIdentifierSwapper(); - swapper.Setup(options); - swapper.Setup(options); - swapper.Setup(options); //this isn't just for the lols, this will test both the 'create it mode' and the 'discover it mode' + var consumer = new IdentifierMapperQueueConsumer(Mock.Of(), swapper); - var consumer = new IdentifierMapperQueueConsumer(Mock.Of(), swapper); + var msg = GetTestDicomFileMessage(); - var msg = GetTestDicomFileMessage(); + consumer.SwapIdentifier(msg, out var reason); - consumer.SwapIdentifier(msg, out var reason); + var newDs = DicomTypeTranslater.DeserializeJsonToDataset(msg.DicomDataset); + var guidAllocated = newDs.GetValue(DicomTag.PatientID, 0); - var newDs = DicomTypeTranslater.DeserializeJsonToDataset(msg.DicomDataset); - var guidAllocated = newDs.GetValue(DicomTag.PatientID, 0); + using var dt = mapTbl.GetDataTable(); + Assert.Multiple(() => + { + Assert.That(dt.Rows, Has.Count.EqualTo(1)); - using var dt = mapTbl.GetDataTable(); - Assert.Multiple(() => - { - Assert.That(dt.Rows, Has.Count.EqualTo(1)); + //e.g. '841A2E3E-B7C9-410C-A5D1-816B95C0E806' + Assert.That(guidAllocated, Has.Length.EqualTo(36)); + }); + } - //e.g. 
'841A2E3E-B7C9-410C-A5D1-816B95C0E806' - Assert.That(guidAllocated, Has.Length.EqualTo(36)); - }); - } + [TestCase(DatabaseType.MicrosoftSQLServer)] + [TestCase(DatabaseType.MySql)] + [TestCase(DatabaseType.PostgreSql)] + public void TestIdentifierSwap2ForGuids(DatabaseType dbType) + { + var db = GetCleanedServer(dbType); + var mapTbl = db.ExpectTable("Map"); - [TestCase(DatabaseType.MicrosoftSQLServer)] - [TestCase(DatabaseType.MySql)] - [TestCase(DatabaseType.PostgreSql)] - public void TestIdentifierSwap2ForGuids(DatabaseType dbType) + //the declaration of what the guid namer table should be + var options = new IdentifierMapperOptions { - var db = GetCleanedServer(dbType); - var mapTbl = db.ExpectTable("Map"); + MappingConnectionString = db.Server.Builder.ConnectionString, + MappingTableName = mapTbl.GetFullyQualifiedName(), + SwapColumnName = "priv", + ReplacementColumnName = "pub", + MappingDatabaseType = dbType + }; - //the declaration of what the guid namer table should be - var options = new IdentifierMapperOptions - { - MappingConnectionString = db.Server.Builder.ConnectionString, - MappingTableName = mapTbl.GetFullyQualifiedName(), - SwapColumnName = "priv", - ReplacementColumnName = "pub", - MappingDatabaseType = dbType - }; + var swapper = new ForGuidIdentifierSwapper(); + swapper.Setup(options); - var swapper = new ForGuidIdentifierSwapper(); - swapper.Setup(options); + Assert.Multiple(() => + { + Assert.That(swapper.GetSubstitutionFor("01010101", out var reason), Has.Length.EqualTo(36)); + Assert.That(swapper.GetSubstitutionFor("02020202", out reason), Has.Length.EqualTo(36)); + }); - Assert.Multiple(() => - { - Assert.That(swapper.GetSubstitutionFor("01010101", out var reason), Has.Length.EqualTo(36)); - Assert.That(swapper.GetSubstitutionFor("02020202", out reason), Has.Length.EqualTo(36)); - }); + var answer1 = swapper.GetSubstitutionFor("03030303", out _); - var answer1 = swapper.GetSubstitutionFor("03030303", out _); + var answer2 = 
swapper.GetSubstitutionFor("04040404", out _); - var answer2 = swapper.GetSubstitutionFor("04040404", out _); + var answer3 = swapper.GetSubstitutionFor("03030303", out _); - var answer3 = swapper.GetSubstitutionFor("03030303", out _); + Assert.Multiple(() => + { + Assert.That(answer3, Is.EqualTo(answer1)); - Assert.Multiple(() => - { - Assert.That(answer3, Is.EqualTo(answer1)); + Assert.That(answer2, Is.Not.EqualTo(answer1)); + }); + } - Assert.That(answer2, Is.Not.EqualTo(answer1)); - }); - } + /// + /// Tests two microservices inserting a guid at the same time (neither has a cached answer each thinks it's guid it allocated + /// will be respected). Correct behaviour is for the swappers to always read guids only from the database and in transaction + /// safe manner. + /// + /// + [TestCase(DatabaseType.MicrosoftSQLServer)] + [TestCase(DatabaseType.MySql)] + [TestCase(DatabaseType.PostgreSql)] + public void TestIdentifierSwap2ForGuids_WithSeperateSwappers(DatabaseType dbType) + { + var db = GetCleanedServer(dbType); + var mapTbl = db.ExpectTable("Map"); - /// - /// Tests two microservices inserting a guid at the same time (neither has a cached answer each thinks it's guid it allocated - /// will be respected). Correct behaviour is for the swappers to always read guids only from the database and in transaction - /// safe manner. 
- /// - /// - [TestCase(DatabaseType.MicrosoftSQLServer)] - [TestCase(DatabaseType.MySql)] - [TestCase(DatabaseType.PostgreSql)] - public void TestIdentifierSwap2ForGuids_WithSeperateSwappers(DatabaseType dbType) + //the declaration of what the guid namer table should be + var options = new IdentifierMapperOptions { - var db = GetCleanedServer(dbType); - var mapTbl = db.ExpectTable("Map"); + MappingConnectionString = db.Server.Builder.ConnectionString, + MappingTableName = mapTbl.GetFullyQualifiedName(), + SwapColumnName = "priv", + ReplacementColumnName = "pub", + MappingDatabaseType = dbType + }; - //the declaration of what the guid namer table should be - var options = new IdentifierMapperOptions - { - MappingConnectionString = db.Server.Builder.ConnectionString, - MappingTableName = mapTbl.GetFullyQualifiedName(), - SwapColumnName = "priv", - ReplacementColumnName = "pub", - MappingDatabaseType = dbType - }; + var swapper1 = new ForGuidIdentifierSwapper(); + swapper1.Setup(options); - var swapper1 = new ForGuidIdentifierSwapper(); - swapper1.Setup(options); + var swapper2 = new ForGuidIdentifierSwapper(); + swapper2.Setup(options); - var swapper2 = new ForGuidIdentifierSwapper(); - swapper2.Setup(options); + var answer1 = swapper1.GetSubstitutionFor("01010101", out _); + var answer2 = swapper2.GetSubstitutionFor("01010101", out _); - var answer1 = swapper1.GetSubstitutionFor("01010101", out _); - var answer2 = swapper2.GetSubstitutionFor("01010101", out _); + Assert.Multiple(() => + { + Assert.That(answer2, Is.EqualTo(answer1)); - Assert.Multiple(() => - { - Assert.That(answer2, Is.EqualTo(answer1)); + Assert.That(answer1, Is.Not.Null); + }); + Assert.That(answer2, Is.Not.Null); + } - Assert.That(answer1, Is.Not.Null); - }); - Assert.That(answer2, Is.Not.Null); - } + public enum Test + { + Normal, + NoPatientTag, + EmptyInPatientTag, + ProperlyFormatedChi, + DuplicatePatientIDButNull, + DuplicatePatientID, + DuplicatePatientIDAndDifferent, + } - public enum 
Test - { - Normal, - NoPatientTag, - EmptyInPatientTag, - ProperlyFormatedChi, - DuplicatePatientIDButNull, - DuplicatePatientID, - DuplicatePatientIDAndDifferent, - } + [Test] + [TestCase(Test.NoPatientTag)] + [TestCase(Test.EmptyInPatientTag)] + public void Test_BlankPatientIdentifier(Test testCase) + { + var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); - [Test] - [TestCase(Test.NoPatientTag)] - [TestCase(Test.EmptyInPatientTag)] - public void Test_BlankPatientIdentifier(Test testCase) + //the declaration of what the guid namer table should be + var options = new IdentifierMapperOptions { - var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); + MappingConnectionString = db.Server.Builder.ConnectionString + }; - //the declaration of what the guid namer table should be - var options = new IdentifierMapperOptions - { - MappingConnectionString = db.Server.Builder.ConnectionString - }; - - var swapper = new SwapForFixedValueTester("meeee"); - swapper.Setup(options); + var swapper = new SwapForFixedValueTester("meeee"); + swapper.Setup(options); - var consumer = new IdentifierMapperQueueConsumer(Mock.Of(), swapper); + var consumer = new IdentifierMapperQueueConsumer(Mock.Of(), swapper); - var msg = GetTestDicomFileMessage(testCase: testCase); + var msg = GetTestDicomFileMessage(testCase: testCase); - Assert.That(consumer.SwapIdentifier(msg, out var reason), Is.False); + Assert.That(consumer.SwapIdentifier(msg, out var reason), Is.False); - switch (testCase) - { - case Test.EmptyInPatientTag: - Assert.That(reason, Is.EqualTo("PatientID was blank")); - break; - case Test.NoPatientTag: - Assert.That(reason, Is.EqualTo("Dataset did not contain PatientID")); - break; - } + switch (testCase) + { + case Test.EmptyInPatientTag: + Assert.That(reason, Is.EqualTo("PatientID was blank")); + break; + case Test.NoPatientTag: + Assert.That(reason, Is.EqualTo("Dataset did not contain PatientID")); + break; } + } - [Test] - [TestCase(Test.DuplicatePatientID, true)] - 
[TestCase(Test.DuplicatePatientIDButNull, true)] - [TestCase(Test.DuplicatePatientIDAndDifferent, false)] - public void Test_DuplicatePatientID(Test testCase, bool expectAllowed) - { - var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); + [Test] + [TestCase(Test.DuplicatePatientID, true)] + [TestCase(Test.DuplicatePatientIDButNull, true)] + [TestCase(Test.DuplicatePatientIDAndDifferent, false)] + public void Test_DuplicatePatientID(Test testCase, bool expectAllowed) + { + var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); - //the declaration of what the guid namer table should be - var options = new IdentifierMapperOptions - { - MappingConnectionString = db.Server.Builder.ConnectionString - }; + //the declaration of what the guid namer table should be + var options = new IdentifierMapperOptions + { + MappingConnectionString = db.Server.Builder.ConnectionString + }; - var swapper = new SwapForFixedValueTester("meeee"); - swapper.Setup(options); + var swapper = new SwapForFixedValueTester("meeee"); + swapper.Setup(options); - var consumer = new IdentifierMapperQueueConsumer(Mock.Of(), swapper); + var consumer = new IdentifierMapperQueueConsumer(Mock.Of(), swapper); - var msg = GetTestDicomFileMessage(testCase: testCase); + var msg = GetTestDicomFileMessage(testCase: testCase); - if (expectAllowed) - { - Assert.That(consumer.SwapIdentifier(msg, out _), Is.True); - AssertDicomFileMessageHasPatientID(msg, "meeee"); - } - else - { - Assert.Throws(() => consumer.SwapIdentifier(msg, out _)); - } + if (expectAllowed) + { + Assert.That(consumer.SwapIdentifier(msg, out _), Is.True); + AssertDicomFileMessageHasPatientID(msg, "meeee"); } - - [Test] - public void Test_NoMatchingIdentifierFound() + else { - var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); + Assert.Throws(() => consumer.SwapIdentifier(msg, out _)); + } + } - //the declaration of what the guid namer table should be - var options = new IdentifierMapperOptions - { - MappingConnectionString 
= db.Server.Builder.ConnectionString - }; + [Test] + public void Test_NoMatchingIdentifierFound() + { + var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); - //null here means it will never find any identifier - var swapper = new SwapForFixedValueTester(null); - swapper.Setup(options); + //the declaration of what the guid namer table should be + var options = new IdentifierMapperOptions + { + MappingConnectionString = db.Server.Builder.ConnectionString + }; - var consumer = new IdentifierMapperQueueConsumer(Mock.Of(), swapper); + //null here means it will never find any identifier + var swapper = new SwapForFixedValueTester(null); + swapper.Setup(options); - var msg = GetTestDicomFileMessage(); + var consumer = new IdentifierMapperQueueConsumer(Mock.Of(), swapper); - Assert.Multiple(() => - { - Assert.That(consumer.SwapIdentifier(msg, out var reason), Is.False); - Assert.That(reason, Is.EqualTo("Swapper SmiServices.UnitTests.Microservices.IdentifierMapper.SwapForFixedValueTester returned null")); - }); - } + var msg = GetTestDicomFileMessage(); - private static void AssertDicomFileMessageHasPatientID(DicomFileMessage msg, string patientId) + Assert.Multiple(() => { - var newDs = DicomTypeTranslater.DeserializeJsonToDataset(msg.DicomDataset); - Assert.That(patientId, Is.EqualTo(newDs.GetValue(DicomTag.PatientID, 0))); - } + Assert.That(consumer.SwapIdentifier(msg, out var reason), Is.False); + Assert.That(reason, Is.EqualTo("Swapper SmiServices.UnitTests.Microservices.IdentifierMapper.SwapForFixedValueTester returned null")); + }); + } - private static DicomFileMessage GetTestDicomFileMessage(Test testCase = Test.Normal) + private static void AssertDicomFileMessageHasPatientID(DicomFileMessage msg, string patientId) + { + var newDs = DicomTypeTranslater.DeserializeJsonToDataset(msg.DicomDataset); + Assert.That(patientId, Is.EqualTo(newDs.GetValue(DicomTag.PatientID, 0))); + } + + private static DicomFileMessage GetTestDicomFileMessage(Test testCase = 
Test.Normal) + { + var msg = new DicomFileMessage { - var msg = new DicomFileMessage - { - DicomFilePath = "Path/To/The/File.dcm", - SOPInstanceUID = "1.2.3.4", - SeriesInstanceUID = "1.2.3.4", - StudyInstanceUID = "1.2.3.4", - }; + DicomFilePath = "Path/To/The/File.dcm", + SOPInstanceUID = "1.2.3.4", + SeriesInstanceUID = "1.2.3.4", + StudyInstanceUID = "1.2.3.4", + }; - DicomDataset ds; + DicomDataset ds; - Random r = new(123); + Random r = new(123); - using (var generator = new DicomDataGenerator(r, null, "CT")) - ds = generator.GenerateTestDataset(new Person(r), r); + using (var generator = new DicomDataGenerator(r, null, "CT")) + ds = generator.GenerateTestDataset(new Person(r), r); - ds.AddOrUpdate(DicomTag.AccessionNumber, "1234"); - ds.AddOrUpdate(DicomTag.SOPInstanceUID, "1.2.3.4"); - ds.AddOrUpdate(DicomTag.SeriesInstanceUID, "1.2.3.4"); - ds.AddOrUpdate(DicomTag.StudyInstanceUID, "1.2.3.4"); + ds.AddOrUpdate(DicomTag.AccessionNumber, "1234"); + ds.AddOrUpdate(DicomTag.SOPInstanceUID, "1.2.3.4"); + ds.AddOrUpdate(DicomTag.SeriesInstanceUID, "1.2.3.4"); + ds.AddOrUpdate(DicomTag.StudyInstanceUID, "1.2.3.4"); - switch (testCase) - { - case Test.Normal: - ds.AddOrUpdate(DicomTag.PatientID, "010101"); - break; - case Test.NoPatientTag: - ds.Remove(DicomTag.PatientID); - break; - case Test.EmptyInPatientTag: - ds.AddOrUpdate(DicomTag.PatientID, string.Empty); - break; - case Test.ProperlyFormatedChi: - ds.AddOrUpdate(DicomTag.PatientID, "0101010101"); - break; - case Test.DuplicatePatientIDButNull: - ds.AddOrUpdate(DicomTag.PatientID, new[] { "0101010101", null }); - break; - case Test.DuplicatePatientID: - ds.AddOrUpdate(DicomTag.PatientID, new[] { "0101010101", "0101010101" }); - break; - case Test.DuplicatePatientIDAndDifferent: - ds.AddOrUpdate(DicomTag.PatientID, new[] { "0101010101", "0202020202" }); - break; - default: - throw new ArgumentOutOfRangeException(nameof(testCase)); - } + switch (testCase) + { + case Test.Normal: + 
ds.AddOrUpdate(DicomTag.PatientID, "010101"); + break; + case Test.NoPatientTag: + ds.Remove(DicomTag.PatientID); + break; + case Test.EmptyInPatientTag: + ds.AddOrUpdate(DicomTag.PatientID, string.Empty); + break; + case Test.ProperlyFormatedChi: + ds.AddOrUpdate(DicomTag.PatientID, "0101010101"); + break; + case Test.DuplicatePatientIDButNull: + ds.AddOrUpdate(DicomTag.PatientID, new[] { "0101010101", null }); + break; + case Test.DuplicatePatientID: + ds.AddOrUpdate(DicomTag.PatientID, new[] { "0101010101", "0101010101" }); + break; + case Test.DuplicatePatientIDAndDifferent: + ds.AddOrUpdate(DicomTag.PatientID, new[] { "0101010101", "0202020202" }); + break; + default: + throw new ArgumentOutOfRangeException(nameof(testCase)); + } - msg.DicomDataset = DicomTypeTranslater.SerializeDatasetToJson(ds); + msg.DicomDataset = DicomTypeTranslater.SerializeDatasetToJson(ds); - return msg; - } + return msg; + } - /// - /// Tests that the control queue consumer correctly calls . - /// Each implementation of the swapper should be tested to ensure it correctly deals with the message - /// - [Test] - public void TestIdentifierSwap_ControlQueueRefresh() - { - var mockSwapper = new Mock(); + /// + /// Tests that the control queue consumer correctly calls . 
+ /// Each implementation of the swapper should be tested to ensure it correctly deals with the message + /// + [Test] + public void TestIdentifierSwap_ControlQueueRefresh() + { + var mockSwapper = new Mock(); - var controlConsumer = new IdentifierMapperControlMessageHandler(mockSwapper.Object); + var controlConsumer = new IdentifierMapperControlMessageHandler(mockSwapper.Object); - controlConsumer.ControlMessageHandler("refresh"); + controlConsumer.ControlMessageHandler("refresh"); - mockSwapper.Verify(x => x.ClearCache(), Times.Once); - } + mockSwapper.Verify(x => x.ClearCache(), Times.Once); + } - [Test] - public void TestSwapCache() - { - var mappingDataTable = new DataTable("IdMap"); - mappingDataTable.Columns.Add("priv"); - mappingDataTable.Columns.Add("pub"); + [Test] + public void TestSwapCache() + { + var mappingDataTable = new DataTable("IdMap"); + mappingDataTable.Columns.Add("priv"); + mappingDataTable.Columns.Add("pub"); - mappingDataTable.Rows.Add("CHI-1", "REP-1"); - mappingDataTable.Rows.Add("CHI-2", "REP-2"); + mappingDataTable.Rows.Add("CHI-1", "REP-1"); + mappingDataTable.Rows.Add("CHI-2", "REP-2"); - DiscoveredDatabase db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); + DiscoveredDatabase db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); - GlobalOptions options = new GlobalOptionsFactory().Load(nameof(TestSwapCache)); - options.IdentifierMapperOptions = new IdentifierMapperOptions - { - MappingConnectionString = db.Server.Builder.ConnectionString, - MappingTableName = db.CreateTable("IdMap", mappingDataTable).GetFullyQualifiedName(), - SwapColumnName = "priv", - ReplacementColumnName = "pub", - MappingDatabaseType = DatabaseType.MicrosoftSQLServer, - TimeoutInSeconds = 500 - }; - - var swapper = new TableLookupSwapper(); - swapper.Setup(options.IdentifierMapperOptions); - - string? 
swapped = swapper.GetSubstitutionFor("CHI-1", out var _); + GlobalOptions options = new GlobalOptionsFactory().Load(nameof(TestSwapCache)); + options.IdentifierMapperOptions = new IdentifierMapperOptions + { + MappingConnectionString = db.Server.Builder.ConnectionString, + MappingTableName = db.CreateTable("IdMap", mappingDataTable).GetFullyQualifiedName(), + SwapColumnName = "priv", + ReplacementColumnName = "pub", + MappingDatabaseType = DatabaseType.MicrosoftSQLServer, + TimeoutInSeconds = 500 + }; + + var swapper = new TableLookupSwapper(); + swapper.Setup(options.IdentifierMapperOptions); + + string? swapped = swapper.GetSubstitutionFor("CHI-1", out var _); + Assert.That(swapped, Is.EqualTo("REP-1")); + swapped = swapper.GetSubstitutionFor("CHI-1", out _); + Assert.Multiple(() => + { Assert.That(swapped, Is.EqualTo("REP-1")); - swapped = swapper.GetSubstitutionFor("CHI-1", out _); - Assert.Multiple(() => - { - Assert.That(swapped, Is.EqualTo("REP-1")); - Assert.That(swapper.Success, Is.EqualTo(2)); - Assert.That(swapper.CacheHit, Is.EqualTo(1)); - }); + Assert.That(swapper.Success, Is.EqualTo(2)); + Assert.That(swapper.CacheHit, Is.EqualTo(1)); + }); - swapped = swapper.GetSubstitutionFor("CHI-2", out _); + swapped = swapper.GetSubstitutionFor("CHI-2", out _); + Assert.That(swapped, Is.EqualTo("REP-2")); + swapped = swapper.GetSubstitutionFor("CHI-2", out _); + Assert.Multiple(() => + { Assert.That(swapped, Is.EqualTo("REP-2")); - swapped = swapper.GetSubstitutionFor("CHI-2", out _); - Assert.Multiple(() => - { - Assert.That(swapped, Is.EqualTo("REP-2")); - Assert.That(swapper.Success, Is.EqualTo(4)); - Assert.That(swapper.CacheHit, Is.EqualTo(2)); - }); + Assert.That(swapper.Success, Is.EqualTo(4)); + Assert.That(swapper.CacheHit, Is.EqualTo(2)); + }); - // Just to make sure... + // Just to make sure... 
- swapped = swapper.GetSubstitutionFor("CHI-1", out _); - Assert.Multiple(() => - { - Assert.That(swapped, Is.EqualTo("REP-1")); + swapped = swapper.GetSubstitutionFor("CHI-1", out _); + Assert.Multiple(() => + { + Assert.That(swapped, Is.EqualTo("REP-1")); - Assert.That(swapper.Success, Is.EqualTo(5)); - Assert.That(swapper.CacheHit, Is.EqualTo(2)); - }); - } + Assert.That(swapper.Success, Is.EqualTo(5)); + Assert.That(swapper.CacheHit, Is.EqualTo(2)); + }); } } diff --git a/tests/SmiServices.IntegrationTests/Microservices/IdentifierMapper/RedisSwapperTests.cs b/tests/SmiServices.IntegrationTests/Microservices/IdentifierMapper/RedisSwapperTests.cs index 6459388c6..5882bcca5 100644 --- a/tests/SmiServices.IntegrationTests/Microservices/IdentifierMapper/RedisSwapperTests.cs +++ b/tests/SmiServices.IntegrationTests/Microservices/IdentifierMapper/RedisSwapperTests.cs @@ -9,155 +9,154 @@ using System.Linq; using Tests.Common; -namespace SmiServices.IntegrationTests.Microservices.IdentifierMapper +namespace SmiServices.IntegrationTests.Microservices.IdentifierMapper; + +[RequiresRelationalDb(DatabaseType.MicrosoftSQLServer)] +[RequiresRelationalDb(DatabaseType.MySql)] +class RedisSwapperTests : DatabaseTests { - [RequiresRelationalDb(DatabaseType.MicrosoftSQLServer)] - [RequiresRelationalDb(DatabaseType.MySql)] - class RedisSwapperTests : DatabaseTests + private const string TestRedisServer = "localhost"; + + [TestCase(DatabaseType.MySql)] + [TestCase(DatabaseType.MicrosoftSQLServer)] + public void Test_Redist_CacheUsage(DatabaseType dbType) { - private const string TestRedisServer = "localhost"; + var db = GetCleanedServer(dbType); + + DiscoveredTable map; + + using (var dt = new DataTable()) + { + dt.Columns.Add("CHI"); + dt.Columns.Add("ECHI"); + + dt.Rows.Add("0101010101", "0A0A0A0A0A"); + map = db.CreateTable("Map", dt); + } + + var options = new IdentifierMapperOptions + { + MappingTableName = map.GetFullyQualifiedName(), + MappingConnectionString = 
db.Server.Builder.ConnectionString, + SwapColumnName = "CHI", + ReplacementColumnName = "ECHI", + MappingDatabaseType = db.Server.DatabaseType + }; + + RedisSwapper swapper; - [TestCase(DatabaseType.MySql)] - [TestCase(DatabaseType.MicrosoftSQLServer)] - public void Test_Redist_CacheUsage(DatabaseType dbType) + try { - var db = GetCleanedServer(dbType); - - DiscoveredTable map; - - using (var dt = new DataTable()) - { - dt.Columns.Add("CHI"); - dt.Columns.Add("ECHI"); - - dt.Rows.Add("0101010101", "0A0A0A0A0A"); - map = db.CreateTable("Map", dt); - } - - var options = new IdentifierMapperOptions - { - MappingTableName = map.GetFullyQualifiedName(), - MappingConnectionString = db.Server.Builder.ConnectionString, - SwapColumnName = "CHI", - ReplacementColumnName = "ECHI", - MappingDatabaseType = db.Server.DatabaseType - }; - - RedisSwapper swapper; - - try - { - swapper = new RedisSwapper(TestRedisServer, new TableLookupWithGuidFallbackSwapper()); - swapper.Setup(options); - - ClearRedisServer(); - } - catch (RedisConnectionException) - { - Assert.Inconclusive(); - throw new Exception("To keep static analysis happy, btw Redis was unavailable"); - } - - //hit on the lookup table - string? answer = swapper.GetSubstitutionFor("0101010101", out string? reason); - Assert.Multiple(() => - { - Assert.That(answer, Is.EqualTo("0A0A0A0A0A")); - Assert.That(reason, Is.Null); - - //hit didn't come from Redis - Assert.That(swapper.CacheHit, Is.EqualTo(0)); - Assert.That(swapper.Success, Is.EqualTo(1)); - }); - - - //hit from Redis - string? answer2 = swapper.GetSubstitutionFor("0101010101", out string? 
reason2); - Assert.Multiple(() => - { - Assert.That(answer, Is.EqualTo("0A0A0A0A0A")); - Assert.That(reason, Is.Null); - - //hit must come from Redis - Assert.That(swapper.CacheHit, Is.EqualTo(1)); - Assert.That(swapper.Success, Is.EqualTo(2)); - }); + swapper = new RedisSwapper(TestRedisServer, new TableLookupWithGuidFallbackSwapper()); + swapper.Setup(options); + + ClearRedisServer(); + } + catch (RedisConnectionException) + { + Assert.Inconclusive(); + throw new Exception("To keep static analysis happy, btw Redis was unavailable"); } + //hit on the lookup table + string? answer = swapper.GetSubstitutionFor("0101010101", out string? reason); + Assert.Multiple(() => + { + Assert.That(answer, Is.EqualTo("0A0A0A0A0A")); + Assert.That(reason, Is.Null); + + //hit didn't come from Redis + Assert.That(swapper.CacheHit, Is.EqualTo(0)); + Assert.That(swapper.Success, Is.EqualTo(1)); + }); + + + //hit from Redis + string? answer2 = swapper.GetSubstitutionFor("0101010101", out string? reason2); + Assert.Multiple(() => + { + Assert.That(answer, Is.EqualTo("0A0A0A0A0A")); + Assert.That(reason, Is.Null); + + //hit must come from Redis + Assert.That(swapper.CacheHit, Is.EqualTo(1)); + Assert.That(swapper.Success, Is.EqualTo(2)); + }); + } + + + [TestCase(DatabaseType.MySql)] + [TestCase(DatabaseType.MicrosoftSQLServer)] + public void Test_Redist_CacheMisses(DatabaseType dbType) + { + var db = GetCleanedServer(dbType); - [TestCase(DatabaseType.MySql)] - [TestCase(DatabaseType.MicrosoftSQLServer)] - public void Test_Redist_CacheMisses(DatabaseType dbType) + DiscoveredTable map; + + using (var dt = new DataTable()) { - var db = GetCleanedServer(dbType); - - DiscoveredTable map; - - using (var dt = new DataTable()) - { - dt.Columns.Add("CHI"); - dt.Columns.Add("ECHI"); - - dt.Rows.Add("0101010101", "0A0A0A0A0A"); - map = db.CreateTable("Map", dt); - } - - var options = new IdentifierMapperOptions - { - MappingTableName = map.GetFullyQualifiedName(), - MappingConnectionString = 
db.Server.Builder.ConnectionString, - SwapColumnName = "CHI", - ReplacementColumnName = "ECHI", - MappingDatabaseType = db.Server.DatabaseType - }; - - RedisSwapper swapper; - - try - { - swapper = new RedisSwapper(TestRedisServer, new TableLookupSwapper()); - swapper.Setup(options); - - ClearRedisServer(); - } - catch (RedisConnectionException) - { - Assert.Inconclusive(); - throw new Exception("To keep static analysis happy, btw Redis was unavailable"); - } - - //hit on the lookup table - string? answer = swapper.GetSubstitutionFor("GOGOGO", out string? reason); - Assert.Multiple(() => - { - Assert.That(answer, Is.Null); - Assert.That(reason, Is.EqualTo("No match found for 'GOGOGO'")); - - //hit didn't come from Redis - Assert.That(swapper.CacheHit, Is.EqualTo(0)); - Assert.That(swapper.Fail, Is.EqualTo(1)); - }); - - //hit from Redis - string? answer2 = swapper.GetSubstitutionFor("GOGOGO", out string? reason2); - Assert.Multiple(() => - { - Assert.That(answer2, Is.Null); - Assert.That(reason2, Is.EqualTo("Value 'GOGOGO' was cached in Redis as missing (i.e. 
no mapping was found)")); - - //hit must come from Redis - Assert.That(swapper.CacheHit, Is.EqualTo(1)); - Assert.That(swapper.Fail, Is.EqualTo(2)); - }); + dt.Columns.Add("CHI"); + dt.Columns.Add("ECHI"); + + dt.Rows.Add("0101010101", "0A0A0A0A0A"); + map = db.CreateTable("Map", dt); } - private static void ClearRedisServer() + var options = new IdentifierMapperOptions + { + MappingTableName = map.GetFullyQualifiedName(), + MappingConnectionString = db.Server.Builder.ConnectionString, + SwapColumnName = "CHI", + ReplacementColumnName = "ECHI", + MappingDatabaseType = db.Server.DatabaseType + }; + + RedisSwapper swapper; + + try + { + swapper = new RedisSwapper(TestRedisServer, new TableLookupSwapper()); + swapper.Setup(options); + + ClearRedisServer(); + } + catch (RedisConnectionException) { - using var admin = ConnectionMultiplexer.Connect($"{TestRedisServer},allowAdmin=true"); - foreach (var server in admin.GetEndPoints().Select(e => admin.GetServer(e))) - server.FlushAllDatabases(); + Assert.Inconclusive(); + throw new Exception("To keep static analysis happy, btw Redis was unavailable"); } + + //hit on the lookup table + string? answer = swapper.GetSubstitutionFor("GOGOGO", out string? reason); + Assert.Multiple(() => + { + Assert.That(answer, Is.Null); + Assert.That(reason, Is.EqualTo("No match found for 'GOGOGO'")); + + //hit didn't come from Redis + Assert.That(swapper.CacheHit, Is.EqualTo(0)); + Assert.That(swapper.Fail, Is.EqualTo(1)); + }); + + //hit from Redis + string? answer2 = swapper.GetSubstitutionFor("GOGOGO", out string? reason2); + Assert.Multiple(() => + { + Assert.That(answer2, Is.Null); + Assert.That(reason2, Is.EqualTo("Value 'GOGOGO' was cached in Redis as missing (i.e. 
no mapping was found)")); + + //hit must come from Redis + Assert.That(swapper.CacheHit, Is.EqualTo(1)); + Assert.That(swapper.Fail, Is.EqualTo(2)); + }); + } + + private static void ClearRedisServer() + { + using var admin = ConnectionMultiplexer.Connect($"{TestRedisServer},allowAdmin=true"); + foreach (var server in admin.GetEndPoints().Select(e => admin.GetServer(e))) + server.FlushAllDatabases(); } } diff --git a/tests/SmiServices.IntegrationTests/Microservices/IdentifierMapper/TableLookupWithGuidFallbackSwapperTests.cs b/tests/SmiServices.IntegrationTests/Microservices/IdentifierMapper/TableLookupWithGuidFallbackSwapperTests.cs index 86b22dc4b..fddf8f271 100644 --- a/tests/SmiServices.IntegrationTests/Microservices/IdentifierMapper/TableLookupWithGuidFallbackSwapperTests.cs +++ b/tests/SmiServices.IntegrationTests/Microservices/IdentifierMapper/TableLookupWithGuidFallbackSwapperTests.cs @@ -9,159 +9,158 @@ using Tests.Common; using TypeGuesser; -namespace SmiServices.IntegrationTests.Microservices.IdentifierMapper +namespace SmiServices.IntegrationTests.Microservices.IdentifierMapper; + +[RequiresRelationalDb(DatabaseType.MicrosoftSQLServer)] +[RequiresRelationalDb(DatabaseType.MySql)] +public class TableLookupWithGuidFallbackSwapperTests : DatabaseTests { - [RequiresRelationalDb(DatabaseType.MicrosoftSQLServer)] - [RequiresRelationalDb(DatabaseType.MySql)] - public class TableLookupWithGuidFallbackSwapperTests : DatabaseTests + [TestCase(DatabaseType.MySql)] + [TestCase(DatabaseType.MicrosoftSQLServer)] + public void Test_Cache1Hit1Miss(DatabaseType dbType) { - [TestCase(DatabaseType.MySql)] - [TestCase(DatabaseType.MicrosoftSQLServer)] - public void Test_Cache1Hit1Miss(DatabaseType dbType) + var db = GetCleanedServer(dbType); + + DiscoveredTable map; + + using (var dt = new DataTable()) + { + dt.Columns.Add("CHI"); + dt.Columns.Add("ECHI"); + + dt.Rows.Add("0101010101", "0A0A0A0A0A"); + map = db.CreateTable("Map", dt); + } + + var options = new 
IdentifierMapperOptions + { + MappingTableName = map.GetFullyQualifiedName(), + MappingConnectionString = db.Server.Builder.ConnectionString, + SwapColumnName = "CHI", + ReplacementColumnName = "ECHI", + MappingDatabaseType = db.Server.DatabaseType + }; + + var swapper = new TableLookupWithGuidFallbackSwapper(); + swapper.Setup(options); + + //cache hit + var answer = swapper.GetSubstitutionFor("0101010101", out var reason); + Assert.Multiple(() => + { + Assert.That(answer, Is.EqualTo("0A0A0A0A0A")); + Assert.That(reason, Is.Null); + }); + + var guidTable = swapper.GetGuidTableIfAny(options); + + Assert.Multiple(() => + { + Assert.That(guidTable!.GetRuntimeName(), Is.EqualTo("Map_guid")); + + //The key column should match the SwapColumnName + Assert.That(guidTable.DiscoverColumn("CHI"), Is.Not.Null); + + //but the swap column should always be called guid + Assert.That(guidTable.DiscoverColumn("guid"), Is.Not.Null); + }); + + var answer2 = swapper.GetSubstitutionFor("0202020202", out reason); + + //should be a guid e.g. 
like "bc70d07d-4c77-4086-be1c-2971fd66ccf2" + Assert.That(answer2, Is.Not.Null); + Assert.Multiple(() => + { + Assert.That(answer2!.Count(c => c == '-'), Is.EqualTo(4), $"Answer '{answer2}' did not look like a guid"); + Assert.That(reason, Is.Null); + + //make sure the guid mapping table has the correct row persisted for repeated calls + Assert.That(guidTable, Is.Not.Null); + Assert.That(guidTable?.Exists(), Is.True); + Assert.That(guidTable?.GetRowCount(), Is.EqualTo(1)); + Assert.That(guidTable?.GetDataTable().Rows[0]["CHI"], Is.EqualTo("0202020202")); + Assert.That(guidTable?.GetDataTable().Rows[0]["guid"], Is.EqualTo(answer2)); + + + //repeated misses should not result in more rows and should return the same guid (obviously) + Assert.That(swapper.GetSubstitutionFor("0202020202", out reason), Is.EqualTo(answer2)); + }); + Assert.That(swapper.GetSubstitutionFor("0202020202", out reason), Is.EqualTo(answer2)); + Assert.Multiple(() => { - var db = GetCleanedServer(dbType); - - DiscoveredTable map; - - using (var dt = new DataTable()) - { - dt.Columns.Add("CHI"); - dt.Columns.Add("ECHI"); - - dt.Rows.Add("0101010101", "0A0A0A0A0A"); - map = db.CreateTable("Map", dt); - } - - var options = new IdentifierMapperOptions - { - MappingTableName = map.GetFullyQualifiedName(), - MappingConnectionString = db.Server.Builder.ConnectionString, - SwapColumnName = "CHI", - ReplacementColumnName = "ECHI", - MappingDatabaseType = db.Server.DatabaseType - }; - - var swapper = new TableLookupWithGuidFallbackSwapper(); - swapper.Setup(options); - - //cache hit - var answer = swapper.GetSubstitutionFor("0101010101", out var reason); - Assert.Multiple(() => - { - Assert.That(answer, Is.EqualTo("0A0A0A0A0A")); - Assert.That(reason, Is.Null); - }); - - var guidTable = swapper.GetGuidTableIfAny(options); - - Assert.Multiple(() => - { - Assert.That(guidTable!.GetRuntimeName(), Is.EqualTo("Map_guid")); - - //The key column should match the SwapColumnName - 
Assert.That(guidTable.DiscoverColumn("CHI"), Is.Not.Null); - - //but the swap column should always be called guid - Assert.That(guidTable.DiscoverColumn("guid"), Is.Not.Null); - }); - - var answer2 = swapper.GetSubstitutionFor("0202020202", out reason); - - //should be a guid e.g. like "bc70d07d-4c77-4086-be1c-2971fd66ccf2" - Assert.That(answer2, Is.Not.Null); - Assert.Multiple(() => - { - Assert.That(answer2!.Count(c => c == '-'), Is.EqualTo(4), $"Answer '{answer2}' did not look like a guid"); - Assert.That(reason, Is.Null); - - //make sure the guid mapping table has the correct row persisted for repeated calls - Assert.That(guidTable, Is.Not.Null); - Assert.That(guidTable?.Exists(), Is.True); - Assert.That(guidTable?.GetRowCount(), Is.EqualTo(1)); - Assert.That(guidTable?.GetDataTable().Rows[0]["CHI"], Is.EqualTo("0202020202")); - Assert.That(guidTable?.GetDataTable().Rows[0]["guid"], Is.EqualTo(answer2)); - - - //repeated misses should not result in more rows and should return the same guid (obviously) - Assert.That(swapper.GetSubstitutionFor("0202020202", out reason), Is.EqualTo(answer2)); - }); Assert.That(swapper.GetSubstitutionFor("0202020202", out reason), Is.EqualTo(answer2)); - Assert.Multiple(() => - { - Assert.That(swapper.GetSubstitutionFor("0202020202", out reason), Is.EqualTo(answer2)); - Assert.That(guidTable?.GetRowCount(), Is.EqualTo(1)); - Assert.That(guidTable?.GetDataTable().Rows[0]["CHI"], Is.EqualTo("0202020202")); - Assert.That(guidTable?.GetDataTable().Rows[0]["guid"], Is.EqualTo(answer2)); - }); + Assert.That(guidTable?.GetRowCount(), Is.EqualTo(1)); + Assert.That(guidTable?.GetDataTable().Rows[0]["CHI"], Is.EqualTo("0202020202")); + Assert.That(guidTable?.GetDataTable().Rows[0]["guid"], Is.EqualTo(answer2)); + }); - //now insert a legit mapping for 0202020202 - map.Insert(new Dictionary - {{"CHI","0202020202"},{"ECHI","0B0B0B0B0B"}}); + //now insert a legit mapping for 0202020202 + map.Insert(new Dictionary + 
{{"CHI","0202020202"},{"ECHI","0B0B0B0B0B"}}); - //note that the below line could fail if we ever implement miss caching (i.e. cache that we looked up the value and failed in the lookup swapper in which case this test would need to clearcache) + //note that the below line could fail if we ever implement miss caching (i.e. cache that we looked up the value and failed in the lookup swapper in which case this test would need to clearcache) - //now that we have a cache hit we can lookup the good value - Assert.That(swapper.GetSubstitutionFor("0202020202", out reason), Is.EqualTo("0B0B0B0B0B")); + //now that we have a cache hit we can lookup the good value + Assert.That(swapper.GetSubstitutionFor("0202020202", out reason), Is.EqualTo("0B0B0B0B0B")); + + } + [TestCase(DatabaseType.MySql, true)] + [TestCase(DatabaseType.MySql, false)] + [TestCase(DatabaseType.MicrosoftSQLServer, true)] + [TestCase(DatabaseType.MicrosoftSQLServer, false)] + public void Test_SwapValueTooLong(DatabaseType dbType, bool createGuidTableUpFront) + { + var db = GetCleanedServer(dbType); + + DiscoveredTable map; + + using (var dt = new DataTable()) + { + dt.Columns.Add("CHI"); + dt.Columns.Add("ECHI"); + + dt.Rows.Add("0101010101", "0A0A0A0A0A"); + map = db.CreateTable("Map", dt); } - [TestCase(DatabaseType.MySql, true)] - [TestCase(DatabaseType.MySql, false)] - [TestCase(DatabaseType.MicrosoftSQLServer, true)] - [TestCase(DatabaseType.MicrosoftSQLServer, false)] - public void Test_SwapValueTooLong(DatabaseType dbType, bool createGuidTableUpFront) + using (var dt = new DataTable()) { - var db = GetCleanedServer(dbType); - - DiscoveredTable map; - - using (var dt = new DataTable()) - { - dt.Columns.Add("CHI"); - dt.Columns.Add("ECHI"); - - dt.Rows.Add("0101010101", "0A0A0A0A0A"); - map = db.CreateTable("Map", dt); - } - - using (var dt = new DataTable()) - { - dt.Columns.Add("CHI"); - dt.Columns.Add("guid"); - - } - - if (createGuidTableUpFront) - db.CreateTable("Map_guid", - [ - new("CHI",new 
DatabaseTypeRequest(typeof(string),30,null)), - new("Guid",new DatabaseTypeRequest(typeof(string),36,null)), - ]); - - - var options = new IdentifierMapperOptions - { - MappingTableName = map.GetFullyQualifiedName(), - MappingConnectionString = db.Server.Builder.ConnectionString, - SwapColumnName = "CHI", - ReplacementColumnName = "ECHI", - MappingDatabaseType = db.Server.DatabaseType - }; - - var swapper = new TableLookupWithGuidFallbackSwapper(); - swapper.Setup(options); - - //cache hit - var answer = swapper.GetSubstitutionFor("010101010031002300020320402054240204022433040301", out var reason); - Assert.Multiple(() => - { - Assert.That(answer, Is.Null); - - Assert.That( - reason, Is.EqualTo($"Supplied value was too long (48) - max allowed is ({(createGuidTableUpFront ? 30 : 10)})").IgnoreCase); - }); + dt.Columns.Add("CHI"); + dt.Columns.Add("guid"); + } + + if (createGuidTableUpFront) + db.CreateTable("Map_guid", + [ + new("CHI",new DatabaseTypeRequest(typeof(string),30,null)), + new("Guid",new DatabaseTypeRequest(typeof(string),36,null)), + ]); + + + var options = new IdentifierMapperOptions + { + MappingTableName = map.GetFullyQualifiedName(), + MappingConnectionString = db.Server.Builder.ConnectionString, + SwapColumnName = "CHI", + ReplacementColumnName = "ECHI", + MappingDatabaseType = db.Server.DatabaseType + }; + + var swapper = new TableLookupWithGuidFallbackSwapper(); + swapper.Setup(options); + + //cache hit + var answer = swapper.GetSubstitutionFor("010101010031002300020320402054240204022433040301", out var reason); + Assert.Multiple(() => + { + Assert.That(answer, Is.Null); + + Assert.That( +reason, Is.EqualTo($"Supplied value was too long (48) - max allowed is ({(createGuidTableUpFront ? 
30 : 10)})").IgnoreCase); + }); } } diff --git a/tests/SmiServices.IntegrationTests/Microservices/IsIdentifiable/IsIdentifiableHostTests.cs b/tests/SmiServices.IntegrationTests/Microservices/IsIdentifiable/IsIdentifiableHostTests.cs index 71462da9d..e1ec44400 100644 --- a/tests/SmiServices.IntegrationTests/Microservices/IsIdentifiable/IsIdentifiableHostTests.cs +++ b/tests/SmiServices.IntegrationTests/Microservices/IsIdentifiable/IsIdentifiableHostTests.cs @@ -7,118 +7,117 @@ using System; using System.IO; -namespace SmiServices.IntegrationTests.Microservices.IsIdentifiable +namespace SmiServices.IntegrationTests.Microservices.IsIdentifiable; + +[TestFixture, RequiresRabbit] +public class IsIdentifiableHostTests { - [TestFixture, RequiresRabbit] - public class IsIdentifiableHostTests + [OneTimeSetUp] + public void OneTimeSetUp() { - [OneTimeSetUp] - public void OneTimeSetUp() - { - } + } - /// - /// The relative path to /data/ from the test bin directory - /// - public const string DataDirectory = @"../../../../../../../data/"; + /// + /// The relative path to /data/ from the test bin directory + /// + public const string DataDirectory = @"../../../../../../../data/"; - [Test] - public void TestClassifierName_NoClassifier() - { - var options = new GlobalOptionsFactory().Load(nameof(TestClassifierName_NoClassifier)); + [Test] + public void TestClassifierName_NoClassifier() + { + var options = new GlobalOptionsFactory().Load(nameof(TestClassifierName_NoClassifier)); - options.IsIdentifiableServiceOptions!.ClassifierType = ""; - var ex = Assert.Throws(() => new IsIdentifiableHost(options)); - Assert.That(ex!.Message, Does.Contain("No IClassifier has been set in options. Enter a value for " + nameof(options.IsIdentifiableServiceOptions.ClassifierType))); - } + options.IsIdentifiableServiceOptions!.ClassifierType = ""; + var ex = Assert.Throws(() => new IsIdentifiableHost(options)); + Assert.That(ex!.Message, Does.Contain("No IClassifier has been set in options. 
Enter a value for " + nameof(options.IsIdentifiableServiceOptions.ClassifierType))); + } - [Test] - public void TestClassifierName_NotRecognized() - { - var options = new GlobalOptionsFactory().Load(nameof(TestClassifierName_NotRecognized)); - options.IsIdentifiableServiceOptions!.DataDirectory = TestContext.CurrentContext.WorkDirectory; + [Test] + public void TestClassifierName_NotRecognized() + { + var options = new GlobalOptionsFactory().Load(nameof(TestClassifierName_NotRecognized)); + options.IsIdentifiableServiceOptions!.DataDirectory = TestContext.CurrentContext.WorkDirectory; + + options.IsIdentifiableServiceOptions.ClassifierType = "HappyFunTimes"; + var ex = Assert.Throws(() => new IsIdentifiableHost(options)); + Assert.That(ex!.Message, Does.Contain("Could not load type 'HappyFunTimes' from")); + } + + [Test] + public void TestClassifierName_ValidClassifier() + { + var options = new GlobalOptionsFactory().Load(nameof(TestClassifierName_ValidClassifier)); + + var testDcm = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, nameof(TestClassifierName_ValidClassifier), "f1.dcm")); Path.Combine(TestContext.CurrentContext.TestDirectory, nameof(TestClassifierName_ValidClassifier), "f1.dcm"); + new TestData().Create(testDcm); - options.IsIdentifiableServiceOptions.ClassifierType = "HappyFunTimes"; - var ex = Assert.Throws(() => new IsIdentifiableHost(options)); - Assert.That(ex!.Message, Does.Contain("Could not load type 'HappyFunTimes' from")); - } + using var tester = new MicroserviceTester(options.RabbitOptions!, options.IsIdentifiableServiceOptions!); + tester.CreateExchange(options.IsIdentifiableServiceOptions!.IsIdentifiableProducerOptions!.ExchangeName!, null); - [Test] - public void TestClassifierName_ValidClassifier() + options.IsIdentifiableServiceOptions.ClassifierType = typeof(RejectAllClassifier).FullName; + options.IsIdentifiableServiceOptions.DataDirectory = TestContext.CurrentContext.TestDirectory; + + var extractRoot = 
Path.Join(Path.GetTempPath(), "extractRoot"); + Directory.CreateDirectory(extractRoot); + options.FileSystemOptions!.ExtractRoot = extractRoot; + + var host = new IsIdentifiableHost(options); + Assert.That(host, Is.Not.Null); + host.Start(); + + tester.SendMessage(options.IsIdentifiableServiceOptions, new ExtractedFileStatusMessage { - var options = new GlobalOptionsFactory().Load(nameof(TestClassifierName_ValidClassifier)); - - var testDcm = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, nameof(TestClassifierName_ValidClassifier), "f1.dcm")); Path.Combine(TestContext.CurrentContext.TestDirectory, nameof(TestClassifierName_ValidClassifier), "f1.dcm"); - new TestData().Create(testDcm); - - using var tester = new MicroserviceTester(options.RabbitOptions!, options.IsIdentifiableServiceOptions!); - tester.CreateExchange(options.IsIdentifiableServiceOptions!.IsIdentifiableProducerOptions!.ExchangeName!, null); - - options.IsIdentifiableServiceOptions.ClassifierType = typeof(RejectAllClassifier).FullName; - options.IsIdentifiableServiceOptions.DataDirectory = TestContext.CurrentContext.TestDirectory; - - var extractRoot = Path.Join(Path.GetTempPath(), "extractRoot"); - Directory.CreateDirectory(extractRoot); - options.FileSystemOptions!.ExtractRoot = extractRoot; - - var host = new IsIdentifiableHost(options); - Assert.That(host, Is.Not.Null); - host.Start(); - - tester.SendMessage(options.IsIdentifiableServiceOptions, new ExtractedFileStatusMessage - { - DicomFilePath = "yay.dcm", - OutputFilePath = testDcm.FullName, - ProjectNumber = "100", - ExtractionDirectory = "./fish", - Modality = "CT", - StatusMessage = "yay!", - Status = ExtractedFileStatus.Anonymised - }); - - TestTimelineAwaiter.Await(() => host.Consumer.AckCount == 1); - } - - [Ignore("Requires leptonica fix")] - [Test] - public void TestIsIdentifiable_TesseractStanfordDicomFileClassifier() + DicomFilePath = "yay.dcm", + OutputFilePath = testDcm.FullName, + ProjectNumber = "100", + 
ExtractionDirectory = "./fish", + Modality = "CT", + StatusMessage = "yay!", + Status = ExtractedFileStatus.Anonymised + }); + + TestTimelineAwaiter.Await(() => host.Consumer.AckCount == 1); + } + + [Ignore("Requires leptonica fix")] + [Test] + public void TestIsIdentifiable_TesseractStanfordDicomFileClassifier() + { + var options = new GlobalOptionsFactory().Load(nameof(TestIsIdentifiable_TesseractStanfordDicomFileClassifier)); + + // Create a test data directory containing IsIdentifiableRules with 0 rules, and tessdata with the eng.traineddata classifier + // TODO(rkm 2020-04-14) This is a stop-gap solution until the tests are properly refactored + var testRulesDir = new DirectoryInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, "data", "IsIdentifiableRules")); + testRulesDir.Create(); + options.IsIdentifiableServiceOptions!.DataDirectory = testRulesDir.Parent!.FullName; + var tessDir = new DirectoryInfo(Path.Combine(testRulesDir.Parent.FullName, "tessdata")); + tessDir.Create(); + var dest = Path.Combine(tessDir.FullName, "eng.traineddata"); + if (!File.Exists(dest)) + File.Copy(Path.Combine(DataDirectory, "tessdata", "eng.traineddata"), dest); + + var testDcm = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, nameof(TestIsIdentifiable_TesseractStanfordDicomFileClassifier), "f1.dcm")); + + Path.Combine(TestContext.CurrentContext.TestDirectory, nameof(TestClassifierName_ValidClassifier), "f1.dcm"); + new TestData().Create(testDcm); + + using var tester = new MicroserviceTester(options.RabbitOptions!, options.IsIdentifiableServiceOptions); + options.IsIdentifiableServiceOptions.ClassifierType = typeof(TesseractStanfordDicomFileClassifier).FullName; + + var host = new IsIdentifiableHost(options); + host.Start(); + + tester.SendMessage(options.IsIdentifiableServiceOptions, new ExtractedFileStatusMessage { - var options = new GlobalOptionsFactory().Load(nameof(TestIsIdentifiable_TesseractStanfordDicomFileClassifier)); - - // Create a test 
data directory containing IsIdentifiableRules with 0 rules, and tessdata with the eng.traineddata classifier - // TODO(rkm 2020-04-14) This is a stop-gap solution until the tests are properly refactored - var testRulesDir = new DirectoryInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, "data", "IsIdentifiableRules")); - testRulesDir.Create(); - options.IsIdentifiableServiceOptions!.DataDirectory = testRulesDir.Parent!.FullName; - var tessDir = new DirectoryInfo(Path.Combine(testRulesDir.Parent.FullName, "tessdata")); - tessDir.Create(); - var dest = Path.Combine(tessDir.FullName, "eng.traineddata"); - if (!File.Exists(dest)) - File.Copy(Path.Combine(DataDirectory, "tessdata", "eng.traineddata"), dest); - - var testDcm = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, nameof(TestIsIdentifiable_TesseractStanfordDicomFileClassifier), "f1.dcm")); - - Path.Combine(TestContext.CurrentContext.TestDirectory, nameof(TestClassifierName_ValidClassifier), "f1.dcm"); - new TestData().Create(testDcm); - - using var tester = new MicroserviceTester(options.RabbitOptions!, options.IsIdentifiableServiceOptions); - options.IsIdentifiableServiceOptions.ClassifierType = typeof(TesseractStanfordDicomFileClassifier).FullName; - - var host = new IsIdentifiableHost(options); - host.Start(); - - tester.SendMessage(options.IsIdentifiableServiceOptions, new ExtractedFileStatusMessage - { - DicomFilePath = "yay.dcm", - OutputFilePath = testDcm.FullName, - ProjectNumber = "100", - ExtractionDirectory = "./fish", - StatusMessage = "yay!", - Status = ExtractedFileStatus.Anonymised - }); - - TestTimelineAwaiter.Await(() => host.Consumer.AckCount == 1 || host.Consumer.NackCount == 1); - Assert.That(host.Consumer.AckCount, Is.EqualTo(1), "Tesseract not acking"); - } + DicomFilePath = "yay.dcm", + OutputFilePath = testDcm.FullName, + ProjectNumber = "100", + ExtractionDirectory = "./fish", + StatusMessage = "yay!", + Status = ExtractedFileStatus.Anonymised + }); + + 
TestTimelineAwaiter.Await(() => host.Consumer.AckCount == 1 || host.Consumer.NackCount == 1); + Assert.That(host.Consumer.AckCount, Is.EqualTo(1), "Tesseract not acking"); } } diff --git a/tests/SmiServices.IntegrationTests/Microservices/MongoDBPopulator/MongoDbAdapterTests.cs b/tests/SmiServices.IntegrationTests/Microservices/MongoDBPopulator/MongoDbAdapterTests.cs index d19787f88..e6ad61026 100644 --- a/tests/SmiServices.IntegrationTests/Microservices/MongoDBPopulator/MongoDbAdapterTests.cs +++ b/tests/SmiServices.IntegrationTests/Microservices/MongoDBPopulator/MongoDbAdapterTests.cs @@ -5,68 +5,67 @@ using SmiServices.UnitTests.Microservices.MongoDbPopulator; using System.Collections.Generic; -namespace SmiServices.IntegrationTests.Microservices.MongoDBPopulator +namespace SmiServices.IntegrationTests.Microservices.MongoDBPopulator; + +[TestFixture, RequiresMongoDb] +public class MongoDbAdapterTests { - [TestFixture, RequiresMongoDb] - public class MongoDbAdapterTests + private MongoDbPopulatorTestHelper _helper = null!; + + [OneTimeSetUp] + public void OneTimeSetUp() { - private MongoDbPopulatorTestHelper _helper = null!; + _helper = new MongoDbPopulatorTestHelper(); + _helper.SetupSuite(); + } - [OneTimeSetUp] - public void OneTimeSetUp() - { - _helper = new MongoDbPopulatorTestHelper(); - _helper.SetupSuite(); - } + [OneTimeTearDown] + public void OneTimeTearDown() + { + _helper.Dispose(); + } - [OneTimeTearDown] - public void OneTimeTearDown() - { - _helper.Dispose(); - } + /// + /// Test basic write operation of the adapter + /// + [Test] + public void TestBasicWrite() + { + string collectionName = MongoDbPopulatorTestHelper.GetCollectionNameForTest("TestBasicWrite"); + var adapter = new MongoDbAdapter("TestApplication", _helper.Globals.MongoDatabases!.DicomStoreOptions!, + collectionName); - /// - /// Test basic write operation of the adapter - /// - [Test] - public void TestBasicWrite() + var testDoc = new BsonDocument { - string collectionName = 
MongoDbPopulatorTestHelper.GetCollectionNameForTest("TestBasicWrite"); - var adapter = new MongoDbAdapter("TestApplication", _helper.Globals.MongoDatabases!.DicomStoreOptions!, - collectionName); - - var testDoc = new BsonDocument - { - {"hello", "world"} - }; + {"hello", "world"} + }; - WriteResult result = adapter.WriteMany([testDoc]); + WriteResult result = adapter.WriteMany([testDoc]); - Assert.Multiple(() => - { - Assert.That(result, Is.EqualTo(WriteResult.Success)); - Assert.That(_helper.TestDatabase.GetCollection(collectionName) - .CountDocuments(new BsonDocument()), Is.EqualTo(1)); - }); + Assert.Multiple(() => + { + Assert.That(result, Is.EqualTo(WriteResult.Success)); + Assert.That(_helper.TestDatabase.GetCollection(collectionName) + .CountDocuments(new BsonDocument()), Is.EqualTo(1)); + }); - BsonDocument doc = - _helper.TestDatabase.GetCollection(collectionName).Find(_ => true).ToList()[0]; + BsonDocument doc = + _helper.TestDatabase.GetCollection(collectionName).Find(_ => true).ToList()[0]; - Assert.That(doc, Is.EqualTo(testDoc)); + Assert.That(doc, Is.EqualTo(testDoc)); - var toWrite = new List(); + var toWrite = new List(); - for (var i = 0; i < 99; i++) - toWrite.Add(new BsonDocument { { "hello", i } }); + for (var i = 0; i < 99; i++) + toWrite.Add(new BsonDocument { { "hello", i } }); - result = adapter.WriteMany(toWrite); + result = adapter.WriteMany(toWrite); - Assert.Multiple(() => - { - Assert.That(result, Is.EqualTo(WriteResult.Success)); - Assert.That(_helper.TestDatabase.GetCollection(collectionName) - .CountDocuments(new BsonDocument()), Is.EqualTo(100)); - }); - } + Assert.Multiple(() => + { + Assert.That(result, Is.EqualTo(WriteResult.Success)); + Assert.That(_helper.TestDatabase.GetCollection(collectionName) + .CountDocuments(new BsonDocument()), Is.EqualTo(100)); + }); } } diff --git a/tests/SmiServices.IntegrationTests/Microservices/MongoDBPopulator/MongoDbPopulatorHostTests.cs 
b/tests/SmiServices.IntegrationTests/Microservices/MongoDBPopulator/MongoDbPopulatorHostTests.cs index cbc713702..cdc483a95 100644 --- a/tests/SmiServices.IntegrationTests/Microservices/MongoDBPopulator/MongoDbPopulatorHostTests.cs +++ b/tests/SmiServices.IntegrationTests/Microservices/MongoDBPopulator/MongoDbPopulatorHostTests.cs @@ -13,107 +13,106 @@ using System.Threading; -namespace SmiServices.IntegrationTests.Microservices.MongoDBPopulator +namespace SmiServices.IntegrationTests.Microservices.MongoDBPopulator; + +[TestFixture, RequiresMongoDb, RequiresRabbit] +public class MongoDbPopulatorHostTests { - [TestFixture, RequiresMongoDb, RequiresRabbit] - public class MongoDbPopulatorHostTests - { - private MongoDbPopulatorTestHelper _helper = null!; + private MongoDbPopulatorTestHelper _helper = null!; - [SetUp] - public void SetUp() - { - _helper = new MongoDbPopulatorTestHelper(); - _helper.SetupSuite(); - } + [SetUp] + public void SetUp() + { + _helper = new MongoDbPopulatorTestHelper(); + _helper.SetupSuite(); + } - [TearDown] - public void TearDown() - { - _helper.Dispose(); - } + [TearDown] + public void TearDown() + { + _helper.Dispose(); + } - /// - /// Asserts that we throw an exception if we can't connect to MongoDb on startup - /// - [Test] - public void TestMissingMongoConnectionOnStartup() - { - GlobalOptions options = MongoDbPopulatorTestHelper.GetNewMongoDbPopulatorOptions(); - options.MongoDatabases!.DicomStoreOptions!.Port = 12345; + /// + /// Asserts that we throw an exception if we can't connect to MongoDb on startup + /// + [Test] + public void TestMissingMongoConnectionOnStartup() + { + GlobalOptions options = MongoDbPopulatorTestHelper.GetNewMongoDbPopulatorOptions(); + options.MongoDatabases!.DicomStoreOptions!.Port = 12345; - // ReSharper disable once ObjectCreationAsStatement - Assert.Throws(() => new MongoDbPopulatorMessageConsumer(options.MongoDatabases.DicomStoreOptions, options.MongoDbPopulatorOptions!, new ConsumerOptions())); - } + 
// ReSharper disable once ObjectCreationAsStatement + Assert.Throws(() => new MongoDbPopulatorMessageConsumer(options.MongoDatabases.DicomStoreOptions, options.MongoDbPopulatorOptions!, new ConsumerOptions())); + } - /// - /// Tests basic operation of the populator by asserting the correct number of messages are written before a timeout - /// - /// - [Test] - [TestCase(1)] - [TestCase(10)] - [TestCase(100)] - public void TestPopulatorBasic(int nMessages) - { - // Arrange + /// + /// Tests basic operation of the populator by asserting the correct number of messages are written before a timeout + /// + /// + [Test] + [TestCase(1)] + [TestCase(10)] + [TestCase(100)] + public void TestPopulatorBasic(int nMessages) + { + // Arrange - string currentCollectionName = MongoDbPopulatorTestHelper.GetCollectionNameForTest( - $"TestPopulatorBasic({nMessages})"); + string currentCollectionName = MongoDbPopulatorTestHelper.GetCollectionNameForTest( + $"TestPopulatorBasic({nMessages})"); - _helper.Globals.MongoDbPopulatorOptions!.SeriesCollection = currentCollectionName; + _helper.Globals.MongoDbPopulatorOptions!.SeriesCollection = currentCollectionName; - var tester = new MicroserviceTester(_helper.Globals.RabbitOptions!, _helper.Globals.MongoDbPopulatorOptions.SeriesQueueConsumerOptions!, _helper.Globals.MongoDbPopulatorOptions.ImageQueueConsumerOptions!); - var host = new MongoDbPopulatorHost(_helper.Globals); + var tester = new MicroserviceTester(_helper.Globals.RabbitOptions!, _helper.Globals.MongoDbPopulatorOptions.SeriesQueueConsumerOptions!, _helper.Globals.MongoDbPopulatorOptions.ImageQueueConsumerOptions!); + var host = new MongoDbPopulatorHost(_helper.Globals); - host.Start(); + host.Start(); - using var timeline = new TestTimeline(tester); - var ds = new DicomDataset - { - new DicomUniqueIdentifier(DicomTag.SOPInstanceUID, "1.2.3.4") - }; + using var timeline = new TestTimeline(tester); + var ds = new DicomDataset + { + new 
DicomUniqueIdentifier(DicomTag.SOPInstanceUID, "1.2.3.4") + }; - var message = new SeriesMessage - { - DirectoryPath = "DirectoryPath-test", - StudyInstanceUID = "StudyInstanceUID-test", - SeriesInstanceUID = "SeriesInstanceUID-test", - ImagesInSeries = 123, - DicomDataset = DicomTypeTranslater.SerializeDatasetToJson(ds) - }; + var message = new SeriesMessage + { + DirectoryPath = "DirectoryPath-test", + StudyInstanceUID = "StudyInstanceUID-test", + SeriesInstanceUID = "SeriesInstanceUID-test", + ImagesInSeries = 123, + DicomDataset = DicomTypeTranslater.SerializeDatasetToJson(ds) + }; - // Act + // Act - for (var i = 0; i < nMessages; i++) - timeline.SendMessage(_helper.Globals.MongoDbPopulatorOptions.SeriesQueueConsumerOptions!, message); + for (var i = 0; i < nMessages; i++) + timeline.SendMessage(_helper.Globals.MongoDbPopulatorOptions.SeriesQueueConsumerOptions!, message); - timeline.StartTimeline(); + timeline.StartTimeline(); - var timeout = 30000; - const int stepSize = 500; + var timeout = 30000; + const int stepSize = 500; - if (Debugger.IsAttached) - timeout = int.MaxValue; + if (Debugger.IsAttached) + timeout = int.MaxValue; - var nWritten = 0L; + var nWritten = 0L; - while (nWritten < nMessages && timeout > 0) - { - nWritten = _helper.TestDatabase.GetCollection(currentCollectionName).CountDocuments(new BsonDocument()); + while (nWritten < nMessages && timeout > 0) + { + nWritten = _helper.TestDatabase.GetCollection(currentCollectionName).CountDocuments(new BsonDocument()); - Thread.Sleep(stepSize); - timeout -= stepSize; - } + Thread.Sleep(stepSize); + timeout -= stepSize; + } - // Assert + // Assert - if (timeout <= 0) - Assert.Fail("Failed to process expected number of messages within the timeout"); + if (timeout <= 0) + Assert.Fail("Failed to process expected number of messages within the timeout"); - host.Stop("Test end"); - tester.Shutdown(); - } + host.Stop("Test end"); + tester.Shutdown(); } } diff --git 
a/tests/SmiServices.IntegrationTests/Microservices/MongoDBPopulator/Processing/ImageMessageProcessorTests.cs b/tests/SmiServices.IntegrationTests/Microservices/MongoDBPopulator/Processing/ImageMessageProcessorTests.cs index 5f175e4ff..aee9a0a02 100644 --- a/tests/SmiServices.IntegrationTests/Microservices/MongoDBPopulator/Processing/ImageMessageProcessorTests.cs +++ b/tests/SmiServices.IntegrationTests/Microservices/MongoDBPopulator/Processing/ImageMessageProcessorTests.cs @@ -16,185 +16,184 @@ using System.Linq; -namespace SmiServices.IntegrationTests.Microservices.MongoDBPopulator.Processing +namespace SmiServices.IntegrationTests.Microservices.MongoDBPopulator.Processing; + +[TestFixture, RequiresMongoDb] +public class ImageMessageProcessorTests { - [TestFixture, RequiresMongoDb] - public class ImageMessageProcessorTests - { - private MongoDbPopulatorTestHelper _helper = null!; + private MongoDbPopulatorTestHelper _helper = null!; - private readonly List _imageMessageProps = typeof(DicomFileMessage).GetProperties().Select(x => x.Name).ToList(); + private readonly List _imageMessageProps = typeof(DicomFileMessage).GetProperties().Select(x => x.Name).ToList(); - [OneTimeSetUp] - public void OneTimeSetUp() - { - _helper = new MongoDbPopulatorTestHelper(); - _helper.SetupSuite(); - } + [OneTimeSetUp] + public void OneTimeSetUp() + { + _helper = new MongoDbPopulatorTestHelper(); + _helper.SetupSuite(); + } - [OneTimeTearDown] - public void OneTimeTearDown() - { - _helper.Dispose(); - } + [OneTimeTearDown] + public void OneTimeTearDown() + { + _helper.Dispose(); + } - private void Validate(DicomFileMessage message, MessageHeader header, BsonDocument document) - { - Assert.That(document.TryGetElement("header", out var element), Is.True); + private void Validate(DicomFileMessage message, MessageHeader header, BsonDocument document) + { + Assert.That(document.TryGetElement("header", out var element), Is.True); - var docHeader = (BsonDocument)element.Value; - 
Assert.That(docHeader.ElementCount, Is.EqualTo(_imageMessageProps.Count - 3)); - ValidateHeader(message, header, docHeader); + var docHeader = (BsonDocument)element.Value; + Assert.That(docHeader.ElementCount, Is.EqualTo(_imageMessageProps.Count - 3)); + ValidateHeader(message, header, docHeader); - DicomDataset dataset = DicomTypeTranslater.DeserializeJsonToDataset(message.DicomDataset); - Assert.That(dataset, Is.Not.Null); + DicomDataset dataset = DicomTypeTranslater.DeserializeJsonToDataset(message.DicomDataset); + Assert.That(dataset, Is.Not.Null); - BsonDocument datasetDocument = DicomTypeTranslaterReader.BuildBsonDocument(dataset); - document.Remove("_id"); - document.Remove("header"); + BsonDocument datasetDocument = DicomTypeTranslaterReader.BuildBsonDocument(dataset); + document.Remove("_id"); + document.Remove("header"); - Assert.That(document, Is.EqualTo(datasetDocument)); - } + Assert.That(document, Is.EqualTo(datasetDocument)); + } - private static void ValidateHeader(DicomFileMessage message, MessageHeader header, BsonDocument docHeader) + private static void ValidateHeader(DicomFileMessage message, MessageHeader header, BsonDocument docHeader) + { + Assert.Multiple(() => { - Assert.Multiple(() => - { - Assert.That(docHeader["DicomFilePath"].AsString, Is.EqualTo(message.DicomFilePath)); + Assert.That(docHeader["DicomFilePath"].AsString, Is.EqualTo(message.DicomFilePath)); - Assert.That(docHeader.TryGetElement("MessageHeader", out var element), Is.True); - Assert.That(element.Value, Is.Not.Null); + Assert.That(docHeader.TryGetElement("MessageHeader", out var element), Is.True); + Assert.That(element.Value, Is.Not.Null); - var messageHeaderDoc = (BsonDocument)element.Value; - Assert.That(messageHeaderDoc["ProducerProcessID"].AsInt32, Is.EqualTo(header.ProducerProcessID)); - Assert.That(messageHeaderDoc["ProducerExecutableName"].AsString, Is.EqualTo(header.ProducerExecutableName)); - Assert.That(messageHeaderDoc["OriginalPublishTimestamp"].AsInt64, 
Is.EqualTo(header.OriginalPublishTimestamp)); + var messageHeaderDoc = (BsonDocument)element.Value; + Assert.That(messageHeaderDoc["ProducerProcessID"].AsInt32, Is.EqualTo(header.ProducerProcessID)); + Assert.That(messageHeaderDoc["ProducerExecutableName"].AsString, Is.EqualTo(header.ProducerExecutableName)); + Assert.That(messageHeaderDoc["OriginalPublishTimestamp"].AsInt64, Is.EqualTo(header.OriginalPublishTimestamp)); - Assert.That(messageHeaderDoc.TryGetElement("Parents", out element), Is.True); + Assert.That(messageHeaderDoc.TryGetElement("Parents", out element), Is.True); - var parentsString = element.Value.AsString; - Assert.That(string.IsNullOrWhiteSpace(parentsString), Is.False); - Assert.That(parentsString, Has.Length.EqualTo(Guid.NewGuid().ToString().Length)); - }); - } - - [Test] - public void TestErrorHandling() - { - _helper.Globals.MongoDbPopulatorOptions!.FailedWriteLimit = 1; + var parentsString = element.Value.AsString; + Assert.That(string.IsNullOrWhiteSpace(parentsString), Is.False); + Assert.That(parentsString, Has.Length.EqualTo(Guid.NewGuid().ToString().Length)); + }); + } - var mockAdapter = new Mock(); - mockAdapter - .Setup(x => x.WriteMany(It.IsAny>(), It.IsAny())) - .Returns(WriteResult.Failure); + [Test] + public void TestErrorHandling() + { + _helper.Globals.MongoDbPopulatorOptions!.FailedWriteLimit = 1; - var processor = new ImageMessageProcessor(_helper.Globals.MongoDbPopulatorOptions, mockAdapter.Object, 1, delegate { }); + var mockAdapter = new Mock(); + mockAdapter + .Setup(x => x.WriteMany(It.IsAny>(), It.IsAny())) + .Returns(WriteResult.Failure); - Assert.Throws(() => processor.AddToWriteQueue(_helper.TestImageMessage, new MessageHeader(), 1)); - } + var processor = new ImageMessageProcessor(_helper.Globals.MongoDbPopulatorOptions, mockAdapter.Object, 1, delegate { }); - /// - /// Write a single image message and test the document format is as expected - /// - [Test] - public void TestImageDocumentFormat() - { - GlobalOptions 
options = MongoDbPopulatorTestHelper.GetNewMongoDbPopulatorOptions(); - options.MongoDbPopulatorOptions!.MongoDbFlushTime = int.MaxValue / 1000; + Assert.Throws(() => processor.AddToWriteQueue(_helper.TestImageMessage, new MessageHeader(), 1)); + } - string collectionName = MongoDbPopulatorTestHelper.GetCollectionNameForTest("TestImageDocumentFormat"); - var testAdapter = new MongoDbAdapter("TestImageDocumentFormat", options.MongoDatabases!.DicomStoreOptions!, collectionName); + /// + /// Write a single image message and test the document format is as expected + /// + [Test] + public void TestImageDocumentFormat() + { + GlobalOptions options = MongoDbPopulatorTestHelper.GetNewMongoDbPopulatorOptions(); + options.MongoDbPopulatorOptions!.MongoDbFlushTime = int.MaxValue / 1000; - var callbackUsed = false; - void exceptionCallback(Exception exception) { callbackUsed = true; } + string collectionName = MongoDbPopulatorTestHelper.GetCollectionNameForTest("TestImageDocumentFormat"); + var testAdapter = new MongoDbAdapter("TestImageDocumentFormat", options.MongoDatabases!.DicomStoreOptions!, collectionName); - var processor = new ImageMessageProcessor(options.MongoDbPopulatorOptions, testAdapter, 1, exceptionCallback); + var callbackUsed = false; + void exceptionCallback(Exception exception) { callbackUsed = true; } - var header = new MessageHeader(); + var processor = new ImageMessageProcessor(options.MongoDbPopulatorOptions, testAdapter, 1, exceptionCallback); - // Max queue size set to 1 so will immediately process this - processor.AddToWriteQueue(_helper.TestImageMessage, header, 1); + var header = new MessageHeader(); - Assert.Multiple(() => - { - Assert.That(callbackUsed, Is.False); - Assert.That(processor.AckCount, Is.EqualTo(1)); - }); + // Max queue size set to 1 so will immediately process this + processor.AddToWriteQueue(_helper.TestImageMessage, header, 1); - IMongoCollection imageCollection = _helper.TestDatabase.GetCollection(collectionName + "_SR"); + 
Assert.Multiple(() => + { + Assert.That(callbackUsed, Is.False); + Assert.That(processor.AckCount, Is.EqualTo(1)); + }); - Assert.That(imageCollection.CountDocuments(new BsonDocument()), Is.EqualTo(1)); + IMongoCollection imageCollection = _helper.TestDatabase.GetCollection(collectionName + "_SR"); - BsonDocument doc = imageCollection.FindAsync(FilterDefinition.Empty).Result.Single(); - Validate(_helper.TestImageMessage, header, doc); - } + Assert.That(imageCollection.CountDocuments(new BsonDocument()), Is.EqualTo(1)); + BsonDocument doc = imageCollection.FindAsync(FilterDefinition.Empty).Result.Single(); + Validate(_helper.TestImageMessage, header, doc); + } - [Test] - public void TestLargeMessageNack() - { - GlobalOptions options = MongoDbPopulatorTestHelper.GetNewMongoDbPopulatorOptions(); - options.MongoDbPopulatorOptions!.MongoDbFlushTime = int.MaxValue / 1000; - var adapter = new MongoDbAdapter("ImageProcessor", options.MongoDatabases!.ExtractionStoreOptions!, "largeDocumentTest"); - var processor = new ImageMessageProcessor(options.MongoDbPopulatorOptions, adapter, 1, (Exception _) => { }); + [Test] + public void TestLargeMessageNack() + { + GlobalOptions options = MongoDbPopulatorTestHelper.GetNewMongoDbPopulatorOptions(); + options.MongoDbPopulatorOptions!.MongoDbFlushTime = int.MaxValue / 1000; - var dataset = new DicomDataset - { - new DicomUnlimitedText(DicomTag.SelectorUTValue,new string('x', 16*1024*1024)) - }; + var adapter = new MongoDbAdapter("ImageProcessor", options.MongoDatabases!.ExtractionStoreOptions!, "largeDocumentTest"); + var processor = new ImageMessageProcessor(options.MongoDbPopulatorOptions, adapter, 1, (Exception _) => { }); - string json = DicomTypeTranslater.SerializeDatasetToJson(dataset); + var dataset = new DicomDataset + { + new DicomUnlimitedText(DicomTag.SelectorUTValue,new string('x', 16*1024*1024)) + }; - var largeMessage = new DicomFileMessage - { - SeriesInstanceUID = "", - StudyInstanceUID = "", - SOPInstanceUID = "", - 
DicomFilePath = "", - DicomDataset = json - }; + string json = DicomTypeTranslater.SerializeDatasetToJson(dataset); - Assert.Throws(() => processor.AddToWriteQueue(largeMessage, new MessageHeader(), 1)); + var largeMessage = new DicomFileMessage + { + SeriesInstanceUID = "", + StudyInstanceUID = "", + SOPInstanceUID = "", + DicomFilePath = "", + DicomDataset = json + }; + + Assert.Throws(() => processor.AddToWriteQueue(largeMessage, new MessageHeader(), 1)); + + dataset = + [ + // Should be ok, getting close to the threshold + new DicomUnlimitedText(DicomTag.SelectorUTValue,new string('x', 15*1024*1024 + 512)) + ]; + + json = DicomTypeTranslater.SerializeDatasetToJson(dataset); + largeMessage.DicomDataset = json; + + processor.AddToWriteQueue(largeMessage, new MessageHeader(), 2); + Assert.That(processor.AckCount, Is.EqualTo(1)); + } - dataset = - [ - // Should be ok, getting close to the threshold - new DicomUnlimitedText(DicomTag.SelectorUTValue,new string('x', 15*1024*1024 + 512)) - ]; + [Test] + public void TestLargeDocumentSplitOk() + { + GlobalOptions options = MongoDbPopulatorTestHelper.GetNewMongoDbPopulatorOptions(); + options.MongoDbPopulatorOptions!.MongoDbFlushTime = int.MaxValue / 1000; - json = DicomTypeTranslater.SerializeDatasetToJson(dataset); - largeMessage.DicomDataset = json; + var adapter = new MongoDbAdapter("ImageProcessor", options.MongoDatabases!.ExtractionStoreOptions!, "largeDocumentTest"); + var processor = new ImageMessageProcessor(options.MongoDbPopulatorOptions!, adapter, 2, (Exception e) => { }); - processor.AddToWriteQueue(largeMessage, new MessageHeader(), 2); - Assert.That(processor.AckCount, Is.EqualTo(1)); - } + var dataset = new DicomDataset + { + new DicomUnlimitedText(DicomTag.SelectorUTValue,new string('x', 15*1024*1024)) + }; - [Test] - public void TestLargeDocumentSplitOk() + var largeMessage = new DicomFileMessage { - GlobalOptions options = MongoDbPopulatorTestHelper.GetNewMongoDbPopulatorOptions(); - 
options.MongoDbPopulatorOptions!.MongoDbFlushTime = int.MaxValue / 1000; - - var adapter = new MongoDbAdapter("ImageProcessor", options.MongoDatabases!.ExtractionStoreOptions!, "largeDocumentTest"); - var processor = new ImageMessageProcessor(options.MongoDbPopulatorOptions!, adapter, 2, (Exception e) => { }); - - var dataset = new DicomDataset - { - new DicomUnlimitedText(DicomTag.SelectorUTValue,new string('x', 15*1024*1024)) - }; - - var largeMessage = new DicomFileMessage - { - SeriesInstanceUID = "", - StudyInstanceUID = "", - SOPInstanceUID = "", - DicomFilePath = "", - DicomDataset = DicomTypeTranslater.SerializeDatasetToJson(dataset) - }; - - processor.AddToWriteQueue(largeMessage, new MessageHeader(), 1); - processor.AddToWriteQueue(largeMessage, new MessageHeader(), 2); - } + SeriesInstanceUID = "", + StudyInstanceUID = "", + SOPInstanceUID = "", + DicomFilePath = "", + DicomDataset = DicomTypeTranslater.SerializeDatasetToJson(dataset) + }; + + processor.AddToWriteQueue(largeMessage, new MessageHeader(), 1); + processor.AddToWriteQueue(largeMessage, new MessageHeader(), 2); } } diff --git a/tests/SmiServices.IntegrationTests/Microservices/MongoDBPopulator/Processing/MessageProcessorTests.cs b/tests/SmiServices.IntegrationTests/Microservices/MongoDBPopulator/Processing/MessageProcessorTests.cs index a48b4a082..96a7552a4 100644 --- a/tests/SmiServices.IntegrationTests/Microservices/MongoDBPopulator/Processing/MessageProcessorTests.cs +++ b/tests/SmiServices.IntegrationTests/Microservices/MongoDBPopulator/Processing/MessageProcessorTests.cs @@ -9,72 +9,71 @@ using System; using System.Threading; -namespace SmiServices.IntegrationTests.Microservices.MongoDBPopulator.Processing +namespace SmiServices.IntegrationTests.Microservices.MongoDBPopulator.Processing; + +[TestFixture, RequiresMongoDb] +public class MessageProcessorTests { - [TestFixture, RequiresMongoDb] - public class MessageProcessorTests + private MongoDbPopulatorTestHelper _helper = null!; + + 
[OneTimeSetUp] + public void OneTimeSetUp() { - private MongoDbPopulatorTestHelper _helper = null!; + _helper = new MongoDbPopulatorTestHelper(); + _helper.SetupSuite(); + } - [OneTimeSetUp] - public void OneTimeSetUp() - { - _helper = new MongoDbPopulatorTestHelper(); - _helper.SetupSuite(); - } + [OneTimeTearDown] + public void OneTimeTearDown() + { + _helper.Dispose(); + } - [OneTimeTearDown] - public void OneTimeTearDown() - { - _helper.Dispose(); - } + /// + /// Tests that the exception callback is used if an exception is thrown in ProcessMessage + /// + [Test] + public void TestExceptionCallbackUsed() + { + var mockAdapter = Mock.Of(); - /// - /// Tests that the exception callback is used if an exception is thrown in ProcessMessage - /// - [Test] - public void TestExceptionCallbackUsed() - { - var mockAdapter = Mock.Of(); + var callbackUsed = false; + void exceptionCallback(Exception exception) { callbackUsed = true; } - var callbackUsed = false; - void exceptionCallback(Exception exception) { callbackUsed = true; } + _helper.Globals.MongoDbPopulatorOptions!.MongoDbFlushTime = 1; - _helper.Globals.MongoDbPopulatorOptions!.MongoDbFlushTime = 1; + var processor = new TestMessageProcessor(_helper.Globals.MongoDbPopulatorOptions, mockAdapter, 1, exceptionCallback); - var processor = new TestMessageProcessor(_helper.Globals.MongoDbPopulatorOptions, mockAdapter, 1, exceptionCallback); + Assert.That(processor.IsStopping, Is.False); - Assert.That(processor.IsStopping, Is.False); + Thread.Sleep(_helper.Globals.MongoDbPopulatorOptions.MongoDbFlushTime * 1000 + 100); - Thread.Sleep(_helper.Globals.MongoDbPopulatorOptions.MongoDbFlushTime * 1000 + 100); + Assert.Multiple(() => + { + Assert.That(callbackUsed, Is.True); + Assert.That(processor.IsStopping, Is.True); + }); + } - Assert.Multiple(() => - { - Assert.That(callbackUsed, Is.True); - Assert.That(processor.IsStopping, Is.True); - }); - } + // Implementation of MessageProcessor for testing + private class 
TestMessageProcessor : MessageProcessor + { + public TestMessageProcessor(MongoDbPopulatorOptions options, IMongoDbAdapter mongoDbAdapter, int maxQueueSize, Action exceptionCallback) + : base(options, mongoDbAdapter, maxQueueSize, exceptionCallback) { } - // Implementation of MessageProcessor for testing - private class TestMessageProcessor : MessageProcessor + public override void AddToWriteQueue(SeriesMessage message, IMessageHeader header, ulong deliveryTag) { - public TestMessageProcessor(MongoDbPopulatorOptions options, IMongoDbAdapter mongoDbAdapter, int maxQueueSize, Action exceptionCallback) - : base(options, mongoDbAdapter, maxQueueSize, exceptionCallback) { } - - public override void AddToWriteQueue(SeriesMessage message, IMessageHeader header, ulong deliveryTag) - { - ToProcess.Enqueue(new Tuple(new BsonDocument { { "hello", "world" } }, new MessageHeader(), deliveryTag)); - } + ToProcess.Enqueue(new Tuple(new BsonDocument { { "hello", "world" } }, new MessageHeader(), deliveryTag)); + } - public override void StopProcessing(string reason) - { - StopProcessing(); - } + public override void StopProcessing(string reason) + { + StopProcessing(); + } - protected override void ProcessQueue() - { - throw new ApplicationException("Test!"); - } + protected override void ProcessQueue() + { + throw new ApplicationException("Test!"); } } } diff --git a/tests/SmiServices.IntegrationTests/Microservices/MongoDBPopulator/Processing/SeriesMessageProcessorTests.cs b/tests/SmiServices.IntegrationTests/Microservices/MongoDBPopulator/Processing/SeriesMessageProcessorTests.cs index c639d69a2..1acde9e05 100644 --- a/tests/SmiServices.IntegrationTests/Microservices/MongoDBPopulator/Processing/SeriesMessageProcessorTests.cs +++ b/tests/SmiServices.IntegrationTests/Microservices/MongoDBPopulator/Processing/SeriesMessageProcessorTests.cs @@ -15,104 +15,103 @@ using System.Linq; -namespace SmiServices.IntegrationTests.Microservices.MongoDBPopulator.Processing +namespace 
SmiServices.IntegrationTests.Microservices.MongoDBPopulator.Processing; + +[TestFixture, RequiresMongoDb] +public class SeriesMessageProcessorTests { - [TestFixture, RequiresMongoDb] - public class SeriesMessageProcessorTests + private MongoDbPopulatorTestHelper _helper = null!; + + private readonly List _seriesMessageProps = typeof(SeriesMessage).GetProperties().Select(x => x.Name).ToList(); + + [OneTimeSetUp] + public void OneTimeSetUp() { - private MongoDbPopulatorTestHelper _helper = null!; + _helper = new MongoDbPopulatorTestHelper(); + _helper.SetupSuite(); + } - private readonly List _seriesMessageProps = typeof(SeriesMessage).GetProperties().Select(x => x.Name).ToList(); + [OneTimeTearDown] + public void OneTimeTearDown() + { + _helper.Dispose(); + } - [OneTimeSetUp] - public void OneTimeSetUp() + private void Validate(SeriesMessage message, BsonDocument document) + { + Assert.Multiple(() => { - _helper = new MongoDbPopulatorTestHelper(); - _helper.SetupSuite(); - } + Assert.That(message, Is.Not.Null); + Assert.That(document, Is.Not.Null); + }); - [OneTimeTearDown] - public void OneTimeTearDown() - { - _helper.Dispose(); - } + Assert.That(document.TryGetElement("header", out var element), Is.True); - private void Validate(SeriesMessage message, BsonDocument document) + var docHeader = (BsonDocument)element.Value; + Assert.Multiple(() => { - Assert.Multiple(() => - { - Assert.That(message, Is.Not.Null); - Assert.That(document, Is.Not.Null); - }); - - Assert.That(document.TryGetElement("header", out var element), Is.True); - - var docHeader = (BsonDocument)element.Value; - Assert.Multiple(() => - { - Assert.That(docHeader.ElementCount, Is.EqualTo(_seriesMessageProps.Count - 3)); - Assert.That(docHeader["DirectoryPath"].AsString, Is.EqualTo(message.DirectoryPath)); - Assert.That(docHeader["ImagesInSeries"].AsInt32, Is.EqualTo(message.ImagesInSeries)); - }); - - DicomDataset dataset = DicomTypeTranslater.DeserializeJsonToDataset(message.DicomDataset); - 
Assert.That(dataset, Is.Not.Null); - - BsonDocument datasetDocument = DicomTypeTranslaterReader.BuildBsonDocument(dataset); - document.Remove("_id"); - document.Remove("header"); - - Assert.That(document, Is.EqualTo(datasetDocument)); - } - - [Test] - public void TestErrorHandling() - { - _helper.Globals.MongoDbPopulatorOptions!.FailedWriteLimit = 1; + Assert.That(docHeader.ElementCount, Is.EqualTo(_seriesMessageProps.Count - 3)); + Assert.That(docHeader["DirectoryPath"].AsString, Is.EqualTo(message.DirectoryPath)); + Assert.That(docHeader["ImagesInSeries"].AsInt32, Is.EqualTo(message.ImagesInSeries)); + }); - var mockAdapter = new Mock(); - mockAdapter - .Setup(x => x.WriteMany(It.IsAny>(), It.IsAny())) - .Returns(WriteResult.Failure); + DicomDataset dataset = DicomTypeTranslater.DeserializeJsonToDataset(message.DicomDataset); + Assert.That(dataset, Is.Not.Null); - var processor = new SeriesMessageProcessor(_helper.Globals.MongoDbPopulatorOptions, mockAdapter.Object, 1, delegate { }); + BsonDocument datasetDocument = DicomTypeTranslaterReader.BuildBsonDocument(dataset); + document.Remove("_id"); + document.Remove("header"); - Assert.Throws(() => processor.AddToWriteQueue(_helper.TestSeriesMessage, new MessageHeader(), 1)); - } + Assert.That(document, Is.EqualTo(datasetDocument)); + } - /// - /// Write a single series message and test the document format is as expected - /// - [Test] - public void TestSeriesDocumentFormat() - { - GlobalOptions options = MongoDbPopulatorTestHelper.GetNewMongoDbPopulatorOptions(); - options.MongoDbPopulatorOptions!.MongoDbFlushTime = int.MaxValue; + [Test] + public void TestErrorHandling() + { + _helper.Globals.MongoDbPopulatorOptions!.FailedWriteLimit = 1; - string collectionName = MongoDbPopulatorTestHelper.GetCollectionNameForTest("TestSeriesDocumentFormat"); - var testAdapter = new MongoDbAdapter("TestSeriesDocumentFormat", options.MongoDatabases!.DicomStoreOptions!, collectionName); + var mockAdapter = new Mock(); + mockAdapter 
+ .Setup(x => x.WriteMany(It.IsAny>(), It.IsAny())) + .Returns(WriteResult.Failure); - var callbackUsed = false; - void exceptionCallback(Exception exception) { callbackUsed = true; } + var processor = new SeriesMessageProcessor(_helper.Globals.MongoDbPopulatorOptions, mockAdapter.Object, 1, delegate { }); - var processor = new SeriesMessageProcessor(options.MongoDbPopulatorOptions, testAdapter, 1, exceptionCallback); + Assert.Throws(() => processor.AddToWriteQueue(_helper.TestSeriesMessage, new MessageHeader(), 1)); + } + + /// + /// Write a single series message and test the document format is as expected + /// + [Test] + public void TestSeriesDocumentFormat() + { + GlobalOptions options = MongoDbPopulatorTestHelper.GetNewMongoDbPopulatorOptions(); + options.MongoDbPopulatorOptions!.MongoDbFlushTime = int.MaxValue; - // Max queue size set to 1 so will immediately process this - processor.AddToWriteQueue(_helper.TestSeriesMessage, new MessageHeader(), 1); + string collectionName = MongoDbPopulatorTestHelper.GetCollectionNameForTest("TestSeriesDocumentFormat"); + var testAdapter = new MongoDbAdapter("TestSeriesDocumentFormat", options.MongoDatabases!.DicomStoreOptions!, collectionName); - Assert.Multiple(() => - { - Assert.That(callbackUsed, Is.False); - Assert.That(processor.AckCount, Is.EqualTo(1)); - }); + var callbackUsed = false; + void exceptionCallback(Exception exception) { callbackUsed = true; } + + var processor = new SeriesMessageProcessor(options.MongoDbPopulatorOptions, testAdapter, 1, exceptionCallback); + + // Max queue size set to 1 so will immediately process this + processor.AddToWriteQueue(_helper.TestSeriesMessage, new MessageHeader(), 1); + + Assert.Multiple(() => + { + Assert.That(callbackUsed, Is.False); + Assert.That(processor.AckCount, Is.EqualTo(1)); + }); - IMongoCollection collection = _helper.TestDatabase.GetCollection(collectionName); + IMongoCollection collection = _helper.TestDatabase.GetCollection(collectionName); - 
Assert.That(collection.CountDocuments(new BsonDocument()), Is.EqualTo(1)); + Assert.That(collection.CountDocuments(new BsonDocument()), Is.EqualTo(1)); - BsonDocument document = collection.Find(_ => true).ToList()[0]; + BsonDocument document = collection.Find(_ => true).ToList()[0]; - Validate(_helper.TestSeriesMessage, document); - } + Validate(_helper.TestSeriesMessage, document); } } diff --git a/tests/SmiServices.IntegrationTests/Microservices/UpdateValues/TestUpdateDatabase.cs b/tests/SmiServices.IntegrationTests/Microservices/UpdateValues/TestUpdateDatabase.cs index f3851e3b2..f9e9f8543 100644 --- a/tests/SmiServices.IntegrationTests/Microservices/UpdateValues/TestUpdateDatabase.cs +++ b/tests/SmiServices.IntegrationTests/Microservices/UpdateValues/TestUpdateDatabase.cs @@ -8,172 +8,171 @@ using System.Linq; using Tests.Common; -namespace SmiServices.IntegrationTests.Microservices.UpdateValues +namespace SmiServices.IntegrationTests.Microservices.UpdateValues; + +[RequiresRelationalDb(DatabaseType.MySql)] +[RequiresRelationalDb(DatabaseType.MicrosoftSQLServer)] +public class TestUpdateDatabase : DatabaseTests { - [RequiresRelationalDb(DatabaseType.MySql)] - [RequiresRelationalDb(DatabaseType.MicrosoftSQLServer)] - public class TestUpdateDatabase : DatabaseTests + protected DiscoveredTable SetupTestTable(DatabaseType dbType) { - protected DiscoveredTable SetupTestTable(DatabaseType dbType) - { - var type = GetCleanedServer(dbType); + var type = GetCleanedServer(dbType); - DataTable dt = new(); - dt.Columns.Add("PatientID"); - dt.Columns.Add("Age"); - dt.Columns.Add("Address"); + DataTable dt = new(); + dt.Columns.Add("PatientID"); + dt.Columns.Add("Age"); + dt.Columns.Add("Address"); - dt.Rows.Add("123", "1", "31 Homeland avenue"); - dt.Rows.Add("456", "2", "32 Homeland avenue"); - dt.Rows.Add("111", "3", "33 Homeland avenue"); - dt.Rows.Add("111", "4", "34 Homeland avenue"); + dt.Rows.Add("123", "1", "31 Homeland avenue"); + dt.Rows.Add("456", "2", "32 
Homeland avenue"); + dt.Rows.Add("111", "3", "33 Homeland avenue"); + dt.Rows.Add("111", "4", "34 Homeland avenue"); - var tblToUpdate = type.CreateTable("MyTableForUpdating", dt); + var tblToUpdate = type.CreateTable("MyTableForUpdating", dt); - Import(tblToUpdate); + Import(tblToUpdate); - return tblToUpdate; - } + return tblToUpdate; + } - [TestCase(DatabaseType.MicrosoftSQLServer)] - [TestCase(DatabaseType.MySql)] - public void TestUpdateValues_OneTable(DatabaseType dbType) - { - var tblToUpdate = SetupTestTable(dbType); + [TestCase(DatabaseType.MicrosoftSQLServer)] + [TestCase(DatabaseType.MySql)] + public void TestUpdateValues_OneTable(DatabaseType dbType) + { + var tblToUpdate = SetupTestTable(dbType); - var updater = new Updater(CatalogueRepository); + var updater = new Updater(CatalogueRepository); - Assert.Multiple(() => - { - //update PatientID that does not exist - Assert.That(updater.HandleUpdate(new UpdateValuesMessage - { - WhereFields = ["PatientID"], - HaveValues = ["5345"], - WriteIntoFields = ["PatientID"], - Values = ["999"] - }), Is.EqualTo(0), "Should not have been any updates because there is no patient number 5345"); - - //update PatientID that DOES exist - Assert.That(updater.HandleUpdate(new UpdateValuesMessage - { - WhereFields = ["PatientID"], - HaveValues = ["111"], - WriteIntoFields = ["PatientID"], - Values = ["222"] - }), Is.EqualTo(2), "Should have been 2 rows updated"); - - Assert.That(tblToUpdate.GetDataTable().Rows.Cast().Count(r => (int)r["PatientID"] == 222), Is.EqualTo(2)); - }); - } - - [TestCase(DatabaseType.MicrosoftSQLServer)] - [TestCase(DatabaseType.MySql)] - public void TestUpdateValues_OneTable_TwoWHERELogics(DatabaseType dbType) + Assert.Multiple(() => { - var tblToUpdate = SetupTestTable(dbType); - - var updater = new Updater(CatalogueRepository); + //update PatientID that does not exist + Assert.That(updater.HandleUpdate(new UpdateValuesMessage + { + WhereFields = ["PatientID"], + HaveValues = ["5345"], + 
WriteIntoFields = ["PatientID"], + Values = ["999"] + }), Is.EqualTo(0), "Should not have been any updates because there is no patient number 5345"); - Assert.Multiple(() => + //update PatientID that DOES exist + Assert.That(updater.HandleUpdate(new UpdateValuesMessage { - //update PatientID that DOES exist, there are 2 patient 111s but only one has the Age 3 - Assert.That(updater.HandleUpdate(new UpdateValuesMessage - { - WhereFields = ["PatientID", "Age"], - HaveValues = ["111", "3"], - WriteIntoFields = ["PatientID"], - Values = ["222"] - }), Is.EqualTo(1)); - - Assert.That(tblToUpdate.GetDataTable().Rows.Cast().Count(r => (int)r["PatientID"] == 222), Is.EqualTo(1)); - }); - } - - [TestCase(DatabaseType.MicrosoftSQLServer)] - [TestCase(DatabaseType.MySql)] - public void TestUpdateValues_OneTable_OperatorGreaterThan(DatabaseType dbType) - { - var tblToUpdate = SetupTestTable(dbType); + WhereFields = ["PatientID"], + HaveValues = ["111"], + WriteIntoFields = ["PatientID"], + Values = ["222"] + }), Is.EqualTo(2), "Should have been 2 rows updated"); - var updater = new Updater(CatalogueRepository); + Assert.That(tblToUpdate.GetDataTable().Rows.Cast().Count(r => (int)r["PatientID"] == 222), Is.EqualTo(2)); + }); + } - Assert.Multiple(() => - { - //update PatientID that DOES exist, there are 2 patient 111s both are under 6 - Assert.That(updater.HandleUpdate(new UpdateValuesMessage - { - WhereFields = ["PatientID", "Age"], - HaveValues = ["111", "6"], - Operators = ["=", "<="], - WriteIntoFields = ["PatientID"], - Values = ["222"], - - }), Is.EqualTo(2)); - - Assert.That(tblToUpdate.GetDataTable().Rows.Cast().Count(r => (int)r["PatientID"] == 222), Is.EqualTo(2)); - }); - } - [Test] - public void Test_TableInfoNotFound() - { - var updater = new Updater(CatalogueRepository); + [TestCase(DatabaseType.MicrosoftSQLServer)] + [TestCase(DatabaseType.MySql)] + public void TestUpdateValues_OneTable_TwoWHERELogics(DatabaseType dbType) + { + var tblToUpdate = 
SetupTestTable(dbType); - var ex = Assert.Throws(() => - updater.HandleUpdate(new UpdateValuesMessage + var updater = new Updater(CatalogueRepository); + + Assert.Multiple(() => + { + //update PatientID that DOES exist, there are 2 patient 111s but only one has the Age 3 + Assert.That(updater.HandleUpdate(new UpdateValuesMessage { WhereFields = ["PatientID", "Age"], HaveValues = ["111", "3"], WriteIntoFields = ["PatientID"], - Values = ["222"], - ExplicitTableInfo = [999999999] - })); + Values = ["222"] + }), Is.EqualTo(1)); - Assert.That(ex!.Message, Is.EqualTo("Could not find all TableInfos IDs=999999999. Found 0:")); - } + Assert.That(tblToUpdate.GetDataTable().Rows.Cast().Count(r => (int)r["PatientID"] == 222), Is.EqualTo(1)); + }); + } - [Test] - public void Test_WhereField_NotFound() - { - SetupTestTable(DatabaseType.MicrosoftSQLServer); + [TestCase(DatabaseType.MicrosoftSQLServer)] + [TestCase(DatabaseType.MySql)] + public void TestUpdateValues_OneTable_OperatorGreaterThan(DatabaseType dbType) + { + var tblToUpdate = SetupTestTable(dbType); - var updater = new Updater(CatalogueRepository); + var updater = new Updater(CatalogueRepository); - var ex = Assert.Throws(() => - updater.HandleUpdate(new UpdateValuesMessage + Assert.Multiple(() => + { + //update PatientID that DOES exist, there are 2 patient 111s both are under 6 + Assert.That(updater.HandleUpdate(new UpdateValuesMessage { - WhereFields = ["Blarg"], - HaveValues = ["111"], + WhereFields = ["PatientID", "Age"], + HaveValues = ["111", "6"], + Operators = ["=", "<="], WriteIntoFields = ["PatientID"], - Values = ["222"] - })); + Values = ["222"], - TestContext.Out.WriteLine(ex?.Message); + }), Is.EqualTo(2)); - Assert.That(ex?.Message, Is.EqualTo("Could not find any tables to update that matched the field set UpdateValuesMessage: WhereFields=Blarg WriteIntoFields=PatientID")); - } + Assert.That(tblToUpdate.GetDataTable().Rows.Cast().Count(r => (int)r["PatientID"] == 222), Is.EqualTo(2)); + }); + } + 
[Test] + public void Test_TableInfoNotFound() + { + var updater = new Updater(CatalogueRepository); - [Test] - public void Test_WriteIntoFields_NotFound() + var ex = Assert.Throws(() => + updater.HandleUpdate(new UpdateValuesMessage { - SetupTestTable(DatabaseType.MicrosoftSQLServer); + WhereFields = ["PatientID", "Age"], + HaveValues = ["111", "3"], + WriteIntoFields = ["PatientID"], + Values = ["222"], + ExplicitTableInfo = [999999999] + })); + + Assert.That(ex!.Message, Is.EqualTo("Could not find all TableInfos IDs=999999999. Found 0:")); + } - var updater = new Updater(CatalogueRepository); + [Test] + public void Test_WhereField_NotFound() + { + SetupTestTable(DatabaseType.MicrosoftSQLServer); - var ex = Assert.Throws(() => - updater.HandleUpdate(new UpdateValuesMessage - { - WhereFields = ["PatientID"], - HaveValues = ["111"], - WriteIntoFields = ["Blarg"], - Values = ["222"] - })); + var updater = new Updater(CatalogueRepository); + + var ex = Assert.Throws(() => + updater.HandleUpdate(new UpdateValuesMessage + { + WhereFields = ["Blarg"], + HaveValues = ["111"], + WriteIntoFields = ["PatientID"], + Values = ["222"] + })); + + TestContext.Out.WriteLine(ex?.Message); + + Assert.That(ex?.Message, Is.EqualTo("Could not find any tables to update that matched the field set UpdateValuesMessage: WhereFields=Blarg WriteIntoFields=PatientID")); + } + + [Test] + public void Test_WriteIntoFields_NotFound() + { + SetupTestTable(DatabaseType.MicrosoftSQLServer); - TestContext.Out.WriteLine(ex?.Message); + var updater = new Updater(CatalogueRepository); - Assert.That(ex?.Message, Is.EqualTo("Could not find any tables to update that matched the field set UpdateValuesMessage: WhereFields=PatientID WriteIntoFields=Blarg")); - } + var ex = Assert.Throws(() => + updater.HandleUpdate(new UpdateValuesMessage + { + WhereFields = ["PatientID"], + HaveValues = ["111"], + WriteIntoFields = ["Blarg"], + Values = ["222"] + })); + TestContext.Out.WriteLine(ex?.Message); + 
Assert.That(ex?.Message, Is.EqualTo("Could not find any tables to update that matched the field set UpdateValuesMessage: WhereFields=PatientID WriteIntoFields=Blarg")); } + + } diff --git a/tests/SmiServices.IntegrationTests/RequiresExternalService.cs b/tests/SmiServices.IntegrationTests/RequiresExternalService.cs index e930d4db0..39ecf0e07 100644 --- a/tests/SmiServices.IntegrationTests/RequiresExternalService.cs +++ b/tests/SmiServices.IntegrationTests/RequiresExternalService.cs @@ -4,52 +4,51 @@ using System; using System.Runtime.InteropServices; -namespace SmiServices.IntegrationTests +namespace SmiServices.IntegrationTests; + +public abstract class RequiresExternalService : CategoryAttribute, IApplyToContext { - public abstract class RequiresExternalService : CategoryAttribute, IApplyToContext + private static readonly bool _failIfUnavailable; + private static readonly bool _ignoreIfWinCiSkip; + private static bool _cached = false; + private static string? _cache = null; + + static RequiresExternalService() + { + var ci = Environment.GetEnvironmentVariable("CI"); + if (!string.IsNullOrWhiteSpace(ci) && (ci == "1" || ci.Equals("TRUE", StringComparison.OrdinalIgnoreCase))) + _failIfUnavailable = true; + + if ( + Environment.GetEnvironmentVariable("CI_SKIP_WIN_SERVICES") == "1" + && RuntimeInformation.IsOSPlatform(System.Runtime.InteropServices.OSPlatform.Windows) + ) + _ignoreIfWinCiSkip = true; + } + + public void ApplyToContext(TestExecutionContext context) { - private static readonly bool _failIfUnavailable; - private static readonly bool _ignoreIfWinCiSkip; - private static bool _cached = false; - private static string? 
_cache = null; + if (_ignoreIfWinCiSkip) + Assert.Ignore("CI_SKIP_WIN_SERVICES"); - static RequiresExternalService() + if (!_cached) { - var ci = Environment.GetEnvironmentVariable("CI"); - if (!string.IsNullOrWhiteSpace(ci) && (ci == "1" || ci.Equals("TRUE", StringComparison.OrdinalIgnoreCase))) - _failIfUnavailable = true; - - if ( - Environment.GetEnvironmentVariable("CI_SKIP_WIN_SERVICES") == "1" - && RuntimeInformation.IsOSPlatform(System.Runtime.InteropServices.OSPlatform.Windows) - ) - _ignoreIfWinCiSkip = true; + _cached = true; + _cache = ApplyToContextImpl(); } - public void ApplyToContext(TestExecutionContext context) + if (_cache is null) { - if (_ignoreIfWinCiSkip) - Assert.Ignore("CI_SKIP_WIN_SERVICES"); - - if (!_cached) - { - _cached = true; - _cache = ApplyToContextImpl(); - } - - if (_cache is null) - { - if (this is RequiresRabbit r) - r.CheckExchange(); - return; - } - - if (_failIfUnavailable) - Assert.Fail(_cache); - else - Assert.Ignore(_cache); + if (this is RequiresRabbit r) + r.CheckExchange(); + return; } - protected abstract string? ApplyToContextImpl(); + if (_failIfUnavailable) + Assert.Fail(_cache); + else + Assert.Ignore(_cache); } + + protected abstract string? 
ApplyToContextImpl(); } diff --git a/tests/SmiServices.IntegrationTests/RequiresMongoDb.cs b/tests/SmiServices.IntegrationTests/RequiresMongoDb.cs index 41170c8bd..ef393fbec 100644 --- a/tests/SmiServices.IntegrationTests/RequiresMongoDb.cs +++ b/tests/SmiServices.IntegrationTests/RequiresMongoDb.cs @@ -6,82 +6,81 @@ using System.IO; using YamlDotNet.Serialization; -namespace SmiServices.IntegrationTests +namespace SmiServices.IntegrationTests; + +[AttributeUsage(AttributeTargets.Method | AttributeTargets.Class | AttributeTargets.Interface | AttributeTargets.Assembly, AllowMultiple = true)] +public class RequiresMongoDb : RequiresExternalService { - [AttributeUsage(AttributeTargets.Method | AttributeTargets.Class | AttributeTargets.Interface | AttributeTargets.Assembly, AllowMultiple = true)] - public class RequiresMongoDb : RequiresExternalService + protected override string? ApplyToContextImpl() { - protected override string? ApplyToContextImpl() - { - var address = GetMongoClientSettings(); + var address = GetMongoClientSettings(); - Console.WriteLine("Checking the following configuration:" + Environment.NewLine + address); + Console.WriteLine("Checking the following configuration:" + Environment.NewLine + address); - var client = new MongoClient(address); - - try - { - IAsyncCursor _ = client.ListDatabases(); - } - catch (Exception e) - { - return - e is MongoNotPrimaryException - ? "Connected to non-primary MongoDB server. Check replication is enabled" - : $"Could not connect to MongoDB at {address}: {e}"; - } + var client = new MongoClient(address); - return null; + try + { + IAsyncCursor _ = client.ListDatabases(); } - - public static MongoClientSettings GetMongoClientSettings() + catch (Exception e) { - var deserializer = new DeserializerBuilder() - .IgnoreUnmatchedProperties() - .Build(); - - return deserializer.Deserialize(new StreamReader(Path.Combine(TestContext.CurrentContext.TestDirectory, "Mongo.yaml"))); + return + e is MongoNotPrimaryException + ? 
"Connected to non-primary MongoDB server. Check replication is enabled" + : $"Could not connect to MongoDB at {address}: {e}"; } - class A : MongoClientSettings - { - private string? _host; - private int _port; + return null; + } - public string? Host + public static MongoClientSettings GetMongoClientSettings() + { + var deserializer = new DeserializerBuilder() + .IgnoreUnmatchedProperties() + .Build(); + + return deserializer.Deserialize(new StreamReader(Path.Combine(TestContext.CurrentContext.TestDirectory, "Mongo.yaml"))); + } + + class A : MongoClientSettings + { + private string? _host; + private int _port; + + public string? Host + { + get => _host; + set { - get => _host; - set - { - _host = value; - Server = new MongoServerAddress(_host, _port); - } + _host = value; + Server = new MongoServerAddress(_host, _port); } + } - public int Port + public int Port + { + get => _port; + set { - get => _port; - set - { - _port = value; - Server = new MongoServerAddress(_host, _port); - } + _port = value; + Server = new MongoServerAddress(_host, _port); } + } - public A() - { + public A() + { - DirectConnection = true; - ConnectTimeout = new TimeSpan(0, 0, 0, 5); - SocketTimeout = new TimeSpan(0, 0, 0, 5); - HeartbeatTimeout = new TimeSpan(0, 0, 0, 5); - ServerSelectionTimeout = new TimeSpan(0, 0, 0, 5); - WaitQueueTimeout = new TimeSpan(0, 0, 05); - } - public override string ToString() - { - return Host + ":" + Port; - } + DirectConnection = true; + ConnectTimeout = new TimeSpan(0, 0, 0, 5); + SocketTimeout = new TimeSpan(0, 0, 0, 5); + HeartbeatTimeout = new TimeSpan(0, 0, 0, 5); + ServerSelectionTimeout = new TimeSpan(0, 0, 0, 5); + WaitQueueTimeout = new TimeSpan(0, 0, 05); + } + public override string ToString() + { + return Host + ":" + Port; } } } diff --git a/tests/SmiServices.IntegrationTests/RequiresRabbit.cs b/tests/SmiServices.IntegrationTests/RequiresRabbit.cs index 586ec526c..09737af11 100644 --- a/tests/SmiServices.IntegrationTests/RequiresRabbit.cs +++ 
b/tests/SmiServices.IntegrationTests/RequiresRabbit.cs @@ -7,46 +7,45 @@ using System.Text; using YamlDotNet.Serialization; -namespace SmiServices.IntegrationTests +namespace SmiServices.IntegrationTests; + +[AttributeUsage(AttributeTargets.Method | AttributeTargets.Class | AttributeTargets.Interface | + AttributeTargets.Assembly, AllowMultiple = true)] +public sealed class RequiresRabbit : RequiresExternalService { - [AttributeUsage(AttributeTargets.Method | AttributeTargets.Class | AttributeTargets.Interface | - AttributeTargets.Assembly, AllowMultiple = true)] - public sealed class RequiresRabbit : RequiresExternalService - { - public static readonly Lazy Connection = new(GetConnectionFactory); + public static readonly Lazy Connection = new(GetConnectionFactory); - protected override string? ApplyToContextImpl() + protected override string? ApplyToContextImpl() + { + try { - try - { - CheckExchange(); - return null; - } - catch (BrokerUnreachableException e) - { - return $"Could not connect to RabbitMQ{Environment.NewLine}{e.Message}"; - } + CheckExchange(); + return null; } - - private static IConnection GetConnectionFactory() + catch (BrokerUnreachableException e) { - var deserializer = new DeserializerBuilder() - .IgnoreUnmatchedProperties() - .Build(); - var factory = deserializer.Deserialize( - new StreamReader(Path.Combine(TestContext.CurrentContext.TestDirectory, "Rabbit.yaml"))); - factory.ContinuationTimeout = TimeSpan.FromSeconds(5); - factory.HandshakeContinuationTimeout = TimeSpan.FromSeconds(5); - factory.RequestedConnectionTimeout = TimeSpan.FromSeconds(5); - factory.SocketReadTimeout = TimeSpan.FromSeconds(5); - factory.SocketWriteTimeout = TimeSpan.FromSeconds(5); - return factory.CreateConnection(); + return $"Could not connect to RabbitMQ{Environment.NewLine}{e.Message}"; } + } - public void CheckExchange() - { - using var model = Connection.Value.CreateModel(); - model.ExchangeDeclare("TEST.ControlExchange", ExchangeType.Topic, durable: true); 
- } + private static IConnection GetConnectionFactory() + { + var deserializer = new DeserializerBuilder() + .IgnoreUnmatchedProperties() + .Build(); + var factory = deserializer.Deserialize( + new StreamReader(Path.Combine(TestContext.CurrentContext.TestDirectory, "Rabbit.yaml"))); + factory.ContinuationTimeout = TimeSpan.FromSeconds(5); + factory.HandshakeContinuationTimeout = TimeSpan.FromSeconds(5); + factory.RequestedConnectionTimeout = TimeSpan.FromSeconds(5); + factory.SocketReadTimeout = TimeSpan.FromSeconds(5); + factory.SocketWriteTimeout = TimeSpan.FromSeconds(5); + return factory.CreateConnection(); + } + + public void CheckExchange() + { + using var model = Connection.Value.CreateModel(); + model.ExchangeDeclare("TEST.ControlExchange", ExchangeType.Topic, durable: true); } } diff --git a/tests/SmiServices.IntegrationTests/RequiresRelationalDb.cs b/tests/SmiServices.IntegrationTests/RequiresRelationalDb.cs index d48da9f03..da6b19c5c 100644 --- a/tests/SmiServices.IntegrationTests/RequiresRelationalDb.cs +++ b/tests/SmiServices.IntegrationTests/RequiresRelationalDb.cs @@ -6,68 +6,67 @@ using System.IO; using YamlDotNet.Serialization; -namespace SmiServices.IntegrationTests +namespace SmiServices.IntegrationTests; + +[AttributeUsage(AttributeTargets.Method | AttributeTargets.Class | AttributeTargets.Interface | + AttributeTargets.Assembly, AllowMultiple = true)] +public class RequiresRelationalDb : RequiresExternalService { - [AttributeUsage(AttributeTargets.Method | AttributeTargets.Class | AttributeTargets.Interface | - AttributeTargets.Assembly, AllowMultiple = true)] - public class RequiresRelationalDb : RequiresExternalService - { - private readonly DatabaseType _type; - private const string Filename = "RelationalDatabases.yaml"; + private readonly DatabaseType _type; + private const string Filename = "RelationalDatabases.yaml"; - public RequiresRelationalDb(DatabaseType type) - { - _type = type; - } + public RequiresRelationalDb(DatabaseType type) + 
{ + _type = type; + } - protected override string? ApplyToContextImpl() - { - FansiImplementations.Load(); + protected override string? ApplyToContextImpl() + { + FansiImplementations.Load(); - var connectionStrings = GetRelationalDatabaseConnectionStrings(); - var server = connectionStrings.GetServer(_type); + var connectionStrings = GetRelationalDatabaseConnectionStrings(); + var server = connectionStrings.GetServer(_type); - return server.Exists() - ? null - : $"Could not connect to {_type} at '{server.Name}' with the provided connection options"; - } + return server.Exists() + ? null + : $"Could not connect to {_type} at '{server.Name}' with the provided connection options"; + } - public static ConStrs GetRelationalDatabaseConnectionStrings() - { - IDeserializer deserializer = new DeserializerBuilder() - .IgnoreUnmatchedProperties() - .Build(); + public static ConStrs GetRelationalDatabaseConnectionStrings() + { + IDeserializer deserializer = new DeserializerBuilder() + .IgnoreUnmatchedProperties() + .Build(); - return deserializer.Deserialize(new StreamReader(Path.Combine(TestContext.CurrentContext.TestDirectory, Filename))); - } + return deserializer.Deserialize(new StreamReader(Path.Combine(TestContext.CurrentContext.TestDirectory, Filename))); + } - public class ConStrs + public class ConStrs + { + private string? _MySql; + public string? MySql { - private string? _MySql; - public string? MySql - { - get => _MySql; - set => _MySql = value?.Replace("ssl-mode", "sslmode", StringComparison.OrdinalIgnoreCase); - } + get => _MySql; + set => _MySql = value?.Replace("ssl-mode", "sslmode", StringComparison.OrdinalIgnoreCase); + } - public string? SqlServer { get; set; } - public string? PostgreSql { get; set; } + public string? SqlServer { get; set; } + public string? PostgreSql { get; set; } - public DiscoveredServer GetServer(DatabaseType dbType) + public DiscoveredServer GetServer(DatabaseType dbType) + { + string? str = dbType switch { - string? 
str = dbType switch - { - DatabaseType.MicrosoftSQLServer => SqlServer, - DatabaseType.MySql => MySql, - DatabaseType.PostgreSql => PostgreSql, - _ => throw new ArgumentOutOfRangeException(nameof(dbType)) - }; + DatabaseType.MicrosoftSQLServer => SqlServer, + DatabaseType.MySql => MySql, + DatabaseType.PostgreSql => PostgreSql, + _ => throw new ArgumentOutOfRangeException(nameof(dbType)) + }; - if (string.IsNullOrEmpty(str)) - Assert.Ignore($"No connection string configured in {Filename} for DatabaseType {dbType}"); + if (string.IsNullOrEmpty(str)) + Assert.Ignore($"No connection string configured in {Filename} for DatabaseType {dbType}"); - return new DiscoveredServer(str, dbType); - } + return new DiscoveredServer(str, dbType); } } } diff --git a/tests/SmiServices.UnitTests/Applications/DicomDirectoryProcessor/AccessionDirectoryListerTest.cs b/tests/SmiServices.UnitTests/Applications/DicomDirectoryProcessor/AccessionDirectoryListerTest.cs index 77c701e35..796ca17cd 100644 --- a/tests/SmiServices.UnitTests/Applications/DicomDirectoryProcessor/AccessionDirectoryListerTest.cs +++ b/tests/SmiServices.UnitTests/Applications/DicomDirectoryProcessor/AccessionDirectoryListerTest.cs @@ -10,77 +10,76 @@ using System.Text; -namespace SmiServices.UnitTests.Applications.DicomDirectoryProcessor +namespace SmiServices.UnitTests.Applications.DicomDirectoryProcessor; + +/// +/// Unit tests for AccessionDirectoryLister +/// +[TestFixture] +public class AccessionDirectoryListerTest { - /// - /// Unit tests for AccessionDirectoryLister - /// - [TestFixture] - public class AccessionDirectoryListerTest + [OneTimeSetUp] + public void OneTimeSetUp() { - [OneTimeSetUp] - public void OneTimeSetUp() - { - } - - private static string GetListContent() - { - StringBuilder accessionList = new(); - - accessionList.AppendLine("/PACS/2018/01/01/AAA,"); // exists and has dicom files - fail (requires indication that is dir) - accessionList.AppendLine("/PACS/2018/01/01/AAA/,"); // exists and has 
dicom files - pass - accessionList.AppendLine("/PACS/2018/01/01/E-123/,"); // exists and has dicom files - pass - accessionList.AppendLine("/PACS/2018/01/01/01.01.2018/,"); // exists and has dicom files - pass - accessionList.AppendLine("/PACS/2018/01/01/BBB/,"); // does exist but has no dicom files - fail - accessionList.AppendLine("/PACS/2018/01/01/CCC/,"); // does not exist - fail - accessionList.AppendLine("/PACS/2018/01/01/,"); // not pointing to accession directory - fail - accessionList.AppendLine("/PACS/2018/01/01/testDicom.dcm,"); // not pointing to accession directory - fail - accessionList.AppendLine(" "); // not pointing to accession directory - fail - accessionList.AppendLine("NULL"); // not pointing to accession directory - fail - accessionList.AppendLine(",,,,"); // not pointing to accession directory - fail - - return accessionList.ToString(); - } - - [Test] - public void TestAccessionDirectoryLister() - { - // Mock file system referenced in accession list - MockFileSystem mockFilesystem = new(null, Environment.CurrentDirectory); - string rootDir = Path.Combine(Path.GetPathRoot(Environment.CurrentDirectory)!, "PACS"); - - string testDicom = Path.GetFullPath(Path.Combine(rootDir, "2018/01/01/AAA/test.dcm")); - mockFilesystem.AddFile(testDicom, null); - - string specialCase1 = Path.GetFullPath(Path.Combine(rootDir, "2018/01/01/E-123/test.dcm")); - mockFilesystem.AddFile(specialCase1, null); - - string specialCase2 = Path.GetFullPath(Path.Combine(rootDir, "2018/01/01/01.01.2018/test.dcm")); - mockFilesystem.AddFile(specialCase2, null); - - string testBad = Path.GetFullPath(Path.Combine(rootDir, "2018/01/01/BBB/test.txt")); - mockFilesystem.AddFile(testBad, null); - - // Mock input file - string accessionList = Path.GetFullPath(Path.Combine(rootDir, "accessions.csv")); - var mockInputFile = new MockFileData(GetListContent()); - mockFilesystem.AddFile(accessionList, mockInputFile); - - // Mock producer - var totalSent = 0; - - Mock mockProducerModel = 
new(); - mockProducerModel - .Setup(x => x.SendMessage(It.IsAny(), - null, - null)) - .Callback(() => ++totalSent); - - AccessionDirectoryLister accessionLister = new(rootDir, mockFilesystem, "*.dcm", mockProducerModel.Object); - - accessionLister.SearchForDicomDirectories(accessionList); - - Assert.That(totalSent, Is.EqualTo(3)); - } + } + + private static string GetListContent() + { + StringBuilder accessionList = new(); + + accessionList.AppendLine("/PACS/2018/01/01/AAA,"); // exists and has dicom files - fail (requires indication that is dir) + accessionList.AppendLine("/PACS/2018/01/01/AAA/,"); // exists and has dicom files - pass + accessionList.AppendLine("/PACS/2018/01/01/E-123/,"); // exists and has dicom files - pass + accessionList.AppendLine("/PACS/2018/01/01/01.01.2018/,"); // exists and has dicom files - pass + accessionList.AppendLine("/PACS/2018/01/01/BBB/,"); // does exist but has no dicom files - fail + accessionList.AppendLine("/PACS/2018/01/01/CCC/,"); // does not exist - fail + accessionList.AppendLine("/PACS/2018/01/01/,"); // not pointing to accession directory - fail + accessionList.AppendLine("/PACS/2018/01/01/testDicom.dcm,"); // not pointing to accession directory - fail + accessionList.AppendLine(" "); // not pointing to accession directory - fail + accessionList.AppendLine("NULL"); // not pointing to accession directory - fail + accessionList.AppendLine(",,,,"); // not pointing to accession directory - fail + + return accessionList.ToString(); + } + + [Test] + public void TestAccessionDirectoryLister() + { + // Mock file system referenced in accession list + MockFileSystem mockFilesystem = new(null, Environment.CurrentDirectory); + string rootDir = Path.Combine(Path.GetPathRoot(Environment.CurrentDirectory)!, "PACS"); + + string testDicom = Path.GetFullPath(Path.Combine(rootDir, "2018/01/01/AAA/test.dcm")); + mockFilesystem.AddFile(testDicom, null); + + string specialCase1 = Path.GetFullPath(Path.Combine(rootDir, 
"2018/01/01/E-123/test.dcm")); + mockFilesystem.AddFile(specialCase1, null); + + string specialCase2 = Path.GetFullPath(Path.Combine(rootDir, "2018/01/01/01.01.2018/test.dcm")); + mockFilesystem.AddFile(specialCase2, null); + + string testBad = Path.GetFullPath(Path.Combine(rootDir, "2018/01/01/BBB/test.txt")); + mockFilesystem.AddFile(testBad, null); + + // Mock input file + string accessionList = Path.GetFullPath(Path.Combine(rootDir, "accessions.csv")); + var mockInputFile = new MockFileData(GetListContent()); + mockFilesystem.AddFile(accessionList, mockInputFile); + + // Mock producer + var totalSent = 0; + + Mock mockProducerModel = new(); + mockProducerModel + .Setup(x => x.SendMessage(It.IsAny(), + null, + null)) + .Callback(() => ++totalSent); + + AccessionDirectoryLister accessionLister = new(rootDir, mockFilesystem, "*.dcm", mockProducerModel.Object); + + accessionLister.SearchForDicomDirectories(accessionList); + + Assert.That(totalSent, Is.EqualTo(3)); } } diff --git a/tests/SmiServices.UnitTests/Applications/DicomDirectoryProcessor/DicomDirectoryFinderTest.cs b/tests/SmiServices.UnitTests/Applications/DicomDirectoryProcessor/DicomDirectoryFinderTest.cs index 76667bcf8..7af998051 100644 --- a/tests/SmiServices.UnitTests/Applications/DicomDirectoryProcessor/DicomDirectoryFinderTest.cs +++ b/tests/SmiServices.UnitTests/Applications/DicomDirectoryProcessor/DicomDirectoryFinderTest.cs @@ -9,46 +9,45 @@ using System.IO.Abstractions.TestingHelpers; -namespace SmiServices.UnitTests.Applications.DicomDirectoryProcessor +namespace SmiServices.UnitTests.Applications.DicomDirectoryProcessor; + +/// +/// Unit tests for BasicDicomDirectoryFinder +/// +[TestFixture] +public class DicomDirectoryFinderTest { - /// - /// Unit tests for BasicDicomDirectoryFinder - /// - [TestFixture] - public class DicomDirectoryFinderTest + [OneTimeSetUp] + public void OneTimeSetUp() + { + } + + [Test] + public void FindingAccessionDirectory() { - [OneTimeSetUp] - public void 
OneTimeSetUp() + var fileSystem = new MockFileSystem(new Dictionary { - } + { Path.GetFullPath("/PACS/2019/01/01/foo/01/a.dcm"), new MockFileData([0x12, 0x34, 0x56, 0xd2] ) }, + { Path.GetFullPath("/PACS/2019/01/02/foo/02/a.dcm"), new MockFileData([0x12, 0x34, 0x56, 0xd2] ) }, + }); - [Test] - public void FindingAccessionDirectory() + var m1 = new AccessionDirectoryMessage { - var fileSystem = new MockFileSystem(new Dictionary - { - { Path.GetFullPath("/PACS/2019/01/01/foo/01/a.dcm"), new MockFileData([0x12, 0x34, 0x56, 0xd2] ) }, - { Path.GetFullPath("/PACS/2019/01/02/foo/02/a.dcm"), new MockFileData([0x12, 0x34, 0x56, 0xd2] ) }, - }); - - var m1 = new AccessionDirectoryMessage - { - //NOTE: These can't be rooted, so can't easily use Path.GetFullPath - DirectoryPath = "2019/01/01/foo/01".Replace('/', Path.DirectorySeparatorChar) - }; - - var m2 = new AccessionDirectoryMessage - { - DirectoryPath = "2019/01/02/foo/02".Replace('/', Path.DirectorySeparatorChar) - }; - - string rootDir = Path.GetFullPath("/PACS"); - var mockProducerModel = new Mock(); - var ddf = new BasicDicomDirectoryFinder(rootDir, fileSystem, "*.dcm", mockProducerModel.Object); - ddf.SearchForDicomDirectories(rootDir); - - mockProducerModel.Verify(pm => pm.SendMessage(m1, null, It.IsAny())); - mockProducerModel.Verify(pm => pm.SendMessage(m2, null, It.IsAny())); - } + //NOTE: These can't be rooted, so can't easily use Path.GetFullPath + DirectoryPath = "2019/01/01/foo/01".Replace('/', Path.DirectorySeparatorChar) + }; + + var m2 = new AccessionDirectoryMessage + { + DirectoryPath = "2019/01/02/foo/02".Replace('/', Path.DirectorySeparatorChar) + }; + + string rootDir = Path.GetFullPath("/PACS"); + var mockProducerModel = new Mock(); + var ddf = new BasicDicomDirectoryFinder(rootDir, fileSystem, "*.dcm", mockProducerModel.Object); + ddf.SearchForDicomDirectories(rootDir); + + mockProducerModel.Verify(pm => pm.SendMessage(m1, null, It.IsAny())); + mockProducerModel.Verify(pm => pm.SendMessage(m2, 
null, It.IsAny())); } } diff --git a/tests/SmiServices.UnitTests/Applications/DicomDirectoryProcessor/PacsDirectoryFinderTests.cs b/tests/SmiServices.UnitTests/Applications/DicomDirectoryProcessor/PacsDirectoryFinderTests.cs index d07d965e2..422fd476f 100644 --- a/tests/SmiServices.UnitTests/Applications/DicomDirectoryProcessor/PacsDirectoryFinderTests.cs +++ b/tests/SmiServices.UnitTests/Applications/DicomDirectoryProcessor/PacsDirectoryFinderTests.cs @@ -9,74 +9,73 @@ using System.IO.Abstractions.TestingHelpers; -namespace SmiServices.UnitTests.Applications.DicomDirectoryProcessor +namespace SmiServices.UnitTests.Applications.DicomDirectoryProcessor; + +[TestFixture] +public class PacsDirectoryFinderTests { - [TestFixture] - public class PacsDirectoryFinderTests + [OneTimeSetUp] + public void OneTimeSetUp() + { + } + + [Test] + public void TestRegexMatches() { - [OneTimeSetUp] - public void OneTimeSetUp() + string rootDir = Path.GetFullPath("/PACS"); + var mockFs = new MockFileSystem(); + + string testFile = Path.GetFullPath(Path.Combine(rootDir, "2018/01/01/AAA/testDicom.dcm")); + mockFs.AddFile(testFile, null); + + string specialCase1 = Path.GetFullPath(Path.Combine(rootDir, "2016/01/01/E-12345/testDicom.dcm")); + mockFs.AddFile(specialCase1, null); + + string specialCase2 = Path.GetFullPath(Path.Combine(rootDir, "2017/01/01/01.01.2017/testDicom.dcm")); + mockFs.AddFile(specialCase2, null); + + string multiLayer1 = Path.GetFullPath(Path.Combine(rootDir, "2015/01/01/E-12345/testDicom.dcm")); + mockFs.AddFile(multiLayer1, null); + + string multiLayer2 = Path.GetFullPath(Path.Combine(rootDir, "2015/01/01/AAA/testDicom.dcm")); + mockFs.AddFile(multiLayer2, null); + + string multiLayer3 = Path.GetFullPath(Path.Combine(rootDir, "2015/01/01/BBB/testDicom.dcm")); + mockFs.AddFile(multiLayer3, null); + + // Test case, expected messages + var testCases = new Dictionary { - } + { "2018", 1 }, + { "2018/", 1 }, + { "2018/01", 1 }, + { "2018/01/", 1 }, + { "2018/01/01", 1 
}, + { "2018/01/01/", 1 }, + { "2015/01/01/", 3 }, + { "2018/01/01/AAA", 0 }, + { "2018/01/01/AAA/", 1 }, + { "2016/01/01/E-12345/", 1 }, + { "2017/01/01/01.01.2017/", 1 } + }; - [Test] - public void TestRegexMatches() + var totalSent = 0; + + var mockProducerModel = new Mock(); + mockProducerModel + .Setup(x => x.SendMessage(It.IsAny(), + null, + null)) + .Callback(() => ++totalSent); + + var pacsFinder = new PacsDirectoryFinder(rootDir, mockFs, "*.dcm", mockProducerModel.Object); + + foreach (KeyValuePair item in testCases) { - string rootDir = Path.GetFullPath("/PACS"); - var mockFs = new MockFileSystem(); - - string testFile = Path.GetFullPath(Path.Combine(rootDir, "2018/01/01/AAA/testDicom.dcm")); - mockFs.AddFile(testFile, null); - - string specialCase1 = Path.GetFullPath(Path.Combine(rootDir, "2016/01/01/E-12345/testDicom.dcm")); - mockFs.AddFile(specialCase1, null); - - string specialCase2 = Path.GetFullPath(Path.Combine(rootDir, "2017/01/01/01.01.2017/testDicom.dcm")); - mockFs.AddFile(specialCase2, null); - - string multiLayer1 = Path.GetFullPath(Path.Combine(rootDir, "2015/01/01/E-12345/testDicom.dcm")); - mockFs.AddFile(multiLayer1, null); - - string multiLayer2 = Path.GetFullPath(Path.Combine(rootDir, "2015/01/01/AAA/testDicom.dcm")); - mockFs.AddFile(multiLayer2, null); - - string multiLayer3 = Path.GetFullPath(Path.Combine(rootDir, "2015/01/01/BBB/testDicom.dcm")); - mockFs.AddFile(multiLayer3, null); - - // Test case, expected messages - var testCases = new Dictionary - { - { "2018", 1 }, - { "2018/", 1 }, - { "2018/01", 1 }, - { "2018/01/", 1 }, - { "2018/01/01", 1 }, - { "2018/01/01/", 1 }, - { "2015/01/01/", 3 }, - { "2018/01/01/AAA", 0 }, - { "2018/01/01/AAA/", 1 }, - { "2016/01/01/E-12345/", 1 }, - { "2017/01/01/01.01.2017/", 1 } - }; - - var totalSent = 0; - - var mockProducerModel = new Mock(); - mockProducerModel - .Setup(x => x.SendMessage(It.IsAny(), - null, - null)) - .Callback(() => ++totalSent); - - var pacsFinder = new 
PacsDirectoryFinder(rootDir, mockFs, "*.dcm", mockProducerModel.Object); - - foreach (KeyValuePair item in testCases) - { - totalSent = 0; - pacsFinder.SearchForDicomDirectories(Path.GetFullPath(Path.Combine(rootDir, item.Key))); - - Assert.That(totalSent, Is.EqualTo(item.Value)); - } + totalSent = 0; + pacsFinder.SearchForDicomDirectories(Path.GetFullPath(Path.Combine(rootDir, item.Key))); + + Assert.That(totalSent, Is.EqualTo(item.Value)); } } } diff --git a/tests/SmiServices.UnitTests/Applications/DicomDirectoryProcessor/ZipDicomDirectoryFinderTests.cs b/tests/SmiServices.UnitTests/Applications/DicomDirectoryProcessor/ZipDicomDirectoryFinderTests.cs index 831196bd2..4605f3f66 100644 --- a/tests/SmiServices.UnitTests/Applications/DicomDirectoryProcessor/ZipDicomDirectoryFinderTests.cs +++ b/tests/SmiServices.UnitTests/Applications/DicomDirectoryProcessor/ZipDicomDirectoryFinderTests.cs @@ -9,54 +9,53 @@ using System.IO.Abstractions.TestingHelpers; -namespace SmiServices.UnitTests.Applications.DicomDirectoryProcessor +namespace SmiServices.UnitTests.Applications.DicomDirectoryProcessor; + +/// +/// Unit tests for ZipDicomDirectoryFinder +/// +[TestFixture] +public class ZipDicomDirectoryFinderTests { - /// - /// Unit tests for ZipDicomDirectoryFinder - /// - [TestFixture] - public class ZipDicomDirectoryFinderTests + [OneTimeSetUp] + public void OneTimeSetUp() + { + } + + [Test] + public void FindRandomDicomsOrZipsDirectory() { - [OneTimeSetUp] - public void OneTimeSetUp() + var fileSystem = new MockFileSystem(new Dictionary + { + { Path.GetFullPath("/PACS/FFF/DDD/a.dcm"), new MockFileData([0x12, 0x34, 0x56, 0xd2] ) }, + { Path.GetFullPath("/PACS/FFF/b.dcm"), new MockFileData([0x12, 0x34, 0x56, 0xd2] ) }, + { Path.GetFullPath("/PACS/CCC/c.zip"), new MockFileData([0x12, 0x34, 0x56, 0xd2] ) }, + }); + + var m1 = new AccessionDirectoryMessage { - } + //NOTE: These can't be rooted, so can't easily use Path.GetFullPath + DirectoryPath = "CCC".Replace('/', 
Path.DirectorySeparatorChar) + }; - [Test] - public void FindRandomDicomsOrZipsDirectory() + var m2 = new AccessionDirectoryMessage { - var fileSystem = new MockFileSystem(new Dictionary - { - { Path.GetFullPath("/PACS/FFF/DDD/a.dcm"), new MockFileData([0x12, 0x34, 0x56, 0xd2] ) }, - { Path.GetFullPath("/PACS/FFF/b.dcm"), new MockFileData([0x12, 0x34, 0x56, 0xd2] ) }, - { Path.GetFullPath("/PACS/CCC/c.zip"), new MockFileData([0x12, 0x34, 0x56, 0xd2] ) }, - }); - - var m1 = new AccessionDirectoryMessage - { - //NOTE: These can't be rooted, so can't easily use Path.GetFullPath - DirectoryPath = "CCC".Replace('/', Path.DirectorySeparatorChar) - }; - - var m2 = new AccessionDirectoryMessage - { - DirectoryPath = "FFF".Replace('/', Path.DirectorySeparatorChar) - }; - - - var m3 = new AccessionDirectoryMessage - { - DirectoryPath = "FFF/DDD".Replace('/', Path.DirectorySeparatorChar) - }; - - string rootDir = Path.GetFullPath("/PACS"); - var mockProducerModel = new Mock(); - var ddf = new ZipDicomDirectoryFinder(rootDir, fileSystem, "*.dcm", mockProducerModel.Object); - ddf.SearchForDicomDirectories(rootDir); - - mockProducerModel.Verify(pm => pm.SendMessage(m1, null, It.IsAny())); - mockProducerModel.Verify(pm => pm.SendMessage(m2, null, It.IsAny())); - mockProducerModel.Verify(pm => pm.SendMessage(m3, null, It.IsAny())); - } + DirectoryPath = "FFF".Replace('/', Path.DirectorySeparatorChar) + }; + + + var m3 = new AccessionDirectoryMessage + { + DirectoryPath = "FFF/DDD".Replace('/', Path.DirectorySeparatorChar) + }; + + string rootDir = Path.GetFullPath("/PACS"); + var mockProducerModel = new Mock(); + var ddf = new ZipDicomDirectoryFinder(rootDir, fileSystem, "*.dcm", mockProducerModel.Object); + ddf.SearchForDicomDirectories(rootDir); + + mockProducerModel.Verify(pm => pm.SendMessage(m1, null, It.IsAny())); + mockProducerModel.Verify(pm => pm.SendMessage(m2, null, It.IsAny())); + mockProducerModel.Verify(pm => pm.SendMessage(m3, null, It.IsAny())); } } diff --git 
a/tests/SmiServices.UnitTests/Applications/ExtractImages/CohortCsvParserTests.cs b/tests/SmiServices.UnitTests/Applications/ExtractImages/CohortCsvParserTests.cs index 42481238d..4aaf46c21 100644 --- a/tests/SmiServices.UnitTests/Applications/ExtractImages/CohortCsvParserTests.cs +++ b/tests/SmiServices.UnitTests/Applications/ExtractImages/CohortCsvParserTests.cs @@ -7,216 +7,215 @@ using System.IO.Abstractions.TestingHelpers; -namespace SmiServices.UnitTests.Applications.ExtractImages -{ - public class CohortCsvParserTests - { - #region Fixture Methods - - [OneTimeSetUp] - public void OneTimeSetUp() - { - } +namespace SmiServices.UnitTests.Applications.ExtractImages; - [OneTimeTearDown] - public void OneTimeTearDown() { } +public class CohortCsvParserTests +{ + #region Fixture Methods - #endregion + [OneTimeSetUp] + public void OneTimeSetUp() + { + } - #region Test Methods + [OneTimeTearDown] + public void OneTimeTearDown() { } - [SetUp] - public void SetUp() { } + #endregion - [TearDown] - public void TearDown() { } + #region Test Methods - #endregion + [SetUp] + public void SetUp() { } - #region Tests + [TearDown] + public void TearDown() { } - [TestCase(ExtractionKey.StudyInstanceUID)] - [TestCase(ExtractionKey.SeriesInstanceUID)] - [TestCase(ExtractionKey.SOPInstanceUID)] - public void HappyPath(ExtractionKey expectedExtractionKey) - { - var fs = new MockFileSystem( - new Dictionary - { - {"foo.csv", $"{expectedExtractionKey}\n1.2.3.4"}, - } - ); + #endregion - var parser = new CohortCsvParser(fs); - (ExtractionKey extractionKey, List ids) = parser.Parse("foo.csv"); + #region Tests - Assert.Multiple(() => + [TestCase(ExtractionKey.StudyInstanceUID)] + [TestCase(ExtractionKey.SeriesInstanceUID)] + [TestCase(ExtractionKey.SOPInstanceUID)] + public void HappyPath(ExtractionKey expectedExtractionKey) + { + var fs = new MockFileSystem( + new Dictionary { - Assert.That(ids, Is.EqualTo(new List { "1.2.3.4" })); - Assert.That(extractionKey, 
Is.EqualTo(expectedExtractionKey)); - }); - } + {"foo.csv", $"{expectedExtractionKey}\n1.2.3.4"}, + } + ); - [Test] - public void HappyPath_AFewMore() - { - var fs = new MockFileSystem( - new Dictionary - { - {"foo.csv", "StudyInstanceUID\n1.2.3.4\n5.6.7.8"}, - } - ); + var parser = new CohortCsvParser(fs); + (ExtractionKey extractionKey, List ids) = parser.Parse("foo.csv"); - var parser = new CohortCsvParser(fs); - (ExtractionKey extractionKey, List ids) = parser.Parse("foo.csv"); + Assert.Multiple(() => + { + Assert.That(ids, Is.EqualTo(new List { "1.2.3.4" })); + Assert.That(extractionKey, Is.EqualTo(expectedExtractionKey)); + }); + } - Assert.Multiple(() => + [Test] + public void HappyPath_AFewMore() + { + var fs = new MockFileSystem( + new Dictionary { - Assert.That(extractionKey, Is.EqualTo(ExtractionKey.StudyInstanceUID)); - Assert.That(ids, Is.EqualTo(new List { "1.2.3.4", "5.6.7.8" })); - }); - } + {"foo.csv", "StudyInstanceUID\n1.2.3.4\n5.6.7.8"}, + } + ); - [Test] - public void BlankLines_AreIgnored() - { - var fs = new MockFileSystem( - new Dictionary - { - {"foo.csv", "StudyInstanceUID\n\n1.2.3.4\n\n\n5.6.7.8\n\n\n\n"}, - } - ); + var parser = new CohortCsvParser(fs); + (ExtractionKey extractionKey, List ids) = parser.Parse("foo.csv"); - var parser = new CohortCsvParser(fs); - (ExtractionKey extractionKey, List ids) = parser.Parse("foo.csv"); + Assert.Multiple(() => + { + Assert.That(extractionKey, Is.EqualTo(ExtractionKey.StudyInstanceUID)); + Assert.That(ids, Is.EqualTo(new List { "1.2.3.4", "5.6.7.8" })); + }); + } - Assert.Multiple(() => + [Test] + public void BlankLines_AreIgnored() + { + var fs = new MockFileSystem( + new Dictionary { - Assert.That(extractionKey, Is.EqualTo(ExtractionKey.StudyInstanceUID)); - Assert.That(ids, Is.EqualTo(new List { "1.2.3.4", "5.6.7.8" })); - }); - } + {"foo.csv", "StudyInstanceUID\n\n1.2.3.4\n\n\n5.6.7.8\n\n\n\n"}, + } + ); - [Test] - public void ExtraWhitespace_IsStripped() - { - var fs = new MockFileSystem( - 
new Dictionary - { - {"foo.csv", "StudyInstanceUID\n 1.2.3.4 "}, - } - ); + var parser = new CohortCsvParser(fs); + (ExtractionKey extractionKey, List ids) = parser.Parse("foo.csv"); - var parser = new CohortCsvParser(fs); - (ExtractionKey extractionKey, List ids) = parser.Parse("foo.csv"); + Assert.Multiple(() => + { + Assert.That(extractionKey, Is.EqualTo(ExtractionKey.StudyInstanceUID)); + Assert.That(ids, Is.EqualTo(new List { "1.2.3.4", "5.6.7.8" })); + }); + } - Assert.Multiple(() => + [Test] + public void ExtraWhitespace_IsStripped() + { + var fs = new MockFileSystem( + new Dictionary { - Assert.That(extractionKey, Is.EqualTo(ExtractionKey.StudyInstanceUID)); - Assert.That(ids, Is.EqualTo(new List { "1.2.3.4" })); - }); - } + {"foo.csv", "StudyInstanceUID\n 1.2.3.4 "}, + } + ); - [Test] - public void QuotedValues_AreAllowed() - { - var fs = new MockFileSystem( - new Dictionary - { - {"foo.csv", "StudyInstanceUID\n\"1.2.3.4\""}, - } - ); + var parser = new CohortCsvParser(fs); + (ExtractionKey extractionKey, List ids) = parser.Parse("foo.csv"); - var parser = new CohortCsvParser(fs); - (ExtractionKey extractionKey, List ids) = parser.Parse("foo.csv"); + Assert.Multiple(() => + { + Assert.That(extractionKey, Is.EqualTo(ExtractionKey.StudyInstanceUID)); + Assert.That(ids, Is.EqualTo(new List { "1.2.3.4" })); + }); + } - Assert.Multiple(() => + [Test] + public void QuotedValues_AreAllowed() + { + var fs = new MockFileSystem( + new Dictionary { - Assert.That(extractionKey, Is.EqualTo(ExtractionKey.StudyInstanceUID)); - Assert.That(ids, Is.EqualTo(new List { "1.2.3.4" })); - }); - } + {"foo.csv", "StudyInstanceUID\n\"1.2.3.4\""}, + } + ); + + var parser = new CohortCsvParser(fs); + (ExtractionKey extractionKey, List ids) = parser.Parse("foo.csv"); - [Test] - public void EmptyCsv_ThrowsException() + Assert.Multiple(() => { - var fs = new MockFileSystem( - new Dictionary - { - {"foo.csv", ""}, - } - ); + Assert.That(extractionKey, 
Is.EqualTo(ExtractionKey.StudyInstanceUID)); + Assert.That(ids, Is.EqualTo(new List { "1.2.3.4" })); + }); + } - var parser = new CohortCsvParser(fs); + [Test] + public void EmptyCsv_ThrowsException() + { + var fs = new MockFileSystem( + new Dictionary + { + {"foo.csv", ""}, + } + ); - var exc = Assert.Throws(() => parser.Parse("foo.csv")); - Assert.That(exc!.Message, Is.EqualTo("CSV is empty")); - } + var parser = new CohortCsvParser(fs); - [Test] - public void InvalidHeader_ThrowsException() - { - var fs = new MockFileSystem( - new Dictionary - { - {"foo.csv", "Wheee"}, - } - ); + var exc = Assert.Throws(() => parser.Parse("foo.csv")); + Assert.That(exc!.Message, Is.EqualTo("CSV is empty")); + } - var parser = new CohortCsvParser(fs); + [Test] + public void InvalidHeader_ThrowsException() + { + var fs = new MockFileSystem( + new Dictionary + { + {"foo.csv", "Wheee"}, + } + ); - var exc = Assert.Throws(() => parser.Parse("foo.csv")); - Assert.That(exc!.Message, Does.StartWith("CSV header must be a valid ExtractionKey")); - } + var parser = new CohortCsvParser(fs); - [Test] - public void MultiColumn_InHeader_ThrowsException() - { - var fs = new MockFileSystem( - new Dictionary - { - {"foo.csv", "SeriesInstanceUID,"}, - } - ); + var exc = Assert.Throws(() => parser.Parse("foo.csv")); + Assert.That(exc!.Message, Does.StartWith("CSV header must be a valid ExtractionKey")); + } - var parser = new CohortCsvParser(fs); + [Test] + public void MultiColumn_InHeader_ThrowsException() + { + var fs = new MockFileSystem( + new Dictionary + { + {"foo.csv", "SeriesInstanceUID,"}, + } + ); - var exc = Assert.Throws(() => parser.Parse("foo.csv")); - Assert.That(exc!.Message, Is.EqualTo("CSV must have exactly 1 column")); - } + var parser = new CohortCsvParser(fs); - [Test] - public void MultiColumn_InRecord_ThrowsException() - { - var fs = new MockFileSystem( - new Dictionary - { - {"foo.csv", "SeriesInstanceUID\nfoo,"}, - } - ); + var exc = Assert.Throws(() => 
parser.Parse("foo.csv")); + Assert.That(exc!.Message, Is.EqualTo("CSV must have exactly 1 column")); + } - var parser = new CohortCsvParser(fs); + [Test] + public void MultiColumn_InRecord_ThrowsException() + { + var fs = new MockFileSystem( + new Dictionary + { + {"foo.csv", "SeriesInstanceUID\nfoo,"}, + } + ); - var exc = Assert.Throws(() => parser.Parse("foo.csv")); - Assert.That(exc!.Message, Is.EqualTo("CSV must have exactly 1 column")); - } + var parser = new CohortCsvParser(fs); - [Test] - public void NoRecords_ThrowsException() - { - var fs = new MockFileSystem( - new Dictionary - { - {"foo.csv", "SeriesInstanceUID\n"}, - } - ); + var exc = Assert.Throws(() => parser.Parse("foo.csv")); + Assert.That(exc!.Message, Is.EqualTo("CSV must have exactly 1 column")); + } - var parser = new CohortCsvParser(fs); + [Test] + public void NoRecords_ThrowsException() + { + var fs = new MockFileSystem( + new Dictionary + { + {"foo.csv", "SeriesInstanceUID\n"}, + } + ); - var exc = Assert.Throws(() => parser.Parse("foo.csv")); - Assert.That(exc!.Message, Is.EqualTo("No records in the cohort CSV")); - } + var parser = new CohortCsvParser(fs); - #endregion + var exc = Assert.Throws(() => parser.Parse("foo.csv")); + Assert.That(exc!.Message, Is.EqualTo("No records in the cohort CSV")); } + + #endregion } diff --git a/tests/SmiServices.UnitTests/Applications/ExtractImages/ExtractImagesCliOptionsTests.cs b/tests/SmiServices.UnitTests/Applications/ExtractImages/ExtractImagesCliOptionsTests.cs index 7a66f1c61..ae8e642b7 100644 --- a/tests/SmiServices.UnitTests/Applications/ExtractImages/ExtractImagesCliOptionsTests.cs +++ b/tests/SmiServices.UnitTests/Applications/ExtractImages/ExtractImagesCliOptionsTests.cs @@ -5,59 +5,58 @@ using System.Collections.Generic; -namespace SmiServices.UnitTests.Applications.ExtractImages +namespace SmiServices.UnitTests.Applications.ExtractImages; + +public class ExtractImagesCliOptionsTests { - public class ExtractImagesCliOptionsTests + #region 
Fixture Methods + + [OneTimeSetUp] + public void OneTimeSetUp() { - #region Fixture Methods + } - [OneTimeSetUp] - public void OneTimeSetUp() - { - } + [OneTimeTearDown] + public void OneTimeTearDown() { } - [OneTimeTearDown] - public void OneTimeTearDown() { } + #endregion - #endregion + #region Test Methods - #region Test Methods + [SetUp] + public void SetUp() { } - [SetUp] - public void SetUp() { } + [TearDown] + public void TearDown() { } - [TearDown] - public void TearDown() { } + #endregion - #endregion + #region Tests - #region Tests + [Test] + public void ParseArguments() + { + Parser parser = SmiCliInit.GetDefaultParser(); - [Test] - public void ParseArguments() + void Verify(IEnumerable args, string modality, bool ident, bool noFilters) { - Parser parser = SmiCliInit.GetDefaultParser(); - - void Verify(IEnumerable args, string modality, bool ident, bool noFilters) - { - parser.ParseArguments(args) - .WithParsed(options => + parser.ParseArguments(args) + .WithParsed(options => + { + Assert.Multiple(() => { - Assert.Multiple(() => - { - Assert.That(options.ProjectId, Is.EqualTo("1234-5678")); - Assert.That(options.CohortCsvFile, Is.EqualTo("foo.csv")); - Assert.That(options.Modality, Is.EqualTo(modality)); - Assert.That(options.IsIdentifiableExtraction, Is.EqualTo(ident)); - Assert.That(options.IsNoFiltersExtraction, Is.EqualTo(noFilters)); - }); - }) - .WithNotParsed(errors => Assert.Fail(string.Join(',', errors))); - } - - Verify(["-p", "1234-5678", "-c", "foo.csv", "-m", "CT", "-i", "-f"], "CT", true, true); + Assert.That(options.ProjectId, Is.EqualTo("1234-5678")); + Assert.That(options.CohortCsvFile, Is.EqualTo("foo.csv")); + Assert.That(options.Modality, Is.EqualTo(modality)); + Assert.That(options.IsIdentifiableExtraction, Is.EqualTo(ident)); + Assert.That(options.IsNoFiltersExtraction, Is.EqualTo(noFilters)); + }); + }) + .WithNotParsed(errors => Assert.Fail(string.Join(',', errors))); } - #endregion + Verify(["-p", "1234-5678", "-c", "foo.csv", 
"-m", "CT", "-i", "-f"], "CT", true, true); } + + #endregion } diff --git a/tests/SmiServices.UnitTests/Applications/ExtractImages/ExtractImagesHostTests.cs b/tests/SmiServices.UnitTests/Applications/ExtractImages/ExtractImagesHostTests.cs index 155bd2588..3ef47af09 100644 --- a/tests/SmiServices.UnitTests/Applications/ExtractImages/ExtractImagesHostTests.cs +++ b/tests/SmiServices.UnitTests/Applications/ExtractImages/ExtractImagesHostTests.cs @@ -7,151 +7,150 @@ using System.IO.Abstractions; using System.IO.Abstractions.TestingHelpers; -namespace SmiServices.UnitTests.Applications.ExtractImages +namespace SmiServices.UnitTests.Applications.ExtractImages; + +internal class ExtractImagesHostTests { - internal class ExtractImagesHostTests + [Test] + public void Constructor_HappyPathWithoutPool() { - [Test] - public void Constructor_HappyPathWithoutPool() + var globals = new GlobalOptionsFactory().Load(nameof(Constructor_HappyPathWithoutPool)); + globals.FileSystemOptions!.ExtractRoot = "extract-root"; + globals.FileSystemOptions.ExtractionPoolRoot = null; + + var cliOptions = new ExtractImagesCliOptions { - var globals = new GlobalOptionsFactory().Load(nameof(Constructor_HappyPathWithoutPool)); - globals.FileSystemOptions!.ExtractRoot = "extract-root"; - globals.FileSystemOptions.ExtractionPoolRoot = null; + CohortCsvFile = "foo.csv", + }; - var cliOptions = new ExtractImagesCliOptions - { - CohortCsvFile = "foo.csv", - }; + var fileSystem = new MockFileSystem(); + fileSystem.Directory.CreateDirectory("extract-root"); + fileSystem.File.Create("foo.csv"); - var fileSystem = new MockFileSystem(); - fileSystem.Directory.CreateDirectory("extract-root"); - fileSystem.File.Create("foo.csv"); + var mockMessageBroker = new Mock(MockBehavior.Strict); + var mockSender = new Mock(MockBehavior.Strict); - var mockMessageBroker = new Mock(MockBehavior.Strict); - var mockSender = new Mock(MockBehavior.Strict); + var host = new ExtractImagesHost(globals, cliOptions, 
mockSender.Object, mockMessageBroker.Object, fileSystem, false); + } - var host = new ExtractImagesHost(globals, cliOptions, mockSender.Object, mockMessageBroker.Object, fileSystem, false); - } + [Test] + public void Constructor_HappyPathWithPool() + { + var globals = new GlobalOptionsFactory().Load(nameof(Constructor_HappyPathWithPool)); + globals.FileSystemOptions!.ExtractRoot = "extract-root"; + globals.FileSystemOptions.ExtractionPoolRoot = "pool"; - [Test] - public void Constructor_HappyPathWithPool() + var cliOptions = new ExtractImagesCliOptions { - var globals = new GlobalOptionsFactory().Load(nameof(Constructor_HappyPathWithPool)); - globals.FileSystemOptions!.ExtractRoot = "extract-root"; - globals.FileSystemOptions.ExtractionPoolRoot = "pool"; + CohortCsvFile = "foo.csv", + }; - var cliOptions = new ExtractImagesCliOptions - { - CohortCsvFile = "foo.csv", - }; + var fileSystem = new MockFileSystem(); + fileSystem.Directory.CreateDirectory("extract-root"); + fileSystem.File.Create("foo.csv"); + fileSystem.Directory.CreateDirectory("pool"); - var fileSystem = new MockFileSystem(); - fileSystem.Directory.CreateDirectory("extract-root"); - fileSystem.File.Create("foo.csv"); - fileSystem.Directory.CreateDirectory("pool"); + var mockMessageBroker = new Mock(MockBehavior.Strict); + var mockSender = new Mock(MockBehavior.Strict); - var mockMessageBroker = new Mock(MockBehavior.Strict); - var mockSender = new Mock(MockBehavior.Strict); + var host = new ExtractImagesHost(globals, cliOptions, mockSender.Object, mockMessageBroker.Object, fileSystem, false); + } + + [TestCase(null)] + [TestCase("some/missing/path")] + public void Constructor_PooledExtractionWithNoRootSet_ThrowsException(string? 
poolRoot) + { + // Arrange - var host = new ExtractImagesHost(globals, cliOptions, mockSender.Object, mockMessageBroker.Object, fileSystem, false); - } + var globals = new GlobalOptionsFactory().Load(nameof(Constructor_PooledExtractionWithNoRootSet_ThrowsException)); + globals.FileSystemOptions!.ExtractRoot = "extract-root"; + globals.FileSystemOptions.ExtractionPoolRoot = poolRoot; - [TestCase(null)] - [TestCase("some/missing/path")] - public void Constructor_PooledExtractionWithNoRootSet_ThrowsException(string? poolRoot) + var cliOptions = new ExtractImagesCliOptions { - // Arrange + CohortCsvFile = "foo.csv", + IsPooledExtraction = true, + }; - var globals = new GlobalOptionsFactory().Load(nameof(Constructor_PooledExtractionWithNoRootSet_ThrowsException)); - globals.FileSystemOptions!.ExtractRoot = "extract-root"; - globals.FileSystemOptions.ExtractionPoolRoot = poolRoot; + var fileSystem = new MockFileSystem(); + fileSystem.Directory.CreateDirectory("extract-root"); - var cliOptions = new ExtractImagesCliOptions - { - CohortCsvFile = "foo.csv", - IsPooledExtraction = true, - }; + var mockMessageBroker = new Mock(MockBehavior.Strict); + var mockSender = new Mock(MockBehavior.Strict); - var fileSystem = new MockFileSystem(); - fileSystem.Directory.CreateDirectory("extract-root"); + // Act - var mockMessageBroker = new Mock(MockBehavior.Strict); - var mockSender = new Mock(MockBehavior.Strict); + void act() => _ = new ExtractImagesHost(globals, cliOptions, mockSender.Object, mockMessageBroker.Object, fileSystem, false); - // Act + // Assert - void act() => _ = new ExtractImagesHost(globals, cliOptions, mockSender.Object, mockMessageBroker.Object, fileSystem, false); + var exc = Assert.Throws(act); + Assert.That(exc.Message, Is.EqualTo("IsPooledExtraction can only be passed if ExtractionPoolRoot is a directory")); + } - // Assert + [Test] + public void Constructor_PooledExtractionWithIsIdentifiableExtractionSet_ThrowsException() + { + // Arrange - var exc = 
Assert.Throws(act); - Assert.That(exc.Message, Is.EqualTo("IsPooledExtraction can only be passed if ExtractionPoolRoot is a directory")); - } + var globals = new GlobalOptionsFactory().Load(nameof(Constructor_PooledExtractionWithIsIdentifiableExtractionSet_ThrowsException)); + globals.FileSystemOptions!.ExtractRoot = "extract-root"; + globals.FileSystemOptions.ExtractionPoolRoot = "pool"; - [Test] - public void Constructor_PooledExtractionWithIsIdentifiableExtractionSet_ThrowsException() + var cliOptions = new ExtractImagesCliOptions { - // Arrange + CohortCsvFile = "foo.csv", + IsPooledExtraction = true, + IsIdentifiableExtraction = true, + }; - var globals = new GlobalOptionsFactory().Load(nameof(Constructor_PooledExtractionWithIsIdentifiableExtractionSet_ThrowsException)); - globals.FileSystemOptions!.ExtractRoot = "extract-root"; - globals.FileSystemOptions.ExtractionPoolRoot = "pool"; + var fileSystem = new MockFileSystem(); + fileSystem.Directory.CreateDirectory("extract-root"); + fileSystem.Directory.CreateDirectory("pool"); - var cliOptions = new ExtractImagesCliOptions - { - CohortCsvFile = "foo.csv", - IsPooledExtraction = true, - IsIdentifiableExtraction = true, - }; + var mockMessageBroker = new Mock(MockBehavior.Strict); + var mockSender = new Mock(MockBehavior.Strict); - var fileSystem = new MockFileSystem(); - fileSystem.Directory.CreateDirectory("extract-root"); - fileSystem.Directory.CreateDirectory("pool"); + // Act - var mockMessageBroker = new Mock(MockBehavior.Strict); - var mockSender = new Mock(MockBehavior.Strict); + void act() => _ = new ExtractImagesHost(globals, cliOptions, mockSender.Object, mockMessageBroker.Object, fileSystem, false); - // Act + // Assert - void act() => _ = new ExtractImagesHost(globals, cliOptions, mockSender.Object, mockMessageBroker.Object, fileSystem, false); + var exc = Assert.Throws(act); + Assert.That(exc.Message, Is.EqualTo("IsPooledExtraction is incompatible with IsIdentifiableExtraction")); + } - // Assert + 
[Test] + public void Constructor_PooledExtractionWithIsNoFilterExtractionSet_ThrowsException() + { + // Arrange - var exc = Assert.Throws(act); - Assert.That(exc.Message, Is.EqualTo("IsPooledExtraction is incompatible with IsIdentifiableExtraction")); - } + var globals = new GlobalOptionsFactory().Load(nameof(Constructor_PooledExtractionWithIsNoFilterExtractionSet_ThrowsException)); + globals.FileSystemOptions!.ExtractRoot = "extract-root"; + globals.FileSystemOptions.ExtractionPoolRoot = "pool"; - [Test] - public void Constructor_PooledExtractionWithIsNoFilterExtractionSet_ThrowsException() + var cliOptions = new ExtractImagesCliOptions { - // Arrange - - var globals = new GlobalOptionsFactory().Load(nameof(Constructor_PooledExtractionWithIsNoFilterExtractionSet_ThrowsException)); - globals.FileSystemOptions!.ExtractRoot = "extract-root"; - globals.FileSystemOptions.ExtractionPoolRoot = "pool"; - - var cliOptions = new ExtractImagesCliOptions - { - CohortCsvFile = "foo.csv", - IsPooledExtraction = true, - IsNoFiltersExtraction = true, - }; + CohortCsvFile = "foo.csv", + IsPooledExtraction = true, + IsNoFiltersExtraction = true, + }; - var fileSystem = new MockFileSystem(); - fileSystem.Directory.CreateDirectory("extract-root"); - fileSystem.Directory.CreateDirectory("pool"); + var fileSystem = new MockFileSystem(); + fileSystem.Directory.CreateDirectory("extract-root"); + fileSystem.Directory.CreateDirectory("pool"); - var mockMessageBroker = new Mock(MockBehavior.Strict); - var mockSender = new Mock(MockBehavior.Strict); + var mockMessageBroker = new Mock(MockBehavior.Strict); + var mockSender = new Mock(MockBehavior.Strict); - // Act + // Act - void act() => _ = new ExtractImagesHost(globals, cliOptions, mockSender.Object, mockMessageBroker.Object, fileSystem, false); + void act() => _ = new ExtractImagesHost(globals, cliOptions, mockSender.Object, mockMessageBroker.Object, fileSystem, false); - // Assert + // Assert - var exc = Assert.Throws(act); - 
Assert.That(exc.Message, Is.EqualTo("IsPooledExtraction is incompatible with IsNoFiltersExtraction")); - } + var exc = Assert.Throws(act); + Assert.That(exc.Message, Is.EqualTo("IsPooledExtraction is incompatible with IsNoFiltersExtraction")); } } diff --git a/tests/SmiServices.UnitTests/Applications/ExtractImages/ExtractionMessageSenderTests.cs b/tests/SmiServices.UnitTests/Applications/ExtractImages/ExtractionMessageSenderTests.cs index a15a3be0a..6a978d354 100644 --- a/tests/SmiServices.UnitTests/Applications/ExtractImages/ExtractionMessageSenderTests.cs +++ b/tests/SmiServices.UnitTests/Applications/ExtractImages/ExtractionMessageSenderTests.cs @@ -15,303 +15,302 @@ using System.Linq.Expressions; -namespace SmiServices.UnitTests.Applications.ExtractImages +namespace SmiServices.UnitTests.Applications.ExtractImages; + +public class ExtractionMessageSenderTests { - public class ExtractionMessageSenderTests + #region Fixture Methods + + [OneTimeSetUp] + public void OneTimeSetUp() { - #region Fixture Methods + } - [OneTimeSetUp] - public void OneTimeSetUp() - { - } + [OneTimeTearDown] + public void OneTimeTearDown() { } - [OneTimeTearDown] - public void OneTimeTearDown() { } + #endregion - #endregion + #region Test Methods - #region Test Methods + [SetUp] + public void SetUp() { } - [SetUp] - public void SetUp() { } + [TearDown] + public void TearDown() { } - [TearDown] - public void TearDown() { } + private class TestConsoleInput : IConsoleInput + { + private string? _line; - private class TestConsoleInput : IConsoleInput + public TestConsoleInput(string line) { - private string? _line; - - public TestConsoleInput(string line) - { - _line = line; - } + _line = line; + } - public string? GetNextLine() - { - string? line = _line; - _line = null; - return line; - } + public string? GetNextLine() + { + string? 
line = _line; + _line = null; + return line; } + } - #endregion + #endregion - #region Tests + #region Tests - [TestCase(true)] - [TestCase(false)] - public void HappyPath_Interactive(bool confirm) + [TestCase(true)] + [TestCase(false)] + public void HappyPath_Interactive(bool confirm) + { + Expression> expr = x => x.SendMessage(It.IsAny(), null, It.IsAny()); + + var mockExtractionRequestProducer = new Mock(MockBehavior.Strict); + mockExtractionRequestProducer.Setup(expr).Returns((IMessageHeader?)null); + + var mockExtractionRequestInfoProducer = new Mock(MockBehavior.Strict); + mockExtractionRequestInfoProducer.Setup(expr).Returns((IMessageHeader?)null); + + var fs = new MockFileSystem(); + const string extractRoot = "extractRoot"; + var extractDir = fs.Path.Join("proj1", "extractions", "foo"); + + var processor = new ExtractionMessageSender( + new ExtractImagesOptions(), + new ExtractImagesCliOptions { ProjectId = "1234-5678", Modality = "CT" }, + mockExtractionRequestProducer.Object, + mockExtractionRequestInfoProducer.Object, + fs, + extractRoot, + extractDir, + new TestDateTimeProvider(), + new TestConsoleInput(confirm ? 
"y" : "n") + ); + + var idList = new List { "foo" }; + processor.SendMessages(ExtractionKey.StudyInstanceUID, idList); + + if (confirm) { - Expression> expr = x => x.SendMessage(It.IsAny(), null, It.IsAny()); - - var mockExtractionRequestProducer = new Mock(MockBehavior.Strict); - mockExtractionRequestProducer.Setup(expr).Returns((IMessageHeader?)null); - - var mockExtractionRequestInfoProducer = new Mock(MockBehavior.Strict); - mockExtractionRequestInfoProducer.Setup(expr).Returns((IMessageHeader?)null); + mockExtractionRequestProducer.Verify(expr, Times.Once); + mockExtractionRequestInfoProducer.Verify(expr, Times.Once); - var fs = new MockFileSystem(); - const string extractRoot = "extractRoot"; - var extractDir = fs.Path.Join("proj1", "extractions", "foo"); + Assert.That(fs.File.Exists(fs.Path.Join(extractRoot, extractDir, "jobId.txt")), Is.True); + } + else + { + mockExtractionRequestProducer.Verify(expr, Times.Never); + mockExtractionRequestInfoProducer.Verify(expr, Times.Never); - var processor = new ExtractionMessageSender( - new ExtractImagesOptions(), - new ExtractImagesCliOptions { ProjectId = "1234-5678", Modality = "CT" }, - mockExtractionRequestProducer.Object, - mockExtractionRequestInfoProducer.Object, - fs, - extractRoot, - extractDir, - new TestDateTimeProvider(), - new TestConsoleInput(confirm ? 
"y" : "n") - ); + Assert.That(fs.Directory.Exists(extractDir), Is.False); + } + } - var idList = new List { "foo" }; - processor.SendMessages(ExtractionKey.StudyInstanceUID, idList); + [Test] + public void HappyPath_NonInteractive() + { + Expression> expr = x => x.SendMessage(It.IsAny(), null, It.IsAny()); - if (confirm) - { - mockExtractionRequestProducer.Verify(expr, Times.Once); - mockExtractionRequestInfoProducer.Verify(expr, Times.Once); + var mockExtractionRequestProducer = new Mock(MockBehavior.Strict); + mockExtractionRequestProducer.Setup(expr).Returns((IMessageHeader?)null); - Assert.That(fs.File.Exists(fs.Path.Join(extractRoot, extractDir, "jobId.txt")), Is.True); - } - else - { - mockExtractionRequestProducer.Verify(expr, Times.Never); - mockExtractionRequestInfoProducer.Verify(expr, Times.Never); + var mockExtractionRequestInfoProducer = new Mock(MockBehavior.Strict); + mockExtractionRequestInfoProducer.Setup(expr).Returns((IMessageHeader?)null); - Assert.That(fs.Directory.Exists(extractDir), Is.False); - } - } + var fs = new MockFileSystem(); + const string extractRoot = "extractRoot"; + var extractDir = fs.Path.Join("proj1", "extractions", "foo"); - [Test] - public void HappyPath_NonInteractive() - { - Expression> expr = x => x.SendMessage(It.IsAny(), null, It.IsAny()); + var processor = new ExtractionMessageSender( + new ExtractImagesOptions(), + new ExtractImagesCliOptions { ProjectId = "1234-5678", NonInteractive = true, Modality = "CT" }, + mockExtractionRequestProducer.Object, + mockExtractionRequestInfoProducer.Object, + fs, + extractRoot, + extractDir, + new TestDateTimeProvider(), + new RealConsoleInput() + ); - var mockExtractionRequestProducer = new Mock(MockBehavior.Strict); - mockExtractionRequestProducer.Setup(expr).Returns((IMessageHeader?)null); + var idList = new List { "foo" }; + processor.SendMessages(ExtractionKey.StudyInstanceUID, idList); - var mockExtractionRequestInfoProducer = new Mock(MockBehavior.Strict); - 
mockExtractionRequestInfoProducer.Setup(expr).Returns((IMessageHeader?)null); + mockExtractionRequestProducer.Verify(expr, Times.Once); + mockExtractionRequestInfoProducer.Verify(expr, Times.Once); - var fs = new MockFileSystem(); - const string extractRoot = "extractRoot"; - var extractDir = fs.Path.Join("proj1", "extractions", "foo"); + Assert.That(fs.File.Exists(fs.Path.Join(extractRoot, extractDir, "jobId.txt")), Is.True); + } - var processor = new ExtractionMessageSender( + [TestCase("")] + [TestCase(" ")] + public void ExtractionDirs_AreValidated(string dir) + { + Assert.Throws(() => + { + var _ = new ExtractionMessageSender( new ExtractImagesOptions(), - new ExtractImagesCliOptions { ProjectId = "1234-5678", NonInteractive = true, Modality = "CT" }, - mockExtractionRequestProducer.Object, - mockExtractionRequestInfoProducer.Object, - fs, - extractRoot, - extractDir, + new ExtractImagesCliOptions() { Modality = "CT" }, + new Mock(MockBehavior.Loose).Object, + new Mock(MockBehavior.Loose).Object, + new FileSystem(), + "extractionRoot", + dir, new TestDateTimeProvider(), new RealConsoleInput() ); + }); - var idList = new List { "foo" }; - processor.SendMessages(ExtractionKey.StudyInstanceUID, idList); - - mockExtractionRequestProducer.Verify(expr, Times.Once); - mockExtractionRequestInfoProducer.Verify(expr, Times.Once); - - Assert.That(fs.File.Exists(fs.Path.Join(extractRoot, extractDir, "jobId.txt")), Is.True); - } - - [TestCase("")] - [TestCase(" ")] - public void ExtractionDirs_AreValidated(string dir) - { - Assert.Throws(() => - { - var _ = new ExtractionMessageSender( - new ExtractImagesOptions(), - new ExtractImagesCliOptions() { Modality = "CT" }, - new Mock(MockBehavior.Loose).Object, - new Mock(MockBehavior.Loose).Object, - new FileSystem(), - "extractionRoot", - dir, - new TestDateTimeProvider(), - new RealConsoleInput() - ); - }); - - Assert.Throws(() => - { - var _ = new ExtractionMessageSender( - new ExtractImagesOptions(), - new 
ExtractImagesCliOptions() { Modality = "CT" }, - new Mock(MockBehavior.Loose).Object, - new Mock(MockBehavior.Loose).Object, - new FileSystem(), - dir, - "extractionDir", - new TestDateTimeProvider(), - new RealConsoleInput() - ); - }); - } - - [TestCase("")] - [TestCase(" ")] - public void ProjectId_IsValidated(string projectId) - { - Assert.Throws(() => - { - var _ = new ExtractionMessageSender( - new ExtractImagesOptions(), - new ExtractImagesCliOptions { ProjectId = projectId, Modality = "CT" }, - new Mock(MockBehavior.Loose).Object, - new Mock(MockBehavior.Loose).Object, - new FileSystem(), - "extractRoot", - "extractDir", - new TestDateTimeProvider(), - new RealConsoleInput() - ); - }); - } - - [Test] - public void MaxIdentifiersPerMessage_IsValidated() + Assert.Throws(() => { - Assert.Throws(() => - { - var _ = new ExtractionMessageSender( - new ExtractImagesOptions { MaxIdentifiersPerMessage = 0 }, - new ExtractImagesCliOptions() { Modality = "CT" }, - new Mock(MockBehavior.Loose).Object, - new Mock(MockBehavior.Loose).Object, - new FileSystem(), - "extractRoot", - "extractDir", - new TestDateTimeProvider(), - new RealConsoleInput() - ); - }); - } - + var _ = new ExtractionMessageSender( + new ExtractImagesOptions(), + new ExtractImagesCliOptions() { Modality = "CT" }, + new Mock(MockBehavior.Loose).Object, + new Mock(MockBehavior.Loose).Object, + new FileSystem(), + dir, + "extractionDir", + new TestDateTimeProvider(), + new RealConsoleInput() + ); + }); + } - [Test] - public void IdList_IsNotEmpty() + [TestCase("")] + [TestCase(" ")] + public void ProjectId_IsValidated(string projectId) + { + Assert.Throws(() => { - var sender = new ExtractionMessageSender( + var _ = new ExtractionMessageSender( new ExtractImagesOptions(), - new ExtractImagesCliOptions { ProjectId = "1234-5678", Modality = "CT" }, + new ExtractImagesCliOptions { ProjectId = projectId, Modality = "CT" }, new Mock(MockBehavior.Loose).Object, new Mock(MockBehavior.Loose).Object, new 
FileSystem(), - "extractRoot", + "extractRoot", "extractDir", new TestDateTimeProvider(), new RealConsoleInput() ); + }); + } - var exc = Assert.Throws(() => - { - sender.SendMessages(ExtractionKey.StudyInstanceUID, []); - }); - Assert.That(exc!.Message, Is.EqualTo("ID list is empty")); - } - - [Test] - public void ListChunking_CorrectIds() + [Test] + public void MaxIdentifiersPerMessage_IsValidated() + { + Assert.Throws(() => { - Expression> expr = x => x.SendMessage(It.IsAny(), null, It.IsAny()); - - var calledWith = new List(); - - var mockExtractionRequestProducer = new Mock(MockBehavior.Strict); - mockExtractionRequestProducer - .Setup(expr) - .Returns((IMessageHeader?)null) - .Callback((IMessage msg, IMessageHeader _, string __) => - { - calledWith.AddRange(((ExtractionRequestMessage)msg).ExtractionIdentifiers); - }); - - var mockExtractionRequestInfoProducer = new Mock(MockBehavior.Strict); - mockExtractionRequestInfoProducer.Setup(expr).Returns((IMessageHeader?)null); - - var processor = new ExtractionMessageSender( - new ExtractImagesOptions { MaxIdentifiersPerMessage = 1 }, - new ExtractImagesCliOptions { ProjectId = "1234-5678", NonInteractive = true, Modality = "CT" }, - mockExtractionRequestProducer.Object, - mockExtractionRequestInfoProducer.Object, + var _ = new ExtractionMessageSender( + new ExtractImagesOptions { MaxIdentifiersPerMessage = 0 }, + new ExtractImagesCliOptions() { Modality = "CT" }, + new Mock(MockBehavior.Loose).Object, + new Mock(MockBehavior.Loose).Object, new FileSystem(), - "extractRoot", + "extractRoot", "extractDir", new TestDateTimeProvider(), new RealConsoleInput() ); + }); + } - List idList = Enumerable.Range(0, 5).Select(x => x.ToString()).ToList(); - processor.SendMessages(ExtractionKey.StudyInstanceUID, idList); - mockExtractionRequestProducer.Verify(expr, Times.Exactly(5)); - mockExtractionRequestInfoProducer.Verify(expr, Times.Once); + [Test] + public void IdList_IsNotEmpty() + { + var sender = new 
ExtractionMessageSender( + new ExtractImagesOptions(), + new ExtractImagesCliOptions { ProjectId = "1234-5678", Modality = "CT" }, + new Mock(MockBehavior.Loose).Object, + new Mock(MockBehavior.Loose).Object, + new FileSystem(), + "extractRoot", + "extractDir", + new TestDateTimeProvider(), + new RealConsoleInput() + ); + + var exc = Assert.Throws(() => + { + sender.SendMessages(ExtractionKey.StudyInstanceUID, []); + }); + Assert.That(exc!.Message, Is.EqualTo("ID list is empty")); + } - Assert.That(idList.SequenceEqual(calledWith), Is.True); - } + [Test] + public void ListChunking_CorrectIds() + { + Expression> expr = x => x.SendMessage(It.IsAny(), null, It.IsAny()); - [TestCase(1, 1, 1)] // nIds = maxPerMessage => 1 message - [TestCase(1, 10, 1)] // nIds < maxPerMessage => 1 message - [TestCase(2, 1, 2)] // nIds > maxPerMessage => 2 messages - public void ListChunking_EdgeCases(int nIds, int maxPerMessage, int expectedMessages) - { - Expression> expr = x => x.SendMessage(It.IsAny(), null, It.IsAny()); + var calledWith = new List(); + + var mockExtractionRequestProducer = new Mock(MockBehavior.Strict); + mockExtractionRequestProducer + .Setup(expr) + .Returns((IMessageHeader?)null) + .Callback((IMessage msg, IMessageHeader _, string __) => + { + calledWith.AddRange(((ExtractionRequestMessage)msg).ExtractionIdentifiers); + }); - var mockExtractionRequestProducer = new Mock(MockBehavior.Strict); - mockExtractionRequestProducer.Setup(expr).Returns((IMessageHeader?)null); + var mockExtractionRequestInfoProducer = new Mock(MockBehavior.Strict); + mockExtractionRequestInfoProducer.Setup(expr).Returns((IMessageHeader?)null); - var mockExtractionRequestInfoProducer = new Mock(MockBehavior.Strict); - mockExtractionRequestInfoProducer.Setup(expr).Returns((IMessageHeader?)null); + var processor = new ExtractionMessageSender( + new ExtractImagesOptions { MaxIdentifiersPerMessage = 1 }, + new ExtractImagesCliOptions { ProjectId = "1234-5678", NonInteractive = true, Modality = 
"CT" }, + mockExtractionRequestProducer.Object, + mockExtractionRequestInfoProducer.Object, + new FileSystem(), + "extractRoot", + "extractDir", + new TestDateTimeProvider(), + new RealConsoleInput() + ); - var processor = new ExtractionMessageSender( - new ExtractImagesOptions { MaxIdentifiersPerMessage = maxPerMessage }, - new ExtractImagesCliOptions { ProjectId = "1234-5678", NonInteractive = true, Modality = "CT" }, - mockExtractionRequestProducer.Object, - mockExtractionRequestInfoProducer.Object, - new FileSystem(), - "extractRoot", - "extractDir", - new TestDateTimeProvider(), - new RealConsoleInput() - ); + List idList = Enumerable.Range(0, 5).Select(x => x.ToString()).ToList(); + processor.SendMessages(ExtractionKey.StudyInstanceUID, idList); - List idList = Enumerable.Range(0, nIds).Select(x => x.ToString()).ToList(); - processor.SendMessages(ExtractionKey.StudyInstanceUID, idList); + mockExtractionRequestProducer.Verify(expr, Times.Exactly(5)); + mockExtractionRequestInfoProducer.Verify(expr, Times.Once); - mockExtractionRequestProducer.Verify(expr, Times.Exactly(expectedMessages)); - mockExtractionRequestInfoProducer.Verify(expr, Times.Once); - } + Assert.That(idList.SequenceEqual(calledWith), Is.True); + } - #endregion + [TestCase(1, 1, 1)] // nIds = maxPerMessage => 1 message + [TestCase(1, 10, 1)] // nIds < maxPerMessage => 1 message + [TestCase(2, 1, 2)] // nIds > maxPerMessage => 2 messages + public void ListChunking_EdgeCases(int nIds, int maxPerMessage, int expectedMessages) + { + Expression> expr = x => x.SendMessage(It.IsAny(), null, It.IsAny()); + + var mockExtractionRequestProducer = new Mock(MockBehavior.Strict); + mockExtractionRequestProducer.Setup(expr).Returns((IMessageHeader?)null); + + var mockExtractionRequestInfoProducer = new Mock(MockBehavior.Strict); + mockExtractionRequestInfoProducer.Setup(expr).Returns((IMessageHeader?)null); + + var processor = new ExtractionMessageSender( + new ExtractImagesOptions { MaxIdentifiersPerMessage 
= maxPerMessage }, + new ExtractImagesCliOptions { ProjectId = "1234-5678", NonInteractive = true, Modality = "CT" }, + mockExtractionRequestProducer.Object, + mockExtractionRequestInfoProducer.Object, + new FileSystem(), + "extractRoot", + "extractDir", + new TestDateTimeProvider(), + new RealConsoleInput() + ); + + List idList = Enumerable.Range(0, nIds).Select(x => x.ToString()).ToList(); + processor.SendMessages(ExtractionKey.StudyInstanceUID, idList); + + mockExtractionRequestProducer.Verify(expr, Times.Exactly(expectedMessages)); + mockExtractionRequestInfoProducer.Verify(expr, Times.Once); } + + #endregion } diff --git a/tests/SmiServices.UnitTests/Applications/TriggerUpdates/MapperSourceUnitTests.cs b/tests/SmiServices.UnitTests/Applications/TriggerUpdates/MapperSourceUnitTests.cs index aa63caa32..48090c51b 100644 --- a/tests/SmiServices.UnitTests/Applications/TriggerUpdates/MapperSourceUnitTests.cs +++ b/tests/SmiServices.UnitTests/Applications/TriggerUpdates/MapperSourceUnitTests.cs @@ -4,36 +4,35 @@ using System; -namespace SmiServices.UnitTests.Applications.TriggerUpdates +namespace SmiServices.UnitTests.Applications.TriggerUpdates; + +class MapperSourceUnitTests { - class MapperSourceUnitTests + [Test] + public void TestNoIdentifierMapperOptions() { - [Test] - public void TestNoIdentifierMapperOptions() - { - var ex = Assert.Throws(() => new MapperSource(new GlobalOptions(), new TriggerUpdatesFromMapperOptions())); - Assert.That(ex!.Message, Is.EqualTo("No SwapperType has been specified in GlobalOptions.IdentifierMapperOptions")); + var ex = Assert.Throws(() => new MapperSource(new GlobalOptions(), new TriggerUpdatesFromMapperOptions())); + Assert.That(ex!.Message, Is.EqualTo("No SwapperType has been specified in GlobalOptions.IdentifierMapperOptions")); - } - [Test] - public void TestNoSwapper() - { - var ex = Assert.Throws(() => new MapperSource(new GlobalOptions { IdentifierMapperOptions = new IdentifierMapperOptions() }, new 
TriggerUpdatesFromMapperOptions())); - Assert.That(ex!.Message, Is.EqualTo("No SwapperType has been specified in GlobalOptions.IdentifierMapperOptions")); - } - [Test] - public void InvalidSwapper() + } + [Test] + public void TestNoSwapper() + { + var ex = Assert.Throws(() => new MapperSource(new GlobalOptions { IdentifierMapperOptions = new IdentifierMapperOptions() }, new TriggerUpdatesFromMapperOptions())); + Assert.That(ex!.Message, Is.EqualTo("No SwapperType has been specified in GlobalOptions.IdentifierMapperOptions")); + } + [Test] + public void InvalidSwapper() + { + var ex = Assert.Throws(() => new MapperSource(new GlobalOptions { - var ex = Assert.Throws(() => new MapperSource(new GlobalOptions + IdentifierMapperOptions = new IdentifierMapperOptions { - IdentifierMapperOptions = new IdentifierMapperOptions - { - SwapperType = "Trollolol" - } + SwapperType = "Trollolol" } - , new TriggerUpdatesFromMapperOptions())); - - Assert.That(ex!.Message, Is.EqualTo("Could not create IdentifierMapper Swapper with SwapperType:Trollolol")); } + , new TriggerUpdatesFromMapperOptions())); + + Assert.That(ex!.Message, Is.EqualTo("Could not create IdentifierMapper Swapper with SwapperType:Trollolol")); } } diff --git a/tests/SmiServices.UnitTests/Common/ArrayHelperTests.cs b/tests/SmiServices.UnitTests/Common/ArrayHelperTests.cs index 9492ba72f..306565697 100644 --- a/tests/SmiServices.UnitTests/Common/ArrayHelperTests.cs +++ b/tests/SmiServices.UnitTests/Common/ArrayHelperTests.cs @@ -2,19 +2,18 @@ using DicomTypeTranslation.Helpers; using NUnit.Framework; -namespace SmiServices.UnitTests.Common +namespace SmiServices.UnitTests.Common; + +public class ArrayHelperTests { - public class ArrayHelperTests + [Test] + public void TestStringRepresentation() { - [Test] - public void TestStringRepresentation() - { - var a = new uint[2]; - a[0] = 10; - a[1] = 123; + var a = new uint[2]; + a[0] = 10; + a[1] = 123; - Assert.That(ArrayHelperMethods.GetStringRepresentation(a), 
Is.EqualTo("10\\123")); + Assert.That(ArrayHelperMethods.GetStringRepresentation(a), Is.EqualTo("10\\123")); - } } } diff --git a/tests/SmiServices.UnitTests/Common/ComplexMessageSerializationTests.cs b/tests/SmiServices.UnitTests/Common/ComplexMessageSerializationTests.cs index 1ae7a4982..9357231e4 100644 --- a/tests/SmiServices.UnitTests/Common/ComplexMessageSerializationTests.cs +++ b/tests/SmiServices.UnitTests/Common/ComplexMessageSerializationTests.cs @@ -6,92 +6,91 @@ using System.Collections.Generic; using System.Linq; -namespace SmiServices.UnitTests.Common +namespace SmiServices.UnitTests.Common; + +public class ComplexMessageSerializationTests { - public class ComplexMessageSerializationTests + [Test] + public void ExtractFileCollectionInfoMessage_NoParents() { - [Test] - public void ExtractFileCollectionInfoMessage_NoParents() + var msg = new ExtractFileCollectionInfoMessage { - var msg = new ExtractFileCollectionInfoMessage - { - ExtractionJobIdentifier = Guid.NewGuid(), - KeyValue = "f", - ExtractFileMessagesDispatched = new JsonCompatibleDictionary { { new MessageHeader(), "dave" } }, - ExtractionDirectory = "C:\\fish", - Modality = "CT", - ProjectNumber = "1234-5678", - }; + ExtractionJobIdentifier = Guid.NewGuid(), + KeyValue = "f", + ExtractFileMessagesDispatched = new JsonCompatibleDictionary { { new MessageHeader(), "dave" } }, + ExtractionDirectory = "C:\\fish", + Modality = "CT", + ProjectNumber = "1234-5678", + }; - var str = Newtonsoft.Json.JsonConvert.SerializeObject(msg); - var msg2 = JsonConvert.DeserializeObject(str); + var str = Newtonsoft.Json.JsonConvert.SerializeObject(msg); + var msg2 = JsonConvert.DeserializeObject(str); - Assert.Multiple(() => - { - Assert.That(msg2!.ExtractFileMessagesDispatched, Has.Count.EqualTo(1)); - Assert.That(msg2.ExtractFileMessagesDispatched.Keys.Single(), Is.Not.EqualTo(null)); - }); - } + Assert.Multiple(() => + { + Assert.That(msg2!.ExtractFileMessagesDispatched, Has.Count.EqualTo(1)); + 
Assert.That(msg2.ExtractFileMessagesDispatched.Keys.Single(), Is.Not.EqualTo(null)); + }); + } - [Test] - public void ExtractFileCollectionInfoMessage_WithParents() + [Test] + public void ExtractFileCollectionInfoMessage_WithParents() + { + var msg = new ExtractFileCollectionInfoMessage { - var msg = new ExtractFileCollectionInfoMessage - { - ExtractionJobIdentifier = Guid.NewGuid(), - KeyValue = "f", - ExtractFileMessagesDispatched = [], - ExtractionDirectory = "C:\\fish", - Modality = "CT", - ProjectNumber = "123", - JobSubmittedAt = DateTime.UtcNow, - }; + ExtractionJobIdentifier = Guid.NewGuid(), + KeyValue = "f", + ExtractFileMessagesDispatched = [], + ExtractionDirectory = "C:\\fish", + Modality = "CT", + ProjectNumber = "123", + JobSubmittedAt = DateTime.UtcNow, + }; - var grandparent = new MessageHeader(); - var parent = new MessageHeader(grandparent); - var child = new MessageHeader(parent); - msg.ExtractFileMessagesDispatched.Add(child, "dave"); + var grandparent = new MessageHeader(); + var parent = new MessageHeader(grandparent); + var child = new MessageHeader(parent); + msg.ExtractFileMessagesDispatched.Add(child, "dave"); - var str = Newtonsoft.Json.JsonConvert.SerializeObject(msg); - var msg2 = JsonConvert.DeserializeObject(str); + var str = Newtonsoft.Json.JsonConvert.SerializeObject(msg); + var msg2 = JsonConvert.DeserializeObject(str); - Assert.Multiple(() => - { - Assert.That(msg2!.ExtractFileMessagesDispatched, Has.Count.EqualTo(1)); - Assert.That(msg2.ExtractFileMessagesDispatched.Keys.Single(), Is.Not.Null); + Assert.Multiple(() => + { + Assert.That(msg2!.ExtractFileMessagesDispatched, Has.Count.EqualTo(1)); + Assert.That(msg2.ExtractFileMessagesDispatched.Keys.Single(), Is.Not.Null); - Assert.That(msg2.ExtractFileMessagesDispatched.Keys.First().MessageGuid, Is.EqualTo(child.MessageGuid)); - Assert.That(msg2.ExtractFileMessagesDispatched.Keys.First().Parents, Does.Contain(parent.MessageGuid)); - }); - 
Assert.That(msg2.ExtractFileMessagesDispatched.Keys.First().Parents, Does.Contain(grandparent.MessageGuid)); - } + Assert.That(msg2.ExtractFileMessagesDispatched.Keys.First().MessageGuid, Is.EqualTo(child.MessageGuid)); + Assert.That(msg2.ExtractFileMessagesDispatched.Keys.First().Parents, Does.Contain(parent.MessageGuid)); + }); + Assert.That(msg2.ExtractFileMessagesDispatched.Keys.First().Parents, Does.Contain(grandparent.MessageGuid)); + } - [Test] - public void TestMessageSerialization_WithGuid() + [Test] + public void TestMessageSerialization_WithGuid() + { + var identifiers = new List { - var identifiers = new List - { - "fish1", - "fish2", - "fish3", - "fish4" - }; + "fish1", + "fish2", + "fish3", + "fish4" + }; - var message = new ExtractionRequestMessage - { - ExtractionJobIdentifier = Guid.NewGuid(), - ProjectNumber = "1234-5678", - ExtractionDirectory = "C:\\fish", - Modality = "CT", - KeyTag = "SeriesInstanceUID", - ExtractionIdentifiers = identifiers - }; + var message = new ExtractionRequestMessage + { + ExtractionJobIdentifier = Guid.NewGuid(), + ProjectNumber = "1234-5678", + ExtractionDirectory = "C:\\fish", + Modality = "CT", + KeyTag = "SeriesInstanceUID", + ExtractionIdentifiers = identifiers + }; - string json = Newtonsoft.Json.JsonConvert.SerializeObject(message); - Assert.That(json, Is.Not.Null); + string json = Newtonsoft.Json.JsonConvert.SerializeObject(message); + Assert.That(json, Is.Not.Null); - var reconstructed = JsonConvert.DeserializeObject(json); - Assert.That(reconstructed, Is.EqualTo(message)); - } + var reconstructed = JsonConvert.DeserializeObject(json); + Assert.That(reconstructed, Is.EqualTo(message)); } } diff --git a/tests/SmiServices.UnitTests/Common/ConsumerTests.cs b/tests/SmiServices.UnitTests/Common/ConsumerTests.cs index 41e043f5c..3929b899c 100644 --- a/tests/SmiServices.UnitTests/Common/ConsumerTests.cs +++ b/tests/SmiServices.UnitTests/Common/ConsumerTests.cs @@ -5,58 +5,57 @@ using System.Threading; -namespace 
SmiServices.UnitTests.Common +namespace SmiServices.UnitTests.Common; + +public class ConsumerTests { - public class ConsumerTests + [Test] + public void Consumer_UnhandledException_TriggersFatal() { - [Test] - public void Consumer_UnhandledException_TriggersFatal() - { - var consumer = new ThrowingConsumer(); + var consumer = new ThrowingConsumer(); + + var fatalCalled = false; + consumer.OnFatal += (sender, args) => fatalCalled = true; - var fatalCalled = false; - consumer.OnFatal += (sender, args) => fatalCalled = true; + consumer.ProcessMessage(new MessageHeader(), new TestMessage(), 1); - consumer.ProcessMessage(new MessageHeader(), new TestMessage(), 1); + Thread.Sleep(1000); + Assert.That(fatalCalled, Is.True); + } - Thread.Sleep(1000); - Assert.That(fatalCalled, Is.True); - } + [Test] + public void MessageHolds() + { + // Arrange - [Test] - public void MessageHolds() + var consumer = new ThrowingConsumer() { - // Arrange - - var consumer = new ThrowingConsumer() - { - HoldUnprocessableMessages = true, - QoSPrefetchCount = 1, - }; + HoldUnprocessableMessages = true, + QoSPrefetchCount = 1, + }; - // Act + // Act - consumer.ProcessMessage(new MessageHeader(), new TestMessage(), 1); + consumer.ProcessMessage(new MessageHeader(), new TestMessage(), 1); - // Assert + // Assert - Assert.Multiple(() => - { - Assert.That(consumer.HeldMessages, Is.EqualTo(1)); - Assert.That(consumer.AckCount, Is.EqualTo(0)); - }); - } + Assert.Multiple(() => + { + Assert.That(consumer.HeldMessages, Is.EqualTo(1)); + Assert.That(consumer.AckCount, Is.EqualTo(0)); + }); } +} - public class TestMessage : IMessage { } +public class TestMessage : IMessage { } - public class ThrowingConsumer : Consumer - { - public int HeldMessages { get => _heldMessages; } +public class ThrowingConsumer : Consumer +{ + public int HeldMessages { get => _heldMessages; } - protected override void ProcessMessageImpl(IMessageHeader header, TestMessage msg, ulong tag) - { - throw new Exception("Throwing!"); - 
} + protected override void ProcessMessageImpl(IMessageHeader header, TestMessage msg, ulong tag) + { + throw new Exception("Throwing!"); } } diff --git a/tests/SmiServices.UnitTests/Common/DicomDataGeneratorExtensions.cs b/tests/SmiServices.UnitTests/Common/DicomDataGeneratorExtensions.cs index c8268dd0a..95f8dcb2b 100644 --- a/tests/SmiServices.UnitTests/Common/DicomDataGeneratorExtensions.cs +++ b/tests/SmiServices.UnitTests/Common/DicomDataGeneratorExtensions.cs @@ -7,40 +7,39 @@ using System.IO; using System.Linq; -namespace SmiServices.UnitTests.Common +namespace SmiServices.UnitTests.Common; + +public static class DicomDataGeneratorExtensions { - public static class DicomDataGeneratorExtensions + public static List GenerateImages(this DicomDataGenerator g, int numberOfImages, Random r) { - public static List GenerateImages(this DicomDataGenerator g, int numberOfImages, Random r) - { - var toReturn = new List(); - g.MaximumImages = numberOfImages; + var toReturn = new List(); + g.MaximumImages = numberOfImages; - while (toReturn.Count <= numberOfImages) - toReturn.AddRange(g.GenerateStudyImages(new Person(r), out _)); + while (toReturn.Count <= numberOfImages) + toReturn.AddRange(g.GenerateStudyImages(new Person(r), out _)); - //trim off extras - toReturn = toReturn.Take(numberOfImages).ToList(); + //trim off extras + toReturn = toReturn.Take(numberOfImages).ToList(); - Assert.That(toReturn, Has.Count.EqualTo(numberOfImages)); + Assert.That(toReturn, Has.Count.EqualTo(numberOfImages)); - return toReturn; - } + return toReturn; + } - public static IEnumerable GenerateImageFiles(this DicomDataGenerator g, int numberOfImages, Random r) - { - var p = new PersonCollection(); - p.GeneratePeople(5000, r); + public static IEnumerable GenerateImageFiles(this DicomDataGenerator g, int numberOfImages, Random r) + { + var p = new PersonCollection(); + p.GeneratePeople(5000, r); - if (g.OutputDir?.Exists == true) - g.OutputDir.Delete(true); + if (g.OutputDir?.Exists == 
true) + g.OutputDir.Delete(true); - var inventory = new FileInfo(Path.Combine(TestContext.CurrentContext.WorkDirectory, "inventory.csv")); + var inventory = new FileInfo(Path.Combine(TestContext.CurrentContext.WorkDirectory, "inventory.csv")); - g.MaximumImages = numberOfImages; - g.GenerateTestDataFile(p, inventory, numberOfImages); + g.MaximumImages = numberOfImages; + g.GenerateTestDataFile(p, inventory, numberOfImages); - return g.OutputDir?.GetFiles("*.dcm", SearchOption.AllDirectories) ?? Enumerable.Empty(); - } + return g.OutputDir?.GetFiles("*.dcm", SearchOption.AllDirectories) ?? Enumerable.Empty(); } } diff --git a/tests/SmiServices.UnitTests/Common/EquTests.cs b/tests/SmiServices.UnitTests/Common/EquTests.cs index 3f3a5d680..0a4c8aadd 100644 --- a/tests/SmiServices.UnitTests/Common/EquTests.cs +++ b/tests/SmiServices.UnitTests/Common/EquTests.cs @@ -3,91 +3,90 @@ using SmiServices.Common.Messages; using System.Collections.Generic; -namespace SmiServices.UnitTests.Common +namespace SmiServices.UnitTests.Common; + +public class EquTests { - public class EquTests - { - #region Fixture Methods + #region Fixture Methods - [OneTimeSetUp] - public void OneTimeSetUp() - { - } + [OneTimeSetUp] + public void OneTimeSetUp() + { + } - [OneTimeTearDown] - public void OneTimeTearDown() { } + [OneTimeTearDown] + public void OneTimeTearDown() { } - private class FooMessage : MemberwiseEquatable, IMessage - { - public string? FooString { get; set; } - public List? FooList { get; set; } - public Dictionary? FooDict { get; set; } - } + private class FooMessage : MemberwiseEquatable, IMessage + { + public string? FooString { get; set; } + public List? FooList { get; set; } + public Dictionary? 
FooDict { get; set; } + } - #endregion + #endregion - #region Test Methods + #region Test Methods - [SetUp] - public void SetUp() { } + [SetUp] + public void SetUp() { } - [TearDown] - public void TearDown() { } + [TearDown] + public void TearDown() { } - #endregion + #endregion - #region Tests + #region Tests - [Test] - public void Equals_WithEqu_HandlesDictionaries() + [Test] + public void Equals_WithEqu_HandlesDictionaries() + { + var m1 = new FooMessage { - var m1 = new FooMessage - { - FooString = "study", - FooDict = new Dictionary - { - { "foo", 1 }, - { "bar", 2 }, - } - }; - - var m2 = new FooMessage + FooString = "study", + FooDict = new Dictionary { - FooString = "study", - FooDict = [] - }; + { "foo", 1 }, + { "bar", 2 }, + } + }; - Assert.That(m2, Is.Not.EqualTo(m1)); + var m2 = new FooMessage + { + FooString = "study", + FooDict = [] + }; - m2.FooDict.Add("bar", 2); - m2.FooDict.Add("foo", 1); + Assert.That(m2, Is.Not.EqualTo(m1)); - Assert.That(m2, Is.EqualTo(m1)); - } + m2.FooDict.Add("bar", 2); + m2.FooDict.Add("foo", 1); - [Test] - public void Equals_WithEqu_HandlesLists() - { - var m1 = new FooMessage - { - FooString = "study", - FooList = ["foo", "bar"] - }; + Assert.That(m2, Is.EqualTo(m1)); + } - var m2 = new FooMessage - { - FooString = "study", - }; + [Test] + public void Equals_WithEqu_HandlesLists() + { + var m1 = new FooMessage + { + FooString = "study", + FooList = ["foo", "bar"] + }; - Assert.That(m2, Is.Not.EqualTo(m1)); + var m2 = new FooMessage + { + FooString = "study", + }; - m2.FooList = ["bar", "foo"]; - Assert.That(m2, Is.Not.EqualTo(m1)); + Assert.That(m2, Is.Not.EqualTo(m1)); - m2.FooList = ["foo", "bar"]; - Assert.That(m2, Is.EqualTo(m1)); - } + m2.FooList = ["bar", "foo"]; + Assert.That(m2, Is.Not.EqualTo(m1)); - #endregion + m2.FooList = ["foo", "bar"]; + Assert.That(m2, Is.EqualTo(m1)); } + + #endregion } diff --git a/tests/SmiServices.UnitTests/Common/LoggingTests.cs b/tests/SmiServices.UnitTests/Common/LoggingTests.cs 
index 839c722d2..331e57bc3 100644 --- a/tests/SmiServices.UnitTests/Common/LoggingTests.cs +++ b/tests/SmiServices.UnitTests/Common/LoggingTests.cs @@ -4,48 +4,47 @@ using System.Collections.Generic; using System.IO; -namespace SmiServices.UnitTests.Common -{ - class LoggingTests - { - #region Fixture Methods +namespace SmiServices.UnitTests.Common; - [OneTimeSetUp] - public void OneTimeSetUp() - { - } +class LoggingTests +{ + #region Fixture Methods - [OneTimeTearDown] - public void OneTimeTearDown() { } + [OneTimeSetUp] + public void OneTimeSetUp() + { + } - #endregion + [OneTimeTearDown] + public void OneTimeTearDown() { } - #region Test Methods + #endregion - [SetUp] - public void SetUp() { } + #region Test Methods - [TearDown] - public void TearDown() { } + [SetUp] + public void SetUp() { } - #endregion + [TearDown] + public void TearDown() { } - #region Tests + #endregion - [Test] - public void InvalidConfiguration_ThrowsException() - { - const string fileName = "fakeconfig.xml"; + #region Tests - File.WriteAllLines(fileName, new List { "totally an xml file" }); + [Test] + public void InvalidConfiguration_ThrowsException() + { + const string fileName = "fakeconfig.xml"; - // No exception - LogManager.Configuration = new XmlLoggingConfiguration(fileName); + File.WriteAllLines(fileName, new List { "totally an xml file" }); - LogManager.ThrowConfigExceptions = true; - Assert.Throws(() => LogManager.Configuration = new XmlLoggingConfiguration(fileName)); - } + // No exception + LogManager.Configuration = new XmlLoggingConfiguration(fileName); - #endregion + LogManager.ThrowConfigExceptions = true; + Assert.Throws(() => LogManager.Configuration = new XmlLoggingConfiguration(fileName)); } + + #endregion } diff --git a/tests/SmiServices.UnitTests/Common/MessageEqualityTests.cs b/tests/SmiServices.UnitTests/Common/MessageEqualityTests.cs index 7c3b3328e..02b0c1da3 100644 --- a/tests/SmiServices.UnitTests/Common/MessageEqualityTests.cs +++ 
b/tests/SmiServices.UnitTests/Common/MessageEqualityTests.cs @@ -5,87 +5,86 @@ using System; -namespace SmiServices.UnitTests.Common +namespace SmiServices.UnitTests.Common; + +public class MessageEqualityTests { - public class MessageEqualityTests + [Test] + public void TestEquals_AccessionDirectoryMessage() { - [Test] - public void TestEquals_AccessionDirectoryMessage() - { - var msg1 = new AccessionDirectoryMessage(); - var msg2 = new AccessionDirectoryMessage(); + var msg1 = new AccessionDirectoryMessage(); + var msg2 = new AccessionDirectoryMessage(); + + Assert.That(msg2, Is.EqualTo(msg1)); + Assert.That(msg2.GetHashCode(), Is.EqualTo(msg1.GetHashCode())); - Assert.That(msg2, Is.EqualTo(msg1)); - Assert.That(msg2.GetHashCode(), Is.EqualTo(msg1.GetHashCode())); + msg1.DirectoryPath = @"c:\temp"; + msg2.DirectoryPath = @"c:\temp"; - msg1.DirectoryPath = @"c:\temp"; - msg2.DirectoryPath = @"c:\temp"; + Assert.That(msg2, Is.EqualTo(msg1)); + Assert.That(msg2.GetHashCode(), Is.EqualTo(msg1.GetHashCode())); + } - Assert.That(msg2, Is.EqualTo(msg1)); - Assert.That(msg2.GetHashCode(), Is.EqualTo(msg1.GetHashCode())); - } + [Test] + public void TestEquals_DicomFileMessage() + { + var msg1 = new DicomFileMessage(); + var msg2 = new DicomFileMessage(); - [Test] - public void TestEquals_DicomFileMessage() - { - var msg1 = new DicomFileMessage(); - var msg2 = new DicomFileMessage(); + Assert.That(msg2, Is.EqualTo(msg1)); + Assert.That(msg2.GetHashCode(), Is.EqualTo(msg1.GetHashCode())); - Assert.That(msg2, Is.EqualTo(msg1)); - Assert.That(msg2.GetHashCode(), Is.EqualTo(msg1.GetHashCode())); + msg1.DicomDataset = "jsonified string"; + msg2.DicomDataset = "jsonified string"; - msg1.DicomDataset = "jsonified string"; - msg2.DicomDataset = "jsonified string"; + Assert.That(msg2, Is.EqualTo(msg1)); + Assert.That(msg2.GetHashCode(), Is.EqualTo(msg1.GetHashCode())); + } - Assert.That(msg2, Is.EqualTo(msg1)); - Assert.That(msg2.GetHashCode(), Is.EqualTo(msg1.GetHashCode())); - } 
+ [Test] + public void TestEquals_SeriesMessage() + { + var msg1 = new SeriesMessage(); + var msg2 = new SeriesMessage(); - [Test] - public void TestEquals_SeriesMessage() - { - var msg1 = new SeriesMessage(); - var msg2 = new SeriesMessage(); + Assert.That(msg2, Is.EqualTo(msg1)); + Assert.That(msg2.GetHashCode(), Is.EqualTo(msg1.GetHashCode())); - Assert.That(msg2, Is.EqualTo(msg1)); - Assert.That(msg2.GetHashCode(), Is.EqualTo(msg1.GetHashCode())); + msg1.DicomDataset = "jsonified string"; + msg2.DicomDataset = "jsonified string"; - msg1.DicomDataset = "jsonified string"; - msg2.DicomDataset = "jsonified string"; + Assert.That(msg2, Is.EqualTo(msg1)); + Assert.That(msg2.GetHashCode(), Is.EqualTo(msg1.GetHashCode())); + } - Assert.That(msg2, Is.EqualTo(msg1)); - Assert.That(msg2.GetHashCode(), Is.EqualTo(msg1.GetHashCode())); - } + private class FooExtractMessage : ExtractMessage { } - private class FooExtractMessage : ExtractMessage { } + [Test] + public void Tests_ExtractMessage_Equality() + { + Guid guid = Guid.NewGuid(); + DateTime dt = DateTime.UtcNow; - [Test] - public void Tests_ExtractMessage_Equality() + // TODO(rkm 2020-08-26) Swap these object initializers for proper constructors + var msg1 = new FooExtractMessage + { + JobSubmittedAt = dt, + ExtractionJobIdentifier = guid, + ProjectNumber = "1234", + ExtractionDirectory = "foo/bar", + IsIdentifiableExtraction = true, + }; + var msg2 = new FooExtractMessage { - Guid guid = Guid.NewGuid(); - DateTime dt = DateTime.UtcNow; - - // TODO(rkm 2020-08-26) Swap these object initializers for proper constructors - var msg1 = new FooExtractMessage - { - JobSubmittedAt = dt, - ExtractionJobIdentifier = guid, - ProjectNumber = "1234", - ExtractionDirectory = "foo/bar", - IsIdentifiableExtraction = true, - }; - var msg2 = new FooExtractMessage - { - JobSubmittedAt = dt, - ExtractionJobIdentifier = guid, - ProjectNumber = "1234", - ExtractionDirectory = "foo/bar", - IsIdentifiableExtraction = true, - }; - - 
Assert.That(msg2, Is.EqualTo(msg1)); - Assert.That(msg2.GetHashCode(), Is.EqualTo(msg1.GetHashCode())); - } + JobSubmittedAt = dt, + ExtractionJobIdentifier = guid, + ProjectNumber = "1234", + ExtractionDirectory = "foo/bar", + IsIdentifiableExtraction = true, + }; + + Assert.That(msg2, Is.EqualTo(msg1)); + Assert.That(msg2.GetHashCode(), Is.EqualTo(msg1.GetHashCode())); } } diff --git a/tests/SmiServices.UnitTests/Common/Messages/ExtractedFileStatusMessageTests.cs b/tests/SmiServices.UnitTests/Common/Messages/ExtractedFileStatusMessageTests.cs index 94e4ddd19..167f73a29 100644 --- a/tests/SmiServices.UnitTests/Common/Messages/ExtractedFileStatusMessageTests.cs +++ b/tests/SmiServices.UnitTests/Common/Messages/ExtractedFileStatusMessageTests.cs @@ -1,52 +1,51 @@ using NUnit.Framework; using SmiServices.Common.Messages.Extraction; -namespace SmiServices.UnitTests.Common.Messages +namespace SmiServices.UnitTests.Common.Messages; + +public class ExtractedFileStatusMessageTests { - public class ExtractedFileStatusMessageTests + #region Fixture Methods + + [OneTimeSetUp] + public void OneTimeSetUp() { - #region Fixture Methods + } - [OneTimeSetUp] - public void OneTimeSetUp() - { - } + [OneTimeTearDown] + public void OneTimeTearDown() { } - [OneTimeTearDown] - public void OneTimeTearDown() { } + #endregion - #endregion + #region Test Methods - #region Test Methods + [SetUp] + public void SetUp() { } - [SetUp] - public void SetUp() { } + [TearDown] + public void TearDown() { } - [TearDown] - public void TearDown() { } + #endregion - #endregion + #region Tests - #region Tests + [Test] + public void Constructor_FromExtractFileMessage_CopiesFilePath() + { + var fileMessage = new ExtractFileMessage + { + DicomFilePath = "foo.dcm", + OutputPath = "foo-an.dcm", + }; - [Test] - public void Constructor_FromExtractFileMessage_CopiesFilePath() + var statusMessage = new ExtractedFileStatusMessage(fileMessage); + + Assert.Multiple(() => { - var fileMessage = new ExtractFileMessage - 
{ - DicomFilePath = "foo.dcm", - OutputPath = "foo-an.dcm", - }; - - var statusMessage = new ExtractedFileStatusMessage(fileMessage); - - Assert.Multiple(() => - { - Assert.That(statusMessage.DicomFilePath, Is.EqualTo("foo.dcm")); - Assert.That(statusMessage.OutputFilePath, Is.EqualTo("foo-an.dcm")); - }); - } - - #endregion + Assert.That(statusMessage.DicomFilePath, Is.EqualTo("foo.dcm")); + Assert.That(statusMessage.OutputFilePath, Is.EqualTo("foo-an.dcm")); + }); } + + #endregion } diff --git a/tests/SmiServices.UnitTests/Common/Messages/MessageHeaderTest.cs b/tests/SmiServices.UnitTests/Common/Messages/MessageHeaderTest.cs index b7e04c2b8..1252159b0 100644 --- a/tests/SmiServices.UnitTests/Common/Messages/MessageHeaderTest.cs +++ b/tests/SmiServices.UnitTests/Common/Messages/MessageHeaderTest.cs @@ -4,93 +4,92 @@ using System.Collections.Generic; using System.Text; -namespace SmiServices.UnitTests.Common.Messages +namespace SmiServices.UnitTests.Common.Messages; + +[TestFixture] +public class MessageHeaderTest { - [TestFixture] - public class MessageHeaderTest + private readonly Dictionary _testProps = new() { - private readonly Dictionary _testProps = new() - { - {"MessageGuid", Encoding.UTF8.GetBytes(Guid.NewGuid().ToString())}, - {"ProducerProcessID", 123}, - {"ProducerExecutableName", Encoding.UTF8.GetBytes("SomeOtherService")}, - {"OriginalPublishTimestamp", (long)456}, - {"Parents", Encoding.UTF8.GetBytes($"{Guid.NewGuid()}{MessageHeader.Splitter}{Guid.NewGuid()}")}, - }; + {"MessageGuid", Encoding.UTF8.GetBytes(Guid.NewGuid().ToString())}, + {"ProducerProcessID", 123}, + {"ProducerExecutableName", Encoding.UTF8.GetBytes("SomeOtherService")}, + {"OriginalPublishTimestamp", (long)456}, + {"Parents", Encoding.UTF8.GetBytes($"{Guid.NewGuid()}{MessageHeader.Splitter}{Guid.NewGuid()}")}, + }; - #region Fixture Methods + #region Fixture Methods - [OneTimeSetUp] - public void OneTimeSetUp() - { - } + [OneTimeSetUp] + public void OneTimeSetUp() + { + } - 
[OneTimeTearDown] - public void OneTimeTearDown() { } + [OneTimeTearDown] + public void OneTimeTearDown() { } - #endregion + #endregion - #region Test Methods + #region Test Methods - [SetUp] - public void SetUp() - { - } + [SetUp] + public void SetUp() + { + } + + [TearDown] + public void TearDown() { } - [TearDown] - public void TearDown() { } + #endregion - #endregion + #region Tests - #region Tests + #endregion - #endregion + [Test] + public void TestMessageHeader_Equality() + { + var h1 = MessageHeader.FromDict(_testProps, Encoding.UTF8); + var h2 = MessageHeader.FromDict(_testProps, Encoding.UTF8); + var h3 = new MessageHeader(); - [Test] - public void TestMessageHeader_Equality() + Assert.Multiple(() => { - var h1 = MessageHeader.FromDict(_testProps, Encoding.UTF8); - var h2 = MessageHeader.FromDict(_testProps, Encoding.UTF8); - var h3 = new MessageHeader(); + // Test all the various flavours of equality - Assert.Multiple(() => - { - // Test all the various flavours of equality + Assert.That(h2, Is.EqualTo(h1)); + Assert.That(h1, Is.EqualTo(h2)); - Assert.That(h2, Is.EqualTo(h1)); - Assert.That(h1, Is.EqualTo(h2)); + Assert.That(h3, Is.Not.EqualTo(h1)); + }); + Assert.That(h1, Is.Not.EqualTo(h3)); + } - Assert.That(h3, Is.Not.EqualTo(h1)); - }); - Assert.That(h1, Is.Not.EqualTo(h3)); - } + [Test] + public void TestMessageHeader_GetHashCode() + { + var h1 = MessageHeader.FromDict(_testProps, Encoding.UTF8); + var h2 = MessageHeader.FromDict(_testProps, Encoding.UTF8); + // "A hash function must have the following properties: - If two objects compare as equal, the GetHashCode() method for each object must return the same value" + Assert.That(h2, Is.EqualTo(h1)); + Assert.That(h2.GetHashCode(), Is.EqualTo(h1.GetHashCode())); + } - [Test] - public void TestMessageHeader_GetHashCode() + [Test] + public void CurrentProgramName_Unset_ThrowsException() + { + var original = MessageHeader.CurrentProgramName; + MessageHeader.CurrentProgramName = null!; + + try { - var 
h1 = MessageHeader.FromDict(_testProps, Encoding.UTF8); - var h2 = MessageHeader.FromDict(_testProps, Encoding.UTF8); - // "A hash function must have the following properties: - If two objects compare as equal, the GetHashCode() method for each object must return the same value" - Assert.That(h2, Is.EqualTo(h1)); - Assert.That(h2.GetHashCode(), Is.EqualTo(h1.GetHashCode())); - } + var exc = Assert.Throws(static () => _ = new MessageHeader()); - [Test] - public void CurrentProgramName_Unset_ThrowsException() + Assert.That(exc?.Message, Is.EqualTo("Value must be set before use")); + } + finally { - var original = MessageHeader.CurrentProgramName; - MessageHeader.CurrentProgramName = null!; - - try - { - var exc = Assert.Throws(static () => _ = new MessageHeader()); - - Assert.That(exc?.Message, Is.EqualTo("Value must be set before use")); - } - finally - { - MessageHeader.CurrentProgramName = original; - } + MessageHeader.CurrentProgramName = original; } } } diff --git a/tests/SmiServices.UnitTests/Common/MicroserviceTester.cs b/tests/SmiServices.UnitTests/Common/MicroserviceTester.cs index 043df3ca0..c15414695 100644 --- a/tests/SmiServices.UnitTests/Common/MicroserviceTester.cs +++ b/tests/SmiServices.UnitTests/Common/MicroserviceTester.cs @@ -10,190 +10,189 @@ using System.Text; -namespace SmiServices.UnitTests.Common +namespace SmiServices.UnitTests.Common; + +public class MicroserviceTester : IDisposable { - public class MicroserviceTester : IDisposable - { - public readonly RabbitMQBroker Broker; + public readonly RabbitMQBroker Broker; + + private readonly Dictionary _sendToConsumers = []; - private readonly Dictionary _sendToConsumers = []; + private readonly List _declaredExchanges = []; + private readonly List _declaredQueues = []; - private readonly List _declaredExchanges = []; - private readonly List _declaredQueues = []; + /// + /// When true, will delete any created queues/exchanges when Dispose is called. 
Can set to false to inspect + /// queue messages before they are deleted. + /// + /// Defaults to true + /// + public bool CleanUpAfterTest { get; set; } - /// - /// When true, will delete any created queues/exchanges when Dispose is called. Can set to false to inspect - /// queue messages before they are deleted. - /// - /// Defaults to true - /// - public bool CleanUpAfterTest { get; set; } + /// + /// Hosts to call Stop on in the Dispose step. This ensures that all hosts are correctly shutdown even if Exceptions + /// are thrown in test (provided the MicroserviceTester is in a using statement). + /// + public HashSet StopOnDispose = []; + + public MicroserviceTester(RabbitOptions rabbitOptions, params ConsumerOptions[] peopleYouWantToSendMessagesTo) + { + CleanUpAfterTest = true; - /// - /// Hosts to call Stop on in the Dispose step. This ensures that all hosts are correctly shutdown even if Exceptions - /// are thrown in test (provided the MicroserviceTester is in a using statement). 
- /// - public HashSet StopOnDispose = []; + Broker = new RabbitMQBroker(rabbitOptions, "TestHost"); - public MicroserviceTester(RabbitOptions rabbitOptions, params ConsumerOptions[] peopleYouWantToSendMessagesTo) + using var model = Broker.GetModel(nameof(MicroserviceTester)); + //setup a sender channel for each of the consumers you want to test sending messages to + foreach (ConsumerOptions consumer in peopleYouWantToSendMessagesTo) { - CleanUpAfterTest = true; + if (!consumer.QueueName!.Contains("TEST.")) + consumer.QueueName = consumer.QueueName.Insert(0, "TEST."); - Broker = new RabbitMQBroker(rabbitOptions, "TestHost"); + var exchangeName = consumer.QueueName.Replace("Queue", "Exchange"); - using var model = Broker.GetModel(nameof(MicroserviceTester)); - //setup a sender channel for each of the consumers you want to test sending messages to - foreach (ConsumerOptions consumer in peopleYouWantToSendMessagesTo) + //terminate any old queues / exchanges + model.ExchangeDelete(exchangeName); + model.QueueDelete(consumer.QueueName); + _declaredExchanges.Add(exchangeName); + + //Create a binding between the exchange and the queue + model.ExchangeDeclare(exchangeName, ExchangeType.Direct, true);//durable seems to be needed because MessageBroker wants it? 
+ model.QueueDeclare(consumer.QueueName, true, false, false);//shared with other users + model.QueueBind(consumer.QueueName, exchangeName, ""); + _declaredQueues.Add(consumer.QueueName); + + //Create a producer which can send to the + var producerOptions = new ProducerOptions { - if (!consumer.QueueName!.Contains("TEST.")) - consumer.QueueName = consumer.QueueName.Insert(0, "TEST."); - - var exchangeName = consumer.QueueName.Replace("Queue", "Exchange"); - - //terminate any old queues / exchanges - model.ExchangeDelete(exchangeName); - model.QueueDelete(consumer.QueueName); - _declaredExchanges.Add(exchangeName); - - //Create a binding between the exchange and the queue - model.ExchangeDeclare(exchangeName, ExchangeType.Direct, true);//durable seems to be needed because MessageBroker wants it? - model.QueueDeclare(consumer.QueueName, true, false, false);//shared with other users - model.QueueBind(consumer.QueueName, exchangeName, ""); - _declaredQueues.Add(consumer.QueueName); - - //Create a producer which can send to the - var producerOptions = new ProducerOptions - { - ExchangeName = exchangeName - }; - - _sendToConsumers.Add(consumer, Broker.SetupProducer(producerOptions, true)); - } - } + ExchangeName = exchangeName + }; - /// - /// Sends the given message to your consumer, you must have passed the consumer into the MicroserviceTester constructor since all adapter setup happens via option - /// at MessageBroker construction time - /// - /// - /// - public void SendMessage(ConsumerOptions toConsumer, IMessage msg) - { - _sendToConsumers[toConsumer].SendMessage(msg, isInResponseTo: null, routingKey: null); - _sendToConsumers[toConsumer].WaitForConfirms(); + _sendToConsumers.Add(consumer, Broker.SetupProducer(producerOptions, true)); } + } - /// - /// Sends the given message to your consumer, you must have passed the consumer into the MicroserviceTester constructor since all adapter setup happens via option - /// at MessageBroker construction time - /// - /// - 
/// - /// - public void SendMessages(ConsumerOptions toConsumer, IEnumerable messages, bool generateIMessageHeaders) - { - foreach (IMessage msg in messages) - _sendToConsumers[toConsumer].SendMessage(msg, generateIMessageHeaders ? new MessageHeader() : null, routingKey: null); + /// + /// Sends the given message to your consumer, you must have passed the consumer into the MicroserviceTester constructor since all adapter setup happens via option + /// at MessageBroker construction time + /// + /// + /// + public void SendMessage(ConsumerOptions toConsumer, IMessage msg) + { + _sendToConsumers[toConsumer].SendMessage(msg, isInResponseTo: null, routingKey: null); + _sendToConsumers[toConsumer].WaitForConfirms(); + } - _sendToConsumers[toConsumer].WaitForConfirms(); - } + /// + /// Sends the given message to your consumer, you must have passed the consumer into the MicroserviceTester constructor since all adapter setup happens via option + /// at MessageBroker construction time + /// + /// + /// + /// + public void SendMessages(ConsumerOptions toConsumer, IEnumerable messages, bool generateIMessageHeaders) + { + foreach (IMessage msg in messages) + _sendToConsumers[toConsumer].SendMessage(msg, generateIMessageHeaders ? new MessageHeader() : null, routingKey: null); - /// - /// Sends the given message to your consumer, you must have passed the consumer into the MicroserviceTester constructor since all adapter setup happens via option - /// at MessageBroker construction time - /// - /// - /// - /// - public void SendMessage(ConsumerOptions toConsumer, IMessageHeader header, IMessage msg) - { - _sendToConsumers[toConsumer].SendMessage(msg, header, routingKey: null); - _sendToConsumers[toConsumer].WaitForConfirms(); - } + _sendToConsumers[toConsumer].WaitForConfirms(); + } - /// - /// Creates a self titled RabbitMQ exchange/queue pair where the name of the exchange is the ProducerOptions.ExchangeName and the queue has the same name. 
- /// This will delete and recreate the exchange if it already exists (ensuring no old messages are stuck floating around). - /// - /// - /// - /// false to create an entirely new Exchange=>Queue (including deleting any existing queue/exchange). False to simply declare the - /// queue and bind it to the exchange which is assumed to already exist (this allows you to set up exchange=>multiple queues). If you are setting up multiple queues - /// from a single exchange the first call should be isSecondaryBinding = false and all further calls after that for the same exchange should be isSecondaryBinding=true - /// - public void CreateExchange(string exchangeName, string? queueName = null, bool isSecondaryBinding = false, string routingKey = "") - { - if (!exchangeName.Contains("TEST.")) - exchangeName = exchangeName.Insert(0, "TEST."); + /// + /// Sends the given message to your consumer, you must have passed the consumer into the MicroserviceTester constructor since all adapter setup happens via option + /// at MessageBroker construction time + /// + /// + /// + /// + public void SendMessage(ConsumerOptions toConsumer, IMessageHeader header, IMessage msg) + { + _sendToConsumers[toConsumer].SendMessage(msg, header, routingKey: null); + _sendToConsumers[toConsumer].WaitForConfirms(); + } - string queueNameToUse = queueName ?? exchangeName.Replace("Exchange", "Queue"); + /// + /// Creates a self titled RabbitMQ exchange/queue pair where the name of the exchange is the ProducerOptions.ExchangeName and the queue has the same name. + /// This will delete and recreate the exchange if it already exists (ensuring no old messages are stuck floating around). + /// + /// + /// + /// false to create an entirely new Exchange=>Queue (including deleting any existing queue/exchange). False to simply declare the + /// queue and bind it to the exchange which is assumed to already exist (this allows you to set up exchange=>multiple queues). 
If you are setting up multiple queues + /// from a single exchange the first call should be isSecondaryBinding = false and all further calls after that for the same exchange should be isSecondaryBinding=true + /// + public void CreateExchange(string exchangeName, string? queueName = null, bool isSecondaryBinding = false, string routingKey = "") + { + if (!exchangeName.Contains("TEST.")) + exchangeName = exchangeName.Insert(0, "TEST."); - using var model = Broker.GetModel(nameof(CreateExchange)); - //setup a sender channel for each of the consumers you want to test sending messages to + string queueNameToUse = queueName ?? exchangeName.Replace("Exchange", "Queue"); - //terminate any old queues / exchanges - if (!isSecondaryBinding) - model.ExchangeDelete(exchangeName); + using var model = Broker.GetModel(nameof(CreateExchange)); + //setup a sender channel for each of the consumers you want to test sending messages to - model.QueueDelete(queueNameToUse); + //terminate any old queues / exchanges + if (!isSecondaryBinding) + model.ExchangeDelete(exchangeName); - //Create a binding between the exchange and the queue - if (!isSecondaryBinding) - model.ExchangeDeclare(exchangeName, ExchangeType.Direct, true);//durable seems to be needed because MessageBroker wants it? + model.QueueDelete(queueNameToUse); - model.QueueDeclare(queueNameToUse, true, false, false); //shared with other users - model.QueueBind(queueNameToUse, exchangeName, routingKey); + //Create a binding between the exchange and the queue + if (!isSecondaryBinding) + model.ExchangeDeclare(exchangeName, ExchangeType.Direct, true);//durable seems to be needed because MessageBroker wants it? - Console.WriteLine("Created Exchange " + exchangeName + "=>" + queueNameToUse); - } + model.QueueDeclare(queueNameToUse, true, false, false); //shared with other users + model.QueueBind(queueNameToUse, exchangeName, routingKey); - /// - /// Consumes all messages from the specified queue. Must all be of type T. 
- /// - /// - /// - /// - public IEnumerable> ConsumeMessages(string queueName) where T : IMessage - { - IModel model = Broker.GetModel($"ConsumeMessages-{queueName}"); + Console.WriteLine("Created Exchange " + exchangeName + "=>" + queueNameToUse); + } - while (true) - { - BasicGetResult message = model.BasicGet(queueName, autoAck: true); - if (message == null) - break; - var header = MessageHeader.FromDict(message.BasicProperties.Headers, Encoding.UTF8); - var iMessage = JsonConvert.DeserializeObject(Encoding.UTF8.GetString(message.Body.Span)); - yield return new Tuple(header, iMessage); - } - } + /// + /// Consumes all messages from the specified queue. Must all be of type T. + /// + /// + /// + /// + public IEnumerable> ConsumeMessages(string queueName) where T : IMessage + { + IModel model = Broker.GetModel($"ConsumeMessages-{queueName}"); - /// - /// Shuts down the tester without performing cleanup of any declared queues / exchanges - /// - public void Shutdown() + while (true) { - foreach (MicroserviceHost host in StopOnDispose) - host.Stop("MicroserviceTester Disposed"); + BasicGetResult message = model.BasicGet(queueName, autoAck: true); + if (message == null) + break; + var header = MessageHeader.FromDict(message.BasicProperties.Headers, Encoding.UTF8); + var iMessage = JsonConvert.DeserializeObject(Encoding.UTF8.GetString(message.Body.Span)); + yield return new Tuple(header, iMessage); } + } - /// - /// Deletes any declared queues / exchanges depending on the CleanUpAfterTest option, then calls shutdown - /// - public void Dispose() - { - Shutdown(); + /// + /// Shuts down the tester without performing cleanup of any declared queues / exchanges + /// + public void Shutdown() + { + foreach (MicroserviceHost host in StopOnDispose) + host.Stop("MicroserviceTester Disposed"); + } - if (CleanUpAfterTest) - { - using IModel model = Broker.GetModel(nameof(Dispose)); - _declaredExchanges.ForEach(x => model.ExchangeDelete(x)); - _declaredQueues.ForEach(x => 
model.QueueDelete(x)); - } + /// + /// Deletes any declared queues / exchanges depending on the CleanUpAfterTest option, then calls shutdown + /// + public void Dispose() + { + Shutdown(); - Broker.Shutdown(RabbitMQBroker.DefaultOperationTimeout); - GC.SuppressFinalize(this); + if (CleanUpAfterTest) + { + using IModel model = Broker.GetModel(nameof(Dispose)); + _declaredExchanges.ForEach(x => model.ExchangeDelete(x)); + _declaredQueues.ForEach(x => model.QueueDelete(x)); } + + Broker.Shutdown(RabbitMQBroker.DefaultOperationTimeout); + GC.SuppressFinalize(this); } } diff --git a/tests/SmiServices.UnitTests/Common/MongoDB/MongoDocumentHeadersTests.cs b/tests/SmiServices.UnitTests/Common/MongoDB/MongoDocumentHeadersTests.cs index 8e5bc5087..5626bee38 100644 --- a/tests/SmiServices.UnitTests/Common/MongoDB/MongoDocumentHeadersTests.cs +++ b/tests/SmiServices.UnitTests/Common/MongoDB/MongoDocumentHeadersTests.cs @@ -7,97 +7,96 @@ using System.Collections.Generic; using System.Text; -namespace SmiServices.UnitTests.Common.MongoDB +namespace SmiServices.UnitTests.Common.MongoDB; + +[TestFixture] +public class MongoDocumentHeadersTests { - [TestFixture] - public class MongoDocumentHeadersTests + [OneTimeSetUp] + public void OneTimeSetUp() { - [OneTimeSetUp] - public void OneTimeSetUp() + } + + [Test] + public void ImageDocumentHeader_HasCorrectHeaders() + { + var msg = new DicomFileMessage { - } + DicomFilePath = "path/to/file.dcm", + }; - [Test] - public void ImageDocumentHeader_HasCorrectHeaders() + string parents = $"{Guid.NewGuid()}->{Guid.NewGuid()}"; + var headers = new Dictionary { - var msg = new DicomFileMessage - { - DicomFilePath = "path/to/file.dcm", - }; + { "MessageGuid", Encoding.UTF8.GetBytes(Guid.NewGuid().ToString()) }, + { "ProducerProcessID", 1234 }, + { "ProducerExecutableName", Encoding.UTF8.GetBytes("MongoDocumentHeadersTests") }, + { "Parents", Encoding.UTF8.GetBytes(parents) }, + { "OriginalPublishTimestamp", MessageHeader.UnixTimeNow() } + }; - 
string parents = $"{Guid.NewGuid()}->{Guid.NewGuid()}"; - var headers = new Dictionary + var header = MessageHeader.FromDict(headers, Encoding.UTF8); + BsonDocument bsonImageHeader = MongoDocumentHeaders.ImageDocumentHeader(msg, header); + + var expected = new BsonDocument + { + { "DicomFilePath", msg.DicomFilePath }, + { "DicomFileSize", msg.DicomFileSize }, + { "MessageHeader", new BsonDocument { - { "MessageGuid", Encoding.UTF8.GetBytes(Guid.NewGuid().ToString()) }, - { "ProducerProcessID", 1234 }, - { "ProducerExecutableName", Encoding.UTF8.GetBytes("MongoDocumentHeadersTests") }, - { "Parents", Encoding.UTF8.GetBytes(parents) }, - { "OriginalPublishTimestamp", MessageHeader.UnixTimeNow() } - }; + { "MessageGuid", header.MessageGuid.ToString() }, + { "ProducerProcessID", header.ProducerProcessID }, + { "ProducerExecutableName", header.ProducerExecutableName }, + { "Parents", string.Join(MessageHeader.Splitter, header.Parents) }, + { "OriginalPublishTimestamp", header.OriginalPublishTimestamp } + }} + }; - var header = MessageHeader.FromDict(headers, Encoding.UTF8); - BsonDocument bsonImageHeader = MongoDocumentHeaders.ImageDocumentHeader(msg, header); + Assert.That(bsonImageHeader, Is.EqualTo(expected)); + } - var expected = new BsonDocument - { - { "DicomFilePath", msg.DicomFilePath }, - { "DicomFileSize", msg.DicomFileSize }, - { "MessageHeader", new BsonDocument - { - { "MessageGuid", header.MessageGuid.ToString() }, - { "ProducerProcessID", header.ProducerProcessID }, - { "ProducerExecutableName", header.ProducerExecutableName }, - { "Parents", string.Join(MessageHeader.Splitter, header.Parents) }, - { "OriginalPublishTimestamp", header.OriginalPublishTimestamp } - }} - }; - - Assert.That(bsonImageHeader, Is.EqualTo(expected)); - } - - [Test] - public void SeriesDocumentHeader_HasCorrectHeaders() + [Test] + public void SeriesDocumentHeader_HasCorrectHeaders() + { + var msg = new SeriesMessage { - var msg = new SeriesMessage - { - DirectoryPath = 
"path/to/files", - ImagesInSeries = 1234 - }; + DirectoryPath = "path/to/files", + ImagesInSeries = 1234 + }; - BsonDocument seriesHeader = MongoDocumentHeaders.SeriesDocumentHeader(msg); + BsonDocument seriesHeader = MongoDocumentHeaders.SeriesDocumentHeader(msg); - var expected = new BsonDocument - { - { "DirectoryPath", msg.DirectoryPath }, - { "ImagesInSeries", msg.ImagesInSeries } - }; + var expected = new BsonDocument + { + { "DirectoryPath", msg.DirectoryPath }, + { "ImagesInSeries", msg.ImagesInSeries } + }; + + Assert.That(seriesHeader, Is.EqualTo(expected)); + } - Assert.That(seriesHeader, Is.EqualTo(expected)); - } + [Test] + public void RebuildMessageHeader_HasCorrectHeaders() + { + var msg = new DicomFileMessage + { + DicomFilePath = "path/to/file.dcm", + }; - [Test] - public void RebuildMessageHeader_HasCorrectHeaders() + string parents = $"{Guid.NewGuid()}->{Guid.NewGuid()}"; + var headers = new Dictionary { - var msg = new DicomFileMessage - { - DicomFilePath = "path/to/file.dcm", - }; + { "MessageGuid", Encoding.UTF8.GetBytes(Guid.NewGuid().ToString()) }, + { "ProducerProcessID", 1234 }, + { "ProducerExecutableName", Encoding.UTF8.GetBytes("MongoDocumentHeadersTests") }, + { "Parents", Encoding.UTF8.GetBytes(parents) }, + { "OriginalPublishTimestamp", MessageHeader.UnixTimeNow() } + }; - string parents = $"{Guid.NewGuid()}->{Guid.NewGuid()}"; - var headers = new Dictionary - { - { "MessageGuid", Encoding.UTF8.GetBytes(Guid.NewGuid().ToString()) }, - { "ProducerProcessID", 1234 }, - { "ProducerExecutableName", Encoding.UTF8.GetBytes("MongoDocumentHeadersTests") }, - { "Parents", Encoding.UTF8.GetBytes(parents) }, - { "OriginalPublishTimestamp", MessageHeader.UnixTimeNow() } - }; - - var header = MessageHeader.FromDict(headers, Encoding.UTF8); - BsonDocument bsonImageHeader = MongoDocumentHeaders.ImageDocumentHeader(msg, header); - IMessageHeader rebuiltHeader = 
MongoDocumentHeaders.RebuildMessageHeader(bsonImageHeader["MessageHeader"].AsBsonDocument); - - Assert.That(rebuiltHeader, Is.EqualTo(header)); - } + var header = MessageHeader.FromDict(headers, Encoding.UTF8); + BsonDocument bsonImageHeader = MongoDocumentHeaders.ImageDocumentHeader(msg, header); + IMessageHeader rebuiltHeader = MongoDocumentHeaders.RebuildMessageHeader(bsonImageHeader["MessageHeader"].AsBsonDocument); + + Assert.That(rebuiltHeader, Is.EqualTo(header)); } } diff --git a/tests/SmiServices.UnitTests/Common/MongoDB/MongoModalityGroupsTests.cs b/tests/SmiServices.UnitTests/Common/MongoDB/MongoModalityGroupsTests.cs index dfdaa4157..48294e159 100644 --- a/tests/SmiServices.UnitTests/Common/MongoDB/MongoModalityGroupsTests.cs +++ b/tests/SmiServices.UnitTests/Common/MongoDB/MongoModalityGroupsTests.cs @@ -6,62 +6,61 @@ using System.Collections.Generic; using System.Linq; -namespace SmiServices.UnitTests.Common.MongoDB +namespace SmiServices.UnitTests.Common.MongoDB; + +[TestFixture] +public class MongoModalityGroupsTests { - [TestFixture] - public class MongoModalityGroupsTests + [Test] + public void ImageProcessor_ModalitySplit_StandardModalities() { - [Test] - public void ImageProcessor_ModalitySplit_StandardModalities() - { - List docs = MongoModalityGroups.MajorModalities - .Take(4) - .Select(x => new BsonDocument { { "tag", "value" }, { "Modality", x } }) - .ToList(); + List docs = MongoModalityGroups.MajorModalities + .Take(4) + .Select(x => new BsonDocument { { "tag", "value" }, { "Modality", x } }) + .ToList(); - List>> grouped = MongoModalityGroups.GetModalityChunks(docs).ToList(); + List>> grouped = MongoModalityGroups.GetModalityChunks(docs).ToList(); - Assert.That(grouped, Has.Count.EqualTo(4)); - foreach (Tuple> thing in grouped) - Assert.That(thing.Item2, Has.Count.EqualTo(1)); - } + Assert.That(grouped, Has.Count.EqualTo(4)); + foreach (Tuple> thing in grouped) + Assert.That(thing.Item2, Has.Count.EqualTo(1)); + } - [Test] - public void 
ImageProcessor_ModalitySplit_NonstandardModalities() + [Test] + public void ImageProcessor_ModalitySplit_NonstandardModalities() + { + var docs = new List { - var docs = new List - { - // MR group - new() {{"tag", "value"}, {"Modality", "MR"}}, - new() {{"tag", "value"}, {"Modality", "MR"}}, + // MR group + new() {{"tag", "value"}, {"Modality", "MR"}}, + new() {{"tag", "value"}, {"Modality", "MR"}}, - // CT group - new() {{"tag", "value"}, {"Modality", "CT"}}, - new() {{"tag", "value"}, {"Modality", "CT"}}, - new() {{"tag", "value"}, {"Modality", "CT"}}, + // CT group + new() {{"tag", "value"}, {"Modality", "CT"}}, + new() {{"tag", "value"}, {"Modality", "CT"}}, + new() {{"tag", "value"}, {"Modality", "CT"}}, - // Other group - new() {{"tag", "value"}, {"Modality", BsonNull.Value}}, - new() {{"tag", "value"}, {"Modality", "*"}}, - new() {{"tag", "value"}, {"Modality", "OTHER"}}, - new() {{"tag", "value"}} - }; + // Other group + new() {{"tag", "value"}, {"Modality", BsonNull.Value}}, + new() {{"tag", "value"}, {"Modality", "*"}}, + new() {{"tag", "value"}, {"Modality", "OTHER"}}, + new() {{"tag", "value"}} + }; - List>> grouped = MongoModalityGroups.GetModalityChunks(docs).ToList(); + List>> grouped = MongoModalityGroups.GetModalityChunks(docs).ToList(); - Assert.That(grouped, Has.Count.EqualTo(3), "Expected 3 groupings"); + Assert.That(grouped, Has.Count.EqualTo(3), "Expected 3 groupings"); - Assert.Multiple(() => - { - Assert.That(grouped[0].Item1, Is.EqualTo("MR"), "Expected MR group"); - Assert.That(grouped[0].Item2, Has.Count.EqualTo(2), "Expected 2 in MR group"); + Assert.Multiple(() => + { + Assert.That(grouped[0].Item1, Is.EqualTo("MR"), "Expected MR group"); + Assert.That(grouped[0].Item2, Has.Count.EqualTo(2), "Expected 2 in MR group"); - Assert.That(grouped[1].Item1, Is.EqualTo("CT"), "Expected CT group"); - Assert.That(grouped[1].Item2, Has.Count.EqualTo(3), "Expected 3 in CT group"); + Assert.That(grouped[1].Item1, Is.EqualTo("CT"), "Expected CT 
group"); + Assert.That(grouped[1].Item2, Has.Count.EqualTo(3), "Expected 3 in CT group"); - Assert.That(grouped[2].Item1, Is.EqualTo("OTHER"), "Expected OTHER group"); - Assert.That(grouped[2].Item2, Has.Count.EqualTo(4), "Expected 4 in OTHER group"); - }); - } + Assert.That(grouped[2].Item1, Is.EqualTo("OTHER"), "Expected OTHER group"); + Assert.That(grouped[2].Item2, Has.Count.EqualTo(4), "Expected 4 in OTHER group"); + }); } } diff --git a/tests/SmiServices.UnitTests/Common/MongoDB/StubMongoCollection.cs b/tests/SmiServices.UnitTests/Common/MongoDB/StubMongoCollection.cs index ca0b644d5..9b4f85c17 100644 --- a/tests/SmiServices.UnitTests/Common/MongoDB/StubMongoCollection.cs +++ b/tests/SmiServices.UnitTests/Common/MongoDB/StubMongoCollection.cs @@ -10,133 +10,132 @@ // MongoDB upstream doesn't have nullable annotations, so cleanest to disable them for this file to match #nullable disable -namespace SmiServices.UnitTests.Common.MongoDB +namespace SmiServices.UnitTests.Common.MongoDB; + +/// +/// Abstract base class for mocking an IMongoCollection, parameterised by a key type and value type +/// +public abstract class StubMongoCollection : IMongoCollection where TKey : struct { - /// - /// Abstract base class for mocking an IMongoCollection, parameterised by a key type and value type - /// - public abstract class StubMongoCollection : IMongoCollection where TKey : struct - { - public virtual CollectionNamespace CollectionNamespace => throw new NotImplementedException(); - public virtual IMongoDatabase Database => throw new NotImplementedException(); - public virtual IBsonSerializer DocumentSerializer => throw new NotImplementedException(); - public virtual IMongoIndexManager Indexes => throw new NotImplementedException(); + public virtual CollectionNamespace CollectionNamespace => throw new NotImplementedException(); + public virtual IMongoDatabase Database => throw new NotImplementedException(); + public virtual IBsonSerializer DocumentSerializer => throw new 
NotImplementedException(); + public virtual IMongoIndexManager Indexes => throw new NotImplementedException(); - /// - public IMongoSearchIndexManager SearchIndexes => throw new NotImplementedException(); + /// + public IMongoSearchIndexManager SearchIndexes => throw new NotImplementedException(); - public virtual MongoCollectionSettings Settings => throw new NotImplementedException(); - public virtual IAsyncCursor Aggregate(PipelineDefinition pipeline, AggregateOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual IAsyncCursor Aggregate(IClientSessionHandle session, PipelineDefinition pipeline, AggregateOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task> AggregateAsync(PipelineDefinition pipeline, AggregateOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task> AggregateAsync(IClientSessionHandle session, PipelineDefinition pipeline, AggregateOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual BulkWriteResult BulkWrite(IEnumerable> requests, BulkWriteOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual BulkWriteResult BulkWrite(IClientSessionHandle session, IEnumerable> requests, BulkWriteOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task> BulkWriteAsync(IEnumerable> requests, BulkWriteOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task> BulkWriteAsync(IClientSessionHandle session, IEnumerable> requests, 
BulkWriteOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual long Count(FilterDefinition filter, CountOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual long Count(IClientSessionHandle session, FilterDefinition filter, CountOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task CountAsync(FilterDefinition filter, CountOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task CountAsync(IClientSessionHandle session, FilterDefinition filter, CountOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual long CountDocuments(FilterDefinition filter, CountOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual long CountDocuments(IClientSessionHandle session, FilterDefinition filter, CountOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task CountDocumentsAsync(FilterDefinition filter, CountOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task CountDocumentsAsync(IClientSessionHandle session, FilterDefinition filter, CountOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual DeleteResult DeleteMany(FilterDefinition filter, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual DeleteResult 
DeleteMany(FilterDefinition filter, DeleteOptions options, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual DeleteResult DeleteMany(IClientSessionHandle session, FilterDefinition filter, DeleteOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task DeleteManyAsync(FilterDefinition filter, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task DeleteManyAsync(FilterDefinition filter, DeleteOptions options, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task DeleteManyAsync(IClientSessionHandle session, FilterDefinition filter, DeleteOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual DeleteResult DeleteOne(FilterDefinition filter, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual DeleteResult DeleteOne(FilterDefinition filter, DeleteOptions options, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual DeleteResult DeleteOne(IClientSessionHandle session, FilterDefinition filter, DeleteOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task DeleteOneAsync(FilterDefinition filter, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task DeleteOneAsync(FilterDefinition filter, DeleteOptions options, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task DeleteOneAsync(IClientSessionHandle session, FilterDefinition 
filter, DeleteOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual IAsyncCursor Distinct(FieldDefinition field, FilterDefinition filter, DistinctOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual IAsyncCursor Distinct(IClientSessionHandle session, FieldDefinition field, FilterDefinition filter, DistinctOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task> DistinctAsync(FieldDefinition field, FilterDefinition filter, DistinctOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task> DistinctAsync(IClientSessionHandle session, FieldDefinition field, FilterDefinition filter, DistinctOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual MongoCollectionSettings Settings => throw new NotImplementedException(); + public virtual IAsyncCursor Aggregate(PipelineDefinition pipeline, AggregateOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual IAsyncCursor Aggregate(IClientSessionHandle session, PipelineDefinition pipeline, AggregateOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task> AggregateAsync(PipelineDefinition pipeline, AggregateOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task> AggregateAsync(IClientSessionHandle session, PipelineDefinition pipeline, AggregateOptions options = null, CancellationToken cancellationToken = new 
CancellationToken()) => throw new NotImplementedException(); + public virtual BulkWriteResult BulkWrite(IEnumerable> requests, BulkWriteOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual BulkWriteResult BulkWrite(IClientSessionHandle session, IEnumerable> requests, BulkWriteOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task> BulkWriteAsync(IEnumerable> requests, BulkWriteOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task> BulkWriteAsync(IClientSessionHandle session, IEnumerable> requests, BulkWriteOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual long Count(FilterDefinition filter, CountOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual long Count(IClientSessionHandle session, FilterDefinition filter, CountOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task CountAsync(FilterDefinition filter, CountOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task CountAsync(IClientSessionHandle session, FilterDefinition filter, CountOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual long CountDocuments(FilterDefinition filter, CountOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual long CountDocuments(IClientSessionHandle session, 
FilterDefinition filter, CountOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task CountDocumentsAsync(FilterDefinition filter, CountOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task CountDocumentsAsync(IClientSessionHandle session, FilterDefinition filter, CountOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual DeleteResult DeleteMany(FilterDefinition filter, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual DeleteResult DeleteMany(FilterDefinition filter, DeleteOptions options, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual DeleteResult DeleteMany(IClientSessionHandle session, FilterDefinition filter, DeleteOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task DeleteManyAsync(FilterDefinition filter, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task DeleteManyAsync(FilterDefinition filter, DeleteOptions options, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task DeleteManyAsync(IClientSessionHandle session, FilterDefinition filter, DeleteOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual DeleteResult DeleteOne(FilterDefinition filter, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual DeleteResult DeleteOne(FilterDefinition filter, 
DeleteOptions options, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual DeleteResult DeleteOne(IClientSessionHandle session, FilterDefinition filter, DeleteOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task DeleteOneAsync(FilterDefinition filter, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task DeleteOneAsync(FilterDefinition filter, DeleteOptions options, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task DeleteOneAsync(IClientSessionHandle session, FilterDefinition filter, DeleteOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual IAsyncCursor Distinct(FieldDefinition field, FilterDefinition filter, DistinctOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual IAsyncCursor Distinct(IClientSessionHandle session, FieldDefinition field, FilterDefinition filter, DistinctOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task> DistinctAsync(FieldDefinition field, FilterDefinition filter, DistinctOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task> DistinctAsync(IClientSessionHandle session, FieldDefinition field, FilterDefinition filter, DistinctOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public IAsyncCursor DistinctMany(FieldDefinition> field, FilterDefinition filter, DistinctOptions options = null, - 
CancellationToken cancellationToken = new CancellationToken()) => - throw new NotImplementedException(); + public IAsyncCursor DistinctMany(FieldDefinition> field, FilterDefinition filter, DistinctOptions options = null, + CancellationToken cancellationToken = new CancellationToken()) => + throw new NotImplementedException(); - public IAsyncCursor DistinctMany(IClientSessionHandle session, FieldDefinition> field, FilterDefinition filter, - DistinctOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => - throw new NotImplementedException(); + public IAsyncCursor DistinctMany(IClientSessionHandle session, FieldDefinition> field, FilterDefinition filter, + DistinctOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => + throw new NotImplementedException(); - public Task> DistinctManyAsync(FieldDefinition> field, FilterDefinition filter, DistinctOptions options = null, - CancellationToken cancellationToken = new CancellationToken()) => - throw new NotImplementedException(); + public Task> DistinctManyAsync(FieldDefinition> field, FilterDefinition filter, DistinctOptions options = null, + CancellationToken cancellationToken = new CancellationToken()) => + throw new NotImplementedException(); - public Task> DistinctManyAsync(IClientSessionHandle session, FieldDefinition> field, FilterDefinition filter, - DistinctOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => - throw new NotImplementedException(); + public Task> DistinctManyAsync(IClientSessionHandle session, FieldDefinition> field, FilterDefinition filter, + DistinctOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => + throw new NotImplementedException(); - public virtual long EstimatedDocumentCount(EstimatedDocumentCountOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task 
EstimatedDocumentCountAsync(EstimatedDocumentCountOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual IAsyncCursor FindSync(FilterDefinition filter, FindOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual IAsyncCursor FindSync(IClientSessionHandle session, FilterDefinition filter, FindOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task> FindAsync(FilterDefinition filter, FindOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task> FindAsync(IClientSessionHandle session, FilterDefinition filter, FindOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual TProjection FindOneAndDelete(FilterDefinition filter, FindOneAndDeleteOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual TProjection FindOneAndDelete(IClientSessionHandle session, FilterDefinition filter, FindOneAndDeleteOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task FindOneAndDeleteAsync(FilterDefinition filter, FindOneAndDeleteOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task FindOneAndDeleteAsync(IClientSessionHandle session, FilterDefinition filter, FindOneAndDeleteOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual TProjection FindOneAndReplace(FilterDefinition filter, TVal 
replacement, FindOneAndReplaceOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual TProjection FindOneAndReplace(IClientSessionHandle session, FilterDefinition filter, TVal replacement, FindOneAndReplaceOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task FindOneAndReplaceAsync(FilterDefinition filter, TVal replacement, FindOneAndReplaceOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task FindOneAndReplaceAsync(IClientSessionHandle session, FilterDefinition filter, TVal replacement, FindOneAndReplaceOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual TProjection FindOneAndUpdate(FilterDefinition filter, UpdateDefinition update, FindOneAndUpdateOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual TProjection FindOneAndUpdate(IClientSessionHandle session, FilterDefinition filter, UpdateDefinition update, FindOneAndUpdateOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task FindOneAndUpdateAsync(FilterDefinition filter, UpdateDefinition update, FindOneAndUpdateOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task FindOneAndUpdateAsync(IClientSessionHandle session, FilterDefinition filter, UpdateDefinition update, FindOneAndUpdateOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual void InsertOne(TVal document, InsertOneOptions options 
= null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual void InsertOne(IClientSessionHandle session, TVal document, InsertOneOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task InsertOneAsync(TVal document, CancellationToken cancellationToken) => throw new NotImplementedException(); - public virtual Task InsertOneAsync(TVal document, InsertOneOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task InsertOneAsync(IClientSessionHandle session, TVal document, InsertOneOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual void InsertMany(IEnumerable documents, InsertManyOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual void InsertMany(IClientSessionHandle session, IEnumerable documents, InsertManyOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task InsertManyAsync(IEnumerable documents, InsertManyOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task InsertManyAsync(IClientSessionHandle session, IEnumerable documents, InsertManyOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - [Obsolete("n/a")] - public virtual IAsyncCursor MapReduce(BsonJavaScript map, BsonJavaScript reduce, MapReduceOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - [Obsolete("n/a")] - public virtual IAsyncCursor 
MapReduce(IClientSessionHandle session, BsonJavaScript map, BsonJavaScript reduce, MapReduceOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - [Obsolete("n/a")] - public virtual Task> MapReduceAsync(BsonJavaScript map, BsonJavaScript reduce, MapReduceOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - [Obsolete("n/a")] - public virtual Task> MapReduceAsync(IClientSessionHandle session, BsonJavaScript map, BsonJavaScript reduce, MapReduceOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual IFilteredMongoCollection OfType() where TDerivedDocument : TVal => throw new NotImplementedException(); - public virtual ReplaceOneResult ReplaceOne(FilterDefinition filter, TVal replacement, ReplaceOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual ReplaceOneResult ReplaceOne(FilterDefinition filter, TVal replacement, UpdateOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual ReplaceOneResult ReplaceOne(IClientSessionHandle session, FilterDefinition filter, TVal replacement, ReplaceOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual ReplaceOneResult ReplaceOne(IClientSessionHandle session, FilterDefinition filter, TVal replacement, UpdateOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task ReplaceOneAsync(FilterDefinition filter, TVal replacement, ReplaceOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new 
NotImplementedException(); - public virtual Task ReplaceOneAsync(FilterDefinition filter, TVal replacement, UpdateOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task ReplaceOneAsync(IClientSessionHandle session, FilterDefinition filter, TVal replacement, ReplaceOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task ReplaceOneAsync(IClientSessionHandle session, FilterDefinition filter, TVal replacement, UpdateOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual UpdateResult UpdateMany(FilterDefinition filter, UpdateDefinition update, UpdateOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual UpdateResult UpdateMany(IClientSessionHandle session, FilterDefinition filter, UpdateDefinition update, UpdateOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task UpdateManyAsync(FilterDefinition filter, UpdateDefinition update, UpdateOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task UpdateManyAsync(IClientSessionHandle session, FilterDefinition filter, UpdateDefinition update, UpdateOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual UpdateResult UpdateOne(FilterDefinition filter, UpdateDefinition update, UpdateOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual UpdateResult UpdateOne(IClientSessionHandle session, FilterDefinition filter, 
UpdateDefinition update, UpdateOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task UpdateOneAsync(FilterDefinition filter, UpdateDefinition update, UpdateOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task UpdateOneAsync(IClientSessionHandle session, FilterDefinition filter, UpdateDefinition update, UpdateOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual IChangeStreamCursor Watch(PipelineDefinition, TResult> pipeline, ChangeStreamOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual IChangeStreamCursor Watch(IClientSessionHandle session, PipelineDefinition, TResult> pipeline, ChangeStreamOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task> WatchAsync(PipelineDefinition, TResult> pipeline, ChangeStreamOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task> WatchAsync(IClientSessionHandle session, PipelineDefinition, TResult> pipeline, ChangeStreamOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual IMongoCollection WithReadConcern(ReadConcern readConcern) => throw new NotImplementedException(); - public virtual IMongoCollection WithReadPreference(ReadPreference readPreference) => throw new NotImplementedException(); - public virtual IMongoCollection WithWriteConcern(WriteConcern writeConcern) => throw new NotImplementedException(); - public void AggregateToCollection(PipelineDefinition pipeline, AggregateOptions 
options = null, CancellationToken cancellationToken = default) => throw new NotImplementedException(); - public void AggregateToCollection(IClientSessionHandle session, PipelineDefinition pipeline, AggregateOptions options = null, CancellationToken cancellationToken = default) => throw new NotImplementedException(); - public Task AggregateToCollectionAsync(PipelineDefinition pipeline, AggregateOptions options = null, CancellationToken cancellationToken = default) => throw new NotImplementedException(); - public Task AggregateToCollectionAsync(IClientSessionHandle session, PipelineDefinition pipeline, AggregateOptions options = null, CancellationToken cancellationToken = default) => throw new NotImplementedException(); - } + public virtual long EstimatedDocumentCount(EstimatedDocumentCountOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task EstimatedDocumentCountAsync(EstimatedDocumentCountOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual IAsyncCursor FindSync(FilterDefinition filter, FindOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual IAsyncCursor FindSync(IClientSessionHandle session, FilterDefinition filter, FindOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task> FindAsync(FilterDefinition filter, FindOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task> FindAsync(IClientSessionHandle session, FilterDefinition filter, FindOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual TProjection 
FindOneAndDelete(FilterDefinition filter, FindOneAndDeleteOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual TProjection FindOneAndDelete(IClientSessionHandle session, FilterDefinition filter, FindOneAndDeleteOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task FindOneAndDeleteAsync(FilterDefinition filter, FindOneAndDeleteOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task FindOneAndDeleteAsync(IClientSessionHandle session, FilterDefinition filter, FindOneAndDeleteOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual TProjection FindOneAndReplace(FilterDefinition filter, TVal replacement, FindOneAndReplaceOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual TProjection FindOneAndReplace(IClientSessionHandle session, FilterDefinition filter, TVal replacement, FindOneAndReplaceOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task FindOneAndReplaceAsync(FilterDefinition filter, TVal replacement, FindOneAndReplaceOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task FindOneAndReplaceAsync(IClientSessionHandle session, FilterDefinition filter, TVal replacement, FindOneAndReplaceOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual TProjection FindOneAndUpdate(FilterDefinition filter, UpdateDefinition update, FindOneAndUpdateOptions 
options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual TProjection FindOneAndUpdate(IClientSessionHandle session, FilterDefinition filter, UpdateDefinition update, FindOneAndUpdateOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task FindOneAndUpdateAsync(FilterDefinition filter, UpdateDefinition update, FindOneAndUpdateOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task FindOneAndUpdateAsync(IClientSessionHandle session, FilterDefinition filter, UpdateDefinition update, FindOneAndUpdateOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual void InsertOne(TVal document, InsertOneOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual void InsertOne(IClientSessionHandle session, TVal document, InsertOneOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task InsertOneAsync(TVal document, CancellationToken cancellationToken) => throw new NotImplementedException(); + public virtual Task InsertOneAsync(TVal document, InsertOneOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task InsertOneAsync(IClientSessionHandle session, TVal document, InsertOneOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual void InsertMany(IEnumerable documents, InsertManyOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new 
NotImplementedException(); + public virtual void InsertMany(IClientSessionHandle session, IEnumerable documents, InsertManyOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task InsertManyAsync(IEnumerable documents, InsertManyOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task InsertManyAsync(IClientSessionHandle session, IEnumerable documents, InsertManyOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + [Obsolete("n/a")] + public virtual IAsyncCursor MapReduce(BsonJavaScript map, BsonJavaScript reduce, MapReduceOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + [Obsolete("n/a")] + public virtual IAsyncCursor MapReduce(IClientSessionHandle session, BsonJavaScript map, BsonJavaScript reduce, MapReduceOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + [Obsolete("n/a")] + public virtual Task> MapReduceAsync(BsonJavaScript map, BsonJavaScript reduce, MapReduceOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + [Obsolete("n/a")] + public virtual Task> MapReduceAsync(IClientSessionHandle session, BsonJavaScript map, BsonJavaScript reduce, MapReduceOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual IFilteredMongoCollection OfType() where TDerivedDocument : TVal => throw new NotImplementedException(); + public virtual ReplaceOneResult ReplaceOne(FilterDefinition filter, TVal replacement, ReplaceOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw 
new NotImplementedException(); + public virtual ReplaceOneResult ReplaceOne(FilterDefinition filter, TVal replacement, UpdateOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual ReplaceOneResult ReplaceOne(IClientSessionHandle session, FilterDefinition filter, TVal replacement, ReplaceOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual ReplaceOneResult ReplaceOne(IClientSessionHandle session, FilterDefinition filter, TVal replacement, UpdateOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task ReplaceOneAsync(FilterDefinition filter, TVal replacement, ReplaceOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task ReplaceOneAsync(FilterDefinition filter, TVal replacement, UpdateOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task ReplaceOneAsync(IClientSessionHandle session, FilterDefinition filter, TVal replacement, ReplaceOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task ReplaceOneAsync(IClientSessionHandle session, FilterDefinition filter, TVal replacement, UpdateOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual UpdateResult UpdateMany(FilterDefinition filter, UpdateDefinition update, UpdateOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual UpdateResult UpdateMany(IClientSessionHandle session, FilterDefinition filter, 
UpdateDefinition update, UpdateOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task UpdateManyAsync(FilterDefinition filter, UpdateDefinition update, UpdateOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task UpdateManyAsync(IClientSessionHandle session, FilterDefinition filter, UpdateDefinition update, UpdateOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual UpdateResult UpdateOne(FilterDefinition filter, UpdateDefinition update, UpdateOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual UpdateResult UpdateOne(IClientSessionHandle session, FilterDefinition filter, UpdateDefinition update, UpdateOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task UpdateOneAsync(FilterDefinition filter, UpdateDefinition update, UpdateOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task UpdateOneAsync(IClientSessionHandle session, FilterDefinition filter, UpdateDefinition update, UpdateOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual IChangeStreamCursor Watch(PipelineDefinition, TResult> pipeline, ChangeStreamOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual IChangeStreamCursor Watch(IClientSessionHandle session, PipelineDefinition, TResult> pipeline, ChangeStreamOptions options = null, CancellationToken cancellationToken = new 
CancellationToken()) => throw new NotImplementedException(); + public virtual Task> WatchAsync(PipelineDefinition, TResult> pipeline, ChangeStreamOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task> WatchAsync(IClientSessionHandle session, PipelineDefinition, TResult> pipeline, ChangeStreamOptions options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual IMongoCollection WithReadConcern(ReadConcern readConcern) => throw new NotImplementedException(); + public virtual IMongoCollection WithReadPreference(ReadPreference readPreference) => throw new NotImplementedException(); + public virtual IMongoCollection WithWriteConcern(WriteConcern writeConcern) => throw new NotImplementedException(); + public void AggregateToCollection(PipelineDefinition pipeline, AggregateOptions options = null, CancellationToken cancellationToken = default) => throw new NotImplementedException(); + public void AggregateToCollection(IClientSessionHandle session, PipelineDefinition pipeline, AggregateOptions options = null, CancellationToken cancellationToken = default) => throw new NotImplementedException(); + public Task AggregateToCollectionAsync(PipelineDefinition pipeline, AggregateOptions options = null, CancellationToken cancellationToken = default) => throw new NotImplementedException(); + public Task AggregateToCollectionAsync(IClientSessionHandle session, PipelineDefinition pipeline, AggregateOptions options = null, CancellationToken cancellationToken = default) => throw new NotImplementedException(); } diff --git a/tests/SmiServices.UnitTests/Common/MongoDB/StubMongoDatabase.cs b/tests/SmiServices.UnitTests/Common/MongoDB/StubMongoDatabase.cs index 8d31faa18..bf31a845d 100644 --- a/tests/SmiServices.UnitTests/Common/MongoDB/StubMongoDatabase.cs +++ 
b/tests/SmiServices.UnitTests/Common/MongoDB/StubMongoDatabase.cs @@ -5,81 +5,80 @@ using System.Threading.Tasks; -namespace SmiServices.UnitTests.Common.MongoDB -{ - /// - /// Abstract base class for mocking an IMongoDatabase - /// - public abstract class StubMongoDatabase : IMongoDatabase - { - public virtual IMongoClient Client { get; } = null!; - public virtual DatabaseNamespace DatabaseNamespace { get; } = null!; - public virtual MongoDatabaseSettings Settings { get; } = null!; - public virtual IAsyncCursor Aggregate(PipelineDefinition pipeline, AggregateOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual IAsyncCursor Aggregate(IClientSessionHandle session, PipelineDefinition pipeline, AggregateOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task> AggregateAsync(PipelineDefinition pipeline, AggregateOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task> AggregateAsync(IClientSessionHandle session, PipelineDefinition pipeline, AggregateOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public void AggregateToCollection(PipelineDefinition pipeline, AggregateOptions? options = null, CancellationToken cancellationToken = default) => throw new NotImplementedException(); - public void AggregateToCollection(IClientSessionHandle session, PipelineDefinition pipeline, AggregateOptions? options = null, CancellationToken cancellationToken = default) => throw new NotImplementedException(); - public Task AggregateToCollectionAsync(PipelineDefinition pipeline, AggregateOptions? 
options = null, CancellationToken cancellationToken = default) => throw new NotImplementedException(); - public Task AggregateToCollectionAsync(IClientSessionHandle session, PipelineDefinition pipeline, AggregateOptions? options = null, CancellationToken cancellationToken = default) => throw new NotImplementedException(); - public virtual void CreateCollection(string name, CreateCollectionOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual void CreateCollection(IClientSessionHandle session, string name, CreateCollectionOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task CreateCollectionAsync(string name, CreateCollectionOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task CreateCollectionAsync(IClientSessionHandle session, string name, CreateCollectionOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual void CreateView(string viewName, string viewOn, PipelineDefinition pipeline, CreateViewOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual void CreateView(IClientSessionHandle session, string viewName, string viewOn, PipelineDefinition pipeline, CreateViewOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task CreateViewAsync(string viewName, string viewOn, PipelineDefinition pipeline, CreateViewOptions? 
options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task CreateViewAsync(IClientSessionHandle session, string viewName, string viewOn, PipelineDefinition pipeline, CreateViewOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual void DropCollection(string name, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); +namespace SmiServices.UnitTests.Common.MongoDB; - public virtual void DropCollection(string name, DropCollectionOptions options, - CancellationToken cancellationToken = new CancellationToken()) => - throw new NotImplementedException(); +/// +/// Abstract base class for mocking an IMongoDatabase +/// +public abstract class StubMongoDatabase : IMongoDatabase +{ + public virtual IMongoClient Client { get; } = null!; + public virtual DatabaseNamespace DatabaseNamespace { get; } = null!; + public virtual MongoDatabaseSettings Settings { get; } = null!; + public virtual IAsyncCursor Aggregate(PipelineDefinition pipeline, AggregateOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual IAsyncCursor Aggregate(IClientSessionHandle session, PipelineDefinition pipeline, AggregateOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task> AggregateAsync(PipelineDefinition pipeline, AggregateOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task> AggregateAsync(IClientSessionHandle session, PipelineDefinition pipeline, AggregateOptions? 
options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public void AggregateToCollection(PipelineDefinition pipeline, AggregateOptions? options = null, CancellationToken cancellationToken = default) => throw new NotImplementedException(); + public void AggregateToCollection(IClientSessionHandle session, PipelineDefinition pipeline, AggregateOptions? options = null, CancellationToken cancellationToken = default) => throw new NotImplementedException(); + public Task AggregateToCollectionAsync(PipelineDefinition pipeline, AggregateOptions? options = null, CancellationToken cancellationToken = default) => throw new NotImplementedException(); + public Task AggregateToCollectionAsync(IClientSessionHandle session, PipelineDefinition pipeline, AggregateOptions? options = null, CancellationToken cancellationToken = default) => throw new NotImplementedException(); + public virtual void CreateCollection(string name, CreateCollectionOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual void CreateCollection(IClientSessionHandle session, string name, CreateCollectionOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task CreateCollectionAsync(string name, CreateCollectionOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task CreateCollectionAsync(IClientSessionHandle session, string name, CreateCollectionOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual void CreateView(string viewName, string viewOn, PipelineDefinition pipeline, CreateViewOptions? 
options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual void CreateView(IClientSessionHandle session, string viewName, string viewOn, PipelineDefinition pipeline, CreateViewOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task CreateViewAsync(string viewName, string viewOn, PipelineDefinition pipeline, CreateViewOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task CreateViewAsync(IClientSessionHandle session, string viewName, string viewOn, PipelineDefinition pipeline, CreateViewOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual void DropCollection(string name, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual void DropCollection(IClientSessionHandle session, string name, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual void DropCollection(string name, DropCollectionOptions options, + CancellationToken cancellationToken = new CancellationToken()) => + throw new NotImplementedException(); - public virtual void DropCollection(IClientSessionHandle session, string name, DropCollectionOptions options, - CancellationToken cancellationToken = new CancellationToken()) => - throw new NotImplementedException(); + public virtual void DropCollection(IClientSessionHandle session, string name, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task DropCollectionAsync(string name, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual void 
DropCollection(IClientSessionHandle session, string name, DropCollectionOptions options, + CancellationToken cancellationToken = new CancellationToken()) => + throw new NotImplementedException(); - public virtual Task DropCollectionAsync(string name, DropCollectionOptions options, - CancellationToken cancellationToken = new CancellationToken()) => - throw new NotImplementedException(); + public virtual Task DropCollectionAsync(string name, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task DropCollectionAsync(IClientSessionHandle session, string name, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task DropCollectionAsync(string name, DropCollectionOptions options, + CancellationToken cancellationToken = new CancellationToken()) => + throw new NotImplementedException(); - public virtual Task DropCollectionAsync(IClientSessionHandle session, string name, DropCollectionOptions options, - CancellationToken cancellationToken = new CancellationToken()) - { - throw new NotImplementedException(); - } + public virtual Task DropCollectionAsync(IClientSessionHandle session, string name, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual IMongoCollection GetCollection(string name, MongoCollectionSettings? settings = null) => throw new NotImplementedException(); - public virtual IAsyncCursor ListCollectionNames(ListCollectionNamesOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual IAsyncCursor ListCollectionNames(IClientSessionHandle session, ListCollectionNamesOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task> ListCollectionNamesAsync(ListCollectionNamesOptions? 
options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task> ListCollectionNamesAsync(IClientSessionHandle session, ListCollectionNamesOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual IAsyncCursor ListCollections(ListCollectionsOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual IAsyncCursor ListCollections(IClientSessionHandle session, ListCollectionsOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task> ListCollectionsAsync(ListCollectionsOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task> ListCollectionsAsync(IClientSessionHandle session, ListCollectionsOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual void RenameCollection(string oldName, string newName, RenameCollectionOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual void RenameCollection(IClientSessionHandle session, string oldName, string newName, RenameCollectionOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task RenameCollectionAsync(string oldName, string newName, RenameCollectionOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task RenameCollectionAsync(IClientSessionHandle session, string oldName, string newName, RenameCollectionOptions? 
options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual TResult RunCommand(Command command, ReadPreference? readPreference = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual TResult RunCommand(IClientSessionHandle session, Command command, ReadPreference? readPreference = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task RunCommandAsync(Command command, ReadPreference? readPreference = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task RunCommandAsync(IClientSessionHandle session, Command command, ReadPreference? readPreference = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual IChangeStreamCursor Watch(PipelineDefinition, TResult> pipeline, ChangeStreamOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual IChangeStreamCursor Watch(IClientSessionHandle session, PipelineDefinition, TResult> pipeline, ChangeStreamOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task> WatchAsync(PipelineDefinition, TResult> pipeline, ChangeStreamOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual Task> WatchAsync(IClientSessionHandle session, PipelineDefinition, TResult> pipeline, ChangeStreamOptions? 
options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); - public virtual IMongoDatabase WithReadConcern(ReadConcern readConcern) => throw new NotImplementedException(); - public virtual IMongoDatabase WithReadPreference(ReadPreference readPreference) => throw new NotImplementedException(); - public virtual IMongoDatabase WithWriteConcern(WriteConcern writeConcern) => throw new NotImplementedException(); + public virtual Task DropCollectionAsync(IClientSessionHandle session, string name, DropCollectionOptions options, + CancellationToken cancellationToken = new CancellationToken()) + { + throw new NotImplementedException(); } + + public virtual IMongoCollection GetCollection(string name, MongoCollectionSettings? settings = null) => throw new NotImplementedException(); + public virtual IAsyncCursor ListCollectionNames(ListCollectionNamesOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual IAsyncCursor ListCollectionNames(IClientSessionHandle session, ListCollectionNamesOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task> ListCollectionNamesAsync(ListCollectionNamesOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task> ListCollectionNamesAsync(IClientSessionHandle session, ListCollectionNamesOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual IAsyncCursor ListCollections(ListCollectionsOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual IAsyncCursor ListCollections(IClientSessionHandle session, ListCollectionsOptions? 
options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task> ListCollectionsAsync(ListCollectionsOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task> ListCollectionsAsync(IClientSessionHandle session, ListCollectionsOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual void RenameCollection(string oldName, string newName, RenameCollectionOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual void RenameCollection(IClientSessionHandle session, string oldName, string newName, RenameCollectionOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task RenameCollectionAsync(string oldName, string newName, RenameCollectionOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task RenameCollectionAsync(IClientSessionHandle session, string oldName, string newName, RenameCollectionOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual TResult RunCommand(Command command, ReadPreference? readPreference = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual TResult RunCommand(IClientSessionHandle session, Command command, ReadPreference? readPreference = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task RunCommandAsync(Command command, ReadPreference? 
readPreference = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task RunCommandAsync(IClientSessionHandle session, Command command, ReadPreference? readPreference = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual IChangeStreamCursor Watch(PipelineDefinition, TResult> pipeline, ChangeStreamOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual IChangeStreamCursor Watch(IClientSessionHandle session, PipelineDefinition, TResult> pipeline, ChangeStreamOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task> WatchAsync(PipelineDefinition, TResult> pipeline, ChangeStreamOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual Task> WatchAsync(IClientSessionHandle session, PipelineDefinition, TResult> pipeline, ChangeStreamOptions? 
options = null, CancellationToken cancellationToken = new CancellationToken()) => throw new NotImplementedException(); + public virtual IMongoDatabase WithReadConcern(ReadConcern readConcern) => throw new NotImplementedException(); + public virtual IMongoDatabase WithReadPreference(ReadPreference readPreference) => throw new NotImplementedException(); + public virtual IMongoDatabase WithWriteConcern(WriteConcern writeConcern) => throw new NotImplementedException(); } diff --git a/tests/SmiServices.UnitTests/Common/Options/FileSystemOptionsTests.cs b/tests/SmiServices.UnitTests/Common/Options/FileSystemOptionsTests.cs index 55df54fab..bd7e6caeb 100644 --- a/tests/SmiServices.UnitTests/Common/Options/FileSystemOptionsTests.cs +++ b/tests/SmiServices.UnitTests/Common/Options/FileSystemOptionsTests.cs @@ -1,25 +1,24 @@ using NUnit.Framework; using SmiServices.Common.Options; -namespace SmiServices.UnitTests.Common.Options +namespace SmiServices.UnitTests.Common.Options; + +public class FileSystemOptionsTests { - public class FileSystemOptionsTests + [Test] + public void TestFileSystemOptions_AsLinuxRootDir() { - [Test] - public void TestFileSystemOptions_AsLinuxRootDir() - { - var opts = new FileSystemOptions - { - FileSystemRoot = "/", - ExtractRoot = "/", - }; + var opts = new FileSystemOptions + { + FileSystemRoot = "/", + ExtractRoot = "/", + }; - Assert.Multiple(() => - { - Assert.That(opts.FileSystemRoot, Is.EqualTo("/")); - Assert.That(opts.ExtractRoot, Is.EqualTo("/")); - }); - } + Assert.Multiple(() => + { + Assert.That(opts.FileSystemRoot, Is.EqualTo("/")); + Assert.That(opts.ExtractRoot, Is.EqualTo("/")); + }); } } diff --git a/tests/SmiServices.UnitTests/Common/Options/SmiCliInitTests.cs b/tests/SmiServices.UnitTests/Common/Options/SmiCliInitTests.cs index b0409e873..2c054ab20 100644 --- a/tests/SmiServices.UnitTests/Common/Options/SmiCliInitTests.cs +++ b/tests/SmiServices.UnitTests/Common/Options/SmiCliInitTests.cs @@ -3,119 +3,118 @@ using 
SmiServices.Common.Options; -namespace SmiServices.UnitTests.Common.Options +namespace SmiServices.UnitTests.Common.Options; + +public class SmiCliInitTests { - public class SmiCliInitTests + #region Fixture Methods + + [OneTimeSetUp] + public void OneTimeSetUp() { - #region Fixture Methods + SmiCliInit.InitSmiLogging = false; + } - [OneTimeSetUp] - public void OneTimeSetUp() - { - SmiCliInit.InitSmiLogging = false; - } + [OneTimeTearDown] + public void OneTimeTearDown() { } - [OneTimeTearDown] - public void OneTimeTearDown() { } + #endregion - #endregion + #region Test Methods - #region Test Methods + [SetUp] + public void SetUp() { } - [SetUp] - public void SetUp() { } + [TearDown] + public void TearDown() { } - [TearDown] - public void TearDown() { } + private class FakeCliOpts : CliOptions + { + [Option( + 'f', "foo", + Required = false + )] + public string? Foo { get; set; } + } - private class FakeCliOpts : CliOptions + [Verb("fake")] + private class FakeCliVerbOpts : CliOptions + { + [Option( + 'f', "foo", + Required = false + )] + public string? Foo { get; set; } + } + + #endregion + + #region Tests + + [Test] + public void SmiCliInit_SingleParser_HappyPath() + { + static int OnParse(GlobalOptions globals, FakeCliOpts opts) { - [Option( - 'f', "foo", - Required = false - )] - public string? Foo { get; set; } + if (opts.Foo == "bar") + return 123; + return -1; } - [Verb("fake")] - private class FakeCliVerbOpts : CliOptions + var args = new[] { - [Option( - 'f', "foo", - Required = false - )] - public string? 
Foo { get; set; } - } + "-y", "default.yaml", + "-f", "bar" + }; - #endregion + int ret = SmiCliInit.ParseAndRun(args, nameof(SmiCliInit_SingleParser_HappyPath), OnParse); + Assert.That(ret, Is.EqualTo(123)); + } - #region Tests + [Test] + public void SmiCliInit_SingleParser_Help_ReturnsZero() + { + var args = new[] { "--help" }; - [Test] - public void SmiCliInit_SingleParser_HappyPath() - { - static int OnParse(GlobalOptions globals, FakeCliOpts opts) - { - if (opts.Foo == "bar") - return 123; - return -1; - } - - var args = new[] - { - "-y", "default.yaml", - "-f", "bar" - }; - - int ret = SmiCliInit.ParseAndRun(args, nameof(SmiCliInit_SingleParser_HappyPath), OnParse); - Assert.That(ret, Is.EqualTo(123)); - } + int ret = SmiCliInit.ParseAndRun(args, nameof(SmiCliInit_SingleParser_Help_ReturnsZero), (_, __) => -1); + Assert.That(ret, Is.EqualTo(0)); + } - [Test] - public void SmiCliInit_SingleParser_Help_ReturnsZero() + [Test] + public void SmiCliInit_VerbParser_HappyPath() + { + static int OnParse(GlobalOptions globals, object parsed) { - var args = new[] { "--help" }; + var opts = SmiCliInit.Verify(parsed); - int ret = SmiCliInit.ParseAndRun(args, nameof(SmiCliInit_SingleParser_Help_ReturnsZero), (_, __) => -1); - Assert.That(ret, Is.EqualTo(0)); + if (opts.Foo == "bar") + return 123; + return -1; } - [Test] - public void SmiCliInit_VerbParser_HappyPath() + var args = new[] { - static int OnParse(GlobalOptions globals, object parsed) - { - var opts = SmiCliInit.Verify(parsed); - - if (opts.Foo == "bar") - return 123; - return -1; - } - - var args = new[] - { - "fake", - "-y", "default.yaml", - "-f", "bar" - }; - - int ret = SmiCliInit.ParseAndRun(args, nameof(SmiCliInit_VerbParser_HappyPath), [typeof(FakeCliVerbOpts)], OnParse); - Assert.That(ret, Is.EqualTo(123)); - } + "fake", + "-y", "default.yaml", + "-f", "bar" + }; + + int ret = SmiCliInit.ParseAndRun(args, nameof(SmiCliInit_VerbParser_HappyPath), [typeof(FakeCliVerbOpts)], OnParse); + Assert.That(ret, 
Is.EqualTo(123)); + } - [Test] - public void SmiCliInit_VerbParser_Help_ReturnsZero() + [Test] + public void SmiCliInit_VerbParser_Help_ReturnsZero() + { + var args = new[] { - var args = new[] - { - "fake", - "--help" - }; - - int ret = SmiCliInit.ParseAndRun(args, nameof(SmiCliInit_VerbParser_Help_ReturnsZero), [typeof(FakeCliVerbOpts)], (_, __) => -1); - Assert.That(ret, Is.EqualTo(0)); - } + "fake", + "--help" + }; - #endregion + int ret = SmiCliInit.ParseAndRun(args, nameof(SmiCliInit_VerbParser_Help_ReturnsZero), [typeof(FakeCliVerbOpts)], (_, __) => -1); + Assert.That(ret, Is.EqualTo(0)); } + + #endregion } diff --git a/tests/SmiServices.UnitTests/Common/TestClassTemplate.cs b/tests/SmiServices.UnitTests/Common/TestClassTemplate.cs index 22ba6e2d3..de8f5591a 100644 --- a/tests/SmiServices.UnitTests/Common/TestClassTemplate.cs +++ b/tests/SmiServices.UnitTests/Common/TestClassTemplate.cs @@ -1,52 +1,51 @@ using NUnit.Framework; -namespace SmiServices.UnitTests.Common +namespace SmiServices.UnitTests.Common; + +/// +/// Template for test classes. The test class name should match the class under test + 'Test', e.g. 'FooTests'. The test project layout should match the source project layout wherever possible, e.g.: +/// FooProj +/// - FooClass.cs +/// - BarDirectory +/// - BarClass.cs +/// FooProj.Test +/// - FooClassTest.cs +/// - BarDirectory +/// - BarClassTest.cs +/// +public class TestClassTemplate { - /// - /// Template for test classes. The test class name should match the class under test + 'Test', e.g. 'FooTests'. 
The test project layout should match the source project layout wherever possible, e.g.: - /// FooProj - /// - FooClass.cs - /// - BarDirectory - /// - BarClass.cs - /// FooProj.Test - /// - FooClassTest.cs - /// - BarDirectory - /// - BarClassTest.cs - /// - public class TestClassTemplate - { - #region Fixture Methods + #region Fixture Methods - [OneTimeSetUp] - public void OneTimeSetUp() - { - } + [OneTimeSetUp] + public void OneTimeSetUp() + { + } - [OneTimeTearDown] - public void OneTimeTearDown() { } + [OneTimeTearDown] + public void OneTimeTearDown() { } - #endregion + #endregion - #region Test Methods + #region Test Methods - [SetUp] - public void SetUp() { } + [SetUp] + public void SetUp() { } - [TearDown] - public void TearDown() { } + [TearDown] + public void TearDown() { } - #endregion + #endregion - #region Tests + #region Tests - /// - /// Test names should concisely describe what is being tested and what the expected result is, e.g. - /// - MethodName_ReturnsFoo_WhenBar - /// - MethodName_ThrowsException_OnInvalidBar - /// - [Test] - public void TestTemplate() { } + /// + /// Test names should concisely describe what is being tested and what the expected result is, e.g. 
+ /// - MethodName_ReturnsFoo_WhenBar + /// - MethodName_ThrowsException_OnInvalidBar + /// + [Test] + public void TestTemplate() { } - #endregion - } + #endregion } diff --git a/tests/SmiServices.UnitTests/Common/TestData.cs b/tests/SmiServices.UnitTests/Common/TestData.cs index e31dbbe57..62de9af2b 100644 --- a/tests/SmiServices.UnitTests/Common/TestData.cs +++ b/tests/SmiServices.UnitTests/Common/TestData.cs @@ -1,36 +1,35 @@ using NUnit.Framework; using System.IO; -namespace SmiServices.UnitTests.Common +namespace SmiServices.UnitTests.Common; + +public sealed class TestData { - public sealed class TestData - { - // Paths to the test DICOM files relative to TestContext.CurrentContext.TestDirectory - // TODO(rkm 2020-11-16) Enum-ify these members so they can be strongly-typed instead of stringly-typed - private const string TEST_DATA_DIR = "TestData"; - public string IMG_013 = Path.Combine(TEST_DATA_DIR, "IM-0001-0013.dcm"); - public string IMG_019 = Path.Combine(TEST_DATA_DIR, "IM-0001-0019.dcm"); - public string IMG_024 = Path.Combine(TEST_DATA_DIR, "IM-0001-0024.dcm"); - public string MANY_TAGS = Path.Combine(TEST_DATA_DIR, "FileWithLotsOfTags.dcm"); - public string INVALID_DICOM = Path.Combine(TEST_DATA_DIR, "NotADicomFile.txt"); - public string BURNED_IN_TEXT_IMG = Path.Combine(TEST_DATA_DIR, "burned-in-text-test.dcm"); + // Paths to the test DICOM files relative to TestContext.CurrentContext.TestDirectory + // TODO(rkm 2020-11-16) Enum-ify these members so they can be strongly-typed instead of stringly-typed + private const string TEST_DATA_DIR = "TestData"; + public string IMG_013 = Path.Combine(TEST_DATA_DIR, "IM-0001-0013.dcm"); + public string IMG_019 = Path.Combine(TEST_DATA_DIR, "IM-0001-0019.dcm"); + public string IMG_024 = Path.Combine(TEST_DATA_DIR, "IM-0001-0024.dcm"); + public string MANY_TAGS = Path.Combine(TEST_DATA_DIR, "FileWithLotsOfTags.dcm"); + public string INVALID_DICOM = Path.Combine(TEST_DATA_DIR, "NotADicomFile.txt"); + public string 
BURNED_IN_TEXT_IMG = Path.Combine(TEST_DATA_DIR, "burned-in-text-test.dcm"); - /// - /// Creates the test image in the file location specified - /// - /// - /// The test file to create, should be a static member of this class. Defaults to - /// - public FileInfo Create(FileInfo fileInfo, string? testFile = null) - { - var from = Path.Combine(TestContext.CurrentContext.TestDirectory, testFile ?? IMG_013); + /// + /// Creates the test image in the file location specified + /// + /// + /// The test file to create, should be a static member of this class. Defaults to + /// + public FileInfo Create(FileInfo fileInfo, string? testFile = null) + { + var from = Path.Combine(TestContext.CurrentContext.TestDirectory, testFile ?? IMG_013); - if (!fileInfo.Directory!.Exists) - fileInfo.Directory.Create(); + if (!fileInfo.Directory!.Exists) + fileInfo.Directory.Create(); - File.Copy(from, fileInfo.FullName, true); + File.Copy(from, fileInfo.FullName, true); - return fileInfo; - } + return fileInfo; } } diff --git a/tests/SmiServices.UnitTests/Common/TestDateTimeProvider.cs b/tests/SmiServices.UnitTests/Common/TestDateTimeProvider.cs index f40a9f236..5cddb4d46 100644 --- a/tests/SmiServices.UnitTests/Common/TestDateTimeProvider.cs +++ b/tests/SmiServices.UnitTests/Common/TestDateTimeProvider.cs @@ -2,20 +2,19 @@ using System; -namespace SmiServices.UnitTests.Common -{ - /// - /// Helper class for working with DateTimes in test methods. Returns a constant DateTime - /// - public class TestDateTimeProvider : DateTimeProvider - { - private readonly DateTime _instance; +namespace SmiServices.UnitTests.Common; - public TestDateTimeProvider() - { - _instance = DateTime.UtcNow; - } +/// +/// Helper class for working with DateTimes in test methods. 
Returns a constant DateTime +/// +public class TestDateTimeProvider : DateTimeProvider +{ + private readonly DateTime _instance; - public override DateTime UtcNow() => _instance; + public TestDateTimeProvider() + { + _instance = DateTime.UtcNow; } + + public override DateTime UtcNow() => _instance; } diff --git a/tests/SmiServices.UnitTests/Common/TestException.cs b/tests/SmiServices.UnitTests/Common/TestException.cs index e98160344..89447d4c9 100644 --- a/tests/SmiServices.UnitTests/Common/TestException.cs +++ b/tests/SmiServices.UnitTests/Common/TestException.cs @@ -1,11 +1,10 @@ using System; -namespace SmiServices.UnitTests.Common +namespace SmiServices.UnitTests.Common; + +public class TestException : Exception { - public class TestException : Exception - { - public override string StackTrace { get; } = "StackTrace"; + public override string StackTrace { get; } = "StackTrace"; - public TestException(string message) : base(message, new Exception("InnerException")) { } - } + public TestException(string message) : base(message, new Exception("InnerException")) { } } diff --git a/tests/SmiServices.UnitTests/Common/TestFileSystemHelpers.cs b/tests/SmiServices.UnitTests/Common/TestFileSystemHelpers.cs index 9014dee13..bdbad1ea6 100644 --- a/tests/SmiServices.UnitTests/Common/TestFileSystemHelpers.cs +++ b/tests/SmiServices.UnitTests/Common/TestFileSystemHelpers.cs @@ -2,14 +2,13 @@ using System; using System.IO; -namespace SmiServices.UnitTests.Common +namespace SmiServices.UnitTests.Common; + +public static class TestFileSystemHelpers { - public static class TestFileSystemHelpers + public static string GetTemporaryTestDirectory() { - public static string GetTemporaryTestDirectory() - { - string testName = TestContext.CurrentContext.Test.FullName.Replace('(', '_').Replace(")", ""); - return Path.Combine(Path.GetTempPath(), "smiservices-nunit", $"{testName}-{Guid.NewGuid().ToString().Split('-')[0]}"); - } + string testName = 
TestContext.CurrentContext.Test.FullName.Replace('(', '_').Replace(")", ""); + return Path.Combine(Path.GetTempPath(), "smiservices-nunit", $"{testName}-{Guid.NewGuid().ToString().Split('-')[0]}"); } } diff --git a/tests/SmiServices.UnitTests/Common/TestHelpers.cs b/tests/SmiServices.UnitTests/Common/TestHelpers.cs index a8066bbee..3093618dd 100644 --- a/tests/SmiServices.UnitTests/Common/TestHelpers.cs +++ b/tests/SmiServices.UnitTests/Common/TestHelpers.cs @@ -1,28 +1,27 @@ using NUnit.Framework; -namespace SmiServices.UnitTests.Common +namespace SmiServices.UnitTests.Common; + +public class TestHelpers { - public class TestHelpers + // Assert two strings match apart from line endings + public static void AreEqualIgnoringCaseAndLineEndings(string a, string b) { - // Assert two strings match apart from line endings - public static void AreEqualIgnoringCaseAndLineEndings(string a, string b) - { - Assert.That(b.Replace("\r\n", "\n"), Is.EqualTo(a.Replace("\r\n", "\n")).IgnoreCase); - } + Assert.That(b.Replace("\r\n", "\n"), Is.EqualTo(a.Replace("\r\n", "\n")).IgnoreCase); + } - // Assert two strings match apart from line endings, case sensitive - public static void AreEqualIgnoringLineEndings(string a, string b) - { - Assert.That(b.Replace("\r\n", "\n"), Is.EqualTo(a.Replace("\r\n", "\n")).IgnoreCase); - } + // Assert two strings match apart from line endings, case sensitive + public static void AreEqualIgnoringLineEndings(string a, string b) + { + Assert.That(b.Replace("\r\n", "\n"), Is.EqualTo(a.Replace("\r\n", "\n")).IgnoreCase); + } - public static void Contains(string needle, string haystack) - { - Assert.That(haystack.Replace("\r\n", "\n"), Does.Contain(needle.Replace("\r\n", "\n"))); - } - public static void DoesNotContain(string needle, string haystack) - { - Assert.That(haystack.Replace("\r\n", "\n"), Does.Not.Contain(needle.Replace("\r\n", "\n"))); - } + public static void Contains(string needle, string haystack) + { + Assert.That(haystack.Replace("\r\n", 
"\n"), Does.Contain(needle.Replace("\r\n", "\n"))); + } + public static void DoesNotContain(string needle, string haystack) + { + Assert.That(haystack.Replace("\r\n", "\n"), Does.Not.Contain(needle.Replace("\r\n", "\n"))); } } diff --git a/tests/SmiServices.UnitTests/Common/TestTimeline.cs b/tests/SmiServices.UnitTests/Common/TestTimeline.cs index 9076d694c..659f103e2 100644 --- a/tests/SmiServices.UnitTests/Common/TestTimeline.cs +++ b/tests/SmiServices.UnitTests/Common/TestTimeline.cs @@ -5,57 +5,56 @@ using System.Threading; using System.Threading.Tasks; -namespace SmiServices.UnitTests.Common +namespace SmiServices.UnitTests.Common; + +public class TestTimeline : IDisposable { - public class TestTimeline : IDisposable - { - private readonly MicroserviceTester _tester; - readonly Queue Operations = new(); + private readonly MicroserviceTester _tester; + readonly Queue Operations = new(); - public CancellationTokenSource cts = new(); + public CancellationTokenSource cts = new(); - /// - /// The exact time the TestTimeline was last started - /// - public DateTime StartTime { get; private set; } + /// + /// The exact time the TestTimeline was last started + /// + public DateTime StartTime { get; private set; } - public TestTimeline(MicroserviceTester tester) - { - _tester = tester; - } + public TestTimeline(MicroserviceTester tester) + { + _tester = tester; + } - public TestTimeline Wait(int milliseconds) - { - Operations.Enqueue(() => Task.Delay(milliseconds, cts.Token)); - return this; - } + public TestTimeline Wait(int milliseconds) + { + Operations.Enqueue(() => Task.Delay(milliseconds, cts.Token)); + return this; + } - public TestTimeline SendMessage(ConsumerOptions toConsumer, IMessage message) - { - Operations.Enqueue(() => _tester.SendMessage(toConsumer, message)); - return this; - } + public TestTimeline SendMessage(ConsumerOptions toConsumer, IMessage message) + { + Operations.Enqueue(() => _tester.SendMessage(toConsumer, message)); + return this; + } - 
public void StartTimeline() - { - new Task(() => - { - StartTime = DateTime.Now; - - foreach (Action a in Operations) - if (cts.IsCancellationRequested) - break; - else - a(); - }).Start(); - } - - public void Dispose() + public void StartTimeline() + { + new Task(() => { - _tester?.Dispose(); - cts.Cancel(); - cts.Dispose(); - GC.SuppressFinalize(this); - } + StartTime = DateTime.Now; + + foreach (Action a in Operations) + if (cts.IsCancellationRequested) + break; + else + a(); + }).Start(); + } + + public void Dispose() + { + _tester?.Dispose(); + cts.Cancel(); + cts.Dispose(); + GC.SuppressFinalize(this); } } diff --git a/tests/SmiServices.UnitTests/Common/ZipHelperTests.cs b/tests/SmiServices.UnitTests/Common/ZipHelperTests.cs index 53016a126..456113802 100644 --- a/tests/SmiServices.UnitTests/Common/ZipHelperTests.cs +++ b/tests/SmiServices.UnitTests/Common/ZipHelperTests.cs @@ -2,20 +2,19 @@ using SmiServices.Common; using System.IO; -namespace SmiServices.UnitTests.Common +namespace SmiServices.UnitTests.Common; + +class ZipHelperTests { - class ZipHelperTests + [TestCase("my.zip", true)] + [TestCase("my.dcm", false)] + [TestCase("my", false)] + public void TestZipHelper(string input, bool expectedOutput) { - [TestCase("my.zip", true)] - [TestCase("my.dcm", false)] - [TestCase("my", false)] - public void TestZipHelper(string input, bool expectedOutput) - { - Assert.That(ZipHelper.IsZip(input), Is.EqualTo(expectedOutput)); + Assert.That(ZipHelper.IsZip(input), Is.EqualTo(expectedOutput)); - var fs = new System.IO.Abstractions.FileSystem(); - var fi = new System.IO.Abstractions.FileInfoWrapper(fs, new FileInfo(input)); - Assert.That(ZipHelper.IsZip(fi), Is.EqualTo(expectedOutput)); - } + var fs = new System.IO.Abstractions.FileSystem(); + var fi = new System.IO.Abstractions.FileInfoWrapper(fs, new FileInfo(input)); + Assert.That(ZipHelper.IsZip(fi), Is.EqualTo(expectedOutput)); } } diff --git a/tests/SmiServices.UnitTests/LoggerFixture.cs 
b/tests/SmiServices.UnitTests/LoggerFixture.cs index 3ae3d162c..e2b1e5351 100644 --- a/tests/SmiServices.UnitTests/LoggerFixture.cs +++ b/tests/SmiServices.UnitTests/LoggerFixture.cs @@ -2,31 +2,30 @@ using NLog.Config; using NLog.Targets; -namespace SmiServices.UnitTests +namespace SmiServices.UnitTests; + +public static class LoggerFixture { - public static class LoggerFixture + private const string TestLoggerName = "TestLogger"; + + public static void Setup() { - private const string TestLoggerName = "TestLogger"; + if (LogManager.Configuration == null) + LogManager.Configuration = new LoggingConfiguration(); + else if (LogManager.Configuration.FindTargetByName(TestLoggerName) != null) + return; - public static void Setup() + var consoleTarget = new ConsoleTarget(TestLoggerName) { - if (LogManager.Configuration == null) - LogManager.Configuration = new LoggingConfiguration(); - else if (LogManager.Configuration.FindTargetByName(TestLoggerName) != null) - return; - - var consoleTarget = new ConsoleTarget(TestLoggerName) - { - Layout = @"${longdate}|${level}|${logger}|${message}|${exception:format=toString,Data:maxInnerExceptionLevel=5}", - AutoFlush = true - }; + Layout = @"${longdate}|${level}|${logger}|${message}|${exception:format=toString,Data:maxInnerExceptionLevel=5}", + AutoFlush = true + }; - LoggingConfiguration config = LogManager.Configuration; - config.AddTarget(consoleTarget); - config.AddRuleForAllLevels(consoleTarget); + LoggingConfiguration config = LogManager.Configuration; + config.AddTarget(consoleTarget); + config.AddRuleForAllLevels(consoleTarget); - LogManager.GlobalThreshold = LogLevel.Trace; - LogManager.GetCurrentClassLogger().Info("TestLogger added to LogManager config"); - } + LogManager.GlobalThreshold = LogLevel.Trace; + LogManager.GetCurrentClassLogger().Info("TestLogger added to LogManager config"); } } diff --git a/tests/SmiServices.UnitTests/Microservices/CohortExtractor/ColumnValuesRejectorTests.cs 
b/tests/SmiServices.UnitTests/Microservices/CohortExtractor/ColumnValuesRejectorTests.cs index d5e50a295..068be8897 100644 --- a/tests/SmiServices.UnitTests/Microservices/CohortExtractor/ColumnValuesRejectorTests.cs +++ b/tests/SmiServices.UnitTests/Microservices/CohortExtractor/ColumnValuesRejectorTests.cs @@ -5,60 +5,59 @@ using System.Collections.Generic; using System.Data.Common; -namespace SmiServices.UnitTests.Microservices.CohortExtractor +namespace SmiServices.UnitTests.Microservices.CohortExtractor; + +public class ColumnValuesRejectorTests { - public class ColumnValuesRejectorTests - { - private const string PatColName = "PatientID"; + private const string PatColName = "PatientID"; - [Test] - public void Test_ColumnValuesRejector_MissingColumn_Throws() - { - var rejector = new ColumnValuesRejector("fff", ["dave", "frank"]); + [Test] + public void Test_ColumnValuesRejector_MissingColumn_Throws() + { + var rejector = new ColumnValuesRejector("fff", ["dave", "frank"]); - var moqDave = new Mock(); - moqDave - .Setup(x => x["fff"]) - .Throws(); + var moqDave = new Mock(); + moqDave + .Setup(x => x["fff"]) + .Throws(); - var exc = Assert.Throws(() => rejector.Reject(moqDave.Object, out var _)); - Assert.That(exc!.Message, Does.Contain($"Expected a column called fff")); - } + var exc = Assert.Throws(() => rejector.Reject(moqDave.Object, out var _)); + Assert.That(exc!.Message, Does.Contain($"Expected a column called fff")); + } - [Test] - public void Test_ColumnValuesRejectorTests() - { - var rejector = new ColumnValuesRejector(PatColName, new HashSet(["Frank", "Peter", "David"], StringComparer.CurrentCultureIgnoreCase)); + [Test] + public void Test_ColumnValuesRejectorTests() + { + var rejector = new ColumnValuesRejector(PatColName, new HashSet(["Frank", "Peter", "David"], StringComparer.CurrentCultureIgnoreCase)); - var moqDave = new Mock(); - moqDave.Setup(x => x[PatColName]) - .Returns("Dave"); + var moqDave = new Mock(); + moqDave.Setup(x => x[PatColName]) 
+ .Returns("Dave"); - Assert.Multiple(() => - { - Assert.That(rejector.Reject(moqDave.Object, out string? reason), Is.False); - Assert.That(reason, Is.Null); - }); + Assert.Multiple(() => + { + Assert.That(rejector.Reject(moqDave.Object, out string? reason), Is.False); + Assert.That(reason, Is.Null); + }); - var moqFrank = new Mock(); - moqFrank.Setup(x => x[PatColName]) - .Returns("Frank"); + var moqFrank = new Mock(); + moqFrank.Setup(x => x[PatColName]) + .Returns("Frank"); - Assert.Multiple(() => - { - Assert.That(rejector.Reject(moqFrank.Object, out var reason), Is.True); - Assert.That(reason, Is.EqualTo("Patient or Identifier was in reject list")); - }); + Assert.Multiple(() => + { + Assert.That(rejector.Reject(moqFrank.Object, out var reason), Is.True); + Assert.That(reason, Is.EqualTo("Patient or Identifier was in reject list")); + }); - var moqLowerCaseFrank = new Mock(); - moqLowerCaseFrank.Setup(x => x[PatColName]) - .Returns("frank"); + var moqLowerCaseFrank = new Mock(); + moqLowerCaseFrank.Setup(x => x[PatColName]) + .Returns("frank"); - Assert.Multiple(() => - { - Assert.That(rejector.Reject(moqLowerCaseFrank.Object, out var reason), Is.True); - Assert.That(reason, Is.EqualTo("Patient or Identifier was in reject list")); - }); - } + Assert.Multiple(() => + { + Assert.That(rejector.Reject(moqLowerCaseFrank.Object, out var reason), Is.True); + Assert.That(reason, Is.EqualTo("Patient or Identifier was in reject list")); + }); } } diff --git a/tests/SmiServices.UnitTests/Microservices/CohortExtractor/Execution/RequestFulfillers/QueryToExecuteResultTest.cs b/tests/SmiServices.UnitTests/Microservices/CohortExtractor/Execution/RequestFulfillers/QueryToExecuteResultTest.cs index ddffd6caa..304987bd9 100644 --- a/tests/SmiServices.UnitTests/Microservices/CohortExtractor/Execution/RequestFulfillers/QueryToExecuteResultTest.cs +++ b/tests/SmiServices.UnitTests/Microservices/CohortExtractor/Execution/RequestFulfillers/QueryToExecuteResultTest.cs @@ -4,50 +4,49 
@@ using System; -namespace SmiServices.UnitTests.Microservices.CohortExtractor.Execution.RequestFulfillers +namespace SmiServices.UnitTests.Microservices.CohortExtractor.Execution.RequestFulfillers; + +public class QueryToExecuteResultTest { - public class QueryToExecuteResultTest - { - #region Fixture Methods + #region Fixture Methods - [OneTimeSetUp] - public void OneTimeSetUp() - { - } + [OneTimeSetUp] + public void OneTimeSetUp() + { + } - [OneTimeTearDown] - public void OneTimeTearDown() { } + [OneTimeTearDown] + public void OneTimeTearDown() { } - #endregion + #endregion - #region Test Methods + #region Test Methods - [SetUp] - public void SetUp() { } + [SetUp] + public void SetUp() { } - [TearDown] - public void TearDown() { } + [TearDown] + public void TearDown() { } - #endregion + #endregion - #region Tests + #region Tests - /// - /// Asserts that we always have a rejection reason when rejection=true - /// - [Test] - public void Test_QueryToExecuteResult_RejectReasonNullOrEmpty_ThrowsException() + /// + /// Asserts that we always have a rejection reason when rejection=true + /// + [Test] + public void Test_QueryToExecuteResult_RejectReasonNullOrEmpty_ThrowsException() + { + Assert.Throws(() => { - Assert.Throws(() => - { - var _ = new QueryToExecuteResult("foo", "bar", "baz", "whee", rejection: true, rejectionReason: null); - }); - Assert.Throws(() => - { - var _ = new QueryToExecuteResult("foo", "bar", "baz", "whee", rejection: true, rejectionReason: " "); - }); - } - - #endregion + var _ = new QueryToExecuteResult("foo", "bar", "baz", "whee", rejection: true, rejectionReason: null); + }); + Assert.Throws(() => + { + var _ = new QueryToExecuteResult("foo", "bar", "baz", "whee", rejection: true, rejectionReason: " "); + }); } + + #endregion } diff --git a/tests/SmiServices.UnitTests/Microservices/CohortExtractor/ExtractionRequestMessageTests.cs b/tests/SmiServices.UnitTests/Microservices/CohortExtractor/ExtractionRequestMessageTests.cs index 
de96bd824..a6df0aa6a 100644 --- a/tests/SmiServices.UnitTests/Microservices/CohortExtractor/ExtractionRequestMessageTests.cs +++ b/tests/SmiServices.UnitTests/Microservices/CohortExtractor/ExtractionRequestMessageTests.cs @@ -2,24 +2,23 @@ using NUnit.Framework; using SmiServices.Common.Messages.Extraction; -namespace SmiServices.UnitTests.Microservices.CohortExtractor +namespace SmiServices.UnitTests.Microservices.CohortExtractor; + +class ExtractionRequestMessageTests { - class ExtractionRequestMessageTests + [Test] + public void Test_ConstructMessage() { - [Test] - public void Test_ConstructMessage() + var msg = new ExtractionRequestMessage { - var msg = new ExtractionRequestMessage - { - KeyTag = DicomTag.StudyInstanceUID.DictionaryEntry.Keyword - }; - msg.ExtractionIdentifiers.Add("1.2.3"); + KeyTag = DicomTag.StudyInstanceUID.DictionaryEntry.Keyword + }; + msg.ExtractionIdentifiers.Add("1.2.3"); - Assert.Multiple(() => - { - Assert.That(msg.KeyTag, Is.EqualTo("StudyInstanceUID")); - Assert.That(msg.ExtractionIdentifiers, Does.Contain("1.2.3")); - }); - } + Assert.Multiple(() => + { + Assert.That(msg.KeyTag, Is.EqualTo("StudyInstanceUID")); + Assert.That(msg.ExtractionIdentifiers, Does.Contain("1.2.3")); + }); } } diff --git a/tests/SmiServices.UnitTests/Microservices/CohortExtractor/FromCataloguesExtractionRequestFulfillerUnitTests.cs b/tests/SmiServices.UnitTests/Microservices/CohortExtractor/FromCataloguesExtractionRequestFulfillerUnitTests.cs index 145b1e996..060b2b53a 100644 --- a/tests/SmiServices.UnitTests/Microservices/CohortExtractor/FromCataloguesExtractionRequestFulfillerUnitTests.cs +++ b/tests/SmiServices.UnitTests/Microservices/CohortExtractor/FromCataloguesExtractionRequestFulfillerUnitTests.cs @@ -14,229 +14,228 @@ using System.Linq; using System.Text.RegularExpressions; -namespace SmiServices.UnitTests.Microservices.CohortExtractor +namespace SmiServices.UnitTests.Microservices.CohortExtractor; + +public class 
FromCataloguesExtractionRequestFulfillerUnitTests { - public class FromCataloguesExtractionRequestFulfillerUnitTests + [OneTimeSetUp] + public void OneTimeSetUp() { - [OneTimeSetUp] - public void OneTimeSetUp() - { - FansiImplementations.Load(); - } + FansiImplementations.Load(); + } - [Test] - public void Constructor_HappyPath() - { - // Arrange + [Test] + public void Constructor_HappyPath() + { + // Arrange - var catalogue = CreateCatalogue("CT"); + var catalogue = CreateCatalogue("CT"); - // Act + // Act - FromCataloguesExtractionRequestFulfiller call() => new([catalogue]); + FromCataloguesExtractionRequestFulfiller call() => new([catalogue]); - // Assert + // Assert - Assert.DoesNotThrow(() => call()); - } + Assert.DoesNotThrow(() => call()); + } - [Test] - public void Constructor_NoCompatibleCatalogues_Throws() - { - // Arrange + [Test] + public void Constructor_NoCompatibleCatalogues_Throws() + { + // Arrange - var mockCatalogue = new Mock(MockBehavior.Strict); - mockCatalogue.Setup(x => x.ID).Returns(1); - mockCatalogue.Setup(x => x.GetAllExtractionInformation(It.IsAny())).Returns([]); + var mockCatalogue = new Mock(MockBehavior.Strict); + mockCatalogue.Setup(x => x.ID).Returns(1); + mockCatalogue.Setup(x => x.GetAllExtractionInformation(It.IsAny())).Returns([]); - // Act + // Act - FromCataloguesExtractionRequestFulfiller call() => new([mockCatalogue.Object]); + FromCataloguesExtractionRequestFulfiller call() => new([mockCatalogue.Object]); - // Assert + // Assert - var exc = Assert.Throws(() => call()); - Assert.That(exc.Message, Is.EqualTo("There are no compatible Catalogues in the repository (See QueryToExecuteColumnSet for required columns) (Parameter 'cataloguesToUseForImageLookup')")); - } + var exc = Assert.Throws(() => call()); + Assert.That(exc.Message, Is.EqualTo("There are no compatible Catalogues in the repository (See QueryToExecuteColumnSet for required columns) (Parameter 'cataloguesToUseForImageLookup')")); + } - [TestCase("(.)_(.)")] - 
[TestCase("._.")] - public void Constructor_InvalidRegex_Throws(string regexString) - { - // Arrange + [TestCase("(.)_(.)")] + [TestCase("._.")] + public void Constructor_InvalidRegex_Throws(string regexString) + { + // Arrange - var catalogue = CreateCatalogue("CT"); + var catalogue = CreateCatalogue("CT"); - // Act + // Act - FromCataloguesExtractionRequestFulfiller call() => new([catalogue], new Regex(regexString)); + FromCataloguesExtractionRequestFulfiller call() => new([catalogue], new Regex(regexString)); - // Assert + // Assert - var exc = Assert.Throws(() => call()); - Assert.That(exc.Message, Is.EqualTo("Must have exactly one non-default capture group (Parameter 'modalityRoutingRegex')")); - } + var exc = Assert.Throws(() => call()); + Assert.That(exc.Message, Is.EqualTo("Must have exactly one non-default capture group (Parameter 'modalityRoutingRegex')")); + } + + [Test] + public void GetAllMatchingFiles_MatchingModalityNoFiles_ReturnsEmpty() + { + // Arrange - [Test] - public void GetAllMatchingFiles_MatchingModalityNoFiles_ReturnsEmpty() + var catalogue = CreateCatalogue("CT"); + + var message = new ExtractionRequestMessage { - // Arrange + KeyTag = "SeriesInstanceUID", + Modality = "CT", + }; - var catalogue = CreateCatalogue("CT"); + var fulfiller = new FromCataloguesExtractionRequestFulfiller([catalogue]); - var message = new ExtractionRequestMessage - { - KeyTag = "SeriesInstanceUID", - Modality = "CT", - }; + // Act - var fulfiller = new FromCataloguesExtractionRequestFulfiller([catalogue]); + var files = fulfiller.GetAllMatchingFiles(message); - // Act + // Assert - var files = fulfiller.GetAllMatchingFiles(message); + Assert.That(files.ToList(), Is.Empty); + } - // Assert + [Test] + public void GetAllMatchingFiles_NoCatalogueForModality_Throws() + { + // Arrange - Assert.That(files.ToList(), Is.Empty); - } + var catalogue = CreateCatalogue("CT"); - [Test] - public void GetAllMatchingFiles_NoCatalogueForModality_Throws() + var message = new 
ExtractionRequestMessage { - // Arrange + KeyTag = "SeriesInstanceUID", + Modality = "MR", + }; - var catalogue = CreateCatalogue("CT"); + var fulfiller = new FromCataloguesExtractionRequestFulfiller([catalogue]); - var message = new ExtractionRequestMessage - { - KeyTag = "SeriesInstanceUID", - Modality = "MR", - }; + // Act - var fulfiller = new FromCataloguesExtractionRequestFulfiller([catalogue]); + List call() => fulfiller.GetAllMatchingFiles(message).ToList(); - // Act + // Assert - List call() => fulfiller.GetAllMatchingFiles(message).ToList(); + var exc = Assert.Throws(() => call().ToList()); + Assert.That(exc!.Message, Does.StartWith("Couldn't find any compatible Catalogues to run extraction queries against for query")); + } - // Assert + [Test] + public void GetAllMatchingFiles_NonMixedOverridingRejectors_Passes() + { + // Arrange - var exc = Assert.Throws(() => call().ToList()); - Assert.That(exc!.Message, Does.StartWith("Couldn't find any compatible Catalogues to run extraction queries against for query")); - } + var catalogue = CreateCatalogue("CT"); - [Test] - public void GetAllMatchingFiles_NonMixedOverridingRejectors_Passes() + var message = new ExtractionRequestMessage { - // Arrange - - var catalogue = CreateCatalogue("CT"); + KeyTag = "SeriesInstanceUID", + Modality = "CT", + }; - var message = new ExtractionRequestMessage + var fulfiller = new FromCataloguesExtractionRequestFulfiller([catalogue]); + fulfiller.ModalitySpecificRejectors.Add( + new ModalitySpecificRejectorOptions { - KeyTag = "SeriesInstanceUID", - Modality = "CT", - }; - - var fulfiller = new FromCataloguesExtractionRequestFulfiller([catalogue]); - fulfiller.ModalitySpecificRejectors.Add( - new ModalitySpecificRejectorOptions - { - Overrides = true, - Modalities = "CT", - }, - new RejectNone() - ); - fulfiller.ModalitySpecificRejectors.Add( - new ModalitySpecificRejectorOptions - { - Overrides = true, - Modalities = "CT", - }, - new RejectNone() - ); - - // Act - - List call() => 
fulfiller.GetAllMatchingFiles(message).ToList(); - - // Assert - - Assert.DoesNotThrow(() => call().ToList()); - } + Overrides = true, + Modalities = "CT", + }, + new RejectNone() + ); + fulfiller.ModalitySpecificRejectors.Add( + new ModalitySpecificRejectorOptions + { + Overrides = true, + Modalities = "CT", + }, + new RejectNone() + ); - [Test] - public void GetAllMatchingFiles_MixedOverridingRejectors_Throws() - { - // Arrange + // Act - var catalogue = CreateCatalogue("CT"); + List call() => fulfiller.GetAllMatchingFiles(message).ToList(); - var message = new ExtractionRequestMessage - { - KeyTag = "SeriesInstanceUID", - Modality = "CT", - }; - - var fulfiller = new FromCataloguesExtractionRequestFulfiller([catalogue]); - fulfiller.ModalitySpecificRejectors.Add( - new ModalitySpecificRejectorOptions - { - Overrides = true, - Modalities = "CT", - }, - new RejectNone() - ); - fulfiller.ModalitySpecificRejectors.Add( - new ModalitySpecificRejectorOptions - { - Overrides = false, - Modalities = "CT", - }, - new RejectNone() - ); - - // Act - - List call() => fulfiller.GetAllMatchingFiles(message).ToList(); - - // Assert - - var exc = Assert.Throws(() => call().ToList()); - Assert.That(exc!.Message, Is.EqualTo("You cannot mix Overriding and non Overriding ModalitySpecificRejectors. 
Bad Modality was 'CT'")); - } + // Assert - private static ICatalogue CreateCatalogue(string modality) - { - var memoryRepo = new MemoryCatalogueRepository(); - var catalogue = new Catalogue(memoryRepo, $"{modality}_ImageTable"); - Add(catalogue, "RelativeFileArchiveURI"); - Add(catalogue, "StudyInstanceUID"); - Add(catalogue, "SeriesInstanceUID"); - Add(catalogue, "SOPInstanceUID"); - return catalogue; - } + Assert.DoesNotThrow(() => call().ToList()); + } + + [Test] + public void GetAllMatchingFiles_MixedOverridingRejectors_Throws() + { + // Arrange + + var catalogue = CreateCatalogue("CT"); - private static void Add(ICatalogue c, string col) + var message = new ExtractionRequestMessage { - var repo = c.CatalogueRepository; - var ci = new CatalogueItem(repo, c, col); - var ti = new TableInfo(repo, "ff") + KeyTag = "SeriesInstanceUID", + Modality = "CT", + }; + + var fulfiller = new FromCataloguesExtractionRequestFulfiller([catalogue]); + fulfiller.ModalitySpecificRejectors.Add( + new ModalitySpecificRejectorOptions { - Server = "ff", - Database = "db", - }; - _ = new ExtractionInformation(repo, ci, new ColumnInfo(repo, col, "varchar(10)", ti), col); - } + Overrides = true, + Modalities = "CT", + }, + new RejectNone() + ); + fulfiller.ModalitySpecificRejectors.Add( + new ModalitySpecificRejectorOptions + { + Overrides = false, + Modalities = "CT", + }, + new RejectNone() + ); - private class RejectNone : IRejector + // Act + + List call() => fulfiller.GetAllMatchingFiles(message).ToList(); + + // Assert + + var exc = Assert.Throws(() => call().ToList()); + Assert.That(exc!.Message, Is.EqualTo("You cannot mix Overriding and non Overriding ModalitySpecificRejectors. 
Bad Modality was 'CT'")); + } + + private static ICatalogue CreateCatalogue(string modality) + { + var memoryRepo = new MemoryCatalogueRepository(); + var catalogue = new Catalogue(memoryRepo, $"{modality}_ImageTable"); + Add(catalogue, "RelativeFileArchiveURI"); + Add(catalogue, "StudyInstanceUID"); + Add(catalogue, "SeriesInstanceUID"); + Add(catalogue, "SOPInstanceUID"); + return catalogue; + } + + private static void Add(ICatalogue c, string col) + { + var repo = c.CatalogueRepository; + var ci = new CatalogueItem(repo, c, col); + var ti = new TableInfo(repo, "ff") { - public bool Reject(IDataRecord row, [NotNullWhen(true)] out string? reason) - { - reason = null; - return false; - } + Server = "ff", + Database = "db", + }; + _ = new ExtractionInformation(repo, ci, new ColumnInfo(repo, col, "varchar(10)", ti), col); + } + + private class RejectNone : IRejector + { + public bool Reject(IDataRecord row, [NotNullWhen(true)] out string? reason) + { + reason = null; + return false; } } } diff --git a/tests/SmiServices.UnitTests/Microservices/CohortExtractor/Messaging/ExtractionRequestQueueConsumerTest.cs b/tests/SmiServices.UnitTests/Microservices/CohortExtractor/Messaging/ExtractionRequestQueueConsumerTest.cs index fcd66e59b..865df558d 100644 --- a/tests/SmiServices.UnitTests/Microservices/CohortExtractor/Messaging/ExtractionRequestQueueConsumerTest.cs +++ b/tests/SmiServices.UnitTests/Microservices/CohortExtractor/Messaging/ExtractionRequestQueueConsumerTest.cs @@ -17,152 +17,151 @@ using System.Text.RegularExpressions; using System.Threading; -namespace SmiServices.UnitTests.Microservices.CohortExtractor.Messaging +namespace SmiServices.UnitTests.Microservices.CohortExtractor.Messaging; + +public class ExtractionRequestQueueConsumerTest { - public class ExtractionRequestQueueConsumerTest - { - #region Fixture Methods + #region Fixture Methods - private static readonly IFileSystem _fileSystem = new MockFileSystem(); + private static readonly IFileSystem 
_fileSystem = new MockFileSystem(); - [OneTimeSetUp] - public void OneTimeSetUp() - { - } + [OneTimeSetUp] + public void OneTimeSetUp() + { + } - [OneTimeTearDown] - public void OneTimeTearDown() { } + [OneTimeTearDown] + public void OneTimeTearDown() { } - class FakeFulfiller : IExtractionRequestFulfiller - { - protected readonly Logger Logger; + class FakeFulfiller : IExtractionRequestFulfiller + { + protected readonly Logger Logger; - public List Rejectors { get; set; } = []; + public List Rejectors { get; set; } = []; - public Regex? ModalityRoutingRegex { get; set; } - public Dictionary ModalitySpecificRejectors { get; set; } - = []; + public Regex? ModalityRoutingRegex { get; set; } + public Dictionary ModalitySpecificRejectors { get; set; } + = []; - public FakeFulfiller() - { - Logger = LogManager.GetCurrentClassLogger(); - } + public FakeFulfiller() + { + Logger = LogManager.GetCurrentClassLogger(); + } + + public IEnumerable GetAllMatchingFiles(ExtractionRequestMessage message) + { + Logger.Debug($"Found {message.KeyTag}"); - public IEnumerable GetAllMatchingFiles(ExtractionRequestMessage message) + foreach (var valueToLookup in message.ExtractionIdentifiers) { - Logger.Debug($"Found {message.KeyTag}"); - - foreach (var valueToLookup in message.ExtractionIdentifiers) - { - var results = new ExtractImageCollection(valueToLookup); - var studyTagValue = "2"; - var seriesTagValue = "3"; - var instanceTagValue = "4"; - var rejection = false; - var rejectionReason = ""; - var result = new QueryToExecuteResult(valueToLookup, studyTagValue, seriesTagValue, instanceTagValue, rejection, rejectionReason); - if (!results.ContainsKey(result.SeriesTagValue!)) - results.Add(result.SeriesTagValue!, []); - results[result.SeriesTagValue!].Add(result); - - yield return results; - } + var results = new ExtractImageCollection(valueToLookup); + var studyTagValue = "2"; + var seriesTagValue = "3"; + var instanceTagValue = "4"; + var rejection = false; + var rejectionReason = 
""; + var result = new QueryToExecuteResult(valueToLookup, studyTagValue, seriesTagValue, instanceTagValue, rejection, rejectionReason); + if (!results.ContainsKey(result.SeriesTagValue!)) + results.Add(result.SeriesTagValue!, []); + results[result.SeriesTagValue!].Add(result); + + yield return results; } } + } - #endregion + #endregion - #region Test Methods + #region Test Methods - [SetUp] - public void SetUp() - { - } + [SetUp] + public void SetUp() + { + } - [TearDown] - public void TearDown() { } + [TearDown] + public void TearDown() { } - #endregion + #endregion - #region Tests + #region Tests - [Test] - public void Test_ExtractionRequestQueueConsumer_AnonExtraction_RoutingKey() - { - GlobalOptions globals = new GlobalOptionsFactory().Load(nameof(Test_ExtractionRequestQueueConsumer_AnonExtraction_RoutingKey)); - globals.CohortExtractorOptions!.ExtractAnonRoutingKey = "anon"; - globals.CohortExtractorOptions.ExtractIdentRoutingKey = ""; - AssertMessagePublishedWithSpecifiedKey(globals, false, "anon"); - } + [Test] + public void Test_ExtractionRequestQueueConsumer_AnonExtraction_RoutingKey() + { + GlobalOptions globals = new GlobalOptionsFactory().Load(nameof(Test_ExtractionRequestQueueConsumer_AnonExtraction_RoutingKey)); + globals.CohortExtractorOptions!.ExtractAnonRoutingKey = "anon"; + globals.CohortExtractorOptions.ExtractIdentRoutingKey = ""; + AssertMessagePublishedWithSpecifiedKey(globals, false, "anon"); + } - [Test] - public void Test_ExtractionRequestQueueConsumer_IdentExtraction_RoutingKey() - { - GlobalOptions globals = new GlobalOptionsFactory().Load(nameof(Test_ExtractionRequestQueueConsumer_IdentExtraction_RoutingKey)); - globals.CohortExtractorOptions!.ExtractAnonRoutingKey = ""; - globals.CohortExtractorOptions.ExtractIdentRoutingKey = "ident"; - AssertMessagePublishedWithSpecifiedKey(globals, true, "ident"); - } + [Test] + public void Test_ExtractionRequestQueueConsumer_IdentExtraction_RoutingKey() + { + GlobalOptions globals = new 
GlobalOptionsFactory().Load(nameof(Test_ExtractionRequestQueueConsumer_IdentExtraction_RoutingKey)); + globals.CohortExtractorOptions!.ExtractAnonRoutingKey = ""; + globals.CohortExtractorOptions.ExtractIdentRoutingKey = "ident"; + AssertMessagePublishedWithSpecifiedKey(globals, true, "ident"); + } - /// - /// Checks that ExtractionRequestQueueConsumer publishes messages correctly according to the input message isIdentifiableExtraction value - /// - /// - /// - /// - private void AssertMessagePublishedWithSpecifiedKey(GlobalOptions globals, bool isIdentifiableExtraction, string expectedRoutingKey) + /// + /// Checks that ExtractionRequestQueueConsumer publishes messages correctly according to the input message isIdentifiableExtraction value + /// + /// + /// + /// + private void AssertMessagePublishedWithSpecifiedKey(GlobalOptions globals, bool isIdentifiableExtraction, string expectedRoutingKey) + { + var fakeFulfiller = new FakeFulfiller(); + + var mockFileMessageProducerModel = new Mock(MockBehavior.Strict); + string? fileMessageRoutingKey = null; + mockFileMessageProducerModel + .Setup(x => x.SendMessage(It.IsAny(), It.IsAny(), It.IsNotNull())) + .Callback((IMessage _, IMessageHeader __, string routingKey) => { fileMessageRoutingKey = routingKey; }) + .Returns(new MessageHeader()); + mockFileMessageProducerModel.Setup(x => x.WaitForConfirms()); + + var mockFileInfoMessageProducerModel = new Mock(MockBehavior.Strict); + mockFileInfoMessageProducerModel + .Setup(x => x.SendMessage(It.IsAny(), It.IsAny(), null)) + .Returns(new MessageHeader()); + mockFileInfoMessageProducerModel.Setup(x => x.WaitForConfirms()); + + var msg = new ExtractionRequestMessage { - var fakeFulfiller = new FakeFulfiller(); - - var mockFileMessageProducerModel = new Mock(MockBehavior.Strict); - string? 
fileMessageRoutingKey = null; - mockFileMessageProducerModel - .Setup(x => x.SendMessage(It.IsAny(), It.IsAny(), It.IsNotNull())) - .Callback((IMessage _, IMessageHeader __, string routingKey) => { fileMessageRoutingKey = routingKey; }) - .Returns(new MessageHeader()); - mockFileMessageProducerModel.Setup(x => x.WaitForConfirms()); - - var mockFileInfoMessageProducerModel = new Mock(MockBehavior.Strict); - mockFileInfoMessageProducerModel - .Setup(x => x.SendMessage(It.IsAny(), It.IsAny(), null)) - .Returns(new MessageHeader()); - mockFileInfoMessageProducerModel.Setup(x => x.WaitForConfirms()); - - var msg = new ExtractionRequestMessage - { - JobSubmittedAt = DateTime.UtcNow, - ExtractionJobIdentifier = Guid.NewGuid(), - ProjectNumber = "1234", - ExtractionDirectory = "1234/foo", - IsIdentifiableExtraction = isIdentifiableExtraction, - KeyTag = "foo", - ExtractionIdentifiers = ["foo"], - Modality = "CT", - }; - - var consumer = new ExtractionRequestQueueConsumer( - globals.CohortExtractorOptions!, - fakeFulfiller, - new StudySeriesOriginalFilenameProjectPathResolver(_fileSystem), - mockFileMessageProducerModel.Object, - mockFileInfoMessageProducerModel.Object); - - var fatalCalled = false; - FatalErrorEventArgs? 
fatalErrorEventArgs = null; - consumer.OnFatal += (sender, args) => - { - fatalCalled = true; - fatalErrorEventArgs = args; - }; - - consumer.ProcessMessage(new MessageHeader(), msg, 1); + JobSubmittedAt = DateTime.UtcNow, + ExtractionJobIdentifier = Guid.NewGuid(), + ProjectNumber = "1234", + ExtractionDirectory = "1234/foo", + IsIdentifiableExtraction = isIdentifiableExtraction, + KeyTag = "foo", + ExtractionIdentifiers = ["foo"], + Modality = "CT", + }; + + var consumer = new ExtractionRequestQueueConsumer( + globals.CohortExtractorOptions!, + fakeFulfiller, + new StudySeriesOriginalFilenameProjectPathResolver(_fileSystem), + mockFileMessageProducerModel.Object, + mockFileInfoMessageProducerModel.Object); + + var fatalCalled = false; + FatalErrorEventArgs? fatalErrorEventArgs = null; + consumer.OnFatal += (sender, args) => + { + fatalCalled = true; + fatalErrorEventArgs = args; + }; - Thread.Sleep(500); // Fatal call is race-y - Assert.That(fatalCalled, Is.False, $"Fatal was called with {fatalErrorEventArgs}"); - Assert.That(consumer.AckCount, Is.EqualTo(1)); - Assert.That(fileMessageRoutingKey, Is.EqualTo(expectedRoutingKey)); - } + consumer.ProcessMessage(new MessageHeader(), msg, 1); - #endregion + Thread.Sleep(500); // Fatal call is race-y + Assert.That(fatalCalled, Is.False, $"Fatal was called with {fatalErrorEventArgs}"); + Assert.That(consumer.AckCount, Is.EqualTo(1)); + Assert.That(fileMessageRoutingKey, Is.EqualTo(expectedRoutingKey)); } + + #endregion } diff --git a/tests/SmiServices.UnitTests/Microservices/CohortExtractor/ModalitySpecificRejectorsTests.cs b/tests/SmiServices.UnitTests/Microservices/CohortExtractor/ModalitySpecificRejectorsTests.cs index c16949c55..6b0463a5c 100644 --- a/tests/SmiServices.UnitTests/Microservices/CohortExtractor/ModalitySpecificRejectorsTests.cs +++ b/tests/SmiServices.UnitTests/Microservices/CohortExtractor/ModalitySpecificRejectorsTests.cs @@ -3,18 +3,18 @@ using System; using System.IO; -namespace 
SmiServices.UnitTests.Microservices.CohortExtractor +namespace SmiServices.UnitTests.Microservices.CohortExtractor; + +class ModalitySpecificRejectorsTests { - class ModalitySpecificRejectorsTests - { - [Test] - public void TestDeserialization() - { - var factory = new GlobalOptionsFactory(); - string file; - var yaml = - @" + [Test] + public void TestDeserialization() + { + var factory = new GlobalOptionsFactory(); + string file; + var yaml = + @" LoggingOptions: LogConfigFile: CohortExtractorOptions: @@ -27,27 +27,27 @@ public void TestDeserialization() RejectorType: Microservices.CohortExtractor.Execution.RequestFulfillers.RejectNone "; - File.WriteAllText(file = Path.Combine(TestContext.CurrentContext.WorkDirectory, "ff.yaml"), yaml); + File.WriteAllText(file = Path.Combine(TestContext.CurrentContext.WorkDirectory, "ff.yaml"), yaml); - var opts = factory.Load("FF.DD", file); + var opts = factory.Load("FF.DD", file); - Assert.Multiple(() => - { - Assert.That(opts.CohortExtractorOptions!.ModalitySpecificRejectors!, Has.Length.EqualTo(1)); - Assert.That(opts.CohortExtractorOptions.ModalitySpecificRejectors?[0].Modalities, Is.EqualTo("CT,MR")); - Assert.That(opts.CohortExtractorOptions.ModalitySpecificRejectors?[0].GetModalities()[0], Is.EqualTo("CT")); - Assert.That(opts.CohortExtractorOptions.ModalitySpecificRejectors?[0].GetModalities()[1], Is.EqualTo("MR")); - Assert.That(opts.CohortExtractorOptions.ModalitySpecificRejectors?[0].RejectorType, Is.EqualTo("Microservices.CohortExtractor.Execution.RequestFulfillers.RejectNone")); - }); - } - - [Test] - public void TestValidation_MissingModalityRouting() + Assert.Multiple(() => { - var factory = new GlobalOptionsFactory(); - string file; - var yaml = - @" + Assert.That(opts.CohortExtractorOptions!.ModalitySpecificRejectors!, Has.Length.EqualTo(1)); + Assert.That(opts.CohortExtractorOptions.ModalitySpecificRejectors?[0].Modalities, Is.EqualTo("CT,MR")); + 
Assert.That(opts.CohortExtractorOptions.ModalitySpecificRejectors?[0].GetModalities()[0], Is.EqualTo("CT")); + Assert.That(opts.CohortExtractorOptions.ModalitySpecificRejectors?[0].GetModalities()[1], Is.EqualTo("MR")); + Assert.That(opts.CohortExtractorOptions.ModalitySpecificRejectors?[0].RejectorType, Is.EqualTo("Microservices.CohortExtractor.Execution.RequestFulfillers.RejectNone")); + }); + } + + [Test] + public void TestValidation_MissingModalityRouting() + { + var factory = new GlobalOptionsFactory(); + string file; + var yaml = + @" LoggingOptions: LogConfigFile: CohortExtractorOptions: @@ -62,12 +62,11 @@ public void TestValidation_MissingModalityRouting() RejectorType: Microservices.CohortExtractor.Execution.RequestFulfillers.RejectNone "; - File.WriteAllText(file = Path.Combine(TestContext.CurrentContext.WorkDirectory, "ff.yaml"), yaml); + File.WriteAllText(file = Path.Combine(TestContext.CurrentContext.WorkDirectory, "ff.yaml"), yaml); - var opts = factory.Load("FF.DD", file); + var opts = factory.Load("FF.DD", file); - var ex = Assert.Throws(() => opts.CohortExtractorOptions!.Validate()); - Assert.That(ex!.Message, Is.EqualTo("ModalitySpecificRejectors requires providing a ModalityRoutingRegex")); - } + var ex = Assert.Throws(() => opts.CohortExtractorOptions!.Validate()); + Assert.That(ex!.Message, Is.EqualTo("ModalitySpecificRejectors requires providing a ModalityRoutingRegex")); } } diff --git a/tests/SmiServices.UnitTests/Microservices/CohortExtractor/NoSuffixProjectPathResolverTests.cs b/tests/SmiServices.UnitTests/Microservices/CohortExtractor/NoSuffixProjectPathResolverTests.cs index 3a646e1c9..5e336dd34 100644 --- a/tests/SmiServices.UnitTests/Microservices/CohortExtractor/NoSuffixProjectPathResolverTests.cs +++ b/tests/SmiServices.UnitTests/Microservices/CohortExtractor/NoSuffixProjectPathResolverTests.cs @@ -6,70 +6,69 @@ using System.IO.Abstractions; using System.IO.Abstractions.TestingHelpers; -namespace 
SmiServices.UnitTests.Microservices.CohortExtractor +namespace SmiServices.UnitTests.Microservices.CohortExtractor; + +public class NoSuffixProjectPathResolverTests { - public class NoSuffixProjectPathResolverTests - { - private IFileSystem _fileSystem = new MockFileSystem(); + private IFileSystem _fileSystem = new MockFileSystem(); - [SetUp] - public void SetUp() - { - _fileSystem = new MockFileSystem(); - } + [SetUp] + public void SetUp() + { + _fileSystem = new MockFileSystem(); + } - [Test] - public void GetOutputPath_Basic() - { - // Arrange + [Test] + public void GetOutputPath_Basic() + { + // Arrange - var expectedPath = _fileSystem.Path.Combine("study", "series", "foo.dcm"); - var resolver = new NoSuffixProjectPathResolver(_fileSystem); - var result = new QueryToExecuteResult( - "foo.dcm", - "study", - "series", - "sop", - rejection: false, - rejectionReason: null - ); - var message = new ExtractionRequestMessage(); + var expectedPath = _fileSystem.Path.Combine("study", "series", "foo.dcm"); + var resolver = new NoSuffixProjectPathResolver(_fileSystem); + var result = new QueryToExecuteResult( + "foo.dcm", + "study", + "series", + "sop", + rejection: false, + rejectionReason: null + ); + var message = new ExtractionRequestMessage(); - // Act + // Act - var actualPath = resolver.GetOutputPath(result, message); + var actualPath = resolver.GetOutputPath(result, message); - // Assert - Assert.That(actualPath, Is.EqualTo(expectedPath)); - } + // Assert + Assert.That(actualPath, Is.EqualTo(expectedPath)); + } - [TestCase("file.dcm", "file.dcm")] - [TestCase("file.dcm", "file.dicom")] - [TestCase("file.dcm", "file")] - [TestCase("file.foo.dcm", "file.foo")] - public void GetOutputPath_Extensions(string expectedOutput, string inputFile) - { - // Arrange + [TestCase("file.dcm", "file.dcm")] + [TestCase("file.dcm", "file.dicom")] + [TestCase("file.dcm", "file")] + [TestCase("file.foo.dcm", "file.foo")] + public void GetOutputPath_Extensions(string expectedOutput, 
string inputFile) + { + // Arrange - var expectedPath = _fileSystem.Path.Combine("study", "series", expectedOutput); - var resolver = new NoSuffixProjectPathResolver(_fileSystem); - var result = new QueryToExecuteResult( - inputFile, - "study", - "series", - "sop", - rejection: false, - rejectionReason: null - ); - var message = new ExtractionRequestMessage(); + var expectedPath = _fileSystem.Path.Combine("study", "series", expectedOutput); + var resolver = new NoSuffixProjectPathResolver(_fileSystem); + var result = new QueryToExecuteResult( + inputFile, + "study", + "series", + "sop", + rejection: false, + rejectionReason: null + ); + var message = new ExtractionRequestMessage(); - // Act + // Act - var actualPath = resolver.GetOutputPath(result, message); + var actualPath = resolver.GetOutputPath(result, message); - // Assert + // Assert - Assert.That(actualPath, Is.EqualTo(expectedPath)); - } + Assert.That(actualPath, Is.EqualTo(expectedPath)); } } diff --git a/tests/SmiServices.UnitTests/Microservices/CohortExtractor/QueryToExecuteTests.cs b/tests/SmiServices.UnitTests/Microservices/CohortExtractor/QueryToExecuteTests.cs index 2427a8458..c8fe7a860 100644 --- a/tests/SmiServices.UnitTests/Microservices/CohortExtractor/QueryToExecuteTests.cs +++ b/tests/SmiServices.UnitTests/Microservices/CohortExtractor/QueryToExecuteTests.cs @@ -3,17 +3,16 @@ using SmiServices.Microservices.CohortExtractor.RequestFulfillers; using System; -namespace SmiServices.UnitTests.Microservices.CohortExtractor +namespace SmiServices.UnitTests.Microservices.CohortExtractor; + +public class QueryToExecuteTests : Tests.Common.UnitTests { - public class QueryToExecuteTests : Tests.Common.UnitTests + [Test] + public void Test_QueryToExecute_BasicSQL() { - [Test] - public void Test_QueryToExecute_BasicSQL() - { - var cata = WhenIHaveA(); + var cata = WhenIHaveA(); - var ex = Assert.Throws(() => new QueryToExecuteColumnSet(cata, null, null, null, null)); - Assert.That(ex!.Message, 
Does.Match(@"Parameter.+filePathColumn")); - } + var ex = Assert.Throws(() => new QueryToExecuteColumnSet(cata, null, null, null, null)); + Assert.That(ex!.Message, Does.Match(@"Parameter.+filePathColumn")); } } diff --git a/tests/SmiServices.UnitTests/Microservices/CohortExtractor/StudySeriesOriginalFilenameProjectPathResolverTests.cs b/tests/SmiServices.UnitTests/Microservices/CohortExtractor/StudySeriesOriginalFilenameProjectPathResolverTests.cs index 400a675db..b6efccbe1 100644 --- a/tests/SmiServices.UnitTests/Microservices/CohortExtractor/StudySeriesOriginalFilenameProjectPathResolverTests.cs +++ b/tests/SmiServices.UnitTests/Microservices/CohortExtractor/StudySeriesOriginalFilenameProjectPathResolverTests.cs @@ -7,121 +7,120 @@ using System.IO.Abstractions.TestingHelpers; -namespace SmiServices.UnitTests.Microservices.CohortExtractor +namespace SmiServices.UnitTests.Microservices.CohortExtractor; + +public class StudySeriesOriginalFilenameProjectPathResolverTests { - public class StudySeriesOriginalFilenameProjectPathResolverTests + #region Fixture Methods + + private ExtractionRequestMessage _requestMessage = new() { IsIdentifiableExtraction = false }; + private IFileSystem _fileSystem = new MockFileSystem(); + + [OneTimeSetUp] + public void OneTimeSetUp() { - #region Fixture Methods + } - private ExtractionRequestMessage _requestMessage = new() { IsIdentifiableExtraction = false }; - private IFileSystem _fileSystem = new MockFileSystem(); + [OneTimeTearDown] + public void OneTimeTearDown() { } - [OneTimeSetUp] - public void OneTimeSetUp() - { - } + #endregion - [OneTimeTearDown] - public void OneTimeTearDown() { } + #region Test Methods - #endregion + [SetUp] + public void SetUp() + { + _fileSystem = new MockFileSystem(); + } - #region Test Methods + [TearDown] + public void TearDown() { } - [SetUp] - public void SetUp() - { - _fileSystem = new MockFileSystem(); - } + #endregion - [TearDown] - public void TearDown() { } + #region Tests - #endregion + 
[Test] + public void GetOutputPath_Basic() + { + // Arrange + + var expectedPath = _fileSystem.Path.Combine("study", "series", "foo-an.dcm"); + var resolver = new StudySeriesOriginalFilenameProjectPathResolver(_fileSystem); + var result = new QueryToExecuteResult( + "foo.dcm", + "study", + "series", + "sop", + rejection: false, + rejectionReason: null + ); + var message = new ExtractionRequestMessage(); + + // Act + + var actualPath = resolver.GetOutputPath(result, message); + + // Assert + Assert.That(actualPath, Is.EqualTo(expectedPath)); + } - #region Tests + [TestCase("file-an.dcm", "file.dcm")] + [TestCase("file-an.dcm", "file.dicom")] + [TestCase("file-an.dcm", "file")] + [TestCase("file.foo-an.dcm", "file.foo")] + public void GetOutputPath_Extensions(string expectedOutput, string inputFile) + { + // Arrange + + var expectedPath = _fileSystem.Path.Combine("study", "series", expectedOutput); + var resolver = new StudySeriesOriginalFilenameProjectPathResolver(_fileSystem); + var result = new QueryToExecuteResult( + inputFile, + "study", + "series", + "sop", + rejection: false, + rejectionReason: null + ); + var message = new ExtractionRequestMessage(); + + // Act + + var actualPath = resolver.GetOutputPath(result, message); + + // Assert + Assert.That(actualPath, Is.EqualTo(expectedPath)); + } - [Test] - public void GetOutputPath_Basic() - { - // Arrange - - var expectedPath = _fileSystem.Path.Combine("study", "series", "foo-an.dcm"); - var resolver = new StudySeriesOriginalFilenameProjectPathResolver(_fileSystem); - var result = new QueryToExecuteResult( - "foo.dcm", - "study", - "series", - "sop", - rejection: false, - rejectionReason: null - ); - var message = new ExtractionRequestMessage(); - - // Act - - var actualPath = resolver.GetOutputPath(result, message); - - // Assert - Assert.That(actualPath, Is.EqualTo(expectedPath)); - } - - [TestCase("file-an.dcm", "file.dcm")] - [TestCase("file-an.dcm", "file.dicom")] - [TestCase("file-an.dcm", "file")] - 
[TestCase("file.foo-an.dcm", "file.foo")] - public void GetOutputPath_Extensions(string expectedOutput, string inputFile) - { - // Arrange - - var expectedPath = _fileSystem.Path.Combine("study", "series", expectedOutput); - var resolver = new StudySeriesOriginalFilenameProjectPathResolver(_fileSystem); - var result = new QueryToExecuteResult( - inputFile, - "study", - "series", - "sop", - rejection: false, - rejectionReason: null - ); - var message = new ExtractionRequestMessage(); - - // Act - - var actualPath = resolver.GetOutputPath(result, message); - - // Assert - Assert.That(actualPath, Is.EqualTo(expectedPath)); - } - - [Test] - public void GetOutputPath_IdentExtraction() + [Test] + public void GetOutputPath_IdentExtraction() + { + // Arrange + + var expectedPath = _fileSystem.Path.Combine("study", "series", "foo.dcm"); + var resolver = new StudySeriesOriginalFilenameProjectPathResolver(_fileSystem); + var result = new QueryToExecuteResult( + "foo.dcm", + "study", + "series", + "sop", + rejection: false, + rejectionReason: null + ); + var message = new ExtractionRequestMessage() { - // Arrange - - var expectedPath = _fileSystem.Path.Combine("study", "series", "foo.dcm"); - var resolver = new StudySeriesOriginalFilenameProjectPathResolver(_fileSystem); - var result = new QueryToExecuteResult( - "foo.dcm", - "study", - "series", - "sop", - rejection: false, - rejectionReason: null - ); - var message = new ExtractionRequestMessage() - { - IsIdentifiableExtraction = true, - }; - - // Act - - var actualPath = resolver.GetOutputPath(result, message); - - // Assert - Assert.That(actualPath, Is.EqualTo(expectedPath)); - } - - #endregion + IsIdentifiableExtraction = true, + }; + + // Act + + var actualPath = resolver.GetOutputPath(result, message); + + // Assert + Assert.That(actualPath, Is.EqualTo(expectedPath)); } + + #endregion } diff --git a/tests/SmiServices.UnitTests/Microservices/CohortExtractor/StudySeriesSOPProjectPathResolverTests.cs 
b/tests/SmiServices.UnitTests/Microservices/CohortExtractor/StudySeriesSOPProjectPathResolverTests.cs index 5338b1409..d4cd082be 100644 --- a/tests/SmiServices.UnitTests/Microservices/CohortExtractor/StudySeriesSOPProjectPathResolverTests.cs +++ b/tests/SmiServices.UnitTests/Microservices/CohortExtractor/StudySeriesSOPProjectPathResolverTests.cs @@ -7,126 +7,125 @@ using System.IO.Abstractions.TestingHelpers; -namespace SmiServices.UnitTests.Microservices.CohortExtractor +namespace SmiServices.UnitTests.Microservices.CohortExtractor; + +public class StudySeriesSOPProjectPathResolverTests { - public class StudySeriesSOPProjectPathResolverTests - { - #region Fixture Methods + #region Fixture Methods - private ExtractionRequestMessage _requestMessage = null!; - private IFileSystem _fileSystem = new MockFileSystem(); + private ExtractionRequestMessage _requestMessage = null!; + private IFileSystem _fileSystem = new MockFileSystem(); - [OneTimeSetUp] - public void OneTimeSetUp() + [OneTimeSetUp] + public void OneTimeSetUp() + { + + _requestMessage = new ExtractionRequestMessage { + IsIdentifiableExtraction = false, + }; + } - _requestMessage = new ExtractionRequestMessage - { - IsIdentifiableExtraction = false, - }; - } + [OneTimeTearDown] + public void OneTimeTearDown() { } - [OneTimeTearDown] - public void OneTimeTearDown() { } + #endregion - #endregion + #region Test Methods - #region Test Methods + [SetUp] + public void SetUp() + { + _fileSystem = new MockFileSystem(); + } - [SetUp] - public void SetUp() - { - _fileSystem = new MockFileSystem(); - } + [TearDown] + public void TearDown() { } - [TearDown] - public void TearDown() { } + #endregion - #endregion + #region Tests + + [Test] + public void GetOutputPath_Basic() + { + // Arrange + + var expectedPath = _fileSystem.Path.Combine("study", "series", "sop-an.dcm"); + var resolver = new StudySeriesSOPProjectPathResolver(_fileSystem); + var result = new QueryToExecuteResult( + "foo.dcm", + "study", + "series", + 
"sop", + rejection: false, + rejectionReason: null + ); + var message = new ExtractionRequestMessage(); + + // Act + + var actualPath = resolver.GetOutputPath(result, message); + + // Assert + Assert.That(actualPath, Is.EqualTo(expectedPath)); + } - #region Tests + [TestCase("file.dcm")] + [TestCase("file.dicom")] + [TestCase("file")] + [TestCase("file.foo")] + public void GetOutputPath_Extensions(string inputFile) + { + // Arrange + + var expectedPath = _fileSystem.Path.Combine("study", "series", "sop-an.dcm"); + var resolver = new StudySeriesSOPProjectPathResolver(_fileSystem); + var result = new QueryToExecuteResult( + inputFile, + "study", + "series", + "sop", + rejection: false, + rejectionReason: null + ); + var message = new ExtractionRequestMessage(); + + // Act + + var actualPath = resolver.GetOutputPath(result, message); + + // Assert + Assert.That(actualPath, Is.EqualTo(expectedPath)); + } - [Test] - public void GetOutputPath_Basic() - { - // Arrange - - var expectedPath = _fileSystem.Path.Combine("study", "series", "sop-an.dcm"); - var resolver = new StudySeriesSOPProjectPathResolver(_fileSystem); - var result = new QueryToExecuteResult( - "foo.dcm", - "study", - "series", - "sop", - rejection: false, - rejectionReason: null - ); - var message = new ExtractionRequestMessage(); - - // Act - - var actualPath = resolver.GetOutputPath(result, message); - - // Assert - Assert.That(actualPath, Is.EqualTo(expectedPath)); - } - - [TestCase("file.dcm")] - [TestCase("file.dicom")] - [TestCase("file")] - [TestCase("file.foo")] - public void GetOutputPath_Extensions(string inputFile) - { - // Arrange - - var expectedPath = _fileSystem.Path.Combine("study", "series", "sop-an.dcm"); - var resolver = new StudySeriesSOPProjectPathResolver(_fileSystem); - var result = new QueryToExecuteResult( - inputFile, - "study", - "series", - "sop", - rejection: false, - rejectionReason: null - ); - var message = new ExtractionRequestMessage(); - - // Act - - var actualPath = 
resolver.GetOutputPath(result, message); - - // Assert - Assert.That(actualPath, Is.EqualTo(expectedPath)); - } - - [Test] - public void GetOutputPath_IdentExtraction() + [Test] + public void GetOutputPath_IdentExtraction() + { + // Arrange + + var expectedPath = _fileSystem.Path.Combine("study", "series", "sop.dcm"); + var resolver = new StudySeriesSOPProjectPathResolver(_fileSystem); + var result = new QueryToExecuteResult( + "foo.dcm", + "study", + "series", + "sop", + rejection: false, + rejectionReason: null + ); + var message = new ExtractionRequestMessage() { - // Arrange - - var expectedPath = _fileSystem.Path.Combine("study", "series", "sop.dcm"); - var resolver = new StudySeriesSOPProjectPathResolver(_fileSystem); - var result = new QueryToExecuteResult( - "foo.dcm", - "study", - "series", - "sop", - rejection: false, - rejectionReason: null - ); - var message = new ExtractionRequestMessage() - { - IsIdentifiableExtraction = true, - }; - - // Act - - var actualPath = resolver.GetOutputPath(result, message); - - // Assert - Assert.That(actualPath, Is.EqualTo(expectedPath)); - } - - #endregion + IsIdentifiableExtraction = true, + }; + + // Act + + var actualPath = resolver.GetOutputPath(result, message); + + // Assert + Assert.That(actualPath, Is.EqualTo(expectedPath)); } + + #endregion } diff --git a/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/CompletedExtractJobInfoTest.cs b/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/CompletedExtractJobInfoTest.cs index 08f172828..3dd548ef2 100644 --- a/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/CompletedExtractJobInfoTest.cs +++ b/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/CompletedExtractJobInfoTest.cs @@ -3,104 +3,103 @@ using SmiServices.UnitTests.Common; using System; -namespace 
SmiServices.UnitTests.Microservices.CohortPackager.Execution.ExtractJobStorage +namespace SmiServices.UnitTests.Microservices.CohortPackager.Execution.ExtractJobStorage; + +public class CompletedExtractJobInfoTest { - public class CompletedExtractJobInfoTest + private readonly TestDateTimeProvider _dateTimeProvider = new(); + + #region Fixture Methods + + [OneTimeSetUp] + public void OneTimeSetUp() { - private readonly TestDateTimeProvider _dateTimeProvider = new(); - - #region Fixture Methods - - [OneTimeSetUp] - public void OneTimeSetUp() - { - } - - [OneTimeTearDown] - public void OneTimeTearDown() { } - - #endregion - - #region Test Methods - - [SetUp] - public void SetUp() { } - - [TearDown] - public void TearDown() { } - - #endregion - - #region Tests - - [Test] - public void Equality() - { - var guid = Guid.NewGuid(); - var info1 = new CompletedExtractJobInfo( - guid, - _dateTimeProvider.UtcNow(), - _dateTimeProvider.UtcNow() + TimeSpan.FromHours(1), - "1234", - "test/directory", - "KeyTag", - 123, - "testUser", - "MR", - isIdentifiableExtraction: true, - isNoFilterExtraction: true - ); - var info2 = new CompletedExtractJobInfo( - guid, - _dateTimeProvider.UtcNow(), - _dateTimeProvider.UtcNow() + TimeSpan.FromHours(1), - "1234", - "test/directory", - "KeyTag", - 123, - "testUser", - "MR", - isIdentifiableExtraction: true, - isNoFilterExtraction: true - ); - - Assert.That(info2, Is.EqualTo(info1)); - } - - [Test] - public void Test_GetHashCode() - { - var guid = Guid.NewGuid(); - var info1 = new CompletedExtractJobInfo( - guid, - _dateTimeProvider.UtcNow(), - _dateTimeProvider.UtcNow() + TimeSpan.FromHours(1), - "1234", - "test/directory", - "KeyTag", - 123, - "testUser", - "MR", - isIdentifiableExtraction: true, - isNoFilterExtraction: true - ); - var info2 = new CompletedExtractJobInfo( - guid, - _dateTimeProvider.UtcNow(), - _dateTimeProvider.UtcNow() + TimeSpan.FromHours(1), - "1234", - "test/directory", - "KeyTag", - 123, - "testUser", - "MR", - 
isIdentifiableExtraction: true, - isNoFilterExtraction: true - ); - - Assert.That(info2.GetHashCode(), Is.EqualTo(info1.GetHashCode())); - } - - #endregion } + + [OneTimeTearDown] + public void OneTimeTearDown() { } + + #endregion + + #region Test Methods + + [SetUp] + public void SetUp() { } + + [TearDown] + public void TearDown() { } + + #endregion + + #region Tests + + [Test] + public void Equality() + { + var guid = Guid.NewGuid(); + var info1 = new CompletedExtractJobInfo( + guid, + _dateTimeProvider.UtcNow(), + _dateTimeProvider.UtcNow() + TimeSpan.FromHours(1), + "1234", + "test/directory", + "KeyTag", + 123, + "testUser", + "MR", + isIdentifiableExtraction: true, + isNoFilterExtraction: true + ); + var info2 = new CompletedExtractJobInfo( + guid, + _dateTimeProvider.UtcNow(), + _dateTimeProvider.UtcNow() + TimeSpan.FromHours(1), + "1234", + "test/directory", + "KeyTag", + 123, + "testUser", + "MR", + isIdentifiableExtraction: true, + isNoFilterExtraction: true + ); + + Assert.That(info2, Is.EqualTo(info1)); + } + + [Test] + public void Test_GetHashCode() + { + var guid = Guid.NewGuid(); + var info1 = new CompletedExtractJobInfo( + guid, + _dateTimeProvider.UtcNow(), + _dateTimeProvider.UtcNow() + TimeSpan.FromHours(1), + "1234", + "test/directory", + "KeyTag", + 123, + "testUser", + "MR", + isIdentifiableExtraction: true, + isNoFilterExtraction: true + ); + var info2 = new CompletedExtractJobInfo( + guid, + _dateTimeProvider.UtcNow(), + _dateTimeProvider.UtcNow() + TimeSpan.FromHours(1), + "1234", + "test/directory", + "KeyTag", + 123, + "testUser", + "MR", + isIdentifiableExtraction: true, + isNoFilterExtraction: true + ); + + Assert.That(info2.GetHashCode(), Is.EqualTo(info1.GetHashCode())); + } + + #endregion } diff --git a/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/ExtractJobInfoTest.cs b/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/ExtractJobInfoTest.cs index 
ec362b572..3406fcac7 100644 --- a/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/ExtractJobInfoTest.cs +++ b/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/ExtractJobInfoTest.cs @@ -3,350 +3,349 @@ using SmiServices.UnitTests.Common; using System; -namespace SmiServices.UnitTests.Microservices.CohortPackager.Execution.ExtractJobStorage +namespace SmiServices.UnitTests.Microservices.CohortPackager.Execution.ExtractJobStorage; + +[TestFixture] +public class ExtractJobInfoTest { - [TestFixture] - public class ExtractJobInfoTest + private readonly TestDateTimeProvider _dateTimeProvider = new(); + + #region Fixture Methods + + [OneTimeSetUp] + public void OneTimeSetUp() { - private readonly TestDateTimeProvider _dateTimeProvider = new(); + } - #region Fixture Methods + [OneTimeTearDown] + public void OneTimeTearDown() { } - [OneTimeSetUp] - public void OneTimeSetUp() - { - } + #endregion - [OneTimeTearDown] - public void OneTimeTearDown() { } + #region Test Methods - #endregion + [SetUp] + public void SetUp() { } - #region Test Methods + [TearDown] + public void TearDown() { } - [SetUp] - public void SetUp() { } + #endregion - [TearDown] - public void TearDown() { } + #region Tests - #endregion + [TestCase("proj/foo/extract-name")] + [TestCase("proj\\foo\\extract-name")] + public void Test_ExtractJobInfo_ExtractionName(string extractionDir) + { + var info = new ExtractJobInfo( + Guid.NewGuid(), + DateTime.UtcNow, + "1234", + extractionDir, + "KeyTag", + 123, + "testUser", + "MR", + ExtractJobStatus.WaitingForCollectionInfo, + isIdentifiableExtraction: false, + isNoFilterExtraction: false + ); + + Assert.That(info.ExtractionName(), Is.EqualTo("extract-name")); + } - #region Tests + [TestCase("proj/foo/extract-name", "proj/foo")] + [TestCase("proj\\foo\\extract-name", "proj\\foo")] + public void Test_ExtractJobInfo_ProjectExtractionDir(string extractionDir, string expected) + { + var info = new 
ExtractJobInfo( + Guid.NewGuid(), + DateTime.UtcNow, + "1234", + extractionDir, + "KeyTag", + 123, + "testUser", + "MR", + ExtractJobStatus.WaitingForCollectionInfo, + isIdentifiableExtraction: false, + isNoFilterExtraction: false + ); + + Assert.That(info.ProjectExtractionDir(), Is.EqualTo(expected)); + } - [TestCase("proj/foo/extract-name")] - [TestCase("proj\\foo\\extract-name")] - public void Test_ExtractJobInfo_ExtractionName(string extractionDir) - { - var info = new ExtractJobInfo( - Guid.NewGuid(), - DateTime.UtcNow, - "1234", - extractionDir, - "KeyTag", - 123, - "testUser", - "MR", - ExtractJobStatus.WaitingForCollectionInfo, - isIdentifiableExtraction: false, - isNoFilterExtraction: false - ); - Assert.That(info.ExtractionName(), Is.EqualTo("extract-name")); - } - - [TestCase("proj/foo/extract-name", "proj/foo")] - [TestCase("proj\\foo\\extract-name", "proj\\foo")] - public void Test_ExtractJobInfo_ProjectExtractionDir(string extractionDir, string expected) - { - var info = new ExtractJobInfo( - Guid.NewGuid(), - DateTime.UtcNow, - "1234", - extractionDir, - "KeyTag", - 123, - "testUser", - "MR", - ExtractJobStatus.WaitingForCollectionInfo, - isIdentifiableExtraction: false, - isNoFilterExtraction: false + [Test] + public void TestExtractJobInfo_Equality() + { + Guid guid = Guid.NewGuid(); + var info1 = new ExtractJobInfo( + guid, + _dateTimeProvider.UtcNow(), + "1234", + "test/directory", + "KeyTag", + 123, + "testUser", + "MR", + ExtractJobStatus.WaitingForCollectionInfo, + isIdentifiableExtraction: true, + isNoFilterExtraction: true ); - - Assert.That(info.ProjectExtractionDir(), Is.EqualTo(expected)); - } - - - [Test] - public void TestExtractJobInfo_Equality() - { - Guid guid = Guid.NewGuid(); - var info1 = new ExtractJobInfo( - guid, - _dateTimeProvider.UtcNow(), - "1234", - "test/directory", - "KeyTag", - 123, - "testUser", - "MR", - ExtractJobStatus.WaitingForCollectionInfo, - isIdentifiableExtraction: true, - isNoFilterExtraction: true - ); - 
var info2 = new ExtractJobInfo( - guid, - _dateTimeProvider.UtcNow(), - "1234", - "test/directory", - "KeyTag", - 123, - "testUser", - "MR", - ExtractJobStatus.WaitingForCollectionInfo, - isIdentifiableExtraction: true, - isNoFilterExtraction: true - ); - - Assert.That(info2, Is.EqualTo(info1)); - } - - [Test] - public void TestExtractJobInfo_GetHashCode() - { - Guid guid = Guid.NewGuid(); - var info1 = new ExtractJobInfo( - guid, - _dateTimeProvider.UtcNow(), - "1234", - "test/directory", - "KeyTag", - 123, - "testUser", - "MR", - ExtractJobStatus.WaitingForCollectionInfo, - isIdentifiableExtraction: true, - isNoFilterExtraction: true - ); - var info2 = new ExtractJobInfo( - guid, - _dateTimeProvider.UtcNow(), - "1234", - "test/directory", - "KeyTag", - 123, - "testUser", - "MR", - ExtractJobStatus.WaitingForCollectionInfo, - isIdentifiableExtraction: true, - isNoFilterExtraction: true - ); - - Assert.That(info2.GetHashCode(), Is.EqualTo(info1.GetHashCode())); - } - - [Test] - public void Constructor_DefaultExtractionJobIdentifier_ThrowsException() - { - // Arrange - var jobId = Guid.Empty; - - // Act - - ExtractJobInfo call() => new( - jobId, - _dateTimeProvider.UtcNow(), - "1234", - "test/directory", - "KeyTag", - 123, - "testUser", - "MR", - ExtractJobStatus.WaitingForCollectionInfo, - isIdentifiableExtraction: true, - isNoFilterExtraction: true + var info2 = new ExtractJobInfo( + guid, + _dateTimeProvider.UtcNow(), + "1234", + "test/directory", + "KeyTag", + 123, + "testUser", + "MR", + ExtractJobStatus.WaitingForCollectionInfo, + isIdentifiableExtraction: true, + isNoFilterExtraction: true ); - // Assert - - var exc = Assert.Throws(() => call()); - Assert.That(exc!.Message, Is.EqualTo("Must not be the default Guid (Parameter 'extractionJobIdentifier')")); - } - - [Test] - public void Constructor_InvalidModality_ThrowsException() - { - // Arrange - - var modality = " "; - - // Act - - ExtractJobInfo call() => new( - Guid.NewGuid(), - 
_dateTimeProvider.UtcNow(), - "1234", - "test/directory", - "KeyTag", - 123, - "testUser", - modality, - ExtractJobStatus.WaitingForCollectionInfo, - isIdentifiableExtraction: true, - isNoFilterExtraction: true - ); + Assert.That(info2, Is.EqualTo(info1)); + } - // Assert - - var exc = Assert.Throws(() => call()); - Assert.That(exc!.Message, Is.EqualTo("Must not be whitespace if passed (Parameter 'extractionModality')")); - } - - [Test] - public void Constructor_DefaultJobSubmittedAt_ThrowsException() - { - // Arrange - - var jobSubmittedAt = default(DateTime); - - // Act - - ExtractJobInfo call() => new( - Guid.NewGuid(), - jobSubmittedAt, - "1234", - "test/directory", - "KeyTag", - 123, - "testUser", - "CT", - ExtractJobStatus.WaitingForCollectionInfo, - isIdentifiableExtraction: true, - isNoFilterExtraction: true + [Test] + public void TestExtractJobInfo_GetHashCode() + { + Guid guid = Guid.NewGuid(); + var info1 = new ExtractJobInfo( + guid, + _dateTimeProvider.UtcNow(), + "1234", + "test/directory", + "KeyTag", + 123, + "testUser", + "MR", + ExtractJobStatus.WaitingForCollectionInfo, + isIdentifiableExtraction: true, + isNoFilterExtraction: true ); - - // Assert - - var exc = Assert.Throws(() => call()); - Assert.That(exc!.Message, Is.EqualTo("Must not be the default DateTime (Parameter 'jobSubmittedAt')")); - } - - [Test] - public void Constructor_InvalidProjectNumber_ThrowsException() - { - // Arrange - - var projectNumber = " "; - - // Act - - ExtractJobInfo call() => new( - Guid.NewGuid(), - _dateTimeProvider.UtcNow(), - projectNumber, - "test/directory", - "KeyTag", - 123, - "testUser", - "CT", - ExtractJobStatus.WaitingForCollectionInfo, - isIdentifiableExtraction: true, - isNoFilterExtraction: true + var info2 = new ExtractJobInfo( + guid, + _dateTimeProvider.UtcNow(), + "1234", + "test/directory", + "KeyTag", + 123, + "testUser", + "MR", + ExtractJobStatus.WaitingForCollectionInfo, + isIdentifiableExtraction: true, + isNoFilterExtraction: true ); - // 
Assert - - var exc = Assert.Throws(() => call()); - Assert.That(exc!.Message, Is.EqualTo("Must not be null or whitespace (Parameter 'projectNumber')")); - } - - [Test] - public void Constructor_InvalidExtractionDirectory_ThrowsException() - { - // Arrange - - var extractionDirectory = " "; - - // Act - - ExtractJobInfo call() => new( - Guid.NewGuid(), - _dateTimeProvider.UtcNow(), - "1234", - extractionDirectory, - "KeyTag", - 123, - "testUser", - "CT", - ExtractJobStatus.WaitingForCollectionInfo, - isIdentifiableExtraction: true, - isNoFilterExtraction: true - ); + Assert.That(info2.GetHashCode(), Is.EqualTo(info1.GetHashCode())); + } - // Assert - - var exc = Assert.Throws(() => call()); - Assert.That(exc!.Message, Is.EqualTo("Must not be null or whitespace (Parameter 'extractionDirectory')")); - } - - [Test] - public void Constructor_InvalidKeyTag_ThrowsException() - { - // Arrange - - var keyTag = " "; - - // Act - - ExtractJobInfo call() => new( - Guid.NewGuid(), - _dateTimeProvider.UtcNow(), - "1234", - "test/directory", - keyTag, - 123, - "testUser", - "CT", - ExtractJobStatus.WaitingForCollectionInfo, - isIdentifiableExtraction: true, - isNoFilterExtraction: true - ); + [Test] + public void Constructor_DefaultExtractionJobIdentifier_ThrowsException() + { + // Arrange + var jobId = Guid.Empty; + + // Act + + ExtractJobInfo call() => new( + jobId, + _dateTimeProvider.UtcNow(), + "1234", + "test/directory", + "KeyTag", + 123, + "testUser", + "MR", + ExtractJobStatus.WaitingForCollectionInfo, + isIdentifiableExtraction: true, + isNoFilterExtraction: true + ); + + // Assert + + var exc = Assert.Throws(() => call()); + Assert.That(exc!.Message, Is.EqualTo("Must not be the default Guid (Parameter 'extractionJobIdentifier')")); + } - // Assert - - var exc = Assert.Throws(() => call()); - Assert.That(exc!.Message, Is.EqualTo("Must not be null or whitespace (Parameter 'keyTag')")); - } - - [Test] - public void Constructor_InvalidKeyValue_ThrowsException() - { - // 
Arrange - - uint keyValue = 0; - - // Act - - ExtractJobInfo call() => new( - Guid.NewGuid(), - _dateTimeProvider.UtcNow(), - "1234", - "test/directory", - "KeyTag", - keyValue, - "testUser", - "CT", - ExtractJobStatus.WaitingForCollectionInfo, - isIdentifiableExtraction: true, - isNoFilterExtraction: true - ); + [Test] + public void Constructor_InvalidModality_ThrowsException() + { + // Arrange + + var modality = " "; + + // Act + + ExtractJobInfo call() => new( + Guid.NewGuid(), + _dateTimeProvider.UtcNow(), + "1234", + "test/directory", + "KeyTag", + 123, + "testUser", + modality, + ExtractJobStatus.WaitingForCollectionInfo, + isIdentifiableExtraction: true, + isNoFilterExtraction: true + ); + + // Assert + + var exc = Assert.Throws(() => call()); + Assert.That(exc!.Message, Is.EqualTo("Must not be whitespace if passed (Parameter 'extractionModality')")); + } - // Assert + [Test] + public void Constructor_DefaultJobSubmittedAt_ThrowsException() + { + // Arrange + + var jobSubmittedAt = default(DateTime); + + // Act + + ExtractJobInfo call() => new( + Guid.NewGuid(), + jobSubmittedAt, + "1234", + "test/directory", + "KeyTag", + 123, + "testUser", + "CT", + ExtractJobStatus.WaitingForCollectionInfo, + isIdentifiableExtraction: true, + isNoFilterExtraction: true + ); + + // Assert + + var exc = Assert.Throws(() => call()); + Assert.That(exc!.Message, Is.EqualTo("Must not be the default DateTime (Parameter 'jobSubmittedAt')")); + } - var exc = Assert.Throws(() => call()); - Assert.That(exc!.Message, Is.EqualTo("Must not be zero (Parameter 'keyValueCount')")); - } + [Test] + public void Constructor_InvalidProjectNumber_ThrowsException() + { + // Arrange + + var projectNumber = " "; + + // Act + + ExtractJobInfo call() => new( + Guid.NewGuid(), + _dateTimeProvider.UtcNow(), + projectNumber, + "test/directory", + "KeyTag", + 123, + "testUser", + "CT", + ExtractJobStatus.WaitingForCollectionInfo, + isIdentifiableExtraction: true, + isNoFilterExtraction: true + ); + + // 
Assert + + var exc = Assert.Throws(() => call()); + Assert.That(exc!.Message, Is.EqualTo("Must not be null or whitespace (Parameter 'projectNumber')")); + } - #endregion + [Test] + public void Constructor_InvalidExtractionDirectory_ThrowsException() + { + // Arrange + + var extractionDirectory = " "; + + // Act + + ExtractJobInfo call() => new( + Guid.NewGuid(), + _dateTimeProvider.UtcNow(), + "1234", + extractionDirectory, + "KeyTag", + 123, + "testUser", + "CT", + ExtractJobStatus.WaitingForCollectionInfo, + isIdentifiableExtraction: true, + isNoFilterExtraction: true + ); + + // Assert + + var exc = Assert.Throws(() => call()); + Assert.That(exc!.Message, Is.EqualTo("Must not be null or whitespace (Parameter 'extractionDirectory')")); + } + + [Test] + public void Constructor_InvalidKeyTag_ThrowsException() + { + // Arrange + + var keyTag = " "; + + // Act + + ExtractJobInfo call() => new( + Guid.NewGuid(), + _dateTimeProvider.UtcNow(), + "1234", + "test/directory", + keyTag, + 123, + "testUser", + "CT", + ExtractJobStatus.WaitingForCollectionInfo, + isIdentifiableExtraction: true, + isNoFilterExtraction: true + ); + + // Assert + + var exc = Assert.Throws(() => call()); + Assert.That(exc!.Message, Is.EqualTo("Must not be null or whitespace (Parameter 'keyTag')")); } + + [Test] + public void Constructor_InvalidKeyValue_ThrowsException() + { + // Arrange + + uint keyValue = 0; + + // Act + + ExtractJobInfo call() => new( + Guid.NewGuid(), + _dateTimeProvider.UtcNow(), + "1234", + "test/directory", + "KeyTag", + keyValue, + "testUser", + "CT", + ExtractJobStatus.WaitingForCollectionInfo, + isIdentifiableExtraction: true, + isNoFilterExtraction: true + ); + + // Assert + + var exc = Assert.Throws(() => call()); + Assert.That(exc!.Message, Is.EqualTo("Must not be zero (Parameter 'keyValueCount')")); + } + + #endregion } diff --git a/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/ExtractJobStoreTest.cs 
b/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/ExtractJobStoreTest.cs index 33c50da44..2222092d9 100644 --- a/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/ExtractJobStoreTest.cs +++ b/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/ExtractJobStoreTest.cs @@ -8,141 +8,140 @@ using System.Collections.Generic; -namespace SmiServices.UnitTests.Microservices.CohortPackager.Execution.ExtractJobStorage +namespace SmiServices.UnitTests.Microservices.CohortPackager.Execution.ExtractJobStorage; + + +[TestFixture] +public class ExtractJobStoreTest { + #region Fixture Methods - [TestFixture] - public class ExtractJobStoreTest + [OneTimeSetUp] + public void OneTimeSetUp() { - #region Fixture Methods + } - [OneTimeSetUp] - public void OneTimeSetUp() - { - } + [OneTimeTearDown] + public void OneTimeTearDown() { } - [OneTimeTearDown] - public void OneTimeTearDown() { } + [SetUp] + public void SetUp() { } - [SetUp] - public void SetUp() { } + [TearDown] + public void TearDown() { } - [TearDown] - public void TearDown() { } + private class TestExtractJobStore : ExtractJobStore + { + public override ConcurrentQueue> ProcessedVerificationMessages => throw new NotImplementedException(); + + protected override void PersistMessageToStoreImpl(ExtractionRequestInfoMessage message, IMessageHeader header) { } + protected override void PersistMessageToStoreImpl(ExtractFileCollectionInfoMessage collectionInfoMessage, IMessageHeader header) => throw new NotImplementedException(); + protected override void PersistMessageToStoreImpl(ExtractedFileStatusMessage message, IMessageHeader header) { } + protected override void PersistMessageToStoreImpl(ExtractedFileVerificationMessage message, IMessageHeader header) { } + protected override List GetReadyJobsImpl(Guid specificJobId = new Guid()) => throw new NotImplementedException(); + protected override void CompleteJobImpl(Guid jobId) { 
} + protected override void MarkJobFailedImpl(Guid jobId, Exception e) { } + protected override CompletedExtractJobInfo GetCompletedJobInfoImpl(Guid jobId) => throw new NotImplementedException(); + protected override IEnumerable GetCompletedJobRejectionsImpl(Guid jobId) => throw new NotImplementedException(); + protected override IEnumerable GetCompletedJobAnonymisationFailuresImpl(Guid jobId) => throw new NotImplementedException(); + protected override IEnumerable GetCompletedJobVerificationFailuresImpl(Guid jobId) => throw new NotImplementedException(); + protected override IEnumerable GetCompletedJobMissingFileListImpl(Guid jobId) => ["missing"]; + protected override void AddToWriteQueueImpl(ExtractedFileVerificationMessage message, IMessageHeader header, ulong tag) => throw new NotImplementedException(); + public override void ProcessVerificationMessageQueue() => throw new NotImplementedException(); + } - private class TestExtractJobStore : ExtractJobStore - { - public override ConcurrentQueue> ProcessedVerificationMessages => throw new NotImplementedException(); - - protected override void PersistMessageToStoreImpl(ExtractionRequestInfoMessage message, IMessageHeader header) { } - protected override void PersistMessageToStoreImpl(ExtractFileCollectionInfoMessage collectionInfoMessage, IMessageHeader header) => throw new NotImplementedException(); - protected override void PersistMessageToStoreImpl(ExtractedFileStatusMessage message, IMessageHeader header) { } - protected override void PersistMessageToStoreImpl(ExtractedFileVerificationMessage message, IMessageHeader header) { } - protected override List GetReadyJobsImpl(Guid specificJobId = new Guid()) => throw new NotImplementedException(); - protected override void CompleteJobImpl(Guid jobId) { } - protected override void MarkJobFailedImpl(Guid jobId, Exception e) { } - protected override CompletedExtractJobInfo GetCompletedJobInfoImpl(Guid jobId) => throw new NotImplementedException(); - protected override 
IEnumerable GetCompletedJobRejectionsImpl(Guid jobId) => throw new NotImplementedException(); - protected override IEnumerable GetCompletedJobAnonymisationFailuresImpl(Guid jobId) => throw new NotImplementedException(); - protected override IEnumerable GetCompletedJobVerificationFailuresImpl(Guid jobId) => throw new NotImplementedException(); - protected override IEnumerable GetCompletedJobMissingFileListImpl(Guid jobId) => ["missing"]; - protected override void AddToWriteQueueImpl(ExtractedFileVerificationMessage message, IMessageHeader header, ulong tag) => throw new NotImplementedException(); - public override void ProcessVerificationMessageQueue() => throw new NotImplementedException(); - } - - #endregion - - #region Test Methods - - [Test] - public void TestPersistMessageToStore_ExtractFileStatusMessage() - { - var testExtractJobStore = new TestExtractJobStore(); - var message = new ExtractedFileStatusMessage(); - var header = new MessageHeader(); + #endregion + + #region Test Methods + + [Test] + public void TestPersistMessageToStore_ExtractFileStatusMessage() + { + var testExtractJobStore = new TestExtractJobStore(); + var message = new ExtractedFileStatusMessage(); + var header = new MessageHeader(); - message.Status = ExtractedFileStatus.None; - Assert.Throws(() => testExtractJobStore.PersistMessageToStore(message, header)); + message.Status = ExtractedFileStatus.None; + Assert.Throws(() => testExtractJobStore.PersistMessageToStore(message, header)); - message.Status = ExtractedFileStatus.Anonymised; - Assert.Throws(() => testExtractJobStore.PersistMessageToStore(message, header)); + message.Status = ExtractedFileStatus.Anonymised; + Assert.Throws(() => testExtractJobStore.PersistMessageToStore(message, header)); - message.Status = ExtractedFileStatus.ErrorWontRetry; - testExtractJobStore.PersistMessageToStore(message, header); + message.Status = ExtractedFileStatus.ErrorWontRetry; + testExtractJobStore.PersistMessageToStore(message, header); - } + } + + 
[Test] + public void TestPersistMessageToStore_IsIdentifiableMessage() + { + var testExtractJobStore = new TestExtractJobStore(); + var header = new MessageHeader(); - [Test] - public void TestPersistMessageToStore_IsIdentifiableMessage() + // Must have AnonymisedFileName + var message = new ExtractedFileVerificationMessage { - var testExtractJobStore = new TestExtractJobStore(); - var header = new MessageHeader(); - - // Must have AnonymisedFileName - var message = new ExtractedFileVerificationMessage - { - OutputFilePath = "" - }; - Assert.Throws(() => testExtractJobStore.PersistMessageToStore(message, header)); - - // Report shouldn't be an empty string or null - message = new ExtractedFileVerificationMessage - { - OutputFilePath = "anon.dcm", - Report = "" - }; - Assert.Throws(() => testExtractJobStore.PersistMessageToStore(message, header)); - - // Report needs to contain content if marked as IsIdentifiable - message = new ExtractedFileVerificationMessage - { - OutputFilePath = "anon.dcm", - Status = VerifiedFileStatus.IsIdentifiable, - Report = "[]" - }; - Assert.Throws(() => testExtractJobStore.PersistMessageToStore(message, header)); - // NOTE(rkm 2020-07-23) The actual report content is verified to be valid the message consumer, so don't need to re-check here - message.Report = "['foo': 'bar']"; - testExtractJobStore.PersistMessageToStore(message, header); - - // Report can be empty if not marked as IsIdentifiable - message = new ExtractedFileVerificationMessage - { - OutputFilePath = "anon.dcm", - Status = VerifiedFileStatus.NotIdentifiable, - Report = "[]" - }; - testExtractJobStore.PersistMessageToStore(message, header); - } - - [Test] - public void TestMarkJobCompleted() + OutputFilePath = "" + }; + Assert.Throws(() => testExtractJobStore.PersistMessageToStore(message, header)); + + // Report shouldn't be an empty string or null + message = new ExtractedFileVerificationMessage { - var store = new TestExtractJobStore(); + OutputFilePath = "anon.dcm", + 
Report = "" + }; + Assert.Throws(() => testExtractJobStore.PersistMessageToStore(message, header)); - Assert.Throws(() => store.MarkJobCompleted(Guid.Empty)); + // Report needs to contain content if marked as IsIdentifiable + message = new ExtractedFileVerificationMessage + { + OutputFilePath = "anon.dcm", + Status = VerifiedFileStatus.IsIdentifiable, + Report = "[]" + }; + Assert.Throws(() => testExtractJobStore.PersistMessageToStore(message, header)); + // NOTE(rkm 2020-07-23) The actual report content is verified to be valid the message consumer, so don't need to re-check here + message.Report = "['foo': 'bar']"; + testExtractJobStore.PersistMessageToStore(message, header); + + // Report can be empty if not marked as IsIdentifiable + message = new ExtractedFileVerificationMessage + { + OutputFilePath = "anon.dcm", + Status = VerifiedFileStatus.NotIdentifiable, + Report = "[]" + }; + testExtractJobStore.PersistMessageToStore(message, header); + } - store.MarkJobCompleted(Guid.NewGuid()); - } + [Test] + public void TestMarkJobCompleted() + { + var store = new TestExtractJobStore(); - [Test] - public void TestMarkJobFailed() - { - var store = new TestExtractJobStore(); + Assert.Throws(() => store.MarkJobCompleted(Guid.Empty)); - Assert.Throws(() => store.MarkJobFailed(Guid.Empty, new Exception())); + store.MarkJobCompleted(Guid.NewGuid()); + } - store.MarkJobFailed(Guid.NewGuid(), new Exception()); - } + [Test] + public void TestMarkJobFailed() + { + var store = new TestExtractJobStore(); - [Test] - public void Test_GetCompletedJobMissingFileList() - { - var store = new TestExtractJobStore(); - Assert.Throws(() => store.GetCompletedJobMissingFileList(default)); - Assert.That(store.GetCompletedJobMissingFileList(Guid.NewGuid()), Is.EqualTo(new[] { "missing" })); - } + Assert.Throws(() => store.MarkJobFailed(Guid.Empty, new Exception())); + + store.MarkJobFailed(Guid.NewGuid(), new Exception()); + } - #endregion + [Test] + public void 
Test_GetCompletedJobMissingFileList() + { + var store = new TestExtractJobStore(); + Assert.Throws(() => store.GetCompletedJobMissingFileList(default)); + Assert.That(store.GetCompletedJobMissingFileList(Guid.NewGuid()), Is.EqualTo(new[] { "missing" })); } + + #endregion } diff --git a/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/ExtractionIdentifierRejectionInfoTest.cs b/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/ExtractionIdentifierRejectionInfoTest.cs index b87948f67..2a83e59c0 100644 --- a/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/ExtractionIdentifierRejectionInfoTest.cs +++ b/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/ExtractionIdentifierRejectionInfoTest.cs @@ -5,61 +5,60 @@ using System.Collections.Generic; -namespace SmiServices.UnitTests.Microservices.CohortPackager.Execution.ExtractJobStorage +namespace SmiServices.UnitTests.Microservices.CohortPackager.Execution.ExtractJobStorage; + +public class ExtractionIdentifierRejectionInfoTest { - public class ExtractionIdentifierRejectionInfoTest + #region Fixture Methods + + [OneTimeSetUp] + public void OneTimeSetUp() { - #region Fixture Methods + } - [OneTimeSetUp] - public void OneTimeSetUp() - { - } + [OneTimeTearDown] + public void OneTimeTearDown() { } - [OneTimeTearDown] - public void OneTimeTearDown() { } + #endregion - #endregion + #region Test Methods - #region Test Methods + [SetUp] + public void SetUp() { } - [SetUp] - public void SetUp() { } + [TearDown] + public void TearDown() { } - [TearDown] - public void TearDown() { } + #endregion - #endregion + #region Tests - #region Tests + [Test] + public void Constructor_ThrowsArgumentException_OnInvalidArgs() + { + // Check keyValue arg + Assert.Throws(() => { var _ = new ExtractionIdentifierRejectionInfo(" ", new Dictionary { { "bar", 1 } }); }); + + // Check rejectionItems arg + 
Assert.Throws(() => { var _ = new ExtractionIdentifierRejectionInfo("foo", []); }); - [Test] - public void Constructor_ThrowsArgumentException_OnInvalidArgs() + // Check empty dict key + Assert.Throws(() => { var _ = new ExtractionIdentifierRejectionInfo("foo", new Dictionary { { " ", 1 } }); }); + + // Check entries with 0 count + var exc = Assert.Throws(() => { - // Check keyValue arg - Assert.Throws(() => { var _ = new ExtractionIdentifierRejectionInfo(" ", new Dictionary { { "bar", 1 } }); }); - - // Check rejectionItems arg - Assert.Throws(() => { var _ = new ExtractionIdentifierRejectionInfo("foo", []); }); - - // Check empty dict key - Assert.Throws(() => { var _ = new ExtractionIdentifierRejectionInfo("foo", new Dictionary { { " ", 1 } }); }); - - // Check entries with 0 count - var exc = Assert.Throws(() => - { - var _ = new ExtractionIdentifierRejectionInfo( - "foo", - new Dictionary - { - { "bar", 0 }, - { "baz", 0 }, - } - ); - }); - Assert.That(exc!.Message, Is.EqualTo("Dict contains key(s) with a zero count: bar,baz")); - } - - #endregion + var _ = new ExtractionIdentifierRejectionInfo( + "foo", + new Dictionary + { + { "bar", 0 }, + { "baz", 0 }, + } + ); + }); + Assert.That(exc!.Message, Is.EqualTo("Dict contains key(s) with a zero count: bar,baz")); } + + #endregion } diff --git a/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/FileAnonFailureInfoTest.cs b/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/FileAnonFailureInfoTest.cs index 621f2ea35..d4f42e6cc 100644 --- a/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/FileAnonFailureInfoTest.cs +++ b/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/FileAnonFailureInfoTest.cs @@ -4,41 +4,40 @@ using System; -namespace SmiServices.UnitTests.Microservices.CohortPackager.Execution.ExtractJobStorage -{ - public class FileAnonFailureInfoTest - { - #region 
Fixture Methods +namespace SmiServices.UnitTests.Microservices.CohortPackager.Execution.ExtractJobStorage; - [OneTimeSetUp] - public void OneTimeSetUp() - { - } +public class FileAnonFailureInfoTest +{ + #region Fixture Methods - [OneTimeTearDown] - public void OneTimeTearDown() { } + [OneTimeSetUp] + public void OneTimeSetUp() + { + } - #endregion + [OneTimeTearDown] + public void OneTimeTearDown() { } - #region Test Methods + #endregion - [SetUp] - public void SetUp() { } + #region Test Methods - [TearDown] - public void TearDown() { } + [SetUp] + public void SetUp() { } - #endregion + [TearDown] + public void TearDown() { } - #region Tests + #endregion - [TestCase(" ", "bar")] - [TestCase("foo", " ")] - public void Constructor_ThrowsArgumentException_OnInvalidArgs(string dicomFilePath, string reason) - { - Assert.Throws(() => { var _ = new FileAnonFailureInfo(dicomFilePath, reason); }); - } + #region Tests - #endregion + [TestCase(" ", "bar")] + [TestCase("foo", " ")] + public void Constructor_ThrowsArgumentException_OnInvalidArgs(string dicomFilePath, string reason) + { + Assert.Throws(() => { var _ = new FileAnonFailureInfo(dicomFilePath, reason); }); } + + #endregion } diff --git a/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/MongoDB/MongoExtractJobInfoExtensionsTest.cs b/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/MongoDB/MongoExtractJobInfoExtensionsTest.cs index 3f5315edc..04bbf0855 100644 --- a/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/MongoDB/MongoExtractJobInfoExtensionsTest.cs +++ b/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/MongoDB/MongoExtractJobInfoExtensionsTest.cs @@ -9,80 +9,79 @@ using System; -namespace SmiServices.UnitTests.Microservices.CohortPackager.Execution.ExtractJobStorage.MongoDB +namespace 
SmiServices.UnitTests.Microservices.CohortPackager.Execution.ExtractJobStorage.MongoDB; + +[TestFixture] +public class MongoExtractJobInfoExtensionsTest { - [TestFixture] - public class MongoExtractJobInfoExtensionsTest - { - private readonly DateTimeProvider _dateTimeProvider = new TestDateTimeProvider(); + private readonly DateTimeProvider _dateTimeProvider = new TestDateTimeProvider(); - private readonly MessageHeader _messageHeader = new() - { - Parents = [Guid.NewGuid(),], - }; + private readonly MessageHeader _messageHeader = new() + { + Parents = [Guid.NewGuid(),], + }; - #region Fixture Methods + #region Fixture Methods - [OneTimeSetUp] - public void OneTimeSetUp() - { - } + [OneTimeSetUp] + public void OneTimeSetUp() + { + } - [OneTimeTearDown] - public void OneTimeTearDown() { } + [OneTimeTearDown] + public void OneTimeTearDown() { } - #endregion + #endregion - #region Test Methods + #region Test Methods - [SetUp] - public void SetUp() { } + [SetUp] + public void SetUp() { } - [TearDown] - public void TearDown() { } + [TearDown] + public void TearDown() { } - #endregion + #endregion - #region Tests + #region Tests - [Test] - public void TestToExtractJobInfo() + [Test] + public void TestToExtractJobInfo() + { + Guid guid = Guid.NewGuid(); + var message = new ExtractionRequestInfoMessage { - Guid guid = Guid.NewGuid(); - var message = new ExtractionRequestInfoMessage - { - Modality = "MR", - JobSubmittedAt = _dateTimeProvider.UtcNow(), - ProjectNumber = "1234", - ExtractionJobIdentifier = guid, - ExtractionDirectory = "test/directory", - KeyTag = "KeyTag", - KeyValueCount = 123, - UserName = "testUser", - IsIdentifiableExtraction = true, - IsNoFilterExtraction = true, - }; - - MongoExtractJobDoc doc = MongoExtractJobDoc.FromMessage(message, _messageHeader, _dateTimeProvider); - ExtractJobInfo extractJobInfo = doc.ToExtractJobInfo(); - - var expected = new ExtractJobInfo( - guid, - _dateTimeProvider.UtcNow(), - "1234", - "test/directory", - "KeyTag", - 123, 
- "testUser", - "MR", - ExtractJobStatus.WaitingForCollectionInfo, - isIdentifiableExtraction: true, - isNoFilterExtraction: true - ); - - Assert.That(extractJobInfo, Is.EqualTo(expected)); - } - - #endregion + Modality = "MR", + JobSubmittedAt = _dateTimeProvider.UtcNow(), + ProjectNumber = "1234", + ExtractionJobIdentifier = guid, + ExtractionDirectory = "test/directory", + KeyTag = "KeyTag", + KeyValueCount = 123, + UserName = "testUser", + IsIdentifiableExtraction = true, + IsNoFilterExtraction = true, + }; + + MongoExtractJobDoc doc = MongoExtractJobDoc.FromMessage(message, _messageHeader, _dateTimeProvider); + ExtractJobInfo extractJobInfo = doc.ToExtractJobInfo(); + + var expected = new ExtractJobInfo( + guid, + _dateTimeProvider.UtcNow(), + "1234", + "test/directory", + "KeyTag", + 123, + "testUser", + "MR", + ExtractJobStatus.WaitingForCollectionInfo, + isIdentifiableExtraction: true, + isNoFilterExtraction: true + ); + + Assert.That(extractJobInfo, Is.EqualTo(expected)); } + + #endregion } diff --git a/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/MongoDB/MongoExtractJobStoreTest.cs b/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/MongoDB/MongoExtractJobStoreTest.cs index 82ea33320..7a1348a8e 100644 --- a/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/MongoDB/MongoExtractJobStoreTest.cs +++ b/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/MongoDB/MongoExtractJobStoreTest.cs @@ -17,846 +17,845 @@ using System.Threading; -namespace SmiServices.UnitTests.Microservices.CohortPackager.Execution.ExtractJobStorage.MongoDB +namespace SmiServices.UnitTests.Microservices.CohortPackager.Execution.ExtractJobStorage.MongoDB; + +[TestFixture] +public class MongoExtractJobStoreTest { - [TestFixture] - public class MongoExtractJobStoreTest - { - private const string ExtractionDatabaseName = "testExtraction"; + private 
const string ExtractionDatabaseName = "testExtraction"; - private readonly TestDateTimeProvider _dateTimeProvider = new(); + private readonly TestDateTimeProvider _dateTimeProvider = new(); - #region Fixture Methods + #region Fixture Methods - [OneTimeSetUp] - public void OneTimeSetUp() - { - } + [OneTimeSetUp] + public void OneTimeSetUp() + { + } - [OneTimeTearDown] - public void OneTimeTearDown() { } + [OneTimeTearDown] + public void OneTimeTearDown() { } - [SetUp] - public void SetUp() - { - _extractionDatabase = new TestExtractionDatabase(); - _mockSessionHandle.Reset(); - } + [SetUp] + public void SetUp() + { + _extractionDatabase = new TestExtractionDatabase(); + _mockSessionHandle.Reset(); + } - [TearDown] - public void TearDown() { } + [TearDown] + public void TearDown() { } - private static TestExtractionDatabase _extractionDatabase = new(); - private static readonly Mock _mockSessionHandle = new(); + private static TestExtractionDatabase _extractionDatabase = new(); + private static readonly Mock _mockSessionHandle = new(); - private static IMongoClient GetTestMongoClient() - { - _extractionDatabase = new TestExtractionDatabase(); - - var m = new Mock(); - m.Setup(static m => m.GetDatabase(ExtractionDatabaseName, It.IsAny())) - .Returns(_extractionDatabase); - m.Setup(static m => m.StartSession(It.IsAny(), It.IsAny())) - .Returns(_mockSessionHandle.Object); - return m.Object; - } + private static IMongoClient GetTestMongoClient() + { + _extractionDatabase = new TestExtractionDatabase(); + + var m = new Mock(); + m.Setup(static m => m.GetDatabase(ExtractionDatabaseName, It.IsAny())) + .Returns(_extractionDatabase); + m.Setup(static m => m.StartSession(It.IsAny(), It.IsAny())) + .Returns(_mockSessionHandle.Object); + return m.Object; + } - /// - /// Test mock of the extraction database - /// - private sealed class TestExtractionDatabase : StubMongoDatabase - { - public readonly MockExtractCollection InProgressCollection = new(); - public readonly 
MockExtractCollection CompletedJobCollection = new(); - public readonly Dictionary> ExpectedFilesCollections = []; - public readonly Dictionary> StatusCollections = []; + /// + /// Test mock of the extraction database + /// + private sealed class TestExtractionDatabase : StubMongoDatabase + { + public readonly MockExtractCollection InProgressCollection = new(); + public readonly MockExtractCollection CompletedJobCollection = new(); + public readonly Dictionary> ExpectedFilesCollections = []; + public readonly Dictionary> StatusCollections = []; - public override IMongoCollection GetCollection(string name, MongoCollectionSettings? settings = null) + public override IMongoCollection GetCollection(string name, MongoCollectionSettings? settings = null) + { + dynamic? retCollection = null; + switch (name) { - dynamic? retCollection = null; - switch (name) - { - case "inProgressJobs": - retCollection = InProgressCollection; - break; - case "completedJobs": - retCollection = CompletedJobCollection; - break; - default: + case "inProgressJobs": + retCollection = InProgressCollection; + break; + case "completedJobs": + retCollection = CompletedJobCollection; + break; + default: + { + if (name.StartsWith("expectedFiles")) + { + if (!ExpectedFilesCollections.ContainsKey(name)) + ExpectedFilesCollections[name] = new MockExtractCollection(); + retCollection = ExpectedFilesCollections[name]; + } + else if (name.StartsWith("statuses")) { - if (name.StartsWith("expectedFiles")) - { - if (!ExpectedFilesCollections.ContainsKey(name)) - ExpectedFilesCollections[name] = new MockExtractCollection(); - retCollection = ExpectedFilesCollections[name]; - } - else if (name.StartsWith("statuses")) - { - if (!StatusCollections.ContainsKey(name)) - StatusCollections[name] = new MockExtractCollection(); - retCollection = StatusCollections[name]; - } - - break; + if (!StatusCollections.ContainsKey(name)) + StatusCollections[name] = new MockExtractCollection(); + retCollection = 
StatusCollections[name]; } - } - return retCollection != null - ? (IMongoCollection)retCollection - : throw new ArgumentException($"No implementation for {typeof(TDocument)} with name {name}"); + break; + } } - public override void DropCollection(string name, CancellationToken cancellationToken = new CancellationToken()) - { - ExpectedFilesCollections.Remove(name); - StatusCollections.Remove(name); - } + return retCollection != null + ? (IMongoCollection)retCollection + : throw new ArgumentException($"No implementation for {typeof(TDocument)} with name {name}"); } - /// - /// Mock of a collection in the extraction database. Can be keyed by string or Guid. - /// - /// - /// - private sealed class MockExtractCollection : StubMongoCollection where TKey : struct + public override void DropCollection(string name, CancellationToken cancellationToken = new CancellationToken()) { - public readonly Dictionary Documents = []; - - public bool RejectChanges { get; set; } - - public override long CountDocuments(FilterDefinition filter, CountOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => Documents.Count; + ExpectedFilesCollections.Remove(name); + StatusCollections.Remove(name); + } + } - public override IAsyncCursor FindSync(FilterDefinition filter, FindOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) - { - var mockCursor = new Mock>(); - mockCursor - .SetupSequence(_ => _.MoveNext(It.IsAny())) - .Returns(true) - .Returns(false); + /// + /// Mock of a collection in the extraction database. Can be keyed by string or Guid. 
+ /// + /// + /// + private sealed class MockExtractCollection : StubMongoCollection where TKey : struct + { + public readonly Dictionary Documents = []; - if (filter == FilterDefinition.Empty) - { -#pragma warning disable IDE0305 // Simplify collection initialization - mockCursor - .Setup(x => x.Current) - .Returns((IEnumerable)Documents.Values.ToList()); -#pragma warning restore IDE0305 // Simplify collection initialization - return mockCursor.Object; - } + public bool RejectChanges { get; set; } - var rendered = filter.Render(new RenderArgs(BsonSerializer.SerializerRegistry.GetSerializer(), BsonSerializer.SerializerRegistry)); - var key = GetKey(rendered["_id"]); + public override long CountDocuments(FilterDefinition filter, CountOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) => Documents.Count; - if (Documents.TryGetValue(key, out var value)) - { -#pragma warning disable IDE0028 // Simplify collection initialization - mockCursor - .Setup(static x => x.Current) - .Returns((IEnumerable)new List { Documents[key] }); -#pragma warning restore IDE0028 // Simplify collection initialization - return mockCursor.Object; - } + public override IAsyncCursor FindSync(FilterDefinition filter, FindOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) + { + var mockCursor = new Mock>(); + mockCursor + .SetupSequence(_ => _.MoveNext(It.IsAny())) + .Returns(true) + .Returns(false); - mockCursor.Reset(); + if (filter == FilterDefinition.Empty) + { +#pragma warning disable IDE0305 // Simplify collection initialization mockCursor - .Setup(_ => _.MoveNext(It.IsAny())) - .Returns(false); + .Setup(x => x.Current) + .Returns((IEnumerable)Documents.Values.ToList()); +#pragma warning restore IDE0305 // Simplify collection initialization return mockCursor.Object; } - public override void InsertOne(TVal document, InsertOneOptions? 
options = null, CancellationToken cancellationToken = new CancellationToken()) - { - if (RejectChanges) - throw new Exception("Rejecting changes"); - - BsonDocument bsonDoc = document.ToBsonDocument(); - if (!bsonDoc.Contains("_id")) - bsonDoc.Add("_id", Guid.NewGuid().ToString()); - if (!Documents.TryAdd(GetKey(bsonDoc["_id"].ToString()!), document)) - throw new Exception("Document already exists"); - } - - public override void InsertMany(IEnumerable documents, InsertManyOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) - { - foreach (TVal doc in documents) - InsertOne(doc, null, cancellationToken); - } - - public override ReplaceOneResult ReplaceOne(FilterDefinition filter, TVal replacement, - ReplaceOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) - { - if (RejectChanges) - return ReplaceOneResult.Unacknowledged.Instance; - - BsonDocument bsonDoc = replacement.ToBsonDocument(); - TKey key = GetKey(bsonDoc["_id"].ToString()!); - if (!Documents.ContainsKey(key)) - return ReplaceOneResult.Unacknowledged.Instance; + var rendered = filter.Render(new RenderArgs(BsonSerializer.SerializerRegistry.GetSerializer(), BsonSerializer.SerializerRegistry)); + var key = GetKey(rendered["_id"]); - Documents[key] = replacement; - return new ReplaceOneResult.Acknowledged(1, 1, 0); - } - - public override DeleteResult DeleteOne(FilterDefinition filter, CancellationToken cancellationToken = new CancellationToken()) + if (Documents.TryGetValue(key, out var value)) { - if (RejectChanges) - return DeleteResult.Unacknowledged.Instance; - - var filterDoc = filter.Render(new RenderArgs(BsonSerializer.SerializerRegistry.GetSerializer(), BsonSerializer.SerializerRegistry)); - if (!filterDoc.Contains("_id") || filterDoc.Count() > 1) - throw new NotImplementedException("No support for deleting multiple docs"); - - return Documents.Remove(GetKey(filterDoc["_id"].ToString()!)) - ? 
new DeleteResult.Acknowledged(1) - : DeleteResult.Unacknowledged.Instance; +#pragma warning disable IDE0028 // Simplify collection initialization + mockCursor + .Setup(static x => x.Current) + .Returns((IEnumerable)new List { Documents[key] }); +#pragma warning restore IDE0028 // Simplify collection initialization + return mockCursor.Object; } - private static TKey GetKey(dynamic key) - { - if (typeof(TKey) == typeof(string)) - return key; - if (typeof(TKey) == typeof(Guid)) - // Dynamic typing is fun! - return (TKey)Convert.ChangeType(Guid.Parse(((BsonString)key).Value), typeof(TKey)); - throw new Exception($"Unsupported key type {typeof(TKey)}"); - } + mockCursor.Reset(); + mockCursor + .Setup(_ => _.MoveNext(It.IsAny())) + .Returns(false); + return mockCursor.Object; } - #endregion - - #region Tests - - [Test] - public void TestPersistMessageToStoreImpl_ExtractionRequestInfoMessage() + public override void InsertOne(TVal document, InsertOneOptions? options = null, CancellationToken cancellationToken = new CancellationToken()) { - Guid guid = Guid.NewGuid(); - var testExtractionRequestInfoMessage = new ExtractionRequestInfoMessage - { - ExtractionJobIdentifier = guid, - ProjectNumber = "1234-5678", - ExtractionDirectory = "1234-5678/testExtract", - JobSubmittedAt = _dateTimeProvider.UtcNow(), - KeyTag = "StudyInstanceUID", - KeyValueCount = 1, - UserName = "testUser", - Modality = "CT", - IsIdentifiableExtraction = true, - IsNoFilterExtraction = true, - }; - var testHeader = new MessageHeader - { - MessageGuid = Guid.NewGuid(), - OriginalPublishTimestamp = MessageHeader.UnixTime(_dateTimeProvider.UtcNow()), - Parents = [Guid.NewGuid(),], - ProducerExecutableName = "MongoExtractStoreTests", - ProducerProcessID = 1234, - }; - - var client = GetTestMongoClient(); - var store = new MongoExtractJobStore(client, ExtractionDatabaseName, _dateTimeProvider); - - store.PersistMessageToStore(testExtractionRequestInfoMessage, testHeader); - - Dictionary docs = 
_extractionDatabase.InProgressCollection.Documents; - Assert.That(docs, Has.Count.EqualTo(1)); - MongoExtractJobDoc extractJob = docs.Values.ToList()[0]; - - var expected = new MongoExtractJobDoc( - guid, - MongoExtractionMessageHeaderDoc.FromMessageHeader(guid, testHeader, _dateTimeProvider), - "1234-5678", - ExtractJobStatus.WaitingForCollectionInfo, - "1234-5678/testExtract", - _dateTimeProvider.UtcNow(), - "StudyInstanceUID", - 1, - "testUser", - "CT", - isIdentifiableExtraction: true, - isNoFilterExtraction: true, - null); - - Assert.That(extractJob, Is.EqualTo(expected)); + if (RejectChanges) + throw new Exception("Rejecting changes"); + + BsonDocument bsonDoc = document.ToBsonDocument(); + if (!bsonDoc.Contains("_id")) + bsonDoc.Add("_id", Guid.NewGuid().ToString()); + if (!Documents.TryAdd(GetKey(bsonDoc["_id"].ToString()!), document)) + throw new Exception("Document already exists"); } - [Test] - public void PersistMessageToStoreImpl_ExtractionRequestInfoMessage_CompletedJob() + public override void InsertMany(IEnumerable documents, InsertManyOptions? 
options = null, CancellationToken cancellationToken = new CancellationToken()) { - // Arrange - - var jobId = Guid.NewGuid(); - var job = new MongoExtractJobDoc( - jobId, - MongoExtractionMessageHeaderDoc.FromMessageHeader(jobId, new MessageHeader(), _dateTimeProvider), - "1234", - ExtractJobStatus.Failed, - "test/dir", - _dateTimeProvider.UtcNow(), - "SeriesInstanceUID", - 1, - "testUser", - "MR", - isIdentifiableExtraction: true, - isNoFilterExtraction: true, - null); - - var client = GetTestMongoClient(); - _extractionDatabase.CompletedJobCollection.InsertOne(new MongoCompletedExtractJobDoc(job, DateTime.Now)); - - var store = new MongoExtractJobStore(client, ExtractionDatabaseName, _dateTimeProvider); - - var testExtractionRequestInfoMessage = new ExtractionRequestInfoMessage - { - ExtractionJobIdentifier = jobId, - ProjectNumber = "1234-5678", - ExtractionDirectory = "1234-5678/testExtract", - JobSubmittedAt = _dateTimeProvider.UtcNow(), - KeyTag = "StudyInstanceUID", - KeyValueCount = 1, - UserName = "testUser", - Modality = "CT", - IsIdentifiableExtraction = true, - IsNoFilterExtraction = true, - }; - - // Act - - void call() => store.PersistMessageToStore(testExtractionRequestInfoMessage, new MessageHeader()); - - // Assert - - var exc = Assert.Throws(() => call()); - Assert.That(exc?.Message, Is.EqualTo("Received an ExtractionRequestInfoMessage for a job that is already completed")); + foreach (TVal doc in documents) + InsertOne(doc, null, cancellationToken); } - [Test] - public void TestPersistMessageToStoreImpl_ExtractFileCollectionInfoMessage() + public override ReplaceOneResult ReplaceOne(FilterDefinition filter, TVal replacement, + ReplaceOptions? 
options = null, CancellationToken cancellationToken = new CancellationToken()) { - Guid jobId = Guid.NewGuid(); - var header1 = new MessageHeader(); - var header2 = new MessageHeader(); - var testExtractFileCollectionInfoMessage = new ExtractFileCollectionInfoMessage - { - ExtractionJobIdentifier = jobId, - ProjectNumber = "1234-5678", - RejectionReasons = new Dictionary - { - {"reject1", 1 }, - {"reject2", 2 }, - }, - JobSubmittedAt = DateTime.UtcNow, - ExtractionDirectory = "1234-5678/testExtract", - ExtractFileMessagesDispatched = new JsonCompatibleDictionary - { - { header1, "file1" }, - { header2, "file2" } - }, - KeyValue = "series-1", - }; - var header = new MessageHeader - { - MessageGuid = Guid.NewGuid(), - OriginalPublishTimestamp = MessageHeader.UnixTimeNow(), - Parents = [Guid.NewGuid(),], - ProducerExecutableName = "MongoExtractStoreTests", - ProducerProcessID = 1234, - }; + if (RejectChanges) + return ReplaceOneResult.Unacknowledged.Instance; - var client = GetTestMongoClient(); - var store = new MongoExtractJobStore(client, ExtractionDatabaseName, _dateTimeProvider); + BsonDocument bsonDoc = replacement.ToBsonDocument(); + TKey key = GetKey(bsonDoc["_id"].ToString()!); + if (!Documents.ContainsKey(key)) + return ReplaceOneResult.Unacknowledged.Instance; - store.PersistMessageToStore(testExtractFileCollectionInfoMessage, header); + Documents[key] = replacement; + return new ReplaceOneResult.Acknowledged(1, 1, 0); + } - Dictionary docs = _extractionDatabase.ExpectedFilesCollections[$"expectedFiles_{jobId}"].Documents; - Assert.That(docs, Has.Count.EqualTo(1)); - MongoExpectedFilesDoc extractJob = docs.Values.ToList()[0]; + public override DeleteResult DeleteOne(FilterDefinition filter, CancellationToken cancellationToken = new CancellationToken()) + { + if (RejectChanges) + return DeleteResult.Unacknowledged.Instance; - var expected = new MongoExpectedFilesDoc( - MongoExtractionMessageHeaderDoc.FromMessageHeader(jobId, header, _dateTimeProvider), - 
"series-1", - [ - new MongoExpectedFileInfoDoc(header1.MessageGuid, "file1"), - new MongoExpectedFileInfoDoc(header2.MessageGuid, "file2"), - ], - new MongoRejectedKeyInfoDoc( - MongoExtractionMessageHeaderDoc.FromMessageHeader(jobId, header, _dateTimeProvider), - new Dictionary - { - {"reject1", 1 }, - {"reject2", 2 }, - }) - ); + var filterDoc = filter.Render(new RenderArgs(BsonSerializer.SerializerRegistry.GetSerializer(), BsonSerializer.SerializerRegistry)); + if (!filterDoc.Contains("_id") || filterDoc.Count() > 1) + throw new NotImplementedException("No support for deleting multiple docs"); - Assert.That(extractJob, Is.EqualTo(expected)); + return Documents.Remove(GetKey(filterDoc["_id"].ToString()!)) + ? new DeleteResult.Acknowledged(1) + : DeleteResult.Unacknowledged.Instance; } - [Test] - public void TestPersistMessageToStoreImpl_ExtractFileCollectionInfoMessage_NoIdentifiers() + private static TKey GetKey(dynamic key) { - Guid jobId = Guid.NewGuid(); - var testExtractFileCollectionInfoMessage = new ExtractFileCollectionInfoMessage - { - ExtractionJobIdentifier = jobId, - ProjectNumber = "1234-5678", - RejectionReasons = new Dictionary - { - {"ImageType is not ORIGINAL", 1 }, - }, - JobSubmittedAt = DateTime.UtcNow, - ExtractionDirectory = "1234-5678/testExtract", - ExtractFileMessagesDispatched = [], // No files were extractable for this key - KeyValue = "series-1", - }; - var header = new MessageHeader - { - MessageGuid = Guid.NewGuid(), - OriginalPublishTimestamp = MessageHeader.UnixTimeNow(), - Parents = [Guid.NewGuid(),], - ProducerExecutableName = "MongoExtractStoreTests", - ProducerProcessID = 1234, - }; + if (typeof(TKey) == typeof(string)) + return key; + if (typeof(TKey) == typeof(Guid)) + // Dynamic typing is fun! 
+ return (TKey)Convert.ChangeType(Guid.Parse(((BsonString)key).Value), typeof(TKey)); + throw new Exception($"Unsupported key type {typeof(TKey)}"); + } + } - var client = GetTestMongoClient(); - var store = new MongoExtractJobStore(client, ExtractionDatabaseName, _dateTimeProvider); + #endregion - Assert.DoesNotThrow(() => store.PersistMessageToStore(testExtractFileCollectionInfoMessage, header)); - } + #region Tests + + [Test] + public void TestPersistMessageToStoreImpl_ExtractionRequestInfoMessage() + { + Guid guid = Guid.NewGuid(); + var testExtractionRequestInfoMessage = new ExtractionRequestInfoMessage + { + ExtractionJobIdentifier = guid, + ProjectNumber = "1234-5678", + ExtractionDirectory = "1234-5678/testExtract", + JobSubmittedAt = _dateTimeProvider.UtcNow(), + KeyTag = "StudyInstanceUID", + KeyValueCount = 1, + UserName = "testUser", + Modality = "CT", + IsIdentifiableExtraction = true, + IsNoFilterExtraction = true, + }; + var testHeader = new MessageHeader + { + MessageGuid = Guid.NewGuid(), + OriginalPublishTimestamp = MessageHeader.UnixTime(_dateTimeProvider.UtcNow()), + Parents = [Guid.NewGuid(),], + ProducerExecutableName = "MongoExtractStoreTests", + ProducerProcessID = 1234, + }; + + var client = GetTestMongoClient(); + var store = new MongoExtractJobStore(client, ExtractionDatabaseName, _dateTimeProvider); + + store.PersistMessageToStore(testExtractionRequestInfoMessage, testHeader); + + Dictionary docs = _extractionDatabase.InProgressCollection.Documents; + Assert.That(docs, Has.Count.EqualTo(1)); + MongoExtractJobDoc extractJob = docs.Values.ToList()[0]; + + var expected = new MongoExtractJobDoc( + guid, + MongoExtractionMessageHeaderDoc.FromMessageHeader(guid, testHeader, _dateTimeProvider), + "1234-5678", + ExtractJobStatus.WaitingForCollectionInfo, + "1234-5678/testExtract", + _dateTimeProvider.UtcNow(), + "StudyInstanceUID", + 1, + "testUser", + "CT", + isIdentifiableExtraction: true, + isNoFilterExtraction: true, + null); + + 
Assert.That(extractJob, Is.EqualTo(expected)); + } - [Test] - public void TestPersistMessageToStoreImpl_ExtractFileStatusMessage() + [Test] + public void PersistMessageToStoreImpl_ExtractionRequestInfoMessage_CompletedJob() + { + // Arrange + + var jobId = Guid.NewGuid(); + var job = new MongoExtractJobDoc( + jobId, + MongoExtractionMessageHeaderDoc.FromMessageHeader(jobId, new MessageHeader(), _dateTimeProvider), + "1234", + ExtractJobStatus.Failed, + "test/dir", + _dateTimeProvider.UtcNow(), + "SeriesInstanceUID", + 1, + "testUser", + "MR", + isIdentifiableExtraction: true, + isNoFilterExtraction: true, + null); + + var client = GetTestMongoClient(); + _extractionDatabase.CompletedJobCollection.InsertOne(new MongoCompletedExtractJobDoc(job, DateTime.Now)); + + var store = new MongoExtractJobStore(client, ExtractionDatabaseName, _dateTimeProvider); + + var testExtractionRequestInfoMessage = new ExtractionRequestInfoMessage { - var client = GetTestMongoClient(); - var store = new MongoExtractJobStore(client, ExtractionDatabaseName, _dateTimeProvider); + ExtractionJobIdentifier = jobId, + ProjectNumber = "1234-5678", + ExtractionDirectory = "1234-5678/testExtract", + JobSubmittedAt = _dateTimeProvider.UtcNow(), + KeyTag = "StudyInstanceUID", + KeyValueCount = 1, + UserName = "testUser", + Modality = "CT", + IsIdentifiableExtraction = true, + IsNoFilterExtraction = true, + }; + + // Act + + void call() => store.PersistMessageToStore(testExtractionRequestInfoMessage, new MessageHeader()); + + // Assert + + var exc = Assert.Throws(() => call()); + Assert.That(exc?.Message, Is.EqualTo("Received an ExtractionRequestInfoMessage for a job that is already completed")); + } - Guid jobId = Guid.NewGuid(); - var testExtractFileStatusMessage = new ExtractedFileStatusMessage + [Test] + public void TestPersistMessageToStoreImpl_ExtractFileCollectionInfoMessage() + { + Guid jobId = Guid.NewGuid(); + var header1 = new MessageHeader(); + var header2 = new MessageHeader(); + var 
testExtractFileCollectionInfoMessage = new ExtractFileCollectionInfoMessage + { + ExtractionJobIdentifier = jobId, + ProjectNumber = "1234-5678", + RejectionReasons = new Dictionary { - OutputFilePath = "anon.dcm", - JobSubmittedAt = _dateTimeProvider.UtcNow(), - Status = ExtractedFileStatus.ErrorWontRetry, - ProjectNumber = "1234", - ExtractionJobIdentifier = jobId, - ExtractionDirectory = "1234/test", - StatusMessage = "Could not anonymise", - DicomFilePath = "original.dcm", - }; - var header = new MessageHeader(); - - store.PersistMessageToStore(testExtractFileStatusMessage, header); - - Dictionary docs = _extractionDatabase.StatusCollections[$"statuses_{jobId}"].Documents; - Assert.That(docs, Has.Count.EqualTo(1)); - MongoFileStatusDoc statusDoc = docs.Values.ToList()[0]; - - var expected = new MongoFileStatusDoc( + {"reject1", 1 }, + {"reject2", 2 }, + }, + JobSubmittedAt = DateTime.UtcNow, + ExtractionDirectory = "1234-5678/testExtract", + ExtractFileMessagesDispatched = new JsonCompatibleDictionary + { + { header1, "file1" }, + { header2, "file2" } + }, + KeyValue = "series-1", + }; + var header = new MessageHeader + { + MessageGuid = Guid.NewGuid(), + OriginalPublishTimestamp = MessageHeader.UnixTimeNow(), + Parents = [Guid.NewGuid(),], + ProducerExecutableName = "MongoExtractStoreTests", + ProducerProcessID = 1234, + }; + + var client = GetTestMongoClient(); + var store = new MongoExtractJobStore(client, ExtractionDatabaseName, _dateTimeProvider); + + store.PersistMessageToStore(testExtractFileCollectionInfoMessage, header); + + Dictionary docs = _extractionDatabase.ExpectedFilesCollections[$"expectedFiles_{jobId}"].Documents; + Assert.That(docs, Has.Count.EqualTo(1)); + MongoExpectedFilesDoc extractJob = docs.Values.ToList()[0]; + + var expected = new MongoExpectedFilesDoc( + MongoExtractionMessageHeaderDoc.FromMessageHeader(jobId, header, _dateTimeProvider), + "series-1", + [ + new MongoExpectedFileInfoDoc(header1.MessageGuid, "file1"), + new 
MongoExpectedFileInfoDoc(header2.MessageGuid, "file2"), + ], + new MongoRejectedKeyInfoDoc( MongoExtractionMessageHeaderDoc.FromMessageHeader(jobId, header, _dateTimeProvider), - "original.dcm", - "anon.dcm", - ExtractedFileStatus.ErrorWontRetry, - VerifiedFileStatus.NotVerified, - "Could not anonymise"); + new Dictionary + { + {"reject1", 1 }, + {"reject2", 2 }, + }) + ); - Assert.That(statusDoc, Is.EqualTo(expected)); - } + Assert.That(extractJob, Is.EqualTo(expected)); + } - [Test] - public void PersistMessageToStoreImpl_ExtractedFileVerificationMessage_CompletedJob() + [Test] + public void TestPersistMessageToStoreImpl_ExtractFileCollectionInfoMessage_NoIdentifiers() + { + Guid jobId = Guid.NewGuid(); + var testExtractFileCollectionInfoMessage = new ExtractFileCollectionInfoMessage { - // Arrange - - var jobId = Guid.NewGuid(); - var job = new MongoExtractJobDoc( - jobId, - MongoExtractionMessageHeaderDoc.FromMessageHeader(jobId, new MessageHeader(), _dateTimeProvider), - "1234", - ExtractJobStatus.Failed, - "test/dir", - _dateTimeProvider.UtcNow(), - "SeriesInstanceUID", - 1, - "testUser", - "MR", - isIdentifiableExtraction: true, - isNoFilterExtraction: true, - null); - - var client = GetTestMongoClient(); - _extractionDatabase.CompletedJobCollection.InsertOne(new MongoCompletedExtractJobDoc(job, DateTime.Now)); - - var store = new MongoExtractJobStore(client, ExtractionDatabaseName, _dateTimeProvider); - - var extractedFileStatusMessage = new ExtractedFileVerificationMessage() + ExtractionJobIdentifier = jobId, + ProjectNumber = "1234-5678", + RejectionReasons = new Dictionary { - ExtractionJobIdentifier = jobId, - OutputFilePath = "foo-an.dcm", - Report = "[]", - }; - - // Act + {"ImageType is not ORIGINAL", 1 }, + }, + JobSubmittedAt = DateTime.UtcNow, + ExtractionDirectory = "1234-5678/testExtract", + ExtractFileMessagesDispatched = [], // No files were extractable for this key + KeyValue = "series-1", + }; + var header = new MessageHeader + { + 
MessageGuid = Guid.NewGuid(), + OriginalPublishTimestamp = MessageHeader.UnixTimeNow(), + Parents = [Guid.NewGuid(),], + ProducerExecutableName = "MongoExtractStoreTests", + ProducerProcessID = 1234, + }; - void call() => store.PersistMessageToStore(extractedFileStatusMessage, new MessageHeader()); + var client = GetTestMongoClient(); + var store = new MongoExtractJobStore(client, ExtractionDatabaseName, _dateTimeProvider); - // Assert + Assert.DoesNotThrow(() => store.PersistMessageToStore(testExtractFileCollectionInfoMessage, header)); + } - var exc = Assert.Throws(() => call()); - Assert.That(exc?.Message, Is.EqualTo($"Received an {nameof(ExtractedFileVerificationMessage)} for a job that is already completed")); - } + [Test] + public void TestPersistMessageToStoreImpl_ExtractFileStatusMessage() + { + var client = GetTestMongoClient(); + var store = new MongoExtractJobStore(client, ExtractionDatabaseName, _dateTimeProvider); + Guid jobId = Guid.NewGuid(); + var testExtractFileStatusMessage = new ExtractedFileStatusMessage + { + OutputFilePath = "anon.dcm", + JobSubmittedAt = _dateTimeProvider.UtcNow(), + Status = ExtractedFileStatus.ErrorWontRetry, + ProjectNumber = "1234", + ExtractionJobIdentifier = jobId, + ExtractionDirectory = "1234/test", + StatusMessage = "Could not anonymise", + DicomFilePath = "original.dcm", + }; + var header = new MessageHeader(); + + store.PersistMessageToStore(testExtractFileStatusMessage, header); + + Dictionary docs = _extractionDatabase.StatusCollections[$"statuses_{jobId}"].Documents; + Assert.That(docs, Has.Count.EqualTo(1)); + MongoFileStatusDoc statusDoc = docs.Values.ToList()[0]; + + var expected = new MongoFileStatusDoc( + MongoExtractionMessageHeaderDoc.FromMessageHeader(jobId, header, _dateTimeProvider), + "original.dcm", + "anon.dcm", + ExtractedFileStatus.ErrorWontRetry, + VerifiedFileStatus.NotVerified, + "Could not anonymise"); + + Assert.That(statusDoc, Is.EqualTo(expected)); + } - [Test] - public void 
TestPersistMessageToStoreImpl_IsIdentifiableMessage() + [Test] + public void PersistMessageToStoreImpl_ExtractedFileVerificationMessage_CompletedJob() + { + // Arrange + + var jobId = Guid.NewGuid(); + var job = new MongoExtractJobDoc( + jobId, + MongoExtractionMessageHeaderDoc.FromMessageHeader(jobId, new MessageHeader(), _dateTimeProvider), + "1234", + ExtractJobStatus.Failed, + "test/dir", + _dateTimeProvider.UtcNow(), + "SeriesInstanceUID", + 1, + "testUser", + "MR", + isIdentifiableExtraction: true, + isNoFilterExtraction: true, + null); + + var client = GetTestMongoClient(); + _extractionDatabase.CompletedJobCollection.InsertOne(new MongoCompletedExtractJobDoc(job, DateTime.Now)); + + var store = new MongoExtractJobStore(client, ExtractionDatabaseName, _dateTimeProvider); + + var extractedFileStatusMessage = new ExtractedFileVerificationMessage() { - var client = GetTestMongoClient(); - var store = new MongoExtractJobStore(client, ExtractionDatabaseName, _dateTimeProvider); + ExtractionJobIdentifier = jobId, + OutputFilePath = "foo-an.dcm", + Report = "[]", + }; - Guid jobId = Guid.NewGuid(); - var testIsIdentifiableMessage = new ExtractedFileVerificationMessage - { - OutputFilePath = "anon.dcm", - JobSubmittedAt = _dateTimeProvider.UtcNow(), - ProjectNumber = "1234", - ExtractionJobIdentifier = jobId, - ExtractionDirectory = "1234/test", - DicomFilePath = "original.dcm", - Status = VerifiedFileStatus.NotIdentifiable, - Report = "[]", // NOTE(rkm 2020-03-10) An "empty" report from IsIdentifiable - }; - var header = new MessageHeader(); - - store.PersistMessageToStore(testIsIdentifiableMessage, header); - - Dictionary docs = _extractionDatabase.StatusCollections[$"statuses_{jobId}"].Documents; - Assert.That(docs, Has.Count.EqualTo(1)); - MongoFileStatusDoc statusDoc = docs.Values.ToList()[0]; - - var expected = new MongoFileStatusDoc( - MongoExtractionMessageHeaderDoc.FromMessageHeader(jobId, header, _dateTimeProvider), - "original.dcm", - "anon.dcm", - 
ExtractedFileStatus.Anonymised, - VerifiedFileStatus.NotIdentifiable, - "[]"); + // Act - Assert.That(statusDoc, Is.EqualTo(expected)); - } + void call() => store.PersistMessageToStore(extractedFileStatusMessage, new MessageHeader()); - [Test] - public void TestGetReadJobsImpl() - { - var jobId = Guid.NewGuid(); - var testJob = new MongoExtractJobDoc( - jobId, - MongoExtractionMessageHeaderDoc.FromMessageHeader(jobId, new MessageHeader(), _dateTimeProvider), - "1234", - ExtractJobStatus.Failed, - "test/dir", - _dateTimeProvider.UtcNow(), - "SeriesInstanceUID", - 1, - "testUser", - "MR", - isIdentifiableExtraction: true, - isNoFilterExtraction: true, - null); - var testMongoExpectedFilesDoc = new MongoExpectedFilesDoc( - MongoExtractionMessageHeaderDoc.FromMessageHeader(jobId, new MessageHeader(), _dateTimeProvider), - "1.2.3.4", - [ - new MongoExpectedFileInfoDoc(Guid.NewGuid(), "anon1.dcm"), - ], - new MongoRejectedKeyInfoDoc( - MongoExtractionMessageHeaderDoc.FromMessageHeader(jobId, new MessageHeader(), _dateTimeProvider), - []) - ); - var testMongoFileStatusDoc = new MongoFileStatusDoc( - MongoExtractionMessageHeaderDoc.FromMessageHeader(jobId, new MessageHeader(), _dateTimeProvider), - "input.dcm", - "anon1.dcm", - ExtractedFileStatus.Anonymised, - VerifiedFileStatus.NotIdentifiable, - "Verified"); - - var client = GetTestMongoClient(); - var store = new MongoExtractJobStore(client, ExtractionDatabaseName, _dateTimeProvider); - - // Assert that jobs marked as failed are not returned - _extractionDatabase.InProgressCollection.InsertOne(testJob); - _extractionDatabase.InProgressCollection.RejectChanges = true; - Assert.That(store.GetReadyJobs(), Is.Empty); + // Assert - // Assert that an in progress job is not returned - client = GetTestMongoClient(); - store = new MongoExtractJobStore(client, ExtractionDatabaseName, _dateTimeProvider); - testJob.JobStatus = ExtractJobStatus.WaitingForCollectionInfo; - _extractionDatabase.InProgressCollection.InsertOne(testJob); 
- _extractionDatabase.InProgressCollection.RejectChanges = true; - Assert.That(store.GetReadyJobs(), Is.Empty); + var exc = Assert.Throws(() => call()); + Assert.That(exc?.Message, Is.EqualTo($"Received an {nameof(ExtractedFileVerificationMessage)} for a job that is already completed")); + } - // Check we handle a bad ReplaceOneResult - client = GetTestMongoClient(); - store = new MongoExtractJobStore(client, ExtractionDatabaseName, _dateTimeProvider); - testJob.JobStatus = ExtractJobStatus.WaitingForCollectionInfo; - _extractionDatabase.InProgressCollection.InsertOne(testJob); - _extractionDatabase.InProgressCollection.RejectChanges = true; - _extractionDatabase.ExpectedFilesCollections[$"expectedFiles_{jobId}"] = new MockExtractCollection(); - _extractionDatabase.ExpectedFilesCollections[$"expectedFiles_{jobId}"].InsertOne(testMongoExpectedFilesDoc); - Assert.Throws(() => store.GetReadyJobs()); - - // Check happy path - client = GetTestMongoClient(); - store = new MongoExtractJobStore(client, ExtractionDatabaseName, _dateTimeProvider); - testJob.JobStatus = ExtractJobStatus.WaitingForCollectionInfo; - _extractionDatabase.InProgressCollection.InsertOne(testJob); - _extractionDatabase.ExpectedFilesCollections[$"expectedFiles_{jobId}"] = new MockExtractCollection(); - _extractionDatabase.ExpectedFilesCollections[$"expectedFiles_{jobId}"].InsertOne(testMongoExpectedFilesDoc); - Assert.Multiple(() => - { - Assert.That(store.GetReadyJobs(), Is.Empty); - Assert.That(_extractionDatabase.InProgressCollection.Documents.Single().Value.JobStatus, Is.EqualTo(ExtractJobStatus.WaitingForStatuses)); - }); - _extractionDatabase.StatusCollections[$"statuses_{jobId}"] = new MockExtractCollection(); - _extractionDatabase.StatusCollections[$"statuses_{jobId}"].InsertOne(testMongoFileStatusDoc); - ExtractJobInfo job = store.GetReadyJobs().Single(); - Assert.Multiple(() => - { - Assert.That(job.JobStatus, Is.EqualTo(ExtractJobStatus.ReadyForChecks)); - 
Assert.That(_extractionDatabase.InProgressCollection.Documents.Single().Value.JobStatus, Is.EqualTo(ExtractJobStatus.ReadyForChecks)); - }); - } - [Test] - public void TestCompleteJobImpl() + [Test] + public void TestPersistMessageToStoreImpl_IsIdentifiableMessage() + { + var client = GetTestMongoClient(); + var store = new MongoExtractJobStore(client, ExtractionDatabaseName, _dateTimeProvider); + + Guid jobId = Guid.NewGuid(); + var testIsIdentifiableMessage = new ExtractedFileVerificationMessage { - Guid jobId = Guid.NewGuid(); - var testJob = new MongoExtractJobDoc( - jobId, - MongoExtractionMessageHeaderDoc.FromMessageHeader(jobId, new MessageHeader(), _dateTimeProvider), - "1234", - ExtractJobStatus.Failed, - "test/dir", - _dateTimeProvider.UtcNow(), - "SeriesInstanceUID", - 1, - "testUser", - "MR", - isIdentifiableExtraction: true, - isNoFilterExtraction: true, - null); - var testMongoExpectedFilesDoc = new MongoExpectedFilesDoc( - MongoExtractionMessageHeaderDoc.FromMessageHeader(jobId, new MessageHeader(), _dateTimeProvider), - "1.2.3.4", - [ - new MongoExpectedFileInfoDoc(Guid.NewGuid(), "anon1.dcm"), - ], - new MongoRejectedKeyInfoDoc( - MongoExtractionMessageHeaderDoc.FromMessageHeader(jobId, new MessageHeader(), _dateTimeProvider), - []) - ); - var testMongoFileStatusDoc = new MongoFileStatusDoc( - MongoExtractionMessageHeaderDoc.FromMessageHeader(jobId, new MessageHeader(), _dateTimeProvider), - "input.dcm", - "anon1.dcm", - ExtractedFileStatus.Anonymised, - VerifiedFileStatus.NotIdentifiable, - "Verified"); - - var client = GetTestMongoClient(); - var store = new MongoExtractJobStore(client, ExtractionDatabaseName, _dateTimeProvider); - - // Assert that an exception is thrown for a non-existent job - Assert.Throws(() => store.MarkJobCompleted(Guid.NewGuid())); - _mockSessionHandle.Verify(x => x.AbortTransaction(It.IsAny()), Times.Once); - _mockSessionHandle.Verify(x => x.CommitTransaction(It.IsAny()), Times.Never); - - // Assert that an exception is 
thrown for a job which is marked as failed - client = GetTestMongoClient(); - store = new MongoExtractJobStore(client, ExtractionDatabaseName, _dateTimeProvider); - _extractionDatabase.InProgressCollection.InsertOne(testJob); - _mockSessionHandle.Reset(); - Assert.Throws(() => store.MarkJobCompleted(Guid.NewGuid())); - _mockSessionHandle.Verify(x => x.AbortTransaction(It.IsAny()), Times.Once); - _mockSessionHandle.Verify(x => x.CommitTransaction(It.IsAny()), Times.Never); - - // Check that we handle a failed insertion - client = GetTestMongoClient(); - store = new MongoExtractJobStore(client, ExtractionDatabaseName, _dateTimeProvider); - testJob.JobStatus = ExtractJobStatus.Completed; - _extractionDatabase.InProgressCollection.InsertOne(testJob); - _extractionDatabase.CompletedJobCollection.RejectChanges = true; - _mockSessionHandle.Reset(); - Assert.Throws(() => store.MarkJobCompleted(jobId)); - _mockSessionHandle.Verify(x => x.AbortTransaction(It.IsAny()), Times.Once); - _mockSessionHandle.Verify(x => x.CommitTransaction(It.IsAny()), Times.Never); - - // Check we handle a bad DeleteResult - client = GetTestMongoClient(); - store = new MongoExtractJobStore(client, ExtractionDatabaseName, _dateTimeProvider); - _extractionDatabase.InProgressCollection.RejectChanges = true; - _mockSessionHandle.Reset(); - Assert.Throws(() => store.MarkJobCompleted(jobId)); - _mockSessionHandle.Verify(x => x.AbortTransaction(It.IsAny()), Times.Once); - _mockSessionHandle.Verify(x => x.CommitTransaction(It.IsAny()), Times.Never); - - // Check we handle missing expectedFiles collection - client = GetTestMongoClient(); - store = new MongoExtractJobStore(client, ExtractionDatabaseName, _dateTimeProvider); - _extractionDatabase.InProgressCollection.InsertOne(testJob); - _mockSessionHandle.Reset(); - Assert.Throws(() => store.MarkJobCompleted(jobId)); - _mockSessionHandle.Verify(x => x.AbortTransaction(It.IsAny()), Times.Once); - _mockSessionHandle.Verify(x => 
x.CommitTransaction(It.IsAny()), Times.Never); - - // Check we handle missing statuses collection - client = GetTestMongoClient(); - store = new MongoExtractJobStore(client, ExtractionDatabaseName, _dateTimeProvider); - _extractionDatabase.InProgressCollection.InsertOne(testJob); - _extractionDatabase.ExpectedFilesCollections[$"expectedFiles_{jobId}"] = new MockExtractCollection(); - _extractionDatabase.ExpectedFilesCollections[$"expectedFiles_{jobId}"].InsertOne(testMongoExpectedFilesDoc); - _mockSessionHandle.Reset(); - Assert.Throws(() => store.MarkJobCompleted(jobId)); - _mockSessionHandle.Verify(x => x.AbortTransaction(It.IsAny()), Times.Once); - _mockSessionHandle.Verify(x => x.CommitTransaction(It.IsAny()), Times.Never); - - // Check happy path - client = GetTestMongoClient(); - store = new MongoExtractJobStore(client, ExtractionDatabaseName, _dateTimeProvider); - _extractionDatabase.InProgressCollection.InsertOne(testJob); - _extractionDatabase.ExpectedFilesCollections[$"expectedFiles_{jobId}"] = new MockExtractCollection(); - _extractionDatabase.ExpectedFilesCollections[$"expectedFiles_{jobId}"].InsertOne(testMongoExpectedFilesDoc); - _extractionDatabase.StatusCollections[$"statuses_{jobId}"] = new MockExtractCollection(); - _extractionDatabase.StatusCollections[$"statuses_{jobId}"].InsertOne(testMongoFileStatusDoc); - _mockSessionHandle.Reset(); - store.MarkJobCompleted(jobId); - _mockSessionHandle.Verify(x => x.AbortTransaction(It.IsAny()), Times.Never); - _mockSessionHandle.Verify(x => x.CommitTransaction(It.IsAny()), Times.Once); - Assert.Multiple(() => - { - Assert.That(_extractionDatabase.ExpectedFilesCollections, Has.Count.EqualTo(1)); - Assert.That(_extractionDatabase.StatusCollections, Has.Count.EqualTo(1)); - }); - } + OutputFilePath = "anon.dcm", + JobSubmittedAt = _dateTimeProvider.UtcNow(), + ProjectNumber = "1234", + ExtractionJobIdentifier = jobId, + ExtractionDirectory = "1234/test", + DicomFilePath = "original.dcm", + Status = 
VerifiedFileStatus.NotIdentifiable, + Report = "[]", // NOTE(rkm 2020-03-10) An "empty" report from IsIdentifiable + }; + var header = new MessageHeader(); + + store.PersistMessageToStore(testIsIdentifiableMessage, header); + + Dictionary docs = _extractionDatabase.StatusCollections[$"statuses_{jobId}"].Documents; + Assert.That(docs, Has.Count.EqualTo(1)); + MongoFileStatusDoc statusDoc = docs.Values.ToList()[0]; + + var expected = new MongoFileStatusDoc( + MongoExtractionMessageHeaderDoc.FromMessageHeader(jobId, header, _dateTimeProvider), + "original.dcm", + "anon.dcm", + ExtractedFileStatus.Anonymised, + VerifiedFileStatus.NotIdentifiable, + "[]"); + + Assert.That(statusDoc, Is.EqualTo(expected)); + } - [Test] - public void TestMarkJobFailedImpl() + [Test] + public void TestGetReadJobsImpl() + { + var jobId = Guid.NewGuid(); + var testJob = new MongoExtractJobDoc( + jobId, + MongoExtractionMessageHeaderDoc.FromMessageHeader(jobId, new MessageHeader(), _dateTimeProvider), + "1234", + ExtractJobStatus.Failed, + "test/dir", + _dateTimeProvider.UtcNow(), + "SeriesInstanceUID", + 1, + "testUser", + "MR", + isIdentifiableExtraction: true, + isNoFilterExtraction: true, + null); + var testMongoExpectedFilesDoc = new MongoExpectedFilesDoc( + MongoExtractionMessageHeaderDoc.FromMessageHeader(jobId, new MessageHeader(), _dateTimeProvider), + "1.2.3.4", + [ + new MongoExpectedFileInfoDoc(Guid.NewGuid(), "anon1.dcm"), + ], + new MongoRejectedKeyInfoDoc( + MongoExtractionMessageHeaderDoc.FromMessageHeader(jobId, new MessageHeader(), _dateTimeProvider), + []) + ); + var testMongoFileStatusDoc = new MongoFileStatusDoc( + MongoExtractionMessageHeaderDoc.FromMessageHeader(jobId, new MessageHeader(), _dateTimeProvider), + "input.dcm", + "anon1.dcm", + ExtractedFileStatus.Anonymised, + VerifiedFileStatus.NotIdentifiable, + "Verified"); + + var client = GetTestMongoClient(); + var store = new MongoExtractJobStore(client, ExtractionDatabaseName, _dateTimeProvider); + + // Assert that 
jobs marked as failed are not returned + _extractionDatabase.InProgressCollection.InsertOne(testJob); + _extractionDatabase.InProgressCollection.RejectChanges = true; + Assert.That(store.GetReadyJobs(), Is.Empty); + + // Assert that an in progress job is not returned + client = GetTestMongoClient(); + store = new MongoExtractJobStore(client, ExtractionDatabaseName, _dateTimeProvider); + testJob.JobStatus = ExtractJobStatus.WaitingForCollectionInfo; + _extractionDatabase.InProgressCollection.InsertOne(testJob); + _extractionDatabase.InProgressCollection.RejectChanges = true; + Assert.That(store.GetReadyJobs(), Is.Empty); + + // Check we handle a bad ReplaceOneResult + client = GetTestMongoClient(); + store = new MongoExtractJobStore(client, ExtractionDatabaseName, _dateTimeProvider); + testJob.JobStatus = ExtractJobStatus.WaitingForCollectionInfo; + _extractionDatabase.InProgressCollection.InsertOne(testJob); + _extractionDatabase.InProgressCollection.RejectChanges = true; + _extractionDatabase.ExpectedFilesCollections[$"expectedFiles_{jobId}"] = new MockExtractCollection(); + _extractionDatabase.ExpectedFilesCollections[$"expectedFiles_{jobId}"].InsertOne(testMongoExpectedFilesDoc); + Assert.Throws(() => store.GetReadyJobs()); + + // Check happy path + client = GetTestMongoClient(); + store = new MongoExtractJobStore(client, ExtractionDatabaseName, _dateTimeProvider); + testJob.JobStatus = ExtractJobStatus.WaitingForCollectionInfo; + _extractionDatabase.InProgressCollection.InsertOne(testJob); + _extractionDatabase.ExpectedFilesCollections[$"expectedFiles_{jobId}"] = new MockExtractCollection(); + _extractionDatabase.ExpectedFilesCollections[$"expectedFiles_{jobId}"].InsertOne(testMongoExpectedFilesDoc); + Assert.Multiple(() => { - Guid jobId = Guid.NewGuid(); - var testJob = new MongoExtractJobDoc( - jobId, + Assert.That(store.GetReadyJobs(), Is.Empty); + Assert.That(_extractionDatabase.InProgressCollection.Documents.Single().Value.JobStatus, 
Is.EqualTo(ExtractJobStatus.WaitingForStatuses)); + }); + _extractionDatabase.StatusCollections[$"statuses_{jobId}"] = new MockExtractCollection(); + _extractionDatabase.StatusCollections[$"statuses_{jobId}"].InsertOne(testMongoFileStatusDoc); + ExtractJobInfo job = store.GetReadyJobs().Single(); + Assert.Multiple(() => + { + Assert.That(job.JobStatus, Is.EqualTo(ExtractJobStatus.ReadyForChecks)); + Assert.That(_extractionDatabase.InProgressCollection.Documents.Single().Value.JobStatus, Is.EqualTo(ExtractJobStatus.ReadyForChecks)); + }); + } + + [Test] + public void TestCompleteJobImpl() + { + Guid jobId = Guid.NewGuid(); + var testJob = new MongoExtractJobDoc( + jobId, + MongoExtractionMessageHeaderDoc.FromMessageHeader(jobId, new MessageHeader(), _dateTimeProvider), + "1234", + ExtractJobStatus.Failed, + "test/dir", + _dateTimeProvider.UtcNow(), + "SeriesInstanceUID", + 1, + "testUser", + "MR", + isIdentifiableExtraction: true, + isNoFilterExtraction: true, + null); + var testMongoExpectedFilesDoc = new MongoExpectedFilesDoc( + MongoExtractionMessageHeaderDoc.FromMessageHeader(jobId, new MessageHeader(), _dateTimeProvider), + "1.2.3.4", + [ + new MongoExpectedFileInfoDoc(Guid.NewGuid(), "anon1.dcm"), + ], + new MongoRejectedKeyInfoDoc( MongoExtractionMessageHeaderDoc.FromMessageHeader(jobId, new MessageHeader(), _dateTimeProvider), - "1234", - ExtractJobStatus.Failed, - "test/dir", - _dateTimeProvider.UtcNow(), - "1.2.3.4", - 123, - "testUser", - "MR", - isIdentifiableExtraction: true, - isNoFilterExtraction: true, - null); - - var client = GetTestMongoClient(); - var store = new MongoExtractJobStore(client, ExtractionDatabaseName, _dateTimeProvider); - - // Assert that an exception is thrown for a non-existent job - Assert.Throws(() => store.MarkJobFailed(Guid.NewGuid(), new Exception())); - _mockSessionHandle.Verify(x => x.AbortTransaction(It.IsAny()), Times.Once); - _mockSessionHandle.Verify(x => x.CommitTransaction(It.IsAny()), Times.Never); - - // Assert 
that a job can't be failed twice - client = GetTestMongoClient(); - store = new MongoExtractJobStore(client, ExtractionDatabaseName, _dateTimeProvider); - _extractionDatabase.InProgressCollection.InsertOne(testJob); - _mockSessionHandle.Reset(); - Assert.Throws(() => store.MarkJobFailed(jobId, new Exception())); - _mockSessionHandle.Verify(x => x.AbortTransaction(It.IsAny()), Times.Once); - _mockSessionHandle.Verify(x => x.CommitTransaction(It.IsAny()), Times.Never); - - // Check we handle a bad ReplaceOneResult - client = GetTestMongoClient(); - store = new MongoExtractJobStore(client, ExtractionDatabaseName, _dateTimeProvider); - testJob.JobStatus = ExtractJobStatus.WaitingForCollectionInfo; - _extractionDatabase.InProgressCollection.InsertOne(testJob); - _extractionDatabase.InProgressCollection.RejectChanges = true; - _mockSessionHandle.Reset(); - Assert.Throws(() => store.MarkJobFailed(jobId, new Exception())); - _mockSessionHandle.Verify(x => x.AbortTransaction(It.IsAny()), Times.Once); - _mockSessionHandle.Verify(x => x.CommitTransaction(It.IsAny()), Times.Never); - - // Check happy path - client = GetTestMongoClient(); - store = new MongoExtractJobStore(client, ExtractionDatabaseName, _dateTimeProvider); - testJob.JobStatus = ExtractJobStatus.WaitingForCollectionInfo; - testJob.FailedJobInfoDoc = null; - _extractionDatabase.InProgressCollection.InsertOne(testJob); - _mockSessionHandle.Reset(); - store.MarkJobFailed(jobId, new Exception("TestMarkJobFailedImpl")); - _mockSessionHandle.Verify(x => x.AbortTransaction(It.IsAny()), Times.Never); - _mockSessionHandle.Verify(x => x.CommitTransaction(It.IsAny()), Times.Once); - Dictionary docs = _extractionDatabase.InProgressCollection.Documents; - Assert.That(docs, Has.Count.EqualTo(1)); - MongoExtractJobDoc failedDoc = docs[jobId]; - Assert.Multiple(() => - { - Assert.That(failedDoc.JobStatus, Is.EqualTo(ExtractJobStatus.Failed)); - Assert.That(failedDoc.FailedJobInfoDoc, Is.Not.Null); - }); - 
Assert.That(failedDoc.FailedJobInfoDoc!.ExceptionMessage, Is.EqualTo("TestMarkJobFailedImpl")); - } + []) + ); + var testMongoFileStatusDoc = new MongoFileStatusDoc( + MongoExtractionMessageHeaderDoc.FromMessageHeader(jobId, new MessageHeader(), _dateTimeProvider), + "input.dcm", + "anon1.dcm", + ExtractedFileStatus.Anonymised, + VerifiedFileStatus.NotIdentifiable, + "Verified"); + + var client = GetTestMongoClient(); + var store = new MongoExtractJobStore(client, ExtractionDatabaseName, _dateTimeProvider); + + // Assert that an exception is thrown for a non-existent job + Assert.Throws(() => store.MarkJobCompleted(Guid.NewGuid())); + _mockSessionHandle.Verify(x => x.AbortTransaction(It.IsAny()), Times.Once); + _mockSessionHandle.Verify(x => x.CommitTransaction(It.IsAny()), Times.Never); + + // Assert that an exception is thrown for a job which is marked as failed + client = GetTestMongoClient(); + store = new MongoExtractJobStore(client, ExtractionDatabaseName, _dateTimeProvider); + _extractionDatabase.InProgressCollection.InsertOne(testJob); + _mockSessionHandle.Reset(); + Assert.Throws(() => store.MarkJobCompleted(Guid.NewGuid())); + _mockSessionHandle.Verify(x => x.AbortTransaction(It.IsAny()), Times.Once); + _mockSessionHandle.Verify(x => x.CommitTransaction(It.IsAny()), Times.Never); + + // Check that we handle a failed insertion + client = GetTestMongoClient(); + store = new MongoExtractJobStore(client, ExtractionDatabaseName, _dateTimeProvider); + testJob.JobStatus = ExtractJobStatus.Completed; + _extractionDatabase.InProgressCollection.InsertOne(testJob); + _extractionDatabase.CompletedJobCollection.RejectChanges = true; + _mockSessionHandle.Reset(); + Assert.Throws(() => store.MarkJobCompleted(jobId)); + _mockSessionHandle.Verify(x => x.AbortTransaction(It.IsAny()), Times.Once); + _mockSessionHandle.Verify(x => x.CommitTransaction(It.IsAny()), Times.Never); + + // Check we handle a bad DeleteResult + client = GetTestMongoClient(); + store = new 
MongoExtractJobStore(client, ExtractionDatabaseName, _dateTimeProvider); + _extractionDatabase.InProgressCollection.RejectChanges = true; + _mockSessionHandle.Reset(); + Assert.Throws(() => store.MarkJobCompleted(jobId)); + _mockSessionHandle.Verify(x => x.AbortTransaction(It.IsAny()), Times.Once); + _mockSessionHandle.Verify(x => x.CommitTransaction(It.IsAny()), Times.Never); + + // Check we handle missing expectedFiles collection + client = GetTestMongoClient(); + store = new MongoExtractJobStore(client, ExtractionDatabaseName, _dateTimeProvider); + _extractionDatabase.InProgressCollection.InsertOne(testJob); + _mockSessionHandle.Reset(); + Assert.Throws(() => store.MarkJobCompleted(jobId)); + _mockSessionHandle.Verify(x => x.AbortTransaction(It.IsAny()), Times.Once); + _mockSessionHandle.Verify(x => x.CommitTransaction(It.IsAny()), Times.Never); + + // Check we handle missing statuses collection + client = GetTestMongoClient(); + store = new MongoExtractJobStore(client, ExtractionDatabaseName, _dateTimeProvider); + _extractionDatabase.InProgressCollection.InsertOne(testJob); + _extractionDatabase.ExpectedFilesCollections[$"expectedFiles_{jobId}"] = new MockExtractCollection(); + _extractionDatabase.ExpectedFilesCollections[$"expectedFiles_{jobId}"].InsertOne(testMongoExpectedFilesDoc); + _mockSessionHandle.Reset(); + Assert.Throws(() => store.MarkJobCompleted(jobId)); + _mockSessionHandle.Verify(x => x.AbortTransaction(It.IsAny()), Times.Once); + _mockSessionHandle.Verify(x => x.CommitTransaction(It.IsAny()), Times.Never); + + // Check happy path + client = GetTestMongoClient(); + store = new MongoExtractJobStore(client, ExtractionDatabaseName, _dateTimeProvider); + _extractionDatabase.InProgressCollection.InsertOne(testJob); + _extractionDatabase.ExpectedFilesCollections[$"expectedFiles_{jobId}"] = new MockExtractCollection(); + _extractionDatabase.ExpectedFilesCollections[$"expectedFiles_{jobId}"].InsertOne(testMongoExpectedFilesDoc); + 
_extractionDatabase.StatusCollections[$"statuses_{jobId}"] = new MockExtractCollection(); + _extractionDatabase.StatusCollections[$"statuses_{jobId}"].InsertOne(testMongoFileStatusDoc); + _mockSessionHandle.Reset(); + store.MarkJobCompleted(jobId); + _mockSessionHandle.Verify(x => x.AbortTransaction(It.IsAny()), Times.Never); + _mockSessionHandle.Verify(x => x.CommitTransaction(It.IsAny()), Times.Once); + Assert.Multiple(() => + { + Assert.That(_extractionDatabase.ExpectedFilesCollections, Has.Count.EqualTo(1)); + Assert.That(_extractionDatabase.StatusCollections, Has.Count.EqualTo(1)); + }); + } - [Test] - public void AddToWriteQueue_ProcessVerificationMessageQueue() + [Test] + public void TestMarkJobFailedImpl() + { + Guid jobId = Guid.NewGuid(); + var testJob = new MongoExtractJobDoc( + jobId, + MongoExtractionMessageHeaderDoc.FromMessageHeader(jobId, new MessageHeader(), _dateTimeProvider), + "1234", + ExtractJobStatus.Failed, + "test/dir", + _dateTimeProvider.UtcNow(), + "1.2.3.4", + 123, + "testUser", + "MR", + isIdentifiableExtraction: true, + isNoFilterExtraction: true, + null); + + var client = GetTestMongoClient(); + var store = new MongoExtractJobStore(client, ExtractionDatabaseName, _dateTimeProvider); + + // Assert that an exception is thrown for a non-existent job + Assert.Throws(() => store.MarkJobFailed(Guid.NewGuid(), new Exception())); + _mockSessionHandle.Verify(x => x.AbortTransaction(It.IsAny()), Times.Once); + _mockSessionHandle.Verify(x => x.CommitTransaction(It.IsAny()), Times.Never); + + // Assert that a job can't be failed twice + client = GetTestMongoClient(); + store = new MongoExtractJobStore(client, ExtractionDatabaseName, _dateTimeProvider); + _extractionDatabase.InProgressCollection.InsertOne(testJob); + _mockSessionHandle.Reset(); + Assert.Throws(() => store.MarkJobFailed(jobId, new Exception())); + _mockSessionHandle.Verify(x => x.AbortTransaction(It.IsAny()), Times.Once); + _mockSessionHandle.Verify(x => 
x.CommitTransaction(It.IsAny()), Times.Never); + + // Check we handle a bad ReplaceOneResult + client = GetTestMongoClient(); + store = new MongoExtractJobStore(client, ExtractionDatabaseName, _dateTimeProvider); + testJob.JobStatus = ExtractJobStatus.WaitingForCollectionInfo; + _extractionDatabase.InProgressCollection.InsertOne(testJob); + _extractionDatabase.InProgressCollection.RejectChanges = true; + _mockSessionHandle.Reset(); + Assert.Throws(() => store.MarkJobFailed(jobId, new Exception())); + _mockSessionHandle.Verify(x => x.AbortTransaction(It.IsAny()), Times.Once); + _mockSessionHandle.Verify(x => x.CommitTransaction(It.IsAny()), Times.Never); + + // Check happy path + client = GetTestMongoClient(); + store = new MongoExtractJobStore(client, ExtractionDatabaseName, _dateTimeProvider); + testJob.JobStatus = ExtractJobStatus.WaitingForCollectionInfo; + testJob.FailedJobInfoDoc = null; + _extractionDatabase.InProgressCollection.InsertOne(testJob); + _mockSessionHandle.Reset(); + store.MarkJobFailed(jobId, new Exception("TestMarkJobFailedImpl")); + _mockSessionHandle.Verify(x => x.AbortTransaction(It.IsAny()), Times.Never); + _mockSessionHandle.Verify(x => x.CommitTransaction(It.IsAny()), Times.Once); + Dictionary docs = _extractionDatabase.InProgressCollection.Documents; + Assert.That(docs, Has.Count.EqualTo(1)); + MongoExtractJobDoc failedDoc = docs[jobId]; + Assert.Multiple(() => { - // Arrange + Assert.That(failedDoc.JobStatus, Is.EqualTo(ExtractJobStatus.Failed)); + Assert.That(failedDoc.FailedJobInfoDoc, Is.Not.Null); + }); + Assert.That(failedDoc.FailedJobInfoDoc!.ExceptionMessage, Is.EqualTo("TestMarkJobFailedImpl")); + } - var client = GetTestMongoClient(); - var store = new MongoExtractJobStore(client, ExtractionDatabaseName, _dateTimeProvider); + [Test] + public void AddToWriteQueue_ProcessVerificationMessageQueue() + { + // Arrange - Guid jobId = Guid.NewGuid(); - var message = new ExtractedFileVerificationMessage - { - OutputFilePath = 
"anon.dcm", - JobSubmittedAt = _dateTimeProvider.UtcNow(), - ProjectNumber = "1234", - ExtractionJobIdentifier = jobId, - ExtractionDirectory = "1234/test", - DicomFilePath = "original.dcm", - Status = VerifiedFileStatus.NotIdentifiable, - Report = "[]", - }; - var header = new MessageHeader(); + var client = GetTestMongoClient(); + var store = new MongoExtractJobStore(client, ExtractionDatabaseName, _dateTimeProvider); - var nMessages = 10; + Guid jobId = Guid.NewGuid(); + var message = new ExtractedFileVerificationMessage + { + OutputFilePath = "anon.dcm", + JobSubmittedAt = _dateTimeProvider.UtcNow(), + ProjectNumber = "1234", + ExtractionJobIdentifier = jobId, + ExtractionDirectory = "1234/test", + DicomFilePath = "original.dcm", + Status = VerifiedFileStatus.NotIdentifiable, + Report = "[]", + }; + var header = new MessageHeader(); - // Act + var nMessages = 10; - for (int i = 0; i < nMessages; ++i) - store.AddToWriteQueue(message, header, (ulong)i); + // Act - store.ProcessVerificationMessageQueue(); + for (int i = 0; i < nMessages; ++i) + store.AddToWriteQueue(message, header, (ulong)i); - // Assert + store.ProcessVerificationMessageQueue(); - Assert.That( - _extractionDatabase.StatusCollections[$"statuses_{jobId}"].Documents, Has.Count -.EqualTo(nMessages)); - } + // Assert - [Test] - public void ProcessVerificationMessageQueue_Empty() - { - // Arrange + Assert.That( + _extractionDatabase.StatusCollections[$"statuses_{jobId}"].Documents, Has.Count +.EqualTo(nMessages)); + } - var client = GetTestMongoClient(); - var store = new MongoExtractJobStore(client, ExtractionDatabaseName, _dateTimeProvider); + [Test] + public void ProcessVerificationMessageQueue_Empty() + { + // Arrange - // Act - store.ProcessVerificationMessageQueue(); + var client = GetTestMongoClient(); + var store = new MongoExtractJobStore(client, ExtractionDatabaseName, _dateTimeProvider); - // Assert - // No exception - } + // Act + store.ProcessVerificationMessageQueue(); - #endregion + // 
Assert + // No exception } + + #endregion } diff --git a/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/MongoDB/ObjectModel/MongoCompletedExtractJobDocTest.cs b/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/MongoDB/ObjectModel/MongoCompletedExtractJobDocTest.cs index 655e19bf1..31855d7f4 100644 --- a/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/MongoDB/ObjectModel/MongoCompletedExtractJobDocTest.cs +++ b/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/MongoDB/ObjectModel/MongoCompletedExtractJobDocTest.cs @@ -9,53 +9,53 @@ using System; using System.Reflection; -namespace SmiServices.UnitTests.Microservices.CohortPackager.Execution.ExtractJobStorage.MongoDB.ObjectModel +namespace SmiServices.UnitTests.Microservices.CohortPackager.Execution.ExtractJobStorage.MongoDB.ObjectModel; + +[TestFixture] +public class MongoCompletedExtractJobDocTest { - [TestFixture] - public class MongoCompletedExtractJobDocTest + private static readonly DateTimeProvider _dateTimeProvider = new TestDateTimeProvider(); + + private readonly MongoExtractJobDoc _testExtractJobDoc = new( + Guid.NewGuid(), + MongoExtractionMessageHeaderDoc.FromMessageHeader(Guid.NewGuid(), new MessageHeader { Parents = [Guid.NewGuid()] }, _dateTimeProvider), + "1234", + ExtractJobStatus.ReadyForChecks, + "test", + DateTime.UtcNow, + "test", + 1, + "testUser", + null, + isIdentifiableExtraction: true, + isNoFilterExtraction: true, + null); + + #region Fixture Methods + + [OneTimeSetUp] + public void OneTimeSetUp() { - private static readonly DateTimeProvider _dateTimeProvider = new TestDateTimeProvider(); - - private readonly MongoExtractJobDoc _testExtractJobDoc = new( - Guid.NewGuid(), - MongoExtractionMessageHeaderDoc.FromMessageHeader(Guid.NewGuid(), new MessageHeader { Parents = [Guid.NewGuid()] }, _dateTimeProvider), - "1234", - 
ExtractJobStatus.ReadyForChecks, - "test", - DateTime.UtcNow, - "test", - 1, - "testUser", - null, - isIdentifiableExtraction: true, - isNoFilterExtraction: true, - null); - - #region Fixture Methods - - [OneTimeSetUp] - public void OneTimeSetUp() - { - } + } - [OneTimeTearDown] - public void OneTimeTearDown() { } + [OneTimeTearDown] + public void OneTimeTearDown() { } - [SetUp] - public void SetUp() { } + [SetUp] + public void SetUp() { } - [TearDown] - public void TearDown() { } + [TearDown] + public void TearDown() { } - #endregion + #endregion - #region Tests + #region Tests - [Test] - public void Test_MongoCompletedExtractJobDoc_ParseOldFormat() - { - Console.WriteLine(Guid.NewGuid()); - const string jsonDoc = @" + [Test] + public void Test_MongoCompletedExtractJobDoc_ParseOldFormat() + { + Console.WriteLine(Guid.NewGuid()); + const string jsonDoc = @" { '_id' : 'bfead735-d5c0-4f7c-b0a7-88d873704dab', 'header' : { @@ -78,47 +78,46 @@ public void Test_MongoCompletedExtractJobDoc_ParseOldFormat() 'completedAt' : ISODate('2020-08-28T12:00:00Z'), }"; - var mongoExtractJobDoc = BsonSerializer.Deserialize(BsonDocument.Parse(jsonDoc)); + var mongoExtractJobDoc = BsonSerializer.Deserialize(BsonDocument.Parse(jsonDoc)); - Assert.Multiple(() => - { - // NOTE(rkm 2020-08-28) This works by chance since the missing bool will default to false, so we don't require MongoCompletedExtractJobDoc to implement ISupportInitialize - Assert.That(mongoExtractJobDoc.IsIdentifiableExtraction, Is.False); - Assert.That(mongoExtractJobDoc.IsNoFilterExtraction, Is.False); - }); - } - - [Test] - public void TestMongoCompletedExtractJobDoc_SettersAvailable() + Assert.Multiple(() => { - foreach (PropertyInfo p in typeof(MongoCompletedExtractJobDoc).GetProperties()) - Assert.That(p.CanWrite, Is.True, $"Property '{p.Name}' is not writeable"); - } + // NOTE(rkm 2020-08-28) This works by chance since the missing bool will default to false, so we don't require MongoCompletedExtractJobDoc to 
implement ISupportInitialize + Assert.That(mongoExtractJobDoc.IsIdentifiableExtraction, Is.False); + Assert.That(mongoExtractJobDoc.IsNoFilterExtraction, Is.False); + }); + } - [Test] - public void TestMongoCompletedExtractJobDoc_Constructor_ExtractJobStatus() - { - var doc = new MongoCompletedExtractJobDoc(_testExtractJobDoc, _dateTimeProvider.UtcNow()); + [Test] + public void TestMongoCompletedExtractJobDoc_SettersAvailable() + { + foreach (PropertyInfo p in typeof(MongoCompletedExtractJobDoc).GetProperties()) + Assert.That(p.CanWrite, Is.True, $"Property '{p.Name}' is not writeable"); + } - Assert.That(doc.JobStatus, Is.EqualTo(ExtractJobStatus.Completed)); - } + [Test] + public void TestMongoCompletedExtractJobDoc_Constructor_ExtractJobStatus() + { + var doc = new MongoCompletedExtractJobDoc(_testExtractJobDoc, _dateTimeProvider.UtcNow()); - [Test] - public void TestMongoCompletedExtractJobDoc_Equality() - { - var doc1 = new MongoCompletedExtractJobDoc(_testExtractJobDoc, _dateTimeProvider.UtcNow()); - var doc2 = new MongoCompletedExtractJobDoc(_testExtractJobDoc, _dateTimeProvider.UtcNow()); - Assert.That(doc2, Is.EqualTo(doc1)); - } + Assert.That(doc.JobStatus, Is.EqualTo(ExtractJobStatus.Completed)); + } - [Test] - public void TestMongoCompletedExtractJobDoc_GetHashCode() - { - var doc1 = new MongoCompletedExtractJobDoc(_testExtractJobDoc, _dateTimeProvider.UtcNow()); - var doc2 = new MongoCompletedExtractJobDoc(_testExtractJobDoc, _dateTimeProvider.UtcNow()); - Assert.That(doc2.GetHashCode(), Is.EqualTo(doc1.GetHashCode())); - } + [Test] + public void TestMongoCompletedExtractJobDoc_Equality() + { + var doc1 = new MongoCompletedExtractJobDoc(_testExtractJobDoc, _dateTimeProvider.UtcNow()); + var doc2 = new MongoCompletedExtractJobDoc(_testExtractJobDoc, _dateTimeProvider.UtcNow()); + Assert.That(doc2, Is.EqualTo(doc1)); + } - #endregion + [Test] + public void TestMongoCompletedExtractJobDoc_GetHashCode() + { + var doc1 = new 
MongoCompletedExtractJobDoc(_testExtractJobDoc, _dateTimeProvider.UtcNow()); + var doc2 = new MongoCompletedExtractJobDoc(_testExtractJobDoc, _dateTimeProvider.UtcNow()); + Assert.That(doc2.GetHashCode(), Is.EqualTo(doc1.GetHashCode())); } + + #endregion } diff --git a/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/MongoDB/ObjectModel/MongoExpectedFilesDocTest.cs b/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/MongoDB/ObjectModel/MongoExpectedFilesDocTest.cs index 874f29828..ef5e1a3ac 100644 --- a/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/MongoDB/ObjectModel/MongoExpectedFilesDocTest.cs +++ b/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/MongoDB/ObjectModel/MongoExpectedFilesDocTest.cs @@ -11,231 +11,230 @@ using System.Reflection; -namespace SmiServices.UnitTests.Microservices.CohortPackager.Execution.ExtractJobStorage.MongoDB.ObjectModel +namespace SmiServices.UnitTests.Microservices.CohortPackager.Execution.ExtractJobStorage.MongoDB.ObjectModel; + +[TestFixture] +public class MongoExpectedFilesDocTest { - [TestFixture] - public class MongoExpectedFilesDocTest + private readonly DateTimeProvider _dateTimeProvider = new TestDateTimeProvider(); + + private readonly MessageHeader _testHeader = new() { - private readonly DateTimeProvider _dateTimeProvider = new TestDateTimeProvider(); + Parents = [Guid.NewGuid(),], + }; - private readonly MessageHeader _testHeader = new() - { - Parents = [Guid.NewGuid(),], - }; + #region Fixture Methods - #region Fixture Methods + [OneTimeSetUp] + public void OneTimeSetUp() + { + } - [OneTimeSetUp] - public void OneTimeSetUp() - { - } + [OneTimeTearDown] + public void OneTimeTearDown() { } - [OneTimeTearDown] - public void OneTimeTearDown() { } + [SetUp] + public void SetUp() { } - [SetUp] - public void SetUp() { } + [TearDown] + public void TearDown() { } - [TearDown] - public 
void TearDown() { } + #endregion - #endregion + #region Tests - #region Tests + [Test] + public void TestMongoExpectedFilesDoc_SettersAvailable() + { + foreach (PropertyInfo p in typeof(MongoExpectedFilesDoc).GetProperties()) + Assert.That(p.CanWrite, Is.True, $"Property '{p.Name}' is not writeable"); + } - [Test] - public void TestMongoExpectedFilesDoc_SettersAvailable() + [Test] + public void TestMongoExpectedFilesDoc_FromMessage() + { + var mockMessage = new Mock(); + mockMessage.Object.KeyValue = "TestKey"; + Guid jobId = Guid.NewGuid(); + mockMessage.Object.ExtractionJobIdentifier = jobId; + var header1 = new MessageHeader(); + var header2 = new MessageHeader(); + mockMessage.Object.ExtractFileMessagesDispatched = new JsonCompatibleDictionary { - foreach (PropertyInfo p in typeof(MongoExpectedFilesDoc).GetProperties()) - Assert.That(p.CanWrite, Is.True, $"Property '{p.Name}' is not writeable"); - } - - [Test] - public void TestMongoExpectedFilesDoc_FromMessage() + { header1, "AnonFile1.dcm"}, + { header2, "AnonFile2.dcm"}, + }; + mockMessage.Object.RejectionReasons = new Dictionary { - var mockMessage = new Mock(); - mockMessage.Object.KeyValue = "TestKey"; - Guid jobId = Guid.NewGuid(); - mockMessage.Object.ExtractionJobIdentifier = jobId; - var header1 = new MessageHeader(); - var header2 = new MessageHeader(); - mockMessage.Object.ExtractFileMessagesDispatched = new JsonCompatibleDictionary - { - { header1, "AnonFile1.dcm"}, - { header2, "AnonFile2.dcm"}, - }; - mockMessage.Object.RejectionReasons = new Dictionary - { - { "Reject1", 1 }, - { "Reject2", 2 }, - }; - - MongoExpectedFilesDoc doc = MongoExpectedFilesDoc.FromMessage(mockMessage.Object, _testHeader, _dateTimeProvider); - - var expected = new MongoExpectedFilesDoc( - MongoExtractionMessageHeaderDoc.FromMessageHeader(jobId, _testHeader, _dateTimeProvider), - "TestKey", - [ - new MongoExpectedFileInfoDoc(header1.MessageGuid,"AnonFile1.dcm"), - new 
MongoExpectedFileInfoDoc(header2.MessageGuid,"AnonFile2.dcm"), - ], - MongoRejectedKeyInfoDoc.FromMessage(mockMessage.Object, _testHeader, _dateTimeProvider)); - - Assert.That(doc, Is.EqualTo(expected)); - } - - [Test] - public void TestMongoExpectedFilesDoc_Equality() + { "Reject1", 1 }, + { "Reject2", 2 }, + }; + + MongoExpectedFilesDoc doc = MongoExpectedFilesDoc.FromMessage(mockMessage.Object, _testHeader, _dateTimeProvider); + + var expected = new MongoExpectedFilesDoc( + MongoExtractionMessageHeaderDoc.FromMessageHeader(jobId, _testHeader, _dateTimeProvider), + "TestKey", + [ + new MongoExpectedFileInfoDoc(header1.MessageGuid,"AnonFile1.dcm"), + new MongoExpectedFileInfoDoc(header2.MessageGuid,"AnonFile2.dcm"), + ], + MongoRejectedKeyInfoDoc.FromMessage(mockMessage.Object, _testHeader, _dateTimeProvider)); + + Assert.That(doc, Is.EqualTo(expected)); + } + + [Test] + public void TestMongoExpectedFilesDoc_Equality() + { + var expectedFiles = new HashSet { - var expectedFiles = new HashSet + new(Guid.NewGuid(), "anon1.dcm"), + new(Guid.NewGuid(), "anon2.dcm"), + }; + Guid jobId = Guid.NewGuid(); + var rejectedKeys = new MongoRejectedKeyInfoDoc( + MongoExtractionMessageHeaderDoc.FromMessageHeader(jobId, _testHeader, _dateTimeProvider), + new Dictionary { - new(Guid.NewGuid(), "anon1.dcm"), - new(Guid.NewGuid(), "anon2.dcm"), - }; - Guid jobId = Guid.NewGuid(); - var rejectedKeys = new MongoRejectedKeyInfoDoc( - MongoExtractionMessageHeaderDoc.FromMessageHeader(jobId, _testHeader, _dateTimeProvider), - new Dictionary - { - {"reject-1", 1 }, - {"reject-2", 2 }, - }); - - var doc1 = new MongoExpectedFilesDoc( - MongoExtractionMessageHeaderDoc.FromMessageHeader(jobId, _testHeader, _dateTimeProvider), - "TestKey", - expectedFiles, - rejectedKeys); - var doc2 = new MongoExpectedFilesDoc( - MongoExtractionMessageHeaderDoc.FromMessageHeader(jobId, _testHeader, _dateTimeProvider), - "TestKey", - expectedFiles, - rejectedKeys); - - Assert.That(doc2, Is.EqualTo(doc1)); - } 
- - [Test] - public void TestMongoExpectedFilesDoc_GetHashCode() + {"reject-1", 1 }, + {"reject-2", 2 }, + }); + + var doc1 = new MongoExpectedFilesDoc( + MongoExtractionMessageHeaderDoc.FromMessageHeader(jobId, _testHeader, _dateTimeProvider), + "TestKey", + expectedFiles, + rejectedKeys); + var doc2 = new MongoExpectedFilesDoc( + MongoExtractionMessageHeaderDoc.FromMessageHeader(jobId, _testHeader, _dateTimeProvider), + "TestKey", + expectedFiles, + rejectedKeys); + + Assert.That(doc2, Is.EqualTo(doc1)); + } + + [Test] + public void TestMongoExpectedFilesDoc_GetHashCode() + { + var expectedFiles = new HashSet { - var expectedFiles = new HashSet + new(Guid.NewGuid(), "anon1.dcm"), + new(Guid.NewGuid(), "anon2.dcm"), + }; + Guid jobId = Guid.NewGuid(); + var rejectedKeys = new MongoRejectedKeyInfoDoc( + MongoExtractionMessageHeaderDoc.FromMessageHeader(jobId, _testHeader, _dateTimeProvider), + new Dictionary { - new(Guid.NewGuid(), "anon1.dcm"), - new(Guid.NewGuid(), "anon2.dcm"), - }; - Guid jobId = Guid.NewGuid(); - var rejectedKeys = new MongoRejectedKeyInfoDoc( - MongoExtractionMessageHeaderDoc.FromMessageHeader(jobId, _testHeader, _dateTimeProvider), - new Dictionary - { - {"reject-1", 1 }, - {"reject-2", 2 }, - } - ); - - var doc1 = new MongoExpectedFilesDoc( - MongoExtractionMessageHeaderDoc.FromMessageHeader(jobId, _testHeader, _dateTimeProvider), - "TestKey", - expectedFiles, - rejectedKeys); - var doc2 = new MongoExpectedFilesDoc( - MongoExtractionMessageHeaderDoc.FromMessageHeader(jobId, _testHeader, _dateTimeProvider), - "TestKey", - expectedFiles, - rejectedKeys); - - Assert.That(doc2.GetHashCode(), Is.EqualTo(doc1.GetHashCode())); - } - - [Test] - public void TestMongoExpectedFileInfoDoc_SettersAvailable() - { - foreach (PropertyInfo p in typeof(MongoExpectedFileInfoDoc).GetProperties()) - Assert.That(p.CanWrite, Is.True, $"Property '{p.Name}' is not writeable"); - } + {"reject-1", 1 }, + {"reject-2", 2 }, + } + ); + + var doc1 = new 
MongoExpectedFilesDoc( + MongoExtractionMessageHeaderDoc.FromMessageHeader(jobId, _testHeader, _dateTimeProvider), + "TestKey", + expectedFiles, + rejectedKeys); + var doc2 = new MongoExpectedFilesDoc( + MongoExtractionMessageHeaderDoc.FromMessageHeader(jobId, _testHeader, _dateTimeProvider), + "TestKey", + expectedFiles, + rejectedKeys); + + Assert.That(doc2.GetHashCode(), Is.EqualTo(doc1.GetHashCode())); + } - [Test] - public void TestMongoExpectedFileInfoDoc_Equality() - { - Guid guid = Guid.NewGuid(); - var doc1 = new MongoExpectedFileInfoDoc(guid, "AnonFile1.dcm"); - var doc2 = new MongoExpectedFileInfoDoc(guid, "AnonFile1.dcm"); - Assert.That(doc2, Is.EqualTo(doc1)); - } - - [Test] - public void TestMongoExpectedFileInfoDoc_GetHashcode() - { - Guid guid = Guid.NewGuid(); - var doc1 = new MongoExpectedFileInfoDoc(guid, "AnonFile1.dcm"); - var doc2 = new MongoExpectedFileInfoDoc(guid, "AnonFile1.dcm"); - Assert.That(doc2.GetHashCode(), Is.EqualTo(doc1.GetHashCode())); - } - - [Test] - public void TestMongoRejectedKeyInfoDoc_SettersAvailable() - { - foreach (PropertyInfo p in typeof(MongoRejectedKeyInfoDoc).GetProperties()) - Assert.That(p.CanWrite, Is.True, $"Property '{p.Name}' is not writeable"); - } + [Test] + public void TestMongoExpectedFileInfoDoc_SettersAvailable() + { + foreach (PropertyInfo p in typeof(MongoExpectedFileInfoDoc).GetProperties()) + Assert.That(p.CanWrite, Is.True, $"Property '{p.Name}' is not writeable"); + } + + [Test] + public void TestMongoExpectedFileInfoDoc_Equality() + { + Guid guid = Guid.NewGuid(); + var doc1 = new MongoExpectedFileInfoDoc(guid, "AnonFile1.dcm"); + var doc2 = new MongoExpectedFileInfoDoc(guid, "AnonFile1.dcm"); + Assert.That(doc2, Is.EqualTo(doc1)); + } + + [Test] + public void TestMongoExpectedFileInfoDoc_GetHashcode() + { + Guid guid = Guid.NewGuid(); + var doc1 = new MongoExpectedFileInfoDoc(guid, "AnonFile1.dcm"); + var doc2 = new MongoExpectedFileInfoDoc(guid, "AnonFile1.dcm"); + 
Assert.That(doc2.GetHashCode(), Is.EqualTo(doc1.GetHashCode())); + } + + [Test] + public void TestMongoRejectedKeyInfoDoc_SettersAvailable() + { + foreach (PropertyInfo p in typeof(MongoRejectedKeyInfoDoc).GetProperties()) + Assert.That(p.CanWrite, Is.True, $"Property '{p.Name}' is not writeable"); + } - [Test] - public void TestMongoRejectedKeyInfoDoc_FromMessage() + [Test] + public void TestMongoRejectedKeyInfoDoc_FromMessage() + { + var mockMessage = new Mock(); + Guid jobId = Guid.NewGuid(); + mockMessage.Object.ExtractionJobIdentifier = jobId; + mockMessage.Object.RejectionReasons = new Dictionary { - var mockMessage = new Mock(); - Guid jobId = Guid.NewGuid(); - mockMessage.Object.ExtractionJobIdentifier = jobId; - mockMessage.Object.RejectionReasons = new Dictionary - { - {"Reject1", 1 }, - {"Reject2", 2 }, - }; + {"Reject1", 1 }, + {"Reject2", 2 }, + }; - MongoRejectedKeyInfoDoc doc = MongoRejectedKeyInfoDoc.FromMessage(mockMessage.Object, _testHeader, _dateTimeProvider); + MongoRejectedKeyInfoDoc doc = MongoRejectedKeyInfoDoc.FromMessage(mockMessage.Object, _testHeader, _dateTimeProvider); - var expected = new MongoRejectedKeyInfoDoc( - MongoExtractionMessageHeaderDoc.FromMessageHeader(jobId, _testHeader, _dateTimeProvider), - new Dictionary - { - {"Reject1", 1}, - {"Reject2", 2}, - }); + var expected = new MongoRejectedKeyInfoDoc( + MongoExtractionMessageHeaderDoc.FromMessageHeader(jobId, _testHeader, _dateTimeProvider), + new Dictionary + { + {"Reject1", 1}, + {"Reject2", 2}, + }); - Assert.That(doc, Is.EqualTo(expected)); - } + Assert.That(doc, Is.EqualTo(expected)); + } - [Test] - public void TestMongoRejectedKeyInfoDoc_Equality() + [Test] + public void TestMongoRejectedKeyInfoDoc_Equality() + { + Guid guid = Guid.NewGuid(); + var rejectReasons = new Dictionary { - Guid guid = Guid.NewGuid(); - var rejectReasons = new Dictionary - { - {"Reject1", 1 }, - {"Reject2", 2 }, - }; + {"Reject1", 1 }, + {"Reject2", 2 }, + }; - var doc1 = new 
MongoRejectedKeyInfoDoc(MongoExtractionMessageHeaderDoc.FromMessageHeader(guid, _testHeader, _dateTimeProvider), rejectReasons); - var doc2 = new MongoRejectedKeyInfoDoc(MongoExtractionMessageHeaderDoc.FromMessageHeader(guid, _testHeader, _dateTimeProvider), rejectReasons); + var doc1 = new MongoRejectedKeyInfoDoc(MongoExtractionMessageHeaderDoc.FromMessageHeader(guid, _testHeader, _dateTimeProvider), rejectReasons); + var doc2 = new MongoRejectedKeyInfoDoc(MongoExtractionMessageHeaderDoc.FromMessageHeader(guid, _testHeader, _dateTimeProvider), rejectReasons); - Assert.That(doc2, Is.EqualTo(doc1)); - } + Assert.That(doc2, Is.EqualTo(doc1)); + } - [Test] - public void TestMongoRejectedKeyInfoDoc_GetHashCode() + [Test] + public void TestMongoRejectedKeyInfoDoc_GetHashCode() + { + Guid guid = Guid.NewGuid(); + var rejectReasons = new Dictionary { - Guid guid = Guid.NewGuid(); - var rejectReasons = new Dictionary - { - {"Reject1", 1 }, - {"Reject2", 2 }, - }; - - var doc1 = new MongoRejectedKeyInfoDoc(MongoExtractionMessageHeaderDoc.FromMessageHeader(guid, _testHeader, _dateTimeProvider), rejectReasons); - var doc2 = new MongoRejectedKeyInfoDoc(MongoExtractionMessageHeaderDoc.FromMessageHeader(guid, _testHeader, _dateTimeProvider), rejectReasons); + {"Reject1", 1 }, + {"Reject2", 2 }, + }; - Assert.That(doc2.GetHashCode(), Is.EqualTo(doc1.GetHashCode())); - } + var doc1 = new MongoRejectedKeyInfoDoc(MongoExtractionMessageHeaderDoc.FromMessageHeader(guid, _testHeader, _dateTimeProvider), rejectReasons); + var doc2 = new MongoRejectedKeyInfoDoc(MongoExtractionMessageHeaderDoc.FromMessageHeader(guid, _testHeader, _dateTimeProvider), rejectReasons); - #endregion + Assert.That(doc2.GetHashCode(), Is.EqualTo(doc1.GetHashCode())); } + + #endregion } diff --git a/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/MongoDB/ObjectModel/MongoExtractJobDocTest.cs 
b/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/MongoDB/ObjectModel/MongoExtractJobDocTest.cs index a09f5e997..255e3746d 100644 --- a/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/MongoDB/ObjectModel/MongoExtractJobDocTest.cs +++ b/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/MongoDB/ObjectModel/MongoExtractJobDocTest.cs @@ -11,112 +11,112 @@ using System.Reflection; -namespace SmiServices.UnitTests.Microservices.CohortPackager.Execution.ExtractJobStorage.MongoDB.ObjectModel +namespace SmiServices.UnitTests.Microservices.CohortPackager.Execution.ExtractJobStorage.MongoDB.ObjectModel; + +[TestFixture] +public class MongoExtractJobDocTest { - [TestFixture] - public class MongoExtractJobDocTest + private readonly DateTimeProvider _dateTimeProvider = new TestDateTimeProvider(); + + private readonly MessageHeader _messageHeader = new() { - private readonly DateTimeProvider _dateTimeProvider = new TestDateTimeProvider(); + Parents = [Guid.NewGuid()] + }; - private readonly MessageHeader _messageHeader = new() - { - Parents = [Guid.NewGuid()] - }; + #region Fixture Methods - #region Fixture Methods + [OneTimeSetUp] + public void OneTimeSetUp() + { + } - [OneTimeSetUp] - public void OneTimeSetUp() - { - } + [OneTimeTearDown] + public void OneTimeTearDown() { } - [OneTimeTearDown] - public void OneTimeTearDown() { } + [SetUp] + public void SetUp() { } - [SetUp] - public void SetUp() { } + [TearDown] + public void TearDown() { } - [TearDown] - public void TearDown() { } + #endregion - #endregion + #region Tests - #region Tests + [Test] + public void Test_MongoExtractJobDoc_CopyConstructor() + { + var jobid = Guid.NewGuid(); + var original = new MongoExtractJobDoc( + jobid, + MongoExtractionMessageHeaderDoc.FromMessageHeader(jobid, _messageHeader, _dateTimeProvider), + "1234", + ExtractJobStatus.WaitingForCollectionInfo, + "test/directory", + 
_dateTimeProvider.UtcNow(), + "KeyTag", + 123, + "testUser", + "MR", + isIdentifiableExtraction: true, + isNoFilterExtraction: true, + new MongoFailedJobInfoDoc(new Exception("foo"), _dateTimeProvider) + ); + var copied = new MongoExtractJobDoc(original); + + Assert.That(copied, Is.EqualTo(original)); + } - [Test] - public void Test_MongoExtractJobDoc_CopyConstructor() - { - var jobid = Guid.NewGuid(); - var original = new MongoExtractJobDoc( - jobid, - MongoExtractionMessageHeaderDoc.FromMessageHeader(jobid, _messageHeader, _dateTimeProvider), - "1234", - ExtractJobStatus.WaitingForCollectionInfo, - "test/directory", - _dateTimeProvider.UtcNow(), - "KeyTag", - 123, - "testUser", - "MR", - isIdentifiableExtraction: true, - isNoFilterExtraction: true, - new MongoFailedJobInfoDoc(new Exception("foo"), _dateTimeProvider) - ); - var copied = new MongoExtractJobDoc(original); - - Assert.That(copied, Is.EqualTo(original)); - } - - - [Test] - public void TestMongoExtractJobDoc_SettersAvailable() - { - foreach (PropertyInfo p in typeof(MongoExtractJobDoc).GetProperties()) - Assert.That(p.CanWrite, Is.True, $"Property '{p.Name}' is not writeable"); - } - [Test] - public void TestMongoExtractJobDoc_FromMessage() - { - Guid guid = Guid.NewGuid(); - var message = new ExtractionRequestInfoMessage - { - Modality = "MR", - JobSubmittedAt = _dateTimeProvider.UtcNow(), - ProjectNumber = "1234", - ExtractionJobIdentifier = guid, - ExtractionDirectory = "test/directory", - KeyTag = "KeyTag", - KeyValueCount = 123, - UserName = "testUser", - IsIdentifiableExtraction = true, - IsNoFilterExtraction = true, - }; - - MongoExtractJobDoc doc = MongoExtractJobDoc.FromMessage(message, _messageHeader, _dateTimeProvider); - - var expected = new MongoExtractJobDoc( - guid, - MongoExtractionMessageHeaderDoc.FromMessageHeader(guid, _messageHeader, _dateTimeProvider), - "1234", - ExtractJobStatus.WaitingForCollectionInfo, - "test/directory", - _dateTimeProvider.UtcNow(), - "KeyTag", - 123, - 
"testUser", - "MR", - isIdentifiableExtraction: true, - isNoFilterExtraction: true, - null); - - Assert.That(doc, Is.EqualTo(expected)); - } - - [Test] - public void TestMongoExtractJobDoc_Parse_v5_4_0() + [Test] + public void TestMongoExtractJobDoc_SettersAvailable() + { + foreach (PropertyInfo p in typeof(MongoExtractJobDoc).GetProperties()) + Assert.That(p.CanWrite, Is.True, $"Property '{p.Name}' is not writeable"); + } + + [Test] + public void TestMongoExtractJobDoc_FromMessage() + { + Guid guid = Guid.NewGuid(); + var message = new ExtractionRequestInfoMessage { - const string jsonDoc = @" + Modality = "MR", + JobSubmittedAt = _dateTimeProvider.UtcNow(), + ProjectNumber = "1234", + ExtractionJobIdentifier = guid, + ExtractionDirectory = "test/directory", + KeyTag = "KeyTag", + KeyValueCount = 123, + UserName = "testUser", + IsIdentifiableExtraction = true, + IsNoFilterExtraction = true, + }; + + MongoExtractJobDoc doc = MongoExtractJobDoc.FromMessage(message, _messageHeader, _dateTimeProvider); + + var expected = new MongoExtractJobDoc( + guid, + MongoExtractionMessageHeaderDoc.FromMessageHeader(guid, _messageHeader, _dateTimeProvider), + "1234", + ExtractJobStatus.WaitingForCollectionInfo, + "test/directory", + _dateTimeProvider.UtcNow(), + "KeyTag", + 123, + "testUser", + "MR", + isIdentifiableExtraction: true, + isNoFilterExtraction: true, + null); + + Assert.That(doc, Is.EqualTo(expected)); + } + + [Test] + public void TestMongoExtractJobDoc_Parse_v5_4_0() + { + const string jsonDoc = @" { _id: '898a207b-cc2a-4014-97f0-f881c07a3d65', header: { @@ -139,114 +139,113 @@ public void TestMongoExtractJobDoc_Parse_v5_4_0() IsNoFilterExtraction: false, failedJobInfo: null }"; - var mongoExtractJobDoc = BsonSerializer.Deserialize(BsonDocument.Parse(jsonDoc)); - Assert.That(mongoExtractJobDoc.UserName, Is.Null); - } + var mongoExtractJobDoc = BsonSerializer.Deserialize(BsonDocument.Parse(jsonDoc)); + Assert.That(mongoExtractJobDoc.UserName, Is.Null); + } - [Test] - 
public void TestMongoExtractJobDoc_Equality() - { - Guid guid = Guid.NewGuid(); - var failedInfoDoc = new MongoFailedJobInfoDoc(new TestException("aaah"), _dateTimeProvider); - - var doc1 = new MongoExtractJobDoc( - guid, - MongoExtractionMessageHeaderDoc.FromMessageHeader(guid, _messageHeader, _dateTimeProvider), - "1234", - ExtractJobStatus.WaitingForCollectionInfo, - "test/directory", - _dateTimeProvider.UtcNow(), - "KeyTag", - 123, - "testUser", - "MR", - isIdentifiableExtraction: true, - isNoFilterExtraction: true, - failedInfoDoc); - var doc2 = new MongoExtractJobDoc( - guid, - MongoExtractionMessageHeaderDoc.FromMessageHeader(guid, _messageHeader, _dateTimeProvider), - "1234", - ExtractJobStatus.WaitingForCollectionInfo, - "test/directory", - _dateTimeProvider.UtcNow(), - "KeyTag", - 123, - "testUser", - "MR", - isIdentifiableExtraction: true, - isNoFilterExtraction: true, - failedInfoDoc); - - Assert.That(doc2, Is.EqualTo(doc1)); - } - - [Test] - public void TestMongoExtractJobDoc_GetHashCode() - { - Guid guid = Guid.NewGuid(); - - var doc1 = new MongoExtractJobDoc( - guid, - MongoExtractionMessageHeaderDoc.FromMessageHeader(guid, _messageHeader, _dateTimeProvider), - "1234", - ExtractJobStatus.WaitingForCollectionInfo, - "test/directory", - _dateTimeProvider.UtcNow(), - "KeyTag", - 123, - "testUser", - "MR", - isIdentifiableExtraction: true, - isNoFilterExtraction: true, - null); - var doc2 = new MongoExtractJobDoc( - guid, - MongoExtractionMessageHeaderDoc.FromMessageHeader(guid, _messageHeader, _dateTimeProvider), - "1234", - ExtractJobStatus.WaitingForCollectionInfo, - "test/directory", - _dateTimeProvider.UtcNow(), - "KeyTag", - 123, - "testUser", - "MR", - isIdentifiableExtraction: true, - isNoFilterExtraction: true, - null); - - Assert.That(doc2.GetHashCode(), Is.EqualTo(doc1.GetHashCode())); - } - - [Test] - public void TestMongoFailedJobInfoDoc_SettersAvailable() - { - foreach (PropertyInfo p in typeof(MongoFailedJobInfoDoc).GetProperties()) - 
Assert.That(p.CanWrite, Is.True, $"Property '{p.Name}' is not writeable"); - } + [Test] + public void TestMongoExtractJobDoc_Equality() + { + Guid guid = Guid.NewGuid(); + var failedInfoDoc = new MongoFailedJobInfoDoc(new TestException("aaah"), _dateTimeProvider); + + var doc1 = new MongoExtractJobDoc( + guid, + MongoExtractionMessageHeaderDoc.FromMessageHeader(guid, _messageHeader, _dateTimeProvider), + "1234", + ExtractJobStatus.WaitingForCollectionInfo, + "test/directory", + _dateTimeProvider.UtcNow(), + "KeyTag", + 123, + "testUser", + "MR", + isIdentifiableExtraction: true, + isNoFilterExtraction: true, + failedInfoDoc); + var doc2 = new MongoExtractJobDoc( + guid, + MongoExtractionMessageHeaderDoc.FromMessageHeader(guid, _messageHeader, _dateTimeProvider), + "1234", + ExtractJobStatus.WaitingForCollectionInfo, + "test/directory", + _dateTimeProvider.UtcNow(), + "KeyTag", + 123, + "testUser", + "MR", + isIdentifiableExtraction: true, + isNoFilterExtraction: true, + failedInfoDoc); + + Assert.That(doc2, Is.EqualTo(doc1)); + } - [Test] - public void TestMongoFailedJobInfo_Equality() - { - var exception = new TestException("aaah"); + [Test] + public void TestMongoExtractJobDoc_GetHashCode() + { + Guid guid = Guid.NewGuid(); + + var doc1 = new MongoExtractJobDoc( + guid, + MongoExtractionMessageHeaderDoc.FromMessageHeader(guid, _messageHeader, _dateTimeProvider), + "1234", + ExtractJobStatus.WaitingForCollectionInfo, + "test/directory", + _dateTimeProvider.UtcNow(), + "KeyTag", + 123, + "testUser", + "MR", + isIdentifiableExtraction: true, + isNoFilterExtraction: true, + null); + var doc2 = new MongoExtractJobDoc( + guid, + MongoExtractionMessageHeaderDoc.FromMessageHeader(guid, _messageHeader, _dateTimeProvider), + "1234", + ExtractJobStatus.WaitingForCollectionInfo, + "test/directory", + _dateTimeProvider.UtcNow(), + "KeyTag", + 123, + "testUser", + "MR", + isIdentifiableExtraction: true, + isNoFilterExtraction: true, + null); + + Assert.That(doc2.GetHashCode(), 
Is.EqualTo(doc1.GetHashCode())); + } - var doc1 = new MongoFailedJobInfoDoc(exception, _dateTimeProvider); - var doc2 = new MongoFailedJobInfoDoc(exception, _dateTimeProvider); + [Test] + public void TestMongoFailedJobInfoDoc_SettersAvailable() + { + foreach (PropertyInfo p in typeof(MongoFailedJobInfoDoc).GetProperties()) + Assert.That(p.CanWrite, Is.True, $"Property '{p.Name}' is not writeable"); + } - Assert.That(doc2, Is.EqualTo(doc1)); - } + [Test] + public void TestMongoFailedJobInfo_Equality() + { + var exception = new TestException("aaah"); - [Test] - public void TestMongoFailedJobInfo_GetHashCode() - { - var exception = new TestException("aaah"); + var doc1 = new MongoFailedJobInfoDoc(exception, _dateTimeProvider); + var doc2 = new MongoFailedJobInfoDoc(exception, _dateTimeProvider); - var doc1 = new MongoFailedJobInfoDoc(exception, _dateTimeProvider); - var doc2 = new MongoFailedJobInfoDoc(exception, _dateTimeProvider); + Assert.That(doc2, Is.EqualTo(doc1)); + } + + [Test] + public void TestMongoFailedJobInfo_GetHashCode() + { + var exception = new TestException("aaah"); - Assert.That(doc2.GetHashCode(), Is.EqualTo(doc1.GetHashCode())); - } + var doc1 = new MongoFailedJobInfoDoc(exception, _dateTimeProvider); + var doc2 = new MongoFailedJobInfoDoc(exception, _dateTimeProvider); - #endregion + Assert.That(doc2.GetHashCode(), Is.EqualTo(doc1.GetHashCode())); } + + #endregion } diff --git a/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/MongoDB/ObjectModel/MongoExtractionMessageHeaderDocTest.cs b/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/MongoDB/ObjectModel/MongoExtractionMessageHeaderDocTest.cs index c1146a06f..aca5b580a 100644 --- a/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/MongoDB/ObjectModel/MongoExtractionMessageHeaderDocTest.cs +++ 
b/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/MongoDB/ObjectModel/MongoExtractionMessageHeaderDocTest.cs @@ -6,96 +6,95 @@ using System.Reflection; -namespace SmiServices.UnitTests.Microservices.CohortPackager.Execution.ExtractJobStorage.MongoDB.ObjectModel +namespace SmiServices.UnitTests.Microservices.CohortPackager.Execution.ExtractJobStorage.MongoDB.ObjectModel; + +[TestFixture] +public class MongoExtractionMessageHeaderDocTest { - [TestFixture] - public class MongoExtractionMessageHeaderDocTest + #region Fixture Methods + + [OneTimeSetUp] + public void OneTimeSetUp() { - #region Fixture Methods + } - [OneTimeSetUp] - public void OneTimeSetUp() - { - } + [OneTimeTearDown] + public void OneTimeTearDown() { } - [OneTimeTearDown] - public void OneTimeTearDown() { } + [SetUp] + public void SetUp() { } - [SetUp] - public void SetUp() { } + [TearDown] + public void TearDown() { } - [TearDown] - public void TearDown() { } + #endregion - #endregion + #region Tests - #region Tests + [Test] + public void TestMongoExtractionMessageHeaderDoc_SettersAvailable() + { + foreach (PropertyInfo p in typeof(MongoExtractionMessageHeaderDoc).GetProperties()) + Assert.That(p.CanWrite, Is.True, $"Property '{p.Name}' is not writeable"); + } - [Test] - public void TestMongoExtractionMessageHeaderDoc_SettersAvailable() - { - foreach (PropertyInfo p in typeof(MongoExtractionMessageHeaderDoc).GetProperties()) - Assert.That(p.CanWrite, Is.True, $"Property '{p.Name}' is not writeable"); - } + [Test] + public void TestMongoExtractionMessageHeaderDoc_FromMessageHeader() + { + Guid guid = Guid.NewGuid(); + Guid p1 = Guid.NewGuid(); + Guid p2 = Guid.NewGuid(); + const long unixTimeNow = 1234; + var dateTimeProvider = new TestDateTimeProvider(); - [Test] - public void TestMongoExtractionMessageHeaderDoc_FromMessageHeader() + var header = new MessageHeader { - Guid guid = Guid.NewGuid(); - Guid p1 = Guid.NewGuid(); - Guid p2 = Guid.NewGuid(); - const long 
unixTimeNow = 1234; - var dateTimeProvider = new TestDateTimeProvider(); - - var header = new MessageHeader - { - MessageGuid = guid, - ProducerExecutableName = "TestFromMessageHeader", - ProducerProcessID = 1234, - Parents = [p1, p2], - OriginalPublishTimestamp = unixTimeNow, - }; - - MongoExtractionMessageHeaderDoc doc = MongoExtractionMessageHeaderDoc.FromMessageHeader(guid, header, dateTimeProvider); - - var expected = new MongoExtractionMessageHeaderDoc( - guid, - guid, - "TestFromMessageHeader", - 1234, - DateTime.UnixEpoch + TimeSpan.FromSeconds(unixTimeNow), - $"{p1}->{p2}", - dateTimeProvider.UtcNow() - ); - - Assert.That(doc, Is.EqualTo(expected)); - } - - [Test] - public void TestMongoExtractionMessageHeaderDoc_Equality() - { - Guid guid = Guid.NewGuid(); - DateTime now = DateTime.UtcNow; + MessageGuid = guid, + ProducerExecutableName = "TestFromMessageHeader", + ProducerProcessID = 1234, + Parents = [p1, p2], + OriginalPublishTimestamp = unixTimeNow, + }; + + MongoExtractionMessageHeaderDoc doc = MongoExtractionMessageHeaderDoc.FromMessageHeader(guid, header, dateTimeProvider); + + var expected = new MongoExtractionMessageHeaderDoc( + guid, + guid, + "TestFromMessageHeader", + 1234, + DateTime.UnixEpoch + TimeSpan.FromSeconds(unixTimeNow), + $"{p1}->{p2}", + dateTimeProvider.UtcNow() + ); + + Assert.That(doc, Is.EqualTo(expected)); + } - var doc1 = new MongoExtractionMessageHeaderDoc(guid, guid, "Test1", 123, now, "parents", now); - var doc2 = new MongoExtractionMessageHeaderDoc(guid, guid, "Test1", 123, now, "parents", now); + [Test] + public void TestMongoExtractionMessageHeaderDoc_Equality() + { + Guid guid = Guid.NewGuid(); + DateTime now = DateTime.UtcNow; - Assert.That(doc2, Is.EqualTo(doc1)); - } + var doc1 = new MongoExtractionMessageHeaderDoc(guid, guid, "Test1", 123, now, "parents", now); + var doc2 = new MongoExtractionMessageHeaderDoc(guid, guid, "Test1", 123, now, "parents", now); - [Test] - public void 
TestMongoExtractionMessageHeaderDoc_GetHashCode() - { + Assert.That(doc2, Is.EqualTo(doc1)); + } - Guid guid = Guid.NewGuid(); - DateTime now = DateTime.UtcNow; + [Test] + public void TestMongoExtractionMessageHeaderDoc_GetHashCode() + { - var doc1 = new MongoExtractionMessageHeaderDoc(guid, guid, "Test1", 123, now, "parents", now); - var doc2 = new MongoExtractionMessageHeaderDoc(guid, guid, "Test1", 123, now, "parents", now); + Guid guid = Guid.NewGuid(); + DateTime now = DateTime.UtcNow; - Assert.That(doc2.GetHashCode(), Is.EqualTo(doc1.GetHashCode())); - } + var doc1 = new MongoExtractionMessageHeaderDoc(guid, guid, "Test1", 123, now, "parents", now); + var doc2 = new MongoExtractionMessageHeaderDoc(guid, guid, "Test1", 123, now, "parents", now); - #endregion + Assert.That(doc2.GetHashCode(), Is.EqualTo(doc1.GetHashCode())); } + + #endregion } diff --git a/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/MongoDB/ObjectModel/MongoFileStatusDocTest.cs b/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/MongoDB/ObjectModel/MongoFileStatusDocTest.cs index bd58d2761..99989f418 100644 --- a/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/MongoDB/ObjectModel/MongoFileStatusDocTest.cs +++ b/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/MongoDB/ObjectModel/MongoFileStatusDocTest.cs @@ -9,96 +9,96 @@ using System; using System.Reflection; -namespace SmiServices.UnitTests.Microservices.CohortPackager.Execution.ExtractJobStorage.MongoDB.ObjectModel +namespace SmiServices.UnitTests.Microservices.CohortPackager.Execution.ExtractJobStorage.MongoDB.ObjectModel; + +[TestFixture] +public class MongoFileStatusDocTest { - [TestFixture] - public class MongoFileStatusDocTest + private readonly TestDateTimeProvider _dateTimeProvider = new(); + + private readonly MessageHeader _messageHeader = new() { - private readonly TestDateTimeProvider 
_dateTimeProvider = new(); + Parents = [Guid.NewGuid(),], + }; - private readonly MessageHeader _messageHeader = new() - { - Parents = [Guid.NewGuid(),], - }; + #region Fixture Methods - #region Fixture Methods + [OneTimeSetUp] + public void OneTimeSetUp() + { + } - [OneTimeSetUp] - public void OneTimeSetUp() - { - } + [OneTimeTearDown] + public void OneTimeTearDown() { } - [OneTimeTearDown] - public void OneTimeTearDown() { } + [SetUp] + public void SetUp() { } - [SetUp] - public void SetUp() { } + [TearDown] + public void TearDown() { } - [TearDown] - public void TearDown() { } + private static void AssertDocsEqualExceptHeader(MongoFileStatusDoc expected, MongoFileStatusDoc actual) + { + actual.ExtraElements = null; - private static void AssertDocsEqualExceptHeader(MongoFileStatusDoc expected, MongoFileStatusDoc actual) + foreach (PropertyInfo prop in expected.GetType().GetProperties()) { - actual.ExtraElements = null; - - foreach (PropertyInfo prop in expected.GetType().GetProperties()) - { - if (prop.Name == "Header") - continue; + if (prop.Name == "Header") + continue; - var expectedProp = prop.GetValue(expected); - var parsedProp = prop.GetValue(actual); - Assert.That(parsedProp, Is.EqualTo(expectedProp)); - } + var expectedProp = prop.GetValue(expected); + var parsedProp = prop.GetValue(actual); + Assert.That(parsedProp, Is.EqualTo(expectedProp)); } + } - #endregion + #endregion - #region Tests + #region Tests - [Test] - public void Test_MongoFileStatusDoc_IsIdentifiable_StatusMessage() - { - var exc = Assert.Throws(() => - new MongoFileStatusDoc( - MongoExtractionMessageHeaderDoc.FromMessageHeader(Guid.NewGuid(), new MessageHeader(), new DateTimeProvider()), - "input.dcm", - "anon.dcm", - ExtractedFileStatus.Anonymised, - VerifiedFileStatus.NotIdentifiable, - null - ) - ); - Assert.That(exc!.Message, Is.EqualTo("Cannot be null or whitespace except for successful file copies (Parameter 'statusMessage')")); - - exc = Assert.Throws(() => - new 
MongoFileStatusDoc( - MongoExtractionMessageHeaderDoc.FromMessageHeader(Guid.NewGuid(), new MessageHeader(), new DateTimeProvider()), - "input.dcm", - "anon.dcm", - ExtractedFileStatus.Anonymised, - VerifiedFileStatus.NotIdentifiable, - " " - ) - ); - Assert.That(exc!.Message, Is.EqualTo("Cannot be null or whitespace except for successful file copies (Parameter 'statusMessage')")); - - var _ = new MongoFileStatusDoc( - MongoExtractionMessageHeaderDoc.FromMessageHeader(Guid.NewGuid(), new MessageHeader(), new DateTimeProvider()), - "input.dcm", - "anon.dcm", - ExtractedFileStatus.Copied, - VerifiedFileStatus.NotVerified, - " " - ); - } + [Test] + public void Test_MongoFileStatusDoc_IsIdentifiable_StatusMessage() + { + var exc = Assert.Throws(() => + new MongoFileStatusDoc( + MongoExtractionMessageHeaderDoc.FromMessageHeader(Guid.NewGuid(), new MessageHeader(), new DateTimeProvider()), + "input.dcm", + "anon.dcm", + ExtractedFileStatus.Anonymised, + VerifiedFileStatus.NotIdentifiable, + null + ) + ); + Assert.That(exc!.Message, Is.EqualTo("Cannot be null or whitespace except for successful file copies (Parameter 'statusMessage')")); - [Test] - public void ParseVerificationMessage_v1_11_1() - { - // Arrange + exc = Assert.Throws(() => + new MongoFileStatusDoc( + MongoExtractionMessageHeaderDoc.FromMessageHeader(Guid.NewGuid(), new MessageHeader(), new DateTimeProvider()), + "input.dcm", + "anon.dcm", + ExtractedFileStatus.Anonymised, + VerifiedFileStatus.NotIdentifiable, + " " + ) + ); + Assert.That(exc!.Message, Is.EqualTo("Cannot be null or whitespace except for successful file copies (Parameter 'statusMessage')")); + + var _ = new MongoFileStatusDoc( + MongoExtractionMessageHeaderDoc.FromMessageHeader(Guid.NewGuid(), new MessageHeader(), new DateTimeProvider()), + "input.dcm", + "anon.dcm", + ExtractedFileStatus.Copied, + VerifiedFileStatus.NotVerified, + " " + ); + } + + [Test] + public void ParseVerificationMessage_v1_11_1() + { + // Arrange - const string 
jsonDoc = @" + const string jsonDoc = @" { '_id' : ObjectId('5f490ef8473b9739448cbe4c'), 'header': { @@ -116,30 +116,30 @@ public void ParseVerificationMessage_v1_11_1() 'statusMessage' : '[]' }"; - var expected = new MongoFileStatusDoc( - MongoExtractionMessageHeaderDoc.FromMessageHeader(Guid.NewGuid(), new MessageHeader(), new DateTimeProvider()), - "", - "anon.dcm", - ExtractedFileStatus.Anonymised, - VerifiedFileStatus.NotIdentifiable, - "[]" - ); + var expected = new MongoFileStatusDoc( + MongoExtractionMessageHeaderDoc.FromMessageHeader(Guid.NewGuid(), new MessageHeader(), new DateTimeProvider()), + "", + "anon.dcm", + ExtractedFileStatus.Anonymised, + VerifiedFileStatus.NotIdentifiable, + "[]" + ); - // Act + // Act - var parsed = BsonSerializer.Deserialize(BsonDocument.Parse(jsonDoc)); + var parsed = BsonSerializer.Deserialize(BsonDocument.Parse(jsonDoc)); - // Assert + // Assert - AssertDocsEqualExceptHeader(expected, parsed); - } + AssertDocsEqualExceptHeader(expected, parsed); + } - [Test] - public void ParseAnonFailedMessage_v1_11_1() - { - // Arrange + [Test] + public void ParseAnonFailedMessage_v1_11_1() + { + // Arrange - const string jsonDoc = @" + const string jsonDoc = @" { '_id' : ObjectId('5f490ef8473b9739448cbe4c'), 'header': { @@ -157,30 +157,30 @@ public void ParseAnonFailedMessage_v1_11_1() 'statusMessage' : 'failed to anonymise' }"; - var expected = new MongoFileStatusDoc( - MongoExtractionMessageHeaderDoc.FromMessageHeader(Guid.NewGuid(), new MessageHeader(), new DateTimeProvider()), - "", - null, - ExtractedFileStatus.ErrorWontRetry, - VerifiedFileStatus.NotVerified, - "failed to anonymise" - ); + var expected = new MongoFileStatusDoc( + MongoExtractionMessageHeaderDoc.FromMessageHeader(Guid.NewGuid(), new MessageHeader(), new DateTimeProvider()), + "", + null, + ExtractedFileStatus.ErrorWontRetry, + VerifiedFileStatus.NotVerified, + "failed to anonymise" + ); - // Act + // Act - var parsed = 
BsonSerializer.Deserialize(BsonDocument.Parse(jsonDoc)); + var parsed = BsonSerializer.Deserialize(BsonDocument.Parse(jsonDoc)); - // Assert + // Assert - AssertDocsEqualExceptHeader(expected, parsed); - } + AssertDocsEqualExceptHeader(expected, parsed); + } - [Test] - public void ParseAnonFailedMessage_v5_1_3() - { - // Arrange + [Test] + public void ParseAnonFailedMessage_v5_1_3() + { + // Arrange - const string jsonDoc = @" + const string jsonDoc = @" { '_id' : ObjectId('5f490ef8473b9739448cbe4c'), 'header': { @@ -200,30 +200,30 @@ public void ParseAnonFailedMessage_v5_1_3() 'statusMessage' : 'failed to anonymise' }"; - var expected = new MongoFileStatusDoc( - MongoExtractionMessageHeaderDoc.FromMessageHeader(Guid.NewGuid(), new MessageHeader(), new DateTimeProvider()), - "foo.dcm", - null, - ExtractedFileStatus.ErrorWontRetry, - VerifiedFileStatus.NotVerified, - "failed to anonymise" - ); + var expected = new MongoFileStatusDoc( + MongoExtractionMessageHeaderDoc.FromMessageHeader(Guid.NewGuid(), new MessageHeader(), new DateTimeProvider()), + "foo.dcm", + null, + ExtractedFileStatus.ErrorWontRetry, + VerifiedFileStatus.NotVerified, + "failed to anonymise" + ); - // Act + // Act - var parsed = BsonSerializer.Deserialize(BsonDocument.Parse(jsonDoc)); + var parsed = BsonSerializer.Deserialize(BsonDocument.Parse(jsonDoc)); - // Assert + // Assert - AssertDocsEqualExceptHeader(expected, parsed); - } + AssertDocsEqualExceptHeader(expected, parsed); + } - [Test] - public void ParseVerificationMessage_v5_1_3() - { - // Arrange + [Test] + public void ParseVerificationMessage_v5_1_3() + { + // Arrange - const string jsonDoc = @" + const string jsonDoc = @" { '_id' : ObjectId('5f490ef8473b9739448cbe4c'), 'header': { @@ -243,77 +243,76 @@ public void ParseVerificationMessage_v5_1_3() 'statusMessage' : '[]' }"; - var expected = new MongoFileStatusDoc( - MongoExtractionMessageHeaderDoc.FromMessageHeader(Guid.NewGuid(), new MessageHeader(), new DateTimeProvider()), - 
"foo.dcm", - "foo-an.dcm", - ExtractedFileStatus.Anonymised, - VerifiedFileStatus.NotIdentifiable, - "[]" - ); + var expected = new MongoFileStatusDoc( + MongoExtractionMessageHeaderDoc.FromMessageHeader(Guid.NewGuid(), new MessageHeader(), new DateTimeProvider()), + "foo.dcm", + "foo-an.dcm", + ExtractedFileStatus.Anonymised, + VerifiedFileStatus.NotIdentifiable, + "[]" + ); - // Act + // Act - var parsed = BsonSerializer.Deserialize(BsonDocument.Parse(jsonDoc)); + var parsed = BsonSerializer.Deserialize(BsonDocument.Parse(jsonDoc)); - // Assert - - AssertDocsEqualExceptHeader(expected, parsed); - } - - [Test] - public void TestMongoFileStatusDoc_SettersAvailable() - { - foreach (PropertyInfo p in typeof(MongoFileStatusDoc).GetProperties()) - Assert.That(p.CanWrite, Is.True, $"Property '{p.Name}' is not writeable"); - } + // Assert - [Test] - public void TestMongoFileStatusDoc_Equality() - { - Guid guid = Guid.NewGuid(); - var doc1 = new MongoFileStatusDoc( - MongoExtractionMessageHeaderDoc.FromMessageHeader(guid, _messageHeader, _dateTimeProvider), - "input.dcm", - "anon.dcm", - ExtractedFileStatus.Anonymised, - VerifiedFileStatus.NotIdentifiable, - "anonymised"); - - var doc2 = new MongoFileStatusDoc( - MongoExtractionMessageHeaderDoc.FromMessageHeader(guid, _messageHeader, _dateTimeProvider), - "input.dcm", - "anon.dcm", - ExtractedFileStatus.Anonymised, - VerifiedFileStatus.NotIdentifiable, - "anonymised"); - - Assert.That(doc2, Is.EqualTo(doc1)); - } - - [Test] - public void TestMongoFileStatusDoc_GetHashCode() - { - Guid guid = Guid.NewGuid(); - var doc1 = new MongoFileStatusDoc( - MongoExtractionMessageHeaderDoc.FromMessageHeader(guid, _messageHeader, _dateTimeProvider), - "input.dcm", - "anon.dcm", - ExtractedFileStatus.Anonymised, - VerifiedFileStatus.NotIdentifiable, - "anonymised"); + AssertDocsEqualExceptHeader(expected, parsed); + } - var doc2 = new MongoFileStatusDoc( - MongoExtractionMessageHeaderDoc.FromMessageHeader(guid, _messageHeader, 
_dateTimeProvider), - "input.dcm", - "anon.dcm", - ExtractedFileStatus.Anonymised, - VerifiedFileStatus.NotIdentifiable, - "anonymised"); + [Test] + public void TestMongoFileStatusDoc_SettersAvailable() + { + foreach (PropertyInfo p in typeof(MongoFileStatusDoc).GetProperties()) + Assert.That(p.CanWrite, Is.True, $"Property '{p.Name}' is not writeable"); + } - Assert.That(doc2.GetHashCode(), Is.EqualTo(doc1.GetHashCode())); - } + [Test] + public void TestMongoFileStatusDoc_Equality() + { + Guid guid = Guid.NewGuid(); + var doc1 = new MongoFileStatusDoc( + MongoExtractionMessageHeaderDoc.FromMessageHeader(guid, _messageHeader, _dateTimeProvider), + "input.dcm", + "anon.dcm", + ExtractedFileStatus.Anonymised, + VerifiedFileStatus.NotIdentifiable, + "anonymised"); + + var doc2 = new MongoFileStatusDoc( + MongoExtractionMessageHeaderDoc.FromMessageHeader(guid, _messageHeader, _dateTimeProvider), + "input.dcm", + "anon.dcm", + ExtractedFileStatus.Anonymised, + VerifiedFileStatus.NotIdentifiable, + "anonymised"); + + Assert.That(doc2, Is.EqualTo(doc1)); + } - #endregion + [Test] + public void TestMongoFileStatusDoc_GetHashCode() + { + Guid guid = Guid.NewGuid(); + var doc1 = new MongoFileStatusDoc( + MongoExtractionMessageHeaderDoc.FromMessageHeader(guid, _messageHeader, _dateTimeProvider), + "input.dcm", + "anon.dcm", + ExtractedFileStatus.Anonymised, + VerifiedFileStatus.NotIdentifiable, + "anonymised"); + + var doc2 = new MongoFileStatusDoc( + MongoExtractionMessageHeaderDoc.FromMessageHeader(guid, _messageHeader, _dateTimeProvider), + "input.dcm", + "anon.dcm", + ExtractedFileStatus.Anonymised, + VerifiedFileStatus.NotIdentifiable, + "anonymised"); + + Assert.That(doc2.GetHashCode(), Is.EqualTo(doc1.GetHashCode())); } + + #endregion } diff --git a/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/VerificationFailureInfoTest.cs 
b/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/VerificationFailureInfoTest.cs index 17808af74..d72710739 100644 --- a/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/VerificationFailureInfoTest.cs +++ b/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/ExtractJobStorage/VerificationFailureInfoTest.cs @@ -4,41 +4,40 @@ using System; -namespace SmiServices.UnitTests.Microservices.CohortPackager.Execution.ExtractJobStorage -{ - public class VerificationFailureInfoTest - { - #region Fixture Methods +namespace SmiServices.UnitTests.Microservices.CohortPackager.Execution.ExtractJobStorage; - [OneTimeSetUp] - public void OneTimeSetUp() - { - } +public class VerificationFailureInfoTest +{ + #region Fixture Methods - [OneTimeTearDown] - public void OneTimeTearDown() { } + [OneTimeSetUp] + public void OneTimeSetUp() + { + } - #endregion + [OneTimeTearDown] + public void OneTimeTearDown() { } - #region Test Methods + #endregion - [SetUp] - public void SetUp() { } + #region Test Methods - [TearDown] - public void TearDown() { } + [SetUp] + public void SetUp() { } - #endregion + [TearDown] + public void TearDown() { } - #region Tests + #endregion - [TestCase(" ", "bar")] - [TestCase("foo", " ")] - public void Constructor_ThrowsArgumentException_OnInvalidArgs(string anonFilePath, string failureData) - { - Assert.Throws(() => { var _ = new FileVerificationFailureInfo(anonFilePath, failureData); }); - } + #region Tests - #endregion + [TestCase(" ", "bar")] + [TestCase("foo", " ")] + public void Constructor_ThrowsArgumentException_OnInvalidArgs(string anonFilePath, string failureData) + { + Assert.Throws(() => { var _ = new FileVerificationFailureInfo(anonFilePath, failureData); }); } + + #endregion } diff --git a/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/JobProcessing/ExtractJobWatcherTest.cs 
b/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/JobProcessing/ExtractJobWatcherTest.cs index 94ab30018..258fe4ebe 100644 --- a/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/JobProcessing/ExtractJobWatcherTest.cs +++ b/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/JobProcessing/ExtractJobWatcherTest.cs @@ -9,112 +9,111 @@ using System; using System.Collections.Generic; -namespace SmiServices.UnitTests.Microservices.CohortPackager.Execution.JobProcessing +namespace SmiServices.UnitTests.Microservices.CohortPackager.Execution.JobProcessing; + +[TestFixture] +public class ExtractJobWatcherTest { - [TestFixture] - public class ExtractJobWatcherTest + #region Fixture Methods + + [OneTimeSetUp] + public void OneTimeSetUp() { - #region Fixture Methods + } - [OneTimeSetUp] - public void OneTimeSetUp() - { - } + [OneTimeTearDown] + public void OneTimeTearDown() { } - [OneTimeTearDown] - public void OneTimeTearDown() { } + #endregion - #endregion + #region Test Methods - #region Test Methods + [SetUp] + public void SetUp() { } - [SetUp] - public void SetUp() { } + [TearDown] + public void TearDown() { } - [TearDown] - public void TearDown() { } + #endregion - #endregion + #region Tests - #region Tests + private class TestJobCompleteNotifier : IJobCompleteNotifier + { + public bool Notified { get; set; } - private class TestJobCompleteNotifier : IJobCompleteNotifier + public void NotifyJobCompleted(ExtractJobInfo jobInfo) { - public bool Notified { get; set; } - - public void NotifyJobCompleted(ExtractJobInfo jobInfo) - { - Notified = true; - } + Notified = true; } + } - private class TestJobReporter : IJobReporter + private class TestJobReporter : IJobReporter + { + public bool Reported { get; set; } + public void CreateReports(Guid jobId) { - public bool Reported { get; set; } - public void CreateReports(Guid jobId) - { - Reported = true; - } + Reported = true; } + } - [Test] - public void TestProcessJobs() + 
[Test] + public void TestProcessJobs() + { + Guid jobId = Guid.NewGuid(); + var testJobInfo = new ExtractJobInfo( + jobId, + DateTime.UtcNow, + "123", + "test/dir", + "KeyTag", + 123, + "testUser", + null, + ExtractJobStatus.ReadyForChecks, + isIdentifiableExtraction: true, + isNoFilterExtraction: true + ); + + var opts = new CohortPackagerOptions { JobWatcherTimeoutInSeconds = 123 }; + var mockJobStore = new Mock(); + var callbackUsed = false; + var mockCallback = new Action(_ => callbackUsed = true); + var testNotifier = new TestJobCompleteNotifier(); + var testReporter = new TestJobReporter(); + + var watcher = new ExtractJobWatcher(opts, mockJobStore.Object, mockCallback, testNotifier, testReporter); + + // Check that we can call ProcessJobs with no Guid to process all jobs + mockJobStore.Setup(x => x.GetReadyJobs(default)).Returns([]); + watcher.ProcessJobs(); + mockJobStore.Verify(); + + // Check that we MarkJobFailed for known exceptions + mockJobStore.Reset(); + mockJobStore.Setup(x => x.GetReadyJobs(It.IsAny())).Returns([testJobInfo]); + mockJobStore.Setup(x => x.MarkJobCompleted(It.IsAny())).Throws(new ApplicationException("aah")); + watcher.ProcessJobs(jobId); + mockJobStore.Verify(x => x.MarkJobFailed(jobId, It.IsAny()), Times.Once); + + // Check that we call the exception callback for unhandled exceptions + mockJobStore.Reset(); + mockJobStore.Setup(x => x.GetReadyJobs(It.IsAny())).Returns([testJobInfo]); + mockJobStore.Setup(x => x.MarkJobCompleted(It.IsAny())).Throws(new Exception("aah")); + watcher.ProcessJobs(jobId); + Assert.That(callbackUsed, Is.True); + + // Check happy path + mockJobStore.Reset(); + mockJobStore.Setup(x => x.GetReadyJobs(It.IsAny())).Returns([testJobInfo]); + testNotifier.Notified = false; + watcher.ProcessJobs(jobId); + Assert.Multiple(() => { - Guid jobId = Guid.NewGuid(); - var testJobInfo = new ExtractJobInfo( - jobId, - DateTime.UtcNow, - "123", - "test/dir", - "KeyTag", - 123, - "testUser", - null, - 
ExtractJobStatus.ReadyForChecks, - isIdentifiableExtraction: true, - isNoFilterExtraction: true - ); - - var opts = new CohortPackagerOptions { JobWatcherTimeoutInSeconds = 123 }; - var mockJobStore = new Mock(); - var callbackUsed = false; - var mockCallback = new Action(_ => callbackUsed = true); - var testNotifier = new TestJobCompleteNotifier(); - var testReporter = new TestJobReporter(); - - var watcher = new ExtractJobWatcher(opts, mockJobStore.Object, mockCallback, testNotifier, testReporter); - - // Check that we can call ProcessJobs with no Guid to process all jobs - mockJobStore.Setup(x => x.GetReadyJobs(default)).Returns([]); - watcher.ProcessJobs(); - mockJobStore.Verify(); - - // Check that we MarkJobFailed for known exceptions - mockJobStore.Reset(); - mockJobStore.Setup(x => x.GetReadyJobs(It.IsAny())).Returns([testJobInfo]); - mockJobStore.Setup(x => x.MarkJobCompleted(It.IsAny())).Throws(new ApplicationException("aah")); - watcher.ProcessJobs(jobId); - mockJobStore.Verify(x => x.MarkJobFailed(jobId, It.IsAny()), Times.Once); - - // Check that we call the exception callback for unhandled exceptions - mockJobStore.Reset(); - mockJobStore.Setup(x => x.GetReadyJobs(It.IsAny())).Returns([testJobInfo]); - mockJobStore.Setup(x => x.MarkJobCompleted(It.IsAny())).Throws(new Exception("aah")); - watcher.ProcessJobs(jobId); - Assert.That(callbackUsed, Is.True); - - // Check happy path - mockJobStore.Reset(); - mockJobStore.Setup(x => x.GetReadyJobs(It.IsAny())).Returns([testJobInfo]); - testNotifier.Notified = false; - watcher.ProcessJobs(jobId); - Assert.Multiple(() => - { - Assert.That(testNotifier.Notified, Is.True); - Assert.That(testReporter.Reported, Is.True); - }); - } - - #endregion + Assert.That(testNotifier.Notified, Is.True); + Assert.That(testReporter.Reported, Is.True); + }); } + + #endregion } diff --git a/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/JobProcessing/Notifying/JobCompleteNotifierFactoryTest.cs 
b/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/JobProcessing/Notifying/JobCompleteNotifierFactoryTest.cs index 7034fbd75..dcd6d476b 100644 --- a/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/JobProcessing/Notifying/JobCompleteNotifierFactoryTest.cs +++ b/tests/SmiServices.UnitTests/Microservices/CohortPackager/Execution/JobProcessing/Notifying/JobCompleteNotifierFactoryTest.cs @@ -4,47 +4,46 @@ using System; -namespace SmiServices.UnitTests.Microservices.CohortPackager.Execution.JobProcessing.Notifying -{ - public class JobCompleteNotifierFactoryTest - { - #region Fixture Methods +namespace SmiServices.UnitTests.Microservices.CohortPackager.Execution.JobProcessing.Notifying; - [OneTimeSetUp] - public void OneTimeSetUp() - { - } +public class JobCompleteNotifierFactoryTest +{ + #region Fixture Methods - [OneTimeTearDown] - public void OneTimeTearDown() { } + [OneTimeSetUp] + public void OneTimeSetUp() + { + } - #endregion + [OneTimeTearDown] + public void OneTimeTearDown() { } - #region Test Methods + #endregion - [SetUp] - public void SetUp() { } + #region Test Methods - [TearDown] - public void TearDown() { } + [SetUp] + public void SetUp() { } - #endregion + [TearDown] + public void TearDown() { } - #region Tests + #endregion - [Test] - public void GetNotifier_ConstructsLoggingNotifier() - { - IJobCompleteNotifier notifier = JobCompleteNotifierFactory.GetNotifier(notifierTypeStr: "LoggingNotifier"); - Assert.That(notifier is LoggingNotifier, Is.True); - } + #region Tests - [Test] - public void GetNotifier_ThrowsException_OnInvalidNotifierTypeStr() - { - Assert.Throws(() => JobCompleteNotifierFactory.GetNotifier(notifierTypeStr: "foo")); - } + [Test] + public void GetNotifier_ConstructsLoggingNotifier() + { + IJobCompleteNotifier notifier = JobCompleteNotifierFactory.GetNotifier(notifierTypeStr: "LoggingNotifier"); + Assert.That(notifier is LoggingNotifier, Is.True); + } - #endregion + [Test] + public void 
GetNotifier_ThrowsException_OnInvalidNotifierTypeStr() + { + Assert.Throws(() => JobCompleteNotifierFactory.GetNotifier(notifierTypeStr: "foo")); } + + #endregion } diff --git a/tests/SmiServices.UnitTests/Microservices/CohortPackager/Messaging/CohortPackagerControlMessageHandlerTests.cs b/tests/SmiServices.UnitTests/Microservices/CohortPackager/Messaging/CohortPackagerControlMessageHandlerTests.cs index 296e765f2..c1c57a936 100644 --- a/tests/SmiServices.UnitTests/Microservices/CohortPackager/Messaging/CohortPackagerControlMessageHandlerTests.cs +++ b/tests/SmiServices.UnitTests/Microservices/CohortPackager/Messaging/CohortPackagerControlMessageHandlerTests.cs @@ -6,82 +6,81 @@ using System; -namespace SmiServices.UnitTests.Microservices.CohortPackager.Messaging +namespace SmiServices.UnitTests.Microservices.CohortPackager.Messaging; + +internal class CohortPackagerControlMessageHandlerTests { - internal class CohortPackagerControlMessageHandlerTests + [OneTimeSetUp] + public void OneTimeSetUp() { - [OneTimeSetUp] - public void OneTimeSetUp() - { - } + } - [OneTimeTearDown] - public void OneTimeTearDown() { } + [OneTimeTearDown] + public void OneTimeTearDown() { } - [SetUp] - public void SetUp() { } + [SetUp] + public void SetUp() { } - [TearDown] - public void TearDown() { } + [TearDown] + public void TearDown() { } - [TestCase(null)] - [TestCase("00000000-0000-0000-0000-000000000001")] - public void ControlMessageHandler_ProcessJobs_ValidGuids(string? jobIdStr) - { - // Arrange + [TestCase(null)] + [TestCase("00000000-0000-0000-0000-000000000001")] + public void ControlMessageHandler_ProcessJobs_ValidGuids(string? 
jobIdStr) + { + // Arrange - Guid jobId = default; - if (!string.IsNullOrWhiteSpace(jobIdStr)) - jobId = Guid.Parse(jobIdStr); + Guid jobId = default; + if (!string.IsNullOrWhiteSpace(jobIdStr)) + jobId = Guid.Parse(jobIdStr); - var jobWatcherMock = new Mock(MockBehavior.Strict); - jobWatcherMock.Setup(x => x.ProcessJobs(jobId)); + var jobWatcherMock = new Mock(MockBehavior.Strict); + jobWatcherMock.Setup(x => x.ProcessJobs(jobId)); - var consumer = new CohortPackagerControlMessageHandler(jobWatcherMock.Object); + var consumer = new CohortPackagerControlMessageHandler(jobWatcherMock.Object); - // Act + // Act - consumer.ControlMessageHandler("processjobs", jobIdStr); + consumer.ControlMessageHandler("processjobs", jobIdStr); - // Assert + // Assert - jobWatcherMock.VerifyAll(); - } + jobWatcherMock.VerifyAll(); + } - [Test] - public void ControlMessageHandler_ProcessJobs_InvalidGuid() - { - // Arrange + [Test] + public void ControlMessageHandler_ProcessJobs_InvalidGuid() + { + // Arrange - var jobWatcherMock = new Mock(MockBehavior.Strict); + var jobWatcherMock = new Mock(MockBehavior.Strict); - var consumer = new CohortPackagerControlMessageHandler(jobWatcherMock.Object); + var consumer = new CohortPackagerControlMessageHandler(jobWatcherMock.Object); - // Act + // Act - consumer.ControlMessageHandler("processjobs", "not-a-guid"); + consumer.ControlMessageHandler("processjobs", "not-a-guid"); - // Assert + // Assert - jobWatcherMock.VerifyAll(); - } + jobWatcherMock.VerifyAll(); + } - [Test] - public void ControlMessageHandler_OtherAction_Ignored() - { - // Arrange + [Test] + public void ControlMessageHandler_OtherAction_Ignored() + { + // Arrange - var jobWatcherMock = new Mock(MockBehavior.Strict); + var jobWatcherMock = new Mock(MockBehavior.Strict); - var consumer = new CohortPackagerControlMessageHandler(jobWatcherMock.Object); + var consumer = new CohortPackagerControlMessageHandler(jobWatcherMock.Object); - // Act + // Act - 
consumer.ControlMessageHandler("something-else", "foo"); + consumer.ControlMessageHandler("something-else", "foo"); - // Assert + // Assert - jobWatcherMock.VerifyAll(); - } + jobWatcherMock.VerifyAll(); } } diff --git a/tests/SmiServices.UnitTests/Microservices/CohortPackager/PathFixtures.cs b/tests/SmiServices.UnitTests/Microservices/CohortPackager/PathFixtures.cs index a5b2a0924..53fcd4a26 100644 --- a/tests/SmiServices.UnitTests/Microservices/CohortPackager/PathFixtures.cs +++ b/tests/SmiServices.UnitTests/Microservices/CohortPackager/PathFixtures.cs @@ -5,44 +5,43 @@ using System.IO; -namespace SmiServices.UnitTests.Microservices.CohortPackager +namespace SmiServices.UnitTests.Microservices.CohortPackager; + +// TODO(rkm 2020-12-17) Test if the old form of this is fixed in NUnit 3.13 (see https://github.com/nunit/nunit/issues/2574) +public class PathFixtures : IDisposable { - // TODO(rkm 2020-12-17) Test if the old form of this is fixed in NUnit 3.13 (see https://github.com/nunit/nunit/issues/2574) - public class PathFixtures : IDisposable + public readonly string ExtractName; + public readonly string TestDirAbsolute; + public readonly string ExtractRootAbsolute; + public readonly string ProjExtractionsDirRelative = Path.Combine("proj", "extractions"); + public readonly string ProjExtractDirRelative; + public readonly string ProjExtractDirAbsolute; + public readonly string ProjReportsDirAbsolute; + + public PathFixtures(string extractName) { - public readonly string ExtractName; - public readonly string TestDirAbsolute; - public readonly string ExtractRootAbsolute; - public readonly string ProjExtractionsDirRelative = Path.Combine("proj", "extractions"); - public readonly string ProjExtractDirRelative; - public readonly string ProjExtractDirAbsolute; - public readonly string ProjReportsDirAbsolute; - - public PathFixtures(string extractName) - { - ExtractName = extractName; + ExtractName = extractName; - TestDirAbsolute = 
TestFileSystemHelpers.GetTemporaryTestDirectory(); + TestDirAbsolute = TestFileSystemHelpers.GetTemporaryTestDirectory(); - ExtractRootAbsolute = Path.Combine(TestDirAbsolute, "extractRoot"); + ExtractRootAbsolute = Path.Combine(TestDirAbsolute, "extractRoot"); - ProjExtractDirRelative = Path.Combine(ProjExtractionsDirRelative, extractName); - ProjExtractDirAbsolute = Path.Combine(ExtractRootAbsolute, ProjExtractDirRelative); + ProjExtractDirRelative = Path.Combine(ProjExtractionsDirRelative, extractName); + ProjExtractDirAbsolute = Path.Combine(ExtractRootAbsolute, ProjExtractDirRelative); - ProjReportsDirAbsolute = Path.Combine(ExtractRootAbsolute, ProjExtractionsDirRelative, "reports"); + ProjReportsDirAbsolute = Path.Combine(ExtractRootAbsolute, ProjExtractionsDirRelative, "reports"); - // NOTE(rkm 2020-11-19) This would normally be created by one of the other services - Directory.CreateDirectory(ProjExtractDirAbsolute); - } + // NOTE(rkm 2020-11-19) This would normally be created by one of the other services + Directory.CreateDirectory(ProjExtractDirAbsolute); + } - public void Dispose() - { - ResultState outcome = TestContext.CurrentContext.Result.Outcome; - if (outcome == ResultState.Failure || outcome == ResultState.Error) - return; + public void Dispose() + { + ResultState outcome = TestContext.CurrentContext.Result.Outcome; + if (outcome == ResultState.Failure || outcome == ResultState.Error) + return; - Directory.Delete(TestDirAbsolute, recursive: true); - GC.SuppressFinalize(this); - } + Directory.Delete(TestDirAbsolute, recursive: true); + GC.SuppressFinalize(this); } } diff --git a/tests/SmiServices.UnitTests/Microservices/DicomAnonymiser/Anonymisers/AnonymiserFactoryTests.cs b/tests/SmiServices.UnitTests/Microservices/DicomAnonymiser/Anonymisers/AnonymiserFactoryTests.cs index 2cef2ca5b..732e8e257 100644 --- a/tests/SmiServices.UnitTests/Microservices/DicomAnonymiser/Anonymisers/AnonymiserFactoryTests.cs +++ 
b/tests/SmiServices.UnitTests/Microservices/DicomAnonymiser/Anonymisers/AnonymiserFactoryTests.cs @@ -4,58 +4,57 @@ using SmiServices.UnitTests.Common; using System; -namespace SmiServices.UnitTests.Microservices.DicomAnonymiser.Anonymisers +namespace SmiServices.UnitTests.Microservices.DicomAnonymiser.Anonymisers; + +public class AnonymiserFactoryTests { - public class AnonymiserFactoryTests - { - #region Fixture Methods + #region Fixture Methods - [OneTimeSetUp] - public void OneTimeSetUp() - { - } + [OneTimeSetUp] + public void OneTimeSetUp() + { + } - [OneTimeTearDown] - public void OneTimeTearDown() { } + [OneTimeTearDown] + public void OneTimeTearDown() { } - #endregion + #endregion - #region Test Methods + #region Test Methods - [SetUp] - public void SetUp() { } + [SetUp] + public void SetUp() { } - [TearDown] - public void TearDown() { } + [TearDown] + public void TearDown() { } - #endregion + #endregion - #region Tests + #region Tests - [Test] - public void CreateAnonymiser_InvalidAnonymiserName_ThrowsException() + [Test] + public void CreateAnonymiser_InvalidAnonymiserName_ThrowsException() + { + var e = Assert.Throws(() => { - var e = Assert.Throws(() => - { - // TODO (da 2024-02-28) Review if this is the correct way to test this - // AnonymiserFactory.CreateAnonymiser(new DefaultAnonymiser { AnonymiserType = "whee" }); - AnonymiserFactory.CreateAnonymiser(new GlobalOptions { DicomAnonymiserOptions = new DicomAnonymiserOptions { AnonymiserType = "whee" } }); - }); - Assert.That(e!.Message, Is.EqualTo("Could not parse 'whee' to a valid AnonymiserType")); - } - - [Test] - public void CreateAnonymiser_NoCaseForAnonymiser_ThrowsException() + // TODO (da 2024-02-28) Review if this is the correct way to test this + // AnonymiserFactory.CreateAnonymiser(new DefaultAnonymiser { AnonymiserType = "whee" }); + AnonymiserFactory.CreateAnonymiser(new GlobalOptions { DicomAnonymiserOptions = new DicomAnonymiserOptions { AnonymiserType = "whee" } }); + }); + 
Assert.That(e!.Message, Is.EqualTo("Could not parse 'whee' to a valid AnonymiserType")); + } + + [Test] + public void CreateAnonymiser_NoCaseForAnonymiser_ThrowsException() + { + var e = Assert.Throws(() => { - var e = Assert.Throws(() => - { - // TODO (da 2024-02-28) Review if this is the correct way to test this - // AnonymiserFactory.CreateAnonymiser(new DicomAnonymiserOptions { AnonymiserType = "None" }); - AnonymiserFactory.CreateAnonymiser(new GlobalOptions { DicomAnonymiserOptions = new DicomAnonymiserOptions { AnonymiserType = "None" } }); - }); - Assert.That(e!.Message, Is.EqualTo("No case for AnonymiserType 'None'")); - } - - #endregion + // TODO (da 2024-02-28) Review if this is the correct way to test this + // AnonymiserFactory.CreateAnonymiser(new DicomAnonymiserOptions { AnonymiserType = "None" }); + AnonymiserFactory.CreateAnonymiser(new GlobalOptions { DicomAnonymiserOptions = new DicomAnonymiserOptions { AnonymiserType = "None" } }); + }); + Assert.That(e!.Message, Is.EqualTo("No case for AnonymiserType 'None'")); } + + #endregion } diff --git a/tests/SmiServices.UnitTests/Microservices/DicomAnonymiser/DicomAnonymiserConsumerTests.cs b/tests/SmiServices.UnitTests/Microservices/DicomAnonymiser/DicomAnonymiserConsumerTests.cs index 468836321..9880902e0 100644 --- a/tests/SmiServices.UnitTests/Microservices/DicomAnonymiser/DicomAnonymiserConsumerTests.cs +++ b/tests/SmiServices.UnitTests/Microservices/DicomAnonymiser/DicomAnonymiserConsumerTests.cs @@ -17,332 +17,331 @@ using System.IO.Abstractions.TestingHelpers; using System.Linq.Expressions; -namespace SmiServices.UnitTests.Microservices.DicomAnonymiser +namespace SmiServices.UnitTests.Microservices.DicomAnonymiser; + +public class DicomAnonymiserConsumerTests { - public class DicomAnonymiserConsumerTests + #region Fixture Methods + + private MockFileSystem _mockFs = null!; + private IDirectoryInfo _dicomRootDirInfo = null!; + private IDirectoryInfo _extractRootDirInfo = null!; + private string 
_extractDir = null!; + private string _sourceDcmPathAbs = null!; + private ExtractFileMessage _extractFileMessage = null!; + private DicomAnonymiserOptions _options = null!; + + [OneTimeSetUp] + public void OneTimeSetUp() { - #region Fixture Methods - - private MockFileSystem _mockFs = null!; - private IDirectoryInfo _dicomRootDirInfo = null!; - private IDirectoryInfo _extractRootDirInfo = null!; - private string _extractDir = null!; - private string _sourceDcmPathAbs = null!; - private ExtractFileMessage _extractFileMessage = null!; - private DicomAnonymiserOptions _options = null!; - - [OneTimeSetUp] - public void OneTimeSetUp() - { - } + } - [OneTimeTearDown] - public void OneTimeTearDown() { } + [OneTimeTearDown] + public void OneTimeTearDown() { } - #endregion + #endregion - #region Test Methods + #region Test Methods - [SetUp] - public void SetUp() - { - _mockFs = new MockFileSystem(); - - _dicomRootDirInfo = _mockFs.Directory.CreateDirectory("dicom"); - _extractRootDirInfo = _mockFs.Directory.CreateDirectory("extract"); - - var extractDirName = "extractDir"; - _extractDir = _mockFs.Path.Combine(_extractRootDirInfo.FullName, extractDirName); - _mockFs.Directory.CreateDirectory(_extractDir); - - _sourceDcmPathAbs = _mockFs.Path.Combine(_dicomRootDirInfo.FullName, "foo.dcm"); - - var dicomFile = new DicomFile(); - dicomFile.Dataset.Add(DicomTag.PatientID, "12345678"); - dicomFile.Dataset.Add(DicomTag.Modality, "CT"); - dicomFile.Dataset.Add(DicomTag.StudyInstanceUID, DicomUIDGenerator.GenerateDerivedFromUUID()); - dicomFile.Dataset.Add(DicomTag.SeriesInstanceUID, DicomUIDGenerator.GenerateDerivedFromUUID()); - dicomFile.Dataset.Add(DicomTag.SOPInstanceUID, DicomUIDGenerator.GenerateDerivedFromUUID()); - dicomFile.FileMetaInfo.MediaStorageSOPClassUID = DicomUID.SecondaryCaptureImageStorage; - dicomFile.FileMetaInfo.MediaStorageSOPInstanceUID = DicomUIDGenerator.GenerateDerivedFromUUID(); - dicomFile.FileMetaInfo.ImplementationClassUID = 
DicomUIDGenerator.GenerateDerivedFromUUID(); - dicomFile.FileMetaInfo.TransferSyntax = DicomTransferSyntax.ExplicitVRLittleEndian; - - using var stream = new MemoryStream(); - dicomFile.Save(stream); - - var dicomBytes = stream.ToArray(); - _mockFs.AddFile(_sourceDcmPathAbs, new MockFileData(dicomBytes)); - - // _mockFs.File.Create(_sourceDcmPathAbs).Dispose(); - _mockFs.File.SetAttributes(_sourceDcmPathAbs, _mockFs.File.GetAttributes(_sourceDcmPathAbs) | FileAttributes.ReadOnly); - - _extractFileMessage = new ExtractFileMessage - { - JobSubmittedAt = DateTime.UtcNow, - ExtractionJobIdentifier = Guid.NewGuid(), - ProjectNumber = "1234", - ExtractionDirectory = extractDirName, - DicomFilePath = "foo.dcm", - OutputPath = "foo-an.dcm", - }; - - _options = new DicomAnonymiserOptions - { - RoutingKeySuccess = "yay", - FailIfSourceWriteable = true, - RoutingKeyFailure = "nay" - }; - - Console.WriteLine($"_dicomRootDirInfo.FullName: {_dicomRootDirInfo.FullName}"); - Console.WriteLine($"_extractRootDirInfo.FullName: {_extractRootDirInfo.FullName}"); - Console.WriteLine($"_extractDir: {_extractDir}"); - Console.WriteLine($"_sourceDcmPathAbs: {_sourceDcmPathAbs}"); - } - - private DicomAnonymiserConsumer GetNewDicomAnonymiserConsumer( - IDicomAnonymiser? mockDicomAnonymiser = null, - IProducerModel? mockProducerModel = null - ) - { - var consumer = new DicomAnonymiserConsumer( - _options, - _dicomRootDirInfo.FullName, - _extractRootDirInfo.FullName, - mockDicomAnonymiser ?? new Mock(MockBehavior.Strict).Object, - mockProducerModel ?? 
new Mock(MockBehavior.Strict).Object, - _mockFs - ); - return consumer; - } + [SetUp] + public void SetUp() + { + _mockFs = new MockFileSystem(); + + _dicomRootDirInfo = _mockFs.Directory.CreateDirectory("dicom"); + _extractRootDirInfo = _mockFs.Directory.CreateDirectory("extract"); + + var extractDirName = "extractDir"; + _extractDir = _mockFs.Path.Combine(_extractRootDirInfo.FullName, extractDirName); + _mockFs.Directory.CreateDirectory(_extractDir); + + _sourceDcmPathAbs = _mockFs.Path.Combine(_dicomRootDirInfo.FullName, "foo.dcm"); + + var dicomFile = new DicomFile(); + dicomFile.Dataset.Add(DicomTag.PatientID, "12345678"); + dicomFile.Dataset.Add(DicomTag.Modality, "CT"); + dicomFile.Dataset.Add(DicomTag.StudyInstanceUID, DicomUIDGenerator.GenerateDerivedFromUUID()); + dicomFile.Dataset.Add(DicomTag.SeriesInstanceUID, DicomUIDGenerator.GenerateDerivedFromUUID()); + dicomFile.Dataset.Add(DicomTag.SOPInstanceUID, DicomUIDGenerator.GenerateDerivedFromUUID()); + dicomFile.FileMetaInfo.MediaStorageSOPClassUID = DicomUID.SecondaryCaptureImageStorage; + dicomFile.FileMetaInfo.MediaStorageSOPInstanceUID = DicomUIDGenerator.GenerateDerivedFromUUID(); + dicomFile.FileMetaInfo.ImplementationClassUID = DicomUIDGenerator.GenerateDerivedFromUUID(); + dicomFile.FileMetaInfo.TransferSyntax = DicomTransferSyntax.ExplicitVRLittleEndian; - [TearDown] - public void TearDown() { } + using var stream = new MemoryStream(); + dicomFile.Save(stream); - #endregion + var dicomBytes = stream.ToArray(); + _mockFs.AddFile(_sourceDcmPathAbs, new MockFileData(dicomBytes)); - #region Tests + // _mockFs.File.Create(_sourceDcmPathAbs).Dispose(); + _mockFs.File.SetAttributes(_sourceDcmPathAbs, _mockFs.File.GetAttributes(_sourceDcmPathAbs) | FileAttributes.ReadOnly); - // TODO (da 2024-03-28) Extract modality from cohort extractor instead of opening DICOM file - // This test is disabled because of FellowOakDicom.DicomFileException caused by DicomFile.Open - // Once the above TODO is implemented, 
this test can be enabled. - /* - [Test] - public void ProcessMessageImpl_HappyPath() + _extractFileMessage = new ExtractFileMessage { - // Arrange - - Expression> expectedAnonCall = - x => x.Anonymise( - It.Is(x => x == _extractFileMessage), - It.Is(x => x.FullName == _sourceDcmPathAbs), - It.Is(x => x.FullName == _mockFs.Path.Combine(_extractDir, _extractFileMessage.OutputPath)), - out It.Ref.IsAny - ); - - var mockAnonymiser = new Mock(MockBehavior.Strict); - mockAnonymiser - .Setup(expectedAnonCall) - .Returns(ExtractedFileStatus.Anonymised); - - Expression> expectedSendCall = - x => x.SendMessage( - It.Is(x => - x.Status == ExtractedFileStatus.Anonymised && - x.StatusMessage == null && - x.OutputFilePath == _extractFileMessage.OutputPath - ), - It.IsAny(), - _options.RoutingKeySuccess - ); - - var mockProducerModel = new Mock(); - mockProducerModel.Setup(expectedSendCall); - - var consumer = GetNewDicomAnonymiserConsumer(mockAnonymiser.Object, mockProducerModel.Object); - - // Act - consumer.TestMessage(_extractFileMessage); - - // Assert - - TestTimelineAwaiter.Await(() => consumer.AckCount == 1 && consumer.NackCount == 0); - - mockAnonymiser.Verify(expectedAnonCall, Times.Once); - mockProducerModel.Verify(expectedSendCall, Times.Once); - } - */ - - [Test] - public void ProcessMessageImpl_IsIdentifiableExtraction_ThrowsException() + JobSubmittedAt = DateTime.UtcNow, + ExtractionJobIdentifier = Guid.NewGuid(), + ProjectNumber = "1234", + ExtractionDirectory = extractDirName, + DicomFilePath = "foo.dcm", + OutputPath = "foo-an.dcm", + }; + + _options = new DicomAnonymiserOptions { - // Arrange + RoutingKeySuccess = "yay", + FailIfSourceWriteable = true, + RoutingKeyFailure = "nay" + }; + + Console.WriteLine($"_dicomRootDirInfo.FullName: {_dicomRootDirInfo.FullName}"); + Console.WriteLine($"_extractRootDirInfo.FullName: {_extractRootDirInfo.FullName}"); + Console.WriteLine($"_extractDir: {_extractDir}"); + Console.WriteLine($"_sourceDcmPathAbs: 
{_sourceDcmPathAbs}"); + } - _extractFileMessage.IsIdentifiableExtraction = true; + private DicomAnonymiserConsumer GetNewDicomAnonymiserConsumer( + IDicomAnonymiser? mockDicomAnonymiser = null, + IProducerModel? mockProducerModel = null + ) + { + var consumer = new DicomAnonymiserConsumer( + _options, + _dicomRootDirInfo.FullName, + _extractRootDirInfo.FullName, + mockDicomAnonymiser ?? new Mock(MockBehavior.Strict).Object, + mockProducerModel ?? new Mock(MockBehavior.Strict).Object, + _mockFs + ); + return consumer; + } - var consumer = GetNewDicomAnonymiserConsumer(); + [TearDown] + public void TearDown() { } - FatalErrorEventArgs? fatalArgs = null; - consumer.OnFatal += (_, args) => fatalArgs = args; + #endregion - // Act + #region Tests + + // TODO (da 2024-03-28) Extract modality from cohort extractor instead of opening DICOM file + // This test is disabled because of FellowOakDicom.DicomFileException caused by DicomFile.Open + // Once the above TODO is implemented, this test can be enabled. 
+ /* + [Test] + public void ProcessMessageImpl_HappyPath() + { + // Arrange + + Expression> expectedAnonCall = + x => x.Anonymise( + It.Is(x => x == _extractFileMessage), + It.Is(x => x.FullName == _sourceDcmPathAbs), + It.Is(x => x.FullName == _mockFs.Path.Combine(_extractDir, _extractFileMessage.OutputPath)), + out It.Ref.IsAny + ); - consumer.ProcessMessage(new MessageHeader(), _extractFileMessage, 1); + var mockAnonymiser = new Mock(MockBehavior.Strict); + mockAnonymiser + .Setup(expectedAnonCall) + .Returns(ExtractedFileStatus.Anonymised); + + Expression> expectedSendCall = + x => x.SendMessage( + It.Is(x => + x.Status == ExtractedFileStatus.Anonymised && + x.StatusMessage == null && + x.OutputFilePath == _extractFileMessage.OutputPath + ), + It.IsAny(), + _options.RoutingKeySuccess + ); - // Assert + var mockProducerModel = new Mock(); + mockProducerModel.Setup(expectedSendCall); - TestTimelineAwaiter.Await(() => fatalArgs != null, "Expected Fatal to be called"); - Assert.Multiple(() => - { - Assert.That(fatalArgs?.Message, Is.EqualTo("ProcessMessageImpl threw unhandled exception")); - Assert.That(fatalArgs!.Exception!.Message, Is.EqualTo("DicomAnonymiserConsumer should not handle identifiable extraction messages")); - Assert.That(consumer.AckCount, Is.EqualTo(0)); - Assert.That(consumer.NackCount, Is.EqualTo(0)); - }); - } + var consumer = GetNewDicomAnonymiserConsumer(mockAnonymiser.Object, mockProducerModel.Object); - [Test] - public void ProcessMessageImpl_SourceFileMissing_AcksWithFailureStatus() - { - // Arrange + // Act + consumer.TestMessage(_extractFileMessage); - _mockFs.File.SetAttributes(_sourceDcmPathAbs, _mockFs.File.GetAttributes(_sourceDcmPathAbs) & ~FileAttributes.ReadOnly); - _mockFs.File.Delete(_sourceDcmPathAbs); + // Assert - Expression> expectedCall = - x => x.SendMessage( - It.Is(x => - x.Status == ExtractedFileStatus.FileMissing && - x.StatusMessage == $"Could not find file to anonymise: '{_sourceDcmPathAbs}'" && - x.OutputFilePath == 
null - ), - It.IsAny(), - _options.RoutingKeyFailure - ); + TestTimelineAwaiter.Await(() => consumer.AckCount == 1 && consumer.NackCount == 0); - var mockProducerModel = new Mock(); - mockProducerModel.Setup(expectedCall); + mockAnonymiser.Verify(expectedAnonCall, Times.Once); + mockProducerModel.Verify(expectedSendCall, Times.Once); + } + */ - var consumer = GetNewDicomAnonymiserConsumer(null, mockProducerModel.Object); + [Test] + public void ProcessMessageImpl_IsIdentifiableExtraction_ThrowsException() + { + // Arrange - // Act + _extractFileMessage.IsIdentifiableExtraction = true; - consumer.ProcessMessage(new MessageHeader(), _extractFileMessage, 1); + var consumer = GetNewDicomAnonymiserConsumer(); - // Assert + FatalErrorEventArgs? fatalArgs = null; + consumer.OnFatal += (_, args) => fatalArgs = args; - TestTimelineAwaiter.Await(() => consumer.AckCount == 1 && consumer.NackCount == 0); + // Act - mockProducerModel.Verify(expectedCall, Times.Once); - } + consumer.ProcessMessage(new MessageHeader(), _extractFileMessage, 1); - [Test] - public void ProcessMessageImpl_FailIfSourceWriteable_AcksWithFailureStatus() + // Assert + + TestTimelineAwaiter.Await(() => fatalArgs != null, "Expected Fatal to be called"); + Assert.Multiple(() => { - // Arrange + Assert.That(fatalArgs?.Message, Is.EqualTo("ProcessMessageImpl threw unhandled exception")); + Assert.That(fatalArgs!.Exception!.Message, Is.EqualTo("DicomAnonymiserConsumer should not handle identifiable extraction messages")); + Assert.That(consumer.AckCount, Is.EqualTo(0)); + Assert.That(consumer.NackCount, Is.EqualTo(0)); + }); + } + + [Test] + public void ProcessMessageImpl_SourceFileMissing_AcksWithFailureStatus() + { + // Arrange + + _mockFs.File.SetAttributes(_sourceDcmPathAbs, _mockFs.File.GetAttributes(_sourceDcmPathAbs) & ~FileAttributes.ReadOnly); + _mockFs.File.Delete(_sourceDcmPathAbs); + + Expression> expectedCall = + x => x.SendMessage( + It.Is(x => + x.Status == ExtractedFileStatus.FileMissing && + 
x.StatusMessage == $"Could not find file to anonymise: '{_sourceDcmPathAbs}'" && + x.OutputFilePath == null + ), + It.IsAny(), + _options.RoutingKeyFailure + ); - _mockFs.File.SetAttributes(_sourceDcmPathAbs, _mockFs.File.GetAttributes(_sourceDcmPathAbs) & ~FileAttributes.ReadOnly); + var mockProducerModel = new Mock(); + mockProducerModel.Setup(expectedCall); - Expression> expectedCall = - x => x.SendMessage( - It.Is(x => - x.Status == ExtractedFileStatus.ErrorWontRetry && - x.StatusMessage == $"Source file was writeable and FailIfSourceWriteable is set: '{_sourceDcmPathAbs}'" && - x.OutputFilePath == null - ), - It.IsAny(), - _options.RoutingKeyFailure - ); - var mockProducerModel = new Mock(); - mockProducerModel.Setup(expectedCall); + var consumer = GetNewDicomAnonymiserConsumer(null, mockProducerModel.Object); - var consumer = GetNewDicomAnonymiserConsumer(null, mockProducerModel.Object); + // Act - // Act + consumer.ProcessMessage(new MessageHeader(), _extractFileMessage, 1); - consumer.ProcessMessage(new MessageHeader(), _extractFileMessage, 1); + // Assert - // Assert + TestTimelineAwaiter.Await(() => consumer.AckCount == 1 && consumer.NackCount == 0); - TestTimelineAwaiter.Await(() => consumer.AckCount == 1 && consumer.NackCount == 0); + mockProducerModel.Verify(expectedCall, Times.Once); + } - mockProducerModel.Verify(expectedCall, Times.Once); - } + [Test] + public void ProcessMessageImpl_FailIfSourceWriteable_AcksWithFailureStatus() + { + // Arrange + + _mockFs.File.SetAttributes(_sourceDcmPathAbs, _mockFs.File.GetAttributes(_sourceDcmPathAbs) & ~FileAttributes.ReadOnly); + + Expression> expectedCall = + x => x.SendMessage( + It.Is(x => + x.Status == ExtractedFileStatus.ErrorWontRetry && + x.StatusMessage == $"Source file was writeable and FailIfSourceWriteable is set: '{_sourceDcmPathAbs}'" && + x.OutputFilePath == null + ), + It.IsAny(), + _options.RoutingKeyFailure + ); + var mockProducerModel = new Mock(); + mockProducerModel.Setup(expectedCall); - 
[Test] - public void ProcessMessageImpl_ExtractionDirMissing_ThrowsException() - { - // Arrange + var consumer = GetNewDicomAnonymiserConsumer(null, mockProducerModel.Object); + + // Act - _mockFs.Directory.Delete(_extractDir); + consumer.ProcessMessage(new MessageHeader(), _extractFileMessage, 1); - var consumer = GetNewDicomAnonymiserConsumer(); + // Assert - FatalErrorEventArgs? fatalArgs = null; - consumer.OnFatal += (_, args) => fatalArgs = args; + TestTimelineAwaiter.Await(() => consumer.AckCount == 1 && consumer.NackCount == 0); - // Act + mockProducerModel.Verify(expectedCall, Times.Once); + } - consumer.ProcessMessage(new MessageHeader(), _extractFileMessage, 1); + [Test] + public void ProcessMessageImpl_ExtractionDirMissing_ThrowsException() + { + // Arrange - // Assert + _mockFs.Directory.Delete(_extractDir); - TestTimelineAwaiter.Await(() => fatalArgs != null, "Expected Fatal to be called"); + var consumer = GetNewDicomAnonymiserConsumer(); - Assert.Multiple(() => - { - Assert.That(fatalArgs?.Message, Is.EqualTo("ProcessMessageImpl threw unhandled exception")); - Assert.That(fatalArgs!.Exception!.Message, Is.EqualTo($"Expected extraction directory to exist: '{_extractDir}'")); - Assert.That(consumer.AckCount, Is.EqualTo(0)); - Assert.That(consumer.NackCount, Is.EqualTo(0)); - }); - } + FatalErrorEventArgs? fatalArgs = null; + consumer.OnFatal += (_, args) => fatalArgs = args; - // TODO (da 2024-03-28) Extract modality from cohort extractor instead of opening DICOM file - // This test is disabled because of FellowOakDicom.DicomFileException caused by DicomFile.Open - // Once the above TODO is implemented, this test can be enabled. 
- /* - [Test] - public void ProcessMessageImpl_AnonymisationFailed_AcksWithFailureStatus() - { - // Arrange + // Act + + consumer.ProcessMessage(new MessageHeader(), _extractFileMessage, 1); + + // Assert - var mockAnonymiser = new Mock(MockBehavior.Strict); - mockAnonymiser - .Setup(x => x.Anonymise( - It.IsAny(), - It.IsAny(), - It.IsAny(), - out It.Ref.IsAny)) - .Throws(new Exception("oh no")); + TestTimelineAwaiter.Await(() => fatalArgs != null, "Expected Fatal to be called"); - Expression> expectedCall = - x => x.SendMessage( - It.Is(x => - x.Status == ExtractedFileStatus.ErrorWontRetry && - x.StatusMessage!.StartsWith($"Error anonymising '{_sourceDcmPathAbs}'. Exception message: IDicomAnonymiser") && - x.OutputFilePath == null - ), - It.IsAny(), - _options.RoutingKeyFailure - ); + Assert.Multiple(() => + { + Assert.That(fatalArgs?.Message, Is.EqualTo("ProcessMessageImpl threw unhandled exception")); + Assert.That(fatalArgs!.Exception!.Message, Is.EqualTo($"Expected extraction directory to exist: '{_extractDir}'")); + Assert.That(consumer.AckCount, Is.EqualTo(0)); + Assert.That(consumer.NackCount, Is.EqualTo(0)); + }); + } - var mockProducerModel = new Mock(); - mockProducerModel.Setup(expectedCall); + // TODO (da 2024-03-28) Extract modality from cohort extractor instead of opening DICOM file + // This test is disabled because of FellowOakDicom.DicomFileException caused by DicomFile.Open + // Once the above TODO is implemented, this test can be enabled. + /* + [Test] + public void ProcessMessageImpl_AnonymisationFailed_AcksWithFailureStatus() + { + // Arrange + + var mockAnonymiser = new Mock(MockBehavior.Strict); + mockAnonymiser + .Setup(x => x.Anonymise( + It.IsAny(), + It.IsAny(), + It.IsAny(), + out It.Ref.IsAny)) + .Throws(new Exception("oh no")); + + Expression> expectedCall = + x => x.SendMessage( + It.Is(x => + x.Status == ExtractedFileStatus.ErrorWontRetry && + x.StatusMessage!.StartsWith($"Error anonymising '{_sourceDcmPathAbs}'. 
Exception message: IDicomAnonymiser") && + x.OutputFilePath == null + ), + It.IsAny(), + _options.RoutingKeyFailure + ); - var consumer = GetNewDicomAnonymiserConsumer(null, mockProducerModel.Object); + var mockProducerModel = new Mock(); + mockProducerModel.Setup(expectedCall); - // Act - consumer.TestMessage(_extractFileMessage); + var consumer = GetNewDicomAnonymiserConsumer(null, mockProducerModel.Object); - // Assert + // Act + consumer.TestMessage(_extractFileMessage); - TestTimelineAwaiter.Await(() => consumer.AckCount == 1 && consumer.NackCount == 0); + // Assert - mockProducerModel.Verify(expectedCall, Times.Once); - } - */ + TestTimelineAwaiter.Await(() => consumer.AckCount == 1 && consumer.NackCount == 0); - #endregion + mockProducerModel.Verify(expectedCall, Times.Once); } + */ + + #endregion } diff --git a/tests/SmiServices.UnitTests/Microservices/DicomRelationalMapper/AutoRoutingAttacherTests.cs b/tests/SmiServices.UnitTests/Microservices/DicomRelationalMapper/AutoRoutingAttacherTests.cs index fd5c6475b..451c19220 100644 --- a/tests/SmiServices.UnitTests/Microservices/DicomRelationalMapper/AutoRoutingAttacherTests.cs +++ b/tests/SmiServices.UnitTests/Microservices/DicomRelationalMapper/AutoRoutingAttacherTests.cs @@ -8,42 +8,41 @@ using SmiServices.Microservices.DicomRelationalMapper; using System.IO; -namespace SmiServices.UnitTests.Microservices.DicomRelationalMapper +namespace SmiServices.UnitTests.Microservices.DicomRelationalMapper; + +public class AutoRoutingAttacherTests { - public class AutoRoutingAttacherTests + + [Test] + public void TestPatientAgeTag() { + string filename = Path.Combine(TestContext.CurrentContext.TestDirectory, "test.dcm"); - [Test] - public void TestPatientAgeTag() + var dataset = new DicomDataset { - string filename = Path.Combine(TestContext.CurrentContext.TestDirectory, "test.dcm"); + { DicomTag.SOPInstanceUID, "123.123.123" }, + { DicomTag.SOPClassUID, "123.123.123" }, + new DicomAgeString(DicomTag.PatientAge, "009Y") 
+ }; - var dataset = new DicomDataset - { - { DicomTag.SOPInstanceUID, "123.123.123" }, - { DicomTag.SOPClassUID, "123.123.123" }, - new DicomAgeString(DicomTag.PatientAge, "009Y") - }; + var cSharpValue = DicomTypeTranslaterReader.GetCSharpValue(dataset, DicomTag.PatientAge); - var cSharpValue = DicomTypeTranslaterReader.GetCSharpValue(dataset, DicomTag.PatientAge); + Assert.That(cSharpValue, Is.EqualTo("009Y")); - Assert.That(cSharpValue, Is.EqualTo("009Y")); + var file = new DicomFile(dataset); + file.Save(filename); - var file = new DicomFile(dataset); - file.Save(filename); - - var source = new DicomFileCollectionSource - { - FilenameField = "Path" - }; - source.PreInitialize(new ExplicitListDicomFileWorklist([filename]), ThrowImmediatelyDataLoadEventListener.Quiet); + var source = new DicomFileCollectionSource + { + FilenameField = "Path" + }; + source.PreInitialize(new ExplicitListDicomFileWorklist([filename]), ThrowImmediatelyDataLoadEventListener.Quiet); - var chunk = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); + var chunk = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); - Assert.That(chunk.Rows[0]["PatientAge"], Is.EqualTo("009Y")); - } + Assert.That(chunk.Rows[0]["PatientAge"], Is.EqualTo("009Y")); } } diff --git a/tests/SmiServices.UnitTests/Microservices/DicomRelationalMapper/DicomDatasetCollectionSourceTests.cs b/tests/SmiServices.UnitTests/Microservices/DicomRelationalMapper/DicomDatasetCollectionSourceTests.cs index 728889791..9745818ef 100644 --- a/tests/SmiServices.UnitTests/Microservices/DicomRelationalMapper/DicomDatasetCollectionSourceTests.cs +++ b/tests/SmiServices.UnitTests/Microservices/DicomRelationalMapper/DicomDatasetCollectionSourceTests.cs @@ -16,218 +16,218 @@ using System.Linq; using System.Text.RegularExpressions; -namespace SmiServices.UnitTests.Microservices.DicomRelationalMapper +namespace 
SmiServices.UnitTests.Microservices.DicomRelationalMapper; + +public class DicomDatasetCollectionSourceTests { - public class DicomDatasetCollectionSourceTests + [OneTimeSetUp] + public void InitializeFansi() { - [OneTimeSetUp] - public void InitializeFansi() - { - ImplementationManager.Load(); - } + ImplementationManager.Load(); + } - /// - /// Demonstrates the basic scenario in which a dicom dataset is turned into a data table by the DicomDatasetCollectionSource - /// - [Test] - public void SourceReadSimpleTagToTable() - { - var source = new DicomDatasetCollectionSource(); + /// + /// Demonstrates the basic scenario in which a dicom dataset is turned into a data table by the DicomDatasetCollectionSource + /// + [Test] + public void SourceReadSimpleTagToTable() + { + var source = new DicomDatasetCollectionSource(); - var ds = new DicomDataset - { - { DicomTag.PatientAge, "123Y" } - }; + var ds = new DicomDataset + { + { DicomTag.PatientAge, "123Y" } + }; - var worklist = new ExplicitListDicomDatasetWorklist([ds], "fish.dcm"); + var worklist = new ExplicitListDicomDatasetWorklist([ds], "fish.dcm"); - source.PreInitialize(worklist, ThrowImmediatelyDataLoadEventListener.Quiet); - source.FilenameField = "RelFileName"; + source.PreInitialize(worklist, ThrowImmediatelyDataLoadEventListener.Quiet); + source.FilenameField = "RelFileName"; - var dt = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); - Assert.Multiple(() => - { - Assert.That(dt.Rows[0]["PatientAge"], Is.EqualTo("123Y")); - Assert.That(dt.Rows[0]["RelFileName"], Is.EqualTo("fish.dcm")); - }); - } + var dt = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); + Assert.Multiple(() => + { + Assert.That(dt.Rows[0]["PatientAge"], Is.EqualTo("123Y")); + Assert.That(dt.Rows[0]["RelFileName"], Is.EqualTo("fish.dcm")); + }); + } - [TestCase(DataTooWideHandling.None)] - [TestCase(DataTooWideHandling.TruncateAndWarn)] - 
[TestCase(DataTooWideHandling.MarkCorrupt)] - [TestCase(DataTooWideHandling.ConvertToNullAndWarn)] - public void TestStringTooLong(DataTooWideHandling strategy) + [TestCase(DataTooWideHandling.None)] + [TestCase(DataTooWideHandling.TruncateAndWarn)] + [TestCase(DataTooWideHandling.MarkCorrupt)] + [TestCase(DataTooWideHandling.ConvertToNullAndWarn)] + public void TestStringTooLong(DataTooWideHandling strategy) + { + var ds = new DicomDataset { - var ds = new DicomDataset - { #pragma warning disable CS0618 // Obsolete - AutoValidate = false - }; + AutoValidate = false + }; #pragma warning restore CS0618 - ds.AddOrUpdate(DicomTag.AccessionNumber, "1342340123129473279427572495349757459347839479375974"); - ds.GetValues(DicomTag.AccessionNumber); + ds.AddOrUpdate(DicomTag.AccessionNumber, "1342340123129473279427572495349757459347839479375974"); + ds.GetValues(DicomTag.AccessionNumber); - var source = new DicomDatasetCollectionSource(); + var source = new DicomDatasetCollectionSource(); - var worklist = new ExplicitListDicomDatasetWorklist([ds], "fish.dcm", []); - source.DataTooLongHandlingStrategy = strategy; - source.FilenameField = "abc"; - source.PreInitialize(worklist, ThrowImmediatelyDataLoadEventListener.Quiet); + var worklist = new ExplicitListDicomDatasetWorklist([ds], "fish.dcm", []); + source.DataTooLongHandlingStrategy = strategy; + source.FilenameField = "abc"; + source.PreInitialize(worklist, ThrowImmediatelyDataLoadEventListener.Quiet); - var dt = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); + var dt = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); - switch (strategy) - { - case DataTooWideHandling.None: - Assert.That(dt.Rows[0]["AccessionNumber"], Is.EqualTo("1342340123129473279427572495349757459347839479375974")); - Assert.That(worklist.CorruptMessages, Is.Empty); - break; - case DataTooWideHandling.TruncateAndWarn: - 
Assert.That(dt.Rows[0]["AccessionNumber"], Is.EqualTo("1342340123129473")); - Assert.That(worklist.CorruptMessages, Is.Empty); - break; - case DataTooWideHandling.MarkCorrupt: - Assert.That(dt, Is.Null); //since dt has no rows it just returns null - Assert.That(worklist.CorruptMessages, Has.Count.EqualTo(1)); - break; - case DataTooWideHandling.ConvertToNullAndWarn: - Assert.That(dt.Rows[0]["AccessionNumber"], Is.EqualTo(DBNull.Value)); - Assert.That(worklist.CorruptMessages, Is.Empty); - break; - default: - throw new ArgumentOutOfRangeException(nameof(strategy)); - } + switch (strategy) + { + case DataTooWideHandling.None: + Assert.That(dt.Rows[0]["AccessionNumber"], Is.EqualTo("1342340123129473279427572495349757459347839479375974")); + Assert.That(worklist.CorruptMessages, Is.Empty); + break; + case DataTooWideHandling.TruncateAndWarn: + Assert.That(dt.Rows[0]["AccessionNumber"], Is.EqualTo("1342340123129473")); + Assert.That(worklist.CorruptMessages, Is.Empty); + break; + case DataTooWideHandling.MarkCorrupt: + Assert.That(dt, Is.Null); //since dt has no rows it just returns null + Assert.That(worklist.CorruptMessages, Has.Count.EqualTo(1)); + break; + case DataTooWideHandling.ConvertToNullAndWarn: + Assert.That(dt.Rows[0]["AccessionNumber"], Is.EqualTo(DBNull.Value)); + Assert.That(worklist.CorruptMessages, Is.Empty); + break; + default: + throw new ArgumentOutOfRangeException(nameof(strategy)); } + } - /// - /// Demonstrates that invalid float values are not a problem and get deserialized as strings - /// - [TestCase(InvalidDataHandling.ConvertToNullAndWarn)] - [TestCase(InvalidDataHandling.ThrowException)] - public void SourceRead_InvalidFloat_ToTable(InvalidDataHandling dataHandlingStrategy) + /// + /// Demonstrates that invalid float values are not a problem and get deserialized as strings + /// + [TestCase(InvalidDataHandling.ConvertToNullAndWarn)] + [TestCase(InvalidDataHandling.ThrowException)] + public void 
SourceRead_InvalidFloat_ToTable(InvalidDataHandling dataHandlingStrategy) + { + var source = new DicomDatasetCollectionSource { - var source = new DicomDatasetCollectionSource - { - InvalidDataHandlingStrategy = dataHandlingStrategy - }; + InvalidDataHandlingStrategy = dataHandlingStrategy + }; - var ds = new DicomDataset - { - { DicomTag.PatientAge, "123Y" }, - { DicomTag.WedgeAngleFloat, "3.40282347e+038" } - }; - - var worklist = new ExplicitListDicomDatasetWorklist([ds], "fish.dcm", new Dictionary { { "MessageGuid", "123x321" } }); + var ds = new DicomDataset + { + { DicomTag.PatientAge, "123Y" }, + { DicomTag.WedgeAngleFloat, "3.40282347e+038" } + }; - source.PreInitialize(worklist, ThrowImmediatelyDataLoadEventListener.Quiet); - source.FilenameField = "RelFileName"; + var worklist = new ExplicitListDicomDatasetWorklist([ds], "fish.dcm", new Dictionary { { "MessageGuid", "123x321" } }); - DataTable? dt = null; + source.PreInitialize(worklist, ThrowImmediatelyDataLoadEventListener.Quiet); + source.FilenameField = "RelFileName"; - switch (dataHandlingStrategy) - { - case InvalidDataHandling.ThrowException: - Assert.Throws(() => source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken())); - return; + DataTable? 
dt = null; - case InvalidDataHandling.ConvertToNullAndWarn: - var toMem = new ToMemoryDataLoadEventListener(true); - dt = source.GetChunk(toMem, new GracefulCancellationToken()); + switch (dataHandlingStrategy) + { + case InvalidDataHandling.ThrowException: + Assert.Throws(() => source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken())); + return; - Assert.That(dt.Rows[0]["WedgeAngleFloat"], Is.EqualTo(DBNull.Value)); + case InvalidDataHandling.ConvertToNullAndWarn: + var toMem = new ToMemoryDataLoadEventListener(true); + dt = source.GetChunk(toMem, new GracefulCancellationToken()); - //should be a warning about WedgeAngleFloat logged - var warning = toMem.EventsReceivedBySender.SelectMany(static e => e.Value).Single(v => v.ProgressEventType == ProgressEventType.Warning); - Assert.That(warning.Message, Does.Contain("WedgeAngleFloat")); - Assert.That(warning.Message, Does.Contain("MessageGuid")); - Assert.That(warning.Message, Does.Contain("123x321")); - Assert.That(warning.Message, Does.Contain("fish.dcm")); + Assert.That(dt.Rows[0]["WedgeAngleFloat"], Is.EqualTo(DBNull.Value)); - break; + //should be a warning about WedgeAngleFloat logged + var warning = toMem.EventsReceivedBySender.SelectMany(static e => e.Value).Single(v => v.ProgressEventType == ProgressEventType.Warning); + Assert.That(warning.Message, Does.Contain("WedgeAngleFloat")); + Assert.That(warning.Message, Does.Contain("MessageGuid")); + Assert.That(warning.Message, Does.Contain("123x321")); + Assert.That(warning.Message, Does.Contain("fish.dcm")); - default: - throw new ArgumentOutOfRangeException(nameof(dataHandlingStrategy)); - } + break; - Assert.Multiple(() => - { - Assert.That(dt.Rows[0]["PatientAge"], Is.EqualTo("123Y")); - Assert.That(dt.Rows[0]["RelFileName"], Is.EqualTo("fish.dcm")); - }); + default: + throw new ArgumentOutOfRangeException(nameof(dataHandlingStrategy)); } - [TestCase(InvalidDataHandling.ConvertToNullAndWarn)] - 
[TestCase(InvalidDataHandling.ThrowException)] - [TestCase(InvalidDataHandling.MarkCorrupt)] - public void SourceRead_InvalidFloatInSequence_ToTable(InvalidDataHandling dataHandlingStrategy) + Assert.Multiple(() => { + Assert.That(dt.Rows[0]["PatientAge"], Is.EqualTo("123Y")); + Assert.That(dt.Rows[0]["RelFileName"], Is.EqualTo("fish.dcm")); + }); + } - var source = new DicomDatasetCollectionSource - { - InvalidDataHandlingStrategy = dataHandlingStrategy - }; - - //when we have a dicom file with an invalid Float number - var ds = new DicomDataset - { - { DicomTag.PatientAge, "123Y" } - }; + [TestCase(InvalidDataHandling.ConvertToNullAndWarn)] + [TestCase(InvalidDataHandling.ThrowException)] + [TestCase(InvalidDataHandling.MarkCorrupt)] + public void SourceRead_InvalidFloatInSequence_ToTable(InvalidDataHandling dataHandlingStrategy) + { - var sequence = new DicomSequence(DicomTag.AcquisitionContextSequence, - new DicomDataset - { - {DicomTag.WedgeAngleFloat, "3.40282347e+038"} - }); + var source = new DicomDatasetCollectionSource + { + InvalidDataHandlingStrategy = dataHandlingStrategy + }; - ds.Add(sequence); + //when we have a dicom file with an invalid Float number + var ds = new DicomDataset + { + { DicomTag.PatientAge, "123Y" } + }; - var worklist = new ExplicitListDicomDatasetWorklist([ds], "fish.dcm"); + var sequence = new DicomSequence(DicomTag.AcquisitionContextSequence, + new DicomDataset + { + {DicomTag.WedgeAngleFloat, "3.40282347e+038"} + }); - source.PreInitialize(worklist, ThrowImmediatelyDataLoadEventListener.Quiet); - source.FilenameField = "RelFileName"; + ds.Add(sequence); - DataTable? 
dt = null; + var worklist = new ExplicitListDicomDatasetWorklist([ds], "fish.dcm"); - switch (dataHandlingStrategy) - { + source.PreInitialize(worklist, ThrowImmediatelyDataLoadEventListener.Quiet); + source.FilenameField = "RelFileName"; - case InvalidDataHandling.MarkCorrupt: - dt = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); + DataTable? dt = null; - //row was not processed (which leaves data table with 0 rows and hence component returns null) - Assert.That(dt, Is.Null); + switch (dataHandlingStrategy) + { - //corrupt message should appear in the worklist - Assert.That(worklist.CorruptMessages, Has.Count.EqualTo(1)); - return; - case InvalidDataHandling.ConvertToNullAndWarn: - dt = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); + case InvalidDataHandling.MarkCorrupt: + dt = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); - Assert.That(dt.Rows[0]["PatientAge"], Is.EqualTo("123Y")); - Assert.That(dt.Rows[0]["RelFileName"], Is.EqualTo("fish.dcm")); - Assert.That(dt.Rows[0]["AcquisitionContextSequence"], Is.EqualTo(DBNull.Value)); - Assert.That(worklist.CorruptMessages, Is.Empty); - break; - case InvalidDataHandling.ThrowException: - Assert.Throws(() => source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken())); - return; + //row was not processed (which leaves data table with 0 rows and hence component returns null) + Assert.That(dt, Is.Null); - default: - throw new ArgumentOutOfRangeException(nameof(dataHandlingStrategy)); - } + //corrupt message should appear in the worklist + Assert.That(worklist.CorruptMessages, Has.Count.EqualTo(1)); + return; + case InvalidDataHandling.ConvertToNullAndWarn: + dt = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); + Assert.That(dt.Rows[0]["PatientAge"], Is.EqualTo("123Y")); + 
Assert.That(dt.Rows[0]["RelFileName"], Is.EqualTo("fish.dcm")); + Assert.That(dt.Rows[0]["AcquisitionContextSequence"], Is.EqualTo(DBNull.Value)); + Assert.That(worklist.CorruptMessages, Is.Empty); + break; + case InvalidDataHandling.ThrowException: + Assert.Throws(() => source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken())); + return; + + default: + throw new ArgumentOutOfRangeException(nameof(dataHandlingStrategy)); } - [TestCase(InvalidDataHandling.ConvertToNullAndWarn)] - [TestCase(InvalidDataHandling.ThrowException)] - public void SourceRead_InvalidFloatInSequence_WithElevation_ToTable(InvalidDataHandling dataHandlingStrategy) - { - //create the elevation configuration - var elevationRules = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, "ElevationConfig.xml")); + } + + [TestCase(InvalidDataHandling.ConvertToNullAndWarn)] + [TestCase(InvalidDataHandling.ThrowException)] + public void SourceRead_InvalidFloatInSequence_WithElevation_ToTable(InvalidDataHandling dataHandlingStrategy) + { + //create the elevation configuration + var elevationRules = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, "ElevationConfig.xml")); - File.WriteAllText(elevationRules.FullName, + File.WriteAllText(elevationRules.FullName, @" @@ -245,166 +245,165 @@ public void SourceRead_InvalidFloatInSequence_WithElevation_ToTable(InvalidDataH "); - //setup the source reader - var source = new DicomDatasetCollectionSource - { - InvalidDataHandlingStrategy = dataHandlingStrategy, - TagElevationConfigurationFile = elevationRules, - - //don't load the sequence, just the elevation - TagBlacklist = new Regex("AcquisitionContextSequence") - }; - - //The dataset we are trying to load - var ds = new DicomDataset - { - { DicomTag.PatientAge, "123Y" } - }; - - var sequence = new DicomSequence(DicomTag.AcquisitionContextSequence, - new DicomDataset - { - {DicomTag.WedgeAngleFloat, "3.40282347e+038"} //dodgy float in sequence 
(the sequence we are trying to elevate) - }); - - ds.Add(sequence); - - var worklist = new ExplicitListDicomDatasetWorklist([ds], "fish.dcm", new Dictionary { { "MessageGuid", "123x321" } }); + //setup the source reader + var source = new DicomDatasetCollectionSource + { + InvalidDataHandlingStrategy = dataHandlingStrategy, + TagElevationConfigurationFile = elevationRules, - source.PreInitialize(worklist, ThrowImmediatelyDataLoadEventListener.Quiet); - source.FilenameField = "RelFileName"; + //don't load the sequence, just the elevation + TagBlacklist = new Regex("AcquisitionContextSequence") + }; - DataTable? dt = null; + //The dataset we are trying to load + var ds = new DicomDataset + { + { DicomTag.PatientAge, "123Y" } + }; - switch (dataHandlingStrategy) + var sequence = new DicomSequence(DicomTag.AcquisitionContextSequence, + new DicomDataset { - case InvalidDataHandling.ThrowException: - Assert.Throws(() => source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken())); - return; + {DicomTag.WedgeAngleFloat, "3.40282347e+038"} //dodgy float in sequence (the sequence we are trying to elevate) + }); - case InvalidDataHandling.ConvertToNullAndWarn: - var tomem = new ToMemoryDataLoadEventListener(true); - dt = source.GetChunk(tomem, new GracefulCancellationToken()); - Assert.That(dt.Rows[0]["WedgeAngleFloat"], Is.EqualTo(DBNull.Value)); + ds.Add(sequence); - //should be a warning about WedgeAngleFloat logged - var warning = tomem.EventsReceivedBySender.SelectMany(e => e.Value).Single(v => v.ProgressEventType == ProgressEventType.Warning); - Assert.That(warning.Message, Does.Contain("WedgeAngleFloat")); - Assert.That(warning.Message, Does.Contain("MessageGuid")); - Assert.That(warning.Message, Does.Contain("123x321")); - Assert.That(warning.Message, Does.Contain("fish.dcm")); + var worklist = new ExplicitListDicomDatasetWorklist([ds], "fish.dcm", new Dictionary { { "MessageGuid", "123x321" } }); - break; + 
source.PreInitialize(worklist, ThrowImmediatelyDataLoadEventListener.Quiet); + source.FilenameField = "RelFileName"; - default: - throw new ArgumentOutOfRangeException(nameof(dataHandlingStrategy)); - } + DataTable? dt = null; - Assert.Multiple(() => - { - Assert.That(dt.Rows[0]["PatientAge"], Is.EqualTo("123Y")); - Assert.That(dt.Rows[0]["RelFileName"], Is.EqualTo("fish.dcm")); - }); + switch (dataHandlingStrategy) + { + case InvalidDataHandling.ThrowException: + Assert.Throws(() => source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken())); + return; + + case InvalidDataHandling.ConvertToNullAndWarn: + var tomem = new ToMemoryDataLoadEventListener(true); + dt = source.GetChunk(tomem, new GracefulCancellationToken()); + Assert.That(dt.Rows[0]["WedgeAngleFloat"], Is.EqualTo(DBNull.Value)); + + //should be a warning about WedgeAngleFloat logged + var warning = tomem.EventsReceivedBySender.SelectMany(e => e.Value).Single(v => v.ProgressEventType == ProgressEventType.Warning); + Assert.That(warning.Message, Does.Contain("WedgeAngleFloat")); + Assert.That(warning.Message, Does.Contain("MessageGuid")); + Assert.That(warning.Message, Does.Contain("123x321")); + Assert.That(warning.Message, Does.Contain("fish.dcm")); + + break; + + default: + throw new ArgumentOutOfRangeException(nameof(dataHandlingStrategy)); } - [Test] - public void SourceRead_ToTable_IgnoringSuperflousColumn_TableInfo() + Assert.Multiple(() => { - var repo = new MemoryCatalogueRepository(); + Assert.That(dt.Rows[0]["PatientAge"], Is.EqualTo("123Y")); + Assert.That(dt.Rows[0]["RelFileName"], Is.EqualTo("fish.dcm")); + }); + } - var ti = new TableInfo(repo, "MyTable"); - _ = new ColumnInfo(repo, "PatientAge", "varchar(100)", ti); - _ = new ColumnInfo(repo, "RelFileName", "varchar(100)", ti); + [Test] + public void SourceRead_ToTable_IgnoringSuperflousColumn_TableInfo() + { + var repo = new MemoryCatalogueRepository(); - var source = new DicomDatasetCollectionSource - { 
- InvalidDataHandlingStrategy = InvalidDataHandling.ThrowException - }; + var ti = new TableInfo(repo, "MyTable"); + _ = new ColumnInfo(repo, "PatientAge", "varchar(100)", ti); + _ = new ColumnInfo(repo, "RelFileName", "varchar(100)", ti); + + var source = new DicomDatasetCollectionSource + { + InvalidDataHandlingStrategy = InvalidDataHandling.ThrowException + }; + + var ds = new DicomDataset + { + { DicomTag.PatientAge, "123Y" } + }; - var ds = new DicomDataset + var sequence = new DicomSequence(DicomTag.AcquisitionContextSequence, + new DicomDataset { - { DicomTag.PatientAge, "123Y" } - }; + {DicomTag.WedgeAngleFloat, "3.40282347e+038"} + }); - var sequence = new DicomSequence(DicomTag.AcquisitionContextSequence, - new DicomDataset - { - {DicomTag.WedgeAngleFloat, "3.40282347e+038"} - }); + ds.Add(sequence); - ds.Add(sequence); + var worklist = new ExplicitListDicomDatasetWorklist([ds], "fish.dcm"); - var worklist = new ExplicitListDicomDatasetWorklist([ds], "fish.dcm"); + source.PreInitialize(worklist, ThrowImmediatelyDataLoadEventListener.Quiet); + source.FilenameField = "RelFileName"; + source.FieldMapTableIfAny = ti; - source.PreInitialize(worklist, ThrowImmediatelyDataLoadEventListener.Quiet); - source.FilenameField = "RelFileName"; - source.FieldMapTableIfAny = ti; + var dt = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); + Assert.Multiple(() => + { + Assert.That(dt.Rows[0]["PatientAge"], Is.EqualTo("123Y")); + Assert.That(dt.Rows[0]["RelFileName"], Is.EqualTo("fish.dcm")); + Assert.That(dt.Columns, Has.Count.EqualTo(2)); + }); + } - var dt = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); - Assert.Multiple(() => - { - Assert.That(dt.Rows[0]["PatientAge"], Is.EqualTo("123Y")); - Assert.That(dt.Rows[0]["RelFileName"], Is.EqualTo("fish.dcm")); - Assert.That(dt.Columns, Has.Count.EqualTo(2)); - }); - } + [Test] + public void 
SourceRead_ToTable_IgnoringSuperflousColumn_LoadMetadata() + { + var repo = new MemoryCatalogueRepository(); + + var lmd = new LoadMetadata(repo, "MyLoad"); + + var cata1 = new Catalogue(repo, "PatientCatalogue"); + var ci1 = new CatalogueItem(repo, cata1, "PatientAge"); + var ti1 = new TableInfo(repo, "PatientTableInfo"); + var colInfo1 = new ColumnInfo(repo, "PatientAge", "varchar(100)", ti1); + ci1.ColumnInfo_ID = colInfo1.ID; + ci1.SaveToDatabase(); + lmd.LinkToCatalogue(cata1); + cata1.SaveToDatabase(); + + var cata2 = new Catalogue(repo, "FileCatalogue"); + var ci2 = new CatalogueItem(repo, cata2, "RelFileName"); + var ti2 = new TableInfo(repo, "FileTableInfo"); + var colInfo2 = new ColumnInfo(repo, "RelFileName", "varchar(100)", ti2); + ci2.ColumnInfo_ID = colInfo2.ID; + ci2.SaveToDatabase(); + lmd.LinkToCatalogue(cata2); + cata2.SaveToDatabase(); + + var source = new DicomDatasetCollectionSource + { + InvalidDataHandlingStrategy = InvalidDataHandling.ThrowException + }; - [Test] - public void SourceRead_ToTable_IgnoringSuperflousColumn_LoadMetadata() + var ds = new DicomDataset { - var repo = new MemoryCatalogueRepository(); - - var lmd = new LoadMetadata(repo, "MyLoad"); - - var cata1 = new Catalogue(repo, "PatientCatalogue"); - var ci1 = new CatalogueItem(repo, cata1, "PatientAge"); - var ti1 = new TableInfo(repo, "PatientTableInfo"); - var colInfo1 = new ColumnInfo(repo, "PatientAge", "varchar(100)", ti1); - ci1.ColumnInfo_ID = colInfo1.ID; - ci1.SaveToDatabase(); - lmd.LinkToCatalogue(cata1); - cata1.SaveToDatabase(); - - var cata2 = new Catalogue(repo, "FileCatalogue"); - var ci2 = new CatalogueItem(repo, cata2, "RelFileName"); - var ti2 = new TableInfo(repo, "FileTableInfo"); - var colInfo2 = new ColumnInfo(repo, "RelFileName", "varchar(100)", ti2); - ci2.ColumnInfo_ID = colInfo2.ID; - ci2.SaveToDatabase(); - lmd.LinkToCatalogue(cata2); - cata2.SaveToDatabase(); - - var source = new DicomDatasetCollectionSource - { - InvalidDataHandlingStrategy = 
InvalidDataHandling.ThrowException - }; + { DicomTag.PatientAge, "123Y" } + }; - var ds = new DicomDataset + var sequence = new DicomSequence(DicomTag.AcquisitionContextSequence, + new DicomDataset { - { DicomTag.PatientAge, "123Y" } - }; - - var sequence = new DicomSequence(DicomTag.AcquisitionContextSequence, - new DicomDataset - { - {DicomTag.WedgeAngleFloat, "3.40282347e+038"} - }); + {DicomTag.WedgeAngleFloat, "3.40282347e+038"} + }); - ds.Add(sequence); + ds.Add(sequence); - var worklist = new ExplicitListDicomDatasetWorklist([ds], "fish.dcm"); + var worklist = new ExplicitListDicomDatasetWorklist([ds], "fish.dcm"); - source.PreInitialize(worklist, ThrowImmediatelyDataLoadEventListener.Quiet); - source.FilenameField = "RelFileName"; - source.UseAllTableInfoInLoadAsFieldMap = lmd; + source.PreInitialize(worklist, ThrowImmediatelyDataLoadEventListener.Quiet); + source.FilenameField = "RelFileName"; + source.UseAllTableInfoInLoadAsFieldMap = lmd; - var dt = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); - Assert.Multiple(() => - { - Assert.That(dt.Rows[0]["PatientAge"], Is.EqualTo("123Y")); - Assert.That(dt.Rows[0]["RelFileName"], Is.EqualTo("fish.dcm")); - Assert.That(dt.Columns, Has.Count.EqualTo(2)); - }); - } + var dt = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); + Assert.Multiple(() => + { + Assert.That(dt.Rows[0]["PatientAge"], Is.EqualTo("123Y")); + Assert.That(dt.Rows[0]["RelFileName"], Is.EqualTo("fish.dcm")); + Assert.That(dt.Columns, Has.Count.EqualTo(2)); + }); } } diff --git a/tests/SmiServices.UnitTests/Microservices/DicomRelationalMapper/DicomRelationalMapperTestHelper.cs b/tests/SmiServices.UnitTests/Microservices/DicomRelationalMapper/DicomRelationalMapperTestHelper.cs index 174163345..f40b09717 100644 --- a/tests/SmiServices.UnitTests/Microservices/DicomRelationalMapper/DicomRelationalMapperTestHelper.cs +++ 
b/tests/SmiServices.UnitTests/Microservices/DicomRelationalMapper/DicomRelationalMapperTestHelper.cs @@ -19,141 +19,141 @@ using System.IO; using System.Linq; -namespace SmiServices.UnitTests.Microservices.DicomRelationalMapper +namespace SmiServices.UnitTests.Microservices.DicomRelationalMapper; + +public class DicomRelationalMapperTestHelper { - public class DicomRelationalMapperTestHelper - { - public LoadMetadata? LoadMetadata { get; private set; } - public DiscoveredTable? ImageTable { get; private set; } - public DiscoveredTable? SeriesTable { get; private set; } - public DiscoveredTable? StudyTable { get; private set; } + public LoadMetadata? LoadMetadata { get; private set; } + public DiscoveredTable? ImageTable { get; private set; } + public DiscoveredTable? SeriesTable { get; private set; } + public DiscoveredTable? StudyTable { get; private set; } - public TableInfo? ImageTableInfo { get; private set; } - public TableInfo? SeriesTableInfo { get; private set; } - public TableInfo? StudyTableInfo { get; private set; } + public TableInfo? ImageTableInfo { get; private set; } + public TableInfo? SeriesTableInfo { get; private set; } + public TableInfo? StudyTableInfo { get; private set; } - public PipelineComponent? DicomSourcePipelineComponent { get; private set; } + public PipelineComponent? DicomSourcePipelineComponent { get; private set; } - public void SetupSuite(DiscoveredDatabase databaseToCreateInto, IRDMPPlatformRepositoryServiceLocator repositoryLocator, GlobalOptions globalOptions, Type pipelineDicomSourceType, string? root = null, ImageTableTemplateCollection? template = null, bool persistentRaw = false, string? 
modalityPrefix = null) - { - ImageTable = databaseToCreateInto.ExpectTable($"{modalityPrefix}ImageTable"); - SeriesTable = databaseToCreateInto.ExpectTable($"{modalityPrefix}SeriesTable"); - StudyTable = databaseToCreateInto.ExpectTable($"{modalityPrefix}StudyTable"); + public void SetupSuite(DiscoveredDatabase databaseToCreateInto, IRDMPPlatformRepositoryServiceLocator repositoryLocator, GlobalOptions globalOptions, Type pipelineDicomSourceType, string? root = null, ImageTableTemplateCollection? template = null, bool persistentRaw = false, string? modalityPrefix = null) + { + ImageTable = databaseToCreateInto.ExpectTable($"{modalityPrefix}ImageTable"); + SeriesTable = databaseToCreateInto.ExpectTable($"{modalityPrefix}SeriesTable"); + StudyTable = databaseToCreateInto.ExpectTable($"{modalityPrefix}StudyTable"); - try - { - var dest = Path.Combine(TestContext.CurrentContext.TestDirectory, "Rdmp.Dicom.dll"); - if (!File.Exists(dest)) - File.Copy(typeof(InvalidDataHandling).Assembly.Location, dest, false); - } - catch (IOException) - { - //never mind, it's probably locked - } + try + { + var dest = Path.Combine(TestContext.CurrentContext.TestDirectory, "Rdmp.Dicom.dll"); + if (!File.Exists(dest)) + File.Copy(typeof(InvalidDataHandling).Assembly.Location, dest, false); + } + catch (IOException) + { + //never mind, it's probably locked + } - var catalogueRepository = repositoryLocator.CatalogueRepository; - var dataExportRepository = repositoryLocator.DataExportRepository; + var catalogueRepository = repositoryLocator.CatalogueRepository; + var dataExportRepository = repositoryLocator.DataExportRepository; - foreach (var t in new[] { ImageTable, SeriesTable, StudyTable }) - if (t.Exists()) - t.Drop(); + foreach (var t in new[] { ImageTable, SeriesTable, StudyTable }) + if (t.Exists()) + t.Drop(); - // delete any remnants - foreach (var p in catalogueRepository.GetAllObjects()) + // delete any remnants + foreach (var p in catalogueRepository.GetAllObjects()) + { + if 
(p.Name.Contains("(Image Loading Pipe)")) { - if (p.Name.Contains("(Image Loading Pipe)")) - { - p.DeleteInDatabase(); - } + p.DeleteInDatabase(); } + } - var suite = new ExecuteCommandCreateNewImagingDatasetSuite(repositoryLocator, databaseToCreateInto, new DirectoryInfo(TestContext.CurrentContext.TestDirectory)) - { - Template = template ?? GetDefaultTemplate(databaseToCreateInto.Server.DatabaseType), + var suite = new ExecuteCommandCreateNewImagingDatasetSuite(repositoryLocator, databaseToCreateInto, new DirectoryInfo(TestContext.CurrentContext.TestDirectory)) + { + Template = template ?? GetDefaultTemplate(databaseToCreateInto.Server.DatabaseType), - PersistentRaw = persistentRaw, - TablePrefix = modalityPrefix, + PersistentRaw = persistentRaw, + TablePrefix = modalityPrefix, - DicomSourceType = pipelineDicomSourceType, - CreateCoalescer = true - }; + DicomSourceType = pipelineDicomSourceType, + CreateCoalescer = true + }; - suite.Execute(); - DicomSourcePipelineComponent = suite.DicomSourcePipelineComponent; //store the component created so we can inject/adjust the arguments e.g. adding ElevationRequests to it + suite.Execute(); + DicomSourcePipelineComponent = suite.DicomSourcePipelineComponent; //store the component created so we can inject/adjust the arguments e.g. 
adding ElevationRequests to it - LoadMetadata = suite.NewLoadMetadata; + LoadMetadata = suite.NewLoadMetadata; - var tableInfos = LoadMetadata.GetAllCatalogues().SelectMany(c => c.GetTableInfoList(false)).Distinct().ToArray(); + var tableInfos = LoadMetadata.GetAllCatalogues().SelectMany(c => c.GetTableInfoList(false)).Distinct().ToArray(); - ImageTableInfo = (TableInfo)tableInfos.Single(t => t.GetRuntimeName()?.Equals(ImageTable.GetRuntimeName()) == true); - SeriesTableInfo = (TableInfo)tableInfos.Single(t => t.GetRuntimeName()?.Equals(SeriesTable.GetRuntimeName()) == true); - StudyTableInfo = (TableInfo)tableInfos.Single(t => t.GetRuntimeName()?.Equals(StudyTable.GetRuntimeName()) == true); + ImageTableInfo = (TableInfo)tableInfos.Single(t => t.GetRuntimeName()?.Equals(ImageTable.GetRuntimeName()) == true); + SeriesTableInfo = (TableInfo)tableInfos.Single(t => t.GetRuntimeName()?.Equals(SeriesTable.GetRuntimeName()) == true); + StudyTableInfo = (TableInfo)tableInfos.Single(t => t.GetRuntimeName()?.Equals(StudyTable.GetRuntimeName()) == true); - // Override the options with stuff coming from Core RDMP DatabaseTests (TestDatabases.txt) - globalOptions.FileSystemOptions!.FileSystemRoot = root ?? TestContext.CurrentContext.TestDirectory; + // Override the options with stuff coming from Core RDMP DatabaseTests (TestDatabases.txt) + globalOptions.FileSystemOptions!.FileSystemRoot = root ?? 
TestContext.CurrentContext.TestDirectory; - globalOptions.RDMPOptions!.CatalogueConnectionString = (catalogueRepository as TableRepository)?.DiscoveredServer.Builder.ConnectionString; - globalOptions.RDMPOptions.DataExportConnectionString = (dataExportRepository as TableRepository)?.DiscoveredServer.Builder.ConnectionString; + globalOptions.RDMPOptions!.CatalogueConnectionString = (catalogueRepository as TableRepository)?.DiscoveredServer.Builder.ConnectionString; + globalOptions.RDMPOptions.DataExportConnectionString = (dataExportRepository as TableRepository)?.DiscoveredServer.Builder.ConnectionString; - globalOptions.DicomRelationalMapperOptions!.LoadMetadataId = LoadMetadata.ID; - globalOptions.DicomRelationalMapperOptions.MinimumBatchSize = 1; - globalOptions.DicomRelationalMapperOptions.UseInsertIntoForRAWMigration = true; + globalOptions.DicomRelationalMapperOptions!.LoadMetadataId = LoadMetadata.ID; + globalOptions.DicomRelationalMapperOptions.MinimumBatchSize = 1; + globalOptions.DicomRelationalMapperOptions.UseInsertIntoForRAWMigration = true; - //Image table now needs all the UIDs in order to be extractable - var adder = new TagColumnAdder("StudyInstanceUID", "varchar(100)", ImageTableInfo, new AcceptAllCheckNotifier()); - adder.Execute(); - } + //Image table now needs all the UIDs in order to be extractable + var adder = new TagColumnAdder("StudyInstanceUID", "varchar(100)", ImageTableInfo, new AcceptAllCheckNotifier()); + adder.Execute(); + } - private static ImageTableTemplateCollection GetDefaultTemplate(FAnsi.DatabaseType databaseType) - { - var collection = ImageTableTemplateCollection.LoadFrom(DefaultTemplateYaml); - collection.DatabaseType = databaseType; - return collection; - } + private static ImageTableTemplateCollection GetDefaultTemplate(FAnsi.DatabaseType databaseType) + { + var collection = ImageTableTemplateCollection.LoadFrom(DefaultTemplateYaml); + collection.DatabaseType = databaseType; + return collection; + } - public void 
TruncateTablesIfExists() - { - foreach (var t in new[] { ImageTable, SeriesTable, StudyTable }) - if (t != null && t.Exists()) - t.Truncate(); - } + public void TruncateTablesIfExists() + { + foreach (var t in new[] { ImageTable, SeriesTable, StudyTable }) + if (t != null && t.Exists()) + t.Truncate(); + } - public static DicomFileMessage GetDicomFileMessage(string fileSystemRoot, FileInfo fi) + public static DicomFileMessage GetDicomFileMessage(string fileSystemRoot, FileInfo fi) + { + var toReturn = new DicomFileMessage(fileSystemRoot, fi) { - var toReturn = new DicomFileMessage(fileSystemRoot, fi) - { - StudyInstanceUID = "999", - SeriesInstanceUID = "999", - SOPInstanceUID = "999", - DicomFileSize = fi.Length - }; + StudyInstanceUID = "999", + SeriesInstanceUID = "999", + SOPInstanceUID = "999", + DicomFileSize = fi.Length + }; - var ds = DicomFile.Open(fi.FullName).Dataset; - ds.Remove(DicomTag.PixelData); + var ds = DicomFile.Open(fi.FullName).Dataset; + ds.Remove(DicomTag.PixelData); - toReturn.DicomDataset = DicomTypeTranslater.SerializeDatasetToJson(ds); + toReturn.DicomDataset = DicomTypeTranslater.SerializeDatasetToJson(ds); - return toReturn; - } - public static DicomFileMessage GetDicomFileMessage(DicomDataset ds, string fileSystemRoot, string file) + return toReturn; + } + public static DicomFileMessage GetDicomFileMessage(DicomDataset ds, string fileSystemRoot, string file) + { + var toReturn = new DicomFileMessage(fileSystemRoot, file) { - var toReturn = new DicomFileMessage(fileSystemRoot, file) - { - StudyInstanceUID = "999", - SeriesInstanceUID = "999", - SOPInstanceUID = "999" - }; + StudyInstanceUID = "999", + SeriesInstanceUID = "999", + SOPInstanceUID = "999" + }; - ds.Remove(DicomTag.PixelData); + ds.Remove(DicomTag.PixelData); - toReturn.DicomDataset = DicomTypeTranslater.SerializeDatasetToJson(ds); + toReturn.DicomDataset = DicomTypeTranslater.SerializeDatasetToJson(ds); - return toReturn; - } + return toReturn; + } - const string 
DefaultTemplateYaml = - @"Tables: + const string DefaultTemplateYaml = + @"Tables: - TableName: StudyTable Columns: - ColumnName: PatientID @@ -280,5 +280,4 @@ public static DicomFileMessage GetDicomFileMessage(DicomDataset ds, string fileS Type: CSharpType: System.Int64 "; - } } diff --git a/tests/SmiServices.UnitTests/Microservices/DicomRelationalMapper/Namers/GuidDatabaseNamerTests.cs b/tests/SmiServices.UnitTests/Microservices/DicomRelationalMapper/Namers/GuidDatabaseNamerTests.cs index cbe831ce2..f9b2c2822 100644 --- a/tests/SmiServices.UnitTests/Microservices/DicomRelationalMapper/Namers/GuidDatabaseNamerTests.cs +++ b/tests/SmiServices.UnitTests/Microservices/DicomRelationalMapper/Namers/GuidDatabaseNamerTests.cs @@ -3,27 +3,26 @@ using SmiServices.Microservices.DicomRelationalMapper.Namers; using System; -namespace SmiServices.UnitTests.Microservices.DicomRelationalMapper.Namers +namespace SmiServices.UnitTests.Microservices.DicomRelationalMapper.Namers; + +public class GuidDatabaseNamerTests { - public class GuidDatabaseNamerTests + [Test] + public void GetExampleName() { - [Test] - public void GetExampleName() - { - //t6ff062af5538473f801ced2b751c7897test_RAW - //t6ff062af5538473f801ced2b751c7897DLE_STAGING - var namer = new GuidDatabaseNamer("test", new Guid("6ff062af-5538-473f-801c-ed2b751c7897")); - - var raw = namer.GetDatabaseName("test", LoadBubble.Raw); - Console.WriteLine(raw); + //t6ff062af5538473f801ced2b751c7897test_RAW + //t6ff062af5538473f801ced2b751c7897DLE_STAGING + var namer = new GuidDatabaseNamer("test", new Guid("6ff062af-5538-473f-801c-ed2b751c7897")); - Assert.That(raw, Does.Contain("6ff")); + var raw = namer.GetDatabaseName("test", LoadBubble.Raw); + Console.WriteLine(raw); - var staging = namer.GetDatabaseName("test", LoadBubble.Staging); - Console.WriteLine(staging); + Assert.That(raw, Does.Contain("6ff")); - Assert.That(staging, Does.Contain("6ff")); - } + var staging = namer.GetDatabaseName("test", LoadBubble.Staging); + 
Console.WriteLine(staging); + Assert.That(staging, Does.Contain("6ff")); } + } diff --git a/tests/SmiServices.UnitTests/Microservices/DicomReprocessor/DicomFileProcessorTest.cs b/tests/SmiServices.UnitTests/Microservices/DicomReprocessor/DicomFileProcessorTest.cs index 6afaf0908..c948467dd 100644 --- a/tests/SmiServices.UnitTests/Microservices/DicomReprocessor/DicomFileProcessorTest.cs +++ b/tests/SmiServices.UnitTests/Microservices/DicomReprocessor/DicomFileProcessorTest.cs @@ -9,54 +9,53 @@ using SmiServices.Common.Options; using SmiServices.Microservices.DicomReprocessor; -namespace SmiServices.UnitTests.Microservices.DicomReprocessor +namespace SmiServices.UnitTests.Microservices.DicomReprocessor; + +public class DicomFileProcessorTest { - public class DicomFileProcessorTest - { - #region Fixture Methods + #region Fixture Methods - [OneTimeSetUp] - public void OneTimeSetUp() { } + [OneTimeSetUp] + public void OneTimeSetUp() { } - [OneTimeTearDown] - public void OneTimeTearDown() { } + [OneTimeTearDown] + public void OneTimeTearDown() { } - #endregion + #endregion - #region Test Methods + #region Test Methods - [SetUp] - public void SetUp() { } + [SetUp] + public void SetUp() { } - [TearDown] - public void TearDown() { } + [TearDown] + public void TearDown() { } - #endregion + #endregion - #region Tests + #region Tests - [Test] - public void ProcessDocument_NationalPacsAccessionNumber_IsIgnored() + [Test] + public void ProcessDocument_NationalPacsAccessionNumber_IsIgnored() + { + var processor = new DicomFileProcessor(Mock.Of(), ""); + + var msg = new DicomFileMessage { - var processor = new DicomFileProcessor(Mock.Of(), ""); - - var msg = new DicomFileMessage - { - DicomFilePath = "foo", - DicomFileSize = 123, - }; - BsonDocument bsonHeader = MongoDocumentHeaders.ImageDocumentHeader(msg, new MessageHeader()); - bsonHeader.Add("NationalPACSAccessionNumber", "foo"); - BsonDocument datasetDoc = DicomTypeTranslaterReader.BuildBsonDocument([]); - - BsonDocument 
document = new BsonDocument() - .Add("_id", "foo") - .Add("header", bsonHeader) - .AddRange(datasetDoc); - - processor.ProcessDocument(document); - } - - #endregion + DicomFilePath = "foo", + DicomFileSize = 123, + }; + BsonDocument bsonHeader = MongoDocumentHeaders.ImageDocumentHeader(msg, new MessageHeader()); + bsonHeader.Add("NationalPACSAccessionNumber", "foo"); + BsonDocument datasetDoc = DicomTypeTranslaterReader.BuildBsonDocument([]); + + BsonDocument document = new BsonDocument() + .Add("_id", "foo") + .Add("header", bsonHeader) + .AddRange(datasetDoc); + + processor.ProcessDocument(document); } + + #endregion } diff --git a/tests/SmiServices.UnitTests/Microservices/DicomReprocessor/DicomReprocessorCliOptionsTests.cs b/tests/SmiServices.UnitTests/Microservices/DicomReprocessor/DicomReprocessorCliOptionsTests.cs index 42fb0e8cd..70d238b50 100644 --- a/tests/SmiServices.UnitTests/Microservices/DicomReprocessor/DicomReprocessorCliOptionsTests.cs +++ b/tests/SmiServices.UnitTests/Microservices/DicomReprocessor/DicomReprocessorCliOptionsTests.cs @@ -3,42 +3,41 @@ using SmiServices.Microservices.DicomReprocessor; using System; -namespace SmiServices.UnitTests.Microservices.DicomReprocessor +namespace SmiServices.UnitTests.Microservices.DicomReprocessor; + +public class DicomReprocessorCliOptionsTests { - public class DicomReprocessorCliOptionsTests - { - #region Fixture Methods + #region Fixture Methods - [OneTimeSetUp] - public void OneTimeSetUp() { } + [OneTimeSetUp] + public void OneTimeSetUp() { } - [OneTimeTearDown] - public void OneTimeTearDown() { } + [OneTimeTearDown] + public void OneTimeTearDown() { } - #endregion + #endregion - #region Test Methods + #region Test Methods - [SetUp] - public void SetUp() { } + [SetUp] + public void SetUp() { } - [TearDown] - public void TearDown() { } + [TearDown] + public void TearDown() { } - #endregion + #endregion - #region Tests + #region Tests - [Test] - public void TestInvalidCollectionArgument() - { - 
Assert.Throws(() => - new DicomReprocessorCliOptions { SourceCollection = "database.collection" } - ); - } + [Test] + public void TestInvalidCollectionArgument() + { + Assert.Throws(() => + new DicomReprocessorCliOptions { SourceCollection = "database.collection" } + ); + } - #endregion + #endregion - } } diff --git a/tests/SmiServices.UnitTests/Microservices/DicomTagReader/DicomTagReaderTestHelper.cs b/tests/SmiServices.UnitTests/Microservices/DicomTagReader/DicomTagReaderTestHelper.cs index ee92ac08d..1d5041548 100644 --- a/tests/SmiServices.UnitTests/Microservices/DicomTagReader/DicomTagReaderTestHelper.cs +++ b/tests/SmiServices.UnitTests/Microservices/DicomTagReader/DicomTagReaderTestHelper.cs @@ -10,105 +10,104 @@ using System.IO.Abstractions.TestingHelpers; -namespace SmiServices.UnitTests.Microservices.DicomTagReader +namespace SmiServices.UnitTests.Microservices.DicomTagReader; + +public class DicomTagReaderTestHelper { - public class DicomTagReaderTestHelper - { - private const string TestSeriesQueueName = "TEST.SeriesQueue"; - private const string TestImageQueueName = "TEST.ImageQueue"; + private const string TestSeriesQueueName = "TEST.SeriesQueue"; + private const string TestImageQueueName = "TEST.ImageQueue"; - public readonly ILogger MockLogger = Mock.Of(); + public readonly ILogger MockLogger = Mock.Of(); - public ConsumerOptions AccessionConsumerOptions = null!; + public ConsumerOptions AccessionConsumerOptions = null!; - public AccessionDirectoryMessage TestAccessionDirectoryMessage = null!; + public AccessionDirectoryMessage TestAccessionDirectoryMessage = null!; - private IConnection _testConnection = null!; - private IModel _testModel = null!; + private IConnection _testConnection = null!; + private IModel _testModel = null!; - public Mock TestSeriesModel = null!; - public Mock TestImageModel = null!; + public Mock TestSeriesModel = null!; + public Mock TestImageModel = null!; - public MockFileSystem MockFileSystem = null!; - public 
IMicroserviceHost MockHost = null!; + public MockFileSystem MockFileSystem = null!; + public IMicroserviceHost MockHost = null!; - public DirectoryInfo TestDir = null!; - public GlobalOptions Options = null!; + public DirectoryInfo TestDir = null!; + public GlobalOptions Options = null!; - /// - /// Returns the number of image messages in - /// - public uint ImageCount => _testModel.MessageCount(TestImageQueueName); + /// + /// Returns the number of image messages in + /// + public uint ImageCount => _testModel.MessageCount(TestImageQueueName); - /// - /// Returns the number of series messages in - /// - public uint SeriesCount => _testModel.MessageCount(TestSeriesQueueName); + /// + /// Returns the number of series messages in + /// + public uint SeriesCount => _testModel.MessageCount(TestSeriesQueueName); - public void SetUpSuite() - { - SetUpDefaults(); + public void SetUpSuite() + { + SetUpDefaults(); - // Create the test Series/Image exchanges - var tester = new MicroserviceTester(Options.RabbitOptions!); - tester.CreateExchange(Options.DicomTagReaderOptions!.ImageProducerOptions!.ExchangeName!, TestImageQueueName); - tester.CreateExchange(Options.DicomTagReaderOptions.SeriesProducerOptions!.ExchangeName!, TestSeriesQueueName); - tester.CreateExchange(Options.RabbitOptions!.FatalLoggingExchange!, null); - tester.Shutdown(); + // Create the test Series/Image exchanges + var tester = new MicroserviceTester(Options.RabbitOptions!); + tester.CreateExchange(Options.DicomTagReaderOptions!.ImageProducerOptions!.ExchangeName!, TestImageQueueName); + tester.CreateExchange(Options.DicomTagReaderOptions.SeriesProducerOptions!.ExchangeName!, TestSeriesQueueName); + tester.CreateExchange(Options.RabbitOptions!.FatalLoggingExchange!, null); + tester.Shutdown(); - _testConnection = Options.RabbitOptions.Connection; + _testConnection = Options.RabbitOptions.Connection; - _testModel = _testConnection.CreateModel(); + _testModel = _testConnection.CreateModel(); - MockHost = 
Mock.Of(); - } + MockHost = Mock.Of(); + } - public void ResetSuite() - { - SetUpDefaults(); + public void ResetSuite() + { + SetUpDefaults(); - _testModel.QueuePurge(TestSeriesQueueName); - _testModel.QueuePurge(TestImageQueueName); - } + _testModel.QueuePurge(TestSeriesQueueName); + _testModel.QueuePurge(TestImageQueueName); + } - private void SetUpDefaults() - { - Options = new GlobalOptionsFactory().Load(nameof(DicomTagReaderTestHelper)); + private void SetUpDefaults() + { + Options = new GlobalOptionsFactory().Load(nameof(DicomTagReaderTestHelper)); - AccessionConsumerOptions = Options.DicomTagReaderOptions!; + AccessionConsumerOptions = Options.DicomTagReaderOptions!; - TestAccessionDirectoryMessage = new AccessionDirectoryMessage - { - DirectoryPath = @"C:\Temp\", - }; + TestAccessionDirectoryMessage = new AccessionDirectoryMessage + { + DirectoryPath = @"C:\Temp\", + }; - TestSeriesModel = new Mock(); - TestImageModel = new Mock(); + TestSeriesModel = new Mock(); + TestImageModel = new Mock(); - MockFileSystem = new MockFileSystem(); - MockFileSystem.AddDirectory(@"C:\Temp"); + MockFileSystem = new MockFileSystem(); + MockFileSystem.AddDirectory(@"C:\Temp"); - TestDir = new DirectoryInfo("DicomTagReaderTests"); - TestDir.Create(); + TestDir = new DirectoryInfo("DicomTagReaderTests"); + TestDir.Create(); - foreach (FileInfo f in TestDir.GetFiles()) - f.Delete(); + foreach (FileInfo f in TestDir.GetFiles()) + f.Delete(); - new TestData().Create(new FileInfo(Path.Combine(TestDir.FullName, "MyTestFile.dcm"))); - } + new TestData().Create(new FileInfo(Path.Combine(TestDir.FullName, "MyTestFile.dcm"))); + } - public bool CheckQueues(int nInSeriesQueue, int nInImageQueue) - { - return - _testModel.MessageCount(TestSeriesQueueName) == nInSeriesQueue && - _testModel.MessageCount(TestImageQueueName) == nInImageQueue; - } + public bool CheckQueues(int nInSeriesQueue, int nInImageQueue) + { + return + _testModel.MessageCount(TestSeriesQueueName) == nInSeriesQueue && + 
_testModel.MessageCount(TestImageQueueName) == nInImageQueue; + } - public void Dispose() - { - _testModel.Close(); - _testConnection.Close(); - } + public void Dispose() + { + _testModel.Close(); + _testConnection.Close(); } } diff --git a/tests/SmiServices.UnitTests/Microservices/FileCopier/FileCopierTest.cs b/tests/SmiServices.UnitTests/Microservices/FileCopier/FileCopierTest.cs index 187748000..c785f00b1 100644 --- a/tests/SmiServices.UnitTests/Microservices/FileCopier/FileCopierTest.cs +++ b/tests/SmiServices.UnitTests/Microservices/FileCopier/FileCopierTest.cs @@ -9,179 +9,178 @@ using System; using System.IO.Abstractions.TestingHelpers; -namespace SmiServices.UnitTests.Microservices.FileCopier +namespace SmiServices.UnitTests.Microservices.FileCopier; + +public class FileCopierTest { - public class FileCopierTest - { - private FileCopierOptions _options = null!; + private FileCopierOptions _options = null!; - private MockFileSystem _mockFileSystem = null!; - private const string FileSystemRoot = "PACS"; - private const string ExtractRoot = "extract"; - private string _relativeSrc = null!; - private readonly byte[] _expectedContents = [0b00, 0b01, 0b10, 0b11]; - private ExtractFileMessage _requestMessage = null!; + private MockFileSystem _mockFileSystem = null!; + private const string FileSystemRoot = "PACS"; + private const string ExtractRoot = "extract"; + private string _relativeSrc = null!; + private readonly byte[] _expectedContents = [0b00, 0b01, 0b10, 0b11]; + private ExtractFileMessage _requestMessage = null!; - #region Fixture Methods + #region Fixture Methods - [OneTimeSetUp] - public void OneTimeSetUp() + [OneTimeSetUp] + public void OneTimeSetUp() + { + _options = new FileCopierOptions { - _options = new FileCopierOptions - { - NoVerifyRoutingKey = "noverify", - }; - - _mockFileSystem = new MockFileSystem(); - _mockFileSystem.Directory.CreateDirectory(FileSystemRoot); - _mockFileSystem.Directory.CreateDirectory(ExtractRoot); - _relativeSrc = 
_mockFileSystem.Path.Combine("input", "a.dcm"); - string src = _mockFileSystem.Path.Combine(FileSystemRoot, _relativeSrc); - _mockFileSystem.Directory.CreateDirectory(_mockFileSystem.Directory.GetParent(src)!.FullName); - _mockFileSystem.File.WriteAllBytes(src, _expectedContents); - } + NoVerifyRoutingKey = "noverify", + }; + + _mockFileSystem = new MockFileSystem(); + _mockFileSystem.Directory.CreateDirectory(FileSystemRoot); + _mockFileSystem.Directory.CreateDirectory(ExtractRoot); + _relativeSrc = _mockFileSystem.Path.Combine("input", "a.dcm"); + string src = _mockFileSystem.Path.Combine(FileSystemRoot, _relativeSrc); + _mockFileSystem.Directory.CreateDirectory(_mockFileSystem.Directory.GetParent(src)!.FullName); + _mockFileSystem.File.WriteAllBytes(src, _expectedContents); + } - [OneTimeTearDown] - public void OneTimeTearDown() { } + [OneTimeTearDown] + public void OneTimeTearDown() { } - #endregion + #endregion - #region Test Methods + #region Test Methods - [SetUp] - public void SetUp() + [SetUp] + public void SetUp() + { + _requestMessage = new ExtractFileMessage { - _requestMessage = new ExtractFileMessage - { - JobSubmittedAt = DateTime.UtcNow, - ExtractionJobIdentifier = Guid.NewGuid(), - ProjectNumber = "123", - ExtractionDirectory = "proj1", - DicomFilePath = _relativeSrc, - OutputPath = "out.dcm", - }; - } + JobSubmittedAt = DateTime.UtcNow, + ExtractionJobIdentifier = Guid.NewGuid(), + ProjectNumber = "123", + ExtractionDirectory = "proj1", + DicomFilePath = _relativeSrc, + OutputPath = "out.dcm", + }; + } - [TearDown] - public void TearDown() { } + [TearDown] + public void TearDown() { } - #endregion + #endregion - #region Tests + #region Tests - [Test] - public void Test_FileCopier_HappyPath() - { - var mockProducerModel = new Mock(MockBehavior.Strict); - ExtractedFileStatusMessage? sentStatusMessage = null; - string? 
sentRoutingKey = null; - mockProducerModel - .Setup(x => x.SendMessage(It.IsAny(), It.IsAny(), It.IsAny())) - .Callback((IMessage message, IMessageHeader header, string routingKey) => - { - sentStatusMessage = (ExtractedFileStatusMessage)message; - sentRoutingKey = routingKey; - }) - .Returns(() => null!); - - var requestHeader = new MessageHeader(); - - var copier = new ExtractionFileCopier(_options, mockProducerModel.Object, FileSystemRoot, ExtractRoot, _mockFileSystem); - copier.ProcessMessage(_requestMessage, requestHeader); - - var expectedStatusMessage = new ExtractedFileStatusMessage(_requestMessage) - { - DicomFilePath = _requestMessage.DicomFilePath, - Status = ExtractedFileStatus.Copied, - OutputFilePath = _requestMessage.OutputPath, - }; - Assert.Multiple(() => + [Test] + public void Test_FileCopier_HappyPath() + { + var mockProducerModel = new Mock(MockBehavior.Strict); + ExtractedFileStatusMessage? sentStatusMessage = null; + string? sentRoutingKey = null; + mockProducerModel + .Setup(x => x.SendMessage(It.IsAny(), It.IsAny(), It.IsAny())) + .Callback((IMessage message, IMessageHeader header, string routingKey) => { - Assert.That(sentStatusMessage, Is.EqualTo(expectedStatusMessage)); - Assert.That(sentRoutingKey, Is.EqualTo(_options.NoVerifyRoutingKey)); - }); + sentStatusMessage = (ExtractedFileStatusMessage)message; + sentRoutingKey = routingKey; + }) + .Returns(() => null!); - string expectedDest = _mockFileSystem.Path.Combine(ExtractRoot, _requestMessage.ExtractionDirectory, "out.dcm"); - Assert.Multiple(() => - { - Assert.That(_mockFileSystem.File.Exists(expectedDest), Is.True); - Assert.That(_mockFileSystem.File.ReadAllBytes(expectedDest), Is.EqualTo(_expectedContents)); - }); - } + var requestHeader = new MessageHeader(); - [Test] - public void Test_FileCopier_MissingFile_SendsMessage() + var copier = new ExtractionFileCopier(_options, mockProducerModel.Object, FileSystemRoot, ExtractRoot, _mockFileSystem); + 
copier.ProcessMessage(_requestMessage, requestHeader); + + var expectedStatusMessage = new ExtractedFileStatusMessage(_requestMessage) { - var mockProducerModel = new Mock(MockBehavior.Strict); - ExtractedFileStatusMessage? sentStatusMessage = null; - string? sentRoutingKey = null; - mockProducerModel - .Setup(x => x.SendMessage(It.IsAny(), It.IsAny(), It.IsAny())) - .Callback((IMessage message, IMessageHeader header, string routingKey) => - { - sentStatusMessage = (ExtractedFileStatusMessage)message; - sentRoutingKey = routingKey; - }) - .Returns(() => null!); - - _requestMessage.DicomFilePath = "missing.dcm"; - var requestHeader = new MessageHeader(); - - var copier = new ExtractionFileCopier(_options, mockProducerModel.Object, FileSystemRoot, ExtractRoot, _mockFileSystem); - copier.ProcessMessage(_requestMessage, requestHeader); - - var expectedStatusMessage = new ExtractedFileStatusMessage(_requestMessage) - { - DicomFilePath = _requestMessage.DicomFilePath, - Status = ExtractedFileStatus.FileMissing, - OutputFilePath = null, - StatusMessage = $"Could not find '{_mockFileSystem.Path.Combine(FileSystemRoot, "missing.dcm")}'" - }; - Assert.Multiple(() => - { - Assert.That(sentStatusMessage, Is.EqualTo(expectedStatusMessage)); - Assert.That(sentRoutingKey, Is.EqualTo(_options.NoVerifyRoutingKey)); - }); - } + DicomFilePath = _requestMessage.DicomFilePath, + Status = ExtractedFileStatus.Copied, + OutputFilePath = _requestMessage.OutputPath, + }; + Assert.Multiple(() => + { + Assert.That(sentStatusMessage, Is.EqualTo(expectedStatusMessage)); + Assert.That(sentRoutingKey, Is.EqualTo(_options.NoVerifyRoutingKey)); + }); - [Test] - public void Test_FileCopier_ExistingOutputFile_IsOverwritten() + string expectedDest = _mockFileSystem.Path.Combine(ExtractRoot, _requestMessage.ExtractionDirectory, "out.dcm"); + Assert.Multiple(() => { - var mockProducerModel = new Mock(MockBehavior.Strict); - ExtractedFileStatusMessage? sentStatusMessage = null; - string? 
sentRoutingKey = null; - mockProducerModel - .Setup(x => x.SendMessage(It.IsAny(), It.IsAny(), It.IsAny())) - .Callback((IMessage message, IMessageHeader header, string routingKey) => - { - sentStatusMessage = (ExtractedFileStatusMessage)message; - sentRoutingKey = routingKey; - }) - .Returns(() => null!); - - var requestHeader = new MessageHeader(); - string expectedDest = _mockFileSystem.Path.Combine(ExtractRoot, _requestMessage.ExtractionDirectory, "out.dcm"); - _mockFileSystem.Directory.GetParent(expectedDest)!.Create(); - _mockFileSystem.File.WriteAllBytes(expectedDest, [0b0]); - - var copier = new ExtractionFileCopier(_options, mockProducerModel.Object, FileSystemRoot, ExtractRoot, _mockFileSystem); - copier.ProcessMessage(_requestMessage, requestHeader); - - var expectedStatusMessage = new ExtractedFileStatusMessage(_requestMessage) + Assert.That(_mockFileSystem.File.Exists(expectedDest), Is.True); + Assert.That(_mockFileSystem.File.ReadAllBytes(expectedDest), Is.EqualTo(_expectedContents)); + }); + } + + [Test] + public void Test_FileCopier_MissingFile_SendsMessage() + { + var mockProducerModel = new Mock(MockBehavior.Strict); + ExtractedFileStatusMessage? sentStatusMessage = null; + string? 
sentRoutingKey = null; + mockProducerModel + .Setup(x => x.SendMessage(It.IsAny(), It.IsAny(), It.IsAny())) + .Callback((IMessage message, IMessageHeader header, string routingKey) => { - DicomFilePath = _requestMessage.DicomFilePath, - Status = ExtractedFileStatus.Copied, - OutputFilePath = _requestMessage.OutputPath, - StatusMessage = null, - }; - Assert.Multiple(() => + sentStatusMessage = (ExtractedFileStatusMessage)message; + sentRoutingKey = routingKey; + }) + .Returns(() => null!); + + _requestMessage.DicomFilePath = "missing.dcm"; + var requestHeader = new MessageHeader(); + + var copier = new ExtractionFileCopier(_options, mockProducerModel.Object, FileSystemRoot, ExtractRoot, _mockFileSystem); + copier.ProcessMessage(_requestMessage, requestHeader); + + var expectedStatusMessage = new ExtractedFileStatusMessage(_requestMessage) + { + DicomFilePath = _requestMessage.DicomFilePath, + Status = ExtractedFileStatus.FileMissing, + OutputFilePath = null, + StatusMessage = $"Could not find '{_mockFileSystem.Path.Combine(FileSystemRoot, "missing.dcm")}'" + }; + Assert.Multiple(() => + { + Assert.That(sentStatusMessage, Is.EqualTo(expectedStatusMessage)); + Assert.That(sentRoutingKey, Is.EqualTo(_options.NoVerifyRoutingKey)); + }); + } + + [Test] + public void Test_FileCopier_ExistingOutputFile_IsOverwritten() + { + var mockProducerModel = new Mock(MockBehavior.Strict); + ExtractedFileStatusMessage? sentStatusMessage = null; + string? 
sentRoutingKey = null; + mockProducerModel + .Setup(x => x.SendMessage(It.IsAny(), It.IsAny(), It.IsAny())) + .Callback((IMessage message, IMessageHeader header, string routingKey) => { - Assert.That(sentStatusMessage, Is.EqualTo(expectedStatusMessage)); - Assert.That(sentRoutingKey, Is.EqualTo(_options.NoVerifyRoutingKey)); - Assert.That(_mockFileSystem.File.ReadAllBytes(expectedDest), Is.EqualTo(_expectedContents)); - }); - } + sentStatusMessage = (ExtractedFileStatusMessage)message; + sentRoutingKey = routingKey; + }) + .Returns(() => null!); + + var requestHeader = new MessageHeader(); + string expectedDest = _mockFileSystem.Path.Combine(ExtractRoot, _requestMessage.ExtractionDirectory, "out.dcm"); + _mockFileSystem.Directory.GetParent(expectedDest)!.Create(); + _mockFileSystem.File.WriteAllBytes(expectedDest, [0b0]); - #endregion + var copier = new ExtractionFileCopier(_options, mockProducerModel.Object, FileSystemRoot, ExtractRoot, _mockFileSystem); + copier.ProcessMessage(_requestMessage, requestHeader); + + var expectedStatusMessage = new ExtractedFileStatusMessage(_requestMessage) + { + DicomFilePath = _requestMessage.DicomFilePath, + Status = ExtractedFileStatus.Copied, + OutputFilePath = _requestMessage.OutputPath, + StatusMessage = null, + }; + Assert.Multiple(() => + { + Assert.That(sentStatusMessage, Is.EqualTo(expectedStatusMessage)); + Assert.That(sentRoutingKey, Is.EqualTo(_options.NoVerifyRoutingKey)); + Assert.That(_mockFileSystem.File.ReadAllBytes(expectedDest), Is.EqualTo(_expectedContents)); + }); } + + #endregion } diff --git a/tests/SmiServices.UnitTests/Microservices/FileCopier/FileCopyQueueConsumerTest.cs b/tests/SmiServices.UnitTests/Microservices/FileCopier/FileCopyQueueConsumerTest.cs index 3eda35ff4..48b250722 100644 --- a/tests/SmiServices.UnitTests/Microservices/FileCopier/FileCopyQueueConsumerTest.cs +++ b/tests/SmiServices.UnitTests/Microservices/FileCopier/FileCopyQueueConsumerTest.cs @@ -9,119 +9,118 @@ using System; -namespace 
SmiServices.UnitTests.Microservices.FileCopier +namespace SmiServices.UnitTests.Microservices.FileCopier; + +public class FileCopyQueueConsumerTest { - public class FileCopyQueueConsumerTest - { - #region Fixture Methods + #region Fixture Methods - private ExtractFileMessage _message = null!; - private Mock _mockFileCopier = null!; + private ExtractFileMessage _message = null!; + private Mock _mockFileCopier = null!; - [OneTimeSetUp] - public void OneTimeSetUp() - { - } + [OneTimeSetUp] + public void OneTimeSetUp() + { + } - [OneTimeTearDown] - public void OneTimeTearDown() { } + [OneTimeTearDown] + public void OneTimeTearDown() { } - #endregion + #endregion - #region Test Methods + #region Test Methods - [SetUp] - public void SetUp() - { - _message = new ExtractFileMessage - { - JobSubmittedAt = DateTime.UtcNow, - ExtractionJobIdentifier = Guid.NewGuid(), - ProjectNumber = "1234", - ExtractionDirectory = "foo", - DicomFilePath = "foo.dcm", - IsIdentifiableExtraction = true, - OutputPath = "bar", - }; - - _mockFileCopier = new Mock(MockBehavior.Strict); - _mockFileCopier.Setup(x => x.ProcessMessage(It.IsAny(), It.IsAny())); - } - - [TearDown] - public void TearDown() { } - - #endregion - - #region Tests - - [Test] - public void Test_FileCopyQueueConsumer_ValidMessage_IsAcked() + [SetUp] + public void SetUp() + { + _message = new ExtractFileMessage { - var consumer = new FileCopyQueueConsumer(_mockFileCopier.Object); + JobSubmittedAt = DateTime.UtcNow, + ExtractionJobIdentifier = Guid.NewGuid(), + ProjectNumber = "1234", + ExtractionDirectory = "foo", + DicomFilePath = "foo.dcm", + IsIdentifiableExtraction = true, + OutputPath = "bar", + }; + + _mockFileCopier = new Mock(MockBehavior.Strict); + _mockFileCopier.Setup(x => x.ProcessMessage(It.IsAny(), It.IsAny())); + } - consumer.ProcessMessage(new MessageHeader(), _message, 1); + [TearDown] + public void TearDown() { } - TestTimelineAwaiter.Await(() => consumer.AckCount == 1 && consumer.NackCount == 0); - } + 
#endregion - [Test] - public void Test_FileCopyQueueConsumer_ApplicationException_IsNacked() - { - _mockFileCopier.Reset(); - _mockFileCopier.Setup(x => x.ProcessMessage(It.IsAny(), It.IsAny())).Throws(); + #region Tests + + [Test] + public void Test_FileCopyQueueConsumer_ValidMessage_IsAcked() + { + var consumer = new FileCopyQueueConsumer(_mockFileCopier.Object); - var consumer = new FileCopyQueueConsumer(_mockFileCopier.Object); + consumer.ProcessMessage(new MessageHeader(), _message, 1); - consumer.ProcessMessage(new MessageHeader(), _message, 1); + TestTimelineAwaiter.Await(() => consumer.AckCount == 1 && consumer.NackCount == 0); + } - TestTimelineAwaiter.Await(() => consumer.AckCount == 0 && consumer.NackCount == 1); - } + [Test] + public void Test_FileCopyQueueConsumer_ApplicationException_IsNacked() + { + _mockFileCopier.Reset(); + _mockFileCopier.Setup(x => x.ProcessMessage(It.IsAny(), It.IsAny())).Throws(); - [Test] - public void Test_FileCopyQueueConsumer_UnknownException_CallsFatalCallback() - { - _mockFileCopier.Reset(); - _mockFileCopier.Setup(x => x.ProcessMessage(It.IsAny(), It.IsAny())).Throws(); + var consumer = new FileCopyQueueConsumer(_mockFileCopier.Object); - var consumer = new FileCopyQueueConsumer(_mockFileCopier.Object); + consumer.ProcessMessage(new MessageHeader(), _message, 1); - var fatalCalled = false; - consumer.OnFatal += (sender, _) => fatalCalled = true; + TestTimelineAwaiter.Await(() => consumer.AckCount == 0 && consumer.NackCount == 1); + } + + [Test] + public void Test_FileCopyQueueConsumer_UnknownException_CallsFatalCallback() + { + _mockFileCopier.Reset(); + _mockFileCopier.Setup(x => x.ProcessMessage(It.IsAny(), It.IsAny())).Throws(); - consumer.ProcessMessage(new MessageHeader(), _message, 1); + var consumer = new FileCopyQueueConsumer(_mockFileCopier.Object); - TestTimelineAwaiter.Await(() => fatalCalled, "Expected Fatal to be called"); - Assert.Multiple(() => - { - Assert.That(consumer.AckCount, Is.EqualTo(0)); - 
Assert.That(consumer.NackCount, Is.EqualTo(0)); - }); - } + var fatalCalled = false; + consumer.OnFatal += (sender, _) => fatalCalled = true; - [Test] - public void Test_FileCopyQueueConsumer_AnonExtraction_ThrowsException() + consumer.ProcessMessage(new MessageHeader(), _message, 1); + + TestTimelineAwaiter.Await(() => fatalCalled, "Expected Fatal to be called"); + Assert.Multiple(() => { - _message.IsIdentifiableExtraction = false; + Assert.That(consumer.AckCount, Is.EqualTo(0)); + Assert.That(consumer.NackCount, Is.EqualTo(0)); + }); + } - _mockFileCopier.Reset(); - _mockFileCopier.Setup(x => x.ProcessMessage(It.IsAny(), It.IsAny())).Throws(); + [Test] + public void Test_FileCopyQueueConsumer_AnonExtraction_ThrowsException() + { + _message.IsIdentifiableExtraction = false; - var consumer = new FileCopyQueueConsumer(_mockFileCopier.Object); + _mockFileCopier.Reset(); + _mockFileCopier.Setup(x => x.ProcessMessage(It.IsAny(), It.IsAny())).Throws(); - var fatalCalled = false; - consumer.OnFatal += (sender, _) => fatalCalled = true; + var consumer = new FileCopyQueueConsumer(_mockFileCopier.Object); - consumer.ProcessMessage(new MessageHeader(), _message, 1); + var fatalCalled = false; + consumer.OnFatal += (sender, _) => fatalCalled = true; - TestTimelineAwaiter.Await(() => fatalCalled, "Expected Fatal to be called"); - Assert.Multiple(() => - { - Assert.That(consumer.AckCount, Is.EqualTo(0)); - Assert.That(consumer.NackCount, Is.EqualTo(0)); - }); - } + consumer.ProcessMessage(new MessageHeader(), _message, 1); - #endregion + TestTimelineAwaiter.Await(() => fatalCalled, "Expected Fatal to be called"); + Assert.Multiple(() => + { + Assert.That(consumer.AckCount, Is.EqualTo(0)); + Assert.That(consumer.NackCount, Is.EqualTo(0)); + }); } + + #endregion } diff --git a/tests/SmiServices.UnitTests/Microservices/IdentifierMapper/IdentifierMapperUnitTests.cs b/tests/SmiServices.UnitTests/Microservices/IdentifierMapper/IdentifierMapperUnitTests.cs index a0b4a9403..2f07df87b 
100644 --- a/tests/SmiServices.UnitTests/Microservices/IdentifierMapper/IdentifierMapperUnitTests.cs +++ b/tests/SmiServices.UnitTests/Microservices/IdentifierMapper/IdentifierMapperUnitTests.cs @@ -3,33 +3,32 @@ using NUnit.Framework; using System.Linq; -namespace SmiServices.UnitTests.Microservices.IdentifierMapper +namespace SmiServices.UnitTests.Microservices.IdentifierMapper; + +public class IdentifierMapperUnitTests { - public class IdentifierMapperUnitTests + [Test] + public void Test_IdentifierMapper_LoggingCounts() { - [Test] - public void Test_IdentifierMapper_LoggingCounts() + MemoryTarget target = new() { - MemoryTarget target = new() - { - Layout = "${message}" - }; + Layout = "${message}" + }; - var mapper = new SwapForFixedValueTester("fish"); - Assert.Multiple(() => - { - Assert.That(mapper.GetSubstitutionFor("heyyy", out _), Is.EqualTo("fish").IgnoreCase); + var mapper = new SwapForFixedValueTester("fish"); + Assert.Multiple(() => + { + Assert.That(mapper.GetSubstitutionFor("heyyy", out _), Is.EqualTo("fish").IgnoreCase); - Assert.That(mapper.Success, Is.EqualTo(1)); - }); + Assert.That(mapper.Success, Is.EqualTo(1)); + }); - LogManager.Setup().LoadConfiguration(x => x.ForLogger(LogLevel.Debug).WriteTo(target)); + LogManager.Setup().LoadConfiguration(x => x.ForLogger(LogLevel.Debug).WriteTo(target)); - Logger logger = LogManager.GetLogger("Example"); + Logger logger = LogManager.GetLogger("Example"); - mapper.LogProgress(logger, LogLevel.Info); + mapper.LogProgress(logger, LogLevel.Info); - Assert.That(target.Logs.Single(), Does.StartWith("SwapForFixedValueTester: CacheRatio=1:0 SuccessRatio=1:0:0")); - } + Assert.That(target.Logs.Single(), Does.StartWith("SwapForFixedValueTester: CacheRatio=1:0 SuccessRatio=1:0:0")); } } diff --git a/tests/SmiServices.UnitTests/Microservices/IdentifierMapper/SwapForFixedValueTester.cs b/tests/SmiServices.UnitTests/Microservices/IdentifierMapper/SwapForFixedValueTester.cs index fdb8b364c..6899acfa5 100644 --- 
a/tests/SmiServices.UnitTests/Microservices/IdentifierMapper/SwapForFixedValueTester.cs +++ b/tests/SmiServices.UnitTests/Microservices/IdentifierMapper/SwapForFixedValueTester.cs @@ -6,38 +6,37 @@ using System.Threading; -namespace SmiServices.UnitTests.Microservices.IdentifierMapper +namespace SmiServices.UnitTests.Microservices.IdentifierMapper; + +public class SwapForFixedValueTester : SwapIdentifiers { - public class SwapForFixedValueTester : SwapIdentifiers - { - private readonly string? _swapForString; + private readonly string? _swapForString; - public SwapForFixedValueTester(string? swapForString) - { - _swapForString = swapForString; - } + public SwapForFixedValueTester(string? swapForString) + { + _swapForString = swapForString; + } - public override void Setup(IMappingTableOptions mappingTableOptions) { } + public override void Setup(IMappingTableOptions mappingTableOptions) { } - public override string? GetSubstitutionFor(string toSwap, out string? reason) - { - reason = null; - Success++; - CacheHit++; + public override string? GetSubstitutionFor(string toSwap, out string? reason) + { + reason = null; + Success++; + CacheHit++; - using (new TimeTracker(DatabaseStopwatch)) - Thread.Sleep(500); + using (new TimeTracker(DatabaseStopwatch)) + Thread.Sleep(500); - return _swapForString; - } + return _swapForString; + } - public override void ClearCache() { } + public override void ClearCache() { } - public override DiscoveredTable? GetGuidTableIfAny(IMappingTableOptions options) - { - return null; - } + public override DiscoveredTable? 
GetGuidTableIfAny(IMappingTableOptions options) + { + return null; } } diff --git a/tests/SmiServices.UnitTests/Microservices/IsIdentifiable/IsIdentifiableQueueConsumerTests.cs b/tests/SmiServices.UnitTests/Microservices/IsIdentifiable/IsIdentifiableQueueConsumerTests.cs index 59870a42d..78a5d6c82 100644 --- a/tests/SmiServices.UnitTests/Microservices/IsIdentifiable/IsIdentifiableQueueConsumerTests.cs +++ b/tests/SmiServices.UnitTests/Microservices/IsIdentifiable/IsIdentifiableQueueConsumerTests.cs @@ -15,257 +15,256 @@ using System.IO.Abstractions.TestingHelpers; using System.Linq.Expressions; -namespace SmiServices.UnitTests.Microservices.IsIdentifiable +namespace SmiServices.UnitTests.Microservices.IsIdentifiable; + +public class IsIdentifiableQueueConsumerTests { - public class IsIdentifiableQueueConsumerTests + #region Fixture Methods + + private MockFileSystem _mockFs = null!; + private IDirectoryInfo _extractRootDirInfo = null!; + private string _extractDir = null!; + ExtractedFileStatusMessage _extractedFileStatusMessage = null!; + FatalErrorEventArgs? _fatalArgs; + Mock _mockProducerModel = null!; + Expression> _expectedSendMessageCall = null!; + ExtractedFileVerificationMessage _response = null!; + + [OneTimeSetUp] + public void OneTimeSetUp() { - #region Fixture Methods - - private MockFileSystem _mockFs = null!; - private IDirectoryInfo _extractRootDirInfo = null!; - private string _extractDir = null!; - ExtractedFileStatusMessage _extractedFileStatusMessage = null!; - FatalErrorEventArgs? 
_fatalArgs; - Mock _mockProducerModel = null!; - Expression> _expectedSendMessageCall = null!; - ExtractedFileVerificationMessage _response = null!; - - [OneTimeSetUp] - public void OneTimeSetUp() - { - } + } - [OneTimeTearDown] - public void OneTimeTearDown() { } + [OneTimeTearDown] + public void OneTimeTearDown() { } - #endregion + #endregion - #region Test Methods + #region Test Methods - [SetUp] - public void SetUp() - { - _mockFs = new MockFileSystem(); - _extractRootDirInfo = _mockFs.Directory.CreateDirectory("extract"); - - var extractDirName = "extractDir"; - _extractDir = _mockFs.Path.Combine(_extractRootDirInfo.FullName, extractDirName); - _mockFs.Directory.CreateDirectory(_extractDir); - _mockFs.AddFile(_mockFs.Path.Combine(_extractDir, "foo-an.dcm"), null); - - _extractedFileStatusMessage = new ExtractedFileStatusMessage - { - DicomFilePath = "foo.dcm", - Status = ExtractedFileStatus.Anonymised, - ProjectNumber = "proj1", - ExtractionDirectory = extractDirName, - OutputFilePath = "foo-an.dcm", - }; - - _fatalArgs = null; - - _mockProducerModel = new Mock(MockBehavior.Strict); - _expectedSendMessageCall = x => x.SendMessage(It.IsAny(), It.IsAny(), null); - _mockProducerModel - .Setup(_expectedSendMessageCall) - .Callback((x, _, _) => _response = (ExtractedFileVerificationMessage)x) - .Returns(new MessageHeader()); - } - - [TearDown] - public void TearDown() { } - - private IsIdentifiableQueueConsumer GetNewIsIdentifiableQueueConsumer( - IProducerModel? mockProducerModel = null, - IClassifier? 
mockClassifier = null - ) + [SetUp] + public void SetUp() + { + _mockFs = new MockFileSystem(); + _extractRootDirInfo = _mockFs.Directory.CreateDirectory("extract"); + + var extractDirName = "extractDir"; + _extractDir = _mockFs.Path.Combine(_extractRootDirInfo.FullName, extractDirName); + _mockFs.Directory.CreateDirectory(_extractDir); + _mockFs.AddFile(_mockFs.Path.Combine(_extractDir, "foo-an.dcm"), null); + + _extractedFileStatusMessage = new ExtractedFileStatusMessage { - var consumer = new IsIdentifiableQueueConsumer( - mockProducerModel ?? new Mock(MockBehavior.Strict).Object, - _extractRootDirInfo.FullName, - mockClassifier ?? new Mock(MockBehavior.Strict).Object, - _mockFs - ); - consumer.OnFatal += (_, args) => _fatalArgs = args; - return consumer; - } + DicomFilePath = "foo.dcm", + Status = ExtractedFileStatus.Anonymised, + ProjectNumber = "proj1", + ExtractionDirectory = extractDirName, + OutputFilePath = "foo-an.dcm", + }; + + _fatalArgs = null; + + _mockProducerModel = new Mock(MockBehavior.Strict); + _expectedSendMessageCall = x => x.SendMessage(It.IsAny(), It.IsAny(), null); + _mockProducerModel + .Setup(_expectedSendMessageCall) + .Callback((x, _, _) => _response = (ExtractedFileVerificationMessage)x) + .Returns(new MessageHeader()); + } - #endregion + [TearDown] + public void TearDown() { } - #region Tests + private IsIdentifiableQueueConsumer GetNewIsIdentifiableQueueConsumer( + IProducerModel? mockProducerModel = null, + IClassifier? mockClassifier = null + ) + { + var consumer = new IsIdentifiableQueueConsumer( + mockProducerModel ?? new Mock(MockBehavior.Strict).Object, + _extractRootDirInfo.FullName, + mockClassifier ?? 
new Mock(MockBehavior.Strict).Object, + _mockFs + ); + consumer.OnFatal += (_, args) => _fatalArgs = args; + return consumer; + } - [Test] - public void Constructor_WhitespaceExtractionRoot_ThrowsException() - { - var exc = Assert.Throws(() => - { - new IsIdentifiableQueueConsumer( - new Mock().Object, - " ", - new Mock().Object - ); - }); - Assert.That(exc!.Message, Is.EqualTo("Argument cannot be null or whitespace (Parameter 'extractionRoot')")); - } - - [Test] - public void Constructor_MissingExtractRoot_ThrowsException() + #endregion + + #region Tests + + [Test] + public void Constructor_WhitespaceExtractionRoot_ThrowsException() + { + var exc = Assert.Throws(() => { - var mockFs = new MockFileSystem(); - - var exc = Assert.Throws(() => - { - new IsIdentifiableQueueConsumer( - new Mock().Object, - "foo", - new Mock().Object, - mockFs - ); - }); - Assert.That(exc!.Message, Is.EqualTo("Could not find the extraction root 'foo' in the filesystem")); - } - - [Test] - public void ProcessMessage_HappyPath_NoFailures() + new IsIdentifiableQueueConsumer( + new Mock().Object, + " ", + new Mock().Object + ); + }); + Assert.That(exc!.Message, Is.EqualTo("Argument cannot be null or whitespace (Parameter 'extractionRoot')")); + } + + [Test] + public void Constructor_MissingExtractRoot_ThrowsException() + { + var mockFs = new MockFileSystem(); + + var exc = Assert.Throws(() => { - // Arrange + new IsIdentifiableQueueConsumer( + new Mock().Object, + "foo", + new Mock().Object, + mockFs + ); + }); + Assert.That(exc!.Message, Is.EqualTo("Could not find the extraction root 'foo' in the filesystem")); + } - var mockClassifier = new Mock(MockBehavior.Strict); - mockClassifier.Setup(x => x.Classify(It.IsAny())).Returns([]); + [Test] + public void ProcessMessage_HappyPath_NoFailures() + { + // Arrange - var consumer = GetNewIsIdentifiableQueueConsumer(_mockProducerModel.Object, mockClassifier.Object); + var mockClassifier = new Mock(MockBehavior.Strict); + mockClassifier.Setup(x => 
x.Classify(It.IsAny())).Returns([]); - // Act + var consumer = GetNewIsIdentifiableQueueConsumer(_mockProducerModel.Object, mockClassifier.Object); - consumer.ProcessMessage(new MessageHeader(), _extractedFileStatusMessage, 1); + // Act - Assert.Multiple(() => - { - // Assert + consumer.ProcessMessage(new MessageHeader(), _extractedFileStatusMessage, 1); - Assert.That(consumer.NackCount, Is.EqualTo(0)); - Assert.That(consumer.AckCount, Is.EqualTo(1)); - }); - _mockProducerModel.Verify(_expectedSendMessageCall, Times.Once); - Assert.Multiple(() => - { - Assert.That(_response.Status, Is.EqualTo(VerifiedFileStatus.NotIdentifiable)); - Assert.That(_response.Report, Is.EqualTo("[]")); - }); - } + Assert.Multiple(() => + { + // Assert - [Test] - public void ProcessMessage_HappyPath_WithFailures() + Assert.That(consumer.NackCount, Is.EqualTo(0)); + Assert.That(consumer.AckCount, Is.EqualTo(1)); + }); + _mockProducerModel.Verify(_expectedSendMessageCall, Times.Once); + Assert.Multiple(() => { - // Arrange + Assert.That(_response.Status, Is.EqualTo(VerifiedFileStatus.NotIdentifiable)); + Assert.That(_response.Report, Is.EqualTo("[]")); + }); + } - var mockClassifier = new Mock(MockBehavior.Strict); - var failure = new Failure([new("foo", FailureClassification.Person, 123)]); - var failures = new List { failure }; - mockClassifier.Setup(x => x.Classify(It.IsAny())).Returns(failures); + [Test] + public void ProcessMessage_HappyPath_WithFailures() + { + // Arrange - var consumer = GetNewIsIdentifiableQueueConsumer(_mockProducerModel.Object, mockClassifier.Object); + var mockClassifier = new Mock(MockBehavior.Strict); + var failure = new Failure([new("foo", FailureClassification.Person, 123)]); + var failures = new List { failure }; + mockClassifier.Setup(x => x.Classify(It.IsAny())).Returns(failures); - // Act + var consumer = GetNewIsIdentifiableQueueConsumer(_mockProducerModel.Object, mockClassifier.Object); - consumer.ProcessMessage(new MessageHeader(), 
_extractedFileStatusMessage, 1); + // Act - Assert.Multiple(() => - { - // Assert + consumer.ProcessMessage(new MessageHeader(), _extractedFileStatusMessage, 1); - Assert.That(consumer.NackCount, Is.EqualTo(0)); - Assert.That(consumer.AckCount, Is.EqualTo(1)); - }); - _mockProducerModel.Verify(_expectedSendMessageCall, Times.Once); - Assert.Multiple(() => - { - Assert.That(_response.Status, Is.EqualTo(VerifiedFileStatus.IsIdentifiable)); - Assert.That(_response.Report, Is.EqualTo(JsonConvert.SerializeObject(failures))); - }); - } + Assert.Multiple(() => + { + // Assert - [Test] - public void ProcessMessage_ExtractedFileStatusNotAnonymised_CallsFatal() + Assert.That(consumer.NackCount, Is.EqualTo(0)); + Assert.That(consumer.AckCount, Is.EqualTo(1)); + }); + _mockProducerModel.Verify(_expectedSendMessageCall, Times.Once); + Assert.Multiple(() => { - // Arrange + Assert.That(_response.Status, Is.EqualTo(VerifiedFileStatus.IsIdentifiable)); + Assert.That(_response.Report, Is.EqualTo(JsonConvert.SerializeObject(failures))); + }); + } - var consumer = GetNewIsIdentifiableQueueConsumer(); + [Test] + public void ProcessMessage_ExtractedFileStatusNotAnonymised_CallsFatal() + { + // Arrange - _extractedFileStatusMessage.Status = ExtractedFileStatus.ErrorWontRetry; - _extractedFileStatusMessage.StatusMessage = "foo"; + var consumer = GetNewIsIdentifiableQueueConsumer(); - // Act + _extractedFileStatusMessage.Status = ExtractedFileStatus.ErrorWontRetry; + _extractedFileStatusMessage.StatusMessage = "foo"; - consumer.ProcessMessage(new MessageHeader(), _extractedFileStatusMessage, 1); + // Act - // Assert + consumer.ProcessMessage(new MessageHeader(), _extractedFileStatusMessage, 1); - TestTimelineAwaiter.Await(() => _fatalArgs != null, "Expected Fatal to be called"); - Assert.Multiple(() => - { - Assert.That(_fatalArgs?.Message, Is.EqualTo("ProcessMessageImpl threw unhandled exception")); - Assert.That(_fatalArgs!.Exception!.Message, Is.EqualTo("Received an 
ExtractedFileStatusMessage message with Status 'ErrorWontRetry' and StatusMessage 'foo'")); - Assert.That(consumer.NackCount, Is.EqualTo(0)); - Assert.That(consumer.AckCount, Is.EqualTo(0)); - }); - } - - [Test] - public void ProcessMessage_MissingFile_SendsErrorWontRetry() - { - // Arrange + // Assert - var consumer = GetNewIsIdentifiableQueueConsumer(_mockProducerModel.Object); + TestTimelineAwaiter.Await(() => _fatalArgs != null, "Expected Fatal to be called"); + Assert.Multiple(() => + { + Assert.That(_fatalArgs?.Message, Is.EqualTo("ProcessMessageImpl threw unhandled exception")); + Assert.That(_fatalArgs!.Exception!.Message, Is.EqualTo("Received an ExtractedFileStatusMessage message with Status 'ErrorWontRetry' and StatusMessage 'foo'")); + Assert.That(consumer.NackCount, Is.EqualTo(0)); + Assert.That(consumer.AckCount, Is.EqualTo(0)); + }); + } - _extractedFileStatusMessage.OutputFilePath = "bar-an.dcm"; + [Test] + public void ProcessMessage_MissingFile_SendsErrorWontRetry() + { + // Arrange - // Act + var consumer = GetNewIsIdentifiableQueueConsumer(_mockProducerModel.Object); - consumer.ProcessMessage(new MessageHeader(), _extractedFileStatusMessage, 1); + _extractedFileStatusMessage.OutputFilePath = "bar-an.dcm"; - Assert.Multiple(() => - { - // Assert + // Act - Assert.That(consumer.NackCount, Is.EqualTo(0)); - Assert.That(consumer.AckCount, Is.EqualTo(1)); - }); - _mockProducerModel.Verify(_expectedSendMessageCall, Times.Once); - Assert.That(_response.Status, Is.EqualTo(VerifiedFileStatus.ErrorWontRetry)); - var outPath = _mockFs.Path.Combine(_extractDir, "bar-an.dcm"); - Assert.That(_response.Report, Is.EqualTo($"Exception while processing ExtractedFileStatusMessage: Could not find file to process '{outPath}'")); - } + consumer.ProcessMessage(new MessageHeader(), _extractedFileStatusMessage, 1); - [Test] - public void ProcessMessage_ClassifierArithmeticException_SendsErrorWontRetry() + Assert.Multiple(() => { - // Arrange + // Assert - var 
mockClassifier = new Mock(MockBehavior.Strict); - mockClassifier.Setup(x => x.Classify(It.IsAny())).Throws(new ArithmeticException("divide by zero")); + Assert.That(consumer.NackCount, Is.EqualTo(0)); + Assert.That(consumer.AckCount, Is.EqualTo(1)); + }); + _mockProducerModel.Verify(_expectedSendMessageCall, Times.Once); + Assert.That(_response.Status, Is.EqualTo(VerifiedFileStatus.ErrorWontRetry)); + var outPath = _mockFs.Path.Combine(_extractDir, "bar-an.dcm"); + Assert.That(_response.Report, Is.EqualTo($"Exception while processing ExtractedFileStatusMessage: Could not find file to process '{outPath}'")); + } - var consumer = GetNewIsIdentifiableQueueConsumer(_mockProducerModel.Object, mockClassifier.Object); + [Test] + public void ProcessMessage_ClassifierArithmeticException_SendsErrorWontRetry() + { + // Arrange - // Act + var mockClassifier = new Mock(MockBehavior.Strict); + mockClassifier.Setup(x => x.Classify(It.IsAny())).Throws(new ArithmeticException("divide by zero")); - consumer.ProcessMessage(new MessageHeader(), _extractedFileStatusMessage, 1); + var consumer = GetNewIsIdentifiableQueueConsumer(_mockProducerModel.Object, mockClassifier.Object); - Assert.Multiple(() => - { - // Assert + // Act - Assert.That(consumer.NackCount, Is.EqualTo(0)); - Assert.That(consumer.AckCount, Is.EqualTo(1)); - }); - _mockProducerModel.Verify(_expectedSendMessageCall, Times.Once); - Assert.Multiple(() => - { - Assert.That(_response.Status, Is.EqualTo(VerifiedFileStatus.ErrorWontRetry)); - Assert.That(_response.Report, Does.StartWith("Exception while classifying ExtractedFileStatusMessage:\nSystem.ArithmeticException: divide by zero")); - }); - } + consumer.ProcessMessage(new MessageHeader(), _extractedFileStatusMessage, 1); - #endregion + Assert.Multiple(() => + { + // Assert + + Assert.That(consumer.NackCount, Is.EqualTo(0)); + Assert.That(consumer.AckCount, Is.EqualTo(1)); + }); + _mockProducerModel.Verify(_expectedSendMessageCall, Times.Once); + Assert.Multiple(() => + 
{ + Assert.That(_response.Status, Is.EqualTo(VerifiedFileStatus.ErrorWontRetry)); + Assert.That(_response.Report, Does.StartWith("Exception while classifying ExtractedFileStatusMessage:\nSystem.ArithmeticException: divide by zero")); + }); } + + #endregion } diff --git a/tests/SmiServices.UnitTests/Microservices/IsIdentifiable/NoChisInAnyColumnsConstraintTests.cs b/tests/SmiServices.UnitTests/Microservices/IsIdentifiable/NoChisInAnyColumnsConstraintTests.cs index 17aba8b62..95d716d29 100644 --- a/tests/SmiServices.UnitTests/Microservices/IsIdentifiable/NoChisInAnyColumnsConstraintTests.cs +++ b/tests/SmiServices.UnitTests/Microservices/IsIdentifiable/NoChisInAnyColumnsConstraintTests.cs @@ -1,28 +1,27 @@ using NUnit.Framework; using SmiServices.Microservices.IsIdentifiable; -namespace SmiServices.UnitTests.Microservices.IsIdentifiable +namespace SmiServices.UnitTests.Microservices.IsIdentifiable; + +class NoChisInAnyColumnsConstraintTests { - class NoChisInAnyColumnsConstraintTests + [TestCase("0101010101")] + [TestCase("0101010101 fish")] + [TestCase("test)0101010101.")] + [TestCase("1110101010.1")] + public void Test_SingleValue_IsChi(string testValue) { - [TestCase("0101010101")] - [TestCase("0101010101 fish")] - [TestCase("test)0101010101.")] - [TestCase("1110101010.1")] - public void Test_SingleValue_IsChi(string testValue) - { - _ = new NoChisInAnyColumnsConstraint(); - Assert.That(NoChisInAnyColumnsConstraint.Validate([testValue], [nameof(testValue)]), Is.EqualTo($"Found chi in field {nameof(testValue)}")); - } - - [TestCase("test)4401010101.")] //not a chi because there's no 44th day of the month - [TestCase("test)1120010101.")] //not a chi because there's no 20th month - [TestCase("11101010101")] //not a chi because 11 digits is too long - public void Test_SingleValue_IsNotChi(string testValue) - { - _ = new NoChisInAnyColumnsConstraint(); - Assert.That(NoChisInAnyColumnsConstraint.Validate([testValue], [nameof(testValue)]), Is.Null); - } + _ = new 
NoChisInAnyColumnsConstraint(); + Assert.That(NoChisInAnyColumnsConstraint.Validate([testValue], [nameof(testValue)]), Is.EqualTo($"Found chi in field {nameof(testValue)}")); + } + [TestCase("test)4401010101.")] //not a chi because there's no 44th day of the month + [TestCase("test)1120010101.")] //not a chi because there's no 20th month + [TestCase("11101010101")] //not a chi because 11 digits is too long + public void Test_SingleValue_IsNotChi(string testValue) + { + _ = new NoChisInAnyColumnsConstraint(); + Assert.That(NoChisInAnyColumnsConstraint.Validate([testValue], [nameof(testValue)]), Is.Null); } + } diff --git a/tests/SmiServices.UnitTests/Microservices/IsIdentifiable/TesseractStanfordDicomFileClassifierTests.cs b/tests/SmiServices.UnitTests/Microservices/IsIdentifiable/TesseractStanfordDicomFileClassifierTests.cs index ff2d07d63..7eef2be58 100644 --- a/tests/SmiServices.UnitTests/Microservices/IsIdentifiable/TesseractStanfordDicomFileClassifierTests.cs +++ b/tests/SmiServices.UnitTests/Microservices/IsIdentifiable/TesseractStanfordDicomFileClassifierTests.cs @@ -5,39 +5,38 @@ using System.IO; using Tesseract; -namespace SmiServices.UnitTests.Microservices.IsIdentifiable +namespace SmiServices.UnitTests.Microservices.IsIdentifiable; + +class TesseractStanfordDicomFileClassifierTests { - class TesseractStanfordDicomFileClassifierTests + [Test] + public void TestDataDirectory_DoesNotExist() + { + var d = new DirectoryInfo("asdflsdfjadfshsdfdsafldsf;dsfldsafj"); + Assert.Throws(() => new TesseractStanfordDicomFileClassifier(d, new IsIdentifiableDicomFileOptions())); + } + [Test] + public void TestDataDirectory_Empty() { - [Test] - public void TestDataDirectory_DoesNotExist() - { - var d = new DirectoryInfo("asdflsdfjadfshsdfdsafldsf;dsfldsafj"); - Assert.Throws(() => new TesseractStanfordDicomFileClassifier(d, new IsIdentifiableDicomFileOptions())); - } - [Test] - public void TestDataDirectory_Empty() - { - var path = 
Path.Combine(TestContext.CurrentContext.WorkDirectory, nameof(TestDataDirectory_Empty)); + var path = Path.Combine(TestContext.CurrentContext.WorkDirectory, nameof(TestDataDirectory_Empty)); - var d = new DirectoryInfo(path); - d.Create(); - Assert.Throws(() => new TesseractStanfordDicomFileClassifier(d, new IsIdentifiableDicomFileOptions())); - } + var d = new DirectoryInfo(path); + d.Create(); + Assert.Throws(() => new TesseractStanfordDicomFileClassifier(d, new IsIdentifiableDicomFileOptions())); + } - [Test] - [Platform(Exclude="Win")] - public void TesseractEngine_CanBeConstructed() - { - // Arrange - const string tessdataDirectory = @"../../../../../data/tessdata"; - var d = new DirectoryInfo(tessdataDirectory); + [Test] + [Platform(Exclude="Win")] + public void TesseractEngine_CanBeConstructed() + { + // Arrange + const string tessdataDirectory = @"../../../../../data/tessdata"; + var d = new DirectoryInfo(tessdataDirectory); - TesseractLinuxLoaderFix.Patch(); + TesseractLinuxLoaderFix.Patch(); - // Act - // Assert - Assert.DoesNotThrow(() => new TesseractEngine(d.FullName, "eng", EngineMode.Default)); - } + // Act + // Assert + Assert.DoesNotThrow(() => new TesseractEngine(d.FullName, "eng", EngineMode.Default)); } } diff --git a/tests/SmiServices.UnitTests/Microservices/MongoDbPopulator/Execution/Processing/ImageMessageProcessorTests_NoMongo.cs b/tests/SmiServices.UnitTests/Microservices/MongoDbPopulator/Execution/Processing/ImageMessageProcessorTests_NoMongo.cs index 1e48330d6..ad68d9114 100644 --- a/tests/SmiServices.UnitTests/Microservices/MongoDbPopulator/Execution/Processing/ImageMessageProcessorTests_NoMongo.cs +++ b/tests/SmiServices.UnitTests/Microservices/MongoDbPopulator/Execution/Processing/ImageMessageProcessorTests_NoMongo.cs @@ -14,82 +14,81 @@ using System.Linq; -namespace SmiServices.UnitTests.Microservices.MongoDbPopulator.Execution.Processing -{ - [TestFixture] - public class ImageMessageProcessorTests_NoMongo - { - private GlobalOptions 
_testOptions = null!; +namespace SmiServices.UnitTests.Microservices.MongoDbPopulator.Execution.Processing; - [OneTimeSetUp] - public void OneTimeSetUp() - { - } +[TestFixture] +public class ImageMessageProcessorTests_NoMongo +{ + private GlobalOptions _testOptions = null!; - [SetUp] - public void SetUp() - { - _testOptions = new GlobalOptionsFactory().Load(nameof(ImageMessageProcessorTests_NoMongo)); - } + [OneTimeSetUp] + public void OneTimeSetUp() + { + } - /// - /// Asserts that messages in the write queue are acknowledged even if an error occurs later in the modality batch - /// - [Test] - public void ImageProcessor_FailInModalityBatch_AcksWrittenDocuments() - { - _testOptions.MongoDbPopulatorOptions!.FailedWriteLimit = 1; - _testOptions.MongoDbPopulatorOptions.MongoDbFlushTime = int.MaxValue / 1000; + [SetUp] + public void SetUp() + { + _testOptions = new GlobalOptionsFactory().Load(nameof(ImageMessageProcessorTests_NoMongo)); + } - var testModalities = new[] { "MR", "MR", "MR", "SR", "SR" }; + /// + /// Asserts that messages in the write queue are acknowledged even if an error occurs later in the modality batch + /// + [Test] + public void ImageProcessor_FailInModalityBatch_AcksWrittenDocuments() + { + _testOptions.MongoDbPopulatorOptions!.FailedWriteLimit = 1; + _testOptions.MongoDbPopulatorOptions.MongoDbFlushTime = int.MaxValue / 1000; - var testAdapter = new MongoTestAdapter(); - var processor = new ImageMessageProcessor(_testOptions.MongoDbPopulatorOptions, testAdapter, testModalities.Length + 1, null!); + var testModalities = new[] { "MR", "MR", "MR", "SR", "SR" }; - var ds = new DicomDataset(); - var msg = new DicomFileMessage - { - DicomFilePath = "", - }; + var testAdapter = new MongoTestAdapter(); + var processor = new ImageMessageProcessor(_testOptions.MongoDbPopulatorOptions, testAdapter, testModalities.Length + 1, null!); - for (var i = 0; i < testModalities.Length; ++i) - { - string modality = testModalities[i]; - 
ds.AddOrUpdate(DicomTag.Modality, modality); - msg.DicomDataset = DicomTypeTranslater.SerializeDatasetToJson(ds); - processor.AddToWriteQueue(msg, new MessageHeader(), (ulong)i); - } + var ds = new DicomDataset(); + var msg = new DicomFileMessage + { + DicomFilePath = "", + }; - ds.AddOrUpdate(DicomTag.Modality, "CT"); + for (var i = 0; i < testModalities.Length; ++i) + { + string modality = testModalities[i]; + ds.AddOrUpdate(DicomTag.Modality, modality); msg.DicomDataset = DicomTypeTranslater.SerializeDatasetToJson(ds); - - Assert.Throws(() => processor.AddToWriteQueue(msg, new MessageHeader(), ulong.MaxValue)); - Assert.That(processor.AckCount, Is.EqualTo(5)); + processor.AddToWriteQueue(msg, new MessageHeader(), (ulong)i); } + + ds.AddOrUpdate(DicomTag.Modality, "CT"); + msg.DicomDataset = DicomTypeTranslater.SerializeDatasetToJson(ds); + + Assert.Throws(() => processor.AddToWriteQueue(msg, new MessageHeader(), ulong.MaxValue)); + Assert.That(processor.AckCount, Is.EqualTo(5)); } +} - public class MongoTestAdapter : IMongoDbAdapter +public class MongoTestAdapter : IMongoDbAdapter +{ + public WriteResult WriteMany(IList toWrite, string? collectionNamePostfix = null) { - public WriteResult WriteMany(IList toWrite, string? 
collectionNamePostfix = null) + Assert.That(toWrite, Is.Not.Empty); + + BsonDocument doc = toWrite.First(); + Assert.That(toWrite.All(x => x["Modality"] == doc["Modality"]), Is.True); + + // Fails for "CT" modalities + switch (doc["Modality"].AsString) { - Assert.That(toWrite, Is.Not.Empty); - - BsonDocument doc = toWrite.First(); - Assert.That(toWrite.All(x => x["Modality"] == doc["Modality"]), Is.True); - - // Fails for "CT" modalities - switch (doc["Modality"].AsString) - { - case "MR": - return WriteResult.Success; - case "CT": - return WriteResult.Failure; - case "SR": - return WriteResult.Success; - default: - Assert.Fail($"No case for {doc["Modality"]}"); - return WriteResult.Unknown; - } + case "MR": + return WriteResult.Success; + case "CT": + return WriteResult.Failure; + case "SR": + return WriteResult.Success; + default: + Assert.Fail($"No case for {doc["Modality"]}"); + return WriteResult.Unknown; } } } diff --git a/tests/SmiServices.UnitTests/Microservices/MongoDbPopulator/MongoDbPopulatorTestHelper.cs b/tests/SmiServices.UnitTests/Microservices/MongoDbPopulator/MongoDbPopulatorTestHelper.cs index 21fbf1b0f..5a9f6e620 100644 --- a/tests/SmiServices.UnitTests/Microservices/MongoDbPopulator/MongoDbPopulatorTestHelper.cs +++ b/tests/SmiServices.UnitTests/Microservices/MongoDbPopulator/MongoDbPopulatorTestHelper.cs @@ -8,86 +8,85 @@ using System; -namespace SmiServices.UnitTests.Microservices.MongoDbPopulator +namespace SmiServices.UnitTests.Microservices.MongoDbPopulator; + +public class MongoDbPopulatorTestHelper { - public class MongoDbPopulatorTestHelper + private const string TestDbName = "nUnitTests"; + + private MongoClient _mongoTestClient = null!; + + public IMongoDatabase TestDatabase = null!; + + public GlobalOptions Globals = null!; + + public DicomFileMessage TestImageMessage = null!; + public SeriesMessage TestSeriesMessage = null!; + + public void SetupSuite() { - private const string TestDbName = "nUnitTests"; + Globals = 
GetNewMongoDbPopulatorOptions(); - private MongoClient _mongoTestClient = null!; + _mongoTestClient = MongoClientHelpers.GetMongoClient(Globals.MongoDatabases!.DicomStoreOptions!, "MongoDbPopulatorTests"); - public IMongoDatabase TestDatabase = null!; + _mongoTestClient.DropDatabase(TestDbName); + TestDatabase = _mongoTestClient.GetDatabase(TestDbName); - public GlobalOptions Globals = null!; + Globals.MongoDbPopulatorOptions!.SeriesQueueConsumerOptions = new ConsumerOptions + { + QueueName = "TEST.SeriesQueue", + QoSPrefetchCount = 5, + AutoAck = false + }; - public DicomFileMessage TestImageMessage = null!; - public SeriesMessage TestSeriesMessage = null!; + Globals.MongoDbPopulatorOptions.ImageQueueConsumerOptions = new ConsumerOptions + { + QueueName = "TEST.MongoImageQueue", + QoSPrefetchCount = 50, + AutoAck = false + }; - public void SetupSuite() + var dataset = new DicomDataset { - Globals = GetNewMongoDbPopulatorOptions(); - - _mongoTestClient = MongoClientHelpers.GetMongoClient(Globals.MongoDatabases!.DicomStoreOptions!, "MongoDbPopulatorTests"); - - _mongoTestClient.DropDatabase(TestDbName); - TestDatabase = _mongoTestClient.GetDatabase(TestDbName); - - Globals.MongoDbPopulatorOptions!.SeriesQueueConsumerOptions = new ConsumerOptions - { - QueueName = "TEST.SeriesQueue", - QoSPrefetchCount = 5, - AutoAck = false - }; - - Globals.MongoDbPopulatorOptions.ImageQueueConsumerOptions = new ConsumerOptions - { - QueueName = "TEST.MongoImageQueue", - QoSPrefetchCount = 50, - AutoAck = false - }; - - var dataset = new DicomDataset - { - new DicomUniqueIdentifier(DicomTag.SOPInstanceUID, "1.2.3.4"), - new DicomCodeString(DicomTag.Modality, "SR") - }; - - string serialized = DicomTypeTranslater.SerializeDatasetToJson(dataset); - - TestImageMessage = new DicomFileMessage - { - DicomFilePath = "Path/To/File", - SeriesInstanceUID = "TestSeriesInstanceUID", - StudyInstanceUID = "TestStudyInstanceUID", - SOPInstanceUID = "TestSOPInstanceUID", - DicomDataset = serialized 
- }; - - TestSeriesMessage = new SeriesMessage - { - DirectoryPath = "Path/To/Series", - ImagesInSeries = 123, - SeriesInstanceUID = "TestSeriesInstanceUID", - StudyInstanceUID = "TestStudyInstanceUID", - DicomDataset = serialized - }; - } - - public static GlobalOptions GetNewMongoDbPopulatorOptions() + new DicomUniqueIdentifier(DicomTag.SOPInstanceUID, "1.2.3.4"), + new DicomCodeString(DicomTag.Modality, "SR") + }; + + string serialized = DicomTypeTranslater.SerializeDatasetToJson(dataset); + + TestImageMessage = new DicomFileMessage + { + DicomFilePath = "Path/To/File", + SeriesInstanceUID = "TestSeriesInstanceUID", + StudyInstanceUID = "TestStudyInstanceUID", + SOPInstanceUID = "TestSOPInstanceUID", + DicomDataset = serialized + }; + + TestSeriesMessage = new SeriesMessage { - GlobalOptions options = new GlobalOptionsFactory().Load(nameof(GetNewMongoDbPopulatorOptions)); + DirectoryPath = "Path/To/Series", + ImagesInSeries = 123, + SeriesInstanceUID = "TestSeriesInstanceUID", + StudyInstanceUID = "TestStudyInstanceUID", + DicomDataset = serialized + }; + } + + public static GlobalOptions GetNewMongoDbPopulatorOptions() + { + GlobalOptions options = new GlobalOptionsFactory().Load(nameof(GetNewMongoDbPopulatorOptions)); - options.MongoDatabases!.DicomStoreOptions!.DatabaseName = TestDbName; - options.MongoDbPopulatorOptions!.MongoDbFlushTime = 1; //1 second + options.MongoDatabases!.DicomStoreOptions!.DatabaseName = TestDbName; + options.MongoDbPopulatorOptions!.MongoDbFlushTime = 1; //1 second - return options; - } + return options; + } - public static string GetCollectionNameForTest(string testName) => testName + "-" + Guid.NewGuid(); + public static string GetCollectionNameForTest(string testName) => testName + "-" + Guid.NewGuid(); - public void Dispose() - { - _mongoTestClient.DropDatabase(TestDbName); - } + public void Dispose() + { + _mongoTestClient.DropDatabase(TestDbName); } } diff --git 
a/tests/SmiServices.UnitTests/Microservices/UpdateValues/TestUpdateValuesMessage.cs b/tests/SmiServices.UnitTests/Microservices/UpdateValues/TestUpdateValuesMessage.cs index 41638bb9a..e20c4bdd3 100644 --- a/tests/SmiServices.UnitTests/Microservices/UpdateValues/TestUpdateValuesMessage.cs +++ b/tests/SmiServices.UnitTests/Microservices/UpdateValues/TestUpdateValuesMessage.cs @@ -2,146 +2,145 @@ using SmiServices.Common.Messages.Updating; using System; -namespace SmiServices.UnitTests.Microservices.UpdateValues +namespace SmiServices.UnitTests.Microservices.UpdateValues; + +public class TestUpdateValuesMessage { - public class TestUpdateValuesMessage + [Test] + public void TestNoWhere() { - [Test] - public void TestNoWhere() - { - var msg = new UpdateValuesMessage(); - var ex = Assert.Throws(msg.Validate); + var msg = new UpdateValuesMessage(); + var ex = Assert.Throws(msg.Validate); - Assert.That(ex!.Message, Is.EqualTo("There must be at least one search field for WHERE section. Otherwise this would update entire tables")); - } + Assert.That(ex!.Message, Is.EqualTo("There must be at least one search field for WHERE section. 
Otherwise this would update entire tables")); + } - [Test] - public void TestNoWhereValue() + [Test] + public void TestNoWhereValue() + { + var msg = new UpdateValuesMessage { - var msg = new UpdateValuesMessage - { - WhereFields = ["ff"] - }; + WhereFields = ["ff"] + }; - var ex = Assert.Throws(msg.Validate); + var ex = Assert.Throws(msg.Validate); - Assert.That(ex!.Message, Is.EqualTo("WhereFields length must match HaveValues length")); - } - [Test] - public void TestNoSet() + Assert.That(ex!.Message, Is.EqualTo("WhereFields length must match HaveValues length")); + } + [Test] + public void TestNoSet() + { + var msg = new UpdateValuesMessage { - var msg = new UpdateValuesMessage - { - WhereFields = ["ff"], - HaveValues = [null] //where column ff has a null value - }; + WhereFields = ["ff"], + HaveValues = [null] //where column ff has a null value + }; - var ex = Assert.Throws(msg.Validate); + var ex = Assert.Throws(msg.Validate); - Assert.That(ex!.Message, Is.EqualTo("There must be at least one value to write")); - } + Assert.That(ex!.Message, Is.EqualTo("There must be at least one value to write")); + } - [Test] - public void TestNoSetValue() + [Test] + public void TestNoSetValue() + { + var msg = new UpdateValuesMessage { - var msg = new UpdateValuesMessage - { - WhereFields = ["ff"], - HaveValues = [null], //where column ff has a null value - WriteIntoFields = ["ff"] - }; + WhereFields = ["ff"], + HaveValues = [null], //where column ff has a null value + WriteIntoFields = ["ff"] + }; - var ex = Assert.Throws(msg.Validate); + var ex = Assert.Throws(msg.Validate); - Assert.That(ex!.Message, Is.EqualTo("WriteIntoFields length must match Values length")); - } + Assert.That(ex!.Message, Is.EqualTo("WriteIntoFields length must match Values length")); + } - [Test] - public void TestTwoValuesOneOperator() + [Test] + public void TestTwoValuesOneOperator() + { + var msg = new UpdateValuesMessage { - var msg = new UpdateValuesMessage - { - WhereFields = ["ff", "mm"], - 
HaveValues = ["111", "123"], - Operators = ["="], - WriteIntoFields = ["ff"], - Values = ["ff"] - }; + WhereFields = ["ff", "mm"], + HaveValues = ["111", "123"], + Operators = ["="], + WriteIntoFields = ["ff"], + Values = ["ff"] + }; - var ex = Assert.Throws(msg.Validate); + var ex = Assert.Throws(msg.Validate); - Assert.That(ex!.Message, Is.EqualTo("WhereFields length must match Operators length")); - } - [Test] - public void Test_GoodMessage() + Assert.That(ex!.Message, Is.EqualTo("WhereFields length must match Operators length")); + } + [Test] + public void Test_GoodMessage() + { + var msg = new UpdateValuesMessage { - var msg = new UpdateValuesMessage - { - WhereFields = ["ff"], - HaveValues = [null], //where column ff has a null value - WriteIntoFields = ["ff"], - Values = ["ddd"] //write the value ddd - }; + WhereFields = ["ff"], + HaveValues = [null], //where column ff has a null value + WriteIntoFields = ["ff"], + Values = ["ddd"] //write the value ddd + }; - msg.Validate(); + msg.Validate(); - } + } - [Test] - public void TestEquality() - { - var m1 = new UpdateValuesMessage(); - var m2 = new UpdateValuesMessage(); + [Test] + public void TestEquality() + { + var m1 = new UpdateValuesMessage(); + var m2 = new UpdateValuesMessage(); - Assert.That(m2, Is.EqualTo(m1)); - Assert.That(m2.GetHashCode(), Is.EqualTo(m1.GetHashCode())); + Assert.That(m2, Is.EqualTo(m1)); + Assert.That(m2.GetHashCode(), Is.EqualTo(m1.GetHashCode())); - m1.WhereFields = ["fff"]; + m1.WhereFields = ["fff"]; - Assert.That(m2, Is.Not.EqualTo(m1)); + Assert.That(m2, Is.Not.EqualTo(m1)); - m2.WhereFields = ["fff"]; + m2.WhereFields = ["fff"]; - Assert.That(m2, Is.EqualTo(m1)); - Assert.That(m2.GetHashCode(), Is.EqualTo(m1.GetHashCode())); + Assert.That(m2, Is.EqualTo(m1)); + Assert.That(m2.GetHashCode(), Is.EqualTo(m1.GetHashCode())); - m1.WhereFields = []; - m2.WhereFields = []; + m1.WhereFields = []; + m2.WhereFields = []; - Assert.That(m2, Is.EqualTo(m1)); - 
Assert.That(m2.GetHashCode(), Is.EqualTo(m1.GetHashCode())); + Assert.That(m2, Is.EqualTo(m1)); + Assert.That(m2.GetHashCode(), Is.EqualTo(m1.GetHashCode())); - foreach (var prop in typeof(UpdateValuesMessage).GetProperties()) + foreach (var prop in typeof(UpdateValuesMessage).GetProperties()) + { + if (prop.Name.Equals(nameof(UpdateValuesMessage.ExplicitTableInfo))) { - if (prop.Name.Equals(nameof(UpdateValuesMessage.ExplicitTableInfo))) - { - prop.SetValue(m1, new int[] { 6 }); - Assert.That(m2, Is.Not.EqualTo(m1)); - - prop.SetValue(m2, new int[] { 6 }); - Assert.That(m2, Is.EqualTo(m1)); - Assert.That(m2.GetHashCode(), Is.EqualTo(m1.GetHashCode())); + prop.SetValue(m1, new int[] { 6 }); + Assert.That(m2, Is.Not.EqualTo(m1)); + prop.SetValue(m2, new int[] { 6 }); + Assert.That(m2, Is.EqualTo(m1)); + Assert.That(m2.GetHashCode(), Is.EqualTo(m1.GetHashCode())); - prop.SetValue(m2, new int[] { 7 }); - Assert.That(m2, Is.Not.EqualTo(m1)); - prop.SetValue(m2, new int[] { 6 }); - Assert.That(m2, Is.EqualTo(m1)); - Assert.That(m2.GetHashCode(), Is.EqualTo(m1.GetHashCode())); - } - else - { - prop.SetValue(m1, new string[] { "ss" }); - Assert.That(m2, Is.Not.EqualTo(m1)); + prop.SetValue(m2, new int[] { 7 }); + Assert.That(m2, Is.Not.EqualTo(m1)); + prop.SetValue(m2, new int[] { 6 }); - prop.SetValue(m2, new string[] { "ss" }); - Assert.That(m2, Is.EqualTo(m1)); - Assert.That(m2.GetHashCode(), Is.EqualTo(m1.GetHashCode())); - } + Assert.That(m2, Is.EqualTo(m1)); + Assert.That(m2.GetHashCode(), Is.EqualTo(m1.GetHashCode())); + } + else + { + prop.SetValue(m1, new string[] { "ss" }); + Assert.That(m2, Is.Not.EqualTo(m1)); + prop.SetValue(m2, new string[] { "ss" }); + Assert.That(m2, Is.EqualTo(m1)); + Assert.That(m2.GetHashCode(), Is.EqualTo(m1.GetHashCode())); } } + } } diff --git a/tests/SmiServices.UnitTests/Microservices/UpdateValues/UpdateTableAuditTests.cs b/tests/SmiServices.UnitTests/Microservices/UpdateValues/UpdateTableAuditTests.cs index 8f113d87e..ba95ec0dc 
100644 --- a/tests/SmiServices.UnitTests/Microservices/UpdateValues/UpdateTableAuditTests.cs +++ b/tests/SmiServices.UnitTests/Microservices/UpdateValues/UpdateTableAuditTests.cs @@ -4,64 +4,63 @@ using System.Collections.Generic; using System.Threading.Tasks; -namespace SmiServices.UnitTests.Microservices.UpdateValues +namespace SmiServices.UnitTests.Microservices.UpdateValues; + +class UpdateTableAuditTests { - class UpdateTableAuditTests + [Test] + public void TestTwoQueriesAtOnce() { - [Test] - public void TestTwoQueriesAtOnce() - { - var audit = new UpdateTableAudit(null); + var audit = new UpdateTableAudit(null); - Assert.That(audit.ExecutingQueries, Is.EqualTo(0)); + Assert.That(audit.ExecutingQueries, Is.EqualTo(0)); - audit.StartOne(); - audit.StartOne(); + audit.StartOne(); + audit.StartOne(); - Assert.Multiple(() => - { - Assert.That(audit.ExecutingQueries, Is.EqualTo(2)); - Assert.That(audit.Queries, Is.EqualTo(2)); - }); + Assert.Multiple(() => + { + Assert.That(audit.ExecutingQueries, Is.EqualTo(2)); + Assert.That(audit.Queries, Is.EqualTo(2)); + }); - audit.EndOne(2); - audit.EndOne(5); + audit.EndOne(2); + audit.EndOne(5); - Assert.Multiple(() => - { - Assert.That(audit.ExecutingQueries, Is.EqualTo(0)); - Assert.That(audit.Queries, Is.EqualTo(2)); - Assert.That(audit.AffectedRows, Is.EqualTo(7)); - }); - } - [Test] - public void TestManyQueriesAtOnce_MultiThreaded() + Assert.Multiple(() => { - var audit = new UpdateTableAudit(null); - Assert.That(audit.ExecutingQueries, Is.EqualTo(0)); + Assert.That(audit.Queries, Is.EqualTo(2)); + Assert.That(audit.AffectedRows, Is.EqualTo(7)); + }); + } + [Test] + public void TestManyQueriesAtOnce_MultiThreaded() + { + var audit = new UpdateTableAudit(null); - List tasks = []; - - for (int i = 0; i < 50; i++) - { - tasks.Add(Task.Run(() => - { - audit.StartOne(); - Task.Delay(TimeSpan.FromSeconds(5)); - audit.EndOne(1); - })); - } + Assert.That(audit.ExecutingQueries, Is.EqualTo(0)); - Task.WaitAll([.. 
tasks]); + List tasks = []; - Assert.Multiple(() => + for (int i = 0; i < 50; i++) + { + tasks.Add(Task.Run(() => { - Assert.That(audit.ExecutingQueries, Is.EqualTo(0)); - Assert.That(audit.Queries, Is.EqualTo(50)); - Assert.That(audit.Stopwatch.IsRunning, Is.False); - Assert.That(audit.Stopwatch.ElapsedMilliseconds, Is.LessThanOrEqualTo(TimeSpan.FromSeconds(10).TotalMilliseconds)); - }); + audit.StartOne(); + Task.Delay(TimeSpan.FromSeconds(5)); + audit.EndOne(1); + })); } + + Task.WaitAll([.. tasks]); + + Assert.Multiple(() => + { + Assert.That(audit.ExecutingQueries, Is.EqualTo(0)); + Assert.That(audit.Queries, Is.EqualTo(50)); + Assert.That(audit.Stopwatch.IsRunning, Is.False); + Assert.That(audit.Stopwatch.ElapsedMilliseconds, Is.LessThanOrEqualTo(TimeSpan.FromSeconds(10).TotalMilliseconds)); + }); } } diff --git a/tests/SmiServices.UnitTests/ProgramTests.cs b/tests/SmiServices.UnitTests/ProgramTests.cs index 92719d30a..d1c9bfae0 100644 --- a/tests/SmiServices.UnitTests/ProgramTests.cs +++ b/tests/SmiServices.UnitTests/ProgramTests.cs @@ -5,64 +5,63 @@ using System.Linq; -namespace SmiServices.UnitTests +namespace SmiServices.UnitTests; + +public class ProgramTests { - public class ProgramTests - { - #region Fixture Methods + #region Fixture Methods - private readonly IEnumerable _allVerbs = - typeof(VerbBase) - .Assembly - .GetTypes() - .Where(t => typeof(VerbBase).IsAssignableFrom(t) && !t.IsAbstract); + private readonly IEnumerable _allVerbs = + typeof(VerbBase) + .Assembly + .GetTypes() + .Where(t => typeof(VerbBase).IsAssignableFrom(t) && !t.IsAbstract); - [OneTimeSetUp] - public void OneTimeSetUp() - { - } + [OneTimeSetUp] + public void OneTimeSetUp() + { + } - [OneTimeTearDown] - public void OneTimeTearDown() { } + [OneTimeTearDown] + public void OneTimeTearDown() { } - #endregion + #endregion - #region Test Methods + #region Test Methods - [SetUp] - public void SetUp() { } + [SetUp] + public void SetUp() { } - [TearDown] - public void TearDown() { } + 
[TearDown] + public void TearDown() { } - #endregion + #endregion - #region Tests + #region Tests - /// - /// Checks all defined verb types are actually used - /// - [Test] - public void AllVerbTypes_AreUsed() + /// + /// Checks all defined verb types are actually used + /// + [Test] + public void AllVerbTypes_AreUsed() + { + foreach (Type t in _allVerbs) { - foreach (Type t in _allVerbs) + if (t.BaseType == typeof(ApplicationVerbBase)) { - if (t.BaseType == typeof(ApplicationVerbBase)) - { - Assert.That(Program.AllApplications, Does.Contain(t), $"{t} not in the list of applications"); - } - else if (t.BaseType == typeof(MicroservicesVerbBase)) - { - Assert.That(Program.AllServices, Does.Contain(t), $"{t} not in the list of services"); - } - else - { - Assert.Fail($"No case for {t.BaseType}"); - } + Assert.That(Program.AllApplications, Does.Contain(t), $"{t} not in the list of applications"); + } + else if (t.BaseType == typeof(MicroservicesVerbBase)) + { + Assert.That(Program.AllServices, Does.Contain(t), $"{t} not in the list of services"); + } + else + { + Assert.Fail($"No case for {t.BaseType}"); } - } - #endregion } + + #endregion } diff --git a/tests/SmiServices.UnitTests/ServiceVerbsTests.cs b/tests/SmiServices.UnitTests/ServiceVerbsTests.cs index bdafd5157..0b43576f0 100644 --- a/tests/SmiServices.UnitTests/ServiceVerbsTests.cs +++ b/tests/SmiServices.UnitTests/ServiceVerbsTests.cs @@ -7,59 +7,58 @@ using System.Text.RegularExpressions; -namespace SmiServices.UnitTests +namespace SmiServices.UnitTests; + +public class ServiceVerbsTests { - public class ServiceVerbsTests - { - #region Fixture Methods + #region Fixture Methods - private readonly IEnumerable _allVerbs = - typeof(VerbBase) - .Assembly - .GetTypes() - .Where(t => typeof(VerbBase).IsAssignableFrom(t) && !t.IsAbstract); + private readonly IEnumerable _allVerbs = + typeof(VerbBase) + .Assembly + .GetTypes() + .Where(t => typeof(VerbBase).IsAssignableFrom(t) && !t.IsAbstract); - [OneTimeSetUp] - 
public void OneTimeSetUp() - { - } + [OneTimeSetUp] + public void OneTimeSetUp() + { + } - [OneTimeTearDown] - public void OneTimeTearDown() { } + [OneTimeTearDown] + public void OneTimeTearDown() { } - #endregion + #endregion - #region Test Methods + #region Test Methods - [SetUp] - public void SetUp() { } + [SetUp] + public void SetUp() { } - [TearDown] - public void TearDown() { } + [TearDown] + public void TearDown() { } - #endregion + #endregion - #region Tests + #region Tests - [Test] - public void VerbName_MatchesClassName() + [Test] + public void VerbName_MatchesClassName() + { + foreach (Type t in _allVerbs) { - foreach (Type t in _allVerbs) - { - string nameWithoutVerb = t.Name[..t.Name.LastIndexOf("Verb")]; - string[] splitWords = Regex.Split(nameWithoutVerb, @"(? +/// helper for asynchronous tests, awaits for certain conditions to be true within a given timeout (or infinite timeout if debugger is attached) +/// +public class TestTimelineAwaiter { /// - /// helper for asynchronous tests, awaits for certain conditions to be true within a given timeout (or infinite timeout if debugger is attached) + /// Blocks until is met or the is reached. Polls + /// (if provided) to check for Exceptions (which will break the wait). + /// + /// During debugging is ignored /// - public class TestTimelineAwaiter + /// + /// + /// + /// + public static void Await( + Func condition, + string? timeoutMessage = null, + int timeout = 30000, + Func>? throwIfAnyFunc = null + ) { - /// - /// Blocks until is met or the is reached. Polls - /// (if provided) to check for Exceptions (which will break the wait). - /// - /// During debugging is ignored - /// - /// - /// - /// - /// - public static void Await( - Func condition, - string? timeoutMessage = null, - int timeout = 30000, - Func>? 
throwIfAnyFunc = null - ) - { - if (Debugger.IsAttached) - timeout = int.MaxValue; - - while (!condition() && timeout > 0) - { - Thread.Sleep(100); - timeout -= 100; + if (Debugger.IsAttached) + timeout = int.MaxValue; - var exceptions = throwIfAnyFunc?.Invoke()?.ToArray(); + while (!condition() && timeout > 0) + { + Thread.Sleep(100); + timeout -= 100; - if (exceptions == null || exceptions.Length == 0) continue; - var logger = LogManager.GetCurrentClassLogger(); + var exceptions = throwIfAnyFunc?.Invoke()?.ToArray(); - foreach (var ex in exceptions) - logger.Error(ex); + if (exceptions == null || exceptions.Length == 0) continue; + var logger = LogManager.GetCurrentClassLogger(); - LogManager.Flush(); + foreach (var ex in exceptions) + logger.Error(ex); - throw exceptions.Length == 1 - ? exceptions.Single() - : new AggregateException(exceptions); + LogManager.Flush(); - } + throw exceptions.Length == 1 + ? exceptions.Single() + : new AggregateException(exceptions); - if (timeout <= 0) - Assert.Fail(timeoutMessage ?? "Failed to reach the condition after the expected timeout"); } + + if (timeout <= 0) + Assert.Fail(timeoutMessage ?? "Failed to reach the condition after the expected timeout"); } }