From c4d539c74fc723bb77a331b9fe63fa9c7dc2db38 Mon Sep 17 00:00:00 2001 From: James Friel Date: Wed, 20 Sep 2023 08:44:26 +0100 Subject: [PATCH] 8.1.0 Release (#1628) * Bump Newtonsoft.Json from 13.0.1 to 13.0.2 Bumps [Newtonsoft.Json](https://github.com/JamesNK/Newtonsoft.Json) from 13.0.1 to 13.0.2. - [Release notes](https://github.com/JamesNK/Newtonsoft.Json/releases) - [Commits](https://github.com/JamesNK/Newtonsoft.Json/compare/13.0.1...13.0.2) --- updated-dependencies: - dependency-name: Newtonsoft.Json dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Bump NLog from 5.0.5 to 5.1.0 Bumps [NLog](https://github.com/NLog/NLog) from 5.0.5 to 5.1.0. - [Release notes](https://github.com/NLog/NLog/releases) - [Changelog](https://github.com/NLog/NLog/blob/dev/CHANGELOG.md) - [Commits](https://github.com/NLog/NLog/compare/v5.0.5...v5.1.0) --- updated-dependencies: - dependency-name: NLog dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Bump NLog from 5.0.5 to 5.1.0 * Fix -r flag - should have been --results-directory all along * Bump Newtonsoft.Json from 13.0.1 to 13.0.2 * Bump YamlDotNet from 12.0.2 to 12.1.0 Bumps [YamlDotNet](https://github.com/aaubry/YamlDotNet) from 12.0.2 to 12.1.0. - [Release notes](https://github.com/aaubry/YamlDotNet/releases) - [Commits](https://github.com/aaubry/YamlDotNet/compare/v12.0.2...v12.1.0) --- updated-dependencies: - dependency-name: YamlDotNet dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Bump Moq from 4.18.2 to 4.18.3 Bumps [Moq](https://github.com/moq/moq4) from 4.18.2 to 4.18.3. - [Release notes](https://github.com/moq/moq4/releases) - [Changelog](https://github.com/moq/moq4/blob/main/CHANGELOG.md) - [Commits](https://github.com/moq/moq4/compare/v4.18.2...v4.18.3) --- updated-dependencies: - dependency-name: Moq dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Bump Moq from 4.18.2 to 4.18.3 * Bump YamlDotNet from 12.0.2 to 12.1.0 * Add CodeQL workflow for GitHub code scanning * Bump YamlDotNet from 12.1.0 to 12.2.0 Bumps [YamlDotNet](https://github.com/aaubry/YamlDotNet) from 12.1.0 to 12.2.0. - [Release notes](https://github.com/aaubry/YamlDotNet/releases) - [Commits](https://github.com/aaubry/YamlDotNet/compare/v12.1.0...v12.2.0) --- updated-dependencies: - dependency-name: YamlDotNet dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Add EnableWindowsTargeting for CodeQL to run on Linux properly * Bump YamlDotNet from 12.1.0 to 12.2.0 * Bump to latest actions-setup-perl release * Bump YamlDotNet from 12.2.0 to 12.2.1 Bumps [YamlDotNet](https://github.com/aaubry/YamlDotNet) from 12.2.0 to 12.2.1. - [Release notes](https://github.com/aaubry/YamlDotNet/releases) - [Commits](https://github.com/aaubry/YamlDotNet/compare/v12.2.0...v12.2.1) --- updated-dependencies: - dependency-name: YamlDotNet dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Bump YamlDotNet from 12.2.0 to 12.2.1 * Bump Microsoft.NET.Test.Sdk from 17.4.0 to 17.4.1 Bumps [Microsoft.NET.Test.Sdk](https://github.com/microsoft/vstest) from 17.4.0 to 17.4.1. 
- [Release notes](https://github.com/microsoft/vstest/releases) - [Changelog](https://github.com/microsoft/vstest/blob/main/docs/releases.md) - [Commits](https://github.com/microsoft/vstest/compare/v17.4.0...v17.4.1) --- updated-dependencies: - dependency-name: Microsoft.NET.Test.Sdk dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Bump Microsoft.NET.Test.Sdk from 17.4.0 to 17.4.1 * Bump YamlDotNet from 12.2.1 to 12.3.1 Bumps [YamlDotNet](https://github.com/aaubry/YamlDotNet) from 12.2.1 to 12.3.1. - [Release notes](https://github.com/aaubry/YamlDotNet/releases) - [Commits](https://github.com/aaubry/YamlDotNet/compare/v12.2.1...v12.3.1) --- updated-dependencies: - dependency-name: YamlDotNet dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Bump YamlDotNet from 12.2.1 to 12.3.1 * Bump NLog from 5.1.0 to 5.1.1 Bumps [NLog](https://github.com/NLog/NLog) from 5.1.0 to 5.1.1. - [Release notes](https://github.com/NLog/NLog/releases) - [Changelog](https://github.com/NLog/NLog/blob/dev/CHANGELOG.md) - [Commits](https://github.com/NLog/NLog/compare/v5.1.0...v5.1.1) --- updated-dependencies: - dependency-name: NLog dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Bump NLog from 5.1.0 to 5.1.1 * Bump Moq from 4.18.3 to 4.18.4 Bumps [Moq](https://github.com/moq/moq4) from 4.18.3 to 4.18.4. - [Release notes](https://github.com/moq/moq4/releases) - [Changelog](https://github.com/moq/moq4/blob/main/CHANGELOG.md) - [Commits](https://github.com/moq/moq4/compare/v4.18.3...v4.18.4) --- updated-dependencies: - dependency-name: Moq dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Bump Moq from 4.18.3 to 4.18.4 * Bump Terminal.Gui from 1.7.2 to 1.9.0 Bumps [Terminal.Gui](https://github.com/gui-cs/Terminal.Gui) from 1.7.2 to 1.9.0. - [Release notes](https://github.com/gui-cs/Terminal.Gui/releases) - [Commits](https://github.com/gui-cs/Terminal.Gui/compare/v1.7.2...v1.9.0) --- updated-dependencies: - dependency-name: Terminal.Gui dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Bump actions/setup-node from 3.5.1 to 3.6.0 Bumps [actions/setup-node](https://github.com/actions/setup-node) from 3.5.1 to 3.6.0. - [Release notes](https://github.com/actions/setup-node/releases) - [Commits](https://github.com/actions/setup-node/compare/v3.5.1...v3.6.0) --- updated-dependencies: - dependency-name: actions/setup-node dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Method name fixups * Bump Spectre.Console from 0.45.0 to 0.46.0 Bumps [Spectre.Console](https://github.com/spectreconsole/spectre.console) from 0.45.0 to 0.46.0. - [Release notes](https://github.com/spectreconsole/spectre.console/releases) - [Commits](https://github.com/spectreconsole/spectre.console/compare/0.45.0...0.46.0) --- updated-dependencies: - dependency-name: Spectre.Console dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Bump svenstaro/upload-release-action from 2.3.0 to 2.4.0 Bumps [svenstaro/upload-release-action](https://github.com/svenstaro/upload-release-action) from 2.3.0 to 2.4.0. 
- [Release notes](https://github.com/svenstaro/upload-release-action/releases) - [Changelog](https://github.com/svenstaro/upload-release-action/blob/master/CHANGELOG.md) - [Commits](https://github.com/svenstaro/upload-release-action/compare/2.3.0...2.4.0) --- updated-dependencies: - dependency-name: svenstaro/upload-release-action dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Bump Spectre.Console from 0.45.0 to 0.46.0 * Bump System.Threading.AccessControl from 7.0.0 to 7.0.1 Bumps [System.Threading.AccessControl](https://github.com/dotnet/runtime) from 7.0.0 to 7.0.1. - [Release notes](https://github.com/dotnet/runtime/releases) - [Commits](https://github.com/dotnet/runtime/compare/v7.0.0...v7.0.1) --- updated-dependencies: - dependency-name: System.Threading.AccessControl dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Bump System.Threading.AccessControl from 7.0.0 to 7.0.1 * Bump NunitXml.TestLogger from 3.0.127 to 3.0.131 Bumps [NunitXml.TestLogger](https://github.com/spekt/nunit.testlogger) from 3.0.127 to 3.0.131. - [Release notes](https://github.com/spekt/nunit.testlogger/releases) - [Changelog](https://github.com/spekt/nunit.testlogger/blob/master/CHANGELOG.md) - [Commits](https://github.com/spekt/nunit.testlogger/compare/v3.0.127...v3.0.131) --- updated-dependencies: - dependency-name: NunitXml.TestLogger dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Bump svenstaro/upload-release-action from 2.4.0 to 2.4.1 Bumps [svenstaro/upload-release-action](https://github.com/svenstaro/upload-release-action) from 2.4.0 to 2.4.1. - [Release notes](https://github.com/svenstaro/upload-release-action/releases) - [Changelog](https://github.com/svenstaro/upload-release-action/blob/master/CHANGELOG.md) - [Commits](https://github.com/svenstaro/upload-release-action/compare/2.4.0...2.4.1) --- updated-dependencies: - dependency-name: svenstaro/upload-release-action dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Syntax cleanup * Make DB file locator accept both / and \ as delimiters * Make file locator handle Unix and DOS style paths more smartly, fix typos * Trim trailing slashes from SQL Server directory location * Replace clunky slow file copier which got file permissions wrong with integral .Net version * Bump NLog from 5.1.1 to 5.1.2 Bumps [NLog](https://github.com/NLog/NLog) from 5.1.1 to 5.1.2. - [Release notes](https://github.com/NLog/NLog/releases) - [Changelog](https://github.com/NLog/NLog/blob/dev/CHANGELOG.md) - [Commits](https://github.com/NLog/NLog/compare/v5.1.1...v5.1.2) --- updated-dependencies: - dependency-name: NLog dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Bump NLog from 5.1.1 to 5.1.2 * famfamfam.com icon site domain expired, use archive.org copies for now * Bump Microsoft.NET.Test.Sdk from 17.4.1 to 17.5.0 Bumps [Microsoft.NET.Test.Sdk](https://github.com/microsoft/vstest) from 17.4.1 to 17.5.0. 
- [Release notes](https://github.com/microsoft/vstest/releases) - [Changelog](https://github.com/microsoft/vstest/blob/main/docs/releases.md) - [Commits](https://github.com/microsoft/vstest/compare/v17.4.1...v17.5.0) --- updated-dependencies: - dependency-name: Microsoft.NET.Test.Sdk dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Bump svenstaro/upload-release-action from 2.4.1 to 2.5.0 Bumps [svenstaro/upload-release-action](https://github.com/svenstaro/upload-release-action) from 2.4.1 to 2.5.0. - [Release notes](https://github.com/svenstaro/upload-release-action/releases) - [Changelog](https://github.com/svenstaro/upload-release-action/blob/master/CHANGELOG.md) - [Commits](https://github.com/svenstaro/upload-release-action/compare/2.4.1...2.5.0) --- updated-dependencies: - dependency-name: svenstaro/upload-release-action dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Bump Microsoft.NET.Test.Sdk from 17.4.1 to 17.5.0 * Bump YamlDotNet from 12.3.1 to 13.0.1 Bumps [YamlDotNet](https://github.com/aaubry/YamlDotNet) from 12.3.1 to 13.0.1. - [Release notes](https://github.com/aaubry/YamlDotNet/releases) - [Commits](https://github.com/aaubry/YamlDotNet/compare/v12.3.1...v13.0.1) --- updated-dependencies: - dependency-name: YamlDotNet dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] * Bump YamlDotNet from 12.3.1 to 13.0.1 * Bump NUnit3TestAdapter from 4.3.1 to 4.4.2 Bumps [NUnit3TestAdapter](https://github.com/nunit/nunit3-vs-adapter) from 4.3.1 to 4.4.2. - [Release notes](https://github.com/nunit/nunit3-vs-adapter/releases) - [Commits](https://github.com/nunit/nunit3-vs-adapter/compare/V4.3.1...V4.4.2) --- updated-dependencies: - dependency-name: NUnit3TestAdapter dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Bump SixLabors.ImageSharp from 2.1.3 to 3.0.0 Bumps [SixLabors.ImageSharp](https://github.com/SixLabors/ImageSharp) from 2.1.3 to 3.0.0. - [Release notes](https://github.com/SixLabors/ImageSharp/releases) - [Commits](https://github.com/SixLabors/ImageSharp/compare/v2.1.3...v3.0.0) --- updated-dependencies: - dependency-name: SixLabors.ImageSharp dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] * Bump SixLabors.ImageSharp from 2.1.3 to 3.0.0 * Bump Autoupdater.NET.Official from 1.7.6 to 1.7.7 Bumps [Autoupdater.NET.Official](https://github.com/ravibpatel/AutoUpdater.NET) from 1.7.6 to 1.7.7. - [Release notes](https://github.com/ravibpatel/AutoUpdater.NET/releases) - [Commits](https://github.com/ravibpatel/AutoUpdater.NET/compare/v1.7.6...v1.7.7) --- updated-dependencies: - dependency-name: Autoupdater.NET.Official dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Bump Newtonsoft.Json from 13.0.2 to 13.0.3 Bumps [Newtonsoft.Json](https://github.com/JamesNK/Newtonsoft.Json) from 13.0.2 to 13.0.3. - [Release notes](https://github.com/JamesNK/Newtonsoft.Json/releases) - [Commits](https://github.com/JamesNK/Newtonsoft.Json/compare/13.0.2...13.0.3) --- updated-dependencies: - dependency-name: Newtonsoft.Json dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] * Update solution file * Bump NLog from 5.1.2 to 5.1.3 Bumps [NLog](https://github.com/NLog/NLog) from 5.1.2 to 5.1.3. - [Release notes](https://github.com/NLog/NLog/releases) - [Changelog](https://github.com/NLog/NLog/blob/dev/CHANGELOG.md) - [Commits](https://github.com/NLog/NLog/compare/v5.1.2...v5.1.3) --- updated-dependencies: - dependency-name: NLog dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Bump NLog from 5.1.2 to 5.1.3 * Revert premature .sln change * Bump shogo82148/actions-setup-perl from 1.20.1 to 1.20.2 (#1535) * Bump SixLabors.ImageSharp from 3.0.0 to 3.0.1 (#1539) * Bump Newtonsoft.Json from 13.0.2 to 13.0.3 * Bump Terminal.Gui from 1.9.0 to 1.10.0 (#1536) * Bump YamlDotNet from 13.0.1 to 13.0.2 (#1533) * Bump Terminal.Gui from 1.10.0 to 1.10.1 (#1541) * Bump Autoupdater.NET.Official from 1.7.7 to 1.8.0 Bumps [Autoupdater.NET.Official](https://github.com/ravibpatel/AutoUpdater.NET) from 1.7.7 to 1.8.0. - [Release notes](https://github.com/ravibpatel/AutoUpdater.NET/releases) - [Commits](https://github.com/ravibpatel/AutoUpdater.NET/compare/v1.7.7...v1.8) --- updated-dependencies: - dependency-name: Autoupdater.NET.Official dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Bump YamlDotNet from 13.0.2 to 13.1.0 (#1542) * Bump shogo82148/actions-setup-perl from 1.20.2 to 1.21.0 Bumps [shogo82148/actions-setup-perl](https://github.com/shogo82148/actions-setup-perl) from 1.20.2 to 1.21.0. - [Release notes](https://github.com/shogo82148/actions-setup-perl/releases) - [Commits](https://github.com/shogo82148/actions-setup-perl/compare/v1.20.2...v1.21.0) --- updated-dependencies: - dependency-name: shogo82148/actions-setup-perl dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Bump NLog from 5.1.3 to 5.1.4 Bumps [NLog](https://github.com/NLog/NLog) from 5.1.3 to 5.1.4. - [Release notes](https://github.com/NLog/NLog/releases) - [Changelog](https://github.com/NLog/NLog/blob/dev/CHANGELOG.md) - [Commits](https://github.com/NLog/NLog/compare/v5.1.3...v5.1.4) --- updated-dependencies: - dependency-name: NLog dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Bump Autoupdater.NET.Official from 1.8.0 to 1.8.1 Bumps [Autoupdater.NET.Official](https://github.com/ravibpatel/AutoUpdater.NET) from 1.8.0 to 1.8.1. - [Release notes](https://github.com/ravibpatel/AutoUpdater.NET/releases) - [Commits](https://github.com/ravibpatel/AutoUpdater.NET/compare/v1.8...v1.8.1) --- updated-dependencies: - dependency-name: Autoupdater.NET.Official dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Bump Microsoft.NET.Test.Sdk from 17.5.0 to 17.6.0 Bumps [Microsoft.NET.Test.Sdk](https://github.com/microsoft/vstest) from 17.5.0 to 17.6.0. - [Release notes](https://github.com/microsoft/vstest/releases) - [Changelog](https://github.com/microsoft/vstest/blob/main/docs/releases.md) - [Commits](https://github.com/microsoft/vstest/compare/v17.5.0...v17.6.0) --- updated-dependencies: - dependency-name: Microsoft.NET.Test.Sdk dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] * Bump Autoupdater.NET.Official from 1.8.1 to 1.8.2 Bumps [Autoupdater.NET.Official](https://github.com/ravibpatel/AutoUpdater.NET) from 1.8.1 to 1.8.2. - [Release notes](https://github.com/ravibpatel/AutoUpdater.NET/releases) - [Commits](https://github.com/ravibpatel/AutoUpdater.NET/compare/v1.8.1...v1.8.2) --- updated-dependencies: - dependency-name: Autoupdater.NET.Official dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * v8.1.0-rc2 (#1534) Dependency and syntax updates, notably Scintilla, updates to plugin handling (WIP) * Bump Spectre.Console from 0.46.0 to 0.47.0 Bumps [Spectre.Console](https://github.com/spectreconsole/spectre.console) from 0.46.0 to 0.47.0. - [Release notes](https://github.com/spectreconsole/spectre.console/releases) - [Commits](https://github.com/spectreconsole/spectre.console/compare/0.46.0...0.47.0) --- updated-dependencies: - dependency-name: Spectre.Console dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Bump Scintilla.NET from 5.3.2.6 to 5.3.2.7 Bumps [Scintilla.NET](https://github.com/VPKSoft/ScintillaNET) from 5.3.2.6 to 5.3.2.7. - [Release notes](https://github.com/VPKSoft/ScintillaNET/releases) - [Commits](https://github.com/VPKSoft/ScintillaNET/compare/v.5.3.2.6...v.5.3.2.7) --- updated-dependencies: - dependency-name: Scintilla.NET dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Bump coverlet.collector from 3.2.0 to 6.0.0 Bumps [coverlet.collector](https://github.com/coverlet-coverage/coverlet) from 3.2.0 to 6.0.0. - [Release notes](https://github.com/coverlet-coverage/coverlet/releases) - [Commits](https://github.com/coverlet-coverage/coverlet/compare/v3.2.0...v6.0.0) --- updated-dependencies: - dependency-name: coverlet.collector dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] * Bump Terminal.Gui from 1.10.1 to 1.11.2 Bumps [Terminal.Gui](https://github.com/gui-cs/Terminal.Gui) from 1.10.1 to 1.11.2. - [Release notes](https://github.com/gui-cs/Terminal.Gui/releases) - [Commits](https://github.com/gui-cs/Terminal.Gui/compare/v1.10.1...v1.11.2) --- updated-dependencies: - dependency-name: Terminal.Gui dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Bump svenstaro/upload-release-action from 2.5.0 to 2.6.0 Bumps [svenstaro/upload-release-action](https://github.com/svenstaro/upload-release-action) from 2.5.0 to 2.6.0. - [Release notes](https://github.com/svenstaro/upload-release-action/releases) - [Changelog](https://github.com/svenstaro/upload-release-action/blob/master/CHANGELOG.md) - [Commits](https://github.com/svenstaro/upload-release-action/compare/2.5.0...2.6.0) --- updated-dependencies: - dependency-name: svenstaro/upload-release-action dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Bump Terminal.Gui from 1.11.2 to 1.12.1 Bumps [Terminal.Gui](https://github.com/gui-cs/Terminal.Gui) from 1.11.2 to 1.12.1. - [Release notes](https://github.com/gui-cs/Terminal.Gui/releases) - [Commits](https://github.com/gui-cs/Terminal.Gui/compare/v1.11.2...v1.12.1) --- updated-dependencies: - dependency-name: Terminal.Gui dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] * Bump NLog from 5.1.4 to 5.1.5 Bumps [NLog](https://github.com/NLog/NLog) from 5.1.4 to 5.1.5. - [Release notes](https://github.com/NLog/NLog/releases) - [Changelog](https://github.com/NLog/NLog/blob/dev/CHANGELOG.md) - [Commits](https://github.com/NLog/NLog/compare/v5.1.4...v5.1.5) --- updated-dependencies: - dependency-name: NLog dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Bump actions/setup-dotnet from 3.0.3 to 3.1.0 Bumps [actions/setup-dotnet](https://github.com/actions/setup-dotnet) from 3.0.3 to 3.1.0. - [Release notes](https://github.com/actions/setup-dotnet/releases) - [Commits](https://github.com/actions/setup-dotnet/compare/v3.0.3...v3.1.0) --- updated-dependencies: - dependency-name: actions/setup-dotnet dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Bump actions/setup-dotnet from 3.1.0 to 3.2.0 Bumps [actions/setup-dotnet](https://github.com/actions/setup-dotnet) from 3.1.0 to 3.2.0. - [Release notes](https://github.com/actions/setup-dotnet/releases) - [Commits](https://github.com/actions/setup-dotnet/compare/v3.1.0...v3.2.0) --- updated-dependencies: - dependency-name: actions/setup-dotnet dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * RC3 (#1548) * Remove old SafeDirectoryCatalog constructor, update XML doc, ensure all pre-loaded assemblies reflected in lookup * Syntax cleanup * Fix typo, mark AssemblyResolver class static since it is never instantiated * Enable PublishSingleFile for main GUI EXE now it works cleanly * Remove redundant resolver check for already-loaded assemblies * Fix typos, give more information on problem types * Move string-handling unit test out of db-specific test class, parameterise test cases * Stop documentation code trying to find docs for Moq fake classes * Streamline Github CI * Mark DatabaseTests non-parallel for nUnit * Remove unused usings * PermissionWindow test fixup, avoid confusion between UTC and local time * Merge MapsDirectlyToDatabaseTable into Rdmp.Core * Tidy tests, write CSV to streams * Add CHI unit tests * Fix CHANGELOG test to be flexible about paths, speed up misses on RecentHistoryOfControls add/update path * Rename the multiple Readme.md files to reduce confusion * Remove appveyor leftover file * Refactor into nuspec-free build * Streamline comment store loading, use libarchive for more flexible formats (7z) * Update Scintilla and Hunspell * Tidy multi-threaded catalogue unit tests * Use 'read committed' since snapshot is problematic to enable * Make UsefulStuff NonBatchQuery respect transactions when in use * Re-do RSA code avoiding XML assembly exceptions * Make MDFAttacher less eager to delete files and re-copy * XMLDoc fixup * Reinstate warning when MDFAttacher overwrites destination, fix up unit test accordingly * Make ToMemoryCheckNotifier get worst constant time not linear * Update RDMP versioning call * Update YamlDotNet to 13.0.2 * Remove disused pencil.cur, nhunspell DLLs * Stop UpdateMaxRowVer throwing exceptions querying non-existent versioning data * Make CommentStore ignore non-XML files * Make displayed version info consistent * Bump NLog from 5.1.1 to 5.1.2 * Fix MDF attacher test cleanup * Typo fixes; only set isolation on MSSQL transactions not other engines * Update build.yml * Update pack,push stages for nospec build * Fix syntax for setting Github variables * Add license info, 
fix name of UI package
* Fixup unit test exception specs
* Fix up prune implementation, add new entries to known files list
* Fix HIC.RDMP.Plugin.UI nupkg name
* Add Github headers for dead link checker per docs
* Skip checking RDMP source compare links, too many Github errors
* Warning fixups
* Namespace and Array.Empty cleanup
* Replace direct hex-RSA-encrypted string with base64(rsa encrypted aes key)+base64(aes encrypted string) - removing length limits on plaintext. Support reading existing ciphertext for backwards compatibility. (An illustrative sketch of this hybrid scheme is included at the end of this patch.)
* Rename RSA object, make non-static
* Update for Terminal.Gui Autocomplete API change
* Fallback crypto: use the really bad crypto if we have no key for now
* Update CommitInProgress.cs
* Remove LibHarmony thanks to Scintilla fix upstream
* Command type checking fixups
* Dependencies update
* Improve package list error reporting, remove disused Protocols package (only used for Oracle, and already imported transitively by FAnsiSql once we reach 3.1.0)
* HTTP fetcher update
* Remove duplicate inheritance
* Swallow file not found exceptions on wonky dependencies too
* Adjust iteration in type loader
* Update ThrowImmediatelyActivator.cs: ThrowImmediatelyActivator is not interactive, so mark it as such
* Update RowVerCache.cs - Get buffer sizing right when building hex strings
* Update DitaExtractorTests.cs - Tidy syntax, try to avoid quadratic-time search for name duplicates
* Refactor HTML clipboard handling, add unit test - No more random numbers and string searching!
* Lazy without wrapping constants: Give Lazy constants directly instead of capturing as Lambdas
* Server GC
* Update AggregateGraphUI.cs
* Bump NLog from 5.1.5 to 5.2.0 Bumps [NLog](https://github.com/NLog/NLog) from 5.1.5 to 5.2.0. - [Release notes](https://github.com/NLog/NLog/releases) - [Changelog](https://github.com/NLog/NLog/blob/dev/CHANGELOG.md) - [Commits](https://github.com/NLog/NLog/compare/v5.1.5...v5.2.0) --- updated-dependencies: - dependency-name: NLog dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot]
* Bump NUnit3TestAdapter from 4.4.2 to 4.5.0 Bumps [NUnit3TestAdapter](https://github.com/nunit/nunit3-vs-adapter) from 4.4.2 to 4.5.0. - [Release notes](https://github.com/nunit/nunit3-vs-adapter/releases) - [Commits](https://github.com/nunit/nunit3-vs-adapter/compare/V4.4.2...V4.5.0) --- updated-dependencies: - dependency-name: NUnit3TestAdapter dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot]
* Bump svenstaro/upload-release-action from 2.6.0 to 2.6.1 Bumps [svenstaro/upload-release-action](https://github.com/svenstaro/upload-release-action) from 2.6.0 to 2.6.1. - [Release notes](https://github.com/svenstaro/upload-release-action/releases) - [Changelog](https://github.com/svenstaro/upload-release-action/blob/master/CHANGELOG.md) - [Commits](https://github.com/svenstaro/upload-release-action/compare/2.6.0...2.6.1) --- updated-dependencies: - dependency-name: svenstaro/upload-release-action dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot]
* Bump HIC.FAnsiSql from 3.0.1 to 3.1.0 (#1559)
* Bump Microsoft.NET.Test.Sdk from 17.6.0 to 17.6.1 Bumps [Microsoft.NET.Test.Sdk](https://github.com/microsoft/vstest) from 17.6.0 to 17.6.1.
- [Release notes](https://github.com/microsoft/vstest/releases) - [Changelog](https://github.com/microsoft/vstest/blob/main/docs/releases.md) - [Commits](https://github.com/microsoft/vstest/compare/v17.6.0...v17.6.1) --- updated-dependencies: - dependency-name: Microsoft.NET.Test.Sdk dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Bump shogo82148/actions-setup-perl from 1.21.0 to 1.21.1 Bumps [shogo82148/actions-setup-perl](https://github.com/shogo82148/actions-setup-perl) from 1.21.0 to 1.21.1. - [Release notes](https://github.com/shogo82148/actions-setup-perl/releases) - [Commits](https://github.com/shogo82148/actions-setup-perl/compare/v1.21.0...v1.21.1) --- updated-dependencies: - dependency-name: shogo82148/actions-setup-perl dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Bump Microsoft.NET.Test.Sdk from 17.6.1 to 17.6.2 Bumps [Microsoft.NET.Test.Sdk](https://github.com/microsoft/vstest) from 17.6.1 to 17.6.2. - [Release notes](https://github.com/microsoft/vstest/releases) - [Changelog](https://github.com/microsoft/vstest/blob/main/docs/releases.md) - [Commits](https://github.com/microsoft/vstest/compare/v17.6.1...v17.6.2) --- updated-dependencies: - dependency-name: Microsoft.NET.Test.Sdk dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Bump shogo82148/actions-setup-perl from 1.21.1 to 1.21.2 Bumps [shogo82148/actions-setup-perl](https://github.com/shogo82148/actions-setup-perl) from 1.21.1 to 1.21.2. - [Release notes](https://github.com/shogo82148/actions-setup-perl/releases) - [Commits](https://github.com/shogo82148/actions-setup-perl/compare/v1.21.1...v1.21.2) --- updated-dependencies: - dependency-name: shogo82148/actions-setup-perl dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Bump Autoupdater.NET.Official from 1.8.2 to 1.8.3 Bumps [Autoupdater.NET.Official](https://github.com/ravibpatel/AutoUpdater.NET) from 1.8.2 to 1.8.3. - [Release notes](https://github.com/ravibpatel/AutoUpdater.NET/releases) - [Commits](https://github.com/ravibpatel/AutoUpdater.NET/compare/v1.8.2...v1.8.3) --- updated-dependencies: - dependency-name: Autoupdater.NET.Official dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Bump Microsoft.NET.Test.Sdk from 17.6.2 to 17.6.3 Bumps [Microsoft.NET.Test.Sdk](https://github.com/microsoft/vstest) from 17.6.2 to 17.6.3. - [Release notes](https://github.com/microsoft/vstest/releases) - [Changelog](https://github.com/microsoft/vstest/blob/main/docs/releases.md) - [Commits](https://github.com/microsoft/vstest/compare/v17.6.2...v17.6.3) --- updated-dependencies: - dependency-name: Microsoft.NET.Test.Sdk dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Bump shogo82148/actions-setup-perl from 1.21.2 to 1.21.3 Bumps [shogo82148/actions-setup-perl](https://github.com/shogo82148/actions-setup-perl) from 1.21.2 to 1.21.3. - [Release notes](https://github.com/shogo82148/actions-setup-perl/releases) - [Commits](https://github.com/shogo82148/actions-setup-perl/compare/v1.21.2...v1.21.3) --- updated-dependencies: - dependency-name: shogo82148/actions-setup-perl dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] * Bump YamlDotNet from 13.1.0 to 13.1.1 Bumps [YamlDotNet](https://github.com/aaubry/YamlDotNet) from 13.1.0 to 13.1.1. - [Release notes](https://github.com/aaubry/YamlDotNet/releases) - [Commits](https://github.com/aaubry/YamlDotNet/compare/v13.1.0...v13.1.1) --- updated-dependencies: - dependency-name: YamlDotNet dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Bump NLog from 5.2.0 to 5.2.1 Bumps [NLog](https://github.com/NLog/NLog) from 5.2.0 to 5.2.1. - [Release notes](https://github.com/NLog/NLog/releases) - [Changelog](https://github.com/NLog/NLog/blob/dev/CHANGELOG.md) - [Commits](https://github.com/NLog/NLog/compare/v5.2.0...v5.2.1) --- updated-dependencies: - dependency-name: NLog dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Bump LibArchive.Net from 0.1.3 to 0.1.4 Bumps LibArchive.Net from 0.1.3 to 0.1.4. --- updated-dependencies: - dependency-name: LibArchive.Net dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Bump shogo82148/actions-setup-perl from 1.21.3 to 1.22.0 Bumps [shogo82148/actions-setup-perl](https://github.com/shogo82148/actions-setup-perl) from 1.21.3 to 1.22.0. - [Release notes](https://github.com/shogo82148/actions-setup-perl/releases) - [Commits](https://github.com/shogo82148/actions-setup-perl/compare/v1.21.3...v1.22.0) --- updated-dependencies: - dependency-name: shogo82148/actions-setup-perl dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Bump NLog from 5.2.1 to 5.2.2 Bumps [NLog](https://github.com/NLog/NLog) from 5.2.1 to 5.2.2. - [Release notes](https://github.com/NLog/NLog/releases) - [Changelog](https://github.com/NLog/NLog/blob/dev/CHANGELOG.md) - [Commits](https://github.com/NLog/NLog/compare/v5.2.1...v5.2.2) --- updated-dependencies: - dependency-name: NLog dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Bump actions/setup-node from 3.6.0 to 3.7.0 Bumps [actions/setup-node](https://github.com/actions/setup-node) from 3.6.0 to 3.7.0. - [Release notes](https://github.com/actions/setup-node/releases) - [Commits](https://github.com/actions/setup-node/compare/v3.6.0...v3.7.0) --- updated-dependencies: - dependency-name: actions/setup-node dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Bump NunitXml.TestLogger from 3.0.131 to 3.1.15 Bumps [NunitXml.TestLogger](https://github.com/spekt/nunit.testlogger) from 3.0.131 to 3.1.15. - [Release notes](https://github.com/spekt/nunit.testlogger/releases) - [Changelog](https://github.com/spekt/nunit.testlogger/blob/master/CHANGELOG.md) - [Commits](https://github.com/spekt/nunit.testlogger/compare/v3.0.131...v3.1.15) --- updated-dependencies: - dependency-name: NunitXml.TestLogger dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] * Syntax cleanup (#1577) * [auto] Accessibility modifiers * [auto] Trailing commas * [auto] Redundant parentheses * [auto] Redundant qualifiers * [auto] Built in types * Update PluginCohortCompilerFactory.cs Missed redundant qualifier * [auto] Nameof not bare string for variable names * [auto] Type specifiers * [auto] Nullprop * [auto] Range indexer * Update DatabaseTests.cs Need to load FAnsi before trying to change settings in it, not after! * Fix typo in 'committed', use collection inits * Docs fix Match example code in Markdown with actual code * [auto] Remove redundant brackets on object construction calls * [auto] Remove redundant lambda parentheses * [auto] Semicolon not needed after Enum * Update ICheckable.cs Typos * [auto] Redundant array creation syntax * [auto] Redundant array parens * [auto] Redundant else * [auto] Static methods * Remove disused test bits * [auto] TryGetValue Avoid double-lookups * [auto] TryAdd instead of Contains/Add * [auto] Stray semicolons * Indentation fixes * [auto] Inline out variable declarations * [auto] Use pattern matching instead of cast-nulls * [auto] Pattern matching 2 * [auto] Static invocation fixes * [auto] Object initialisers * Update TestDatabasesSettings.cs Yaml fixup * Update SelectedDataSetsCheckerTests.cs TestDB prefix fix * Update DoubleClickAndDragDrop.md Docs fix * Update DelimitedFlatFileAttacher.cs Circular dependency - tried to copy Culture from itself * Update FlatFileAttacher.cs Fix typo * Annotation cleanup Remove disused, add Usage annotation where needed * Statics * .Net 7 * Adjust CodeQL build step * Put TargetFramework back in csproj since sharing fails * [auto] Statics * Array.Empty instead of new * Avoid counting when not needed * [auto] StringBuilder Append char * Container tuning * [auto] String.Contains char * [auto] Reference comparison cleanup * Update UITests.cs Re-do grotty reflection based poking inside WinForms innards for error finding * Clean up persistence strings, UI list handling * Inline variable declarations * Update UITests.cs Fix up ErrorProvider retrieval * [auto] Variable init * [auto] MaxBy/MinBy instead of order+first * [auto] Null coalesce * [auto] EventArgs.Empty * [auto] Variable scoping * More efficient delete and key handling * [auto] Variable scoping and whitespace * [auto] LINQ and ternaries * Fix indent * Ternaries and whitespace * [auto] Nameof, with, interpolations, whitespace * [Auto] Switch expressions * Regex, simplify news * [auto] Pattern matching, double lookups * Fix double-lookups * Update ConsoleGuiActivator.cs Avoid using FilePaths as Enumerable * Update CohortIdentificationTaskExecution.cs Adjust GC finalize * Update UITimeoutAttribute.cs Adjust UI timeout for GUI tests * switch expression * Timeouts, library import * Update Rdmp.UI.Tests.csproj Allow unsafe blocks for static library import code * [auto] Switch expressions * Update Annotations.cs Flip order of enums so Default isn't flagged as a "duplicate" * Update RDMPConcept.cs Fix copyright line * Make colour values internal not public --------- Co-authored-by: James A Sutherland <> * Bump Terminal.Gui from 1.12.1 to 1.13.1 Bumps [Terminal.Gui](https://github.com/gui-cs/Terminal.Gui) from 1.12.1 to 1.13.1. - [Release notes](https://github.com/gui-cs/Terminal.Gui/releases) - [Commits](https://github.com/gui-cs/Terminal.Gui/compare/v1.12.1...v1.13.1) --- updated-dependencies: - dependency-name: Terminal.Gui dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] * Bump shogo82148/actions-setup-perl from 1.22.0 to 1.23.0 Bumps [shogo82148/actions-setup-perl](https://github.com/shogo82148/actions-setup-perl) from 1.22.0 to 1.23.0. - [Release notes](https://github.com/shogo82148/actions-setup-perl/releases) - [Commits](https://github.com/shogo82148/actions-setup-perl/compare/v1.22.0...v1.23.0) --- updated-dependencies: - dependency-name: shogo82148/actions-setup-perl dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Bump Terminal.Gui from 1.13.1 to 1.13.4 Bumps [Terminal.Gui](https://github.com/gui-cs/Terminal.Gui) from 1.13.1 to 1.13.4. - [Release notes](https://github.com/gui-cs/Terminal.Gui/releases) - [Commits](https://github.com/gui-cs/Terminal.Gui/compare/v1.13.1...v1.13.4) --- updated-dependencies: - dependency-name: Terminal.Gui dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Bump Terminal.Gui from 1.13.4 to 1.13.5 Bumps [Terminal.Gui](https://github.com/gui-cs/Terminal.Gui) from 1.13.4 to 1.13.5. - [Release notes](https://github.com/gui-cs/Terminal.Gui/releases) - [Commits](https://github.com/gui-cs/Terminal.Gui/compare/v1.13.4...v1.13.5) --- updated-dependencies: - dependency-name: Terminal.Gui dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Bump svenstaro/upload-release-action from 2.6.1 to 2.7.0 Bumps [svenstaro/upload-release-action](https://github.com/svenstaro/upload-release-action) from 2.6.1 to 2.7.0. - [Release notes](https://github.com/svenstaro/upload-release-action/releases) - [Changelog](https://github.com/svenstaro/upload-release-action/blob/master/CHANGELOG.md) - [Commits](https://github.com/svenstaro/upload-release-action/compare/2.6.1...2.7.0) --- updated-dependencies: - dependency-name: svenstaro/upload-release-action dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Bump NLog from 5.2.2 to 5.2.3 Bumps [NLog](https://github.com/NLog/NLog) from 5.2.2 to 5.2.3. - [Release notes](https://github.com/NLog/NLog/releases) - [Changelog](https://github.com/NLog/NLog/blob/dev/CHANGELOG.md) - [Commits](https://github.com/NLog/NLog/compare/v5.2.2...v5.2.3) --- updated-dependencies: - dependency-name: NLog dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Bump Microsoft.NET.Test.Sdk from 17.6.3 to 17.7.0 Bumps [Microsoft.NET.Test.Sdk](https://github.com/microsoft/vstest) from 17.6.3 to 17.7.0. - [Release notes](https://github.com/microsoft/vstest/releases) - [Changelog](https://github.com/microsoft/vstest/blob/main/docs/releases.md) - [Commits](https://github.com/microsoft/vstest/compare/v17.6.3...v17.7.0) --- updated-dependencies: - dependency-name: Microsoft.NET.Test.Sdk dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Feature/whitespaceetc (#1583) * Automated formatting and syntax clean * Match docs to source; whitespace unit test cleanup * Bump Moq from 4.18.4 to 4.20.1 Bumps [Moq](https://github.com/moq/moq) from 4.18.4 to 4.20.1. 
- [Release notes](https://github.com/moq/moq/releases) - [Changelog](https://github.com/moq/moq/blob/main/CHANGELOG.md) - [Commits](https://github.com/moq/moq/compare/v4.18.4...v4.20.1) --- updated-dependencies: - dependency-name: Moq dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot]
* Bump shogo82148/actions-setup-perl from 1.23.0 to 1.23.1 Bumps [shogo82148/actions-setup-perl](https://github.com/shogo82148/actions-setup-perl) from 1.23.0 to 1.23.1. - [Release notes](https://github.com/shogo82148/actions-setup-perl/releases) - [Commits](https://github.com/shogo82148/actions-setup-perl/compare/v1.23.0...v1.23.1) --- updated-dependencies: - dependency-name: shogo82148/actions-setup-perl dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot]
* Feature/rdmp 13 removable default logging server (#1589)
* make external server removable
* no longer throwing out errors
* add catalogue fix
* added todo reminder
* working startup option
* remove fk drop
* fix creation issue
* add todo
* working delete
* invert checkbox ui
* add throw for unexpected no logging server
* rename variables to match descriptions
* fix bad merge
* fix autosave issue
* Bump Moq from 4.20.1 to 4.20.2 Bumps [Moq](https://github.com/moq/moq) from 4.20.1 to 4.20.2. - [Release notes](https://github.com/moq/moq/releases) - [Changelog](https://github.com/moq/moq/blob/main/CHANGELOG.md) - [Commits](https://github.com/moq/moq/compare/v4.20.1...v4.20.2) --- updated-dependencies: - dependency-name: Moq dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot]
* Add Clear User Settings Functionality (RDMP-7/GH#749) (#1586)
* Add Clear User Settings button, command and unit test
* Update SelfCertifyingDataAccessPoint.cs - Fix typo --------- Co-authored-by: jas88
* Update UserSettings.cs Merge fixup
* Bump Moq from 4.20.2 to 4.20.69 Bumps [Moq](https://github.com/moq/moq) from 4.20.2 to 4.20.69. - [Release notes](https://github.com/moq/moq/releases) - [Changelog](https://github.com/moq/moq/blob/main/CHANGELOG.md) - [Commits](https://github.com/moq/moq/compare/v4.20.2...v4.20.69) --- updated-dependencies: - dependency-name: Moq dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot]
* Bump actions/setup-node from 3.7.0 to 3.8.0 Bumps [actions/setup-node](https://github.com/actions/setup-node) from 3.7.0 to 3.8.0. - [Release notes](https://github.com/actions/setup-node/releases) - [Commits](https://github.com/actions/setup-node/compare/v3.7.0...v3.8.0) --- updated-dependencies: - dependency-name: actions/setup-node dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot]
* Bump YamlDotNet from 13.1.1 to 13.2.0 Bumps [YamlDotNet](https://github.com/aaubry/YamlDotNet) from 13.1.1 to 13.2.0. - [Release notes](https://github.com/aaubry/YamlDotNet/releases) - [Commits](https://github.com/aaubry/YamlDotNet/compare/v13.1.1...v13.2.0) --- updated-dependencies: - dependency-name: YamlDotNet dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot]
* Bump SixLabors.ImageSharp.Drawing from 1.0.0-beta15 to 1.0.0 Bumps [SixLabors.ImageSharp.Drawing](https://github.com/SixLabors/ImageSharp.Drawing) from 1.0.0-beta15 to 1.0.0. - [Release notes](https://github.com/SixLabors/ImageSharp.Drawing/releases) - [Commits](https://github.com/SixLabors/ImageSharp.Drawing/compare/v1.0.0-beta15...v1.0.0) --- updated-dependencies: - dependency-name: SixLabors.ImageSharp.Drawing dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot]
* Bump actions/setup-node from 3.8.0 to 3.8.1 Bumps [actions/setup-node](https://github.com/actions/setup-node) from 3.8.0 to 3.8.1. - [Release notes](https://github.com/actions/setup-node/releases) - [Commits](https://github.com/actions/setup-node/compare/v3.8.0...v3.8.1) --- updated-dependencies: - dependency-name: actions/setup-node dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot]
* Bump Microsoft.NET.Test.Sdk from 17.7.0 to 17.7.1 Bumps [Microsoft.NET.Test.Sdk](https://github.com/microsoft/vstest) from 17.7.0 to 17.7.1. - [Release notes](https://github.com/microsoft/vstest/releases) - [Changelog](https://github.com/microsoft/vstest/blob/main/docs/releases.md) - [Commits](https://github.com/microsoft/vstest/compare/v17.7.0...v17.7.1) --- updated-dependencies: - dependency-name: Microsoft.NET.Test.Sdk dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot]
* Bump Autoupdater.NET.Official from 1.8.3 to 1.8.4 Bumps [Autoupdater.NET.Official](https://github.com/ravibpatel/AutoUpdater.NET) from 1.8.3 to 1.8.4. - [Release notes](https://github.com/ravibpatel/AutoUpdater.NET/releases) - [Commits](https://github.com/ravibpatel/AutoUpdater.NET/compare/v1.8.3...v1.8.4) --- updated-dependencies: - dependency-name: Autoupdater.NET.Official dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot]
* Redo ProgressLog handling (#1603)
* Update DataLoadInfo.cs - Implement worker thread for writing ProgressLog entries in background (a generic sketch of this producer/consumer shape is included at the end of this patch) --------- Co-authored-by: James A Sutherland <> Co-authored-by: James Friel
* Bump VPKSoft.ScintillaLexers.NET from 1.1.15 to 1.1.16 Bumps [VPKSoft.ScintillaLexers.NET](https://github.com/VPKSoft/ScintillaLexers) from 1.1.15 to 1.1.16. - [Release notes](https://github.com/VPKSoft/ScintillaLexers/releases) - [Commits](https://github.com/VPKSoft/ScintillaLexers/commits) --- updated-dependencies: - dependency-name: VPKSoft.ScintillaLexers.NET dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot]
* Update LoadDirectory.cs Fix typo
* Feature/rdmp 18 remove moq (#1602)
* start to update tests
* compiling tests
* tidy up unused
* attempt to fix some tests
* update tests
* update ui tests
* remove moq
* fix up tests
* fix up test
* fix test
* update tests
* tidy up code
* backout incorrect change --------- Co-authored-by: James A Sutherland
* Feature/RDMP-28 Add BeginLoadData & EndLoadData to DataTables (#1598) (see the DataTable loading sketch at the end of this patch)
* partial fix
* add row peeker update
* fix up whitespace
* add a lot of data begin loads
* more data load
* fix typo
* RDMP-16 Add Max Message Length Check to Logging Notifications (#1595)
* add message length check
* fix tabbing
* fix codescan alert
* update max length and add env config
* attempt to allow RDMP_
* fix typo
* add codeql update
* Update ToLoggingDatabaseDataLoadEventListener.cs Adjust string trimming logic, handle ultra-short limits, only parse the environment variable once instead of per logging, avoid double-lookup in OnProgress handler (a sketch of this trimming logic is included at the end of this patch) --------- Co-authored-by: James A Sutherland
* Bump YamlDotNet from 13.2.0 to 13.3.1 (#1611) Bumps [YamlDotNet](https://github.com/aaubry/YamlDotNet) from 13.2.0 to 13.3.1. - [Release notes](https://github.com/aaubry/YamlDotNet/releases) - [Commits](https://github.com/aaubry/YamlDotNet/compare/v13.2.0...v13.3.1) --- updated-dependencies: - dependency-name: YamlDotNet dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
* Bump Microsoft.NET.Test.Sdk from 17.7.1 to 17.7.2 Bumps [Microsoft.NET.Test.Sdk](https://github.com/microsoft/vstest) from 17.7.1 to 17.7.2. - [Release notes](https://github.com/microsoft/vstest/releases) - [Changelog](https://github.com/microsoft/vstest/blob/main/docs/releases.md) - [Commits](https://github.com/microsoft/vstest/compare/v17.7.1...v17.7.2) --- updated-dependencies: - dependency-name: Microsoft.NET.Test.Sdk dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot]
* Bump SixLabors.ImageSharp from 3.0.1 to 3.0.2 (#1616) Bumps [SixLabors.ImageSharp](https://github.com/SixLabors/ImageSharp) from 3.0.1 to 3.0.2. - [Release notes](https://github.com/SixLabors/ImageSharp/releases) - [Commits](https://github.com/SixLabors/ImageSharp/compare/v3.0.1...v3.0.2) --- updated-dependencies: - dependency-name: SixLabors.ImageSharp dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
* Bump Terminal.Gui from 1.13.5 to 1.14.0 (#1615) Bumps [Terminal.Gui](https://github.com/gui-cs/Terminal.Gui) from 1.13.5 to 1.14.0. - [Release notes](https://github.com/gui-cs/Terminal.Gui/releases) - [Commits](https://github.com/gui-cs/Terminal.Gui/compare/v1.13.5...v1.14.0) --- updated-dependencies: - dependency-name: Terminal.Gui dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
* Bump NLog from 5.2.3 to 5.2.4 Bumps [NLog](https://github.com/NLog/NLog) from 5.2.3 to 5.2.4.
- [Release notes](https://github.com/NLog/NLog/releases) - [Changelog](https://github.com/NLog/NLog/blob/dev/CHANGELOG.md) - [Commits](https://github.com/NLog/NLog/compare/v5.2.3...v5.2.4) --- updated-dependencies: - dependency-name: NLog dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Bump Scintilla.NET from 5.3.2.7 to 5.3.2.9 Bumps [Scintilla.NET](https://github.com/VPKSoft/ScintillaNET) from 5.3.2.7 to 5.3.2.9. - [Release notes](https://github.com/VPKSoft/ScintillaNET/releases) - [Commits](https://github.com/VPKSoft/ScintillaNET/compare/v.5.3.2.7...v.5.3.2.9) --- updated-dependencies: - dependency-name: Scintilla.NET dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * Bump crazy-max/ghaction-chocolatey from 2 to 3 Bumps [crazy-max/ghaction-chocolatey](https://github.com/crazy-max/ghaction-chocolatey) from 2 to 3. - [Release notes](https://github.com/crazy-max/ghaction-chocolatey/releases) - [Commits](https://github.com/crazy-max/ghaction-chocolatey/compare/v2...v3) --- updated-dependencies: - dependency-name: crazy-max/ghaction-chocolatey dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] * Bump SixLabors.ImageSharp.Drawing from 1.0.0 to 2.0.0 Bumps [SixLabors.ImageSharp.Drawing](https://github.com/SixLabors/ImageSharp.Drawing) from 1.0.0 to 2.0.0. - [Release notes](https://github.com/SixLabors/ImageSharp.Drawing/releases) - [Commits](https://github.com/SixLabors/ImageSharp.Drawing/compare/v1.0.0...v2.0.0) --- updated-dependencies: - dependency-name: SixLabors.ImageSharp.Drawing dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] * Bump NSubstitute from 5.0.0 to 5.1.0 Bumps NSubstitute from 5.0.0 to 5.1.0. --- updated-dependencies: - dependency-name: NSubstitute dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Bump actions/checkout from 3 to 4 Bumps [actions/checkout](https://github.com/actions/checkout) from 3 to 4. - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/v3...v4) --- updated-dependencies: - dependency-name: actions/checkout dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] * Feature/rc4 (#1570) * Syntax tidying * Dependency updates * Event handling singletons (ThrowImmediately and co) --------- Signed-off-by: dependabot[bot] Co-authored-by: James A Sutherland <> Co-authored-by: James Friel Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> * 8.1.0 changelog (#1627) * 8.1.0 changelog --------- Co-authored-by: James A Sutherland <> * Bump HIC.FAnsiSql from 3.1.0 to 3.1.1 (#1621) * Bump Terminal.Gui from 1.13.5 to 1.14.0 Bumps [Terminal.Gui](https://github.com/gui-cs/Terminal.Gui) from 1.13.5 to 1.14.0. - [Release notes](https://github.com/gui-cs/Terminal.Gui/releases) - [Commits](https://github.com/gui-cs/Terminal.Gui/compare/v1.13.5...v1.14.0) --- updated-dependencies: - dependency-name: Terminal.Gui dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] * Bump MongoDB.Driver from 2.20.0 to 2.21.0 Bumps [MongoDB.Driver](https://github.com/mongodb/mongo-csharp-driver) from 2.20.0 to 2.21.0. - [Release notes](https://github.com/mongodb/mongo-csharp-driver/releases) - [Commits](https://github.com/mongodb/mongo-csharp-driver/compare/v2.20.0...v2.21.0) --- updated-dependencies: - dependency-name: MongoDB.Driver dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Bump YamlDotNet from 13.1.1 to 13.3.1 Bumps [YamlDotNet](https://github.com/aaubry/YamlDotNet) from 13.1.1 to 13.3.1. - [Release notes](https://github.com/aaubry/YamlDotNet/releases) - [Commits](https://github.com/aaubry/YamlDotNet/compare/v13.1.1...v13.3.1) --- updated-dependencies: - dependency-name: YamlDotNet dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Fix logging server lookup logic (#1629) Co-authored-by: James A Sutherland <> --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: James A Sutherland <> Co-authored-by: James A Sutherland Co-authored-by: LGTM Migrator Co-authored-by: malmas001 <110163841+malmas001@users.noreply.github.com> --- .github/workflows/build.yml | 120 +- .github/workflows/codeql.yml | 41 + .github/workflows/docker.yml | 4 +- .github/workflows/links.yml | 2 +- .nuget/packages.config | 1 + AnalysisReport.sarif | 500 + .../Menus/MenuItems/DataExportMenu.cs | 38 +- .../MenuItems/DisableTutorialsMenuItem.cs | 47 +- .../Menus/MenuItems/LaunchTutorialMenuItem.cs | 78 +- .../Menus/MenuItems/ResetTutorialsMenuItem.cs | 47 +- .../Menus/RDMPTopMenuStripUI.cs | 857 +- .../NLog.template.config | 14 +- .../ResearchDataManagementPlatform/Program.cs | 92 +- .../Properties/AssemblyInfo.cs | 8 +- .../RDMP.nuspec | 15 - .../RDMPMainForm.cs | 435 +- .../ResearchDataManagementPlatform.csproj | 42 +- .../Theme/ThemeExtender.cs | 69 +- .../Theme/Themes.cs | 104 +- .../Updates/Asset.cs | 39 +- .../Updates/Author.cs | 49 +- .../Updates/GHRelease.cs | 49 +- .../Updates/GithubReleases.cs | 21 +- .../Updates/Uploader.cs | 49 +- .../WindowManagement/ActivateItems.cs | 1504 +- .../WindowManagement/CollectionNavigation.cs | 73 +- .../Persistence/DeserializeInstruction.cs | 53 +- .../PersistableObjectCollectionDockContent.cs | 140 +- ...sistableSingleDatabaseObjectDockContent.cs | 126 +- .../PersistableToolboxDockContent.cs | 82 +- .../Persistence/PersistenceDecisionFactory.cs | 218 +- .../Persistence/RDMPSingleControlTab.cs | 141 +- .../RDMPCollectionCreatedEventHandler.cs | 17 +- .../RDMPCollectionCreatedEventHandlerArgs.cs | 21 +- .../CustomFloatWindow.cs | 60 +- .../CustomFloatWindowFactory.cs | 26 +- .../WindowManagement/HomePane/HomeBoxUI.cs | 161 +- .../WindowManagement/HomePane/HomeUI.cs | 113 +- .../WindowManagement/INavigation.cs | 21 +- .../WindowManagement/Licenses/License.cs | 88 +- .../WindowManagement/Licenses/LicenseUI.cs | 98 +- .../WindowManagement/TabChangedHandler.cs | 17 +- .../WindowManagement/TabNavigation.cs | 65 +- .../RDMPSingleControlTabMenu.cs | 66 +- .../WindowManagement/TopBar/RDMPTaskBarUI.cs | 433 +- .../WindowManagement/UIObjectConstructor.cs | 19 +- .../WindowArranging/WindowArranger.cs | 90 +- .../WindowManagement/WindowFactory.cs | 201 +- .../WindowManagement/WindowManager.cs | 724 +- .../publish.bat | 74 - CHANGELOG.md | 20 +- .../CreatingANewCollectionTreeNode.md | 61 +- .../CreatingANewRightClickMenu.md | 2 
+- .../CodeTutorials/DoubleClickAndDragDrop.md | 29 +- Documentation/CodeTutorials/Packages.md | 92 +- Documentation/CodeTutorials/PluginWriting.md | 14 +- HIC.DataManagementPlatform.sln | 200 +- HIC.DataManagementPlatform.sln.DotSettings | 17 +- LIBRARYLICENSES | 2 +- Plugins/Plugin.Test/Plugin.Test.csproj | 26 - Plugins/Plugin.Test/Plugin.Test.nuspec | 59 - Plugins/Plugin.UI/Plugin.UI.csproj | 26 - Plugins/Plugin.UI/Plugin.UI.nuspec | 62 - Plugins/Plugin/Plugin.csproj | 26 - Plugins/Plugin/Plugin.nuspec | 50 - .../Caching/Integration/CachingHostTests.cs | 204 +- .../Integration/CustomDateCachingTests.cs | 290 +- .../Caching/Unit/PipelineExecutionTests.cs | 154 +- .../Unit/TestIFileDataFlowComponents.cs | 124 +- Rdmp.Core.Tests/Caching/Unit/ZipTests.cs | 98 +- .../CohortCommitting/CommitCohortExample.cs | 120 +- .../CreateNewCohortDatabaseWizardTests.cs | 396 +- .../AggregateFilterPublishingTests.cs | 337 +- .../CohortCompilerRunnerTests.cs | 139 +- .../CohortCreation/CohortCompilerTests.cs | 301 +- .../CohortContainerAndCloningTests.cs | 331 +- ...tIdentificationConfigurationMergerTests.cs | 235 +- .../CohortIdentificationTests.cs | 107 +- .../CohortMandatoryFilterImportingTests.cs | 338 +- .../PluginCohortCompilerTests.cs | 305 +- .../CohortCompilerCacheJoinableTest.cs | 235 +- .../QueryTests/CohortQueryBuilderTests.cs | 918 +- ...lderTestsInvolvingTableValuedParameters.cs | 179 +- .../CohortQueryBuilderWithCacheTests.cs | 138 +- .../CohortSummaryQueryBuilderTests.cs | 362 +- .../JoinableCohortConfigurationTests.cs | 752 +- .../SimpleCohortIdentificationTests.cs | 143 +- .../CommandExecution/AxisAndPivotCLITests.cs | 111 +- .../CommandExecution/CommandCliTests.cs | 83 +- .../CommandExecution/CommandInvokerTests.cs | 140 +- ...xecuteCommandAddNewFilterContainerTests.cs | 95 +- ...ExecuteCommandAddPipelineComponentTests.cs | 97 +- ...ecuteCommandAlterTableMakeDistinctTests.cs | 66 +- .../ExecuteCommandConfirmLogsTests.cs | 266 +- ...eCommandCreateNewDataLoadDirectoryTests.cs | 63 +- .../ExecuteCommandCreateNewFilterCliTests.cs | 87 +- .../ExecuteCommandDeleteTestsCli.cs | 150 +- .../ExecuteCommandDeprecateTests.cs | 21 +- .../ExecuteCommandListTests.cs | 95 +- ...ExecuteCommandRefreshBrokenCohortsTests.cs | 94 +- .../ExecuteCommandReplacedByTests.cs | 105 +- .../ExecuteCommandSetArgumentTests.cs | 347 +- .../ExecuteCommandSetExtendedPropertyTests.cs | 83 +- ...cuteCommandSetExtractionIdentifierTests.cs | 111 +- .../ExecuteCommandSimilarTests.cs | 103 +- .../TestCommandsAreSupported.cs | 313 +- ...mmandAssociateCatalogueWithLoadMetadata.cs | 39 +- .../TestExecuteCommandClearUserSettings.cs | 34 + .../TestExecuteCommandDescribe.cs | 36 +- .../TestExecuteCommandDescribeCommand.cs | 90 +- ...ExecuteCommandImportFilterContainerTree.cs | 262 +- .../TestExecuteCommandImportTableInfo.cs | 64 +- .../TestExecuteCommandNewObject.cs | 88 +- .../CommandExecution/TestExecuteCommandSet.cs | 97 +- .../TestExecuteCommandSetUserSetting.cs | 94 +- .../CommandExecution/TestStartup.cs | 21 +- .../CommandLine/AbstractBaseRunnerTests.cs | 311 +- .../AutomationLoopTests/EndToEndCacheTest.cs | 142 +- .../EndToEndDLECacheTest.cs | 105 +- .../AutomationLoopTests/EndToEndDLETest.cs | 74 +- .../Reading/TestDataCacheReader.cs | 61 +- .../CommandLineObjectPickerTests.cs | 469 +- .../ExampleDatasetsCreationTests.cs | 54 +- .../Interactive/ConsoleInputManagerTests.cs | 26 +- .../CommandLine/NewObjectPoolTests.cs | 67 +- Rdmp.Core.Tests/CommandLine/PickTableTests.cs | 43 +- .../CommandLine/RdmpScriptTests.cs | 139 +- 
Rdmp.Core.Tests/CommentStoreTests.cs | 186 +- .../Anonymisation/ANOMigrationTests.cs | 175 +- .../ANOStoreFunctionalityTests.cs | 114 +- .../Curation/Anonymisation/ANOTableTests.cs | 492 +- ...orwardEngineerANOCatalogueTwoTableTests.cs | 252 +- .../IdentifierDumpFunctionalityTests.cs | 598 +- .../BasicParameterUseTests.cs | 156 +- Rdmp.Core.Tests/Curation/DublinCoreTests.cs | 200 +- .../Curation/ExtendedPropertyTests.cs | 42 +- .../ImportTests/GatherAndShareTests.cs | 532 +- .../Curation/ImportTests/PluginClassTests.cs | 211 +- .../ImportTests/ShareLoadMetadataTests.cs | 298 +- .../ImportTests/TestImportingAnObject.cs | 267 +- .../Integration/AllKeywordsDescribedTest.cs | 200 +- .../ArgumentTests/ArgumentTypeTests.cs | 79 +- .../ArgumentTests/ProcessTaskArgumentTests.cs | 591 +- .../ArgumentTests/ProcessTaskTests.cs | 218 +- .../ArgumentTests/TestArgumentedClass.cs | 11 +- .../Integration/BundledLookupTableTests.cs | 70 +- .../Integration/CatalogueCheckTests.cs | 77 +- .../Integration/CatalogueItemTests.cs | 278 +- .../Curation/Integration/CatalogueTests.cs | 917 +- .../Curation/Integration/ColumnInfoTests.cs | 190 +- .../Integration/CommitInProgressTests.cs | 123 +- ...mprehensiveQueryPerformanceCounterTests.cs | 52 +- .../Curation/Integration/CredentialsTests.cs | 578 +- .../Integration/CrossDatabaseTriggerTests.cs | 183 +- .../SuperMultiThreadedVolumeAccess.cs | 228 +- .../Integration/DataAccess/TestDataAccess.cs | 422 +- .../Dependencies/DependencyTests.cs | 58 +- .../Integration/DitaExtractorTests.cs | 209 +- .../Curation/Integration/EncryptionTests.cs | 338 +- .../ExtractionFilterParameterSetTests.cs | 50 +- .../Integration/ExtractionFilterTests.cs | 57 +- .../Integration/ExtractionInformationTests.cs | 217 +- .../ExtractionInformationUnitTests.cs | 85 +- .../FilterImporterTests.cs | 395 +- .../ParameterCreatorTests.cs | 238 +- .../ForwardEngineerANOCatalogueTests.cs | 763 +- .../GetDatabaseDiagramBinaryTest.cs | 54 +- .../Curation/Integration/GovernanceTests.cs | 230 +- .../Integration/HangingConnectionTest.cs | 111 +- .../Curation/Integration/LinkerTests.cs | 110 +- .../Curation/Integration/LoadMetadataTests.cs | 145 +- .../Curation/Integration/LoadProgressTest.cs | 89 +- .../Integration/LoadProgressUnitTests.cs | 152 +- .../Curation/Integration/LookupTest.cs | 733 +- .../Curation/Integration/MEFCheckerTests.cs | 122 +- .../Curation/Integration/MementoTests.cs | 122 +- ...MetadataLoggingConfigurationChecksTests.cs | 200 +- .../MySqlTriggerImplementerTests.cs | 24 +- ...dDataExportObscureDependencyFinderTests.cs | 123 +- ...jectSharingObscureDependencyFinderTests.cs | 143 +- ...lidationXMLObscureDependencyFinderTests.cs | 270 +- .../PasswordEncryptionKeyLocationTests.cs | 118 +- .../Curation/Integration/PipelineTests.cs | 366 +- .../AggregateBuilderTestsBase.cs | 78 +- .../AggregateDataBasedTests.cs | 1224 +- .../MicrosoftAggregateBuilderTests.cs | 189 +- .../MySqlAggregateBuilderTests.cs | 88 +- .../MicrosoftQueryBuilderTests.cs | 60 +- .../MySqlQueryBuilderTests.cs | 88 +- .../QueryBuilderUnitTests.cs | 192 +- .../Integration/ServerDefaultsTests.cs | 100 +- .../Integration/SupportingDocumentTests.cs | 51 +- .../Integration/TableInfoSynchronizerTests.cs | 259 +- .../Curation/Integration/TableInfoTests.cs | 323 +- .../Integration/TableNamingConventionTests.cs | 75 +- .../AggregationTests.cs | 150 +- .../ImportAndTestTests.cs | 326 +- .../Curation/Integration/TriggerTests.cs | 395 +- .../ReferentialIntegrityConstraintTests.cs | 129 +- .../Validation/StandardRegexTests.cs | 56 +- 
.../JsonSerializationTests.cs | 98 +- .../MemoryRepositoryTests.cs | 66 +- .../MemoryRepositoryVsDatabaseRepository.cs | 146 +- .../SelectSQLRefactorerTests.cs | 289 +- .../Curation/SimpleExampleTests.cs | 127 +- .../Unit/AggregateConfigurationTests.cs | 81 +- .../Unit/CacheFetchRequestProviderTests.cs | 199 +- .../Curation/Unit/CacheLagPeriodUnitTests.cs | 33 +- .../Curation/Unit/CatalogueNamingTests.cs | 45 +- .../Curation/Unit/CommitAssemblyTest.cs | 43 +- .../ExerciseData/TestBiochemistryCreation.cs | 58 +- .../ExerciseData/TestDemographyCreation.cs | 65 +- .../ExerciseData/TestPrescribingCreation.cs | 60 +- Rdmp.Core.Tests/Curation/Unit/IColumnTests.cs | 225 +- .../Curation/Unit/IMightBeReadOnlyTests.cs | 169 +- .../Curation/Unit/ObjectConstructorTests.cs | 245 +- .../Curation/Unit/ParameterManagerTests.cs | 217 +- .../Curation/Unit/PermissionWindowTests.cs | 114 +- .../Curation/Unit/PreInitializeTests.cs | 230 +- .../Curation/Unit/SimpleColumnInfoTests.cs | 47 +- .../Curation/Unit/SqlSyntaxHelperTests.cs | 81 +- .../Curation/Unit/TestAcronymGeneration.cs | 32 +- .../Curation/UnitTestsAllObjectsSupported.cs | 104 +- .../Curation/YamlRepositoryTests.cs | 478 +- .../CloneExtractionConfigurationTests.cs | 199 +- .../DataExport/Cohort/CohortTests.cs | 128 +- .../Cohort/CommittingNewCohortsTests.cs | 397 +- ...tentGuidReleaseIdentifierAllocatorTests.cs | 263 +- .../ConfigurationPropertiesTests.cs | 25 +- .../CustomData/CustomDataImportingTests.cs | 556 +- .../Data/ExternalCohortTableTests.cs | 192 +- .../ExtractableCohortAuditLogBuilderTests.cs | 106 +- .../DataExport/Data/ExtractableCohortTests.cs | 47 +- .../Data/ExtractionProgressTests.cs | 244 +- .../Data/SelectedDataSetsCheckerTests.cs | 143 +- .../DataAccess/PackageContentsTests.cs | 62 +- .../DataAccess/SelectedColumnsTests.cs | 76 +- .../DataExport/DataExportRepositoryTests.cs | 79 +- .../EmptyDataExtractionTests.cs | 108 +- ...eCrossServerDatasetExtractionSourceTest.cs | 170 +- ...tasetExtractionFlatFileDestinationTests.cs | 76 +- ...ullExtractionToDatabaseMSSqlChecksTests.cs | 212 +- ...xtractionToDatabaseMSSqlDestinationTest.cs | 372 +- ...SynthesizerDatasetExtractionSourceTests.cs | 357 +- .../ExtractionSubdirectoryPatternTests.cs | 141 +- .../HashedDataExtractionTests.cs | 68 +- .../NormalDataExtractionTests.cs | 83 +- .../DataExtraction/RowPeekerTests.cs | 125 +- .../SimpleFileExtractorTests.cs | 422 +- .../SupplementalExtractionResultsTest.cs | 32 +- .../DataExtraction/TestCohortRefreshing.cs | 292 +- .../DataExport/ExtractionConfigurationTest.cs | 103 +- Rdmp.Core.Tests/DataExport/ImportFileTests.cs | 203 +- .../DataExport/ProjectChecksTestsComplex.cs | 53 +- .../DataExport/ProjectChecksTestsSimple.cs | 307 +- ...tificationConfigurationAssociationTests.cs | 93 +- .../EndToEndTableValuedFunction.cs | 687 +- .../DataExport/TestExtractableTables.cs | 192 +- .../HICDatabaseConfigurationTests.cs | 84 +- .../TableInfoCloneOperationTests.cs | 54 +- .../Integration/BackfillSqlHelperTests.cs | 227 +- .../Engine/Integration/BackfillTests.cs | 1669 +- .../Integration/CachedFileRetrieverTests.cs | 308 +- .../CheckingTests/ProcessTaskCheckingTests.cs | 323 +- .../Engine/Integration/CoalescerTests.cs | 165 +- .../CrossDatabaseDataLoadTests.cs | 726 +- .../CrossDatabaseMergeCommandTest.cs | 212 +- .../HowDoWeAchieveMd5Test.cs | 96 +- .../Integration/DataLoadEngineTestsBase.cs | 118 +- .../DataLoadProgressUpdateInfoTests.cs | 221 +- .../DataTableUploadDestinationTests.cs | 2105 +- .../Integration/DatabaseOperationTests.cs | 183 +- 
.../DilutionTests/DilutionCheckTests.cs | 67 +- .../DilutionOperationFactoryTests.cs | 39 +- .../DilutionTests/DilutionOperationTests.cs | 331 +- .../Engine/Integration/DistincterTests.cs | 167 +- .../Engine/Integration/ExcelConversionTest.cs | 160 +- .../Engine/Integration/ExcelDatabaseTests.cs | 74 +- .../Integration/ExecutableProcessTaskTests.cs | 80 +- .../ExecuteSqlFileRuntimeTaskTests.cs | 265 +- .../Engine/Integration/FixedWidthTests.cs | 363 +- .../Integration/FlatFileAttacherTests.cs | 758 +- .../Engine/Integration/HICPipelineTests.cs | 459 +- .../Engine/Integration/HousekeepingTests.cs | 79 +- .../ImportFilesDataProviderTests.cs | 132 +- ...nerationStrategyFactoryTestsIntegration.cs | 333 +- .../Engine/Integration/KVPAttacherTest.cs | 375 +- .../Integration/MigrationStrategyTests.cs | 33 +- .../Engine/Integration/PayloadTest.cs | 111 +- .../PipelineTests/ArchiveFilesTests.cs | 154 +- .../ComponentCompatibilityTests.cs | 35 +- .../Components/AliasHandlerTests.cs | 314 +- .../Components/ColumnSwapperTests.cs | 853 +- .../Components/RemoveDuplicatesTests.cs | 143 +- .../Components/TransposerTests.cs | 136 +- .../PipelineTests/PipelineArgumentTests.cs | 72 +- .../PipelineReadPerformanceTest.cs | 73 +- .../Sources/DelimitedFileSourceTests.cs | 1149 +- .../Sources/DelimitedFileSourceTestsBase.cs | 89 +- ...edFileSourceTests_AutomaticallyResolved.cs | 291 +- ...SourceTests_ResolvedAccordingToStrategy.cs | 589 +- .../DelimitedFileSourceTests_Unresolveable.cs | 123 +- .../PipelineTests/Sources/SourceTests.cs | 412 +- .../Integration/PrematureLoadEnderTests.cs | 99 +- .../PrimaryKeyCollisionResolverTests.cs | 266 +- .../RemoteDatabaseAttacherTests.cs | 181 +- .../Integration/RuntimeTaskFactoryTests.cs | 53 +- ...maryKeyCollisionResolverMutilationTests.cs | 517 +- .../Integration/SingleJobPipelineTests.cs | 42 +- .../TableInfoJoiningQueryBuilderTests.cs | 145 +- .../Integration/TableVarcharMaxerTests.cs | 185 +- .../Engine/Integration/TestTemporalTables.cs | 197 +- .../Engine/Integration/ToMemoryDataLoadJob.cs | 95 +- .../Integration/WebFileDownloaderTests.cs | 62 - .../WebServiceConfigurationTests.cs | 17 +- .../Engine/Resources/XmlTestForExcel.xml | 349 +- .../Engine/Unit/CohortSamplerTests.cs | 294 +- .../Engine/Unit/CommandLineHelperTests.cs | 109 +- .../Engine/Unit/DataFlowComponentTests.cs | 155 +- .../DataLoad/Engine/Unit/ExcelTests.cs | 491 +- .../Engine/Unit/ExecutableProcessTaskTests.cs | 52 +- .../Engine/Unit/IAttacherCompositionTests.cs | 16 +- ...bDateGenerationStrategyFactoryTestsUnit.cs | 35 +- .../Unit/JobDateGenerationStrategyTests.cs | 99 +- .../DataLoad/Engine/Unit/MDFAttacherTests.cs | 489 +- .../DataLoad/Engine/Unit/SchedulingTests.cs | 64 +- .../Attachers/RemoteTableAttacherTests.cs | 270 +- .../DataFlowOperations/RowDeleterTests.cs | 56 +- .../DataFlowOperations/SetNullTests.cs | 62 +- .../CatalogueConstraintReportTests.cs | 370 +- .../DQEGraphAnnotationTests.cs | 73 +- .../PeriodicityStateTests.cs | 47 +- .../MasterDatabaseScriptExecutorTests.cs | 35 +- .../Databases/Patch68FixNamespacesTest.cs | 193 +- .../DatabaseEntityConventionTests.cs | 54 +- Rdmp.Core.Tests/IHasSummaryTests.cs | 41 +- .../Logging/ArchivalDataLoadInfoTests.cs | 27 +- Rdmp.Core.Tests/Logging/DataLoadTaskHelper.cs | 82 +- .../Logging/FatalErrorLoggingTest.cs | 118 +- Rdmp.Core.Tests/Logging/LogManagerTest.cs | 341 +- Rdmp.Core.Tests/Properties/AssemblyInfo.cs | 2 +- .../CatalogueProblemProviderTests.cs | 781 +- Rdmp.Core.Tests/Providers/RowVerTest.cs | 125 +- 
.../Providers/SearchablesMatchScorerTests.cs | 375 +- ...gregateConfigurationResultsManagerTests.cs | 224 +- .../DataAccessPortalCollectionTests.cs | 489 +- .../ExtractableAggregateCachingTests.cs | 154 +- .../QueryCachingCrossServerTests.cs | 1485 +- .../QueryCaching/QueryCachingDatabaseTests.cs | 45 +- Rdmp.Core.Tests/Rdmp.Core.Tests.csproj | 206 +- .../Reports/CustomMetadataReportTests.cs | 1364 +- ...ocumentationReportDatabaseEntitiesTests.cs | 36 +- .../WordDataReleaseFileGeneratorTests.cs | 21 +- .../Reports/MetadataReportTests.cs | 80 +- .../Repositories/CatalogueRepositoryTests.cs | 89 +- .../ChangeLogIsCorrectTests.cs | 43 + .../DataTableExtensionsTests.cs | 80 + .../ExpectedIdenticalStringsExceptionTests.cs | 113 + .../PackageListIsCorrectTests.cs | 121 + .../ReusableCodeTests/UsefulStuffTests.cs | 83 + .../ReusableCodeTests/UsefulStuffUnitTests.cs | 49 + .../Constraints/Primary/AlphaNumericTest.cs | 86 +- .../Constraints/Primary/AlphaTest.cs | 87 +- .../Primary/BoundsValidationDateTest.cs | 825 +- .../Primary/BoundsValidationIntegerTest.cs | 36 +- .../Validation/Constraints/Primary/ChiTest.cs | 80 +- .../Constraints/Primary/ChiValidationTest.cs | 100 +- .../Constraints/Primary/DateEuTest.cs | 106 +- .../Constraints/Primary/ValidationTests.cs | 14 +- .../Constraints/Secondary/BoundDateTest.cs | 148 +- .../Secondary/PredictionChiSexTest.cs | 144 +- .../Secondary/PredictionNotNullTest.cs | 76 +- .../Secondary/RegularExpressionTest.cs | 82 +- .../Validation/ExceptionHandlingTests.cs | 63 +- .../Validation/ItemValidatorTest.cs | 124 +- .../Validation/PredictionValidationTest.cs | 275 +- Rdmp.Core.Tests/Validation/TestConstants.cs | 246 +- .../Validation/TestData/ChiAgeDomainObject.cs | 19 +- .../Validation/TestData/ChiDomainObject.cs | 16 +- .../ValidationDeserializationMemoryTest.cs | 48 +- .../LegacySerializationTest.cs | 24 +- .../PluginValidationSerializationTest.cs | 74 +- Rdmp.Core.Tests/Validation/ValidatorTest.cs | 427 +- Rdmp.Core/AmbiguousDatabaseTypeException.cs | 24 +- Rdmp.Core/AssemblyInfo.cs | 10 +- .../Autocomplete/AutoCompleteProvider.cs | 322 +- Rdmp.Core/Caching/CachingHost.cs | 243 +- .../Caching/CachingPreExecutionChecker.cs | 161 +- Rdmp.Core/Caching/CustomDateCaching.cs | 68 +- Rdmp.Core/Caching/Layouts/BasicCacheLayout.cs | 24 +- Rdmp.Core/Caching/Layouts/CacheLayout.cs | 311 +- .../Caching/Layouts/CacheLayoutFactory.cs | 85 +- Rdmp.Core/Caching/Layouts/ICacheLayout.cs | 56 +- .../Caching/Layouts/ILoadCachePathResolver.cs | 18 +- .../NoSubdirectoriesCachePathResolver.cs | 18 +- .../Layouts/ZipCacheLayoutOnePerDay.cs | 19 +- .../PermissionWindowCacheDownloader.cs | 347 +- .../Pipeline/CachingPipelineUseCase.cs | 199 +- .../CacheFilesystemDestination.cs | 152 +- .../ICacheFileSystemDestination.cs | 19 +- .../IMultiPipelineEngineExecutionStrategy.cs | 18 +- .../Pipeline/RoundRobinPipelineExecution.cs | 53 +- .../Pipeline/SerialPipelineExecution.cs | 32 +- .../Caching/Pipeline/Sources/CacheSource.cs | 157 +- .../Caching/Pipeline/Sources/ICacheSource.cs | 18 +- .../Requests/BackfillCacheFetchRequest.cs | 99 +- .../Caching/Requests/CacheFetchRequest.cs | 150 +- .../Caching/Requests/DoNothingCacheChunk.cs | 23 +- .../CacheFetchRequestProvider.cs | 122 +- .../FailedCacheFetchRequestProvider.cs | 92 +- .../ICacheFetchRequestProvider.cs | 19 +- .../MultiDayCacheFetchRequestProvider.cs | 57 +- .../SingleDayCacheFetchRequestProvider.cs | 57 +- Rdmp.Core/Caching/Requests/ICacheChunk.cs | 17 +- .../Caching/Requests/ICacheFetchRequest.cs | 43 +- 
Rdmp.Core/Caching/RetrievalResult.cs | 23 +- .../{Readme.md => CohortCommitting.md} | 0 .../CreateNewCohortDatabaseWizard.cs | 276 +- .../Pipeline/CohortCreationRequest.cs | 400 +- .../Pipeline/CohortRefreshEngine.cs | 78 +- .../CreateTableFromAggregateUseCase.cs | 121 +- .../Destinations/BasicCohortDestination.cs | 416 +- .../GuidReleaseIdentifierAllocator.cs | 39 +- .../IAllocateReleaseIdentifiers.cs | 38 +- .../NullAllocateReleaseIdentifiers.cs | 17 +- ...onsistentGuidReleaseIdentifierAllocator.cs | 163 +- .../Pipeline/ICohortCreationRequest.cs | 31 +- .../Pipeline/ICohortPipelineDestination.cs | 18 +- .../Pipeline/IPluginCohortDestination.cs | 16 +- .../AggregateConfigurationTableSource.cs | 188 +- ...CohortIdentificationConfigurationSource.cs | 334 +- .../Sources/PatientIdentifierColumnSource.cs | 156 +- .../Sources/PatientIndexTableSource.cs | 104 +- .../PrivateIdentifierPrototype.cs | 76 +- .../{Readme.md => CohortCreation.md} | 0 ...hortIdentificationConfigurationUICommon.cs | 166 +- Rdmp.Core/CohortCreation/Compileable.cs | 123 +- .../Execution/AggregationContainerTask.cs | 136 +- .../Execution/AggregationTask.cs | 109 +- .../CohortCreation/Execution/CachableTask.cs | 49 +- .../Execution/CohortCompiler.cs | 873 +- .../Execution/CohortCompilerRunner.cs | 234 +- .../CohortIdentificationTaskExecution.cs | 202 +- .../Execution/ExamplePluginCohortCompiler.cs | 150 +- .../Execution/ExecutingAggregateState.cs | 67 +- .../Execution/ICacheableTask.cs | 27 +- .../Execution/IPluginCohortCompiler.cs | 95 +- .../Execution/Joinables/JoinableTask.cs | 160 +- .../Execution/PluginCohortCompiler.cs | 231 +- .../Execution/PluginCohortCompilerFactory.cs | 27 +- .../Execution/PluginCohortCompilerTask.cs | 36 +- Rdmp.Core/CohortCreation/ICompileable.cs | 55 +- Rdmp.Core/CommandExecution/AliasAttribute.cs | 29 +- .../CommandExecution/AtomicCommandFactory.cs | 1485 +- .../Alter/AlterTableCommandExecution.cs | 76 +- .../Alter/ExecuteCommandAlterColumnType.cs | 135 +- ...ecuteCommandAlterTableAddArchiveTrigger.cs | 75 +- ...xecuteCommandAlterTableCreatePrimaryKey.cs | 124 +- .../ExecuteCommandAlterTableMakeDistinct.cs | 87 +- .../Alter/ExecuteCommandAlterTableName.cs | 55 +- .../Automation/AutomationCommandExecution.cs | 116 +- .../ExecuteCommandGenerateRunCommand.cs | 38 +- .../Automation/ExecuteCommandRunDetached.cs | 63 +- .../CatalogueCreationCommandExecution.cs | 82 +- ...ogueByExecutingAnAggregateConfiguration.cs | 150 +- ...ewCatalogueByImportingExistingDataTable.cs | 106 +- ...ommandCreateNewCatalogueByImportingFile.cs | 244 +- ...eCommandCreateNewCatalogueFromTableInfo.cs | 50 +- .../CohortCreationCommandExecution.cs | 266 +- ...utingACohortIdentificationConfiguration.cs | 168 +- ...cuteCommandCreateNewCohortFromCatalogue.cs | 219 +- .../ExecuteCommandCreateNewCohortFromFile.cs | 135 +- .../ExecuteCommandCreateNewCohortFromTable.cs | 136 +- ...ecuteCommandImportAlreadyExistingCohort.cs | 135 +- .../AtomicCommands/ExecuteCommandActivate.cs | 73 +- ...ationToCohortIdentificationSetContainer.cs | 298 +- ...CohortIdentificationAsPatientIndexTable.cs | 82 +- ...logueToCohortIdentificationSetContainer.cs | 229 +- ...teCommandAddCatalogueToGovernancePeriod.cs | 61 +- ...mmandAddCohortToExtractionConfiguration.cs | 103 +- ...xecuteCommandAddDatasetsToConfiguration.cs | 159 +- .../ExecuteCommandAddDimension.cs | 158 +- .../ExecuteCommandAddExtractionProgress.cs | 57 +- .../ExecuteCommandAddFavourite.cs | 81 +- .../ExecuteCommandAddMissingParameters.cs | 63 +- .../ExecuteCommandAddNewAggregateGraph.cs | 107 
+- .../ExecuteCommandAddNewCatalogueItem.cs | 229 +- ...mmandAddNewExtractionFilterParameterSet.cs | 71 +- .../ExecuteCommandAddNewFilterContainer.cs | 112 +- .../ExecuteCommandAddNewGovernanceDocument.cs | 92 +- .../ExecuteCommandAddNewSupportingDocument.cs | 145 +- .../ExecuteCommandAddNewSupportingSqlTable.cs | 105 +- ...ExecuteCommandAddPackageToConfiguration.cs | 61 +- .../ExecuteCommandAddParameter.cs | 182 +- .../ExecuteCommandAddPipelineComponent.cs | 230 +- .../ExecuteCommandAddPlugins.cs | 99 +- ...mmandAssociateCatalogueWithLoadMetadata.cs | 175 +- ...tIdentificationConfigurationWithProject.cs | 172 +- .../ExecuteCommandBulkImportTableInfos.cs | 227 +- .../ExecuteCommandChangeExtractability.cs | 116 +- .../ExecuteCommandChangeExtractionCategory.cs | 148 +- .../ExecuteCommandChangeLoadStage.cs | 62 +- .../AtomicCommands/ExecuteCommandCheck.cs | 51 +- .../ExecuteCommandChooseCohort.cs | 144 +- .../ExecuteCommandClearQueryCache.cs | 166 +- .../ExecuteCommandClearUserSettings.cs | 24 + ...dCloneCohortIdentificationConfiguration.cs | 132 +- ...cuteCommandCloneExtractionConfiguration.cs | 62 +- .../ExecuteCommandClonePipeline.cs | 45 +- .../ExecuteCommandConfirmLogs.cs | 246 +- ...gregateConfigurationToPatientIndexTable.cs | 94 +- .../ExecuteCommandCreateLookup.cs | 187 +- .../ExecuteCommandCreateNewANOTable.cs | 79 +- .../ExecuteCommandCreateNewCacheProgress.cs | 64 +- ...teCommandCreateNewClassBasedProcessTask.cs | 123 +- ...ateNewCohortIdentificationConfiguration.cs | 279 +- .../ExecuteCommandCreateNewCohortStore.cs | 87 +- ...xecuteCommandCreateNewDataLoadDirectory.cs | 104 +- .../ExecuteCommandCreateNewEmptyCatalogue.cs | 52 +- ...eCommandCreateNewExternalDatabaseServer.cs | 181 +- ...mmandCreateNewExtractableDataSetPackage.cs | 51 +- ...ateNewExtractionConfigurationForProject.cs | 241 +- ...uteCommandCreateNewFileBasedProcessTask.cs | 178 +- .../ExecuteCommandCreateNewFilter.cs | 377 +- ...ExecuteCommandCreateNewGovernancePeriod.cs | 67 +- .../ExecuteCommandCreateNewLoadMetadata.cs | 124 +- .../ExecuteCommandCreateNewLoadProgress.cs | 54 +- ...ExecuteCommandCreateNewPermissionWindow.cs | 65 +- .../ExecuteCommandCreateNewRemoteRDMP.cs | 43 +- .../ExecuteCommandCreateNewStandardRegex.cs | 42 +- .../ExecuteCommandCreatePrivateKey.cs | 52 +- .../AtomicCommands/ExecuteCommandDelete.cs | 232 +- .../AtomicCommands/ExecuteCommandDeprecate.cs | 100 +- .../AtomicCommands/ExecuteCommandDescribe.cs | 543 +- .../ExecuteCommandDisableOrEnable.cs | 127 +- .../ExecuteCommandExecuteAggregateGraph.cs | 68 +- .../ExecuteCommandExportLoggedDataToCsv.cs | 76 +- .../ExecuteCommandExportObjectsToFile.cs | 186 +- .../ExecuteCommandExportPlugins.cs | 66 +- .../ExecuteCommandExtractMetadata.cs | 154 +- ...FreezeCohortIdentificationConfiguration.cs | 63 +- ...uteCommandFreezeExtractionConfiguration.cs | 53 +- .../ExecuteCommandGenerateReleaseDocument.cs | 113 +- .../ExecuteCommandGenerateTestData.cs | 79 +- .../ExecuteCommandGuessAssociatedColumns.cs | 160 +- ...teCommandImportCatalogueItemDescription.cs | 119 +- ...eCommandImportCatalogueItemDescriptions.cs | 63 +- ...ImportCohortIdentificationConfiguration.cs | 101 +- ...ExecuteCommandImportFilterContainerTree.cs | 363 +- .../ExecuteCommandImportTableInfo.cs | 100 +- ...uteCommandLinkCatalogueItemToColumnInfo.cs | 111 +- .../AtomicCommands/ExecuteCommandList.cs | 78 +- .../ExecuteCommandListSupportedCommands.cs | 143 +- .../ExecuteCommandListUserSettings.cs | 38 +- ...cuteCommandMakeCatalogueItemExtractable.cs | 73 +- 
...cuteCommandMakeCatalogueProjectSpecific.cs | 155 +- ...IntoRegularCohortIdentificationSetAgain.cs | 90 +- ...MakeProjectSpecificCatalogueNormalAgain.cs | 85 +- ...MergeCohortIdentificationConfigurations.cs | 70 +- ...xecuteCommandMoveAggregateIntoContainer.cs | 92 +- ...ohortAggregateContainerIntoSubContainer.cs | 74 +- ...xecuteCommandMoveContainerIntoContainer.cs | 62 +- .../ExecuteCommandMoveFilterIntoContainer.cs | 62 +- .../AtomicCommands/ExecuteCommandNewObject.cs | 153 +- .../ExecuteCommandOverrideRawServer.cs | 78 +- .../ExecuteCommandPrunePlugin.cs | 170 +- .../ExecuteCommandPutIntoFolder.cs | 120 +- .../ExecuteCommandQueryPlatformDatabase.cs | 225 +- .../ExecuteCommandRefreshBrokenCohorts.cs | 104 +- .../AtomicCommands/ExecuteCommandRename.cs | 123 +- .../ExecuteCommandReplacedBy.cs | 117 +- .../ExecuteCommandResetExtractionProgress.cs | 135 +- .../ExecuteCommandRunSupportingSql.cs | 88 +- .../ExecuteCommandScriptTable.cs | 42 +- .../ExecuteCommandScriptTables.cs | 109 +- .../AtomicCommands/ExecuteCommandSet.cs | 324 +- .../ExecuteCommandSetAggregateDimension.cs | 145 +- .../ExecuteCommandSetArgument.cs | 216 +- .../AtomicCommands/ExecuteCommandSetAxis.cs | 184 +- .../ExecuteCommandSetColumnSettingBase.cs | 285 +- .../ExecuteCommandSetContainerOperation.cs | 123 +- ...mmandSetDataAccessContextForCredentials.cs | 73 +- .../ExecuteCommandSetDefault.cs | 40 +- .../ExecuteCommandSetExtendedProperty.cs | 151 +- .../ExecuteCommandSetExtractionIdentifier.cs | 94 +- .../ExecuteCommandSetExtractionPrimaryKeys.cs | 68 +- .../ExecuteCommandSetFilterTreeShortcut.cs | 157 +- ...ExecuteCommandSetGlobalDleIgnorePattern.cs | 91 +- .../ExecuteCommandSetIgnoredColumns.cs | 81 +- .../ExecuteCommandSetPermissionWindow.cs | 73 +- .../AtomicCommands/ExecuteCommandSetPivot.cs | 165 +- ...uteCommandSetProjectExtractionDirectory.cs | 61 +- .../ExecuteCommandSetQueryCachingDatabase.cs | 75 +- .../ExecuteCommandSetUserSetting.cs | 173 +- .../AtomicCommands/ExecuteCommandShow.cs | 268 +- .../ExecuteCommandShowRelatedObject.cs | 38 +- .../AtomicCommands/ExecuteCommandSimilar.cs | 262 +- .../ExecuteCommandSyncTableInfo.cs | 125 +- ...nMergeCohortIdentificationConfiguration.cs | 105 +- ...eCommandUnfreezeExtractionConfiguration.cs | 56 +- ...mandUseCredentialsToAccessTableInfoData.cs | 73 +- .../AtomicCommands/ExecuteCommandViewData.cs | 309 +- .../ExecuteCommandViewDataBase.cs | 89 +- .../ExecuteCommandViewExtractionSql.cs | 141 +- .../ExecuteCommandViewFilterMatchData.cs | 209 +- .../AtomicCommands/ExecuteCommandViewLogs.cs | 178 +- .../ExecutecommandAddCohortSubContainer.cs | 59 +- .../AtomicCommands/IAtomicCommand.cs | 63 +- .../IAtomicCommandWithTarget.cs | 27 +- .../LogsNotConfirmedException.cs | 38 +- .../ExecuteCommandExportInDublinCoreFormat.cs | 39 +- ...andImportCatalogueDescriptionsFromShare.cs | 91 +- .../ExecuteCommandImportDublinCoreFormat.cs | 46 +- ...ommandImportFilterDescriptionsFromShare.cs | 83 +- .../Sharing/ExecuteCommandImportShare.cs | 70 +- ...ExecuteCommandImportShareDefinitionList.cs | 109 +- .../CommandExecution/BasicActivateItems.cs | 1261 +- .../CommandExecution/BasicCommandExecution.cs | 939 +- .../AggregateConfigurationCombineable.cs | 176 +- .../Combining/CacheProgressCombineable.cs | 28 +- .../Combining/CatalogueCombineable.cs | 88 +- .../Combining/CatalogueItemCombineable.cs | 42 +- .../CohortAggregateContainerCombineable.cs | 37 +- .../CohortCombineToCreateCommandHelper.cs | 28 +- ...ohortIdentificationConfigurationCommand.cs | 32 +- .../Combining/ColumnCombineable.cs | 
42 +- .../Combining/ColumnInfoCombineable.cs | 37 +- .../Combining/ContainerCombineable.cs | 56 +- .../DataAccessCredentialsCombineable.cs | 30 +- .../Combining/ExtractableCohortCombineable.cs | 57 +- .../ExtractableDataSetCombineable.cs | 47 +- ...ExtractionFilterParameterSetCombineable.cs | 21 +- .../Combining/FileCollectionCombineable.cs | 51 +- .../Combining/FilterCombineable.cs | 75 +- .../Combining/IHasFolderCombineable.cs | 17 +- .../Combining/LoadMetadataCombineable.cs | 34 +- .../Combining/ManyCataloguesCombineable.cs | 40 +- .../Combining/PipelineCombineable.cs | 32 +- .../Combining/ProcessTaskCombineable.cs | 28 +- .../Combining/ProjectCombineable.cs | 34 +- .../Combining/SqlTextOnlyCombineable.cs | 29 +- .../Combining/TableInfoCombineable.cs | 40 +- .../CommandExecution/CommandEventArgs.cs | 15 +- .../CommandExecution/CommandFactoryBase.cs | 45 +- Rdmp.Core/CommandExecution/CommandInvoker.cs | 717 +- .../CommandInvokerArrayDelegate.cs | 41 +- .../CommandInvokerDelegate.cs | 87 +- .../CommandInvokerFixedValueDelegate.cs | 25 +- .../CommandInvokerValueTypeDelegate.cs | 26 +- Rdmp.Core/CommandExecution/DialogArgs.cs | 185 +- .../CommandExecution/EmphasiseEventArgs.cs | 33 +- .../CommandExecution/EmphasiseItemHandler.cs | 7 +- .../CommandExecution/EmphasiseRequest.cs | 27 +- ...ommandPasteClipboardAsNewCatalogueItems.cs | 118 +- .../FromActivateItemsToCheckNotifier.cs | 47 +- .../CommandExecution/GoToCommandFactory.cs | 550 +- .../CommandExecution/IBasicActivateItems.cs | 1062 +- .../CommandExecution/ICombineToMakeCommand.cs | 33 +- .../CommandExecution/ICombineableSource.cs | 15 +- .../CommandExecution/ICommandExecution.cs | 67 +- .../CommandExecution/ImpossibleCommand.cs | 19 +- .../ImpossibleCommandException.cs | 27 +- .../CommandExecution/RequiredArgument.cs | 78 +- .../ThrowImmediatelyActivator.cs | 106 +- ...CataloguePipelinesAndReferencesCreation.cs | 268 +- .../ExampleDatasetsCreation.cs | 1041 +- .../DatabaseCreation/NightmareDatasets.cs | 423 +- .../PlatformDatabaseCreation.cs | 112 +- .../PlatformDatabaseCreationOptions.cs | 185 +- ...latformDatabaseCreationRepositoryFinder.cs | 81 +- .../CommandLine/Interactive/AutoComplete.cs | 35 +- .../Interactive/ConsoleInputManager.cs | 734 +- .../Interactive/InputDisallowedException.cs | 18 +- .../Picking/CommandLineObjectPicker.cs | 126 +- .../CommandLineObjectPickerArgumentValue.cs | 421 +- .../CommandLineObjectPickerParseException.cs | 19 +- .../Interactive/Picking/PickDatabase.cs | 85 +- .../Interactive/Picking/PickObjectBase.cs | 222 +- .../Interactive/Picking/PickObjectByID.cs | 90 +- .../Interactive/Picking/PickObjectByName.cs | 116 +- .../Interactive/Picking/PickObjectByQuery.cs | 105 +- .../Interactive/Picking/PickTable.cs | 82 +- .../Interactive/Picking/PickType.cs | 60 +- Rdmp.Core/CommandLine/Options/CacheOptions.cs | 56 +- .../Options/CohortCreationOptions.cs | 54 +- .../Options/CommandLineActivity.cs | 33 +- .../ConcurrentRDMPCommandLineOptions.cs | 21 +- .../Options/ConnectionStringsYamlFile.cs | 109 +- Rdmp.Core/CommandLine/Options/DleOptions.cs | 84 +- Rdmp.Core/CommandLine/Options/DqeOptions.cs | 19 +- .../Options/ExecuteCommandOptions.cs | 61 +- .../CommandLine/Options/ExtractionOptions.cs | 63 +- Rdmp.Core/CommandLine/Options/PackOptions.cs | 42 +- .../Options/PatchDatabaseOptions.cs | 20 +- .../Options/RDMPCommandLineOptions.cs | 307 +- Rdmp.Core/CommandLine/Options/RdmpScript.cs | 31 +- .../CommandLine/Options/ReleaseOptions.cs | 44 +- .../RdmpCommandLineBootStrapper.cs | 313 +- 
Rdmp.Core/CommandLine/Runners/CacheRunner.cs | 115 +- .../Runners/CohortCreationRunner.cs | 64 +- Rdmp.Core/CommandLine/Runners/DleRunner.cs | 164 +- Rdmp.Core/CommandLine/Runners/DqeRunner.cs | 50 +- .../Runners/ExecuteCommandRunner.cs | 390 +- .../CommandLine/Runners/ExtractionRunner.cs | 346 +- .../CommandLine/Runners/IPipelineRunner.cs | 29 +- Rdmp.Core/CommandLine/Runners/IRunner.cs | 20 +- Rdmp.Core/CommandLine/Runners/ManyRunner.cs | 282 +- .../CommandLine/Runners/PackPluginRunner.cs | 166 +- .../CommandLine/Runners/PipelineRunner.cs | 89 +- .../CommandLine/Runners/ReleaseRunner.cs | 481 +- Rdmp.Core/CommandLine/Runners/Runner.cs | 99 +- .../CommandLine/Runners/RunnerFactory.cs | 60 +- .../ANOEngineering/ColumnInfoANOPlan.cs | 436 +- .../ForwardEngineerANOCatalogueEngine.cs | 662 +- .../ForwardEngineerANOCataloguePlanManager.cs | 414 +- .../ANOEngineering/IDilutionOperation.cs | 25 +- .../Curation/Checks/BadAssembliesChecker.cs | 72 +- .../Curation/Checks/ClonedFilterChecker.cs | 140 +- Rdmp.Core/Curation/Checks/MEFChecker.cs | 160 +- .../Curation/Checks/MissingFieldsChecker.cs | 208 +- Rdmp.Core/Curation/Checks/PipelineChecker.cs | 58 +- Rdmp.Core/Curation/Data/AggregateTopX.cs | 201 +- .../Data/AggregateTopXOrderByDirection.cs | 25 +- .../Aggregation/AggregateConfiguration.cs | 1331 +- .../AggregateContinuousDateAxis.cs | 196 +- .../Data/Aggregation/AggregateDimension.cs | 484 +- .../Data/Aggregation/AggregateFilter.cs | 333 +- .../Aggregation/AggregateFilterContainer.cs | 245 +- .../Aggregation/AggregateFilterParameter.cs | 215 +- .../Data/Aggregation/AggregateForcedJoin.cs | 95 +- .../IAggregateForcedJoinManager.cs | 67 +- .../Curation/Data/Cache/CacheFetchFailure.cs | 185 +- .../Curation/Data/Cache/CacheLagPeriod.cs | 317 +- .../Curation/Data/Cache/CacheProgress.cs | 449 +- .../Curation/Data/Cache/ICacheFetchFailure.cs | 85 +- .../Curation/Data/Cache/ICacheProgress.cs | 191 +- Rdmp.Core/Curation/Data/Catalogue.cs | 2189 +- .../Data/CatalogueExtractabilityStatus.cs | 49 +- Rdmp.Core/Curation/Data/CatalogueItem.cs | 773 +- .../Data/CatalogueObscureDependencyFinder.cs | 105 +- .../Data/Cohort/AnyTableSqlParameter.cs | 422 +- .../Data/Cohort/CohortAggregateContainer.cs | 774 +- .../CohortIdentificationConfiguration.cs | 1187 +- ...CohortIdentificationConfigurationMerger.cs | 347 +- Rdmp.Core/Curation/Data/Cohort/IOrderable.cs | 19 +- .../JoinableCohortAggregateConfiguration.cs | 242 +- ...JoinableCohortAggregateConfigurationUse.cs | 253 +- .../Curation/Data/Cohort/SetOperation.cs | 37 +- Rdmp.Core/Curation/Data/ColumnInfo.cs | 938 +- Rdmp.Core/Curation/Data/Commit.cs | 124 +- Rdmp.Core/Curation/Data/CommitInProgress.cs | 405 +- .../Curation/Data/CommitInProgressSettings.cs | 69 +- Rdmp.Core/Curation/Data/ConcreteColumn.cs | 205 +- Rdmp.Core/Curation/Data/ConcreteContainer.cs | 242 +- Rdmp.Core/Curation/Data/ConcreteFilter.cs | 318 +- .../Curation/Data/ConnectionStringKeyword.cs | 176 +- .../Data/CredentialsInUseException.cs | 35 +- .../Data/Dashboarding/DashboardControl.cs | 310 +- .../Data/Dashboarding/DashboardLayout.cs | 152 +- .../Data/Dashboarding/DashboardObjectUse.cs | 89 +- .../IPersistableObjectCollection.cs | 60 +- .../Data/Dashboarding/PersistStringHelper.cs | 315 +- .../PersistableObjectCollection.cs | 75 +- .../Data/Dashboarding/PersistenceException.cs | 38 +- .../Curation/Data/DataAccessCredentials.cs | 255 +- .../Data/DataAccessCredentialsFactory.cs | 74 +- Rdmp.Core/Curation/Data/DataLoad/ANOTable.cs | 712 +- Rdmp.Core/Curation/Data/DataLoad/Argument.cs | 815 +- 
.../Curation/Data/DataLoad/ArgumentFactory.cs | 259 +- .../Extensions/LoadBubbleExtensions.cs | 41 +- .../Extensions/LoadStageExtensions.cs | 43 +- Rdmp.Core/Curation/Data/DataLoad/IArgument.cs | 103 +- .../Curation/Data/DataLoad/IArgumentHost.cs | 57 +- Rdmp.Core/Curation/Data/DataLoad/ICustomUI.cs | 61 +- .../Data/DataLoad/ICustomUIDrivenClass.cs | 47 +- .../DataLoad/IHasStageSpecificRuntimeName.cs | 33 +- .../Curation/Data/DataLoad/ILoadMetadata.cs | 89 +- .../DataLoad/ILoggedActivityRootObject.cs | 55 +- .../Data/DataLoad/IPreLoadDiscardedColumn.cs | 62 +- .../Curation/Data/DataLoad/IProcessTask.cs | 70 +- .../Data/DataLoad/IResolveDuplication.cs | 43 +- .../Curation/Data/DataLoad/LoadMetadata.cs | 611 +- Rdmp.Core/Curation/Data/DataLoad/LoadStage.cs | 81 +- .../Data/DataLoad/PreLoadDiscardedColumn.cs | 391 +- .../Curation/Data/DataLoad/ProcessTask.cs | 687 +- .../Data/DataLoad/ProcessTaskArgument.cs | 195 +- .../Curation/Data/DataLoad/ProcessTaskType.cs | 61 +- .../Data/DataLoad/RequiredPropertyInfo.cs | 83 +- Rdmp.Core/Curation/Data/DatabaseEntity.cs | 586 +- .../Curation/Data/Defaults/IServerDefaults.cs | 47 +- .../Data/Defaults/PermissableDefaults.cs | 91 +- .../Defaults/PermissableDefaultsExtensions.cs | 51 +- .../Curation/Data/Defaults/ServerDefaults.cs | 43 +- Rdmp.Core/Curation/Data/DemandType.cs | 29 +- .../Data/DemandsInitializationAttribute.cs | 109 +- .../DemandsNestedInitializationAttribute.cs | 15 +- Rdmp.Core/Curation/Data/DestinationType.cs | 33 +- .../Curation/Data/EncryptedPasswordHost.cs | 149 +- Rdmp.Core/Curation/Data/EncryptedString.cs | 125 +- .../EntityNaming/FixedStagingDatabaseNamer.cs | 94 +- .../INameDatabasesAndTablesDuringLoads.cs | 43 +- .../Data/EntityNaming/SuffixBasedNamer.cs | 65 +- Rdmp.Core/Curation/Data/ExtendedProperty.cs | 382 +- .../Curation/Data/ExternalDatabaseServer.cs | 460 +- Rdmp.Core/Curation/Data/ExtractionCategory.cs | 80 +- Rdmp.Core/Curation/Data/ExtractionFilter.cs | 310 +- .../Data/ExtractionFilterParameter.cs | 254 +- .../Data/ExtractionFilterParameterSet.cs | 242 +- .../Data/ExtractionFilterParameterSetValue.cs | 278 +- .../Curation/Data/ExtractionInformation.cs | 459 +- Rdmp.Core/Curation/Data/Favourite.cs | 102 +- Rdmp.Core/Curation/Data/FolderHelper.cs | 199 +- Rdmp.Core/Curation/Data/FolderNode.cs | 122 +- .../Data/Governance/GovernanceDocument.cs | 226 +- .../Data/Governance/GovernancePeriod.cs | 350 +- Rdmp.Core/Curation/Data/IAggregateTopX.cs | 38 +- Rdmp.Core/Curation/Data/ICatalogue.cs | 513 +- .../Curation/Data/ICollectSqlParameters.cs | 29 +- Rdmp.Core/Curation/Data/IContainer.cs | 228 +- Rdmp.Core/Curation/Data/IEncryptedString.cs | 35 +- .../Curation/Data/IExternalDatabaseServer.cs | 62 +- .../Data/IExtractableDataSetPackage.cs | 19 +- Rdmp.Core/Curation/Data/IExtractionResults.cs | 98 +- Rdmp.Core/Curation/Data/IFilter.cs | 122 +- Rdmp.Core/Curation/Data/IFolderNode.cs | 19 +- Rdmp.Core/Curation/Data/IHasFolder.cs | 25 +- Rdmp.Core/Curation/Data/IJoin.cs | 103 +- Rdmp.Core/Curation/Data/ILoadProgress.cs | 89 +- Rdmp.Core/Curation/Data/IMasqueradeAs.cs | 21 +- Rdmp.Core/Curation/Data/IMightBeReadOnly.cs | 19 +- Rdmp.Core/Curation/Data/IPermissionWindow.cs | 82 +- ...tIdentificationConfigurationAssociation.cs | 44 +- Rdmp.Core/Curation/Data/ISqlParameter.cs | 73 +- .../Data/ISupplementalExtractionResults.cs | 55 +- Rdmp.Core/Curation/Data/ISupplementalJoin.cs | 29 +- Rdmp.Core/Curation/Data/ISupportingObject.cs | 19 +- Rdmp.Core/Curation/Data/ITableInfo.cs | 265 +- .../Data/ImportExport/ObjectExport.cs | 143 +- 
.../Data/ImportExport/ObjectImport.cs | 121 +- .../Data/ImportExport/ShareManager.cs | 949 +- .../Data/ImportExport/SharingException.cs | 29 +- Rdmp.Core/Curation/Data/JoinInfo.cs | 423 +- Rdmp.Core/Curation/Data/LoadBubble.cs | 45 +- Rdmp.Core/Curation/Data/LoadModuleAssembly.cs | 339 +- Rdmp.Core/Curation/Data/LoadProgress.cs | 242 +- Rdmp.Core/Curation/Data/Lookup.cs | 515 +- .../Curation/Data/LookupCompositeJoinInfo.cs | 195 +- Rdmp.Core/Curation/Data/Memento.cs | 137 +- Rdmp.Core/Curation/Data/MementoInProgress.cs | 96 +- Rdmp.Core/Curation/Data/MementoType.cs | 23 +- Rdmp.Core/Curation/Data/PermissionWindow.cs | 228 +- .../Curation/Data/PermissionWindowPeriod.cs | 146 +- ...rtisedPipelineComponentTypeUnderContext.cs | 122 +- .../Data/Pipelines/IDataFlowPipelineEngine.cs | 94 +- .../Data/Pipelines/IDemandToUseAPipeline.cs | 35 +- .../Data/Pipelines/IHasDesignTimeMode.cs | 27 +- .../Curation/Data/Pipelines/IPipeline.cs | 92 +- .../Data/Pipelines/IPipelineComponent.cs | 94 +- .../Pipelines/IPipelineComponentArgument.cs | 34 +- .../Data/Pipelines/IPipelineUseCase.cs | 105 +- .../Curation/Data/Pipelines/IPipelineUser.cs | 29 +- Rdmp.Core/Curation/Data/Pipelines/Pipeline.cs | 349 +- .../Data/Pipelines/PipelineComponent.cs | 339 +- .../Pipelines/PipelineComponentArgument.cs | 156 +- .../Data/Pipelines/PipelineComponentRole.cs | 15 +- .../Data/Pipelines/PipelineUseCase.cs | 222 +- .../Curation/Data/Pipelines/PipelineUser.cs | 164 +- Rdmp.Core/Curation/Data/Plugin.cs | 256 +- .../Data/Referencing/IReferenceOtherObject.cs | 37 +- .../IReferenceOtherObjectWithPersist.cs | 35 +- .../ReferenceOtherObjectDatabaseEntity.cs | 167 +- .../Curation/Data/Remoting/RemoteRDMP.cs | 276 +- .../Curation/Data/SafeDirectoryCatalog.cs | 661 +- .../Data/SelfCertifyingDataAccessPoint.cs | 144 +- .../DatabaseEntityJsonConverter.cs | 147 +- .../DictionaryAsArrayResolver.cs | 33 +- .../IPickAnyConstructorFinishedCallback.cs | 19 +- .../Serialization/JsonConvertExtensions.cs | 100 +- .../PickAnyConstructorJsonConverter.cs | 177 +- .../Data/Serialization/ShareDefinition.cs | 128 +- .../Data/Spontaneous/SpontaneousObject.cs | 57 +- .../SpontaneouslyInventedAggregateTopX.cs | 52 +- .../SpontaneouslyInventedArgument.cs | 64 +- .../SpontaneouslyInventedColumn.cs | 34 +- .../SpontaneouslyInventedFilter.cs | 122 +- .../SpontaneouslyInventedFilterContainer.cs | 99 +- .../SpontaneouslyInventedPermissionWindow.cs | 95 +- .../SpontaneouslyInventedSqlParameter.cs | 86 +- Rdmp.Core/Curation/Data/StandardRegex.cs | 154 +- Rdmp.Core/Curation/Data/SupportingDocument.cs | 301 +- Rdmp.Core/Curation/Data/SupportingSQLTable.cs | 396 +- Rdmp.Core/Curation/Data/TableInfo.cs | 852 +- .../Data/TicketingSystemConfiguration.cs | 212 +- Rdmp.Core/Curation/Data/WindowLayout.cs | 116 +- .../Curation/DataHelper/ITableInfoImporter.cs | 41 +- Rdmp.Core/Curation/DataHelper/JoinHelper.cs | 321 +- .../AggregateFilterUIOptions.cs | 68 +- .../Construction/AggregateFilterFactory.cs | 71 +- .../Construction/ExtractionFilterFactory.cs | 78 +- .../Construction/IFilterFactory.cs | 75 +- .../DeployedExtractionFilterUIOptions.cs | 55 +- .../ExtractionFilterUIOptions.cs | 53 +- .../FilterImporting/FilterImportWizard.cs | 350 +- .../FilterImporting/FilterImporter.cs | 288 +- .../FilterImporting/FilterUIOptions.cs | 31 +- .../FilterImporting/FilterUIOptionsFactory.cs | 42 +- .../FilterImporting/IParameterRefactorer.cs | 27 +- .../ParameterCollectionUIOptions.cs | 138 +- .../ParameterCollectionUIOptionsFactory.cs | 247 +- .../FilterImporting/ParameterCreator.cs | 
320 +- .../FilterImporting/ParameterRefactorer.cs | 83 +- .../Curation/ForwardEngineerCatalogue.cs | 148 +- Rdmp.Core/Curation/IEncryptStrings.cs | 45 +- Rdmp.Core/Curation/ILoadDirectory.cs | 73 +- .../Curation/IRootFilterContainerHost.cs | 58 +- Rdmp.Core/Curation/LoadDirectory.cs | 198 +- .../Curation/SimpleStringValueEncryption.cs | 153 +- .../Curation/SupportingDocumentsFetcher.cs | 140 +- .../SynchronizationFailedException.cs | 26 +- Rdmp.Core/Curation/TableInfoImporter.cs | 647 +- Rdmp.Core/Curation/TableInfoSynchronizer.cs | 580 +- .../Curation/TableValuedFunctionImporter.cs | 245 +- ...gueAndDataExportObscureDependencyFinder.cs | 99 +- .../Checks/ExtractionConfigurationChecker.cs | 226 +- .../Checks/GlobalExtractionChecker.cs | 89 +- .../Checks/GlobalsReleaseChecker.cs | 165 +- Rdmp.Core/DataExport/Checks/ProjectChecker.cs | 168 +- .../Checks/SelectedDataSetsChecker.cs | 570 +- .../Checks/SupportingSQLTableChecker.cs | 93 +- .../CohortDescriptionDataTableAsyncFetch.cs | 75 +- .../CohortDescriptionFactory.cs | 79 +- .../ExtractableCohortDescription.cs | 316 +- Rdmp.Core/DataExport/Data/CohortDefinition.cs | 177 +- .../Data/CumulativeExtractionResults.cs | 434 +- .../Data/DeployedExtractionFilter.cs | 300 +- .../Data/DeployedExtractionFilterFactory.cs | 71 +- .../Data/DeployedExtractionFilterParameter.cs | 235 +- .../Data/ExternalCohortDefinitionData.cs | 104 +- .../DataExport/Data/ExternalCohortTable.cs | 821 +- .../DataExport/Data/ExtractableCohort.cs | 998 +- .../Data/ExtractableCohortAuditLogBuilder.cs | 249 +- .../DataExport/Data/ExtractableColumn.cs | 479 +- .../DataExport/Data/ExtractableDataSet.cs | 299 +- .../Data/ExtractableDataSetPackage.cs | 146 +- .../Data/ExtractionConfiguration.cs | 1251 +- .../DataExport/Data/ExtractionProgress.cs | 109 +- Rdmp.Core/DataExport/Data/FilterContainer.cs | 253 +- .../Data/GlobalExtractionFilterParameter.cs | 257 +- Rdmp.Core/DataExport/Data/HICProjectSalt.cs | 40 +- .../DataExport/Data/ICohortDefinition.cs | 92 +- .../Data/ICumulativeExtractionResults.cs | 121 +- .../Data/IExternalCohortDefinitionData.cs | 75 +- .../DataExport/Data/IExternalCohortTable.cs | 185 +- .../DataExport/Data/IExtractableCohort.cs | 256 +- .../DataExport/Data/IExtractableDataSet.cs | 71 +- .../Data/IExtractionConfiguration.cs | 369 +- .../DataExport/Data/IExtractionProgress.cs | 120 +- Rdmp.Core/DataExport/Data/IHICProjectSalt.cs | 21 +- Rdmp.Core/DataExport/Data/IProject.cs | 123 +- .../DataExport/Data/ISelectedDataSets.cs | 81 +- .../Data/ISelectedDataSetsForcedJoin.cs | 42 +- Rdmp.Core/DataExport/Data/Project.cs | 361 +- ...tIdentificationConfigurationAssociation.cs | 221 +- Rdmp.Core/DataExport/Data/RetryStrategy.cs | 12 +- Rdmp.Core/DataExport/Data/SelectedDataSets.cs | 345 +- .../Data/SelectedDataSetsForcedJoin.cs | 150 +- .../Data/SupplementalExtractionResults.cs | 357 +- .../DataExtraction/Commands/ExtractCommand.cs | 51 +- .../Commands/ExtractCommandCollection.cs | 35 +- .../ExtractCommandCollectionFactory.cs | 87 +- .../Commands/ExtractCommandState.cs | 31 +- .../Commands/ExtractDatasetCommand.cs | 271 +- .../Commands/ExtractGlobalsCommand.cs | 57 +- .../Commands/IExtractCommand.cs | 37 +- .../Commands/IExtractDatasetCommand.cs | 73 +- .../DataExtraction/ExtractTableVerbatim.cs | 355 +- .../ExtractTimeTransformationObserved.cs | 31 +- .../DataExtraction/ExtractionDirectory.cs | 243 +- .../ExtractionTimeTimeCoverageAggregator.cs | 302 +- ...ractionTimeTimeCoverageAggregatorBucket.cs | 146 +- .../DataExtraction/ExtractionTimeValidator.cs | 142 +- 
.../FileOutputFormats/CSVOutputFormat.cs | 189 +- .../FileOutputFormats/FileOutputFormat.cs | 39 +- .../FileOutputFormats/IFileOutputFormat.cs | 24 +- .../DataExtraction/IExtractionDirectory.cs | 19 +- .../Listeners/ElevateStateListener.cs | 38 +- ...uteDatasetExtractionFlatFileDestination.cs | 369 +- .../ExecuteFullExtractionToDatabaseMSSql.cs | 975 +- .../Destinations/ExtractionDestination.cs | 793 +- .../IExecuteDatasetExtractionDestination.cs | 104 +- .../Pipeline/ExtractionPipelineUseCase.cs | 517 +- .../DataExtraction/Pipeline/FileExtractor.cs | 95 +- .../Pipeline/SimpleFileExtractor.cs | 409 +- .../Pipeline/Sources/DistinctStrategy.cs | 34 +- ...ecuteCrossServerDatasetExtractionSource.cs | 511 +- .../Sources/ExecuteDatasetExtractionSource.cs | 890 +- ...utePkSynthesizerDatasetExtractionSource.cs | 198 +- .../Pipeline/Sources/RowPeeker.cs | 138 +- .../DataExtraction/UserPicks/Bundle.cs | 75 +- .../UserPicks/BundledLookupTable.cs | 137 +- .../UserPicks/ExtractableDatasetBundle.cs | 100 +- .../DataExtraction/UserPicks/GlobalsBundle.cs | 64 +- .../UserPicks/IBundledLookupTable.cs | 15 +- .../UserPicks/IExtractableDatasetBundle.cs | 25 +- .../DataRelease/Audit/IReleaseLog.cs | 31 +- .../DataRelease/Audit/ReleaseLog.cs | 256 +- .../Pipeline/BasicDataReleaseDestination.cs | 191 +- .../Pipeline/FixedReleaseSource.cs | 246 +- .../Pipeline/FlatFileReleaseSource.cs | 61 +- .../Pipeline/MsSqlReleaseSource.cs | 311 +- .../DataRelease/Pipeline/NullReleaseSource.cs | 51 +- .../DataRelease/Pipeline/ReleaseAudit.cs | 17 +- .../DataRelease/Pipeline/ReleaseData.cs | 45 +- .../Pipeline/ReleaseFolderProvider.cs | 171 +- .../DataRelease/Pipeline/ReleaseState.cs | 33 +- .../DataRelease/Pipeline/ReleaseUseCase.cs | 141 +- .../FlatFileGlobalsReleasePotential.cs | 32 +- .../Potential/FlatFileReleasePotential.cs | 135 +- .../Potential/GlobalReleasePotential.cs | 102 +- .../MsSqlExtractionReleasePotential.cs | 89 +- .../Potential/MsSqlGlobalsReleasePotential.cs | 60 +- .../Potential/NoGlobalReleasePotential.cs | 43 +- .../Potential/NoReleasePotential.cs | 41 +- .../DataRelease/Potential/ReleasePotential.cs | 466 +- .../DataRelease/Potential/Releaseability.cs | 105 +- .../DataExport/DataRelease/ReleaseEngine.cs | 476 +- .../DataRelease/ReleaseEngineSettings.cs | 34 +- .../ReleaseEnvironmentPotential.cs | 163 +- .../DataRelease/ReleaseFolderSettings.cs | 45 +- .../DataExport/ProjectNumberException.cs | 18 +- .../DataFlowPipelineEngine.cs | 453 +- .../DataFlowPipelineEngineFactory.cs | 511 +- .../Events/PipelineEngineEventArgs.cs | 39 +- .../GracefulCancellationToken.cs | 157 +- .../GracefulCancellationTokenSource.cs | 69 +- .../DataFlowPipeline/IDataFlowComponent.cs | 67 +- .../DataFlowPipeline/IDataFlowDestination.cs | 20 +- .../IDataFlowPipelineEngineFactory.cs | 27 +- Rdmp.Core/DataFlowPipeline/IDataFlowSource.cs | 81 +- .../IPluginDataFlowComponent.cs | 22 +- .../DataFlowPipeline/IPluginDataFlowSource.cs | 20 +- .../PipelineCrashedException.cs | 15 +- .../Requirements/DataFlowPipelineContext.cs | 576 +- .../DataFlowPipelineContextFactory.cs | 88 +- ...MultipleMatchingImplementationException.cs | 23 + .../OverlappingImplementationsException.cs | 19 +- .../PropertyDemandNotMetException.cs | 30 +- .../Requirements/FlatFileToLoad.cs | 51 +- .../Requirements/IDataFlowPipelineContext.cs | 113 +- .../IPipelineOptionalRequirement.cs | 29 +- .../Requirements/IPipelineRequirement.cs | 47 +- .../Requirements/PipelineUsage.cs | 71 +- .../DataLoad/Engine/Attachers/Attacher.cs | 104 +- 
.../DataLoad/Engine/Attachers/IAttacher.cs | 25 +- .../Engine/Attachers/IPluginAttacher.cs | 14 +- .../Checks/CheckEntireDataLoadProcess.cs | 115 +- .../Checks/Checkers/AnonymisationChecks.cs | 62 +- .../Checks/Checkers/CatalogueLoadChecks.cs | 488 +- .../Checks/Checkers/CommandLineParser.cs | 244 +- .../MetadataLoggingConfigurationChecks.cs | 305 +- .../Checks/Checkers/PreExecutionChecker.cs | 406 +- .../Checks/Checkers/ProcessTaskChecks.cs | 71 +- .../FromCache/ArchiveExtractor.cs | 80 +- .../FromCache/BasicCacheDataProvider.cs | 38 +- .../FromCache/CachedFileRetriever.cs | 387 +- .../FromCache/DeleteCachedFilesOperation.cs | 60 +- .../FromCache/ICachedDataProvider.cs | 21 +- .../Engine/DataProvider/IDataProvider.cs | 21 +- .../DataProvider/IPluginDataProvider.cs | 16 +- .../EntityNaming/HICDatabaseConfiguration.cs | 215 +- .../Operations/DatabaseCloner.cs | 182 +- .../Operations/TableInfoCloneOperation.cs | 245 +- .../StandardDatabaseHelper.cs | 53 +- .../DataLoad/Engine/IDisposeAfterDataLoad.cs | 19 +- Rdmp.Core/DataLoad/Engine/Job/DataLoadJob.cs | 321 +- Rdmp.Core/DataLoad/Engine/Job/IDataLoadJob.cs | 115 +- Rdmp.Core/DataLoad/Engine/Job/IJobFactory.cs | 18 +- Rdmp.Core/DataLoad/Engine/Job/JobFactory.cs | 41 +- .../DataLoadProgressUpdateException.cs | 22 +- .../CacheDataProviderFindingException.cs | 18 +- .../LoadOrCacheProgressUnclearException.cs | 20 +- .../Scheduling/IJobDateGenerationStrategy.cs | 21 +- .../Job/Scheduling/IUpdateLoadProgress.cs | 13 +- .../JobDateGenerationStrategyFactory.cs | 67 +- .../Scheduling/MultipleScheduleJobFactory.cs | 95 +- .../Job/Scheduling/ScheduledDataLoadJob.cs | 29 +- .../Job/Scheduling/ScheduledJobFactory.cs | 54 +- ...SingleScheduleCacheDateTrackingStrategy.cs | 203 +- .../SingleScheduleConsecutiveDateStrategy.cs | 98 +- .../Scheduling/SingleScheduledJobFactory.cs | 60 +- .../UpdateProgressIfLoadsuccessful.cs | 82 +- .../Engine/Job/ThrowImmediatelyDataLoadJob.cs | 192 +- .../Components/Arguments/CommandLineHelper.cs | 103 +- .../Components/Arguments/IStageArgs.cs | 21 +- .../Arguments/LoadArgsDictionary.cs | 60 +- .../Arguments/RuntimeArgumentCollection.cs | 144 +- .../Components/Arguments/StageArgs.cs | 45 +- .../Components/CompositeDataLoadComponent.cs | 51 +- .../Components/DataLoadComponent.cs | 41 +- .../DeleteForLoadingFilesOperation.cs | 101 +- .../Components/IDataLoadComponent.cs | 21 +- .../LoadExecution/Components/LoadFiles.cs | 87 +- .../LoadExecution/Components/PopulateRAW.cs | 145 +- .../Components/Runtime/AttacherRuntimeTask.cs | 156 +- .../Runtime/DataProviderRuntimeTask.cs | 99 +- .../Runtime/ExecutableRuntimeTask.cs | 290 +- .../Runtime/ExecuteSqlFileRuntimeTask.cs | 154 +- .../ExecuteSqlFileRuntimeTaskException.cs | 26 +- .../Runtime/ExecuteSqlInDleStage.cs | 157 +- .../Components/Runtime/IMEFRuntimeTask.cs | 17 +- .../Components/Runtime/IRuntimeTask.cs | 25 +- .../Runtime/MandatoryPropertyChecker.cs | 56 +- .../Runtime/MutilateDataTablesRuntimeTask.cs | 119 +- .../Components/Runtime/RuntimeTask.cs | 132 +- .../Components/Runtime/RuntimeTaskFactory.cs | 54 +- .../Components/RuntimeTaskPackager.cs | 97 +- .../Components/Standard/ArchiveFiles.cs | 125 +- .../Standard/MigrateRAWTableToStaging.cs | 270 +- .../Standard/MigrateRAWToStaging.cs | 126 +- .../Standard/MigrateStagingToLive.cs | 84 +- .../Delegates/JobFinishedHandler.cs | 7 +- .../LoadExecution/HICDataLoadFactory.cs | 159 +- .../LoadExecution/IDataLoadExecution.cs | 19 +- .../LoadExecution/SingleJobExecution.cs | 181 +- .../Engine/LoadProcess/DataLoadProcess.cs | 125 +- 
.../LoadProcess/HICLoadConfigurationFlags.cs | 31 +- .../Engine/LoadProcess/IDataLoadOperation.cs | 13 +- .../Engine/LoadProcess/IDataLoadProcess.cs | 18 +- .../IterativeScheduledDataLoadProcess.cs | 93 +- .../Scheduling/ScheduledDataLoadProcess.cs | 42 +- .../SingleJobScheduledDataLoadProcess.cs | 78 +- ...yAvailableLoadProgressSelectionStrategy.cs | 28 +- .../ILoadProgressSelectionStrategy.cs | 19 +- .../SingleLoadProgressSelectionStrategy.cs | 32 +- .../BackfillMigrationFieldProcessor.cs | 67 +- .../Migration/DatabaseMigrationStrategy.cs | 90 +- .../Migration/IMigrationFieldProcessor.cs | 42 +- .../Engine/Migration/MigrationColumnSet.cs | 115 +- .../Migration/MigrationConfiguration.cs | 95 +- .../Engine/Migration/MigrationHost.cs | 123 +- .../QueryBuilding/LiveMigrationQueryHelper.cs | 57 +- .../MigrationColumnSetQueryHelper.cs | 84 +- .../QueryBuilding/MigrationQueryHelper.cs | 89 +- .../OverwriteMigrationStrategy.cs | 320 +- .../ReverseMigrationQueryHelper.cs | 35 +- .../StagingToLiveMigrationFieldProcessor.cs | 177 +- .../Engine/Mutilators/IMutilateDataTables.cs | 32 +- .../Mutilators/IPluginMutilateDataTables.cs | 15 +- .../ANOConfigurationException.cs | 25 +- .../Anonymisation/ANOTableInfoSynchronizer.cs | 92 +- .../Anonymisation/ANOTransformer.cs | 387 +- .../Anonymisation/BasicAnonymisationEngine.cs | 197 +- .../ColumnInfoToANOTableConverter.cs | 331 +- .../Anonymisation/IdentifierDumper.cs | 720 +- .../IdentifierDumperSynchronizer.cs | 357 +- .../Pipeline/Components/CleanStrings.cs | 181 +- .../Pipeline/Components/RemoveDuplicates.cs | 168 +- .../DataTableUploadDestination.cs | 775 +- .../Destinations/SqlBulkInsertDestination.cs | 263 +- .../Sources/DbDataCommandDataFlowSource.cs | 309 +- .../Sources/IDbDataCommandDataFlowSource.cs | 19 +- .../Engine/Pipeline/UploadFileUseCase.cs | 62 +- Rdmp.Core/DataLoad/ExitCodeType.cs | 45 +- .../Attachers/AnySeparatorFileAttacher.cs | 60 +- .../Attachers/DelimitedFlatFileAttacher.cs | 254 +- .../Modules/Attachers/ExcelAttacher.cs | 268 +- .../Modules/Attachers/FixedWidthAttacher.cs | 148 +- .../Modules/Attachers/FixedWidthColumn.cs | 57 +- .../Modules/Attachers/FixedWidthFormatFile.cs | 216 +- .../Modules/Attachers/FlatFileAttacher.cs | 407 +- .../DataLoad/Modules/Attachers/KVPAttacher.cs | 290 +- .../Attachers/KVPAttacherPipelineUseCase.cs | 37 +- .../DataLoad/Modules/Attachers/MDFAttacher.cs | 544 +- .../Modules/Attachers/MdfAttachStrategy.cs | 25 +- .../Attachers/MdfFileAttachLocations.cs | 86 +- .../Attachers/RemoteDatabaseAttacher.cs | 190 +- .../Modules/Attachers/RemoteTableAttacher.cs | 721 +- .../DoNothingCacheDestination.cs | 71 +- .../Aliases/AliasHandler.cs | 310 +- .../Aliases/AliasResolutionStrategy.cs | 27 +- .../Aliases/Exceptions/AliasException.cs | 18 +- .../Exceptions/AliasTableFetchException.cs | 18 +- .../DataFlowOperations/CohortSampler.cs | 170 +- .../DataFlowOperations/ColumnDropper.cs | 76 +- .../DataFlowOperations/ColumnForbidder.cs | 128 +- .../DataFlowOperations/ColumnRenamer.cs | 85 +- .../ExtractCatalogueMetadata.cs | 160 +- .../DataFlowOperations/ReleaseMetadata.cs | 107 +- .../Modules/DataFlowOperations/RowDeleter.cs | 98 +- .../Modules/DataFlowOperations/SetNull.cs | 93 +- .../Swapping/ColumnSwapper.cs | 644 +- .../DataFlowOperations/TableVersionNamer.cs | 92 +- .../Modules/DataFlowOperations/Transposer.cs | 139 +- .../DelimitedFlatFileDataFlowSource.cs | 797 +- .../DataFlowSources/DoNothingCacheSource.cs | 81 +- .../DataFlowSources/ExcelDataFlowSource.cs | 570 +- .../ExplicitTypingCollection.cs | 79 +- 
.../SubComponents/FlatFileColumnCollection.cs | 615 +- .../SubComponents/FlatFileEventHandlers.cs | 200 +- .../SubComponents/FlatFileLine.cs | 89 +- .../FlatFileToDataTablePusher.cs | 505 +- .../DataProvider/DoNothingDataProvider.cs | 50 +- .../ExcelToCSVFilesConverter.cs | 189 +- .../DataProvider/ImportFilesDataProvider.cs | 115 +- .../DataProvider/ShareDefinitionImporter.cs | 81 +- .../DataProvider/WebServiceConfiguration.cs | 108 +- .../Exceptions/FlatFileLoadException.cs | 28 +- .../MultipleMatchingFilesException.cs | 16 +- .../DataLoad/Modules/FTP/FTPDownloader.cs | 473 +- .../DataLoad/Modules/FTP/SFTPDownloader.cs | 151 +- .../Modules/FileOperations/FileUnzipper.cs | 210 +- .../DataLoadProgressUpdateInfo.cs | 289 +- .../DataLoadProgressUpdateStrategy.cs | 45 +- .../UpdateProgressToResultOfDelegate.cs | 40 +- ...ProgressToSpecificValueIfLoadsuccessful.cs | 19 +- .../Mutilators/BackupDatabaseMutilation.cs | 88 +- .../DataLoad/Modules/Mutilators/Coalescer.cs | 148 +- .../Modules/Mutilators/Dilution/Dilution.cs | 167 +- .../Dilution/DilutionOperationFactory.cs | 55 +- .../DilutionColumnNotSetException.cs | 22 +- .../Dilution/Operations/CrushToBitFlag.cs | 32 +- .../Dilution/Operations/DilutionOperation.cs | 53 +- .../Operations/ExcludeRight3OfUKPostcodes.cs | 47 +- .../Operations/IPluginDilutionOperation.cs | 17 +- .../Operations/RoundDateToMiddleOfQuarter.cs | 54 +- .../DataLoad/Modules/Mutilators/Distincter.cs | 37 +- .../Mutilators/ExecuteSqlMutilation.cs | 70 +- .../Mutilators/MatchingTablesMutilator.cs | 143 +- .../Mutilators/PrematureLoadEndCondition.cs | 37 +- .../Modules/Mutilators/PrematureLoadEnder.cs | 150 +- .../PrimaryKeyCollisionResolverMutilation.cs | 162 +- .../QueryBuilders/BackfillSqlHelper.cs | 217 +- ...fePrimaryKeyCollisionResolverMutilation.cs | 154 +- .../Mutilators/StagingBackfillMutilator.cs | 486 +- .../Modules/Mutilators/TableVarcharMaxer.cs | 103 +- Rdmp.Core/DataLoad/Modules/NativeFileIO.cs | 65 - .../DataLoad/Modules/Web/WebFileDownloader.cs | 262 +- .../Triggers/DiffDatabaseDataFetcher.cs | 484 +- ...ableColumnDifferencesInArchiveException.cs | 16 +- .../Triggers/Exceptions/TriggerException.cs | 18 +- .../Exceptions/TriggerMissingException.cs | 18 +- .../DataLoad/Triggers/ITriggerImplementer.cs | 67 +- .../MicrosoftSQLTriggerImplementer.cs | 602 +- .../MySqlTriggerImplementer.cs | 250 +- .../OracleTriggerImplementer.cs | 96 +- .../PostgreSqlTriggerImplementer.cs | 248 +- .../Implementations/TriggerImplementer.cs | 316 +- .../TriggerImplementerFactory.cs | 46 +- .../DataLoad/Triggers/SpecialFieldNames.cs | 32 +- Rdmp.Core/DataLoad/Triggers/TriggerChecks.cs | 216 +- Rdmp.Core/DataLoad/Triggers/TriggerStatus.cs | 33 +- .../Data/ArchivalPeriodicityCount.cs | 17 +- .../DataQualityEngine/Data/ColumnState.cs | 248 +- .../Data/DQEGraphAnnotation.cs | 117 +- .../DataQualityEngine/Data/Evaluation.cs | 299 +- .../Data/PeriodicityState.cs | 303 +- Rdmp.Core/DataQualityEngine/Data/RowState.cs | 95 +- .../DatasetTimespanCalculator.cs | 152 +- .../Reports/CatalogueConstraintReport.cs | 874 +- .../Reports/DQEStateOverDataLoadRunId.cs | 254 +- .../Reports/DataQualityReport.cs | 32 +- .../Reports/IDataQualityReport.cs | 23 +- .../Reports/LoadProgressSummaryReport.cs | 508 +- .../PeriodicityHelpers/PeriodicityCube.cs | 56 +- .../PeriodicityCubesOverTime.cs | 98 +- .../ArbitraryTableExtractionUICollection.cs | 204 +- .../DataViewing/IAutoCompleteProvider.cs | 55 +- .../IViewSQLAndResultsCollection.cs | 27 +- .../ViewAggregateExtractUICollection.cs | 187 +- 
.../ViewCatalogueDataCollection.cs | 187 +- .../ViewCohortExtractionUICollection.cs | 200 +- ...dentificationConfigurationSqlCollection.cs | 116 +- .../ViewColumnExtractCollection.cs | 342 +- Rdmp.Core/DataViewing/ViewLogsCollection.cs | 81 +- ...ewSelectedDatasetExtractionUICollection.cs | 139 +- .../ViewSupportingSqlCollection.cs | 76 +- .../ViewTableInfoExtractUICollection.cs | 182 +- Rdmp.Core/DataViewing/ViewType.cs | 15 +- .../Databases/DataQualityEnginePatcher.cs | 137 +- Rdmp.Core/Databases/LoggingDatabasePatcher.cs | 293 +- Rdmp.Core/Databases/PatcherManager.cs | 95 +- Rdmp.Core/Databases/PlatformDatabase.cs | 42 +- Rdmp.Core/Databases/QueryCachingPatcher.cs | 74 +- Rdmp.Core/EnumExtensions.cs | 48 +- Rdmp.Core/GlobalStrings.Designer.cs | 28 + Rdmp.Core/GlobalStrings.resx | 15 + Rdmp.Core/IPluginUserInterface.cs | 47 +- .../Icons/IconOverlays/IconOverlayProvider.cs | 87 +- .../IconProvision/CatalogueIconProvider.cs | 411 +- .../IconProvision/DataExportIconProvider.cs | 68 +- .../IconProvision/EnumImageCollection.cs | 21 +- .../Icons/IconProvision/ICoreIconProvider.cs | 23 +- .../IconProvision/IconProvisionException.cs | 42 +- Rdmp.Core/Icons/IconProvision/RDMPConcept.cs | 398 +- ...gateConfigurationStateBasedIconProvider.cs | 72 +- .../CatalogueItemStateBasedIconProvider.cs | 98 +- ...atalogueItemsNodeStateBasedIconProvider.cs | 67 +- .../CatalogueStateBasedIconProvider.cs | 78 +- .../CheckResultStateBasedIconProvider.cs | 38 +- ...ggregateContainerStateBasedIconProvider.cs | 58 +- ...tionConfigurationStateBasedIconProvider.cs | 37 +- .../ColumnInfoStateBasedIconProvider.cs | 37 +- ...nalDatabaseServerStateBasedIconProvider.cs | 120 +- .../ExtractCommandStateBasedIconProvider.cs | 45 +- ...ExtractableCohortStateBasedIconProvider.cs | 35 +- ...ExtractableColumnStateBasedIconProvider.cs | 70 +- ...xtractableDataSetStateBasedIconProvider.cs | 44 +- ...tionConfigurationStateBasedIconProvider.cs | 36 +- ...actionInformationStateBasedIconProvider.cs | 105 +- .../FilterStateBasedIconProvider.cs | 47 +- .../IObjectStateBasedIconProvider.cs | 11 +- .../LoadStageNodeStateBasedIconProvider.cs | 56 +- ...PipelineComponentStateBasedIconProvider.cs | 42 +- .../ProcessTaskStateBasedIconProvider.cs | 51 +- .../ReleaseabilityStateBasedIconProvider.cs | 81 +- .../SupportingObjectStateBasedIconProvider.cs | 74 +- ...bleInfoServerNodeStateBasedIconProvider.cs | 35 +- .../TableInfoStateBasedIconProvider.cs | 31 +- Rdmp.Core/Logging/DataLoadInfo.cs | 544 +- Rdmp.Core/Logging/DataLoadInfoHandler.cs | 17 +- Rdmp.Core/Logging/DataSource.cs | 62 +- Rdmp.Core/Logging/IDataLoadInfo.cs | 31 +- Rdmp.Core/Logging/ILogManager.cs | 19 +- Rdmp.Core/Logging/ITableLoadInfo.cs | 29 +- .../Extensions/CheckEventArgsExtensions.cs | 27 +- .../Extensions/NotifyEventArgsExtensions.cs | 33 +- .../NLogListeners/NLogICheckNotifier.cs | 52 +- .../NLogIDataLoadEventListener.cs | 38 +- .../Listeners/NLogListeners/NLogListener.cs | 43 +- .../OverrideSenderIDataLoadEventListener.cs | 45 +- .../Listeners/ToFileDataLoadEventListener.cs | 66 +- .../ToLoggingDatabaseDataLoadEventListener.cs | 192 +- Rdmp.Core/Logging/LogManager.cs | 520 +- Rdmp.Core/Logging/LogViewerFilter.cs | 156 +- Rdmp.Core/Logging/LoggingDatabaseChecker.cs | 392 +- Rdmp.Core/Logging/LoggingTables.cs | 27 +- .../PastEvents/ArchivalDataLoadInfo.cs | 265 +- .../Logging/PastEvents/ArchivalDataSource.cs | 103 +- .../Logging/PastEvents/ArchivalFatalError.cs | 97 +- .../Logging/PastEvents/ArchivalProgressLog.cs | 85 +- .../PastEvents/ArchivalTableLoadInfo.cs | 177 +- 
.../IArchivalLoggingRecordOfPastEvent.cs | 19 +- Rdmp.Core/Logging/TableLoadInfo.cs | 416 +- .../Attributes/AdjustableLocationAttribute.cs | 19 +- .../Attributes/AttributePropertyFinder.cs | 57 + .../Attributes/IAttributePropertyFinder.cs | 19 +- .../Attributes/RelationshipAttribute.cs | 66 + .../Attributes/RelationshipType.cs | 35 + .../Attributes/SqlAttribute.cs | 21 +- .../Attributes/UniqueAttribute.cs | 19 + .../Attributes/UsefulPropertyAttribute.cs | 19 +- .../DoNotImportDescriptionsAttribute.cs} | 23 +- .../EmptyDisposeable.cs | 9 +- .../ICanBeSummarised.cs | 24 + .../IDeletableWithCustomMessage.cs | 27 + .../IDeleteable.cs | 19 + .../IDisableable.cs | 18 +- .../IMapsDirectlyToDatabaseTable.cs | 45 + .../IMapsDirectlyToDatabaseTableEventArgs.cs | 21 +- .../IMightBeDeprecated.cs | 21 + .../MapsDirectlyToDatabaseTable/INamed.cs | 27 +- .../IObscureDependencyFinder.cs | 40 + .../IRepository.cs | 223 + .../MapsDirectlyToDatabaseTable/ISaveable.cs | 20 + .../ITableRepository.cs | 38 + .../Injection/IInjectKnown.cs | 57 + .../Injection/Injection.md | 0 .../MapsDirectlyToDatabaseTable.cd | 0 .../MemoryRepository.cs | 372 + .../NewObjectPool.cs | 79 + .../NoMappingToDatabase.cs | 10 +- .../ObjectDeletedException.cs | 25 + .../Revertable/ChangeDescription.cs | 13 +- .../Revertable/IRevertable.cs | 32 + .../PropertyChangedExtendedEventArgs.cs | 27 +- .../Revertable/RevertableObjectReport.cs | 30 + .../RevertablePropertyDifference.cs | 28 + .../SaveEventArgs.cs | 34 +- .../TableRepository.cs | 880 + .../UpdateCommandStore.cs | 62 + .../Versioning/DatabaseVersionProvider.cs | 35 + .../Versioning/IPatcher.cs | 67 + .../Versioning/InvalidPatchException.cs | 23 + .../MasterDatabaseScriptExecutor.cs | 428 + .../Versioning/Patch.cs | 172 + .../Versioning/Patcher.cs | 127 + .../Versioning/PluginPatcher.cs | 17 +- Rdmp.Core/MissingColumnInfoException.cs | 25 +- Rdmp.Core/OrderableComparer.cs | 109 +- Rdmp.Core/PluginUserInterface.cs | 79 +- Rdmp.Core/Providers/CatalogueChildProvider.cs | 2854 +- .../Providers/CatalogueProblemProvider.cs | 488 +- .../Providers/DataExportChildProvider.cs | 1330 +- .../Providers/DataExportProblemProvider.cs | 152 +- .../Providers/DecryptionPrivateKeyNode.cs | 21 +- Rdmp.Core/Providers/DescendancyList.cs | 291 +- .../ExamplePluginCohortCompilerUI.cs | 63 +- Rdmp.Core/Providers/FavouritesProvider.cs | 85 +- Rdmp.Core/Providers/IChildProvider.cs | 23 +- Rdmp.Core/Providers/ICoreChildProvider.cs | 282 +- Rdmp.Core/Providers/IProblemProvider.cs | 50 +- Rdmp.Core/Providers/Nodes/AggregatesNode.cs | 61 +- Rdmp.Core/Providers/Nodes/AllANOTablesNode.cs | 18 +- Rdmp.Core/Providers/Nodes/AllCohortsNode.cs | 20 +- .../Nodes/AllConnectionStringKeywordsNode.cs | 19 +- .../Providers/Nodes/AllDashboardsNode.cs | 18 +- .../Nodes/AllDataAccessCredentialsNode.cs | 20 +- .../Providers/Nodes/AllExpiredPluginsNode.cs | 11 +- .../Providers/Nodes/AllExternalServersNode.cs | 20 +- .../Providers/Nodes/AllGovernanceNode.cs | 25 +- .../Nodes/AllPermissionWindowsNode.cs | 23 +- Rdmp.Core/Providers/Nodes/AllPluginsNode.cs | 20 +- .../Providers/Nodes/AllRDMPRemotesNode.cs | 17 +- Rdmp.Core/Providers/Nodes/AllServersNode.cs | 16 +- .../Providers/Nodes/AllStandardRegexesNode.cs | 19 +- .../Providers/Nodes/ArbitraryFolderNode.cs | 31 +- .../Providers/Nodes/CatalogueItemsNode.cs | 84 +- .../Nodes/CatalogueLookupUsageNode.cs | 37 +- .../Providers/Nodes/CatalogueLookupsNode.cs | 61 +- Rdmp.Core/Providers/Nodes/Category.cs | 7 +- .../AllOrphanAggregateConfigurationsNode.cs | 21 +- 
.../AllTemplateAggregateConfigurationsNode.cs | 21 +- ...ueryCacheUsedByCohortIdentificationNode.cs | 55 +- .../Nodes/DataAccessCredentialUsageNode.cs | 91 +- .../Providers/Nodes/DocumentationNode.cs | 67 +- .../Nodes/ExtractionArbitraryFolderNode.cs | 22 +- .../Nodes/ExtractionConfigurationsNode.cs | 67 +- .../Nodes/ExtractionDirectoryNode.cs | 74 +- .../FrozenExtractionConfigurationsNode.cs | 69 +- .../Nodes/IdentifierDumpServerUsageNode.cs | 78 +- .../Providers/Nodes/JoinableCollectionNode.cs | 149 +- Rdmp.Core/Providers/Nodes/LinkedCohortNode.cs | 91 +- .../Providers/Nodes/LinkedColumnInfoNode.cs | 79 +- .../AllCataloguesUsedByLoadMetadataNode.cs | 73 +- .../AllProcessTasksUsedByLoadMetadataNode.cs | 62 +- .../CatalogueUsedByLoadMetadataNode.cs | 40 +- .../LoadMetadataNodes/IDirectoryInfoNode.cs | 19 +- .../LoadMetadataNodes/LoadDirectoryNode.cs | 82 +- .../LoadMetadataScheduleNode.cs | 62 +- .../Nodes/LoadMetadataNodes/LoadStageNode.cs | 72 +- .../OverrideRawServerNode.cs | 44 +- ...PermissionWindowUsedByCacheProgressNode.cs | 106 +- Rdmp.Core/Providers/Nodes/Node.cs | 15 +- .../Providers/Nodes/PackageContentNode.cs | 98 +- .../Nodes/PipelineNodes/AllPipelinesNode.cs | 20 +- .../Nodes/PipelineNodes/OtherPipelinesNode.cs | 26 +- .../PipelineCompatibleWithUseCaseNode.cs | 99 +- .../StandardPipelineUseCaseNode.cs | 52 +- .../Nodes/PreLoadDiscardedColumnsNode.cs | 64 +- .../Providers/Nodes/ProjectCataloguesNode.cs | 69 +- ...tificationConfigurationAssociationsNode.cs | 75 +- .../ProjectCohortNodes/ProjectCohortsNode.cs | 76 +- .../ProjectSavedCohortsNode.cs | 70 +- .../SharingNodes/AllObjectExportsNode.cs | 17 +- .../SharingNodes/AllObjectImportsNode.cs | 18 +- .../SharingNodes/AllObjectSharingNode.cs | 17 +- Rdmp.Core/Providers/Nodes/SingletonNode.cs | 54 +- .../Providers/Nodes/TableInfoDatabaseNode.cs | 70 +- .../Providers/Nodes/TableInfoServerNode.cs | 76 +- .../IObjectUsedByOtherObjectNode.cs | 24 +- .../ObjectUsedByOtherObjectNode.cs | 151 +- .../CohortSourceUsedByProjectNode.cs | 23 +- Rdmp.Core/Providers/ProblemProvider.cs | 51 +- .../Providers/{Readme.md => Providers.md} | 0 .../Providers/RdmpEnumerableExtensions.cs | 46 +- Rdmp.Core/Providers/SearchablesMatchScorer.cs | 641 +- Rdmp.Core/QueryBuilding/AggregateBuilder.cs | 1075 +- .../QueryBuilding/AggregateCountColumn.cs | 180 +- Rdmp.Core/QueryBuilding/CacheUsage.cs | 41 +- Rdmp.Core/QueryBuilding/CohortQueryBuilder.cs | 340 +- .../CohortQueryBuilderDependency.cs | 482 +- .../CohortQueryBuilderDependencySql.cs | 82 +- .../QueryBuilding/CohortQueryBuilderHelper.cs | 255 +- .../QueryBuilding/CohortQueryBuilderResult.cs | 788 +- .../CohortSummaryQueryBuilder.cs | 457 +- .../QueryBuilding/ColumnInfoToIColumn.cs | 111 +- Rdmp.Core/QueryBuilding/ConstantParameter.cs | 204 +- .../DataAccessPointCollection.cs | 304 +- .../QueryBuilding/ExtractionQueryBuilder.cs | 332 +- Rdmp.Core/QueryBuilding/IColumn.cs | 98 +- Rdmp.Core/QueryBuilding/ISqlQueryBuilder.cs | 245 +- .../Options/AggregateBuilderBasicOptions.cs | 155 +- .../Options/AggregateBuilderCohortOptions.cs | 220 +- .../Options/AggregateBuilderOptionsFactory.cs | 34 +- .../Options/IAggregateBuilderOptions.cs | 206 +- .../Parameters/ParameterFoundAtLevel.cs | 64 +- .../Parameters/ParameterLevels.cs | 47 +- .../Parameters/ParameterManager.cs | 865 +- .../ParameterManagerLifecycleState.cs | 44 +- .../PrimaryKeyCollisionResolver.cs | 569 +- Rdmp.Core/QueryBuilding/QueryBuilder.cs | 557 +- Rdmp.Core/QueryBuilding/QueryBuilderArgs.cs | 73 +- .../QueryBuilding/QueryBuilderCustomArgs.cs | 58 
+- .../QueryBuilding/QueryBuildingException.cs | 85 +- Rdmp.Core/QueryBuilding/QueryTimeColumn.cs | 483 +- .../ReleaseIdentifierSubstitution.cs | 196 +- .../QueryBuilding/SqlQueryBuilderHelper.cs | 1195 +- .../SyntaxChecking/ColumnSyntaxChecker.cs | 78 +- .../SyntaxChecking/FilterSyntaxChecker.cs | 55 +- .../SyntaxChecking/ParameterSyntaxChecker.cs | 43 +- .../SyntaxChecking/SyntaxChecker.cs | 148 +- .../Aggregation/AggregateOperation.cs | 51 +- .../Arguments/CacheCommitArguments.cs | 45 +- .../CacheCommitExtractableAggregate.cs | 63 +- .../Arguments/CacheCommitIdentifierList.cs | 95 +- .../CacheCommitJoinableInceptionQuery.cs | 39 +- ...hedAggregateConfigurationResultsManager.cs | 342 +- Rdmp.Core/RDMPCollection.cs | 30 +- Rdmp.Core/Rdmp.Core.csproj | 270 +- Rdmp.Core/Reports/BitmapWithDescription.cs | 29 +- Rdmp.Core/Reports/CustomMetadataReport.cs | 900 +- .../Reports/CustomMetadataReportException.cs | 42 +- Rdmp.Core/Reports/DitaCatalogueExtractor.cs | 528 +- Rdmp.Core/Reports/DocXHelper.cs | 479 +- .../DocumentationReportDatabaseEntities.cs | 153 +- .../DublinCore/DublinCoreDefinition.cs | 263 +- .../DublinCore/DublinCoreTranslater.cs | 105 +- .../ExtractionTime/WordCatalogueExtractor.cs | 272 +- .../WordDataReleaseFileGenerator.cs | 314 +- .../Reports/ExtractionTime/WordDataWriter.cs | 722 +- Rdmp.Core/Reports/GovernanceReport.cs | 248 +- .../Reports/IDetermineDatasetTimespan.cs | 44 +- Rdmp.Core/Reports/MetadataReport.cs | 734 +- Rdmp.Core/Reports/MetadataReportArgs.cs | 100 +- .../Repositories/AmbiguousTypeException.cs | 24 +- Rdmp.Core/Repositories/CatalogueRepository.cs | 621 +- .../Construction/ObjectConstructor.cs | 579 +- ...jectLacksCompatibleConstructorException.cs | 20 +- .../UseWithCommandLineAttribute.cs | 29 +- .../UseWithObjectConstructorAttribute.cs | 20 +- Rdmp.Core/Repositories/DQERepository.cs | 128 +- .../Repositories/DataExportRepository.cs | 345 +- .../Repositories/ICatalogueRepository.cs | 286 +- .../ICatalogueRepositoryServiceLocator.cs | 15 +- Rdmp.Core/Repositories/IDQERepository.cs | 59 +- .../Repositories/IDataExportRepository.cs | 57 +- .../IDataExportRepositoryServiceLocator.cs | 15 +- .../IRDMPPlatformRepositoryServiceLocator.cs | 92 +- Rdmp.Core/Repositories/IRowVerCache.cs | 16 +- Rdmp.Core/Repositories/MEF.cs | 459 +- .../Managers/AggregateFilterManager.cs | 86 +- .../Managers/CohortContainerManager.cs | 205 +- .../Managers/CommentStoreWithKeywords.cs | 54 +- .../Managers/DataExportFilterManager.cs | 101 +- .../Managers/DataExportProperty.cs | 25 +- .../Managers/DataExportPropertyManager.cs | 245 +- .../Managers/GovernanceManager.cs | 94 +- ...CohortContainerManagerFromChildProvider.cs | 133 +- ...ataExportFilterManagerFromChildProvider.cs | 92 +- .../FilterManagerFromChildProvider.cs | 86 +- .../Managers/ICohortContainerManager.cs | 116 +- .../Managers/IDataExportPropertyManager.cs | 33 +- .../Managers/IEncryptionManager.cs | 19 +- .../IExtractableDataSetPackageManager.cs | 53 +- .../Repositories/Managers/IFilterManager.cs | 27 +- .../Managers/IGovernanceManager.cs | 29 +- .../Repositories/Managers/IJoinManager.cs | 21 +- .../Repositories/Managers/IPluginManager.cs | 17 +- .../Managers/ITableInfoCredentialsManager.cs | 134 +- .../Repositories/Managers/JoinInfoType.cs | 37 +- .../Repositories/Managers/JoinManager.cs | 95 +- .../Managers/PasswordEncryptionKeyLocation.cs | 259 +- .../Repositories/Managers/PluginManager.cs | 64 +- .../Managers/TableInfoCredentialsManager.cs | 457 +- .../Repositories/MemoryCatalogueRepository.cs | 867 +- 
.../MemoryDataExportRepository.cs | 192 +- Rdmp.Core/Repositories/RepositoryProvider.cs | 206 +- Rdmp.Core/Repositories/RowVerCache.cs | 223 +- .../Repositories/VersionYamlTypeConverter.cs | 35 +- Rdmp.Core/Repositories/YamlRepository.cs | 221 +- .../Annotations/Annotations.cs | 152 + .../ReusableLibraryCode/AssemblyResolver.cs | 74 + .../Checks/AcceptAllCheckNotifier.cs | 39 + .../Checks/CheckEventArgs.cs | 112 + .../ReusableLibraryCode/Checks/ErrorCode.cs | 41 + .../ReusableLibraryCode/Checks/ErrorCodes.cs | 90 + .../Checks/ICheckNotifier.cs | 16 + .../ReusableLibraryCode/Checks/ICheckable.cs | 40 +- .../Checks/IgnoreAllErrorsCheckNotifier.cs | 33 +- .../Checks/ReplayCheckable.cs | 32 + .../Checks/ThrowImmediatelyCheckNotifier.cs | 58 + .../Checks/ToMemoryCheckNotifier.cs | 64 + .../Comments/CommentStore.cs | 289 + .../Comments/XmlDocumentExtensions.cs | 38 + .../DataAccess/DataAccessContext.cs | 26 +- .../DataAccess/DataAccessPortal.cs | 124 + .../DataAccess/IDataAccessCredentials.cs | 14 +- .../DataAccess/IDataAccessPoint.cs | 55 + .../DataAccess/IEncryptedPasswordHost.cs | 27 + .../DatabaseCommandHelper.cs | 111 + Rdmp.Core/ReusableLibraryCode/Diff.cs | 570 + .../DirectoryInfoComparer.cs | 41 + .../DoNotExtractProperty.cs | 15 +- .../ReusableLibraryCode/ExceptionHelper.cs | 66 + .../ExpectedIdenticalStringsException.cs | 86 + .../Extensions/DataTableExtensions.cs | 44 + .../Extensions/VersionExtensions.cs | 49 + .../ReusableLibraryCode/Helpers/Settings.cs | 79 +- .../ICustomSearchString.cs | 20 + .../ReusableLibraryCode/IHasDependencies.cs | 28 + Rdmp.Core/ReusableLibraryCode/IHasSummary.cs | 24 + Rdmp.Core/ReusableLibraryCode/IKnowWhatIAm.cs | 20 + .../Icons/DatabaseProviderIcons.Designer.cs | 4 +- .../Icons/DatabaseProviderIcons.resx | 0 .../Icons/DatabaseProviderIcons/Microsoft.png | Bin .../MicrosoftOverlay.png | Bin .../Icons/DatabaseProviderIcons/MySql.png | Bin .../DatabaseProviderIcons/MySqlOverlay.png | Bin .../Icons/DatabaseProviderIcons/Oracle.png | Bin .../DatabaseProviderIcons/OracleOverlay.png | Bin .../DatabaseProviderIcons/PostgreSql.png | Bin .../PostgreSqlOverlay.png | Bin .../Icons/DatabaseProviderIcons/Unknown.png | Bin .../DatabaseProviderIcons/UnknownOverlay.png | Bin .../IconProvision/DatabaseTypeIconProvider.cs | 74 + .../Icons/IconProvision/IIconProvider.cs | 17 +- .../Icons/IconProvision/OverlayKind.cs | 39 + .../ReusableLibraryCode/ObjectExtensions.cs | 31 +- .../ComprehensiveQueryPerformanceCounter.cs | 34 + .../Performance/QueryPerformed.cs | 31 +- .../ReusableLibraryCode/Progress/EventType.cs | 17 +- .../Progress/ForkDataLoadEventListener.cs | 34 + ...romCheckNotifierToDataLoadEventListener.cs | 44 + ...romDataLoadEventListenerToCheckNotifier.cs | 32 + .../Progress/IDataLoadEventListener.cs | 19 + .../Progress/NotifyEventArgs.cs | 70 + .../Progress/ProgressEventArgs.cs | 38 + .../Progress/ProgressMeasurement.cs | 32 + .../Progress/ProgressType.cs | 11 +- .../ThrowImmediatelyDataLoadEventListener.cs | 53 + .../Progress/ToMemoryDataLoadEventListener.cs | 86 + .../ReusableLibraryCode/Rfc4180Writer.cs | 69 + .../IgnorableSerializerContractResolver.cs | 72 + .../Settings/RDMPApplicationSettings.cs | 423 + .../Settings/UserSettings.cs | 590 + Rdmp.Core/ReusableLibraryCode/UsefulStuff.cs | 582 + .../VisualStudioProjectReference.cs | 24 + .../VisualStudioSolutionFile.cs | 111 + .../VisualStudioSolutionFolder.cs | 27 + .../Dependency/Gathering/GatheredObject.cs | 261 +- .../Sharing/Dependency/Gathering/Gatherer.cs | 231 +- 
.../ObjectSharingObscureDependencyFinder.cs | 54 +- .../Dependency/SharedPluginImporter.cs | 32 +- .../Exceptions/RefactoringException.cs | 26 +- .../Refactoring/SelectSQLRefactorer.cs | 437 +- .../Transmission/RemotePushingService.cs | 157 +- ...uptRepositoryConnectionDetailsException.cs | 39 +- Rdmp.Core/Startup/EnvironmentInfo.cs | 101 +- .../Events/MEFFileDownloadEventStatus.cs | 36 +- .../MEFFileDownloadProgressEventArgs.cs | 54 +- .../Events/PlatformDatabaseFoundEventArgs.cs | 61 +- .../Events/PluginPatcherFoundEventArgs.cs | 46 +- .../Startup/Events/PluginPatcherStatus.cs | 27 +- .../Events/RDMPPlatformDatabaseStatus.cs | 51 +- Rdmp.Core/Startup/Events/StartupDelegates.cs | 41 +- Rdmp.Core/Startup/IPluginRepositoryFinder.cs | 43 +- Rdmp.Core/Startup/LinkedRepositoryProvider.cs | 179 +- Rdmp.Core/Startup/PluginRepository.cs | 44 +- Rdmp.Core/Startup/PluginRepositoryFinder.cs | 31 +- .../Startup/SourceCodeNotFoundException.cs | 18 +- Rdmp.Core/Startup/Startup.cs | 510 +- .../Startup/UserSettingsRepositoryFinder.cs | 179 +- Rdmp.Core/Ticketing/ITicketingSystem.cs | 92 +- Rdmp.Core/Ticketing/PluginTicketingSystem.cs | 37 +- Rdmp.Core/Ticketing/SimpleTicketingSystem.cs | 95 +- .../TicketingSystemConstructorParameters.cs | 33 +- Rdmp.Core/Ticketing/TicketingSystemFactory.cs | 88 +- .../Validation/Constraints/Consequence.cs | 35 +- .../Validation/Constraints/IConstraint.cs | 45 +- .../Validation/Constraints/Primary/Alpha.cs | 54 +- .../Constraints/Primary/AlphaNumeric.cs | 52 +- .../Validation/Constraints/Primary/Chi.cs | 248 +- .../Validation/Constraints/Primary/Date.cs | 102 +- .../Constraints/Primary/IPrimaryConstraint.cs | 25 +- .../Primary/PluginPrimaryConstraint.cs | 15 +- .../Constraints/Primary/PrimaryConstraint.cs | 31 +- .../Validation/Constraints/Secondary/Bound.cs | 134 +- .../Constraints/Secondary/BoundDate.cs | 300 +- .../Constraints/Secondary/BoundDouble.cs | 270 +- .../Secondary/ISecondaryConstraint.cs | 41 +- .../Constraints/Secondary/NotNull.cs | 47 +- .../Secondary/PluginSecondaryConstraint.cs | 16 +- .../Secondary/Predictor/ChiSexPredictor.cs | 79 +- .../Predictor/PluginPredictionRule.cs | 13 +- .../Secondary/Predictor/Prediction.cs | 110 +- .../Secondary/Predictor/PredictionRule.cs | 29 +- .../ValuePredictsOtherValueNullity.cs | 24 + .../ValuePredictsOtherValueNullness.cs | 22 - .../ReferentialIntegrityConstraint.cs | 382 +- .../Secondary/RegularExpression.cs | 98 +- .../Secondary/SecondaryConstraint.cs | 26 +- .../Secondary/StandardRegexConstraint.cs | 146 +- .../ValidationXmlDependencyException.cs | 16 +- Rdmp.Core/Validation/Dependency/Suspect.cs | 35 +- .../ValidationXMLObscureDependencyFinder.cs | 283 +- Rdmp.Core/Validation/ItemValidator.cs | 278 +- .../Validation/UIAttributes/Attributes.cs | 52 +- Rdmp.Core/Validation/ValidationFailure.cs | 72 +- Rdmp.Core/Validation/Validator.cs | 755 +- .../Validation/VerboseValidationResults.cs | 186 +- Rdmp.UI.Tests/ANOTableUITests.cs | 78 +- Rdmp.UI.Tests/AggregateEditorUITests.cs | 242 +- Rdmp.UI.Tests/ArbitraryFolderNodeTests.cs | 59 +- Rdmp.UI.Tests/CatalogueIconProviderTests.cs | 151 +- Rdmp.UI.Tests/CatalogueItemUITests.cs | 114 +- Rdmp.UI.Tests/CatalogueUITests.cs | 191 +- Rdmp.UI.Tests/ChildProviderTests.cs | 435 +- ...ortIdentificationConfigurationUnitTests.cs | 246 +- .../ExternalCohortTableUITests.cs | 31 +- .../ExecuteCommandAlterColumnTypeTests.cs | 123 +- .../ExecuteCommandClearFavouritesTests.cs | 94 +- .../ExecuteCommandDeleteTests.cs | 134 +- ...ecuteCommandImportDublinCoreFormatTests.cs | 64 +- 
.../Integration/ExtractionFilterUITests.cs | 49 +- .../ColumnInfoToANOTableConverterUITests.cs | 27 +- .../DataRelease/DataReleaseUITests.cs | 20 +- .../AllObjectsHaveImages.cs | 82 +- .../AllUIsDocumentedTest.cs | 106 +- .../AllImportantClassesDocumented.cs | 233 +- .../DocumentationCrossExaminationTest.cs | 943 +- .../ExplicitDatabaseNameChecker.cs | 89 +- .../RDMPFormInitializationTests.cs | 209 +- ...mptyChecksMethodsOrNotICheckablePlugins.cs | 93 +- .../SuspiciousRelationshipPropertyUse.cs | 453 +- .../DesignPatternTests/CsProjFileTidy.cs | 196 +- .../DependenciesEvaluation.cs | 255 +- ...aluateNamespacesAndSolutionFoldersTests.cs | 546 +- .../InterfaceDeclarationsCorrect.cs | 59 +- .../MarkdownCodeBlockTests.cs | 395 +- .../DesignPatternTests/RunUITests.cs | 220 +- .../UserInterfaceStandardisationChecker.cs | 364 +- .../ExternalDatabaseServerUITests.cs | 35 +- .../ExtractionUIs/ConfigureDatasetUITests.cs | 69 +- .../JoinsAndLookups/LookupBrowserUITests.cs | 22 +- Rdmp.UI.Tests/ForEachUITests.cs | 27 +- .../ForwardEngineerANOCatalogueUITests.cs | 49 +- Rdmp.UI.Tests/HelpIconTests.cs | 39 +- Rdmp.UI.Tests/HistoryProviderTests.cs | 205 +- Rdmp.UI.Tests/LoadDiagramUITests.cs | 55 +- Rdmp.UI.Tests/LogViewer/LoggingTabUITests.cs | 20 +- Rdmp.UI.Tests/PluginProcessTaskUITests.cs | 144 +- Rdmp.UI.Tests/Rdmp.UI.Tests.csproj | 71 +- Rdmp.UI.Tests/RecentHistoryOfControlsTests.cs | 29 +- Rdmp.UI.Tests/TestActivateItems.cs | 507 +- Rdmp.UI.Tests/TypeTextOrCancelDialogTests.cs | 72 +- Rdmp.UI.Tests/UITests.cs | 788 +- Rdmp.UI.Tests/UITimeoutAttribute.cs | 179 +- Rdmp.UI.Tests/WideMessageBoxTests.cs | 99 +- Rdmp.UI/ANOEngineeringUIs/ANOTableUI.cs | 216 +- .../ForwardEngineerANOCatalogueUI.cs | 874 +- Rdmp.UI/AbstractControlDescriptionProvider.cs | 41 +- .../Advanced/AggregateEditorUI.cs | 840 +- .../Advanced/AggregateTopXUI.cs | 245 +- .../AggregationUIs/Advanced/SelectColumnUI.cs | 481 +- .../AggregateContinuousDateAxisUI.cs | 211 +- Rdmp.UI/AggregationUIs/AggregateGraphUI.cs | 1338 +- Rdmp.UI/ApplicationRestarter.cs | 72 +- Rdmp.UI/AssemblyInfo.cs | 2 +- .../AutoComplete/AutoCompleteProviderWin.cs | 115 +- .../CatalogueSummary/CatalogueDQEResultsUI.cs | 148 +- .../DataQualityReporting/ColumnStatesChart.cs | 110 +- .../DQEGraphAnnotationUI.cs | 104 +- .../IDataQualityReportingChart.cs | 29 +- .../SubComponents/ConsequenceBar.cs | 239 +- .../SubComponents/ConsequenceKey.cs | 43 +- .../SubComponents/DQEPivotCategorySelector.cs | 77 +- .../SubComponents/EvaluationTrackBar.cs | 156 +- .../SubComponents/EvaluationTrackBar.resx | 2 +- .../TimePeriodicityChart.cs | 373 +- .../LoadEvents/DiffDataTables.cs | 125 +- .../LoadEvents/LoadEventsTreeView.cs | 635 +- .../LoadEventsTreeViewObjectCollection.cs | 40 +- .../LoadEvents/ResolveFatalErrors.cs | 87 +- .../ViewInsertsAndUpdatesDialog.Designer.cs | 2 +- .../LoadEvents/ViewInsertsAndUpdatesDialog.cs | 271 +- Rdmp.UI/ChartLookAndFeelSetter.cs | 214 +- .../ChecksUI/AllChecksCompleteHandlerArgs.cs | 23 +- Rdmp.UI/ChecksUI/ChecksUI.cs | 391 +- Rdmp.UI/ChecksUI/IRAGSmiley.cs | 31 +- Rdmp.UI/ChecksUI/MakeChangePopup.cs | 87 +- Rdmp.UI/ChecksUI/PopupChecksUI.cs | 127 +- Rdmp.UI/ChecksUI/RAGSmiley.cs | 362 +- Rdmp.UI/ChecksUI/RAGSmileyToolStrip.cs | 298 +- .../CreateNewCohortDatabaseWizardUI.cs | 161 +- .../ExternalCohortTableUI.cs | 136 +- .../CohortUI/ExtractableCohortCollectionUI.cs | 353 +- Rdmp.UI/CohortUI/ExtractableCohortUI.cs | 372 +- .../CohortCreationRequestUI.cs | 506 +- .../Collections/AlwaysShowListOnlyFilter.cs | 30 +- 
.../CatalogueCollectionFilterUI.cs | 112 +- Rdmp.UI/Collections/CatalogueCollectionUI.cs | 448 +- .../Collections/CherryPickingTailFilter.cs | 83 +- .../CohortIdentificationCollectionUI.cs | 78 +- Rdmp.UI/Collections/DataExportCollectionUI.cs | 323 +- Rdmp.UI/Collections/FavouritesCollectionUI.cs | 198 +- .../Collections/LoadMetadataCollectionUI.cs | 201 +- Rdmp.UI/Collections/MenuBuiltEventArgs.cs | 33 +- .../Providers/CheckColumnProvider.cs | 180 +- .../Providers/Copying/CopyPasteProvider.cs | 42 +- .../Providers/Copying/DragDropProvider.cs | 326 +- .../Providers/FavouriteColumnProvider.cs | 139 +- .../Filtering/CatalogueCollectionFilter.cs | 56 +- Rdmp.UI/Collections/Providers/HistoryEntry.cs | 97 +- .../Collections/Providers/HistoryProvider.cs | 193 +- .../Collections/Providers/IDColumnProvider.cs | 61 +- .../Collections/Providers/RenameProvider.cs | 148 +- .../RDMPCollectionCommonFunctionality.cs | 1392 +- ...MPCollectionCommonFunctionalitySettings.cs | 94 +- ...llectionCommonFunctionalityTreeHijacker.cs | 58 +- Rdmp.UI/Collections/RDMPCollectionUI.cs | 42 +- .../Collections/SavedCohortsCollectionUI.cs | 104 +- Rdmp.UI/Collections/SessionCollection.cs | 49 +- Rdmp.UI/Collections/SessionCollectionUI.cs | 344 +- Rdmp.UI/Collections/TableInfoCollectionUI.cs | 226 +- .../TextMatchFilterWithAlwaysShowList.cs | 70 +- .../AtomicCommands/BasicUICommandExecution.cs | 66 +- .../ExecuteCommandAddJoinInfo.cs | 78 +- ...uteCommandAddNewLookupTableRelationship.cs | 100 +- .../ExecuteCommandAddToSession.cs | 70 +- .../ExecuteCommandAnonymiseColumnInfo.cs | 56 +- .../ExecuteCommandBrowseLookup.cs | 65 +- ...ExecuteCommandBulkProcessCatalogueItems.cs | 31 +- .../ExecuteCommandCheckAsync.cs | 75 +- ...ExecuteCommandChooseHICProjectDirectory.cs | 49 +- .../ExecuteCommandChoosePlatformDatabase.cs | 77 +- .../ExecuteCommandClearFavourites.cs | 44 +- .../ExecuteCommandCollapseChildNodes.cs | 105 +- ...ommandConfigureCatalogueValidationRules.cs | 76 +- .../ExecuteCommandConfigureDefaultServers.cs | 28 +- .../ExecuteCommandCreateANOVersion.cs | 69 +- ...mandCreateNewCatalogueByImportingFileUI.cs | 40 +- ...mmandCreateNewCohortDatabaseUsingWizard.cs | 48 +- .../ExecuteCommandCreateNewDashboard.cs | 40 +- ...teCommandCreateNewDataExtractionProject.cs | 81 +- .../ExecuteCommandCreateNewPipeline.cs | 45 +- ...eCommandCreateNewPreLoadDiscardedColumn.cs | 150 +- ...ecuteCommandCreateNewQueryCacheDatabase.cs | 66 +- .../ExecuteCommandEditCacheProgress.cs | 42 +- ...ecuteCommandEditLoadMetadataDescription.cs | 39 +- .../ExecuteCommandEditPipelineWithUseCase.cs | 37 +- .../ExecuteCommandExecuteCacheProgress.cs | 73 +- ...eCommandExecuteExtractionAggregateGraph.cs | 48 +- ...teCommandExecuteExtractionConfiguration.cs | 119 +- .../ExecuteCommandExpandAllNodes.cs | 89 +- .../ExecuteCommandGenerateMetadataReport.cs | 28 +- .../ExecuteCommandGenerateTestDataUI.cs | 21 +- .../ExecuteCommandOpenExtractionDirectory.cs | 137 +- .../ExecuteCommandOpenInExplorer.cs | 65 +- .../ExecuteCommandPublishFilter.cs | 102 +- .../ExecuteCommandReOrderAggregate.cs | 106 +- ...ExecuteCommandReOrderAggregateContainer.cs | 119 +- .../ExecuteCommandReOrderColumns.cs | 34 +- .../ExecuteCommandReOrderProcessTask.cs | 105 +- ...ndRefreshExtractionConfigurationsCohort.cs | 112 +- .../ExecuteCommandRefreshObject.cs | 53 +- .../AtomicCommands/ExecuteCommandRelease.cs | 139 +- .../ExecuteCommandRunDQEOnCatalogue.cs | 74 +- .../ExecuteCommandShowCacheFetchFailures.cs | 78 +- .../ExecuteCommandShowKeywordHelp.cs | 110 +- 
.../ExecuteCommandShowSummaryOfCohorts.cs | 93 +- .../ExecuteCommandShowTooltip.cs | 72 +- .../ExecuteCommandShowXmlDoc.cs | 56 +- .../ExecuteCommandStartSession.cs | 89 +- ...cuteCommandViewCatalogueExtractionSqlUI.cs | 78 +- .../ExecuteCommandViewCohortAggregateGraph.cs | 57 +- .../ExecuteCommandViewCommits.cs | 54 +- ...xecuteCommandViewDQEResultsForCatalogue.cs | 98 +- .../ExecuteCommandViewFilterMatchGraph.cs | 104 +- .../ExecuteCommandViewLoadDiagram.cs | 37 +- .../ExecuteCommandViewThenVsNowSql.cs | 71 +- .../UIFactory/AtomicCommandLinkLabel.cs | 71 +- .../UIFactory/AtomicCommandUIFactory.cs | 88 +- Rdmp.UI/CommandExecution/CachedDropTarget.cs | 51 +- .../CommandExecution/ICombineableFactory.cs | 86 +- .../ICommandExecutionFactory.cs | 50 +- .../Proposals/ICommandExecutionProposal.cs | 20 +- .../ProposeExecutionWhenTargetIsANOTable.cs | 35 +- ...utionWhenTargetIsAggregateConfiguration.cs | 91 +- ...etIsAllCataloguesUsedByLoadMetadataNode.cs | 52 +- ...poseExecutionWhenTargetIsAllPluginsNode.cs | 41 +- ...oposeExecutionWhenTargetIsCacheProgress.cs | 32 +- .../ProposeExecutionWhenTargetIsCatalogue.cs | 45 +- ...oposeExecutionWhenTargetIsCatalogueItem.cs | 41 +- ...ExecutionWhenTargetIsCatalogueItemsNode.cs | 61 +- ...ionWhenTargetIsCohortAggregateContainer.cs | 138 +- ...rgetIsCohortIdentificationConfiguration.cs | 38 +- ...poseExecutionWhenTargetIsConcreteFilter.cs | 35 +- ...tionWhenTargetIsConnectionStringKeyword.cs | 36 +- ...oseExecutionWhenTargetIsDashboardLayout.cs | 30 +- ...enTargetIsDataAccessCredentialUsageNode.cs | 34 +- ...cutionWhenTargetIsDataAccessCredentials.cs | 32 +- ...xecutionWhenTargetIsExternalCohortTable.cs | 32 +- ...utionWhenTargetIsExternalDatabaseServer.cs | 32 +- ...eExecutionWhenTargetIsExtractableCohort.cs | 33 +- ...enTargetIsExtractionArbitraryFolderNode.cs | 49 +- ...tionWhenTargetIsExtractionConfiguration.cs | 72 +- ...henTargetIsExtractionFilterParameterSet.cs | 34 +- ...cutionWhenTargetIsExtractionInformation.cs | 32 +- ...ExecutionWhenTargetIsExtractionProgress.cs | 35 +- ...oseExecutionWhenTargetIsFilterContainer.cs | 89 +- ...ExecutionWhenTargetIsGovernanceDocument.cs | 30 +- ...seExecutionWhenTargetIsGovernancePeriod.cs | 55 +- ...ExecutionWhenTargetIsIDirectoryInfoNode.cs | 35 +- ...roposeExecutionWhenTargetIsLoadMetadata.cs | 35 +- ...roposeExecutionWhenTargetIsLoadProgress.cs | 32 +- ...oposeExecutionWhenTargetIsLoadStageNode.cs | 64 +- .../ProposeExecutionWhenTargetIsLookup.cs | 30 +- ...seExecutionWhenTargetIsPermissionWindow.cs | 38 +- ...PermissionWindowUsedByCacheProgressNode.cs | 42 +- .../ProposeExecutionWhenTargetIsPipeline.cs | 46 +- ...rgetIsPipelineCompatibleWithUseCaseNode.cs | 39 +- ...eExecutionWhenTargetIsPipelineComponent.cs | 38 +- ...onWhenTargetIsPipelineComponentArgument.cs | 40 +- ...utionWhenTargetIsPreLoadDiscardedColumn.cs | 32 +- ...ProposeExecutionWhenTargetIsProcessTask.cs | 42 +- ...xecutionWhenTargetIsProcessTaskArgument.cs | 40 +- .../ProposeExecutionWhenTargetIsProject.cs | 65 +- ...cutionWhenTargetIsProjectCataloguesNode.cs | 45 +- ...tificationConfigurationAssociationsNode.cs | 47 +- ...tionWhenTargetIsProjectSavedCohortsNode.cs | 63 +- .../ProposeExecutionWhenTargetIsRemoteRDMP.cs | 32 +- ...seExecutionWhenTargetIsSelectedDataSets.cs | 48 +- ...WhenTargetIsStandardPipelineUseCaseNode.cs | 37 +- ...oposeExecutionWhenTargetIsStandardRegex.cs | 35 +- ...ExecutionWhenTargetIsSupportingDocument.cs | 32 +- ...ExecutionWhenTargetIsSupportingSQLTable.cs | 32 +- .../ProposeExecutionWhenTargetIsTableInfo.cs | 33 +- 
...roposeExecutionWhenTargetIsWindowLayout.cs | 32 +- .../Proposals/RDMPCommandExecutionProposal.cs | 81 +- .../RDMPCommandExecutionFactory.cs | 253 +- Rdmp.UI/Copying/RDMPCombineableFactory.cs | 293 +- .../CredentialsUIs/DataAccessCredentialsUI.cs | 43 +- .../Construction/DashboardControlFactory.cs | 173 +- .../DashboardControlHydrationException.cs | 9 +- .../Construction/IDashboardableControl.cs | 66 +- .../Construction/INotifyMeOfEditState.cs | 9 +- .../DashboardEditModeFunctionality.cs | 339 +- Rdmp.UI/DashboardTabs/DashboardLayoutUI.cs | 199 +- .../DashboardableControlHostPanel.cs | 140 +- .../ColumnInfoToANOTableConverterUI.cs | 668 +- .../ANOTableManagement/PatternPredictor.cs | 85 +- .../PreLoadDiscardedColumnUI.cs | 119 +- .../DataLoadUIs/CreateNewLoadMetadataUI.cs | 72 +- .../LoadMetadataUIs/ChooseLoadFolderUI.cs | 219 +- .../LoadDiagram/LoadDiagramColumnNode.cs | 210 +- .../LoadDiagram/LoadDiagramDatabaseNode.cs | 178 +- .../LoadDiagram/LoadDiagramServerNode.cs | 201 +- .../LoadDiagram/LoadDiagramTableNode.cs | 259 +- .../LoadDiagram/LoadDiagramUI.cs | 563 +- .../StateDiscovery/IHasLoadDiagramState.cs | 12 +- .../StateDiscovery/LoadDiagramState.cs | 18 +- .../LoadDiagram/StateDiscovery/LoadStateUI.cs | 96 +- .../StateDiscovery/UnplannedTable.cs | 39 +- .../LoadMetadataUIs/LoadMetadataUI.cs | 40 +- .../CacheProgressUI.cs | 317 +- .../Diagrams/LoadProgressAnnotation.cs | 306 +- .../Diagrams/LoadProgressDiagramUI.cs | 296 +- .../LoadProgressAndCacheUIs/LoadProgressUI.cs | 164 +- .../PermissionWindowUI.cs | 187 +- .../LoadMetadataUIs/LoadStageIconUI.cs | 34 +- .../ProcessTasks/ExeProcessTaskUI.cs | 193 +- .../ProcessTasks/PluginProcessTaskUI.cs | 200 +- .../ProcessTasks/SqlProcessTaskUI.cs | 229 +- .../ModuleUIs/AdjustColumnDataTypesUI.cs | 50 +- .../DataFlowSources/ExplicitColumnTypeUI.cs | 84 +- .../ExplicitTypingCollectionUI.cs | 163 +- .../DataProvider/WebServiceConfigurationUI.cs | 121 +- .../ModuleUIs/DatabaseColumnRequestUI.cs | 193 +- .../DataLoadProgressUpdateInfoUI.cs | 261 +- .../DataQualityUIs/DQEExecutionControlUI.cs | 94 +- Rdmp.UI/DataRelease/DataReleaseUI.cs | 399 +- .../ViewSQLAndResultsWithDataGridUI.cs | 523 +- Rdmp.UI/DoTransparencyProperly.cs | 33 +- Rdmp.UI/ExceptionCounterUI.cs | 139 +- .../ExtractionUIs/ExtractionInformationUI.cs | 455 +- .../FilterUIs/ExtractionFilterUI.cs | 319 +- .../FilterUIs/FilterGraphObjectCollection.cs | 61 +- .../ExtractionUIs/FilterUIs/FilterGraphUI.cs | 87 +- .../ExtractionFilterParameterSetUI.cs | 111 +- .../ParameterUIs/ParameterCollectionUI.cs | 714 +- .../ParameterEditorScintillaControlUI.cs | 293 +- .../ParameterEditorScintillaSection.cs | 135 +- .../JoinsAndLookups/JoinConfigurationUI.cs | 434 +- .../JoinsAndLookups/KeyDropLocationUI.cs | 159 +- .../JoinsAndLookups/LookupBrowserUI.cs | 167 +- .../JoinsAndLookups/LookupConfigurationUI.cs | 785 +- .../ExtractionUIs/JoinsAndLookups/LookupUI.cs | 112 +- .../ExtractionUIs/ReOrderCatalogueItemsUI.cs | 551 +- Rdmp.UI/ExtractionUIs/ViewExtractionSqlUI.cs | 358 +- Rdmp.UI/FormsHelper.cs | 44 +- Rdmp.UI/Hunspellx64.dll | Bin 524288 -> 0 bytes Rdmp.UI/Hunspellx86.dll | Bin 442368 -> 0 bytes Rdmp.UI/IconFactory.cs | 68 +- Rdmp.UI/ImageTools.cs | 27 +- .../Arranging/IArrangeWindows.cs | 25 +- Rdmp.UI/ItemActivation/IActivateItems.cs | 252 +- Rdmp.UI/LinkLabels/PathLinkLabel.cs | 65 +- .../ExecuteCacheProgressUI.cs | 83 +- .../LoadExecutionUIs/ExecuteLoadMetadataUI.cs | 287 +- .../ChoosePlatformDatabasesUI.Designer.cs | 17 +- .../ChoosePlatformDatabasesUI.cs | 688 +- 
.../PasswordEncryptionKeyLocationUI.cs | 187 +- Rdmp.UI/LocationsMenu/ServerDefaultsUI.cs | 354 +- .../Ticketing/TicketingControlUI.cs | 150 +- .../TicketingSystemConfigurationUI.cs | 352 +- Rdmp.UI/Logging/LoggingTabUI.cs | 682 +- Rdmp.UI/MainFormUITabs/CatalogueItemUI.cs | 144 +- Rdmp.UI/MainFormUITabs/CatalogueUI.cs | 403 +- .../ConnectionStringKeywordUI.cs | 92 +- Rdmp.UI/MainFormUITabs/DitaExtractorUI.cs | 106 +- .../MainFormUITabs/ExtractionProgressUI.cs | 214 +- .../SubComponents/ExternalDatabaseServerUI.cs | 168 +- .../SubComponents/ImportSQLTableUI.cs | 207 +- .../SubComponents/TableInfoUI.cs | 239 +- Rdmp.UI/Menus/AggregateConfigurationMenu.cs | 118 +- Rdmp.UI/Menus/AllCohortsNodeMenu.cs | 43 +- Rdmp.UI/Menus/AllDashboardsNodeMenu.cs | 16 +- Rdmp.UI/Menus/AllExternalServersNodeMenu.cs | 19 +- Rdmp.UI/Menus/CacheProgressMenu.cs | 20 +- Rdmp.UI/Menus/CatalogueFolderMenu.cs | 32 +- Rdmp.UI/Menus/CatalogueItemsNodeMenu.cs | 14 +- Rdmp.UI/Menus/CatalogueMenu.cs | 129 +- Rdmp.UI/Menus/CohortAggregateContainerMenu.cs | 73 +- Rdmp.UI/Menus/ColumnInfoMenu.cs | 26 +- Rdmp.UI/Menus/DocumentationNodeMenu.cs | 21 +- Rdmp.UI/Menus/ExternalCohortTableMenu.cs | 31 +- Rdmp.UI/Menus/ExternalDatabaseServerMenu.cs | 69 +- .../Menus/ExtractableDataSetPackageMenu.cs | 61 +- Rdmp.UI/Menus/ExtractableDatasetMenu.cs | 49 +- Rdmp.UI/Menus/ExtractionConfigurationMenu.cs | 33 +- .../Menus/ExtractionConfigurationsNodeMenu.cs | 18 +- Rdmp.UI/Menus/ExtractionDirectoryNodeMenu.cs | 19 +- Rdmp.UI/Menus/FilterMenu.cs | 19 +- Rdmp.UI/Menus/GovernancePeriodMenu.cs | 14 +- Rdmp.UI/Menus/HICProjectDirectoryNodeMenu.cs | 15 +- Rdmp.UI/Menus/JoinInfoMenu.cs | 11 +- Rdmp.UI/Menus/JoinableCollectionNodeMenu.cs | 16 +- Rdmp.UI/Menus/LoadMetadataMenu.cs | 21 +- Rdmp.UI/Menus/LoadStageNodeMenu.cs | 97 +- Rdmp.UI/Menus/LookupMenu.cs | 13 +- .../Menus/MenuItems/AtomicCommandMenuItem.cs | 99 +- .../MenuItems/AtomicCommandToolStripItem.cs | 43 +- Rdmp.UI/Menus/MenuItems/DQEMenuItem.cs | 59 +- .../Menus/MenuItems/RDMPToolStripMenuItem.cs | 80 +- Rdmp.UI/Menus/MenuItems/SaveMenuItem.cs | 60 +- .../Menus/MenuItems/SetDumpServerMenuItem.cs | 92 +- .../PipelineCompatibleWithUseCaseNodeMenu.cs | 43 +- .../Menus/PreLoadDiscardedColumnsNodeMenu.cs | 24 +- Rdmp.UI/Menus/ProjectCataloguesNodeMenu.cs | 20 +- Rdmp.UI/Menus/ProjectFolderMenu.cs | 21 +- Rdmp.UI/Menus/ProjectMenu.cs | 31 +- Rdmp.UI/Menus/RDMPContextMenuStrip.cs | 474 +- Rdmp.UI/Menus/RDMPContextMenuStripArgs.cs | 106 +- Rdmp.UI/Menus/SelectedDataSetsMenu.cs | 63 +- Rdmp.UI/Menus/TableInfoMenu.cs | 58 +- Rdmp.UI/NavigationTrack.cs | 361 +- Rdmp.UI/Overview/DataLoadsGraph.cs | 414 +- .../DataLoadsGraphObjectCollection.cs | 13 +- Rdmp.UI/Overview/DataLoadsGraphResult.cs | 15 +- .../Overview/DataLoadsGraphResultStatus.cs | 13 +- Rdmp.UI/Pencil.cur | Bin 326 -> 0 bytes Rdmp.UI/Performance/LastCommandUI.cs | 41 +- .../PerformanceCounterResultsUI.cs | 291 +- .../StackTraceProcessing/StackFramesTree.cs | 160 +- Rdmp.UI/PieCharts/GoodBadCataloguePieChart.cs | 448 +- ...oodBadCataloguePieChartObjectCollection.cs | 226 +- .../DataFlowComponentVisualisation.cs | 277 +- .../PipelineComponentVisualisation.cs | 152 +- .../ArgumentCollectionUI.cs | 362 +- .../ArgumentValueArrayUI.cs | 121 +- .../ArgumentValueBoolUI.cs | 69 +- .../ArgumentValueComboBoxUI.cs | 228 +- .../ArgumentValueCustomUIDrivenClassUI.cs | 128 +- .../ArgumentValueDictionaryUI.cs | 230 +- .../ArgumentValueLabelUI.cs | 36 +- .../ArgumentValuePipelineUI.cs | 75 +- .../ArgumentValueSqlUI.cs | 78 +- .../ArgumentValueTextUI.cs | 
111 +- .../ArgumentValueUIArgs.cs | 71 +- .../ArgumentValueUIFactory.cs | 283 +- .../ArgumentValueControls/IArgumentValueUI.cs | 23 +- .../ConfigureAndExecutePipelineUI.cs | 415 +- .../Pipelines/ConfigurePipelineUI.cs | 140 +- .../Pipelines/IPipelineSelectionUI.cs | 33 +- .../Pipelines/Models/DividerLineControl.cs | 83 +- .../Pipelines/PipelineDiagramUI.cs | 764 +- .../Pipelines/PipelineSelectionUI.cs | 390 +- .../Pipelines/PipelineWorkAreaUI.cs | 256 +- .../PipelineSelectionUIFactory.cs | 93 +- .../PluginPipelineUsers/PluginPipelineUser.cs | 71 +- .../CodeGeneration/CodeGenerationException.cs | 20 +- .../GenerateClassCodeFromTableUI.cs | 54 +- ...rectlyToDatabaseTableClassCodeGenerator.cs | 220 +- Rdmp.UI/Progress/ProgressBarsUI.cs | 186 +- Rdmp.UI/Progress/ProgressUI.cs | 703 +- Rdmp.UI/Progress/ProgressUIEntry.cs | 85 +- .../ProjectUI/Datasets/ConfigureDatasetUI.cs | 1226 +- .../Datasets/Node/AvailableForceJoinNode.cs | 140 +- Rdmp.UI/ProjectUI/ExecuteExtractionUI.cs | 581 +- .../ProjectUI/ExtractCommandStateMonitor.cs | 100 +- .../ProjectUI/ExtractionConfigurationUI.cs | 330 +- .../Graphs/ExtractionAggregateGraph.cs | 272 +- ...xtractionAggregateGraphObjectCollection.cs | 94 +- Rdmp.UI/ProjectUI/ProjectUI.cs | 537 +- Rdmp.UI/Properties/AssemblyInfo.cs | 6 +- Rdmp.UI/Raceway/DatasetRaceway.cs | 472 +- .../Raceway/DatasetRacewayObjectCollection.cs | 98 +- Rdmp.UI/Raceway/RacewayRenderAreaUI.cs | 797 +- Rdmp.UI/Rdmp.UI.csproj | 110 +- Rdmp.UI/RecentHistoryOfControls.cs | 154 +- Rdmp.UI/Refreshing/ILifetimeSubscriber.cs | 23 +- Rdmp.UI/Refreshing/IRefreshBusSubscriber.cs | 25 +- Rdmp.UI/Refreshing/RefreshBus.cs | 269 +- Rdmp.UI/Refreshing/RefreshObjectEventArgs.cs | 33 +- .../Refreshing/RefreshObjectEventHandler.cs | 7 +- Rdmp.UI/Refreshing/SelfDestructProtocol.cs | 77 +- Rdmp.UI/Refreshing/SubscriptionException.cs | 18 +- .../ResearchDataManagementPlatformOptions.cs | 18 +- Rdmp.UI/RichTextBoxEx.cs | 502 +- Rdmp.UI/Rules/BinderRule.cs | 92 +- .../Rules/BinderWithErrorProviderFactory.cs | 59 +- Rdmp.UI/Rules/IBinderRule.cs | 17 +- Rdmp.UI/Rules/MaxLengthRule.cs | 101 +- Rdmp.UI/Rules/NoBadNamesRule.cs | 30 +- Rdmp.UI/Rules/NotNullRule.cs | 24 +- Rdmp.UI/Rules/UniqueRule.cs | 62 +- Rdmp.UI/ScintillaHelper/CSharpLexer.cs | 227 +- .../ScintillaLineHighlightingHelper.cs | 33 +- Rdmp.UI/ScintillaHelper/ScintillaMenu.cs | 245 +- .../ScintillaTextEditorFactory.cs | 489 +- Rdmp.UI/SimpleControls/CheckAndExecuteUI.cs | 426 +- .../SimpleControls/CommandGetterHandler.cs | 7 +- .../SimpleControls/ConnectionStringTextBox.cs | 262 +- Rdmp.UI/SimpleControls/DatabaseTypeUI.cs | 91 +- Rdmp.UI/SimpleControls/ExecutionEventArgs.cs | 23 +- Rdmp.UI/SimpleControls/HeatmapUI.cs | 696 +- Rdmp.UI/SimpleControls/HelpIcon.cs | 125 +- Rdmp.UI/SimpleControls/HelpSection.cs | 19 +- Rdmp.UI/SimpleControls/ISaveableUI.cs | 21 +- Rdmp.UI/SimpleControls/ObjectSaverButton.cs | 380 +- Rdmp.UI/SimpleControls/RainbowColorPicker.cs | 102 +- ...ectIMapsDirectlyToDatabaseTableComboBox.cs | 139 +- .../ServerDatabaseTableSelector.cs | 827 +- Rdmp.UI/SimpleCounterButton.cs | 76 +- .../BulkProcessCatalogueItemsUI.cs | 424 +- Rdmp.UI/SimpleDialogs/ChooseLoggingTaskUI.cs | 389 +- Rdmp.UI/SimpleDialogs/CommitsUI.cs | 181 +- .../SimpleDialogs/ConfigureDisclaimerUI.cs | 149 +- .../ConfigureHashingAlgorithmUI.cs | 121 +- .../ConfigurePrimaryKeyCollisionResolverUI.cs | 542 +- Rdmp.UI/SimpleDialogs/DataTableViewerUI.cs | 77 +- Rdmp.UI/SimpleDialogs/ExceptionViewer.cs | 178 +- ...ExceptionViewerStackTraceWithHyperlinks.cs | 269 +- 
Rdmp.UI/SimpleDialogs/FindAndReplaceNode.cs | 108 +- Rdmp.UI/SimpleDialogs/FindAndReplaceUI.cs | 261 +- .../FindSearchTailFilterWithAlwaysShowList.cs | 72 +- .../ConfigureCatalogueExtractabilityUI.cs | 1081 +- .../Governance/GovernanceDocumentUI.cs | 90 +- .../Governance/GovernancePeriodUI.cs | 313 +- Rdmp.UI/SimpleDialogs/NavigateTo/RunUI.cs | 105 +- Rdmp.UI/SimpleDialogs/PerformanceCounterUI.cs | 123 +- ...gueItemChangesToSimilarNamedUI.Designer.cs | 2 +- ...ateCatalogueItemChangesToSimilarNamedUI.cs | 386 +- .../SimpleDialogs/Remoting/RemoteRDMPUI.cs | 45 +- .../Reports/DataGeneratorUI.Designer.cs | 4 +- .../SimpleDialogs/Reports/DataGeneratorUI.cs | 133 +- .../Reports/GenerateTestDataUI.cs | 392 +- .../SimpleDialogs/Reports/MetadataReportUI.cs | 412 +- .../Revertable/OfferChanceToSaveDialogUI.cs | 136 +- .../RevertablePropertyDifferenceUI.cs | 132 +- Rdmp.UI/SimpleDialogs/SelectDialog.cs | 1279 +- .../ServerDatabaseTableSelectorDialog.cs | 114 +- Rdmp.UI/SimpleDialogs/ShowSQLUI.cs | 41 +- .../CreateNewCatalogueByImportingFileUI.cs | 719 +- ...eNewCatalogueByImportingFileUI_Advanced.cs | 149 +- .../SqlDialogs/SQLBeforeAndAfterViewer.cs | 156 +- .../SqlDialogs/SQLPreviewWindow.cs | 79 +- .../SimpleDialogs/SqlDialogs/SetSQLDialog.cs | 81 +- Rdmp.UI/SimpleDialogs/SupportingDocumentUI.cs | 192 +- Rdmp.UI/SimpleDialogs/SupportingSQLTableUI.cs | 272 +- Rdmp.UI/SimpleDialogs/TaskDescriptionLabel.cs | 165 +- .../SimpleDialogs/TypeTextOrCancelDialog.cs | 266 +- .../SimpleDialogs/UserSettingsUI.Designer.cs | 14 + Rdmp.UI/SimpleDialogs/UserSettingsUI.cs | 433 +- Rdmp.UI/SimpleDialogs/ViewSourceCodeDialog.cs | 236 +- .../SimpleDialogs/ViewSourceCodeToolTip.cs | 204 +- Rdmp.UI/SimpleDialogs/WaitUI.cs | 67 +- Rdmp.UI/SimpleDialogs/WideMessageBox.cs | 668 +- Rdmp.UI/SimpleDialogs/WideMessageBoxArgs.cs | 40 +- Rdmp.UI/SimpleDialogs/WideMessageBoxTheme.cs | 13 +- Rdmp.UI/SimpleDialogs/YesNoYesToAllDialog.cs | 165 +- .../IConsultableBeforeClosing.cs | 15 +- .../SingleControlForms/SingleControlForm.cs | 85 +- .../CohortIdentificationConfigurationUI.cs | 645 +- .../CohortIdentificationHeader.cs | 71 +- ...rtSummaryAggregateGraphObjectCollection.cs | 175 +- .../Graphs/CohortSummaryAggregateGraphUI.cs | 200 +- Rdmp.UI/SuggestComboBox.cs | 472 +- Rdmp.UI/TechnicalUI.cs | 15 +- .../TestsAndSetup/GlobalExceptionHandler.cs | 46 +- Rdmp.UI/TestsAndSetup/RDMPBootStrapper.cs | 189 +- .../BeforeCheckingEventArgs.cs | 29 +- .../ServicePropogation/INamedTab.cs | 23 +- .../IObjectCollectionControl.cs | 51 +- .../ServicePropogation/IRDMPControl.cs | 19 +- .../IRDMPSingleDatabaseObjectControl.cs | 14 +- .../RDMPControlCommonFunctionality.cs | 831 +- .../ServicePropogation/RDMPForm.cs | 118 +- .../RDMPSingleDatabaseObjectControl.cs | 489 +- .../ServicePropogation/RDMPUserControl.cs | 100 +- Rdmp.UI/TestsAndSetup/StartupUI.Designer.cs | 4 +- Rdmp.UI/TestsAndSetup/StartupUI.cs | 539 +- Rdmp.UI/Theme/BackColorProvider.cs | 68 +- Rdmp.UI/Theme/ITheme.cs | 19 +- Rdmp.UI/ToolStripTimeout.cs | 70 +- Rdmp.UI/TransparentHelpSystem/HelpBox.cs | 105 +- Rdmp.UI/TransparentHelpSystem/HelpStage.cs | 191 +- Rdmp.UI/TransparentHelpSystem/HelpWorkflow.cs | 221 +- .../IHelpWorkflowProgressProvider.cs | 21 +- .../NullHelpWorkflowProgressProvider.cs | 22 +- .../TransparentHelpForm.cs | 376 +- Rdmp.UI/TreeHelper/TreeNodeParentFinder.cs | 73 +- Rdmp.UI/Tutorials/Tutorial.cs | 54 +- Rdmp.UI/Tutorials/TutorialTracker.cs | 153 +- .../ResolveMissingTargetPropertiesUI.cs | 224 +- Rdmp.UI/Validation/SecondaryConstraintUI.cs | 494 +- 
Rdmp.UI/Validation/StandardRegexUI.cs | 108 +- Rdmp.UI/Validation/ValidationSetupUI.cs | 596 +- Rdmp.UI/Versioning/CreatePlatformDatabase.cs | 244 +- Rdmp.UI/Versioning/PatchingUI.cs | 185 +- ...eNewCohortIdentificationConfigurationUI.cs | 258 +- .../CreateNewDataExtractionProjectUI.cs | 755 +- Rdmp.UI/Wizard/SimpleCohortSetUI.cs | 390 +- Rdmp.UI/Wizard/SimpleFilterUI.cs | 253 +- Rdmp.UI/Wizard/SimpleParameterUI.cs | 84 +- Rdmp.UI/Wizard/SimpleSetOperation.cs | 130 +- Rdmp.UI/en_GB.aff | 1347 + Rdmp.UI/en_GB.dic | 96971 ++++++++++++++++ Rdmp.UI/en_US.aff | 201 - Rdmp.UI/en_US.dic | 62120 ---------- .../Attributes/AttributePropertyFinder.cs | 74 - .../Attributes/RelationshipAttribute.cs | 77 - .../Attributes/RelationshipType.cs | 36 - .../Attributes/UsefulPropertyAttribute.cs | 20 - .../DoNotImportDescriptionsAttribute.cs | 24 - .../IDeletableWithCustomMessage.cs | 28 - .../IDeleteable.cs | 20 - .../IDisableable.cs | 22 - .../IMapsDirectlyToDatabaseTable.cs | 46 - .../MapsDirectlyToDatabaseTable/INamed.cs | 26 - .../IObscureDependencyFinder.cs | 41 - .../IRepository.cs | 219 - .../MapsDirectlyToDatabaseTable/ISaveable.cs | 21 - .../ITableRepository.cs | 40 - .../Injection/IInjectKnown.cs | 58 - .../MapsDirectlyToDatabaseTable.csproj | 30 - .../MemoryRepository.cs | 387 - .../NewObjectPool.cs | 81 - .../ObjectDeletedException.cs | 25 - .../Properties/AssemblyInfo.cs | 17 - .../Revertable/IRevertable.cs | 33 - .../Revertable/RevertableObjectReport.cs | 31 - .../RevertablePropertyDifference.cs | 29 - .../TableRepository.cs | 985 - .../UpdateCommandStore.cs | 79 - .../Versioning/DatabaseVersionProvider.cs | 41 - .../Versioning/IPatcher.cs | 68 - .../MasterDatabaseScriptExecutor.cs | 439 - .../Versioning/Patch.cs | 177 - .../Versioning/Patcher.cs | 133 - .../Annotations/Annotations.cs | 412 - .../ReusableLibraryCode/AssemblyResolver.cs | 82 - .../Checks/AcceptAllCheckNotifier.cs | 41 - .../Checks/CheckEventArgs.cs | 127 - .../ReusableLibraryCode/Checks/ErrorCode.cs | 47 - .../ReusableLibraryCode/Checks/ErrorCodes.cs | 57 - .../Checks/ICheckNotifier.cs | 17 - .../ReusableLibraryCode/Checks/ICheckable.cs | 29 - .../Checks/IgnoreAllErrorsCheckNotifier.cs | 20 - .../Checks/ReplayCheckable.cs | 34 - .../Checks/ThrowImmediatelyCheckNotifier.cs | 49 - .../Checks/ToMemoryCheckNotifier.cs | 73 - .../Comments/CommentStore.cs | 357 - .../Comments/XmlDocumentExtensions.cs | 51 - .../DataAccess/DataAccessContext.cs | 18 - .../DataAccess/DataAccessPortal.cs | 138 - .../DataAccess/IDataAccessCredentials.cs | 19 - .../DataAccess/IDataAccessPoint.cs | 57 - .../DataAccess/IEncryptedPasswordHost.cs | 28 - .../DatabaseCommandHelper.cs | 129 - Reusable/ReusableLibraryCode/Diff.cs | 543 - .../DirectoryInfoComparer.cs | 64 - .../ReusableLibraryCode/ExceptionHelper.cs | 68 - .../ExpectedIdenticalStringsException.cs | 108 - .../Extensions/DataTableExtensions.cs | 40 - .../Extensions/VersionExtensions.cs | 70 - .../ICustomSearchString.cs | 21 - .../ReusableLibraryCode/IHasDependencies.cs | 29 - Reusable/ReusableLibraryCode/IHasSummary.cs | 25 - Reusable/ReusableLibraryCode/IKnowWhatIAm.cs | 21 - .../IconProvision/DatabaseTypeIconProvider.cs | 76 - .../Icons/IconProvision/OverlayKind.cs | 40 - .../ReusableLibraryCode/ObjectExtensions.cs | 36 - .../ComprehensiveQueryPerformanceCounter.cs | 41 - .../Progress/ForkDataLoadEventListener.cs | 35 - ...romCheckNotifierToDataLoadEventListener.cs | 44 - ...romDataLoadEventListenerToCheckNotifier.cs | 33 - .../Progress/IDataLoadEventListener.cs | 20 - .../Progress/NotifyEventArgs.cs 
| 80 - .../Progress/ProgressEventArgs.cs | 39 - .../Progress/ProgressMeasurement.cs | 32 - .../ThrowImmediatelyDataLoadEventListener.cs | 44 - .../Progress/ToMemoryDataLoadEventListener.cs | 95 - .../Properties/AssemblyInfo.cs | 17 - .../ReusableLibraryCode.csproj | 79 - Reusable/ReusableLibraryCode/Rfc4180Writer.cs | 90 - .../IgnorableSerializerContractResolver.cs | 79 - .../Settings/RDMPApplicationSettings.cs | 533 - .../Settings/UserSettings.cs | 612 - Reusable/ReusableLibraryCode/UsefulStuff.cs | 646 - .../VisualStudioSolutionFile.cs | 105 - .../VisualStudioSolutionFolder.cs | 29 - .../ChangeLogIsCorrectTests.cs | 48 - .../DataTableExtensionsTests.cs | 82 - .../ExpectedIdenticalStringsExceptionTests.cs | 135 - .../ReusableCodeTests/NuspecIsCorrectTests.cs | 129 - .../Properties/AssemblyInfo.cs | 17 - .../ReusableCodeTests.csproj | 36 - .../ReusableCodeTests/UsefulStuffTests.cs | 88 - .../ReusableCodeTests/UsefulStuffUnitTests.cs | 33 - SharedAssemblyInfo.cs | 12 +- Tests.Common/All.cs | 60 +- Tests.Common/DatabaseTests.cs | 1211 +- Tests.Common/Databases.appveyor.yaml | 2 - Tests.Common/GovernancePeriod_Catalogue.cs | 7 +- Tests.Common/Helpers/TestDataInventor.cs | 113 +- .../Helpers/TestDataPipelineAssembler.cs | 91 +- Tests.Common/Helpers/TestDataWriter.cs | 65 +- Tests.Common/Helpers/TestDataWriterChunk.cs | 21 +- Tests.Common/Properties/AssemblyInfo.cs | 6 +- Tests.Common/RdmpMockFactory.cs | 128 +- Tests.Common/Scenarios/BulkTestsData.cs | 352 +- Tests.Common/Scenarios/FromToDatabaseTests.cs | 89 +- Tests.Common/Scenarios/TestsRequiringA.cs | 73 +- .../Scenarios/TestsRequiringACohort.cs | 307 +- Tests.Common/Scenarios/TestsRequiringADle.cs | 257 +- .../Scenarios/TestsRequiringANOStore.cs | 136 +- ...TestsRequiringAnExtractionConfiguration.cs | 325 +- .../TestsRequiringFullAnonymisationSuite.cs | 56 +- .../TestCaseNotWrittenYetException.cs | 16 +- Tests.Common/TestDatabases.appveyor.txt | 25 - Tests.Common/TestDatabasesSettings.cs | 88 +- Tests.Common/TestDirectoryHelper.cs | 70 +- Tests.Common/TestLoadDirectory.cs | 34 +- Tests.Common/TestableTableValuedFunction.cs | 81 +- Tests.Common/Tests.Common.csproj | 84 +- Tests.Common/UnitTests.cs | 1296 +- Tools/rdmp/CommandLine/ColorSettings.cs | 24 +- .../CommandLine/Gui/ConsoleGuiActivator.cs | 574 +- .../CommandLine/Gui/ConsoleGuiBigListBox.cs | 486 +- ...eGuiCohortIdentificationConfigurationUI.cs | 382 +- .../Gui/ConsoleGuiContextMenuFactory.cs | 271 +- .../Gui/ConsoleGuiDataTableViewerUI.cs | 26 +- .../rdmp/CommandLine/Gui/ConsoleGuiOptions.cs | 26 +- .../CommandLine/Gui/ConsoleGuiRunPipeline.cs | 399 +- .../rdmp/CommandLine/Gui/ConsoleGuiRunner.cs | 104 +- .../CommandLine/Gui/ConsoleGuiSelectOne.cs | 148 +- .../ConsoleGuiServerDatabaseTableSelector.cs | 508 +- .../CommandLine/Gui/ConsoleGuiSqlEditor.cs | 661 +- .../CommandLine/Gui/ConsoleGuiTextDialog.cs | 208 +- .../CommandLine/Gui/ConsoleGuiUserSettings.cs | 234 +- .../CommandLine/Gui/ConsoleGuiViewGraph.cs | 533 +- .../CommandLine/Gui/ConsoleGuiViewLogs.cs | 395 +- .../rdmp/CommandLine/Gui/ConsoleMainWindow.cs | 871 +- .../Gui/ExecuteCommandRunConsoleGuiView.cs | 41 +- Tools/rdmp/CommandLine/Gui/LoadingDialog.cs | 3 +- .../CommandLine/Gui/Windows/ConsoleGuiEdit.cs | 252 +- .../Gui/Windows/ConsoleGuiSelectMany.cs | 207 +- .../Windows/RunnerWindows/RunCacheWindow.cs | 20 +- .../RunDataQualityEngineWindow.cs | 21 +- .../Gui/Windows/RunnerWindows/RunDleWindow.cs | 61 +- .../Windows/RunnerWindows/RunEngineWindow.cs | 365 +- .../RunnerWindows/RunExtractionWindow.cs | 59 +- 
.../Windows/RunnerWindows/RunReleaseWindow.cs | 69 +- Tools/rdmp/Databases.yaml | 4 +- Tools/rdmp/NLog.config | 12 +- Tools/rdmp/Program.cs | 301 +- Tools/rdmp/Properties/AssemblyInfo.cs | 2 +- Tools/rdmp/rdmp.csproj | 13 +- deadlinksconfig.json | 15 +- directory.build.props | 6 + lgtm.yml | 6 - rdmp-client.xml | 6 +- scripts/orphan_extractable_column.yaml | 2 +- upgrade-assistant.clef | 5434 + yaml/LoadModuleAssembly/2.yaml | 4 + yaml/Plugin/1.yaml | 5 + 2397 files changed, 292788 insertions(+), 260810 deletions(-) create mode 100644 .github/workflows/codeql.yml create mode 100644 AnalysisReport.sarif delete mode 100644 Application/ResearchDataManagementPlatform/RDMP.nuspec delete mode 100644 Application/ResearchDataManagementPlatform/publish.bat delete mode 100644 Plugins/Plugin.Test/Plugin.Test.csproj delete mode 100644 Plugins/Plugin.Test/Plugin.Test.nuspec delete mode 100644 Plugins/Plugin.UI/Plugin.UI.csproj delete mode 100644 Plugins/Plugin.UI/Plugin.UI.nuspec delete mode 100644 Plugins/Plugin/Plugin.csproj delete mode 100644 Plugins/Plugin/Plugin.nuspec create mode 100644 Rdmp.Core.Tests/CommandExecution/TestExecuteCommandClearUserSettings.cs delete mode 100644 Rdmp.Core.Tests/DataLoad/Engine/Integration/WebFileDownloaderTests.cs create mode 100644 Rdmp.Core.Tests/ReusableCodeTests/ChangeLogIsCorrectTests.cs create mode 100644 Rdmp.Core.Tests/ReusableCodeTests/DataTableExtensionsTests.cs create mode 100644 Rdmp.Core.Tests/ReusableCodeTests/ExpectedIdenticalStringsExceptionTests.cs create mode 100644 Rdmp.Core.Tests/ReusableCodeTests/PackageListIsCorrectTests.cs create mode 100644 Rdmp.Core.Tests/ReusableCodeTests/UsefulStuffTests.cs create mode 100644 Rdmp.Core.Tests/ReusableCodeTests/UsefulStuffUnitTests.cs rename Rdmp.Core/CohortCommitting/{Readme.md => CohortCommitting.md} (100%) rename Rdmp.Core/CohortCreation/{Readme.md => CohortCreation.md} (100%) create mode 100644 Rdmp.Core/CommandExecution/AtomicCommands/ExecuteCommandClearUserSettings.cs create mode 100644 Rdmp.Core/DataFlowPipeline/Requirements/Exceptions/MultipleMatchingImplementationException.cs delete mode 100644 Rdmp.Core/DataLoad/Modules/NativeFileIO.cs rename Rdmp.UI/NativeMethods.cs => Rdmp.Core/MapsDirectlyToDatabaseTable/Attributes/AdjustableLocationAttribute.cs (68%) create mode 100644 Rdmp.Core/MapsDirectlyToDatabaseTable/Attributes/AttributePropertyFinder.cs rename {Reusable => Rdmp.Core}/MapsDirectlyToDatabaseTable/Attributes/IAttributePropertyFinder.cs (61%) create mode 100644 Rdmp.Core/MapsDirectlyToDatabaseTable/Attributes/RelationshipAttribute.cs create mode 100644 Rdmp.Core/MapsDirectlyToDatabaseTable/Attributes/RelationshipType.cs rename Reusable/MapsDirectlyToDatabaseTable/Versioning/InvalidPatchException.cs => Rdmp.Core/MapsDirectlyToDatabaseTable/Attributes/SqlAttribute.cs (61%) create mode 100644 Rdmp.Core/MapsDirectlyToDatabaseTable/Attributes/UniqueAttribute.cs rename Reusable/MapsDirectlyToDatabaseTable/Attributes/UniqueAttribute.cs => Rdmp.Core/MapsDirectlyToDatabaseTable/Attributes/UsefulPropertyAttribute.cs (65%) rename Rdmp.Core/{DataFlowPipeline/Requirements/Exceptions/MultipleMatchingImplmentationException.cs => MapsDirectlyToDatabaseTable/DoNotImportDescriptionsAttribute.cs} (55%) rename {Reusable => Rdmp.Core}/MapsDirectlyToDatabaseTable/EmptyDisposeable.cs (84%) create mode 100644 Rdmp.Core/MapsDirectlyToDatabaseTable/ICanBeSummarised.cs create mode 100644 Rdmp.Core/MapsDirectlyToDatabaseTable/IDeletableWithCustomMessage.cs create mode 100644 
Rdmp.Core/MapsDirectlyToDatabaseTable/IDeleteable.cs rename Reusable/MapsDirectlyToDatabaseTable/Attributes/SqlAttribute.cs => Rdmp.Core/MapsDirectlyToDatabaseTable/IDisableable.cs (64%) create mode 100644 Rdmp.Core/MapsDirectlyToDatabaseTable/IMapsDirectlyToDatabaseTable.cs rename {Reusable => Rdmp.Core}/MapsDirectlyToDatabaseTable/IMapsDirectlyToDatabaseTableEventArgs.cs (64%) create mode 100644 Rdmp.Core/MapsDirectlyToDatabaseTable/IMightBeDeprecated.cs rename Reusable/MapsDirectlyToDatabaseTable/Revertable/PropertyChangedExtendedEventArgs.cs => Rdmp.Core/MapsDirectlyToDatabaseTable/INamed.cs (54%) create mode 100644 Rdmp.Core/MapsDirectlyToDatabaseTable/IObscureDependencyFinder.cs create mode 100644 Rdmp.Core/MapsDirectlyToDatabaseTable/IRepository.cs create mode 100644 Rdmp.Core/MapsDirectlyToDatabaseTable/ISaveable.cs create mode 100644 Rdmp.Core/MapsDirectlyToDatabaseTable/ITableRepository.cs create mode 100644 Rdmp.Core/MapsDirectlyToDatabaseTable/Injection/IInjectKnown.cs rename Reusable/MapsDirectlyToDatabaseTable/Injection/README.md => Rdmp.Core/MapsDirectlyToDatabaseTable/Injection/Injection.md (100%) rename {Reusable => Rdmp.Core}/MapsDirectlyToDatabaseTable/MapsDirectlyToDatabaseTable.cd (100%) create mode 100644 Rdmp.Core/MapsDirectlyToDatabaseTable/MemoryRepository.cs create mode 100644 Rdmp.Core/MapsDirectlyToDatabaseTable/NewObjectPool.cs rename Reusable/MapsDirectlyToDatabaseTable/DatabaseConnectionConfiguration.cs => Rdmp.Core/MapsDirectlyToDatabaseTable/NoMappingToDatabase.cs (72%) create mode 100644 Rdmp.Core/MapsDirectlyToDatabaseTable/ObjectDeletedException.cs rename {Reusable => Rdmp.Core}/MapsDirectlyToDatabaseTable/Revertable/ChangeDescription.cs (80%) create mode 100644 Rdmp.Core/MapsDirectlyToDatabaseTable/Revertable/IRevertable.cs rename Reusable/MapsDirectlyToDatabaseTable/IMightBeDeprecated.cs => Rdmp.Core/MapsDirectlyToDatabaseTable/Revertable/PropertyChangedExtendedEventArgs.cs (52%) create mode 100644 Rdmp.Core/MapsDirectlyToDatabaseTable/Revertable/RevertableObjectReport.cs create mode 100644 Rdmp.Core/MapsDirectlyToDatabaseTable/Revertable/RevertablePropertyDifference.cs rename {Reusable => Rdmp.Core}/MapsDirectlyToDatabaseTable/SaveEventArgs.cs (53%) create mode 100644 Rdmp.Core/MapsDirectlyToDatabaseTable/TableRepository.cs create mode 100644 Rdmp.Core/MapsDirectlyToDatabaseTable/UpdateCommandStore.cs create mode 100644 Rdmp.Core/MapsDirectlyToDatabaseTable/Versioning/DatabaseVersionProvider.cs create mode 100644 Rdmp.Core/MapsDirectlyToDatabaseTable/Versioning/IPatcher.cs create mode 100644 Rdmp.Core/MapsDirectlyToDatabaseTable/Versioning/InvalidPatchException.cs create mode 100644 Rdmp.Core/MapsDirectlyToDatabaseTable/Versioning/MasterDatabaseScriptExecutor.cs create mode 100644 Rdmp.Core/MapsDirectlyToDatabaseTable/Versioning/Patch.cs create mode 100644 Rdmp.Core/MapsDirectlyToDatabaseTable/Versioning/Patcher.cs rename Reusable/MapsDirectlyToDatabaseTable/Attributes/AdjustableLocationAttribute.cs => Rdmp.Core/MapsDirectlyToDatabaseTable/Versioning/PluginPatcher.cs (67%) rename Rdmp.Core/Providers/{Readme.md => Providers.md} (100%) create mode 100644 Rdmp.Core/ReusableLibraryCode/Annotations/Annotations.cs create mode 100644 Rdmp.Core/ReusableLibraryCode/AssemblyResolver.cs create mode 100644 Rdmp.Core/ReusableLibraryCode/Checks/AcceptAllCheckNotifier.cs create mode 100644 Rdmp.Core/ReusableLibraryCode/Checks/CheckEventArgs.cs create mode 100644 Rdmp.Core/ReusableLibraryCode/Checks/ErrorCode.cs create mode 100644 
Rdmp.Core/ReusableLibraryCode/Checks/ErrorCodes.cs create mode 100644 Rdmp.Core/ReusableLibraryCode/Checks/ICheckNotifier.cs rename Reusable/ReusableLibraryCode/Performance/QueryPerformed.cs => Rdmp.Core/ReusableLibraryCode/Checks/ICheckable.cs (50%) rename Rdmp.Core.Tests/Curation/LazyTest.cs => Rdmp.Core/ReusableLibraryCode/Checks/IgnoreAllErrorsCheckNotifier.cs (56%) create mode 100644 Rdmp.Core/ReusableLibraryCode/Checks/ReplayCheckable.cs create mode 100644 Rdmp.Core/ReusableLibraryCode/Checks/ThrowImmediatelyCheckNotifier.cs create mode 100644 Rdmp.Core/ReusableLibraryCode/Checks/ToMemoryCheckNotifier.cs create mode 100644 Rdmp.Core/ReusableLibraryCode/Comments/CommentStore.cs create mode 100644 Rdmp.Core/ReusableLibraryCode/Comments/XmlDocumentExtensions.cs rename Reusable/MapsDirectlyToDatabaseTable/Versioning/PluginPatcher.cs => Rdmp.Core/ReusableLibraryCode/DataAccess/DataAccessContext.cs (65%) create mode 100644 Rdmp.Core/ReusableLibraryCode/DataAccess/DataAccessPortal.cs rename Reusable/MapsDirectlyToDatabaseTable/NoMappingToDatabase.cs => Rdmp.Core/ReusableLibraryCode/DataAccess/IDataAccessCredentials.cs (70%) create mode 100644 Rdmp.Core/ReusableLibraryCode/DataAccess/IDataAccessPoint.cs create mode 100644 Rdmp.Core/ReusableLibraryCode/DataAccess/IEncryptedPasswordHost.cs create mode 100644 Rdmp.Core/ReusableLibraryCode/DatabaseCommandHelper.cs create mode 100644 Rdmp.Core/ReusableLibraryCode/Diff.cs create mode 100644 Rdmp.Core/ReusableLibraryCode/DirectoryInfoComparer.cs rename {Reusable => Rdmp.Core}/ReusableLibraryCode/DoNotExtractProperty.cs (66%) create mode 100644 Rdmp.Core/ReusableLibraryCode/ExceptionHelper.cs create mode 100644 Rdmp.Core/ReusableLibraryCode/Exceptions/ExpectedIdenticalStringsException.cs create mode 100644 Rdmp.Core/ReusableLibraryCode/Extensions/DataTableExtensions.cs create mode 100644 Rdmp.Core/ReusableLibraryCode/Extensions/VersionExtensions.cs rename {Reusable => Rdmp.Core}/ReusableLibraryCode/Helpers/Settings.cs (52%) create mode 100644 Rdmp.Core/ReusableLibraryCode/ICustomSearchString.cs create mode 100644 Rdmp.Core/ReusableLibraryCode/IHasDependencies.cs create mode 100644 Rdmp.Core/ReusableLibraryCode/IHasSummary.cs create mode 100644 Rdmp.Core/ReusableLibraryCode/IKnowWhatIAm.cs rename {Reusable => Rdmp.Core}/ReusableLibraryCode/Icons/DatabaseProviderIcons.Designer.cs (96%) rename {Reusable => Rdmp.Core}/ReusableLibraryCode/Icons/DatabaseProviderIcons.resx (100%) rename {Reusable => Rdmp.Core}/ReusableLibraryCode/Icons/DatabaseProviderIcons/Microsoft.png (100%) rename {Reusable => Rdmp.Core}/ReusableLibraryCode/Icons/DatabaseProviderIcons/MicrosoftOverlay.png (100%) rename {Reusable => Rdmp.Core}/ReusableLibraryCode/Icons/DatabaseProviderIcons/MySql.png (100%) rename {Reusable => Rdmp.Core}/ReusableLibraryCode/Icons/DatabaseProviderIcons/MySqlOverlay.png (100%) rename {Reusable => Rdmp.Core}/ReusableLibraryCode/Icons/DatabaseProviderIcons/Oracle.png (100%) rename {Reusable => Rdmp.Core}/ReusableLibraryCode/Icons/DatabaseProviderIcons/OracleOverlay.png (100%) rename {Reusable => Rdmp.Core}/ReusableLibraryCode/Icons/DatabaseProviderIcons/PostgreSql.png (100%) rename {Reusable => Rdmp.Core}/ReusableLibraryCode/Icons/DatabaseProviderIcons/PostgreSqlOverlay.png (100%) rename {Reusable => Rdmp.Core}/ReusableLibraryCode/Icons/DatabaseProviderIcons/Unknown.png (100%) rename {Reusable => Rdmp.Core}/ReusableLibraryCode/Icons/DatabaseProviderIcons/UnknownOverlay.png (100%) create mode 100644 
Rdmp.Core/ReusableLibraryCode/Icons/IconProvision/DatabaseTypeIconProvider.cs rename {Reusable => Rdmp.Core}/ReusableLibraryCode/Icons/IconProvision/IIconProvider.cs (67%) create mode 100644 Rdmp.Core/ReusableLibraryCode/Icons/IconProvision/OverlayKind.cs rename Reusable/MapsDirectlyToDatabaseTable/ICanBeSummarised.cs => Rdmp.Core/ReusableLibraryCode/ObjectExtensions.cs (51%) create mode 100644 Rdmp.Core/ReusableLibraryCode/Performance/ComprehensiveQueryPerformanceCounter.cs rename Reusable/ReusableLibraryCode/VisualStudioSolutionFileProcessing/VisualStudioProjectReference.cs => Rdmp.Core/ReusableLibraryCode/Performance/QueryPerformed.cs (57%) rename {Reusable => Rdmp.Core}/ReusableLibraryCode/Progress/EventType.cs (80%) create mode 100644 Rdmp.Core/ReusableLibraryCode/Progress/ForkDataLoadEventListener.cs create mode 100644 Rdmp.Core/ReusableLibraryCode/Progress/FromCheckNotifierToDataLoadEventListener.cs create mode 100644 Rdmp.Core/ReusableLibraryCode/Progress/FromDataLoadEventListenerToCheckNotifier.cs create mode 100644 Rdmp.Core/ReusableLibraryCode/Progress/IDataLoadEventListener.cs create mode 100644 Rdmp.Core/ReusableLibraryCode/Progress/NotifyEventArgs.cs create mode 100644 Rdmp.Core/ReusableLibraryCode/Progress/ProgressEventArgs.cs create mode 100644 Rdmp.Core/ReusableLibraryCode/Progress/ProgressMeasurement.cs rename {Reusable => Rdmp.Core}/ReusableLibraryCode/Progress/ProgressType.cs (86%) create mode 100644 Rdmp.Core/ReusableLibraryCode/Progress/ThrowImmediatelyDataLoadEventListener.cs create mode 100644 Rdmp.Core/ReusableLibraryCode/Progress/ToMemoryDataLoadEventListener.cs create mode 100644 Rdmp.Core/ReusableLibraryCode/Rfc4180Writer.cs create mode 100644 Rdmp.Core/ReusableLibraryCode/Serialization/IgnorableSerializerContractResolver.cs create mode 100644 Rdmp.Core/ReusableLibraryCode/Settings/RDMPApplicationSettings.cs create mode 100644 Rdmp.Core/ReusableLibraryCode/Settings/UserSettings.cs create mode 100644 Rdmp.Core/ReusableLibraryCode/UsefulStuff.cs create mode 100644 Rdmp.Core/ReusableLibraryCode/VisualStudioSolutionFileProcessing/VisualStudioProjectReference.cs create mode 100644 Rdmp.Core/ReusableLibraryCode/VisualStudioSolutionFileProcessing/VisualStudioSolutionFile.cs create mode 100644 Rdmp.Core/ReusableLibraryCode/VisualStudioSolutionFileProcessing/VisualStudioSolutionFolder.cs create mode 100644 Rdmp.Core/Validation/Constraints/Secondary/Predictor/ValuePredictsOtherValueNullity.cs delete mode 100644 Rdmp.Core/Validation/Constraints/Secondary/Predictor/ValuePredictsOtherValueNullness.cs delete mode 100644 Rdmp.UI/Hunspellx64.dll delete mode 100644 Rdmp.UI/Hunspellx86.dll delete mode 100644 Rdmp.UI/Pencil.cur create mode 100644 Rdmp.UI/en_GB.aff create mode 100644 Rdmp.UI/en_GB.dic delete mode 100644 Rdmp.UI/en_US.aff delete mode 100644 Rdmp.UI/en_US.dic delete mode 100644 Reusable/MapsDirectlyToDatabaseTable/Attributes/AttributePropertyFinder.cs delete mode 100644 Reusable/MapsDirectlyToDatabaseTable/Attributes/RelationshipAttribute.cs delete mode 100644 Reusable/MapsDirectlyToDatabaseTable/Attributes/RelationshipType.cs delete mode 100644 Reusable/MapsDirectlyToDatabaseTable/Attributes/UsefulPropertyAttribute.cs delete mode 100644 Reusable/MapsDirectlyToDatabaseTable/DoNotImportDescriptionsAttribute.cs delete mode 100644 Reusable/MapsDirectlyToDatabaseTable/IDeletableWithCustomMessage.cs delete mode 100644 Reusable/MapsDirectlyToDatabaseTable/IDeleteable.cs delete mode 100644 Reusable/MapsDirectlyToDatabaseTable/IDisableable.cs delete mode 100644 
Reusable/MapsDirectlyToDatabaseTable/IMapsDirectlyToDatabaseTable.cs delete mode 100644 Reusable/MapsDirectlyToDatabaseTable/INamed.cs delete mode 100644 Reusable/MapsDirectlyToDatabaseTable/IObscureDependencyFinder.cs delete mode 100644 Reusable/MapsDirectlyToDatabaseTable/IRepository.cs delete mode 100644 Reusable/MapsDirectlyToDatabaseTable/ISaveable.cs delete mode 100644 Reusable/MapsDirectlyToDatabaseTable/ITableRepository.cs delete mode 100644 Reusable/MapsDirectlyToDatabaseTable/Injection/IInjectKnown.cs delete mode 100644 Reusable/MapsDirectlyToDatabaseTable/MapsDirectlyToDatabaseTable.csproj delete mode 100644 Reusable/MapsDirectlyToDatabaseTable/MemoryRepository.cs delete mode 100644 Reusable/MapsDirectlyToDatabaseTable/NewObjectPool.cs delete mode 100644 Reusable/MapsDirectlyToDatabaseTable/ObjectDeletedException.cs delete mode 100644 Reusable/MapsDirectlyToDatabaseTable/Properties/AssemblyInfo.cs delete mode 100644 Reusable/MapsDirectlyToDatabaseTable/Revertable/IRevertable.cs delete mode 100644 Reusable/MapsDirectlyToDatabaseTable/Revertable/RevertableObjectReport.cs delete mode 100644 Reusable/MapsDirectlyToDatabaseTable/Revertable/RevertablePropertyDifference.cs delete mode 100644 Reusable/MapsDirectlyToDatabaseTable/TableRepository.cs delete mode 100644 Reusable/MapsDirectlyToDatabaseTable/UpdateCommandStore.cs delete mode 100644 Reusable/MapsDirectlyToDatabaseTable/Versioning/DatabaseVersionProvider.cs delete mode 100644 Reusable/MapsDirectlyToDatabaseTable/Versioning/IPatcher.cs delete mode 100644 Reusable/MapsDirectlyToDatabaseTable/Versioning/MasterDatabaseScriptExecutor.cs delete mode 100644 Reusable/MapsDirectlyToDatabaseTable/Versioning/Patch.cs delete mode 100644 Reusable/MapsDirectlyToDatabaseTable/Versioning/Patcher.cs delete mode 100644 Reusable/ReusableLibraryCode/Annotations/Annotations.cs delete mode 100644 Reusable/ReusableLibraryCode/AssemblyResolver.cs delete mode 100644 Reusable/ReusableLibraryCode/Checks/AcceptAllCheckNotifier.cs delete mode 100644 Reusable/ReusableLibraryCode/Checks/CheckEventArgs.cs delete mode 100644 Reusable/ReusableLibraryCode/Checks/ErrorCode.cs delete mode 100644 Reusable/ReusableLibraryCode/Checks/ErrorCodes.cs delete mode 100644 Reusable/ReusableLibraryCode/Checks/ICheckNotifier.cs delete mode 100644 Reusable/ReusableLibraryCode/Checks/ICheckable.cs delete mode 100644 Reusable/ReusableLibraryCode/Checks/IgnoreAllErrorsCheckNotifier.cs delete mode 100644 Reusable/ReusableLibraryCode/Checks/ReplayCheckable.cs delete mode 100644 Reusable/ReusableLibraryCode/Checks/ThrowImmediatelyCheckNotifier.cs delete mode 100644 Reusable/ReusableLibraryCode/Checks/ToMemoryCheckNotifier.cs delete mode 100644 Reusable/ReusableLibraryCode/Comments/CommentStore.cs delete mode 100644 Reusable/ReusableLibraryCode/Comments/XmlDocumentExtensions.cs delete mode 100644 Reusable/ReusableLibraryCode/DataAccess/DataAccessContext.cs delete mode 100644 Reusable/ReusableLibraryCode/DataAccess/DataAccessPortal.cs delete mode 100644 Reusable/ReusableLibraryCode/DataAccess/IDataAccessCredentials.cs delete mode 100644 Reusable/ReusableLibraryCode/DataAccess/IDataAccessPoint.cs delete mode 100644 Reusable/ReusableLibraryCode/DataAccess/IEncryptedPasswordHost.cs delete mode 100644 Reusable/ReusableLibraryCode/DatabaseCommandHelper.cs delete mode 100644 Reusable/ReusableLibraryCode/Diff.cs delete mode 100644 Reusable/ReusableLibraryCode/DirectoryInfoComparer.cs delete mode 100644 Reusable/ReusableLibraryCode/ExceptionHelper.cs delete mode 100644 
Reusable/ReusableLibraryCode/Exceptions/ExpectedIdenticalStringsException.cs delete mode 100644 Reusable/ReusableLibraryCode/Extensions/DataTableExtensions.cs delete mode 100644 Reusable/ReusableLibraryCode/Extensions/VersionExtensions.cs delete mode 100644 Reusable/ReusableLibraryCode/ICustomSearchString.cs delete mode 100644 Reusable/ReusableLibraryCode/IHasDependencies.cs delete mode 100644 Reusable/ReusableLibraryCode/IHasSummary.cs delete mode 100644 Reusable/ReusableLibraryCode/IKnowWhatIAm.cs delete mode 100644 Reusable/ReusableLibraryCode/Icons/IconProvision/DatabaseTypeIconProvider.cs delete mode 100644 Reusable/ReusableLibraryCode/Icons/IconProvision/OverlayKind.cs delete mode 100644 Reusable/ReusableLibraryCode/ObjectExtensions.cs delete mode 100644 Reusable/ReusableLibraryCode/Performance/ComprehensiveQueryPerformanceCounter.cs delete mode 100644 Reusable/ReusableLibraryCode/Progress/ForkDataLoadEventListener.cs delete mode 100644 Reusable/ReusableLibraryCode/Progress/FromCheckNotifierToDataLoadEventListener.cs delete mode 100644 Reusable/ReusableLibraryCode/Progress/FromDataLoadEventListenerToCheckNotifier.cs delete mode 100644 Reusable/ReusableLibraryCode/Progress/IDataLoadEventListener.cs delete mode 100644 Reusable/ReusableLibraryCode/Progress/NotifyEventArgs.cs delete mode 100644 Reusable/ReusableLibraryCode/Progress/ProgressEventArgs.cs delete mode 100644 Reusable/ReusableLibraryCode/Progress/ProgressMeasurement.cs delete mode 100644 Reusable/ReusableLibraryCode/Progress/ThrowImmediatelyDataLoadEventListener.cs delete mode 100644 Reusable/ReusableLibraryCode/Progress/ToMemoryDataLoadEventListener.cs delete mode 100644 Reusable/ReusableLibraryCode/Properties/AssemblyInfo.cs delete mode 100644 Reusable/ReusableLibraryCode/ReusableLibraryCode.csproj delete mode 100644 Reusable/ReusableLibraryCode/Rfc4180Writer.cs delete mode 100644 Reusable/ReusableLibraryCode/Serialization/IgnorableSerializerContractResolver.cs delete mode 100644 Reusable/ReusableLibraryCode/Settings/RDMPApplicationSettings.cs delete mode 100644 Reusable/ReusableLibraryCode/Settings/UserSettings.cs delete mode 100644 Reusable/ReusableLibraryCode/UsefulStuff.cs delete mode 100644 Reusable/ReusableLibraryCode/VisualStudioSolutionFileProcessing/VisualStudioSolutionFile.cs delete mode 100644 Reusable/ReusableLibraryCode/VisualStudioSolutionFileProcessing/VisualStudioSolutionFolder.cs delete mode 100644 Reusable/Tests/ReusableCodeTests/ChangeLogIsCorrectTests.cs delete mode 100644 Reusable/Tests/ReusableCodeTests/DataTableExtensionsTests.cs delete mode 100644 Reusable/Tests/ReusableCodeTests/ExpectedIdenticalStringsExceptionTests.cs delete mode 100644 Reusable/Tests/ReusableCodeTests/NuspecIsCorrectTests.cs delete mode 100644 Reusable/Tests/ReusableCodeTests/Properties/AssemblyInfo.cs delete mode 100644 Reusable/Tests/ReusableCodeTests/ReusableCodeTests.csproj delete mode 100644 Reusable/Tests/ReusableCodeTests/UsefulStuffTests.cs delete mode 100644 Reusable/Tests/ReusableCodeTests/UsefulStuffUnitTests.cs delete mode 100644 Tests.Common/Databases.appveyor.yaml delete mode 100644 Tests.Common/TestDatabases.appveyor.txt create mode 100644 directory.build.props delete mode 100644 lgtm.yml create mode 100644 upgrade-assistant.clef create mode 100644 yaml/LoadModuleAssembly/2.yaml create mode 100644 yaml/Plugin/1.yaml diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index baecbd7ade..a9ab6ded0d 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -22,30 +22,26 @@ jobs: 
shell: bash run: touch package-lock.json - name: Install Node for coverage reporting - uses: actions/setup-node@v3.5.1 + uses: actions/setup-node@v3.8.1 with: node-version: '16.x' cache: 'npm' - name: LCov merger tool run: npm install -g lcov-result-merger - name: Checkout code - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Determine RDMP build version id: version - shell: cmd - run: perl -ne "print \"::set-output name=rdmpversion::$1\" if /AssemblyVersion\(\"([0-9.]+)\"\)/;" SharedAssemblyInfo.cs - - uses: actions/cache@v3 - with: - path: ~/.nuget/packages - key: ${{ runner.os }}-nuget-${{ hashFiles('**/packages.lock.json') }} - restore-keys: | - ${{ runner.os }}-nuget- + shell: bash + run: perl -ne "print \"rdmpversion=\$1\n\" if /AssemblyInformationalVersion\(\"([0-9a-z.-]+)\"\)/;" SharedAssemblyInfo.cs >> $GITHUB_OUTPUT + - name: Check output + run: echo ${{ steps.version.outputs.rdmpversion }} - name: Setup .NET Core - uses: actions/setup-dotnet@v3.0.3 + uses: actions/setup-dotnet@v3.2.0 with: dotnet-version: 6.0.x - name: Install MS SQL 2019 Express LocalDB - uses: crazy-max/ghaction-chocolatey@v2 + uses: crazy-max/ghaction-chocolatey@v3 with: args: install -r sqllocaldb --no-progress - name: Initialise LocalDB @@ -59,17 +55,20 @@ jobs: mysql-version: '8.0' root-password: 'YourStrong!Passw0rd' auto-start: true - - name: Create MySql Logging Db - run: dotnet run -c Release --project Tools/rdmp/rdmp.csproj -- createnewexternaldatabaseserver LiveLoggingServer_ID "DatabaseType:MySQL:Server=127.0.0.1;Uid=root;Pwd=YourStrong!Passw0rd;Database=rdmp_logging2" --dir ~/rdmp/rdmp-yaml/ - - name: Create MySql DQE Db - run: dotnet run -c Release --project Tools/rdmp/rdmp.csproj -- createnewexternaldatabaseserver DQE "DatabaseType:MySQL:Server=127.0.0.1;Uid=root;Pwd=YourStrong!Passw0rd;Database=rdmp_dqe" --dir ~/rdmp/rdmp-yaml/ - - name: Create MySql Cohort Building Query Caching Db - run: dotnet run -c Release --project Tools/rdmp/rdmp.csproj -- createnewexternaldatabaseserver CohortIdentificationQueryCachingServer_ID "DatabaseType:MySQL:Server=127.0.0.1;Uid=root;Pwd=YourStrong!Passw0rd;Database=rdmp_cache" --dir ~/rdmp/rdmp-yaml/ + - name: Populate Databases.yaml + shell: bash + run: | + find ./Tools/rdmp/Databases.yaml -type f -exec sed -i 's/RDMP_/TEST_/g' {} \; - name: Build run: dotnet build --configuration Release --verbosity minimal + - name: Create MySql Logging, DQE and Cohort Building Cache Db + run: | + dotnet run -c Release --no-build --project Tools/rdmp/rdmp.csproj -- createnewexternaldatabaseserver LiveLoggingServer_ID "DatabaseType:MySQL:Server=127.0.0.1;Uid=root;Pwd=YourStrong!Passw0rd;Database=rdmp_logging2" --dir ~/rdmp/rdmp-yaml/ + dotnet run -c Release --no-build --project Tools/rdmp/rdmp.csproj -- createnewexternaldatabaseserver DQE "DatabaseType:MySQL:Server=127.0.0.1;Uid=root;Pwd=YourStrong!Passw0rd;Database=rdmp_dqe" --dir ~/rdmp/rdmp-yaml/ + dotnet run -c Release --no-build --project Tools/rdmp/rdmp.csproj -- createnewexternaldatabaseserver CohortIdentificationQueryCachingServer_ID "DatabaseType:MySQL:Server=127.0.0.1;Uid=root;Pwd=YourStrong!Passw0rd;Database=rdmp_cache" --dir ~/rdmp/rdmp-yaml/ - name: Initialise RDMP - run: dotnet run -c Release --project Tools/rdmp/rdmp.csproj -- install --createdatabasetimeout 180 "(localdb)\MSSQLLocalDB" TEST_ -e - - name: Populate Databases.yaml + run: dotnet run -c Release --no-build --project Tools/rdmp/rdmp.csproj -- install --createdatabasetimeout 180 "(localdb)\MSSQLLocalDB" TEST_ -e + - name: Populate 
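The "Determine RDMP build version" step above now scrapes AssemblyInformationalVersion from SharedAssemblyInfo.cs and appends rdmpversion=<value> to $GITHUB_OUTPUT, replacing the deprecated ::set-output command and the old AssemblyVersion pattern. A minimal C# sketch of the equivalent extraction, assuming only that SharedAssemblyInfo.cs carries a standard [assembly: AssemblyInformationalVersion("x.y.z")] attribute; the regex and the printed format mirror the perl one-liner, everything else here is illustrative:

    using System;
    using System.IO;
    using System.Text.RegularExpressions;

    // Sketch of what the workflow's perl one-liner captures; this is not RDMP code.
    var text = File.ReadAllText("SharedAssemblyInfo.cs");
    var match = Regex.Match(text, @"AssemblyInformationalVersion\(""([0-9a-z.-]+)""\)");
    if (match.Success)
        Console.WriteLine($"rdmpversion={match.Groups[1].Value}"); // CI appends this line to $GITHUB_OUTPUT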
Databases.yaml shell: bash run: | cat > ./Tools/rdmp/Databases.yaml << EOF @@ -78,61 +77,40 @@ jobs: EOF - name: Run integration test scripts run: | - dotnet run -c Release --project Tools/rdmp/rdmp.csproj -- -f ./scripts/create_list_destroy_catalogue.yaml && - dotnet run -c Release --no-build --project Tools/rdmp/rdmp.csproj -- -f ./scripts/create_cohort.yaml && - dotnet run -c Release --no-build --project Tools/rdmp/rdmp.csproj -- -f ./scripts/create_dataload.yaml && - dotnet run -c Release --no-build --project Tools/rdmp/rdmp.csproj -- -f ./scripts/orphan_extractable_column.yaml && - - name: Test Reusable code + dotnet run -c Release --no-build --project Tools/rdmp/rdmp.csproj -- -f ./scripts/create_list_destroy_catalogue.yaml + dotnet run -c Release --no-build --project Tools/rdmp/rdmp.csproj -- -f ./scripts/create_cohort.yaml + dotnet run -c Release --no-build --project Tools/rdmp/rdmp.csproj -- -f ./scripts/create_dataload.yaml + dotnet run -c Release --no-build --project Tools/rdmp/rdmp.csproj -- -f ./scripts/orphan_extractable_column.yaml + - name: Test (DB) shell: bash run: | rm -rf coverage - dotnet test "Reusable/Tests/ReusableCodeTests/ReusableCodeTests.csproj" --nologo --collect:"XPlat Code Coverage" --no-build --verbosity minimal -c Release -r coverage -- DataCollectionRunSettings.DataCollectors.DataCollector.Configuration.Format=lcov - mv `find coverage -type f` recode.lcov - - name: Test Core code - shell: bash - run: | - dotnet test "./Rdmp.Core.Tests/Rdmp.Core.Tests.csproj" --nologo --collect:"XPlat Code Coverage" --no-build --verbosity minimal -c Release -r coverage -- DataCollectionRunSettings.DataCollectors.DataCollector.Configuration.Format=lcov - mv `find coverage -type f` core.lcov - - name: Test UI code - shell: bash - run: | - dotnet test "./Rdmp.UI.Tests/Rdmp.UI.Tests.csproj" --nologo --collect:"XPlat Code Coverage" --no-build --verbosity minimal -c Release -r coverage -- DataCollectionRunSettings.DataCollectors.DataCollector.Configuration.Format=lcov - mv `find coverage -type f` ui.lcov + dotnet test Rdmp.UI.Tests/Rdmp.UI.Tests.csproj --nologo --collect:"XPlat Code Coverage" --no-build --verbosity minimal -c Release --results-directory coverage -- DataCollectionRunSettings.DataCollectors.DataCollector.Configuration.Format=lcov + mv `find coverage -type f` db-ui.lcov + dotnet test Rdmp.Core.Tests/Rdmp.Core.Tests.csproj --nologo --collect:"XPlat Code Coverage" --no-build --verbosity minimal -c Release --results-directory coverage -- DataCollectionRunSettings.DataCollectors.DataCollector.Configuration.Format=lcov + mv `find coverage -type f` db-core.lcov - name: Test with local file system shell: bash run: | - echo "UseFileSystemRepo: true" >> Tests.Common/TestDatabases.txt - cat Tests.Common/TestDatabases.txt - - name: Test Reusable (with file system repo) - shell: bash - run: | - dotnet test "Reusable/Tests/ReusableCodeTests/ReusableCodeTests.csproj" --nologo --collect:"XPlat Code Coverage" --no-build --verbosity minimal -c Release -r coverage -- DataCollectionRunSettings.DataCollectors.DataCollector.Configuration.Format=lcov - mv `find coverage -type f` recodefs.lcov - - name: Test Core (with file system repo) - shell: bash - run: | - dotnet test "./Rdmp.Core.Tests/Rdmp.Core.Tests.csproj" --nologo --collect:"XPlat Code Coverage" --no-build --verbosity minimal -c Release -r coverage -- DataCollectionRunSettings.DataCollectors.DataCollector.Configuration.Format=lcov - mv `find coverage -type f` corefs.lcov - - name: Test UI (with file system repo) - shell: bash - 
run: | - dotnet test "./Rdmp.UI.Tests/Rdmp.UI.Tests.csproj" --nologo --collect:"XPlat Code Coverage" --no-build --verbosity minimal -c Release -r coverage -- DataCollectionRunSettings.DataCollectors.DataCollector.Configuration.Format=lcov - mv `find coverage -type f` uifs.lcov - + echo "UseFileSystemRepo: true" >> Tests.Common/TestDatabases.txt + dotnet test Rdmp.UI.Tests/Rdmp.UI.Tests.csproj --nologo --collect:"XPlat Code Coverage" --no-build --verbosity minimal -c Release --results-directory coverage -- DataCollectionRunSettings.DataCollectors.DataCollector.Configuration.Format=lcov + mv `find coverage -type f` fs-ui.lcov + dotnet test Rdmp.Core.Tests/Rdmp.Core.Tests.csproj --nologo --collect:"XPlat Code Coverage" --no-build --verbosity minimal -c Release --results-directory coverage -- DataCollectionRunSettings.DataCollectors.DataCollector.Configuration.Format=lcov + mv `find coverage -type f` fs-core.lcov - name: Merge LCovs - run: lcov-result-merger "{ui,core,recode}{,fs}.lcov" all.lcov + run: lcov-result-merger "{db,fs}-{ui,core}.lcov" all.lcov - name: Coveralls uses: coverallsapp/github-action@master with: github-token: ${{ secrets.github_token }} path-to-lcov: all.lcov flag-name: unit tests - + - name: Package run: | - dotnet publish Application/ResearchDataManagementPlatform/ResearchDataManagementPlatform.csproj -r win-x64 --self-contained -c Release -o PublishWinForms -p:GenerateDocumentationFile=false -p:PublishSingleFile=false --verbosity minimal --nologo - dotnet publish Tools/rdmp/rdmp.csproj -r win-x64 --self-contained -c Release -o PublishWindows -p:GenerateDocumentationFile=false -p:PublishSingleFile=false --verbosity minimal --nologo - dotnet publish Tools/rdmp/rdmp.csproj -r linux-x64 --self-contained -c Release -o PublishLinux -p:GenerateDocumentationFile=false -p:PublishSingleFile=false --verbosity minimal --nologo + dotnet publish Application/ResearchDataManagementPlatform/ResearchDataManagementPlatform.csproj -r win-x64 --self-contained -c Release -o PublishWinForms -p:GenerateDocumentationFile=false -p:PublishSingleFile=true -p:PublishReadyToRun=true --verbosity minimal --nologo + dotnet publish Tools/rdmp/rdmp.csproj -r win-x64 --self-contained -c Release -o PublishWindows -p:GenerateDocumentationFile=false -p:PublishSingleFile=true -p:PublishReadyToRun=true --verbosity minimal --nologo + dotnet publish Tools/rdmp/rdmp.csproj -r linux-x64 --self-contained -c Release -o PublishLinux -p:GenerateDocumentationFile=false -p:PublishSingleFile=true -p:PublishReadyToRun=true --verbosity minimal --nologo - name: BundleSource shell: bash run: | @@ -148,7 +126,7 @@ jobs: signtool=${signtool[${#signtool[@]}-1]} signtool=`echo $signtool | sed -e 's#^/c#c:#' | tr / \\\\` echo ${{ secrets.DIGICERT_PFX }} | base64 --decode > GitHubActionsWorkflow.pfx - echo '"'$signtool'"' 'Sign /f GitHubActionsWorkflow.pfx /fd sha256 /tr http://timestamp.digicert.com /td sha256 /p ${{ secrets.DIGICERT_PASSWORD }} PublishWindows/*.exe PublishWinForms/*.exe PublishWinForms/Hunspell*.dll' | cmd + echo '"'$signtool'"' 'Sign /f GitHubActionsWorkflow.pfx /fd sha256 /tr http://timestamp.digicert.com /td sha256 /p ${{ secrets.DIGICERT_PASSWORD }} PublishWindows/*.exe PublishWinForms/*.exe' | cmd mkdir -p dist cmd /c wix\\build.cmd ${{ steps.version.outputs.rdmpversion }} echo '"'$signtool'"' 'Sign /f GitHubActionsWorkflow.pfx /fd sha256 /tr http://timestamp.digicert.com /td sha256 /p ${{ secrets.DIGICERT_PASSWORD }} dist/rdmp.msi' | cmd @@ -159,7 +137,7 @@ jobs: (cd PublishWinForms ; echo 7z a -mx=9 
../dist/rdmp-${{ steps.version.outputs.rdmpversion }}-client.zip . | cmd) - name: Install Perl dependencies - uses: shogo82148/actions-setup-perl@v1 + uses: shogo82148/actions-setup-perl@v1.23.1 with: install-modules-with: cpanm install-modules: Archive::Zip Archive::Tar @@ -200,18 +178,16 @@ jobs: rm dist/rdmp-${{ steps.version.outputs.rdmpversion }}-cli-linux-x64.tar - name: Build Nuget packages + shell: bash run: | - nuget pack Plugins/Plugin/Plugin.nuspec -Properties Configuration=Release -IncludeReferencedProjects -Symbols -Version ${{ steps.version.outputs.rdmpversion }} - nuget pack Plugins/Plugin.UI/Plugin.UI.nuspec -Properties Configuration=Release -IncludeReferencedProjects -Symbols -Version ${{ steps.version.outputs.rdmpversion }} - nuget pack Plugins/Plugin.Test/Plugin.Test.nuspec -Properties Configuration=Release -IncludeReferencedProjects -Symbols -Version ${{ steps.version.outputs.rdmpversion }} - nuget pack Application/ResearchDataManagementPlatform/RDMP.nuspec -Properties Configuration=Release -Version ${{ steps.version.outputs.rdmpversion }} + for i in Rdmp.Core/Rdmp.Core.csproj Rdmp.UI/Rdmp.UI.csproj Tests.Common/Tests.Common.csproj + do + dotnet pack $i -c Release --include-symbols --nologo -o . -v:m -p:Version=${{ steps.version.outputs.rdmpversion }} + done - - name: Push Nuget packages + - name: Upload Nuget packages if: contains(github.ref, 'refs/tags/v') - run: | - nuget push HIC.RDMP.Plugin.${{ steps.version.outputs.rdmpversion }}.nupkg -skipDuplicate -Source https://api.nuget.org/v3/index.json -ApiKey ${{ secrets.NUGET_KEY }} - nuget push HIC.RDMP.Plugin.UI.${{ steps.version.outputs.rdmpversion }}.nupkg -skipDuplicate -Source https://api.nuget.org/v3/index.json -ApiKey ${{ secrets.NUGET_KEY }} - nuget push HIC.RDMP.Plugin.Test.${{ steps.version.outputs.rdmpversion }}.nupkg -skipDuplicate -Source https://api.nuget.org/v3/index.json -ApiKey ${{ secrets.NUGET_KEY }} + run: dotnet nuget push HIC.RDMP.Plugin*${{ steps.version.outputs.rdmpversion }}.nupkg -s https://api.nuget.org/v3/index.json --skip-duplicate -k ${{ secrets.NUGET_KEY }} - name: Calculate SHA256SUMS run: '&{foreach ($i in Get-ChildItem dist -Exclude *SUMS|Get-FileHash) { echo "$($i.Hash) $(echo $i | Split-Path -Leaf)" }} > dist/SHA256SUMS' @@ -223,7 +199,7 @@ jobs: path: | dist - name: Upload binaries to release - uses: svenstaro/upload-release-action@2.3.0 + uses: svenstaro/upload-release-action@2.7.0 if: contains(github.ref, 'refs/tags/v') with: repo_token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml new file mode 100644 index 0000000000..ed58b6036a --- /dev/null +++ b/.github/workflows/codeql.yml @@ -0,0 +1,41 @@ +name: "CodeQL" + +on: + push: + branches: [ "develop" ] + pull_request: + branches: [ "develop" ] + schedule: + - cron: "16 21 * * 6" + +jobs: + analyze: + name: Analyze + runs-on: ubuntu-latest + permissions: + actions: read + contents: read + security-events: write + + strategy: + fail-fast: false + matrix: + language: [ csharp ] + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Initialize CodeQL + uses: github/codeql-action/init@v2 + with: + languages: ${{ matrix.language }} + queries: +security-and-quality + + - name: Build + run: dotnet build + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v2 + with: + category: "/language:${{ matrix.language }}" diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 9ea94fb7b1..94e1036058 100644 --- 
a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -13,11 +13,11 @@ jobs: packages: write steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: submodules: true - name: Setup .NET Core - uses: actions/setup-dotnet@v3.0.3 + uses: actions/setup-dotnet@v3.2.0 with: dotnet-version: 6.0.x - name: Cache Nuget diff --git a/.github/workflows/links.yml b/.github/workflows/links.yml index 7db0bb4767..e149d9ac23 100644 --- a/.github/workflows/links.yml +++ b/.github/workflows/links.yml @@ -17,7 +17,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: submodules: recursive - name: Run link check diff --git a/.nuget/packages.config b/.nuget/packages.config index 4aed222aba..8588e1c7c2 100644 --- a/.nuget/packages.config +++ b/.nuget/packages.config @@ -1,4 +1,5 @@  + diff --git a/AnalysisReport.sarif b/AnalysisReport.sarif new file mode 100644 index 0000000000..9b10381330 --- /dev/null +++ b/AnalysisReport.sarif @@ -0,0 +1,500 @@ +{ + "$schema": "https://schemastore.azurewebsites.net/schemas/json/sarif-2.1.0-rtm.5.json", + "version": "2.1.0", + "runs": [ + { + "tool": { + "driver": { + "name": "Dependency Analysis", + "semanticVersion": "0.4.410601", + "informationUri": "https://docs.microsoft.com/en-us/dotnet/core/porting/upgrade-assistant-overview", + "rules": [ + { + "id": "UA106", + "name": "PackageToBeAdded", + "fullDescription": { + "text": "Packages that need to be added in order to upgrade the project to chosen TFM" + }, + "helpUri": "https://docs.microsoft.com/en-us/dotnet/core/porting/upgrade-assistant-overview" + }, + { + "id": "UA105", + "name": "PackageToBeDeleted", + "fullDescription": { + "text": "Packages that need to be deleted in order to upgrade the project to chosen TFM" + }, + "helpUri": "https://docs.microsoft.com/en-us/dotnet/core/porting/upgrade-assistant-overview" + } + ] + } + }, + "results": [ + { + "ruleId": "UA106", + "level": "note", + "message": { + "text": "Package Microsoft.DotNet.UpgradeAssistant.Extensions.Default.Analyzers, Version=0.4.410601 needs to be added." + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "file:///C:/Users/AzureUser_JS/Documents/RDMP/Reusable/ReusableLibraryCode/ReusableLibraryCode.csproj" + }, + "region": {} + } + } + ] + }, + { + "ruleId": "UA106", + "level": "note", + "message": { + "text": "Package Microsoft.DotNet.UpgradeAssistant.Extensions.Default.Analyzers, Version=0.4.410601 needs to be added." + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "file:///C:/Users/AzureUser_JS/Documents/RDMP/Tests.Common/Tests.Common.csproj" + }, + "region": {} + } + } + ] + }, + { + "ruleId": "UA106", + "level": "note", + "message": { + "text": "Package Microsoft.DotNet.UpgradeAssistant.Extensions.Default.Analyzers, Version=0.4.410601 needs to be added." + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "file:///C:/Users/AzureUser_JS/Documents/RDMP/Reusable/MapsDirectlyToDatabaseTable/MapsDirectlyToDatabaseTable.csproj" + }, + "region": {} + } + } + ] + }, + { + "ruleId": "UA106", + "level": "note", + "message": { + "text": "Package Microsoft.DotNet.UpgradeAssistant.Extensions.Default.Analyzers, Version=0.4.410601 needs to be added." 
+ }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "file:///C:/Users/AzureUser_JS/Documents/RDMP/Reusable/Tests/ReusableCodeTests/ReusableCodeTests.csproj" + }, + "region": {} + } + } + ] + }, + { + "ruleId": "UA106", + "level": "note", + "message": { + "text": "Package Microsoft.DotNet.UpgradeAssistant.Extensions.Default.Analyzers, Version=0.4.410601 needs to be added." + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "file:///C:/Users/AzureUser_JS/Documents/RDMP/Application/ResearchDataManagementPlatform/ResearchDataManagementPlatform.csproj" + }, + "region": {} + } + } + ] + }, + { + "ruleId": "UA106", + "level": "note", + "message": { + "text": "Package Microsoft.Windows.Compatibility, Version=7.0.0 needs to be added." + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "file:///C:/Users/AzureUser_JS/Documents/RDMP/Application/ResearchDataManagementPlatform/ResearchDataManagementPlatform.csproj" + }, + "region": {} + } + } + ] + }, + { + "ruleId": "UA106", + "level": "note", + "message": { + "text": "Package Microsoft.DotNet.UpgradeAssistant.Extensions.Default.Analyzers, Version=0.4.410601 needs to be added." + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "file:///C:/Users/AzureUser_JS/Documents/RDMP/Tools/rdmp/rdmp.csproj" + }, + "region": {} + } + } + ] + }, + { + "ruleId": "UA105", + "level": "note", + "message": { + "text": "Package SixLabors.ImageSharp needs to be removed as its a transitive dependency that is not required" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "file:///C:/Users/AzureUser_JS/Documents/RDMP/Rdmp.Core/Rdmp.Core.csproj" + }, + "region": {} + } + } + ] + }, + { + "ruleId": "UA105", + "level": "note", + "message": { + "text": "Package System.IO.FileSystem.Primitives, Version=4.3.0 needs to be deleted." + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "file:///C:/Users/AzureUser_JS/Documents/RDMP/Rdmp.Core/Rdmp.Core.csproj" + }, + "region": {} + } + } + ] + }, + { + "ruleId": "UA105", + "level": "note", + "message": { + "text": "Package System.Threading, Version=4.3.0 needs to be deleted." + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "file:///C:/Users/AzureUser_JS/Documents/RDMP/Rdmp.Core/Rdmp.Core.csproj" + }, + "region": {} + } + } + ] + }, + { + "ruleId": "UA105", + "level": "note", + "message": { + "text": "Package System.Globalization, Version=4.3.0 needs to be deleted." + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "file:///C:/Users/AzureUser_JS/Documents/RDMP/Rdmp.Core/Rdmp.Core.csproj" + }, + "region": {} + } + } + ] + }, + { + "ruleId": "UA106", + "level": "note", + "message": { + "text": "Package Microsoft.DotNet.UpgradeAssistant.Extensions.Default.Analyzers, Version=0.4.410601 needs to be added." + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "file:///C:/Users/AzureUser_JS/Documents/RDMP/Rdmp.Core/Rdmp.Core.csproj" + }, + "region": {} + } + } + ] + }, + { + "ruleId": "UA106", + "level": "note", + "message": { + "text": "Package Microsoft.DotNet.UpgradeAssistant.Extensions.Default.Analyzers, Version=0.4.410601 needs to be added." 
+ }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "file:///C:/Users/AzureUser_JS/Documents/RDMP/Rdmp.Core.Tests/Rdmp.Core.Tests.csproj" + }, + "region": {} + } + } + ] + }, + { + "ruleId": "UA105", + "level": "note", + "message": { + "text": "Package System.Security.Permissions needs to be removed as its a transitive dependency that is not required" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "file:///C:/Users/AzureUser_JS/Documents/RDMP/Rdmp.UI/Rdmp.UI.csproj" + }, + "region": {} + } + } + ] + }, + { + "ruleId": "UA106", + "level": "note", + "message": { + "text": "Package Microsoft.DotNet.UpgradeAssistant.Extensions.Default.Analyzers, Version=0.4.410601 needs to be added." + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "file:///C:/Users/AzureUser_JS/Documents/RDMP/Rdmp.UI/Rdmp.UI.csproj" + }, + "region": {} + } + } + ] + }, + { + "ruleId": "UA106", + "level": "note", + "message": { + "text": "Package Microsoft.Windows.Compatibility, Version=7.0.0 needs to be added." + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "file:///C:/Users/AzureUser_JS/Documents/RDMP/Rdmp.UI/Rdmp.UI.csproj" + }, + "region": {} + } + } + ] + }, + { + "ruleId": "UA106", + "level": "note", + "message": { + "text": "Package Microsoft.DotNet.UpgradeAssistant.Extensions.Default.Analyzers, Version=0.4.410601 needs to be added." + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "file:///C:/Users/AzureUser_JS/Documents/RDMP/Rdmp.UI.Tests/Rdmp.UI.Tests.csproj" + }, + "region": {} + } + } + ] + }, + { + "ruleId": "UA106", + "level": "note", + "message": { + "text": "Package Microsoft.Windows.Compatibility, Version=7.0.0 needs to be added." + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "file:///C:/Users/AzureUser_JS/Documents/RDMP/Rdmp.UI.Tests/Rdmp.UI.Tests.csproj" + }, + "region": {} + } + } + ] + }, + { + "ruleId": "UA106", + "level": "note", + "message": { + "text": "Package Microsoft.DotNet.UpgradeAssistant.Extensions.Default.Analyzers, Version=0.4.410601 needs to be added." + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "file:///C:/Users/AzureUser_JS/Documents/RDMP/Plugins/Plugin/Plugin.csproj" + }, + "region": {} + } + } + ] + }, + { + "ruleId": "UA106", + "level": "note", + "message": { + "text": "Package Microsoft.DotNet.UpgradeAssistant.Extensions.Default.Analyzers, Version=0.4.410601 needs to be added." + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "file:///C:/Users/AzureUser_JS/Documents/RDMP/Plugins/Plugin.UI/Plugin.UI.csproj" + }, + "region": {} + } + } + ] + }, + { + "ruleId": "UA106", + "level": "note", + "message": { + "text": "Package Microsoft.Windows.Compatibility, Version=7.0.0 needs to be added." + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "file:///C:/Users/AzureUser_JS/Documents/RDMP/Plugins/Plugin.UI/Plugin.UI.csproj" + }, + "region": {} + } + } + ] + }, + { + "ruleId": "UA106", + "level": "note", + "message": { + "text": "Package Microsoft.DotNet.UpgradeAssistant.Extensions.Default.Analyzers, Version=0.4.410601 needs to be added." 
+ }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "file:///C:/Users/AzureUser_JS/Documents/RDMP/Plugins/Plugin.Test/Plugin.Test.csproj" + }, + "region": {} + } + } + ] + } + ], + "columnKind": "utf16CodeUnits" + }, + { + "tool": { + "driver": { + "name": "API Upgradability", + "semanticVersion": "0.4.410601", + "informationUri": "https://docs.microsoft.com/en-us/dotnet/core/porting/upgrade-assistant-overview" + } + }, + "results": [], + "columnKind": "utf16CodeUnits" + }, + { + "tool": { + "driver": { + "name": "Component Analysis", + "semanticVersion": "0.4.410601", + "informationUri": "https://docs.microsoft.com/en-us/dotnet/core/porting/upgrade-assistant-overview", + "rules": [ + { + "id": "UA209", + "name": "Microsoft.DotNet.UpgradeAssistant.Extensions.Windows.WinformsDefaultFontUpdater", + "fullDescription": { + "text": "Default Font API Alert" + } + }, + { + "id": "UA202", + "name": "Microsoft.DotNet.UpgradeAssistant.Extensions.Windows.WinformsDpiSettingUpdater", + "fullDescription": { + "text": "Winforms Source Updater" + } + } + ] + } + }, + "results": [ + { + "ruleId": "UA209", + "level": "note", + "message": { + "text": "Default font in Windows Forms has been changed from Microsoft Sans Serif to Seg Segoe UI, in order to change the default font use the API - Application.SetDefaultFont(Font font). For more details see here - https://devblogs.microsoft.com/dotnet/whats-new-in-windows-forms-in-net-6-0-preview-5/#application-wide-default-font." + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "file:///C:/Users/AzureUser_JS/Documents/RDMP/Application/ResearchDataManagementPlatform/ResearchDataManagementPlatform.csproj" + }, + "region": {} + } + } + ] + }, + { + "ruleId": "UA209", + "level": "note", + "message": { + "text": "Default font in Windows Forms has been changed from Microsoft Sans Serif to Seg Segoe UI, in order to change the default font use the API - Application.SetDefaultFont(Font font). For more details see here - https://devblogs.microsoft.com/dotnet/whats-new-in-windows-forms-in-net-6-0-preview-5/#application-wide-default-font." + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "file:///C:/Users/AzureUser_JS/Documents/RDMP/Rdmp.UI/Rdmp.UI.csproj" + }, + "region": {} + } + } + ] + }, + { + "ruleId": "UA202", + "level": "note", + "message": { + "text": "HighDpiMode needs to set in Main() instead of app.config or app.manifest - Application.SetHighDpiMode(HighDpiMode.). It is recommended to use SystemAware as the HighDpiMode option for better results." + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "file:///C:/Users/AzureUser_JS/Documents/RDMP/Application/ResearchDataManagementPlatform/Program.cs" + }, + "region": {} + } + } + ] + } + ], + "columnKind": "utf16CodeUnits" + } + ] +} \ No newline at end of file diff --git a/Application/ResearchDataManagementPlatform/Menus/MenuItems/DataExportMenu.cs b/Application/ResearchDataManagementPlatform/Menus/MenuItems/DataExportMenu.cs index a49c0db65d..77c2334595 100644 --- a/Application/ResearchDataManagementPlatform/Menus/MenuItems/DataExportMenu.cs +++ b/Application/ResearchDataManagementPlatform/Menus/MenuItems/DataExportMenu.cs @@ -1,4 +1,4 @@ -// Copyright (c) The University of Dundee 2018-2019 +// Copyright (c) The University of Dundee 2018-2023 // This file is part of the Research Data Management Platform (RDMP). 
// RDMP is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. // RDMP is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. @@ -10,29 +10,27 @@ using Rdmp.UI.Menus.MenuItems; using Rdmp.UI.SimpleDialogs; -namespace ResearchDataManagementPlatform.Menus.MenuItems +namespace ResearchDataManagementPlatform.Menus.MenuItems; + +internal sealed class DataExportMenu : RDMPToolStripMenuItem { - internal class DataExportMenu : RDMPToolStripMenuItem + public DataExportMenu(IActivateItems activator) : base(activator, "Data Export Options") { - public DataExportMenu(IActivateItems activator):base(activator,"Data Export Options") - { - - Enabled = _activator.RepositoryLocator.DataExportRepository != null; + Enabled = _activator.RepositoryLocator.DataExportRepository != null; - DropDownItems.Add(new ToolStripMenuItem("Configure Disclaimer", null, ConfigureDisclaimer)); - DropDownItems.Add(new ToolStripMenuItem("Configure Hashing Algorithm", null, ConfigureHashingAlgorithm)); - } + DropDownItems.Add(new ToolStripMenuItem("Configure Disclaimer", null, ConfigureDisclaimer)); + DropDownItems.Add(new ToolStripMenuItem("Configure Hashing Algorithm", null, ConfigureHashingAlgorithm)); + } - private void ConfigureHashingAlgorithm(object sender, EventArgs e) - { - var hash = new ConfigureHashingAlgorithmUI(_activator); - hash.ShowDialog(); - } + private void ConfigureHashingAlgorithm(object sender, EventArgs e) + { + var hash = new ConfigureHashingAlgorithmUI(_activator); + hash.ShowDialog(); + } - private void ConfigureDisclaimer(object sender, EventArgs e) - { - var disclaimer = new ConfigureDisclaimerUI(_activator); - disclaimer.ShowDialog(); - } + private void ConfigureDisclaimer(object sender, EventArgs e) + { + var disclaimer = new ConfigureDisclaimerUI(_activator); + disclaimer.ShowDialog(); } } \ No newline at end of file diff --git a/Application/ResearchDataManagementPlatform/Menus/MenuItems/DisableTutorialsMenuItem.cs b/Application/ResearchDataManagementPlatform/Menus/MenuItems/DisableTutorialsMenuItem.cs index 7b773b0e31..da8491f3b8 100644 --- a/Application/ResearchDataManagementPlatform/Menus/MenuItems/DisableTutorialsMenuItem.cs +++ b/Application/ResearchDataManagementPlatform/Menus/MenuItems/DisableTutorialsMenuItem.cs @@ -6,35 +6,34 @@ using System; using System.Windows.Forms; +using Rdmp.Core.ReusableLibraryCode.Settings; using Rdmp.UI.Tutorials; -using ReusableLibraryCode.Settings; -namespace ResearchDataManagementPlatform.Menus.MenuItems +namespace ResearchDataManagementPlatform.Menus.MenuItems; + +/// +/// Disables displaying Tutorials in RDMP +/// +[System.ComponentModel.DesignerCategory("")] +public sealed class DisableTutorialsMenuItem : ToolStripMenuItem { - /// - /// Disables displaying Tutorials in RDMP - /// - [System.ComponentModel.DesignerCategory("")] - public class DisableTutorialsMenuItem : ToolStripMenuItem - { - private readonly TutorialTracker _tracker; + private readonly TutorialTracker _tracker; - public DisableTutorialsMenuItem(ToolStripMenuItem parent, TutorialTracker tracker) - { - parent.DropDownOpened += parent_DropDownOpened; - _tracker = tracker; - Text = "Disable Tutorials"; - } + public DisableTutorialsMenuItem(ToolStripMenuItem parent, 
TutorialTracker tracker) + { + parent.DropDownOpened += Parent_DropDownOpened; + _tracker = tracker; + Text = "Disable Tutorials"; + } - void parent_DropDownOpened(object sender, EventArgs e) - { - Checked = UserSettings.DisableTutorials; - } + private void Parent_DropDownOpened(object sender, EventArgs e) + { + Checked = UserSettings.DisableTutorials; + } - protected override void OnClick(EventArgs e) - { - base.OnClick(e); - _tracker.DisableAllTutorials(); - } + protected override void OnClick(EventArgs e) + { + base.OnClick(e); + TutorialTracker.DisableAllTutorials(); } } \ No newline at end of file diff --git a/Application/ResearchDataManagementPlatform/Menus/MenuItems/LaunchTutorialMenuItem.cs b/Application/ResearchDataManagementPlatform/Menus/MenuItems/LaunchTutorialMenuItem.cs index f4e70427cc..f82cad5cb5 100644 --- a/Application/ResearchDataManagementPlatform/Menus/MenuItems/LaunchTutorialMenuItem.cs +++ b/Application/ResearchDataManagementPlatform/Menus/MenuItems/LaunchTutorialMenuItem.cs @@ -9,47 +9,45 @@ using Rdmp.UI.ItemActivation; using Rdmp.UI.Tutorials; -namespace ResearchDataManagementPlatform.Menus.MenuItems +namespace ResearchDataManagementPlatform.Menus.MenuItems; + +/// +/// Launches the given Tutorial, Tutorials which the user has already been exposed to will be marked (Seen) +/// +[System.ComponentModel.DesignerCategory("")] +public class LaunchTutorialMenuItem : ToolStripMenuItem { - /// - /// Launches the given Tutorial, Tutorials which the user has already been exposed to will be marked (Seen) - /// - [System.ComponentModel.DesignerCategory("")] - public class LaunchTutorialMenuItem : ToolStripMenuItem + private readonly Tutorial _tutorial; + private readonly TutorialTracker _tracker; + + public LaunchTutorialMenuItem(ToolStripMenuItem parent, IActivateItems activator, Tutorial tutorial, + TutorialTracker tracker) + { + parent.DropDownOpening += parent_DropDownOpening; + _tutorial = tutorial; + _tracker = tracker; + + UpdateText(); + } + + private void parent_DropDownOpening(object sender, EventArgs e) + { + UpdateText(); + } + + private void UpdateText() { - private IActivateItems _activator; - private readonly Tutorial _tutorial; - private readonly TutorialTracker _tracker; - - public LaunchTutorialMenuItem(ToolStripMenuItem parent,IActivateItems activator, Tutorial tutorial, TutorialTracker tracker) - { - parent.DropDownOpening += parent_DropDownOpening; - _activator = activator; - _tutorial = tutorial; - _tracker = tracker; - - UpdateText(); - } - - void parent_DropDownOpening(object sender, EventArgs e) - { - UpdateText(); - } - - private void UpdateText() - { - Text = _tutorial.Name; - - if (_tracker.HasSeen(_tutorial)) - Text += " (Seen)"; - } - - protected override void OnClick(EventArgs e) - { - base.OnClick(e); - - _tracker.ClearCompleted(_tutorial); - _tracker.LaunchTutorial(_tutorial); - } + Text = _tutorial.Name; + + if (TutorialTracker.HasSeen(_tutorial)) + Text += " (Seen)"; + } + + protected override void OnClick(EventArgs e) + { + base.OnClick(e); + + TutorialTracker.ClearCompleted(_tutorial); + TutorialTracker.LaunchTutorial(_tutorial); } } \ No newline at end of file diff --git a/Application/ResearchDataManagementPlatform/Menus/MenuItems/ResetTutorialsMenuItem.cs b/Application/ResearchDataManagementPlatform/Menus/MenuItems/ResetTutorialsMenuItem.cs index 9f4503a800..2ccaf29edc 100644 --- a/Application/ResearchDataManagementPlatform/Menus/MenuItems/ResetTutorialsMenuItem.cs +++ 
b/Application/ResearchDataManagementPlatform/Menus/MenuItems/ResetTutorialsMenuItem.cs @@ -8,35 +8,32 @@ using System.Windows.Forms; using Rdmp.UI.Tutorials; -namespace ResearchDataManagementPlatform.Menus.MenuItems -{ - /// - /// Clears all user progress on Tutorials - /// - [System.ComponentModel.DesignerCategory("")] - public class ResetTutorialsMenuItem : ToolStripMenuItem - { - private readonly TutorialTracker _tracker; - - public ResetTutorialsMenuItem(ToolStripMenuItem parent, TutorialTracker tracker) - { - _tracker = tracker; - Text = "Reset Tutorials"; - parent.DropDownOpening += parent_DropDownOpening; - } +namespace ResearchDataManagementPlatform.Menus.MenuItems; - protected override void OnClick(EventArgs e) - { - base.OnClick(e); +/// +/// Clears all user progress on Tutorials +/// +[System.ComponentModel.DesignerCategory("")] +public sealed class ResetTutorialsMenuItem : ToolStripMenuItem +{ + private readonly TutorialTracker _tracker; - _tracker.ClearCompleted(); - } + public ResetTutorialsMenuItem(ToolStripMenuItem parent, TutorialTracker tracker) + { + _tracker = tracker; + Text = "Reset Tutorials"; + parent.DropDownOpening += parent_DropDownOpening; + } - void parent_DropDownOpening(object sender, EventArgs e) - { - Enabled = _tracker.IsClearable(); - } + protected override void OnClick(EventArgs e) + { + base.OnClick(e); + _tracker.ClearCompleted(); + } + private void parent_DropDownOpening(object sender, EventArgs e) + { + Enabled = _tracker.IsClearable(); } } \ No newline at end of file diff --git a/Application/ResearchDataManagementPlatform/Menus/RDMPTopMenuStripUI.cs b/Application/ResearchDataManagementPlatform/Menus/RDMPTopMenuStripUI.cs index a81299b3ce..34b5eac8ca 100644 --- a/Application/ResearchDataManagementPlatform/Menus/RDMPTopMenuStripUI.cs +++ b/Application/ResearchDataManagementPlatform/Menus/RDMPTopMenuStripUI.cs @@ -10,7 +10,6 @@ using System.Linq; using System.Windows.Forms; using AutoUpdaterDotNET; -using MapsDirectlyToDatabaseTable; using Rdmp.Core.CommandExecution; using Rdmp.Core.CommandExecution.AtomicCommands; using Rdmp.Core.CommandExecution.AtomicCommands.CatalogueCreationCommands; @@ -19,9 +18,11 @@ using Rdmp.Core.Curation.Data.Cohort; using Rdmp.Core.Databases; using Rdmp.Core.DataQualityEngine; -using Rdmp.Core.DataViewing; using Rdmp.Core.Logging; using Rdmp.Core.Reports; +using Rdmp.Core.ReusableLibraryCode; +using Rdmp.Core.ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Settings; using Rdmp.UI; using Rdmp.UI.ChecksUI; using Rdmp.UI.CommandExecution.AtomicCommands; @@ -34,7 +35,6 @@ using Rdmp.UI.SimpleControls; using Rdmp.UI.SimpleDialogs; using Rdmp.UI.SimpleDialogs.NavigateTo; -using Rdmp.UI.SingleControlForms; using Rdmp.UI.TestsAndSetup; using Rdmp.UI.TestsAndSetup.ServicePropogation; using Rdmp.UI.Tutorials; @@ -42,554 +42,543 @@ using ResearchDataManagementPlatform.WindowManagement; using ResearchDataManagementPlatform.WindowManagement.ContentWindowTracking.Persistence; using ResearchDataManagementPlatform.WindowManagement.Licenses; -using ReusableLibraryCode; -using ReusableLibraryCode.Checks; -using ReusableLibraryCode.Settings; using WeifenLuo.WinFormsUI.Docking; -namespace ResearchDataManagementPlatform.Menus +namespace ResearchDataManagementPlatform.Menus; + +/// +/// The Top menu of the RDMP lets you do most tasks that do not relate directly to a single object (most single object tasks are accessed by right clicking the object). 
+/// +/// Locations: +/// - Change which DataCatalogue database you are pointed at (not usually needed unless you have two different databases e.g. a Test database and a Live database) +/// - Setup Logging / Anonymisation / Query Caching / Data Quality Engine databases +/// - Configure a Ticketing system e.g. Jira for tracking time against tickets (you can set a ticket identifier for datasets, project extractions etc) +/// - Perform bulk renaming operations across your entire catalogue database (useful for when someone remaps your server drives to a new letter! e.g. 'D:\Datasets\Private\' becomes 'E:\') +/// - Refresh the window by reloading all Catalogues/TableInfos etc +/// +/// View: +/// - View/Edit dataset loading logic +/// - View/Edit the governance approvals your datasets have (including attachments, period covered, datasets included in approval etc) +/// - View the Logging database contents (a relational view of all activities undertaken by all Data Analysts using the RDMP - loading, extractions, dqe runs etc). +/// +/// Reports +/// - Generate a variety of reports that summarise the state of your datasets / governance etc +/// +/// Help +/// - View the user manual +/// - View a technical description of each of the core objects maintained by RDMP (Catalogues, TableInfos etc) and what they mean (intended for programmers) +/// +public partial class RDMPTopMenuStripUI : RDMPUserControl { + private WindowManager _windowManager; - /// - /// The Top menu of the RDMP lets you do most tasks that do not relate directly to a single object (most single object tasks are accessed by right clicking the object). - /// - /// Locations: - /// - Change which DataCatalogue database you are pointed at (not usually needed unless you have two different databases e.g. a Test database and a Live database) - /// - Setup Logging / Anonymisation / Query Caching / Data Quality Engine databases - /// - Configure a Ticketing system e.g. Jira for tracking time against tickets (you can set a ticket identifier for datasets, project extractions etc) - /// - Perform bulk renaming operations across your entire catalogue database (useful for when someone remaps your server drives to a new letter! e.g. 'D:\Datasets\Private\' becomes 'E:\') - /// - Refresh the window by reloading all Catalogues/TableInfos etc - /// - /// View: - /// - View/Edit dataset loading logic - /// - View/Edit the governance approvals your datasets have (including attachments, period covered, datasets included in approval etc) - /// - View the Logging database contents (a relational view of all activities undertaken by all Data Analysts using the RDMP - loading, extractions, dqe runs etc). 
- /// - /// Reports - /// - Generate a variety of reports that summarise the state of your datasets / governance etc - /// - /// Help - /// - View the user manual - /// - View a technical description of each of the core objects maintained by RDMP (Catalogues, TableInfos etc) and what they mean (intended for programmers) - /// + private SaveMenuItem _saveToolStripMenuItem; + private AtomicCommandUIFactory _atomicCommandUIFactory; - public partial class RDMPTopMenuStripUI : RDMPUserControl + public RDMPTopMenuStripUI() { - private WindowManager _windowManager; - - private SaveMenuItem _saveToolStripMenuItem; - private AtomicCommandUIFactory _atomicCommandUIFactory; + InitializeComponent(); + } - public RDMPTopMenuStripUI() - { - InitializeComponent(); - } + private void BuildSwitchInstanceMenuItems() + { + var args = RDMPBootStrapper.ApplicationArguments; - private void BuildSwitchInstanceMenuItems() - { - var args = RDMPBootStrapper.ApplicationArguments; + // somehow app was launched without populating the load args + if (args == null) return; - // somehow app was launched without populating the load args - if (args == null) - { - return; - } + var origYamlFile = args.ConnectionStringsFileLoaded; - var origYamlFile = args.ConnectionStringsFileLoaded; + //default settings were used if no yaml file was specified or the file specified did not exist + var defaultsUsed = origYamlFile == null; - //default settings were used if no yaml file was specified or the file specified did not exist - var defaultsUsed = origYamlFile == null; + // if defaults were not used then it is valid to switch to them + switchToDefaultSettings.Enabled = !defaultsUsed; - // if defaults were not used then it is valid to switch to them - switchToDefaultSettings.Enabled = !defaultsUsed; + switchToDefaultSettings.Checked = defaultsUsed; + launchNewWithDefaultSettings.Checked = defaultsUsed; - switchToDefaultSettings.Checked = defaultsUsed; - launchNewWithDefaultSettings.Checked = defaultsUsed; + // load the yaml files in the RDMP binary directory + var exeDir = UsefulStuff.GetExecutableDirectory(); + AddMenuItemsForSwitchingToInstancesInYamlFilesOf(origYamlFile, exeDir); - // load the yaml files in the RDMP binary directory - var exeDir = UsefulStuff.GetExecutableDirectory(); - AddMenuItemsForSwitchingToInstancesInYamlFilesOf(origYamlFile, exeDir); + // also add yaml files from wherever they got their original yaml file + if (origYamlFile?.FileLoaded != null && !exeDir.FullName.Equals(origYamlFile.FileLoaded.Directory?.FullName)) + AddMenuItemsForSwitchingToInstancesInYamlFilesOf(origYamlFile, origYamlFile.FileLoaded.Directory); + } - // also add yaml files from wherever they got their original yaml file - if (origYamlFile?.FileLoaded != null && !exeDir.FullName.Equals(origYamlFile.FileLoaded.Directory.FullName)) - { - AddMenuItemsForSwitchingToInstancesInYamlFilesOf(origYamlFile, origYamlFile.FileLoaded.Directory); - } + private void AddMenuItemsForSwitchingToInstancesInYamlFilesOf(ConnectionStringsYamlFile origYamlFile, + DirectoryInfo dir) + { + foreach (var yaml in dir.GetFiles("*.yaml")) + { + // if the yaml file is invalid bail out early + if (!ConnectionStringsYamlFile.TryLoadFrom(yaml, out var connectionStrings)) + continue; - } + var isSameAsCurrent = origYamlFile?.FileLoaded != null && + yaml.FullName.Equals(origYamlFile.FileLoaded.FullName); - private void AddMenuItemsForSwitchingToInstancesInYamlFilesOf(ConnectionStringsYamlFile origYamlFile, DirectoryInfo dir) - { - foreach (var yaml in dir.GetFiles("*.yaml")) 
+ var launchNew = new ToolStripMenuItem(connectionStrings.Name ?? yaml.Name, null, + (_, _) => { LaunchNew(connectionStrings); }) { - // if the yaml file is invalid bail out early - if (!ConnectionStringsYamlFile.TryLoadFrom(yaml, out var connectionStrings)) - continue; + Checked = isSameAsCurrent, + ToolTipText = connectionStrings.Description ?? yaml.FullName + }; - bool isSameAsCurrent = origYamlFile?.FileLoaded == null ? false : yaml.FullName.Equals(origYamlFile.FileLoaded.FullName); + var switchTo = new ToolStripMenuItem(connectionStrings.Name ?? yaml.Name, null, + (_, _) => { SwitchTo(connectionStrings); }) + { + Enabled = !isSameAsCurrent, + Checked = isSameAsCurrent, + ToolTipText = connectionStrings.Description ?? yaml.FullName + }; - var launchNew = new ToolStripMenuItem(connectionStrings.Name ?? yaml.Name, null, (s, e) => { LaunchNew(connectionStrings); }) - { - Checked = isSameAsCurrent, - ToolTipText = connectionStrings.Description ?? yaml.FullName - }; + launchAnotherInstanceToolStripMenuItem.DropDownItems.Add(launchNew); + switchToInstanceToolStripMenuItem.DropDownItems.Add(switchTo); + } + } - var switchTo = new ToolStripMenuItem(connectionStrings.Name ?? yaml.Name, null, (s, e) => { SwitchTo(connectionStrings); }) - { - Enabled = !isSameAsCurrent, - Checked = isSameAsCurrent, - ToolTipText = connectionStrings.Description ?? yaml.FullName - }; + private static void SwitchTo(ConnectionStringsYamlFile yaml) + { + LaunchNew(yaml); - launchAnotherInstanceToolStripMenuItem.DropDownItems.Add(launchNew); - switchToInstanceToolStripMenuItem.DropDownItems.Add(switchTo); + Application.Exit(); + } - } - } + private static void LaunchNew(ConnectionStringsYamlFile yaml) + { + var exeName = Path.Combine(UsefulStuff.GetExecutableDirectory().FullName, + Process.GetCurrentProcess().ProcessName); + if (yaml == null) + Process.Start(exeName); + else + Process.Start(exeName, + $"--{nameof(RDMPCommandLineOptions.ConnectionStringsFile)} \"{yaml.FileLoaded.FullName}\""); + } - private void SwitchTo(ConnectionStringsYamlFile yaml) - { - LaunchNew(yaml); + private void configureExternalServersToolStripMenuItem_Click(object sender, EventArgs e) + { + new ExecuteCommandConfigureDefaultServers(Activator).Execute(); + } - Application.Exit(); - } + private void setTicketingSystemToolStripMenuItem_Click(object sender, EventArgs e) + { + var ui = new TicketingSystemConfigurationUI(); + Activator.ShowWindow(ui, true); + } - private void LaunchNew(ConnectionStringsYamlFile yaml) - { - var exeName = Path.Combine(UsefulStuff.GetExecutableDirectory().FullName, Process.GetCurrentProcess().ProcessName); - if(yaml == null) - { - Process.Start(exeName); - } - else - { - Process.Start(exeName, $"--{nameof(RDMPCommandLineOptions.ConnectionStringsFile)} \"{yaml.FileLoaded.FullName}\""); - } - } - private void configureExternalServersToolStripMenuItem_Click(object sender, EventArgs e) - { - new ExecuteCommandConfigureDefaultServers(Activator).Execute(); - } + private void governanceReportToolStripMenuItem_Click(object sender, EventArgs e) + { + var generator = new GovernanceReport(new DatasetTimespanCalculator(), + Activator.RepositoryLocator.CatalogueRepository); + generator.GenerateReport(); + } - private void setTicketingSystemToolStripMenuItem_Click(object sender, EventArgs e) - { - TicketingSystemConfigurationUI ui = new TicketingSystemConfigurationUI(); - Activator.ShowWindow(ui, true); - } + private void logViewerToolStripMenuItem_Click(object sender, EventArgs e) + { + var cmd = new 
ExecuteCommandViewLogs(Activator, new LogViewerFilter(LoggingTables.DataLoadTask)); + cmd.Execute(); + } - private void governanceReportToolStripMenuItem_Click(object sender, EventArgs e) - { - var generator = new GovernanceReport(new DatasetTimespanCalculator(), Activator.RepositoryLocator.CatalogueRepository); - generator.GenerateReport(); - } - private void logViewerToolStripMenuItem_Click(object sender, EventArgs e) - { - var cmd = new ExecuteCommandViewLogs(Activator, new LogViewerFilter(LoggingTables.DataLoadTask)); - cmd.Execute(); - } - + private void metadataReportToolStripMenuItem_Click(object sender, EventArgs e) + { + var cmd = new ExecuteCommandGenerateMetadataReport(Activator); + cmd.Execute(); + } - private void metadataReportToolStripMenuItem_Click(object sender, EventArgs e) - { - var cmd = new ExecuteCommandGenerateMetadataReport(Activator); - cmd.Execute(); - } + private void dITAExtractionToolStripMenuItem_Click(object sender, EventArgs e) + { + var f = new Form + { + Text = "DITA Extraction of Catalogue Metadata" + }; + var d = new DitaExtractorUI(); + d.SetItemActivator(Activator); + f.Width = d.Width + 10; + f.Height = d.Height + 50; + f.Controls.Add(d); + f.Show(); + } - private void dITAExtractionToolStripMenuItem_Click(object sender, EventArgs e) - { - Form f = new Form(); - f.Text = "DITA Extraction of Catalogue Metadata"; - DitaExtractorUI d = new DitaExtractorUI(); - d.SetItemActivator(Activator); - f.Width = d.Width + 10; - f.Height = d.Height + 50; - f.Controls.Add(d); - f.Show(); - } + private void generateTestDataToolStripMenuItem_Click(object sender, EventArgs e) + { + new ExecuteCommandGenerateTestDataUI(Activator).Execute(); + } - private void generateTestDataToolStripMenuItem_Click(object sender, EventArgs e) - { - new ExecuteCommandGenerateTestDataUI(Activator).Execute(); - } - - private void showPerformanceCounterToolStripMenuItem_Click(object sender, EventArgs e) - { - new PerformanceCounterUI().Show(); - } + private void showPerformanceCounterToolStripMenuItem_Click(object sender, EventArgs e) + { + new PerformanceCounterUI().Show(); + } - private void openExeDirectoryToolStripMenuItem_Click(object sender, EventArgs e) + private void openExeDirectoryToolStripMenuItem_Click(object sender, EventArgs e) + { + try { - - try - { - UsefulStuff.GetInstance().ShowFolderInWindowsExplorer(UsefulStuff.GetExecutableDirectory()); - } - catch (Exception exception) - { - ExceptionViewer.Show(exception); - } + UsefulStuff.ShowPathInWindowsExplorer(UsefulStuff.GetExecutableDirectory()); } - - private void userManualToolStripMenuItem_Click(object sender, EventArgs e) + catch (Exception exception) { - try - { - UsefulStuff.OpenUrl("https://github.com/HicServices/RDMP#research-data-management-platform"); - } - catch (Exception exception) - { - ExceptionViewer.Show(exception); - } + ExceptionViewer.Show(exception); } + } - private void generateClassTableSummaryToolStripMenuItem_Click(object sender, EventArgs e) + private void userManualToolStripMenuItem_Click(object sender, EventArgs e) + { + try { - var report = new DocumentationReportDatabaseEntities(); - report.GenerateReport(Activator.RepositoryLocator.CatalogueRepository.CommentStore, - new PopupChecksUI("Generating class summaries", false), - Activator.CoreIconProvider, - Activator.RepositoryLocator.CatalogueRepository.MEF, - true); + UsefulStuff.OpenUrl("https://github.com/HicServices/RDMP#research-data-management-platform"); } - - private void showHelpToolStripMenuItem_Click(object sender, EventArgs e) + catch 
(Exception exception) { - if(_windowManager.Navigation.Current is RDMPSingleControlTab t) - t.ShowHelp(Activator); + ExceptionViewer.Show(exception); } + } - public void SetWindowManager(WindowManager windowManager) - { - SetItemActivator(windowManager.ActivateItems); + private void generateClassTableSummaryToolStripMenuItem_Click(object sender, EventArgs e) + { + var report = new DocumentationReportDatabaseEntities(); + report.GenerateReport(Activator.RepositoryLocator.CatalogueRepository.CommentStore, + new PopupChecksUI("Generating class summaries", false), + Activator.CoreIconProvider, + true); + } - _windowManager = windowManager; - _atomicCommandUIFactory = new AtomicCommandUIFactory(Activator); - + private void showHelpToolStripMenuItem_Click(object sender, EventArgs e) + { + if (_windowManager.Navigation.Current is RDMPSingleControlTab t) + t.ShowHelp(Activator); + } - //top menu strip setup / adjustment - LocationsMenu.DropDownItems.Add(new DataExportMenu(Activator)); - _saveToolStripMenuItem = new SaveMenuItem - { - Enabled = false, - Name = "saveToolStripMenuItem", - Size = new System.Drawing.Size(214, 22) - }; - fileToolStripMenuItem.DropDownItems.Insert(3,_saveToolStripMenuItem); + public void SetWindowManager(WindowManager windowManager) + { + SetItemActivator(windowManager.ActivateItems); - _windowManager.TabChanged += WindowFactory_TabChanged; - _windowManager.Navigation.Changed += (s,e)=>UpdateForwardBackEnabled(); + _windowManager = windowManager; + _atomicCommandUIFactory = new AtomicCommandUIFactory(Activator); - var tracker = new TutorialTracker(Activator); - foreach (Tutorial t in tracker.TutorialsAvailable) - tutorialsToolStripMenuItem.DropDownItems.Add(new LaunchTutorialMenuItem(tutorialsToolStripMenuItem, Activator, t, tracker)); - tutorialsToolStripMenuItem.DropDownItems.Add(new ToolStripSeparator()); + //top menu strip setup / adjustment + LocationsMenu.DropDownItems.Add(new DataExportMenu(Activator)); + _saveToolStripMenuItem = new SaveMenuItem + { + Enabled = false, + Name = "saveToolStripMenuItem", + Size = new System.Drawing.Size(214, 22) + }; + fileToolStripMenuItem.DropDownItems.Insert(3, _saveToolStripMenuItem); - tutorialsToolStripMenuItem.DropDownItems.Add(new DisableTutorialsMenuItem(tutorialsToolStripMenuItem, tracker)); - tutorialsToolStripMenuItem.DropDownItems.Add(new ResetTutorialsMenuItem(tutorialsToolStripMenuItem, tracker)); + _windowManager.TabChanged += WindowFactory_TabChanged; + _windowManager.Navigation.Changed += (s, e) => UpdateForwardBackEnabled(); - closeToolStripMenuItem.Enabled = false; + var tracker = new TutorialTracker(Activator); + foreach (var t in tracker.TutorialsAvailable) + tutorialsToolStripMenuItem.DropDownItems.Add(new LaunchTutorialMenuItem(tutorialsToolStripMenuItem, + Activator, t, tracker)); - rdmpTaskBar1.SetWindowManager(_windowManager); + tutorialsToolStripMenuItem.DropDownItems.Add(new ToolStripSeparator()); - // Location menu - instancesToolStripMenuItem.DropDownItems.Add(_atomicCommandUIFactory.CreateMenuItem( - new ExecuteCommandChoosePlatformDatabase(Activator.RepositoryLocator) { OverrideCommandName = "Change Default Instance" })); + tutorialsToolStripMenuItem.DropDownItems.Add(new DisableTutorialsMenuItem(tutorialsToolStripMenuItem, tracker)); + tutorialsToolStripMenuItem.DropDownItems.Add(new ResetTutorialsMenuItem(tutorialsToolStripMenuItem, tracker)); - Activator.Theme.ApplyTo(menuStrip1); + closeToolStripMenuItem.Enabled = false; - try - { - BuildSwitchInstanceMenuItems(); - } - catch (Exception ex) - { - 
Activator.GlobalErrorCheckNotifier.OnCheckPerformed( - new CheckEventArgs("Failed to BuildSwitchInstanceMenuItems", CheckResult.Fail, ex)); - } - - launchAnotherInstanceToolStripMenuItem.ToolTipText = "Start another copy of the RDMP process targetting the same (or another) RDMP platform database"; + rdmpTaskBar1.SetWindowManager(_windowManager); - if(switchToInstanceToolStripMenuItem.DropDownItems.Count > 1) - { - switchToInstanceToolStripMenuItem.Enabled = true; - switchToInstanceToolStripMenuItem.ToolTipText = "Close the application and start another copy of the RDMP process targetting another RDMP platform database"; - } - else - { - switchToInstanceToolStripMenuItem.Enabled = false; - switchToInstanceToolStripMenuItem.ToolTipText = "There are no other RDMP platform databases configured, create a .yaml file with connection strings to enable this feature"; - } - } + // Location menu + instancesToolStripMenuItem.DropDownItems.Add(_atomicCommandUIFactory.CreateMenuItem( + new ExecuteCommandChoosePlatformDatabase(Activator.RepositoryLocator) + { OverrideCommandName = "Change Default Instance" })); + Activator.Theme.ApplyTo(menuStrip1); - private IAtomicCommand[] GetNewCommands() + try { - //Catalogue commands - return new IAtomicCommand[] - { - new ExecuteCommandCreateNewCatalogueByImportingFileUI(Activator), - new ExecuteCommandCreateNewCatalogueByImportingExistingDataTable(Activator), - new ExecuteCommandImportTableInfo(Activator,null,false), - new ExecuteCommandCreateNewCohortIdentificationConfiguration(Activator), - new ExecuteCommandCreateNewLoadMetadata(Activator), - new ExecuteCommandCreateNewStandardRegex(Activator), - new ExecuteCommandCreateNewCohortDatabaseUsingWizard(Activator), - new ExecuteCommandCreateNewCohortByExecutingACohortIdentificationConfiguration(Activator,null), - new ExecuteCommandCreateNewCohortFromFile(Activator,null), - new ExecuteCommandCreateNewCohortFromCatalogue(Activator), - new ExecuteCommandCreateNewCohortFromTable(Activator,null), - new ExecuteCommandCreateNewExtractableDataSetPackage(Activator), - new ExecuteCommandCreateNewDataExtractionProject(Activator), - new ExecuteCommandRelease(Activator) { OverrideCommandName = "New Release..." }, - new ExecuteCommandCreateANOVersion(Activator) - }; + BuildSwitchInstanceMenuItems(); } - - void WindowFactory_TabChanged(object sender, IDockContent newTab) + catch (Exception ex) { - closeToolStripMenuItem.Enabled = newTab != null && !(newTab is PersistableToolboxDockContent); - showHelpToolStripMenuItem.Enabled = newTab is RDMPSingleControlTab; - - var singleObjectControlTab = newTab as RDMPSingleControlTab; - if (singleObjectControlTab == null) - { - _saveToolStripMenuItem.Saveable = null; - return; - } + Activator.GlobalErrorCheckNotifier.OnCheckPerformed( + new CheckEventArgs("Failed to BuildSwitchInstanceMenuItems", CheckResult.Fail, ex)); + } - var saveable = singleObjectControlTab.Control as ISaveableUI; - var singleObject = singleObjectControlTab.Control as IRDMPSingleDatabaseObjectControl; + launchAnotherInstanceToolStripMenuItem.ToolTipText = + "Start another copy of the RDMP process targeting the same (or another) RDMP platform database"; - //if user wants to emphasise on tab change and there's an object we can emphasise associated with the control - if (singleObject != null && UserSettings.EmphasiseOnTabChanged && singleObject.DatabaseObject != null) - { - bool? 
isCicChild = Activator.CoreChildProvider.GetDescendancyListIfAnyFor(singleObject.DatabaseObject)?.Parents?.Any(p=>p is CohortIdentificationConfiguration); - - //don't emphasise things that live under cics because it doesn't result in a collection being opened but instead opens the cic Tab (which could result in you being unable to get to your original tab!) - if(isCicChild == false) - { - _windowManager.Navigation.Suspend(); - Activator.RequestItemEmphasis(this, new EmphasiseRequest(singleObject.DatabaseObject)); - _windowManager.Navigation.Resume(); - } - - } - - - _saveToolStripMenuItem.Saveable = saveable; - } - - /// - /// Updates the enabled status (greyed out) of the Forward/Back menu items (includes the use of keyobard shortcuts) - /// - private void UpdateForwardBackEnabled() + if (switchToInstanceToolStripMenuItem.DropDownItems.Count > 1) { - navigateBackwardToolStripMenuItem.Enabled = _windowManager.Navigation.CanBack(); - navigateForwardToolStripMenuItem.Enabled = _windowManager.Navigation.CanForward(); + switchToInstanceToolStripMenuItem.Enabled = true; + switchToInstanceToolStripMenuItem.ToolTipText = + "Close the application and start another copy of the RDMP process targeting another RDMP platform database"; } - - - private void codeGenerationToolStripMenuItem_Click(object sender, EventArgs e) + else { - var ui = new GenerateClassCodeFromTableUI(); - ui.Show(); + switchToInstanceToolStripMenuItem.Enabled = false; + switchToInstanceToolStripMenuItem.ToolTipText = + "There are no other RDMP platform databases configured, create a .yaml file with connection strings to enable this feature"; } + } - private void runToolStripMenuItem_Click(object sender, EventArgs e) - { - var dialog = new RunUI(_windowManager.ActivateItems); - dialog.Show(); - } - private void userSettingsToolStripMenuItem_Click(object sender, EventArgs e) - { - var settings = new UserSettingsFileUI(Activator); - settings.Show(); - } + private IAtomicCommand[] GetNewCommands() + { + //Catalogue commands + return new IAtomicCommand[] + { + new ExecuteCommandCreateNewCatalogueByImportingFileUI(Activator), + new ExecuteCommandCreateNewCatalogueByImportingExistingDataTable(Activator), + new ExecuteCommandImportTableInfo(Activator, null, false), + new ExecuteCommandCreateNewCohortIdentificationConfiguration(Activator), + new ExecuteCommandCreateNewLoadMetadata(Activator), + new ExecuteCommandCreateNewStandardRegex(Activator), + new ExecuteCommandCreateNewCohortDatabaseUsingWizard(Activator), + new ExecuteCommandCreateNewCohortByExecutingACohortIdentificationConfiguration(Activator, null), + new ExecuteCommandCreateNewCohortFromFile(Activator, null), + new ExecuteCommandCreateNewCohortFromCatalogue(Activator), + new ExecuteCommandCreateNewCohortFromTable(Activator, null), + new ExecuteCommandCreateNewExtractableDataSetPackage(Activator), + new ExecuteCommandCreateNewDataExtractionProject(Activator), + new ExecuteCommandRelease(Activator) { OverrideCommandName = "New Release..." 
}, + new ExecuteCommandCreateANOVersion(Activator) + }; + } - private void licenseToolStripMenuItem_Click(object sender, EventArgs e) - { - var l = new LicenseUI(); - l.ShowDialog(); - } + private void WindowFactory_TabChanged(object sender, IDockContent newTab) + { + closeToolStripMenuItem.Enabled = newTab is not null and not PersistableToolboxDockContent; + showHelpToolStripMenuItem.Enabled = newTab is RDMPSingleControlTab; - public void InjectButton(ToolStripButton button) + if (newTab is not RDMPSingleControlTab singleObjectControlTab) { - rdmpTaskBar1.InjectButton(button); + _saveToolStripMenuItem.Saveable = null; + return; } - private void openToolStripMenuItem_Click(object sender, EventArgs e) + var saveable = singleObjectControlTab.Control as ISaveableUI; + + //if user wants to emphasise on tab change and there's an object we can emphasise associated with the control + if (singleObjectControlTab.Control is IRDMPSingleDatabaseObjectControl singleObject && + UserSettings.EmphasiseOnTabChanged && singleObject.DatabaseObject != null) { - Activator.SelectAnythingThen(new DialogArgs + var isCicChild = Activator.CoreChildProvider.GetDescendancyListIfAnyFor(singleObject.DatabaseObject) + ?.Parents?.Any(p => p is CohortIdentificationConfiguration); + + //don't emphasise things that live under cics because it doesn't result in a collection being opened but instead opens the cic Tab (which could result in you being unable to get to your original tab!) + if (isCicChild == false) { - WindowTitle = "Open" - }, (o) => Activator.WindowArranger.SetupEditAnything(this, o)); + _windowManager.Navigation.Suspend(); + Activator.RequestItemEmphasis(this, new EmphasiseRequest(singleObject.DatabaseObject)); + _windowManager.Navigation.Resume(); + } } - private void findToolStripMenuItem_Click(object sender, EventArgs e) - { - Activator.SelectAnythingThen(new DialogArgs - { - WindowTitle = "Find", - InitialSearchTextGuid = new Guid("00a0733b-848f-4bf3-bcde-7028fe159050"), - IsFind = true, - TaskDescription = "Enter the name of an object or part of the name or the dataset/project it is in." 
- }, (o) => Activator.RequestItemEmphasis(this, new EmphasiseRequest(o))); - } + _saveToolStripMenuItem.Saveable = saveable; + } - private void closeToolStripMenuItem_Click(object sender, EventArgs e) - { - _windowManager.CloseCurrentTab(); - } + /// + /// Updates the enabled status (greyed out) of the Forward/Back menu items (includes the use of keyboard shortcuts) + /// + private void UpdateForwardBackEnabled() + { + navigateBackwardToolStripMenuItem.Enabled = _windowManager.Navigation.CanBack(); + navigateForwardToolStripMenuItem.Enabled = _windowManager.Navigation.CanForward(); + } - private void findAndReplaceToolStripMenuItem_Click(object sender, EventArgs e) - { - Activator.ShowWindow(new FindAndReplaceUI(Activator),true); - } - private void navigateBackwardToolStripMenuItem_Click(object sender, EventArgs e) - { - _windowManager.Navigation.Back(true); - } + private void codeGenerationToolStripMenuItem_Click(object sender, EventArgs e) + { + var ui = new GenerateClassCodeFromTableUI(); + ui.Show(); + } + + private void runToolStripMenuItem_Click(object sender, EventArgs e) + { + var dialog = new RunUI(_windowManager.ActivateItems); + dialog.Show(); + } + + private void userSettingsToolStripMenuItem_Click(object sender, EventArgs e) + { + var settings = new UserSettingsFileUI(Activator); + settings.Show(); + } - private void navigateForwardToolStripMenuItem_Click(object sender, EventArgs e) + private void licenseToolStripMenuItem_Click(object sender, EventArgs e) + { + var l = new LicenseUI(); + l.ShowDialog(); + } + + public void InjectButton(ToolStripButton button) + { + rdmpTaskBar1.InjectButton(button); + } + + private void openToolStripMenuItem_Click(object sender, EventArgs e) + { + Activator.SelectAnythingThen(new DialogArgs { - _windowManager.Navigation.Forward(true); - } + WindowTitle = "Open" + }, o => Activator.WindowArranger.SetupEditAnything(this, o)); + } - private void checkForUpdatesToolStripMenuItem_Click(object sender, EventArgs e) + private void findToolStripMenuItem_Click(object sender, EventArgs e) + { + Activator.SelectAnythingThen(new DialogArgs { - // AutoUpdater.NET is Windows-only for now: - if (!OperatingSystem.IsWindowsVersionAtLeast(7)) - return; + WindowTitle = "Find", + InitialSearchTextGuid = new Guid("00a0733b-848f-4bf3-bcde-7028fe159050"), + IsFind = true, + TaskDescription = "Enter the name of an object or part of the name or the dataset/project it is in." 
+ }, o => Activator.RequestItemEmphasis(this, new EmphasiseRequest(o))); + } - var url = "https://raw.githubusercontent.com/HicServices/RDMP/main/rdmp-client.xml"; + private void closeToolStripMenuItem_Click(object sender, EventArgs e) + { + _windowManager.CloseCurrentTab(); + } - // Give user a chance to change the URL that is updating from - if(!Activator.TypeText("Update Location","Url:",int.MaxValue,url,out url,false)) - { - return; - } + private void findAndReplaceToolStripMenuItem_Click(object sender, EventArgs e) + { + Activator.ShowWindow(new FindAndReplaceUI(Activator), true); + } - try - { - AutoUpdater.ReportErrors = true; - AutoUpdater.Start(url); - } - catch (Exception ex) - { - Activator.ShowException($"Failed to update from {url}", ex); - } - } + private void navigateBackwardToolStripMenuItem_Click(object sender, EventArgs e) + { + _windowManager.Navigation.Back(true); + } - private void ListAllTypesToolStripMenuItem_Click(object sender, EventArgs e) - { - var file = new FileInfo(Path.GetTempFileName()); - File.WriteAllText(file.FullName,string.Join(Environment.NewLine,Activator.RepositoryLocator.CatalogueRepository.MEF.GetAllTypes().Select(t=>t.FullName))); - UsefulStuff.GetInstance().ShowFileInWindowsExplorer(file); - } + private void navigateForwardToolStripMenuItem_Click(object sender, EventArgs e) + { + _windowManager.Navigation.Forward(true); + } - private void NewToolStripMenuItem_Click(object sender, EventArgs e) - { - var dlg = new SelectDialog(new DialogArgs - { - WindowTitle = "Create New", - TaskDescription = "What do you want to create?" - },Activator,GetNewCommands(),false); + private void checkForUpdatesToolStripMenuItem_Click(object sender, EventArgs e) + { + // AutoUpdater.NET is Windows-only for now: + if (!OperatingSystem.IsWindowsVersionAtLeast(7)) + return; - if (dlg.ShowDialog() == DialogResult.OK) - { - var picked = dlg.Selected; - picked.Execute(); - } - } + var url = "https://raw.githubusercontent.com/HicServices/RDMP/main/rdmp-client.xml"; + + // Give user a chance to change the URL that is updating from + if (!Activator.TypeText("Update Location", "Url:", int.MaxValue, url, out url, false)) + return; - private void quitToolStripMenuItem_Click(object sender, EventArgs e) + try { - Application.Exit(); + AutoUpdater.ReportErrors = true; + AutoUpdater.Start(url); } - - private void newSessionToolStripMenuItem_Click(object sender, EventArgs e) + catch (Exception ex) { - var cmd = new ExecuteCommandStartSession(Activator,null,null); - cmd.Execute(); + Activator.ShowException($"Failed to update from {url}", ex); } + } + private void ListAllTypesToolStripMenuItem_Click(object sender, EventArgs e) + { + var file = new FileInfo(Path.GetTempFileName()); + File.WriteAllLines(file.FullName, Rdmp.Core.Repositories.MEF.GetAllTypes().Select(t => t.FullName)); + UsefulStuff.ShowPathInWindowsExplorer(file); + } - private void queryDataExport_Click(object sender, EventArgs e) + private void NewToolStripMenuItem_Click(object sender, EventArgs e) + { + var dlg = new SelectDialog(new DialogArgs { - var cmd = new ExecuteCommandQueryPlatformDatabase(Activator, nameof(DataExportPatcher)); - - if (cmd.IsImpossible) - { - Activator.Show("Cannot Query Database", cmd.ReasonCommandImpossible); - return; - } - - cmd.Execute(); - } + WindowTitle = "Create New", + TaskDescription = "What do you want to create?" 
+ }, Activator, GetNewCommands(), false); - private void queryCatalogue_Click(object sender, EventArgs e) + if (dlg.ShowDialog() == DialogResult.OK) { - var cmd = new ExecuteCommandQueryPlatformDatabase(Activator, nameof(CataloguePatcher)); - - if (cmd.IsImpossible) - { - Activator.Show("Cannot Query Database",cmd.ReasonCommandImpossible); - return; - } - cmd.Execute(); + var picked = dlg.Selected; + picked.Execute(); } + } - private void restartApplicationToolStripMenuItem_Click(object sender, EventArgs e) - { - if (UserSettings.ConfirmApplicationExiting && Activator.Confirm("Restart Application?", "Confirm Restart") == false) - return; + private void quitToolStripMenuItem_Click(object sender, EventArgs e) + { + Application.Exit(); + } + private void newSessionToolStripMenuItem_Click(object sender, EventArgs e) + { + var cmd = new ExecuteCommandStartSession(Activator, null, null); + cmd.Execute(); + } - ApplicationRestarter.Restart(); - } - private void lastCommandMonitorToolStripMenuItem_Click(object sender, EventArgs e) - { - var lastCommand = new LastCommandUI(); - lastCommand.Show(); - } + private void queryDataExport_Click(object sender, EventArgs e) + { + var cmd = new ExecuteCommandQueryPlatformDatabase(Activator, nameof(DataExportPatcher)); - private void switchToUsingUserSettings_Click(object sender, EventArgs e) + if (cmd.IsImpossible) { - SwitchTo(null); + Activator.Show("Cannot Query Database", cmd.ReasonCommandImpossible); + return; } - private void launchNewInstanceWithUserSettings_Click(object sender, EventArgs e) - { - LaunchNew(null); - } + cmd.Execute(); + } - private void terminateProcessToolStripMenuItem_Click(object sender, EventArgs e) - { - if(Activator.YesNo("Terminate the process without saving?","Terminate")) - { - Process.GetCurrentProcess().Kill(); - } - } + private void queryCatalogue_Click(object sender, EventArgs e) + { + var cmd = new ExecuteCommandQueryPlatformDatabase(Activator, nameof(CataloguePatcher)); - private void findMultipleToolStripMenuItem_Click(object sender, EventArgs e) + if (cmd.IsImpossible) { - var cmd = new ExecuteCommandStartSession(Activator, null, ExecuteCommandStartSession.FindResultsTitle); - cmd.Execute(); + Activator.Show("Cannot Query Database", cmd.ReasonCommandImpossible); + return; } - private void viewHistoryToolStripMenuItem_Click(object sender, EventArgs e) - { - Activator.ShowWindow(new CommitsUI(Activator), true); - } + cmd.Execute(); } -} + private void restartApplicationToolStripMenuItem_Click(object sender, EventArgs e) + { + if (UserSettings.ConfirmApplicationExiting && + Activator.Confirm("Restart Application?", "Confirm Restart") == false) + return; + + + ApplicationRestarter.Restart(); + } + + private void lastCommandMonitorToolStripMenuItem_Click(object sender, EventArgs e) + { + var lastCommand = new LastCommandUI(); + lastCommand.Show(); + } + private void switchToUsingUserSettings_Click(object sender, EventArgs e) + { + SwitchTo(null); + } + + private void launchNewInstanceWithUserSettings_Click(object sender, EventArgs e) + { + LaunchNew(null); + } + + private void terminateProcessToolStripMenuItem_Click(object sender, EventArgs e) + { + if (Activator.YesNo("Terminate the process without saving?", "Terminate")) Process.GetCurrentProcess().Kill(); + } + + private void findMultipleToolStripMenuItem_Click(object sender, EventArgs e) + { + var cmd = new ExecuteCommandStartSession(Activator, null, ExecuteCommandStartSession.FindResultsTitle); + cmd.Execute(); + } + + private void 
viewHistoryToolStripMenuItem_Click(object sender, EventArgs e) + { + Activator.ShowWindow(new CommitsUI(Activator), true); + } +} \ No newline at end of file diff --git a/Application/ResearchDataManagementPlatform/NLog.template.config b/Application/ResearchDataManagementPlatform/NLog.template.config index 8e584958bc..348ad21979 100644 --- a/Application/ResearchDataManagementPlatform/NLog.template.config +++ b/Application/ResearchDataManagementPlatform/NLog.template.config @@ -1,4 +1,5 @@ - + + - - - + + + - - + \ No newline at end of file diff --git a/Application/ResearchDataManagementPlatform/Program.cs b/Application/ResearchDataManagementPlatform/Program.cs index d1588da19c..693670d108 100644 --- a/Application/ResearchDataManagementPlatform/Program.cs +++ b/Application/ResearchDataManagementPlatform/Program.cs @@ -6,70 +6,66 @@ using System; using System.IO; -using System.Linq; using System.Runtime.InteropServices; using CommandLine; using Rdmp.Core.Curation.Data; +using Rdmp.Core.ReusableLibraryCode; +using Rdmp.Core.ReusableLibraryCode.Checks; using Rdmp.Core.Startup; using Rdmp.UI; using Rdmp.UI.SimpleDialogs; using Rdmp.UI.TestsAndSetup; -using ReusableLibraryCode; -using ReusableLibraryCode.Checks; -namespace ResearchDataManagementPlatform +namespace ResearchDataManagementPlatform; + +internal static partial class Program { - static class Program + [LibraryImport("kernel32.dll")] + private static partial void AttachConsole(int dwProcessId); + + /// + /// The main entry point for the application. + /// + [STAThread] + private static void Main(string[] args) { - [DllImport("kernel32.dll")] - [return: MarshalAs(UnmanagedType.Bool)] - private static extern bool AttachConsole([MarshalAs(UnmanagedType.U4)] int dwProcessId); - - /// - /// The main entry point for the application. - /// - [STAThread] - static void Main(string[] args) + try + { + AttachConsole(-1); + } + catch (Exception) { - // if user has the command line built and runnable from the windows - // client then don't load the dlls (or we end up with 2 copies!). - SafeDirectoryCatalog.IgnoreDll = (f) => Path.GetFileName(f.DirectoryName).Equals("cli"); + Console.WriteLine("Couldn't redirect console. Never mind"); + } - try - { - AttachConsole(-1); - } - catch (Exception) - { - Console.WriteLine("Couldn't redirect console. 
Nevermind"); - } + Startup.PreStartup(); - Startup.PreStartup(); + UsefulStuff.GetParser() + .ParseArguments(args) + .MapResult(RunApp, _ => -1); + } - UsefulStuff.GetParser() - .ParseArguments(args) - .MapResult(RunApp, err => -1); + private static object RunApp(ResearchDataManagementPlatformOptions arg) + { + try + { + arg.PopulateConnectionStringsFromYamlIfMissing(ThrowImmediatelyCheckNotifier.Quiet); } - - private static object RunApp(ResearchDataManagementPlatformOptions arg) + catch (Exception ex) { - try - { - arg.PopulateConnectionStringsFromYamlIfMissing(new ThrowImmediatelyCheckNotifier()); - } - catch(Exception ex) - { - ExceptionViewer.Show(ex); - return -500; - } + ExceptionViewer.Show(ex); + return -500; + } - RDMPBootStrapper bootStrapper = - new RDMPBootStrapper( - new EnvironmentInfo(PluginFolders.Main | PluginFolders.Windows), - arg); + var bootStrapper = + new RDMPBootStrapper(arg, locator => + { + var form = new RDMPMainForm(); + form.SetRepositoryLocator(locator); + return form; + }); - bootStrapper.Show(false); - return 0; - } + bootStrapper.Show(); + return 0; } -} +} \ No newline at end of file diff --git a/Application/ResearchDataManagementPlatform/Properties/AssemblyInfo.cs b/Application/ResearchDataManagementPlatform/Properties/AssemblyInfo.cs index edb8d4291b..77496c9a01 100644 --- a/Application/ResearchDataManagementPlatform/Properties/AssemblyInfo.cs +++ b/Application/ResearchDataManagementPlatform/Properties/AssemblyInfo.cs @@ -1,9 +1,9 @@ using System.Runtime.InteropServices; -// Setting ComVisible to false makes the types in this assembly not visible -// to COM components. If you need to access a type in this assembly from +// Setting ComVisible to false makes the types in this assembly not visible +// to COM components. If you need to access a type in this assembly from // COM, set the ComVisible attribute to true on that type. [assembly: ComVisible(false)] -// The following GUID is for the ID of the typelib if this project is exposed to COM -[assembly: Guid("cac9817c-fc66-48e8-9333-c740af6ab5b9")] +// The following GUID is for the ID of the type library if this project is exposed to COM +[assembly: Guid("cac9817c-fc66-48e8-9333-c740af6ab5b9")] \ No newline at end of file diff --git a/Application/ResearchDataManagementPlatform/RDMP.nuspec b/Application/ResearchDataManagementPlatform/RDMP.nuspec deleted file mode 100644 index 2c0a9ed8eb..0000000000 --- a/Application/ResearchDataManagementPlatform/RDMP.nuspec +++ /dev/null @@ -1,15 +0,0 @@ - - - - ResearchDataManagementPlatform - $version$ - Health Informatics Service, University of Dundee - Research Data Management Platform - - - - - - - - \ No newline at end of file diff --git a/Application/ResearchDataManagementPlatform/RDMPMainForm.cs b/Application/ResearchDataManagementPlatform/RDMPMainForm.cs index 7d8e77aa0d..a382c426fa 100644 --- a/Application/ResearchDataManagementPlatform/RDMPMainForm.cs +++ b/Application/ResearchDataManagementPlatform/RDMPMainForm.cs @@ -5,17 +5,16 @@ // You should have received a copy of the GNU General Public License along with RDMP. If not, see . 
using System; -using System.Diagnostics; using System.Drawing; using System.IO; using System.Linq; -using System.Reflection; using System.Text; using System.Windows.Forms; -using MapsDirectlyToDatabaseTable; -using Rdmp.Core.CommandLine.Options; using Rdmp.Core.Curation.Data; +using Rdmp.Core.MapsDirectlyToDatabaseTable; using Rdmp.Core.Repositories; +using Rdmp.Core.ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Settings; using Rdmp.UI; using Rdmp.UI.Refreshing; using Rdmp.UI.SimpleDialogs; @@ -27,289 +26,257 @@ using ResearchDataManagementPlatform.WindowManagement.ContentWindowTracking.Persistence; using ResearchDataManagementPlatform.WindowManagement.ExtenderFunctionality; using ResearchDataManagementPlatform.WindowManagement.Licenses; -using ReusableLibraryCode.Checks; -using ReusableLibraryCode.Settings; - - using WeifenLuo.WinFormsUI.Docking; -namespace ResearchDataManagementPlatform +namespace ResearchDataManagementPlatform; + +/// +/// Main entry point into the RDMP software. Hosts all tab collections and document windows for all RDMP tasks. See CatalogueCollectionUI , DataExportCollectionUI , +/// TableInfoCollectionUI , LoadMetadataCollectionUI and CohortIdentificationCollectionUI +/// See +/// +public partial class RDMPMainForm : RDMPForm { + private readonly PersistenceDecisionFactory _persistenceFactory = new(); + private readonly ITheme _theme; + private IRDMPPlatformRepositoryServiceLocator RepositoryLocator { get; set; } + /// - /// Main entry point into the RDMP software. Hosts all tab collections and document windows for all RDMP tasks. See CatalogueCollectionUI , DataExportCollectionUI , - /// TableInfoCollectionUI , LoadMetadataCollectionUI and CohortIdentificationCollectionUI - /// See + /// True while the main form is loading (e.g. from a persistence file) /// - public partial class RDMPMainForm : RDMPForm + public static bool Loading = true; + + public RDMPMainForm() { - private readonly PersistenceDecisionFactory _persistenceFactory = new PersistenceDecisionFactory(); - private ITheme _theme; - IRDMPPlatformRepositoryServiceLocator RepositoryLocator { get; set; } + InitializeComponent(); - /// - /// True while the main form is loading (e.g. from a persistence file) - /// - public static bool Loading = true; + PatchController.EnableAll = true; - public RDMPMainForm() + try { - InitializeComponent(); - - PatchController.EnableAll = true; - - try + var t = UserSettings.Theme; + if (!string.IsNullOrWhiteSpace(t)) { - var t = UserSettings.Theme; - if (!string.IsNullOrWhiteSpace(t)) - { - var type = Type.GetType(t); - _theme = type == null ? new MyVS2015BlueTheme() : (ITheme) System.Activator.CreateInstance(type); - } - else - _theme = new MyVS2015BlueTheme(); + var type = Type.GetType(t); + _theme = type == null ? 
new MyVS2015BlueTheme() : (ITheme)System.Activator.CreateInstance(type); } - catch (Exception) + else { _theme = new MyVS2015BlueTheme(); } + } + catch (Exception) + { + _theme = new MyVS2015BlueTheme(); + } - _theme.ApplyThemeToMenus = UserSettings.ApplyThemeToMenus; + _theme.ApplyThemeToMenus = UserSettings.ApplyThemeToMenus; - dockPanel1.Theme = (ThemeBase)_theme; - dockPanel1.Theme.Extender.FloatWindowFactory = new CustomFloatWindowFactory(); - dockPanel1.DefaultFloatWindowSize = new Size(640, 520); - dockPanel1.ShowDocumentIcon = true; - dockPanel1.DocumentStyle = DocumentStyle.DockingWindow; + dockPanel1.Theme = (ThemeBase)_theme; + dockPanel1.Theme.Extender.FloatWindowFactory = new CustomFloatWindowFactory(); + dockPanel1.DefaultFloatWindowSize = new Size(640, 520); + dockPanel1.ShowDocumentIcon = true; + dockPanel1.DocumentStyle = DocumentStyle.DockingWindow; - WindowState = FormWindowState.Maximized; - CloseOnEscape = false; + WindowState = FormWindowState.Maximized; + CloseOnEscape = false; - if (UserSettings.LicenseAccepted != new License("LIBRARYLICENSES").GetHashOfLicense()) - new LicenseUI().ShowDialog(); - } + if (UserSettings.LicenseAccepted != new License("LIBRARYLICENSES").GetHashOfLicense()) + new LicenseUI().ShowDialog(); + } + + private WindowManager _windowManager; + private readonly RefreshBus _refreshBus = new(); + private FileInfo _persistenceFile; + private ICheckNotifier _globalErrorCheckNotifier; + private string _version; + private string _connectedTo; + + public void SetRepositoryLocator(IRDMPPlatformRepositoryServiceLocator repositoryLocator) + { + RepositoryLocator = repositoryLocator; + } + + private void RDMPMainForm_Load(object sender, EventArgs e) + { + var exceptionCounter = new ExceptionCounterUI(); + _globalErrorCheckNotifier = exceptionCounter; + _rdmpTopMenuStrip1.InjectButton(exceptionCounter); - WindowManager _windowManager; - readonly RefreshBus _refreshBus = new RefreshBus(); - private FileInfo _persistenceFile; - private ICheckNotifier _globalErrorCheckNotifier; - private string _version; - private string _connectedTo; + _windowManager = new WindowManager(_theme, this, _refreshBus, dockPanel1, RepositoryLocator, exceptionCounter); + SetItemActivator(_windowManager.ActivateItems); - public void SetRepositoryLocator(IRDMPPlatformRepositoryServiceLocator repositoryLocator) + _rdmpTopMenuStrip1.SetWindowManager(_windowManager); + + //put the version of the software into the window title + + _version = StartupUI.GetVersion(); + + //put the current platform database into the window title too + if (Activator?.RepositoryLocator?.CatalogueRepository is TableRepository connectedTo) { - RepositoryLocator = repositoryLocator; + var database = connectedTo.DiscoveredServer?.GetCurrentDatabase(); + var instanceDescription = ""; + + var connectionStringsFileLoaded = RDMPBootStrapper.ApplicationArguments?.ConnectionStringsFileLoaded; + if (connectionStringsFileLoaded != null) + instanceDescription = + $" - {connectionStringsFileLoaded.Name ?? 
connectionStringsFileLoaded.FileLoaded.Name}"; + if (database != null) + _connectedTo = $"({database.GetRuntimeName()} on {database.Server.Name}){instanceDescription}"; } - private void RDMPMainForm_Load(object sender, EventArgs e) - { - var exceptionCounter = new ExceptionCounterUI(); - _globalErrorCheckNotifier = exceptionCounter; - _rdmpTopMenuStrip1.InjectButton(exceptionCounter); - - _windowManager = new WindowManager(_theme,this,_refreshBus, dockPanel1, RepositoryLocator, exceptionCounter); - SetItemActivator(_windowManager.ActivateItems); - - _rdmpTopMenuStrip1.SetWindowManager(_windowManager); - - //put the version of the software into the window title - - _version = Assembly.GetExecutingAssembly().GetName().Version.ToString(); - - //put the current platform database into the window title too - if (Activator?.RepositoryLocator?.CatalogueRepository is TableRepository connectedTo) - { - var database = connectedTo.DiscoveredServer?.GetCurrentDatabase(); - var instanceDescription = ""; - - var connectionStringsFileLoaded = RDMPBootStrapper.ApplicationArguments?.ConnectionStringsFileLoaded; - if (connectionStringsFileLoaded != null) - { - instanceDescription = " - " + (connectionStringsFileLoaded.Name ?? connectionStringsFileLoaded.FileLoaded.Name); - } - if (database != null) - _connectedTo = $"({database.GetRuntimeName()} on {database.Server.Name}){instanceDescription}"; - } - - Text = "Research Data Management Platform"; + Text = "Research Data Management Platform"; - var rdmpDir = new DirectoryInfo(Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData), "RDMP")); - if(!rdmpDir.Exists) - rdmpDir.Create(); + var rdmpDir = + new DirectoryInfo( + Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData), "RDMP")); + if (!rdmpDir.Exists) + rdmpDir.Create(); - _persistenceFile = new FileInfo(Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData),"RDMP", "RDMPDockPanelPersist.xml")); + _persistenceFile = + new FileInfo(Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData), "RDMP", + "RDMPDockPanelPersist.xml")); - //if there is no persist file or user wants to show the home screen always on startup - if (!_persistenceFile.Exists || UserSettings.ShowHomeOnStartup) + //if there is no persist file or user wants to show the home screen always on startup + if (!_persistenceFile.Exists || UserSettings.ShowHomeOnStartup) + _windowManager.PopHome(); + else if (_persistenceFile.Exists) + try { - _windowManager.PopHome(); + //load the state using the method + LoadFromXml(new FileStream(_persistenceFile.FullName, FileMode.Open)); } - else + catch (Exception ex) { - try - { - if (_persistenceFile.Exists) - LoadFromXml(new FileStream(_persistenceFile.FullName, FileMode.Open)); - - //load the stateusing the method - } - catch (Exception ex) - { - _globalErrorCheckNotifier.OnCheckPerformed( - new CheckEventArgs("Could not load window persistence due to error in persistence file", - CheckResult.Fail, ex)); - - //delete the persistence file and try again - MessageBox.Show("Persistence file corrupt, application will restart without persistence"); - _persistenceFile.Delete(); - ApplicationRestarter.Restart(); - } + _globalErrorCheckNotifier.OnCheckPerformed( + new CheckEventArgs("Could not load window persistence due to error in persistence file", + CheckResult.Fail, ex)); + + //delete the persistence file and try again + MessageBox.Show("Persistence file corrupt, application will restart without 
persistence"); + _persistenceFile.Delete(); + ApplicationRestarter.Restart(); } - - FormClosing += CloseForm; - Loading = false; - } - public override string Text { - get => base.Text; - set => base.Text = (value + " v" + _version + " " + _connectedTo).Trim(); - } + FormClosing += CloseForm; + Loading = false; + } - public void LoadFromXml(Stream stream) - { - if (dockPanel1.DocumentStyle == DocumentStyle.SystemMdi) - { - foreach (Form form in MdiChildren) - form.Close(); - } - else - { - foreach (IDockContent document in dockPanel1.DocumentsToArray()) - { - // IMPORANT: dispose all panes. - document.DockHandler.DockPanel = null; - document.DockHandler.Close(); - } - } + public override string Text + { + get => base.Text; + set => base.Text = $"{value} v{_version} {_connectedTo}".Trim(); + } - foreach (var pane in dockPanel1.Panes.ToList()) + private void LoadFromXml(Stream stream) + { + if (dockPanel1.DocumentStyle == DocumentStyle.SystemMdi) + foreach (var form in MdiChildren) + form.Close(); + else + foreach (var document in dockPanel1.DocumentsToArray()) { - pane.CloseActiveContent(); - pane.Dispose(); + // IMPORTANT: dispose all panes. + document.DockHandler.DockPanel = null; + document.DockHandler.Close(); } - // IMPORTANT: dispose all float windows. - foreach (var window in dockPanel1.FloatWindows.ToList()) - window.Dispose(); - - System.Diagnostics.Debug.Assert(dockPanel1.Panes.Count == 0); - System.Diagnostics.Debug.Assert(dockPanel1.Contents.Count == 0); - System.Diagnostics.Debug.Assert(dockPanel1.FloatWindows.Count == 0); - - dockPanel1.LoadFromXml(stream, DeserializeContent); - } - public void LoadFromXml(WindowLayout target) + foreach (var pane in dockPanel1.Panes.ToList()) { - UnicodeEncoding uniEncoding = new UnicodeEncoding(); - - // You might not want to use the outer using statement that I have - // I wasn't sure how long you would need the MemoryStream object - using (MemoryStream ms = new MemoryStream()) - { - var sw = new StreamWriter(ms, uniEncoding); - try - { - sw.Write(target.LayoutData); - sw.Flush();//otherwise you are risking empty stream - ms.Seek(0, SeekOrigin.Begin); - - LoadFromXml(ms); - } - finally - { - sw.Dispose(); - } - } + pane.CloseActiveContent(); + pane.Dispose(); } + // IMPORTANT: dispose all float windows. 
+ foreach (var window in dockPanel1.FloatWindows.ToList()) + window.Dispose(); - public string GetCurrentLayoutXml() - { - UnicodeEncoding uniEncoding = new UnicodeEncoding(); + System.Diagnostics.Debug.Assert(dockPanel1.Panes.Count == 0); + System.Diagnostics.Debug.Assert(dockPanel1.Contents.Count == 0); + System.Diagnostics.Debug.Assert(dockPanel1.FloatWindows.Count == 0); - using (MemoryStream ms = new MemoryStream()) - { - dockPanel1.SaveAsXml(ms, uniEncoding); - - ms.Seek(0, SeekOrigin.Begin); - - try - { - return new StreamReader(ms).ReadToEnd(); - } - finally - { - ms.Dispose(); - } - } - } + dockPanel1.LoadFromXml(stream, DeserializeContent); + } - private void CloseForm(object sender, FormClosingEventArgs e) - { - // give the window manager a chance to cancel closing - _windowManager.OnFormClosing(e); + public void LoadFromXml(WindowLayout target) + { + var uniEncoding = new UnicodeEncoding(); + + // You might not want to use the outer using statement that I have + // I wasn't sure how long you would need the MemoryStream object + using var ms = new MemoryStream(); + using var sw = new StreamWriter(ms, uniEncoding); + sw.Write(target.LayoutData); + sw.Flush(); //otherwise you are risking empty stream + ms.Seek(0, SeekOrigin.Begin); + LoadFromXml(ms); + } - if (e.Cancel) - { - return; - } - if (e.CloseReason == CloseReason.UserClosing && UserSettings.ConfirmApplicationExiting) - if (!Activator.YesNo("Are you sure you want to Exit?", "Confirm Exit")) - { - e.Cancel = true; - return; - } + public string GetCurrentLayoutXml() + { + var uniEncoding = new UnicodeEncoding(); - try - { - if (_persistenceFile != null) - { - if (!_persistenceFile.Directory.Exists) - _persistenceFile.Directory.Create(); + using var ms = new MemoryStream(); + dockPanel1.SaveAsXml(ms, uniEncoding); + ms.Seek(0, SeekOrigin.Begin); + return new StreamReader(ms).ReadToEnd(); + } - dockPanel1.SaveAsXml(_persistenceFile.FullName); //save when Form closes - } - } - catch(Exception ex) + private void CloseForm(object sender, FormClosingEventArgs e) + { + // give the window manager a chance to cancel closing + _windowManager.OnFormClosing(e); + + if (e.Cancel) return; + + if (e.CloseReason == CloseReason.UserClosing && UserSettings.ConfirmApplicationExiting) + if (!Activator.YesNo("Are you sure you want to Exit?", "Confirm Exit")) { - ExceptionViewer.Show("Could not write persistence file",ex); + e.Cancel = true; + return; } + + try + { + if (_persistenceFile == null) return; + if (_persistenceFile.Directory?.Exists == false) + _persistenceFile.Directory.Create(); + dockPanel1.SaveAsXml(_persistenceFile.FullName); //save when Form closes + } + catch (Exception ex) + { + ExceptionViewer.Show("Could not write persistence file", ex); } + } - private IDockContent DeserializeContent(string persiststring) + private IDockContent DeserializeContent(string persiststring) + { + try { - try - { - var toolbox = _persistenceFactory.ShouldCreateCollection(persiststring); - if (toolbox.HasValue) - { - var toolboxInstance = _windowManager.Create(toolbox.Value); - return toolboxInstance; - } - - var instruction = _persistenceFactory.ShouldCreateBasicControl(persiststring,RepositoryLocator) ?? - _persistenceFactory.ShouldCreateSingleObjectControl(persiststring,RepositoryLocator) ?? 
- _persistenceFactory.ShouldCreateObjectCollection(persiststring, RepositoryLocator); - - if (instruction != null) - return _windowManager.ActivateItems.Activate(instruction,_windowManager.ActivateItems); - } - catch (Exception e) + var toolbox = PersistenceDecisionFactory.ShouldCreateCollection(persiststring); + if (toolbox.HasValue) { - _globalErrorCheckNotifier.OnCheckPerformed(new CheckEventArgs("Could not work out what window to show for persistence string '" + persiststring + "'",CheckResult.Fail, e)); + var toolboxInstance = _windowManager.Create(toolbox.Value); + return toolboxInstance; } - return null; + var instruction = PersistenceDecisionFactory.ShouldCreateBasicControl(persiststring, RepositoryLocator) ?? + PersistenceDecisionFactory.ShouldCreateSingleObjectControl(persiststring, + RepositoryLocator) ?? + PersistenceDecisionFactory.ShouldCreateObjectCollection(persiststring, RepositoryLocator); + + if (instruction != null) + return _windowManager.ActivateItems.Activate(instruction, _windowManager.ActivateItems); + } + catch (Exception e) + { + _globalErrorCheckNotifier.OnCheckPerformed(new CheckEventArgs( + $"Could not work out what window to show for persistence string '{persiststring}'", CheckResult.Fail, + e)); } + + return null; } -} +} \ No newline at end of file diff --git a/Application/ResearchDataManagementPlatform/ResearchDataManagementPlatform.csproj b/Application/ResearchDataManagementPlatform/ResearchDataManagementPlatform.csproj index 1f0c310dcf..3f15434037 100644 --- a/Application/ResearchDataManagementPlatform/ResearchDataManagementPlatform.csproj +++ b/Application/ResearchDataManagementPlatform/ResearchDataManagementPlatform.csproj @@ -1,15 +1,18 @@ - + {550988FD-F1FA-41D8-BE0F-00B4DE47D320} WinExe - net6.0-windows + net7.0-windows true true + true false 1701;1702;CS1591;NU1701 embedded true - false + true + true + true @@ -17,6 +20,7 @@ true Icon\main.ico + true @@ -30,14 +34,13 @@ - + all runtime; build; native; contentfiles; analyzers - - + @@ -65,18 +68,8 @@ RDMPTopMenuStripUI.cs - - Hunspellx64.dll - PreserveNewest - - - Hunspellx86.dll - PreserveNewest - - - Always - + PreserveNewest @@ -110,23 +103,10 @@ WindowManagement\Licenses\LICENSE - PreserveNewest WindowManagement\Licenses\LIBRARYLICENSES - - en_US.aff - PreserveNewest - - - en_US.dic - PreserveNewest - - - hyph_en_US.dic - PreserveNewest - SettingsSingleFileGenerator @@ -141,7 +121,5 @@ - - diff --git a/Application/ResearchDataManagementPlatform/Theme/ThemeExtender.cs b/Application/ResearchDataManagementPlatform/Theme/ThemeExtender.cs index 3ddea8a495..22e0a9cfc7 100644 --- a/Application/ResearchDataManagementPlatform/Theme/ThemeExtender.cs +++ b/Application/ResearchDataManagementPlatform/Theme/ThemeExtender.cs @@ -10,55 +10,50 @@ using System.Windows.Forms; using System.Xml.Linq; -namespace ResearchDataManagementPlatform.Theme +namespace ResearchDataManagementPlatform.Theme; + +internal class ThemeExtender { - class ThemeExtender - { - private XDocument _xml; - private const string Env = "Environment"; + private readonly XDocument _xml; + private const string Env = "Environment"; - public Color TextBoxBackground { get; set; } - - public Color ComboBoxBackground { get; set; } - public Color ComboBoxText { get; set; } + public Color TextBoxBackground { get; set; } + public Color ComboBoxBackground { get; set; } + public Color ComboBoxText { get; set; } - public ThemeExtender(byte[] bytes) - { - _xml = XDocument.Load(new StreamReader(new MemoryStream(bytes))); - TextBoxBackground = 
ColorTranslatorFromHtml("CommonControls", "TextBoxBackground"); - - ComboBoxBackground = ColorTranslatorFromHtml(Env, "ComboBoxBackground"); - ComboBoxText = ColorTranslatorFromHtml(Env, "ComboBoxText"); - } - private Color ColorTranslatorFromHtml(string category, string name, bool foreground = false) - { - string color = null; + public ThemeExtender(byte[] bytes) + { + _xml = XDocument.Load(new StreamReader(new MemoryStream(bytes))); + TextBoxBackground = ColorTranslatorFromHtml("CommonControls", "TextBoxBackground"); - XElement environmentElement = _xml.Root.Element("Theme").Elements("Category").FirstOrDefault(item => item.Attribute("Name").Value == category); + ComboBoxBackground = ColorTranslatorFromHtml(Env, "ComboBoxBackground"); + ComboBoxText = ColorTranslatorFromHtml(Env, "ComboBoxText"); + } - if (environmentElement != null) - { - var colourElement = environmentElement.Elements("Color").FirstOrDefault(item => item.Attribute("Name").Value == name); + private Color ColorTranslatorFromHtml(string category, string name, bool foreground = false) + { + string color = null; - if (colourElement != null) - color = colourElement.Element(foreground ? "Foreground" : "Background").Attribute("Source").Value; - } + var environmentElement = _xml.Root.Element("Theme").Elements("Category") + .FirstOrDefault(item => item.Attribute("Name").Value == category); - if (color == null) - return Color.Transparent; + var colourElement = environmentElement?.Elements("Color") + .FirstOrDefault(item => item.Attribute("Name").Value == name); - return ColorTranslator.FromHtml("#" + color); - } + if (colourElement != null) + color = colourElement.Element(foreground ? "Foreground" : "Background").Attribute("Source").Value; - public void ApplyTo(ToolStrip item) + return color == null ? Color.Transparent : ColorTranslator.FromHtml($"#{color}"); + } + + public void ApplyTo(ToolStrip item) + { + foreach (var comboBox in item.Items.OfType()) { - foreach (var comboBox in item.Items.OfType()) - { - comboBox.ForeColor = ComboBoxText; - comboBox.BackColor = ComboBoxBackground; - } + comboBox.ForeColor = ComboBoxText; + comboBox.BackColor = ComboBoxBackground; } } } \ No newline at end of file diff --git a/Application/ResearchDataManagementPlatform/Theme/Themes.cs b/Application/ResearchDataManagementPlatform/Theme/Themes.cs index f615cf0eae..7708d2abf3 100644 --- a/Application/ResearchDataManagementPlatform/Theme/Themes.cs +++ b/Application/ResearchDataManagementPlatform/Theme/Themes.cs @@ -9,76 +9,78 @@ using Rdmp.UI.Theme; using WeifenLuo.WinFormsUI.Docking; -namespace ResearchDataManagementPlatform.Theme +namespace ResearchDataManagementPlatform.Theme; + +//These classes should not be moved. They are referenced by name in UserSettingsFileUI and UserSettings +[System.ComponentModel.DesignerCategory("")] +public class MyVS2015BlueTheme : VS2015BlueTheme, ITheme { - //These classes should not be moved. 
They are referenced by name in UserSettingsFileUI and UserSettings - [System.ComponentModel.DesignerCategory("")] - public class MyVS2015BlueTheme : VS2015BlueTheme, ITheme + private ThemeExtender _extender; + public bool ApplyThemeToMenus { get; set; } + + public MyVS2015BlueTheme() { - private ThemeExtender _extender; - public bool ApplyThemeToMenus { get; set; } - - public MyVS2015BlueTheme() - { - var manager = new System.Resources.ResourceManager("WeifenLuo.WinFormsUI.ThemeVS2015.Resources", typeof(WeifenLuo.WinFormsUI.ThemeVS2015.VS2015ThemeBase).Assembly); - byte[] bytes = (byte[]) manager.GetObject("vs2015blue_vstheme",CultureInfo.CurrentCulture); + var manager = new System.Resources.ResourceManager("WeifenLuo.WinFormsUI.ThemeVS2015.Resources", + typeof(WeifenLuo.WinFormsUI.ThemeVS2015.VS2015ThemeBase).Assembly); + var bytes = (byte[])manager.GetObject("vs2015blue_vstheme", CultureInfo.CurrentCulture); - _extender = new ThemeExtender(Decompress(bytes)); - } + _extender = new ThemeExtender(Decompress(bytes)); + } - public new void ApplyTo(ToolStrip item) + public new void ApplyTo(ToolStrip item) + { + if (ApplyThemeToMenus) { - if(ApplyThemeToMenus) - { - base.ApplyTo(item); - _extender.ApplyTo(item); - } + base.ApplyTo(item); + _extender.ApplyTo(item); } } +} - public class MyVS2015DarkTheme : VS2015DarkTheme, ITheme - { - private ThemeExtender _extender; - public bool ApplyThemeToMenus { get; set; } +public class MyVS2015DarkTheme : VS2015DarkTheme, ITheme +{ + private ThemeExtender _extender; + public bool ApplyThemeToMenus { get; set; } - public MyVS2015DarkTheme() - { - var manager = new System.Resources.ResourceManager("WeifenLuo.WinFormsUI.ThemeVS2015.Resources", typeof(WeifenLuo.WinFormsUI.ThemeVS2015.VS2015ThemeBase).Assembly); - byte[] bytes = (byte[])manager.GetObject("vs2015dark_vstheme", CultureInfo.CurrentCulture); + public MyVS2015DarkTheme() + { + var manager = new System.Resources.ResourceManager("WeifenLuo.WinFormsUI.ThemeVS2015.Resources", + typeof(WeifenLuo.WinFormsUI.ThemeVS2015.VS2015ThemeBase).Assembly); + var bytes = (byte[])manager.GetObject("vs2015dark_vstheme", CultureInfo.CurrentCulture); - _extender = new ThemeExtender(Decompress(bytes)); - } + _extender = new ThemeExtender(Decompress(bytes)); + } - public new void ApplyTo(ToolStrip item) + public new void ApplyTo(ToolStrip item) + { + if (ApplyThemeToMenus) { - if (ApplyThemeToMenus) - { - base.ApplyTo(item); - _extender.ApplyTo(item); - } + base.ApplyTo(item); + _extender.ApplyTo(item); } } +} - public class MyVS2015LightTheme : VS2015LightTheme, ITheme - { - private ThemeExtender _extender; - public bool ApplyThemeToMenus { get; set; } +public class MyVS2015LightTheme : VS2015LightTheme, ITheme +{ + private ThemeExtender _extender; + public bool ApplyThemeToMenus { get; set; } - public MyVS2015LightTheme() - { - var manager = new System.Resources.ResourceManager("WeifenLuo.WinFormsUI.ThemeVS2015.Resources", typeof(WeifenLuo.WinFormsUI.ThemeVS2015.VS2015ThemeBase).Assembly); - byte[] bytes = (byte[])manager.GetObject("vs2015light_vstheme", CultureInfo.CurrentCulture); + public MyVS2015LightTheme() + { + var manager = new System.Resources.ResourceManager("WeifenLuo.WinFormsUI.ThemeVS2015.Resources", + typeof(WeifenLuo.WinFormsUI.ThemeVS2015.VS2015ThemeBase).Assembly); + var bytes = (byte[])manager.GetObject("vs2015light_vstheme", CultureInfo.CurrentCulture); - _extender = new ThemeExtender(Decompress(bytes)); - } + _extender = new ThemeExtender(Decompress(bytes)); + } - public new void ApplyTo(ToolStrip 
item) + public new void ApplyTo(ToolStrip item) + { + if (ApplyThemeToMenus) { - if (ApplyThemeToMenus) - { - base.ApplyTo(item); - _extender.ApplyTo(item); - } + base.ApplyTo(item); + _extender.ApplyTo(item); } } } \ No newline at end of file diff --git a/Application/ResearchDataManagementPlatform/Updates/Asset.cs b/Application/ResearchDataManagementPlatform/Updates/Asset.cs index dd46c34fe2..efe74e0c79 100644 --- a/Application/ResearchDataManagementPlatform/Updates/Asset.cs +++ b/Application/ResearchDataManagementPlatform/Updates/Asset.cs @@ -6,25 +6,24 @@ using System; -namespace ResearchDataManagementPlatform.Updates +namespace ResearchDataManagementPlatform.Updates; + +/// +/// Github Json class +/// +public class Asset { - /// - /// Github Json class - /// - public class Asset - { - public string url { get; set; } - public int id { get; set; } - public string node_id { get; set; } - public string name { get; set; } - public string label { get; set; } - public Uploader uploader { get; set; } - public string content_type { get; set; } - public string state { get; set; } - public int size { get; set; } - public int download_count { get; set; } - public DateTime created_at { get; set; } - public DateTime updated_at { get; set; } - public string browser_download_url { get; set; } - } + public string url { get; set; } + public int id { get; set; } + public string node_id { get; set; } + public string name { get; set; } + public string label { get; set; } + public Uploader uploader { get; set; } + public string content_type { get; set; } + public string state { get; set; } + public int size { get; set; } + public int download_count { get; set; } + public DateTime created_at { get; set; } + public DateTime updated_at { get; set; } + public string browser_download_url { get; set; } } \ No newline at end of file diff --git a/Application/ResearchDataManagementPlatform/Updates/Author.cs b/Application/ResearchDataManagementPlatform/Updates/Author.cs index 4f7d79d89c..607d4960e3 100644 --- a/Application/ResearchDataManagementPlatform/Updates/Author.cs +++ b/Application/ResearchDataManagementPlatform/Updates/Author.cs @@ -4,30 +4,29 @@ // RDMP is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. // You should have received a copy of the GNU General Public License along with RDMP. If not, see . 
-namespace ResearchDataManagementPlatform.Updates +namespace ResearchDataManagementPlatform.Updates; + +/// +/// Github Json class +/// +public class Author { - /// - /// Github Json class - /// - public class Author - { - public string login { get; set; } - public int id { get; set; } - public string node_id { get; set; } - public string avatar_url { get; set; } - public string gravatar_id { get; set; } - public string url { get; set; } - public string html_url { get; set; } - public string followers_url { get; set; } - public string following_url { get; set; } - public string gists_url { get; set; } - public string starred_url { get; set; } - public string subscriptions_url { get; set; } - public string organizations_url { get; set; } - public string repos_url { get; set; } - public string events_url { get; set; } - public string received_events_url { get; set; } - public string type { get; set; } - public bool site_admin { get; set; } - } + public string login { get; set; } + public int id { get; set; } + public string node_id { get; set; } + public string avatar_url { get; set; } + public string gravatar_id { get; set; } + public string url { get; set; } + public string html_url { get; set; } + public string followers_url { get; set; } + public string following_url { get; set; } + public string gists_url { get; set; } + public string starred_url { get; set; } + public string subscriptions_url { get; set; } + public string organizations_url { get; set; } + public string repos_url { get; set; } + public string events_url { get; set; } + public string received_events_url { get; set; } + public string type { get; set; } + public bool site_admin { get; set; } } \ No newline at end of file diff --git a/Application/ResearchDataManagementPlatform/Updates/GHRelease.cs b/Application/ResearchDataManagementPlatform/Updates/GHRelease.cs index b1c540cf2b..7573f93499 100644 --- a/Application/ResearchDataManagementPlatform/Updates/GHRelease.cs +++ b/Application/ResearchDataManagementPlatform/Updates/GHRelease.cs @@ -6,30 +6,29 @@ using System; -namespace ResearchDataManagementPlatform.Updates +namespace ResearchDataManagementPlatform.Updates; + +/// +/// Github Json class +/// +public class GHRelease { - /// - /// Github Json class - /// - public class GHRelease - { - public string url { get; set; } - public string assets_url { get; set; } - public string upload_url { get; set; } - public string html_url { get; set; } - public int id { get; set; } - public string node_id { get; set; } - public string tag_name { get; set; } - public string target_commitish { get; set; } - public string name { get; set; } - public bool draft { get; set; } - public Author author { get; set; } - public bool prerelease { get; set; } - public DateTime created_at { get; set; } - public DateTime published_at { get; set; } - public Asset[] assets { get; set; } - public string tarball_url { get; set; } - public string zipball_url { get; set; } - public string body { get; set; } - } + public string url { get; set; } + public string assets_url { get; set; } + public string upload_url { get; set; } + public string html_url { get; set; } + public int id { get; set; } + public string node_id { get; set; } + public string tag_name { get; set; } + public string target_commitish { get; set; } + public string name { get; set; } + public bool draft { get; set; } + public Author author { get; set; } + public bool prerelease { get; set; } + public DateTime created_at { get; set; } + public DateTime published_at { get; set; } + public 
Asset[] assets { get; set; } + public string tarball_url { get; set; } + public string zipball_url { get; set; } + public string body { get; set; } } \ No newline at end of file diff --git a/Application/ResearchDataManagementPlatform/Updates/GithubReleases.cs b/Application/ResearchDataManagementPlatform/Updates/GithubReleases.cs index b248e2765f..55aad1def3 100644 --- a/Application/ResearchDataManagementPlatform/Updates/GithubReleases.cs +++ b/Application/ResearchDataManagementPlatform/Updates/GithubReleases.cs @@ -4,19 +4,12 @@ // RDMP is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. // You should have received a copy of the GNU General Public License along with RDMP. If not, see . -using System.Collections.Generic; -using System.Linq; -using System.Text; -using System.Threading.Tasks; +namespace ResearchDataManagementPlatform.Updates; -namespace ResearchDataManagementPlatform.Updates +/// +/// Github Json class +/// +public class GithubReleases { - /// - /// Github Json class - /// - public class GithubReleases - { - public GHRelease[] Entries { get; set; } - - } -} + public GHRelease[] Entries { get; set; } +} \ No newline at end of file diff --git a/Application/ResearchDataManagementPlatform/Updates/Uploader.cs b/Application/ResearchDataManagementPlatform/Updates/Uploader.cs index 6282a06af9..85c1b26f8e 100644 --- a/Application/ResearchDataManagementPlatform/Updates/Uploader.cs +++ b/Application/ResearchDataManagementPlatform/Updates/Uploader.cs @@ -4,30 +4,29 @@ // RDMP is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. // You should have received a copy of the GNU General Public License along with RDMP. If not, see . 
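
The Asset, Author, GHRelease and GithubReleases classes in these hunks are plain JSON-mapping POCOs whose lowercase property names mirror the GitHub releases API. A rough sketch of how classes shaped like these are typically consumed, assuming Newtonsoft.Json and a direct HttpClient call; the URL, user-agent string and the installer-filtering logic below are illustrative assumptions, not taken from the patch:

using System;
using System.Linq;
using System.Net.Http;
using Newtonsoft.Json;

// Fetch the releases feed; GitHub requires a User-Agent header on API calls.
using var http = new HttpClient();
http.DefaultRequestHeaders.UserAgent.ParseAdd("rdmp-update-check");
var json = await http.GetStringAsync("https://api.github.com/repos/HicServices/RDMP/releases");

// The endpoint returns a JSON array, so it binds straight onto GHRelease[]; fields such as
// tag_name, assets and browser_download_url line up with the lowercase properties above.
var releases = JsonConvert.DeserializeObject<GHRelease[]>(json) ?? Array.Empty<GHRelease>();

// Example use: pick the newest non-prerelease entry and its Windows installer asset, if any.
var latest = releases.FirstOrDefault(r => !r.prerelease);
var installer = latest?.assets?.FirstOrDefault(a =>
    a.name.EndsWith(".exe", StringComparison.OrdinalIgnoreCase));
Console.WriteLine(installer?.browser_download_url ?? "no installer asset found");
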
-namespace ResearchDataManagementPlatform.Updates +namespace ResearchDataManagementPlatform.Updates; + +/// +/// Github Json class +/// +public class Uploader { - /// - /// Github Json class - /// - public class Uploader - { - public string login { get; set; } - public int id { get; set; } - public string node_id { get; set; } - public string avatar_url { get; set; } - public string gravatar_id { get; set; } - public string url { get; set; } - public string html_url { get; set; } - public string followers_url { get; set; } - public string following_url { get; set; } - public string gists_url { get; set; } - public string starred_url { get; set; } - public string subscriptions_url { get; set; } - public string organizations_url { get; set; } - public string repos_url { get; set; } - public string events_url { get; set; } - public string received_events_url { get; set; } - public string type { get; set; } - public bool site_admin { get; set; } - } + public string login { get; set; } + public int id { get; set; } + public string node_id { get; set; } + public string avatar_url { get; set; } + public string gravatar_id { get; set; } + public string url { get; set; } + public string html_url { get; set; } + public string followers_url { get; set; } + public string following_url { get; set; } + public string gists_url { get; set; } + public string starred_url { get; set; } + public string subscriptions_url { get; set; } + public string organizations_url { get; set; } + public string repos_url { get; set; } + public string events_url { get; set; } + public string received_events_url { get; set; } + public string type { get; set; } + public bool site_admin { get; set; } } \ No newline at end of file diff --git a/Application/ResearchDataManagementPlatform/WindowManagement/ActivateItems.cs b/Application/ResearchDataManagementPlatform/WindowManagement/ActivateItems.cs index 04b8ba9426..c3a6dd8a37 100644 --- a/Application/ResearchDataManagementPlatform/WindowManagement/ActivateItems.cs +++ b/Application/ResearchDataManagementPlatform/WindowManagement/ActivateItems.cs @@ -9,19 +9,14 @@ using System.Diagnostics; using System.IO; using System.Linq; -using System.Reflection; using System.Threading; using System.Threading.Tasks; using System.Windows.Forms; using FAnsi.Discovery; -using MapsDirectlyToDatabaseTable; -using MapsDirectlyToDatabaseTable.Revertable; -using MapsDirectlyToDatabaseTable.Versioning; using Rdmp.Core; using Rdmp.Core.CohortCommitting.Pipeline; using Rdmp.Core.CommandExecution; using Rdmp.Core.CommandExecution.AtomicCommands; -using Rdmp.Core.CommandLine.Interactive; using Rdmp.Core.CommandLine.Runners; using Rdmp.Core.Curation.Data; using Rdmp.Core.Curation.Data.Aggregation; @@ -29,15 +24,19 @@ using Rdmp.Core.Curation.Data.Dashboarding; using Rdmp.Core.Curation.Data.DataLoad; using Rdmp.Core.Curation.Data.Defaults; -using Rdmp.Core.Curation.Data.ImportExport; using Rdmp.Core.Curation.Data.Pipelines; using Rdmp.Core.DataExport.Data; using Rdmp.Core.DataViewing; using Rdmp.Core.Icons.IconProvision; using Rdmp.Core.Logging; +using Rdmp.Core.MapsDirectlyToDatabaseTable; +using Rdmp.Core.MapsDirectlyToDatabaseTable.Revertable; +using Rdmp.Core.MapsDirectlyToDatabaseTable.Versioning; using Rdmp.Core.Providers; using Rdmp.Core.Repositories; -using Rdmp.UI; +using Rdmp.Core.ReusableLibraryCode; +using Rdmp.Core.ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Settings; using Rdmp.UI.AggregationUIs; using Rdmp.UI.CatalogueSummary.LoadEvents; using 
Rdmp.UI.CohortUI.ImportCustomData; @@ -55,7 +54,6 @@ using Rdmp.UI.Rules; using Rdmp.UI.SimpleDialogs; using Rdmp.UI.SimpleDialogs.ForwardEngineering; -using Rdmp.UI.SimpleDialogs.NavigateTo; using Rdmp.UI.SingleControlForms; using Rdmp.UI.SubComponents; using Rdmp.UI.TestsAndSetup.ServicePropogation; @@ -64,399 +62,380 @@ using Rdmp.UI.Wizard; using ResearchDataManagementPlatform.WindowManagement.ContentWindowTracking.Persistence; using ResearchDataManagementPlatform.WindowManagement.WindowArranging; -using ReusableLibraryCode; -using ReusableLibraryCode.Checks; -using ReusableLibraryCode.Comments; -using ReusableLibraryCode.Icons; -using ReusableLibraryCode.Settings; using SixLabors.ImageSharp; using SixLabors.ImageSharp.PixelFormats; using WeifenLuo.WinFormsUI.Docking; using Image = SixLabors.ImageSharp.Image; using Rectangle = System.Drawing.Rectangle; -namespace ResearchDataManagementPlatform.WindowManagement +namespace ResearchDataManagementPlatform.WindowManagement; + +/// +/// Central class for RDMP main application, this class provides access to all the main systems in RDMP user interface such as Emphasis, the RefreshBus, Child +/// provision etc. See IActivateItems for full details +/// +public class ActivateItems : BasicActivateItems, IActivateItems, IRefreshBusSubscriber { - /// - /// Central class for RDMP main application, this class provides acceess to all the main systems in RDMP user interface such as Emphasis, the RefreshBus, Child - /// provision etc. See IActivateItems for full details - /// - public class ActivateItems : BasicActivateItems, IActivateItems, IRefreshBusSubscriber - { - private readonly DockPanel _mainDockPanel; - private readonly WindowManager _windowManager; + private readonly DockPanel _mainDockPanel; + private readonly WindowManager _windowManager; - public WindowFactory WindowFactory { get; private set; } + private WindowFactory WindowFactory { get; } - public ITheme Theme { get; private set; } + public ITheme Theme { get; } - public RefreshBus RefreshBus { get; private set; } - - readonly UIObjectConstructor _constructor = new UIObjectConstructor(); + public RefreshBus RefreshBus { get; } - public IArrangeWindows WindowArranger { get; private set; } - - public override void Publish(IMapsDirectlyToDatabaseTable databaseEntity) + public IArrangeWindows WindowArranger { get; } + + public override void Publish(IMapsDirectlyToDatabaseTable databaseEntity) + { + if (databaseEntity is DatabaseEntity de) + RefreshBus.Publish(this, new RefreshObjectEventArgs(de)); + } + + public override void Show(string title, string message) + { + // if on wrong Thread + if (_mainDockPanel?.InvokeRequired ?? false) { - if(databaseEntity is DatabaseEntity de) - RefreshBus.Publish(this,new RefreshObjectEventArgs(de)); + _mainDockPanel.Invoke(() => Show(title, message)); + return; } - public override void Show(string title, string message) - { - // if on wrong Thread - if (_mainDockPanel?.InvokeRequired ?? 
false) - { - _mainDockPanel.Invoke(() => Show(title, message)); - return; - } + WideMessageBox.Show(title, message, Environment.StackTrace, true, null, WideMessageBoxTheme.Help); + } - WideMessageBox.Show(title,message,Environment.StackTrace,true,null,WideMessageBoxTheme.Help); - } + public ICombineableFactory CommandFactory { get; } + public ICommandExecutionFactory CommandExecutionFactory { get; } + public HistoryProvider HistoryProvider { get; } - public ICombineableFactory CommandFactory { get; private set; } - public ICommandExecutionFactory CommandExecutionFactory { get; private set; } - public HistoryProvider HistoryProvider { get; private set; } + private List ProblemProviders { get; } - public List ProblemProviders { get; private set; } + public ActivateItems(ITheme theme, RefreshBus refreshBus, DockPanel mainDockPanel, + IRDMPPlatformRepositoryServiceLocator repositoryLocator, WindowFactory windowFactory, + WindowManager windowManager, ICheckNotifier globalErrorCheckNotifier) : base(repositoryLocator, + globalErrorCheckNotifier) + { + Theme = theme; + IsWinForms = true; + InteractiveDeletes = true; + WindowFactory = windowFactory; + _mainDockPanel = mainDockPanel; + _windowManager = windowManager; + RefreshBus = refreshBus; - public ActivateItems(ITheme theme,RefreshBus refreshBus, DockPanel mainDockPanel, IRDMPPlatformRepositoryServiceLocator repositoryLocator, WindowFactory windowFactory, WindowManager windowManager, ICheckNotifier globalErrorCheckNotifier):base(repositoryLocator,globalErrorCheckNotifier) - { - Theme = theme; - IsWinForms = true; - InteractiveDeletes = true; - WindowFactory = windowFactory; - _mainDockPanel = mainDockPanel; - _windowManager = windowManager; - RefreshBus = refreshBus; - - RefreshBus.ChildProvider = CoreChildProvider; - - HistoryProvider = new HistoryProvider(repositoryLocator); - - WindowArranger = new WindowArranger(this,_windowManager,_mainDockPanel); - - CommandFactory = new RDMPCombineableFactory(); - CommandExecutionFactory = new RDMPCommandExecutionFactory(this); - - ProblemProviders = new List(); - ProblemProviders.Add(new DataExportProblemProvider()); - ProblemProviders.Add(new CatalogueProblemProvider()); - RefreshProblemProviders(); - - RefreshBus.Subscribe(this); - - // We can run subprocesses - IsAbleToLaunchSubprocesses = true; - } + RefreshBus.ChildProvider = CoreChildProvider; - protected override ICoreChildProvider GetChildProvider() - { - var provider = base.GetChildProvider(); + HistoryProvider = new HistoryProvider(repositoryLocator); - if (RefreshBus != null) - { - RefreshBus.ChildProvider = provider; - } + WindowArranger = new WindowArranger(this, _windowManager, _mainDockPanel); - return provider; - } - + CommandFactory = new RDMPCombineableFactory(); + CommandExecutionFactory = new RDMPCommandExecutionFactory(this); - public Form ShowWindow(Control singleControlForm, bool asDocument = false) - { - // if on wrong Thread - if (_mainDockPanel?.InvokeRequired ?? false) - { - return _mainDockPanel.Invoke
(() => ShowWindow(singleControlForm, asDocument)); - } + ProblemProviders = new List + { + new DataExportProblemProvider(), + new CatalogueProblemProvider() + }; + RefreshProblemProviders(); - int width = singleControlForm.Size.Width + SystemInformation.BorderSize.Width; - int height = singleControlForm.Size.Height + SystemInformation.BorderSize.Height; + RefreshBus.Subscribe(this); - //use the .Text or fallback on .Name - string name = string.IsNullOrWhiteSpace(singleControlForm.Text) - ? singleControlForm.Name ?? singleControlForm.GetType().Name//or worst case scenario use the type name! - : singleControlForm.Text; + // We can run subprocesses + IsAbleToLaunchSubprocesses = true; + } - if(singleControlForm is Form && asDocument) - throw new Exception("Control '" + singleControlForm + "' is a Form and asDocument was passed as true. When asDocument is true you must be a Control not a Form e.g. inherit from RDMPUserControl instead of RDMPForm"); + protected override ICoreChildProvider GetChildProvider() + { + var provider = base.GetChildProvider(); - var c = singleControlForm as RDMPUserControl; - - if(c != null) - c.SetItemActivator(this); + if (RefreshBus != null) RefreshBus.ChildProvider = provider; - var content = WindowFactory.Create(this,singleControlForm,name , null); - - if (asDocument) - content.Show(_mainDockPanel,DockState.Document); - else - content.Show(_mainDockPanel,new Rectangle(0,0,width,height)); - - return content; - } - - public override void RequestItemEmphasis(object sender, EmphasiseRequest request) - { - // if on wrong Thread - if (_mainDockPanel?.InvokeRequired ?? false) - { - _mainDockPanel.Invoke(() => RequestItemEmphasis(sender, request)); - return; - } + return provider; + } - AdjustEmphasiseRequest(request); - - //ensure a relevant Toolbox is available - var descendancy = CoreChildProvider.GetDescendancyListIfAnyFor(request.ObjectToEmphasise); - object root = null; - - if (descendancy != null) - root = descendancy.Parents.FirstOrDefault(); - else - root = request.ObjectToEmphasise; //assume maybe o is a root object itself? - - if (root is CohortIdentificationConfiguration cic) - Activate(cic); - else - if (root != null) - _windowManager.ShowCollectionWhichSupportsRootObjectType(root); - - //really should be a listener now btw since we just launched the relevant Toolbox if it wasn't there before - //Look at assignments to Sender, the invocation list can change the Sender! - var args = new EmphasiseEventArgs(request); - base.OnEmphasise(this,args); - - //might be different than sender that was passed in - if(args.Sender is DockContent content) - content.Activate(); - - //user is being shown the given object so track it as a recent (e.g. GoTo etc) - if(args.Request.ObjectToEmphasise is IMapsDirectlyToDatabaseTable m) - HistoryProvider.Add(m); - } - public override bool SelectEnum(DialogArgs args, Type enumType, out Enum chosen) - { - // if on wrong Thread - if (_mainDockPanel?.InvokeRequired ?? false) - { - Enum result = default; - var rtn = _mainDockPanel.Invoke(()=>SelectEnum(args, enumType, out result)); - chosen = result; - return rtn; - } + public Form ShowWindow(Control singleControlForm, bool asDocument = false) + { + // if on wrong Thread + if (_mainDockPanel?.InvokeRequired ?? 
false) + return _mainDockPanel.Invoke(() => ShowWindow(singleControlForm, asDocument)); - return SelectObject(args, Enum.GetValues(enumType).Cast().ToArray(), out chosen); - } + var width = singleControlForm.Size.Width + SystemInformation.BorderSize.Width; + var height = singleControlForm.Size.Height + SystemInformation.BorderSize.Height; - public override bool SelectType(DialogArgs args, Type[] available,out Type chosen) - { - return SelectObject(args, available, out chosen); - } + //use the .Text or fallback on .Name + var name = string.IsNullOrWhiteSpace(singleControlForm.Text) + ? singleControlForm.Name ?? singleControlForm.GetType().Name //or worst case scenario use the type name! + : singleControlForm.Text; - public override bool CanActivate(object target) + switch (singleControlForm) { - return CommandExecutionFactory.CanActivate(target); + case Form when asDocument: + throw new Exception( + $"Control '{singleControlForm}' is a Form and asDocument was passed as true. When asDocument is true you must be a Control not a Form e.g. inherit from RDMPUserControl instead of RDMPForm"); + case RDMPUserControl c: + c.SetItemActivator(this); + break; } - protected override void ActivateImpl(object o) - { - // if on wrong Thread - if (_mainDockPanel?.InvokeRequired ?? false) - { - _mainDockPanel.Invoke(() => ActivateImpl(o)); - return; - } - if (CommandExecutionFactory.CanActivate(o)) - CommandExecutionFactory.Activate(o); - } + var content = WindowFactory.Create(this, singleControlForm, name, null); - public bool IsRootObjectOfCollection(RDMPCollection collection, object rootObject) - { - //if the collection an arbitrary one then it is definetly not the root collection for anyone - if (collection == RDMPCollection.None) - return false; + if (asDocument) + content.Show(_mainDockPanel, DockState.Document); + else + content.Show(_mainDockPanel, new Rectangle(0, 0, width, height)); - return _windowManager.GetCollectionForRootObject(rootObject) == collection; - } + return content; + } - /// - /// Consults all currently configured IProblemProviders and returns true if any report a problem with the object - /// - /// - /// - public bool HasProblem(object model) + public override void RequestItemEmphasis(object sender, EmphasiseRequest request) + { + // if on wrong Thread + if (_mainDockPanel?.InvokeRequired ?? false) { - return ProblemProviders.Any(p => p.HasProblem(model)); + _mainDockPanel.Invoke(() => RequestItemEmphasis(sender, request)); + return; } - /// - /// Consults all currently configured IProblemProviders and returns the first Problem reported by any about the object or null - /// if there are no problems reported. - /// - /// - /// - public string DescribeProblemIfAny(object model) + AdjustEmphasiseRequest(request); + + //ensure a relevant Toolbox is available + var descendancy = CoreChildProvider.GetDescendancyListIfAnyFor(request.ObjectToEmphasise); + + var root = descendancy != null + ? descendancy.Parents.FirstOrDefault() + : request.ObjectToEmphasise; //assume maybe o is a root object itself? + + if (root is CohortIdentificationConfiguration cic) + Activate(cic); + else if (root != null) + _windowManager.ShowCollectionWhichSupportsRootObjectType(root); + + //really should be a listener now btw since we just launched the relevant Toolbox if it wasn't there before + //Look at assignments to Sender, the invocation list can change the Sender! 
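
The guard that opens most public methods in this class, testing _mainDockPanel?.InvokeRequired ?? false and then re-issuing the call via _mainDockPanel.Invoke, is the standard WinForms recipe for marshalling a call back onto the UI thread; the null-conditional keeps it safe when no dock panel exists (for example in tests). A minimal sketch of the pattern in isolation, relying on the _mainDockPanel field shown above and a hypothetical DoWork method rather than one from the patch:

// Hypothetical method illustrating the re-entrant marshalling guard used throughout ActivateItems.
public void DoWork(string message)
{
    // If we are on a non-UI thread (and a dock panel actually exists), bounce the call across.
    if (_mainDockPanel?.InvokeRequired ?? false)
    {
        _mainDockPanel.Invoke(() => DoWork(message));  // re-enters this method on the UI thread
        return;
    }

    // From here on we are on the UI thread, so touching WinForms controls is safe.
    MessageBox.Show(message);
}
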
+ var args = new EmphasiseEventArgs(request); + OnEmphasise(this, args); + + //might be different than sender that was passed in + if (args.Sender is DockContent content) + content.Activate(); + + //user is being shown the given object so track it as a recent (e.g. GoTo etc) + if (args.Request.ObjectToEmphasise is IMapsDirectlyToDatabaseTable m) + HistoryProvider.Add(m); + } + + public override bool SelectEnum(DialogArgs args, Type enumType, out Enum chosen) + { + // if on wrong Thread + if (_mainDockPanel?.InvokeRequired ?? false) { - return ProblemProviders.Select(p => p.DescribeProblem(model)).FirstOrDefault(desc => desc != null); + Enum result = default; + var rtn = _mainDockPanel.Invoke(() => SelectEnum(args, enumType, out result)); + chosen = result; + return rtn; } - - public string GetDocumentation(Type type) + + return SelectObject(args, Enum.GetValues(enumType).Cast().ToArray(), out chosen); + } + + public override bool SelectType(DialogArgs args, Type[] available, out Type chosen) => + SelectObject(args, available, out chosen); + + public override bool CanActivate(object target) => CommandExecutionFactory.CanActivate(target); + + protected override void ActivateImpl(object o) + { + // if on wrong Thread + if (_mainDockPanel?.InvokeRequired ?? false) { - return RepositoryLocator.CatalogueRepository.CommentStore.GetTypeDocumentationIfExists(type); + _mainDockPanel.Invoke(() => ActivateImpl(o)); + return; } - public string CurrentDirectory { get { return Environment.CurrentDirectory; }} - public DialogResult ShowDialog(Form form) - { - // if on wrong Thread - if (_mainDockPanel?.InvokeRequired ?? false) - { - return _mainDockPanel.Invoke(() => ShowDialog(form)); - } + if (CommandExecutionFactory.CanActivate(o)) + CommandExecutionFactory.Activate(o); + } - return form.ShowDialog(); - } + public bool IsRootObjectOfCollection(RDMPCollection collection, object rootObject) => + //if the collection an arbitrary one then it is definitely not the root collection for anyone + collection != RDMPCollection.None && _windowManager.GetCollectionForRootObject(rootObject) == collection; - public void KillForm(Form f, Exception reason) - { - // if on wrong Thread - if (f.InvokeRequired) - { - f.Invoke(() => KillForm(f,reason)); - return; - } + /// + /// Consults all currently configured IProblemProviders and returns true if any report a problem with the object + /// + /// + /// + public bool HasProblem(object model) + { + return ProblemProviders.Any(p => p.HasProblem(model)); + } - f.Close(); - ExceptionViewer.Show("Window Closed",reason); - } - public void KillForm(Form f, string reason) - { - // if on wrong Thread - if (f.InvokeRequired) - { - f.Invoke(() => KillForm(f, reason)); - return; - } + /// + /// Consults all currently configured IProblemProviders and returns the first Problem reported by any about the object or null + /// if there are no problems reported. + /// + /// + /// + public string DescribeProblemIfAny(object model) + { + return ProblemProviders.Select(p => p.DescribeProblem(model)).FirstOrDefault(desc => desc != null); + } - f.Close(); - ExceptionViewer.Show("Window Closed",reason); - } - public void OnRuleRegistered(IBinderRule rule) + public string GetDocumentation(Type type) => + RepositoryLocator.CatalogueRepository.CommentStore.GetTypeDocumentationIfExists(type); + + public string CurrentDirectory => Environment.CurrentDirectory; + + public DialogResult ShowDialog(Form form) + { + // if on wrong Thread + return _mainDockPanel?.InvokeRequired ?? false + ? 
_mainDockPanel.Invoke(() => ShowDialog(form)) + : form.ShowDialog(); + } + + public void KillForm(Form f, Exception reason) + { + // if on wrong Thread + if (f.InvokeRequired) { - //no special action required + f.Invoke(() => KillForm(f, reason)); + return; } - /// - /// Asks the user if they want to reload a fresh copy with a Yes/No message box. - /// - /// - /// - public bool ShouldReloadFreshCopy(DatabaseEntity databaseEntity) + f.Close(); + ExceptionViewer.Show("Window Closed", reason); + } + + public void KillForm(Form f, string reason) + { + // if on wrong Thread + if (f.InvokeRequired) { - return YesNo(databaseEntity + " is out of date with database, would you like to reload a fresh copy?", - "Object Changed"); + f.Invoke(() => KillForm(f, reason)); + return; } - public T Activate(T2 databaseObject) - where T : RDMPSingleDatabaseObjectControl, new() - where T2 : DatabaseEntity + f.Close(); + WideMessageBox.Show("Window Closed", reason); + } + + public void OnRuleRegistered(IBinderRule rule) + { + //no special action required + } + + /// + /// Asks the user if they want to reload a fresh copy with a Yes/No message box. + /// + /// + /// + public bool ShouldReloadFreshCopy(DatabaseEntity databaseEntity) => + YesNo($"{databaseEntity} is out of date with database, would you like to reload a fresh copy?", + "Object Changed"); + + public T Activate(T2 databaseObject) + where T : RDMPSingleDatabaseObjectControl, new() + where T2 : DatabaseEntity => + Activate(databaseObject, CoreIconProvider.GetImage(databaseObject)); + + public T Activate(IPersistableObjectCollection collection) + where T : Control, IObjectCollectionControl, new() + + { + //if the window is already open + if (PopExisting(typeof(T), collection, out var existingHostedControlInstance)) { - return Activate(databaseObject, CoreIconProvider.GetImage(databaseObject)); + //just update its state + var existing = (T)existingHostedControlInstance; + existing.SetCollection(this, collection); + + return existing; } - - public T Activate(IPersistableObjectCollection collection) - where T: Control,IObjectCollectionControl,new() - { - //if the window is already open - if (PopExisting(typeof(T), collection, out var existingHostedControlInstance)) - { - //just update its state - var existing = (T) existingHostedControlInstance; - existing.SetCollection(this,collection); - return existing; - } - + var uiInstance = new T(); + Activate(uiInstance, collection); + return uiInstance; + } - var uiInstance = new T(); - Activate(uiInstance, collection); - return uiInstance; - } + private T Activate(T2 databaseObject, Image tabImage) + where T : RDMPSingleDatabaseObjectControl, new() + where T2 : DatabaseEntity + { + if (PopExisting(typeof(T), databaseObject, out var existingHostedControlInstance)) + return (T)existingHostedControlInstance; - private T Activate(T2 databaseObject, Image tabImage) - where T : RDMPSingleDatabaseObjectControl, new() - where T2 : DatabaseEntity - { - if (PopExisting(typeof(T), databaseObject, out var existingHostedControlInstance)) - return (T)existingHostedControlInstance; + var uiInstance = new T(); + var floatable = WindowFactory.Create(this, RefreshBus, uiInstance, tabImage, databaseObject); + floatable.Show(_mainDockPanel, DockState.Document); + uiInstance.SetDatabaseObject(this, databaseObject); - var uiInstance = new T(); - var floatable = WindowFactory.Create(this,RefreshBus, uiInstance, tabImage, databaseObject); - floatable.Show(_mainDockPanel, DockState.Document); - uiInstance.SetDatabaseObject(this, 
databaseObject); + SetTabText(floatable, uiInstance); - SetTabText(floatable,uiInstance); + return uiInstance; + } - return uiInstance; - } + private bool PopExisting(Type windowType, IMapsDirectlyToDatabaseTable databaseObject, + out Control existingHostedControlInstance) + { + var existing = _windowManager.GetActiveWindowIfAnyFor(windowType, databaseObject); + existingHostedControlInstance = null; - private bool PopExisting(Type windowType, IMapsDirectlyToDatabaseTable databaseObject, out Control existingHostedControlInstance) + if (existing != null) { - var existing = _windowManager.GetActiveWindowIfAnyFor(windowType, databaseObject); - existingHostedControlInstance = null; + existingHostedControlInstance = existing.Control; + existing.Activate(); - if (existing != null) - { - existingHostedControlInstance = existing.Control; - existing.Activate(); + // only refresh if there are changes to the underlying object + if (databaseObject is IRevertable r && + r.HasLocalChanges().Evaluation == ChangeDescription.DatabaseCopyDifferent) + existing.HandleUserRequestingTabRefresh(this); + } - // only refresh if there are changes to the underlying object - if(databaseObject is IRevertable r && r.HasLocalChanges().Evaluation == ChangeDescription.DatabaseCopyDifferent) - existing.HandleUserRequestingTabRefresh(this); - } + return existing != null; + } - return existing != null; - } + private bool PopExisting(Type windowType, IPersistableObjectCollection collection, + out Control existingHostedControlInstance) + { + var existing = _windowManager.GetActiveWindowIfAnyFor(windowType, collection); + existingHostedControlInstance = null; - private bool PopExisting(Type windowType, IPersistableObjectCollection collection, out Control existingHostedControlInstance) + if (existing != null) { - var existing = _windowManager.GetActiveWindowIfAnyFor(windowType, collection); - existingHostedControlInstance = null; - - if (existing != null) - { - existingHostedControlInstance = existing.Control; - existing.Activate(); - - // only refresh if there are changes to some of the underlying objects - if(collection.DatabaseObjects.OfType().Any(r=>r.HasLocalChanges().Evaluation == ChangeDescription.DatabaseCopyDifferent)) - existing.HandleUserRequestingTabRefresh(this); - } + existingHostedControlInstance = existing.Control; + existing.Activate(); - return existing != null; + // only refresh if there are changes to some of the underlying objects + if (collection.DatabaseObjects.OfType().Any(r => + r.HasLocalChanges().Evaluation == ChangeDescription.DatabaseCopyDifferent)) + existing.HandleUserRequestingTabRefresh(this); } - public DockContent Activate(DeserializeInstruction instruction, IActivateItems activator) - { - if (instruction.DatabaseObject != null && instruction.ObjectCollection != null) - throw new ArgumentException("DeserializeInstruction cannot have both a DatabaseObject and an ObjectCollection"); - var c = (Control)_constructor.Construct(instruction.UIControlType,activator,true); + return existing != null; + } + + public DockContent Activate(DeserializeInstruction instruction, IActivateItems activator) + { + if (instruction.DatabaseObject != null && instruction.ObjectCollection != null) + throw new ArgumentException( + "DeserializeInstruction cannot have both a DatabaseObject and an ObjectCollection"); - var uiInstance = c as IRDMPSingleDatabaseObjectControl; - var uiCollection = c as IObjectCollectionControl; + var c = (Control)UIObjectConstructor.Construct(instruction.UIControlType, activator, true); + switch 
(c) + { //it has a database object so call SetDatabaseObject - if (uiCollection != null) - //if we get here then Instruction wasn't for a + //if we get here then Instruction wasn't for a + case IObjectCollectionControl uiCollection: return Activate(uiCollection, instruction.ObjectCollection); - else - if (uiInstance != null) + case IRDMPSingleDatabaseObjectControl uiInstance: { var databaseObject = instruction.DatabaseObject; @@ -464,608 +443,557 @@ public DockContent Activate(DeserializeInstruction instruction, IActivateItems a if (databaseObject == null) return null; - DockContent floatable = WindowFactory.Create(this,RefreshBus, uiInstance,CoreIconProvider.GetImage(databaseObject), databaseObject); + DockContent floatable = WindowFactory.Create(this, RefreshBus, uiInstance, + CoreIconProvider.GetImage(databaseObject), databaseObject); floatable.Show(_mainDockPanel, DockState.Document); try { - uiInstance.SetDatabaseObject(this,(DatabaseEntity) databaseObject); - SetTabText(floatable,uiInstance); + uiInstance.SetDatabaseObject(this, (DatabaseEntity)databaseObject); + SetTabText(floatable, uiInstance); } catch (Exception e) { floatable.Close(); - throw new Exception("SetDatabaseObject failed on Control of Type '"+instruction.UIControlType.Name+"', control closed, see inner Exception for details",e); + throw new Exception( + $"SetDatabaseObject failed on Control of Type '{instruction.UIControlType.Name}', control closed, see inner Exception for details", + e); } return floatable; } - else - { + default: return (DockContent)activator.ShowWindow(c, true); - } } + } - private void SetTabText(DockContent floatable, INamedTab tab) - { - string tabText = tab.GetTabName(); - string tabToolTipText = tab.GetTabToolTip(); + private static void SetTabText(DockContent floatable, INamedTab tab) + { + var tabText = tab.GetTabName(); + var tabToolTipText = tab.GetTabToolTip(); - floatable.TabText = tabText; + floatable.TabText = tabText; - // set tool tip to the full tab name or custom representation - floatable.ToolTipText = string.IsNullOrEmpty(tabToolTipText) ? tabText : tabToolTipText; + // set tool tip to the full tab name or custom representation + floatable.ToolTipText = string.IsNullOrEmpty(tabToolTipText) ? 
tabText : tabToolTipText; - if (floatable != null && floatable.ParentForm != null) - floatable.ParentForm.Text = tabText + " - RDMP"; - } + if (floatable.ParentForm != null) + floatable.ParentForm.Text = $"{tabText} - RDMP"; + } + + public PersistableObjectCollectionDockContent Activate(IObjectCollectionControl collectionControl, + IPersistableObjectCollection objectCollection) + { + var floatable = WindowFactory.Create(this, collectionControl, objectCollection, null); + floatable.Show(_mainDockPanel, DockState.Document); + return floatable; + } - public PersistableObjectCollectionDockContent Activate(IObjectCollectionControl collectionControl, IPersistableObjectCollection objectCollection) + public void RefreshBus_RefreshObject(object sender, RefreshObjectEventArgs e) + { + // if we don't want to do selective refresh or can't (because partial refreshes are not supported on the type) + if (HardRefresh || !UserSettings.SelectiveRefresh || !CoreChildProvider.SelectiveRefresh(e.Object)) { - var floatable = WindowFactory.Create(this,collectionControl,objectCollection, null); - floatable.Show(_mainDockPanel, DockState.Document); - return floatable; + //update the child provider with a full refresh + GetChildProvider(); + HardRefresh = false; } - public void RefreshBus_RefreshObject(object sender, RefreshObjectEventArgs e) + RefreshProblemProviders(); + } + + private void RefreshProblemProviders() + { + foreach (var p in ProblemProviders) + p.RefreshProblems(CoreChildProvider); + } + + /// + public override bool YesNo(DialogArgs args, out bool chosen) + { + // if on wrong Thread + if (_mainDockPanel?.InvokeRequired ?? false) { - // if we don't want to do selective refresh or can't (because partial refreshes are not supported on the type) - if(HardRefresh || !UserSettings.SelectiveRefresh || !CoreChildProvider.SelectiveRefresh(e.Object)) - { - //update the child provider with a full refresh - GetChildProvider(); - HardRefresh = false; - } - RefreshProblemProviders(); + bool result = default; + var rtn = _mainDockPanel.Invoke(() => YesNo(args, out result)); + chosen = result; + return rtn; } - private void RefreshProblemProviders() + var dr = MessageBox.Show(args.TaskDescription ?? args.EntryLabel, args.WindowTitle, MessageBoxButtons.YesNo); + chosen = dr == DialogResult.Yes; + return dr switch { - foreach (IProblemProvider p in ProblemProviders) - p.RefreshProblems(CoreChildProvider); - } + DialogResult.Yes => true, + DialogResult.No => true, + _ => false + }; + } - /// - public override bool YesNo(DialogArgs args,out bool chosen) + public override bool TypeText(DialogArgs args, int maxLength, string initialText, out string text, + bool requireSaneHeaderText) + { + // if on wrong Thread + if (_mainDockPanel?.InvokeRequired ?? false) { - // if on wrong Thread - if (_mainDockPanel?.InvokeRequired ?? false) + string result = default; + var rtn = _mainDockPanel.Invoke(() => + TypeText(args, maxLength, initialText, out result, requireSaneHeaderText)); + text = result; + return rtn; + } + + var textTyper = + new TypeTextOrCancelDialog(args, maxLength, initialText, false, maxLength > MultiLineLengthThreshold) { - bool result = default; - var rtn = _mainDockPanel.Invoke(() => YesNo(args, out result)); - chosen = result; - return rtn; - } + RequireSaneHeaderText = requireSaneHeaderText + }; - var dr = MessageBox.Show(args.TaskDescription ?? args.EntryLabel, args.WindowTitle, MessageBoxButtons.YesNo); + text = textTyper.ShowDialog() == DialogResult.OK ? 
textTyper.ResultText : null; + return !string.IsNullOrWhiteSpace(text); + } - if (dr == DialogResult.Yes) - { - chosen = true; - return true; - } + public override DiscoveredDatabase SelectDatabase(bool allowDatabaseCreation, string taskDescription) + { + // if on wrong Thread + if (_mainDockPanel?.InvokeRequired ?? false) + return _mainDockPanel.Invoke(() => SelectDatabase(allowDatabaseCreation, taskDescription)); - if (dr == DialogResult.No) - { - chosen = false; - return true; - } + using var dialog = new ServerDatabaseTableSelectorDialog(taskDescription, false, true, this); + dialog.ShowDialog(); + return dialog.DialogResult == DialogResult.OK ? dialog.SelectedDatabase : null; + } - chosen = false; - return false; - } + public override DiscoveredTable SelectTable(bool allowDatabaseCreation, string taskDescription) + { + // if on wrong Thread + if (_mainDockPanel?.InvokeRequired ?? false) + return _mainDockPanel.Invoke(() => SelectTable(allowDatabaseCreation, taskDescription)); - public override bool TypeText(DialogArgs args, int maxLength, string initialText, out string text, bool requireSaneHeaderText) + using var dialog = new ServerDatabaseTableSelectorDialog(taskDescription, true, true, this) { - // if on wrong Thread - if (_mainDockPanel?.InvokeRequired ?? false) - { - string result = default; - var rtn = _mainDockPanel.Invoke(() => TypeText(args,maxLength,initialText,out result,requireSaneHeaderText)); - text = result; - return rtn; - } + AllowTableValuedFunctionSelection = true + }; - var textTyper = new TypeTextOrCancelDialog(args, maxLength, initialText, allowBlankText: false, multiLine: maxLength > MultiLineLengthThreshold) - { - RequireSaneHeaderText = requireSaneHeaderText - }; + dialog.ShowDialog(); - text = textTyper.ShowDialog() == DialogResult.OK ? textTyper.ResultText : null; - return !string.IsNullOrWhiteSpace(text); - } + return dialog.DialogResult == DialogResult.OK ? dialog.SelectedTable : null; + } - public override DiscoveredDatabase SelectDatabase(bool allowDatabaseCreation, string taskDescription) + public override void ShowException(string errorText, Exception exception) + { + // if on wrong Thread + if (_mainDockPanel?.InvokeRequired ?? false) { - // if on wrong Thread - if (_mainDockPanel?.InvokeRequired ?? false) - { - return _mainDockPanel.Invoke(() => SelectDatabase(allowDatabaseCreation,taskDescription)); - } + _mainDockPanel.Invoke(() => ShowException(errorText, exception)); + return; + } - var dialog = new ServerDatabaseTableSelectorDialog(taskDescription,false,true,this); - dialog.ShowDialog(); - - if (dialog.DialogResult != DialogResult.OK) - return null; + ExceptionViewer.Show(errorText, exception); + } - return dialog.SelectedDatabase; + public override void Wait(string title, Task task, CancellationTokenSource cts) + { + // if on wrong Thread + if (_mainDockPanel?.InvokeRequired ?? false) + { + _mainDockPanel.Invoke(() => Wait(title, task, cts)); + return; } - public override DiscoveredTable SelectTable(bool allowDatabaseCreation, string taskDescription) - { - // if on wrong Thread - if (_mainDockPanel?.InvokeRequired ?? 
false) - { - return _mainDockPanel.Invoke(() => SelectTable(allowDatabaseCreation, taskDescription)); - } + var ui = new WaitUI(title, task, cts); + ui.ShowDialog(); + } - var dialog = new ServerDatabaseTableSelectorDialog(taskDescription,true,true,this); - dialog.AllowTableValuedFunctionSelection = true; - - dialog.ShowDialog(); - if (dialog.DialogResult != DialogResult.OK) - return null; + public override IEnumerable GetIgnoredCommands() + { + yield return typeof(ExecuteCommandRefreshObject); + yield return typeof(ExecuteCommandChangeExtractability); + yield return typeof(ExecuteCommandOpenInExplorer); + yield return typeof(ExecuteCommandCreateNewFileBasedProcessTask); + } - return dialog.SelectedTable; - } - public override void ShowException(string errorText, Exception exception) - { - // if on wrong Thread - if (_mainDockPanel?.InvokeRequired ?? false) - { - _mainDockPanel.Invoke(() => ShowException(errorText, exception)); - return; - } + public override IMapsDirectlyToDatabaseTable SelectOne(DialogArgs args, + IMapsDirectlyToDatabaseTable[] availableObjects) + { + // if on wrong Thread + if (_mainDockPanel?.InvokeRequired ?? false) + return _mainDockPanel.Invoke(() => SelectOne(args, availableObjects)); - ExceptionViewer.Show(errorText, exception); + if (!availableObjects.Any()) + { + MessageBox.Show($"There are no compatible objects in your RMDP for:{Environment.NewLine}{args}"); + return null; } - public override void Wait(string title, Task task, CancellationTokenSource cts) - { - // if on wrong Thread - if (_mainDockPanel?.InvokeRequired ?? false) - { - _mainDockPanel.Invoke(() => Wait(title,task,cts)); - return; - } + //if there is only one object available to select + if (availableObjects.Length == 1) + if (args.AllowAutoSelect) + return availableObjects[0]; - var ui = new WaitUI(title,task,cts); - ui.ShowDialog(); - } + if (SelectObject(args, availableObjects, out var selected)) return selected; + return null; //user didn't select one of the IMapsDirectlyToDatabaseTable objects shown in the dialog + } - public override IEnumerable GetIgnoredCommands() + public override bool SelectObject(DialogArgs args, T[] available, out T selected) + { + // if on wrong Thread + if (_mainDockPanel?.InvokeRequired ?? false) { - yield return typeof(ExecuteCommandRefreshObject); - yield return typeof(ExecuteCommandChangeExtractability); - yield return typeof (ExecuteCommandOpenInExplorer); - yield return typeof (ExecuteCommandCreateNewFileBasedProcessTask); + T result = default; + var rtn = _mainDockPanel.Invoke(() => SelectObject(args, available, out result)); + selected = result; + return rtn; } - - public override IMapsDirectlyToDatabaseTable SelectOne(DialogArgs args, IMapsDirectlyToDatabaseTable[] availableObjects) + var pick = new SelectDialog(args, this, available, false); + + if (pick.ShowDialog() == DialogResult.OK) { - // if on wrong Thread - if (_mainDockPanel?.InvokeRequired ?? 
false) - { - return _mainDockPanel.Invoke(() => SelectOne(args, availableObjects)); - } + selected = pick.Selected; + return true; + } - if (!availableObjects.Any()) - { - MessageBox.Show($"There are no compatible objects in your RMDP for:{Environment.NewLine}{args}"); - return null; - } + selected = default; + return false; + } - //if there is only one object available to select - if (availableObjects.Length == 1) - if(args.AllowAutoSelect) - { - return availableObjects[0]; - } + public override bool SelectObjects(DialogArgs args, T[] available, out T[] selected) + { + // if on wrong Thread + if (_mainDockPanel?.InvokeRequired ?? false) + { + T[] result = default; + var rtn = _mainDockPanel.Invoke(() => SelectObjects(args, available, out result)); + selected = result; + return rtn; + } - if(SelectObject(args,availableObjects, out var selected)) - { - return selected; - } + var pick = new SelectDialog(args, this, available, false) + { + AllowMultiSelect = true + }; - return null; //user didn't select one of the IMapsDirectlyToDatabaseTable objects shown in the dialog + if (pick.ShowDialog() == DialogResult.OK) + { + selected = pick.MultiSelected.ToArray(); + return true; } - public override bool SelectObject(DialogArgs args, T[] available, out T selected) - { - // if on wrong Thread - if (_mainDockPanel?.InvokeRequired ?? false) - { - T result = default; - var rtn = _mainDockPanel.Invoke(() => SelectObject(args, available, out result)); - selected = result; - return rtn; - } + selected = default; + return false; + } - var pick = new SelectDialog(args,this,available,false); + public override DirectoryInfo SelectDirectory(string prompt) + { + // if on wrong Thread + if (_mainDockPanel?.InvokeRequired ?? false) return _mainDockPanel.Invoke(() => SelectDirectory(prompt)); - if (pick.ShowDialog() == DialogResult.OK) - { - selected = pick.Selected; - return true; - } + using var fb = new FolderBrowserDialog(); + return fb.ShowDialog() == DialogResult.OK ? new DirectoryInfo(fb.SelectedPath) : null; + } - selected = default(T); - return false; - } + public override FileInfo SelectFile(string prompt) + { + // if on wrong Thread + return _mainDockPanel?.InvokeRequired ?? false + ? _mainDockPanel.Invoke(() => SelectFile(prompt)) + : SelectFile(prompt, null, null); + } - public override bool SelectObjects(DialogArgs args, T[] available, out T[] selected) - { - // if on wrong Thread - if (_mainDockPanel?.InvokeRequired ?? false) - { - T[] result = default; - var rtn = _mainDockPanel.Invoke(() => SelectObjects(args, available, out result)); - selected = result; - return rtn; - } + public override FileInfo SelectFile(string prompt, string patternDescription, string pattern) + { + // if on wrong Thread + if (_mainDockPanel?.InvokeRequired ?? false) + return _mainDockPanel.Invoke(() => SelectFile(prompt, patternDescription, pattern)); - var pick = new SelectDialog(args,this, available, false); - pick.AllowMultiSelect = true; + using var fb = new OpenFileDialog { CheckFileExists = false, Multiselect = false }; + if (patternDescription != null && pattern != null) + fb.Filter = $"{patternDescription}|{pattern}"; - if (pick.ShowDialog() == DialogResult.OK) - { - selected = pick.MultiSelected.ToArray(); - return true; - } + if (fb.ShowDialog() == DialogResult.OK) + // entering "null" in a winforms file dialog will return something like "D:\Blah\null" + return string.Equals(Path.GetFileName(fb.FileName), "null", StringComparison.CurrentCultureIgnoreCase) + ? 
null + : new FileInfo(fb.FileName); - selected = default(T[]); - return false; - } - public override DirectoryInfo SelectDirectory(string prompt) - { - // if on wrong Thread - if (_mainDockPanel?.InvokeRequired ?? false) - { - return _mainDockPanel.Invoke(() => SelectDirectory(prompt)); - } + return null; + } - using (var fb = new FolderBrowserDialog()) - { - if (fb.ShowDialog() == DialogResult.OK) - return new DirectoryInfo(fb.SelectedPath); - - return null; - } - } + public override FileInfo[] SelectFiles(string prompt, string patternDescription, string pattern) + { + // if on wrong Thread + if (_mainDockPanel?.InvokeRequired ?? false) + return _mainDockPanel.Invoke(() => SelectFiles(prompt, patternDescription, pattern)); - public override FileInfo SelectFile(string prompt) - { - // if on wrong Thread - if (_mainDockPanel?.InvokeRequired ?? false) - { - return _mainDockPanel.Invoke(() => SelectFile(prompt)); - } + using var fb = new OpenFileDialog { CheckFileExists = false, Multiselect = true }; + if (patternDescription != null && pattern != null) + fb.Filter = $"{patternDescription}|{pattern}"; - return SelectFile(prompt, null, null); - } + return fb.ShowDialog() == DialogResult.OK ? fb.FileNames.Select(f => new FileInfo(f)).ToArray() : null; + } - public override FileInfo SelectFile(string prompt, string patternDescription, string pattern) + protected override bool SelectValueTypeImpl(DialogArgs args, Type paramType, object initialValue, out object chosen) + { + // if on wrong Thread + if (_mainDockPanel?.InvokeRequired ?? false) { - // if on wrong Thread - if (_mainDockPanel?.InvokeRequired ?? false) - { - return _mainDockPanel.Invoke(() => SelectFile(prompt, patternDescription,pattern)); - } - - using (var fb = new OpenFileDialog {CheckFileExists = false,Multiselect = false}) - { - if (patternDescription != null && pattern != null) - fb.Filter = patternDescription + "|" + pattern; + object result = default; + var rtn = _mainDockPanel.Invoke(() => SelectValueTypeImpl(args, paramType, initialValue, out result)); + chosen = result; + return rtn; + } - if (fb.ShowDialog() == DialogResult.OK) - { - // entering "null" in a winforms file dialog will return something like "D:\Blah\null" - if (string.Equals(Path.GetFileName(fb.FileName),"null", StringComparison.CurrentCultureIgnoreCase)) - return null; + //whatever else it is use string + var typeTextDialog = new TypeTextOrCancelDialog(args, 1000, initialValue?.ToString()); - return new FileInfo(fb.FileName); - } - - return null; - } - } - - public override FileInfo[] SelectFiles(string prompt, string patternDescription, string pattern) + if (typeTextDialog.ShowDialog() == DialogResult.OK) { - // if on wrong Thread - if (_mainDockPanel?.InvokeRequired ?? false) - { - return _mainDockPanel.Invoke(() => SelectFiles(prompt, patternDescription, pattern)); - } + chosen = UsefulStuff.ChangeType(typeTextDialog.ResultText, paramType); + return true; + } - using (var fb = new OpenFileDialog {CheckFileExists = false,Multiselect = true}) - { - if (patternDescription != null && pattern != null) - fb.Filter = patternDescription + "|" + pattern; + chosen = null; + return false; + } - if (fb.ShowDialog() == DialogResult.OK) - return fb.FileNames.Select(f=>new FileInfo(f)).ToArray(); - - return null; - } + public override IMapsDirectlyToDatabaseTable[] SelectMany(DialogArgs args, Type arrayElementType, + IMapsDirectlyToDatabaseTable[] availableObjects) + { + // if on wrong Thread + if (_mainDockPanel?.InvokeRequired ?? 
false) + return _mainDockPanel.Invoke(() => SelectMany(args, arrayElementType, availableObjects)); + + if (!availableObjects.Any()) + { + MessageBox.Show($"There are no '{arrayElementType.Name}' objects in your RMDP"); + return null; } - protected override bool SelectValueTypeImpl(DialogArgs args, Type paramType, object initialValue, out object chosen) + using var selectDialog = new SelectDialog(args, this, availableObjects, false) { - // if on wrong Thread - if (_mainDockPanel?.InvokeRequired ?? false) - { - object result = default; - var rtn = _mainDockPanel.Invoke(() => SelectValueTypeImpl(args, paramType, initialValue, out result)); - chosen = result; - return rtn; - } + AllowMultiSelect = true + }; - //whatever else it is use string - var typeTextDialog = new TypeTextOrCancelDialog(args,1000,initialValue?.ToString()); + if (selectDialog.ShowDialog() != DialogResult.OK) return null; + var ms = selectDialog.MultiSelected.ToList(); + var toReturn = Array.CreateInstance(arrayElementType, ms.Count); - if (typeTextDialog.ShowDialog() == DialogResult.OK) - { - chosen = UsefulStuff.ChangeType(typeTextDialog.ResultText, paramType); - return true; - } + for (var i = 0; i < ms.Count; i++) + toReturn.SetValue(ms[i], i); - chosen = null; - return false; - } + return toReturn.Cast().ToArray(); + } - public override IMapsDirectlyToDatabaseTable[] SelectMany(DialogArgs args, Type arrayElementType, - IMapsDirectlyToDatabaseTable[] availableObjects) + public override List GetDelegates() + { + return new List { - // if on wrong Thread - if (_mainDockPanel?.InvokeRequired ?? false) - { - return _mainDockPanel.Invoke(() => SelectMany(args, arrayElementType, availableObjects)); - } - - if (!availableObjects.Any()) - { - MessageBox.Show("There are no '" + arrayElementType.Name + "' objects in your RMDP"); - return null; - } + new(typeof(IActivateItems), true, p => this) + }; + } - var selectDialog = new SelectDialog(args, this, availableObjects, false); - selectDialog.AllowMultiSelect = true; - - if (selectDialog.ShowDialog() == DialogResult.OK) + public void StartSession(string sessionName, IEnumerable initialObjects, + string initialSearch) + { + if (initialObjects == null) + { + initialObjects = SelectMany(new DialogArgs { - var ms = selectDialog.MultiSelected.ToList(); - var toReturn = Array.CreateInstance(arrayElementType, ms.Count); + WindowTitle = sessionName.StartsWith(ExecuteCommandStartSession.FindResultsTitle) + ? "Find Multiple" + : "Session Objects", + TaskDescription = + "Pick which objects you want added to the session window. You can always add more later", + InitialSearchText = initialSearch, - for(int i = 0;i().ToArray(); - } + IsFind = sessionName.StartsWith(ExecuteCommandStartSession.FindResultsTitle) + }, typeof(IMapsDirectlyToDatabaseTable), CoreChildProvider.GetAllSearchables().Keys.ToArray())?.ToList(); - return null; + if (initialObjects?.Any() != true) + // user cancelled picking objects + return; } - public override List GetDelegates() + var panel = WindowFactory.Create(this, new SessionCollectionUI(), new SessionCollection(sessionName) { - return new List - { - new CommandInvokerDelegate(typeof(IActivateItems),true,(p)=>this) - }; - } - public void StartSession(string sessionName, IEnumerable initialObjects, string initialSearch) - { - if(initialObjects == null) - { - initialObjects = SelectMany(new DialogArgs - { - WindowTitle = sessionName.StartsWith(ExecuteCommandStartSession.FindResultsTitle) ? 
"Find Multiple" : "Session Objects", - TaskDescription = "Pick which objects you want added to the session window. You can always add more later", - InitialSearchText = initialSearch, + DatabaseObjects = initialObjects.ToList() + }, Image.Load(CatalogueIcons.WindowLayout)); + panel.Show(_mainDockPanel, DockState.DockLeft); + } - IsFind = sessionName.StartsWith(ExecuteCommandStartSession.FindResultsTitle), - },typeof(IMapsDirectlyToDatabaseTable),CoreChildProvider.GetAllSearchables().Keys.ToArray())?.ToList(); - if(initialObjects == null || initialObjects.Count() == 0) - { - // user cancelled picking objects - return; - } - } + /// + public IEnumerable GetSessions() => _windowManager.GetAllWindows(); - var panel = WindowFactory.Create(this,new SessionCollectionUI(),new SessionCollection(sessionName) - { - DatabaseObjects = initialObjects.ToList() - },Image.Load(CatalogueIcons.WindowLayout)); - panel.Show(_mainDockPanel,DockState.DockLeft); - } - - - /// - public IEnumerable GetSessions() + public override IPipelineRunner GetPipelineRunner(DialogArgs args, IPipelineUseCase useCase, IPipeline pipeline) + { + var configureAndExecuteDialog = new ConfigureAndExecutePipelineUI(args, useCase, this) { - return _windowManager.GetAllWindows(); - } + Dock = DockStyle.Fill + }; - public override IPipelineRunner GetPipelineRunner(DialogArgs args,IPipelineUseCase useCase, IPipeline pipeline) - { - ConfigureAndExecutePipelineUI configureAndExecuteDialog = new ConfigureAndExecutePipelineUI(args,useCase, this); - configureAndExecuteDialog.Dock = DockStyle.Fill; - - return configureAndExecuteDialog; - } + return configureAndExecuteDialog; + } - public override CohortCreationRequest GetCohortCreationRequest(ExternalCohortTable externalCohortTable, IProject project, string cohortInitialDescription) - { - // if on wrong Thread - if (_mainDockPanel?.InvokeRequired ?? false) - { - return _mainDockPanel.Invoke(() => GetCohortCreationRequest(externalCohortTable, project, cohortInitialDescription)); - } + public override CohortCreationRequest GetCohortCreationRequest(ExternalCohortTable externalCohortTable, + IProject project, string cohortInitialDescription) + { + // if on wrong Thread + if (_mainDockPanel?.InvokeRequired ?? false) + return _mainDockPanel.Invoke(() => + GetCohortCreationRequest(externalCohortTable, project, cohortInitialDescription)); - var ui = new CohortCreationRequestUI(this,externalCohortTable,project); - - if(!string.IsNullOrWhiteSpace(cohortInitialDescription)) - ui.CohortDescription = cohortInitialDescription + " (" + Environment.UserName + " - " + DateTime.Now + ")"; + var ui = new CohortCreationRequestUI(this, externalCohortTable, project); - if (ui.ShowDialog() != DialogResult.OK) - return null; + if (!string.IsNullOrWhiteSpace(cohortInitialDescription)) + ui.CohortDescription = $"{cohortInitialDescription} ({Environment.UserName} - {DateTime.Now})"; - return ui.Result; - } + return ui.ShowDialog() == DialogResult.OK ? ui.Result : null; + } - public override ICatalogue CreateAndConfigureCatalogue(ITableInfo tableInfo, ColumnInfo[] extractionIdentifierColumns, string initialDescription, IProject projectSpecific, string folder) - { - // if on wrong Thread - if (_mainDockPanel?.InvokeRequired ?? 
false) - { - return _mainDockPanel.Invoke(() => CreateAndConfigureCatalogue(tableInfo, extractionIdentifierColumns, initialDescription,projectSpecific,folder)); - } + public override ICatalogue CreateAndConfigureCatalogue(ITableInfo tableInfo, + ColumnInfo[] extractionIdentifierColumns, string initialDescription, IProject projectSpecific, string folder) + { + // if on wrong Thread + if (_mainDockPanel?.InvokeRequired ?? false) + return _mainDockPanel.Invoke(() => CreateAndConfigureCatalogue(tableInfo, extractionIdentifierColumns, + initialDescription, projectSpecific, folder)); - var ui = new ConfigureCatalogueExtractabilityUI(this, tableInfo, initialDescription, projectSpecific); - ui.TargetFolder = folder; - ui.ShowDialog(); - - return ui.CatalogueCreatedIfAny; - } - public override ExternalDatabaseServer CreateNewPlatformDatabase(ICatalogueRepository catalogueRepository, PermissableDefaults defaultToSet, IPatcher patcher, DiscoveredDatabase db) + var ui = new ConfigureCatalogueExtractabilityUI(this, tableInfo, initialDescription, projectSpecific) { - // if on wrong Thread - if (_mainDockPanel?.InvokeRequired ?? false) - { - return _mainDockPanel.Invoke(() => CreateNewPlatformDatabase(catalogueRepository, defaultToSet, patcher, db)); - } + TargetFolder = folder + }; + ui.ShowDialog(); - //launch the winforms UI for creating a database - return CreatePlatformDatabase.CreateNewExternalServer(catalogueRepository,defaultToSet,patcher); - } + return ui.CatalogueCreatedIfAny; + } + + public override ExternalDatabaseServer CreateNewPlatformDatabase(ICatalogueRepository catalogueRepository, + PermissableDefaults defaultToSet, IPatcher patcher, DiscoveredDatabase db) + { + // if on wrong Thread + if (_mainDockPanel?.InvokeRequired ?? false) + return _mainDockPanel.Invoke( + () => CreateNewPlatformDatabase(catalogueRepository, defaultToSet, patcher, db)); + + //launch the winforms UI for creating a database + return CreatePlatformDatabase.CreateNewExternalServer(catalogueRepository, defaultToSet, patcher); + } - public override bool ShowCohortWizard(out CohortIdentificationConfiguration cic) + public override bool ShowCohortWizard(out CohortIdentificationConfiguration cic) + { + // if on wrong Thread + if (_mainDockPanel?.InvokeRequired ?? false) { - // if on wrong Thread - if (_mainDockPanel?.InvokeRequired ?? false) - { - CohortIdentificationConfiguration result = default; - var rtn = _mainDockPanel.Invoke(() => ShowCohortWizard(out result)); - cic = result; - return rtn; - } + CohortIdentificationConfiguration result = default; + var rtn = _mainDockPanel.Invoke(() => ShowCohortWizard(out result)); + cic = result; + return rtn; + } - var wizard = new CreateNewCohortIdentificationConfigurationUI(this); + var wizard = new CreateNewCohortIdentificationConfigurationUI(this); - if (wizard.ShowDialog() == DialogResult.OK) - { - cic = wizard.CohortIdentificationCriteriaCreatedIfAny; - } - else - { - cic = null; - } + cic = wizard.ShowDialog() == DialogResult.OK ? wizard.CohortIdentificationCriteriaCreatedIfAny : null; - // Wizard was shown so that's a thing - return true; - } + // Wizard was shown so that's a thing + return true; + } - public override void SelectAnythingThen(DialogArgs args, Action callback) + public override void SelectAnythingThen(DialogArgs args, Action callback) + { + // if on wrong Thread + if (_mainDockPanel?.InvokeRequired ?? false) { - // if on wrong Thread - if (_mainDockPanel?.InvokeRequired ?? 
false) - { - _mainDockPanel.Invoke(() => SelectAnythingThen(args,callback)); - return; - } + _mainDockPanel.Invoke(() => SelectAnythingThen(args, callback)); + return; + } - var select = new SelectDialog( - args, this, CoreChildProvider.GetAllSearchables().Select(k => k.Key), false); + var select = new SelectDialog( + args, this, CoreChildProvider.GetAllSearchables().Select(k => k.Key), false); - if(select.ShowDialog() == DialogResult.OK && select.Selected != null) - { - callback(select.Selected); - } - } + if (select.ShowDialog() == DialogResult.OK && select.Selected != null) callback(select.Selected); + } - public override void ShowData(IViewSQLAndResultsCollection collection) + public override void ShowData(IViewSQLAndResultsCollection collection) + { + // if on wrong Thread + if (_mainDockPanel?.InvokeRequired ?? false) { - // if on wrong Thread - if (_mainDockPanel?.InvokeRequired ?? false) - { - _mainDockPanel.Invoke(() => ShowData(collection)); - return; - } - - Activate(collection); + _mainDockPanel.Invoke(() => ShowData(collection)); + return; } - public override void ShowLogs(ILoggedActivityRootObject rootObject) - { - // if on wrong Thread - if (_mainDockPanel?.InvokeRequired ?? false) - { - _mainDockPanel.Invoke(() => ShowLogs(rootObject)); - return; - } + Activate(collection); + } - Activate(new LoadEventsTreeViewObjectCollection(rootObject)); + public override void ShowLogs(ILoggedActivityRootObject rootObject) + { + // if on wrong Thread + if (_mainDockPanel?.InvokeRequired ?? false) + { + _mainDockPanel.Invoke(() => ShowLogs(rootObject)); + return; } - public override void ShowLogs(ExternalDatabaseServer loggingServer, LogViewerFilter filter) + Activate(new LoadEventsTreeViewObjectCollection(rootObject)); + } + + public override void ShowLogs(ExternalDatabaseServer loggingServer, LogViewerFilter filter) + { + // if on wrong Thread + if (_mainDockPanel?.InvokeRequired ?? false) { - // if on wrong Thread - if (_mainDockPanel?.InvokeRequired ?? false) - { - _mainDockPanel.Invoke(() => ShowLogs(loggingServer,filter)); - return; - } + _mainDockPanel.Invoke(() => ShowLogs(loggingServer, filter)); + return; + } - LoggingTabUI loggingTabUI = Activate(loggingServer); - if(filter != null) - loggingTabUI.SetFilter(filter); - } + var loggingTabUI = Activate(loggingServer); + if (filter != null) + loggingTabUI.SetFilter(filter); + } - public override void ShowGraph(AggregateConfiguration aggregate) + public override void ShowGraph(AggregateConfiguration aggregate) + { + // if on wrong Thread + if (_mainDockPanel?.InvokeRequired ?? false) { - // if on wrong Thread - if (_mainDockPanel?.InvokeRequired ?? false) - { - _mainDockPanel.Invoke(() => ShowGraph(aggregate)); - return; - } - - var graph = Activate(aggregate); - graph.LoadGraphAsync(); + _mainDockPanel.Invoke(() => ShowGraph(aggregate)); + return; } - public override void LaunchSubprocess(ProcessStartInfo startInfo) - { - // if on wrong Thread - if (_mainDockPanel?.InvokeRequired ?? false) - { - _mainDockPanel.Invoke(() => LaunchSubprocess(startInfo)); - return; - } + var graph = Activate(aggregate); + graph.LoadGraphAsync(); + } - var ctrl = new ConsoleControl.ConsoleControl(); - ShowWindow(ctrl, true); - ctrl.StartProcess(startInfo); + public override void LaunchSubprocess(ProcessStartInfo startInfo) + { + // if on wrong Thread + if (_mainDockPanel?.InvokeRequired ?? 
false) + { + _mainDockPanel.Invoke(() => LaunchSubprocess(startInfo)); + return; } - public override void ShowData(System.Data.DataTable table) - { - // if on wrong Thread - if (_mainDockPanel?.InvokeRequired ?? false) - { - _mainDockPanel.Invoke(() => ShowData(table)); - return; - } + var ctrl = new ConsoleControl.ConsoleControl(); + ShowWindow(ctrl, true); + ctrl.StartProcess(startInfo); + } - var ui = new DataTableViewerUI(table, "Table"); - ShowDialog(new SingleControlForm(ui, true)); + public override void ShowData(System.Data.DataTable table) + { + // if on wrong Thread + if (_mainDockPanel?.InvokeRequired ?? false) + { + _mainDockPanel.Invoke(() => ShowData(table)); + return; } + + var ui = new DataTableViewerUI(table, "Table"); + ShowDialog(new SingleControlForm(ui, true)); } -} +} \ No newline at end of file diff --git a/Application/ResearchDataManagementPlatform/WindowManagement/CollectionNavigation.cs b/Application/ResearchDataManagementPlatform/WindowManagement/CollectionNavigation.cs index 66eb9e0b8b..8e66a34146 100644 --- a/Application/ResearchDataManagementPlatform/WindowManagement/CollectionNavigation.cs +++ b/Application/ResearchDataManagementPlatform/WindowManagement/CollectionNavigation.cs @@ -5,62 +5,35 @@ // You should have received a copy of the GNU General Public License along with RDMP. If not, see . using FAnsi.Discovery; -using MapsDirectlyToDatabaseTable; using Rdmp.Core.CommandExecution; -using Rdmp.Core.Curation.Data; using System.Collections.Generic; +using Equ; +using Rdmp.Core.MapsDirectlyToDatabaseTable; -namespace ResearchDataManagementPlatform.WindowManagement -{ - /// - /// Records the fact that the user visited a specific object in a tree collection - /// - public class CollectionNavigation: INavigation - { - public IMapsDirectlyToDatabaseTable Object { get; } - - public bool IsAlive - { - get - { - if(Object is IMightNotExist o) - return o.Exists(); +namespace ResearchDataManagementPlatform.WindowManagement; - return true; - } - } - - public CollectionNavigation(IMapsDirectlyToDatabaseTable Object) - { - this.Object = Object; - } +/// +/// Records the fact that the user visited a specific object in a tree collection +/// +public sealed class CollectionNavigation : PropertywiseEquatable, INavigation +{ + public IMapsDirectlyToDatabaseTable Object { get; } - public void Activate(ActivateItems activateItems) - { - activateItems.RequestItemEmphasis(this,new EmphasiseRequest(Object,0)); - } + [MemberwiseEqualityIgnore] public bool IsAlive => Object is not IMightNotExist o || o.Exists(); - public void Close() - { - - } - public override string ToString() - { - return Object.ToString(); - } + public CollectionNavigation(IMapsDirectlyToDatabaseTable @object) + { + Object = @object; + } - public override bool Equals(object obj) - { - return obj is CollectionNavigation other && - Object.Equals(other.Object); - } + public void Activate(ActivateItems activateItems) + { + activateItems.RequestItemEmphasis(this, new EmphasiseRequest(Object, 0)); + } - public override int GetHashCode() - { - unchecked - { - return 162302186 + EqualityComparer.Default.GetHashCode(Object); - } - } + public void Close() + { } -} + + public override string ToString() => Object.ToString(); +} \ No newline at end of file diff --git a/Application/ResearchDataManagementPlatform/WindowManagement/ContentWindowTracking/Persistence/DeserializeInstruction.cs b/Application/ResearchDataManagementPlatform/WindowManagement/ContentWindowTracking/Persistence/DeserializeInstruction.cs index 
be79de29c8..aaba6ff836 100644 --- a/Application/ResearchDataManagementPlatform/WindowManagement/ContentWindowTracking/Persistence/DeserializeInstruction.cs +++ b/Application/ResearchDataManagementPlatform/WindowManagement/ContentWindowTracking/Persistence/DeserializeInstruction.cs @@ -5,37 +5,36 @@ // You should have received a copy of the GNU General Public License along with RDMP. If not, see . using System; -using MapsDirectlyToDatabaseTable; using Rdmp.Core.Curation.Data.Dashboarding; +using Rdmp.Core.MapsDirectlyToDatabaseTable; -namespace ResearchDataManagementPlatform.WindowManagement.ContentWindowTracking.Persistence +namespace ResearchDataManagementPlatform.WindowManagement.ContentWindowTracking.Persistence; + +/// +/// Hydrateable class used to represent an attempt to restore the state of a docked window after a user closed RDMP and reopened it +/// records the Type of the user interface Control which should be shown and the instance of the object (or collection) that should +/// be shown in it once it has been created. +/// +public class DeserializeInstruction { - /// - /// Hydrateable class used to represent an attempt to restore the state of a docked window after a user closed RDMP and reopened it - /// records the Type of the user interface Control which should be shown and the instance of the object (or collection) that should - /// be shown in it once it has been created. - /// - public class DeserializeInstruction - { - public Type UIControlType; - public IMapsDirectlyToDatabaseTable DatabaseObject; - public IPersistableObjectCollection ObjectCollection; + public Type UIControlType; + public IMapsDirectlyToDatabaseTable DatabaseObject; + public IPersistableObjectCollection ObjectCollection; - public DeserializeInstruction(Type uiControlType) - { - UIControlType = uiControlType; - } + public DeserializeInstruction(Type uiControlType) + { + UIControlType = uiControlType; + } - public DeserializeInstruction(Type uiControlType, IMapsDirectlyToDatabaseTable databaseObject) - { - UIControlType = uiControlType; - DatabaseObject = databaseObject; - } + public DeserializeInstruction(Type uiControlType, IMapsDirectlyToDatabaseTable databaseObject) + { + UIControlType = uiControlType; + DatabaseObject = databaseObject; + } - public DeserializeInstruction(Type uiControlType, IPersistableObjectCollection objectCollection) - { - UIControlType = uiControlType; - ObjectCollection = objectCollection; - } + public DeserializeInstruction(Type uiControlType, IPersistableObjectCollection objectCollection) + { + UIControlType = uiControlType; + ObjectCollection = objectCollection; } -} +} \ No newline at end of file diff --git a/Application/ResearchDataManagementPlatform/WindowManagement/ContentWindowTracking/Persistence/PersistableObjectCollectionDockContent.cs b/Application/ResearchDataManagementPlatform/WindowManagement/ContentWindowTracking/Persistence/PersistableObjectCollectionDockContent.cs index 2a43be5bce..b61a2f7db7 100644 --- a/Application/ResearchDataManagementPlatform/WindowManagement/ContentWindowTracking/Persistence/PersistableObjectCollectionDockContent.cs +++ b/Application/ResearchDataManagementPlatform/WindowManagement/ContentWindowTracking/Persistence/PersistableObjectCollectionDockContent.cs @@ -7,9 +7,9 @@ using System.ComponentModel; using System.Text; using System.Windows.Forms; -using MapsDirectlyToDatabaseTable.Revertable; using Rdmp.Core.CommandExecution; using Rdmp.Core.Curation.Data.Dashboarding; +using Rdmp.Core.MapsDirectlyToDatabaseTable.Revertable; using Rdmp.UI; 
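// ---------------------------------------------------------------------------
// Editor's note (illustrative sketch, not part of the patch): the ActivateItems
// methods changed above all share the same WinForms cross-thread guard: if the
// call arrives off the UI thread it is re-dispatched via Control.Invoke and the
// invoked result is handed back. A minimal standalone version of that pattern
// follows, assuming the .NET 6+ Control.Invoke(Func<T>) overload that the patch
// itself relies on. The type and member names (ThreadSafePrompt, Ask) are
// hypothetical and exist only for this example.
// ---------------------------------------------------------------------------
using System.Windows.Forms;

public sealed class ThreadSafePrompt
{
    private readonly Control _uiControl;

    public ThreadSafePrompt(Control uiControl)
    {
        _uiControl = uiControl;
    }

    public bool Ask(string question)
    {
        // if on wrong Thread, marshal the whole call back onto the UI thread
        // and return whatever that invocation produced
        if (_uiControl?.InvokeRequired ?? false)
            return _uiControl.Invoke(() => Ask(question));

        // safe to show UI here: we are guaranteed to be on the UI thread
        return MessageBox.Show(question, "Confirm", MessageBoxButtons.YesNo) == DialogResult.Yes;
    }
}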
using Rdmp.UI.ItemActivation; using Rdmp.UI.Refreshing; @@ -17,101 +17,93 @@ using ResearchDataManagementPlatform.WindowManagement.ExtenderFunctionality; -namespace ResearchDataManagementPlatform.WindowManagement.ContentWindowTracking.Persistence +namespace ResearchDataManagementPlatform.WindowManagement.ContentWindowTracking.Persistence; + +/// +/// Allows you to persist user interfaces which are built on more than one RDMP database object (if you only require one object you should use RDMPSingleDatabaseObjectControl instead +/// +[DesignerCategory("")] +[TechnicalUI] +public class PersistableObjectCollectionDockContent : RDMPSingleControlTab { - /// - /// Allows you to persist user interfaces which are built on more than one RDMP database object (if you only require one object you should use RDMPSingleDatabaseObjectControl instead - /// - [DesignerCategory("")] - [TechnicalUI] - public class PersistableObjectCollectionDockContent : RDMPSingleControlTab - { - private readonly IObjectCollectionControl _control; - - public const string Prefix = "RDMPObjectCollection"; - - private PersistStringHelper persistStringHelper = new PersistStringHelper(); + private readonly IObjectCollectionControl _control; - public IPersistableObjectCollection Collection { get { return _control.GetCollection(); } } + public const string Prefix = "RDMPObjectCollection"; - public PersistableObjectCollectionDockContent(IActivateItems activator, IObjectCollectionControl control, IPersistableObjectCollection collection):base(activator.RefreshBus) - { - _control = control; - Control = (Control)control; + public IPersistableObjectCollection Collection => _control.GetCollection(); + + public PersistableObjectCollectionDockContent(IActivateItems activator, IObjectCollectionControl control, + IPersistableObjectCollection collection) : base(activator.RefreshBus) + { + _control = control; + Control = (Control)control; - //tell the control what its collection is - control.SetCollection(activator, collection); - - //ask the control what it wants its name to be - TabText = _control.GetTabName(); - } + //tell the control what its collection is + control.SetCollection(activator, collection); - protected override string GetPersistString() - { - var collection = _control.GetCollection(); - const char s = PersistStringHelper.Separator; + //ask the control what it wants its name to be + TabText = _control.GetTabName(); + } - //Looks something like this RDMPObjectCollection:MyCoolControlUI:MyControlUIsBundleOfObjects:[CatalogueRepository:AggregateConfiguration:105,CatalogueRepository:AggregateConfiguration:102,CatalogueRepository:AggregateConfiguration:101]###EXTRA_TEXT###I've got a lovely bunch of coconuts - StringBuilder sb = new StringBuilder(); + protected override string GetPersistString() + { + var collection = _control.GetCollection(); + const char s = PersistStringHelper.Separator; - //Output ::: - sb.Append(Prefix + s + _control.GetType().FullName + s + collection.GetType().Name + s); + //Looks something like this RDMPObjectCollection:MyCoolControlUI:MyControlUIsBundleOfObjects:[CatalogueRepository:AggregateConfiguration:105,CatalogueRepository:AggregateConfiguration:102,CatalogueRepository:AggregateConfiguration:101]###EXTRA_TEXT###I've got a lovely bunch of coconuts + var sb = new StringBuilder(); - sb.Append(persistStringHelper.GetObjectCollectionPersistString(collection.DatabaseObjects.ToArray())); + //Output ::: + sb.Append(Prefix + s + _control.GetType().FullName + s + collection.GetType().Name + s); - //now add the bit 
that starts the user specific text - sb.Append(PersistStringHelper.ExtraText); + sb.Append(PersistStringHelper.GetObjectCollectionPersistString(collection.DatabaseObjects.ToArray())); - //let him save whatever text he wants - sb.Append(collection.SaveExtraText()); + //now add the bit that starts the user specific text + sb.Append(PersistStringHelper.ExtraText); - return sb.ToString(); - } + //let him save whatever text he wants + sb.Append(collection.SaveExtraText()); - + return sb.ToString(); + } - public override void RefreshBus_RefreshObject(object sender, RefreshObjectEventArgs e) - { - var newTabName = _control.GetTabName(); - var floatWindow = ParentForm as CustomFloatWindow; - if (floatWindow != null) - floatWindow.Text = newTabName; + public override void RefreshBus_RefreshObject(object sender, RefreshObjectEventArgs e) + { + var newTabName = _control.GetTabName(); - TabText = newTabName; + if (ParentForm is CustomFloatWindow floatWindow) + floatWindow.Text = newTabName; - //pass the info on to the control - _control.RefreshBus_RefreshObject(sender,e); + TabText = newTabName; - } + //pass the info on to the control + _control.RefreshBus_RefreshObject(sender, e); + } - public override void HandleUserRequestingTabRefresh(IActivateItems activator) - { - var collection = _control.GetCollection(); + public override void HandleUserRequestingTabRefresh(IActivateItems activator) + { + var collection = _control.GetCollection(); - foreach (var o in collection.DatabaseObjects) - { - var revertable = o as IRevertable; - if (revertable != null) - revertable.RevertToDatabaseState(); - } + foreach (var o in collection.DatabaseObjects) + if (o is IRevertable revertable) + revertable.RevertToDatabaseState(); - _control.SetCollection(activator,collection); - } + _control.SetCollection(activator, collection); + } - public override void HandleUserRequestingEmphasis(IActivateItems activator) - { - var collection = _control.GetCollection(); + public override void HandleUserRequestingEmphasis(IActivateItems activator) + { + var collection = _control.GetCollection(); - if(collection != null) - if (collection.DatabaseObjects.Count >= 1) - { - var o = activator.SelectOne("Show", collection.DatabaseObjects.ToArray(),null,true); + if (collection != null) + if (collection.DatabaseObjects.Count >= 1) + { + var o = activator.SelectOne("Show", collection.DatabaseObjects.ToArray(), null, true); - if(o != null) - activator.RequestItemEmphasis(this, new EmphasiseRequest(o)); - } - } + if (o != null) + activator.RequestItemEmphasis(this, new EmphasiseRequest(o)); + } } -} +} \ No newline at end of file diff --git a/Application/ResearchDataManagementPlatform/WindowManagement/ContentWindowTracking/Persistence/PersistableSingleDatabaseObjectDockContent.cs b/Application/ResearchDataManagementPlatform/WindowManagement/ContentWindowTracking/Persistence/PersistableSingleDatabaseObjectDockContent.cs index ffab9a0ba4..70f7888bc1 100644 --- a/Application/ResearchDataManagementPlatform/WindowManagement/ContentWindowTracking/Persistence/PersistableSingleDatabaseObjectDockContent.cs +++ b/Application/ResearchDataManagementPlatform/WindowManagement/ContentWindowTracking/Persistence/PersistableSingleDatabaseObjectDockContent.cs @@ -5,10 +5,10 @@ // You should have received a copy of the GNU General Public License along with RDMP. If not, see . 
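// ---------------------------------------------------------------------------
// Editor's note (illustrative sketch, not part of the patch): GetPersistString
// above serialises a tab roughly as Prefix:ControlType:CollectionType:[objects]
// followed by an ###EXTRA_TEXT### suffix holding free text. The standalone
// round trip below shows the idea with plain string handling; the separator
// character, the marker value and the PersistSketch type are assumptions made
// only for this example, not the real PersistStringHelper API.
// ---------------------------------------------------------------------------
using System;

public static class PersistSketch
{
    private const char Separator = ':';
    private const string ExtraTextMarker = "###EXTRA_TEXT###";

    public static string Compose(string prefix, Type controlType, Type collectionType,
        string objectList, string extraText) =>
        $"{prefix}{Separator}{controlType.FullName}{Separator}{collectionType.Name}{Separator}{objectList}{ExtraTextMarker}{extraText}";

    public static (string Prefix, string ControlType, string CollectionType, string Objects, string ExtraText)
        Parse(string persistString)
    {
        // peel off the user-specific free text first so it cannot interfere with the token split
        var markerIndex = persistString.IndexOf(ExtraTextMarker, StringComparison.Ordinal);
        var extraText = markerIndex >= 0 ? persistString[(markerIndex + ExtraTextMarker.Length)..] : "";
        var head = markerIndex >= 0 ? persistString[..markerIndex] : persistString;

        var tokens = head.Split(Separator, 4);
        if (tokens.Length != 4)
            throw new FormatException($"Unexpected number of tokens ({tokens.Length}) in persist string");

        return (tokens[0], tokens[1], tokens[2], tokens[3], extraText);
    }
}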
using System.Windows.Forms; -using MapsDirectlyToDatabaseTable; using Rdmp.Core.CommandExecution; using Rdmp.Core.Curation.Data; using Rdmp.Core.Curation.Data.Dashboarding; +using Rdmp.Core.MapsDirectlyToDatabaseTable; using Rdmp.UI; using Rdmp.UI.CommandExecution.AtomicCommands; using Rdmp.UI.ItemActivation; @@ -17,68 +17,68 @@ using ResearchDataManagementPlatform.WindowManagement.ExtenderFunctionality; -namespace ResearchDataManagementPlatform.WindowManagement.ContentWindowTracking.Persistence +namespace ResearchDataManagementPlatform.WindowManagement.ContentWindowTracking.Persistence; + +/// +/// A Document Tab that hosts an RDMPSingleDatabaseObjectControl T, the control knows how to save itself to the persistence settings file for the user ensuring that when they next open the +/// software the Tab can be reloaded and displayed. Persistance involves storing this Tab type, the Control type being hosted by the Tab (a RDMPSingleDatabaseObjectControl) and the object +/// ID , object Type and Repository (DataExport or Catalogue) of the T object currently held in the RDMPSingleDatabaseObjectControl. +/// +[System.ComponentModel.DesignerCategory("")] +[TechnicalUI] +public class PersistableSingleDatabaseObjectDockContent : RDMPSingleControlTab { - /// - /// A Document Tab that hosts an RDMPSingleDatabaseObjectControl T, the control knows how to save itself to the persistence settings file for the user ensuring that when they next open the - /// software the Tab can be reloaded and displayed. Persistance involves storing this Tab type, the Control type being hosted by the Tab (a RDMPSingleDatabaseObjectControl) and the object - /// ID , object Type and Repository (DataExport or Catalogue) of the T object currently held in the RDMPSingleDatabaseObjectControl. - /// - [System.ComponentModel.DesignerCategory("")] - [TechnicalUI] - public class PersistableSingleDatabaseObjectDockContent : RDMPSingleControlTab + public IMapsDirectlyToDatabaseTable DatabaseObject { get; private set; } + + public const string Prefix = "RDMPSingleDatabaseObjectControl"; + + public PersistableSingleDatabaseObjectDockContent(IRDMPSingleDatabaseObjectControl control, + IMapsDirectlyToDatabaseTable databaseObject, RefreshBus refreshBus) : base(refreshBus) + { + Control = (Control)control; + + DatabaseObject = databaseObject; + TabText = "Loading..."; + + control.UnSavedChanges += OnUnSavedChanges; + Closing += (s, e) => control.UnSavedChanges -= OnUnSavedChanges; + } + + private void OnUnSavedChanges(object sender, bool unsavedChanges) + { + if (TabText == null) + return; + + TabText = unsavedChanges ? 
$"{TabText.TrimEnd('*')}*" : TabText.TrimEnd('*'); + } + + protected override string GetPersistString() + { + const char s = PersistStringHelper.Separator; + return Prefix + s + Control.GetType().FullName + s + DatabaseObject.Repository.GetType().FullName + s + + DatabaseObject.GetType().FullName + s + DatabaseObject.ID; + } + + public override void RefreshBus_RefreshObject(object sender, RefreshObjectEventArgs e) + { + var newTabName = ((IRDMPSingleDatabaseObjectControl)Control).GetTabName(); + + if (ParentForm is CustomFloatWindow floatWindow) + floatWindow.Text = newTabName; + + TabText = newTabName; + } + + public override void HandleUserRequestingTabRefresh(IActivateItems activator) + { + var cmd = new ExecuteCommandRefreshObject(activator, DatabaseObject as DatabaseEntity); + + if (!cmd.IsImpossible) + cmd.Execute(); + } + + public override void HandleUserRequestingEmphasis(IActivateItems activator) { - public IMapsDirectlyToDatabaseTable DatabaseObject { get; private set; } - - public const string Prefix = "RDMPSingleDatabaseObjectControl"; - - public PersistableSingleDatabaseObjectDockContent(IRDMPSingleDatabaseObjectControl control, IMapsDirectlyToDatabaseTable databaseObject,RefreshBus refreshBus):base(refreshBus) - { - Control = (Control)control; - - DatabaseObject = databaseObject; - TabText = "Loading..."; - - control.UnSavedChanges += OnUnSavedChanges; - Closing += (s,e)=>control.UnSavedChanges -= OnUnSavedChanges; - } - - private void OnUnSavedChanges(object sender, bool unsavedChanges) - { - if(TabText == null) - return; - - TabText = unsavedChanges ? TabText.TrimEnd('*') + '*' : TabText.TrimEnd('*'); - } - - protected override string GetPersistString() - { - const char s = PersistStringHelper.Separator; - return Prefix + s + Control.GetType().FullName + s + DatabaseObject.Repository.GetType().FullName + s + DatabaseObject.GetType().FullName + s + DatabaseObject.ID; - } - - public override void RefreshBus_RefreshObject(object sender, RefreshObjectEventArgs e) - { - var newTabName = ((IRDMPSingleDatabaseObjectControl) Control).GetTabName(); - var floatWindow = ParentForm as CustomFloatWindow; - - if (floatWindow != null) - floatWindow.Text = newTabName; - - TabText = newTabName; - } - - public override void HandleUserRequestingTabRefresh(IActivateItems activator) - { - var cmd = new ExecuteCommandRefreshObject(activator, DatabaseObject as DatabaseEntity); - - if (!cmd.IsImpossible) - cmd.Execute(); - } - - public override void HandleUserRequestingEmphasis(IActivateItems activator) - { - activator.RequestItemEmphasis(this,new EmphasiseRequest(DatabaseObject)); - } + activator.RequestItemEmphasis(this, new EmphasiseRequest(DatabaseObject)); } -} +} \ No newline at end of file diff --git a/Application/ResearchDataManagementPlatform/WindowManagement/ContentWindowTracking/Persistence/PersistableToolboxDockContent.cs b/Application/ResearchDataManagementPlatform/WindowManagement/ContentWindowTracking/Persistence/PersistableToolboxDockContent.cs index 05e6b3ce8a..eda71c2a72 100644 --- a/Application/ResearchDataManagementPlatform/WindowManagement/ContentWindowTracking/Persistence/PersistableToolboxDockContent.cs +++ b/Application/ResearchDataManagementPlatform/WindowManagement/ContentWindowTracking/Persistence/PersistableToolboxDockContent.cs @@ -7,70 +7,54 @@ using System; using System.Collections.Generic; using System.Linq; -using MapsDirectlyToDatabaseTable; using Rdmp.Core; -using Rdmp.Core.CommandExecution; using Rdmp.Core.Curation.Data.Dashboarding; using Rdmp.UI; using 
Rdmp.UI.Collections; -using Rdmp.UI.ItemActivation; -using ReusableLibraryCode.Checks; - using WeifenLuo.WinFormsUI.Docking; -namespace ResearchDataManagementPlatform.WindowManagement.ContentWindowTracking.Persistence +namespace ResearchDataManagementPlatform.WindowManagement.ContentWindowTracking.Persistence; + +/// +/// A Document Tab that hosts an RDMPCollection, the control knows how to save itself to the persistence settings file for the user ensuring that when they next open the +/// software the Tab can be reloaded and displayed. Persistance involves storing this Tab type, the Collection Control type being hosted by the Tab (an RDMPCollection). +/// Since there can only ever be one RDMPCollection of any Type active at a time this is all that must be stored to persist the control +/// +[TechnicalUI] +[System.ComponentModel.DesignerCategory("")] +public class PersistableToolboxDockContent : DockContent { - /// - /// A Document Tab that hosts an RDMPCollection, the control knows how to save itself to the persistence settings file for the user ensuring that when they next open the - /// software the Tab can be reloaded and displayed. Persistance involves storing this Tab type, the Collection Control type being hosted by the Tab (an RDMPCollection). - /// Since there can only ever be one RDMPCollection of any Type active at a time this is all that must be stored to persist the control - /// - [TechnicalUI] - [System.ComponentModel.DesignerCategory("")] - public class PersistableToolboxDockContent:DockContent - { - public const string Prefix = "Toolbox"; + public const string Prefix = "Toolbox"; - public readonly RDMPCollection CollectionType; + public readonly RDMPCollection CollectionType; - PersistStringHelper persistStringHelper = new PersistStringHelper(); + public PersistableToolboxDockContent(RDMPCollection collectionType) + { + CollectionType = collectionType; + } - public PersistableToolboxDockContent(RDMPCollection collectionType) - { - CollectionType = collectionType; - } - protected override string GetPersistString() + protected override string GetPersistString() + { + var args = new Dictionary { + { "Toolbox", CollectionType.ToString() } + }; - - - var args = new Dictionary(); - args.Add("Toolbox", CollectionType.ToString()); - - - return Prefix + PersistStringHelper.Separator + persistStringHelper.SaveDictionaryToString(args); - } - public RDMPCollectionUI GetCollection() - { - return Controls.OfType().SingleOrDefault(); - } + return $"{Prefix}{PersistStringHelper.Separator}{PersistStringHelper.SaveDictionaryToString(args)}"; + } - public static RDMPCollection? GetToolboxFromPersistString(string persistString) - { - var helper = new PersistStringHelper(); - var s = persistString.Substring(PersistableToolboxDockContent.Prefix.Length + 1); + public RDMPCollectionUI GetCollection() => Controls.OfType().SingleOrDefault(); - var args = helper.LoadDictionaryFromString(s); - RDMPCollection collection; + public static RDMPCollection? GetToolboxFromPersistString(string persistString) + { + var s = persistString[(Prefix.Length + 1)..]; - if (args.ContainsKey("Toolbox")) - { - Enum.TryParse(args["Toolbox"], true, out collection); - return collection; - } + var args = PersistStringHelper.LoadDictionaryFromString(s); - return null; - } + return args.TryGetValue("Toolbox", out var toolbox) && + Enum.TryParse(toolbox, true, out RDMPCollection collection) + ? 
collection + : null; } -} +} \ No newline at end of file diff --git a/Application/ResearchDataManagementPlatform/WindowManagement/ContentWindowTracking/Persistence/PersistenceDecisionFactory.cs b/Application/ResearchDataManagementPlatform/WindowManagement/ContentWindowTracking/Persistence/PersistenceDecisionFactory.cs index b32947d578..1a7c4d8bff 100644 --- a/Application/ResearchDataManagementPlatform/WindowManagement/ContentWindowTracking/Persistence/PersistenceDecisionFactory.cs +++ b/Application/ResearchDataManagementPlatform/WindowManagement/ContentWindowTracking/Persistence/PersistenceDecisionFactory.cs @@ -7,7 +7,6 @@ using System; using System.Reflection; using System.Windows.Forms; -using MapsDirectlyToDatabaseTable; using Rdmp.Core; using Rdmp.Core.Curation.Data.Dashboarding; using Rdmp.Core.Repositories; @@ -16,119 +15,114 @@ using Rdmp.UI.ProjectUI; using Rdmp.UI.Raceway; -namespace ResearchDataManagementPlatform.WindowManagement.ContentWindowTracking.Persistence +namespace ResearchDataManagementPlatform.WindowManagement.ContentWindowTracking.Persistence; + +/// +/// Translates persistence strings into DeserializeInstructions for restoring the RDMP main application window layout after application close/restart. +/// +public class PersistenceDecisionFactory { + public PersistenceDecisionFactory() + { + //ensure dashboard UI assembly is loaded + Assembly.Load(typeof(RacewayRenderAreaUI).Assembly.FullName); + //ensure data export UI assembly is loaded + Assembly.Load(typeof(ExtractionConfigurationUI).Assembly.FullName); + //ensure DLE UI assembly is loaded + Assembly.Load(typeof(ExecuteLoadMetadataUI).Assembly.FullName); + } + /// - /// Translates persistence strings into DeserializeInstructions for restoring the RDMP main application window layout after application close/restart. + /// If describes the persisted control state + /// as described by the basic class (rather + /// than a more specialised class like ) then + /// we return a new instruction of what Type of control to create /// - public class PersistenceDecisionFactory + /// + /// + /// + public static DeserializeInstruction ShouldCreateBasicControl(string persistString, + IRDMPPlatformRepositoryServiceLocator repositoryLocator) + { + if (!persistString.StartsWith(RDMPSingleControlTab.BasicPrefix)) + return null; + + //return BasicPrefix + s + Control.GetType().Name + var tokens = persistString.Split(PersistStringHelper.Separator); + + if (tokens.Length != 2) + throw new PersistenceException( + $"Unexpected number of tokens ({tokens.Length}) for Persistence of Type {RDMPSingleControlTab.BasicPrefix}"); + + var controlType = GetTypeByName(tokens[1], typeof(Control), repositoryLocator); + + return new DeserializeInstruction(controlType); + } + + public static RDMPCollection? ShouldCreateCollection(string persistString) => + !persistString.StartsWith(PersistableToolboxDockContent.Prefix) + ? 
null + : PersistableToolboxDockContent.GetToolboxFromPersistString(persistString); + + public static DeserializeInstruction ShouldCreateSingleObjectControl(string persistString, + IRDMPPlatformRepositoryServiceLocator repositoryLocator) + { + if (!persistString.StartsWith(PersistableSingleDatabaseObjectDockContent.Prefix)) + return null; + + //return Prefix + s + _control.GetType().Name + s + _databaseObject.Repository.GetType() + s + _databaseObject.GetType().Name + s + _databaseObject.ID; + var tokens = persistString.Split(PersistStringHelper.Separator); + + if (tokens.Length != 5) + throw new PersistenceException( + $"Unexpected number of tokens ({tokens.Length}) for Persistence of Type {PersistableSingleDatabaseObjectDockContent.Prefix}"); + + var controlType = GetTypeByName(tokens[1], typeof(Control), repositoryLocator); + var o = repositoryLocator.GetArbitraryDatabaseObject(tokens[2], tokens[3], int.Parse(tokens[4])); + + return new DeserializeInstruction(controlType, o); + } + + public static DeserializeInstruction ShouldCreateObjectCollection(string persistString, + IRDMPPlatformRepositoryServiceLocator repositoryLocator) + { + if (!persistString.StartsWith(PersistableObjectCollectionDockContent.Prefix)) + return null; + + if (!persistString.Contains(PersistStringHelper.ExtraText)) + throw new PersistenceException($"Persistence string did not contain '{PersistStringHelper.ExtraText}"); + + //Looks something like this RDMPObjectCollection:MyCoolControlUI:MyControlUIsBundleOfObjects:[CatalogueRepository:AggregateConfiguration:105,CatalogueRepository:AggregateConfiguration:102,CatalogueRepository:AggregateConfiguration:101]###EXTRA_TEXT###I've got a lovely bunch of coconuts + var tokens = persistString.Split(PersistStringHelper.Separator); + + var uiType = GetTypeByName(tokens[1], typeof(Control), repositoryLocator); + var collectionType = GetTypeByName(tokens[2], typeof(IPersistableObjectCollection), repositoryLocator); + + var collectionInstance = (IPersistableObjectCollection)ObjectConstructor.Construct(collectionType); + + if (collectionInstance.DatabaseObjects == null) + throw new PersistenceException( + $"Constructor of Type '{collectionType}' did not initialise property DatabaseObjects"); + + var allObjectsString = PersistStringHelper.MatchCollectionInString(persistString); + + collectionInstance.DatabaseObjects.AddRange( + PersistStringHelper.GetObjectCollectionFromPersistString(allObjectsString, repositoryLocator)); + + var extraText = PersistStringHelper.GetExtraText(persistString); + collectionInstance.LoadExtraText(extraText); + + return new DeserializeInstruction(uiType, collectionInstance); + } + + private static Type GetTypeByName(string s, Type expectedBaseClassType, + IRDMPPlatformRepositoryServiceLocator repositoryLocator) { - PersistStringHelper _persistStringHelper = new PersistStringHelper(); - - public PersistenceDecisionFactory() - { - //ensure dashboard UI assembly is loaded - Assembly.Load(typeof (RacewayRenderAreaUI).Assembly.FullName); - //ensure data export UI assembly is loaded - Assembly.Load(typeof(ExtractionConfigurationUI).Assembly.FullName); - //ensure DLE UI assembly is loaded - Assembly.Load(typeof(ExecuteLoadMetadataUI).Assembly.FullName); - } - - /// - /// If describes the persisted control state - /// as described by the basic class (rather - /// than a more specialised class like ) then - /// we return a new instruction of what Type of control to create - /// - /// - /// - /// - public DeserializeInstruction ShouldCreateBasicControl(string 
persistString, IRDMPPlatformRepositoryServiceLocator repositoryLocator) - { - if (!persistString.StartsWith(RDMPSingleControlTab.BasicPrefix)) - return null; - - //return BasicPrefix + s + Control.GetType().Name - var tokens = persistString.Split(PersistStringHelper.Separator); - - if (tokens.Length != 2) - throw new PersistenceException("Unexpected number of tokens (" + tokens.Length + ") for Persistence of Type " + RDMPSingleControlTab.BasicPrefix); - - Type controlType = GetTypeByName(tokens[1], typeof(Control), repositoryLocator); - - return new DeserializeInstruction(controlType); - } - - public RDMPCollection? ShouldCreateCollection(string persistString) - { - if (!persistString.StartsWith(PersistableToolboxDockContent.Prefix)) - return null; - - return PersistableToolboxDockContent.GetToolboxFromPersistString(persistString); - } - - public DeserializeInstruction ShouldCreateSingleObjectControl(string persistString, IRDMPPlatformRepositoryServiceLocator repositoryLocator) - { - if (!persistString.StartsWith(PersistableSingleDatabaseObjectDockContent.Prefix)) - return null; - - //return Prefix + s + _control.GetType().Name + s + _databaseObject.Repository.GetType() + s + _databaseObject.GetType().Name + s + _databaseObject.ID; - var tokens = persistString.Split(PersistStringHelper.Separator); - - if (tokens.Length != 5) - throw new PersistenceException("Unexpected number of tokens (" + tokens.Length + ") for Persistence of Type " + PersistableSingleDatabaseObjectDockContent.Prefix); - - Type controlType = GetTypeByName(tokens[1], typeof(Control), repositoryLocator); - IMapsDirectlyToDatabaseTable o = repositoryLocator.GetArbitraryDatabaseObject(tokens[2], tokens[3], int.Parse(tokens[4])); - - return new DeserializeInstruction(controlType,o); - - } - - public DeserializeInstruction ShouldCreateObjectCollection(string persistString, IRDMPPlatformRepositoryServiceLocator repositoryLocator) - { - if (!persistString.StartsWith(PersistableObjectCollectionDockContent.Prefix)) - return null; - - if(!persistString.Contains(PersistStringHelper.ExtraText)) - throw new PersistenceException("Persistence string did not contain '" + PersistStringHelper.ExtraText); - - //Looks something like this RDMPObjectCollection:MyCoolControlUI:MyControlUIsBundleOfObjects:[CatalogueRepository:AggregateConfiguration:105,CatalogueRepository:AggregateConfiguration:102,CatalogueRepository:AggregateConfiguration:101]###EXTRA_TEXT###I've got a lovely bunch of coconuts - var tokens = persistString.Split(PersistStringHelper.Separator); - - var uiType = GetTypeByName(tokens[1],typeof(Control),repositoryLocator); - var collectionType = GetTypeByName(tokens[2], typeof (IPersistableObjectCollection), repositoryLocator); - - ObjectConstructor objectConstructor = new ObjectConstructor(); - IPersistableObjectCollection collectionInstance = (IPersistableObjectCollection)objectConstructor.Construct(collectionType); - - if(collectionInstance.DatabaseObjects == null) - throw new PersistenceException("Constructor of Type '" +collectionType + "' did not initialise property DatabaseObjects"); - - var allObjectsString = _persistStringHelper.MatchCollectionInString(persistString); - - collectionInstance.DatabaseObjects.AddRange(_persistStringHelper.GetObjectCollectionFromPersistString(allObjectsString,repositoryLocator)); - - var extraText = _persistStringHelper.GetExtraText(persistString); - collectionInstance.LoadExtraText(extraText); - - return new DeserializeInstruction(uiType,collectionInstance); - } - - private Type 
GetTypeByName(string s, Type expectedBaseClassType,IRDMPPlatformRepositoryServiceLocator repositoryLocator) - { - var toReturn = repositoryLocator.CatalogueRepository.MEF.GetType(s); - - if (toReturn == null) - throw new TypeLoadException("Could not find Type called '" + s + "'"); - - if (expectedBaseClassType != null) - if (!expectedBaseClassType.IsAssignableFrom(toReturn)) - throw new TypeLoadException("Persistence string included a reference to Type '" + s + "' which we managed to find but it did not match an expected base Type (" + expectedBaseClassType + ")"); - - return toReturn; - } - + var toReturn = MEF.GetType(s) ?? throw new TypeLoadException($"Could not find Type called '{s}'"); + if (expectedBaseClassType?.IsAssignableFrom(toReturn) == false) + throw new TypeLoadException( + $"Persistence string included a reference to Type '{s}' which we managed to find but it did not match an expected base Type ({expectedBaseClassType})"); + + return toReturn; } -} +} \ No newline at end of file diff --git a/Application/ResearchDataManagementPlatform/WindowManagement/ContentWindowTracking/Persistence/RDMPSingleControlTab.cs b/Application/ResearchDataManagementPlatform/WindowManagement/ContentWindowTracking/Persistence/RDMPSingleControlTab.cs index 7e65998d55..87ec6121dc 100644 --- a/Application/ResearchDataManagementPlatform/WindowManagement/ContentWindowTracking/Persistence/RDMPSingleControlTab.cs +++ b/Application/ResearchDataManagementPlatform/WindowManagement/ContentWindowTracking/Persistence/RDMPSingleControlTab.cs @@ -12,83 +12,80 @@ using Rdmp.UI.ItemActivation; using Rdmp.UI.Refreshing; using Rdmp.UI.SimpleDialogs; - - using WeifenLuo.WinFormsUI.Docking; -namespace ResearchDataManagementPlatform.WindowManagement.ContentWindowTracking.Persistence +namespace ResearchDataManagementPlatform.WindowManagement.ContentWindowTracking.Persistence; + +/// +/// TECHNICAL: Base class for all dockable tabs that host a single control +/// +[System.ComponentModel.DesignerCategory("")] +[TechnicalUI] +public class RDMPSingleControlTab : DockContent, IRefreshBusSubscriber { /// - /// TECHNICAL: Base class for all dockable tabs that host a single control + /// The control hosted on this tab /// - [System.ComponentModel.DesignerCategory("")] - [TechnicalUI] - public class RDMPSingleControlTab:DockContent,IRefreshBusSubscriber + public Control Control { get; protected set; } + + public const string BasicPrefix = "BASIC"; + + protected RDMPSingleControlTab(RefreshBus refreshBus) + { + refreshBus.Subscribe(this); + FormClosed += (s, e) => refreshBus.Unsubscribe(this); + } + + /// + /// Creates instance and sets to . You + /// will still need to add and Dock the control etc yourself + /// + /// + /// + public RDMPSingleControlTab(RefreshBus refreshBus, Control c) + { + refreshBus.Subscribe(this); + FormClosed += (s, e) => refreshBus.Unsubscribe(this); + Control = c; + } + + public virtual void RefreshBus_RefreshObject(object sender, RefreshObjectEventArgs e) + { + } + + public virtual void HandleUserRequestingTabRefresh(IActivateItems activator) + { + } + + public virtual void HandleUserRequestingEmphasis(IActivateItems activator) { - /// - /// The control hosted on this tab - /// - public Control Control { get; protected set; } - public const string BasicPrefix = "BASIC"; - - protected RDMPSingleControlTab(RefreshBus refreshBus) - { - refreshBus.Subscribe(this); - FormClosed += (s, e) => refreshBus.Unsubscribe(this); - } - - /// - /// Creates instance and sets to . 
You - /// will still need to add and Dock the control etc yourself - /// - /// - /// - public RDMPSingleControlTab(RefreshBus refreshBus, Control c) - { - refreshBus.Subscribe(this); - FormClosed += (s, e) => refreshBus.Unsubscribe(this); - Control = c; - } - - public virtual void RefreshBus_RefreshObject(object sender, RefreshObjectEventArgs e) - { - - } - public virtual void HandleUserRequestingTabRefresh(IActivateItems activator) - { - - } - - public virtual void HandleUserRequestingEmphasis(IActivateItems activator) - { - - } - protected override string GetPersistString() - { - const char s = PersistStringHelper.Separator; - return BasicPrefix + s + Control.GetType().FullName; - } - - public void ShowHelp(IActivateItems activator) - { - var typeDocs = activator.RepositoryLocator.CatalogueRepository.CommentStore; - - StringBuilder sb = new StringBuilder(); - - string firstMatch = null; - - foreach (var c in Controls) - if (typeDocs.ContainsKey(c.GetType().Name)) - { - if (firstMatch == null) - firstMatch = c.GetType().Name; - - sb.AppendLine(typeDocs.GetDocumentationIfExists(c.GetType().Name,false,true)); - sb.AppendLine(); - } - - if (sb.Length > 0) - WideMessageBox.Show(firstMatch, sb.ToString(),Environment.StackTrace, true, firstMatch,WideMessageBoxTheme.Help); - } + } + + protected override string GetPersistString() + { + const char s = PersistStringHelper.Separator; + return BasicPrefix + s + Control.GetType().FullName; + } + + public void ShowHelp(IActivateItems activator) + { + var typeDocs = activator.RepositoryLocator.CatalogueRepository.CommentStore; + + var sb = new StringBuilder(); + + string firstMatch = null; + + foreach (var c in Controls) + if (typeDocs.ContainsKey(c.GetType().Name)) + { + firstMatch ??= c.GetType().Name; + + sb.AppendLine(typeDocs.GetDocumentationIfExists(c.GetType().Name, false, true)); + sb.AppendLine(); + } + + if (sb.Length > 0) + WideMessageBox.Show(firstMatch, sb.ToString(), Environment.StackTrace, true, firstMatch, + WideMessageBoxTheme.Help); } } \ No newline at end of file diff --git a/Application/ResearchDataManagementPlatform/WindowManagement/Events/RDMPCollectionCreatedEventHandler.cs b/Application/ResearchDataManagementPlatform/WindowManagement/Events/RDMPCollectionCreatedEventHandler.cs index 948ed10630..704262cf03 100644 --- a/Application/ResearchDataManagementPlatform/WindowManagement/Events/RDMPCollectionCreatedEventHandler.cs +++ b/Application/ResearchDataManagementPlatform/WindowManagement/Events/RDMPCollectionCreatedEventHandler.cs @@ -4,12 +4,11 @@ // RDMP is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. // You should have received a copy of the GNU General Public License along with RDMP. If not, see . 
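// ---------------------------------------------------------------------------
// Editor's note (illustrative sketch, not part of the patch): the refactored
// PersistableToolboxDockContent above stores its state as "Toolbox:" plus a
// key=value dictionary and restores the collection enum via Enum.TryParse. The
// standalone round trip below mimics that with an ordinary dictionary and a
// stand-in enum; ExampleCollection, Save and Load are assumptions made only
// for this example, not RDMP's real SaveDictionaryToString/LoadDictionaryFromString.
// ---------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Linq;

public enum ExampleCollection { None, Catalogue, DataExport, Cohort }

public static class ToolboxPersistSketch
{
    private const string Prefix = "Toolbox";
    private const char Separator = ':';

    public static string Save(ExampleCollection collection)
    {
        var args = new Dictionary<string, string> { { "Toolbox", collection.ToString() } };
        // key=value pairs joined with commas, standing in for SaveDictionaryToString
        var body = string.Join(",", args.Select(kvp => $"{kvp.Key}={kvp.Value}"));
        return $"{Prefix}{Separator}{body}";
    }

    public static ExampleCollection? Load(string persistString)
    {
        if (!persistString.StartsWith(Prefix))
            return null;

        // skip "Toolbox:" then rebuild the dictionary, standing in for LoadDictionaryFromString
        var body = persistString[(Prefix.Length + 1)..];
        var args = body.Split(',')
            .Select(pair => pair.Split('=', 2))
            .ToDictionary(p => p[0], p => p[1]);

        return args.TryGetValue("Toolbox", out var value) &&
               Enum.TryParse(value, true, out ExampleCollection collection)
            ? collection
            : null;
    }
}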
-namespace ResearchDataManagementPlatform.WindowManagement.Events -{ - /// - /// For responding to RDMPCollectionUI window opening events - /// - /// - /// - public delegate void RDMPCollectionCreatedEventHandler(object sender, RDMPCollectionCreatedEventHandlerArgs args); -} \ No newline at end of file +namespace ResearchDataManagementPlatform.WindowManagement.Events; + +/// +/// For responding to RDMPCollectionUI window opening events +/// +/// +/// +public delegate void RDMPCollectionCreatedEventHandler(object sender, RDMPCollectionCreatedEventHandlerArgs args); \ No newline at end of file diff --git a/Application/ResearchDataManagementPlatform/WindowManagement/Events/RDMPCollectionCreatedEventHandlerArgs.cs b/Application/ResearchDataManagementPlatform/WindowManagement/Events/RDMPCollectionCreatedEventHandlerArgs.cs index 46bb291339..1a41050783 100644 --- a/Application/ResearchDataManagementPlatform/WindowManagement/Events/RDMPCollectionCreatedEventHandlerArgs.cs +++ b/Application/ResearchDataManagementPlatform/WindowManagement/Events/RDMPCollectionCreatedEventHandlerArgs.cs @@ -6,18 +6,17 @@ using Rdmp.Core; -namespace ResearchDataManagementPlatform.WindowManagement.Events +namespace ResearchDataManagementPlatform.WindowManagement.Events; + +/// +/// Arguments for when an RDMPCollection has been made visible (opened) +/// +public class RDMPCollectionCreatedEventHandlerArgs { - /// - /// Arguments for when an RDMPCollection has been made visible (opened) - /// - public class RDMPCollectionCreatedEventHandlerArgs - { - public readonly RDMPCollection Collection; + public readonly RDMPCollection Collection; - public RDMPCollectionCreatedEventHandlerArgs(RDMPCollection collection) - { - Collection = collection; - } + public RDMPCollectionCreatedEventHandlerArgs(RDMPCollection collection) + { + Collection = collection; } } \ No newline at end of file diff --git a/Application/ResearchDataManagementPlatform/WindowManagement/ExtenderFunctionality/CustomFloatWindow.cs b/Application/ResearchDataManagementPlatform/WindowManagement/ExtenderFunctionality/CustomFloatWindow.cs index 5f2d6974eb..94af6aebea 100644 --- a/Application/ResearchDataManagementPlatform/WindowManagement/ExtenderFunctionality/CustomFloatWindow.cs +++ b/Application/ResearchDataManagementPlatform/WindowManagement/ExtenderFunctionality/CustomFloatWindow.cs @@ -10,44 +10,42 @@ using Rdmp.UI.Menus.MenuItems; using Rdmp.UI.SimpleControls; using ResearchDataManagementPlatform.WindowManagement.ContentWindowTracking.Persistence; - using WeifenLuo.WinFormsUI.Docking; -namespace ResearchDataManagementPlatform.WindowManagement.ExtenderFunctionality +namespace ResearchDataManagementPlatform.WindowManagement.ExtenderFunctionality; + +/// +/// Determines the window style of tabs dragged out of the main RDMPMainForm window to create new windows of that tab only. Currently the only change is to allow the user to resize +/// and maximise new tab windows +/// +[TechnicalUI] +[System.ComponentModel.DesignerCategory("")] +public class CustomFloatWindow : FloatWindow { - /// - /// Determines the window style of tabs dragged out of the main RDMPMainForm window to create new windows of that tab only. 
Currently the only change is to allow the user to resize - /// and maximise new tab windows - /// - [TechnicalUI] - [System.ComponentModel.DesignerCategory("")] - public class CustomFloatWindow:FloatWindow + protected internal CustomFloatWindow(DockPanel dockPanel, DockPane pane) : base(dockPanel, pane) { - protected internal CustomFloatWindow(DockPanel dockPanel, DockPane pane) : base(dockPanel, pane) - { - Initialize(); - - } - protected internal CustomFloatWindow(DockPanel dockPanel, DockPane pane, Rectangle bounds): base(dockPanel, pane, bounds) - { - Initialize(); - } + Initialize(); + } - private void Initialize() - { - FormBorderStyle = FormBorderStyle.Sizable; + protected internal CustomFloatWindow(DockPanel dockPanel, DockPane pane, Rectangle bounds) : base(dockPanel, pane, + bounds) + { + Initialize(); + } - var saveToolStripMenuItem = new SaveMenuItem(); - var singleObjectControlTab = this.DockPanel.ActiveDocument as RDMPSingleControlTab; + private void Initialize() + { + FormBorderStyle = FormBorderStyle.Sizable; - if (singleObjectControlTab == null) - { - saveToolStripMenuItem.Saveable = null; - return; - } + var saveToolStripMenuItem = new SaveMenuItem(); - var saveable = singleObjectControlTab.Control as ISaveableUI; - saveToolStripMenuItem.Saveable = saveable; + if (DockPanel.ActiveDocument is not RDMPSingleControlTab singleObjectControlTab) + { + saveToolStripMenuItem.Saveable = null; + return; } + + var saveable = singleObjectControlTab.Control as ISaveableUI; + saveToolStripMenuItem.Saveable = saveable; } -} +} \ No newline at end of file diff --git a/Application/ResearchDataManagementPlatform/WindowManagement/ExtenderFunctionality/CustomFloatWindowFactory.cs b/Application/ResearchDataManagementPlatform/WindowManagement/ExtenderFunctionality/CustomFloatWindowFactory.cs index 68c417a9da..e34dc54c9c 100644 --- a/Application/ResearchDataManagementPlatform/WindowManagement/ExtenderFunctionality/CustomFloatWindowFactory.cs +++ b/Application/ResearchDataManagementPlatform/WindowManagement/ExtenderFunctionality/CustomFloatWindowFactory.cs @@ -7,21 +7,15 @@ using System.Drawing; using WeifenLuo.WinFormsUI.Docking; -namespace ResearchDataManagementPlatform.WindowManagement.ExtenderFunctionality +namespace ResearchDataManagementPlatform.WindowManagement.ExtenderFunctionality; + +/// +/// Factory that creates custom Forms when a docked tab is dragged out into a new window (See CustomFloatWindow for implementation) +/// +public class CustomFloatWindowFactory : DockPanelExtender.IFloatWindowFactory { - /// - /// Factory that creates custom Forms when a docked tab is dragged out into a new window (See CustomFloatWindow for implementation) - /// - public class CustomFloatWindowFactory: DockPanelExtender.IFloatWindowFactory - { - public FloatWindow CreateFloatWindow(DockPanel dockPanel, DockPane pane) - { - return new CustomFloatWindow(dockPanel,pane); - } + public FloatWindow CreateFloatWindow(DockPanel dockPanel, DockPane pane) => new CustomFloatWindow(dockPanel, pane); - public FloatWindow CreateFloatWindow(DockPanel dockPanel, DockPane pane, Rectangle bounds) - { - return new CustomFloatWindow(dockPanel,pane,bounds); - } - } -} + public FloatWindow CreateFloatWindow(DockPanel dockPanel, DockPane pane, Rectangle bounds) => + new CustomFloatWindow(dockPanel, pane, bounds); +} \ No newline at end of file diff --git a/Application/ResearchDataManagementPlatform/WindowManagement/HomePane/HomeBoxUI.cs b/Application/ResearchDataManagementPlatform/WindowManagement/HomePane/HomeBoxUI.cs index 
179b55ba2d..3577516bc7 100644 --- a/Application/ResearchDataManagementPlatform/WindowManagement/HomePane/HomeBoxUI.cs +++ b/Application/ResearchDataManagementPlatform/WindowManagement/HomePane/HomeBoxUI.cs @@ -7,122 +7,121 @@ using System; using System.Linq; using System.Windows.Forms; -using MapsDirectlyToDatabaseTable; using Rdmp.Core; using Rdmp.Core.CommandExecution; using Rdmp.Core.CommandExecution.AtomicCommands; using Rdmp.Core.Curation.Data; using Rdmp.Core.Icons.IconProvision; +using Rdmp.Core.MapsDirectlyToDatabaseTable; using Rdmp.UI; using Rdmp.UI.Collections; using Rdmp.UI.Collections.Providers; -using Rdmp.UI.CommandExecution.AtomicCommands; using Rdmp.UI.CommandExecution.AtomicCommands.UIFactory; using Rdmp.UI.ItemActivation; -using Rdmp.UI.SimpleDialogs.NavigateTo; -namespace ResearchDataManagementPlatform.WindowManagement.HomePane +namespace ResearchDataManagementPlatform.WindowManagement.HomePane; + +public partial class HomeBoxUI : UserControl { - public partial class HomeBoxUI : UserControl + private IActivateItems _activator; + private bool _doneSetup; + private Type _openType; + + private RDMPCollectionCommonFunctionality CommonTreeFunctionality { get; } = new(); + + public HomeBoxUI() + { + InitializeComponent(); + olvRecent.ItemActivate += OlvRecent_ItemActivate; + } + + public void SetUp(IActivateItems activator, string title, Type openType, AtomicCommandUIFactory factory, + params IAtomicCommand[] newCommands) { - private IActivateItems _activator; - private bool _doneSetup = false; - private Type _openType; + _openType = openType; - RDMPCollectionCommonFunctionality CommonTreeFunctionality { get;} = new RDMPCollectionCommonFunctionality(); - public HomeBoxUI() - { - InitializeComponent(); - olvRecent.ItemActivate += OlvRecent_ItemActivate; - } - public void SetUp(IActivateItems activator,string title, Type openType,AtomicCommandUIFactory factory, params IAtomicCommand[] newCommands) + if (!_doneSetup) { - _openType = openType; - - - if (!_doneSetup) - { - _activator = activator; - lblTitle.Text = title; + _activator = activator; + lblTitle.Text = title; - btnNew.Image = FamFamFamIcons.add.ImageToBitmap(); - btnNew.Text = "New"; - btnNew.DisplayStyle = ToolStripItemDisplayStyle.Text; + btnNew.Image = FamFamFamIcons.add.ImageToBitmap(); + btnNew.Text = "New"; + btnNew.DisplayStyle = ToolStripItemDisplayStyle.Text; - btnNewDropdown.Image = FamFamFamIcons.add.ImageToBitmap(); - btnNewDropdown.Text = "New..."; - btnNewDropdown.DisplayStyle = ToolStripItemDisplayStyle.Text; + btnNewDropdown.Image = FamFamFamIcons.add.ImageToBitmap(); + btnNewDropdown.Text = "New..."; + btnNewDropdown.DisplayStyle = ToolStripItemDisplayStyle.Text; - btnOpen.Text = "Open"; - btnOpen.DisplayStyle = ToolStripItemDisplayStyle.Text; - btnOpen.Click += (s, e) => - { - if(activator.SelectObject(new DialogArgs + btnOpen.Text = "Open"; + btnOpen.DisplayStyle = ToolStripItemDisplayStyle.Text; + btnOpen.Click += (s, e) => + { + if (activator.SelectObject(new DialogArgs { WindowTitle = "Open" - },activator.GetAll(openType).ToArray(),out var selected)) - { - Open(selected); - } - }; + }, activator.GetAll(openType).ToArray(), out var selected)) + Open(selected); + }; - - //if there's only one command for new - if (newCommands.Length == 1) - { - //don't use the dropdown - toolStrip1.Items.Remove(btnNewDropdown); - btnNew.Click += (s,e)=>newCommands.Single().Execute(); - } - else - { - toolStrip1.Items.Remove(btnNew); - 
btnNewDropdown.DropDownItems.AddRange(newCommands.Select(factory.CreateMenuItem).Cast().ToArray()); - } - olvName.AspectGetter = (o) => ((HistoryEntry)o).Object.ToString(); - CommonTreeFunctionality.SetUp(RDMPCollection.None,olvRecent,activator,olvName,olvName,new RDMPCollectionCommonFunctionalitySettings() + //if there's only one command for new + if (newCommands.Length == 1) + { + //don't use the dropdown + toolStrip1.Items.Remove(btnNewDropdown); + btnNew.Click += (s, e) => newCommands.Single().Execute(); + } + else + { + toolStrip1.Items.Remove(btnNew); + btnNewDropdown.DropDownItems.AddRange(newCommands.Select(factory.CreateMenuItem).Cast() + .ToArray()); + } + + olvName.AspectGetter = o => ((HistoryEntry)o).Object.ToString(); + CommonTreeFunctionality.SetUp(RDMPCollection.None, olvRecent, activator, olvName, olvName, + new RDMPCollectionCommonFunctionalitySettings { SuppressChildrenAdder = true }); - _doneSetup = true; - } - - RefreshHistory(); + _doneSetup = true; } - private void RefreshHistory() - { - olvRecent.ClearObjects(); - olvRecent.AddObjects(_activator.HistoryProvider.History.Where(h=>h.Object.GetType() == _openType).ToArray()); - } + RefreshHistory(); + } + + private void RefreshHistory() + { + olvRecent.ClearObjects(); + olvRecent.AddObjects(_activator.HistoryProvider.History.Where(h => h.Object.GetType() == _openType).ToArray()); + } - private void Open(IMapsDirectlyToDatabaseTable o) + private void Open(IMapsDirectlyToDatabaseTable o) + { + if (!((DatabaseEntity)o).Exists()) { - if (!((DatabaseEntity) o).Exists()) + if (_activator.YesNo($"'{o}' no longer exists, remove from Recent list?", "No longer exists")) { - if (_activator.YesNo($"'{o}' no longer exists, remove from Recent list?", "No longer exists")) - { - _activator.HistoryProvider.Remove(o); - RefreshHistory(); - } - - return; + _activator.HistoryProvider.Remove(o); + RefreshHistory(); } - var cmd = new ExecuteCommandActivate(_activator, o) - { - AlsoShow = true - }; - cmd.Execute(); + return; } - private void OlvRecent_ItemActivate(object sender, EventArgs e) + var cmd = new ExecuteCommandActivate(_activator, o) { - if (olvRecent.SelectedObject is HistoryEntry he) - Open(he.Object); - } + AlsoShow = true + }; + cmd.Execute(); + } + + private void OlvRecent_ItemActivate(object sender, EventArgs e) + { + if (olvRecent.SelectedObject is HistoryEntry he) + Open(he.Object); } -} +} \ No newline at end of file diff --git a/Application/ResearchDataManagementPlatform/WindowManagement/HomePane/HomeUI.cs b/Application/ResearchDataManagementPlatform/WindowManagement/HomePane/HomeUI.cs index bfb6034813..4f57024a56 100644 --- a/Application/ResearchDataManagementPlatform/WindowManagement/HomePane/HomeUI.cs +++ b/Application/ResearchDataManagementPlatform/WindowManagement/HomePane/HomeUI.cs @@ -13,75 +13,74 @@ using Rdmp.Core.Curation.Data.Cohort; using Rdmp.Core.Curation.Data.DataLoad; using Rdmp.Core.DataExport.Data; -using Rdmp.UI.CohortUI.ImportCustomData; using Rdmp.UI.CommandExecution.AtomicCommands; using Rdmp.UI.CommandExecution.AtomicCommands.UIFactory; using Rdmp.UI.ItemActivation; using Rdmp.UI.Refreshing; using Rdmp.UI.TestsAndSetup.ServicePropogation; -namespace ResearchDataManagementPlatform.WindowManagement.HomePane +namespace ResearchDataManagementPlatform.WindowManagement.HomePane; + +/// +/// The starting page of RDMP. Provides a single easy access entry point into RDMP functionality for common tasks e.g. Data Management, Project Extraction etc. 
Click the links of commands +/// you want to carry out to access wizards that offer streamlined access to the RDMP functionality. +/// +/// You can access the HomeUI at any time by clicking the home icon in the top left of the RDMP tool bar. +/// +public partial class HomeUI : RDMPUserControl, ILifetimeSubscriber { - /// - /// The starting page of RDMP. Provides a single easy access entry point into RDMP functionality for common tasks e.g. Data Management, Project Extraction etc. Click the links of commands - /// you want to carry out to access wizards that offer streamlined access to the RDMP functionality. - /// - /// You can access the HomeUI at any time by clicking the home icon in the top left of the RDMP tool bar. - /// - public partial class HomeUI : RDMPUserControl,ILifetimeSubscriber + private readonly IActivateItems _activator; + private readonly AtomicCommandUIFactory _uiFactory; + + public HomeUI(IActivateItems activator) { - private readonly IActivateItems _activator; - private readonly AtomicCommandUIFactory _uiFactory; + _activator = activator; + _uiFactory = new AtomicCommandUIFactory(activator); + InitializeComponent(); + } - public HomeUI(IActivateItems activator) - { - _activator = activator; - _uiFactory = new AtomicCommandUIFactory(activator); - InitializeComponent(); - } + private void BuildCommandLists() + { + boxCatalogue.SetUp(Activator, "Catalogue", typeof(Catalogue), _uiFactory, + new ExecuteCommandCreateNewCatalogueByImportingFileUI(_activator) + { + OverrideCommandName = GlobalStrings.FromFile + }, + new ExecuteCommandCreateNewCatalogueByImportingExistingDataTable(_activator) + { + OverrideCommandName = GlobalStrings.FromDatabase + }); + boxProject.SetUp(Activator, "Project", typeof(Project), _uiFactory, + new ExecuteCommandCreateNewDataExtractionProject(_activator)); - private void BuildCommandLists() - { - boxCatalogue.SetUp(Activator,"Catalogue",typeof(Catalogue),_uiFactory, - new ExecuteCommandCreateNewCatalogueByImportingFileUI(_activator) - { - OverrideCommandName = GlobalStrings.FromFile - }, - new ExecuteCommandCreateNewCatalogueByImportingExistingDataTable(_activator) - { - OverrideCommandName = GlobalStrings.FromDatabase - }); - boxProject.SetUp(Activator,"Project",typeof(Project),_uiFactory, new ExecuteCommandCreateNewDataExtractionProject(_activator)); - - boxCohort.SetUp(Activator,"Cohort Builder", typeof(CohortIdentificationConfiguration),_uiFactory, - new ExecuteCommandCreateNewCohortIdentificationConfiguration(_activator) - { - OverrideCommandName = "Cohort Builder Query", - PromptToPickAProject = true + boxCohort.SetUp(Activator, "Cohort Builder", typeof(CohortIdentificationConfiguration), _uiFactory, + new ExecuteCommandCreateNewCohortIdentificationConfiguration(_activator) + { + OverrideCommandName = "Cohort Builder Query", + PromptToPickAProject = true + }, + new ExecuteCommandCreateNewCohortFromFile(_activator, null, null) + { + OverrideCommandName = GlobalStrings.FromFile + } + ); + boxDataLoad.SetUp(Activator, "Data Load", typeof(LoadMetadata), _uiFactory, + new ExecuteCommandCreateNewLoadMetadata(_activator)); + } - }, - new ExecuteCommandCreateNewCohortFromFile(_activator,null,null) - { - OverrideCommandName = GlobalStrings.FromFile - } - ); - boxDataLoad.SetUp(Activator,"Data Load",typeof(LoadMetadata),_uiFactory,new ExecuteCommandCreateNewLoadMetadata(_activator)); - } - - protected override void OnLoad(EventArgs e) - { - base.OnLoad(e); - - SetItemActivator(_activator); + protected override void OnLoad(EventArgs e) + { + 
base.OnLoad(e); + + SetItemActivator(_activator); - BuildCommandLists(); + BuildCommandLists(); - _activator.RefreshBus.EstablishLifetimeSubscription(this); - } + _activator.RefreshBus.EstablishLifetimeSubscription(this); + } - public void RefreshBus_RefreshObject(object sender, RefreshObjectEventArgs e) - { - BuildCommandLists(); - } + public void RefreshBus_RefreshObject(object sender, RefreshObjectEventArgs e) + { + BuildCommandLists(); } -} +} \ No newline at end of file diff --git a/Application/ResearchDataManagementPlatform/WindowManagement/INavigation.cs b/Application/ResearchDataManagementPlatform/WindowManagement/INavigation.cs index 5564f1474c..1d5f344b3d 100644 --- a/Application/ResearchDataManagementPlatform/WindowManagement/INavigation.cs +++ b/Application/ResearchDataManagementPlatform/WindowManagement/INavigation.cs @@ -4,16 +4,15 @@ // RDMP is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. // You should have received a copy of the GNU General Public License along with RDMP. If not, see . -namespace ResearchDataManagementPlatform.WindowManagement +namespace ResearchDataManagementPlatform.WindowManagement; + +/// +/// Describes a location the user visited that may or may not still exist (e.g. be an open tab) and which can be revisited through history (e.g. a back button). +/// +public interface INavigation { - /// - /// Describes a location the user visited that may or may not still exist (e.g. be an open tab) and which can be revisited through history (e.g. a back button). - /// - public interface INavigation - { - bool IsAlive { get; } + bool IsAlive { get; } - void Activate(ActivateItems activateItems); - void Close(); - } -} + void Activate(ActivateItems activateItems); + void Close(); +} \ No newline at end of file diff --git a/Application/ResearchDataManagementPlatform/WindowManagement/Licenses/License.cs b/Application/ResearchDataManagementPlatform/WindowManagement/Licenses/License.cs index f76ca967be..a27cf6aa47 100644 --- a/Application/ResearchDataManagementPlatform/WindowManagement/Licenses/License.cs +++ b/Application/ResearchDataManagementPlatform/WindowManagement/Licenses/License.cs @@ -8,62 +8,52 @@ using System.IO; using System.Security.Cryptography; -namespace ResearchDataManagementPlatform.WindowManagement.Licenses +namespace ResearchDataManagementPlatform.WindowManagement.Licenses; + +/// +/// Facilitates reading from the embedded license files for RDMP and third party libraries. Also generates MD5 for tracking when a user has +/// agreed to a license that has been subsequently changed in a software update (e.g. if we use a new library). +/// +public class License { + private readonly string _resourceFilename; + private const string LicenseResourcePath = "ResearchDataManagementPlatform.WindowManagement.Licenses."; + /// - /// Facilitates reading from the embedded license files for RDMP and third party libraries. Also generates MD5 for tracking when a user has - /// agreed to a license that has been subsequently changed in a software update (e.g. if we use a new library). + /// The local path to the license file resource within this assembly e.g. 
LICENSE / LIBRARYLICENSES /// - public class License + /// + public License(string resourceFilename = "LICENSE") { - private readonly string _resourceFilename; - private const string LicenseResourcePath = "ResearchDataManagementPlatform.WindowManagement.Licenses."; - - /// - /// The local path to the license file resource within this assembly e.g. LICENSE / LIBRARYLICENSES - /// - /// - public License(string resourceFilename = "LICENSE") - { - resourceFilename = LicenseResourcePath + resourceFilename; - _resourceFilename = resourceFilename; - } - - /// - /// Computes an MD5 Hash of the current License text - /// - /// - public string GetHashOfLicense() - { - using (var hashProvider = SHA512.Create()) - { - using (var stream = GetStream()) - { - return BitConverter.ToString(hashProvider.ComputeHash(stream)); - } - } - } + resourceFilename = LicenseResourcePath + resourceFilename; + _resourceFilename = resourceFilename; + } - /// - /// Returns the current License text - /// - /// - public string GetLicenseText() - { - using (var stream = GetStream()) - { - return new StreamReader(stream).ReadToEnd(); - } - } + /// + /// Computes an MD5 Hash of the current License text + /// + /// + public string GetHashOfLicense() + { + using var hashProvider = SHA512.Create(); + using var stream = GetStream(); + return BitConverter.ToString(hashProvider.ComputeHash(stream)); + } - private Stream GetStream() - { - var stream = typeof (License).Assembly.GetManifestResourceStream(_resourceFilename); + /// + /// Returns the current License text + /// + /// + public string GetLicenseText() + { + using var stream = GetStream(); + return new StreamReader(stream).ReadToEnd(); + } - if (stream == null) - throw new Exception("Could not find EmbeddedResource '" + _resourceFilename + "'"); + private Stream GetStream() + { + var stream = typeof(License).Assembly.GetManifestResourceStream(_resourceFilename); - return stream; - } + return stream ?? throw new Exception($"Could not find EmbeddedResource '{_resourceFilename}'"); } } \ No newline at end of file diff --git a/Application/ResearchDataManagementPlatform/WindowManagement/Licenses/LicenseUI.cs b/Application/ResearchDataManagementPlatform/WindowManagement/Licenses/LicenseUI.cs index 1b7eff3648..9160c4bdfc 100644 --- a/Application/ResearchDataManagementPlatform/WindowManagement/Licenses/LicenseUI.cs +++ b/Application/ResearchDataManagementPlatform/WindowManagement/Licenses/LicenseUI.cs @@ -7,73 +7,71 @@ using System; using System.Diagnostics; using System.Windows.Forms; +using Rdmp.Core.ReusableLibraryCode.Settings; using Rdmp.UI.SimpleDialogs; -using ReusableLibraryCode.Settings; -namespace ResearchDataManagementPlatform.WindowManagement.Licenses -{ +namespace ResearchDataManagementPlatform.WindowManagement.Licenses; - /// - /// Displays the open source license for RDMP and so shows the license for all the third party plugins. You must either accept or decline the license . - /// Declining will close the Form. This form is shown for the first time on startup or again any time you have declined the conditions. - /// - public partial class LicenseUI : Form +/// +/// Displays the open source license for RDMP and so shows the license for all the third party plugins. You must either accept or decline the license . +/// Declining will close the Form. This form is shown for the first time on startup or again any time you have declined the conditions. 
+/// +public partial class LicenseUI : Form +{ + public LicenseUI() { - public LicenseUI() + InitializeComponent(); + + try { - InitializeComponent(); + _main = new License("LICENSE"); + _thirdParth = new License("LIBRARYLICENSES"); - try + rtLicense.Text = _main.GetLicenseText(); + rtLicense.KeyDown += (s, e) => { - _main = new License("LICENSE"); - _thirdParth = new License("LIBRARYLICENSES"); + if (e.KeyCode == Keys.Enter) + btnAccept_Click(btnAccept, EventArgs.Empty); - rtLicense.Text = _main.GetLicenseText(); - rtLicense.KeyDown += (s, e) => - { - if (e.KeyCode == Keys.Enter) - btnAccept_Click(btnAccept, new EventArgs()); + // prevents it going BONG! + e.SuppressKeyPress = true; + }; - // prevents it going BONG! - e.SuppressKeyPress = true; - }; - - rtThirdPartyLicense.Text = _thirdParth.GetLicenseText(); - } - catch (Exception ex) - { - ExceptionViewer.Show(ex); - } + rtThirdPartyLicense.Text = _thirdParth.GetLicenseText(); } + catch (Exception ex) + { + ExceptionViewer.Show(ex); + } + } - private bool allowClose = false; + private bool allowClose; - private License _main; - private License _thirdParth; + private License _main; + private License _thirdParth; - private void btnAccept_Click(object sender, EventArgs e) - { - UserSettings.LicenseAccepted = _thirdParth.GetHashOfLicense(); - allowClose = true; - this.Close(); - } + private void btnAccept_Click(object sender, EventArgs e) + { + UserSettings.LicenseAccepted = _thirdParth.GetHashOfLicense(); + allowClose = true; + Close(); + } - private void btnDecline_Click(object sender, EventArgs e) - { - UserSettings.LicenseAccepted = null; - allowClose = true; - Process.GetCurrentProcess().Kill(); - } + private void btnDecline_Click(object sender, EventArgs e) + { + UserSettings.LicenseAccepted = null; + allowClose = true; + Process.GetCurrentProcess().Kill(); + } - private void LicenseUI_FormClosing(object sender, FormClosingEventArgs e) + private void LicenseUI_FormClosing(object sender, FormClosingEventArgs e) + { + if (UserSettings.LicenseAccepted != _thirdParth.GetHashOfLicense() && !allowClose) { - if (UserSettings.LicenseAccepted != _thirdParth.GetHashOfLicense() && !allowClose) - { - e.Cancel = true; - MessageBox.Show("You have not accepted/declined the license"); - } + e.Cancel = true; + MessageBox.Show("You have not accepted/declined the license"); } } -} +} \ No newline at end of file diff --git a/Application/ResearchDataManagementPlatform/WindowManagement/TabChangedHandler.cs b/Application/ResearchDataManagementPlatform/WindowManagement/TabChangedHandler.cs index 6fa4934c6e..fbb205d769 100644 --- a/Application/ResearchDataManagementPlatform/WindowManagement/TabChangedHandler.cs +++ b/Application/ResearchDataManagementPlatform/WindowManagement/TabChangedHandler.cs @@ -6,12 +6,11 @@ using WeifenLuo.WinFormsUI.Docking; -namespace ResearchDataManagementPlatform.WindowManagement -{ - /// - /// Occurs when user changes which tab has focus - /// - /// - /// The newly focused tab - public delegate void TabChangedHandler(object sender, IDockContent newTab); -} \ No newline at end of file +namespace ResearchDataManagementPlatform.WindowManagement; + +/// +/// Occurs when user changes which tab has focus +/// +/// +/// The newly focused tab +public delegate void TabChangedHandler(object sender, IDockContent newTab); \ No newline at end of file diff --git a/Application/ResearchDataManagementPlatform/WindowManagement/TabNavigation.cs b/Application/ResearchDataManagementPlatform/WindowManagement/TabNavigation.cs index 
0009582015..d598d51cb9 100644 --- a/Application/ResearchDataManagementPlatform/WindowManagement/TabNavigation.cs +++ b/Application/ResearchDataManagementPlatform/WindowManagement/TabNavigation.cs @@ -7,48 +7,43 @@ using System.Collections.Generic; using WeifenLuo.WinFormsUI.Docking; -namespace ResearchDataManagementPlatform.WindowManagement +namespace ResearchDataManagementPlatform.WindowManagement; + +/// +/// Records the fact that the user visited a specific (Tab) +/// +public class TabNavigation : INavigation { - /// - /// Records the fact that the user visited a specific (Tab) - /// - public class TabNavigation: INavigation - { - public DockContent Tab { get; } + public DockContent Tab { get; } - public bool IsAlive => Tab.ParentForm != null; + public bool IsAlive => Tab.ParentForm != null; - public TabNavigation(DockContent tab) - { - Tab = tab; - } + public TabNavigation(DockContent tab) + { + Tab = tab; + } - public void Activate(ActivateItems activateItems) - { - Tab.Activate(); - } + public void Activate(ActivateItems activateItems) + { + Tab.Activate(); + } - public void Close() - { - Tab.Close(); - } - public override string ToString() - { - return Tab.TabText; - } + public void Close() + { + Tab.Close(); + } - public override bool Equals(object obj) - { - return obj is TabNavigation navigation && - EqualityComparer.Default.Equals(Tab, navigation.Tab); - } + public override string ToString() => Tab.TabText; - public override int GetHashCode() + public override bool Equals(object obj) => + obj is TabNavigation navigation && + EqualityComparer.Default.Equals(Tab, navigation.Tab); + + public override int GetHashCode() + { + unchecked { - unchecked - { - return -2031380020 + EqualityComparer.Default.GetHashCode(Tab); - } + return -2031380020 + EqualityComparer.Default.GetHashCode(Tab); } } -} +} \ No newline at end of file diff --git a/Application/ResearchDataManagementPlatform/WindowManagement/TabPageContextMenus/RDMPSingleControlTabMenu.cs b/Application/ResearchDataManagementPlatform/WindowManagement/TabPageContextMenus/RDMPSingleControlTabMenu.cs index 778e5225e8..bf2b4912bc 100644 --- a/Application/ResearchDataManagementPlatform/WindowManagement/TabPageContextMenus/RDMPSingleControlTabMenu.cs +++ b/Application/ResearchDataManagementPlatform/WindowManagement/TabPageContextMenus/RDMPSingleControlTabMenu.cs @@ -15,46 +15,48 @@ using Rdmp.UI.Menus; using ResearchDataManagementPlatform.WindowManagement.ContentWindowTracking.Persistence; -namespace ResearchDataManagementPlatform.WindowManagement.TabPageContextMenus +namespace ResearchDataManagementPlatform.WindowManagement.TabPageContextMenus; + +/// +/// Right click menu for the top tab section of a docked tab in RDMP main application. +/// +[System.ComponentModel.DesignerCategory("")] +public class RDMPSingleControlTabMenu : ContextMenuStrip { - /// - /// Right click menu for the top tab section of a docked tab in RDMP main application. 
- /// - [System.ComponentModel.DesignerCategory("")] - public class RDMPSingleControlTabMenu : ContextMenuStrip + public RDMPSingleControlTabMenu(IActivateItems activator, RDMPSingleControlTab tab, WindowManager windowManager) { - private readonly RDMPSingleControlTab _tab; + var tab1 = tab; + Items.Add("Close Tab", null, (s, e) => tab.Close()); + Items.Add("Close All Tabs", null, (s, e) => windowManager.CloseAllWindows(tab)); + Items.Add("Close All But This", null, (s, e) => windowManager.CloseAllButThis(tab)); + + Items.Add("Show", null, (s, e) => tab.HandleUserRequestingEmphasis(activator)); - public RDMPSingleControlTabMenu(IActivateItems activator, RDMPSingleControlTab tab, WindowManager windowManager) + if (tab is PersistableSingleDatabaseObjectDockContent single) { - _tab = tab; - Items.Add("Close Tab", null, (s, e) => tab.Close()); - Items.Add("Close All Tabs", null, (s, e) => windowManager.CloseAllWindows(tab)); - Items.Add("Close All But This", null, (s, e) => windowManager.CloseAllButThis(tab)); + var uiFactory = new AtomicCommandUIFactory(activator); + var builder = new GoToCommandFactory(activator); - Items.Add("Show", null, (s, e) => tab.HandleUserRequestingEmphasis(activator)); + var gotoMenu = new ToolStripMenuItem(AtomicCommandFactory.GoTo) { Enabled = false }; + Items.Add(gotoMenu); - if (tab is PersistableSingleDatabaseObjectDockContent single) + foreach (var cmd in builder.GetCommands(single.DatabaseObject).OfType()) { - var uiFactory = new AtomicCommandUIFactory(activator); - var builder = new GoToCommandFactory(activator); - - var gotoMenu = new ToolStripMenuItem(AtomicCommandFactory.GoTo){Enabled = false }; - Items.Add(gotoMenu); - - foreach(var cmd in builder.GetCommands(single.DatabaseObject).OfType()) - { - gotoMenu.DropDownItems.Add(uiFactory.CreateMenuItem(cmd)); - gotoMenu.Enabled = true; - } - RDMPContextMenuStrip.RegisterFetchGoToObjecstCallback(gotoMenu); + gotoMenu.DropDownItems.Add(uiFactory.CreateMenuItem(cmd)); + gotoMenu.Enabled = true; } - Items.Add("Refresh", FamFamFamIcons.arrow_refresh.ImageToBitmap(), (s, e) => _tab.HandleUserRequestingTabRefresh(activator)); - - var help = new ToolStripMenuItem("Help", FamFamFamIcons.help.ImageToBitmap(), (s, e) => _tab.ShowHelp(activator)); - help.ShortcutKeys = Keys.F1; - Items.Add(help); + RDMPContextMenuStrip.RegisterFetchGoToObjecstCallback(gotoMenu); } + + Items.Add("Refresh", FamFamFamIcons.arrow_refresh.ImageToBitmap(), + (s, e) => tab1.HandleUserRequestingTabRefresh(activator)); + + var help = new ToolStripMenuItem("Help", FamFamFamIcons.help.ImageToBitmap(), + (s, e) => tab1.ShowHelp(activator)) + { + ShortcutKeys = Keys.F1 + }; + Items.Add(help); } -} +} \ No newline at end of file diff --git a/Application/ResearchDataManagementPlatform/WindowManagement/TopBar/RDMPTaskBarUI.cs b/Application/ResearchDataManagementPlatform/WindowManagement/TopBar/RDMPTaskBarUI.cs index 5da6ecc06f..6639022c8a 100644 --- a/Application/ResearchDataManagementPlatform/WindowManagement/TopBar/RDMPTaskBarUI.cs +++ b/Application/ResearchDataManagementPlatform/WindowManagement/TopBar/RDMPTaskBarUI.cs @@ -6,296 +6,289 @@ using System; using System.Windows.Forms; -using MapsDirectlyToDatabaseTable; using Rdmp.Core; using Rdmp.Core.CommandExecution.AtomicCommands; using Rdmp.Core.Curation.Data; using Rdmp.Core.Icons.IconProvision; +using Rdmp.Core.MapsDirectlyToDatabaseTable; +using Rdmp.Core.ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Settings; using Rdmp.UI; using Rdmp.UI.SimpleDialogs; using Rdmp.UI.Theme; 
-using ReusableLibraryCode.Checks; -using ReusableLibraryCode.Settings; -namespace ResearchDataManagementPlatform.WindowManagement.TopBar +namespace ResearchDataManagementPlatform.WindowManagement.TopBar; + +/// +/// Allows you to access the main object collections that make up the RDMP. These include +/// +public partial class RDMPTaskBarUI : UserControl { - /// - /// Allows you to access the main object collections that make up the RDMP. These include - /// - public partial class RDMPTaskBarUI : UserControl - { - private WindowManager _manager; - - private const string CreateNewLayout = "<>"; + private WindowManager _manager; - public RDMPTaskBarUI() - { - InitializeComponent(); - BackColorProvider provider = new BackColorProvider(); + private const string CreateNewLayout = "<>"; - btnHome.Image = FamFamFamIcons.application_home.ImageToBitmap(); - btnCatalogues.Image = CatalogueIcons.Catalogue.ImageToBitmap(); - btnCatalogues.BackgroundImage = provider.GetBackgroundImage(btnCatalogues.Size, RDMPCollection.Catalogue); + public RDMPTaskBarUI() + { + InitializeComponent(); - btnCohorts.Image = CatalogueIcons.CohortIdentificationConfiguration.ImageToBitmap(); - btnCohorts.BackgroundImage = provider.GetBackgroundImage(btnCohorts.Size, RDMPCollection.Cohort); + btnHome.Image = FamFamFamIcons.application_home.ImageToBitmap(); + btnCatalogues.Image = CatalogueIcons.Catalogue.ImageToBitmap(); + btnCatalogues.BackgroundImage = + BackColorProvider.GetBackgroundImage(btnCatalogues.Size, RDMPCollection.Catalogue); - btnSavedCohorts.Image = CatalogueIcons.AllCohortsNode.ImageToBitmap(); - btnSavedCohorts.BackgroundImage = provider.GetBackgroundImage(btnSavedCohorts.Size, RDMPCollection.SavedCohorts); + btnCohorts.Image = CatalogueIcons.CohortIdentificationConfiguration.ImageToBitmap(); + btnCohorts.BackgroundImage = BackColorProvider.GetBackgroundImage(btnCohorts.Size, RDMPCollection.Cohort); - btnDataExport.Image = CatalogueIcons.Project.ImageToBitmap(); - btnDataExport.BackgroundImage = provider.GetBackgroundImage(btnDataExport.Size, RDMPCollection.DataExport); + btnSavedCohorts.Image = CatalogueIcons.AllCohortsNode.ImageToBitmap(); + btnSavedCohorts.BackgroundImage = + BackColorProvider.GetBackgroundImage(btnSavedCohorts.Size, RDMPCollection.SavedCohorts); - btnTables.Image = CatalogueIcons.TableInfo.ImageToBitmap(); - btnTables.BackgroundImage = provider.GetBackgroundImage(btnTables.Size, RDMPCollection.Tables); + btnDataExport.Image = CatalogueIcons.Project.ImageToBitmap(); + btnDataExport.BackgroundImage = + BackColorProvider.GetBackgroundImage(btnDataExport.Size, RDMPCollection.DataExport); - btnLoad.Image = CatalogueIcons.LoadMetadata.ImageToBitmap(); - btnLoad.BackgroundImage = provider.GetBackgroundImage(btnLoad.Size, RDMPCollection.DataLoad); - - btnFavourites.Image = CatalogueIcons.Favourite.ImageToBitmap(); - btnDeleteLayout.Image = FamFamFamIcons.delete.ImageToBitmap(); + btnTables.Image = CatalogueIcons.TableInfo.ImageToBitmap(); + btnTables.BackgroundImage = BackColorProvider.GetBackgroundImage(btnTables.Size, RDMPCollection.Tables); - cbCommits.Image = CatalogueIcons.Commit.ImageToBitmap(); - cbCommits.Checked = UserSettings.EnableCommits; - cbCommits.CheckedChanged += (s, e) => UserSettings.EnableCommits = cbCommits.Checked; - cbCommits.CheckOnClick = true; - } + btnLoad.Image = CatalogueIcons.LoadMetadata.ImageToBitmap(); + btnLoad.BackgroundImage = BackColorProvider.GetBackgroundImage(btnLoad.Size, RDMPCollection.DataLoad); - public void SetWindowManager(WindowManager manager) - { 
- _manager = manager; + btnFavourites.Image = CatalogueIcons.Favourite.ImageToBitmap(); + btnDeleteLayout.Image = FamFamFamIcons.delete.ImageToBitmap(); - //Update task bar buttons enabledness when the user navigates somewhere - _manager.Navigation.Changed += (s,e)=> UpdateForwardBackEnabled(); + cbCommits.Image = CatalogueIcons.Commit.ImageToBitmap(); + cbCommits.Checked = UserSettings.EnableCommits; + cbCommits.CheckedChanged += (s, e) => UserSettings.EnableCommits = cbCommits.Checked; + cbCommits.CheckOnClick = true; + } - btnDataExport.Enabled = manager.RepositoryLocator.DataExportRepository != null; - - ReCreateDropDowns(); - - SetupToolTipText(); - - _manager.ActivateItems.Theme.ApplyTo(toolStrip1); - - // if we don't support commit system then disable the task bar button for it - if(!_manager.ActivateItems.RepositoryLocator.CatalogueRepository.SupportsCommits) - { - cbCommits.Enabled = false; - cbCommits.Text = "Repository does not support commits"; - } - - } + public void SetWindowManager(WindowManager manager) + { + _manager = manager; - /// - /// Updates the enabled status (greyed out) of the Forward/Back buttons based on the current - /// - void UpdateForwardBackEnabled() - { - btnBack.Enabled = _manager.Navigation.CanBack(); - btnForward.Enabled = _manager.Navigation.CanForward(); - } + //Update task bar buttons enabledness when the user navigates somewhere + _manager.Navigation.Changed += (s, e) => UpdateForwardBackEnabled(); + btnDataExport.Enabled = manager.RepositoryLocator.DataExportRepository != null; - private void SetupToolTipText() - { - try - { - btnHome.ToolTipText = "Home screen, shows recent objects etc"; - btnCatalogues.ToolTipText = "All datasets configured for access by RDMP"; - btnCohorts.ToolTipText = "Built queries for creating cohorts"; - btnSavedCohorts.ToolTipText = "Finalised identifier lists, ready for linkage and extraction"; - btnDataExport.ToolTipText = "Show Projects and Extractable Dataset Packages allowing data extraction"; - btnTables.ToolTipText = "Advanced features e.g. 
logging, credentials, dashboards etc"; - btnLoad.ToolTipText = "Load configurations for reading data into your databases"; - btnFavourites.ToolTipText = "Collection of all objects that you have favourited"; - } - catch (Exception e) - { - _manager.ActivateItems.GlobalErrorCheckNotifier.OnCheckPerformed(new CheckEventArgs("Failed to setup tool tips", CheckResult.Fail, e)); - } + ReCreateDropDowns(); - } + SetupToolTipText(); - private void ReCreateDropDowns() + _manager.ActivateItems.Theme.ApplyTo(toolStrip1); + + // if we don't support commit system then disable the task bar button for it + if (!_manager.ActivateItems.RepositoryLocator.CatalogueRepository.SupportsCommits) { - CreateDropDown(cbxLayouts, CreateNewLayout); + cbCommits.Enabled = false; + cbCommits.Text = "Repository does not support commits"; } + } - private void CreateDropDown(ToolStripComboBox cbx, string createNewDashboard) where T:IMapsDirectlyToDatabaseTable, INamed - { - const int xPaddingForComboText = 10; + /// + /// Updates the enabled status (greyed out) of the Forward/Back buttons based on the current + /// + private void UpdateForwardBackEnabled() + { + btnBack.Enabled = _manager.Navigation.CanBack(); + btnForward.Enabled = _manager.Navigation.CanForward(); + } - if (cbx.ComboBox == null) - throw new Exception("Expected combo box!"); - - cbx.ComboBox.Items.Clear(); - var objects = _manager.RepositoryLocator.CatalogueRepository.GetAllObjects(); + private void SetupToolTipText() + { + try + { + btnHome.ToolTipText = "Home screen, shows recent objects etc"; + btnCatalogues.ToolTipText = "All datasets configured for access by RDMP"; + btnCohorts.ToolTipText = "Built queries for creating cohorts"; + btnSavedCohorts.ToolTipText = "Finalised identifier lists, ready for linkage and extraction"; + btnDataExport.ToolTipText = "Show Projects and Extractable Dataset Packages allowing data extraction"; + btnTables.ToolTipText = "Advanced features e.g. logging, credentials, dashboards etc"; + btnLoad.ToolTipText = "Load configurations for reading data into your databases"; + btnFavourites.ToolTipText = "Collection of all objects that you have favourited"; + } + catch (Exception e) + { + _manager.ActivateItems.GlobalErrorCheckNotifier.OnCheckPerformed( + new CheckEventArgs("Failed to setup tool tips", CheckResult.Fail, e)); + } + } - cbx.ComboBox.Items.Add(""); + private void ReCreateDropDowns() + { + CreateDropDown(cbxLayouts, CreateNewLayout); + } - //minimum size that it will be (same width as the combo box) - int proposedComboBoxWidth = cbx.Width - xPaddingForComboText; + private void CreateDropDown(ToolStripComboBox cbx, string createNewDashboard) + where T : IMapsDirectlyToDatabaseTable, INamed + { + const int xPaddingForComboText = 10; - foreach (T o in objects) - { - //add dropdown item - cbx.ComboBox.Items.Add(o); + if (cbx.ComboBox == null) + throw new Exception("Expected combo box!"); - //will that label be too big to fit in text box? 
if so expand the max width - proposedComboBoxWidth = Math.Max(proposedComboBoxWidth, TextRenderer.MeasureText(o.Name, cbx.Font).Width); - } + cbx.ComboBox.Items.Clear(); - cbx.DropDownWidth = Math.Min(400, proposedComboBoxWidth + xPaddingForComboText); - cbx.ComboBox.SelectedItem = ""; + var objects = _manager.RepositoryLocator.CatalogueRepository.GetAllObjects(); - cbx.Items.Add(createNewDashboard); - } + cbx.ComboBox.Items.Add(""); - private void btnHome_Click(object sender, EventArgs e) - { - _manager.PopHome(); - } + //minimum size that it will be (same width as the combo box) + var proposedComboBoxWidth = cbx.Width - xPaddingForComboText; - private void ToolboxButtonClicked(object sender, EventArgs e) + foreach (var o in objects) { - RDMPCollection collection = ButtonToEnum(sender); + //add dropdown item + cbx.ComboBox.Items.Add(o); - if (_manager.IsVisible(collection)) - _manager.Pop(collection); - else - _manager.Create(collection); + //will that label be too big to fit in text box? if so expand the max width + proposedComboBoxWidth = Math.Max(proposedComboBoxWidth, TextRenderer.MeasureText(o.Name, cbx.Font).Width); } - private RDMPCollection ButtonToEnum(object button) - { - RDMPCollection collectionToToggle; - - if (button == btnCatalogues) - collectionToToggle = RDMPCollection.Catalogue; - else - if (button == btnCohorts) - collectionToToggle = RDMPCollection.Cohort; - else - if (button == btnDataExport) - collectionToToggle = RDMPCollection.DataExport; - else - if (button == btnTables) - collectionToToggle = RDMPCollection.Tables; - else - if (button == btnLoad) - collectionToToggle = RDMPCollection.DataLoad; - else if (button == btnSavedCohorts) - collectionToToggle = RDMPCollection.SavedCohorts; - else if (button == btnFavourites) - collectionToToggle = RDMPCollection.Favourites; - else - throw new ArgumentOutOfRangeException(); - - return collectionToToggle; - } + cbx.DropDownWidth = Math.Min(400, proposedComboBoxWidth + xPaddingForComboText); + cbx.ComboBox.SelectedItem = ""; - - private void cbx_DropDownClosed(object sender, EventArgs e) - { - var cbx = (ToolStripComboBox)sender; - var toOpen = cbx.SelectedItem as INamed; + cbx.Items.Add(createNewDashboard); + } + + private void btnHome_Click(object sender, EventArgs e) + { + _manager.PopHome(); + } - if (ReferenceEquals(cbx.SelectedItem, CreateNewLayout)) - AddNewLayout(); + private void ToolboxButtonClicked(object sender, EventArgs e) + { + var collection = ButtonToEnum(sender); - if (toOpen != null) - { - var cmd = new ExecuteCommandActivate(_manager.ActivateItems, toOpen); - cmd.Execute(); - } + if (_manager.IsVisible(collection)) + _manager.Pop(collection); + else + _manager.Create(collection); + } - UpdateButtonEnabledness(); - } + private RDMPCollection ButtonToEnum(object button) + { + RDMPCollection collectionToToggle; + + if (button == btnCatalogues) + collectionToToggle = RDMPCollection.Catalogue; + else if (button == btnCohorts) + collectionToToggle = RDMPCollection.Cohort; + else if (button == btnDataExport) + collectionToToggle = RDMPCollection.DataExport; + else if (button == btnTables) + collectionToToggle = RDMPCollection.Tables; + else if (button == btnLoad) + collectionToToggle = RDMPCollection.DataLoad; + else if (button == btnSavedCohorts) + collectionToToggle = RDMPCollection.SavedCohorts; + else if (button == btnFavourites) + collectionToToggle = RDMPCollection.Favourites; + else + throw new ArgumentOutOfRangeException(nameof(button)); + + return collectionToToggle; + } + private void 
cbx_DropDownClosed(object sender, EventArgs e) + { + var cbx = (ToolStripComboBox)sender; - private void cbx_SelectedIndexChanged(object sender, EventArgs e) - { - UpdateButtonEnabledness(); - } + if (ReferenceEquals(cbx.SelectedItem, CreateNewLayout)) + AddNewLayout(); - private void UpdateButtonEnabledness() + if (cbx.SelectedItem is INamed toOpen) { - btnSaveWindowLayout.Enabled = cbxLayouts.SelectedItem is WindowLayout; - btnDeleteLayout.Enabled = cbxLayouts.SelectedItem is WindowLayout; + var cmd = new ExecuteCommandActivate(_manager.ActivateItems, toOpen); + cmd.Execute(); } - private void AddNewLayout() - { - string xml = _manager.MainForm.GetCurrentLayoutXml(); + UpdateButtonEnabledness(); + } - var dialog = new TypeTextOrCancelDialog("Layout Name", "Name", 100, null, false); - if (dialog.ShowDialog() == DialogResult.OK) - { - var layout = new WindowLayout(_manager.RepositoryLocator.CatalogueRepository, dialog.ResultText,xml); - var cmd = new ExecuteCommandActivate(_manager.ActivateItems, layout); - cmd.Execute(); + private void cbx_SelectedIndexChanged(object sender, EventArgs e) + { + UpdateButtonEnabledness(); + } - ReCreateDropDowns(); - } - } + private void UpdateButtonEnabledness() + { + btnSaveWindowLayout.Enabled = cbxLayouts.SelectedItem is WindowLayout; + btnDeleteLayout.Enabled = cbxLayouts.SelectedItem is WindowLayout; + } + private void AddNewLayout() + { + var xml = _manager.MainForm.GetCurrentLayoutXml(); - public void InjectButton(ToolStripButton button) + var dialog = new TypeTextOrCancelDialog("Layout Name", "Name", 100, null, false); + if (dialog.ShowDialog() == DialogResult.OK) { - toolStrip1.Items.Add(button); - } + var layout = new WindowLayout(_manager.RepositoryLocator.CatalogueRepository, dialog.ResultText, xml); - private void btnDelete_Click(object sender, EventArgs e) - { - ToolStripComboBox cbx; - if (sender == btnDeleteLayout) - cbx = cbxLayouts; - else - throw new Exception("Unexpected sender"); - - var d = cbx.SelectedItem as IDeleteable; - if (d != null) - { - _manager.ActivateItems.DeleteWithConfirmation(d); - ReCreateDropDowns(); - } - } + var cmd = new ExecuteCommandActivate(_manager.ActivateItems, layout); + cmd.Execute(); - private void btnSaveWindowLayout_Click(object sender, EventArgs e) - { - var layout = cbxLayouts.SelectedItem as WindowLayout; - if(layout != null) - { - string xml = _manager.MainForm.GetCurrentLayoutXml(); - - layout.LayoutData = xml; - layout.SaveToDatabase(); - } + ReCreateDropDowns(); } + } + + + public void InjectButton(ToolStripButton button) + { + toolStrip1.Items.Add(button); + } - private void btnBack_ButtonClick(object sender, EventArgs e) + private void btnDelete_Click(object sender, EventArgs e) + { + ToolStripComboBox cbx; + if (sender == btnDeleteLayout) + cbx = cbxLayouts; + else + throw new Exception("Unexpected sender"); + + if (cbx.SelectedItem is IDeleteable d) { - _manager.Navigation.Back(true); + _manager.ActivateItems.DeleteWithConfirmation(d); + ReCreateDropDowns(); } + } - private void btnForward_Click(object sender, EventArgs e) + private void btnSaveWindowLayout_Click(object sender, EventArgs e) + { + if (cbxLayouts.SelectedItem is WindowLayout layout) { - _manager.Navigation.Forward(true); + var xml = _manager.MainForm.GetCurrentLayoutXml(); + + layout.LayoutData = xml; + layout.SaveToDatabase(); } + } - private void btnBack_DropDownOpening(object sender, EventArgs e) - { - btnBack.DropDownItems.Clear(); + private void btnBack_ButtonClick(object sender, EventArgs e) + { + 
_manager.Navigation.Back(true); + } + + private void btnForward_Click(object sender, EventArgs e) + { + _manager.Navigation.Forward(true); + } - int backIndex = 1; + private void btnBack_DropDownOpening(object sender, EventArgs e) + { + btnBack.DropDownItems.Clear(); - foreach (INavigation history in _manager.Navigation.GetHistory(16)) - { - var i = backIndex++; - btnBack.DropDownItems.Add(history.ToString(),null,(a,b)=>_manager.Navigation.Back(i,true)); - } + var backIndex = 1; + + foreach (var history in _manager.Navigation.GetHistory(16)) + { + var i = backIndex++; + btnBack.DropDownItems.Add(history.ToString(), null, (a, b) => _manager.Navigation.Back(i, true)); } } -} +} \ No newline at end of file diff --git a/Application/ResearchDataManagementPlatform/WindowManagement/UIObjectConstructor.cs b/Application/ResearchDataManagementPlatform/WindowManagement/UIObjectConstructor.cs index 789f18e565..1528392c71 100644 --- a/Application/ResearchDataManagementPlatform/WindowManagement/UIObjectConstructor.cs +++ b/Application/ResearchDataManagementPlatform/WindowManagement/UIObjectConstructor.cs @@ -8,16 +8,13 @@ using Rdmp.Core.Repositories.Construction; using Rdmp.UI.ItemActivation; -namespace ResearchDataManagementPlatform.WindowManagement +namespace ResearchDataManagementPlatform.WindowManagement; + +/// +/// Provides UI specific helpful overloads to ObjectConstructor (which is defined in a data class) +/// +public sealed class UIObjectConstructor : ObjectConstructor { - /// - /// Provides UI specific helpful overloads to ObjectConstructor (which is defined in a data class) - /// - public class UIObjectConstructor:ObjectConstructor - { - public object Construct(Type t,IActivateItems itemActivator, bool allowBlankConstructors = true) - { - return Construct(t, itemActivator, allowBlankConstructors); - } - } + public static object Construct(Type t, IActivateItems itemActivator, bool allowBlankConstructors = true) => + ObjectConstructor.Construct(t, itemActivator, allowBlankConstructors); } \ No newline at end of file diff --git a/Application/ResearchDataManagementPlatform/WindowManagement/WindowArranging/WindowArranger.cs b/Application/ResearchDataManagementPlatform/WindowManagement/WindowArranging/WindowArranger.cs index 4b4fea9c2d..712016bcef 100644 --- a/Application/ResearchDataManagementPlatform/WindowManagement/WindowArranging/WindowArranger.cs +++ b/Application/ResearchDataManagementPlatform/WindowManagement/WindowArranging/WindowArranger.cs @@ -5,72 +5,62 @@ // You should have received a copy of the GNU General Public License along with RDMP. If not, see . 
using System; -using System.Linq; using System.Text.RegularExpressions; -using System.Windows.Forms; -using MapsDirectlyToDatabaseTable; using Rdmp.Core.CommandExecution; using Rdmp.Core.CommandExecution.AtomicCommands; using Rdmp.Core.Curation.Data; -using Rdmp.Core.Curation.Data.DataLoad; -using Rdmp.Core.DataExport.Data; -using Rdmp.UI.Collections; -using Rdmp.UI.CommandExecution.AtomicCommands; -using Rdmp.UI.DataLoadUIs.LoadMetadataUIs.LoadDiagram; +using Rdmp.Core.MapsDirectlyToDatabaseTable; using Rdmp.UI.ItemActivation; using Rdmp.UI.ItemActivation.Arranging; -using Rdmp.UI.LoadExecutionUIs; using WeifenLuo.WinFormsUI.Docking; -namespace ResearchDataManagementPlatform.WindowManagement.WindowArranging +namespace ResearchDataManagementPlatform.WindowManagement.WindowArranging; + +/// +public class WindowArranger : IArrangeWindows { - /// - public class WindowArranger : IArrangeWindows + private readonly IActivateItems _activator; + private readonly WindowManager _windowManager; + + public WindowArranger(IActivateItems activator, WindowManager windowManager, DockPanel mainDockPanel) { - private readonly IActivateItems _activator; - private readonly WindowManager _windowManager; + _activator = activator; + _windowManager = windowManager; + } - public WindowArranger(IActivateItems activator, WindowManager windowManager, DockPanel mainDockPanel) - { - _activator = activator; - _windowManager =windowManager; - } - - public void SetupEditAnything(object sender, IMapsDirectlyToDatabaseTable o) - { - _activator.RequestItemEmphasis(this, new EmphasiseRequest(o)); + public void SetupEditAnything(object sender, IMapsDirectlyToDatabaseTable o) + { + _activator.RequestItemEmphasis(this, new EmphasiseRequest(o)); - var activate = new ExecuteCommandActivate(_activator, o); + var activate = new ExecuteCommandActivate(_activator, o); - //activate it if possible - if (!activate.IsImpossible) - activate.Execute(); - else - _activator.RequestItemEmphasis(this, new EmphasiseRequest(o, 1)); //otherwise just show it - - } + //activate it if possible + if (!activate.IsImpossible) + activate.Execute(); + else + _activator.RequestItemEmphasis(this, new EmphasiseRequest(o, 1)); //otherwise just show it + } - public void Setup(WindowLayout target) - { - //Do not reload an existing layout - string oldXml = _windowManager.MainForm.GetCurrentLayoutXml(); - string newXml = target.LayoutData; + public void Setup(WindowLayout target) + { + //Do not reload an existing layout + var oldXml = _windowManager.MainForm.GetCurrentLayoutXml(); + var newXml = target.LayoutData; + + if (AreBasicallyTheSameLayout(oldXml, newXml)) + return; - if(AreBasicallyTheSameLayout(oldXml, newXml)) - return; - - _windowManager.CloseAllToolboxes(); - _windowManager.CloseAllWindows(); - _windowManager.MainForm.LoadFromXml(target); - } + _windowManager.CloseAllToolboxes(); + _windowManager.CloseAllWindows(); + _windowManager.MainForm.LoadFromXml(target); + } - private bool AreBasicallyTheSameLayout(string oldXml, string newXml) - { - var patStripActive = @"Active.*=[""\-\d]*"; - oldXml = Regex.Replace(oldXml, patStripActive, ""); - newXml = Regex.Replace(newXml, patStripActive, ""); + private static bool AreBasicallyTheSameLayout(string oldXml, string newXml) + { + var patStripActive = @"Active.*=[""\-\d]*"; + oldXml = Regex.Replace(oldXml, patStripActive, ""); + newXml = Regex.Replace(newXml, patStripActive, ""); - return oldXml.Equals(newXml, StringComparison.CurrentCultureIgnoreCase); - } + return oldXml.Equals(newXml, 
StringComparison.CurrentCultureIgnoreCase); } } \ No newline at end of file diff --git a/Application/ResearchDataManagementPlatform/WindowManagement/WindowFactory.cs b/Application/ResearchDataManagementPlatform/WindowManagement/WindowFactory.cs index 586e007730..3723e94adf 100644 --- a/Application/ResearchDataManagementPlatform/WindowManagement/WindowFactory.cs +++ b/Application/ResearchDataManagementPlatform/WindowManagement/WindowFactory.cs @@ -4,12 +4,11 @@ // RDMP is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. // You should have received a copy of the GNU General Public License along with RDMP. If not, see . -using System.Drawing; using System.Windows.Forms; -using MapsDirectlyToDatabaseTable; using Rdmp.Core; using Rdmp.Core.Curation.Data; using Rdmp.Core.Curation.Data.Dashboarding; +using Rdmp.Core.MapsDirectlyToDatabaseTable; using Rdmp.Core.Repositories; using Rdmp.UI; using Rdmp.UI.Collections; @@ -23,137 +22,131 @@ using SixLabors.ImageSharp; using SixLabors.ImageSharp.PixelFormats; using WeifenLuo.WinFormsUI.Docking; -using Image = SixLabors.ImageSharp.Image; -namespace ResearchDataManagementPlatform.WindowManagement +namespace ResearchDataManagementPlatform.WindowManagement; + +/// +/// Translates Controls into docked tabs (DockContent). Provides overloads for the two main control Types IRDMPSingleDatabaseObjectControl and +/// IObjectCollectionControl (for see ). +/// +public class WindowFactory { + private readonly WindowManager _windowManager; + /// - /// Translates Controls into docked tabs (DockContent). Provides overloads for the two main control Types IRDMPSingleDatabaseObjectControl and - /// IObjectCollectionControl (for see ). 
+ /// Location of the Catalogue / Data export repository databases (and allows access to repository objects) /// - public class WindowFactory + public IRDMPPlatformRepositoryServiceLocator RepositoryLocator { get; set; } + + private readonly IconFactory _iconFactory = IconFactory.Instance; + + + public WindowFactory(IRDMPPlatformRepositoryServiceLocator repositoryLocator, WindowManager windowManager) { - private readonly WindowManager _windowManager; + _windowManager = windowManager; + RepositoryLocator = repositoryLocator; + } - /// - /// Location of the Catalogue / Data export repository databases (and allows access to repository objects) - /// - public IRDMPPlatformRepositoryServiceLocator RepositoryLocator { get; set; } + public PersistableToolboxDockContent Create(IActivateItems activator, Control control, string label, + Image image, RDMPCollection collection) + { + var content = new PersistableToolboxDockContent(collection); - private readonly IconFactory _iconFactory = IconFactory.Instance; + AddControlToDockContent(activator, control, content, label, image); - - public WindowFactory(IRDMPPlatformRepositoryServiceLocator repositoryLocator, WindowManager windowManager) - { - _windowManager = windowManager; - RepositoryLocator = repositoryLocator; - } + return content; + } - public PersistableToolboxDockContent Create(IActivateItems activator,Control control, string label, Image image, RDMPCollection collection) - { - var content = new PersistableToolboxDockContent(collection); - - AddControlToDockContent(activator, control, content, label, image); + public PersistableSingleDatabaseObjectDockContent Create(IActivateItems activator, RefreshBus refreshBus, + IRDMPSingleDatabaseObjectControl control, Image image, IMapsDirectlyToDatabaseTable databaseObject) + { + var content = new PersistableSingleDatabaseObjectDockContent(control, databaseObject, refreshBus); + _windowManager.AddWindow(content); - return content; - } - - public PersistableSingleDatabaseObjectDockContent Create(IActivateItems activator, RefreshBus refreshBus,IRDMPSingleDatabaseObjectControl control, Image image, IMapsDirectlyToDatabaseTable databaseObject) - { - var content = new PersistableSingleDatabaseObjectDockContent(control, databaseObject,refreshBus); - _windowManager.AddWindow(content); + AddControlToDockContent(activator, (Control)control, content, "Loading...", image); - AddControlToDockContent(activator, (Control)control,content,"Loading...",image); - - if (!RDMPMainForm.Loading) - activator.HistoryProvider.Add(databaseObject); + if (!RDMPMainForm.Loading) + activator.HistoryProvider.Add(databaseObject); - return content; - } + return content; + } - public PersistableObjectCollectionDockContent Create(IActivateItems activator, IObjectCollectionControl control, IPersistableObjectCollection objectCollection, Image image) - { - //create a new persistable docking tab - var content = new PersistableObjectCollectionDockContent(activator,control,objectCollection); + public PersistableObjectCollectionDockContent Create(IActivateItems activator, IObjectCollectionControl control, + IPersistableObjectCollection objectCollection, Image image) + { + //create a new persistable docking tab + var content = new PersistableObjectCollectionDockContent(activator, control, objectCollection); - //add the control to the tab - AddControlToDockContent(activator,(Control)control, content,content.TabText, image); - - //add to the window tracker - _windowManager.AddWindow(content); + //add the control to the tab + 
AddControlToDockContent(activator, (Control)control, content, content.TabText, image); - //return the tab - return content; - } + //add to the window tracker + _windowManager.AddWindow(content); - public PersistableSingleDatabaseObjectDockContent Create(IActivateItems activator, IRDMPSingleDatabaseObjectControl control, DatabaseEntity entity) - { - var content = new PersistableSingleDatabaseObjectDockContent(control, entity, activator.RefreshBus); + //return the tab + return content; + } - var img = activator.CoreIconProvider.GetImage(entity); - AddControlToDockContent(activator, (Control)control, content, entity.ToString(), img); + public PersistableSingleDatabaseObjectDockContent Create(IActivateItems activator, + IRDMPSingleDatabaseObjectControl control, DatabaseEntity entity) + { + var content = new PersistableSingleDatabaseObjectDockContent(control, entity, activator.RefreshBus); - if (!RDMPMainForm.Loading) - activator.HistoryProvider.Add(entity); + var img = activator.CoreIconProvider.GetImage(entity); + AddControlToDockContent(activator, (Control)control, content, entity.ToString(), img); - return content; - } + if (!RDMPMainForm.Loading) + activator.HistoryProvider.Add(entity); + return content; + } - public DockContent Create(IActivateItems activator, Control control, string label, Image image) - { - DockContent content = new RDMPSingleControlTab(activator.RefreshBus,control); - - AddControlToDockContent(activator, control, content,label, image); - _windowManager.AddAdhocWindow(content); + public DockContent Create(IActivateItems activator, Control control, string label, Image image) + { + DockContent content = new RDMPSingleControlTab(activator.RefreshBus, control); - return content; - } + AddControlToDockContent(activator, control, content, label, image); - private void AddControlToDockContent(IActivateItems activator, Control control,DockContent content, string label, Image image) - { - control.Dock = DockStyle.Fill; - content.Controls.Add(control); - content.TabText = label; - - if(image != null) - { - content.Icon = _iconFactory.GetIcon(image); - } - - + _windowManager.AddAdhocWindow(content); + + return content; + } + + private void AddControlToDockContent(IActivateItems activator, Control control, DockContent content, string label, + Image image) + { + control.Dock = DockStyle.Fill; + content.Controls.Add(control); + content.TabText = label; + + if (image != null) content.Icon = _iconFactory.GetIcon(image); - if (control is IConsultableBeforeClosing consult) - content.FormClosing += consult.ConsultAboutClosing; - if(control is ISaveableUI saveable) - content.FormClosing += (s,e)=>saveable.GetObjectSaverButton()?.CheckForUnsavedChangesAnOfferToSave(); + if (control is IConsultableBeforeClosing consult) + content.FormClosing += consult.ConsultAboutClosing; - content.KeyPreview = true; + if (control is ISaveableUI saveable) + content.FormClosing += (s, e) => saveable.GetObjectSaverButton()?.CheckForUnsavedChangesAnOfferToSave(); - if (content is RDMPSingleControlTab tab) + content.KeyPreview = true; + + if (content is RDMPSingleControlTab tab) + { + content.TabPageContextMenuStrip = new RDMPSingleControlTabMenu(activator, tab, _windowManager); + + //Create handler for AfterPublish + void Handler(object s, RefreshObjectEventArgs e) { - content.TabPageContextMenuStrip = new RDMPSingleControlTabMenu(activator, tab, _windowManager); - - //Create handler for AfterPublish - RefreshObjectEventHandler handler = null; - handler = (s,e)=> - { - // After global changes, rebuild the 
context menu - - if(!content.IsDisposed) - content.TabPageContextMenuStrip = new RDMPSingleControlTabMenu(activator, tab, _windowManager); - else - if(handler != null) - activator.RefreshBus.AfterPublish -= handler; //don't leak handlers - }; - - //register the event handler - activator.RefreshBus.AfterPublish += handler; - + // After global changes, rebuild the context menu + + if (!content.IsDisposed) + content.TabPageContextMenuStrip = new RDMPSingleControlTabMenu(activator, tab, _windowManager); + else activator.RefreshBus.AfterPublish -= Handler; //don't leak handlers } - + + //register the event handler + activator.RefreshBus.AfterPublish += Handler; } } -} +} \ No newline at end of file diff --git a/Application/ResearchDataManagementPlatform/WindowManagement/WindowManager.cs b/Application/ResearchDataManagementPlatform/WindowManagement/WindowManager.cs index b6a338034a..dd49fcaad6 100644 --- a/Application/ResearchDataManagementPlatform/WindowManagement/WindowManager.cs +++ b/Application/ResearchDataManagementPlatform/WindowManagement/WindowManager.cs @@ -6,14 +6,14 @@ using System; using System.Collections.Generic; -using System.Drawing; using System.Linq; -using MapsDirectlyToDatabaseTable; using Rdmp.Core; using Rdmp.Core.CommandExecution; using Rdmp.Core.Curation.Data.Dashboarding; using Rdmp.Core.Icons.IconProvision; +using Rdmp.Core.MapsDirectlyToDatabaseTable; using Rdmp.Core.Repositories; +using Rdmp.Core.ReusableLibraryCode.Checks; using Rdmp.UI; using Rdmp.UI.Collections; using Rdmp.UI.Refreshing; @@ -25,461 +25,451 @@ using ResearchDataManagementPlatform.WindowManagement.ContentWindowTracking.Persistence; using ResearchDataManagementPlatform.WindowManagement.Events; using ResearchDataManagementPlatform.WindowManagement.HomePane; -using ReusableLibraryCode.Checks; using SixLabors.ImageSharp; using SixLabors.ImageSharp.PixelFormats; using WeifenLuo.WinFormsUI.Docking; using Image = SixLabors.ImageSharp.Image; -namespace ResearchDataManagementPlatform.WindowManagement +namespace ResearchDataManagementPlatform.WindowManagement; + +/// +/// Handles creating and tracking the main RDMPCollectionUIs tree views +/// +public class WindowManager { + private readonly Dictionary _visibleToolboxes = new(); + private readonly List _trackedWindows = new(); + private readonly List _trackedAdhocWindows = new(); + + public NavigationTrack Navigation { get; private set; } + public event TabChangedHandler TabChanged; + + private readonly DockPanel _mainDockPanel; + + public RDMPMainForm MainForm { get; set; } + /// - /// Handles creating and tracking the main RDMPCollectionUIs tree views + /// The location finder for the Catalogue and optionally Data Export databases /// - public class WindowManager + public IRDMPPlatformRepositoryServiceLocator RepositoryLocator { get; set; } + + public ActivateItems ActivateItems; + private readonly WindowFactory _windowFactory; + + public event RDMPCollectionCreatedEventHandler CollectionCreated; + + private HomeUI _home; + private DockContent _homeContent; + + public WindowManager(ITheme theme, RDMPMainForm mainForm, RefreshBus refreshBus, DockPanel mainDockPanel, + IRDMPPlatformRepositoryServiceLocator repositoryLocator, ICheckNotifier globalErrorCheckNotifier) { - readonly Dictionary _visibleToolboxes = new Dictionary(); - readonly List _trackedWindows = new List(); - readonly List _trackedAdhocWindows = new List(); - - public NavigationTrack Navigation { get; private set; } - public event TabChangedHandler TabChanged; - - private readonly DockPanel 
_mainDockPanel; - - public RDMPMainForm MainForm { get; set; } - - /// - /// The location finder for the Catalogue and optionally Data Export databases - /// - public IRDMPPlatformRepositoryServiceLocator RepositoryLocator { get; set; } - - public ActivateItems ActivateItems; - private readonly WindowFactory _windowFactory; - - public event RDMPCollectionCreatedEventHandler CollectionCreated; - - HomeUI _home; - DockContent _homeContent; - - public WindowManager(ITheme theme,RDMPMainForm mainForm, RefreshBus refreshBus, DockPanel mainDockPanel, IRDMPPlatformRepositoryServiceLocator repositoryLocator, ICheckNotifier globalErrorCheckNotifier) - { - _windowFactory = new WindowFactory(repositoryLocator,this); - ActivateItems = new ActivateItems(theme,refreshBus, mainDockPanel, repositoryLocator, _windowFactory, this, globalErrorCheckNotifier); + _windowFactory = new WindowFactory(repositoryLocator, this); + ActivateItems = new ActivateItems(theme, refreshBus, mainDockPanel, repositoryLocator, _windowFactory, this, + globalErrorCheckNotifier); - GlobalExceptionHandler.Instance.Handler = (e)=>globalErrorCheckNotifier.OnCheckPerformed(new CheckEventArgs(e.Message,CheckResult.Fail,e)); + GlobalExceptionHandler.Instance.Handler = e => + globalErrorCheckNotifier.OnCheckPerformed(new CheckEventArgs(e.Message, CheckResult.Fail, e)); - _mainDockPanel = mainDockPanel; - - MainForm = mainForm; - RepositoryLocator = repositoryLocator; + _mainDockPanel = mainDockPanel; - Navigation = new NavigationTrack(c=>c.IsAlive,(c)=>c.Activate(ActivateItems)); - mainDockPanel.ActiveDocumentChanged += mainDockPanel_ActiveDocumentChanged; - ActivateItems.Emphasise += RecordEmphasis; - } + MainForm = mainForm; + RepositoryLocator = repositoryLocator; - private void RecordEmphasis(object sender, EmphasiseEventArgs args) - { - if(args.Request.ObjectToEmphasise is IMapsDirectlyToDatabaseTable m) - Navigation.Append(new CollectionNavigation(m)); - } + Navigation = new NavigationTrack(c => c.IsAlive, c => c.Activate(ActivateItems)); + mainDockPanel.ActiveDocumentChanged += mainDockPanel_ActiveDocumentChanged; + ActivateItems.Emphasise += RecordEmphasis; + } - /// - /// Creates a new instance of the given RDMPCollectionUI specified by the Enum collectionToCreate at the specified dock position - /// - /// - /// - /// - public PersistableToolboxDockContent Create(RDMPCollection collectionToCreate, DockState position = DockState.DockLeft) - { - PersistableToolboxDockContent toReturn; - RDMPCollectionUI collection; + private void RecordEmphasis(object sender, EmphasiseEventArgs args) + { + if (args.Request.ObjectToEmphasise is IMapsDirectlyToDatabaseTable m) + Navigation.Append(new CollectionNavigation(m)); + } - switch (collectionToCreate) - { - case RDMPCollection.Catalogue: - collection = new CatalogueCollectionUI(); - toReturn = Show(RDMPCollection.Catalogue, collection, "Catalogues", Image.Load(CatalogueIcons.Catalogue)); + /// + /// Creates a new instance of the given RDMPCollectionUI specified by the Enum collectionToCreate at the specified dock position + /// + /// + /// + /// + public PersistableToolboxDockContent Create(RDMPCollection collectionToCreate, + DockState position = DockState.DockLeft) + { + PersistableToolboxDockContent toReturn; + RDMPCollectionUI collection; + + switch (collectionToCreate) + { + case RDMPCollection.Catalogue: + collection = new CatalogueCollectionUI(); + toReturn = Show(RDMPCollection.Catalogue, collection, "Catalogues", + Image.Load(CatalogueIcons.Catalogue)); break; - case 
RDMPCollection.DataLoad: - collection = new LoadMetadataCollectionUI(); - toReturn = Show(RDMPCollection.DataLoad, collection, "Load Configurations", Image.Load(CatalogueIcons.LoadMetadata)); + case RDMPCollection.DataLoad: + collection = new LoadMetadataCollectionUI(); + toReturn = Show(RDMPCollection.DataLoad, collection, "Load Configurations", + Image.Load(CatalogueIcons.LoadMetadata)); break; - case RDMPCollection.Tables: - collection = new TableInfoCollectionUI(); - toReturn = Show(RDMPCollection.Tables, collection, "Tables",Image.Load(CatalogueIcons.TableInfo)); + case RDMPCollection.Tables: + collection = new TableInfoCollectionUI(); + toReturn = Show(RDMPCollection.Tables, collection, "Tables", + Image.Load(CatalogueIcons.TableInfo)); break; - case RDMPCollection.DataExport: - if (RepositoryLocator.DataExportRepository == null) - { - WideMessageBox.Show("Data export database unavailable","Cannot create DataExport Toolbox because DataExportRepository has not been set/created yet"); - return null; - } + case RDMPCollection.DataExport: + if (RepositoryLocator.DataExportRepository == null) + { + WideMessageBox.Show("Data export database unavailable", + "Cannot create DataExport Toolbox because DataExportRepository has not been set/created yet"); + return null; + } - collection = new DataExportCollectionUI(); - toReturn = Show(RDMPCollection.DataExport,collection, "Projects", Image.Load(CatalogueIcons.Project)); + collection = new DataExportCollectionUI(); + toReturn = Show(RDMPCollection.DataExport, collection, "Projects", + Image.Load(CatalogueIcons.Project)); break; - case RDMPCollection.Cohort: - collection = new CohortIdentificationCollectionUI(); - toReturn = Show(RDMPCollection.Cohort, collection, "Cohort Builder", Image.Load(CatalogueIcons.CohortIdentificationConfiguration)); + case RDMPCollection.Cohort: + collection = new CohortIdentificationCollectionUI(); + toReturn = Show(RDMPCollection.Cohort, collection, "Cohort Builder", + Image.Load(CatalogueIcons.CohortIdentificationConfiguration)); break; - case RDMPCollection.SavedCohorts: - collection = new SavedCohortsCollectionUI(); - toReturn = Show(RDMPCollection.SavedCohorts, collection, "Saved Cohorts", Image.Load(CatalogueIcons.AllCohortsNode)); + case RDMPCollection.SavedCohorts: + collection = new SavedCohortsCollectionUI(); + toReturn = Show(RDMPCollection.SavedCohorts, collection, "Saved Cohorts", + Image.Load(CatalogueIcons.AllCohortsNode)); break; - case RDMPCollection.Favourites: - collection = new FavouritesCollectionUI(); - toReturn = Show(RDMPCollection.Favourites, collection, "Favourites", Image.Load(CatalogueIcons.Favourite)); + case RDMPCollection.Favourites: + collection = new FavouritesCollectionUI(); + toReturn = Show(RDMPCollection.Favourites, collection, "Favourites", + Image.Load(CatalogueIcons.Favourite)); break; - default: throw new ArgumentOutOfRangeException("collectionToCreate"); - } - - toReturn.DockState = position; + default: throw new ArgumentOutOfRangeException(nameof(collectionToCreate)); + } - collection.SetItemActivator(ActivateItems); + toReturn.DockState = position; - if(CollectionCreated != null) - CollectionCreated(this, new RDMPCollectionCreatedEventHandlerArgs(collectionToCreate)); + collection.SetItemActivator(ActivateItems); - collection.CommonTreeFunctionality.Tree.SelectionChanged += (s,e)=> - { - if(collection.CommonTreeFunctionality.Tree.SelectedObject is IMapsDirectlyToDatabaseTable im) - Navigation.Append(new CollectionNavigation(im)); - }; + CollectionCreated?.Invoke(this, new 
RDMPCollectionCreatedEventHandlerArgs(collectionToCreate)); - return toReturn; - } + collection.CommonTreeFunctionality.Tree.SelectionChanged += (s, e) => + { + if (collection.CommonTreeFunctionality.Tree.SelectedObject is IMapsDirectlyToDatabaseTable im) + Navigation.Append(new CollectionNavigation(im)); + }; - + return toReturn; + } - private PersistableToolboxDockContent Show(RDMPCollection collection,RDMPCollectionUI control, string label, Image image) - { - var content = _windowFactory.Create(ActivateItems,control, label, image, collection);//these are collections so are not tracked with a window tracker. - content.Closed += (s, e) => content_Closed(collection); - _visibleToolboxes.Add(collection, content); - content.Show(_mainDockPanel, DockState.DockLeft); - - return content; - } + private PersistableToolboxDockContent Show(RDMPCollection collection, RDMPCollectionUI control, string label, + Image image) + { + var content = + _windowFactory.Create(ActivateItems, control, label, image, + collection); //these are collections so are not tracked with a window tracker. + content.Closed += (s, e) => content_Closed(collection); - private void content_Closed(RDMPCollection collection) - { - //no longer visible - _visibleToolboxes.Remove(collection); - } + _visibleToolboxes.Add(collection, content); + content.Show(_mainDockPanel, DockState.DockLeft); - /// - /// Closes the specified RDMPCollectionUI (must be open - use IsVisible to check this) - /// - /// - public void Destroy(RDMPCollection collection) - { - _visibleToolboxes[collection].Close(); - } + return content; + } - /// - /// Brings the specified collection to the front (must already be visible) - /// - /// - public void Pop(RDMPCollection collection) - { - if (_visibleToolboxes.ContainsKey(collection)) - { - switch (_visibleToolboxes[collection].DockState) - { - case DockState.DockLeftAutoHide: - _visibleToolboxes[collection].DockState = DockState.DockLeft; - break; - case DockState.DockRightAutoHide: - _visibleToolboxes[collection].DockState = DockState.DockRight; - break; - case DockState.DockTopAutoHide: - _visibleToolboxes[collection].DockState = DockState.DockTop; - break; - case DockState.DockBottomAutoHide: - _visibleToolboxes[collection].DockState = DockState.DockBottom; - break; - } + private void content_Closed(RDMPCollection collection) + { + //no longer visible + _visibleToolboxes.Remove(collection); + } - _visibleToolboxes[collection].Activate(); - } - } + /// + /// Closes the specified RDMPCollectionUI (must be open - use IsVisible to check this) + /// + /// + public void Destroy(RDMPCollection collection) + { + _visibleToolboxes[collection].Close(); + } - /// - /// Returns true if the corresponding RDMPCollectionUI is open (even if it is burried under other windows). 
- /// - /// - /// - public bool IsVisible(RDMPCollection collection) + /// + /// Brings the specified collection to the front (must already be visible) + /// + /// + public void Pop(RDMPCollection collection) + { + if (!_visibleToolboxes.TryGetValue(collection, out var content)) return; + content.DockState = content.DockState switch { - return _visibleToolboxes.ContainsKey(collection); - } + DockState.DockLeftAutoHide => DockState.DockLeft, + DockState.DockRightAutoHide => DockState.DockRight, + DockState.DockTopAutoHide => DockState.DockTop, + DockState.DockBottomAutoHide => DockState.DockBottom, + _ => _visibleToolboxes[collection].DockState + }; + + content.Activate(); + } - public RDMPCollection GetFocusedCollection() - { - foreach (KeyValuePair t in _visibleToolboxes) - { - if (t.Value.ContainsFocus) - return t.Key; - } + /// + /// Returns true if the corresponding RDMPCollectionUI is open (even if it is buried under other windows). + /// + /// + /// + public bool IsVisible(RDMPCollection collection) => _visibleToolboxes.ContainsKey(collection); - return RDMPCollection.None; - } + public RDMPCollection GetFocusedCollection() + { + return _visibleToolboxes.Where(static t => t.Value.ContainsFocus).Select(static t => t.Key).FirstOrDefault(); + } - internal void OnFormClosing(System.Windows.Forms.FormClosingEventArgs e) - { - foreach(var c in _trackedWindows) + internal void OnFormClosing(System.Windows.Forms.FormClosingEventArgs e) + { + foreach (var c in _trackedWindows) + if (c.Control is IConsultableBeforeClosing consult) { - if(c.Control is IConsultableBeforeClosing consult) - { - consult.ConsultAboutClosing(this, e); + consult.ConsultAboutClosing(this, e); - if(e.Cancel) - { - return; - } - } + if (e.Cancel) return; } - } - + } - /// - /// Attempts to ensure that a compatible RDMPCollectionUI is made visible for the supplied object which must be one of the expected root Tree types of - /// an RDMPCollectionUI. For example Project is the a root object of DataExportCollectionUI. If a matching collection is already visible or no collection - /// supports the supplied object as a root object then nothing will happen. Otherwise the coresponding collection will be shown - /// - /// - public void ShowCollectionWhichSupportsRootObjectType(object root) - { - RDMPCollection collection = GetCollectionForRootObject(root); - if(collection == RDMPCollection.None) - return; + /// + /// Attempts to ensure that a compatible RDMPCollectionUI is made visible for the supplied object which must be one of the expected root Tree types of + /// an RDMPCollectionUI. For example Project is the a root object of DataExportCollectionUI. If a matching collection is already visible or no collection + /// supports the supplied object as a root object then nothing will happen. 
Otherwise the coresponding collection will be shown + /// + /// + public void ShowCollectionWhichSupportsRootObjectType(object root) + { + var collection = GetCollectionForRootObject(root); - if(IsVisible(collection)) - { - Pop(collection); - return; - } + if (collection == RDMPCollection.None) + return; - Create(collection); + if (IsVisible(collection)) + { + Pop(collection); + return; } - public RDMPCollection GetCollectionForRootObject(object root) - { - if (FavouritesCollectionUI.IsRootObject(ActivateItems,root)) - return RDMPCollection.Favourites; + Create(collection); + } + + public RDMPCollection GetCollectionForRootObject(object root) + { + if (FavouritesCollectionUI.IsRootObject(ActivateItems, root)) + return RDMPCollection.Favourites; - if(CatalogueCollectionUI.IsRootObject(root)) - return RDMPCollection.Catalogue; + if (CatalogueCollectionUI.IsRootObject(root)) + return RDMPCollection.Catalogue; - if(CohortIdentificationCollectionUI.IsRootObject(root)) - return RDMPCollection.Cohort; + if (CohortIdentificationCollectionUI.IsRootObject(root)) + return RDMPCollection.Cohort; - if(DataExportCollectionUI.IsRootObject(root)) - return RDMPCollection.DataExport; + if (DataExportCollectionUI.IsRootObject(root)) + return RDMPCollection.DataExport; - if(LoadMetadataCollectionUI.IsRootObject(root)) - return RDMPCollection.DataLoad; + if (LoadMetadataCollectionUI.IsRootObject(root)) + return RDMPCollection.DataLoad; - if(TableInfoCollectionUI.IsRootObject(root)) - return RDMPCollection.Tables; + if (TableInfoCollectionUI.IsRootObject(root)) + return RDMPCollection.Tables; - if(SavedCohortsCollectionUI.IsRootObject(root)) - return RDMPCollection.SavedCohorts; + return SavedCohortsCollectionUI.IsRootObject(root) ? RDMPCollection.SavedCohorts : RDMPCollection.None; + } - return RDMPCollection.None; + /// + /// Displays the HomeUI tab or brings it to the front if it is already open + /// + public void PopHome() + { + if (_home == null) + { + _home = new HomeUI(ActivateItems); + + _homeContent = _windowFactory.Create(ActivateItems, _home, "Home", + Image.Load(FamFamFamIcons.application_home)); + _homeContent.Closed += (s, e) => _home = null; + _homeContent.Show(_mainDockPanel, DockState.Document); } - - /// - /// Displays the HomeUI tab or brings it to the front if it is already open - /// - public void PopHome() + else { - if(_home == null) - { - _home = new HomeUI(this.ActivateItems); - - _homeContent = _windowFactory.Create(ActivateItems, _home, "Home", SixLabors.ImageSharp.Image.Load(FamFamFamIcons.application_home)); - _homeContent.Closed += (s, e) => _home = null; - _homeContent.Show(_mainDockPanel, DockState.Document); - } - else - { - _homeContent.Activate(); - } + _homeContent.Activate(); } + } + + /// + /// Closes all currently open RDMPCollectionUI tabs + /// + public void CloseAllToolboxes() + { + foreach (RDMPCollection collection in Enum.GetValues(typeof(RDMPCollection))) + if (IsVisible(collection)) + Destroy(collection); + } + + /// + /// Closes all content window tabs (i.e. 
anything that isn't an RDMPCollectionUI tab - see CloseAllToolboxes) + /// + public void CloseAllWindows() + { + CloseAllWindows(null); + } - /// - /// Closes all currently open RDMPCollectionUI tabs - /// - public void CloseAllToolboxes() + + /// + /// Closes all Tracked windows + /// + /// + public void CloseAllWindows(RDMPSingleControlTab tab) + { + if (tab != null) { - foreach (RDMPCollection collection in Enum.GetValues(typeof (RDMPCollection))) - if (IsVisible(collection)) - Destroy(collection); + CloseAllButThis(tab); + tab.Close(); } - - /// - /// Closes all content window tabs (i.e. anything that isn't an RDMPCollectionUI tab - see CloseAllToolboxes) - /// - public void CloseAllWindows() + else { - CloseAllWindows(null); + foreach (var trackedWindow in _trackedWindows.ToArray()) + trackedWindow.Close(); + + foreach (var adhoc in _trackedAdhocWindows.ToArray()) + adhoc.Close(); } + } + private void mainDockPanel_ActiveDocumentChanged(object sender, EventArgs e) + { + var newTab = (DockContent)_mainDockPanel.ActiveDocument; - /// - /// Closes all Tracked windows - /// - /// - public void CloseAllWindows(RDMPSingleControlTab tab) + if (newTab?.ParentForm != null) { - if(tab != null) - { - CloseAllButThis(tab); - tab.Close(); - } - else - { - foreach (var trackedWindow in _trackedWindows.ToArray()) - trackedWindow.Close(); - - foreach (var adhoc in _trackedAdhocWindows.ToArray()) - adhoc.Close(); - } + Navigation.Append(new TabNavigation(newTab)); + newTab.ParentForm.Text = $"{newTab.TabText} - RDMP"; } - void mainDockPanel_ActiveDocumentChanged(object sender, EventArgs e) - { - var newTab = (DockContent) _mainDockPanel.ActiveDocument; - - if(newTab != null && newTab.ParentForm != null) - { - Navigation.Append(new TabNavigation(newTab)); - newTab.ParentForm.Text = newTab.TabText + " - RDMP"; - } - - if (TabChanged != null) - TabChanged(sender, newTab); - } + TabChanged?.Invoke(sender, newTab); + } - /// - /// Records the fact that a new single object editing tab has been opened. . - /// - /// Thrown if another instance of the Control Type is already active with the same DatabaseObject - /// - public void AddWindow(RDMPSingleControlTab window) - { - if(window is PersistableSingleDatabaseObjectDockContent singleObjectUI) - if(AlreadyActive(singleObjectUI.Control.GetType(),singleObjectUI.DatabaseObject)) - throw new ArgumentOutOfRangeException($"Cannot create another window for object {singleObjectUI.DatabaseObject} of type {singleObjectUI.Control.GetType()} because there is already a window active for that object/window type"); - - _trackedWindows.Add(window); + /// + /// Records the fact that a new single object editing tab has been opened. . + /// + /// Thrown if another instance of the Control Type is already active with the same DatabaseObject + /// + public void AddWindow(RDMPSingleControlTab window) + { + if (window is PersistableSingleDatabaseObjectDockContent singleObjectUI) + if (AlreadyActive(singleObjectUI.Control.GetType(), singleObjectUI.DatabaseObject)) + throw new ArgumentOutOfRangeException( + $"Cannot create another window for object {singleObjectUI.DatabaseObject} of type {singleObjectUI.Control.GetType()} because there is already a window active for that object/window type"); - window.FormClosed += (s,e)=>Remove(window); - } + _trackedWindows.Add(window); - /// - /// Records the fact that a new impromptu/adhoc tab has been shown. These windows are not checked for duplication. 
- /// - /// - public void AddAdhocWindow(DockContent adhocWindow) - { - _trackedAdhocWindows.Add(adhocWindow); - adhocWindow.FormClosed += (s, e) => _trackedAdhocWindows.Remove(adhocWindow); - } + window.FormClosed += (s, e) => Remove(window); + } - private void Remove(RDMPSingleControlTab window) - { - _trackedWindows.Remove(window); - } + /// + /// Records the fact that a new impromptu/adhoc tab has been shown. These windows are not checked for duplication. + /// + /// + public void AddAdhocWindow(DockContent adhocWindow) + { + _trackedAdhocWindows.Add(adhocWindow); + adhocWindow.FormClosed += (s, e) => _trackedAdhocWindows.Remove(adhocWindow); + } - public PersistableSingleDatabaseObjectDockContent GetActiveWindowIfAnyFor(Type windowType, IMapsDirectlyToDatabaseTable databaseObject) - { - return _trackedWindows.OfType().SingleOrDefault(t => t.Control.GetType() == windowType && t.DatabaseObject.Equals(databaseObject)); - } + private void Remove(RDMPSingleControlTab window) + { + _trackedWindows.Remove(window); + } - public PersistableObjectCollectionDockContent GetActiveWindowIfAnyFor(Type windowType, IPersistableObjectCollection collection) - { - return _trackedWindows.OfType().SingleOrDefault(t => t.Control.GetType() == windowType && t.Collection.Equals(collection)); - } - /// - /// Check whether a given RDMPSingleControlTab is already showing with the given DatabaseObject (e.g. is user currently editing Catalogue bob in CatalogueUI) - /// - /// - /// A Type derrived from RDMPSingleControlTab - /// An instance of an object which matches the windowType - /// - public bool AlreadyActive(Type windowType, IMapsDirectlyToDatabaseTable databaseObject) - { - if (!typeof(IRDMPSingleDatabaseObjectControl).IsAssignableFrom(windowType)) - throw new ArgumentException("windowType must be a Type derrived from RDMPSingleControlTab"); + public PersistableSingleDatabaseObjectDockContent GetActiveWindowIfAnyFor(Type windowType, + IMapsDirectlyToDatabaseTable databaseObject) + { + return _trackedWindows.OfType().SingleOrDefault(t => + t.Control.GetType() == windowType && t.DatabaseObject.Equals(databaseObject)); + } - return _trackedWindows.OfType().Any(t => t.Control.GetType() == windowType && t.DatabaseObject.Equals(databaseObject)); - } - - /// - /// Closes all Tracked windows except the specified tab - /// - public void CloseAllButThis(DockContent content) - { - var trackedWindowsToClose = _trackedWindows.ToArray().Where(t => t != content); + public PersistableObjectCollectionDockContent GetActiveWindowIfAnyFor(Type windowType, + IPersistableObjectCollection collection) + { + return _trackedWindows.OfType() + .SingleOrDefault(t => t.Control.GetType() == windowType && t.Collection.Equals(collection)); + } - foreach (var trackedWindow in trackedWindowsToClose) - CloseWindowIfInSameScope(trackedWindow, content); + /// + /// Check whether a given RDMPSingleControlTab is already showing with the given DatabaseObject (e.g. is user currently editing Catalogue bob in CatalogueUI) + /// + /// + /// A Type derrived from RDMPSingleControlTab + /// An instance of an object which matches the windowType + /// + public bool AlreadyActive(Type windowType, IMapsDirectlyToDatabaseTable databaseObject) + { + return !typeof(IRDMPSingleDatabaseObjectControl).IsAssignableFrom(windowType) + ? 
throw new ArgumentException("windowType must be a Type derrived from RDMPSingleControlTab") + : _trackedWindows.OfType().Any(t => + t.Control.GetType() == windowType && t.DatabaseObject.Equals(databaseObject)); + } - foreach (var adhoc in _trackedAdhocWindows.ToArray().Where(t => t != content)) - CloseWindowIfInSameScope(adhoc, content); - } + /// + /// Closes all Tracked windows except the specified tab + /// + public void CloseAllButThis(DockContent content) + { + var trackedWindowsToClose = _trackedWindows.ToArray().Where(t => t != content); - private void CloseWindowIfInSameScope(DockContent toClose, DockContent tabInSameScopeOrNull) - { - var parent = tabInSameScopeOrNull == null ? null : tabInSameScopeOrNull.Parent; + foreach (var trackedWindow in trackedWindowsToClose) + CloseWindowIfInSameScope(trackedWindow, content); - if (toClose != null && (parent == null || toClose.Parent == parent)) - toClose.Close(); - } + foreach (var adhoc in _trackedAdhocWindows.ToArray().Where(t => t != content)) + CloseWindowIfInSameScope(adhoc, content); + } - public void CloseCurrentTab() - { - //nothing to close - if (Navigation.Current == null) - return; + private static void CloseWindowIfInSameScope(DockContent toClose, DockContent tabInSameScopeOrNull) + { + var parent = tabInSameScopeOrNull?.Parent; - Navigation.Suspend(); - try - { - Navigation.Current.Close(); + if (toClose != null && (parent == null || toClose.Parent == parent)) + toClose.Close(); + } - if (Navigation.Current != null) - Navigation.Current.Activate(ActivateItems); - } - finally - { - Navigation.Resume(); - if(_mainDockPanel.ActiveDocument is DockContent dc) - Navigation.Append(new TabNavigation(dc)); - } - } + public void CloseCurrentTab() + { + //nothing to close + if (Navigation.Current == null) + return; - /// - /// Returns all tracked tabs currently open of the Type - /// - /// - /// - public IEnumerable GetAllWindows() + Navigation.Suspend(); + try + { + Navigation.Current.Close(); + + Navigation.Current?.Activate(ActivateItems); + } + finally { - return _trackedWindows.OfType().Select(t => t.Control).OfType(); + Navigation.Resume(); + if (_mainDockPanel.ActiveDocument is DockContent dc) + Navigation.Append(new TabNavigation(dc)); } } -} + + /// + /// Returns all tracked tabs currently open of the Type + /// + /// + /// + public IEnumerable GetAllWindows() + { + return _trackedWindows.OfType().Select(t => t.Control).OfType(); + } +} \ No newline at end of file diff --git a/Application/ResearchDataManagementPlatform/publish.bat b/Application/ResearchDataManagementPlatform/publish.bat deleted file mode 100644 index d695e0f02f..0000000000 --- a/Application/ResearchDataManagementPlatform/publish.bat +++ /dev/null @@ -1,74 +0,0 @@ -@echo off - -if "%1"=="" goto usage -if "%2"=="" goto usage -if "%3"=="" goto usage -if "%4"=="" goto usage - -if "%WindowsSdkDir%" neq "" goto build - -if exist "%ProgramFiles(x86)%\Microsoft Visual Studio 14.0\VC\vcvarsall.bat" goto initialize2k8on64Dev14 -if exist "%ProgramFiles%\Microsoft Visual Studio 14.0\VC\vcvarsall.bat" goto initialize2k8Dev14 - -if exist "%ProgramFiles(x86)%\Microsoft Visual Studio 12.0\VC\vcvarsall.bat" goto initialize2k8on64Dev12 -if exist "%ProgramFiles%\Microsoft Visual Studio 12.0\VC\vcvarsall.bat" goto initialize2k8Dev12 - -if exist "%ProgramFiles(x86)%\Microsoft Visual Studio 10.0\VC\vcvarsall.bat" goto initialize2k8on64 -if exist "%ProgramFiles%\Microsoft Visual Studio 10.0\VC\vcvarsall.bat" goto initialize2k8 - -if exist "%ProgramFiles(x86)%\Microsoft Visual Studio 
11.0\VC\vcvarsall.bat" goto initialize2k8on64Dev11 -if exist "%ProgramFiles%\Microsoft Visual Studio 11.0\VC\vcvarsall.bat" goto initialize2k8Dev11 -echo "Unable to detect suitable environment. Build may not succeed." -goto build - -:initialize2k8 -call "%ProgramFiles%\Microsoft Visual Studio 10.0\VC\vcvarsall.bat" x86 -goto build - -:initialize2k8on64 -call "%ProgramFiles(x86)%\Microsoft Visual Studio 10.0\VC\vcvarsall.bat" x86 -goto build - -:initialize2k8Dev11 -call "%ProgramFiles%\Microsoft Visual Studio 11.0\VC\vcvarsall.bat" x86 -goto build - -:initialize2k8on64Dev11 -call "%ProgramFiles(x86)%\Microsoft Visual Studio 11.0\VC\vcvarsall.bat" x86 -goto build - -:initialize2k8Dev12 -call "%ProgramFiles%\Microsoft Visual Studio 12.0\VC\vcvarsall.bat" x86 -goto build - -:initialize2k8on64Dev12 -call "%ProgramFiles(x86)%\Microsoft Visual Studio 12.0\VC\vcvarsall.bat" x86 -goto build - -:initialize2k8Dev14 -call "%ProgramFiles%\Microsoft Visual Studio 14.0\VC\vcvarsall.bat" x86 -goto build - -:initialize2k8on64Dev14 -call "%ProgramFiles(x86)%\Microsoft Visual Studio 14.0\VC\vcvarsall.bat" x86 -goto build - -:build -msbuild /t:Clean,Publish /p:Configuration=Release /p:ApplicationVersion=%1 /p:PublishDir=%3standalone/ /p:MinimumRequiredVersion=%2 /p:InstallUrl="%3" /p:UpdateUrl="%3" /p:InstallFrom=Disk /p:IsWebBootstrapper=false /p:UpdateEnabled=false /p:UpdateRequired=false -goto buildweb - -:buildweb -msbuild /t:Clean,Publish /p:Configuration=Release /p:ApplicationVersion=%1 /p:PublishDir=%3 /p:MinimumRequiredVersion=%2 /p:InstallUrl="%4" /p:UpdateUrl="%4" /p:InstallFrom=Web /p:IsWebBootstrapper=true /p:UpdateEnabled=true /p:UpdateRequired=true -goto end - -:usage -echo. -echo publish.bat [version] [directory] [url] -echo. -echo( [version] The version you wish to publish the application as. -echo( [minversion] The minimum version the ClickOnce deploy will accept for updating. -echo( [directory] The directory to which the ClickOnce package will be published. -echo( [url] The url from which the ClickOnce package can be installed and updated. -echo. - -:end \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index a2a3de69cc..7100a239d3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,6 +8,23 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ... +## [8.1.0] - 2023-09-19 + +### Changed + +- Refactor build process +- Update Scintilla +- Add LibArchive.Net 0.1.3 for archive reading support +- Batching of progress log writing to improve performance +- Add Begin/End flags for DataTable loads to improve performance of large writes +- Removable default logging server +- Increase Progress Log timeout to account for long db lock queue +- Allow users to clear all settings +- Plugin updates are now installed in the correct place +- Move Terminal.Gui to Core rather than duplicating in both CLI and GUI +- Remove Moq Library in favour of NSubstitute +- Add max message length check when logging notifications to prevent erroneous DB write attempts + ## [8.0.7] - 2022-11-22 ### Changed @@ -1451,7 +1468,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Fixed Culture (e.g. 
en-us) not being passed correctly in DelimitedFlatFileAttacher - Fixed bug where Updater would show older versions of RDMP as installable 'updates' -[Unreleased]: https://github.com/HicServices/RDMP/compare/v8.0.7...develop +[Unreleased]: https://github.com/HicServices/RDMP/compare/v8.1.0...develop +[8.1.0]: https://github.com/HicServices/RDMP/compare/v8.0.7...v8.1.0 [8.0.7]: https://github.com/HicServices/RDMP/compare/v8.0.6...v8.0.7 [8.0.6]: https://github.com/HicServices/RDMP/compare/v8.0.5...v8.0.6 [8.0.5]: https://github.com/HicServices/RDMP/compare/v8.0.4...v8.0.5 diff --git a/Documentation/CodeTutorials/CreatingANewCollectionTreeNode.md b/Documentation/CodeTutorials/CreatingANewCollectionTreeNode.md index d2921cd9e5..e57f599f8c 100644 --- a/Documentation/CodeTutorials/CreatingANewCollectionTreeNode.md +++ b/Documentation/CodeTutorials/CreatingANewCollectionTreeNode.md @@ -49,7 +49,7 @@ Start out by overriding the ToString method to return the text you want to appea ```csharp namespace Rdmp.Core.Providers.Nodes { - class FrozenExtractionConfigurationsNode + private class FrozenExtractionConfigurationsNode { public override string ToString() { @@ -59,13 +59,13 @@ namespace Rdmp.Core.Providers.Nodes } ``` -NOTE: If a node can only ever appear once in any collection (e.g. it is a unique top level node) then you can instead inherit from SingletonNode and skip the rest of this section. If you are an SingletonNode then your class name should start with All e.g. AllCakesNode, AllCarsNode etc. +NOTE: If a node can only ever appear once in any collection (e.g. it is a unique top level node) then you can instead inherit from SingletonNode and skip the rest of this section. If you are a SingletonNode then your class name should start with All e.g. AllCakesNode, AllCarsNode etc. -Assuming you dont have a SingletonNode then you should add constructor arguments sufficient to uniquely identify which instance you have. In this case we will have an `FrozenExtractionConfigurationsNode` instance for every `Project` so we need a reference to which `Project` we relate to. +Assuming you don't have a SingletonNode then you should add constructor arguments sufficient to uniquely identify which instance you have. In this case we will have an `FrozenExtractionConfigurationsNode` instance for every `Project` so we need a reference to which `Project` we relate to. 
```csharp -class FrozenExtractionConfigurationsNode +private class FrozenExtractionConfigurationsNode { public Project Project { get; set; } @@ -74,10 +74,7 @@ class FrozenExtractionConfigurationsNode Project = project; } - public override string ToString() - { - return "Frozen Extraction Configurations"; - } + public override string ToString() => "Frozen Extraction Configurations"; } ``` @@ -85,7 +82,7 @@ Finally we need to implement 'Equality members', this ensures that Object List V ```csharp -class FrozenExtractionConfigurationsNode +private class FrozenExtractionConfigurationsNode { public Project Project { get; set; } @@ -94,28 +91,18 @@ class FrozenExtractionConfigurationsNode Project = project; } - public override string ToString() - { - return "Frozen Extraction Configurations"; - } + public override string ToString() => "Frozen Extraction Configurations"; - protected bool Equals(FrozenExtractionConfigurationsNode other) - { - return Equals(Project, other.Project); - } + protected bool Equals(FrozenExtractionConfigurationsNode other) => Equals(Project, other.Project); public override bool Equals(object obj) { - if (ReferenceEquals(null, obj)) return false; + if (obj is null) return false; if (ReferenceEquals(this, obj)) return true; - if (obj.GetType() != this.GetType()) return false; - return Equals((FrozenExtractionConfigurationsNode) obj); + return obj.GetType() == GetType() && Equals((FrozenExtractionConfigurationsNode)obj); } - public override int GetHashCode() - { - return (Project != null ? Project.GetHashCode() : 0); - } + public override int GetHashCode() => Project?.GetHashCode() ?? 0; } ``` @@ -139,13 +126,13 @@ The full method should now look something like: ```csharp private void AddChildren(ExtractionConfigurationsNode extractionConfigurationsNode, DescendancyList descendancy) { - HashSet children = new HashSet(); + var children = new HashSet(); var frozenConfigurationsNode = new FrozenExtractionConfigurationsNode(extractionConfigurationsNode.Project); children.Add(frozenConfigurationsNode); - var configs = ExtractionConfigurations.Where(c => c.Project_ID == extractionConfigurationsNode.Project.ID).ToArray(); - foreach (ExtractionConfiguration config in configs) + var configs = ExtractionConfigurations .Where(c => c.Project_ID == extractionConfigurationsNode.Project.ID).ToArray(); + foreach (var config in configs) { AddChildren(config, descendancy.Add(config)); children.Add(config); @@ -208,7 +195,7 @@ In this method add the following: ```csharp private void AddChildren(FrozenExtractionConfigurationsNode frozenExtractionConfigurationsNode, DescendancyList descendancy) { - HashSet children = new HashSet(); + var children = new HashSet(); //todo add child objects here @@ -223,7 +210,7 @@ Just as an example add the number 87 to the children HashSet: ```csharp private void AddChildren(FrozenExtractionConfigurationsNode frozenExtractionConfigurationsNode, DescendancyList descendancy) { - HashSet children = new HashSet(); + var children = new HashSet(); children.Add(87); @@ -241,7 +228,7 @@ To complete this example we will modify the `AddChildren` method for `Extraction ```csharp private void AddChildren(ExtractionConfigurationsNode extractionConfigurationsNode, DescendancyList descendancy) { - HashSet children = new HashSet(); + var children = new HashSet(); //Create a frozen extraction configurations folder as a subfolder of each ExtractionConfigurationsNode var frozenConfigurationsNode = new FrozenExtractionConfigurationsNode(extractionConfigurationsNode.Project); @@ 
-250,11 +237,11 @@ private void AddChildren(ExtractionConfigurationsNode extractionConfigurationsNo children.Add(frozenConfigurationsNode); //Add children to the frozen folder - AddChildren(frozenConfigurationsNode,descendancy.Add(frozenConfigurationsNode)); + AddChildren(frozenConfigurationsNode, descendancy.Add(frozenConfigurationsNode)); //Add ExtractionConfigurations which are not released (frozen) - var configs = ExtractionConfigurations.Where(c => c.Project_ID == extractionConfigurationsNode.Project.ID).ToArray(); - foreach (ExtractionConfiguration config in configs.Where(c=>!c.IsReleased)) + var configs = ExtractionConfigurations .Where(c => c.Project_ID == extractionConfigurationsNode.Project.ID).ToArray(); + foreach (var config in configs.Where(c => !c.IsReleased)) { AddChildren(config, descendancy.Add(config)); children.Add(config); @@ -265,17 +252,17 @@ private void AddChildren(ExtractionConfigurationsNode extractionConfigurationsNo private void AddChildren(FrozenExtractionConfigurationsNode frozenExtractionConfigurationsNode, DescendancyList descendancy) { - HashSet children = new HashSet(); + var children = new HashSet(); //Add ExtractionConfigurations which are not released (frozen) - var configs = ExtractionConfigurations.Where(c => c.Project_ID == frozenExtractionConfigurationsNode.Project.ID).ToArray(); - foreach (ExtractionConfiguration config in configs.Where(c => c.IsReleased)) + var configs = ExtractionConfigurations .Where(c => c.Project_ID == frozenExtractionConfigurationsNode.Project.ID).ToArray(); + foreach (var config in configs.Where(c => c.IsReleased)) { AddChildren(config, descendancy.Add(config)); children.Add(config); } - AddToDictionaries(children,descendancy); + AddToDictionaries(children, descendancy); } ``` Now when you run RDMP, the final tree should look something like: diff --git a/Documentation/CodeTutorials/CreatingANewRightClickMenu.md b/Documentation/CodeTutorials/CreatingANewRightClickMenu.md index f7a4f1e29d..3932fb5d12 100644 --- a/Documentation/CodeTutorials/CreatingANewRightClickMenu.md +++ b/Documentation/CodeTutorials/CreatingANewRightClickMenu.md @@ -79,7 +79,7 @@ The preferred way of adding menu items is to use abstract base class `RDMPContex ```csharp -class AllServersNodeMenu : RDMPContextMenuStrip +private class AllServersNodeMenu : RDMPContextMenuStrip { public AllServersNodeMenu(RDMPContextMenuStripArgs args, AllServersNode o) : base(args, o) { diff --git a/Documentation/CodeTutorials/DoubleClickAndDragDrop.md b/Documentation/CodeTutorials/DoubleClickAndDragDrop.md index dbd7b55476..ea0e33f260 100644 --- a/Documentation/CodeTutorials/DoubleClickAndDragDrop.md +++ b/Documentation/CodeTutorials/DoubleClickAndDragDrop.md @@ -31,26 +31,20 @@ Create a new class called `ProposeExecutionWhenTargetIs` in namespace ```csharp -class ProposeExecutionWhenTargetIsPipeline:RDMPCommandExecutionProposal +private class ProposeExecutionWhenTargetIsPipeline : RDMPCommandExecutionProposal { public ProposeExecutionWhenTargetIsPipeline(IActivateItems itemActivator) : base(itemActivator) { } - public override bool CanActivate(Pipeline target) - { - return true; - } + public override bool CanActivate(Pipeline target) => true; public override void Activate(Pipeline target) { MessageBox.Show("Double clicked"); } - public override ICommandExecution ProposeExecution(ICombineToMakeCommand cmd, Pipeline target, InsertOption insertOption = InsertOption.Default) - { - return null; - } + public override ICommandExecution ProposeExecution(ICombineToMakeCommand cmd, 
Pipeline target, InsertOption insertOption = InsertOption.Default) => null; } ``` @@ -90,7 +84,7 @@ To add support for item dropping you should add an implementation to the body of | ------------- | ------------- | | ICombineToMakeCommand cmd| Self contained class describing both the object being dragged and salient facts about it e.g. if it is a `CatalogueCombineable` then it will know whether the dragged [Catalogue] has at least one patient identifier column.| | T target | The object the cursor is currently hovering over | -| InsertOption insertOption | Whether the cursor is above or below or ontop of your object (if the collection the object is in supports it) | +| InsertOption insertOption | Whether the cursor is above or below or on top of your object (if the collection the object is in supports it) | The reason we have an `ICombineToMakeCommand` is so we can front load discovery and encapsulate facts into a single class which can then be waved around the place to look for valid combinations. If an object doesn't have an associated `ICommand` then it won't be draggable in the first place. @@ -98,15 +92,7 @@ To add support for dropping an object with an existing `ICombineToMakeCommand` y ```csharp -public override ICommandExecution ProposeExecution(ICombineToMakeCommand cmd, Pipeline target, InsertOption insertOption = InsertOption.Default) -{ - var sourceCatalogueCombineable = cmd as CatalogueCombineable; - - if(sourceCatalogueCombineable != null) - return new ExecuteCommandDelete(ItemActivator,sourceCatalogueCombineable.Catalogue); - - return null; -} +public override ICommandExecution ProposeExecution(ICombineToMakeCommand cmd, Pipeline target, InsertOption insertOption = InsertOption.Default) => cmd is CatalogueCombineable sourceCatalogueCombineable ? new ExecuteCommandDelete(ItemActivator, sourceCatalogueCombineable.Catalogue) : (ICommandExecution)null; ``` While not terribly useful, you can now drop a [Catalogue] on `Pipeline` to delete the [Catalogue] @@ -149,10 +135,7 @@ public class PipelineCombineable : ICombineToMakeCommand IsEmpty = Pipeline.PipelineComponents.Count == 0; } - public string GetSqlString() - { - return ""; - } + public string GetSqlString() => ""; } ``` diff --git a/Documentation/CodeTutorials/Packages.md b/Documentation/CodeTutorials/Packages.md index f1e13bcdc7..4bad44d302 100644 --- a/Documentation/CodeTutorials/Packages.md +++ b/Documentation/CodeTutorials/Packages.md @@ -7,51 +7,51 @@ 2. This package is widely used and is actively maintained. 3. It is open source. -| Package | Source Code | Version | License | Purpose | Additional Risk Assessment | -| ------- | ------------| --------| ------- | ------- | -------------------------- | -| coverlet.collector | [GitHub](https://github.com/coverlet-coverage/coverlet) | [3.0.3](https://www.nuget.org/packages/coverlet.collector/3.0.3) | [MIT](https://opensource.org/licenses/MIT) | Collects code coverage information | | -| [DockPanelSuite](http://dockpanelsuite.com/) | [GitHub](https://github.com/dockpanelsuite/dockpanelsuite) | [3.1.0-beta2](https://www.nuget.org/packages/DockPanelSuite/3.1.0-beta2) | [MIT](https://opensource.org/licenses/MIT) | Provides Window layout and docking for RDMP. 
| There are no powershell initialization files in the package which can be run by the NuGet installer.| -| [ObjectListView.Official](http://objectlistview.sourceforge.net/cs/index.html) | [Svn](http://objectlistview.sourceforge.net/cs/download.html#bleeding-edge-source) | [2.9.1](https://www.nuget.org/packages/ObjectListView.Official/2.9.1) | [GPL 3.0](https://www.gnu.org/licenses/gpl-3.0.html) | Provides tree layout for user interfaces in main client application | -| fernandreu.ScintillaNET | [GitHub](https://github.com/fernandreu/ScintillaNET) | [4.2.0](https://www.nuget.org/packages/fernandreu.ScintillaNET/4.2.0) | [MIT](https://opensource.org/licenses/MIT) | Provides text editor component with highlighting etc | -| NHunspell | [SourceForge](https://sourceforge.net/p/nhunspell/code/ci/default/tree/) | [1.2.5554.16953](https://www.nuget.org/packages/NHunspell/1.2.5554.16953) | LGPL / MPL | Adds spell check support to ScintillaNET text editor | -| [FAM FAM FAM Icons](http://www.famfamfam.com/lab/icons/silk/) | N\A | N\A | [CC 2.5](https://creativecommons.org/licenses/by/2.5/) | Icons for user interfaces | -| Xam.Plugins.Settings | [GitHub](https://github.com/jamesmontemagno/SettingsPlugin)| [3.1.1](https://www.nuget.org/packages/Xam.Plugins.Settings/3.1.1) | [MIT](https://opensource.org/licenses/MIT)| Read/Write user settings for main client application | -| CommandLineParser | [GitHub](https://github.com/commandlineparser/commandline) | [2.9.1](https://www.nuget.org/packages/CommandLineParser/2.9.1) | [MIT](https://opensource.org/licenses/MIT) | Allows command line arguments for main client application and CLI executables | -| CsvHelper | [GitHub](https://github.com/JoshClose/CsvHelper) | [30.0.1](https://www.nuget.org/packages/CsvHelper/30.0.1) | MS-PL / Apache 2.0 | Enables reading/writing CSV files | -| NPOI | [GitHub](https://github.com/tonyqus/npoi) | [2.5.5](https://www.nuget.org/packages/NPOI/2.5.5) | Apache 2.0 | Enables reading/writing Microsoft Excel files | -| ExcelNumberFormat | [GitHub](https://github.com/andersnm/ExcelNumberFormat) | [1.1.0](https://www.nuget.org/packages/ExcelNumberFormat/1.1.0) |[MIT](https://opensource.org/licenses/MIT) | Handles translating number formats from Excel formats into usable values | | -| [NLog](https://nlog-project.org/) | [GitHub](https://github.com/NLog/NLog) | [5.0.5](https://www.nuget.org/packages/NLog/5.0.5) | [BSD 3-Clause](https://github.com/NLog/NLog/blob/dev/LICENSE.txt) | Flexible user configurable logging | | -| HIC.FAnsiSql |[GitHub](https://github.com/HicServices/FAnsiSql) | [3.0.1](https://www.nuget.org/packages/HIC.FansiSql/3.0.1) | [GPL 3.0](https://www.gnu.org/licenses/gpl-3.0.html) | [DBMS] abstraction layer | -| HIC.BadMedicine | [GitHub](https://github.com/HicServices/BadMedicine) | [1.1.2](https://www.nuget.org/packages/HIC.BadMedicine/1.1.2) | [GPL 3.0](https://www.gnu.org/licenses/gpl-3.0.html) | Generate Test Datasets for tests/exericses | -| SSH.NET | [GitHub](https://github.com/sshnet/SSH.NET) | [2020.0.2](https://www.nuget.org/packages/SSH.NET/2020.0.2) | [MIT](https://github.com/sshnet/SSH.NET/blob/develop/LICENSE) | Enables fetching files from SFTP servers | -| Moq 4 | [GitHub](https://github.com/moq/moq4) | [4.18.2](https://www.nuget.org/packages/Moq/4.18.2) |[BSD 3](https://github.com/moq/moq4/blob/master/License.txt) | Mock objects during unit testing | -| [Nunit](https://nunit.org/) |[GitHub](https://github.com/nunit/nunit) | [3.13.3](https://www.nuget.org/packages/NUnit/3.13.3) | 
[MIT](https://opensource.org/licenses/MIT) | Unit testing | -| Microsoft.NET.Test.Sdk | [GitHub](https://github.com/microsoft/vstest/) | [17.4.0](https://www.nuget.org/packages/Microsoft.NET.Test.Sdk/17.4.0) | [MIT](https://opensource.org/licenses/MIT) | Required for running tests| | -| NUnit3TestAdapter | [GitHub](https://github.com/nunit/nunit3-vs-adapter)| [3.13.3](https://www.nuget.org/packages/NUnit3TestAdapter/3.13.3) | [MIT](https://opensource.org/licenses/MIT) | Run unit tests from within Visual Studio | -| [Newtonsoft.Json](https://www.newtonsoft.com/json) | [GitHub](https://github.com/JamesNK/Newtonsoft.Json) | [13.0.1](https://www.nuget.org/packages/Newtonsoft.Json/13.0.1) | [MIT](https://opensource.org/licenses/MIT) | Serialization of objects for sharing/transmission | -| YamlDotNet | [GitHub](https://github.com/aaubry/YamlDotNet) | [12.0.2](https://www.nuget.org/packages/YamlDotNet/12.0.2) | [MIT](https://opensource.org/licenses/MIT) |Loading configuration files| -| [SecurityCodeScan.VS2019](https://security-code-scan.github.io/) | [GitHub](https://github.com/security-code-scan/security-code-scan) | [5.6.7](https://www.nuget.org/packages/SecurityCodeScan.VS2019/5.6.7) | [GPL 3.0](https://www.gnu.org/licenses/gpl-3.0.html)| Performs static build time analysis for vulnerabilities in the codebase (e.g. Sql injection)| | -| SixLabors.ImageSharp | [GitHub](https://github.com/SixLabors/ImageSharp) | [2.1.3](https://www.nuget.org/packages/SixLabors.ImageSharp/2.1.3) | [Apache 2.0](https://github.com/SixLabors/ImageSharp/blob/main/LICENSE) | Platform-independent replacement for legacy Windows-only System.Drawing.Common | | -| SixLabors.ImageSharp.Drawing | [GitHub](https://github.com/SixLabors/ImageSharp.Drawing) | [1.0.0-beta15](https://www.nuget.org/packages/SixLabors.ImageSharp.Drawing/1.0.0-beta15) | [Apache 2.0](https://github.com/SixLabors/ImageSharp/blob/main/LICENSE) | Font handling for ImageSharp | | -| System.Runtime.Loader | [GitHub](https://github.com/dotnet/corefx) | [4.3.0](https://www.nuget.org/packages/System.Runtime.Loader/4.3.0) |[MIT](https://opensource.org/licenses/MIT) | Allows loading assemblies in dot net core| | -| System.Diagnostics.Debug | [GitHub](https://github.com/dotnet/corefx) | [4.3.0](https://www.nuget.org/packages/System.Diagnostics.Debug/4.3.0) |[MIT](https://opensource.org/licenses/MIT) | Interact with Processes / Debug / Console | | -| System.IO.FileSystem.Primitives | [GitHub](https://github.com/dotnet/corefx) | [4.3.0](https://www.nuget.org/packages/System.IO.FileSystem.Primitives/4.3.0) |[MIT](https://opensource.org/licenses/MIT) | Provides common enumerations and exceptions for path-based I/O libraries | | -| System.IO.FileSystem | [GitHub](https://github.com/dotnet/corefx) | [4.3.0](https://www.nuget.org/packages/System.IO.FileSystem/4.3.0) |[MIT](https://opensource.org/licenses/MIT) | Provides types that allow reading and writing to files | | -| System.Runtime.Extensions | [GitHub](https://github.com/dotnet/corefx) | [4.3.1](https://www.nuget.org/packages/System.Runtime.Extensions/4.3.1) |[MIT](https://opensource.org/licenses/MIT) | Provides commonly-used classes for performing mathematical functions, conversions, string comparisons etc | | -| System.Threading | [GitHub](https://github.com/dotnet/corefx) | [4.3.0](https://www.nuget.org/packages/System.Threading/4.3.0) |[MIT](https://opensource.org/licenses/MIT) | Provides the fundamental synchronization primitives | | -| System.Threading.AccessControl | 
[GitHub](https://github.com/dotnet/runtime) | [7.0.0](https://www.nuget.org/packages/System.Threading.AccessControl/7.0.0) |[MIT](https://opensource.org/licenses/MIT) | Required by Scintilla for sync primitives | | -| System.Threading.ThreadPool | [GitHub](https://github.com/dotnet/corefx) | [4.3.0](https://www.nuget.org/packages/System.Threading.ThreadPool/4.3.0) |[MIT](https://opensource.org/licenses/MIT) | Required to compile native linux binaries | | -| System.Globalization | [GitHub](https://github.com/dotnet/corefx) | [4.3.0](https://www.nuget.org/packages/System.Globalization/4.3.0) |[MIT](https://opensource.org/licenses/MIT) | Provides classes that define culture-related information | | -| System.Net.NameResolution | [GitHub](https://github.com/dotnet/corefx) | [4.3.0](https://www.nuget.org/packages/System.Net.NameResolution/4.3.0) |[MIT](https://opensource.org/licenses/MIT) | Provides the System.Net.Dns class, which enables developers to perform simple domain name resolution | | -| System.Net.Primitives | [GitHub](https://github.com/dotnet/corefx) | [4.3.1](https://www.nuget.org/packages/System.Net.Primitives/4.3.1) |[MIT](https://opensource.org/licenses/MIT) | Provides common types for network-based libraries | | -| System.Security.Permissions |[GitHub](https://github.com/dotnet/corefx) | [7.0.0](https://www.nuget.org/packages/System.Security.Permissions/7.0.0) |[MIT](https://opensource.org/licenses/MIT) | Provides common types for Xml doc reading in UI code | | -| [AutoComplete Console](https://www.codeproject.com/Articles/1182358/Using-Autocomplete-in-Windows-Console-Applications) by Jasper Lammers | Embedded | 4.0 | [CPOL](https://www.codeproject.com/info/cpol10.aspx) | Provides interactive autocomplete in console input | | -| System.Resources.Extensions | [GitHub](https://github.com/dotnet/corefx) | [4.6.0](https://www.nuget.org/packages/System.Resources.Extensions/4.6.0) | [MIT](https://opensource.org/licenses/MIT) | Allows [publishing with dotnet publish on machines with netcoreapp3.0 SDK installed](https://github.com/microsoft/msbuild/issues/4704#issuecomment-530034240) | | -| Spectre.Console | [GitHub](https://github.com/spectreconsole/spectre.console) | [0.45.0](https://www.nuget.org/packages/Spectre.Console/0.45.0) | [MIT](https://opensource.org/licenses/MIT) | Allows richer command line interactions| | -| HIC.System.Windows.Forms.DataVisualization | [GitHub](https://github.com/HicServices/winforms-datavisualization) | [1.0.1](https://www.nuget.org/packages/HIC.System.Windows.Forms.DataVisualization/1.0.1) |[MIT](https://opensource.org/licenses/MIT) | Dotnet core support for DQE charts | | -| System.DirectoryServices.Protocols | [GitHub](https://github.com/dotnet/runtime) | [7.0.0](https://www.nuget.org/packages/System.DirectoryServices.Protocols/7.0.0) | MIT | Required dependency of Oracle when using LDAP auth | -| Autoupdater.NET | [GitHub](https://github.com/ravibpatel/AutoUpdater.NET) | [1.7.0](https://github.com/ravibpatel/AutoUpdater.NET) | MIT | Manages updating of the RDMP windows client directly from the RDMP GitHub Releases| -| ConsoleControl | [GitHub](https://github.com/dwmkerr/consolecontrol) | [1.3.0](https://www.nuget.org/packages/ConsoleControl/) | MIT | Runs RDMP cli subprocesses| - +| Package | Source Code | License | Purpose | Additional Risk Assessment | +| ------- | ------------| ------- | ------- | -------------------------- | +| Equ | [GitHub](https://github.com/thedmi/Equ) | [MIT](https://opensource.org/licenses/MIT) | Simplifies object equality 
implementation | | +| MongoDB.Driver | [GitHub](https://github.com/mongodb/mongo-csharp-driver) | [Apache 2.0](https://opensource.org/licenses/Apache-2.0) | Database driver for MongoDB | | +| Microsoft.SourceLink.GitHub | [GitHub](https://github.com/dotnet/sourcelink) | [MIT](https://opensource.org/licenses/MIT) | Enable source linkage from nupkg | Official MS project | +| Microsoft.XmlSerializer.Generator | [Microsoft](https://learn.microsoft.com/en-us/dotnet/core/additional-tools/xml-serializer-generator) | [MIT](https://opensource.org/licenses/MIT) | XML handling improvements | +| ObjectListView.Repack.NET6Plus | [GitHub](https://github.com/nasisakk/ObjectListViewRepack) | [GPL 3.0](https://www.gnu.org/licenses/gpl-3.0.html) | | +| Scintilla.NET | [GitHub](https://github.com/VPKSoft/Scintilla.NET) | [MIT](https://opensource.org/licenses/MIT) | | +| VPKSoft.ScintillaLexers.NET | [GitHub](https://github.com/VPKSoft/ScintillaLexers) | [MIT](https://opensource.org/licenses/MIT) | | +| WeCantSpell.Hunspell | [GitHub](https://github.com/aarondandy/WeCantSpell.Hunspell/) | [GPL-2 and others](https://github.com/aarondandy/WeCantSpell.Hunspell/blob/main/license.txt) | | +| [DockPanelSuite.ThemeVS2015](http://dockpanelsuite.com/) | [GitHub](https://github.com/dockpanelsuite/dockpanelsuite) | [MIT](https://opensource.org/licenses/MIT) | Provides Window layout and docking for RDMP. | There are no powershell initialization files in the package which can be run by the NuGet installer.| +| [FAM FAM FAM Icons](https://web.archive.org/web/20070824000227/http://www.famfamfam.com/lab/icons/silk/) | N\A | [CC 2.5](https://creativecommons.org/licenses/by/2.5/) | Icons for user interfaces | +| CommandLineParser | [GitHub](https://github.com/commandlineparser/commandline) | [MIT](https://opensource.org/licenses/MIT) | Allows command line arguments for main client application and CLI executables | +| CsvHelper | [GitHub](https://github.com/JoshClose/CsvHelper) | MS-PL / Apache 2.0 | Enables reading/writing CSV files | +| NPOI | [GitHub](https://github.com/tonyqus/npoi) | Apache 2.0 | Enables reading/writing Microsoft Excel files | +| ExcelNumberFormat | [GitHub](https://github.com/andersnm/ExcelNumberFormat) |[MIT](https://opensource.org/licenses/MIT) | Handles translating number formats from Excel formats into usable values | | +| LibArchive.Net | [GitHub](https://github.com/jas88/libarchive.net) | [BSD](https://opensource.org/license/bsd-2-clause/) | Access archive formats without the LZMA bugs of SharpCompress | | +| [NLog](https://nlog-project.org/) | [GitHub](https://github.com/NLog/NLog) | [BSD 3-Clause](https://github.com/NLog/NLog/blob/dev/LICENSE.txt) | Flexible user configurable logging | | +| HIC.FAnsiSql |[GitHub](https://github.com/HicServices/FAnsiSql) | [GPL 3.0](https://www.gnu.org/licenses/gpl-3.0.html) | [DBMS] abstraction layer | +| HIC.BadMedicine | [GitHub](https://github.com/HicServices/BadMedicine) | [GPL 3.0](https://www.gnu.org/licenses/gpl-3.0.html) | Generate Test Datasets for tests/exericses | +| SSH.NET | [GitHub](https://github.com/sshnet/SSH.NET) | [MIT](https://github.com/sshnet/SSH.NET/blob/develop/LICENSE) | Enables fetching files from SFTP servers | +| Moq 4 | [GitHub](https://github.com/moq/moq4) |[BSD 3](https://github.com/moq/moq4/blob/master/License.txt) | Mock objects during unit testing | +| [Newtonsoft.Json](https://www.newtonsoft.com/json) | [GitHub](https://github.com/JamesNK/Newtonsoft.Json) | [MIT](https://opensource.org/licenses/MIT) | Serialization of objects for 
sharing/transmission | +| YamlDotNet | [GitHub](https://github.com/aaubry/YamlDotNet) | [MIT](https://opensource.org/licenses/MIT) |Loading configuration files| +| [SecurityCodeScan.VS2019](https://security-code-scan.github.io/) | [GitHub](https://github.com/security-code-scan/security-code-scan) | [GPL 3.0](https://www.gnu.org/licenses/gpl-3.0.html)| Performs static build time analysis for vulnerabilities in the codebase (e.g. Sql injection)| | +| SixLabors.ImageSharp | [GitHub](https://github.com/SixLabors/ImageSharp) | [Apache 2.0](https://github.com/SixLabors/ImageSharp/blob/main/LICENSE) | Platform-independent replacement for legacy Windows-only System.Drawing.Common | | +| SixLabors.ImageSharp.Drawing | [GitHub](https://github.com/SixLabors/ImageSharp.Drawing) | [Apache 2.0](https://github.com/SixLabors/ImageSharp/blob/main/LICENSE) | Font handling for ImageSharp | | +| System.Runtime.Loader | [GitHub](https://github.com/dotnet/corefx) |[MIT](https://opensource.org/licenses/MIT) | Allows loading assemblies in dot net core| | +| System.Diagnostics.Debug | [GitHub](https://github.com/dotnet/corefx) |[MIT](https://opensource.org/licenses/MIT) | Interact with Processes / Debug / Console | | +| System.IO.FileSystem.Primitives | [GitHub](https://github.com/dotnet/corefx) |[MIT](https://opensource.org/licenses/MIT) | Provides common enumerations and exceptions for path-based I/O libraries | | +| System.IO.FileSystem | [GitHub](https://github.com/dotnet/corefx) |[MIT](https://opensource.org/licenses/MIT) | Provides types that allow reading and writing to files | | +| System.Runtime.Extensions | [GitHub](https://github.com/dotnet/corefx) |[MIT](https://opensource.org/licenses/MIT) | Provides commonly-used classes for performing mathematical functions, conversions, string comparisons etc | | +| System.Threading | [GitHub](https://github.com/dotnet/corefx) |[MIT](https://opensource.org/licenses/MIT) | Provides the fundamental synchronization primitives | | +| System.Threading.AccessControl | [GitHub](https://github.com/dotnet/runtime) |[MIT](https://opensource.org/licenses/MIT) | Required by Scintilla for sync primitives | | +| System.Threading.ThreadPool | [GitHub](https://github.com/dotnet/corefx) |[MIT](https://opensource.org/licenses/MIT) | Required to compile native linux binaries | | +| System.Globalization | [GitHub](https://github.com/dotnet/corefx) |[MIT](https://opensource.org/licenses/MIT) | Provides classes that define culture-related information | | +| System.Net.NameResolution | [GitHub](https://github.com/dotnet/corefx) |[MIT](https://opensource.org/licenses/MIT) | Provides the System.Net.Dns class, which enables developers to perform simple domain name resolution | | +| System.Net.Primitives | [GitHub](https://github.com/dotnet/corefx) |[MIT](https://opensource.org/licenses/MIT) | Provides common types for network-based libraries | | +| System.Security.Permissions |[GitHub](https://github.com/dotnet/corefx) |[MIT](https://opensource.org/licenses/MIT) | Provides common types for Xml doc reading in UI code | | +| [AutoComplete Console](https://www.codeproject.com/Articles/1182358/Using-Autocomplete-in-Windows-Console-Applications) by Jasper Lammers | Embedded | [CPOL](https://www.codeproject.com/info/cpol10.aspx) | Provides interactive autocomplete in console input | | +| System.Resources.Extensions | [GitHub](https://github.com/dotnet/corefx) | [MIT](https://opensource.org/licenses/MIT) | Allows [publishing with dotnet publish on machines with netcoreapp3.0 SDK 
installed](https://github.com/microsoft/msbuild/issues/4704#issuecomment-530034240) | | +| Spectre.Console | [GitHub](https://github.com/spectreconsole/spectre.console) | [MIT](https://opensource.org/licenses/MIT) | Allows richer command line interactions| | +| HIC.System.Windows.Forms.DataVisualization | [GitHub](https://github.com/HicServices/winforms-datavisualization) |[MIT](https://opensource.org/licenses/MIT) | Dotnet core support for DQE charts | | +| Autoupdater.NET.Official | [GitHub](https://github.com/ravibpatel/AutoUpdater.NET) | MIT | Manages updating of the RDMP windows client directly from the RDMP GitHub Releases| +| ConsoleControl | [GitHub](https://github.com/dwmkerr/consolecontrol) | MIT | Runs RDMP cli subprocesses| +| Terminal.Gui | [GitHub](https://github.com/gui-cs/Terminal.Gui) | [MIT](https://opensource.org/licenses/MIT) | Console user-interface| [DBMS]: ./Glossary.md#DBMS diff --git a/Documentation/CodeTutorials/PluginWriting.md b/Documentation/CodeTutorials/PluginWriting.md index 22d6fa1476..de284ee2ab 100644 --- a/Documentation/CodeTutorials/PluginWriting.md +++ b/Documentation/CodeTutorials/PluginWriting.md @@ -9,7 +9,7 @@ 5. [Tests](#tests) * [Unit Tests](#unitTests) * [Setting up Database Tests](#databaseTestsSetup) - * [Writting a Database Test](#databaseTestsWritting) + * [Writing a Database Test](#databaseTestsWriting) 6. [Checks](#checks) * [Version 4](#anoPluginVersion4) 7. [Progress Logging](#progress) @@ -602,8 +602,8 @@ Create these databases you can use the main RDMP UI: Clean and Rebuild your project and run the unit test again. It should pass this time. - -## Writting a Database Test + +## Writing a Database Test Add a new test ```csharp @@ -809,7 +809,7 @@ Go to your unit tests and write a test for it passing it a `ThrowImmediatelyChec public void TestBasicDataTableAnonymiser4_FailConditions() { var a = new BasicDataTableAnonymiser4(); - a.Check(new ThrowImmediatelyCheckNotifier()); + a.Check(ThrowImmediatelyCheckNotifier.Quiet()); } ``` @@ -855,7 +855,7 @@ namespace MyPipelinePlugin public DataTable ProcessPipelineData(DataTable toProcess, IDataLoadEventListener listener,GracefulCancellationToken cancellationToken) { - GetCommonNamesTable(new ThrowImmediatelyCheckNotifier()); + GetCommonNamesTable(ThrowImmediatelyCheckNotifier.Quiet()); //Go through each row in the table foreach (DataRow row in toProcess.Rows) @@ -948,7 +948,7 @@ Now we can run our test and see an error that makes sense public void TestBasicDataTableAnonymiser4_FailConditions() { var a = new BasicDataTableAnonymiser4(); - var ex = Assert.Throws(()=>a.Check(new ThrowImmediatelyCheckNotifier())); + var ex = Assert.Throws(()=>a.Check(ThrowImmediatelyCheckNotifier.Quiet())); Assert.IsTrue(ex.Message.Contains("No NamesTable has been set")); } ``` @@ -1042,7 +1042,7 @@ This will let us record how long is specifically spent on the anonymisation of t ```csharp public DataTable ProcessPipelineData(DataTable toProcess, IDataLoadEventListener listener, GracefulCancellationToken cancellationToken) { - GetCommonNamesTable(new ThrowImmediatelyCheckNotifier()); + GetCommonNamesTable(ThrowImmediatelyCheckNotifier.Quiet()); listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "Ready to process batch with row count " + toProcess.Rows.Count)); diff --git a/HIC.DataManagementPlatform.sln b/HIC.DataManagementPlatform.sln index 635bd477ed..9601ae9ef9 100644 --- a/HIC.DataManagementPlatform.sln +++ b/HIC.DataManagementPlatform.sln @@ -1,10 +1,7 @@ - Microsoft Visual Studio 
Solution File, Format Version 12.00 # Visual Studio Version 17 VisualStudioVersion = 17.3.32825.248 MinimumVisualStudioVersion = 10.0.40219.1 -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ReusableLibraryCode", "Reusable\ReusableLibraryCode\ReusableLibraryCode.csproj", "{5AD03B2F-232A-4E0B-86FF-DC7F49146962}" -EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = ".nuget", ".nuget", "{E42F0D5F-79D5-4322-BE3F-F52DCAAED2C3}" ProjectSection(SolutionItems) = preProject .nuget\packages.config = .nuget\packages.config @@ -12,19 +9,12 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = ".nuget", ".nuget", "{E42F0D EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Tests.Common", "Tests.Common\Tests.Common.csproj", "{CA13A431-7FB6-4BCE-8521-42D130934311}" EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "MapsDirectlyToDatabaseTable", "Reusable\MapsDirectlyToDatabaseTable\MapsDirectlyToDatabaseTable.csproj", "{7F56D629-DC24-43A5-A338-073687433E55}" -EndProject -Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Reusable", "Reusable", "{9336B43C-AC6E-4ADF-AB8D-F78B0AA92B97}" -EndProject -Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Tests", "Tests", "{C4EEDBD1-ACCE-47AF-83FA-99C5E5C33D0A}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ReusableCodeTests", "Reusable\Tests\ReusableCodeTests\ReusableCodeTests.csproj", "{FEE67387-9078-4691-8856-F12C597B0BBE}" -EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{AEDF99FB-885A-4243-8DFE-6A4E3F40D50D}" ProjectSection(SolutionItems) = preProject .github\workflows\build.yml = .github\workflows\build.yml CHANGELOG.md = CHANGELOG.md deadlinksconfig.json = deadlinksconfig.json + directory.build.props = directory.build.props .github\workflows\links.yml = .github\workflows\links.yml NoteForNewDevelopers.md = NoteForNewDevelopers.md Documentation\CodeTutorials\Packages.md = Documentation\CodeTutorials\Packages.md @@ -49,14 +39,6 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Rdmp.UI", "Rdmp.UI\Rdmp.UI. 
EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Rdmp.UI.Tests", "Rdmp.UI.Tests\Rdmp.UI.Tests.csproj", "{8048FC7B-7F1F-4F7D-A47B-48342B8FF018}" EndProject -Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Plugins", "Plugins", "{FDA39DF9-32DD-4180-AEDA-28AFF7A863AA}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Plugin", "Plugins\Plugin\Plugin.csproj", "{D09313E9-0368-47BF-9900-D8296C2581BC}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Plugin.UI", "Plugins\Plugin.UI\Plugin.UI.csproj", "{CE554269-9AFC-45FF-9A5B-2289938250CE}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Plugin.Test", "Plugins\Plugin.Test\Plugin.Test.csproj", "{3647CB7F-FA3A-487E-B766-6BD0B8B37579}" -EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Documentation", "Documentation", "{BEEC317E-0984-4EA6-AF33-CBF37096E01B}" ProjectSection(SolutionItems) = preProject Documentation\CodeTutorials\ChangeTracking.md = Documentation\CodeTutorials\ChangeTracking.md @@ -112,33 +94,6 @@ Global Test|x86 = Test|x86 EndGlobalSection GlobalSection(ProjectConfigurationPlatforms) = postSolution - {5AD03B2F-232A-4E0B-86FF-DC7F49146962}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {5AD03B2F-232A-4E0B-86FF-DC7F49146962}.Debug|Any CPU.Build.0 = Debug|Any CPU - {5AD03B2F-232A-4E0B-86FF-DC7F49146962}.Debug|Mixed Platforms.ActiveCfg = Debug|Any CPU - {5AD03B2F-232A-4E0B-86FF-DC7F49146962}.Debug|Mixed Platforms.Build.0 = Debug|Any CPU - {5AD03B2F-232A-4E0B-86FF-DC7F49146962}.Debug|x64.ActiveCfg = Debug|Any CPU - {5AD03B2F-232A-4E0B-86FF-DC7F49146962}.Debug|x64.Build.0 = Debug|Any CPU - {5AD03B2F-232A-4E0B-86FF-DC7F49146962}.Debug|x86.ActiveCfg = Debug|Any CPU - {5AD03B2F-232A-4E0B-86FF-DC7F49146962}.Development|Any CPU.ActiveCfg = Debug|Any CPU - {5AD03B2F-232A-4E0B-86FF-DC7F49146962}.Development|Any CPU.Build.0 = Debug|Any CPU - {5AD03B2F-232A-4E0B-86FF-DC7F49146962}.Development|Mixed Platforms.ActiveCfg = Debug|Any CPU - {5AD03B2F-232A-4E0B-86FF-DC7F49146962}.Development|Mixed Platforms.Build.0 = Debug|Any CPU - {5AD03B2F-232A-4E0B-86FF-DC7F49146962}.Development|x64.ActiveCfg = Debug|Any CPU - {5AD03B2F-232A-4E0B-86FF-DC7F49146962}.Development|x86.ActiveCfg = Debug|Any CPU - {5AD03B2F-232A-4E0B-86FF-DC7F49146962}.Release|Any CPU.ActiveCfg = Release|Any CPU - {5AD03B2F-232A-4E0B-86FF-DC7F49146962}.Release|Any CPU.Build.0 = Release|Any CPU - {5AD03B2F-232A-4E0B-86FF-DC7F49146962}.Release|Mixed Platforms.ActiveCfg = Release|Any CPU - {5AD03B2F-232A-4E0B-86FF-DC7F49146962}.Release|Mixed Platforms.Build.0 = Release|Any CPU - {5AD03B2F-232A-4E0B-86FF-DC7F49146962}.Release|x64.ActiveCfg = Release|Any CPU - {5AD03B2F-232A-4E0B-86FF-DC7F49146962}.Release|x86.ActiveCfg = Release|Any CPU - {5AD03B2F-232A-4E0B-86FF-DC7F49146962}.Test|Any CPU.ActiveCfg = Release|Any CPU - {5AD03B2F-232A-4E0B-86FF-DC7F49146962}.Test|Any CPU.Build.0 = Release|Any CPU - {5AD03B2F-232A-4E0B-86FF-DC7F49146962}.Test|Mixed Platforms.ActiveCfg = Release|Any CPU - {5AD03B2F-232A-4E0B-86FF-DC7F49146962}.Test|Mixed Platforms.Build.0 = Release|Any CPU - {5AD03B2F-232A-4E0B-86FF-DC7F49146962}.Test|x64.ActiveCfg = Release|Any CPU - {5AD03B2F-232A-4E0B-86FF-DC7F49146962}.Test|x64.Build.0 = Release|Any CPU - {5AD03B2F-232A-4E0B-86FF-DC7F49146962}.Test|x86.ActiveCfg = Release|Any CPU - {5AD03B2F-232A-4E0B-86FF-DC7F49146962}.Test|x86.Build.0 = Release|Any CPU {CA13A431-7FB6-4BCE-8521-42D130934311}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {CA13A431-7FB6-4BCE-8521-42D130934311}.Debug|Any CPU.Build.0 = 
Debug|Any CPU {CA13A431-7FB6-4BCE-8521-42D130934311}.Debug|Mixed Platforms.ActiveCfg = Debug|Any CPU @@ -164,56 +119,6 @@ Global {CA13A431-7FB6-4BCE-8521-42D130934311}.Test|Mixed Platforms.Build.0 = Release|Any CPU {CA13A431-7FB6-4BCE-8521-42D130934311}.Test|x64.ActiveCfg = Release|Any CPU {CA13A431-7FB6-4BCE-8521-42D130934311}.Test|x86.ActiveCfg = Release|Any CPU - {7F56D629-DC24-43A5-A338-073687433E55}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {7F56D629-DC24-43A5-A338-073687433E55}.Debug|Any CPU.Build.0 = Debug|Any CPU - {7F56D629-DC24-43A5-A338-073687433E55}.Debug|Mixed Platforms.ActiveCfg = Debug|Any CPU - {7F56D629-DC24-43A5-A338-073687433E55}.Debug|Mixed Platforms.Build.0 = Debug|Any CPU - {7F56D629-DC24-43A5-A338-073687433E55}.Debug|x64.ActiveCfg = Debug|Any CPU - {7F56D629-DC24-43A5-A338-073687433E55}.Debug|x64.Build.0 = Debug|Any CPU - {7F56D629-DC24-43A5-A338-073687433E55}.Debug|x86.ActiveCfg = Debug|Any CPU - {7F56D629-DC24-43A5-A338-073687433E55}.Development|Any CPU.ActiveCfg = Debug|Any CPU - {7F56D629-DC24-43A5-A338-073687433E55}.Development|Any CPU.Build.0 = Debug|Any CPU - {7F56D629-DC24-43A5-A338-073687433E55}.Development|Mixed Platforms.ActiveCfg = Debug|Any CPU - {7F56D629-DC24-43A5-A338-073687433E55}.Development|Mixed Platforms.Build.0 = Debug|Any CPU - {7F56D629-DC24-43A5-A338-073687433E55}.Development|x64.ActiveCfg = Debug|Any CPU - {7F56D629-DC24-43A5-A338-073687433E55}.Development|x86.ActiveCfg = Debug|Any CPU - {7F56D629-DC24-43A5-A338-073687433E55}.Release|Any CPU.ActiveCfg = Release|Any CPU - {7F56D629-DC24-43A5-A338-073687433E55}.Release|Any CPU.Build.0 = Release|Any CPU - {7F56D629-DC24-43A5-A338-073687433E55}.Release|Mixed Platforms.ActiveCfg = Release|Any CPU - {7F56D629-DC24-43A5-A338-073687433E55}.Release|Mixed Platforms.Build.0 = Release|Any CPU - {7F56D629-DC24-43A5-A338-073687433E55}.Release|x64.ActiveCfg = Release|Any CPU - {7F56D629-DC24-43A5-A338-073687433E55}.Release|x86.ActiveCfg = Release|Any CPU - {7F56D629-DC24-43A5-A338-073687433E55}.Test|Any CPU.ActiveCfg = Release|Any CPU - {7F56D629-DC24-43A5-A338-073687433E55}.Test|Any CPU.Build.0 = Release|Any CPU - {7F56D629-DC24-43A5-A338-073687433E55}.Test|Mixed Platforms.ActiveCfg = Release|Any CPU - {7F56D629-DC24-43A5-A338-073687433E55}.Test|Mixed Platforms.Build.0 = Release|Any CPU - {7F56D629-DC24-43A5-A338-073687433E55}.Test|x64.ActiveCfg = Release|Any CPU - {7F56D629-DC24-43A5-A338-073687433E55}.Test|x86.ActiveCfg = Release|Any CPU - {FEE67387-9078-4691-8856-F12C597B0BBE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {FEE67387-9078-4691-8856-F12C597B0BBE}.Debug|Any CPU.Build.0 = Debug|Any CPU - {FEE67387-9078-4691-8856-F12C597B0BBE}.Debug|Mixed Platforms.ActiveCfg = Debug|Any CPU - {FEE67387-9078-4691-8856-F12C597B0BBE}.Debug|Mixed Platforms.Build.0 = Debug|Any CPU - {FEE67387-9078-4691-8856-F12C597B0BBE}.Debug|x64.ActiveCfg = Debug|Any CPU - {FEE67387-9078-4691-8856-F12C597B0BBE}.Debug|x64.Build.0 = Debug|Any CPU - {FEE67387-9078-4691-8856-F12C597B0BBE}.Debug|x86.ActiveCfg = Debug|Any CPU - {FEE67387-9078-4691-8856-F12C597B0BBE}.Development|Any CPU.ActiveCfg = Debug|Any CPU - {FEE67387-9078-4691-8856-F12C597B0BBE}.Development|Any CPU.Build.0 = Debug|Any CPU - {FEE67387-9078-4691-8856-F12C597B0BBE}.Development|Mixed Platforms.ActiveCfg = Debug|Any CPU - {FEE67387-9078-4691-8856-F12C597B0BBE}.Development|Mixed Platforms.Build.0 = Debug|Any CPU - {FEE67387-9078-4691-8856-F12C597B0BBE}.Development|x64.ActiveCfg = Debug|Any CPU - {FEE67387-9078-4691-8856-F12C597B0BBE}.Development|x86.ActiveCfg = Debug|Any 
CPU - {FEE67387-9078-4691-8856-F12C597B0BBE}.Release|Any CPU.ActiveCfg = Release|Any CPU - {FEE67387-9078-4691-8856-F12C597B0BBE}.Release|Any CPU.Build.0 = Release|Any CPU - {FEE67387-9078-4691-8856-F12C597B0BBE}.Release|Mixed Platforms.ActiveCfg = Release|Any CPU - {FEE67387-9078-4691-8856-F12C597B0BBE}.Release|Mixed Platforms.Build.0 = Release|Any CPU - {FEE67387-9078-4691-8856-F12C597B0BBE}.Release|x64.ActiveCfg = Release|Any CPU - {FEE67387-9078-4691-8856-F12C597B0BBE}.Release|x86.ActiveCfg = Release|Any CPU - {FEE67387-9078-4691-8856-F12C597B0BBE}.Test|Any CPU.ActiveCfg = Release|Any CPU - {FEE67387-9078-4691-8856-F12C597B0BBE}.Test|Any CPU.Build.0 = Release|Any CPU - {FEE67387-9078-4691-8856-F12C597B0BBE}.Test|Mixed Platforms.ActiveCfg = Release|Any CPU - {FEE67387-9078-4691-8856-F12C597B0BBE}.Test|Mixed Platforms.Build.0 = Release|Any CPU - {FEE67387-9078-4691-8856-F12C597B0BBE}.Test|x64.ActiveCfg = Release|Any CPU - {FEE67387-9078-4691-8856-F12C597B0BBE}.Test|x86.ActiveCfg = Release|Any CPU {550988FD-F1FA-41D8-BE0F-00B4DE47D320}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {550988FD-F1FA-41D8-BE0F-00B4DE47D320}.Debug|Any CPU.Build.0 = Debug|Any CPU {550988FD-F1FA-41D8-BE0F-00B4DE47D320}.Debug|Mixed Platforms.ActiveCfg = Debug|Any CPU @@ -399,116 +304,13 @@ Global {8048FC7B-7F1F-4F7D-A47B-48342B8FF018}.Test|x64.Build.0 = Debug|Any CPU {8048FC7B-7F1F-4F7D-A47B-48342B8FF018}.Test|x86.ActiveCfg = Debug|Any CPU {8048FC7B-7F1F-4F7D-A47B-48342B8FF018}.Test|x86.Build.0 = Debug|Any CPU - {D09313E9-0368-47BF-9900-D8296C2581BC}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {D09313E9-0368-47BF-9900-D8296C2581BC}.Debug|Any CPU.Build.0 = Debug|Any CPU - {D09313E9-0368-47BF-9900-D8296C2581BC}.Debug|Mixed Platforms.ActiveCfg = Debug|Any CPU - {D09313E9-0368-47BF-9900-D8296C2581BC}.Debug|Mixed Platforms.Build.0 = Debug|Any CPU - {D09313E9-0368-47BF-9900-D8296C2581BC}.Debug|x64.ActiveCfg = Debug|Any CPU - {D09313E9-0368-47BF-9900-D8296C2581BC}.Debug|x64.Build.0 = Debug|Any CPU - {D09313E9-0368-47BF-9900-D8296C2581BC}.Debug|x86.ActiveCfg = Debug|Any CPU - {D09313E9-0368-47BF-9900-D8296C2581BC}.Debug|x86.Build.0 = Debug|Any CPU - {D09313E9-0368-47BF-9900-D8296C2581BC}.Development|Any CPU.ActiveCfg = Debug|Any CPU - {D09313E9-0368-47BF-9900-D8296C2581BC}.Development|Any CPU.Build.0 = Debug|Any CPU - {D09313E9-0368-47BF-9900-D8296C2581BC}.Development|Mixed Platforms.ActiveCfg = Debug|Any CPU - {D09313E9-0368-47BF-9900-D8296C2581BC}.Development|Mixed Platforms.Build.0 = Debug|Any CPU - {D09313E9-0368-47BF-9900-D8296C2581BC}.Development|x64.ActiveCfg = Debug|Any CPU - {D09313E9-0368-47BF-9900-D8296C2581BC}.Development|x64.Build.0 = Debug|Any CPU - {D09313E9-0368-47BF-9900-D8296C2581BC}.Development|x86.ActiveCfg = Debug|Any CPU - {D09313E9-0368-47BF-9900-D8296C2581BC}.Development|x86.Build.0 = Debug|Any CPU - {D09313E9-0368-47BF-9900-D8296C2581BC}.Release|Any CPU.ActiveCfg = Release|Any CPU - {D09313E9-0368-47BF-9900-D8296C2581BC}.Release|Any CPU.Build.0 = Release|Any CPU - {D09313E9-0368-47BF-9900-D8296C2581BC}.Release|Mixed Platforms.ActiveCfg = Release|Any CPU - {D09313E9-0368-47BF-9900-D8296C2581BC}.Release|Mixed Platforms.Build.0 = Release|Any CPU - {D09313E9-0368-47BF-9900-D8296C2581BC}.Release|x64.ActiveCfg = Release|Any CPU - {D09313E9-0368-47BF-9900-D8296C2581BC}.Release|x64.Build.0 = Release|Any CPU - {D09313E9-0368-47BF-9900-D8296C2581BC}.Release|x86.ActiveCfg = Release|Any CPU - {D09313E9-0368-47BF-9900-D8296C2581BC}.Release|x86.Build.0 = Release|Any CPU - {D09313E9-0368-47BF-9900-D8296C2581BC}.Test|Any 
CPU.ActiveCfg = Debug|Any CPU - {D09313E9-0368-47BF-9900-D8296C2581BC}.Test|Any CPU.Build.0 = Debug|Any CPU - {D09313E9-0368-47BF-9900-D8296C2581BC}.Test|Mixed Platforms.ActiveCfg = Debug|Any CPU - {D09313E9-0368-47BF-9900-D8296C2581BC}.Test|Mixed Platforms.Build.0 = Debug|Any CPU - {D09313E9-0368-47BF-9900-D8296C2581BC}.Test|x64.ActiveCfg = Debug|Any CPU - {D09313E9-0368-47BF-9900-D8296C2581BC}.Test|x64.Build.0 = Debug|Any CPU - {D09313E9-0368-47BF-9900-D8296C2581BC}.Test|x86.ActiveCfg = Debug|Any CPU - {D09313E9-0368-47BF-9900-D8296C2581BC}.Test|x86.Build.0 = Debug|Any CPU - {CE554269-9AFC-45FF-9A5B-2289938250CE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {CE554269-9AFC-45FF-9A5B-2289938250CE}.Debug|Any CPU.Build.0 = Debug|Any CPU - {CE554269-9AFC-45FF-9A5B-2289938250CE}.Debug|Mixed Platforms.ActiveCfg = Debug|Any CPU - {CE554269-9AFC-45FF-9A5B-2289938250CE}.Debug|Mixed Platforms.Build.0 = Debug|Any CPU - {CE554269-9AFC-45FF-9A5B-2289938250CE}.Debug|x64.ActiveCfg = Debug|Any CPU - {CE554269-9AFC-45FF-9A5B-2289938250CE}.Debug|x64.Build.0 = Debug|Any CPU - {CE554269-9AFC-45FF-9A5B-2289938250CE}.Debug|x86.ActiveCfg = Debug|Any CPU - {CE554269-9AFC-45FF-9A5B-2289938250CE}.Debug|x86.Build.0 = Debug|Any CPU - {CE554269-9AFC-45FF-9A5B-2289938250CE}.Development|Any CPU.ActiveCfg = Debug|Any CPU - {CE554269-9AFC-45FF-9A5B-2289938250CE}.Development|Any CPU.Build.0 = Debug|Any CPU - {CE554269-9AFC-45FF-9A5B-2289938250CE}.Development|Mixed Platforms.ActiveCfg = Debug|Any CPU - {CE554269-9AFC-45FF-9A5B-2289938250CE}.Development|Mixed Platforms.Build.0 = Debug|Any CPU - {CE554269-9AFC-45FF-9A5B-2289938250CE}.Development|x64.ActiveCfg = Debug|Any CPU - {CE554269-9AFC-45FF-9A5B-2289938250CE}.Development|x64.Build.0 = Debug|Any CPU - {CE554269-9AFC-45FF-9A5B-2289938250CE}.Development|x86.ActiveCfg = Debug|Any CPU - {CE554269-9AFC-45FF-9A5B-2289938250CE}.Development|x86.Build.0 = Debug|Any CPU - {CE554269-9AFC-45FF-9A5B-2289938250CE}.Release|Any CPU.ActiveCfg = Release|Any CPU - {CE554269-9AFC-45FF-9A5B-2289938250CE}.Release|Any CPU.Build.0 = Release|Any CPU - {CE554269-9AFC-45FF-9A5B-2289938250CE}.Release|Mixed Platforms.ActiveCfg = Release|Any CPU - {CE554269-9AFC-45FF-9A5B-2289938250CE}.Release|Mixed Platforms.Build.0 = Release|Any CPU - {CE554269-9AFC-45FF-9A5B-2289938250CE}.Release|x64.ActiveCfg = Release|Any CPU - {CE554269-9AFC-45FF-9A5B-2289938250CE}.Release|x64.Build.0 = Release|Any CPU - {CE554269-9AFC-45FF-9A5B-2289938250CE}.Release|x86.ActiveCfg = Release|Any CPU - {CE554269-9AFC-45FF-9A5B-2289938250CE}.Release|x86.Build.0 = Release|Any CPU - {CE554269-9AFC-45FF-9A5B-2289938250CE}.Test|Any CPU.ActiveCfg = Debug|Any CPU - {CE554269-9AFC-45FF-9A5B-2289938250CE}.Test|Any CPU.Build.0 = Debug|Any CPU - {CE554269-9AFC-45FF-9A5B-2289938250CE}.Test|Mixed Platforms.ActiveCfg = Debug|Any CPU - {CE554269-9AFC-45FF-9A5B-2289938250CE}.Test|Mixed Platforms.Build.0 = Debug|Any CPU - {CE554269-9AFC-45FF-9A5B-2289938250CE}.Test|x64.ActiveCfg = Debug|Any CPU - {CE554269-9AFC-45FF-9A5B-2289938250CE}.Test|x64.Build.0 = Debug|Any CPU - {CE554269-9AFC-45FF-9A5B-2289938250CE}.Test|x86.ActiveCfg = Debug|Any CPU - {CE554269-9AFC-45FF-9A5B-2289938250CE}.Test|x86.Build.0 = Debug|Any CPU - {3647CB7F-FA3A-487E-B766-6BD0B8B37579}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {3647CB7F-FA3A-487E-B766-6BD0B8B37579}.Debug|Any CPU.Build.0 = Debug|Any CPU - {3647CB7F-FA3A-487E-B766-6BD0B8B37579}.Debug|Mixed Platforms.ActiveCfg = Debug|Any CPU - {3647CB7F-FA3A-487E-B766-6BD0B8B37579}.Debug|Mixed Platforms.Build.0 = Debug|Any CPU - 
{3647CB7F-FA3A-487E-B766-6BD0B8B37579}.Debug|x64.ActiveCfg = Debug|Any CPU - {3647CB7F-FA3A-487E-B766-6BD0B8B37579}.Debug|x64.Build.0 = Debug|Any CPU - {3647CB7F-FA3A-487E-B766-6BD0B8B37579}.Debug|x86.ActiveCfg = Debug|Any CPU - {3647CB7F-FA3A-487E-B766-6BD0B8B37579}.Debug|x86.Build.0 = Debug|Any CPU - {3647CB7F-FA3A-487E-B766-6BD0B8B37579}.Development|Any CPU.ActiveCfg = Debug|Any CPU - {3647CB7F-FA3A-487E-B766-6BD0B8B37579}.Development|Any CPU.Build.0 = Debug|Any CPU - {3647CB7F-FA3A-487E-B766-6BD0B8B37579}.Development|Mixed Platforms.ActiveCfg = Debug|Any CPU - {3647CB7F-FA3A-487E-B766-6BD0B8B37579}.Development|Mixed Platforms.Build.0 = Debug|Any CPU - {3647CB7F-FA3A-487E-B766-6BD0B8B37579}.Development|x64.ActiveCfg = Debug|Any CPU - {3647CB7F-FA3A-487E-B766-6BD0B8B37579}.Development|x64.Build.0 = Debug|Any CPU - {3647CB7F-FA3A-487E-B766-6BD0B8B37579}.Development|x86.ActiveCfg = Debug|Any CPU - {3647CB7F-FA3A-487E-B766-6BD0B8B37579}.Development|x86.Build.0 = Debug|Any CPU - {3647CB7F-FA3A-487E-B766-6BD0B8B37579}.Release|Any CPU.ActiveCfg = Release|Any CPU - {3647CB7F-FA3A-487E-B766-6BD0B8B37579}.Release|Any CPU.Build.0 = Release|Any CPU - {3647CB7F-FA3A-487E-B766-6BD0B8B37579}.Release|Mixed Platforms.ActiveCfg = Release|Any CPU - {3647CB7F-FA3A-487E-B766-6BD0B8B37579}.Release|Mixed Platforms.Build.0 = Release|Any CPU - {3647CB7F-FA3A-487E-B766-6BD0B8B37579}.Release|x64.ActiveCfg = Release|Any CPU - {3647CB7F-FA3A-487E-B766-6BD0B8B37579}.Release|x64.Build.0 = Release|Any CPU - {3647CB7F-FA3A-487E-B766-6BD0B8B37579}.Release|x86.ActiveCfg = Release|Any CPU - {3647CB7F-FA3A-487E-B766-6BD0B8B37579}.Release|x86.Build.0 = Release|Any CPU - {3647CB7F-FA3A-487E-B766-6BD0B8B37579}.Test|Any CPU.ActiveCfg = Debug|Any CPU - {3647CB7F-FA3A-487E-B766-6BD0B8B37579}.Test|Any CPU.Build.0 = Debug|Any CPU - {3647CB7F-FA3A-487E-B766-6BD0B8B37579}.Test|Mixed Platforms.ActiveCfg = Debug|Any CPU - {3647CB7F-FA3A-487E-B766-6BD0B8B37579}.Test|Mixed Platforms.Build.0 = Debug|Any CPU - {3647CB7F-FA3A-487E-B766-6BD0B8B37579}.Test|x64.ActiveCfg = Debug|Any CPU - {3647CB7F-FA3A-487E-B766-6BD0B8B37579}.Test|x64.Build.0 = Debug|Any CPU - {3647CB7F-FA3A-487E-B766-6BD0B8B37579}.Test|x86.ActiveCfg = Debug|Any CPU - {3647CB7F-FA3A-487E-B766-6BD0B8B37579}.Test|x86.Build.0 = Debug|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE EndGlobalSection GlobalSection(NestedProjects) = preSolution - {5AD03B2F-232A-4E0B-86FF-DC7F49146962} = {9336B43C-AC6E-4ADF-AB8D-F78B0AA92B97} - {7F56D629-DC24-43A5-A338-073687433E55} = {9336B43C-AC6E-4ADF-AB8D-F78B0AA92B97} - {C4EEDBD1-ACCE-47AF-83FA-99C5E5C33D0A} = {9336B43C-AC6E-4ADF-AB8D-F78B0AA92B97} - {FEE67387-9078-4691-8856-F12C597B0BBE} = {C4EEDBD1-ACCE-47AF-83FA-99C5E5C33D0A} {550988FD-F1FA-41D8-BE0F-00B4DE47D320} = {28A15609-73F6-4840-94AF-9BD7E1B06B22} {A6107DDC-8268-4902-A994-233B00480113} = {E277AD74-0AB8-4A8D-BA8A-4BE5408F12D2} - {D09313E9-0368-47BF-9900-D8296C2581BC} = {FDA39DF9-32DD-4180-AEDA-28AFF7A863AA} - {CE554269-9AFC-45FF-9A5B-2289938250CE} = {FDA39DF9-32DD-4180-AEDA-28AFF7A863AA} - {3647CB7F-FA3A-487E-B766-6BD0B8B37579} = {FDA39DF9-32DD-4180-AEDA-28AFF7A863AA} {BEEC317E-0984-4EA6-AF33-CBF37096E01B} = {AEDF99FB-885A-4243-8DFE-6A4E3F40D50D} EndGlobalSection GlobalSection(ExtensibilityGlobals) = postSolution diff --git a/HIC.DataManagementPlatform.sln.DotSettings b/HIC.DataManagementPlatform.sln.DotSettings index 038f11de76..2eba2c8d40 100644 --- a/HIC.DataManagementPlatform.sln.DotSettings +++ 
b/HIC.DataManagementPlatform.sln.DotSettings @@ -10,7 +10,22 @@ MEF RDMP SQL - UI + UI + True + True + True + True + True + True + True + True + True + True + True + True + True + True + True True True diff --git a/LIBRARYLICENSES b/LIBRARYLICENSES index 07545dc76f..3054425dac 100644 --- a/LIBRARYLICENSES +++ b/LIBRARYLICENSES @@ -275,7 +275,7 @@ Some icons from FAM FAM FAM are used under the Creative Commons Attribution 2.5 http://creativecommons.org/licenses/by/2.5/ FAM FAM FAM Icons are hosted at -http://www.famfamfam.com/lab/icons/silk/ +https://web.archive.org/web/20070824000227/http://www.famfamfam.com/lab/icons/silk/ -------------------------------------------------------------------------------- diff --git a/Plugins/Plugin.Test/Plugin.Test.csproj b/Plugins/Plugin.Test/Plugin.Test.csproj deleted file mode 100644 index dff567f695..0000000000 --- a/Plugins/Plugin.Test/Plugin.Test.csproj +++ /dev/null @@ -1,26 +0,0 @@ - - - - net6.0 - false - - embedded - true - - - - - - - - - all - runtime; build; native; contentfiles; analyzers - - - - - - - - diff --git a/Plugins/Plugin.Test/Plugin.Test.nuspec b/Plugins/Plugin.Test/Plugin.Test.nuspec deleted file mode 100644 index 44e38f08bc..0000000000 --- a/Plugins/Plugin.Test/Plugin.Test.nuspec +++ /dev/null @@ -1,59 +0,0 @@ - - - - HIC.RDMP.Plugin.Test - $version$ - HIC.RDMP.Plugin.Test - Health Informatics Centre, University of Dundee - Health Informatics Centre, University of Dundee - https://raw.githubusercontent.com/HicServices/RDMP/master/LICENSE - https://github.com/HicServices/RDMP - https://raw.githubusercontent.com/HicServices/RDMP/master/Application/ResearchDataManagementPlatform/Icon/main.png - false - Package for writing plugin tests - Copyright 2018-2019 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/Plugins/Plugin.UI/Plugin.UI.csproj b/Plugins/Plugin.UI/Plugin.UI.csproj deleted file mode 100644 index d162996fbe..0000000000 --- a/Plugins/Plugin.UI/Plugin.UI.csproj +++ /dev/null @@ -1,26 +0,0 @@ - - - - net6.0-windows7.0 - false - - embedded - true - - - - - - - - - all - runtime; build; native; contentfiles; analyzers - - - - - - - - diff --git a/Plugins/Plugin.UI/Plugin.UI.nuspec b/Plugins/Plugin.UI/Plugin.UI.nuspec deleted file mode 100644 index 3d7c630acc..0000000000 --- a/Plugins/Plugin.UI/Plugin.UI.nuspec +++ /dev/null @@ -1,62 +0,0 @@ - - - - HIC.RDMP.Plugin.UI - $version$ - HIC.RDMP.Plugin.UI - Health Informatics Centre, University of Dundee - Health Informatics Centre, University of Dundee - https://raw.githubusercontent.com/HicServices/RDMP/master/LICENSE - https://github.com/HicServices/RDMP - https://raw.githubusercontent.com/HicServices/RDMP/master/Application/ResearchDataManagementPlatform/Icon/main.png - false - UI package for plugin development - Copyright 2018-2019 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/Plugins/Plugin/Plugin.csproj b/Plugins/Plugin/Plugin.csproj deleted file mode 100644 index 1ea3510846..0000000000 --- a/Plugins/Plugin/Plugin.csproj +++ /dev/null @@ -1,26 +0,0 @@ - - - - net6.0 - false - - embedded - true - - - - - - - - - all - runtime; build; native; contentfiles; analyzers - - - - - - - - diff --git a/Plugins/Plugin/Plugin.nuspec b/Plugins/Plugin/Plugin.nuspec deleted file mode 100644 index ec2deb5cfc..0000000000 --- a/Plugins/Plugin/Plugin.nuspec +++ /dev/null @@ -1,50 +0,0 @@ - - - - HIC.RDMP.Plugin - $version$ - HIC.RDMP.Plugin - Health Informatics Centre, University of 
Dundee - Health Informatics Centre, University of Dundee - https://raw.githubusercontent.com/HicServices/RDMP/master/LICENSE - https://github.com/HicServices/RDMP - https://raw.githubusercontent.com/HicServices/RDMP/master/Application/ResearchDataManagementPlatform/Icon/main.png - false - Core package for plugin development - Copyright 2018-2019 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/Rdmp.Core.Tests/Caching/Integration/CachingHostTests.cs b/Rdmp.Core.Tests/Caching/Integration/CachingHostTests.cs index 11e161e32e..0d80ef878a 100644 --- a/Rdmp.Core.Tests/Caching/Integration/CachingHostTests.cs +++ b/Rdmp.Core.Tests/Caching/Integration/CachingHostTests.cs @@ -9,7 +9,7 @@ using System.IO; using System.Threading; using System.Threading.Tasks; -using Moq; +using NSubstitute; using NUnit.Framework; using Rdmp.Core.Caching; using Rdmp.Core.Curation; @@ -17,123 +17,121 @@ using Rdmp.Core.Curation.Data.Cache; using Rdmp.Core.Curation.Data.Pipelines; using Rdmp.Core.DataFlowPipeline; -using ReusableLibraryCode.Progress; +using Rdmp.Core.ReusableLibraryCode.Progress; using Tests.Common; -namespace Rdmp.Core.Tests.Caching.Integration +namespace Rdmp.Core.Tests.Caching.Integration; + +public class CachingHostTests : UnitTests { - public class CachingHostTests : UnitTests + /// + /// Makes sure that a cache progress pipeline will not be run if we are outside the permission window + /// + [Test] + public void CacheHostOutwithPermissionWindow() { - /// - /// Makes sure that a cache progress pipeline will not be run if we are outside the permission window - /// - [Test] - public void CacheHostOutwithPermissionWindow() - { - var rootDir = new DirectoryInfo(TestContext.CurrentContext.TestDirectory); - var testDir = rootDir.CreateSubdirectory("C"); - - if (testDir.Exists) - Directory.Delete(testDir.FullName, true); - - var loadDirectory = LoadDirectory.CreateDirectoryStructure(testDir, "Test"); - - - var cp = WhenIHaveA(); - var loadMetadata = cp.LoadProgress.LoadMetadata; - loadMetadata.LocationOfFlatFiles = loadDirectory.RootPath.FullName; - - // This feels a bit nasty, but quick and much better than having the test wait for an arbitrary time period. - var listener = new ExpectedNotificationListener("Download not permitted at this time, sleeping for 60 seconds"); - - cp.CacheFillProgress = DateTime.Now.AddDays(-1); - cp.PermissionWindow_ID = 1; - - - var permissionWindow = new PermissionWindow(Repository); - permissionWindow.RequiresSynchronousAccess = true; - permissionWindow.ID = 1; - permissionWindow.Name = "Test Permission Window"; - - - //Create a time period that we are outwith (1 hour ago to 30 minutes ago). 
- TimeSpan start = DateTime.Now.TimeOfDay.Subtract(new TimeSpan(0,1,0,0)); - TimeSpan stop = DateTime.Now.TimeOfDay.Subtract(new TimeSpan(0,0,30,0)); - permissionWindow.SetPermissionWindowPeriods(new List(new [] - { - new PermissionWindowPeriod( - (int)DateTime.Now.DayOfWeek, - start, - stop) - })); - permissionWindow.SaveToDatabase(); - - cp.PermissionWindow_ID = permissionWindow.ID; - cp.SaveToDatabase(); - - var dataFlowPipelineEngine = Mock.Of(); - - // set SetUp a factory stub to return our engine mock - var cacheHost = new CachingHost(Repository) - { - CacheProgress = cp - }; - - var stopTokenSource = new CancellationTokenSource(); - var abortTokenSource = new CancellationTokenSource(); - var cancellationToken = new GracefulCancellationToken(stopTokenSource.Token, abortTokenSource.Token); - - var task = Task.Run(() => cacheHost.Start(listener, cancellationToken), cancellationToken.CreateLinkedSource().Token); - - // Don't want to cancel before the DownloadUntilFinished loop starts and we receive the first "Download not permitted at this time, sleeping for 60 seconds" message - listener.ReceivedMessage += abortTokenSource.Cancel; - - try - { - task.Wait(); - } - catch (AggregateException e) - { - Assert.AreEqual(1, e.InnerExceptions.Count); - Assert.IsInstanceOf(typeof (TaskCanceledException), e.InnerExceptions[0], e.InnerExceptions[0].Message); - } - finally - { - testDir.Delete(true); - } - } + var rootDir = new DirectoryInfo(TestContext.CurrentContext.TestDirectory); + var testDir = rootDir.CreateSubdirectory("C"); + if (testDir.Exists) + Directory.Delete(testDir.FullName, true); - } + var loadDirectory = LoadDirectory.CreateDirectoryStructure(testDir, "Test"); - internal delegate void ReceivedMessageHandler(); - internal class ExpectedNotificationListener : IDataLoadEventListener - { - private readonly string _expectedNotificationString; - public event ReceivedMessageHandler ReceivedMessage; - protected virtual void OnReceivedMessage() - { - var handler = ReceivedMessage; - if (handler != null) handler(); - } + var cp = WhenIHaveA(); + var loadMetadata = cp.LoadProgress.LoadMetadata; + loadMetadata.LocationOfFlatFiles = loadDirectory.RootPath.FullName; + + // This feels a bit nasty, but quick and much better than having the test wait for an arbitrary time period. + var listener = new ExpectedNotificationListener("Download not permitted at this time, sleeping for 60 seconds"); + + cp.CacheFillProgress = DateTime.Now.AddDays(-1); + cp.PermissionWindow_ID = 1; - public ExpectedNotificationListener(string expectedNotificationString) + var permissionWindow = new PermissionWindow(Repository) { - _expectedNotificationString = expectedNotificationString; - } + RequiresSynchronousAccess = true, + ID = 1, + Name = "Test Permission Window" + }; + - public void OnNotify(object sender, NotifyEventArgs e) + //Create a time period that we are outwith (1 hour ago to 30 minutes ago). 
+ var start = DateTime.Now.TimeOfDay.Subtract(new TimeSpan(0, 1, 0, 0)); + var stop = DateTime.Now.TimeOfDay.Subtract(new TimeSpan(0, 0, 30, 0)); + permissionWindow.SetPermissionWindowPeriods(new List(new[] + { + new PermissionWindowPeriod( + (int)DateTime.Now.DayOfWeek, + start, + stop) + })); + permissionWindow.SaveToDatabase(); + + cp.PermissionWindow_ID = permissionWindow.ID; + cp.SaveToDatabase(); + + // set SetUp a factory stub to return our engine mock + var cacheHost = new CachingHost(Repository) { - Console.WriteLine(sender + " sent message: " + e.Message); + CacheProgress = cp + }; - if (e.Message.Equals(_expectedNotificationString)) - OnReceivedMessage(); - } + var stopTokenSource = new CancellationTokenSource(); + var abortTokenSource = new CancellationTokenSource(); + var cancellationToken = new GracefulCancellationToken(stopTokenSource.Token, abortTokenSource.Token); + + var task = Task.Run(() => cacheHost.Start(listener, cancellationToken), + cancellationToken.CreateLinkedSource().Token); - public void OnProgress(object sender, ProgressEventArgs e) + // Don't want to cancel before the DownloadUntilFinished loop starts and we receive the first "Download not permitted at this time, sleeping for 60 seconds" message + listener.ReceivedMessage += abortTokenSource.Cancel; + + try + { + task.Wait(); + } + catch (AggregateException e) { - throw new NotImplementedException(); + Assert.AreEqual(1, e.InnerExceptions.Count); + Assert.IsInstanceOf(typeof(TaskCanceledException), e.InnerExceptions[0], e.InnerExceptions[0].Message); } + finally + { + testDir.Delete(true); + } + } +} + +internal delegate void ReceivedMessageHandler(); + +internal class ExpectedNotificationListener : IDataLoadEventListener +{ + private readonly string _expectedNotificationString; + public event ReceivedMessageHandler ReceivedMessage; + + protected virtual void OnReceivedMessage() + { + var handler = ReceivedMessage; + handler?.Invoke(); + } + + public ExpectedNotificationListener(string expectedNotificationString) + { + _expectedNotificationString = expectedNotificationString; + } + + public void OnNotify(object sender, NotifyEventArgs e) + { + Console.WriteLine($"{sender} sent message: {e.Message}"); + + if (e.Message.Equals(_expectedNotificationString)) + OnReceivedMessage(); + } + + public void OnProgress(object sender, ProgressEventArgs e) + { + throw new NotImplementedException(); } } \ No newline at end of file diff --git a/Rdmp.Core.Tests/Caching/Integration/CustomDateCachingTests.cs b/Rdmp.Core.Tests/Caching/Integration/CustomDateCachingTests.cs index 39fe21d034..08c51665db 100644 --- a/Rdmp.Core.Tests/Caching/Integration/CustomDateCachingTests.cs +++ b/Rdmp.Core.Tests/Caching/Integration/CustomDateCachingTests.cs @@ -9,7 +9,7 @@ using System.IO; using System.Linq; using System.Threading.Tasks; -using Moq; +using NSubstitute; using NUnit.Framework; using Rdmp.Core.Caching; using Rdmp.Core.Caching.Layouts; @@ -22,176 +22,166 @@ using Rdmp.Core.Curation.Data.DataLoad; using Rdmp.Core.Curation.Data.Pipelines; using Rdmp.Core.DataFlowPipeline; -using ReusableLibraryCode.Checks; -using ReusableLibraryCode.Progress; +using Rdmp.Core.Repositories; +using Rdmp.Core.ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Progress; using Tests.Common; -namespace Rdmp.Core.Tests.Caching.Integration +namespace Rdmp.Core.Tests.Caching.Integration; + +public class CustomDateCachingTests : DatabaseTests +{ + [TestCase(false)] + [TestCase(true)] + public void FetchMultipleDays_Success(bool singleDay) + { + 
MEF.AddTypeToCatalogForTesting(typeof(TestCacheSource)); + MEF.AddTypeToCatalogForTesting(typeof(TestCacheDestination)); + + // Create a pipeline that will record the cache chunks + var sourceComponent = Substitute.For(); + sourceComponent.Class.Returns("CachingEngineTests.Integration.TestCacheSource"); + sourceComponent.GetClassAsSystemType().Returns(typeof(TestCacheSource)); + sourceComponent.GetAllArguments().Returns(Array.Empty()); + + var destinationComponent = Substitute.For(); + destinationComponent.Class.Returns("CachingEngineTests.Integration.TestCacheDestination"); + destinationComponent.GetClassAsSystemType().Returns(typeof(TestCacheDestination)); + destinationComponent.GetAllArguments().Returns(Array.Empty()); + + var pipeline = Substitute.For(); + pipeline.Repository.Returns(CatalogueRepository); + pipeline.Source.Returns(sourceComponent); + pipeline.Destination.Returns(destinationComponent); + pipeline.Repository.Returns(CatalogueRepository); + pipeline.PipelineComponents.Returns(Enumerable.Empty().OrderBy(o => o).ToList()); + + var projDir = + LoadDirectory.CreateDirectoryStructure(new DirectoryInfo(TestContext.CurrentContext.TestDirectory), "delme", + true); + + var lmd = Substitute.For(); + lmd.LocationOfFlatFiles = projDir.RootPath.FullName; + + var loadProgress = Substitute.For(); + loadProgress.OriginDate.Returns(new DateTime(2001, 01, 01)); + loadProgress.LoadMetadata.Returns(lmd); + + var cacheProgress = Substitute.For(); + cacheProgress.Pipeline_ID.Returns(-123); + cacheProgress.Pipeline.Returns(pipeline); + cacheProgress.ChunkPeriod.Returns(new TimeSpan(1, 0, 0, 0)); + cacheProgress.LoadProgress_ID.Returns(-1); + cacheProgress.Repository.Returns(CatalogueRepository); + cacheProgress.LoadProgress.Returns(loadProgress); + cacheProgress.CacheFillProgress.Returns(new DateTime(2020, 1, 1)); + + var caching = new CustomDateCaching(cacheProgress, RepositoryLocator.CatalogueRepository); + var startDate = new DateTime(2016, 1, 1); + var endDate = singleDay ? 
new DateTime(2016, 1, 1) : new DateTime(2016, 1, 3); + + var listener = new LoggingListener(); + var task = caching.Fetch(startDate, endDate, new GracefulCancellationToken(), listener); + task.Wait(); + + var dateNotifications = listener.Notifications.Where(n => n.Message.StartsWith("!!")) + .Select(n => n.Message.TrimStart('!')) + .ToArray(); + + //should not have been updated because this is a backfill request + Assert.AreEqual(new DateTime(2020, 1, 1), cacheProgress.CacheFillProgress); + + Assert.IsTrue(task.IsCompleted); + Assert.IsTrue(dateNotifications.Contains(startDate.ToString("g"))); + Assert.IsTrue(dateNotifications.Contains(endDate.ToString("g"))); + Assert.IsTrue(task.Status == TaskStatus.RanToCompletion); + + projDir.RootPath.Delete(true); + } +} + +public class LoggingListener : IDataLoadEventListener { - public class CustomDateCachingTests : DatabaseTests + public List Notifications { get; private set; } + + public LoggingListener() { - [TestCase(false)] - [TestCase(true)] - public void FetchMultipleDays_Success(bool singleDay) - { - var mef = RepositoryLocator.CatalogueRepository.MEF; - mef.AddTypeToCatalogForTesting(typeof(TestCacheSource)); - mef.AddTypeToCatalogForTesting(typeof(TestCacheDestination)); - - // Create a pipeline that will record the cache chunks - var sourceComponent = Mock.Of(x=> - x.Class == "CachingEngineTests.Integration.TestCacheSource" && - x.GetClassAsSystemType()==typeof (TestCacheSource) && - x.GetAllArguments()==new IArgument[0]); - - var destinationComponent = Mock.Of(x=> - x.Class == "CachingEngineTests.Integration.TestCacheDestination" && - x.GetClassAsSystemType()==typeof (TestCacheDestination) && - x.GetAllArguments()==new IArgument[0]); - - var pipeline = Mock.Of(p=> - p.Repository == CatalogueRepository && - p.Source==sourceComponent && - p.Destination==destinationComponent && - p.Repository == CatalogueRepository && - p.PipelineComponents==Enumerable.Empty().OrderBy(o => o).ToList()); - - var projDir = LoadDirectory.CreateDirectoryStructure(new DirectoryInfo(TestContext.CurrentContext.TestDirectory),"delme",true); - - var lmd = Mock.Of(); - lmd.LocationOfFlatFiles = projDir.RootPath.FullName; - - var loadProgress = Mock.Of(l=> - l.OriginDate == new DateTime(2001,01,01) && - l.LoadMetadata==lmd); - - var cacheProgress = Mock.Of(c=> - - c.Pipeline_ID == -123 && - c.Pipeline==pipeline && - c.ChunkPeriod == new TimeSpan(1, 0, 0, 0) && - c.LoadProgress_ID == -1 && - c.Repository == CatalogueRepository && - c.LoadProgress ==loadProgress && - c.CacheFillProgress == new DateTime(2020, 1, 1)); - - var caching = new CustomDateCaching(cacheProgress, RepositoryLocator.CatalogueRepository); - var startDate = new DateTime(2016, 1, 1); - var endDate = singleDay? 
new DateTime(2016, 1, 1): new DateTime(2016, 1, 3); - - var listener = new LoggingListener(); - var task = caching.Fetch(startDate, endDate, new GracefulCancellationToken(), listener); - task.Wait(); - - var dateNotifications = listener.Notifications.Where(n => n.Message.StartsWith("!!")) - .Select(n => n.Message.TrimStart('!')) - .ToArray(); - - //should not have been updated because this is a backfill request - Assert.AreEqual(new DateTime(2020,1,1),cacheProgress.CacheFillProgress); - - Assert.IsTrue(task.IsCompleted); - Assert.IsTrue(dateNotifications.Contains(startDate.ToString("g"))); - Assert.IsTrue(dateNotifications.Contains(endDate.ToString("g"))); - Assert.IsTrue(task.Status == TaskStatus.RanToCompletion); - - projDir.RootPath.Delete(true); - } + Notifications = new List(); } - public class LoggingListener : IDataLoadEventListener + public void OnNotify(object sender, NotifyEventArgs e) { - public List Notifications { get; private set; } + Notifications.Add(e); + } + + public void OnProgress(object sender, ProgressEventArgs e) + { + } +} + +public class TestCacheChunk : ICacheChunk +{ + public ICacheFetchRequest Request { get; private set; } + + public TestCacheChunk(DateTime fetchDate) + { + Request = new CacheFetchRequest(null, fetchDate) { ChunkPeriod = new TimeSpan(1, 0, 0) }; + } +} - public LoggingListener() - { - Notifications = new List(); - } +public class TestCacheSource : CacheSource +{ + public override TestCacheChunk DoGetChunk(ICacheFetchRequest request, IDataLoadEventListener listener, + GracefulCancellationToken cancellationToken) + { + var c = new TestCacheChunk(Request.Start); + listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, $"!!{request.Start:g}")); + return c; + } - public void OnNotify(object sender, NotifyEventArgs e) - { - Notifications.Add(e); - } + public override void Dispose(IDataLoadEventListener listener, Exception pipelineFailureExceptionIfAny) + { + } - public void OnProgress(object sender, ProgressEventArgs e) - { - } + public override void Abort(IDataLoadEventListener listener) + { } - public class TestCacheChunk : ICacheChunk + public override TestCacheChunk TryGetPreview() => throw new NotImplementedException(); + + public override void Check(ICheckNotifier notifier) { - public ICacheFetchRequest Request { get; private set; } + } +} - public TestCacheChunk(DateTime fetchDate) - { - Request = new CacheFetchRequest(null,fetchDate){ChunkPeriod = new TimeSpan(1,0,0)}; - } +public class TestCacheDestination : IPluginDataFlowComponent, IDataFlowDestination, + ICacheFileSystemDestination +{ + public static TestCacheChunk ProcessPipelineData(TestCacheChunk toProcess, IDataLoadEventListener listener, + GracefulCancellationToken cancellationToken) => toProcess; + + public ICacheChunk ProcessPipelineData(ICacheChunk toProcess, IDataLoadEventListener listener, + GracefulCancellationToken cancellationToken) => + ProcessPipelineData((TestCacheChunk)toProcess, listener, cancellationToken); - + public void Dispose(IDataLoadEventListener listener, Exception pipelineFailureExceptionIfAny) + { } - public class TestCacheSource : CacheSource + public void Abort(IDataLoadEventListener listener) { - public override TestCacheChunk DoGetChunk(ICacheFetchRequest request, IDataLoadEventListener listener, GracefulCancellationToken cancellationToken) - { - var c = new TestCacheChunk(Request.Start); - listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, "!!" 
+ request.Start.ToString("g"))); - return c; - } - - public override void Dispose(IDataLoadEventListener listener, Exception pipelineFailureExceptionIfAny) - { - } - - public override void Abort(IDataLoadEventListener listener) - { - } - - public override TestCacheChunk TryGetPreview() - { - throw new NotImplementedException(); - } - - public override void Check(ICheckNotifier notifier) - { - } } - public class TestCacheDestination : IPluginDataFlowComponent, IDataFlowDestination, ICacheFileSystemDestination + public void Check(ICheckNotifier notifier) { - public TestCacheChunk ProcessPipelineData(TestCacheChunk toProcess, IDataLoadEventListener listener, GracefulCancellationToken cancellationToken) - { - return toProcess; - } - - public ICacheChunk ProcessPipelineData(ICacheChunk toProcess, IDataLoadEventListener listener, - GracefulCancellationToken cancellationToken) - { - return ProcessPipelineData((TestCacheChunk) toProcess, listener, cancellationToken); - } - - public void Dispose(IDataLoadEventListener listener, Exception pipelineFailureExceptionIfAny) - { - } - - public void Abort(IDataLoadEventListener listener) - { - } - - public void Check(ICheckNotifier notifier) - { - } - - private ILoadDirectory project; - public void PreInitialize(ILoadDirectory value, IDataLoadEventListener listener) - { - project = value; - } - - public ICacheLayout CreateCacheLayout() - { - return new BasicCacheLayout(project.Cache); - } + } + private ILoadDirectory project; + + public void PreInitialize(ILoadDirectory value, IDataLoadEventListener listener) + { + project = value; } + + public ICacheLayout CreateCacheLayout() => new BasicCacheLayout(project.Cache); } \ No newline at end of file diff --git a/Rdmp.Core.Tests/Caching/Unit/PipelineExecutionTests.cs b/Rdmp.Core.Tests/Caching/Unit/PipelineExecutionTests.cs index 2dd3072844..ff5e813575 100644 --- a/Rdmp.Core.Tests/Caching/Unit/PipelineExecutionTests.cs +++ b/Rdmp.Core.Tests/Caching/Unit/PipelineExecutionTests.cs @@ -5,100 +5,78 @@ // You should have received a copy of the GNU General Public License along with RDMP. If not, see . 
using System; -using System.Collections.Generic; -using Moq; +using NSubstitute; using NUnit.Framework; using Rdmp.Core.Caching.Pipeline; -using Rdmp.Core.Curation.Data; using Rdmp.Core.Curation.Data.Pipelines; using Rdmp.Core.DataFlowPipeline; -using ReusableLibraryCode.Progress; +using Rdmp.Core.ReusableLibraryCode.Progress; -namespace Rdmp.Core.Tests.Caching.Unit +namespace Rdmp.Core.Tests.Caching.Unit; + +[Category("Unit")] +public class PipelineExecutionTests { - [Category("Unit")] - public class PipelineExecutionTests + [Ignore("Tests locking we don't actually have")] + public static void TestSerialPipelineExecution() + { + // set SetUp two engines, one with a locked cache progress/load schedule + // run the serial execution and ensure that only one engine had its 'ExecutePipeline' method called + var engine1 = Substitute.For(); + + + var engine2 = Substitute.For(); + + var tokenSource = new GracefulCancellationTokenSource(); + var listener = ThrowImmediatelyDataLoadEventListener.Quiet; + + // set SetUp the engine map + // set SetUp the lock provider + + // create the execution object + var pipelineExecutor = new SerialPipelineExecution(); + + // Act + pipelineExecutor.Execute(new[] { engine1, engine2 }, tokenSource.Token, listener); + + // engine1 should have been executed once + engine1.Received(1).ExecutePipeline(Arg.Any()); + + // engine2 should also have been run (locking isn't a thing any more) + engine2.Received(1).ExecutePipeline(Arg.Any()); + } + + [Ignore("Tests locking we don't actually have")] + public static void TestRoundRobinPipelineExecution() { - [Test] - public void TestSerialPipelineExecution() - { - // set SetUp two engines, one with a locked cache progress/load schedule - // run the serial execution and ensure that only one engine had its 'ExecutePipeline' method called - var engine1 = new Mock(); - - - var engine2 = new Mock(); - - var tokenSource = new GracefulCancellationTokenSource(); - var listener = new ThrowImmediatelyDataLoadEventListener(); - - // set SetUp the engine map - var loadProgress1 = Mock.Of(); - var loadProgress2 = Mock.Of(); - - // set SetUp the lock provider - var engineMap = new Dictionary - { - {engine1.Object, loadProgress1}, - {engine2.Object, loadProgress2} - }; - - // create the execution object - var pipelineExecutor = new SerialPipelineExecution(); - - // Act - pipelineExecutor.Execute(new [] {engine1.Object, engine2.Object}, tokenSource.Token, listener); - - // engine1 should have been executed once - engine1.Verify(e=>e.ExecutePipeline(It.IsAny()),Times.Once); - - // engine2 should also have been run (locking isn't a thing anymore) - engine2.Verify(e=>e.ExecutePipeline(It.IsAny()),Times.Once); - } - - [Test] - public void TestRoundRobinPipelineExecution() - { - // set SetUp two engines, one with a locked cache progress/load schedule - // run the serial execution and ensure that only one engine had its 'ExecutePipeline' method called - var engine1 = new Mock(); - var engine2 = new Mock(); - var tokenSource = new GracefulCancellationTokenSource(); - var listener = new ThrowImmediatelyDataLoadEventListener(); - - // first time both engines return that they have more data, second time they are both complete - engine1.SetupSequence(engine => engine.ExecuteSinglePass(It.IsAny())) - .Returns(true) - .Returns(false) - .Throws(); - - engine2.SetupSequence(engine => engine.ExecuteSinglePass(It.IsAny())) - .Returns(true) - .Returns(false) - .Throws(); - - // set SetUp the engine map - var loadProgress1 = Mock.Of(); - var loadProgress2 = 
Mock.Of(); - - // set SetUp the lock provider - var engineMap = new Dictionary - { - {engine1.Object, loadProgress1}, - {engine2.Object, loadProgress2} - }; - // create the execution object - var pipelineExecutor = new RoundRobinPipelineExecution(); - - // Act - pipelineExecutor.Execute(new[] { engine1.Object, engine2.Object }, tokenSource.Token, listener); - - // Assert - // engine1 should have been executed once - engine1.Verify(); - - // engine2 should not have been executed as it is locked - engine1.Verify(); - } + // set SetUp two engines, one with a locked cache progress/load schedule + // run the serial execution and ensure that only one engine had its 'ExecutePipeline' method called + var engine1 = Substitute.For(); + var engine2 = Substitute.For(); + var tokenSource = new GracefulCancellationTokenSource(); + var listener = ThrowImmediatelyDataLoadEventListener.Quiet; + + // first time both engines return that they have more data, second time they are both complete + engine1.ExecuteSinglePass(Arg.Any()) + .Returns(true, + false + ); + + engine2.ExecuteSinglePass(Arg.Any()) + .Returns(true, + false); + + // create the execution object + var pipelineExecutor = new RoundRobinPipelineExecution(); + + // Act + pipelineExecutor.Execute(new[] { engine1, engine2 }, tokenSource.Token, listener); + + // Assert + // engine1 should have been executed once + engine1.Received(1); + + // engine2 should not have been executed as it is locked, but we don't actually have locks. + engine1.Received(1); } } \ No newline at end of file diff --git a/Rdmp.Core.Tests/Caching/Unit/TestIFileDataFlowComponents.cs b/Rdmp.Core.Tests/Caching/Unit/TestIFileDataFlowComponents.cs index 8ccd50e539..776d933c7a 100644 --- a/Rdmp.Core.Tests/Caching/Unit/TestIFileDataFlowComponents.cs +++ b/Rdmp.Core.Tests/Caching/Unit/TestIFileDataFlowComponents.cs @@ -11,87 +11,85 @@ using Rdmp.Core.Caching.Layouts; using Rdmp.Core.Curation.Data.Cache; using Rdmp.Core.DataFlowPipeline.Requirements; -using ReusableLibraryCode.Progress; +using Rdmp.Core.ReusableLibraryCode.Progress; -namespace Rdmp.Core.Tests.Caching.Unit +namespace Rdmp.Core.Tests.Caching.Unit; + +public interface IFileDataFlowDestination : TestIFileDataFlowComponent { - public interface IFileDataFlowDestination : TestIFileDataFlowComponent - { - } +} - public interface TestIFileDataFlowComponent - { - IList ProcessPipelineData(IList toProcess, IDataLoadEventListener listener); - void Dispose(IDataLoadEventListener listener); - } +public interface TestIFileDataFlowComponent +{ + IList ProcessPipelineData(IList toProcess, IDataLoadEventListener listener); + void Dispose(IDataLoadEventListener listener); +} + +// this is really a destination? +public class MoveToDirectory : IFileDataFlowDestination +{ + public DirectoryInfo DestinationDirectory { get; set; } - // this is really a destination? 
- public class MoveToDirectory : IFileDataFlowDestination + public IList ProcessPipelineData(IList toProcess, IDataLoadEventListener listener) { - public DirectoryInfo DestinationDirectory { get; set; } + if (DestinationDirectory.Parent == null) + throw new Exception("The destination directory has no parent so a new set of filepaths cannot be created."); - public IList ProcessPipelineData(IList toProcess, IDataLoadEventListener listener) + var movedFiles = new List(); + foreach (var fileInfo in toProcess.ToList()) { - if (DestinationDirectory.Parent == null) - throw new Exception("The destination directory has no parent so a new set of filepaths cannot be created."); - - var movedFiles = new List(); - foreach (var fileInfo in toProcess.ToList()) - { - var filePath = Path.Combine(DestinationDirectory.Parent.FullName, "...?"); - fileInfo.MoveTo(filePath); - movedFiles.Add(new FileInfo(filePath)); - } - - return movedFiles; + var filePath = Path.Combine(DestinationDirectory.Parent.FullName, "...?"); + fileInfo.MoveTo(filePath); + movedFiles.Add(new FileInfo(filePath)); } - public void Dispose(IDataLoadEventListener listener) - { - throw new System.NotImplementedException(); - } + return movedFiles; } - public class FilesystemCacheDestination : IFileDataFlowDestination, IPipelineRequirement, IPipelineRequirement + public void Dispose(IDataLoadEventListener listener) { - public CacheProgress CacheProgress { get; set; } - public DirectoryInfo CacheDirectory { get; set; } - - public IList ProcessPipelineData(IList toProcess, IDataLoadEventListener listener) - { - var layout = new ZipCacheLayoutOnePerDay(CacheDirectory, new NoSubdirectoriesCachePathResolver()); - - var moveComponent = new MoveToDirectory - { - DestinationDirectory = layout.GetLoadCacheDirectory(listener) - }; - - moveComponent.ProcessPipelineData(toProcess, listener); + throw new NotImplementedException(); + } +} - // would be in CacheLayout, with it being a component - // ? where does the date come from? - // either going to be CacheFillProgress or CacheFillProgress + period, depending on fetch logic - if (CacheProgress.CacheFillProgress == null) - throw new Exception("Should throw, but currently on first cache it is valid for the CacheFIllProgress to be null"); - +public class FilesystemCacheDestination : IFileDataFlowDestination, IPipelineRequirement, + IPipelineRequirement +{ + public CacheProgress CacheProgress { get; set; } + public DirectoryInfo CacheDirectory { get; set; } - return toProcess; - } + public IList ProcessPipelineData(IList toProcess, IDataLoadEventListener listener) + { + var layout = new ZipCacheLayoutOnePerDay(CacheDirectory, new NoSubdirectoriesCachePathResolver()); - public void Dispose(IDataLoadEventListener listener) + var moveComponent = new MoveToDirectory { - throw new NotImplementedException(); - } + DestinationDirectory = layout.GetLoadCacheDirectory(listener) + }; + + moveComponent.ProcessPipelineData(toProcess, listener); + + // would be in CacheLayout, with it being a component + // ? where does the date come from? + // either going to be CacheFillProgress or CacheFillProgress + period, depending on fetch logic + return CacheProgress.CacheFillProgress == null + ? 
throw new Exception( + "Should throw, but currently on first cache it is valid for the CacheFIllProgress to be null") + : toProcess; + } - public void PreInitialize(CacheProgress cacheProgress, IDataLoadEventListener listener) - { - CacheProgress = cacheProgress; - } + public void Dispose(IDataLoadEventListener listener) + { + throw new NotImplementedException(); + } - public void PreInitialize(DirectoryInfo cacheDirectory, IDataLoadEventListener listener) - { - CacheDirectory = cacheDirectory; - } + public void PreInitialize(CacheProgress cacheProgress, IDataLoadEventListener listener) + { + CacheProgress = cacheProgress; + } + public void PreInitialize(DirectoryInfo cacheDirectory, IDataLoadEventListener listener) + { + CacheDirectory = cacheDirectory; } } \ No newline at end of file diff --git a/Rdmp.Core.Tests/Caching/Unit/ZipTests.cs b/Rdmp.Core.Tests/Caching/Unit/ZipTests.cs index c61dc13e85..21d152245e 100644 --- a/Rdmp.Core.Tests/Caching/Unit/ZipTests.cs +++ b/Rdmp.Core.Tests/Caching/Unit/ZipTests.cs @@ -12,60 +12,72 @@ using Rdmp.Core.Caching.Layouts; using Rdmp.Core.Caching.Pipeline.Destinations; using Rdmp.Core.Curation.Data.DataLoad; -using ReusableLibraryCode.Progress; +using Rdmp.Core.ReusableLibraryCode.Progress; -namespace Rdmp.Core.Tests.Caching.Unit +namespace Rdmp.Core.Tests.Caching.Unit; + +internal class ZipTests { - class ZipTests + private class ZipTestLayout : CacheLayout { - private class ZipTestLayout : CacheLayout + public ZipTestLayout(DirectoryInfo dir, string dateFormat, CacheArchiveType cacheArchiveType, + CacheFileGranularity granularity, ILoadCachePathResolver resolver) : base(dir, dateFormat, cacheArchiveType, + granularity, resolver) { - public ZipTestLayout(DirectoryInfo dir, string dateFormat, CacheArchiveType cacheArchiveType, - CacheFileGranularity granularity, ILoadCachePathResolver resolver) : base(dir, dateFormat, cacheArchiveType, - granularity, resolver) - { - } + } - public new void ArchiveFiles(FileInfo[] fi, DateTime dt, IDataLoadEventListener l) - { - base.ArchiveFiles(fi,dt,l); - } + public new void ArchiveFiles(FileInfo[] fi, DateTime dt, IDataLoadEventListener l) + { + base.ArchiveFiles(fi, dt, l); } + } - [Test] - public void CreateAndUpdateZip() + [Test] + public void CreateAndUpdateZip() + { + var _dir = TestContext.CurrentContext.WorkDirectory; + var _zt = new ZipTestLayout(new DirectoryInfo(_dir), "yyyy-MM-dd", CacheArchiveType.Zip, + CacheFileGranularity.Hour, new NoSubdirectoriesCachePathResolver()); + var _listener = ThrowImmediatelyDataLoadEventListener.Quiet; + var when = DateTime.Now; + var targetzip = _zt.GetArchiveFileInfoForDate(when, _listener); + var files = new List(); + + // First create a zip file with one item in + File.Delete(targetzip.FullName); + files.Add(new FileInfo(Path.Combine(_dir, Path.GetRandomFileName()))); + using (var sw = new StreamWriter(files[0].FullName)) { - var _dir = TestContext.CurrentContext.WorkDirectory; - var _zt = new ZipTestLayout(new DirectoryInfo(_dir), "yyyy-MM-dd", CacheArchiveType.Zip, CacheFileGranularity.Hour, new NoSubdirectoriesCachePathResolver()); - var _listener = new ThrowImmediatelyDataLoadEventListener(); - var when = DateTime.Now; - var targetzip = _zt.GetArchiveFileInfoForDate(when, _listener); - List files=new List(); + sw.WriteLine("Example data file"); + } - // First create a zip file with one item in - File.Delete(targetzip.FullName); - files.Add(new FileInfo(Path.Combine(_dir, Path.GetRandomFileName()))); - using (var sw=new StreamWriter(files[0].FullName)) - 
sw.WriteLine("Example data file"); - _zt.ArchiveFiles(files.ToArray(), when, _listener); - using (var zip = ZipFile.Open(targetzip.FullName, ZipArchiveMode.Read)) - Assert.True(zip.Entries.Count == 1); + _zt.ArchiveFiles(files.ToArray(), when, _listener); + using (var zip = ZipFile.Open(targetzip.FullName, ZipArchiveMode.Read)) + { + Assert.True(zip.Entries.Count == 1); + } - // Create a second file and add that to the zip too - files.Add(new FileInfo(Path.Combine(_dir,Path.GetRandomFileName()))); - using (var sw = new StreamWriter(files[1].FullName)) - sw.WriteLine("Another example data file"); - _zt.ArchiveFiles(files.ToArray(), when, _listener); - using (var zip= ZipFile.Open(targetzip.FullName,ZipArchiveMode.Read)) - Assert.True(zip.Entries.Count==2); + // Create a second file and add that to the zip too + files.Add(new FileInfo(Path.Combine(_dir, Path.GetRandomFileName()))); + using (var sw = new StreamWriter(files[1].FullName)) + { + sw.WriteLine("Another example data file"); + } - // Re-add just the first file: resulting zip should still contain both files - _zt.ArchiveFiles(files.GetRange(0,1).ToArray(), when, _listener); - using (var zip = ZipFile.Open(targetzip.FullName, ZipArchiveMode.Read)) - Assert.True(zip.Entries.Count == 2); + _zt.ArchiveFiles(files.ToArray(), when, _listener); + using (var zip = ZipFile.Open(targetzip.FullName, ZipArchiveMode.Read)) + { + Assert.True(zip.Entries.Count == 2); + } - files.ForEach( s => File.Delete(s.FullName)); - File.Delete(targetzip.FullName); + // Re-add just the first file: resulting zip should still contain both files + _zt.ArchiveFiles(files.GetRange(0, 1).ToArray(), when, _listener); + using (var zip = ZipFile.Open(targetzip.FullName, ZipArchiveMode.Read)) + { + Assert.True(zip.Entries.Count == 2); } + + files.ForEach(s => File.Delete(s.FullName)); + File.Delete(targetzip.FullName); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/CohortCommitting/CommitCohortExample.cs b/Rdmp.Core.Tests/CohortCommitting/CommitCohortExample.cs index 51333dfe41..3c521dca15 100644 --- a/Rdmp.Core.Tests/CohortCommitting/CommitCohortExample.cs +++ b/Rdmp.Core.Tests/CohortCommitting/CommitCohortExample.cs @@ -14,82 +14,86 @@ using Rdmp.Core.CohortCommitting.Pipeline.Destinations.IdentifierAllocation; using Rdmp.Core.DataExport.Data; using Rdmp.Core.DataFlowPipeline; -using ReusableLibraryCode.Checks; -using ReusableLibraryCode.Progress; +using Rdmp.Core.ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Progress; using Tests.Common; -namespace Rdmp.Core.Tests.CohortCommitting +namespace Rdmp.Core.Tests.CohortCommitting; + +internal class CommitCohortExample : DatabaseTests { - class CommitCohortExample: DatabaseTests + [TestCase(DatabaseType.MicrosoftSQLServer, "varchar(10)")] + [TestCase(DatabaseType.MySql, "varchar(10)")] + [TestCase(DatabaseType.Oracle, "varchar2(10)")] + public void CommitCohortExampleTest(DatabaseType dbType, string privateDataType) { - [TestCase(DatabaseType.MicrosoftSQLServer,"varchar(10)")] - [TestCase(DatabaseType.MySql,"varchar(10)")] - [TestCase(DatabaseType.Oracle,"varchar2(10)")] - public void CommitCohortExampleTest(DatabaseType dbType,string privateDataType) - { - RunBlitzDatabases(RepositoryLocator); + RunBlitzDatabases(RepositoryLocator); - //find the test server (where we will create the store schema) - var db = GetCleanedServer(dbType); + //find the test server (where we will create the store schema) + var db = GetCleanedServer(dbType); - //create the cohort store table - var wizard = new 
CreateNewCohortDatabaseWizard(db,CatalogueRepository,DataExportRepository,false); - var privateColumn = new PrivateIdentifierPrototype("chi", privateDataType); - var externalCohortTable = wizard.CreateDatabase(privateColumn,new ThrowImmediatelyCheckNotifier()); + //create the cohort store table + var wizard = new CreateNewCohortDatabaseWizard(db, CatalogueRepository, DataExportRepository, false); + var privateColumn = new PrivateIdentifierPrototype("chi", privateDataType); + var externalCohortTable = wizard.CreateDatabase(privateColumn, ThrowImmediatelyCheckNotifier.Quiet); - Assert.AreEqual(dbType,externalCohortTable.DatabaseType); + Assert.AreEqual(dbType, externalCohortTable.DatabaseType); - //create a project into which we want to import a cohort - var project = new Project(DataExportRepository, "MyProject"); - project.ProjectNumber = 500; - project.SaveToDatabase(); - - //create a description of the cohort we are importing - var definition = new CohortDefinition(null, "MyCohort", 1, 500, externalCohortTable); + //create a project into which we want to import a cohort + var project = new Project(DataExportRepository, "MyProject") + { + ProjectNumber = 500 + }; + project.SaveToDatabase(); - //create our cohort (normally this would be read from a file or be the results of cohort identification query) - var dt = new DataTable(); - dt.Columns.Add("chi"); - dt.Rows.Add("0101010101"); - dt.Rows.Add("0202020202"); + //create a description of the cohort we are importing + var definition = new CohortDefinition(null, "MyCohort", 1, 500, externalCohortTable); - //Create a pipeline (we only need the destination) - var pipelineDestination = new BasicCohortDestination(); + //create our cohort (normally this would be read from a file or be the results of cohort identification query) + var dt = new DataTable(); + dt.Columns.Add("chi"); + dt.Rows.Add("0101010101"); + dt.Rows.Add("0202020202"); + //Create a pipeline (we only need the destination) + var pipelineDestination = new BasicCohortDestination + { //choose how to allocate the anonymous release identifiers - pipelineDestination.ReleaseIdentifierAllocator = typeof(ProjectConsistentGuidReleaseIdentifierAllocator); + ReleaseIdentifierAllocator = typeof(ProjectConsistentGuidReleaseIdentifierAllocator) + }; - //initialize the destination - pipelineDestination.PreInitialize( - new CohortCreationRequest(project, definition, DataExportRepository,"A cohort created in an example unit test"), - new ThrowImmediatelyDataLoadEventListener()); + //initialize the destination + pipelineDestination.PreInitialize( + new CohortCreationRequest(project, definition, DataExportRepository, + "A cohort created in an example unit test"), + ThrowImmediatelyDataLoadEventListener.Quiet); - //process the cohort data table - pipelineDestination.ProcessPipelineData(dt,new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()); + //process the cohort data table + pipelineDestination.ProcessPipelineData(dt, ThrowImmediatelyDataLoadEventListener.Quiet, + new GracefulCancellationToken()); - //there should be no cohorts yet - Assert.IsEmpty(DataExportRepository.GetAllObjects()); + //there should be no cohorts yet + Assert.IsEmpty(DataExportRepository.GetAllObjects()); - //dispose of the pipeline - pipelineDestination.Dispose(new ThrowImmediatelyDataLoadEventListener(), null); + //dispose of the pipeline + pipelineDestination.Dispose(ThrowImmediatelyDataLoadEventListener.Quiet, null); - //now there should be one - ExtractableCohort cohort = 
DataExportRepository.GetAllObjects().Single(); + //now there should be one + var cohort = DataExportRepository.GetAllObjects().Single(); - //make sure we are all on the same page about what the DBMS type is (nothing cached etc) - Assert.AreEqual(dbType, cohort.ExternalCohortTable.DatabaseType); - Assert.AreEqual(dbType,cohort.GetQuerySyntaxHelper().DatabaseType); + //make sure we are all on the same page about what the DBMS type is (nothing cached etc) + Assert.AreEqual(dbType, cohort.ExternalCohortTable.DatabaseType); + Assert.AreEqual(dbType, cohort.GetQuerySyntaxHelper().DatabaseType); - Assert.AreEqual(500,cohort.ExternalProjectNumber); - Assert.AreEqual(2,cohort.CountDistinct); + Assert.AreEqual(500, cohort.ExternalProjectNumber); + Assert.AreEqual(2, cohort.CountDistinct); - var tbl = externalCohortTable.DiscoverCohortTable(); - Assert.AreEqual(2,tbl.GetRowCount()); - var dtInDatabase = tbl.GetDataTable(); - - //guid will be something like "6fb23de5-e8eb-46eb-84b5-dd368da21073" - Assert.AreEqual(36,dtInDatabase.Rows[0]["ReleaseId"].ToString().Length); - Assert.AreEqual("0101010101",dtInDatabase.Rows[0]["chi"]); - } + var tbl = externalCohortTable.DiscoverCohortTable(); + Assert.AreEqual(2, tbl.GetRowCount()); + var dtInDatabase = tbl.GetDataTable(); + + //guid will be something like "6fb23de5-e8eb-46eb-84b5-dd368da21073" + Assert.AreEqual(36, dtInDatabase.Rows[0]["ReleaseId"].ToString().Length); + Assert.AreEqual("0101010101", dtInDatabase.Rows[0]["chi"]); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/CohortCommitting/CreateNewCohortDatabaseWizardTests.cs b/Rdmp.Core.Tests/CohortCommitting/CreateNewCohortDatabaseWizardTests.cs index 4cc65004b1..6257d4558c 100644 --- a/Rdmp.Core.Tests/CohortCommitting/CreateNewCohortDatabaseWizardTests.cs +++ b/Rdmp.Core.Tests/CohortCommitting/CreateNewCohortDatabaseWizardTests.cs @@ -17,249 +17,255 @@ using Rdmp.Core.DataExport.Data; using Rdmp.Core.DataFlowPipeline; using Rdmp.Core.Providers; -using ReusableLibraryCode.Checks; -using ReusableLibraryCode.Progress; -using ReusableLibraryCode.Settings; +using Rdmp.Core.ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Progress; +using Rdmp.Core.ReusableLibraryCode.Settings; using Tests.Common; -namespace Rdmp.Core.Tests.CohortCommitting +namespace Rdmp.Core.Tests.CohortCommitting; + +public class CreateNewCohortDatabaseWizardTests : DatabaseTests { - public class CreateNewCohortDatabaseWizardTests:DatabaseTests + private Catalogue _cata1; + private Catalogue _cata2; + private TableInfo _t1; + private TableInfo _t2; + private ColumnInfo _c1; + private ColumnInfo _c2; + private CatalogueItem _ci1; + private CatalogueItem _ci2; + + private ExtractionInformation _extractionInfo1; + private ExtractionInformation _extractionInfo2; + + [SetUp] + protected override void SetUp() { - private Catalogue _cata1; - private Catalogue _cata2; - private TableInfo _t1; - private TableInfo _t2; - private ColumnInfo _c1; - private ColumnInfo _c2; - private CatalogueItem _ci1; - private CatalogueItem _ci2; - - private ExtractionInformation _extractionInfo1; - private ExtractionInformation _extractionInfo2; - - [SetUp] - protected override void SetUp() - { - base.SetUp(); - - RunBlitzDatabases(RepositoryLocator); - - _cata1 = new Catalogue(CatalogueRepository, "Dataset1"); - _cata2 = new Catalogue(CatalogueRepository, "Dataset2"); - - _t1 = new TableInfo(CatalogueRepository, "T1"); - _t2 = new TableInfo(CatalogueRepository, "T2"); - - _c1 = new ColumnInfo(CatalogueRepository, 
"PrivateIdentifierA", "varchar(10)", _t1); - _c2 = new ColumnInfo(CatalogueRepository, "PrivateIdentifierB", "int", _t2); - - _ci1 = new CatalogueItem(CatalogueRepository, _cata1, "PrivateIdentifierA"); - _ci2 = new CatalogueItem(CatalogueRepository, _cata2, "PrivateIdentifierB"); - - _extractionInfo1 = new ExtractionInformation(CatalogueRepository, _ci1, _c1, _c1.ToString()); - _extractionInfo2 = new ExtractionInformation(CatalogueRepository, _ci2, _c2, _c2.ToString()); - - cohortDatabaseName = TestDatabaseNames.GetConsistentName("Tests_CreateCohortDatabaseWizard"); - } - - private string cohortDatabaseName; - - - [Test] - public void TestMissingColumnInfos() - { - _extractionInfo1.IsExtractionIdentifier = true; - _extractionInfo1.SaveToDatabase(); - CreateNewCohortDatabaseWizard wizard = new CreateNewCohortDatabaseWizard(null,CatalogueRepository, DataExportRepository,false); - - //it finds it! - Assert.IsTrue(wizard.GetPrivateIdentifierCandidates().Any(prototype => prototype.RuntimeName.Equals("PrivateIdentifierA"))); - - //delete the column info to make it a missing reference - _c1.DeleteInDatabase(); - - //now it should gracefully skip over it - Assert.IsFalse(wizard.GetPrivateIdentifierCandidates() - .Any(prototype => prototype.RuntimeName.Equals("PrivateIdentifierA"))); - - } - - [Test] - public void ProposePrivateIdentifierDatatypes() - { - CreateNewCohortDatabaseWizard wizard = new CreateNewCohortDatabaseWizard(null,CatalogueRepository, DataExportRepository,false); + base.SetUp(); - var candidates = wizard.GetPrivateIdentifierCandidates(); + RunBlitzDatabases(RepositoryLocator); - Assert.IsFalse(candidates.Any(c => c.RuntimeName.Equals("PrivateIdentifierA") || c.RuntimeName.Equals("PrivateIdentifierB"))); + _cata1 = new Catalogue(CatalogueRepository, "Dataset1"); + _cata2 = new Catalogue(CatalogueRepository, "Dataset2"); - _extractionInfo1.IsExtractionIdentifier = true; - _extractionInfo1.SaveToDatabase(); - candidates = wizard.GetPrivateIdentifierCandidates(); + _t1 = new TableInfo(CatalogueRepository, "T1"); + _t2 = new TableInfo(CatalogueRepository, "T2"); - var candidate = candidates.Single(c => c.RuntimeName.Equals("PrivateIdentifierA")); - Assert.AreEqual("varchar(10)", candidate.DataType); - Assert.IsTrue(candidate.MatchingExtractionInformations.Single().ID== _extractionInfo1.ID); - } + _c1 = new ColumnInfo(CatalogueRepository, "PrivateIdentifierA", "varchar(10)", _t1); + _c2 = new ColumnInfo(CatalogueRepository, "PrivateIdentifierB", "int", _t2); - [TestCase("text")] - //[TestCase("ntext")] // TODO: FAnsiSql doesn't know that this is max width - [TestCase("varchar(max)")] - [TestCase("nvarchar(max)")] - public void TestVarcharMaxNotAllowed(string term) - { - var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); + _ci1 = new CatalogueItem(CatalogueRepository, _cata1, "PrivateIdentifierA"); + _ci2 = new CatalogueItem(CatalogueRepository, _cata2, "PrivateIdentifierB"); - CreateNewCohortDatabaseWizard wizard = new CreateNewCohortDatabaseWizard(db, CatalogueRepository, DataExportRepository, false); + _extractionInfo1 = new ExtractionInformation(CatalogueRepository, _ci1, _c1, _c1.ToString()); + _extractionInfo2 = new ExtractionInformation(CatalogueRepository, _ci2, _c2, _c2.ToString()); - _extractionInfo2.IsExtractionIdentifier = true; - _c2.Data_type = term; - _c2.SaveToDatabase(); + cohortDatabaseName = TestDatabaseNames.GetConsistentName("Tests_CreateCohortDatabaseWizard"); + } - _extractionInfo2.SaveToDatabase(); + private string cohortDatabaseName; - var candidate = 
wizard.GetPrivateIdentifierCandidates().Single(c => c.RuntimeName.Equals("PrivateIdentifierB")); - var ex = Assert.Throws(()=>wizard.CreateDatabase( - candidate, - new ThrowImmediatelyCheckNotifier())); - Assert.AreEqual("Private identifier datatype cannot be varchar(max) style as this prevents Primary Key creation on the table", ex.Message); - } - [TestCaseSource(typeof(All),nameof(All.DatabaseTypes))] - public void TestActuallyCreatingIt(DatabaseType type) - { - var db = GetCleanedServer(type); + [Test] + public void TestMissingColumnInfos() + { + _extractionInfo1.IsExtractionIdentifier = true; + _extractionInfo1.SaveToDatabase(); + var wizard = new CreateNewCohortDatabaseWizard(null, CatalogueRepository, DataExportRepository, false); - CreateNewCohortDatabaseWizard wizard = new CreateNewCohortDatabaseWizard(db,CatalogueRepository, DataExportRepository,false); + //it finds it! + Assert.IsTrue(wizard.GetPrivateIdentifierCandidates() + .Any(prototype => prototype.RuntimeName.Equals("PrivateIdentifierA"))); - _extractionInfo2.IsExtractionIdentifier = true; - _extractionInfo2.SaveToDatabase(); + //delete the column info to make it a missing reference + _c1.DeleteInDatabase(); - var candidate = wizard.GetPrivateIdentifierCandidates().Single(c => c.RuntimeName.Equals("PrivateIdentifierB")); - var ect = wizard.CreateDatabase( - candidate, - new ThrowImmediatelyCheckNotifier()); + //now it should gracefully skip over it + Assert.IsFalse(wizard.GetPrivateIdentifierCandidates() + .Any(prototype => prototype.RuntimeName.Equals("PrivateIdentifierA"))); + } - Assert.AreEqual(type,ect.DatabaseType); + [Test] + public void ProposePrivateIdentifierDatatypes() + { + var wizard = new CreateNewCohortDatabaseWizard(null, CatalogueRepository, DataExportRepository, false); - //database should exist - DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(cohortDatabaseName); - Assert.IsTrue(db.Exists()); - - //did it create the correct type? 
- Assert.AreEqual(type,ect.DatabaseType); + var candidates = wizard.GetPrivateIdentifierCandidates(); - //the ExternalCohortTable should pass tests - ect.Check(new ThrowImmediatelyCheckNotifier()); - - //now try putting someone in it - //the project it will go under - var project = new Project(DataExportRepository, "MyProject"); - project.ProjectNumber = 10; - project.SaveToDatabase(); + Assert.IsFalse(candidates.Any(c => + c.RuntimeName.Equals("PrivateIdentifierA") || c.RuntimeName.Equals("PrivateIdentifierB"))); - //the request to put it under there - var request = new CohortCreationRequest(project, new CohortDefinition(null, "My cohort", 1, 10, ect), DataExportRepository,"Blah"); - - //the actual cohort data - DataTable dt = new DataTable(); - dt.Columns.Add(_extractionInfo2.GetRuntimeName()); - dt.Rows.Add(101243); //_extractionInfo2 is of type int + _extractionInfo1.IsExtractionIdentifier = true; + _extractionInfo1.SaveToDatabase(); + candidates = wizard.GetPrivateIdentifierCandidates(); - //the destination component that will put it there - var dest = new BasicCohortDestination(); + var candidate = candidates.Single(c => c.RuntimeName.Equals("PrivateIdentifierA")); + Assert.AreEqual("varchar(10)", candidate.DataType); + Assert.IsTrue(candidate.MatchingExtractionInformations.Single().ID == _extractionInfo1.ID); + } - dest.PreInitialize(request, new ThrowImmediatelyDataLoadEventListener()); - - //tell it to use the guid allocator - dest.ReleaseIdentifierAllocator = typeof (GuidReleaseIdentifierAllocator); - - dest.ProcessPipelineData(dt, new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()); - dest.Dispose(new ThrowImmediatelyDataLoadEventListener(), null); + [TestCase("text")] + //[TestCase("ntext")] // TODO: FAnsiSql doesn't know that this is max width + [TestCase("varchar(max)")] + [TestCase("nvarchar(max)")] + public void TestVarcharMaxNotAllowed(string term) + { + var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); - var cohort = request.CohortCreatedIfAny; - Assert.IsNotNull(cohort); + var wizard = new CreateNewCohortDatabaseWizard(db, CatalogueRepository, DataExportRepository, false); - var externalData = cohort.GetExternalData(); - Assert.AreEqual(10,externalData.ExternalProjectNumber); - Assert.IsFalse(string.IsNullOrEmpty(externalData.ExternalDescription)); + _extractionInfo2.IsExtractionIdentifier = true; + _c2.Data_type = term; + _c2.SaveToDatabase(); + _extractionInfo2.SaveToDatabase(); - Assert.AreEqual(DateTime.Now.Year, externalData.ExternalCohortCreationDate.Value.Year); - Assert.AreEqual(DateTime.Now.Month, externalData.ExternalCohortCreationDate.Value.Month); - Assert.AreEqual(DateTime.Now.Day, externalData.ExternalCohortCreationDate.Value.Day); - Assert.AreEqual(DateTime.Now.Hour, externalData.ExternalCohortCreationDate.Value.Hour); + var candidate = wizard.GetPrivateIdentifierCandidates().Single(c => c.RuntimeName.Equals("PrivateIdentifierB")); + var ex = Assert.Throws(() => wizard.CreateDatabase( + candidate, + ThrowImmediatelyCheckNotifier.Quiet)); + Assert.AreEqual( + "Private identifier datatype cannot be varchar(max) style as this prevents Primary Key creation on the table", + ex.Message); + } - cohort.AppendToAuditLog("Test"); - - Assert.IsTrue(cohort.AuditLog.Contains("Test")); + [TestCaseSource(typeof(All), nameof(All.DatabaseTypes))] + public void TestActuallyCreatingIt(DatabaseType type) + { + var db = GetCleanedServer(type); - Assert.AreEqual(1,cohort.Count); - Assert.AreEqual(1,cohort.CountDistinct); + var wizard = new 
CreateNewCohortDatabaseWizard(db, CatalogueRepository, DataExportRepository, false); - var cohortTable = cohort.FetchEntireCohort(); + _extractionInfo2.IsExtractionIdentifier = true; + _extractionInfo2.SaveToDatabase(); - Assert.AreEqual(1,cohortTable.Rows.Count); + var candidate = wizard.GetPrivateIdentifierCandidates().Single(c => c.RuntimeName.Equals("PrivateIdentifierB")); + var ect = wizard.CreateDatabase( + candidate, + ThrowImmediatelyCheckNotifier.Quiet); - var helper = ect.GetQuerySyntaxHelper(); + Assert.AreEqual(type, ect.DatabaseType); - Assert.AreEqual(101243, cohortTable.Rows[0][helper.GetRuntimeName(ect.PrivateIdentifierField)]); - var aguid = cohortTable.Rows[0][helper.GetRuntimeName(ect.ReleaseIdentifierField)].ToString(); - Assert.IsFalse(string.IsNullOrWhiteSpace(aguid)); //should be a guid + //database should exist + DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(cohortDatabaseName); + Assert.IsTrue(db.Exists()); - //test reversing the anonymisation of something - var dtAno = new DataTable(); - dtAno.Columns.Add(cohort.GetReleaseIdentifier(true)); - dtAno.Columns.Add("Age"); - dtAno.Rows.Add(aguid, 23); - dtAno.Rows.Add(aguid, 99); + //did it create the correct type? + Assert.AreEqual(type, ect.DatabaseType); - cohort.ReverseAnonymiseDataTable(dtAno, new ThrowImmediatelyDataLoadEventListener(), true); + //the ExternalCohortTable should pass tests + ect.Check(ThrowImmediatelyCheckNotifier.Quiet); - Assert.AreEqual(2, dtAno.Columns.Count); - Assert.IsTrue(dtAno.Columns.Contains(cohort.GetPrivateIdentifier(true))); + //now try putting someone in it + //the project it will go under + var project = new Project(DataExportRepository, "MyProject") + { + ProjectNumber = 10 + }; + project.SaveToDatabase(); - Assert.AreEqual("101243", dtAno.Rows[0][cohort.GetPrivateIdentifier(true)]); - Assert.AreEqual("101243", dtAno.Rows[1][cohort.GetPrivateIdentifier(true)]); + //the request to put it under there + var request = new CohortCreationRequest(project, new CohortDefinition(null, "My cohort", 1, 10, ect), + DataExportRepository, "Blah"); - //make sure that it shows up in the child provider (provides fast object access in CLI and builds tree model for UI) - var repo = new DataExportChildProvider(RepositoryLocator, null,new ThrowImmediatelyCheckNotifier(),null); - var descendancy = repo.GetDescendancyListIfAnyFor(cohort); - Assert.IsNotNull(descendancy); - } + //the actual cohort data + var dt = new DataTable(); + dt.Columns.Add(_extractionInfo2.GetRuntimeName()); + dt.Rows.Add(101243); //_extractionInfo2 is of type int - [Test] - public void Test_IdentifiableExtractions() - { - var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); + //the destination component that will put it there + var dest = new BasicCohortDestination(); + + dest.PreInitialize(request, ThrowImmediatelyDataLoadEventListener.Quiet); + + //tell it to use the guid allocator + dest.ReleaseIdentifierAllocator = typeof(GuidReleaseIdentifierAllocator); + + dest.ProcessPipelineData(dt, ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); + dest.Dispose(ThrowImmediatelyDataLoadEventListener.Quiet, null); - CreateNewCohortDatabaseWizard wizard = new CreateNewCohortDatabaseWizard(db,CatalogueRepository, DataExportRepository,false); + var cohort = request.CohortCreatedIfAny; + Assert.IsNotNull(cohort); + + var externalData = cohort.GetExternalData(); + Assert.AreEqual(10, externalData.ExternalProjectNumber); + 
Assert.IsFalse(string.IsNullOrEmpty(externalData.ExternalDescription)); + + + Assert.AreEqual(DateTime.Now.Year, externalData.ExternalCohortCreationDate.Value.Year); + Assert.AreEqual(DateTime.Now.Month, externalData.ExternalCohortCreationDate.Value.Month); + Assert.AreEqual(DateTime.Now.Day, externalData.ExternalCohortCreationDate.Value.Day); + Assert.AreEqual(DateTime.Now.Hour, externalData.ExternalCohortCreationDate.Value.Hour); + + cohort.AppendToAuditLog("Test"); + + Assert.IsTrue(cohort.AuditLog.Contains("Test")); + + Assert.AreEqual(1, cohort.Count); + Assert.AreEqual(1, cohort.CountDistinct); + + var cohortTable = cohort.FetchEntireCohort(); + + Assert.AreEqual(1, cohortTable.Rows.Count); + + var helper = ect.GetQuerySyntaxHelper(); + + Assert.AreEqual(101243, cohortTable.Rows[0][helper.GetRuntimeName(ect.PrivateIdentifierField)]); + var aguid = cohortTable.Rows[0][helper.GetRuntimeName(ect.ReleaseIdentifierField)].ToString(); + Assert.IsFalse(string.IsNullOrWhiteSpace(aguid)); //should be a guid + + //test reversing the anonymisation of something + var dtAno = new DataTable(); + dtAno.Columns.Add(cohort.GetReleaseIdentifier(true)); + dtAno.Columns.Add("Age"); + dtAno.Rows.Add(aguid, 23); + dtAno.Rows.Add(aguid, 99); + + cohort.ReverseAnonymiseDataTable(dtAno, ThrowImmediatelyDataLoadEventListener.Quiet, true); + + Assert.AreEqual(2, dtAno.Columns.Count); + Assert.IsTrue(dtAno.Columns.Contains(cohort.GetPrivateIdentifier(true))); + + Assert.AreEqual("101243", dtAno.Rows[0][cohort.GetPrivateIdentifier(true)]); + Assert.AreEqual("101243", dtAno.Rows[1][cohort.GetPrivateIdentifier(true)]); + + //make sure that it shows up in the child provider (provides fast object access in CLI and builds tree model for UI) + var repo = new DataExportChildProvider(RepositoryLocator, null, ThrowImmediatelyCheckNotifier.Quiet, null); + var descendancy = repo.GetDescendancyListIfAnyFor(cohort); + Assert.IsNotNull(descendancy); + } + + [Test] + public void Test_IdentifiableExtractions() + { + var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); - _extractionInfo2.IsExtractionIdentifier = true; - _extractionInfo2.SaveToDatabase(); + var wizard = new CreateNewCohortDatabaseWizard(db, CatalogueRepository, DataExportRepository, false); - var candidate = wizard.GetPrivateIdentifierCandidates().Single(c => c.RuntimeName.Equals("PrivateIdentifierB")); - var ect = wizard.CreateDatabase( - candidate, - new ThrowImmediatelyCheckNotifier()); + _extractionInfo2.IsExtractionIdentifier = true; + _extractionInfo2.SaveToDatabase(); - ect.Check(new ThrowImmediatelyCheckNotifier()); + var candidate = wizard.GetPrivateIdentifierCandidates().Single(c => c.RuntimeName.Equals("PrivateIdentifierB")); + var ect = wizard.CreateDatabase( + candidate, + ThrowImmediatelyCheckNotifier.Quiet); - ect.ReleaseIdentifierField = ect.PrivateIdentifierField; - ect.SaveToDatabase(); + ect.Check(ThrowImmediatelyCheckNotifier.Quiet); - UserSettings.SetErrorReportingLevelFor(ErrorCodes.ExtractionIsIdentifiable,CheckResult.Fail); + ect.ReleaseIdentifierField = ect.PrivateIdentifierField; + ect.SaveToDatabase(); - var ex = Assert.Throws(()=>ect.Check(new ThrowImmediatelyCheckNotifier())); - Assert.AreEqual("R004 PrivateIdentifierField and ReleaseIdentifierField are the same, this means your cohort will extract identifiable data (no cohort identifier substitution takes place)", ex.Message); + UserSettings.SetErrorReportingLevelFor(ErrorCodes.ExtractionIsIdentifiable, CheckResult.Fail); - 
UserSettings.SetErrorReportingLevelFor(ErrorCodes.ExtractionIsIdentifiable, CheckResult.Warning); + var ex = Assert.Throws(() => ect.Check(ThrowImmediatelyCheckNotifier.Quiet)); + Assert.AreEqual( + "R004 PrivateIdentifierField and ReleaseIdentifierField are the same, this means your cohort will extract identifiable data (no cohort identifier substitution takes place)", + ex.Message); - ect.Check(new ThrowImmediatelyCheckNotifier()); + UserSettings.SetErrorReportingLevelFor(ErrorCodes.ExtractionIsIdentifiable, CheckResult.Warning); - UserSettings.SetErrorReportingLevelFor(ErrorCodes.ExtractionIsIdentifiable, CheckResult.Fail); + ect.Check(ThrowImmediatelyCheckNotifier.Quiet); - } + UserSettings.SetErrorReportingLevelFor(ErrorCodes.ExtractionIsIdentifiable, CheckResult.Fail); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/CohortCreation/AggregateFilterPublishingTests.cs b/Rdmp.Core.Tests/CohortCreation/AggregateFilterPublishingTests.cs index 193f915496..2297781e78 100644 --- a/Rdmp.Core.Tests/CohortCreation/AggregateFilterPublishingTests.cs +++ b/Rdmp.Core.Tests/CohortCreation/AggregateFilterPublishingTests.cs @@ -12,195 +12,220 @@ using Rdmp.Core.Curation.Data.Aggregation; using Rdmp.Core.Curation.FilterImporting; using Rdmp.Core.Curation.FilterImporting.Construction; +using Rdmp.Core.MapsDirectlyToDatabaseTable; using Rdmp.Core.QueryBuilding; -using MapsDirectlyToDatabaseTable; -namespace Rdmp.Core.Tests.CohortCreation +namespace Rdmp.Core.Tests.CohortCreation; + +public class AggregateFilterPublishingTests : CohortIdentificationTests { - public class AggregateFilterPublishingTests:CohortIdentificationTests - { + private AggregateFilter _filter; + private AggregateFilterContainer _container; - private AggregateFilter _filter; - private AggregateFilterContainer _container; + private ExtractionInformation _chiExtractionInformation; - private ExtractionInformation _chiExtractionInformation; - - [SetUp] - protected override void SetUp() - { - base.SetUp(); + [OneTimeSetUp] + protected override void SetUp() + { + base.SetUp(); - aggregate1.RootFilterContainer_ID = new AggregateFilterContainer(CatalogueRepository,FilterContainerOperation.AND).ID; - aggregate1.SaveToDatabase(); + aggregate1.RootFilterContainer_ID = + new AggregateFilterContainer(CatalogueRepository, FilterContainerOperation.AND).ID; + aggregate1.SaveToDatabase(); - _chiExtractionInformation = aggregate1.AggregateDimensions.Single().ExtractionInformation; + _chiExtractionInformation = aggregate1.AggregateDimensions.Single().ExtractionInformation; - _container = (AggregateFilterContainer)aggregate1.RootFilterContainer; + _container = (AggregateFilterContainer)aggregate1.RootFilterContainer; - _filter = new AggregateFilter(CatalogueRepository,"folk", _container); - } - - [Test] - public void NotPopulated_Description() - { - var ex = Assert.Throws(()=>new FilterImporter(new ExtractionFilterFactory(_chiExtractionInformation), null).ImportFilter(_container,_filter, null)); - Assert.AreEqual("Cannot clone filter called 'folk' because:There is no description",ex.Message); - } + _filter = new AggregateFilter(CatalogueRepository, "folk", _container); + } - [Test] - public void NotPopulated_DescriptionTooShort() - { - _filter.Description = "fish"; - var ex = Assert.Throws(()=>new FilterImporter(new ExtractionFilterFactory(_chiExtractionInformation), null).ImportFilter(_container, _filter, null)); - Assert.AreEqual("Cannot clone filter called 'folk' because:Description is not long enough (minimum length is 20 
characters)",ex.Message); - } + [Test] + public void NotPopulated_Description() + { + var ex = Assert.Throws(() => + new FilterImporter(new ExtractionFilterFactory(_chiExtractionInformation), null).ImportFilter(_container, + _filter, null)); + Assert.AreEqual("Cannot clone filter called 'folk' because:There is no description", ex?.Message); + } - [Test] - public void NotPopulated_WhereSQLNotSet() - { - _filter.Description = "fish swim in the sea and make people happy to be"; - var ex = Assert.Throws(()=>new FilterImporter(new ExtractionFilterFactory(_chiExtractionInformation), null).ImportFilter(_container, _filter, null)); - Assert.AreEqual("Cannot clone filter called 'folk' because:WhereSQL is not populated",ex.Message); - } - - /// - /// Check parameters can be created without a comment - /// - [Test] - public void NotPopulated_ParameterNoComment() - { - _filter.Description = "fish swim in the sea and make people happy to be"; - _filter.WhereSQL = "LovelyCoconuts = @coconutCount"; - _filter.SaveToDatabase(); - new ParameterCreator(new AggregateFilterFactory(CatalogueRepository), null, null).CreateAll(_filter, null); + [Test] + public void NotPopulated_DescriptionTooShort() + { + _filter.Description = "fish"; + var ex = Assert.Throws(() => + new FilterImporter(new ExtractionFilterFactory(_chiExtractionInformation), null).ImportFilter(_container, + _filter, null)); + Assert.AreEqual( + "Cannot clone filter called 'folk' because:Description is not long enough (minimum length is 20 characters)", + ex?.Message); + } - IFilter importedFilter = new FilterImporter(new ExtractionFilterFactory(_chiExtractionInformation), null).ImportFilter(null, _filter, null); - Assert.AreEqual("folk", importedFilter.Name); - } + [Test] + public void NotPopulated_WhereSQLNotSet() + { + _filter.Description = "fish swim in the sea and make people happy to be"; + var ex = Assert.Throws(() => + new FilterImporter(new ExtractionFilterFactory(_chiExtractionInformation), null).ImportFilter(_container, + _filter, null)); + Assert.AreEqual("Cannot clone filter called 'folk' because:WhereSQL is not populated", ex?.Message); + } + /// + /// Check parameters can be created without a comment + /// + [Test] + public void NotPopulated_ParameterNoComment() + { + _filter.Description = "fish swim in the sea and make people happy to be"; + _filter.WhereSQL = "LovelyCoconuts = @coconutCount"; + _filter.SaveToDatabase(); + new ParameterCreator(new AggregateFilterFactory(CatalogueRepository), null, null).CreateAll(_filter, null); + + var importedFilter = + new FilterImporter(new ExtractionFilterFactory(_chiExtractionInformation), null).ImportFilter(null, _filter, + null); + Assert.AreEqual("folk", importedFilter.Name); + } - [Test] - public void NotPopulated_ParameterNotSet() - { - _filter.Description = "fish swim in the sea and make people happy to be"; - _filter.WhereSQL = "LovelyCoconuts = @coconutCount"; - - new ParameterCreator(new AggregateFilterFactory(CatalogueRepository), null, null).CreateAll(_filter, null); - var parameter = _filter.GetAllParameters().Single(); - parameter.Comment = "It's coconut time!"; - parameter.Value = null;//clear its value - parameter.SaveToDatabase(); - - var ex = Assert.Throws(()=>new FilterImporter(new ExtractionFilterFactory(_chiExtractionInformation),null).ImportFilter(_container, _filter,null)); - Assert.AreEqual("Cannot clone filter called 'folk' because:Parameter '@coconutCount' was rejected :There is no value/default value listed",ex.Message); - - } - - [Test] - public void 
ShortcutFiltersWork_ProperlyReplicatesParentAndHasFK() - { - _filter.WhereSQL = "folk=1"; - _filter.SaveToDatabase(); - string sql = new CohortQueryBuilder(aggregate1, null,null).SQL; + [Test] + public void NotPopulated_ParameterNotSet() + { + _filter.Description = "fish swim in the sea and make people happy to be"; + _filter.WhereSQL = "LovelyCoconuts = @coconutCount"; + + new ParameterCreator(new AggregateFilterFactory(CatalogueRepository), null, null).CreateAll(_filter, null); + var parameter = _filter.GetAllParameters().Single(); + parameter.Comment = "It's coconut time!"; + parameter.Value = null; //clear its value + parameter.SaveToDatabase(); + + var ex = Assert.Throws(() => + new FilterImporter(new ExtractionFilterFactory(_chiExtractionInformation), null).ImportFilter(_container, + _filter, null)); + Assert.AreEqual( + "Cannot clone filter called 'folk' because:Parameter '@coconutCount' was rejected :There is no value/default value listed", + ex?.Message); + } - Console.WriteLine(sql); - Assert.IsTrue(sql.Contains("folk=1")); + [Test] + public void ShortcutFiltersWork_ProperlyReplicatesParentAndHasFK() + { + _filter.WhereSQL = "folk=1"; + _filter.SaveToDatabase(); + var sql = new CohortQueryBuilder(aggregate1, null, null).SQL; - var shortcutAggregate = - new AggregateConfiguration(CatalogueRepository,testData.catalogue, "UnitTestShortcutAggregate"); + Console.WriteLine(sql); + Assert.IsTrue(sql.Contains("folk=1")); - new AggregateDimension(CatalogueRepository,testData.extractionInformations.Single(e => e.GetRuntimeName().Equals("sex")), shortcutAggregate); - //before it is a shortcut it has no filters - Assert.IsFalse(shortcutAggregate.GetQueryBuilder().SQL.Contains("WHERE")); + var shortcutAggregate = + new AggregateConfiguration(CatalogueRepository, testData.catalogue, "UnitTestShortcutAggregate"); - //make it a shortcut - shortcutAggregate.OverrideFiltersByUsingParentAggregateConfigurationInstead_ID = aggregate1.ID; - shortcutAggregate.SaveToDatabase(); + _ = new AggregateDimension(CatalogueRepository, + testData.extractionInformations.Single(e => e.GetRuntimeName().Equals("sex")), shortcutAggregate); - string sqlShortcut = shortcutAggregate.GetQueryBuilder().SQL; - - //shortcut should have its own dimensions - Assert.IsTrue(sqlShortcut.Contains("[sex]")); - Assert.IsFalse(sqlShortcut.Contains("[chi]")); - - //but should have a REFERENCE (not a clone!) to aggregate 1's filters - Assert.IsTrue(sqlShortcut.Contains("folk=1")); + //before it is a shortcut it has no filters + Assert.IsFalse(shortcutAggregate.GetQueryBuilder().SQL.Contains("WHERE")); - //make sure it is a reference by changing the original - _filter.WhereSQL = "folk=2"; - _filter.SaveToDatabase(); - Assert.IsTrue(shortcutAggregate.GetQueryBuilder().SQL.Contains("folk=2")); + //make it a shortcut + shortcutAggregate.OverrideFiltersByUsingParentAggregateConfigurationInstead_ID = aggregate1.ID; + shortcutAggregate.SaveToDatabase(); - //shouldnt work because of the dependency of the child - should give a foreign key error - if(CatalogueRepository is TableRepository) - { - Assert.Throws(aggregate1.DeleteInDatabase); - } + var sqlShortcut = shortcutAggregate.GetQueryBuilder().SQL; - //delete the child - shortcutAggregate.DeleteInDatabase(); + //shortcut should have its own dimensions + Assert.IsTrue(sqlShortcut.Contains("[sex]")); + Assert.IsFalse(sqlShortcut.Contains("[chi]")); - aggregate1.DeleteInDatabase(); - aggregate1 = null; - } + //but should have a REFERENCE (not a clone!) 
to aggregate 1's filters + Assert.IsTrue(sqlShortcut.Contains("folk=1")); - [Test] - public void ShortcutFilters_AlreadyHasFilter() - { - Assert.IsNotNull(aggregate1.RootFilterContainer_ID); - var ex = Assert.Throws(()=>aggregate1.OverrideFiltersByUsingParentAggregateConfigurationInstead_ID = -500);//not ok - Assert.AreEqual("Cannot set OverrideFiltersByUsingParentAggregateConfigurationInstead_ID because this AggregateConfiguration already has a filter container set (if you were to be a shortcut and also have a filter tree set it would be very confusing)",ex.Message); - } - [Test] - public void ShortcutFilters_AlreadyHasFilter_ButSettingItToNull() - { - Assert.IsNotNull(aggregate1.RootFilterContainer_ID); - Assert.DoesNotThrow( - () => { aggregate1.OverrideFiltersByUsingParentAggregateConfigurationInstead_ID = null; }); // is ok - } + //make sure it is a reference by changing the original + _filter.WhereSQL = "folk=2"; + _filter.SaveToDatabase(); + Assert.IsTrue(shortcutAggregate.GetQueryBuilder().SQL.Contains("folk=2")); + //shouldn't work because of the dependency of the child - should give a foreign key error + if (CatalogueRepository is TableRepository) Assert.Throws(aggregate1.DeleteInDatabase); - [Test] - public void ShortcutFilters_DoesNotHaveFilter_SetOne() - { - aggregate1.RootFilterContainer_ID = null; - Assert.DoesNotThrow(() => { aggregate1.OverrideFiltersByUsingParentAggregateConfigurationInstead_ID = null; }); // is ok - Assert.DoesNotThrow(() => { aggregate1.OverrideFiltersByUsingParentAggregateConfigurationInstead_ID = -19; }); // is ok - var ex = Assert.Throws(()=>aggregate1.RootFilterContainer_ID = 123); - Assert.AreEqual("This AggregateConfiguration has a shortcut to another AggregateConfiguration's Filters (its OverrideFiltersByUsingParentAggregateConfigurationInstead_ID is -19) which means it cannot be assigned its own RootFilterContainerID",ex.Message); - } - - [Test] - public void CloneWorks_AllPropertiesMatchIncludingParameters() - { - _filter.Description = "fish swim in the sea and make people happy to be"; - _filter.WhereSQL = "LovelyCoconuts = @coconutCount"; + //delete the child + shortcutAggregate.DeleteInDatabase(); - new ParameterCreator(new AggregateFilterFactory(CatalogueRepository), null, null).CreateAll(_filter, null); - _filter.SaveToDatabase(); + aggregate1.DeleteInDatabase(); + aggregate1 = null; + } - Assert.IsNull(_filter.ClonedFromExtractionFilter_ID); + [Test] + public void ShortcutFilters_AlreadyHasFilter() + { + Assert.IsNotNull(aggregate1.RootFilterContainer_ID); + var ex = Assert.Throws(() => + aggregate1.OverrideFiltersByUsingParentAggregateConfigurationInstead_ID = -500); //not ok + Assert.AreEqual( + "Cannot set OverrideFiltersByUsingParentAggregateConfigurationInstead_ID because this AggregateConfiguration already has a filter container set (if you were to be a shortcut and also have a filter tree set it would be very confusing)", + ex?.Message); + } - var parameter = _filter.GetAllParameters().Single(); - parameter.ParameterSQL = "Declare @coconutCount int"; - parameter.Comment = "It's coconut time!"; - parameter.Value = "3"; - parameter.SaveToDatabase(); - - var newMaster = new FilterImporter(new ExtractionFilterFactory(_chiExtractionInformation), null).ImportFilter(null, _filter, null); + [Test] + public void ShortcutFilters_AlreadyHasFilter_ButSettingItToNull() + { + Assert.IsNotNull(aggregate1.RootFilterContainer_ID); + Assert.DoesNotThrow( + () => { aggregate1.OverrideFiltersByUsingParentAggregateConfigurationInstead_ID = null; }); // 
is ok + } - //we should now be a clone of the master we just created - Assert.AreEqual(_filter.ClonedFromExtractionFilter_ID,newMaster.ID); - Assert.IsTrue(newMaster.Description.StartsWith(_filter.Description)); //it adds some addendum stuff onto it - Assert.AreEqual(_filter.WhereSQL,newMaster.WhereSQL); - Assert.AreEqual(_filter.GetAllParameters().Single().ParameterName, newMaster.GetAllParameters().Single().ParameterName); - Assert.AreEqual(_filter.GetAllParameters().Single().ParameterSQL, newMaster.GetAllParameters().Single().ParameterSQL); - Assert.AreEqual(_filter.GetAllParameters().Single().Value, newMaster.GetAllParameters().Single().Value); + [Test] + public void ShortcutFilters_DoesNotHaveFilter_SetOne() + { + aggregate1.RootFilterContainer_ID = null; + Assert.DoesNotThrow(() => + { + aggregate1.OverrideFiltersByUsingParentAggregateConfigurationInstead_ID = null; + }); // is ok + Assert.DoesNotThrow(() => + { + aggregate1.OverrideFiltersByUsingParentAggregateConfigurationInstead_ID = -19; + }); // is ok + var ex = Assert.Throws(() => aggregate1.RootFilterContainer_ID = 123); + Assert.AreEqual( + "This AggregateConfiguration has a shortcut to another AggregateConfiguration's Filters (its OverrideFiltersByUsingParentAggregateConfigurationInstead_ID is -19) which means it cannot be assigned its own RootFilterContainerID", + ex?.Message); + } - } + [Test] + public void CloneWorks_AllPropertiesMatchIncludingParameters() + { + _filter.Description = "fish swim in the sea and make people happy to be"; + _filter.WhereSQL = "LovelyCoconuts = @coconutCount"; + + new ParameterCreator(new AggregateFilterFactory(CatalogueRepository), null, null).CreateAll(_filter, null); + _filter.SaveToDatabase(); + + Assert.IsNull(_filter.ClonedFromExtractionFilter_ID); + + var parameter = _filter.GetAllParameters().Single(); + parameter.ParameterSQL = "Declare @coconutCount int"; + parameter.Comment = "It's coconut time!"; + parameter.Value = "3"; + parameter.SaveToDatabase(); + + var newMaster = + new FilterImporter(new ExtractionFilterFactory(_chiExtractionInformation), null).ImportFilter(null, _filter, + null); + + //we should now be a clone of the master we just created + Assert.AreEqual(_filter.ClonedFromExtractionFilter_ID, newMaster.ID); + Assert.IsTrue(newMaster.Description.StartsWith(_filter.Description)); //it adds some addendum stuff onto it + Assert.AreEqual(_filter.WhereSQL, newMaster.WhereSQL); + + Assert.AreEqual(_filter.GetAllParameters().Single().ParameterName, + newMaster.GetAllParameters().Single().ParameterName); + Assert.AreEqual(_filter.GetAllParameters().Single().ParameterSQL, + newMaster.GetAllParameters().Single().ParameterSQL); + Assert.AreEqual(_filter.GetAllParameters().Single().Value, newMaster.GetAllParameters().Single().Value); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/CohortCreation/CohortCompilerRunnerTests.cs b/Rdmp.Core.Tests/CohortCreation/CohortCompilerRunnerTests.cs index f790cefb6c..d85e4350ec 100644 --- a/Rdmp.Core.Tests/CohortCreation/CohortCompilerRunnerTests.cs +++ b/Rdmp.Core.Tests/CohortCreation/CohortCompilerRunnerTests.cs @@ -9,109 +9,104 @@ using System.Threading; using FAnsi; using FAnsi.Discovery; -using MapsDirectlyToDatabaseTable.Versioning; using NUnit.Framework; using Rdmp.Core.CohortCreation.Execution; using Rdmp.Core.Curation.Data; using Rdmp.Core.Curation.Data.Aggregation; using Rdmp.Core.Curation.Data.Cohort; using Rdmp.Core.Databases; -using ReusableLibraryCode.Checks; +using Rdmp.Core.MapsDirectlyToDatabaseTable.Versioning; 
+using Rdmp.Core.ReusableLibraryCode.Checks; using Tests.Common; -namespace Rdmp.Core.Tests.CohortCreation +namespace Rdmp.Core.Tests.CohortCreation; + +public class CohortCompilerRunnerTests : DatabaseTests { - public class CohortCompilerRunnerTests:DatabaseTests + [Test] + public void CacheIdentifierListWithRunner_SimpleCase() { - [Test] - public void CacheIdentifierListWithRunner_SimpleCase() - { - DiscoveredDatabase db; - CohortIdentificationConfiguration cic; - DataTable dt; + SetupCohort(out var db, out var cic, out var dt); - SetupCohort(out db,out cic,out dt); + var compiler = new CohortCompiler(cic); - var compiler = new CohortCompiler(cic); + var runner = new CohortCompilerRunner(compiler, 5000); + runner.Run(new CancellationToken()); - var runner = new CohortCompilerRunner(compiler, 5000); - runner.Run(new CancellationToken()); + Assert.AreEqual(CohortCompilerRunner.Phase.Finished, runner.ExecutionPhase); - Assert.AreEqual(CohortCompilerRunner.Phase.Finished, runner.ExecutionPhase); + var rootTask = runner.Compiler.Tasks.Single(t => t.Key is AggregationContainerTask); - var rootTask = runner.Compiler.Tasks.Single(t => t.Key is AggregationContainerTask); - - Assert.IsTrue(rootTask.Value.IsResultsForRootContainer); - Assert.IsNull(rootTask.Key.CrashMessage); - Assert.AreEqual(CompilationState.Finished, rootTask.Key.State); + Assert.IsTrue(rootTask.Value.IsResultsForRootContainer); + Assert.IsNull(rootTask.Key.CrashMessage); + Assert.AreEqual(CompilationState.Finished, rootTask.Key.State); - Assert.AreEqual(dt.Rows.Count,rootTask.Value.Identifiers.Rows.Count); - } + Assert.AreEqual(dt.Rows.Count, rootTask.Value.Identifiers.Rows.Count); + } - [Test] - public void CacheIdentifierListWithRunner_WithCaching() - { - DiscoveredDatabase db; - CohortIdentificationConfiguration cic; - DataTable dt; + [Test] + public void CacheIdentifierListWithRunner_WithCaching() + { + SetupCohort(out var db, out var cic, out var dt); - SetupCohort(out db, out cic, out dt); - - MasterDatabaseScriptExecutor e = new MasterDatabaseScriptExecutor(db); - var p = new QueryCachingPatcher(); - e.CreateAndPatchDatabase(p,new AcceptAllCheckNotifier()); - - var serverReference = new ExternalDatabaseServer(CatalogueRepository, "Cache", p); - serverReference.SetProperties(db); + var e = new MasterDatabaseScriptExecutor(db); + var p = new QueryCachingPatcher(); + e.CreateAndPatchDatabase(p, new AcceptAllCheckNotifier()); - cic.QueryCachingServer_ID = serverReference.ID; - cic.SaveToDatabase(); + var serverReference = new ExternalDatabaseServer(CatalogueRepository, "Cache", p); + serverReference.SetProperties(db); - var compiler = new CohortCompiler(cic); + cic.QueryCachingServer_ID = serverReference.ID; + cic.SaveToDatabase(); - var runner = new CohortCompilerRunner(compiler, 5000); - runner.Run(new CancellationToken()); + var compiler = new CohortCompiler(cic); - Assert.AreEqual(CohortCompilerRunner.Phase.Finished, runner.ExecutionPhase); + var runner = new CohortCompilerRunner(compiler, 5000); + runner.Run(new CancellationToken()); - var rootTask = runner.Compiler.Tasks.Single(t => t.Key is AggregationContainerTask); + Assert.AreEqual(CohortCompilerRunner.Phase.Finished, runner.ExecutionPhase); - Assert.IsTrue(rootTask.Value.IsResultsForRootContainer); - Assert.IsNull(rootTask.Key.CrashMessage); - Assert.AreEqual(CompilationState.Finished, rootTask.Key.State); + var rootTask = runner.Compiler.Tasks.Single(t => t.Key is AggregationContainerTask); - Assert.IsTrue(runner.Compiler.AreaAllQueriesCached(rootTask.Key)); + 
Assert.IsTrue(rootTask.Value.IsResultsForRootContainer); + Assert.IsNull(rootTask.Key.CrashMessage); + Assert.AreEqual(CompilationState.Finished, rootTask.Key.State); - Assert.AreEqual(dt.Rows.Count, rootTask.Value.Identifiers.Rows.Count); - } - private void SetupCohort(out DiscoveredDatabase db, out CohortIdentificationConfiguration cic, out DataTable dt) - { - dt = new DataTable(); - dt.Columns.Add("PK"); + Assert.IsTrue(runner.Compiler.AreaAllQueriesCached(rootTask.Key)); - //add lots of rows - for (int i = 0; i < 100000; i++) - dt.Rows.Add(i); + Assert.AreEqual(dt.Rows.Count, rootTask.Value.Identifiers.Rows.Count); + } + + private void SetupCohort(out DiscoveredDatabase db, out CohortIdentificationConfiguration cic, out DataTable dt) + { + dt = new DataTable(); + dt.BeginLoadData(); + dt.Columns.Add("PK"); - db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); - var tbl = db.CreateTable("CohortCompilerRunnerTestsTable", dt); + //add lots of rows + for (var i = 0; i < 100000; i++) + dt.Rows.Add(i); - var cata = Import(tbl); + db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); + var tbl = db.CreateTable("CohortCompilerRunnerTestsTable", dt); - var ei = cata.CatalogueItems[0].ExtractionInformation; - ei.IsExtractionIdentifier = true; - ei.SaveToDatabase(); + var cata = Import(tbl); - var agg = new AggregateConfiguration(CatalogueRepository, cata, "MyAgg"); - agg.CountSQL = null; - agg.SaveToDatabase(); - var dimension = new AggregateDimension(CatalogueRepository, ei, agg); + var ei = cata.CatalogueItems[0].ExtractionInformation; + ei.IsExtractionIdentifier = true; + ei.SaveToDatabase(); + + var agg = new AggregateConfiguration(CatalogueRepository, cata, "MyAgg") + { + CountSQL = null + }; + agg.SaveToDatabase(); + _ = new AggregateDimension(CatalogueRepository, ei, agg); - cic = new CohortIdentificationConfiguration(CatalogueRepository, "MyCic"); - cic.CreateRootContainerIfNotExists(); - cic.RootCohortAggregateContainer.AddChild(agg, 0); + cic = new CohortIdentificationConfiguration(CatalogueRepository, "MyCic"); + cic.CreateRootContainerIfNotExists(); + cic.RootCohortAggregateContainer.AddChild(agg, 0); - cic.EnsureNamingConvention(agg); - } + cic.EnsureNamingConvention(agg); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/CohortCreation/CohortCompilerTests.cs b/Rdmp.Core.Tests/CohortCreation/CohortCompilerTests.cs index fa712e272f..0b60742c4c 100644 --- a/Rdmp.Core.Tests/CohortCreation/CohortCompilerTests.cs +++ b/Rdmp.Core.Tests/CohortCreation/CohortCompilerTests.cs @@ -15,178 +15,189 @@ using Rdmp.Core.Curation.Data.Aggregation; using Rdmp.Core.Curation.Data.Cohort.Joinables; -namespace Rdmp.Core.Tests.CohortCreation +namespace Rdmp.Core.Tests.CohortCreation; + +public class CohortCompilerTests : CohortIdentificationTests { - public class CohortCompilerTests:CohortIdentificationTests + [Test] + public void AddSameTaskTwice_StaysAtOne() { - [Test] - public void AddSameTaskTwice_StaysAtOne() + var compiler = new CohortCompiler(cohortIdentificationConfiguration); + container1.AddChild(aggregate1, 0); + try { - CohortCompiler compiler = new CohortCompiler(cohortIdentificationConfiguration); - container1.AddChild(aggregate1,0); - try + compiler.AddTask(aggregate1, null); + + Assert.AreEqual(1, compiler.Tasks.Count); + + var oldTask = compiler.Tasks.First(); + + //adding it again with the same SQL should result in it ignoring it + compiler.AddTask(aggregate1, null); + Assert.AreEqual(1, compiler.Tasks.Count); + + //make a change to the SQL + foreach (var d in 
aggregate1.AggregateDimensions) { - compiler.AddTask(aggregate1, null); + d.SelectSQL = "'fish'"; + d.SaveToDatabase(); + } + + //now add it again + var newAggregate1 = CatalogueRepository.GetObjectByID(aggregate1.ID); + + compiler.AddTask(newAggregate1, null); + Assert.AreEqual(1, compiler.Tasks.Count); //should still be 1 task + + // TN: Task was never asked to start so was still at NotScheduled so cancellation wouldn't actually happen + //old task should have been asked to cancel + // Assert.IsTrue(oldTask.Key.CancellationToken.IsCancellationRequested); - Assert.AreEqual(1, compiler.Tasks.Count); + Assert.AreNotSame(oldTask, compiler.Tasks.Single()); //new task should not be the same as the old one + Assert.IsFalse(compiler.Tasks.Single().Key.CancellationToken.IsCancellationRequested); + //new task should not be cancelled} finally { + } + finally + { + container1.RemoveChild(aggregate1); + } + } - var oldTask = compiler.Tasks.First(); + [Test] + public void AddContainer_StaysAtOne() + { + var compiler = new CohortCompiler(cohortIdentificationConfiguration); + rootcontainer.AddChild(aggregate1, 1); - //adding it again with the same SQL should result in it ignoring it - compiler.AddTask(aggregate1, null); - Assert.AreEqual(1, compiler.Tasks.Count); + compiler.AddTask(rootcontainer, null); //add the root container - //make a change to the SQL - foreach (var d in aggregate1.AggregateDimensions) - { - d.SelectSQL = "'fish'"; - d.SaveToDatabase(); - } + Assert.AreEqual(1, compiler.Tasks.Count); + var oldTask = compiler.Tasks.First(); - //now add it again - var newAggregate1 = CatalogueRepository.GetObjectByID(aggregate1.ID); + //adding it again with the same SQL should result in it ignoring it + compiler.AddTask(rootcontainer, null); + Assert.AreEqual(1, compiler.Tasks.Count); - compiler.AddTask(newAggregate1, null); - Assert.AreEqual(1, compiler.Tasks.Count); //should still be 1 task + //add another aggregate into the container + rootcontainer.AddChild(aggregate2, 1); - // TN: Task was never asked to start so was still at NotScheduled so cancellation wouldn't actually happen - //old task should have been asked to cancel - // Assert.IsTrue(oldTask.Key.CancellationToken.IsCancellationRequested); + compiler.AddTask(rootcontainer, null); + Assert.AreEqual(1, compiler.Tasks.Count); //should still be 1 task - Assert.AreNotSame(oldTask, compiler.Tasks.Single()); //new task should not be the same as the old one - Assert.IsFalse(compiler.Tasks.Single().Key.CancellationToken.IsCancellationRequested); - //new task should not be cancelled} finally { + // TN: Task was never asked to start so was still at NotScheduled so cancellation wouldn't actually happen + //old task should have been asked to cancel + //Assert.IsTrue(oldTask.Key.CancellationToken.IsCancellationRequested); + Assert.AreNotSame(oldTask, compiler.Tasks.Single()); //new task should not be the same as the old one + Assert.IsFalse(compiler.Tasks.Single().Key.CancellationToken + .IsCancellationRequested); //new task should not be cancelled - } - finally + rootcontainer.RemoveChild(aggregate1); + rootcontainer.RemoveChild(aggregate2); + } + + public enum TestCompilerAddAllTasksTestCase + { + CIC, + RootContainer, + Subcontainer + } + + [TestCase(TestCompilerAddAllTasksTestCase.CIC, true)] + [TestCase(TestCompilerAddAllTasksTestCase.CIC, false)] + [TestCase(TestCompilerAddAllTasksTestCase.RootContainer, true)] + [TestCase(TestCompilerAddAllTasksTestCase.RootContainer, false)] + [TestCase(TestCompilerAddAllTasksTestCase.Subcontainer, true)] + 
[TestCase(TestCompilerAddAllTasksTestCase.Subcontainer, false)] + public void TestCompilerAddAllTasks(TestCompilerAddAllTasksTestCase testCase, bool includeSubcontainers) + { + var aggregate4 = + new AggregateConfiguration(CatalogueRepository, testData.catalogue, "UnitTestAggregate4") { - container1.RemoveChild(aggregate1); - } - } + CountSQL = null + }; + aggregate4.SaveToDatabase(); + new AggregateDimension(CatalogueRepository, + testData.extractionInformations.Single(e => e.GetRuntimeName().Equals("chi")), aggregate4); + + var aggregate5 = + new AggregateConfiguration(CatalogueRepository, testData.catalogue, "UnitTestAggregate5") + { + CountSQL = null + }; + aggregate5.SaveToDatabase(); + new AggregateDimension(CatalogueRepository, + testData.extractionInformations.Single(e => e.GetRuntimeName().Equals("chi")), aggregate5); - [Test] - public void AddContainer_StaysAtOne() + var joinable = + new JoinableCohortAggregateConfiguration(CatalogueRepository, cohortIdentificationConfiguration, + aggregate5); + + + try { - CohortCompiler compiler = new CohortCompiler(cohortIdentificationConfiguration); - rootcontainer.AddChild(aggregate1, 1); + //EXCEPT + //Aggregate 1 + //UNION + //Aggregate 3 + //Aggregate 4 + //Aggregate 2 - compiler.AddTask(rootcontainer, null);//add the root container + //Joinable:aggregate5 (patient index table, the other Aggregates could JOIN to this) - Assert.AreEqual(1, compiler.Tasks.Count); - var oldTask = compiler.Tasks.First(); + var compiler = new CohortCompiler(cohortIdentificationConfiguration); + rootcontainer.AddChild(aggregate1, 1); + rootcontainer.AddChild(container1); + container1.Order = 2; + container1.SaveToDatabase(); - //adding it again with the same SQL should result in it ignoring it - compiler.AddTask(rootcontainer, null); - Assert.AreEqual(1, compiler.Tasks.Count); + rootcontainer.AddChild(aggregate2, 3); - //add another aggregate into the container - rootcontainer.AddChild(aggregate2, 1); + container1.AddChild(aggregate3, 1); + container1.AddChild(aggregate4, 2); - compiler.AddTask(rootcontainer, null); - Assert.AreEqual(1, compiler.Tasks.Count);//should still be 1 task + cohortIdentificationConfiguration.RootCohortAggregateContainer_ID = rootcontainer.ID; + cohortIdentificationConfiguration.SaveToDatabase(); + + //The bit we are testing + List tasks; + switch (testCase) + { + case TestCompilerAddAllTasksTestCase.CIC: + tasks = compiler.AddAllTasks(includeSubcontainers); + Assert.AreEqual(joinable, + tasks.OfType().Single().Joinable); //should be a single joinable + Assert.AreEqual(includeSubcontainers ? 7 : 6, + tasks.Count); //all joinables, aggregates and root container + + break; + case TestCompilerAddAllTasksTestCase.RootContainer: + tasks = compiler.AddTasksRecursively(Array.Empty(), + cohortIdentificationConfiguration.RootCohortAggregateContainer, includeSubcontainers); + Assert.AreEqual(includeSubcontainers ? 6 : 5, + tasks.Count); //all aggregates and root container (but not joinables) + break; + case TestCompilerAddAllTasksTestCase.Subcontainer: + tasks = compiler.AddTasksRecursively(Array.Empty(), container1, + includeSubcontainers); + Assert.AreEqual(includeSubcontainers ? 
3 : 2, tasks.Count); //subcontainer and its aggregates + break; + default: + throw new ArgumentOutOfRangeException(nameof(testCase)); + } - // TN: Task was never asked to start so was still at NotScheduled so cancellation wouldn't actually happen - //old task should have been asked to cancel - //Assert.IsTrue(oldTask.Key.CancellationToken.IsCancellationRequested); - Assert.AreNotSame(oldTask, compiler.Tasks.Single());//new task should not be the same as the old one - Assert.IsFalse(compiler.Tasks.Single().Key.CancellationToken.IsCancellationRequested);//new task should not be cancelled rootcontainer.RemoveChild(aggregate1); rootcontainer.RemoveChild(aggregate2); - } - public enum TestCompilerAddAllTasksTestCase - { - CIC, - RootContainer, - Subcontainer + container1.RemoveChild(aggregate3); + container1.RemoveChild(aggregate4); + container1.MakeIntoAnOrphan(); } - - [TestCase(TestCompilerAddAllTasksTestCase.CIC,true)] - [TestCase(TestCompilerAddAllTasksTestCase.CIC, false)] - [TestCase(TestCompilerAddAllTasksTestCase.RootContainer, true)] - [TestCase(TestCompilerAddAllTasksTestCase.RootContainer, false)] - [TestCase(TestCompilerAddAllTasksTestCase.Subcontainer, true)] - [TestCase(TestCompilerAddAllTasksTestCase.Subcontainer, false)] - public void TestCompilerAddAllTasks(TestCompilerAddAllTasksTestCase testCase,bool includeSubcontainers) + finally { - var aggregate4 = - new AggregateConfiguration(CatalogueRepository, testData.catalogue, "UnitTestAggregate4"); - aggregate4.CountSQL = null; - aggregate4.SaveToDatabase(); - new AggregateDimension(CatalogueRepository, testData.extractionInformations.Single(e => e.GetRuntimeName().Equals("chi")), aggregate4); - - var aggregate5 = - new AggregateConfiguration(CatalogueRepository, testData.catalogue, "UnitTestAggregate5"); - aggregate5.CountSQL = null; - aggregate5.SaveToDatabase(); - new AggregateDimension(CatalogueRepository, testData.extractionInformations.Single(e => e.GetRuntimeName().Equals("chi")), aggregate5); - - var joinable = new JoinableCohortAggregateConfiguration(CatalogueRepository,cohortIdentificationConfiguration, aggregate5); - - - try - { - //EXCEPT - //Aggregate 1 - //UNION - //Aggregate 3 - //Aggregate 4 - //Aggregate 2 - - //Joinable:aggregate5 (patient index table, the other Aggregates could JOIN to this) - - CohortCompiler compiler = new CohortCompiler(cohortIdentificationConfiguration); - rootcontainer.AddChild(aggregate1, 1); - rootcontainer.AddChild(container1); - container1.Order = 2; - container1.SaveToDatabase(); - - rootcontainer.AddChild(aggregate2, 3); - - container1.AddChild(aggregate3,1); - container1.AddChild(aggregate4, 2); - - cohortIdentificationConfiguration.RootCohortAggregateContainer_ID = rootcontainer.ID; - cohortIdentificationConfiguration.SaveToDatabase(); - - //The bit we are testing - List tasks; - switch (testCase) - { - case TestCompilerAddAllTasksTestCase.CIC: - tasks = compiler.AddAllTasks(includeSubcontainers); - Assert.AreEqual(joinable,tasks.OfType().Single().Joinable); //should be a single joinable - Assert.AreEqual(includeSubcontainers?7:6,tasks.Count); //all joinables, aggregates and root container - - break; - case TestCompilerAddAllTasksTestCase.RootContainer: - tasks = compiler.AddTasksRecursively(new ISqlParameter[0], cohortIdentificationConfiguration.RootCohortAggregateContainer, includeSubcontainers); - Assert.AreEqual(includeSubcontainers?6:5,tasks.Count); //all aggregates and root container (but not joinables) - break; - case TestCompilerAddAllTasksTestCase.Subcontainer: - tasks = 
compiler.AddTasksRecursively(new ISqlParameter[0], container1, includeSubcontainers); - Assert.AreEqual(includeSubcontainers?3:2,tasks.Count); //subcontainer and its aggregates - break; - default: - throw new ArgumentOutOfRangeException("testCase"); - } - - - rootcontainer.RemoveChild(aggregate1); - rootcontainer.RemoveChild(aggregate2); - - container1.RemoveChild(aggregate3); - container1.RemoveChild(aggregate4); - container1.MakeIntoAnOrphan(); - - } - finally - { - aggregate4.DeleteInDatabase(); - joinable.DeleteInDatabase(); - aggregate5.DeleteInDatabase(); - } + aggregate4.DeleteInDatabase(); + joinable.DeleteInDatabase(); + aggregate5.DeleteInDatabase(); } } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/CohortCreation/CohortContainerAndCloningTests.cs b/Rdmp.Core.Tests/CohortCreation/CohortContainerAndCloningTests.cs index 3b76e192e0..105dedd202 100644 --- a/Rdmp.Core.Tests/CohortCreation/CohortContainerAndCloningTests.cs +++ b/Rdmp.Core.Tests/CohortCreation/CohortContainerAndCloningTests.cs @@ -13,141 +13,141 @@ using Rdmp.Core.Curation.FilterImporting; using Rdmp.Core.Curation.FilterImporting.Construction; using Rdmp.Core.QueryBuilding; -using ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Checks; using Tests.Common; -namespace Rdmp.Core.Tests.CohortCreation +namespace Rdmp.Core.Tests.CohortCreation; + +public class CohortContainerAndCloningTests : CohortIdentificationTests { - public class CohortContainerAndCloningTests : CohortIdentificationTests + [Test] + public void AggregateOrdering_ExplicitSetting_CorrectOrder() { - [Test] - public void AggregateOrdering_ExplicitSetting_CorrectOrder() + try + { + //set the order so that 2 comes before 1 + rootcontainer.AddChild(aggregate2, 1); + rootcontainer.AddChild(aggregate1, 5); + } + catch (Exception e) { - try - { - //set the order so that 2 comes before 1 - rootcontainer.AddChild(aggregate2, 1); - rootcontainer.AddChild(aggregate1, 5); - } - catch (Exception e) - { - Console.WriteLine(e); - throw; - } - - try - { - Assert.AreEqual(1, aggregate2.Order); - Assert.AreEqual(5, aggregate1.Order); - - Assert.AreEqual(rootcontainer.GetAggregateConfigurations()[0].ID, aggregate2.ID); - Assert.AreEqual(rootcontainer.GetAggregateConfigurations()[1].ID, aggregate1.ID); - } - finally - { - rootcontainer.RemoveChild(aggregate1); - rootcontainer.RemoveChild(aggregate2); - } + Console.WriteLine(e); + throw; } - [Test] - public void CloneChild_NamingCorrectNewObject() + try { + Assert.AreEqual(1, aggregate2.Order); + Assert.AreEqual(5, aggregate1.Order); - //should not follow naming convention - aggregate1.Name = "fish"; - Assert.IsFalse(cohortIdentificationConfiguration.IsValidNamedConfiguration(aggregate1)); + Assert.AreEqual(rootcontainer.GetAggregateConfigurations()[0].ID, aggregate2.ID); + Assert.AreEqual(rootcontainer.GetAggregateConfigurations()[1].ID, aggregate1.ID); + } + finally + { + rootcontainer.RemoveChild(aggregate1); + rootcontainer.RemoveChild(aggregate2); + } + } - //add a clone using aggregate1 as a template - var clone = cohortIdentificationConfiguration.ImportAggregateConfigurationAsIdentifierList(aggregate1,null); - //add the clone - rootcontainer.AddChild(clone, 0); + [Test] + public void CloneChild_NamingCorrectNewObject() + { + //should not follow naming convention + aggregate1.Name = "fish"; + Assert.IsFalse(cohortIdentificationConfiguration.IsValidNamedConfiguration(aggregate1)); - try - { - //there should be 1 child - AggregateConfiguration[] aggregateConfigurations = 
rootcontainer.GetAggregateConfigurations(); - Assert.AreEqual(1, aggregateConfigurations.Length); + //add a clone using aggregate1 as a template + var clone = cohortIdentificationConfiguration.ImportAggregateConfigurationAsIdentifierList(aggregate1, null); + //add the clone + rootcontainer.AddChild(clone, 0); - //child should follow naming convention - Assert.IsTrue(cohortIdentificationConfiguration.IsValidNamedConfiguration(aggregateConfigurations[0])); + try + { + //there should be 1 child + var aggregateConfigurations = rootcontainer.GetAggregateConfigurations(); + Assert.AreEqual(1, aggregateConfigurations.Length); - //clone should have a different ID - also it was created after so should be higher ID - Assert.Greater(aggregateConfigurations[0].ID, aggregate1.ID); + //child should follow naming convention + Assert.IsTrue(cohortIdentificationConfiguration.IsValidNamedConfiguration(aggregateConfigurations[0])); - } - finally - { - aggregate1.RevertToDatabaseState(); + //clone should have a different ID - also it was created after so should be higher ID + Assert.Greater(aggregateConfigurations[0].ID, aggregate1.ID); + } + finally + { + aggregate1.RevertToDatabaseState(); - rootcontainer.RemoveChild(clone); + rootcontainer.RemoveChild(clone); - if (clone.RootFilterContainer != null) - clone.RootFilterContainer.DeleteInDatabase(); + clone.RootFilterContainer?.DeleteInDatabase(); - clone.DeleteInDatabase(); - } + clone.DeleteInDatabase(); } - [Test] - public void CloneChildWithFilter_IDsDifferent() - { - //aggregate 1 is now a normal non cohort aggregate - var container = new AggregateFilterContainer(CatalogueRepository, FilterContainerOperation.OR); - aggregate1.CountSQL = "count(*)"; - aggregate1.RootFilterContainer_ID = container.ID; - aggregate1.SaveToDatabase(); + } - //with filters - var filter = new AggregateFilter(CatalogueRepository, "MyFilter", container); - filter.WhereSQL = "sex=@sex"; + [Test] + public void CloneChildWithFilter_IDsDifferent() + { + //aggregate 1 is now a normal non cohort aggregate + var container = new AggregateFilterContainer(CatalogueRepository, FilterContainerOperation.OR); + aggregate1.CountSQL = "count(*)"; + aggregate1.RootFilterContainer_ID = container.ID; + aggregate1.SaveToDatabase(); + + //with filters + var filter = new AggregateFilter(CatalogueRepository, "MyFilter", container) + { + WhereSQL = "sex=@sex" + }; - //and parameters - new ParameterCreator(new AggregateFilterFactory(CatalogueRepository), null, null).CreateAll(filter, null); - filter.SaveToDatabase(); + //and parameters + new ParameterCreator(new AggregateFilterFactory(CatalogueRepository), null, null).CreateAll(filter, null); + filter.SaveToDatabase(); - var param = (AggregateFilterParameter)filter.GetAllParameters().Single(); - param.Value = "'M'"; - param.SaveToDatabase(); + var param = (AggregateFilterParameter)filter.GetAllParameters().Single(); + param.Value = "'M'"; + param.SaveToDatabase(); - //we are importing this graph aggregate as a new cohort identification aggregate - var clone = cohortIdentificationConfiguration.ImportAggregateConfigurationAsIdentifierList(aggregate1, null); + //we are importing this graph aggregate as a new cohort identification aggregate + var clone = cohortIdentificationConfiguration.ImportAggregateConfigurationAsIdentifierList(aggregate1, null); - //since its a cohort aggregate it should be identical to the origin Aggregate except it has a different ID and no count SQL - Assert.AreEqual(clone.CountSQL,null); + //since its a cohort aggregate it should be 
identical to the origin Aggregate except it has a different ID and no count SQL + Assert.AreEqual(clone.CountSQL, null); - //get the original sql - var aggregateSql = aggregate1.GetQueryBuilder().SQL; + //get the original sql + var aggregateSql = aggregate1.GetQueryBuilder().SQL; - try - { - Assert.AreNotEqual(clone.ID, aggregate1.ID); - Assert.AreNotEqual(clone.RootFilterContainer_ID, aggregate1.RootFilterContainer_ID); + try + { + Assert.AreNotEqual(clone.ID, aggregate1.ID); + Assert.AreNotEqual(clone.RootFilterContainer_ID, aggregate1.RootFilterContainer_ID); - var cloneContainer = clone.RootFilterContainer; - var cloneFilter = cloneContainer.GetFilters().Single(); + var cloneContainer = clone.RootFilterContainer; + var cloneFilter = cloneContainer.GetFilters().Single(); - Assert.AreNotEqual(cloneContainer.ID, container.ID); - Assert.AreNotEqual(cloneFilter.ID, filter.ID); + Assert.AreNotEqual(cloneContainer.ID, container.ID); + Assert.AreNotEqual(cloneFilter.ID, filter.ID); - var cloneParameter = (AggregateFilterParameter)cloneFilter.GetAllParameters().Single(); - Assert.AreNotEqual(cloneParameter.ID, param.ID); + var cloneParameter = (AggregateFilterParameter)cloneFilter.GetAllParameters().Single(); + Assert.AreNotEqual(cloneParameter.ID, param.ID); - //it has a different ID and is part of an aggregate filter container (It is presumed to be involved with cohort identification cohortIdentificationConfiguration) which means it will be called cic_X_ - string cohortAggregateSql = new CohortQueryBuilder(clone,null,null).SQL; + //it has a different ID and is part of an aggregate filter container (It is presumed to be involved with cohort identification cohortIdentificationConfiguration) which means it will be called cic_X_ + var cohortAggregateSql = new CohortQueryBuilder(clone, null, null).SQL; //the basic aggregate has the filter, parameter and group by - Assert.AreEqual(CollapseWhitespace( + Assert.AreEqual(CollapseWhitespace( string.Format( -@"DECLARE @sex AS varchar(50); + @"DECLARE @sex AS varchar(50); SET @sex='M'; /*cic_{0}_UnitTestAggregate1*/ SELECT -["+TestDatabaseNames.Prefix+@"ScratchArea].[dbo].[BulkData].[chi], +[" + TestDatabaseNames.Prefix + @"ScratchArea].[dbo].[BulkData].[chi], count(*) FROM -["+TestDatabaseNames.Prefix+@"ScratchArea].[dbo].[BulkData] +[" + TestDatabaseNames.Prefix + @"ScratchArea].[dbo].[BulkData] WHERE ( /*MyFilter*/ @@ -155,9 +155,10 @@ public void CloneChildWithFilter_IDsDifferent() ) group by -["+TestDatabaseNames.Prefix+@"ScratchArea].[dbo].[BulkData].[chi] +[" + TestDatabaseNames.Prefix + @"ScratchArea].[dbo].[BulkData].[chi] order by -["+TestDatabaseNames.Prefix+@"ScratchArea].[dbo].[BulkData].[chi]",cohortIdentificationConfiguration.ID)),CollapseWhitespace(aggregateSql)); +[" + TestDatabaseNames.Prefix + @"ScratchArea].[dbo].[BulkData].[chi]", cohortIdentificationConfiguration.ID)), + CollapseWhitespace(aggregateSql)); //the expected differences are //1. should not have the count @@ -165,15 +166,15 @@ order by //3. should be marked with the cic comment with the ID matching the CohortIdentificationConfiguration.ID //4. 
should have a distinct on the identifier column - Assert.AreEqual( -@"DECLARE @sex AS varchar(50); + Assert.AreEqual( + $@"DECLARE @sex AS varchar(50); SET @sex='M'; -/*cic_"+cohortIdentificationConfiguration.ID+@"_UnitTestAggregate1*/ +/*cic_{cohortIdentificationConfiguration.ID}_UnitTestAggregate1*/ SELECT distinct -["+TestDatabaseNames.Prefix+@"ScratchArea].[dbo].[BulkData].[chi] +[{TestDatabaseNames.Prefix}ScratchArea].[dbo].[BulkData].[chi] FROM -["+TestDatabaseNames.Prefix+@"ScratchArea].[dbo].[BulkData] +[{TestDatabaseNames.Prefix}ScratchArea].[dbo].[BulkData] WHERE ( /*MyFilter*/ @@ -181,87 +182,87 @@ order by )", cohortAggregateSql); - clone.RootFilterContainer.DeleteInDatabase(); - container.DeleteInDatabase(); - } - finally - { - clone.DeleteInDatabase(); - } + clone.RootFilterContainer.DeleteInDatabase(); + container.DeleteInDatabase(); } - - - [Test] - public void CohortIdentificationConfiguration_CloneEntirely() + finally { - //set the order so that 2 comes before 1 - rootcontainer.AddChild(aggregate1, 5); + clone.DeleteInDatabase(); + } + } - rootcontainer.AddChild(container1); - container1.AddChild(aggregate2,1); - container1.AddChild(aggregate3,2); + [Test] + public void CohortIdentificationConfiguration_CloneEntirely() + { + //set the order so that 2 comes before 1 + rootcontainer.AddChild(aggregate1, 5); - //create a filter too - var container = new AggregateFilterContainer(CatalogueRepository, FilterContainerOperation.OR); + rootcontainer.AddChild(container1); + container1.AddChild(aggregate2, 1); + container1.AddChild(aggregate3, 2); - aggregate1.RootFilterContainer_ID = container.ID; - aggregate1.SaveToDatabase(); - var filter = new AggregateFilter(CatalogueRepository, "MyFilter", container); - filter.WhereSQL = "sex=@sex"; - new ParameterCreator(new AggregateFilterFactory(CatalogueRepository), null, null).CreateAll(filter,null); - filter.SaveToDatabase(); + //create a filter too + var container = new AggregateFilterContainer(CatalogueRepository, FilterContainerOperation.OR); - //with a parameter too - var param = (AggregateFilterParameter)filter.GetAllParameters().Single(); - param.Value = "'M'"; - param.SaveToDatabase(); + aggregate1.RootFilterContainer_ID = container.ID; + aggregate1.SaveToDatabase(); - cohortIdentificationConfiguration.RootCohortAggregateContainer_ID = rootcontainer.ID; - cohortIdentificationConfiguration.SaveToDatabase(); + var filter = new AggregateFilter(CatalogueRepository, "MyFilter", container) + { + WhereSQL = "sex=@sex" + }; + new ParameterCreator(new AggregateFilterFactory(CatalogueRepository), null, null).CreateAll(filter, null); + filter.SaveToDatabase(); - try - { - var clone = cohortIdentificationConfiguration.CreateClone(new ThrowImmediatelyCheckNotifier()); + //with a parameter too + var param = (AggregateFilterParameter)filter.GetAllParameters().Single(); + param.Value = "'M'"; + param.SaveToDatabase(); - //the objects should be different - Assert.AreNotEqual(cohortIdentificationConfiguration.ID, clone.ID); - Assert.IsTrue(clone.Name.EndsWith("(Clone)")); + cohortIdentificationConfiguration.RootCohortAggregateContainer_ID = rootcontainer.ID; + cohortIdentificationConfiguration.SaveToDatabase(); - Assert.AreNotEqual(clone.RootCohortAggregateContainer_ID, cohortIdentificationConfiguration.RootCohortAggregateContainer_ID); - Assert.IsNotNull(clone.RootCohortAggregateContainer_ID); + try + { + var clone = cohortIdentificationConfiguration.CreateClone(ThrowImmediatelyCheckNotifier.Quiet); - var beforeSQL = new 
CohortQueryBuilder(cohortIdentificationConfiguration,null).SQL; - var cloneSQL = new CohortQueryBuilder(clone,null).SQL; + //the objects should be different + Assert.AreNotEqual(cohortIdentificationConfiguration.ID, clone.ID); + Assert.IsTrue(clone.Name.EndsWith("(Clone)")); - beforeSQL = Regex.Replace(beforeSQL, "cic_[0-9]+_", ""); - cloneSQL = Regex.Replace(cloneSQL, "cic_[0-9]+_", ""); + Assert.AreNotEqual(clone.RootCohortAggregateContainer_ID, + cohortIdentificationConfiguration.RootCohortAggregateContainer_ID); + Assert.IsNotNull(clone.RootCohortAggregateContainer_ID); - //the SQL should be the same for them - Assert.AreEqual(beforeSQL,cloneSQL); + var beforeSQL = new CohortQueryBuilder(cohortIdentificationConfiguration, null).SQL; + var cloneSQL = new CohortQueryBuilder(clone, null).SQL; - var containerClone = clone.RootCohortAggregateContainer.GetAllAggregateConfigurationsRecursively() - .Where(a => a.RootFilterContainer_ID != null) - .Select(ag => ag.RootFilterContainer).Single(); + beforeSQL = Regex.Replace(beforeSQL, "cic_[0-9]+_", ""); + cloneSQL = Regex.Replace(cloneSQL, "cic_[0-9]+_", ""); - Assert.AreNotEqual(container, containerClone); - - //cleanup phase - clone.DeleteInDatabase(); - containerClone.DeleteInDatabase(); + //the SQL should be the same for them + Assert.AreEqual(beforeSQL, cloneSQL); - } - finally - { - rootcontainer.RemoveChild(aggregate1); - container1.RemoveChild(aggregate2); - container1.RemoveChild(aggregate3); + var containerClone = clone.RootCohortAggregateContainer.GetAllAggregateConfigurationsRecursively() + .Where(a => a.RootFilterContainer_ID != null) + .Select(ag => ag.RootFilterContainer).Single(); - filter.DeleteInDatabase(); - container.DeleteInDatabase(); - } + Assert.AreNotEqual(container, containerClone); + + //cleanup phase + clone.DeleteInDatabase(); + containerClone.DeleteInDatabase(); } + finally + { + rootcontainer.RemoveChild(aggregate1); + container1.RemoveChild(aggregate2); + container1.RemoveChild(aggregate3); + filter.DeleteInDatabase(); + container.DeleteInDatabase(); + } } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/CohortCreation/CohortIdentificationConfigurationMergerTests.cs b/Rdmp.Core.Tests/CohortCreation/CohortIdentificationConfigurationMergerTests.cs index 60ae3d8763..73991efe6a 100644 --- a/Rdmp.Core.Tests/CohortCreation/CohortIdentificationConfigurationMergerTests.cs +++ b/Rdmp.Core.Tests/CohortCreation/CohortIdentificationConfigurationMergerTests.cs @@ -6,158 +6,167 @@ using NUnit.Framework; using Rdmp.Core.Curation.Data.Cohort; -using System; -using System.Collections.Generic; using System.Linq; -using System.Text; -using Tests.Common; -namespace Rdmp.Core.Tests.CohortCreation +namespace Rdmp.Core.Tests.CohortCreation; + +internal class CohortIdentificationConfigurationMergerTests : CohortIdentificationTests { - class CohortIdentificationConfigurationMergerTests : CohortIdentificationTests + [Test] + public void TestSimpleMerge() { - [Test] - public void TestSimpleMerge() - { - var merger = new CohortIdentificationConfigurationMerger(CatalogueRepository); + var merger = new CohortIdentificationConfigurationMerger(CatalogueRepository); + + var cic1 = new CohortIdentificationConfiguration(CatalogueRepository, "cic1"); + var cic2 = new CohortIdentificationConfiguration(CatalogueRepository, "cic2"); - var cic1 = new CohortIdentificationConfiguration(CatalogueRepository,"cic1"); - var cic2 = new CohortIdentificationConfiguration(CatalogueRepository,"cic2"); + cic1.CreateRootContainerIfNotExists(); + var 
root1 = cic1.RootCohortAggregateContainer; + root1.Name = "Root1"; + root1.SaveToDatabase(); + root1.AddChild(aggregate1, 1); - cic1.CreateRootContainerIfNotExists(); - var root1 = cic1.RootCohortAggregateContainer; - root1.Name = "Root1"; - root1.SaveToDatabase(); - root1.AddChild(aggregate1,1); + cic2.CreateRootContainerIfNotExists(); + var root2 = cic2.RootCohortAggregateContainer; + root2.Name = "Root2"; + root2.SaveToDatabase(); + root2.AddChild(aggregate2, 2); - cic2.CreateRootContainerIfNotExists(); - var root2 = cic2.RootCohortAggregateContainer; - root2.Name = "Root2"; - root2.SaveToDatabase(); - root2.AddChild(aggregate2,2); + Assert.AreEqual(1, cic1.RootCohortAggregateContainer.GetAllAggregateConfigurationsRecursively().Count); + Assert.AreEqual(1, cic2.RootCohortAggregateContainer.GetAllAggregateConfigurationsRecursively().Count); - Assert.AreEqual(1,cic1.RootCohortAggregateContainer.GetAllAggregateConfigurationsRecursively().Count); - Assert.AreEqual(1,cic2.RootCohortAggregateContainer.GetAllAggregateConfigurationsRecursively().Count); - - int numberOfCicsBefore = CatalogueRepository.GetAllObjects().Count(); + var numberOfCicsBefore = CatalogueRepository.GetAllObjects().Length; - var result = merger.Merge(new []{cic1,cic2 },SetOperation.UNION); + var result = merger.Merge(new[] { cic1, cic2 }, SetOperation.UNION); - //original should still be intact - Assert.AreEqual(1,cic1.RootCohortAggregateContainer.GetAllAggregateConfigurationsRecursively().Count); - Assert.AreEqual(1,cic2.RootCohortAggregateContainer.GetAllAggregateConfigurationsRecursively().Count); + //original should still be intact + Assert.AreEqual(1, cic1.RootCohortAggregateContainer.GetAllAggregateConfigurationsRecursively().Count); + Assert.AreEqual(1, cic2.RootCohortAggregateContainer.GetAllAggregateConfigurationsRecursively().Count); - //the new merged set should contain both - Assert.AreEqual(2,result.RootCohortAggregateContainer.GetAllAggregateConfigurationsRecursively().Count); + //the new merged set should contain both + Assert.AreEqual(2, result.RootCohortAggregateContainer.GetAllAggregateConfigurationsRecursively().Count); - Assert.IsFalse(result.RootCohortAggregateContainer.GetAllAggregateConfigurationsRecursively().Any(c=>c.Equals(aggregate1)),"Expected the merge to include clone aggregates not the originals! (aggregate1)"); - Assert.IsFalse(result.RootCohortAggregateContainer.GetAllAggregateConfigurationsRecursively().Any(c=>c.Equals(aggregate2)),"Expected the merge to include clone aggregates not the originals! (aggregate2)"); + Assert.IsFalse( + result.RootCohortAggregateContainer.GetAllAggregateConfigurationsRecursively() + .Any(c => c.Equals(aggregate1)), + "Expected the merge to include clone aggregates not the originals! (aggregate1)"); + Assert.IsFalse( + result.RootCohortAggregateContainer.GetAllAggregateConfigurationsRecursively() + .Any(c => c.Equals(aggregate2)), + "Expected the merge to include clone aggregates not the originals! 
(aggregate2)"); - // Now should be a new one - Assert.AreEqual(numberOfCicsBefore + 1,CatalogueRepository.GetAllObjects().Count()); + // Now should be a new one + Assert.AreEqual(numberOfCicsBefore + 1, + CatalogueRepository.GetAllObjects().Length); - var newCicId = result.ID; + var newCicId = result.ID; - // Should have the root containers of the old configs - var newRoot2 = result.RootCohortAggregateContainer.GetSubContainers().Single(c => c.Name.Equals("Root2")); - var newRoot1 = result.RootCohortAggregateContainer.GetSubContainers().Single(c => c.Name.Equals("Root1")); - Assert.AreEqual(2, result.RootCohortAggregateContainer.GetSubContainers().Length); + // Should have the root containers of the old configs + var newRoot2 = result.RootCohortAggregateContainer.GetSubContainers().Single(c => c.Name.Equals("Root2")); + var newRoot1 = result.RootCohortAggregateContainer.GetSubContainers().Single(c => c.Name.Equals("Root1")); + Assert.AreEqual(2, result.RootCohortAggregateContainer.GetSubContainers().Length); - // And should have - Assert.AreEqual($"cic_{newCicId}_UnitTestAggregate2", newRoot2.GetAggregateConfigurations()[0].Name); - Assert.AreEqual($"cic_{newCicId}_UnitTestAggregate1",newRoot1.GetAggregateConfigurations()[0].Name); + // And should have + Assert.AreEqual($"cic_{newCicId}_UnitTestAggregate2", newRoot2.GetAggregateConfigurations()[0].Name); + Assert.AreEqual($"cic_{newCicId}_UnitTestAggregate1", newRoot1.GetAggregateConfigurations()[0].Name); - Assert.AreEqual($"Merged cics (IDs {cic1.ID},{cic2.ID})",result.Name); + Assert.AreEqual($"Merged cics (IDs {cic1.ID},{cic2.ID})", result.Name); - Assert.IsTrue(cic1.Exists()); - Assert.IsTrue(cic2.Exists()); + Assert.IsTrue(cic1.Exists()); + Assert.IsTrue(cic2.Exists()); + } - } + [Test] + public void TestSimpleUnMerge() + { + var merger = new CohortIdentificationConfigurationMerger(CatalogueRepository); - [Test] - public void TestSimpleUnMerge() - { - var merger = new CohortIdentificationConfigurationMerger(CatalogueRepository); + var cicInput = new CohortIdentificationConfiguration(CatalogueRepository, "cic99"); - var cicInput = new CohortIdentificationConfiguration(CatalogueRepository,"cic99"); + cicInput.CreateRootContainerIfNotExists(); + var root = cicInput.RootCohortAggregateContainer; + root.Name = "Root"; + root.SaveToDatabase(); - cicInput.CreateRootContainerIfNotExists(); - var root = cicInput.RootCohortAggregateContainer; - root.Name = "Root"; - root.SaveToDatabase(); + var sub1 = new CohortAggregateContainer(CatalogueRepository, SetOperation.INTERSECT) + { + Order = 1 + }; + sub1.SaveToDatabase(); + + var sub2 = new CohortAggregateContainer(CatalogueRepository, SetOperation.EXCEPT) + { + Order = 2 + }; + sub2.SaveToDatabase(); - var sub1 = new CohortAggregateContainer(CatalogueRepository,SetOperation.INTERSECT); - sub1.Order = 1; - sub1.SaveToDatabase(); + root.AddChild(sub1); + root.AddChild(sub2); - var sub2 = new CohortAggregateContainer(CatalogueRepository,SetOperation.EXCEPT); - sub2.Order = 2; - sub2.SaveToDatabase(); + sub1.AddChild(aggregate1, 0); + sub2.AddChild(aggregate2, 0); + sub2.AddChild(aggregate3, 1); - root.AddChild(sub1); - root.AddChild(sub2); + var numberOfCicsBefore = CatalogueRepository.GetAllObjects().Length; - sub1.AddChild(aggregate1,0); - sub2.AddChild(aggregate2,0); - sub2.AddChild(aggregate3,1); - - int numberOfCicsBefore = CatalogueRepository.GetAllObjects().Count(); + var results = merger.UnMerge(root); - var results = merger.UnMerge(root); + // Now should be two new ones + 
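// (UnMerge appears to turn each immediate subcontainer of the supplied root into its own new CohortIdentificationConfiguration: here the INTERSECT subcontainer holding one aggregate and the EXCEPT subcontainer holding two, each populated with clones rather than the original aggregates, hence two additional cics overall, as the assertions below check.)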
Assert.AreEqual(numberOfCicsBefore + 2, + CatalogueRepository.GetAllObjects().Length); + Assert.AreEqual(2, results.Length); - // Now should be two new ones - Assert.AreEqual(numberOfCicsBefore + 2,CatalogueRepository.GetAllObjects().Count()); - Assert.AreEqual(2,results.Length); + Assert.AreEqual(SetOperation.INTERSECT, results[0].RootCohortAggregateContainer.Operation); + Assert.AreEqual(1, results[0].RootCohortAggregateContainer.GetAllAggregateConfigurationsRecursively().Count); - Assert.AreEqual(SetOperation.INTERSECT,results[0].RootCohortAggregateContainer.Operation); - Assert.AreEqual(1,results[0].RootCohortAggregateContainer.GetAllAggregateConfigurationsRecursively().Count); - - Assert.IsFalse(results[0].RootCohortAggregateContainer.GetAllAggregateConfigurationsRecursively().Intersect(new []{ aggregate1,aggregate2,aggregate3}).Any(),"Expected new aggregates to be new!"); + Assert.IsFalse( + results[0].RootCohortAggregateContainer.GetAllAggregateConfigurationsRecursively() + .Intersect(new[] { aggregate1, aggregate2, aggregate3 }).Any(), "Expected new aggregates to be new!"); - Assert.AreEqual(SetOperation.EXCEPT,results[1].RootCohortAggregateContainer.Operation); - Assert.AreEqual(2,results[1].RootCohortAggregateContainer.GetAllAggregateConfigurationsRecursively().Count); + Assert.AreEqual(SetOperation.EXCEPT, results[1].RootCohortAggregateContainer.Operation); + Assert.AreEqual(2, results[1].RootCohortAggregateContainer.GetAllAggregateConfigurationsRecursively().Count); - Assert.IsFalse(results[1].RootCohortAggregateContainer.GetAllAggregateConfigurationsRecursively().Intersect(new []{ aggregate1,aggregate2,aggregate3}).Any(),"Expected new aggregates to be new!"); + Assert.IsFalse( + results[1].RootCohortAggregateContainer.GetAllAggregateConfigurationsRecursively() + .Intersect(new[] { aggregate1, aggregate2, aggregate3 }).Any(), "Expected new aggregates to be new!"); + } - } + [Test] + public void TestSimpleImportCic() + { + var merger = new CohortIdentificationConfigurationMerger(CatalogueRepository); - [Test] - public void TestSimpleImportCic() - { - var merger = new CohortIdentificationConfigurationMerger(CatalogueRepository); + var cic1 = new CohortIdentificationConfiguration(CatalogueRepository, "cic1"); + var cic2 = new CohortIdentificationConfiguration(CatalogueRepository, "cic2"); - var cic1 = new CohortIdentificationConfiguration(CatalogueRepository,"cic1"); - var cic2 = new CohortIdentificationConfiguration(CatalogueRepository,"cic2"); + cic1.CreateRootContainerIfNotExists(); + var root1 = cic1.RootCohortAggregateContainer; + root1.Name = "Root1"; + root1.SaveToDatabase(); + root1.AddChild(aggregate1, 1); - cic1.CreateRootContainerIfNotExists(); - var root1 = cic1.RootCohortAggregateContainer; - root1.Name = "Root1"; - root1.SaveToDatabase(); - root1.AddChild(aggregate1,1); + cic2.CreateRootContainerIfNotExists(); + var root2 = cic2.RootCohortAggregateContainer; + root2.Name = "Root2"; + root2.SaveToDatabase(); + root2.AddChild(aggregate2, 2); - cic2.CreateRootContainerIfNotExists(); - var root2 = cic2.RootCohortAggregateContainer; - root2.Name = "Root2"; - root2.SaveToDatabase(); - root2.AddChild(aggregate2,2); + Assert.AreEqual(1, cic1.RootCohortAggregateContainer.GetAllAggregateConfigurationsRecursively().Count); + Assert.AreEqual(1, cic2.RootCohortAggregateContainer.GetAllAggregateConfigurationsRecursively().Count); - Assert.AreEqual(1,cic1.RootCohortAggregateContainer.GetAllAggregateConfigurationsRecursively().Count); - 
Assert.AreEqual(1,cic2.RootCohortAggregateContainer.GetAllAggregateConfigurationsRecursively().Count); - - int numberOfCicsBefore = CatalogueRepository.GetAllObjects().Count(); + var numberOfCicsBefore = CatalogueRepository.GetAllObjects().Length; - //import 2 into 1 - merger.Import(new []{cic2 },cic1.RootCohortAggregateContainer); + //import 2 into 1 + merger.Import(new[] { cic2 }, cic1.RootCohortAggregateContainer); - //no new cics - Assert.AreEqual(numberOfCicsBefore,CatalogueRepository.GetAllObjects().Count()); + //no new cics + Assert.AreEqual(numberOfCicsBefore, + CatalogueRepository.GetAllObjects().Length); - // cic 1 should now have both aggregates - Assert.AreEqual(2,cic1.RootCohortAggregateContainer.GetAllAggregateConfigurationsRecursively().Count); - - Assert.AreEqual("Root1",cic1.RootCohortAggregateContainer.Name); - Assert.AreEqual("Root2",cic1.RootCohortAggregateContainer.GetSubContainers()[0].Name); + // cic 1 should now have both aggregates + Assert.AreEqual(2, cic1.RootCohortAggregateContainer.GetAllAggregateConfigurationsRecursively().Count); - } + Assert.AreEqual("Root1", cic1.RootCohortAggregateContainer.Name); + Assert.AreEqual("Root2", cic1.RootCohortAggregateContainer.GetSubContainers()[0].Name); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/CohortCreation/CohortIdentificationTests.cs b/Rdmp.Core.Tests/CohortCreation/CohortIdentificationTests.cs index d691fec73f..f5f6702618 100644 --- a/Rdmp.Core.Tests/CohortCreation/CohortIdentificationTests.cs +++ b/Rdmp.Core.Tests/CohortCreation/CohortIdentificationTests.cs @@ -10,76 +10,83 @@ using Rdmp.Core.Curation.Data.Aggregation; using Rdmp.Core.Curation.Data.Cohort; using Rdmp.Core.Repositories; -using Tests.Common; using Tests.Common.Scenarios; -namespace Rdmp.Core.Tests.CohortCreation +namespace Rdmp.Core.Tests.CohortCreation; + +public class CohortIdentificationTests : FromToDatabaseTests { - public class CohortIdentificationTests : FromToDatabaseTests + public DiscoveredDatabase Database { get; private set; } + + protected BulkTestsData testData; + protected AggregateConfiguration aggregate1; + protected AggregateConfiguration aggregate2; + protected AggregateConfiguration aggregate3; + protected CohortIdentificationConfiguration cohortIdentificationConfiguration; + protected CohortAggregateContainer rootcontainer; + protected CohortAggregateContainer container1; + + [SetUp] + protected override void SetUp() { - public DiscoveredDatabase Database { get; private set; } - - protected BulkTestsData testData; - protected AggregateConfiguration aggregate1; - protected AggregateConfiguration aggregate2; - protected AggregateConfiguration aggregate3; - protected CohortIdentificationConfiguration cohortIdentificationConfiguration; - protected CohortAggregateContainer rootcontainer; - protected CohortAggregateContainer container1; + base.SetUp(); - [SetUp] - protected override void SetUp() - { - base.SetUp(); + SetupTestData(CatalogueRepository); + } - SetupTestData(CatalogueRepository); - } - - public void SetupTestData(ICatalogueRepository repository) - { - BlitzMainDataTables(); + public void SetupTestData(ICatalogueRepository repository) + { + BlitzMainDataTables(); - Database = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer); + Database = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer); - testData = new BulkTestsData(repository, Database, 100); - testData.SetupTestData(); + testData = new BulkTestsData(repository, Database, 100); + testData.SetupTestData(); - testData.ImportAsCatalogue(); 
+ testData.ImportAsCatalogue(); - aggregate1 = - new AggregateConfiguration(repository, testData.catalogue, "UnitTestAggregate1"); - aggregate1.CountSQL = null; - aggregate1.SaveToDatabase(); + aggregate1 = + new AggregateConfiguration(repository, testData.catalogue, "UnitTestAggregate1") + { + CountSQL = null + }; + aggregate1.SaveToDatabase(); - new AggregateDimension(repository, testData.extractionInformations.Single(e => e.GetRuntimeName().Equals("chi")), aggregate1); + new AggregateDimension(repository, + testData.extractionInformations.Single(e => e.GetRuntimeName().Equals("chi")), aggregate1); - aggregate2 = - new AggregateConfiguration(repository, testData.catalogue, "UnitTestAggregate2"); + aggregate2 = + new AggregateConfiguration(repository, testData.catalogue, "UnitTestAggregate2") + { + CountSQL = null + }; - aggregate2.CountSQL = null; - aggregate2.SaveToDatabase(); + aggregate2.SaveToDatabase(); - new AggregateDimension(repository, testData.extractionInformations.Single(e => e.GetRuntimeName().Equals("chi")), aggregate2); + new AggregateDimension(repository, + testData.extractionInformations.Single(e => e.GetRuntimeName().Equals("chi")), aggregate2); - aggregate3 = - new AggregateConfiguration(repository, testData.catalogue, "UnitTestAggregate3"); - aggregate3.CountSQL = null; - aggregate3.SaveToDatabase(); + aggregate3 = + new AggregateConfiguration(repository, testData.catalogue, "UnitTestAggregate3") + { + CountSQL = null + }; + aggregate3.SaveToDatabase(); - new AggregateDimension(repository, testData.extractionInformations.Single(e => e.GetRuntimeName().Equals("chi")), aggregate3); + new AggregateDimension(repository, + testData.extractionInformations.Single(e => e.GetRuntimeName().Equals("chi")), aggregate3); - cohortIdentificationConfiguration = new CohortIdentificationConfiguration(repository, "UnitTestIdentification"); + cohortIdentificationConfiguration = new CohortIdentificationConfiguration(repository, "UnitTestIdentification"); - rootcontainer = new CohortAggregateContainer(repository, SetOperation.EXCEPT); - container1 = new CohortAggregateContainer(repository, SetOperation.UNION); + rootcontainer = new CohortAggregateContainer(repository, SetOperation.EXCEPT); + container1 = new CohortAggregateContainer(repository, SetOperation.UNION); - cohortIdentificationConfiguration.RootCohortAggregateContainer_ID = rootcontainer.ID; - cohortIdentificationConfiguration.SaveToDatabase(); + cohortIdentificationConfiguration.RootCohortAggregateContainer_ID = rootcontainer.ID; + cohortIdentificationConfiguration.SaveToDatabase(); - cohortIdentificationConfiguration.EnsureNamingConvention(aggregate1); - cohortIdentificationConfiguration.EnsureNamingConvention(aggregate2); - cohortIdentificationConfiguration.EnsureNamingConvention(aggregate3); - } + cohortIdentificationConfiguration.EnsureNamingConvention(aggregate1); + cohortIdentificationConfiguration.EnsureNamingConvention(aggregate2); + cohortIdentificationConfiguration.EnsureNamingConvention(aggregate3); } } \ No newline at end of file diff --git a/Rdmp.Core.Tests/CohortCreation/CohortMandatoryFilterImportingTests.cs b/Rdmp.Core.Tests/CohortCreation/CohortMandatoryFilterImportingTests.cs index 5c9c050e05..73c0369a1f 100644 --- a/Rdmp.Core.Tests/CohortCreation/CohortMandatoryFilterImportingTests.cs +++ b/Rdmp.Core.Tests/CohortCreation/CohortMandatoryFilterImportingTests.cs @@ -5,226 +5,238 @@ // You should have received a copy of the GNU General Public License along with RDMP. If not, see . 
using System.Linq; -using MapsDirectlyToDatabaseTable.Revertable; using NUnit.Framework; using Rdmp.Core.Curation.Data; using Rdmp.Core.Curation.Data.Aggregation; using Rdmp.Core.Curation.Data.Cohort; using Rdmp.Core.Curation.FilterImporting; using Rdmp.Core.Curation.FilterImporting.Construction; +using Rdmp.Core.MapsDirectlyToDatabaseTable.Revertable; -namespace Rdmp.Core.Tests.CohortCreation +namespace Rdmp.Core.Tests.CohortCreation; + +public class CohortMandatoryFilterImportingTests : CohortIdentificationTests { - public class CohortMandatoryFilterImportingTests : CohortIdentificationTests + [Test] + public void NoMandatoryFilters() { - [Test] - public void NoMandatoryFilters() - { - var importedAggregate = cohortIdentificationConfiguration.CreateNewEmptyConfigurationForCatalogue(testData.catalogue, null); - var importedAggregateFilterContainer = importedAggregate.RootFilterContainer; + var importedAggregate = + cohortIdentificationConfiguration.CreateNewEmptyConfigurationForCatalogue(testData.catalogue, null); + var importedAggregateFilterContainer = importedAggregate.RootFilterContainer; - //Must have a root container - Assert.IsNull(importedAggregateFilterContainer); + //Must have a root container + Assert.IsNull(importedAggregateFilterContainer); - importedAggregate.DeleteInDatabase(); - } + importedAggregate.DeleteInDatabase(); + } - [Test] - public void ImportCatalogueWithMandatoryFilter() + [Test] + public void ImportCatalogueWithMandatoryFilter() + { + var filter = new ExtractionFilter(CatalogueRepository, "MyMandatoryFilter", testData.extractionInformations[0]) { - var filter = new ExtractionFilter(CatalogueRepository, "MyMandatoryFilter", testData.extractionInformations[0]); - filter.IsMandatory = true; - filter.WhereSQL = "There Be Dragons"; - filter.SaveToDatabase(); - - //ensure that it is picked SetUp - var mandatoryFilters = testData.catalogue.GetAllMandatoryFilters(); - Assert.AreEqual(1, mandatoryFilters.Length); - Assert.AreEqual(filter, mandatoryFilters[0]); - - AggregateConfiguration importedAggregate = null; - - try - { - - importedAggregate = cohortIdentificationConfiguration.CreateNewEmptyConfigurationForCatalogue(testData.catalogue,null); + IsMandatory = true, + WhereSQL = "There Be Dragons" + }; + filter.SaveToDatabase(); - Assert.AreEqual(ChangeDescription.NoChanges, importedAggregate.HasLocalChanges().Evaluation); + //ensure that it is picked SetUp + var mandatoryFilters = testData.catalogue.GetAllMandatoryFilters(); + Assert.AreEqual(1, mandatoryFilters.Length); + Assert.AreEqual(filter, mandatoryFilters[0]); - var importedAggregateFilterContainer = importedAggregate.RootFilterContainer; + AggregateConfiguration importedAggregate = null; - //Must have a root container - Assert.IsNotNull(importedAggregateFilterContainer); + try + { + importedAggregate = + cohortIdentificationConfiguration.CreateNewEmptyConfigurationForCatalogue(testData.catalogue, null); - //With an AND operation - Assert.AreEqual(FilterContainerOperation.AND,importedAggregateFilterContainer.Operation); + Assert.AreEqual(ChangeDescription.NoChanges, importedAggregate.HasLocalChanges().Evaluation); - var importedFilters = importedAggregateFilterContainer.GetFilters(); - Assert.AreEqual(1, importedFilters.Length); - - //they are not the same object - Assert.AreNotEqual(filter, importedFilters[0]); - //the deployed filter knows its parent it was cloned from - Assert.AreEqual(filter.ID, importedFilters[0].ClonedFromExtractionFilter_ID); - //the WHERE SQL of the filters should be the same - 
Assert.AreEqual(filter.WhereSQL, importedFilters[0].WhereSQL); - - } - finally - { - filter.DeleteInDatabase(); + var importedAggregateFilterContainer = importedAggregate.RootFilterContainer; - if(importedAggregate != null) - { - importedAggregate.RootFilterContainer.DeleteInDatabase(); - importedAggregate.DeleteInDatabase(); - } - } - } + //Must have a root container + Assert.IsNotNull(importedAggregateFilterContainer); - [Test] - [TestCase(true)] - [TestCase(false)] - public void ImportCatalogueWithSingleFilterThatHasAParameter(bool createAGlobalOverrideBeforeHand) - { - string parameterSQL = "DECLARE @dragonCount as varchar(100)"; + //With an AND operation + Assert.AreEqual(FilterContainerOperation.AND, importedAggregateFilterContainer.Operation); - var filter = new ExtractionFilter(CatalogueRepository, "MyMandatoryFilter", testData.extractionInformations[0]); - filter.IsMandatory = true; - filter.WhereSQL = "There Be Dragons AND @dragonCount = 1"; - filter.SaveToDatabase(); + var importedFilters = importedAggregateFilterContainer.GetFilters(); + Assert.AreEqual(1, importedFilters.Length); - //Should result in the creation of a parameter - new ParameterCreator(new ExtractionFilterFactory(testData.extractionInformations[0]),null,null).CreateAll(filter,null); + //they are not the same object + Assert.AreNotEqual(filter, importedFilters[0]); + //the deployed filter knows its parent it was cloned from + Assert.AreEqual(filter.ID, importedFilters[0].ClonedFromExtractionFilter_ID); + //the WHERE SQL of the filters should be the same + Assert.AreEqual(filter.WhereSQL, importedFilters[0].WhereSQL); + } + finally + { + filter.DeleteInDatabase(); - var filterParameters = filter.ExtractionFilterParameters.ToArray(); - Assert.AreEqual(1, filterParameters.Length); + if (importedAggregate != null) + { + importedAggregate.RootFilterContainer.DeleteInDatabase(); + importedAggregate.DeleteInDatabase(); + } + } + } - filterParameters[0].ParameterSQL = parameterSQL; - filterParameters[0].Value = "'No More than 300 Dragons Please'"; - filterParameters[0].SaveToDatabase(); + [Test] + [TestCase(true)] + [TestCase(false)] + public void ImportCatalogueWithSingleFilterThatHasAParameter(bool createAGlobalOverrideBeforeHand) + { + var parameterSQL = "DECLARE @dragonCount as varchar(100)"; - AnyTableSqlParameter global = null; + var filter = new ExtractionFilter(CatalogueRepository, "MyMandatoryFilter", testData.extractionInformations[0]) + { + IsMandatory = true, + WhereSQL = "There Be Dragons AND @dragonCount = 1" + }; + filter.SaveToDatabase(); - if (createAGlobalOverrideBeforeHand) - { - global = new AnyTableSqlParameter(CatalogueRepository, cohortIdentificationConfiguration,parameterSQL); - global.Value = "'At Least 1000 Dragons'"; - global.SaveToDatabase(); - } + //Should result in the creation of a parameter + new ParameterCreator(new ExtractionFilterFactory(testData.extractionInformations[0]), null, null) + .CreateAll(filter, null); - //ensure that it is picked SetUp - var mandatoryFilters = testData.catalogue.GetAllMandatoryFilters(); - Assert.AreEqual(1, mandatoryFilters.Length); - Assert.AreEqual(filter, mandatoryFilters[0]); + var filterParameters = filter.ExtractionFilterParameters.ToArray(); + Assert.AreEqual(1, filterParameters.Length); + filterParameters[0].ParameterSQL = parameterSQL; + filterParameters[0].Value = "'No More than 300 Dragons Please'"; + filterParameters[0].SaveToDatabase(); - AggregateConfiguration importedAggregate = null; + AnyTableSqlParameter global = null; - try + if 
(createAGlobalOverrideBeforeHand) + { + global = new AnyTableSqlParameter(CatalogueRepository, cohortIdentificationConfiguration, parameterSQL) { - importedAggregate = cohortIdentificationConfiguration.CreateNewEmptyConfigurationForCatalogue(testData.catalogue, null); - var importedAggregateFilterContainer = importedAggregate.RootFilterContainer; + Value = "'At Least 1000 Dragons'" + }; + global.SaveToDatabase(); + } - //Must have a root container - Assert.IsNotNull(importedAggregateFilterContainer); + //ensure that it is picked SetUp + var mandatoryFilters = testData.catalogue.GetAllMandatoryFilters(); + Assert.AreEqual(1, mandatoryFilters.Length); + Assert.AreEqual(filter, mandatoryFilters[0]); - //With an AND operation - Assert.AreEqual(FilterContainerOperation.AND, importedAggregateFilterContainer.Operation); - var importedFilters = importedAggregateFilterContainer.GetFilters(); - Assert.AreEqual(1, importedFilters.Length); + AggregateConfiguration importedAggregate = null; - //Because the configuration already has a parameter with the same declaration it should not bother to import the parameter from the underlying filter - if(createAGlobalOverrideBeforeHand) - Assert.AreEqual(0,importedFilters[0].GetAllParameters().Length); - else - { - //Because there is no global we should be creating a clone of the parameter too - var paramClones = importedFilters[0].GetAllParameters(); - Assert.AreEqual(1, paramClones.Length); + try + { + importedAggregate = + cohortIdentificationConfiguration.CreateNewEmptyConfigurationForCatalogue(testData.catalogue, null); + var importedAggregateFilterContainer = importedAggregate.RootFilterContainer; + + //Must have a root container + Assert.IsNotNull(importedAggregateFilterContainer); - //clone should have same SQL and Value - Assert.AreEqual(parameterSQL,paramClones[0].ParameterSQL); - Assert.AreEqual(filterParameters[0].ParameterSQL, paramClones[0].ParameterSQL); - Assert.AreEqual(filterParameters[0].Value, paramClones[0].Value); + //With an AND operation + Assert.AreEqual(FilterContainerOperation.AND, importedAggregateFilterContainer.Operation); - //but not be the same object in database - Assert.AreNotEqual(filterParameters[0], paramClones[0]); - } + var importedFilters = importedAggregateFilterContainer.GetFilters(); + Assert.AreEqual(1, importedFilters.Length); + //Because the configuration already has a parameter with the same declaration it should not bother to import the parameter from the underlying filter + if (createAGlobalOverrideBeforeHand) + { + Assert.AreEqual(0, importedFilters[0].GetAllParameters().Length); } - finally + else { - if(global != null) - global.DeleteInDatabase(); + //Because there is no global we should be creating a clone of the parameter too + var paramClones = importedFilters[0].GetAllParameters(); + Assert.AreEqual(1, paramClones.Length); - filter.DeleteInDatabase(); + //clone should have same SQL and Value + Assert.AreEqual(parameterSQL, paramClones[0].ParameterSQL); + Assert.AreEqual(filterParameters[0].ParameterSQL, paramClones[0].ParameterSQL); + Assert.AreEqual(filterParameters[0].Value, paramClones[0].Value); - if (importedAggregate != null) - { - importedAggregate.RootFilterContainer.DeleteInDatabase(); - importedAggregate.DeleteInDatabase(); - } + //but not be the same object in database + Assert.AreNotEqual(filterParameters[0], paramClones[0]); } + } + finally + { + global?.DeleteInDatabase(); + + filter.DeleteInDatabase(); + if (importedAggregate != null) + { + 
importedAggregate.RootFilterContainer.DeleteInDatabase(); + importedAggregate.DeleteInDatabase(); + } } + } - [Test] - public void ImportCatalogueWithMultipleMandatoryFilters() + [Test] + public void ImportCatalogueWithMultipleMandatoryFilters() + { + //First mandatory + var filter1 = new ExtractionFilter(CatalogueRepository, "MyMandatoryFilter", testData.extractionInformations[0]) { + IsMandatory = true, + WhereSQL = "There Be Dragons" + }; + filter1.SaveToDatabase(); - //First mandatory - var filter1 = new ExtractionFilter(CatalogueRepository, "MyMandatoryFilter", testData.extractionInformations[0]); - filter1.IsMandatory = true; - filter1.WhereSQL = "There Be Dragons"; - filter1.SaveToDatabase(); + //Second mandatory + var filter2 = new ExtractionFilter(CatalogueRepository, "MyMandatoryFilter", testData.extractionInformations[1]) + { + IsMandatory = true, + WhereSQL = "And Months" + }; + filter2.SaveToDatabase(); - //Second mandatory - var filter2 = new ExtractionFilter(CatalogueRepository, "MyMandatoryFilter", testData.extractionInformations[1]); - filter2.IsMandatory = true; - filter2.WhereSQL = "And Months"; - filter2.SaveToDatabase(); + //Then one that is not mandatory + var filter3 = new ExtractionFilter(CatalogueRepository, "MyMandatoryFilter", testData.extractionInformations[2]) + { + IsMandatory = false, + WhereSQL = "But Can Also Be Flies" + }; + filter3.SaveToDatabase(); - //Then one that is not mandatory - var filter3 = new ExtractionFilter(CatalogueRepository, "MyMandatoryFilter", testData.extractionInformations[2]); - filter3.IsMandatory = false; - filter3.WhereSQL = "But Can Also Be Flies"; - filter3.SaveToDatabase(); + //ensure that both are picked SetUp as mandatory filters by catalogue + var mandatoryFilters = testData.catalogue.GetAllMandatoryFilters(); + Assert.AreEqual(2, mandatoryFilters.Length); - //ensure that both are picked SetUp as mandatory filters by catalogue - var mandatoryFilters = testData.catalogue.GetAllMandatoryFilters(); - Assert.AreEqual(2, mandatoryFilters.Length); + AggregateConfiguration importedAggregate = null; - AggregateConfiguration importedAggregate = null; + try + { + //import the Catalogue + importedAggregate = + cohortIdentificationConfiguration.CreateNewEmptyConfigurationForCatalogue(testData.catalogue, null); + var importedAggregateFilterContainer = importedAggregate.RootFilterContainer; - try - { - //import the Catalogue - importedAggregate = cohortIdentificationConfiguration.CreateNewEmptyConfigurationForCatalogue(testData.catalogue, null); - var importedAggregateFilterContainer = importedAggregate.RootFilterContainer; + //Must have a root container + Assert.IsNotNull(importedAggregateFilterContainer); - //Must have a root container - Assert.IsNotNull(importedAggregateFilterContainer); + //the AND container should be there + Assert.AreEqual(FilterContainerOperation.AND, importedAggregateFilterContainer.Operation); - //the AND container should be there - Assert.AreEqual(FilterContainerOperation.AND, importedAggregateFilterContainer.Operation); + //the filters should both be there (See above test for WHERE SQL, ID etc checking) + var importedFilters = importedAggregateFilterContainer.GetFilters(); + Assert.AreEqual(2, importedFilters.Length); + } + finally + { + filter1.DeleteInDatabase(); + filter2.DeleteInDatabase(); + filter3.DeleteInDatabase(); - //the filters should both be there (See above test for WHERE SQL, ID etc checking) - var importedFilters = importedAggregateFilterContainer.GetFilters(); - Assert.AreEqual(2, 
importedFilters.Length); - } - finally + if (importedAggregate != null) { - filter1.DeleteInDatabase(); - filter2.DeleteInDatabase(); - filter3.DeleteInDatabase(); - - if(importedAggregate != null) - { - importedAggregate.RootFilterContainer.DeleteInDatabase(); - importedAggregate.DeleteInDatabase(); - } + importedAggregate.RootFilterContainer.DeleteInDatabase(); + importedAggregate.DeleteInDatabase(); } } } diff --git a/Rdmp.Core.Tests/CohortCreation/PluginCohortCompilerTests.cs b/Rdmp.Core.Tests/CohortCreation/PluginCohortCompilerTests.cs index 71f58e17c6..f6c00d1717 100644 --- a/Rdmp.Core.Tests/CohortCreation/PluginCohortCompilerTests.cs +++ b/Rdmp.Core.Tests/CohortCreation/PluginCohortCompilerTests.cs @@ -9,7 +9,6 @@ using Rdmp.Core.CohortCommitting.Pipeline.Sources; using Rdmp.Core.CohortCreation.Execution; using Rdmp.Core.CommandExecution.AtomicCommands; -using Rdmp.Core.CommandExecution.AtomicCommands.CatalogueCreationCommands; using Rdmp.Core.CommandExecution.Combining; using Rdmp.Core.CommandLine.Interactive; using Rdmp.Core.Curation.Data; @@ -17,197 +16,213 @@ using Rdmp.Core.Curation.Data.Cohort.Joinables; using Rdmp.Core.DataFlowPipeline; using Rdmp.Core.Tests.CohortCreation.QueryTests; -using ReusableLibraryCode.Checks; -using ReusableLibraryCode.Progress; using System; using System.Data; +using Rdmp.Core.ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Progress; -namespace Rdmp.Core.Tests.CohortCreation +namespace Rdmp.Core.Tests.CohortCreation; + +public class PluginCohortCompilerTests : CohortQueryBuilderWithCacheTests { - public class PluginCohortCompilerTests : CohortQueryBuilderWithCacheTests + [Test] + public void TestIPluginCohortCompiler_PopulatesCacheCorrectly() { - [Test] - public void TestIPluginCohortCompiler_PopulatesCacheCorrectly() + var activator = new ConsoleInputManager(RepositoryLocator, ThrowImmediatelyCheckNotifier.Quiet) + { DisallowInput = true }; + + // create a cohort config + var cic = new CohortIdentificationConfiguration(CatalogueRepository, "mycic") { - var activator = new ConsoleInputManager(RepositoryLocator, new ThrowImmediatelyCheckNotifier()) { DisallowInput = true }; + QueryCachingServer_ID = externalDatabaseServer.ID + }; + cic.SaveToDatabase(); - // create a cohort config - var cic = new CohortIdentificationConfiguration(CatalogueRepository, "mycic"); - cic.QueryCachingServer_ID = externalDatabaseServer.ID; - cic.SaveToDatabase(); + // this special Catalogue will be detected by ExamplePluginCohortCompiler and interpreted as an API call + var myApi = new Catalogue(CatalogueRepository, ExamplePluginCohortCompiler.ExampleAPIName); - // this special Catalogue will be detected by ExamplePluginCohortCompiler and interpreted as an API call - var myApi = new Catalogue(CatalogueRepository, ExamplePluginCohortCompiler.ExampleAPIName); + // add it to the cohort config + cic.CreateRootContainerIfNotExists(); - // add it to the cohort config - cic.CreateRootContainerIfNotExists(); + // create a use of the API as an AggregateConfiguration + var cmd = new ExecuteCommandAddCatalogueToCohortIdentificationSetContainer(activator, + new CatalogueCombineable(myApi), cic.RootCohortAggregateContainer); - // create a use of the API as an AggregateConfiguration - var cmd = new ExecuteCommandAddCatalogueToCohortIdentificationSetContainer(activator, new CatalogueCombineable(myApi),cic.RootCohortAggregateContainer); + Assert.IsFalse(cmd.IsImpossible, cmd.ReasonCommandImpossible); + cmd.Execute(); - Assert.IsFalse(cmd.IsImpossible, 
cmd.ReasonCommandImpossible); - cmd.Execute(); + // run the cic + var source = new CohortIdentificationConfigurationSource(); + source.PreInitialize(cic, ThrowImmediatelyDataLoadEventListener.Quiet); + var dt = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); - // run the cic - var source = new CohortIdentificationConfigurationSource(); - source.PreInitialize(cic, new ThrowImmediatelyDataLoadEventListener()); - var dt = source.GetChunk(new ThrowImmediatelyDataLoadEventListener(),new GracefulCancellationToken()); + // 5 random chi numbers + Assert.AreEqual(5, dt.Rows.Count); - // 5 random chi numbers - Assert.AreEqual(5, dt.Rows.Count); + // test stale + cmd.AggregateCreatedIfAny.Description = "2"; + cmd.AggregateCreatedIfAny.SaveToDatabase(); - // test stale - cmd.AggregateCreatedIfAny.Description = "2"; - cmd.AggregateCreatedIfAny.SaveToDatabase(); + // run the cic again + source = new CohortIdentificationConfigurationSource(); + source.PreInitialize(cic, ThrowImmediatelyDataLoadEventListener.Quiet); + dt = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); - // run the cic again - source = new CohortIdentificationConfigurationSource(); - source.PreInitialize(cic, new ThrowImmediatelyDataLoadEventListener()); - dt = source.GetChunk(new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()); + // because the rules changed to generate 2 chis only there should be a new result + Assert.AreEqual(2, dt.Rows.Count); - // because the rules changed to generate 2 chis only there should be a new result - Assert.AreEqual(2, dt.Rows.Count); + var results = new[] { (string)dt.Rows[0][0], (string)dt.Rows[1][0] }; - var results = new[] { (string)dt.Rows[0][0], (string)dt.Rows[1][0] }; + // run the cic again with no changes, the results should be unchanged since there is no config changed + // I.e. no new chis should be generated and the cached values returned + source = new CohortIdentificationConfigurationSource(); + source.PreInitialize(cic, ThrowImmediatelyDataLoadEventListener.Quiet); + dt = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); - // run the cic again with no changes, the results should be unchanged since there is no config changed - // I.e. 
no new chis should be generated and the cached values returned - source = new CohortIdentificationConfigurationSource(); - source.PreInitialize(cic, new ThrowImmediatelyDataLoadEventListener()); - dt = source.GetChunk(new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()); + Assert.AreEqual(2, dt.Rows.Count); + var results2 = new[] { (string)dt.Rows[0][0], (string)dt.Rows[1][0] }; - Assert.AreEqual(2, dt.Rows.Count); - var results2 = new[] { (string)dt.Rows[0][0], (string)dt.Rows[1][0] }; + Assert.AreEqual(results[0], results2[0]); + Assert.AreEqual(results[1], results2[1]); + } - Assert.AreEqual(results[0], results2[0]); - Assert.AreEqual(results[1], results2[1]); + [Test] + public void TestIPluginCohortCompiler_TestCloneCic() + { + var activator = new ConsoleInputManager(RepositoryLocator, ThrowImmediatelyCheckNotifier.Quiet) + { DisallowInput = true }; - } - [Test] - public void TestIPluginCohortCompiler_TestCloneCic() + // create a cohort config + var cic = new CohortIdentificationConfiguration(CatalogueRepository, "mycic") { - var activator = new ConsoleInputManager(RepositoryLocator, new ThrowImmediatelyCheckNotifier()) { DisallowInput = true }; - - // create a cohort config - var cic = new CohortIdentificationConfiguration(CatalogueRepository, "mycic"); - cic.QueryCachingServer_ID = externalDatabaseServer.ID; - cic.SaveToDatabase(); - - // this special Catalogue will be detected by ExamplePluginCohortCompiler and interpreted as an API call - var myApi = new Catalogue(CatalogueRepository, ExamplePluginCohortCompiler.ExampleAPIName); - - // add it to the cohort config - cic.CreateRootContainerIfNotExists(); - - // create a use of the API as an AggregateConfiguration - var cmd = new ExecuteCommandAddCatalogueToCohortIdentificationSetContainer(activator, new CatalogueCombineable(myApi), cic.RootCohortAggregateContainer); - Assert.IsFalse(cmd.IsImpossible, cmd.ReasonCommandImpossible); - cmd.Execute(); - cmd.AggregateCreatedIfAny.Description = "33"; - cmd.AggregateCreatedIfAny.SaveToDatabase(); - - // clone the cic - var cmd2 = new ExecuteCommandCloneCohortIdentificationConfiguration(activator, cic); - Assert.IsFalse(cmd2.IsImpossible, cmd2.ReasonCommandImpossible); - cmd2.Execute(); + QueryCachingServer_ID = externalDatabaseServer.ID + }; + cic.SaveToDatabase(); + + // this special Catalogue will be detected by ExamplePluginCohortCompiler and interpreted as an API call + var myApi = new Catalogue(CatalogueRepository, ExamplePluginCohortCompiler.ExampleAPIName); + + // add it to the cohort config + cic.CreateRootContainerIfNotExists(); + + // create a use of the API as an AggregateConfiguration + var cmd = new ExecuteCommandAddCatalogueToCohortIdentificationSetContainer(activator, + new CatalogueCombineable(myApi), cic.RootCohortAggregateContainer); + Assert.IsFalse(cmd.IsImpossible, cmd.ReasonCommandImpossible); + cmd.Execute(); + cmd.AggregateCreatedIfAny.Description = "33"; + cmd.AggregateCreatedIfAny.SaveToDatabase(); + + // clone the cic + var cmd2 = new ExecuteCommandCloneCohortIdentificationConfiguration(activator, cic); + Assert.IsFalse(cmd2.IsImpossible, cmd2.ReasonCommandImpossible); + cmd2.Execute(); + + var cloneAc = cmd2.CloneCreatedIfAny.RootCohortAggregateContainer.GetAggregateConfigurations()[0]; + Assert.AreEqual("33", cloneAc.Description); + } - var cloneAc = cmd2.CloneCreatedIfAny.RootCohortAggregateContainer.GetAggregateConfigurations()[0]; - Assert.AreEqual("33", cloneAc.Description); - } + [Test] + public void 
TestIPluginCohortCompiler_APIsCantHavePatientIndexTables() + { + var activator = new ConsoleInputManager(RepositoryLocator, ThrowImmediatelyCheckNotifier.Quiet) + { DisallowInput = true }; - [Test] - public void TestIPluginCohortCompiler_APIsCantHavePatientIndexTables() + // create a cohort config + var cic = new CohortIdentificationConfiguration(CatalogueRepository, "mycic") { - var activator = new ConsoleInputManager(RepositoryLocator, new ThrowImmediatelyCheckNotifier()) { DisallowInput = true }; - - // create a cohort config - var cic = new CohortIdentificationConfiguration(CatalogueRepository, "mycic"); - cic.QueryCachingServer_ID = externalDatabaseServer.ID; - cic.SaveToDatabase(); + QueryCachingServer_ID = externalDatabaseServer.ID + }; + cic.SaveToDatabase(); - // this special Catalogue will be detected by ExamplePluginCohortCompiler and interpreted as an API call - var myApi = new Catalogue(CatalogueRepository, ExamplePluginCohortCompiler.ExampleAPIName); + // this special Catalogue will be detected by ExamplePluginCohortCompiler and interpreted as an API call + var myApi = new Catalogue(CatalogueRepository, ExamplePluginCohortCompiler.ExampleAPIName); - // add it to the cohort config - cic.CreateRootContainerIfNotExists(); + // add it to the cohort config + cic.CreateRootContainerIfNotExists(); - // We need something in the root container otherwise the cic won't build - var cmd = new ExecuteCommandAddCatalogueToCohortIdentificationSetContainer(activator, new CatalogueCombineable(myApi), cic.RootCohortAggregateContainer); - Assert.IsFalse(cmd.IsImpossible, cmd.ReasonCommandImpossible); - cmd.Execute(); - var regularAggregate = cmd.AggregateCreatedIfAny; + // We need something in the root container otherwise the cic won't build + var cmd = new ExecuteCommandAddCatalogueToCohortIdentificationSetContainer(activator, + new CatalogueCombineable(myApi), cic.RootCohortAggregateContainer); + Assert.IsFalse(cmd.IsImpossible, cmd.ReasonCommandImpossible); + cmd.Execute(); + var regularAggregate = cmd.AggregateCreatedIfAny; - // The thing we are wanting to test - creating a use of the API as a patient index table - var cmd2 = new ExecuteCommandAddCatalogueToCohortIdentificationAsPatientIndexTable( - activator, new CatalogueCombineable(myApi), cic); + // The thing we are wanting to test - creating a use of the API as a patient index table + var cmd2 = new ExecuteCommandAddCatalogueToCohortIdentificationAsPatientIndexTable( + activator, new CatalogueCombineable(myApi), cic); - Assert.IsFalse(cmd2.IsImpossible, cmd2.ReasonCommandImpossible); - cmd2.Execute(); + Assert.IsFalse(cmd2.IsImpossible, cmd2.ReasonCommandImpossible); + cmd2.Execute(); - var joinables = cic.GetAllJoinables(); + var joinables = cic.GetAllJoinables(); - // make them join one another - var ex = Assert.Throws(()=> + // make them join one another + var ex = Assert.Throws(() => new JoinableCohortAggregateConfigurationUse(CatalogueRepository, regularAggregate, joinables[0])); - Assert.AreEqual("API calls cannot join with PatientIndexTables (The API call must be self contained)", ex.Message); - } + Assert.AreEqual("API calls cannot join with PatientIndexTables (The API call must be self contained)", + ex.Message); + } - [Test] - public void TestIPluginCohortCompiler_AsPatientIndexTable() - { - var activator = new ConsoleInputManager(RepositoryLocator, new ThrowImmediatelyCheckNotifier()) { DisallowInput = true }; + [Test] + public void TestIPluginCohortCompiler_AsPatientIndexTable() + { + var activator = new 
ConsoleInputManager(RepositoryLocator, ThrowImmediatelyCheckNotifier.Quiet) + { DisallowInput = true }; - // Create a regular normal boring old table that will join into the results of the API call - var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); - using DataTable dt = new DataTable(); - dt.Columns.Add("chi"); - dt.Rows.Add("0101010101"); + // Create a regular normal boring old table that will join into the results of the API call + var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); + using var dt = new DataTable(); + dt.Columns.Add("chi"); + dt.Rows.Add("0101010101"); - var tbl = db.CreateTable("RegularBoringOldTable",dt); - var cata = (Catalogue)Import(tbl); - var eiChi = cata.GetAllExtractionInformation()[0]; + var tbl = db.CreateTable("RegularBoringOldTable", dt); + var cata = (Catalogue)Import(tbl); + var eiChi = cata.GetAllExtractionInformation()[0]; - eiChi.IsExtractionIdentifier = true; - eiChi.SaveToDatabase(); + eiChi.IsExtractionIdentifier = true; + eiChi.SaveToDatabase(); - // create a cohort config - var cic = new CohortIdentificationConfiguration(CatalogueRepository, "mycic"); - cic.QueryCachingServer_ID = externalDatabaseServer.ID; - cic.SaveToDatabase(); + // create a cohort config + var cic = new CohortIdentificationConfiguration(CatalogueRepository, "mycic") + { + QueryCachingServer_ID = externalDatabaseServer.ID + }; + cic.SaveToDatabase(); - // this special Catalogue will be detected by ExamplePluginCohortCompiler and interpreted as an API call - var myApi = new Catalogue(CatalogueRepository, ExamplePluginCohortCompiler.ExampleAPIName); + // this special Catalogue will be detected by ExamplePluginCohortCompiler and interpreted as an API call + var myApi = new Catalogue(CatalogueRepository, ExamplePluginCohortCompiler.ExampleAPIName); - // add it to the cohort config - cic.CreateRootContainerIfNotExists(); + // add it to the cohort config + cic.CreateRootContainerIfNotExists(); - // Add the regular table - var cmd = new ExecuteCommandAddCatalogueToCohortIdentificationSetContainer(activator, new CatalogueCombineable(cata), cic.RootCohortAggregateContainer); - Assert.IsFalse(cmd.IsImpossible, cmd.ReasonCommandImpossible); - cmd.Execute(); - var regularAggregate = cmd.AggregateCreatedIfAny; + // Add the regular table + var cmd = new ExecuteCommandAddCatalogueToCohortIdentificationSetContainer(activator, + new CatalogueCombineable(cata), cic.RootCohortAggregateContainer); + Assert.IsFalse(cmd.IsImpossible, cmd.ReasonCommandImpossible); + cmd.Execute(); + var regularAggregate = cmd.AggregateCreatedIfAny; - // The thing we are wanting to test - creating a use of the API as a patient index table - var cmd2 = new ExecuteCommandAddCatalogueToCohortIdentificationAsPatientIndexTable( - activator, new CatalogueCombineable(myApi), cic); + // The thing we are wanting to test - creating a use of the API as a patient index table + var cmd2 = new ExecuteCommandAddCatalogueToCohortIdentificationAsPatientIndexTable( + activator, new CatalogueCombineable(myApi), cic); - Assert.IsFalse(cmd2.IsImpossible, cmd2.ReasonCommandImpossible); - cmd2.Execute(); + Assert.IsFalse(cmd2.IsImpossible, cmd2.ReasonCommandImpossible); + cmd2.Execute(); - var joinables = cic.GetAllJoinables(); + var joinables = cic.GetAllJoinables(); - Assert.AreEqual(1, joinables.Length); + Assert.AreEqual(1, joinables.Length); - // make them join one another - new JoinableCohortAggregateConfigurationUse(CatalogueRepository, regularAggregate, joinables[0]); + // make them join one another + new 
JoinableCohortAggregateConfigurationUse(CatalogueRepository, regularAggregate, joinables[0]); - // run the cic again - var source = new CohortIdentificationConfigurationSource(); - source.PreInitialize(cic, new ThrowImmediatelyDataLoadEventListener()); - var result = source.GetChunk(new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()); - Assert.AreEqual(1, result.Rows.Count); - } + // run the cic again + var source = new CohortIdentificationConfigurationSource(); + source.PreInitialize(cic, ThrowImmediatelyDataLoadEventListener.Quiet); + var result = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); + Assert.AreEqual(1, result.Rows.Count); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/CohortCreation/QueryTests/CohortCompilerCacheJoinableTest.cs b/Rdmp.Core.Tests/CohortCreation/QueryTests/CohortCompilerCacheJoinableTest.cs index e148bcc442..0852492dce 100644 --- a/Rdmp.Core.Tests/CohortCreation/QueryTests/CohortCompilerCacheJoinableTest.cs +++ b/Rdmp.Core.Tests/CohortCreation/QueryTests/CohortCompilerCacheJoinableTest.cs @@ -4,7 +4,6 @@ // RDMP is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. // You should have received a copy of the GNU General Public License along with RDMP. If not, see . -using MapsDirectlyToDatabaseTable.Versioning; using NUnit.Framework; using Rdmp.Core.CohortCreation.Execution; using Rdmp.Core.Curation; @@ -13,134 +12,138 @@ using Rdmp.Core.Curation.Data.Cohort; using Rdmp.Core.Curation.Data.Cohort.Joinables; using Rdmp.Core.Databases; -using ReusableLibraryCode.Checks; using System; using System.Data; using System.Linq; +using Rdmp.Core.MapsDirectlyToDatabaseTable.Versioning; using Tests.Common.Scenarios; using Rdmp.Core.QueryCaching.Aggregation; +using Rdmp.Core.ReusableLibraryCode.Checks; using static Rdmp.Core.CohortCreation.Execution.CohortCompilerRunner; -namespace Rdmp.Core.Tests.CohortCreation.QueryTests +namespace Rdmp.Core.Tests.CohortCreation.QueryTests; + +/// +/// Tests caching the results of an which hits up multiple underlying tables. +/// +internal class CohortCompilerCacheJoinableTest : FromToDatabaseTests { - /// - /// Tests caching the results of an which hits up multiple underlying tables. 
- /// - class CohortCompilerCacheJoinableTest:FromToDatabaseTests + [Test] + public void CohortIdentificationConfiguration_Join_PatientIndexTable() { - [Test] - public void CohortIdentificationConfiguration_Join_PatientIndexTable() - { - DataTable header = new DataTable(); - header.Columns.Add("ID"); - header.Columns.Add("Chi"); - header.Columns.Add("Age"); - header.Columns.Add("Date"); - header.Columns.Add("Healthboard"); - header.PrimaryKey = new []{header.Columns["ID"] }; - - header.Rows.Add("1","0101010101",50,new DateTime(2001,1,1),"T"); - header.Rows.Add("2", "0202020202", 50, new DateTime(2002, 2, 2), "T"); - - var hTbl = From.CreateTable("header",header); - var cata = Import(hTbl,out var hTi, out _); - cata.Name = "My Combo Join Catalogue"; - cata.SaveToDatabase(); - - var scripter = new MasterDatabaseScriptExecutor(To); - var patcher = new QueryCachingPatcher(); - scripter.CreateAndPatchDatabase(patcher,new AcceptAllCheckNotifier()); - var edsCache = new ExternalDatabaseServer(CatalogueRepository,"Cache", new QueryCachingPatcher()); - edsCache.SetProperties(To); - - DataTable results = new DataTable(); - results.Columns.Add("Header_ID"); - results.Columns.Add("TestCode"); - results.Columns.Add("Result"); - - results.Rows.Add("1","HBA1C",50); - results.Rows.Add("1", "ECOM", "Hi fellas"); - results.Rows.Add("1", "ALB", 100); - results.Rows.Add("2", "ALB", 50); - - var rTbl = From.CreateTable("results", results); - - var importer = new TableInfoImporter(CatalogueRepository,rTbl); - importer.DoImport(out var rTi,out ColumnInfo[] rColInfos); - - var fe = new ForwardEngineerCatalogue(rTi,rColInfos); - fe.ExecuteForwardEngineering(cata); - - //Should now be 1 Catalogue with all the columns (tables will have to be joined to build the query though) - Assert.AreEqual(8,cata.GetAllExtractionInformation(ExtractionCategory.Core).Length); - - var ji = new JoinInfo(CatalogueRepository, - rTi.ColumnInfos.Single(ci=>ci.GetRuntimeName().Equals("Header_ID",StringComparison.CurrentCultureIgnoreCase)), - hTi.ColumnInfos.Single(ci => ci.GetRuntimeName().Equals("ID", StringComparison.CurrentCultureIgnoreCase)), - ExtractionJoinType.Right, - null - ); - - //setup a cic that uses the cache - var cic = new CohortIdentificationConfiguration(CatalogueRepository,"MyCic"); - cic.CreateRootContainerIfNotExists(); - cic.QueryCachingServer_ID = edsCache.ID; - cic.SaveToDatabase(); - - //create a patient index table that shows all the times that they had a test in any HB (with the HB being part of the result set) - var acPatIndex = new AggregateConfiguration(CatalogueRepository,cata,"My PatIndes"); - - var eiChi = cata.GetAllExtractionInformation(ExtractionCategory.Core).Single(ei => ei.GetRuntimeName().Equals("Chi")); - eiChi.IsExtractionIdentifier = true; - acPatIndex.CountSQL = null; - eiChi.SaveToDatabase(); - - acPatIndex.AddDimension(eiChi); - acPatIndex.AddDimension(cata.GetAllExtractionInformation(ExtractionCategory.Core).Single(ei => ei.GetRuntimeName().Equals("Date"))); - acPatIndex.AddDimension(cata.GetAllExtractionInformation(ExtractionCategory.Core).Single(ei => ei.GetRuntimeName().Equals("Healthboard"))); - - cic.EnsureNamingConvention(acPatIndex); - - var joinable = new JoinableCohortAggregateConfiguration(CatalogueRepository,cic,acPatIndex); - - Assert.IsTrue(acPatIndex.IsCohortIdentificationAggregate); - Assert.IsTrue(acPatIndex.IsJoinablePatientIndexTable()); - - var compiler = new CohortCompiler(cic); - - var runner = new CohortCompilerRunner(compiler,50); - - var cancellation = new 
System.Threading.CancellationToken(); - runner.Run(cancellation); - - //they should not be executing and should be completed - Assert.IsFalse(compiler.Tasks.Any(t=>t.Value.IsExecuting)); - Assert.AreEqual(Phase.Finished,runner.ExecutionPhase); - - var manager = new CachedAggregateConfigurationResultsManager(edsCache); - - var cacheTableName = manager.GetLatestResultsTableUnsafe(acPatIndex,AggregateOperation.JoinableInceptionQuery); - - Assert.IsNotNull(cacheTableName,"No results were cached!"); - - var cacheTable = To.ExpectTable(cacheTableName.GetRuntimeName()); - - //chi, Date and TestCode - Assert.AreEqual(3,cacheTable.DiscoverColumns().Length); - - //healthboard should be a string - Assert.AreEqual(typeof(string),cacheTable.DiscoverColumn("Healthboard").DataType.GetCSharpDataType()); - - /* Query Cache contains this: - * + var header = new DataTable(); + header.Columns.Add("ID"); + header.Columns.Add("Chi"); + header.Columns.Add("Age"); + header.Columns.Add("Date"); + header.Columns.Add("Healthboard"); + header.PrimaryKey = new[] { header.Columns["ID"] }; + + header.Rows.Add("1", "0101010101", 50, new DateTime(2001, 1, 1), "T"); + header.Rows.Add("2", "0202020202", 50, new DateTime(2002, 2, 2), "T"); + + var hTbl = From.CreateTable("header", header); + var cata = Import(hTbl, out var hTi, out _); + cata.Name = "My Combo Join Catalogue"; + cata.SaveToDatabase(); + + var scripter = new MasterDatabaseScriptExecutor(To); + var patcher = new QueryCachingPatcher(); + scripter.CreateAndPatchDatabase(patcher, new AcceptAllCheckNotifier()); + var edsCache = new ExternalDatabaseServer(CatalogueRepository, "Cache", new QueryCachingPatcher()); + edsCache.SetProperties(To); + + var results = new DataTable(); + results.Columns.Add("Header_ID"); + results.Columns.Add("TestCode"); + results.Columns.Add("Result"); + + results.Rows.Add("1", "HBA1C", 50); + results.Rows.Add("1", "ECOM", "Hi fellas"); + results.Rows.Add("1", "ALB", 100); + results.Rows.Add("2", "ALB", 50); + + var rTbl = From.CreateTable("results", results); + + var importer = new TableInfoImporter(CatalogueRepository, rTbl); + importer.DoImport(out var rTi, out var rColInfos); + + var fe = new ForwardEngineerCatalogue(rTi, rColInfos); + fe.ExecuteForwardEngineering(cata); + + //Should now be 1 Catalogue with all the columns (tables will have to be joined to build the query though) + Assert.AreEqual(8, cata.GetAllExtractionInformation(ExtractionCategory.Core).Length); + + var ji = new JoinInfo(CatalogueRepository, + rTi.ColumnInfos.Single(ci => + ci.GetRuntimeName().Equals("Header_ID", StringComparison.CurrentCultureIgnoreCase)), + hTi.ColumnInfos.Single(ci => ci.GetRuntimeName().Equals("ID", StringComparison.CurrentCultureIgnoreCase)), + ExtractionJoinType.Right, + null + ); + + //setup a cic that uses the cache + var cic = new CohortIdentificationConfiguration(CatalogueRepository, "MyCic"); + cic.CreateRootContainerIfNotExists(); + cic.QueryCachingServer_ID = edsCache.ID; + cic.SaveToDatabase(); + + //create a patient index table that shows all the times that they had a test in any HB (with the HB being part of the result set) + var acPatIndex = new AggregateConfiguration(CatalogueRepository, cata, "My PatIndes"); + + var eiChi = cata.GetAllExtractionInformation(ExtractionCategory.Core) + .Single(ei => ei.GetRuntimeName().Equals("Chi")); + eiChi.IsExtractionIdentifier = true; + acPatIndex.CountSQL = null; + eiChi.SaveToDatabase(); + + acPatIndex.AddDimension(eiChi); + 
acPatIndex.AddDimension(cata.GetAllExtractionInformation(ExtractionCategory.Core) + .Single(ei => ei.GetRuntimeName().Equals("Date"))); + acPatIndex.AddDimension(cata.GetAllExtractionInformation(ExtractionCategory.Core) + .Single(ei => ei.GetRuntimeName().Equals("Healthboard"))); + + cic.EnsureNamingConvention(acPatIndex); + + var joinable = new JoinableCohortAggregateConfiguration(CatalogueRepository, cic, acPatIndex); + + Assert.IsTrue(acPatIndex.IsCohortIdentificationAggregate); + Assert.IsTrue(acPatIndex.IsJoinablePatientIndexTable()); + + var compiler = new CohortCompiler(cic); + + var runner = new CohortCompilerRunner(compiler, 50); + + var cancellation = new System.Threading.CancellationToken(); + runner.Run(cancellation); + + //they should not be executing and should be completed + Assert.IsFalse(compiler.Tasks.Any(t => t.Value.IsExecuting)); + Assert.AreEqual(Phase.Finished, runner.ExecutionPhase); + + var manager = new CachedAggregateConfigurationResultsManager(edsCache); + + var cacheTableName = manager.GetLatestResultsTableUnsafe(acPatIndex, AggregateOperation.JoinableInceptionQuery); + + Assert.IsNotNull(cacheTableName, "No results were cached!"); + + var cacheTable = To.ExpectTable(cacheTableName.GetRuntimeName()); + + //chi, Date and TestCode + Assert.AreEqual(3, cacheTable.DiscoverColumns().Length); + + //healthboard should be a string + Assert.AreEqual(typeof(string), cacheTable.DiscoverColumn("Healthboard").DataType.GetCSharpDataType()); + + /* Query Cache contains this: + * Chi Date Healthboard 0101010101 2001-01-01 00:00:00.0000000 T 0202020202 2002-02-02 00:00:00.0000000 T */ - Assert.AreEqual(2, cacheTable.GetRowCount()); + Assert.AreEqual(2, cacheTable.GetRowCount()); - //Now we could add a new AggregateConfiguration that uses the joinable! - } + //Now we could add a new AggregateConfiguration that uses the joinable! 
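The hunks above and below repeat the same two mechanical rewrites throughout these test files: throwaway new ThrowImmediatelyCheckNotifier() / new ThrowImmediatelyDataLoadEventListener() instances become the shared static Quiet members, and chained property assignments are folded into C# object initializers. A minimal before/after sketch of that shape, assuming the same test fixture members (RepositoryLocator, CatalogueRepository, externalDatabaseServer) already used by the surrounding tests:

// Before (as removed above): separate construction followed by property assignments
//   var activator = new ConsoleInputManager(RepositoryLocator, new ThrowImmediatelyCheckNotifier()) { DisallowInput = true };
//   var cic = new CohortIdentificationConfiguration(CatalogueRepository, "mycic");
//   cic.QueryCachingServer_ID = externalDatabaseServer.ID;
//   cic.SaveToDatabase();

// After (as added above): shared Quiet notifier plus object initializers
var activator = new ConsoleInputManager(RepositoryLocator, ThrowImmediatelyCheckNotifier.Quiet)
{
    DisallowInput = true
};

var cic = new CohortIdentificationConfiguration(CatalogueRepository, "mycic")
{
    QueryCachingServer_ID = externalDatabaseServer.ID
};
cic.SaveToDatabase();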
} -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/CohortCreation/QueryTests/CohortQueryBuilderTests.cs b/Rdmp.Core.Tests/CohortCreation/QueryTests/CohortQueryBuilderTests.cs index 476bb50d5f..768eac84db 100644 --- a/Rdmp.Core.Tests/CohortCreation/QueryTests/CohortQueryBuilderTests.cs +++ b/Rdmp.Core.Tests/CohortCreation/QueryTests/CohortQueryBuilderTests.cs @@ -15,54 +15,56 @@ using Rdmp.Core.Repositories; using Tests.Common; -namespace Rdmp.Core.Tests.CohortCreation.QueryTests +namespace Rdmp.Core.Tests.CohortCreation.QueryTests; + +public class CohortQueryBuilderTests : CohortIdentificationTests { - public class CohortQueryBuilderTests : CohortIdentificationTests + private readonly string _scratchDatabaseName = TestDatabaseNames.GetConsistentName("ScratchArea"); + + [Test] + public void TestGettingAggregateJustFromConfig_DistinctCHISelect() { - private readonly string _scratchDatabaseName = TestDatabaseNames.GetConsistentName("ScratchArea"); - - [Test] - public void TestGettingAggregateJustFromConfig_DistinctCHISelect() - { - CohortQueryBuilder builder = new CohortQueryBuilder(aggregate1,null,null); + var builder = new CohortQueryBuilder(aggregate1, null, null); - Assert.AreEqual(CollapseWhitespace(string.Format(@"/*cic_{0}_UnitTestAggregate1*/ + Assert.AreEqual(CollapseWhitespace(string.Format(@"/*cic_{0}_UnitTestAggregate1*/ SELECT distinct [" + _scratchDatabaseName + @"].[dbo].[BulkData].[chi] FROM -[" + _scratchDatabaseName + @"].[dbo].[BulkData]", cohortIdentificationConfiguration.ID)), CollapseWhitespace(builder.SQL)); - } - - [Test] - public void TestGettingAggregateJustFromConfig_SelectStar() - { - CohortQueryBuilder builder = new CohortQueryBuilder(aggregate1, null,null); +[" + _scratchDatabaseName + @"].[dbo].[BulkData]", cohortIdentificationConfiguration.ID)), + CollapseWhitespace(builder.SQL)); + } - Assert.AreEqual(CollapseWhitespace( + [Test] + public void TestGettingAggregateJustFromConfig_SelectStar() + { + var builder = new CohortQueryBuilder(aggregate1, null, null); + + Assert.AreEqual(CollapseWhitespace( string.Format(@"/*cic_{0}_UnitTestAggregate1*/ SELECT TOP 1000 * FROM - [" + _scratchDatabaseName + @"].[dbo].[BulkData]",cohortIdentificationConfiguration.ID)),CollapseWhitespace(builder.GetDatasetSampleSQL())); - } + [" + _scratchDatabaseName + @"].[dbo].[BulkData]", cohortIdentificationConfiguration.ID)), + CollapseWhitespace(builder.GetDatasetSampleSQL())); + } - /// - /// When we we normally get "select TOP 1000 *" of the query body BUT - /// if there's HAVING sql then SQL will balk at select *. In this case we expect it to just run the normal distinct chi - /// bit but put a TOP X on it. - /// - [Test] - public void Test_GetDatasetSampleSQL_WithHAVING() - { - aggregate1.HavingSQL = "count(*)>1"; + /// + /// When we we normally get "select TOP 1000 *" of the query body BUT + /// if there's HAVING sql then SQL will balk at select *. In this case we expect it to just run the normal distinct chi + /// bit but put a TOP X on it. 
+ /// + [Test] + public void Test_GetDatasetSampleSQL_WithHAVING() + { + aggregate1.HavingSQL = "count(*)>1"; - CohortQueryBuilder builder = new CohortQueryBuilder(aggregate1, null,null); + var builder = new CohortQueryBuilder(aggregate1, null, null); - Assert.AreEqual(CollapseWhitespace( - string.Format(@"/*cic_{0}_UnitTestAggregate1*/ + Assert.AreEqual(CollapseWhitespace( + string.Format(@"/*cic_{0}_UnitTestAggregate1*/ SELECT distinct TOP 1000 @@ -73,27 +75,26 @@ group by [" + _scratchDatabaseName + @"].[dbo].[BulkData].[chi] HAVING count(*)>1", cohortIdentificationConfiguration.ID)), CollapseWhitespace(builder.GetDatasetSampleSQL())); - } - + } - [Test] - public void TestGettingAggregateSQLFromEntirity() - { - Assert.AreEqual(null, aggregate1.GetCohortAggregateContainerIfAny()); - //set the order so that 2 comes before 1 - rootcontainer.AddChild(aggregate2, 1); - rootcontainer.AddChild(aggregate1, 5); + [Test] + public void TestGettingAggregateSQLFromEntirity() + { + Assert.AreEqual(null, aggregate1.GetCohortAggregateContainerIfAny()); - CohortQueryBuilder builder = new CohortQueryBuilder(cohortIdentificationConfiguration,null); + //set the order so that 2 comes before 1 + rootcontainer.AddChild(aggregate2, 1); + rootcontainer.AddChild(aggregate1, 5); - Assert.AreEqual(rootcontainer,aggregate1.GetCohortAggregateContainerIfAny()); - try - { - Assert.AreEqual( + var builder = new CohortQueryBuilder(cohortIdentificationConfiguration, null); - CollapseWhitespace(string.Format( -@"( + Assert.AreEqual(rootcontainer, aggregate1.GetCohortAggregateContainerIfAny()); + try + { + Assert.AreEqual( + CollapseWhitespace(string.Format( + @"( /*cic_{0}_UnitTestAggregate2*/ SELECT distinct @@ -110,44 +111,43 @@ public void TestGettingAggregateSQLFromEntirity() FROM [" + _scratchDatabaseName + @"].[dbo].[BulkData] )" - - ,cohortIdentificationConfiguration.ID)) - , CollapseWhitespace(builder.SQL)); - } - finally - { - rootcontainer.RemoveChild(aggregate1); - rootcontainer.RemoveChild(aggregate2); - } + , cohortIdentificationConfiguration.ID)) + , CollapseWhitespace(builder.SQL)); } - - [Test] - public void TestOrdering_AggregateThenContainer() + finally { - //set the order so that a configuration is in position 1 - rootcontainer.AddChild(aggregate1, 1); + rootcontainer.RemoveChild(aggregate1); + rootcontainer.RemoveChild(aggregate2); + } + } - //then a container in position 2 - container1.Order = 2; - container1.SaveToDatabase(); - rootcontainer.AddChild(container1); + [Test] + public void TestOrdering_AggregateThenContainer() + { + //set the order so that a configuration is in position 1 + rootcontainer.AddChild(aggregate1, 1); - //container 1 contains both other aggregates - container1.AddChild(aggregate2, 1); - container1.AddChild(aggregate3, 2); + //then a container in position 2 + container1.Order = 2; + container1.SaveToDatabase(); + rootcontainer.AddChild(container1); - CohortQueryBuilder builder = new CohortQueryBuilder(cohortIdentificationConfiguration,null); + //container 1 contains both other aggregates + container1.AddChild(aggregate2, 1); + container1.AddChild(aggregate3, 2); - try - { - var allConfigurations = rootcontainer.GetAllAggregateConfigurationsRecursively(); - Assert.IsTrue(allConfigurations.Contains(aggregate1)); - Assert.IsTrue(allConfigurations.Contains(aggregate2)); - Assert.IsTrue(allConfigurations.Contains(aggregate3)); + var builder = new CohortQueryBuilder(cohortIdentificationConfiguration, null); - Assert.AreEqual( - CollapseWhitespace(string.Format( -@"( + try + { + var 
allConfigurations = rootcontainer.GetAllAggregateConfigurationsRecursively(); + Assert.IsTrue(allConfigurations.Contains(aggregate1)); + Assert.IsTrue(allConfigurations.Contains(aggregate2)); + Assert.IsTrue(allConfigurations.Contains(aggregate3)); + + Assert.AreEqual( + CollapseWhitespace(string.Format( + @"( /*cic_{0}_UnitTestAggregate1*/ SELECT distinct @@ -176,41 +176,41 @@ public void TestOrdering_AggregateThenContainer() [" + _scratchDatabaseName + @"].[dbo].[BulkData] ) -)",cohortIdentificationConfiguration.ID)) - , - CollapseWhitespace(builder.SQL)); - } - finally - { - container1.RemoveChild(aggregate2); - container1.RemoveChild(aggregate3); - rootcontainer.RemoveChild(aggregate1); - } +)", cohortIdentificationConfiguration.ID)) + , + CollapseWhitespace(builder.SQL)); } - - [Test] - public void TestOrdering_ContainerThenAggregate() + finally { - //set the order so that a configuration is in position 1 - rootcontainer.AddChild(aggregate1, 2); + container1.RemoveChild(aggregate2); + container1.RemoveChild(aggregate3); + rootcontainer.RemoveChild(aggregate1); + } + } - //then a container in position 2 - container1.Order = 1; - container1.SaveToDatabase(); - rootcontainer.AddChild(container1); + [Test] + public void TestOrdering_ContainerThenAggregate() + { + //set the order so that a configuration is in position 1 + rootcontainer.AddChild(aggregate1, 2); - //container 1 contains both other aggregates - container1.AddChild(aggregate2, 1); - container1.AddChild(aggregate3, 2); - - CohortQueryBuilder builder = new CohortQueryBuilder(cohortIdentificationConfiguration,null); + //then a container in position 2 + container1.Order = 1; + container1.SaveToDatabase(); + rootcontainer.AddChild(container1); - try - { - Assert.AreEqual( - CollapseWhitespace( + //container 1 contains both other aggregates + container1.AddChild(aggregate2, 1); + container1.AddChild(aggregate3, 2); + + var builder = new CohortQueryBuilder(cohortIdentificationConfiguration, null); + + try + { + Assert.AreEqual( + CollapseWhitespace( string.Format( -@"( + @"( ( /*cic_{0}_UnitTestAggregate2*/ @@ -239,56 +239,56 @@ public void TestOrdering_ContainerThenAggregate() [" + _scratchDatabaseName + @"].[dbo].[BulkData].[chi] FROM [" + _scratchDatabaseName + @"].[dbo].[BulkData] -)",cohortIdentificationConfiguration.ID)) - ,CollapseWhitespace(builder.SQL)); - } - finally - { - container1.RemoveChild(aggregate2); - container1.RemoveChild(aggregate3); - rootcontainer.RemoveChild(aggregate1); - } +)", cohortIdentificationConfiguration.ID)) + , CollapseWhitespace(builder.SQL)); } - - [Test] - public void TestGettingAggregateSQLFromEntirity_IncludingParametersAtTop() + finally { - //setup a filter (all filters must be in a container so the container is a default AND container) - var AND = new AggregateFilterContainer(CatalogueRepository,FilterContainerOperation.AND); - var filter = new AggregateFilter(CatalogueRepository,"hithere",AND); + container1.RemoveChild(aggregate2); + container1.RemoveChild(aggregate3); + rootcontainer.RemoveChild(aggregate1); + } + } + [Test] + public void TestGettingAggregateSQLFromEntirity_IncludingParametersAtTop() + { + //setup a filter (all filters must be in a container so the container is a default AND container) + var AND = new AggregateFilterContainer(CatalogueRepository, FilterContainerOperation.AND); + var filter = new AggregateFilter(CatalogueRepository, "hithere", AND) + { //give the filter an implicit parameter requiring bit of SQL - filter.WhereSQL = "1=@abracadabra"; - 
filter.SaveToDatabase(); + WhereSQL = "1=@abracadabra" + }; - //Make aggregate1 use the filter we just setup (required to happen before parameter creator gets hit because otherwise it won't know the IFilter DatabaseType because IFilter is an orphan at the moment) - aggregate1.RootFilterContainer_ID = AND.ID; - aggregate1.SaveToDatabase(); + filter.SaveToDatabase(); - //get it to create the parameters for us - new ParameterCreator(new AggregateFilterFactory(CatalogueRepository), null, null).CreateAll(filter, null); + //Make aggregate1 use the filter we just setup (required to happen before parameter creator gets hit because otherwise it won't know the IFilter DatabaseType because IFilter is an orphan at the moment) + aggregate1.RootFilterContainer_ID = AND.ID; + aggregate1.SaveToDatabase(); - //get the parameter it just created, set its value and save it - var param = (AggregateFilterParameter) filter.GetAllParameters().Single(); - param.Value = "1"; - param.ParameterSQL = "DECLARE @abracadabra AS int;"; - param.SaveToDatabase(); + //get it to create the parameters for us + new ParameterCreator(new AggregateFilterFactory(CatalogueRepository), null, null).CreateAll(filter, null); - - - - //set the order so that 2 comes before 1 - rootcontainer.AddChild(aggregate2, 1); - rootcontainer.AddChild(aggregate1, 5); - - CohortQueryBuilder builder = new CohortQueryBuilder(cohortIdentificationConfiguration,null); + //get the parameter it just created, set its value and save it + var param = (AggregateFilterParameter)filter.GetAllParameters().Single(); + param.Value = "1"; + param.ParameterSQL = "DECLARE @abracadabra AS int;"; + param.SaveToDatabase(); - try - { - Assert.AreEqual( - CollapseWhitespace( + + //set the order so that 2 comes before 1 + rootcontainer.AddChild(aggregate2, 1); + rootcontainer.AddChild(aggregate1, 5); + + var builder = new CohortQueryBuilder(cohortIdentificationConfiguration, null); + + try + { + Assert.AreEqual( + CollapseWhitespace( string.Format( -@"DECLARE @abracadabra AS int; + @"DECLARE @abracadabra AS int; SET @abracadabra=1; ( @@ -313,16 +313,15 @@ public void TestGettingAggregateSQLFromEntirity_IncludingParametersAtTop() 1=@abracadabra ) ) -",cohortIdentificationConfiguration.ID)) - ,CollapseWhitespace(builder.SQL)); +", cohortIdentificationConfiguration.ID)) + , CollapseWhitespace(builder.SQL)); - CohortQueryBuilder builder2 = new CohortQueryBuilder(aggregate1, null,null); - Assert.AreEqual( - -CollapseWhitespace( -string.Format( -@"DECLARE @abracadabra AS int; + var builder2 = new CohortQueryBuilder(aggregate1, null, null); + Assert.AreEqual( + CollapseWhitespace( + string.Format( + @"DECLARE @abracadabra AS int; SET @abracadabra=1; /*cic_{0}_UnitTestAggregate1*/ SELECT @@ -334,17 +333,16 @@ public void TestGettingAggregateSQLFromEntirity_IncludingParametersAtTop() ( /*hithere*/ 1=@abracadabra -)",cohortIdentificationConfiguration.ID)), - CollapseWhitespace(builder2.SQL)); +)", cohortIdentificationConfiguration.ID)), + CollapseWhitespace(builder2.SQL)); - string selectStar = new CohortQueryBuilder(aggregate1,null,null).GetDatasetSampleSQL(); + var selectStar = new CohortQueryBuilder(aggregate1, null, null).GetDatasetSampleSQL(); - Assert.AreEqual( - CollapseWhitespace( + Assert.AreEqual( + CollapseWhitespace( string.Format( - -@"DECLARE @abracadabra AS int; + @"DECLARE @abracadabra AS int; SET @abracadabra=1; /*cic_{0}_UnitTestAggregate1*/ @@ -352,44 +350,43 @@ public void TestGettingAggregateSQLFromEntirity_IncludingParametersAtTop() TOP 1000 * FROM - [" + 
TestDatabaseNames.Prefix+@"ScratchArea].[dbo].[BulkData] + [" + TestDatabaseNames.Prefix + @"ScratchArea].[dbo].[BulkData] WHERE ( /*hithere*/ 1=@abracadabra - )",cohortIdentificationConfiguration.ID)), - CollapseWhitespace(selectStar)); + )", cohortIdentificationConfiguration.ID)), + CollapseWhitespace(selectStar)); + } + finally + { + filter.DeleteInDatabase(); + AND.DeleteInDatabase(); - } - finally - { - filter.DeleteInDatabase(); - AND.DeleteInDatabase(); + rootcontainer.RemoveChild(aggregate1); + rootcontainer.RemoveChild(aggregate2); + } + } - rootcontainer.RemoveChild(aggregate1); - rootcontainer.RemoveChild(aggregate2); - } - } + [Test] + public void TestGettingAggregateSQLFromEntirity_StopEarly() + { + rootcontainer.AddChild(aggregate1, 1); + rootcontainer.AddChild(aggregate2, 2); + rootcontainer.AddChild(aggregate3, 3); + var builder = new CohortQueryBuilder(rootcontainer, null, null) + { + StopContainerWhenYouReach = aggregate2 + }; - [Test] - public void TestGettingAggregateSQLFromEntirity_StopEarly() + try { - rootcontainer.AddChild(aggregate1,1); - rootcontainer.AddChild(aggregate2,2); - rootcontainer.AddChild(aggregate3,3); - - CohortQueryBuilder builder = new CohortQueryBuilder(rootcontainer, null,null); - - builder.StopContainerWhenYouReach = aggregate2; - try - { - Assert.AreEqual( - CollapseWhitespace( + Assert.AreEqual( + CollapseWhitespace( string.Format( - -@" + @" ( /*cic_{0}_UnitTestAggregate1*/ SELECT @@ -407,16 +404,18 @@ public void TestGettingAggregateSQLFromEntirity_StopEarly() FROM [" + _scratchDatabaseName + @"].[dbo].[BulkData] ) -",cohortIdentificationConfiguration.ID)), - CollapseWhitespace(builder.SQL)); +", cohortIdentificationConfiguration.ID)), + CollapseWhitespace(builder.SQL)); - CohortQueryBuilder builder2 = new CohortQueryBuilder(rootcontainer, null,null); - builder2.StopContainerWhenYouReach = null; + var builder2 = new CohortQueryBuilder(rootcontainer, null, null) + { + StopContainerWhenYouReach = null + }; Assert.AreEqual( -CollapseWhitespace( -string.Format( -@" + CollapseWhitespace( + string.Format( + @" ( /*cic_{0}_UnitTestAggregate1*/ SELECT @@ -443,51 +442,53 @@ public void TestGettingAggregateSQLFromEntirity_StopEarly() FROM [" + _scratchDatabaseName + @"].[dbo].[BulkData] ) -",cohortIdentificationConfiguration.ID)), - CollapseWhitespace(builder2.SQL)); - } - finally - { - rootcontainer.RemoveChild(aggregate1); - rootcontainer.RemoveChild(aggregate2); - rootcontainer.RemoveChild(aggregate3); - - } +", cohortIdentificationConfiguration.ID)), + CollapseWhitespace(builder2.SQL)); } - - [Test] - public void TestGettingAggregateSQLFromEntirity_StopEarlyContainer() + finally { - rootcontainer.AddChild(aggregate1, -5); + rootcontainer.RemoveChild(aggregate1); + rootcontainer.RemoveChild(aggregate2); + rootcontainer.RemoveChild(aggregate3); + } + } - container1.AddChild(aggregate2, 2); - container1.AddChild(aggregate3, 3); - - rootcontainer.AddChild(container1); + [Test] + public void TestGettingAggregateSQLFromEntirity_StopEarlyContainer() + { + rootcontainer.AddChild(aggregate1, -5); - AggregateConfiguration aggregate4 = new AggregateConfiguration(CatalogueRepository, testData.catalogue, "UnitTestAggregate4"); - new AggregateDimension(CatalogueRepository, testData.extractionInformations.Single(e => e.GetRuntimeName().Equals("chi")), aggregate4); + container1.AddChild(aggregate2, 2); + container1.AddChild(aggregate3, 3); - rootcontainer.AddChild(aggregate4,5); - CohortQueryBuilder builder = new CohortQueryBuilder(rootcontainer, null,null); + 
rootcontainer.AddChild(container1); + var aggregate4 = new AggregateConfiguration(CatalogueRepository, testData.catalogue, "UnitTestAggregate4"); + new AggregateDimension(CatalogueRepository, + testData.extractionInformations.Single(e => e.GetRuntimeName().Equals("chi")), aggregate4); + + rootcontainer.AddChild(aggregate4, 5); + var builder = new CohortQueryBuilder(rootcontainer, null, null) + { //Looks like: /* - * - EXCEPT - Aggregate 1 - UNION <-----We tell it to stop after this container - Aggregate2 - Aggregate3 - Aggregate 4 - */ - builder.StopContainerWhenYouReach = container1; - try - { - Assert.AreEqual( -CollapseWhitespace( -string.Format( -@" + * + EXCEPT + Aggregate 1 + UNION <-----We tell it to stop after this container + Aggregate2 + Aggregate3 + Aggregate 4 + */ + StopContainerWhenYouReach = container1 + }; + + try + { + Assert.AreEqual( + CollapseWhitespace( + string.Format( + @" ( /*cic_{0}_UnitTestAggregate1*/ SELECT @@ -518,42 +519,41 @@ Aggregate 4 ) ) -",cohortIdentificationConfiguration.ID)), - CollapseWhitespace(builder.SQL)); - } - finally - { - rootcontainer.RemoveChild(aggregate1); - rootcontainer.RemoveChild(aggregate4); - container1.RemoveChild(aggregate2); - container1.RemoveChild(aggregate3); - - aggregate4.DeleteInDatabase(); - } +", cohortIdentificationConfiguration.ID)), + CollapseWhitespace(builder.SQL)); } - - [Test] - public void TestHavingSQL() + finally { - rootcontainer.AddChild(aggregate1, -5); + rootcontainer.RemoveChild(aggregate1); + rootcontainer.RemoveChild(aggregate4); + container1.RemoveChild(aggregate2); + container1.RemoveChild(aggregate3); - container1.AddChild(aggregate2, 2); - container1.AddChild(aggregate3, 3); + aggregate4.DeleteInDatabase(); + } + } - aggregate2.HavingSQL = "count(*)>1"; - aggregate2.SaveToDatabase(); - aggregate1.HavingSQL = "SUM(Result)>10"; - aggregate1.SaveToDatabase(); - try - { + [Test] + public void TestHavingSQL() + { + rootcontainer.AddChild(aggregate1, -5); - rootcontainer.AddChild(container1); + container1.AddChild(aggregate2, 2); + container1.AddChild(aggregate3, 3); - CohortQueryBuilder builder = new CohortQueryBuilder(rootcontainer, null,null); - Assert.AreEqual( -CollapseWhitespace( -string.Format( -@" + aggregate2.HavingSQL = "count(*)>1"; + aggregate2.SaveToDatabase(); + aggregate1.HavingSQL = "SUM(Result)>10"; + aggregate1.SaveToDatabase(); + try + { + rootcontainer.AddChild(container1); + + var builder = new CohortQueryBuilder(rootcontainer, null, null); + Assert.AreEqual( + CollapseWhitespace( + string.Format( + @" ( /*cic_{0}_UnitTestAggregate1*/ SELECT @@ -592,89 +592,88 @@ group by ) ) -",cohortIdentificationConfiguration.ID)) - ,CollapseWhitespace(builder.SQL)); - - } - finally - { - rootcontainer.RemoveChild(aggregate1); +", cohortIdentificationConfiguration.ID)) + , CollapseWhitespace(builder.SQL)); + } + finally + { + rootcontainer.RemoveChild(aggregate1); - container1.RemoveChild(aggregate2); - container1.RemoveChild(aggregate3); + container1.RemoveChild(aggregate2); + container1.RemoveChild(aggregate3); - aggregate2.HavingSQL = null; - aggregate2.SaveToDatabase(); - aggregate1.HavingSQL = null; - aggregate1.SaveToDatabase(); - } + aggregate2.HavingSQL = null; + aggregate2.SaveToDatabase(); + aggregate1.HavingSQL = null; + aggregate1.SaveToDatabase(); } + } + + [Test] + [TestCase(true, true)] + [TestCase(true, false)] + [TestCase(false, true)] + [TestCase(false, false)] + public void TestGettingAggregateSQLFromEntirity_TwoFilterParametersPerDataset(bool valuesAreSame, + bool 
memoryRepository) + { + var repo = memoryRepository ? (ICatalogueRepository)new MemoryCatalogueRepository() : CatalogueRepository; + + //create all the setup again but in the memory repository + SetupTestData(repo); + + //setup a filter (all filters must be in a container so the container is a default AND container) + var AND1 = new AggregateFilterContainer(repo, FilterContainerOperation.AND); + var filter1_1 = new AggregateFilter(repo, "filter1_1", AND1); + var filter1_2 = new AggregateFilter(repo, "filter1_2", AND1); + + var AND2 = new AggregateFilterContainer(repo, FilterContainerOperation.AND); + var filter2_1 = new AggregateFilter(repo, "filter2_1", AND2); + var filter2_2 = new AggregateFilter(repo, "filter2_2", AND2); + + //Filters must belong to containers BEFORE parameter creation + //Make aggregate1 use the filter set we just setup + aggregate1.RootFilterContainer_ID = AND1.ID; + aggregate1.SaveToDatabase(); - [Test] - [TestCase(true,true)] - [TestCase(true, false)] - [TestCase(false,true)] - [TestCase(false, false)] - public void TestGettingAggregateSQLFromEntirity_TwoFilterParametersPerDataset(bool valuesAreSame,bool memoryRepository) + //Make aggregate3 use the other filter set we just setup + aggregate2.RootFilterContainer_ID = AND2.ID; + aggregate2.SaveToDatabase(); + + //set the order so that 2 comes before 1 + rootcontainer.AddChild(aggregate2, 1); + rootcontainer.AddChild(aggregate1, 5); + + //give the filter an implicit parameter requiring bit of SQL + foreach (var filter in new IFilter[] { filter1_1, filter1_2, filter2_1, filter2_2 }) { - var repo = memoryRepository ? (ICatalogueRepository)new MemoryCatalogueRepository() : CatalogueRepository; - - //create all the setup again but in the memory repository - SetupTestData(repo); - - //setup a filter (all filters must be in a container so the container is a default AND container) - var AND1 = new AggregateFilterContainer(repo,FilterContainerOperation.AND); - var filter1_1 = new AggregateFilter(repo,"filter1_1",AND1); - var filter1_2 = new AggregateFilter(repo,"filter1_2",AND1); - - var AND2 = new AggregateFilterContainer(repo,FilterContainerOperation.AND); - var filter2_1 = new AggregateFilter(repo,"filter2_1",AND2); - var filter2_2 = new AggregateFilter(repo,"filter2_2",AND2); - - //Filters must belong to containers BEFORE parameter creation - //Make aggregate1 use the filter set we just setup - aggregate1.RootFilterContainer_ID = AND1.ID; - aggregate1.SaveToDatabase(); + filter.WhereSQL = "@bob = 'bob'"; + filter.SaveToDatabase(); + //get it to create the parameters for us + new ParameterCreator(new AggregateFilterFactory(repo), null, null).CreateAll(filter, null); - //Make aggregate3 use the other filter set we just setup - aggregate2.RootFilterContainer_ID = AND2.ID; - aggregate2.SaveToDatabase(); + //get the parameter it just created, set its value and save it + var param = (AggregateFilterParameter)filter.GetAllParameters().Single(); + param.Value = "'Boom!'"; + param.ParameterSQL = "DECLARE @bob AS varchar(10);"; - //set the order so that 2 comes before 1 - rootcontainer.AddChild(aggregate2, 1); - rootcontainer.AddChild(aggregate1, 5); + //if test case is different values then we change the values of the parameters + if (!valuesAreSame && (filter.Equals(filter2_1) || Equals(filter, filter2_2))) + param.Value = "'Grenades Are Go'"; - //give the filter an implicit parameter requiring bit of SQL - foreach (var filter in new IFilter[]{filter1_1,filter1_2,filter2_1,filter2_2}) - { - filter.WhereSQL = "@bob = 'bob'"; - 
filter.SaveToDatabase(); - //get it to create the parameters for us - new ParameterCreator(new AggregateFilterFactory(repo), null, null).CreateAll(filter, null); - - //get the parameter it just created, set its value and save it - var param = (AggregateFilterParameter) filter.GetAllParameters().Single(); - param.Value = "'Boom!'"; - param.ParameterSQL = "DECLARE @bob AS varchar(10);"; - - //if test case is different values then we change the values of the parameters - if (!valuesAreSame && (filter.Equals(filter2_1) || Equals(filter, filter2_2))) - param.Value = "'Grenades Are Go'"; - - param.SaveToDatabase(); - } - - CohortQueryBuilder builder = new CohortQueryBuilder(cohortIdentificationConfiguration,null); - Console.WriteLine( builder.SQL); - - try - { - if (valuesAreSame) - { - Assert.AreEqual( -CollapseWhitespace( -string.Format( -@"DECLARE @bob AS varchar(10); + param.SaveToDatabase(); + } + + var builder = new CohortQueryBuilder(cohortIdentificationConfiguration, null); + Console.WriteLine(builder.SQL); + + try + { + if (valuesAreSame) + Assert.AreEqual( + CollapseWhitespace( + string.Format( + @"DECLARE @bob AS varchar(10); SET @bob='Boom!'; ( @@ -710,16 +709,13 @@ public void TestGettingAggregateSQLFromEntirity_TwoFilterParametersPerDataset(bo @bob = 'bob' ) ) -",cohortIdentificationConfiguration.ID)), - CollapseWhitespace(builder.SQL)); - } - else - { - Assert.AreEqual( - - CollapseWhitespace( +", cohortIdentificationConfiguration.ID)), + CollapseWhitespace(builder.SQL)); + else + Assert.AreEqual( + CollapseWhitespace( string.Format( -@"DECLARE @bob AS varchar(10); + @"DECLARE @bob AS varchar(10); SET @bob='Grenades Are Go'; DECLARE @bob_2 AS varchar(10); SET @bob_2='Boom!'; @@ -728,9 +724,9 @@ public void TestGettingAggregateSQLFromEntirity_TwoFilterParametersPerDataset(bo /*cic_{0}_UnitTestAggregate2*/ SELECT distinct - [" + TestDatabaseNames.Prefix+@"ScratchArea].[dbo].[BulkData].[chi] + [" + TestDatabaseNames.Prefix + @"ScratchArea].[dbo].[BulkData].[chi] FROM - ["+TestDatabaseNames.Prefix+ @"ScratchArea].[dbo].[BulkData] + [" + TestDatabaseNames.Prefix + @"ScratchArea].[dbo].[BulkData] WHERE ( /*filter2_1*/ @@ -745,9 +741,9 @@ public void TestGettingAggregateSQLFromEntirity_TwoFilterParametersPerDataset(bo /*cic_{0}_UnitTestAggregate1*/ SELECT distinct - [" + TestDatabaseNames.Prefix+@"ScratchArea].[dbo].[BulkData].[chi] + [" + TestDatabaseNames.Prefix + @"ScratchArea].[dbo].[BulkData].[chi] FROM - ["+TestDatabaseNames.Prefix+@"ScratchArea].[dbo].[BulkData] + [" + TestDatabaseNames.Prefix + @"ScratchArea].[dbo].[BulkData] WHERE ( /*filter1_1*/ @@ -757,48 +753,45 @@ public void TestGettingAggregateSQLFromEntirity_TwoFilterParametersPerDataset(bo @bob_2 = 'bob' ) ) -",cohortIdentificationConfiguration.ID)), - CollapseWhitespace(builder.SQL)); - } - } - finally - { - rootcontainer.RemoveChild(aggregate2); - rootcontainer.RemoveChild(aggregate1); - - filter1_1.DeleteInDatabase(); - filter1_2.DeleteInDatabase(); - filter2_1.DeleteInDatabase(); - filter2_2.DeleteInDatabase(); - - AND1.DeleteInDatabase(); - AND2.DeleteInDatabase(); - - } - } - - [Test] - public void TestGettingAggregateSQL_Aggregate_IsDisabled() +", cohortIdentificationConfiguration.ID)), + CollapseWhitespace(builder.SQL)); + } + finally { - Assert.AreEqual(null, aggregate1.GetCohortAggregateContainerIfAny()); + rootcontainer.RemoveChild(aggregate2); + rootcontainer.RemoveChild(aggregate1); - //set the order so that 2 comes before 1 - rootcontainer.AddChild(aggregate2, 1); - rootcontainer.AddChild(aggregate1, 5); + 
filter1_1.DeleteInDatabase(); + filter1_2.DeleteInDatabase(); + filter2_1.DeleteInDatabase(); + filter2_2.DeleteInDatabase(); - //disable aggregate 1 - aggregate1.IsDisabled = true; - aggregate1.SaveToDatabase(); + AND1.DeleteInDatabase(); + AND2.DeleteInDatabase(); + } + } - Assert.AreEqual(rootcontainer, aggregate1.GetCohortAggregateContainerIfAny()); + [Test] + public void TestGettingAggregateSQL_Aggregate_IsDisabled() + { + Assert.AreEqual(null, aggregate1.GetCohortAggregateContainerIfAny()); - CohortQueryBuilder builder = new CohortQueryBuilder(cohortIdentificationConfiguration,null); - try - { - Assert.AreEqual( + //set the order so that 2 comes before 1 + rootcontainer.AddChild(aggregate2, 1); + rootcontainer.AddChild(aggregate1, 5); + + //disable aggregate 1 + aggregate1.IsDisabled = true; + aggregate1.SaveToDatabase(); + + Assert.AreEqual(rootcontainer, aggregate1.GetCohortAggregateContainerIfAny()); - CollapseWhitespace(string.Format( -@"( + var builder = new CohortQueryBuilder(cohortIdentificationConfiguration, null); + try + { + Assert.AreEqual( + CollapseWhitespace(string.Format( + @"( /*cic_{0}_UnitTestAggregate2*/ SELECT distinct @@ -806,80 +799,77 @@ public void TestGettingAggregateSQL_Aggregate_IsDisabled() FROM [" + _scratchDatabaseName + @"].[dbo].[BulkData] )" - - , cohortIdentificationConfiguration.ID)) - , CollapseWhitespace(builder.SQL)); - } - finally - { - - aggregate1.IsDisabled = false; - aggregate1.SaveToDatabase(); - - rootcontainer.RemoveChild(aggregate1); - rootcontainer.RemoveChild(aggregate2); - } + , cohortIdentificationConfiguration.ID)) + , CollapseWhitespace(builder.SQL)); } - - [Test] - public void TestGettingAggregateSQLFromEntirity_Filter_IsDisabled() + finally { - //setup a filter (all filters must be in a container so the container is a default AND container) - var AND1 = new AggregateFilterContainer(CatalogueRepository, FilterContainerOperation.AND); - var filter1_1 = new AggregateFilter(CatalogueRepository, "filter1_1", AND1); - var filter1_2 = new AggregateFilter(CatalogueRepository, "filter1_2", AND1); - - var AND2 = new AggregateFilterContainer(CatalogueRepository, FilterContainerOperation.AND); - var filter2_1 = new AggregateFilter(CatalogueRepository, "filter2_1", AND2); - var filter2_2 = new AggregateFilter(CatalogueRepository, "filter2_2", AND2); - - //Filters must belong to containers BEFORE parameter creation - //Make aggregate1 use the filter set we just setup - aggregate1.RootFilterContainer_ID = AND1.ID; + aggregate1.IsDisabled = false; aggregate1.SaveToDatabase(); - //Make aggregate3 use the other filter set we just setup - aggregate2.RootFilterContainer_ID = AND2.ID; - aggregate2.SaveToDatabase(); + rootcontainer.RemoveChild(aggregate1); + rootcontainer.RemoveChild(aggregate2); + } + } - //set the order so that 2 comes before 1 - rootcontainer.AddChild(aggregate2, 1); - rootcontainer.AddChild(aggregate1, 5); + [Test] + public void TestGettingAggregateSQLFromEntirity_Filter_IsDisabled() + { + //setup a filter (all filters must be in a container so the container is a default AND container) + var AND1 = new AggregateFilterContainer(CatalogueRepository, FilterContainerOperation.AND); + var filter1_1 = new AggregateFilter(CatalogueRepository, "filter1_1", AND1); + var filter1_2 = new AggregateFilter(CatalogueRepository, "filter1_2", AND1); - filter2_2.IsDisabled = true; - filter2_2.SaveToDatabase(); + var AND2 = new AggregateFilterContainer(CatalogueRepository, FilterContainerOperation.AND); + var filter2_1 = new 
AggregateFilter(CatalogueRepository, "filter2_1", AND2); + var filter2_2 = new AggregateFilter(CatalogueRepository, "filter2_2", AND2); - //give the filter an implicit parameter requiring bit of SQL - foreach (var filter in new IFilter[] { filter1_1, filter1_2, filter2_1, filter2_2 }) - { - filter.WhereSQL = "@bob = 'bob'"; - filter.SaveToDatabase(); - //get it to create the parameters for us - new ParameterCreator(new AggregateFilterFactory(CatalogueRepository), null, null).CreateAll(filter, null); + //Filters must belong to containers BEFORE parameter creation + //Make aggregate1 use the filter set we just setup + aggregate1.RootFilterContainer_ID = AND1.ID; + aggregate1.SaveToDatabase(); - //get the parameter it just created, set its value and save it - var param = (AggregateFilterParameter)filter.GetAllParameters().Single(); - param.Value = "'Boom!'"; - param.ParameterSQL = "DECLARE @bob AS varchar(10);"; + //Make aggregate3 use the other filter set we just setup + aggregate2.RootFilterContainer_ID = AND2.ID; + aggregate2.SaveToDatabase(); - //change the values of the parameters - if (filter.Equals(filter2_1) || Equals(filter, filter2_2)) - param.Value = "'Grenades Are Go'"; + //set the order so that 2 comes before 1 + rootcontainer.AddChild(aggregate2, 1); + rootcontainer.AddChild(aggregate1, 5); - param.SaveToDatabase(); - } - - CohortQueryBuilder builder = new CohortQueryBuilder(cohortIdentificationConfiguration,null); + filter2_2.IsDisabled = true; + filter2_2.SaveToDatabase(); - Console.WriteLine(builder.SQL); + //give the filter an implicit parameter requiring bit of SQL + foreach (var filter in new IFilter[] { filter1_1, filter1_2, filter2_1, filter2_2 }) + { + filter.WhereSQL = "@bob = 'bob'"; + filter.SaveToDatabase(); + //get it to create the parameters for us + new ParameterCreator(new AggregateFilterFactory(CatalogueRepository), null, null).CreateAll(filter, null); - try - { - Assert.AreEqual( -CollapseWhitespace( -string.Format( + //get the parameter it just created, set its value and save it + var param = (AggregateFilterParameter)filter.GetAllParameters().Single(); + param.Value = "'Boom!'"; + param.ParameterSQL = "DECLARE @bob AS varchar(10);"; -@"DECLARE @bob AS varchar(10); + //change the values of the parameters + if (filter.Equals(filter2_1) || Equals(filter, filter2_2)) + param.Value = "'Grenades Are Go'"; + + param.SaveToDatabase(); + } + + var builder = new CohortQueryBuilder(cohortIdentificationConfiguration, null); + + Console.WriteLine(builder.SQL); + + try + { + Assert.AreEqual( + CollapseWhitespace( + string.Format( + @"DECLARE @bob AS varchar(10); SET @bob='Grenades Are Go'; DECLARE @bob_2 AS varchar(10); SET @bob_2='Boom!'; @@ -915,27 +905,23 @@ public void TestGettingAggregateSQLFromEntirity_Filter_IsDisabled() ) ) ", cohortIdentificationConfiguration.ID)), -CollapseWhitespace(builder.SQL)); - - } - finally - { - - filter2_2.IsDisabled = false; - filter2_2.SaveToDatabase(); - - rootcontainer.RemoveChild(aggregate2); - rootcontainer.RemoveChild(aggregate1); + CollapseWhitespace(builder.SQL)); + } + finally + { + filter2_2.IsDisabled = false; + filter2_2.SaveToDatabase(); - filter1_1.DeleteInDatabase(); - filter1_2.DeleteInDatabase(); - filter2_1.DeleteInDatabase(); - filter2_2.DeleteInDatabase(); + rootcontainer.RemoveChild(aggregate2); + rootcontainer.RemoveChild(aggregate1); - AND1.DeleteInDatabase(); - AND2.DeleteInDatabase(); + filter1_1.DeleteInDatabase(); + filter1_2.DeleteInDatabase(); + filter2_1.DeleteInDatabase(); + filter2_2.DeleteInDatabase(); 
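Several of the CohortQueryBuilderTests hunks above build the same filter-and-implicit-parameter scaffolding before asserting on the generated SQL. A condensed sketch of that flow, using only calls that already appear in these hunks and assuming the same CatalogueRepository and aggregate1 fixture objects; the filter name "myFilter" and the parameter value are illustrative only:

// 1. Filters must live in a container (a default AND container here)
var AND = new AggregateFilterContainer(CatalogueRepository, FilterContainerOperation.AND);
var filter = new AggregateFilter(CatalogueRepository, "myFilter", AND)
{
    // the WHERE fragment references a parameter that does not exist yet
    WhereSQL = "@bob = 'bob'"
};
filter.SaveToDatabase();

// 2. The filter must belong to an aggregate before parameter creation,
//    otherwise the IFilter is an orphan and its DatabaseType is unknown
aggregate1.RootFilterContainer_ID = AND.ID;
aggregate1.SaveToDatabase();

// 3. ParameterCreator reads the WhereSQL and creates the missing @bob parameter
new ParameterCreator(new AggregateFilterFactory(CatalogueRepository), null, null).CreateAll(filter, null);

// 4. Fetch the created parameter, give it a declaration and a value, then save
var param = (AggregateFilterParameter)filter.GetAllParameters().Single();
param.ParameterSQL = "DECLARE @bob AS varchar(10);";
param.Value = "'Boom!'";
param.SaveToDatabase();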
- } + AND1.DeleteInDatabase(); + AND2.DeleteInDatabase(); } } } \ No newline at end of file diff --git a/Rdmp.Core.Tests/CohortCreation/QueryTests/CohortQueryBuilderTestsInvolvingTableValuedParameters.cs b/Rdmp.Core.Tests/CohortCreation/QueryTests/CohortQueryBuilderTestsInvolvingTableValuedParameters.cs index 10e06a23e5..032b99c501 100644 --- a/Rdmp.Core.Tests/CohortCreation/QueryTests/CohortQueryBuilderTestsInvolvingTableValuedParameters.cs +++ b/Rdmp.Core.Tests/CohortCreation/QueryTests/CohortQueryBuilderTestsInvolvingTableValuedParameters.cs @@ -11,64 +11,71 @@ using Rdmp.Core.QueryBuilding; using Tests.Common; -namespace Rdmp.Core.Tests.CohortCreation.QueryTests +namespace Rdmp.Core.Tests.CohortCreation.QueryTests; + +public class CohortQueryBuilderTestsInvolvingTableValuedParameters : DatabaseTests { - public class CohortQueryBuilderTestsInvolvingTableValuedParameters:DatabaseTests + private TestableTableValuedFunction _function = new(); + + public void CreateFunction() { - private TestableTableValuedFunction _function = new TestableTableValuedFunction(); - - public void CreateFunction() + _function.Create(GetCleanedServer(DatabaseType.MicrosoftSQLServer), CatalogueRepository); + } + + [Test] + public void CohortGenerationDifferingTableValuedParametersTest() + { + CreateFunction(); + + //In this example we have 2 configurations which both target the same table valued function but which must have different parameter values + var config1 = new AggregateConfiguration(CatalogueRepository, _function.Cata, + "CohortGenerationDifferingTableValuedParametersTest_1") { - _function.Create(GetCleanedServer(DatabaseType.MicrosoftSQLServer), CatalogueRepository); - } + CountSQL = null + }; + config1.SaveToDatabase(); - [Test] - public void CohortGenerationDifferingTableValuedParametersTest() + var config2 = new AggregateConfiguration(CatalogueRepository, _function.Cata, + "CohortGenerationDifferingTableValuedParametersTest_2") { - CreateFunction(); - - //In this example we have 2 configurations which both target the same table valued function but which must have different parameter values - var config1 = new AggregateConfiguration(CatalogueRepository,_function.Cata, "CohortGenerationDifferingTableValuedParametersTest_1"); - config1.CountSQL = null; - config1.SaveToDatabase(); - - var config2 = new AggregateConfiguration(CatalogueRepository,_function.Cata, "CohortGenerationDifferingTableValuedParametersTest_2"); - config2.CountSQL = null; - config2.SaveToDatabase(); - - var cic = new CohortIdentificationConfiguration(CatalogueRepository,"CohortGenerationDifferingTableValuedParametersTest"); - - cic.EnsureNamingConvention(config1); - cic.EnsureNamingConvention(config2); - - try - { - //make the string column the extraction identifier - _function.ExtractionInformations[1].IsExtractionIdentifier = true; - _function.ExtractionInformations[1].SaveToDatabase(); - - //add the extraction identtifier as the only dimension one ach of the aggregate configurations that we will use for the cohort identification query - new AggregateDimension(CatalogueRepository,_function.ExtractionInformations[1], config1); - new AggregateDimension(CatalogueRepository,_function.ExtractionInformations[1], config2); - - Assert.IsNull(cic.RootCohortAggregateContainer_ID); - - //create a root container for it - CohortAggregateContainer container = new CohortAggregateContainer(CatalogueRepository,SetOperation.INTERSECT); - - //set the container as the root container for the cohort identification task object - 
cic.RootCohortAggregateContainer_ID = container.ID; - cic.SaveToDatabase(); - - //put both the aggregates into the container - container.AddChild(config1, 0); - container.AddChild(config2, 1); - - CohortQueryBuilder builder = new CohortQueryBuilder(cic,null); - Assert.AreEqual( - CollapseWhitespace( + CountSQL = null + }; + config2.SaveToDatabase(); + + var cic = new CohortIdentificationConfiguration(CatalogueRepository, + "CohortGenerationDifferingTableValuedParametersTest"); + + cic.EnsureNamingConvention(config1); + cic.EnsureNamingConvention(config2); + + try + { + //make the string column the extraction identifier + _function.ExtractionInformations[1].IsExtractionIdentifier = true; + _function.ExtractionInformations[1].SaveToDatabase(); + + //add the extraction identifier as the only dimension on each of the aggregate configurations that we will use for the cohort identification query + new AggregateDimension(CatalogueRepository, _function.ExtractionInformations[1], config1); + new AggregateDimension(CatalogueRepository, _function.ExtractionInformations[1], config2); + + Assert.IsNull(cic.RootCohortAggregateContainer_ID); + + //create a root container for it + var container = new CohortAggregateContainer(CatalogueRepository, SetOperation.INTERSECT); + + //set the container as the root container for the cohort identification task object + cic.RootCohortAggregateContainer_ID = container.ID; + cic.SaveToDatabase(); + + //put both the aggregates into the container + container.AddChild(config1, 0); + container.AddChild(config2, 1); + + var builder = new CohortQueryBuilder(cic, null); + Assert.AreEqual( + CollapseWhitespace( string.Format( -@"DECLARE @startNumber AS int; + @"DECLARE @startNumber AS int; SET @startNumber=5; DECLARE @stopNumber AS int; SET @stopNumber=10; @@ -81,7 +88,8 @@ public void CohortGenerationDifferingTableValuedParametersTest() distinct MyAwesomeFunction.[Name] FROM - [" + TestDatabaseNames.Prefix+ @"ScratchArea]..MyAwesomeFunction(@startNumber,@stopNumber,@name) AS MyAwesomeFunction + [" + TestDatabaseNames.Prefix + + @"ScratchArea]..MyAwesomeFunction(@startNumber,@stopNumber,@name) AS MyAwesomeFunction INTERSECT @@ -90,26 +98,31 @@ public void CohortGenerationDifferingTableValuedParametersTest() distinct MyAwesomeFunction.[Name] FROM - [" + TestDatabaseNames.Prefix+@"ScratchArea]..MyAwesomeFunction(@startNumber,@stopNumber,@name) AS MyAwesomeFunction + [" + TestDatabaseNames.Prefix + + @"ScratchArea]..MyAwesomeFunction(@startNumber,@stopNumber,@name) AS MyAwesomeFunction ) -",cic.ID)), - CollapseWhitespace(builder.SQL)); - - //now override JUST @name - var param1 = new AnyTableSqlParameter(CatalogueRepository,config1, "DECLARE @name AS varchar(50);"); - param1.Value = "'lobster'"; - param1.SaveToDatabase(); - - var param2 = new AnyTableSqlParameter(CatalogueRepository,config2, "DECLARE @name AS varchar(50);"); - param2.Value = "'monkey'"; - param2.SaveToDatabase(); - - CohortQueryBuilder builder2 = new CohortQueryBuilder(cic,null); - - Assert.AreEqual( - CollapseWhitespace( +", cic.ID)), + CollapseWhitespace(builder.SQL)); + + //now override JUST @name + var param1 = new AnyTableSqlParameter(CatalogueRepository, config1, "DECLARE @name AS varchar(50);") + { + Value = "'lobster'" + }; + param1.SaveToDatabase(); + + var param2 = new AnyTableSqlParameter(CatalogueRepository, config2, "DECLARE @name AS varchar(50);") + { + Value = "'monkey'" + }; + param2.SaveToDatabase(); + + var builder2 = new CohortQueryBuilder(cic, null); + + Assert.AreEqual( + CollapseWhitespace( 
string.Format( -@"DECLARE @startNumber AS int; + @"DECLARE @startNumber AS int; SET @startNumber=5; DECLARE @stopNumber AS int; SET @stopNumber=10; @@ -124,7 +137,8 @@ public void CohortGenerationDifferingTableValuedParametersTest() distinct MyAwesomeFunction.[Name] FROM - [" + TestDatabaseNames.Prefix+ @"ScratchArea]..MyAwesomeFunction(@startNumber,@stopNumber,@name) AS MyAwesomeFunction + [" + TestDatabaseNames.Prefix + + @"ScratchArea]..MyAwesomeFunction(@startNumber,@stopNumber,@name) AS MyAwesomeFunction INTERSECT @@ -133,18 +147,17 @@ public void CohortGenerationDifferingTableValuedParametersTest() distinct MyAwesomeFunction.[Name] FROM - [" + TestDatabaseNames.Prefix+@"ScratchArea]..MyAwesomeFunction(@startNumber,@stopNumber,@name_2) AS MyAwesomeFunction + [" + TestDatabaseNames.Prefix + + @"ScratchArea]..MyAwesomeFunction(@startNumber,@stopNumber,@name_2) AS MyAwesomeFunction ) -",cic.ID)), - CollapseWhitespace(builder2.SQL)); - } - finally - { - cic.DeleteInDatabase(); - config1.DeleteInDatabase(); - config2.DeleteInDatabase(); - - } +", cic.ID)), + CollapseWhitespace(builder2.SQL)); + } + finally + { + cic.DeleteInDatabase(); + config1.DeleteInDatabase(); + config2.DeleteInDatabase(); } } } \ No newline at end of file diff --git a/Rdmp.Core.Tests/CohortCreation/QueryTests/CohortQueryBuilderWithCacheTests.cs b/Rdmp.Core.Tests/CohortCreation/QueryTests/CohortQueryBuilderWithCacheTests.cs index 1458367bb2..ea1a9f1478 100644 --- a/Rdmp.Core.Tests/CohortCreation/QueryTests/CohortQueryBuilderWithCacheTests.cs +++ b/Rdmp.Core.Tests/CohortCreation/QueryTests/CohortQueryBuilderWithCacheTests.cs @@ -6,114 +6,112 @@ using System.Data; using FAnsi.Discovery; -using MapsDirectlyToDatabaseTable.Versioning; using NUnit.Framework; using Rdmp.Core.Curation.Data; using Rdmp.Core.Databases; +using Rdmp.Core.MapsDirectlyToDatabaseTable.Versioning; using Rdmp.Core.QueryBuilding; using Rdmp.Core.QueryCaching.Aggregation; using Rdmp.Core.QueryCaching.Aggregation.Arguments; -using ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Checks; using Tests.Common; -namespace Rdmp.Core.Tests.CohortCreation.QueryTests +namespace Rdmp.Core.Tests.CohortCreation.QueryTests; + +public class CohortQueryBuilderWithCacheTests : CohortIdentificationTests { - public class CohortQueryBuilderWithCacheTests : CohortIdentificationTests + protected DiscoveredDatabase queryCacheDatabase; + protected ExternalDatabaseServer externalDatabaseServer; + protected DatabaseColumnRequest _chiColumnSpecification = new("chi", "varchar(10)"); + + [OneTimeSetUp] + protected override void OneTimeSetUp() { - protected DiscoveredDatabase queryCacheDatabase; - protected ExternalDatabaseServer externalDatabaseServer; - protected DatabaseColumnRequest _chiColumnSpecification = new DatabaseColumnRequest("chi","varchar(10)"); + base.OneTimeSetUp(); - [OneTimeSetUp] - protected override void OneTimeSetUp() - { - base.OneTimeSetUp(); + queryCacheDatabase = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase( + $"{TestDatabaseNames.Prefix}QueryCache"); - queryCacheDatabase = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(TestDatabaseNames.Prefix + "QueryCache"); + if (queryCacheDatabase.Exists()) + DeleteTables(queryCacheDatabase); - if (queryCacheDatabase.Exists()) - base.DeleteTables(queryCacheDatabase); + var executor = new MasterDatabaseScriptExecutor(queryCacheDatabase); - MasterDatabaseScriptExecutor executor = new MasterDatabaseScriptExecutor(queryCacheDatabase); + var p = new 
QueryCachingPatcher(); + executor.CreateAndPatchDatabase(p, new AcceptAllCheckNotifier()); - var p = new QueryCachingPatcher(); - executor.CreateAndPatchDatabase(p, new AcceptAllCheckNotifier()); - - externalDatabaseServer = new ExternalDatabaseServer(CatalogueRepository, "QueryCacheForUnitTests",p); - externalDatabaseServer.SetProperties(queryCacheDatabase); - } - - [Test] - public void TestGettingAggregateJustFromConfig_DistinctCHISelect() - { + externalDatabaseServer = new ExternalDatabaseServer(CatalogueRepository, "QueryCacheForUnitTests", p); + externalDatabaseServer.SetProperties(queryCacheDatabase); + } - CachedAggregateConfigurationResultsManager manager = new CachedAggregateConfigurationResultsManager( externalDatabaseServer); - - cohortIdentificationConfiguration.QueryCachingServer_ID = externalDatabaseServer.ID; - cohortIdentificationConfiguration.SaveToDatabase(); - + [Test] + public void TestGettingAggregateJustFromConfig_DistinctCHISelect() + { + var manager = new CachedAggregateConfigurationResultsManager(externalDatabaseServer); - cohortIdentificationConfiguration.CreateRootContainerIfNotExists(); - cohortIdentificationConfiguration.RootCohortAggregateContainer.AddChild(aggregate1,0); + cohortIdentificationConfiguration.QueryCachingServer_ID = externalDatabaseServer.ID; + cohortIdentificationConfiguration.SaveToDatabase(); - CohortQueryBuilder builder = new CohortQueryBuilder(cohortIdentificationConfiguration,null); - try - { - Assert.AreEqual( -CollapseWhitespace( -string.Format( -@" + + cohortIdentificationConfiguration.CreateRootContainerIfNotExists(); + cohortIdentificationConfiguration.RootCohortAggregateContainer.AddChild(aggregate1, 0); + + var builder = new CohortQueryBuilder(cohortIdentificationConfiguration, null); + try + { + Assert.AreEqual( + CollapseWhitespace( + string.Format( + @" ( /*cic_{0}_UnitTestAggregate1*/ SELECT distinct - [" + TestDatabaseNames.Prefix+@"ScratchArea].[dbo].[BulkData].[chi] + [" + TestDatabaseNames.Prefix + @"ScratchArea].[dbo].[BulkData].[chi] FROM - ["+TestDatabaseNames.Prefix+@"ScratchArea].[dbo].[BulkData] + [" + TestDatabaseNames.Prefix + @"ScratchArea].[dbo].[BulkData] ) -",cohortIdentificationConfiguration.ID)), - CollapseWhitespace(builder.SQL)); +", cohortIdentificationConfiguration.ID)), + CollapseWhitespace(builder.SQL)); - var server = queryCacheDatabase.Server; - using(var con = server.GetConnection()) - { - con.Open(); + var server = queryCacheDatabase.Server; + using (var con = server.GetConnection()) + { + con.Open(); - var da = server.GetDataAdapter(builder.SQL, con); - var dt = new DataTable(); - da.Fill(dt); + var da = server.GetDataAdapter(builder.SQL, con); + var dt = new DataTable(); + da.Fill(dt); - manager.CommitResults(new CacheCommitIdentifierList(aggregate1, - string.Format(@"/*cic_{0}_UnitTestAggregate1*/ + manager.CommitResults(new CacheCommitIdentifierList(aggregate1, + string.Format(@"/*cic_{0}_UnitTestAggregate1*/ SELECT distinct -[" +TestDatabaseNames.Prefix+@"ScratchArea].[dbo].[BulkData].[chi] +[" + TestDatabaseNames.Prefix + @"ScratchArea].[dbo].[BulkData].[chi] FROM -[" + TestDatabaseNames.Prefix + @"ScratchArea].[dbo].[BulkData]", cohortIdentificationConfiguration.ID), dt, _chiColumnSpecification, 30)); - } +[" + TestDatabaseNames.Prefix + @"ScratchArea].[dbo].[BulkData]", cohortIdentificationConfiguration.ID), dt, + _chiColumnSpecification, 30)); + } - CohortQueryBuilder builderCached = new CohortQueryBuilder(cohortIdentificationConfiguration,null); + var builderCached = new 
CohortQueryBuilder(cohortIdentificationConfiguration, null); - Assert.AreEqual( - CollapseWhitespace( + Assert.AreEqual( + CollapseWhitespace( string.Format( -@" + @" ( /*Cached:cic_{0}_UnitTestAggregate1*/ - select * from [" + queryCacheDatabase.GetRuntimeName() + "]..[IndexedExtractionIdentifierList_AggregateConfiguration" + aggregate1.ID + @"] + select * from [" + queryCacheDatabase.GetRuntimeName() + + "]..[IndexedExtractionIdentifierList_AggregateConfiguration" + aggregate1.ID + @"] ) -",cohortIdentificationConfiguration.ID)), - CollapseWhitespace(builderCached.SQL)); - - } - finally - { - cohortIdentificationConfiguration.RootCohortAggregateContainer.RemoveChild(aggregate1); - - } - +", cohortIdentificationConfiguration.ID)), + CollapseWhitespace(builderCached.SQL)); + } + finally + { + cohortIdentificationConfiguration.RootCohortAggregateContainer.RemoveChild(aggregate1); } } } \ No newline at end of file diff --git a/Rdmp.Core.Tests/CohortCreation/QueryTests/CohortSummaryQueryBuilderTests.cs b/Rdmp.Core.Tests/CohortCreation/QueryTests/CohortSummaryQueryBuilderTests.cs index 649d1adf40..caeab8701c 100644 --- a/Rdmp.Core.Tests/CohortCreation/QueryTests/CohortSummaryQueryBuilderTests.cs +++ b/Rdmp.Core.Tests/CohortCreation/QueryTests/CohortSummaryQueryBuilderTests.cs @@ -5,122 +5,139 @@ // You should have received a copy of the GNU General Public License along with RDMP. If not, see . using System; -using MapsDirectlyToDatabaseTable; using NUnit.Framework; using Rdmp.Core.Curation.Data; using Rdmp.Core.Curation.Data.Aggregation; using Rdmp.Core.Curation.Data.Cohort; using Rdmp.Core.Curation.FilterImporting; +using Rdmp.Core.MapsDirectlyToDatabaseTable; using Rdmp.Core.QueryBuilding; using Tests.Common; -namespace Rdmp.Core.Tests.CohortCreation.QueryTests +namespace Rdmp.Core.Tests.CohortCreation.QueryTests; + +public class CohortSummaryQueryBuilderTests : DatabaseTests { - public class CohortSummaryQueryBuilderTests:DatabaseTests - { - private Catalogue c; - private CatalogueItem ci; - private CatalogueItem ci2; + private Catalogue c; + private CatalogueItem ci; + private CatalogueItem ci2; - private TableInfo t; - private ColumnInfo col; - private ColumnInfo col2; + private TableInfo t; + private ColumnInfo col; + private ColumnInfo col2; - private AggregateConfiguration acCohort; - private AggregateConfiguration acDataset; - private ExtractionInformation ei_Chi; - - private CohortIdentificationConfiguration cic; - private ExtractionInformation ei_Year; + private AggregateConfiguration acCohort; + private AggregateConfiguration acDataset; + private ExtractionInformation ei_Chi; - private ISqlParameter parama1; - private ISqlParameter parama2; - private AggregateFilterContainer container1; - private AggregateFilterContainer container2; + private CohortIdentificationConfiguration cic; + private ExtractionInformation ei_Year; + private ISqlParameter parama1; + private ISqlParameter parama2; + private AggregateFilterContainer container1; + private AggregateFilterContainer container2; - [SetUp] - protected override void SetUp() - { - base.SetUp(); - c = new Catalogue(CatalogueRepository, "MyCata"); - ci = new CatalogueItem(CatalogueRepository, c, "MyCataItem"); - ci2 = new CatalogueItem(CatalogueRepository, c, "YearColumn"); - t = new TableInfo(CatalogueRepository, "MyTable"); - col = new ColumnInfo(CatalogueRepository, "mycol", "varchar(10)", t); - col2 = new ColumnInfo(CatalogueRepository, "myOtherCol", "varchar(10)", t); + [SetUp] + protected override void SetUp() + { + base.SetUp(); - 
- acCohort = new AggregateConfiguration(CatalogueRepository, c, CohortIdentificationConfiguration.CICPrefix + "Agg1_Cohort"); - acDataset = new AggregateConfiguration(CatalogueRepository, c, "Agg2_Dataset"); + c = new Catalogue(CatalogueRepository, "MyCata"); + ci = new CatalogueItem(CatalogueRepository, c, "MyCataItem"); + ci2 = new CatalogueItem(CatalogueRepository, c, "YearColumn"); + t = new TableInfo(CatalogueRepository, "MyTable"); + col = new ColumnInfo(CatalogueRepository, "mycol", "varchar(10)", t); + col2 = new ColumnInfo(CatalogueRepository, "myOtherCol", "varchar(10)", t); - ei_Year = new ExtractionInformation(CatalogueRepository, ci2, col2, "Year"); - ei_Year.IsExtractionIdentifier = true; - ei_Year.SaveToDatabase(); - acDataset.AddDimension(ei_Year); - acDataset.CountSQL = "count(*)"; - acDataset.SaveToDatabase(); + acCohort = new AggregateConfiguration(CatalogueRepository, c, + $"{CohortIdentificationConfiguration.CICPrefix}Agg1_Cohort"); + acDataset = new AggregateConfiguration(CatalogueRepository, c, "Agg2_Dataset"); - ei_Chi = new ExtractionInformation(CatalogueRepository, ci, col, "CHI"); - ei_Chi.IsExtractionIdentifier = true; - ei_Chi.SaveToDatabase(); + ei_Year = new ExtractionInformation(CatalogueRepository, ci2, col2, "Year") + { + IsExtractionIdentifier = true + }; + ei_Year.SaveToDatabase(); + acDataset.AddDimension(ei_Year); + acDataset.CountSQL = "count(*)"; + acDataset.SaveToDatabase(); - acCohort.AddDimension(ei_Chi); - cic = new CohortIdentificationConfiguration(CatalogueRepository, "mycic"); - cic.CreateRootContainerIfNotExists(); - cic.RootCohortAggregateContainer.AddChild(acCohort,0); - } - #region Constructor Arguments - [Test] - public void ConstructorArguments_SameAggregateTwice() + ei_Chi = new ExtractionInformation(CatalogueRepository, ci, col, "CHI") { - var ex = Assert.Throws(()=>new CohortSummaryQueryBuilder(acCohort,acCohort,null)); - Assert.AreEqual("Summary and Cohort should be different aggregates. Summary should be a graphable useful aggregate while cohort should return a list of private identifiers",ex.Message); - } + IsExtractionIdentifier = true + }; + ei_Chi.SaveToDatabase(); - [Test] - public void ConstructorArguments_Param1AccidentallyACohort() - { - var ex = Assert.Throws(() => new CohortSummaryQueryBuilder(acCohort, acDataset,null)); - Assert.AreEqual("The first argument to constructor CohortSummaryQueryBuilder should be a basic AggregateConfiguration (i.e. not a cohort) but the argument you passed ('cic_Agg1_Cohort') was a cohort identification configuration aggregate", ex.Message); - } - [Test] - public void ConstructorArguments_Param2AccidentallyAnAggregate() - { - //change it in memory so it doesn't look like a cohort aggregate anymore - acCohort.Name = "RegularAggregate"; - var ex = Assert.Throws(() => new CohortSummaryQueryBuilder(acDataset,acCohort,null)); - Assert.AreEqual("The second argument to constructor CohortSummaryQueryBuilder should be a cohort identification aggregate (i.e. 
have a single AggregateDimension marked IsExtractionIdentifier and have a name starting with cic_) but the argument you passed ('RegularAggregate') was NOT a cohort identification configuration aggregate", ex.Message); - acCohort.RevertToDatabaseState(); - } + acCohort.AddDimension(ei_Chi); - [Test] - public void ConstructorArguments_DifferentDatasets() - { - acCohort.Catalogue_ID = -999999; - var ex = Assert.Throws(() => new CohortSummaryQueryBuilder(acDataset, acCohort,null)); + cic = new CohortIdentificationConfiguration(CatalogueRepository, "mycic"); + cic.CreateRootContainerIfNotExists(); + cic.RootCohortAggregateContainer.AddChild(acCohort, 0); + } - Assert.IsTrue(ex.Message.StartsWith("Constructor arguments to CohortSummaryQueryBuilder must belong to the same dataset")); - acCohort.RevertToDatabaseState(); - } + #region Constructor Arguments - [Test] - public void ConstructorArguments_Normal() - { - Assert.DoesNotThrow(() => new CohortSummaryQueryBuilder(acDataset, acCohort,null)); - } - #endregion + [Test] + public void ConstructorArguments_SameAggregateTwice() + { + var ex = Assert.Throws(() => new CohortSummaryQueryBuilder(acCohort, acCohort, null)); + Assert.AreEqual( + "Summary and Cohort should be different aggregates. Summary should be a graphable useful aggregate while cohort should return a list of private identifiers", + ex.Message); + } + [Test] + public void ConstructorArguments_Param1AccidentallyACohort() + { + var ex = Assert.Throws(() => new CohortSummaryQueryBuilder(acCohort, acDataset, null)); + Assert.AreEqual( + "The first argument to constructor CohortSummaryQueryBuilder should be a basic AggregateConfiguration (i.e. not a cohort) but the argument you passed ('cic_Agg1_Cohort') was a cohort identification configuration aggregate", + ex.Message); + } - [Test] - public void QueryGeneration_BasicQuery() - { - string sql = acDataset.GetQueryBuilder().SQL; + [Test] + public void ConstructorArguments_Param2AccidentallyAnAggregate() + { + //change it in memory so it doesn't look like a cohort aggregate anymore + acCohort.Name = "RegularAggregate"; + var ex = Assert.Throws(() => new CohortSummaryQueryBuilder(acDataset, acCohort, null)); + Assert.AreEqual( + "The second argument to constructor CohortSummaryQueryBuilder should be a cohort identification aggregate (i.e. 
have a single AggregateDimension marked IsExtractionIdentifier and have a name starting with cic_) but the argument you passed ('RegularAggregate') was NOT a cohort identification configuration aggregate", + ex.Message); + acCohort.RevertToDatabaseState(); + } + + [Test] + public void ConstructorArguments_DifferentDatasets() + { + acCohort.Catalogue_ID = -999999; + var ex = Assert.Throws(() => new CohortSummaryQueryBuilder(acDataset, acCohort, null)); + + Assert.IsTrue( + ex.Message.StartsWith( + "Constructor arguments to CohortSummaryQueryBuilder must belong to the same dataset")); + acCohort.RevertToDatabaseState(); + } - Assert.AreEqual(@"/*Agg2_Dataset*/ + [Test] + public void ConstructorArguments_Normal() + { + Assert.DoesNotThrow(() => new CohortSummaryQueryBuilder(acDataset, acCohort, null)); + } + + #endregion + + + [Test] + public void QueryGeneration_BasicQuery() + { + var sql = acDataset.GetQueryBuilder().SQL; + + Assert.AreEqual(@"/*Agg2_Dataset*/ SELECT Year, count(*) AS MyCount @@ -130,47 +147,48 @@ group by Year order by Year", sql); - } + } - [Test] - public void QueryGeneration_WithLinkedCohort_WHERECHIIN() - { - var csqb = new CohortSummaryQueryBuilder(acDataset, acCohort,null); - - var ex = Assert.Throws(() => csqb.GetAdjustedAggregateBuilder(CohortSummaryAdjustment.WhereExtractionIdentifiersIn)); + [Test] + public void QueryGeneration_WithLinkedCohort_WHERECHIIN() + { + var csqb = new CohortSummaryQueryBuilder(acDataset, acCohort, null); - Assert.AreEqual("No Query Caching Server configured", ex.Message); - } + var ex = Assert.Throws(() => + csqb.GetAdjustedAggregateBuilder(CohortSummaryAdjustment.WhereExtractionIdentifiersIn)); - [Test] - public void QueryGeneration_Parameters_DifferentValues_WHERECHIIN() - { - CreateParameters("'bob'","'fish'"); + Assert.AreEqual("No Query Caching Server configured", ex.Message); + } - try - { - var csqb = new CohortSummaryQueryBuilder(acDataset, acCohort,null); + [Test] + public void QueryGeneration_Parameters_DifferentValues_WHERECHIIN() + { + CreateParameters("'bob'", "'fish'"); - var ex = Assert.Throws(() => csqb.GetAdjustedAggregateBuilder(CohortSummaryAdjustment.WhereExtractionIdentifiersIn)); + try + { + var csqb = new CohortSummaryQueryBuilder(acDataset, acCohort, null); - Assert.AreEqual("No Query Caching Server configured",ex.Message); - } - finally - { - DestroyParameters(); - } - } + var ex = Assert.Throws(() => + csqb.GetAdjustedAggregateBuilder(CohortSummaryAdjustment.WhereExtractionIdentifiersIn)); + Assert.AreEqual("No Query Caching Server configured", ex.Message); + } + finally + { + DestroyParameters(); + } + } - [Test] - public void QueryGeneration_NoCohortWhereLogic() - { - var csqb = new CohortSummaryQueryBuilder(acDataset, acCohort,null); + [Test] + public void QueryGeneration_NoCohortWhereLogic() + { + var csqb = new CohortSummaryQueryBuilder(acDataset, acCohort, null); - var builder = csqb.GetAdjustedAggregateBuilder(CohortSummaryAdjustment.WhereRecordsIn); + var builder = csqb.GetAdjustedAggregateBuilder(CohortSummaryAdjustment.WhereRecordsIn); - Assert.AreEqual(@"/*Agg2_Dataset*/ + Assert.AreEqual(@"/*Agg2_Dataset*/ SELECT Year, count(*) AS MyCount @@ -180,25 +198,27 @@ group by Year order by Year", builder.SQL); - } + } - [Test] - public void QueryGeneration_BothHaveWHEREContainerAndParameters() - { - CreateParameters("'bob'", "'fish'"); + [Test] + public void QueryGeneration_BothHaveWHEREContainerAndParameters() + { + CreateParameters("'bob'", "'fish'"); - var global = new 
AnyTableSqlParameter(CatalogueRepository, cic, "DECLARE @bob AS varchar(50);"); - global.Value = "'zomber'"; - global.SaveToDatabase(); + var global = new AnyTableSqlParameter(CatalogueRepository, cic, "DECLARE @bob AS varchar(50);") + { + Value = "'zomber'" + }; + global.SaveToDatabase(); - try - { - ((IDeleteable)parama1).DeleteInDatabase(); - var csqb = new CohortSummaryQueryBuilder(acDataset, acCohort,null); + try + { + ((IDeleteable)parama1).DeleteInDatabase(); + var csqb = new CohortSummaryQueryBuilder(acDataset, acCohort, null); - var builder = csqb.GetAdjustedAggregateBuilder(CohortSummaryAdjustment.WhereRecordsIn); + var builder = csqb.GetAdjustedAggregateBuilder(CohortSummaryAdjustment.WhereRecordsIn); - Assert.AreEqual(CollapseWhitespace(@"DECLARE @bob AS varchar(50); + Assert.AreEqual(CollapseWhitespace(@"DECLARE @bob AS varchar(50); SET @bob='zomber'; /*Agg2_Dataset*/ SELECT @@ -223,54 +243,56 @@ group by Year order by Year"), CollapseWhitespace(builder.SQL)); - - } - finally - { - global.DeleteInDatabase(); - DestroyParameters(); - } } - private void DestroyParameters() + finally { - container1.GetFilters()[0].DeleteInDatabase(); - container2.GetFilters()[0].DeleteInDatabase(); - - container1.DeleteInDatabase(); - container2.DeleteInDatabase(); + global.DeleteInDatabase(); + DestroyParameters(); } + } - private void CreateParameters(string param1Value,string param2Value) - { - container1 = new AggregateFilterContainer(CatalogueRepository, FilterContainerOperation.AND); - acDataset.RootFilterContainer_ID = container1.ID; - acDataset.SaveToDatabase(); + private void DestroyParameters() + { + container1.GetFilters()[0].DeleteInDatabase(); + container2.GetFilters()[0].DeleteInDatabase(); - AggregateFilter filter1 = new AggregateFilter(CatalogueRepository, "Filter1", container1); - filter1.WhereSQL = "@bob = 'bob'"; - filter1.SaveToDatabase(); + container1.DeleteInDatabase(); + container2.DeleteInDatabase(); + } - var paramCreator = new ParameterCreator(filter1.GetFilterFactory(), null, null); - paramCreator.CreateAll(filter1, null); + private void CreateParameters(string param1Value, string param2Value) + { + container1 = new AggregateFilterContainer(CatalogueRepository, FilterContainerOperation.AND); + acDataset.RootFilterContainer_ID = container1.ID; + acDataset.SaveToDatabase(); - container2 = new AggregateFilterContainer(CatalogueRepository, FilterContainerOperation.AND); - acCohort.RootFilterContainer_ID = container2.ID; - acCohort.SaveToDatabase(); + var filter1 = new AggregateFilter(CatalogueRepository, "Filter1", container1) + { + WhereSQL = "@bob = 'bob'" + }; + filter1.SaveToDatabase(); - AggregateFilter filter2 = new AggregateFilter(CatalogueRepository, "Filter2", container2); - filter2.WhereSQL = "@bob = 'fish'"; - filter2.SaveToDatabase(); + var paramCreator = new ParameterCreator(filter1.GetFilterFactory(), null, null); + paramCreator.CreateAll(filter1, null); - paramCreator.CreateAll(filter2, null); + container2 = new AggregateFilterContainer(CatalogueRepository, FilterContainerOperation.AND); + acCohort.RootFilterContainer_ID = container2.ID; + acCohort.SaveToDatabase(); - parama1 = filter1.GetAllParameters()[0]; - parama1.Value = param1Value; - parama1.SaveToDatabase(); + var filter2 = new AggregateFilter(CatalogueRepository, "Filter2", container2) + { + WhereSQL = "@bob = 'fish'" + }; + filter2.SaveToDatabase(); - parama2 = filter2.GetAllParameters()[0]; - parama2.Value = param2Value; - parama2.SaveToDatabase(); - - } + paramCreator.CreateAll(filter2, null); + + 
parama1 = filter1.GetAllParameters()[0]; + parama1.Value = param1Value; + parama1.SaveToDatabase(); + + parama2 = filter2.GetAllParameters()[0]; + parama2.Value = param2Value; + parama2.SaveToDatabase(); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/CohortCreation/QueryTests/JoinableCohortConfigurationTests.cs b/Rdmp.Core.Tests/CohortCreation/QueryTests/JoinableCohortConfigurationTests.cs index e976d977d9..297f2aaf1e 100644 --- a/Rdmp.Core.Tests/CohortCreation/QueryTests/JoinableCohortConfigurationTests.cs +++ b/Rdmp.Core.Tests/CohortCreation/QueryTests/JoinableCohortConfigurationTests.cs @@ -10,510 +10,558 @@ using System.Linq; using System.Text.RegularExpressions; using FAnsi.Discovery; -using MapsDirectlyToDatabaseTable; -using MapsDirectlyToDatabaseTable.Versioning; using NUnit.Framework; using Rdmp.Core.Curation.Data; using Rdmp.Core.Curation.Data.Aggregation; using Rdmp.Core.Curation.Data.Cohort; using Rdmp.Core.Curation.Data.Cohort.Joinables; using Rdmp.Core.Databases; +using Rdmp.Core.MapsDirectlyToDatabaseTable; +using Rdmp.Core.MapsDirectlyToDatabaseTable.Versioning; using Rdmp.Core.QueryBuilding; using Rdmp.Core.QueryCaching.Aggregation; using Rdmp.Core.QueryCaching.Aggregation.Arguments; -using ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Checks; using Tests.Common; -namespace Rdmp.Core.Tests.CohortCreation.QueryTests +namespace Rdmp.Core.Tests.CohortCreation.QueryTests; + +public class JoinableCohortConfigurationTests : CohortIdentificationTests { - public class JoinableCohortConfigurationTests : CohortIdentificationTests - { - private DiscoveredDatabase _queryCachingDatabase; + private DiscoveredDatabase _queryCachingDatabase; - [Test] - public void CreateJoinable() + [Test] + public void CreateJoinable() + { + JoinableCohortAggregateConfiguration joinable = null; + try { - JoinableCohortAggregateConfiguration joinable = null; - try - { - joinable = new JoinableCohortAggregateConfiguration(CatalogueRepository, cohortIdentificationConfiguration, aggregate1); + joinable = new JoinableCohortAggregateConfiguration(CatalogueRepository, cohortIdentificationConfiguration, + aggregate1); - Assert.AreEqual(joinable.CohortIdentificationConfiguration_ID, cohortIdentificationConfiguration.ID); - Assert.AreEqual(joinable.AggregateConfiguration_ID, aggregate1.ID); - } - finally - { - if(joinable != null) - joinable.DeleteInDatabase(); - } + Assert.AreEqual(joinable.CohortIdentificationConfiguration_ID, cohortIdentificationConfiguration.ID); + Assert.AreEqual(joinable.AggregateConfiguration_ID, aggregate1.ID); } - - [Test] - public void CreateJoinable_IsAlreadyInAContainer() + finally { - cohortIdentificationConfiguration.RootCohortAggregateContainer.AddChild(aggregate1,1); - - var ex = Assert.Throws(() => new JoinableCohortAggregateConfiguration(CatalogueRepository, cohortIdentificationConfiguration, aggregate1)); - Assert.AreEqual("Cannot make aggregate UnitTestAggregate1 into a Joinable aggregate because it is already in a CohortAggregateContainer", ex.Message); - cohortIdentificationConfiguration.RootCohortAggregateContainer.RemoveChild(aggregate1); + joinable?.DeleteInDatabase(); } + } - [Test] - public void CreateJoinable_NoIsExtractionIdentifier() - { - //delete the first dimension (chi) - aggregate1.AggregateDimensions.First().DeleteInDatabase(); - aggregate1.ClearAllInjections(); + [Test] + public void CreateJoinable_IsAlreadyInAContainer() + { + cohortIdentificationConfiguration.RootCohortAggregateContainer.AddChild(aggregate1, 1); + + var ex 
= Assert.Throws(() => + new JoinableCohortAggregateConfiguration(CatalogueRepository, cohortIdentificationConfiguration, + aggregate1)); + Assert.AreEqual( + "Cannot make aggregate UnitTestAggregate1 into a Joinable aggregate because it is already in a CohortAggregateContainer", + ex.Message); + cohortIdentificationConfiguration.RootCohortAggregateContainer.RemoveChild(aggregate1); + } - var ex = Assert.Throws(()=>new JoinableCohortAggregateConfiguration(CatalogueRepository, cohortIdentificationConfiguration, aggregate1)); - Assert.AreEqual("Cannot make aggregate UnitTestAggregate1 into a Joinable aggregate because it has 0 columns marked IsExtractionIdentifier", ex.Message); - } + [Test] + public void CreateJoinable_NoIsExtractionIdentifier() + { + //delete the first dimension (chi) + aggregate1.AggregateDimensions.First().DeleteInDatabase(); + aggregate1.ClearAllInjections(); + + var ex = Assert.Throws(() => + new JoinableCohortAggregateConfiguration(CatalogueRepository, cohortIdentificationConfiguration, + aggregate1)); + Assert.AreEqual( + "Cannot make aggregate UnitTestAggregate1 into a Joinable aggregate because it has 0 columns marked IsExtractionIdentifier", + ex.Message); + } - [Test] - public void CreateJoinable_AddTwice() + [Test] + public void CreateJoinable_AddTwice() + { + //delete the first dimension (chi) + var join1 = new JoinableCohortAggregateConfiguration(CatalogueRepository, cohortIdentificationConfiguration, + aggregate1); + try { - //delete the first dimension (chi) - var join1 = new JoinableCohortAggregateConfiguration(CatalogueRepository, cohortIdentificationConfiguration, aggregate1); - try - { - if(CatalogueRepository is TableRepository) - { - var ex = Assert.Throws(() => new JoinableCohortAggregateConfiguration(CatalogueRepository, cohortIdentificationConfiguration, aggregate1)); - Assert.IsTrue(ex.Message.Contains("ix_eachAggregateCanOnlyBeJoinableOnOneProject")); - } - } - finally + if (CatalogueRepository is TableRepository) { - join1.DeleteInDatabase(); + var ex = Assert.Throws(() => + new JoinableCohortAggregateConfiguration(CatalogueRepository, cohortIdentificationConfiguration, + aggregate1)); + Assert.IsTrue(ex.Message.Contains("ix_eachAggregateCanOnlyBeJoinableOnOneProject")); } } + finally + { + join1.DeleteInDatabase(); + } + } - [Test] - public void CreateUsers() + [Test] + public void CreateUsers() + { + JoinableCohortAggregateConfiguration joinable = null; + try { - JoinableCohortAggregateConfiguration joinable = null; - try - { - joinable = new JoinableCohortAggregateConfiguration(CatalogueRepository, cohortIdentificationConfiguration, aggregate1); - joinable.AddUser(aggregate2); + joinable = new JoinableCohortAggregateConfiguration(CatalogueRepository, cohortIdentificationConfiguration, + aggregate1); + joinable.AddUser(aggregate2); - Assert.IsTrue(joinable.Users.Length == 1); - Assert.AreEqual(aggregate2,joinable.Users[0].AggregateConfiguration); - } - finally - { - if (joinable != null) - joinable.DeleteInDatabase(); - } + Assert.IsTrue(joinable.Users.Length == 1); + Assert.AreEqual(aggregate2, joinable.Users[0].AggregateConfiguration); + } + finally + { + joinable?.DeleteInDatabase(); } + } - [Test] - public void CreateUsers_DuplicateUser() + [Test] + public void CreateUsers_DuplicateUser() + { + var joinable = + new JoinableCohortAggregateConfiguration(CatalogueRepository, cohortIdentificationConfiguration, + aggregate1); + try { - var joinable = new JoinableCohortAggregateConfiguration(CatalogueRepository, cohortIdentificationConfiguration, 
aggregate1); - try - { - joinable.AddUser(aggregate2); - var ex = Assert.Throws(()=>joinable.AddUser(aggregate2)); - Assert.AreEqual($"AggregateConfiguration 'UnitTestAggregate2' already uses 'Patient Index Table:cic_{cohortIdentificationConfiguration.ID}_UnitTestAggregate1'. Only one patient index table join is permitted.", ex.Message); - } - finally - { - joinable.DeleteInDatabase(); - } + joinable.AddUser(aggregate2); + var ex = Assert.Throws(() => joinable.AddUser(aggregate2)); + Assert.AreEqual( + $"AggregateConfiguration 'UnitTestAggregate2' already uses 'Patient Index Table:cic_{cohortIdentificationConfiguration.ID}_UnitTestAggregate1'. Only one patient index table join is permitted.", + ex.Message); } + finally + { + joinable.DeleteInDatabase(); + } + } - [Test] - public void CreateUsers_SelfReferrential() + [Test] + public void CreateUsers_SelfReferrential() + { + var joinable = + new JoinableCohortAggregateConfiguration(CatalogueRepository, cohortIdentificationConfiguration, + aggregate1); + try { - var joinable = new JoinableCohortAggregateConfiguration(CatalogueRepository, cohortIdentificationConfiguration, aggregate1); - try - { - var ex = Assert.Throws(()=>joinable.AddUser(aggregate1)); - Assert.AreEqual("Cannot configure AggregateConfiguration UnitTestAggregate1 as a Join user to itself!", ex.Message); - } - finally - { - joinable.DeleteInDatabase(); - } + var ex = Assert.Throws(() => joinable.AddUser(aggregate1)); + Assert.AreEqual("Cannot configure AggregateConfiguration UnitTestAggregate1 as a Join user to itself!", + ex.Message); + } + finally + { + joinable.DeleteInDatabase(); } + } - [Test] - public void CreateUsers_ToAnyOtherJoinable() + [Test] + public void CreateUsers_ToAnyOtherJoinable() + { + var joinable = + new JoinableCohortAggregateConfiguration(CatalogueRepository, cohortIdentificationConfiguration, + aggregate1); + var joinable2 = + new JoinableCohortAggregateConfiguration(CatalogueRepository, cohortIdentificationConfiguration, + aggregate2); + try { - var joinable = new JoinableCohortAggregateConfiguration(CatalogueRepository, cohortIdentificationConfiguration, aggregate1); - var joinable2 = new JoinableCohortAggregateConfiguration(CatalogueRepository, cohortIdentificationConfiguration, aggregate2); - try - { - var ex = Assert.Throws(() => joinable.AddUser(aggregate2)); - Assert.AreEqual("Cannot add user UnitTestAggregate2 because that AggregateConfiguration is itself a JoinableCohortAggregateConfiguration", ex.Message); - } - finally - { - joinable.DeleteInDatabase(); - joinable2.DeleteInDatabase(); - } + var ex = Assert.Throws(() => joinable.AddUser(aggregate2)); + Assert.AreEqual( + "Cannot add user UnitTestAggregate2 because that AggregateConfiguration is itself a JoinableCohortAggregateConfiguration", + ex.Message); } - [Test] - public void CreateUsers_ToNoExtractionIdentifierTable() + finally { - var joinable = new JoinableCohortAggregateConfiguration(CatalogueRepository, cohortIdentificationConfiguration, aggregate1); + joinable.DeleteInDatabase(); + joinable2.DeleteInDatabase(); + } + } - aggregate2.AggregateDimensions.First().DeleteInDatabase(); - aggregate2.ClearAllInjections(); + [Test] + public void CreateUsers_ToNoExtractionIdentifierTable() + { + var joinable = + new JoinableCohortAggregateConfiguration(CatalogueRepository, cohortIdentificationConfiguration, + aggregate1); - try - { - var ex = Assert.Throws(() => joinable.AddUser(aggregate2)); - Assert.AreEqual("Cannot configure AggregateConfiguration UnitTestAggregate2 as join user because it 
does not contain exactly 1 IsExtractionIdentifier dimension", ex.Message); - } - finally - { - joinable.DeleteInDatabase(); - } - } + aggregate2.AggregateDimensions.First().DeleteInDatabase(); + aggregate2.ClearAllInjections(); - [Test] - public void QueryBuilderTest() + try { - //make aggregate 2 a joinable - var joinable2 = new JoinableCohortAggregateConfiguration(CatalogueRepository,cohortIdentificationConfiguration, aggregate2); - joinable2.AddUser(aggregate1); + var ex = Assert.Throws(() => joinable.AddUser(aggregate2)); + Assert.AreEqual( + "Cannot configure AggregateConfiguration UnitTestAggregate2 as join user because it does not contain exactly 1 IsExtractionIdentifier dimension", + ex.Message); + } + finally + { + joinable.DeleteInDatabase(); + } + } - var builder = new CohortQueryBuilder(aggregate1, null,null); - Console.WriteLine(builder.SQL); - try + [Test] + public void QueryBuilderTest() + { + //make aggregate 2 a joinable + var joinable2 = + new JoinableCohortAggregateConfiguration(CatalogueRepository, cohortIdentificationConfiguration, + aggregate2); + joinable2.AddUser(aggregate1); + + var builder = new CohortQueryBuilder(aggregate1, null, null); + Console.WriteLine(builder.SQL); + try + { + using (var con = (SqlConnection)Database.Server.GetConnection()) { - - using (var con = (SqlConnection)Database.Server.GetConnection()) - { - con.Open(); + con.Open(); - using var dbReader = new SqlCommand(builder.SQL, con).ExecuteReader(); - - //can read at least one row - Assert.IsTrue(dbReader.Read()); - } + using var dbReader = new SqlCommand(builder.SQL, con).ExecuteReader(); - string expectedTableAlias = "ix" + joinable2.ID; + //can read at least one row + Assert.IsTrue(dbReader.Read()); + } - //after joinables - Assert.AreEqual( - string.Format( - @"/*cic_{1}_UnitTestAggregate1*/ + var expectedTableAlias = $"ix{joinable2.ID}"; + + //after joinables + Assert.AreEqual( + string.Format( + @"/*cic_{1}_UnitTestAggregate1*/ SELECT distinct -["+TestDatabaseNames.Prefix+@"ScratchArea].[dbo].[BulkData].[chi] +[" + TestDatabaseNames.Prefix + @"ScratchArea].[dbo].[BulkData].[chi] FROM -["+TestDatabaseNames.Prefix+ @"ScratchArea].[dbo].[BulkData] +[" + TestDatabaseNames.Prefix + @"ScratchArea].[dbo].[BulkData] LEFT Join ( /*cic_{1}_UnitTestAggregate2*/ SELECT distinct - [" + TestDatabaseNames.Prefix+@"ScratchArea].[dbo].[BulkData].[chi] + [" + TestDatabaseNames.Prefix + @"ScratchArea].[dbo].[BulkData].[chi] FROM - ["+TestDatabaseNames.Prefix+@"ScratchArea].[dbo].[BulkData] + [" + TestDatabaseNames.Prefix + @"ScratchArea].[dbo].[BulkData] ){0} -on ["+TestDatabaseNames.Prefix+@"ScratchArea].[dbo].[BulkData].[chi] = {0}.chi",expectedTableAlias,cohortIdentificationConfiguration.ID), builder.SQL); - - } - finally - { - joinable2.Users[0].DeleteInDatabase(); - joinable2.DeleteInDatabase(); - } +on [" + TestDatabaseNames.Prefix + @"ScratchArea].[dbo].[BulkData].[chi] = {0}.chi", expectedTableAlias, + cohortIdentificationConfiguration.ID), builder.SQL); } - - [Test] - public void QueryBuilderTest_AdditionalColumn() + finally { - var anotherCol = aggregate2.Catalogue.GetAllExtractionInformation(ExtractionCategory.Any).Single(e => e.GetRuntimeName().Equals("dtCreated")); - aggregate2.AddDimension(anotherCol); + joinable2.Users[0].DeleteInDatabase(); + joinable2.DeleteInDatabase(); + } + } - //make aggregate 2 a joinable - var joinable2 = new JoinableCohortAggregateConfiguration(CatalogueRepository, cohortIdentificationConfiguration, aggregate2); - joinable2.AddUser(aggregate1); + [Test] + public 
void QueryBuilderTest_AdditionalColumn() + { + var anotherCol = aggregate2.Catalogue.GetAllExtractionInformation(ExtractionCategory.Any) + .Single(e => e.GetRuntimeName().Equals("dtCreated")); + aggregate2.AddDimension(anotherCol); - string expectedTableAlias = "ix" + joinable2.ID; + //make aggregate 2 a joinable + var joinable2 = + new JoinableCohortAggregateConfiguration(CatalogueRepository, cohortIdentificationConfiguration, + aggregate2); + joinable2.AddUser(aggregate1); - var filterContainer1 = new AggregateFilterContainer(CatalogueRepository, FilterContainerOperation.AND); - var filterContainer2 = new AggregateFilterContainer(CatalogueRepository, FilterContainerOperation.AND); + var expectedTableAlias = $"ix{joinable2.ID}"; - var filter1 = new AggregateFilter(CatalogueRepository, "Within 1 year of event", filterContainer1); - var filter2 = new AggregateFilter(CatalogueRepository, "DateAfter2001", filterContainer2); + var filterContainer1 = new AggregateFilterContainer(CatalogueRepository, FilterContainerOperation.AND); + var filterContainer2 = new AggregateFilterContainer(CatalogueRepository, FilterContainerOperation.AND); - filter1.WhereSQL = string.Format("ABS(DATEDIFF(year, {0}.dtCreated, ["+TestDatabaseNames.Prefix+@"ScratchArea].[dbo].[BulkData].dtCreated)) <= 1",expectedTableAlias); - filter1.SaveToDatabase(); + var filter1 = new AggregateFilter(CatalogueRepository, "Within 1 year of event", filterContainer1); + var filter2 = new AggregateFilter(CatalogueRepository, "DateAfter2001", filterContainer2); - filter2.WhereSQL = "dtCreated > '2001-01-01'"; - filter2.SaveToDatabase(); + filter1.WhereSQL = + string.Format( + "ABS(DATEDIFF(year, {0}.dtCreated, [" + TestDatabaseNames.Prefix + + @"ScratchArea].[dbo].[BulkData].dtCreated)) <= 1", expectedTableAlias); + filter1.SaveToDatabase(); - aggregate1.RootFilterContainer_ID = filterContainer1.ID; - aggregate1.SaveToDatabase(); + filter2.WhereSQL = "dtCreated > '2001-01-01'"; + filter2.SaveToDatabase(); - aggregate2.RootFilterContainer_ID = filterContainer2.ID; - aggregate2.SaveToDatabase(); + aggregate1.RootFilterContainer_ID = filterContainer1.ID; + aggregate1.SaveToDatabase(); - var builder = new CohortQueryBuilder(aggregate1, null,null); + aggregate2.RootFilterContainer_ID = filterContainer2.ID; + aggregate2.SaveToDatabase(); + var builder = new CohortQueryBuilder(aggregate1, null, null); - Console.WriteLine(builder.SQL); - - try + Console.WriteLine(builder.SQL); + + + try + { + using (var con = (SqlConnection)Database.Server.GetConnection()) { - using (var con = (SqlConnection)Database.Server.GetConnection()) - { - con.Open(); + con.Open(); - using var dbReader = new SqlCommand(builder.SQL, con).ExecuteReader(); + using var dbReader = new SqlCommand(builder.SQL, con).ExecuteReader(); - //can read at least one row - Assert.IsTrue(dbReader.Read()); - } + //can read at least one row + Assert.IsTrue(dbReader.Read()); + } - //after joinables - Assert.AreEqual( - CollapseWhitespace( + //after joinables + Assert.AreEqual( + CollapseWhitespace( string.Format( - @"/*cic_{1}_UnitTestAggregate1*/ + @"/*cic_{1}_UnitTestAggregate1*/ SELECT distinct -["+TestDatabaseNames.Prefix+@"ScratchArea].[dbo].[BulkData].[chi] +[" + TestDatabaseNames.Prefix + @"ScratchArea].[dbo].[BulkData].[chi] FROM -["+TestDatabaseNames.Prefix+ @"ScratchArea].[dbo].[BulkData] +[" + TestDatabaseNames.Prefix + @"ScratchArea].[dbo].[BulkData] LEFT Join ( /*cic_{1}_UnitTestAggregate2*/ SELECT distinct - [" + TestDatabaseNames.Prefix+@"ScratchArea].[dbo].[BulkData].[chi], 
["+TestDatabaseNames.Prefix+@"ScratchArea].[dbo].[BulkData].[dtCreated] + [" + TestDatabaseNames.Prefix + @"ScratchArea].[dbo].[BulkData].[chi], [" + TestDatabaseNames.Prefix + + @"ScratchArea].[dbo].[BulkData].[dtCreated] FROM - ["+TestDatabaseNames.Prefix+@"ScratchArea].[dbo].[BulkData] + [" + TestDatabaseNames.Prefix + @"ScratchArea].[dbo].[BulkData] WHERE ( /*DateAfter2001*/ dtCreated > '2001-01-01' ) ){0} -on ["+TestDatabaseNames.Prefix+@"ScratchArea].[dbo].[BulkData].[chi] = {0}.chi +on [" + TestDatabaseNames.Prefix + @"ScratchArea].[dbo].[BulkData].[chi] = {0}.chi WHERE ( /*Within 1 year of event*/ -ABS(DATEDIFF(year, {0}.dtCreated, ["+TestDatabaseNames.Prefix+@"ScratchArea].[dbo].[BulkData].dtCreated)) <= 1 -)", expectedTableAlias,cohortIdentificationConfiguration.ID)), CollapseWhitespace(builder.SQL)); +ABS(DATEDIFF(year, {0}.dtCreated, [" + TestDatabaseNames.Prefix + @"ScratchArea].[dbo].[BulkData].dtCreated)) <= 1 +)", expectedTableAlias, cohortIdentificationConfiguration.ID)), CollapseWhitespace(builder.SQL)); + } + finally + { + filter1.DeleteInDatabase(); + filter2.DeleteInDatabase(); - } - finally - { - filter1.DeleteInDatabase(); - filter2.DeleteInDatabase(); + filterContainer1.DeleteInDatabase(); - filterContainer1.DeleteInDatabase(); + filterContainer2.DeleteInDatabase(); - filterContainer2.DeleteInDatabase(); - - joinable2.Users[0].DeleteInDatabase(); - joinable2.DeleteInDatabase(); - } + joinable2.Users[0].DeleteInDatabase(); + joinable2.DeleteInDatabase(); } + } - [Test] - public void QueryBuilderTest_JoinableCloning() - { - var anotherCol = aggregate2.Catalogue.GetAllExtractionInformation(ExtractionCategory.Any).Single(e => e.GetRuntimeName().Equals("dtCreated")); - aggregate2.AddDimension(anotherCol); + [Test] + public void QueryBuilderTest_JoinableCloning() + { + var anotherCol = aggregate2.Catalogue.GetAllExtractionInformation(ExtractionCategory.Any) + .Single(e => e.GetRuntimeName().Equals("dtCreated")); + aggregate2.AddDimension(anotherCol); - //make aggregate 2 a joinable - var joinable2 = new JoinableCohortAggregateConfiguration(CatalogueRepository, cohortIdentificationConfiguration, aggregate2); - joinable2.AddUser(aggregate1); + //make aggregate 2 a joinable + var joinable2 = + new JoinableCohortAggregateConfiguration(CatalogueRepository, cohortIdentificationConfiguration, + aggregate2); + joinable2.AddUser(aggregate1); - string expectedTableAlias = "ix" + joinable2.ID; + var expectedTableAlias = $"ix{joinable2.ID}"; - var filterContainer1 = new AggregateFilterContainer(CatalogueRepository, FilterContainerOperation.AND); - var filterContainer2 = new AggregateFilterContainer(CatalogueRepository, FilterContainerOperation.AND); + var filterContainer1 = new AggregateFilterContainer(CatalogueRepository, FilterContainerOperation.AND); + var filterContainer2 = new AggregateFilterContainer(CatalogueRepository, FilterContainerOperation.AND); - var filter1 = new AggregateFilter(CatalogueRepository, "Within 1 year of event", filterContainer1); - var filter2 = new AggregateFilter(CatalogueRepository, "DateAfter2001", filterContainer2); + var filter1 = new AggregateFilter(CatalogueRepository, "Within 1 year of event", filterContainer1); + var filter2 = new AggregateFilter(CatalogueRepository, "DateAfter2001", filterContainer2); - filter1.WhereSQL = string.Format("ABS(DATEDIFF(year, {0}.dtCreated, ["+TestDatabaseNames.Prefix+@"ScratchArea].[dbo].[BulkData].dtCreated)) <= 1", expectedTableAlias); - filter1.SaveToDatabase(); + filter1.WhereSQL = + string.Format( + 
"ABS(DATEDIFF(year, {0}.dtCreated, [" + TestDatabaseNames.Prefix + + @"ScratchArea].[dbo].[BulkData].dtCreated)) <= 1", expectedTableAlias); + filter1.SaveToDatabase(); - filter2.WhereSQL = "dtCreated > '2001-01-01'"; - filter2.SaveToDatabase(); + filter2.WhereSQL = "dtCreated > '2001-01-01'"; + filter2.SaveToDatabase(); - aggregate1.RootFilterContainer_ID = filterContainer1.ID; - aggregate1.SaveToDatabase(); + aggregate1.RootFilterContainer_ID = filterContainer1.ID; + aggregate1.SaveToDatabase(); - aggregate2.RootFilterContainer_ID = filterContainer2.ID; - aggregate2.SaveToDatabase(); + aggregate2.RootFilterContainer_ID = filterContainer2.ID; + aggregate2.SaveToDatabase(); - //add the first aggregate to the configuration - rootcontainer.AddChild(aggregate1,1); - - var globalParameter = new AnyTableSqlParameter(CatalogueRepository, cohortIdentificationConfiguration,"DECLARE @fish varchar(50)"); - globalParameter.Comment = "Comments for the crazies"; - globalParameter.Value = "'fishes'"; - globalParameter.SaveToDatabase(); + //add the first aggregate to the configuration + rootcontainer.AddChild(aggregate1, 1); - var builder = new CohortQueryBuilder(cohortIdentificationConfiguration,null); + var globalParameter = new AnyTableSqlParameter(CatalogueRepository, cohortIdentificationConfiguration, + "DECLARE @fish varchar(50)") + { + Comment = "Comments for the crazies", + Value = "'fishes'" + }; + globalParameter.SaveToDatabase(); - try - { - var clone = cohortIdentificationConfiguration.CreateClone(new ThrowImmediatelyCheckNotifier()); + var builder = new CohortQueryBuilder(cohortIdentificationConfiguration, null); - var cloneBuilder = new CohortQueryBuilder(clone,null); + try + { + var clone = cohortIdentificationConfiguration.CreateClone(ThrowImmediatelyCheckNotifier.Quiet); - string origSql = builder.SQL; - string cloneOrigSql = cloneBuilder.SQL; + var cloneBuilder = new CohortQueryBuilder(clone, null); - Console.WriteLine("//////////////////////////////////////////////VERBATIM//////////////////////////////////////////////"); - Console.WriteLine(origSql); - Console.WriteLine(cloneOrigSql); - Console.WriteLine("//////////////////////////////////////////////END VERBATIM//////////////////////////////////////////////"); + var origSql = builder.SQL; + var cloneOrigSql = cloneBuilder.SQL; - var builderSql = Regex.Replace(Regex.Replace(origSql, "cic_[0-9]+_", ""), "ix[0-9]+", "ix"); - var cloneBuilderSql = Regex.Replace(Regex.Replace(cloneOrigSql, "cic_[0-9]+_", ""), "ix[0-9]+", "ix").Replace("(Clone)", "");//get rid of explicit ix53 etc for the comparison + Console.WriteLine( + "//////////////////////////////////////////////VERBATIM//////////////////////////////////////////////"); + Console.WriteLine(origSql); + Console.WriteLine(cloneOrigSql); + Console.WriteLine( + "//////////////////////////////////////////////END VERBATIM//////////////////////////////////////////////"); - Console.WriteLine("//////////////////////////////////////////////TEST COMPARISON IS//////////////////////////////////////////////"); - Console.WriteLine(builderSql); - Console.WriteLine(cloneBuilderSql); - Console.WriteLine("//////////////////////////////////////////////END COMPARISON//////////////////////////////////////////////"); + var builderSql = Regex.Replace(Regex.Replace(origSql, "cic_[0-9]+_", ""), "ix[0-9]+", "ix"); + var cloneBuilderSql = Regex.Replace(Regex.Replace(cloneOrigSql, "cic_[0-9]+_", ""), "ix[0-9]+", "ix") + .Replace("(Clone)", ""); //get rid of explicit ix53 etc for the comparison - 
Assert.AreEqual(builderSql, cloneBuilderSql); + Console.WriteLine( + "//////////////////////////////////////////////TEST COMPARISON IS//////////////////////////////////////////////"); + Console.WriteLine(builderSql); + Console.WriteLine(cloneBuilderSql); + Console.WriteLine( + "//////////////////////////////////////////////END COMPARISON//////////////////////////////////////////////"); + Assert.AreEqual(builderSql, cloneBuilderSql); - ////////////////Cleanup Database////////////////////////////// - //find the WHERE logic too - var containerClone = clone.RootCohortAggregateContainer.GetAllAggregateConfigurationsRecursively()//get all the aggregates - .Union(clone.GetAllJoinables().Select(j=>j.AggregateConfiguration))//including the joinables - .Where(a => a.RootFilterContainer_ID != null)//that have WHERE sql - .Select(ag => ag.RootFilterContainer);//grab their containers so we can clean them SetUp - ((IDeleteable)clone.GetAllParameters()[0]).DeleteInDatabase(); - clone.DeleteInDatabase(); + ////////////////Cleanup Database////////////////////////////// + //find the WHERE logic too + var containerClone = clone.RootCohortAggregateContainer + .GetAllAggregateConfigurationsRecursively() //get all the aggregates + .Union(clone.GetAllJoinables().Select(j => j.AggregateConfiguration)) //including the joinables + .Where(a => a.RootFilterContainer_ID != null) //that have WHERE sql + .Select(ag => ag.RootFilterContainer); //grab their containers so we can clean them SetUp - //delete the WHERE logic too - foreach (AggregateFilterContainer c in containerClone) - c.DeleteInDatabase(); - } - finally - { - rootcontainer.RemoveChild(aggregate1); + ((IDeleteable)clone.GetAllParameters()[0]).DeleteInDatabase(); + clone.DeleteInDatabase(); - filter1.DeleteInDatabase(); - filter2.DeleteInDatabase(); + //delete the WHERE logic too + foreach (AggregateFilterContainer c in containerClone) + c.DeleteInDatabase(); + } + finally + { + rootcontainer.RemoveChild(aggregate1); - filterContainer1.DeleteInDatabase(); + filter1.DeleteInDatabase(); + filter2.DeleteInDatabase(); - filterContainer2.DeleteInDatabase(); + filterContainer1.DeleteInDatabase(); - joinable2.Users[0].DeleteInDatabase(); - joinable2.DeleteInDatabase(); + filterContainer2.DeleteInDatabase(); - globalParameter.DeleteInDatabase(); - } + joinable2.Users[0].DeleteInDatabase(); + joinable2.DeleteInDatabase(); + + globalParameter.DeleteInDatabase(); } + } + + + [Test] + public void JoinablesWithCache() + { + var queryCachingDatabaseName = To.GetRuntimeName(); + _queryCachingDatabase = To; + + //make aggregate 2 a joinable + var joinable2 = + new JoinableCohortAggregateConfiguration(CatalogueRepository, cohortIdentificationConfiguration, + aggregate2); + joinable2.AddUser(aggregate1); + //make aggregate 2 have an additional column (dtCreated) + var anotherCol = aggregate2.Catalogue.GetAllExtractionInformation(ExtractionCategory.Any) + .Single(e => e.GetRuntimeName().Equals("dtCreated")); + aggregate2.AddDimension(anotherCol); - [Test] - public void JoinablesWithCache() + //create a caching server + var scripter = new MasterDatabaseScriptExecutor(_queryCachingDatabase); + scripter.CreateAndPatchDatabase(new QueryCachingPatcher(), new AcceptAllCheckNotifier()); + + var queryCachingDatabaseServer = + new ExternalDatabaseServer(CatalogueRepository, queryCachingDatabaseName, null); + queryCachingDatabaseServer.SetProperties(_queryCachingDatabase); + + try { - string queryCachingDatabaseName = To.GetRuntimeName(); - _queryCachingDatabase = To; - - //make 
aggregate 2 a joinable - var joinable2 = new JoinableCohortAggregateConfiguration(CatalogueRepository, cohortIdentificationConfiguration, aggregate2); - joinable2.AddUser(aggregate1); - - //make aggregate 2 have an additional column (dtCreated) - var anotherCol = aggregate2.Catalogue.GetAllExtractionInformation(ExtractionCategory.Any).Single(e => e.GetRuntimeName().Equals("dtCreated")); - aggregate2.AddDimension(anotherCol); - - //create a caching server - MasterDatabaseScriptExecutor scripter = new MasterDatabaseScriptExecutor(_queryCachingDatabase); - scripter.CreateAndPatchDatabase(new QueryCachingPatcher(), new AcceptAllCheckNotifier()); - - var queryCachingDatabaseServer = new ExternalDatabaseServer(CatalogueRepository, queryCachingDatabaseName,null); - queryCachingDatabaseServer.SetProperties(_queryCachingDatabase); - - try - { - - var builderForCaching = new CohortQueryBuilder(aggregate2, null,null); + var builderForCaching = new CohortQueryBuilder(aggregate2, null, null); - var cacheDt = new DataTable(); - using (SqlConnection con = (SqlConnection)Database.Server.GetConnection()) - { - con.Open(); - SqlDataAdapter da = new SqlDataAdapter(new SqlCommand(builderForCaching.SQL, con)); - da.Fill(cacheDt); - } + var cacheDt = new DataTable(); + using (var con = (SqlConnection)Database.Server.GetConnection()) + { + con.Open(); + var da = new SqlDataAdapter(new SqlCommand(builderForCaching.SQL, con)); + da.Fill(cacheDt); + } - var cacheManager = new CachedAggregateConfigurationResultsManager(queryCachingDatabaseServer); - cacheManager.CommitResults(new CacheCommitJoinableInceptionQuery(aggregate2, builderForCaching.SQL, cacheDt, null,30)); + var cacheManager = new CachedAggregateConfigurationResultsManager(queryCachingDatabaseServer); + cacheManager.CommitResults( + new CacheCommitJoinableInceptionQuery(aggregate2, builderForCaching.SQL, cacheDt, null, 30)); - try + try + { + var builder = new CohortQueryBuilder(aggregate1, null, null) { - var builder = new CohortQueryBuilder(aggregate1, null,null); - //make the builder use the query cache we just set SetUp - builder.CacheServer = queryCachingDatabaseServer; + CacheServer = queryCachingDatabaseServer + }; - Console.WriteLine(builder.SQL); + Console.WriteLine(builder.SQL); - using (var con = (SqlConnection)Database.Server.GetConnection()) - { - con.Open(); + using (var con = (SqlConnection)Database.Server.GetConnection()) + { + con.Open(); - using var dbReader = new SqlCommand(builder.SQL, con).ExecuteReader(); + using var dbReader = new SqlCommand(builder.SQL, con).ExecuteReader(); - //can read at least one row - Assert.IsTrue(dbReader.Read()); - } + //can read at least one row + Assert.IsTrue(dbReader.Read()); + } - string expectedTableAlias = "ix" + joinable2.ID; + var expectedTableAlias = $"ix{joinable2.ID}"; - //after joinables - Assert.AreEqual( - CollapseWhitespace( + //after joinables + Assert.AreEqual( + CollapseWhitespace( string.Format( - @"/*cic_{2}_UnitTestAggregate1*/ + @"/*cic_{2}_UnitTestAggregate1*/ SELECT distinct -["+TestDatabaseNames.Prefix+@"ScratchArea].[dbo].[BulkData].[chi] +[" + TestDatabaseNames.Prefix + @"ScratchArea].[dbo].[BulkData].[chi] FROM -["+TestDatabaseNames.Prefix+@"ScratchArea].[dbo].[BulkData] +[" + TestDatabaseNames.Prefix + @"ScratchArea].[dbo].[BulkData] LEFT Join ( /*Cached:cic_{2}_UnitTestAggregate2*/ select * from [{3}]..[JoinableInceptionQuery_AggregateConfiguration{1}] ){0} on [" + TestDatabaseNames.Prefix + @"ScratchArea].[dbo].[BulkData].[chi] = {0}.chi", - expectedTableAlias, //{0} - 
aggregate2.ID, //{1} - cohortIdentificationConfiguration.ID,//{2} - queryCachingDatabaseName) //{3} - ),CollapseWhitespace(builder.SQL)); - - } - finally - { - joinable2.Users[0].DeleteInDatabase(); - joinable2.DeleteInDatabase(); - } + expectedTableAlias, //{0} + aggregate2.ID, //{1} + cohortIdentificationConfiguration.ID, //{2} + queryCachingDatabaseName) //{3} + ), CollapseWhitespace(builder.SQL)); } - finally + finally { - - queryCachingDatabaseServer.DeleteInDatabase(); - DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(queryCachingDatabaseName).Drop(); - + joinable2.Users[0].DeleteInDatabase(); + joinable2.DeleteInDatabase(); } - - - - + } + finally + { + queryCachingDatabaseServer.DeleteInDatabase(); + DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(queryCachingDatabaseName).Drop(); } } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/CohortCreation/SimpleCohortIdentificationTests.cs b/Rdmp.Core.Tests/CohortCreation/SimpleCohortIdentificationTests.cs index 84147376b6..13cf973454 100644 --- a/Rdmp.Core.Tests/CohortCreation/SimpleCohortIdentificationTests.cs +++ b/Rdmp.Core.Tests/CohortCreation/SimpleCohortIdentificationTests.cs @@ -9,87 +9,86 @@ using Rdmp.Core.Curation.Data.Cohort; using Tests.Common; -namespace Rdmp.Core.Tests.CohortCreation +namespace Rdmp.Core.Tests.CohortCreation; + +public class SimpleCohortIdentificationTests : DatabaseTests { - public class SimpleCohortIdentificationTests:DatabaseTests + [Test] + public void CreateNewCohortIdentificationConfiguration_SaveAndReload() { - [Test] - public void CreateNewCohortIdentificationConfiguration_SaveAndReload() + var config = new CohortIdentificationConfiguration(CatalogueRepository, "franky"); + + try + { + Assert.IsTrue(config.Exists()); + Assert.AreEqual("franky", config.Name); + + config.Description = "Hi there"; + config.SaveToDatabase(); + + + var config2 = CatalogueRepository.GetObjectByID(config.ID); + Assert.AreEqual("Hi there", config2.Description); + } + finally { - var config = new CohortIdentificationConfiguration(CatalogueRepository, "franky"); - - try - { - Assert.IsTrue(config.Exists()); - Assert.AreEqual("franky",config.Name); - - config.Description = "Hi there"; - config.SaveToDatabase(); - - - CohortIdentificationConfiguration config2 = CatalogueRepository.GetObjectByID(config.ID); - Assert.AreEqual("Hi there", config2.Description); - } - finally - { - config.DeleteInDatabase(); - Assert.IsFalse(config.Exists()); - } + config.DeleteInDatabase(); + Assert.IsFalse(config.Exists()); } + } + + [Test] + public void ContainerCreate() + { + var container = new CohortAggregateContainer(CatalogueRepository, SetOperation.UNION); - [Test] - public void ContainerCreate() + try { - var container = new CohortAggregateContainer(CatalogueRepository,SetOperation.UNION); - - try - { - Assert.AreEqual(SetOperation.UNION,container.Operation); - - container.Operation = SetOperation.INTERSECT; - container.SaveToDatabase(); - - var container2 = CatalogueRepository.GetObjectByID(container.ID); - Assert.AreEqual(SetOperation.INTERSECT, container2.Operation); - } - finally - { - container.DeleteInDatabase(); - } + Assert.AreEqual(SetOperation.UNION, container.Operation); + + container.Operation = SetOperation.INTERSECT; + container.SaveToDatabase(); + + var container2 = CatalogueRepository.GetObjectByID(container.ID); + Assert.AreEqual(SetOperation.INTERSECT, container2.Operation); } + finally + { + container.DeleteInDatabase(); + } + } + + + [Test] + public void 
Container_Subcontainering() + { + var container = new CohortAggregateContainer(CatalogueRepository, SetOperation.UNION); + + var container2 = new CohortAggregateContainer(CatalogueRepository, SetOperation.INTERSECT); + try + { + Assert.AreEqual(0, container.GetSubContainers().Length); + + + Assert.AreEqual(0, container.GetSubContainers().Length); + //set container to parent + container.AddChild(container2); - [Test] - public void Container_Subcontainering() + //container 1 should now contain container 2 + Assert.AreEqual(1, container.GetSubContainers().Length); + Assert.Contains(container2, container.GetSubContainers()); + + //container 2 should not have any children + Assert.AreEqual(0, container2.GetSubContainers().Length); + } + finally { - var container = new CohortAggregateContainer(CatalogueRepository,SetOperation.UNION); - - var container2 = new CohortAggregateContainer(CatalogueRepository,SetOperation.INTERSECT); - try - { - Assert.AreEqual(0, container.GetSubContainers().Length); - - - Assert.AreEqual(0, container.GetSubContainers().Length); - - //set container to parent - container.AddChild(container2); - - //container 1 should now contain container 2 - Assert.AreEqual(1, container.GetSubContainers().Length); - Assert.Contains(container2, container.GetSubContainers()); - - //container 2 should not have any children - Assert.AreEqual(0, container2.GetSubContainers().Length); - } - finally - { - container.DeleteInDatabase(); - - //container 2 was contained within container 1 so should have also been deleted - Assert.Throws( - () => CatalogueRepository.GetObjectByID(container2.ID)); - } + container.DeleteInDatabase(); + + //container 2 was contained within container 1 so should have also been deleted + Assert.Throws( + () => CatalogueRepository.GetObjectByID(container2.ID)); } } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/CommandExecution/AxisAndPivotCLITests.cs b/Rdmp.Core.Tests/CommandExecution/AxisAndPivotCLITests.cs index 1d153c06b6..3597a350ce 100644 --- a/Rdmp.Core.Tests/CommandExecution/AxisAndPivotCLITests.cs +++ b/Rdmp.Core.Tests/CommandExecution/AxisAndPivotCLITests.cs @@ -9,108 +9,113 @@ using Rdmp.Core.Curation.Data.Aggregation; using System; -namespace Rdmp.Core.Tests.CommandExecution +namespace Rdmp.Core.Tests.CommandExecution; + +public class AxisAndPivotCLITests : CommandCliTests { - public class AxisAndPivotCLITests : CommandCliTests - { [Test] public void SetPivot_DimensionNonExistant() { - var ac = WhenIHaveA(); + var ac = WhenIHaveA(); + + var cmd = new ExecuteCommandSetPivot(GetMockActivator(), ac, "fff"); + var ex = Assert.Throws(() => cmd.Execute()); - var cmd = new ExecuteCommandSetPivot(GetMockActivator().Object, ac,"fff"); - var ex = Assert.Throws(()=>cmd.Execute()); - - Assert.AreEqual("Could not find AggregateDimension fff in Aggregate My graph so could not set it as a pivot dimension. Try adding the column to the aggregate first", ex.Message); + Assert.AreEqual( + "Could not find AggregateDimension fff in Aggregate My graph so could not set it as a pivot dimension. 
Try adding the column to the aggregate first", + ex.Message); } [Test] public void SetPivot_Exists() { - var ac = WhenIHaveA(); - var dim = WhenIHaveA(); - - - dim.AggregateConfiguration_ID = ac.ID; - dim.Alias = "frogmarch"; + var ac = WhenIHaveA(); + var dim = WhenIHaveA(); + + + dim.AggregateConfiguration_ID = ac.ID; + dim.Alias = "frogmarch"; - var cmd = new ExecuteCommandSetPivot(GetMockActivator().Object, ac, "frogmarch"); - cmd.Execute(); + var cmd = new ExecuteCommandSetPivot(GetMockActivator(), ac, "frogmarch"); + cmd.Execute(); - Assert.AreEqual(dim.ID, ac.PivotOnDimensionID); + Assert.AreEqual(dim.ID, ac.PivotOnDimensionID); - cmd = new ExecuteCommandSetPivot(GetMockActivator().Object, ac, null); - cmd.Execute(); + cmd = new ExecuteCommandSetPivot(GetMockActivator(), ac, null); + cmd.Execute(); - Assert.IsNull(ac.PivotOnDimensionID); + Assert.IsNull(ac.PivotOnDimensionID); } [Test] public void SetPivot_ExistsButIsADate() { - var ac = WhenIHaveA(); - var dim = WhenIHaveA(); + var ac = WhenIHaveA(); + var dim = WhenIHaveA(); - dim.AggregateConfiguration_ID = ac.ID; - dim.Alias = "frogmarch"; - dim.ColumnInfo.Data_type = "datetime"; + dim.AggregateConfiguration_ID = ac.ID; + dim.Alias = "frogmarch"; + dim.ColumnInfo.Data_type = "datetime"; - var cmd = new ExecuteCommandSetPivot(GetMockActivator().Object, ac, "frogmarch"); - var ex = Assert.Throws(()=>cmd.Execute()); + var cmd = new ExecuteCommandSetPivot(GetMockActivator(), ac, "frogmarch"); + var ex = Assert.Throws(() => cmd.Execute()); - Assert.AreEqual("AggregateDimension frogmarch is a Date so cannot set it as a Pivot for Aggregate My graph",ex.Message); + Assert.AreEqual("AggregateDimension frogmarch is a Date so cannot set it as a Pivot for Aggregate My graph", + ex.Message); } [Test] public void SetAxis_DimensionNonExistant() { - var ac = WhenIHaveA(); + var ac = WhenIHaveA(); - var cmd = new ExecuteCommandSetAxis(GetMockActivator().Object, ac, "fff"); - var ex = Assert.Throws(() => cmd.Execute()); + var cmd = new ExecuteCommandSetAxis(GetMockActivator(), ac, "fff"); + var ex = Assert.Throws(() => cmd.Execute()); - Assert.AreEqual("Could not find AggregateDimension fff in Aggregate My graph so could not set it as an axis dimension. Try adding the column to the aggregate first", ex.Message); + Assert.AreEqual( + "Could not find AggregateDimension fff in Aggregate My graph so could not set it as an axis dimension. 
Try adding the column to the aggregate first",
+ ex.Message);
 }
 [Test]
 public void SetAxis_Exists()
 {
- var ac = WhenIHaveA();
- var dim = WhenIHaveA();
+ var ac = WhenIHaveA();
+ var dim = WhenIHaveA();
- dim.AggregateConfiguration_ID = ac.ID;
- dim.Alias = "frogmarch";
- dim.ColumnInfo.Data_type = "datetime";
+ dim.AggregateConfiguration_ID = ac.ID;
+ dim.Alias = "frogmarch";
+ dim.ColumnInfo.Data_type = "datetime";
- Assert.IsNull(ac.GetAxisIfAny());
+ Assert.IsNull(ac.GetAxisIfAny());
- var cmd = new ExecuteCommandSetAxis(GetMockActivator().Object, ac, "frogmarch");
- cmd.Execute();
+ var cmd = new ExecuteCommandSetAxis(GetMockActivator(), ac, "frogmarch");
+ cmd.Execute();
- Assert.IsNotNull(ac.GetAxisIfAny());
+ Assert.IsNotNull(ac.GetAxisIfAny());
- cmd = new ExecuteCommandSetAxis(GetMockActivator().Object, ac, null);
- cmd.Execute();
+ cmd = new ExecuteCommandSetAxis(GetMockActivator(), ac, null);
+ cmd.Execute();
- Assert.IsNull(ac.GetAxisIfAny());
+ Assert.IsNull(ac.GetAxisIfAny());
 }
 [Test]
 public void SetAxis_ExistsButIsNotADate()
 {
- var ac = WhenIHaveA();
- var dim = WhenIHaveA();
+ var ac = WhenIHaveA();
+ var dim = WhenIHaveA();
- dim.AggregateConfiguration_ID = ac.ID;
- dim.Alias = "frogmarch";
+ dim.AggregateConfiguration_ID = ac.ID;
+ dim.Alias = "frogmarch";
- var cmd = new ExecuteCommandSetAxis(GetMockActivator().Object, ac, "frogmarch");
- var ex = Assert.Throws(() => cmd.Execute());
+ var cmd = new ExecuteCommandSetAxis(GetMockActivator(), ac, "frogmarch");
+ var ex = Assert.Throws(() => cmd.Execute());
- Assert.AreEqual("AggregateDimension frogmarch is not a Date so cannot set it as an axis for Aggregate My graph", ex.Message);
+ Assert.AreEqual("AggregateDimension frogmarch is not a Date so cannot set it as an axis for Aggregate My graph",
+ ex.Message);
 }
- }
-}
+}
\ No newline at end of file
diff --git a/Rdmp.Core.Tests/CommandExecution/CommandCliTests.cs b/Rdmp.Core.Tests/CommandExecution/CommandCliTests.cs
index 07eac59c06..246768e000 100644
--- a/Rdmp.Core.Tests/CommandExecution/CommandCliTests.cs
+++ b/Rdmp.Core.Tests/CommandExecution/CommandCliTests.cs
@@ -7,65 +7,58 @@
 using System;
 using System.Collections.Generic;
 using System.Linq;
-using Moq;
+using NSubstitute;
 using Rdmp.Core.CommandExecution;
 using Rdmp.Core.CommandLine.Interactive;
 using Rdmp.Core.CommandLine.Options;
 using Rdmp.Core.CommandLine.Runners;
 using Rdmp.Core.DataFlowPipeline;
-using ReusableLibraryCode.Checks;
-using ReusableLibraryCode.Progress;
+using Rdmp.Core.ReusableLibraryCode.Checks;
+using Rdmp.Core.ReusableLibraryCode.Progress;
 using Tests.Common;
-namespace Rdmp.Core.Tests.CommandExecution
+namespace Rdmp.Core.Tests.CommandExecution;
+
+///
+/// Base class for all tests which test RDMP CLI command line arguments to run derived
+/// classes
+///
+public abstract class CommandCliTests : UnitTests
 {
- ///
- /// Base class for all tests which test RDMP CLI command line arguments to run derrived
- /// classes
- ///
- public abstract class CommandCliTests : UnitTests
+ protected CommandInvoker GetInvoker()
 {
- protected override void OneTimeSetUp()
+ var invoker = new CommandInvoker(new ConsoleInputManager(RepositoryLocator, ThrowImmediatelyCheckNotifier.Quiet)
 {
- base.OneTimeSetUp();
+ DisallowInput = true
+ });
+ invoker.CommandImpossible += (s, c) => throw new Exception(c.Command.ReasonCommandImpossible);
- SetupMEF();
- }
+ return invoker;
+ }
- protected CommandInvoker GetInvoker()
- {
- var invoker = new CommandInvoker(new ConsoleInputManager(RepositoryLocator,new 
ThrowImmediatelyCheckNotifier()) - { - DisallowInput = true - }); - invoker.CommandImpossible +=(s,c)=> throw new Exception(c.Command.ReasonCommandImpossible); + protected IBasicActivateItems GetMockActivator() + { + var mock = Substitute.For(); + mock.RepositoryLocator.Returns(RepositoryLocator); + mock.GetDelegates().Returns(new List()); + return mock; + } - return invoker; - } - - protected Mock GetMockActivator() + /// + /// Runs the provided string which should start after the cmd e.g. the bit after rdmp cmd + /// + /// 1 string per piece following rdmp cmd. Element 0 should be the Type of command to run + /// + protected int Run(params string[] command) + { + var opts = new ExecuteCommandOptions { - var mock = new Mock(); - mock.Setup(m => m.RepositoryLocator).Returns(RepositoryLocator); - mock.Setup(m => m.GetDelegates()).Returns(new List()); - mock.Setup(m => m.Show(It.IsAny())); - return mock; - } + CommandName = command[0], + CommandArgs = command.Skip(1).ToArray() + }; - /// - /// Runs the provided string which should start after the cmd e.g. the bit after rdmp cmd - /// - /// 1 string per piece following rdmp cmd. Element 0 should be the Type of command to run - /// - protected int Run(params string[] command) - { - var opts = new ExecuteCommandOptions(); - opts.CommandName = command[0]; - opts.CommandArgs = command.Skip(1).ToArray(); - - var runner = new ExecuteCommandRunner(opts); - return runner.Run(RepositoryLocator, new ThrowImmediatelyDataLoadEventListener(), - new ThrowImmediatelyCheckNotifier(), new GracefulCancellationToken()); - } + var runner = new ExecuteCommandRunner(opts); + return runner.Run(RepositoryLocator, ThrowImmediatelyDataLoadEventListener.Quiet, + ThrowImmediatelyCheckNotifier.Quiet, new GracefulCancellationToken()); } } \ No newline at end of file diff --git a/Rdmp.Core.Tests/CommandExecution/CommandInvokerTests.cs b/Rdmp.Core.Tests/CommandExecution/CommandInvokerTests.cs index 26e38ee98c..145adca75f 100644 --- a/Rdmp.Core.Tests/CommandExecution/CommandInvokerTests.cs +++ b/Rdmp.Core.Tests/CommandExecution/CommandInvokerTests.cs @@ -9,105 +9,91 @@ using NUnit.Framework; using Rdmp.Core.CommandExecution; using Rdmp.Core.CommandExecution.AtomicCommands; -using Rdmp.Core.CommandLine.Interactive; using Rdmp.Core.CommandLine.Interactive.Picking; using Rdmp.Core.Curation.Data; -using ReusableLibraryCode.Checks; using Tests.Common; -namespace Rdmp.Core.Tests.CommandExecution +namespace Rdmp.Core.Tests.CommandExecution; + +public class CommandInvokerTests : UnitTests { - public class CommandInvokerTests : UnitTests + [Test] + [Timeout(5000)] + public void Test_ListSupportedCommands_NoPicker() { - protected override void OneTimeSetUp() - { - base.OneTimeSetUp(); + var mgr = GetActivator(); + var invoker = new CommandInvoker(mgr); - SetupMEF(); - } + invoker.ExecuteCommand(typeof(ExecuteCommandListSupportedCommands), null); + } - [Test] - [Timeout(5000)] - public void Test_ListSupportedCommands_NoPicker() - { - var mgr = GetActivator(); - var invoker = new CommandInvoker(mgr); - - invoker.ExecuteCommand(typeof(ExecuteCommandListSupportedCommands),null); - } + [Test] + [Timeout(5000)] + public void Test_Delete_WithPicker() + { + var mgr = GetActivator(); + var invoker = new CommandInvoker(mgr); - [Test] - [Timeout(5000)] - public void Test_Delete_WithPicker() - { - var mgr = GetActivator(); - var invoker = new CommandInvoker(mgr); + WhenIHaveA(); - WhenIHaveA(); + var picker = new CommandLineObjectPicker(new[] { "Catalogue:*" }, mgr); + 
invoker.ExecuteCommand(typeof(ExecuteCommandDelete), picker); + } - var picker = new CommandLineObjectPicker(new[] {"Catalogue:*"}, mgr); - invoker.ExecuteCommand(typeof(ExecuteCommandDelete),picker); - } + [Test] + [Timeout(5000)] + public void Test_Generic_WithPicker() + { + var mgr = GetActivator(); + var invoker = new CommandInvoker(mgr); + + WhenIHaveA(); + + invoker.ExecuteCommand(typeof(GenericTestCommand), GetPicker("Catalogue:*")); + invoker.ExecuteCommand(typeof(GenericTestCommand), GetPicker("Pipeline")); + invoker.ExecuteCommand(typeof(GenericTestCommand), + GetPicker( + "DatabaseType:MicrosoftSqlServer:Name:imaging:Server=localhost\\sqlexpress;Database=master;Trusted_Connection=True;", + "true")); + } + + private CommandLineObjectPicker GetPicker(params string[] args) => new(args, GetActivator()); - [Test] - [Timeout(5000)] - public void Test_Generic_WithPicker() + private class GenericTestCommand : BasicCommandExecution + { + private readonly T _arg; + + public GenericTestCommand(T a) { - var mgr = GetActivator(); - var invoker = new CommandInvoker(mgr); - - WhenIHaveA(); - - invoker.ExecuteCommand(typeof(GenericTestCommand),GetPicker("Catalogue:*")); - invoker.ExecuteCommand(typeof(GenericTestCommand),GetPicker("Pipeline")); - invoker.ExecuteCommand(typeof(GenericTestCommand), - GetPicker( - "DatabaseType:MicrosoftSqlServer:Name:imaging:Server=localhost\\sqlexpress;Database=master;Trusted_Connection=True;", - "true")); - + _arg = a; } - private CommandLineObjectPicker GetPicker(params string[] args) + public override void Execute() { - return new CommandLineObjectPicker(args, GetActivator()); + base.Execute(); + Console.Write($"Arg was {_arg}"); + Assert.IsNotNull(_arg); } + } + + private class GenericTestCommand : BasicCommandExecution + { + private readonly T1 _a; + private readonly T2 _b; - private class GenericTestCommand : BasicCommandExecution + public GenericTestCommand(T1 a, T2 b) { - private readonly T _arg; - - public GenericTestCommand(T a) - { - _arg = a; - } - - public override void Execute() - { - base.Execute(); - Console.Write("Arg was " + _arg); - Assert.IsNotNull(_arg); - } + _a = a; + _b = b; } - private class GenericTestCommand : BasicCommandExecution + public override void Execute() { - private readonly T1 _a; - private readonly T2 _b; - - public GenericTestCommand(T1 a, T2 b) - { - _a = a; - _b = b; - } - - public override void Execute() - { - base.Execute(); - Console.Write("_a was " + _a); - Console.Write("_b was " + _b); - Assert.IsNotNull(_a); - Assert.IsNotNull(_b); - } + base.Execute(); + Console.Write($"_a was {_a}"); + Console.Write($"_b was {_b}"); + Assert.IsNotNull(_a); + Assert.IsNotNull(_b); } } } \ No newline at end of file diff --git a/Rdmp.Core.Tests/CommandExecution/ExecuteCommandAddNewFilterContainerTests.cs b/Rdmp.Core.Tests/CommandExecution/ExecuteCommandAddNewFilterContainerTests.cs index 725bb01b34..17e77483b2 100644 --- a/Rdmp.Core.Tests/CommandExecution/ExecuteCommandAddNewFilterContainerTests.cs +++ b/Rdmp.Core.Tests/CommandExecution/ExecuteCommandAddNewFilterContainerTests.cs @@ -11,53 +11,54 @@ using Rdmp.Core.Curation.Data.Aggregation; using Tests.Common; -namespace Rdmp.Core.Tests.CommandExecution +namespace Rdmp.Core.Tests.CommandExecution; + +public class ExecuteCommandAddNewFilterContainerTests : UnitTests { - public class ExecuteCommandAddNewFilterContainerTests : UnitTests + [Test] + public void TestNormalCase() + { + var ac = WhenIHaveA(); + var cmd = new ExecuteCommandAddNewFilterContainer(new 
ThrowImmediatelyActivator(RepositoryLocator), ac); + + Assert.IsNull(ac.RootFilterContainer_ID); + + Assert.IsNull(cmd.ReasonCommandImpossible); + Assert.IsFalse(cmd.IsImpossible); + + cmd.Execute(); + + Assert.IsNotNull(ac.RootFilterContainer_ID); + } + + [Test] + public void Impossible_BecauseAlreadyHasContainer() + { + var ac = WhenIHaveA(); + + ac.CreateRootContainerIfNotExists(); + Assert.IsNotNull(ac.RootFilterContainer_ID); + + var cmd = new ExecuteCommandAddNewFilterContainer(new ThrowImmediatelyActivator(RepositoryLocator), ac); + + Assert.AreEqual("There is already a root filter container on this object", cmd.ReasonCommandImpossible); + Assert.IsTrue(cmd.IsImpossible); + } + + [Test] + public void Impossible_BecauseAPI() { - [Test] - public void TestNormalCase() - { - var ac = WhenIHaveA(); - var cmd = new ExecuteCommandAddNewFilterContainer(new ThrowImmediatelyActivator(RepositoryLocator), ac); - - Assert.IsNull(ac.RootFilterContainer_ID); - - Assert.IsNull(cmd.ReasonCommandImpossible); - Assert.IsFalse(cmd.IsImpossible); - - cmd.Execute(); - - Assert.IsNotNull(ac.RootFilterContainer_ID); - } - [Test] - public void Impossible_BecauseAlreadyHasContainer() - { - var ac = WhenIHaveA(); - - ac.CreateRootContainerIfNotExists(); - Assert.IsNotNull(ac.RootFilterContainer_ID); - - var cmd = new ExecuteCommandAddNewFilterContainer(new ThrowImmediatelyActivator(RepositoryLocator), ac); - - Assert.AreEqual("There is already a root filter container on this object", cmd.ReasonCommandImpossible); - Assert.IsTrue(cmd.IsImpossible); - } - [Test] - public void Impossible_BecauseAPI() - { - var ac = WhenIHaveA(); - - var c = ac.Catalogue; - c.Name = PluginCohortCompiler.ApiPrefix + "MyAwesomeAPI"; - c.SaveToDatabase(); - - Assert.IsTrue(c.IsApiCall()); - - var cmd = new ExecuteCommandAddNewFilterContainer(new ThrowImmediatelyActivator(RepositoryLocator), ac); - - Assert.AreEqual("Filters cannot be added to API calls", cmd.ReasonCommandImpossible); - Assert.IsTrue(cmd.IsImpossible); - } + var ac = WhenIHaveA(); + + var c = ac.Catalogue; + c.Name = $"{PluginCohortCompiler.ApiPrefix}MyAwesomeAPI"; + c.SaveToDatabase(); + + Assert.IsTrue(c.IsApiCall()); + + var cmd = new ExecuteCommandAddNewFilterContainer(new ThrowImmediatelyActivator(RepositoryLocator), ac); + + Assert.AreEqual("Filters cannot be added to API calls", cmd.ReasonCommandImpossible); + Assert.IsTrue(cmd.IsImpossible); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/CommandExecution/ExecuteCommandAddPipelineComponentTests.cs b/Rdmp.Core.Tests/CommandExecution/ExecuteCommandAddPipelineComponentTests.cs index 7ca2169a32..b27c66e9a8 100644 --- a/Rdmp.Core.Tests/CommandExecution/ExecuteCommandAddPipelineComponentTests.cs +++ b/Rdmp.Core.Tests/CommandExecution/ExecuteCommandAddPipelineComponentTests.cs @@ -5,82 +5,77 @@ // You should have received a copy of the GNU General Public License along with RDMP. If not, see . 
using NUnit.Framework; -using Rdmp.Core.CommandExecution.AtomicCommands; using Rdmp.Core.Curation.Data.Pipelines; using Rdmp.Core.DataExport.DataExtraction.Pipeline.Destinations; using Rdmp.Core.DataLoad.Engine.Pipeline.Components; using Rdmp.Core.DataLoad.Modules.DataFlowOperations.Swapping; using Rdmp.Core.DataLoad.Modules.DataFlowSources; using System; -using System.Collections.Generic; -using System.Linq; -using System.Text; -using System.Threading.Tasks; -namespace Rdmp.Core.Tests.CommandExecution +namespace Rdmp.Core.Tests.CommandExecution; + +internal class ExecuteCommandAddPipelineComponentTests : CommandCliTests { - class ExecuteCommandAddPipelineComponentTests : CommandCliTests + [Test] + public void TestCreatePipelineWithCommands() { - [Test] - public void TestCreatePipelineWithCommands() - { - var p = WhenIHaveA(); - - Assert.IsNull(p.Source); - Assert.IsNull(p.Destination); - Assert.IsEmpty(p.PipelineComponents); + var p = WhenIHaveA(); - Run("AddPipelineComponent", $"Pipeline:{p.ID}", nameof(DelimitedFlatFileDataFlowSource)); + Assert.IsNull(p.Source); + Assert.IsNull(p.Destination); + Assert.IsEmpty(p.PipelineComponents); - Run("AddPipelineComponent", $"Pipeline:{p.ID}", nameof(CleanStrings),"2"); - Run("AddPipelineComponent", $"Pipeline:{p.ID}", nameof(ColumnSwapper),"1"); + Run("AddPipelineComponent", $"Pipeline:{p.ID}", nameof(DelimitedFlatFileDataFlowSource)); - Run("AddPipelineComponent", $"Pipeline:{p.ID}", nameof(ExecuteFullExtractionToDatabaseMSSql)); + Run("AddPipelineComponent", $"Pipeline:{p.ID}", nameof(CleanStrings), "2"); + Run("AddPipelineComponent", $"Pipeline:{p.ID}", nameof(ColumnSwapper), "1"); - p.ClearAllInjections(); + Run("AddPipelineComponent", $"Pipeline:{p.ID}", nameof(ExecuteFullExtractionToDatabaseMSSql)); - Assert.IsNotNull(p.Source); - Assert.AreEqual(typeof(DelimitedFlatFileDataFlowSource), p.Source.GetClassAsSystemType()); - Assert.IsNotEmpty(p.Source.GetAllArguments()); + p.ClearAllInjections(); - Assert.AreEqual(4, p.PipelineComponents.Count); + Assert.IsNotNull(p.Source); + Assert.AreEqual(typeof(DelimitedFlatFileDataFlowSource), p.Source.GetClassAsSystemType()); + Assert.IsNotEmpty(p.Source.GetAllArguments()); - Assert.AreEqual(1, p.PipelineComponents[1].Order); - Assert.AreEqual(typeof(ColumnSwapper), p.PipelineComponents[1].GetClassAsSystemType()); + Assert.AreEqual(4, p.PipelineComponents.Count); - Assert.AreEqual(2, p.PipelineComponents[2].Order); - Assert.AreEqual(typeof(CleanStrings), p.PipelineComponents[2].GetClassAsSystemType()); + Assert.AreEqual(1, p.PipelineComponents[1].Order); + Assert.AreEqual(typeof(ColumnSwapper), p.PipelineComponents[1].GetClassAsSystemType()); - Assert.IsNotNull(p.Destination); - Assert.AreEqual(typeof(ExecuteFullExtractionToDatabaseMSSql), p.Destination.GetClassAsSystemType()); - Assert.IsNotEmpty(p.Destination.GetAllArguments()); + Assert.AreEqual(2, p.PipelineComponents[2].Order); + Assert.AreEqual(typeof(CleanStrings), p.PipelineComponents[2].GetClassAsSystemType()); + Assert.IsNotNull(p.Destination); + Assert.AreEqual(typeof(ExecuteFullExtractionToDatabaseMSSql), p.Destination.GetClassAsSystemType()); + Assert.IsNotEmpty(p.Destination.GetAllArguments()); + } - } + [Test] + public void TestCreatePipeline_TooManySources() + { + var p = WhenIHaveA(); - [Test] - public void TestCreatePipeline_TooManySources() - { - var p = WhenIHaveA(); + Assert.IsNull(p.Source); - Assert.IsNull(p.Source); + Run("AddPipelineComponent", $"Pipeline:{p.ID}", nameof(DelimitedFlatFileDataFlowSource)); + var ex = 
Assert.Throws(() => + Run("AddPipelineComponent", $"Pipeline:{p.ID}", nameof(DelimitedFlatFileDataFlowSource))); - Run("AddPipelineComponent", $"Pipeline:{p.ID}", nameof(DelimitedFlatFileDataFlowSource)); - var ex = Assert.Throws(()=>Run("AddPipelineComponent", $"Pipeline:{p.ID}", nameof(DelimitedFlatFileDataFlowSource))); + Assert.AreEqual("Pipeline 'My Pipeline' already has a source", ex.Message); + } - Assert.AreEqual("Pipeline 'My Pipeline' already has a source",ex.Message); - } - [Test] - public void TestCreatePipeline_TooManyDestinations() - { - var p = WhenIHaveA(); + [Test] + public void TestCreatePipeline_TooManyDestinations() + { + var p = WhenIHaveA(); - Assert.IsNull(p.Source); + Assert.IsNull(p.Source); - Run("AddPipelineComponent", $"Pipeline:{p.ID}", nameof(ExecuteFullExtractionToDatabaseMSSql)); - var ex = Assert.Throws(() => Run("AddPipelineComponent", $"Pipeline:{p.ID}", nameof(ExecuteFullExtractionToDatabaseMSSql))); + Run("AddPipelineComponent", $"Pipeline:{p.ID}", nameof(ExecuteFullExtractionToDatabaseMSSql)); + var ex = Assert.Throws(() => + Run("AddPipelineComponent", $"Pipeline:{p.ID}", nameof(ExecuteFullExtractionToDatabaseMSSql))); - Assert.AreEqual("Pipeline 'My Pipeline' already has a destination", ex.Message); - } + Assert.AreEqual("Pipeline 'My Pipeline' already has a destination", ex.Message); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/CommandExecution/ExecuteCommandAlterTableMakeDistinctTests.cs b/Rdmp.Core.Tests/CommandExecution/ExecuteCommandAlterTableMakeDistinctTests.cs index 1f70273e7e..0d29270e8c 100644 --- a/Rdmp.Core.Tests/CommandExecution/ExecuteCommandAlterTableMakeDistinctTests.cs +++ b/Rdmp.Core.Tests/CommandExecution/ExecuteCommandAlterTableMakeDistinctTests.cs @@ -8,59 +8,53 @@ using NUnit.Framework; using Rdmp.Core.CommandExecution.AtomicCommands.Alter; using Rdmp.Core.CommandLine.Interactive; -using Rdmp.Core.Curation.Data; -using ReusableLibraryCode.Checks; using System; -using System.Collections.Generic; using System.Data; -using System.Linq; -using System.Text; -using System.Threading.Tasks; +using Rdmp.Core.ReusableLibraryCode.Checks; using Tests.Common; -namespace Rdmp.Core.Tests.CommandExecution +namespace Rdmp.Core.Tests.CommandExecution; + +internal class ExecuteCommandAlterTableMakeDistinctTests : DatabaseTests { - class ExecuteCommandAlterTableMakeDistinctTests : DatabaseTests + [TestCase(DatabaseType.MicrosoftSQLServer)] + [TestCase(DatabaseType.MySql)] + [TestCase(DatabaseType.PostgreSql)] + public void Test(DatabaseType dbType) { + var db = GetCleanedServer(dbType); - [TestCase(DatabaseType.MicrosoftSQLServer)] - [TestCase(DatabaseType.MySql)] - [TestCase(DatabaseType.PostgreSql)] - public void Test(DatabaseType dbType) - { - var db = GetCleanedServer(dbType); + var dt = new DataTable(); + dt.Columns.Add("fff"); + dt.Rows.Add("1"); + dt.Rows.Add("1"); + dt.Rows.Add("2"); + dt.Rows.Add("2"); + dt.Rows.Add("2"); - var dt = new DataTable(); - dt.Columns.Add("fff"); - dt.Rows.Add("1"); - dt.Rows.Add("1"); - dt.Rows.Add("2"); - dt.Rows.Add("2"); - dt.Rows.Add("2"); + var tbl = db.CreateTable("MyTable", dt); - var tbl = db.CreateTable("MyTable", dt); + Import(tbl, out var tblInfo, out _); - Import(tbl, out ITableInfo tblInfo,out _); + Assert.AreEqual(5, tbl.GetRowCount()); - Assert.AreEqual(5, tbl.GetRowCount()); + var activator = new ConsoleInputManager(RepositoryLocator, ThrowImmediatelyCheckNotifier.Quiet) + { DisallowInput = true }; - var activator = new ConsoleInputManager(RepositoryLocator, new 
ThrowImmediatelyCheckNotifier()) { DisallowInput = true }; + var cmd = new ExecuteCommandAlterTableMakeDistinct(activator, tblInfo, 700, true); - var cmd = new ExecuteCommandAlterTableMakeDistinct(activator, tblInfo, 700, true); + Assert.IsFalse(cmd.IsImpossible, cmd.ReasonCommandImpossible); - Assert.IsFalse(cmd.IsImpossible, cmd.ReasonCommandImpossible); + cmd.Execute(); - cmd.Execute(); + Assert.AreEqual(2, tbl.GetRowCount()); - Assert.AreEqual(2, tbl.GetRowCount()); - - tbl.CreatePrimaryKey(tbl.DiscoverColumn("fff")); + tbl.CreatePrimaryKey(tbl.DiscoverColumn("fff")); - cmd = new ExecuteCommandAlterTableMakeDistinct(activator, tblInfo, 700, true); + cmd = new ExecuteCommandAlterTableMakeDistinct(activator, tblInfo, 700, true); - var ex = Assert.Throws(()=>cmd.Execute()); + var ex = Assert.Throws(() => cmd.Execute()); - Assert.AreEqual("Table 'MyTable' has primary key columns so cannot contain duplication", ex.Message); - } + Assert.AreEqual("Table 'MyTable' has primary key columns so cannot contain duplication", ex.Message); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/CommandExecution/ExecuteCommandConfirmLogsTests.cs b/Rdmp.Core.Tests/CommandExecution/ExecuteCommandConfirmLogsTests.cs index e6c8ce616e..46531505df 100644 --- a/Rdmp.Core.Tests/CommandExecution/ExecuteCommandConfirmLogsTests.cs +++ b/Rdmp.Core.Tests/CommandExecution/ExecuteCommandConfirmLogsTests.cs @@ -14,161 +14,177 @@ using System.Threading; using Tests.Common; -namespace Rdmp.Core.Tests.CommandExecution +namespace Rdmp.Core.Tests.CommandExecution; + +internal class ExecuteCommandConfirmLogsTests : DatabaseTests { - class ExecuteCommandConfirmLogsTests : DatabaseTests + [Test] + public void ConfirmLogs_NoEntries_Throws() { - [Test] - public void ConfirmLogs_NoEntries_Throws() + var lmd = new LoadMetadata(CatalogueRepository, "MyLmd"); + var cata = new Catalogue(CatalogueRepository, "myCata") { - var lmd = new LoadMetadata(CatalogueRepository, "MyLmd"); - var cata = new Catalogue(CatalogueRepository, "myCata"); - cata.LoadMetadata_ID = lmd.ID; - cata.LoggingDataTask = "GGG"; - cata.SaveToDatabase(); + LoadMetadata_ID = lmd.ID, + LoggingDataTask = "GGG" + }; + cata.SaveToDatabase(); - var lm = new LogManager(lmd.GetDistinctLoggingDatabase()); - lm.CreateNewLoggingTaskIfNotExists("GGG"); + var lm = new LogManager(lmd.GetDistinctLoggingDatabase()); + lm.CreateNewLoggingTaskIfNotExists("GGG"); - var cmd = new ExecuteCommandConfirmLogs(new ThrowImmediatelyActivator(RepositoryLocator), lmd); - var ex = Assert.Throws(()=>cmd.Execute()); + var cmd = new ExecuteCommandConfirmLogs(new ThrowImmediatelyActivator(RepositoryLocator), lmd); + var ex = Assert.Throws(() => cmd.Execute()); - Assert.AreEqual("There are no log entries for MyLmd", ex.Message); + Assert.AreEqual("There are no log entries for MyLmd", ex.Message); + } - } - [TestCase(true)] - [TestCase(false)] - public void ConfirmLogs_HappyEntries_Passes(bool withinTime) - { - var lmd = new LoadMetadata(CatalogueRepository, "MyLmd"); - var cata = new Catalogue(CatalogueRepository, "myCata"); - cata.LoadMetadata_ID = lmd.ID; - cata.LoggingDataTask = "FFF"; - cata.SaveToDatabase(); - - var lm = new LogManager(lmd.GetDistinctLoggingDatabase()); - lm.CreateNewLoggingTaskIfNotExists("FFF"); - var logEntry = lm.CreateDataLoadInfo("FFF", "pack o' cards", "going down gambling", null, true); - - // we mark it as completed successfully - this is a good, happy log entry - logEntry.CloseAndMarkComplete(); - - - var cmd = new ExecuteCommandConfirmLogs(new 
ThrowImmediatelyActivator(RepositoryLocator), - //within last 10 hours - lmd, withinTime ? "10:00:00":null); - Assert.DoesNotThrow(() => cmd.Execute()); - } - - [Test] - public void ConfirmLogs_SadEntry_BecauseNeverEnded_Throws() - { - var lmd = new LoadMetadata(CatalogueRepository, "MyLmd"); - var cata = new Catalogue(CatalogueRepository, "myCata"); - cata.LoadMetadata_ID = lmd.ID; - cata.LoggingDataTask = "FFF"; - cata.SaveToDatabase(); - - var lm = new LogManager(lmd.GetDistinctLoggingDatabase()); - lm.CreateNewLoggingTaskIfNotExists("FFF"); - - // we have created log entry but it did not have an end time. This is a sad entry because it never completed - lm.CreateDataLoadInfo("FFF", "pack o' cards", "going down gambling", null, true); - - var cmd = new ExecuteCommandConfirmLogs(new ThrowImmediatelyActivator(RepositoryLocator), lmd); - var ex = Assert.Throws(() => cmd.Execute()); - - StringAssert.IsMatch("Latest logs for MyLmd .* indicate that it did not complete", ex.Message); - } - [Test] - public void ConfirmLogs_SadEntryWithEx_Throws() + [TestCase(true)] + [TestCase(false)] + public void ConfirmLogs_HappyEntries_Passes(bool withinTime) + { + var lmd = new LoadMetadata(CatalogueRepository, "MyLmd"); + var cata = new Catalogue(CatalogueRepository, "myCata") { - var lmd = new LoadMetadata(CatalogueRepository, "MyLmd"); - var cata = new Catalogue(CatalogueRepository, "myCata"); - cata.LoadMetadata_ID = lmd.ID; - cata.LoggingDataTask = "FFF"; - cata.SaveToDatabase(); + LoadMetadata_ID = lmd.ID, + LoggingDataTask = "FFF" + }; + cata.SaveToDatabase(); - var lm = new LogManager(lmd.GetDistinctLoggingDatabase()); - lm.CreateNewLoggingTaskIfNotExists("FFF"); - var logEntry = lm.CreateDataLoadInfo("FFF", "pack o' cards", "going down gambling", null, true); - logEntry.LogFatalError("vegas", "we lost it all on a pair of deuces"); + var lm = new LogManager(lmd.GetDistinctLoggingDatabase()); + lm.CreateNewLoggingTaskIfNotExists("FFF"); + var logEntry = lm.CreateDataLoadInfo("FFF", "pack o' cards", "going down gambling", null, true); - var cmd = new ExecuteCommandConfirmLogs(new ThrowImmediatelyActivator(RepositoryLocator), lmd); - var ex = Assert.Throws(() => cmd.Execute()); + // we mark it as completed successfully - this is a good, happy log entry + logEntry.CloseAndMarkComplete(); - StringAssert.IsMatch("Latest logs for MyLmd .* indicate that it failed", ex.Message); - } + var cmd = new ExecuteCommandConfirmLogs(new ThrowImmediatelyActivator(RepositoryLocator), + //within last 10 hours + lmd, withinTime ? "10:00:00" : null); + Assert.DoesNotThrow(() => cmd.Execute()); + } + [Test] + public void ConfirmLogs_SadEntry_BecauseNeverEnded_Throws() + { + var lmd = new LoadMetadata(CatalogueRepository, "MyLmd"); + var cata = new Catalogue(CatalogueRepository, "myCata") + { + LoadMetadata_ID = lmd.ID, + LoggingDataTask = "FFF" + }; + cata.SaveToDatabase(); + + var lm = new LogManager(lmd.GetDistinctLoggingDatabase()); + lm.CreateNewLoggingTaskIfNotExists("FFF"); + + // we have created log entry but it did not have an end time. 
This is a sad entry because it never completed + lm.CreateDataLoadInfo("FFF", "pack o' cards", "going down gambling", null, true); + + var cmd = new ExecuteCommandConfirmLogs(new ThrowImmediatelyActivator(RepositoryLocator), lmd); + var ex = Assert.Throws(() => cmd.Execute()); + + StringAssert.IsMatch("Latest logs for MyLmd .* indicate that it did not complete", ex.Message); + } - [Test] - public void ConfirmLogs_NotWithinTime_Throws() + [Test] + public void ConfirmLogs_SadEntryWithEx_Throws() + { + var lmd = new LoadMetadata(CatalogueRepository, "MyLmd"); + var cata = new Catalogue(CatalogueRepository, "myCata") { + LoadMetadata_ID = lmd.ID, + LoggingDataTask = "FFF" + }; + cata.SaveToDatabase(); + + var lm = new LogManager(lmd.GetDistinctLoggingDatabase()); + lm.CreateNewLoggingTaskIfNotExists("FFF"); + var logEntry = lm.CreateDataLoadInfo("FFF", "pack o' cards", "going down gambling", null, true); + logEntry.LogFatalError("vegas", "we lost it all on a pair of deuces"); + + var cmd = new ExecuteCommandConfirmLogs(new ThrowImmediatelyActivator(RepositoryLocator), lmd); + var ex = Assert.Throws(() => cmd.Execute()); + + StringAssert.IsMatch("Latest logs for MyLmd .* indicate that it failed", ex.Message); + } + - var lmd = new LoadMetadata(CatalogueRepository, "MyLmd"); - var cata = new Catalogue(CatalogueRepository, "myCata"); - cata.LoadMetadata_ID = lmd.ID; - cata.LoggingDataTask = "FFF"; - cata.SaveToDatabase(); + [Test] + public void ConfirmLogs_NotWithinTime_Throws() + { + var lmd = new LoadMetadata(CatalogueRepository, "MyLmd"); + var cata = new Catalogue(CatalogueRepository, "myCata") + { + LoadMetadata_ID = lmd.ID, + LoggingDataTask = "FFF" + }; + cata.SaveToDatabase(); - var lm = new LogManager(lmd.GetDistinctLoggingDatabase()); - lm.CreateNewLoggingTaskIfNotExists("FFF"); - var logEntry = lm.CreateDataLoadInfo("FFF", "pack o' cards", "going down gambling", null, true); + var lm = new LogManager(lmd.GetDistinctLoggingDatabase()); + lm.CreateNewLoggingTaskIfNotExists("FFF"); + var logEntry = lm.CreateDataLoadInfo("FFF", "pack o' cards", "going down gambling", null, true); - // we mark it as completed successfully - this is a good, happy log entry - logEntry.CloseAndMarkComplete(); + // we mark it as completed successfully - this is a good, happy log entry + logEntry.CloseAndMarkComplete(); - Thread.Sleep(5000); + Thread.Sleep(5000); - // but we want this to have finished in the last second - var cmd = new ExecuteCommandConfirmLogs(new ThrowImmediatelyActivator(RepositoryLocator), lmd,"00:00:01"); - var ex = Assert.Throws(() => cmd.Execute()); + // but we want this to have finished in the last second + var cmd = new ExecuteCommandConfirmLogs(new ThrowImmediatelyActivator(RepositoryLocator), lmd, "00:00:01"); + var ex = Assert.Throws(() => cmd.Execute()); - StringAssert.IsMatch("Latest logged activity for MyLmd is .*. This is older than the requested date threshold:.*", ex.Message); - } + StringAssert.IsMatch( + "Latest logged activity for MyLmd is .*. 
This is older than the requested date threshold:.*", ex.Message); + } - [Test] - public void ConfirmLogs_With2CacheProgress_Throws() + [Test] + public void ConfirmLogs_With2CacheProgress_Throws() + { + var lmd1 = new LoadMetadata(CatalogueRepository, "MyLmd"); + var cata = new Catalogue(CatalogueRepository, "myCata") { - var lmd1 = new LoadMetadata(CatalogueRepository, "MyLmd"); - var cata = new Catalogue(CatalogueRepository, "myCata"); - cata.LoadMetadata_ID = lmd1.ID; - cata.LoggingDataTask = "B"; - cata.SaveToDatabase(); + LoadMetadata_ID = lmd1.ID, + LoggingDataTask = "B" + }; + cata.SaveToDatabase(); - var lmd2 = new LoadMetadata(CatalogueRepository, "MyLmd"); - var cata2 = new Catalogue(CatalogueRepository, "myCata"); - cata2.LoadMetadata_ID = lmd2.ID; - cata2.LoggingDataTask = "A"; - cata2.SaveToDatabase(); + var lmd2 = new LoadMetadata(CatalogueRepository, "MyLmd"); + var cata2 = new Catalogue(CatalogueRepository, "myCata") + { + LoadMetadata_ID = lmd2.ID, + LoggingDataTask = "A" + }; + cata2.SaveToDatabase(); - var lp1 = new LoadProgress(CatalogueRepository, lmd1); - var lp2 = new LoadProgress(CatalogueRepository, lmd2); + var lp1 = new LoadProgress(CatalogueRepository, lmd1); + var lp2 = new LoadProgress(CatalogueRepository, lmd2); - var cp1 = new CacheProgress(CatalogueRepository, lp1); - var cp2 = new CacheProgress(CatalogueRepository, lp2); - cp2.Name = "MyCoolCache"; - cp2.SaveToDatabase(); + var cp1 = new CacheProgress(CatalogueRepository, lp1); + var cp2 = new CacheProgress(CatalogueRepository, lp2) + { + Name = "MyCoolCache" + }; + cp2.SaveToDatabase(); - var lm = new LogManager(cp1.GetDistinctLoggingDatabase()); - lm.CreateNewLoggingTaskIfNotExists(cp1.GetDistinctLoggingTask()); + var lm = new LogManager(cp1.GetDistinctLoggingDatabase()); + lm.CreateNewLoggingTaskIfNotExists(cp1.GetDistinctLoggingTask()); - // create a log entry for cp1 only - var logEntry = lm.CreateDataLoadInfo(cp1.GetDistinctLoggingTask(), "pack o' cards", cp1.GetLoggingRunName(), null, true); + // create a log entry for cp1 only + var logEntry = lm.CreateDataLoadInfo(cp1.GetDistinctLoggingTask(), "pack o' cards", cp1.GetLoggingRunName(), + null, true); - // we mark it as completed successfully - this is a good, happy log entry - logEntry.CloseAndMarkComplete(); + // we mark it as completed successfully - this is a good, happy log entry + logEntry.CloseAndMarkComplete(); - // The first cache has logged success so should be happy - var cmd1 = new ExecuteCommandConfirmLogs(new ThrowImmediatelyActivator(RepositoryLocator), cp1, null); - Assert.DoesNotThrow(() => cmd1.Execute()); + // The first cache has logged success so should be happy + var cmd1 = new ExecuteCommandConfirmLogs(new ThrowImmediatelyActivator(RepositoryLocator), cp1, null); + Assert.DoesNotThrow(() => cmd1.Execute()); - // The second cache has not logged any successes so should be unhappy - var cmd2 = new ExecuteCommandConfirmLogs(new ThrowImmediatelyActivator(RepositoryLocator), cp2,null); - var ex = Assert.Throws(() => cmd2.Execute()); + // The second cache has not logged any successes so should be unhappy + var cmd2 = new ExecuteCommandConfirmLogs(new ThrowImmediatelyActivator(RepositoryLocator), cp2, null); + var ex = Assert.Throws(() => cmd2.Execute()); - Assert.AreEqual("There are no log entries for MyCoolCache", ex.Message); - } + Assert.AreEqual("There are no log entries for MyCoolCache", ex.Message); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/CommandExecution/ExecuteCommandCreateNewDataLoadDirectoryTests.cs 
b/Rdmp.Core.Tests/CommandExecution/ExecuteCommandCreateNewDataLoadDirectoryTests.cs index 8b54c2014c..323f6a89fb 100644 --- a/Rdmp.Core.Tests/CommandExecution/ExecuteCommandCreateNewDataLoadDirectoryTests.cs +++ b/Rdmp.Core.Tests/CommandExecution/ExecuteCommandCreateNewDataLoadDirectoryTests.cs @@ -8,41 +8,34 @@ using Rdmp.Core.Curation.Data.DataLoad; using System.IO; -namespace Rdmp.Core.Tests.CommandExecution +namespace Rdmp.Core.Tests.CommandExecution; + +internal class ExecuteCommandCreateNewDataLoadDirectoryTests : CommandCliTests { - class ExecuteCommandCreateNewDataLoadDirectoryTests : CommandCliTests + [Test] + public void TestCreateNewDataLoadDirectory_CreateDeepFolder_NoLmd() { - [Test] - public void TestCreateNewDataLoadDirectory_CreateDeepFolder_NoLmd() - { - var root = Path.Combine(TestContext.CurrentContext.WorkDirectory, "abc"); - if(Directory.Exists(root)) - { - Directory.Delete(root, true); - } - var toCreate = Path.Combine(root, "def", "ghi"); - - Run("CreateNewDataLoadDirectory","null", toCreate); - - Assert.IsTrue(Directory.Exists(root)); - } - - [Test] - public void TestCreateNewDataLoadDirectory_WithLoadMetadata() - { - var root = Path.Combine(TestContext.CurrentContext.WorkDirectory, "def"); - if (Directory.Exists(root)) - { - Directory.Delete(root, true); - } - var lmd = WhenIHaveA(); - - Assert.IsNull(lmd.LocationOfFlatFiles); - - Run("CreateNewDataLoadDirectory", $"LoadMetadata:{lmd.ID}", root); - - Assert.IsTrue(Directory.Exists(root)); - Assert.AreEqual(root,lmd.LocationOfFlatFiles); - } + var root = Path.Combine(TestContext.CurrentContext.WorkDirectory, "abc"); + if (Directory.Exists(root)) Directory.Delete(root, true); + var toCreate = Path.Combine(root, "def", "ghi"); + + Run("CreateNewDataLoadDirectory", "null", toCreate); + + Assert.IsTrue(Directory.Exists(root)); + } + + [Test] + public void TestCreateNewDataLoadDirectory_WithLoadMetadata() + { + var root = Path.Combine(TestContext.CurrentContext.WorkDirectory, "def"); + if (Directory.Exists(root)) Directory.Delete(root, true); + var lmd = WhenIHaveA(); + + Assert.IsNull(lmd.LocationOfFlatFiles); + + Run("CreateNewDataLoadDirectory", $"LoadMetadata:{lmd.ID}", root); + + Assert.IsTrue(Directory.Exists(root)); + Assert.AreEqual(root, lmd.LocationOfFlatFiles); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/CommandExecution/ExecuteCommandCreateNewFilterCliTests.cs b/Rdmp.Core.Tests/CommandExecution/ExecuteCommandCreateNewFilterCliTests.cs index 4d2872c134..76dcbcf11a 100644 --- a/Rdmp.Core.Tests/CommandExecution/ExecuteCommandCreateNewFilterCliTests.cs +++ b/Rdmp.Core.Tests/CommandExecution/ExecuteCommandCreateNewFilterCliTests.cs @@ -11,49 +11,50 @@ using Rdmp.Core.Curation.Data.Aggregation; using Rdmp.Core.DataExport.Data; -namespace Rdmp.Core.Tests.CommandExecution +namespace Rdmp.Core.Tests.CommandExecution; + +/// +/// Tests for +/// +internal class ExecuteCommandCreateNewFilterCliTests : CommandCliTests { - /// - /// Tests for - /// - class ExecuteCommandCreateNewFilterCliTests : CommandCliTests + [Test] + public void TestNewFilterForAggregate() { - [Test] - public void TestNewFilterForAggregate() - { - var ac = WhenIHaveA(); - - // has no container to start with (no filters) - Assert.IsNull(ac.RootFilterContainer_ID); - Run("CreateNewFilter",$"{nameof(AggregateConfiguration)}:{ac.ID}"); - - Assert.IsNotNull(ac.RootFilterContainer_ID,"Should now have a container"); - Assert.AreEqual(1,ac.RootFilterContainer.GetFilters().Count(),"Expected a single new filter"); - } - [Test] - public void 
TestNewFilterForExtractionConfiguration() - { - var sds = WhenIHaveA(); - - // has no container to start with (no filters) - Assert.IsNull(sds.RootFilterContainer_ID); - Run("CreateNewFilter", $"{nameof(SelectedDataSets)}:{sds.ID}"); - - Assert.IsNotNull(sds.RootFilterContainer_ID, "Should now have a container"); - Assert.AreEqual(1, sds.RootFilterContainer.GetFilters().Count(), "Expected a single new filter"); - } - [Test] - public void TestNewFilterForCatalogue() - { - var ei = WhenIHaveA(); - - // no Catalogue level filters - Assert.IsEmpty(ei.ExtractionFilters); - Run("CreateNewFilter", $"{nameof(ExtractionInformation)}:{ei.ID}", "My cool filter", "hb='t'"); - - var f = ei.ExtractionFilters.Single(); - Assert.AreEqual("My cool filter", f.Name); - Assert.AreEqual("hb='t'", f.WhereSQL); - } + var ac = WhenIHaveA(); + + // has no container to start with (no filters) + Assert.IsNull(ac.RootFilterContainer_ID); + Run("CreateNewFilter", $"{nameof(AggregateConfiguration)}:{ac.ID}"); + + Assert.IsNotNull(ac.RootFilterContainer_ID, "Should now have a container"); + Assert.AreEqual(1, ac.RootFilterContainer.GetFilters().Length, "Expected a single new filter"); + } + + [Test] + public void TestNewFilterForExtractionConfiguration() + { + var sds = WhenIHaveA(); + + // has no container to start with (no filters) + Assert.IsNull(sds.RootFilterContainer_ID); + Run("CreateNewFilter", $"{nameof(SelectedDataSets)}:{sds.ID}"); + + Assert.IsNotNull(sds.RootFilterContainer_ID, "Should now have a container"); + Assert.AreEqual(1, sds.RootFilterContainer.GetFilters().Length, "Expected a single new filter"); + } + + [Test] + public void TestNewFilterForCatalogue() + { + var ei = WhenIHaveA(); + + // no Catalogue level filters + Assert.IsEmpty(ei.ExtractionFilters); + Run("CreateNewFilter", $"{nameof(ExtractionInformation)}:{ei.ID}", "My cool filter", "hb='t'"); + + var f = ei.ExtractionFilters.Single(); + Assert.AreEqual("My cool filter", f.Name); + Assert.AreEqual("hb='t'", f.WhereSQL); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/CommandExecution/ExecuteCommandDeleteTestsCli.cs b/Rdmp.Core.Tests/CommandExecution/ExecuteCommandDeleteTestsCli.cs index 8f29c5b8c7..99b124c39c 100644 --- a/Rdmp.Core.Tests/CommandExecution/ExecuteCommandDeleteTestsCli.cs +++ b/Rdmp.Core.Tests/CommandExecution/ExecuteCommandDeleteTestsCli.cs @@ -8,88 +8,90 @@ using Rdmp.Core.Curation.Data; using System; -namespace Rdmp.Core.Tests.CommandExecution +namespace Rdmp.Core.Tests.CommandExecution; + +internal class ExecuteCommandDeleteTestsCli : CommandCliTests { - class ExecuteCommandDeleteTestsCli : CommandCliTests + [Test] + public void TestDeletingACatalogue_NoneInDbIsFine() + { + var prev = RepositoryLocator.CatalogueRepository.GetAllObjects(); + Assert.IsEmpty(prev); + + Assert.AreEqual(0, Run("delete", "Catalogue:bob")); + + var now = RepositoryLocator.CatalogueRepository.GetAllObjects(); + Assert.IsEmpty(now); + } + + [Test] + public void TestDeletingACatalogue_DeleteBecauseMatches() { - [Test] - public void TestDeletingACatalogue_NoneInDbIsFine() - { - var prev = RepositoryLocator.CatalogueRepository.GetAllObjects(); - Assert.IsEmpty(prev); - - Assert.AreEqual(0,Run("delete","Catalogue:bob")); - - var now = RepositoryLocator.CatalogueRepository.GetAllObjects(); - Assert.IsEmpty(now); - } - - [Test] - public void TestDeletingACatalogue_DeleteBecauseMatches() - { - var cata = WhenIHaveA(); - cata.Name = "bob"; - - Assert.AreEqual(0, Run("delete", "Catalogue:bob")); - Assert.IsFalse(cata.Exists()); - } - [Test] 
- public void TestDeletingACatalogue_DoesNotMatchPattern() - { - var cata = WhenIHaveA(); - cata.Name = "ffff"; - - Assert.AreEqual(0, Run("delete", "Catalogue:bob")); - - // should not have been deleted because name does not match what is sought to be deleted - Assert.IsTrue(cata.Exists()); - - //cleanup - cata.DeleteInDatabase(); - } - - [Test] - public void TestDeleteMany_ThrowsBecauseNotExpected() - { - // 2 catalogues - var c1 = WhenIHaveA(); - var c2 = WhenIHaveA(); - - // delete all catalogues - var ex = Assert.Throws(()=>Run("delete", "Catalogue")); - - Assert.AreEqual(ex.Message, + var cata = WhenIHaveA(); + cata.Name = "bob"; + + Assert.AreEqual(0, Run("delete", "Catalogue:bob")); + Assert.IsFalse(cata.Exists()); + } + + [Test] + public void TestDeletingACatalogue_DoesNotMatchPattern() + { + var cata = WhenIHaveA(); + cata.Name = "ffff"; + + Assert.AreEqual(0, Run("delete", "Catalogue:bob")); + + // should not have been deleted because name does not match what is sought to be deleted + Assert.IsTrue(cata.Exists()); + + //cleanup + cata.DeleteInDatabase(); + } + + [Test] + public void TestDeleteMany_ThrowsBecauseNotExpected() + { + // 2 catalogues + var c1 = WhenIHaveA(); + var c2 = WhenIHaveA(); + + // delete all catalogues + var ex = Assert.Throws(() => Run("delete", "Catalogue")); + + Assert.AreEqual(ex.Message, "Allow delete many is false but multiple objects were matched for deletion (Mycata,Mycata)"); - c1.DeleteInDatabase(); - c2.DeleteInDatabase(); - } - [Test] - public void TestDeleteMany_Allowed() - { + c1.DeleteInDatabase(); + c2.DeleteInDatabase(); + } - // 2 catalogues - var c1 = WhenIHaveA(); - var c2 = WhenIHaveA(); + [Test] + public void TestDeleteMany_Allowed() + { + // 2 catalogues + var c1 = WhenIHaveA(); + var c2 = WhenIHaveA(); - // delete all catalogues - Assert.AreEqual(0,Run("delete", "Catalogue","true")); + // delete all catalogues + Assert.AreEqual(0, Run("delete", "Catalogue", "true")); - Assert.IsFalse(c1.Exists()); - Assert.IsFalse(c2.Exists()); - } - [Test] - public void TestDeleteMany_BadParameterFormat() - { - var c1 = WhenIHaveA(); + Assert.IsFalse(c1.Exists()); + Assert.IsFalse(c2.Exists()); + } + + [Test] + public void TestDeleteMany_BadParameterFormat() + { + var c1 = WhenIHaveA(); - // delete all catalogues - var ex = Assert.Throws(()=> Run("delete", "Catalogue", "FLIBBLE!")); + // delete all catalogues + var ex = Assert.Throws(() => Run("delete", "Catalogue", "FLIBBLE!")); - Assert.AreEqual("Expected parameter at index 1 to be a System.Boolean (for parameter 'deleteMany') but it was FLIBBLE!", - ex.Message); + Assert.AreEqual( + "Expected parameter at index 1 to be a System.Boolean (for parameter 'deleteMany') but it was FLIBBLE!", + ex.Message); - c1.DeleteInDatabase(); - } + c1.DeleteInDatabase(); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/CommandExecution/ExecuteCommandDeprecateTests.cs b/Rdmp.Core.Tests/CommandExecution/ExecuteCommandDeprecateTests.cs index 1d94905470..6fb1daa05f 100644 --- a/Rdmp.Core.Tests/CommandExecution/ExecuteCommandDeprecateTests.cs +++ b/Rdmp.Core.Tests/CommandExecution/ExecuteCommandDeprecateTests.cs @@ -7,20 +7,19 @@ using NUnit.Framework; using Rdmp.Core.Curation.Data; -namespace Rdmp.Core.Tests.CommandExecution +namespace Rdmp.Core.Tests.CommandExecution; + +internal class ExecuteCommandDeprecateTests : CommandCliTests { - class ExecuteCommandDeprecateTests : CommandCliTests + [Test] + public void TestDeprecateCommand() { - [Test] - public void TestDeprecateCommand() - { - var c = 
WhenIHaveA(); + var c = WhenIHaveA(); - Assert.IsFalse(c.IsDeprecated); + Assert.IsFalse(c.IsDeprecated); - Run("Deprecate", $"Catalogue:{c.ID}"); + Run("Deprecate", $"Catalogue:{c.ID}"); - Assert.IsTrue(c.IsDeprecated); - } + Assert.IsTrue(c.IsDeprecated); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/CommandExecution/ExecuteCommandListTests.cs b/Rdmp.Core.Tests/CommandExecution/ExecuteCommandListTests.cs index 620e27c109..bc3d18bc14 100644 --- a/Rdmp.Core.Tests/CommandExecution/ExecuteCommandListTests.cs +++ b/Rdmp.Core.Tests/CommandExecution/ExecuteCommandListTests.cs @@ -4,59 +4,60 @@ // RDMP is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. // You should have received a copy of the GNU General Public License along with RDMP. If not, see . -using Moq; +using NSubstitute; using NUnit.Framework; using Rdmp.Core.CommandExecution.AtomicCommands; using Rdmp.Core.CommandLine.Interactive.Picking; using Rdmp.Core.Curation.Data; using System.Text.RegularExpressions; -namespace Rdmp.Core.Tests.CommandExecution +namespace Rdmp.Core.Tests.CommandExecution; + +internal class TestsExecuteCommandList : CommandCliTests { - class TestsExecuteCommandList : CommandCliTests + [Test] + public void Test_ExecuteCommandList_NoCataloguesParsing() { - [Test] - public void Test_ExecuteCommandList_NoCataloguesParsing() - { - foreach(var cat in RepositoryLocator.CatalogueRepository.GetAllObjects()) - cat.DeleteInDatabase(); - - Assert.IsEmpty(RepositoryLocator.CatalogueRepository.GetAllObjects(),"Failed to clear CatalogueRepository"); - - GetInvoker().ExecuteCommand(typeof(ExecuteCommandList), - new CommandLineObjectPicker(new string[]{ "Catalogue"}, GetActivator())); - } - - [Test] - public void Test_ExecuteCommandList_OneCatalogueParsing() - { - var c = WhenIHaveA(); - - GetInvoker().ExecuteCommand(typeof(ExecuteCommandList), - new CommandLineObjectPicker(new string[]{ "Catalogue"}, GetActivator())); - - c.DeleteInDatabase(); - } - [Test] - public void Test_ExecuteCommandList_OneCatalogue() - { - var c = WhenIHaveA(); - c.Name = "fff"; - c.SaveToDatabase(); - - var mock = GetMockActivator(); - - var cmd = new ExecuteCommandList(mock.Object,new []{c}); - Assert.IsFalse(cmd.IsImpossible,cmd.ReasonCommandImpossible); - - cmd.Execute(); - - string contents = Regex.Escape($"{c.ID}:fff"); - - // Called once - mock.Verify(m => m.Show(It.IsRegex(contents)), Times.Once()); - - c.DeleteInDatabase(); - } + foreach (var cat in RepositoryLocator.CatalogueRepository.GetAllObjects()) + cat.DeleteInDatabase(); + + Assert.IsEmpty(RepositoryLocator.CatalogueRepository.GetAllObjects(), + "Failed to clear CatalogueRepository"); + + GetInvoker().ExecuteCommand(typeof(ExecuteCommandList), + new CommandLineObjectPicker(new string[] { "Catalogue" }, GetActivator())); + } + + [Test] + public void Test_ExecuteCommandList_OneCatalogueParsing() + { + var c = WhenIHaveA(); + + GetInvoker().ExecuteCommand(typeof(ExecuteCommandList), + new CommandLineObjectPicker(new string[] { "Catalogue" }, GetActivator())); + + c.DeleteInDatabase(); + } + + [Test] + public void Test_ExecuteCommandList_OneCatalogue() + { + var c = WhenIHaveA(); + c.Name = "fff"; + c.SaveToDatabase(); + + var mock = GetMockActivator(); + + var cmd = new ExecuteCommandList(mock, new[] { c }); + Assert.IsFalse(cmd.IsImpossible, cmd.ReasonCommandImpossible); + + cmd.Execute(); + + var 
contents = Regex.Escape($"{c.ID}:fff"); + + // Called once + mock.Received(1).Show(Arg.Is(i => i.Contains($"{c.ID}:fff"))); + + c.DeleteInDatabase(); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/CommandExecution/ExecuteCommandRefreshBrokenCohortsTests.cs b/Rdmp.Core.Tests/CommandExecution/ExecuteCommandRefreshBrokenCohortsTests.cs index 1bc8f1edb8..eb852b8af4 100644 --- a/Rdmp.Core.Tests/CommandExecution/ExecuteCommandRefreshBrokenCohortsTests.cs +++ b/Rdmp.Core.Tests/CommandExecution/ExecuteCommandRefreshBrokenCohortsTests.cs @@ -10,65 +10,63 @@ using Rdmp.Core.DataExport.Data; using Rdmp.Core.Providers; using Rdmp.Core.Repositories; -using ReusableLibraryCode.Checks; -using System; -using System.Collections.Generic; -using System.Linq; -using System.Text; -using System.Threading.Tasks; +using Rdmp.Core.ReusableLibraryCode.Checks; -namespace Rdmp.Core.Tests.CommandExecution +namespace Rdmp.Core.Tests.CommandExecution; + +internal class ExecuteCommandRefreshBrokenCohortsTests { - internal class ExecuteCommandRefreshBrokenCohortsTests + [Test] + public void TestBrokenCohort() { + var repo = new MemoryDataExportRepository(); + + var ect = new ExternalCohortTable(repo, "yarg", FAnsi.DatabaseType.MicrosoftSQLServer) + { + Server = "IDontExist", + Database = "fff", + PrivateIdentifierField = "haha", + ReleaseIdentifierField = "haha" + }; + ect.SaveToDatabase(); - [Test] - public void TestBrokenCohort() + var cohort = new ExtractableCohort { - var repo = new MemoryDataExportRepository(); - - var ect = new ExternalCohortTable(repo, "yarg", FAnsi.DatabaseType.MicrosoftSQLServer); - ect.Server = "IDontExist"; - ect.Database = "fff"; - ect.PrivateIdentifierField = "haha"; - ect.ReleaseIdentifierField = "haha"; - ect.SaveToDatabase(); + Repository = repo, + ExternalCohortTable_ID = ect.ID, + OriginID = 123 + }; + cohort.SaveToDatabase(); - var cohort = new ExtractableCohort(); - cohort.Repository = repo; - cohort.ExternalCohortTable_ID = ect.ID; - cohort.OriginID = 123; - cohort.SaveToDatabase(); + var repoLocator = new RepositoryProvider(repo); - var repoLocator = new RepositoryProvider(repo); + var activator = new ConsoleInputManager(repoLocator, ThrowImmediatelyCheckNotifier.Quiet) + { + DisallowInput = true + }; - var activator = new ConsoleInputManager(repoLocator, new ThrowImmediatelyCheckNotifier()) { - DisallowInput = true - }; + Assert.AreEqual(1, ((DataExportChildProvider)activator.CoreChildProvider).ForbidListedSources.Count); - Assert.AreEqual(1,((DataExportChildProvider)activator.CoreChildProvider).ForbidListedSources.Count); + var cmd = new ExecuteCommandRefreshBrokenCohorts(activator) + { + // suppress publishing so we don't just go back into a refresh + // and find it missing again + NoPublish = true + }; - var cmd = new ExecuteCommandRefreshBrokenCohorts(activator) - { - // suppress publishing so we don't just go back into a refresh - // and find it missing again - NoPublish = true, - }; - - Assert.IsFalse(cmd.IsImpossible); - cmd.Execute(); + Assert.IsFalse(cmd.IsImpossible); + cmd.Execute(); - //now no forbidden cohorts - Assert.IsEmpty(((DataExportChildProvider)activator.CoreChildProvider).ForbidListedSources); + //now no forbidden cohorts + Assert.IsEmpty(((DataExportChildProvider)activator.CoreChildProvider).ForbidListedSources); - cmd = new ExecuteCommandRefreshBrokenCohorts(activator); - Assert.IsTrue(cmd.IsImpossible); - Assert.AreEqual("There are no broken ExternalCohortTable to clear status on", cmd.ReasonCommandImpossible); - - cmd = new 
ExecuteCommandRefreshBrokenCohorts(activator,ect); - Assert.IsTrue(cmd.IsImpossible); - Assert.AreEqual("'yarg' is not broken", cmd.ReasonCommandImpossible); - } + cmd = new ExecuteCommandRefreshBrokenCohorts(activator); + Assert.IsTrue(cmd.IsImpossible); + Assert.AreEqual("There are no broken ExternalCohortTable to clear status on", cmd.ReasonCommandImpossible); + + cmd = new ExecuteCommandRefreshBrokenCohorts(activator, ect); + Assert.IsTrue(cmd.IsImpossible); + Assert.AreEqual("'yarg' is not broken", cmd.ReasonCommandImpossible); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/CommandExecution/ExecuteCommandReplacedByTests.cs b/Rdmp.Core.Tests/CommandExecution/ExecuteCommandReplacedByTests.cs index e3654d4456..e1408fed57 100644 --- a/Rdmp.Core.Tests/CommandExecution/ExecuteCommandReplacedByTests.cs +++ b/Rdmp.Core.Tests/CommandExecution/ExecuteCommandReplacedByTests.cs @@ -9,74 +9,73 @@ using Rdmp.Core.Curation.Data; using System.Linq; -namespace Rdmp.Core.Tests.CommandExecution +namespace Rdmp.Core.Tests.CommandExecution; + +internal class ExecuteCommandReplacedByTests : CommandCliTests { - class ExecuteCommandReplacedByTests : CommandCliTests - { [Test] public void CommandImpossible_BecauseNotDeprecated() { - var c1 = WhenIHaveA(); - var c2 = WhenIHaveA(); - - var cmd = new ExecuteCommandReplacedBy(GetMockActivator().Object,c1,c2); - - Assert.IsTrue(cmd.IsImpossible); - StringAssert.Contains("is not marked IsDeprecated",cmd.ReasonCommandImpossible); + var c1 = WhenIHaveA(); + var c2 = WhenIHaveA(); + + var cmd = new ExecuteCommandReplacedBy(GetMockActivator(), c1, c2); + + Assert.IsTrue(cmd.IsImpossible); + StringAssert.Contains("is not marked IsDeprecated", cmd.ReasonCommandImpossible); } [Test] public void CommandImpossible_BecauseDifferentTypes() { - var c1 = WhenIHaveA(); - var ci1 = WhenIHaveA(); - - c1.IsDeprecated = true; - c1.SaveToDatabase(); - - var cmd = new ExecuteCommandReplacedBy(GetMockActivator().Object,c1,ci1); - - Assert.IsTrue(cmd.IsImpossible); - StringAssert.Contains("because it is a different object Type",cmd.ReasonCommandImpossible); + var c1 = WhenIHaveA(); + var ci1 = WhenIHaveA(); + + c1.IsDeprecated = true; + c1.SaveToDatabase(); + + var cmd = new ExecuteCommandReplacedBy(GetMockActivator(), c1, ci1); + + Assert.IsTrue(cmd.IsImpossible); + StringAssert.Contains("because it is a different object Type", cmd.ReasonCommandImpossible); } + [Test] public void CommandImpossible_Allowed() { - var c1 = WhenIHaveA(); - var c2 = WhenIHaveA(); - - c1.IsDeprecated = true; - c1.SaveToDatabase(); - - var cmd = new ExecuteCommandReplacedBy(GetMockActivator().Object,c1,c2); - Assert.IsFalse(cmd.IsImpossible,cmd.ReasonCommandImpossible); - - cmd.Execute(); - - var replacement = RepositoryLocator.CatalogueRepository - .GetAllObjectsWhere("Name",ExtendedProperty.ReplacedBy) - .Single(r=>r.IsReferenceTo(c1)); - - Assert.IsTrue(replacement.IsReferenceTo(c1)); - Assert.AreEqual(c2.ID.ToString(),replacement.Value); - - // running command multiple times shouldn't result in duplicate objects - cmd.Execute(); - cmd.Execute(); - cmd.Execute(); - cmd.Execute(); - - Assert.AreEqual(1,RepositoryLocator.CatalogueRepository + var c1 = WhenIHaveA(); + var c2 = WhenIHaveA(); + + c1.IsDeprecated = true; + c1.SaveToDatabase(); + + var cmd = new ExecuteCommandReplacedBy(GetMockActivator(), c1, c2); + Assert.IsFalse(cmd.IsImpossible, cmd.ReasonCommandImpossible); + + cmd.Execute(); + + var replacement = RepositoryLocator.CatalogueRepository + .GetAllObjectsWhere("Name", 
ExtendedProperty.ReplacedBy) + .Single(r => r.IsReferenceTo(c1)); + + Assert.IsTrue(replacement.IsReferenceTo(c1)); + Assert.AreEqual(c2.ID.ToString(), replacement.Value); + + // running command multiple times shouldn't result in duplicate objects + cmd.Execute(); + cmd.Execute(); + cmd.Execute(); + cmd.Execute(); + + Assert.AreEqual(1, RepositoryLocator.CatalogueRepository .GetAllObjectsWhere("Name", ExtendedProperty.ReplacedBy) - .Count(r=>r.IsReferenceTo(c1))); + .Count(r => r.IsReferenceTo(c1))); - cmd = new ExecuteCommandReplacedBy(GetMockActivator().Object,c1,null); - cmd.Execute(); + cmd = new ExecuteCommandReplacedBy(GetMockActivator(), c1, null); + cmd.Execute(); - Assert.IsEmpty(RepositoryLocator.CatalogueRepository + Assert.IsEmpty(RepositoryLocator.CatalogueRepository .GetAllObjectsWhere("Name", ExtendedProperty.ReplacedBy) - .Where(r=>r.IsReferenceTo(c1))); - + .Where(r => r.IsReferenceTo(c1))); } - } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/CommandExecution/ExecuteCommandSetArgumentTests.cs b/Rdmp.Core.Tests/CommandExecution/ExecuteCommandSetArgumentTests.cs index d6ee8216a0..ce41dcd9b1 100644 --- a/Rdmp.Core.Tests/CommandExecution/ExecuteCommandSetArgumentTests.cs +++ b/Rdmp.Core.Tests/CommandExecution/ExecuteCommandSetArgumentTests.cs @@ -11,176 +11,185 @@ using Rdmp.Core.Curation.Data.DataLoad; using Rdmp.Core.Curation.Data.Pipelines; -namespace Rdmp.Core.Tests.CommandExecution +namespace Rdmp.Core.Tests.CommandExecution; + +internal class ExecuteCommandSetArgumentTests : CommandCliTests { - class ExecuteCommandSetArgumentTests : CommandCliTests + [Test] + public void TestSetArgument_WrongArgCount() + { + var picker = new CommandLineObjectPicker(new[] { "yyy" }, GetActivator()); + var cmd = new ExecuteCommandSetArgument(GetMockActivator(), picker); + + Assert.IsTrue(cmd.IsImpossible); + Assert.AreEqual("Wrong number of parameters supplied to command, expected 3 but got 1", + cmd.ReasonCommandImpossible); + } + + [Test] + public void TestSetArgument_NotAHost() + { + var c = WhenIHaveA(); + + var picker = new CommandLineObjectPicker(new[] { $"Catalogue:{c.ID}", "fff", "yyy" }, GetActivator()); + var cmd = new ExecuteCommandSetArgument(GetMockActivator(), picker); + + Assert.IsTrue(cmd.IsImpossible); + Assert.AreEqual("First parameter must be an IArgumentHost", cmd.ReasonCommandImpossible); + } + + [Test] + public void TestSetArgument_NoArgumentFound() + { + var pt = WhenIHaveA(); + + + var picker = new CommandLineObjectPicker(new[] { $"ProcessTask:{pt.ID}", "fff", "yyy" }, GetActivator()); + var cmd = new ExecuteCommandSetArgument(GetMockActivator(), picker); + + Assert.IsTrue(cmd.IsImpossible); + StringAssert.StartsWith("Could not find argument called 'fff' on ", cmd.ReasonCommandImpossible); + } + + [Test] + public void TestSetArgument_ArgumentWrongType() + { + var pta = WhenIHaveA(); + var pt = pta.ProcessTask; + + pta.Name = "fff"; + + // Argument expects int but is given string value "yyy" + pta.SetType(typeof(int)); + + var picker = new CommandLineObjectPicker(new[] { $"ProcessTask:{pt.ID}", "fff", "yyy" }, GetActivator()); + var cmd = new ExecuteCommandSetArgument(GetMockActivator(), picker); + + Assert.IsTrue(cmd.IsImpossible); + StringAssert.StartsWith("Provided value 'yyy' does not match expected Type 'Int32' of ", + cmd.ReasonCommandImpossible); + } + + + [Test] + public void TestSetArgument_Int_Valid() + { + var pta = WhenIHaveA(); + var pt = pta.ProcessTask; + + pta.Name = "fff"; + pta.SetType(typeof(int)); + + Assert.IsNull(pta.Value); + + 
var picker = new CommandLineObjectPicker(new[] { $"ProcessTask:{pt.ID}", "fff", "5" }, GetActivator()); + + Assert.DoesNotThrow(() => GetInvoker().ExecuteCommand(typeof(ExecuteCommandSetArgument), picker)); + + Assert.AreEqual(5, pta.GetValueAsSystemType()); + } + + [Test] + public void TestSetArgument_Catalogue_Valid() + { + var cata = WhenIHaveA(); + cata.Name = "kapow splat"; + cata.SaveToDatabase(); + + var pta = WhenIHaveA(); + var pt = pta.ProcessTask; + + pta.Name = "fff"; + pta.SetType(typeof(Catalogue)); + + Assert.IsNull(pta.Value); + + var picker = new CommandLineObjectPicker(new[] { $"ProcessTask:{pt.ID}", "fff", $"Catalogue:kapow splat" }, + GetActivator()); + + Assert.DoesNotThrow(() => GetInvoker().ExecuteCommand(typeof(ExecuteCommandSetArgument), picker)); + + Assert.AreEqual(cata, pta.GetValueAsSystemType()); + } + + [Test] + public void TestSetArgument_CatalogueArrayOf1_Valid() + { + var cata1 = WhenIHaveA(); + cata1.Name = "lolzzzyy"; + cata1.SaveToDatabase(); + + + //let's also test that PipelineComponentArgument also work (not just ProcessTaskArgument) + var pca = WhenIHaveA(); + var pc = pca.PipelineComponent; + + pca.Name = "ggg"; + pca.SetType(typeof(Catalogue[])); + + Assert.IsNull(pca.Value); + + var picker = new CommandLineObjectPicker(new[] { $"PipelineComponent:{pc.ID}", "ggg", $"Catalogue:lolzzzyy" }, + GetActivator()); + + Assert.DoesNotThrow(() => GetInvoker().ExecuteCommand(typeof(ExecuteCommandSetArgument), picker)); + + Assert.Contains(cata1, (System.Collections.ICollection)pca.GetValueAsSystemType()); + } + + [Test] + public void TestSetArgument_CatalogueArrayOf2_Valid() { - [Test] - public void TestSetArgument_WrongArgCount() - { - var picker = new CommandLineObjectPicker(new []{"yyy" }, GetActivator()); - var cmd = new ExecuteCommandSetArgument(GetMockActivator().Object,picker); - - Assert.IsTrue(cmd.IsImpossible); - Assert.AreEqual("Wrong number of parameters supplied to command, expected 3 but got 1",cmd.ReasonCommandImpossible); - } - [Test] - public void TestSetArgument_NotAHost() - { - var c = WhenIHaveA(); - - var picker = new CommandLineObjectPicker(new []{$"Catalogue:{c.ID}","fff","yyy" }, GetActivator()); - var cmd = new ExecuteCommandSetArgument(GetMockActivator().Object,picker); - - Assert.IsTrue(cmd.IsImpossible); - Assert.AreEqual("First parameter must be an IArgumentHost",cmd.ReasonCommandImpossible); - } - - [Test] - public void TestSetArgument_NoArgumentFound() - { - var pt = WhenIHaveA(); - - - var picker = new CommandLineObjectPicker(new []{$"ProcessTask:{pt.ID}","fff","yyy" }, GetActivator()); - var cmd = new ExecuteCommandSetArgument(GetMockActivator().Object,picker); - - Assert.IsTrue(cmd.IsImpossible); - StringAssert.StartsWith("Could not find argument called 'fff' on ",cmd.ReasonCommandImpossible); - } - - [Test] - public void TestSetArgument_ArgumentWrongType() - { - var pta = WhenIHaveA(); - var pt = pta.ProcessTask; - - pta.Name = "fff"; - - // Argument expects int but is given string value "yyy" - pta.SetType(typeof(int)); - - var picker = new CommandLineObjectPicker(new []{$"ProcessTask:{pt.ID}","fff","yyy" }, GetActivator()); - var cmd = new ExecuteCommandSetArgument(GetMockActivator().Object,picker); - - Assert.IsTrue(cmd.IsImpossible); - StringAssert.StartsWith("Provided value 'yyy' does not match expected Type 'Int32' of ",cmd.ReasonCommandImpossible); - } - - - [Test] - public void TestSetArgument_Int_Valid() - { - var pta = WhenIHaveA(); - var pt = pta.ProcessTask; - - pta.Name = "fff"; - pta.SetType(typeof(int)); - - 
Assert.IsNull(pta.Value); - - var picker = new CommandLineObjectPicker(new []{$"ProcessTask:{pt.ID}","fff","5" }, GetActivator()); - - Assert.DoesNotThrow(() => GetInvoker().ExecuteCommand(typeof(ExecuteCommandSetArgument),picker)); - - Assert.AreEqual(5,pta.GetValueAsSystemType()); - } - - [Test] - public void TestSetArgument_Catalogue_Valid() - { - var cata = WhenIHaveA(); - cata.Name = "kapow splat"; - cata.SaveToDatabase(); - - var pta = WhenIHaveA(); - var pt = pta.ProcessTask; - - pta.Name = "fff"; - pta.SetType(typeof(Catalogue)); - - Assert.IsNull(pta.Value); - - var picker = new CommandLineObjectPicker(new []{$"ProcessTask:{pt.ID}","fff",$"Catalogue:kapow splat" }, GetActivator()); - - Assert.DoesNotThrow(() => GetInvoker().ExecuteCommand(typeof(ExecuteCommandSetArgument),picker)); - - Assert.AreEqual(cata,pta.GetValueAsSystemType()); - } - [Test] - public void TestSetArgument_CatalogueArrayOf1_Valid() - { - var cata1 = WhenIHaveA(); - cata1.Name = "lolzzzyy"; - cata1.SaveToDatabase(); - - - //let's also test that PipelineComponentArgument also work (not just ProcessTaskArgument) - var pca = WhenIHaveA(); - var pc = pca.PipelineComponent; - - pca.Name = "ggg"; - pca.SetType(typeof(Catalogue[])); - - Assert.IsNull(pca.Value); - - var picker = new CommandLineObjectPicker(new []{$"PipelineComponent:{pc.ID}","ggg",$"Catalogue:lolzzzyy" }, GetActivator()); - - Assert.DoesNotThrow(() => GetInvoker().ExecuteCommand(typeof(ExecuteCommandSetArgument),picker)); - - Assert.Contains(cata1, (System.Collections.ICollection)pca.GetValueAsSystemType()); - } - [Test] - public void TestSetArgument_CatalogueArrayOf2_Valid() - { - var cata1 = WhenIHaveA(); - cata1.Name = "kapow bob"; - cata1.SaveToDatabase(); - - var cata2 = WhenIHaveA(); - cata2.Name = "kapow frank"; - cata2.SaveToDatabase(); - - //let's also test that PipelineComponentArgument also work (not just ProcessTaskArgument) - var pca = WhenIHaveA(); - var pc = pca.PipelineComponent; - - pca.Name = "ggg"; - pca.SetType(typeof(Catalogue[])); - - Assert.IsNull(pca.Value); - - var picker = new CommandLineObjectPicker(new []{$"PipelineComponent:{pc.ID}","ggg",$"Catalogue:kapow*" }, GetActivator()); - - Assert.DoesNotThrow(() => GetInvoker().ExecuteCommand(typeof(ExecuteCommandSetArgument),picker)); - - Assert.Contains(cata1, (System.Collections.ICollection)pca.GetValueAsSystemType()); - Assert.Contains(cata2, (System.Collections.ICollection)pca.GetValueAsSystemType()); - } - [Test] - public void TestSetArgument_CatalogueArray_SetToNull_Valid() - { - var cata1 = WhenIHaveA(); - cata1.Name = "lolzzzyy"; - cata1.SaveToDatabase(); - - - //let's also test that PipelineComponentArgument also work (not just ProcessTaskArgument) - var pca = WhenIHaveA(); - var pc = pca.PipelineComponent; - - pca.Name = "ggg"; - pca.SetType(typeof(Catalogue[])); - pca.SetValue(new Catalogue[]{ cata1}); - pca.SaveToDatabase(); - - Assert.Contains(cata1, (System.Collections.ICollection)pca.GetValueAsSystemType()); - - var picker = new CommandLineObjectPicker(new []{$"PipelineComponent:{pc.ID}","ggg",$"Null" }, GetActivator()); - - Assert.DoesNotThrow(() => GetInvoker().ExecuteCommand(typeof(ExecuteCommandSetArgument),picker)); - - Assert.IsNull(pca.GetValueAsSystemType()); - } + var cata1 = WhenIHaveA(); + cata1.Name = "kapow bob"; + cata1.SaveToDatabase(); + + var cata2 = WhenIHaveA(); + cata2.Name = "kapow frank"; + cata2.SaveToDatabase(); + + //let's also test that PipelineComponentArgument also work (not just ProcessTaskArgument) + var pca = WhenIHaveA(); + var pc = 
pca.PipelineComponent; + + pca.Name = "ggg"; + pca.SetType(typeof(Catalogue[])); + + Assert.IsNull(pca.Value); + + var picker = new CommandLineObjectPicker(new[] { $"PipelineComponent:{pc.ID}", "ggg", $"Catalogue:kapow*" }, + GetActivator()); + + Assert.DoesNotThrow(() => GetInvoker().ExecuteCommand(typeof(ExecuteCommandSetArgument), picker)); + + Assert.Contains(cata1, (System.Collections.ICollection)pca.GetValueAsSystemType()); + Assert.Contains(cata2, (System.Collections.ICollection)pca.GetValueAsSystemType()); + } + + [Test] + public void TestSetArgument_CatalogueArray_SetToNull_Valid() + { + var cata1 = WhenIHaveA(); + cata1.Name = "lolzzzyy"; + cata1.SaveToDatabase(); + + + //let's also test that PipelineComponentArgument also work (not just ProcessTaskArgument) + var pca = WhenIHaveA(); + var pc = pca.PipelineComponent; + + pca.Name = "ggg"; + pca.SetType(typeof(Catalogue[])); + pca.SetValue(new Catalogue[] { cata1 }); + pca.SaveToDatabase(); + + Assert.Contains(cata1, (System.Collections.ICollection)pca.GetValueAsSystemType()); + + var picker = + new CommandLineObjectPicker(new[] { $"PipelineComponent:{pc.ID}", "ggg", $"Null" }, GetActivator()); + + Assert.DoesNotThrow(() => GetInvoker().ExecuteCommand(typeof(ExecuteCommandSetArgument), picker)); + + Assert.IsNull(pca.GetValueAsSystemType()); } } \ No newline at end of file diff --git a/Rdmp.Core.Tests/CommandExecution/ExecuteCommandSetExtendedPropertyTests.cs b/Rdmp.Core.Tests/CommandExecution/ExecuteCommandSetExtendedPropertyTests.cs index e9b12e46e6..364ce8bc8f 100644 --- a/Rdmp.Core.Tests/CommandExecution/ExecuteCommandSetExtendedPropertyTests.cs +++ b/Rdmp.Core.Tests/CommandExecution/ExecuteCommandSetExtendedPropertyTests.cs @@ -10,63 +10,62 @@ using Rdmp.Core.Curation.Data.Aggregation; using System.Linq; -namespace Rdmp.Core.Tests.CommandExecution +namespace Rdmp.Core.Tests.CommandExecution; + +internal class ExecuteCommandSetExtendedPropertyTests : CommandCliTests { - class ExecuteCommandSetExtendedPropertyTests : CommandCliTests + [Test] + public void CommandImpossible_BecausePropertyDoesNotExist() { - [Test] - public void CommandImpossible_BecausePropertyDoesNotExist() - { - var c1 = WhenIHaveA(); + var c1 = WhenIHaveA(); - var cmd = new ExecuteCommandSetExtendedProperty(GetMockActivator().Object, new[] { c1 },"blarg","fff"); + var cmd = new ExecuteCommandSetExtendedProperty(GetMockActivator(), new[] { c1 }, "blarg", "fff"); - Assert.IsTrue(cmd.IsImpossible); - StringAssert.StartsWith("blarg is not a known property. Known properties are:", cmd.ReasonCommandImpossible); - } - [Test] - public void SetIsTemplate_OnMultipleObjects() - { - var ac1 = WhenIHaveA(); - var ac2 = WhenIHaveA(); + Assert.IsTrue(cmd.IsImpossible); + StringAssert.StartsWith("blarg is not a known property. 
Known properties are:", cmd.ReasonCommandImpossible); + } + [Test] + public void SetIsTemplate_OnMultipleObjects() + { + var ac1 = WhenIHaveA(); + var ac2 = WhenIHaveA(); - Assert.IsEmpty( - Repository.CatalogueRepository.GetExtendedProperties(ac1)); - Assert.IsEmpty( - Repository.CatalogueRepository.GetExtendedProperties(ac2)); - var cmd = new ExecuteCommandSetExtendedProperty(GetMockActivator().Object, new[] { ac1,ac2 }, - ExtendedProperty.IsTemplate,"true"); + Assert.IsEmpty( + Repository.CatalogueRepository.GetExtendedProperties(ac1)); + Assert.IsEmpty( + Repository.CatalogueRepository.GetExtendedProperties(ac2)); - Assert.IsFalse(cmd.IsImpossible,cmd.ReasonCommandImpossible); + var cmd = new ExecuteCommandSetExtendedProperty(GetMockActivator(), new[] { ac1, ac2 }, + ExtendedProperty.IsTemplate, "true"); - cmd.Execute(); + Assert.IsFalse(cmd.IsImpossible, cmd.ReasonCommandImpossible); - var declaration1 = Repository.CatalogueRepository.GetExtendedProperties(ac1).Single(); - var declaration2 = Repository.CatalogueRepository.GetExtendedProperties(ac2).Single(); + cmd.Execute(); - foreach(var dec in new[] { declaration1,declaration2}) - { - Assert.AreEqual("IsTemplate", dec.Name); - Assert.AreEqual("true", dec.Value); - } + var declaration1 = Repository.CatalogueRepository.GetExtendedProperties(ac1).Single(); + var declaration2 = Repository.CatalogueRepository.GetExtendedProperties(ac2).Single(); - // now clear that status + foreach (var dec in new[] { declaration1, declaration2 }) + { + Assert.AreEqual("IsTemplate", dec.Name); + Assert.AreEqual("true", dec.Value); + } - cmd = new ExecuteCommandSetExtendedProperty(GetMockActivator().Object, new[] { ac1, ac2 }, - ExtendedProperty.IsTemplate, null); + // now clear that status - Assert.IsFalse(cmd.IsImpossible, cmd.ReasonCommandImpossible); + cmd = new ExecuteCommandSetExtendedProperty(GetMockActivator(), new[] { ac1, ac2 }, + ExtendedProperty.IsTemplate, null); - cmd.Execute(); + Assert.IsFalse(cmd.IsImpossible, cmd.ReasonCommandImpossible); - // should now be back where we started - Assert.IsEmpty( - Repository.CatalogueRepository.GetExtendedProperties(ac1)); - Assert.IsEmpty( - Repository.CatalogueRepository.GetExtendedProperties(ac2)); + cmd.Execute(); - } + // should now be back where we started + Assert.IsEmpty( + Repository.CatalogueRepository.GetExtendedProperties(ac1)); + Assert.IsEmpty( + Repository.CatalogueRepository.GetExtendedProperties(ac2)); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/CommandExecution/ExecuteCommandSetExtractionIdentifierTests.cs b/Rdmp.Core.Tests/CommandExecution/ExecuteCommandSetExtractionIdentifierTests.cs index 5c56dd7b12..ebf0a8ef94 100644 --- a/Rdmp.Core.Tests/CommandExecution/ExecuteCommandSetExtractionIdentifierTests.cs +++ b/Rdmp.Core.Tests/CommandExecution/ExecuteCommandSetExtractionIdentifierTests.cs @@ -6,94 +6,97 @@ using NUnit.Framework; using Rdmp.Core.CommandExecution.AtomicCommands; -using Rdmp.Core.CommandLine.Interactive.Picking; using Rdmp.Core.Curation.Data; using Rdmp.Core.DataExport.Data; using System; -namespace Rdmp.Core.Tests.CommandExecution +namespace Rdmp.Core.Tests.CommandExecution; + +internal class ExecuteCommandSetExtractionIdentifierTests : CommandCliTests { - class ExecuteCommandSetExtractionIdentifierTests : CommandCliTests - { [Test] public void TestSetExtractionIdentifier_Catalogue() { - var ei1 = WhenIHaveA(); - - ei1.Alias = "happyfun"; - ei1.IsExtractionIdentifier = false; - ei1.SaveToDatabase(); + var ei1 = WhenIHaveA(); + + ei1.Alias = 
"happyfun"; + ei1.IsExtractionIdentifier = false; + ei1.SaveToDatabase(); - var cmd = new ExecuteCommandSetExtractionIdentifier(GetMockActivator().Object,ei1.CatalogueItem.Catalogue,null,"happyfun"); - cmd.Execute(); + var cmd = new ExecuteCommandSetExtractionIdentifier(GetMockActivator(), ei1.CatalogueItem.Catalogue, + null, "happyfun"); + cmd.Execute(); - Assert.IsTrue(ei1.IsExtractionIdentifier); + Assert.IsTrue(ei1.IsExtractionIdentifier); } + [Test] public void TestSetExtractionIdentifier_Catalogue_PickOther() { - var ei1 = WhenIHaveA(); + var ei1 = WhenIHaveA(); - var otherCol = new ColumnInfo(Repository, "Other", "varchar", ei1.ColumnInfo.TableInfo); - var otherCatItem = new CatalogueItem(Repository, ei1.CatalogueItem.Catalogue,"Other"); - var otherEi = new ExtractionInformation(Repository, otherCatItem, otherCol, "FFF"); + var otherCol = new ColumnInfo(Repository, "Other", "varchar", ei1.ColumnInfo.TableInfo); + var otherCatItem = new CatalogueItem(Repository, ei1.CatalogueItem.Catalogue, "Other"); + var otherEi = new ExtractionInformation(Repository, otherCatItem, otherCol, "FFF"); - ei1.Alias = "happyfun"; - ei1.IsExtractionIdentifier = true; - ei1.SaveToDatabase(); + ei1.Alias = "happyfun"; + ei1.IsExtractionIdentifier = true; + ei1.SaveToDatabase(); - // before we run the command the primary ei1 is the identifier - Assert.IsTrue(ei1.IsExtractionIdentifier); - Assert.IsFalse(otherEi.IsExtractionIdentifier); + // before we run the command the primary ei1 is the identifier + Assert.IsTrue(ei1.IsExtractionIdentifier); + Assert.IsFalse(otherEi.IsExtractionIdentifier); - // by picking the second (FFF) we should switch - var cmd = new ExecuteCommandSetExtractionIdentifier(GetMockActivator().Object, ei1.CatalogueItem.Catalogue, null, "FFF"); - cmd.Execute(); + // by picking the second (FFF) we should switch + var cmd = new ExecuteCommandSetExtractionIdentifier(GetMockActivator(), ei1.CatalogueItem.Catalogue, + null, "FFF"); + cmd.Execute(); - // original should no longer be the extraction identifer - Assert.IsFalse(ei1.IsExtractionIdentifier); + // original should no longer be the extraction identifer + Assert.IsFalse(ei1.IsExtractionIdentifier); - // and the one picked should now be the only one - Assert.IsTrue(otherEi.IsExtractionIdentifier); + // and the one picked should now be the only one + Assert.IsTrue(otherEi.IsExtractionIdentifier); } + [Test] public void TestSetExtractionIdentifier_Catalogue_ButColumnDoesNotExist() { - var ei1 = WhenIHaveA(); + var ei1 = WhenIHaveA(); - ei1.Alias = "happyfun"; - ei1.IsExtractionIdentifier = false; - ei1.SaveToDatabase(); + ei1.Alias = "happyfun"; + ei1.IsExtractionIdentifier = false; + ei1.SaveToDatabase(); - var ex = Assert.Throws(()=> - new ExecuteCommandSetExtractionIdentifier(GetMockActivator().Object, ei1.CatalogueItem.Catalogue, null, "trollolo") - .Execute()); - Assert.AreEqual("Could not find column(s) trollolo amongst available columns (happyfun)", ex.Message); + var ex = Assert.Throws(() => + new ExecuteCommandSetExtractionIdentifier(GetMockActivator(), ei1.CatalogueItem.Catalogue, null, + "trollolo") + .Execute()); + Assert.AreEqual("Could not find column(s) trollolo amongst available columns (happyfun)", ex.Message); } [Test] public void TestSetExtractionIdentifier_Configuration() { - var ec1 = WhenIHaveA(); + var ec1 = WhenIHaveA(); + + ec1.Alias = "happyfun"; + ec1.IsExtractionIdentifier = false; + ec1.SaveToDatabase(); - ec1.Alias = "happyfun"; - ec1.IsExtractionIdentifier = false; - ec1.SaveToDatabase(); + var config = 
Repository.GetObjectByID(ec1.ExtractionConfiguration_ID); - var config = Repository.GetObjectByID(ec1.ExtractionConfiguration_ID); - - var cmd = new ExecuteCommandSetExtractionIdentifier(GetMockActivator().Object, - ec1.CatalogueExtractionInformation.CatalogueItem.Catalogue, - config - , "happyfun"); - cmd.Execute(); + var cmd = new ExecuteCommandSetExtractionIdentifier(GetMockActivator(), + ec1.CatalogueExtractionInformation.CatalogueItem.Catalogue, + config + , "happyfun"); + cmd.Execute(); - // affects extraction specific version - Assert.IsTrue(ec1.IsExtractionIdentifier); + // affects extraction specific version + Assert.IsTrue(ec1.IsExtractionIdentifier); - // but not master - Assert.IsFalse(ec1.CatalogueExtractionInformation.IsExtractionIdentifier); + // but not master + Assert.IsFalse(ec1.CatalogueExtractionInformation.IsExtractionIdentifier); } - } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/CommandExecution/ExecuteCommandSimilarTests.cs b/Rdmp.Core.Tests/CommandExecution/ExecuteCommandSimilarTests.cs index 871a69ae78..486314c57c 100644 --- a/Rdmp.Core.Tests/CommandExecution/ExecuteCommandSimilarTests.cs +++ b/Rdmp.Core.Tests/CommandExecution/ExecuteCommandSimilarTests.cs @@ -10,74 +10,73 @@ using Rdmp.Core.Curation.Data; using System.Linq; -namespace Rdmp.Core.Tests.CommandExecution +namespace Rdmp.Core.Tests.CommandExecution; + +public class ExecuteCommandSimilarTests : CommandCliTests { - public class ExecuteCommandSimilarTests : CommandCliTests + [Test] + public void FindSameName_MixedCaps() { - [Test] - public void FindSameName_MixedCaps() - { - var cata1 = new Catalogue(Repository, "Bob"); - var cata2 = new Catalogue(Repository, "bob"); + var cata1 = new Catalogue(Repository, "Bob"); + var cata2 = new Catalogue(Repository, "bob"); + + var activator = new ThrowImmediatelyActivator(RepositoryLocator); + var cmd = new ExecuteCommandSimilar(activator, cata1, false); - var activator = new ThrowImmediatelyActivator(RepositoryLocator); - var cmd = new ExecuteCommandSimilar(activator, cata1, false); + Assert.AreEqual(cata2, cmd.Matched.Single()); - Assert.AreEqual(cata2, cmd.Matched.Single()); + cata1.DeleteInDatabase(); + cata2.DeleteInDatabase(); + } - cata1.DeleteInDatabase(); - cata2.DeleteInDatabase(); - } + [Test] + public void FindDifferent_ColumnInfosSame() + { + var c1 = WhenIHaveA(); + var c2 = WhenIHaveA(); - [Test] - public void FindDifferent_ColumnInfosSame() - { - var c1 = WhenIHaveA(); - var c2 = WhenIHaveA(); - - var activator = new ThrowImmediatelyActivator(RepositoryLocator); - var cmd = new ExecuteCommandSimilar(activator, c1, true); + var activator = new ThrowImmediatelyActivator(RepositoryLocator); + var cmd = new ExecuteCommandSimilar(activator, c1, true); - Assert.IsEmpty(cmd.Matched); + Assert.IsEmpty(cmd.Matched); - c1.DeleteInDatabase(); - c2.DeleteInDatabase(); + c1.DeleteInDatabase(); + c2.DeleteInDatabase(); + } - } + [Test] + public void FindDifferent_ColumnInfosDiffer_OnType() + { + var c1 = WhenIHaveA(); + c1.Data_type = "varchar(10)"; - [Test] - public void FindDifferent_ColumnInfosDiffer_OnType() - { - var c1 = WhenIHaveA(); - c1.Data_type = "varchar(10)"; + var c2 = WhenIHaveA(); + c2.Data_type = "varchar(20)"; - var c2 = WhenIHaveA(); - c2.Data_type = "varchar(20)"; + var activator = new ThrowImmediatelyActivator(RepositoryLocator); + var cmd = new ExecuteCommandSimilar(activator, c1, true); - var activator = new ThrowImmediatelyActivator(RepositoryLocator); - var cmd = new ExecuteCommandSimilar(activator, c1, true); + 
Assert.AreEqual(c2, cmd.Matched.Single()); - Assert.AreEqual(c2, cmd.Matched.Single()); + c1.DeleteInDatabase(); + c2.DeleteInDatabase(); + } - c1.DeleteInDatabase(); - c2.DeleteInDatabase(); - } - [Test] - public void FindDifferent_ColumnInfosDiffer_OnCollation() - { - var c1 = WhenIHaveA(); - c1.Collation = "troll doll"; + [Test] + public void FindDifferent_ColumnInfosDiffer_OnCollation() + { + var c1 = WhenIHaveA(); + c1.Collation = "troll doll"; - var c2 = WhenIHaveA(); - c2.Collation = "durdur"; + var c2 = WhenIHaveA(); + c2.Collation = "durdur"; - var activator = new ThrowImmediatelyActivator(RepositoryLocator); - var cmd = new ExecuteCommandSimilar(activator, c1, true); + var activator = new ThrowImmediatelyActivator(RepositoryLocator); + var cmd = new ExecuteCommandSimilar(activator, c1, true); - Assert.AreEqual(c2, cmd.Matched.Single()); + Assert.AreEqual(c2, cmd.Matched.Single()); - c1.DeleteInDatabase(); - c2.DeleteInDatabase(); - } + c1.DeleteInDatabase(); + c2.DeleteInDatabase(); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/CommandExecution/TestCommandsAreSupported.cs b/Rdmp.Core.Tests/CommandExecution/TestCommandsAreSupported.cs index aca896a5fe..5a96722f23 100644 --- a/Rdmp.Core.Tests/CommandExecution/TestCommandsAreSupported.cs +++ b/Rdmp.Core.Tests/CommandExecution/TestCommandsAreSupported.cs @@ -14,165 +14,164 @@ using System; using Tests.Common; -namespace Rdmp.Core.Tests.CommandExecution +namespace Rdmp.Core.Tests.CommandExecution; + +public class TestCommandsAreSupported : UnitTests { - public class TestCommandsAreSupported : UnitTests - { - private CommandInvoker invoker; + private CommandInvoker invoker; - [OneTimeSetUp] - public void Init() - { - var activator = GetActivator(); - invoker = new CommandInvoker(activator); - } + [OneTimeSetUp] + public void Init() + { + var activator = GetActivator(); + invoker = new CommandInvoker(activator); + } - [TestCase(typeof(ExecuteCommandAlterColumnType))] - [TestCase(typeof(ExecuteCommandAlterTableAddArchiveTrigger))] - [TestCase(typeof(ExecuteCommandAlterTableCreatePrimaryKey))] - [TestCase(typeof(ExecuteCommandAlterTableMakeDistinct))] - [TestCase(typeof(ExecuteCommandAlterTableName))] - [TestCase(typeof(ExecuteCommandCreateNewCatalogueByExecutingAnAggregateConfiguration))] - [TestCase(typeof(ExecuteCommandCreateNewCatalogueByImportingExistingDataTable))] - [TestCase(typeof(ExecuteCommandCreateNewCatalogueByImportingFile))] - [TestCase(typeof(ExecuteCommandCreateNewCatalogueFromTableInfo))] - [TestCase(typeof(ExecuteCommandCreateNewCohortByExecutingACohortIdentificationConfiguration))] - [TestCase(typeof(ExecuteCommandCreateNewCohortFromCatalogue))] - [TestCase(typeof(ExecuteCommandCreateNewCohortFromFile))] - [TestCase(typeof(ExecuteCommandCreateNewCohortFromTable))] - [TestCase(typeof(ExecuteCommandImportAlreadyExistingCohort))] - [TestCase(typeof(ExecuteCommandAddAggregateConfigurationToCohortIdentificationSetContainer))] - [TestCase(typeof(ExecuteCommandAddCatalogueToCohortIdentificationAsPatientIndexTable))] - [TestCase(typeof(ExecuteCommandAddCatalogueToCohortIdentificationSetContainer))] - [TestCase(typeof(ExecuteCommandAddCatalogueToGovernancePeriod))] - [TestCase(typeof(ExecuteCommandAddCohortSubContainer))] - [TestCase(typeof(ExecuteCommandAddCohortToExtractionConfiguration))] - [TestCase(typeof(ExecuteCommandAddDatasetsToConfiguration))] - [TestCase(typeof(ExecuteCommandAddDimension))] - [TestCase(typeof(ExecuteCommandAddExtractionProgress))] - [TestCase(typeof(ExecuteCommandAddFavourite))] - 
[TestCase(typeof(ExecuteCommandAddMissingParameters))] - [TestCase(typeof(ExecuteCommandAddNewAggregateGraph))] - [TestCase(typeof(ExecuteCommandAddNewCatalogueItem))] - [TestCase(typeof(ExecuteCommandAddNewExtractionFilterParameterSet))] - [TestCase(typeof(ExecuteCommandAddNewFilterContainer))] - [TestCase(typeof(ExecuteCommandAddNewGovernanceDocument))] - [TestCase(typeof(ExecuteCommandAddNewSupportingDocument))] - [TestCase(typeof(ExecuteCommandAddNewSupportingSqlTable))] - [TestCase(typeof(ExecuteCommandAddPackageToConfiguration))] - [TestCase(typeof(ExecuteCommandAddParameter))] - [TestCase(typeof(ExecuteCommandAddPipelineComponent))] - [TestCase(typeof(ExecuteCommandAddPlugins))] - [TestCase(typeof(ExecuteCommandAssociateCatalogueWithLoadMetadata))] - [TestCase(typeof(ExecuteCommandAssociateCohortIdentificationConfigurationWithProject))] - [TestCase(typeof(ExecuteCommandBulkImportTableInfos))] - [TestCase(typeof(ExecuteCommandChangeExtractability))] - [TestCase(typeof(ExecuteCommandChangeExtractionCategory))] - [TestCase(typeof(ExecuteCommandChangeLoadStage))] - [TestCase(typeof(ExecuteCommandCheck))] - [TestCase(typeof(ExecuteCommandChooseCohort))] - [TestCase(typeof(ExecuteCommandClearQueryCache))] - [TestCase(typeof(ExecuteCommandCloneCohortIdentificationConfiguration))] - [TestCase(typeof(ExecuteCommandCloneExtractionConfiguration))] - [TestCase(typeof(ExecuteCommandClonePipeline))] - [TestCase(typeof(ExecuteCommandConfirmLogs))] - [TestCase(typeof(ExecuteCommandConvertAggregateConfigurationToPatientIndexTable))] - [TestCase(typeof(ExecuteCommandCreateLookup))] - [TestCase(typeof(ExecuteCommandCreateNewANOTable))] - [TestCase(typeof(ExecuteCommandCreateNewCacheProgress))] - [TestCase(typeof(ExecuteCommandCreateNewClassBasedProcessTask))] - [TestCase(typeof(ExecuteCommandCreateNewCohortIdentificationConfiguration))] - [TestCase(typeof(ExecuteCommandCreateNewCohortStore))] - [TestCase(typeof(ExecuteCommandCreateNewDataLoadDirectory))] - [TestCase(typeof(ExecuteCommandCreateNewEmptyCatalogue))] - [TestCase(typeof(ExecuteCommandCreateNewExternalDatabaseServer))] - [TestCase(typeof(ExecuteCommandCreateNewExtractableDataSetPackage))] - [TestCase(typeof(ExecuteCommandCreateNewExtractionConfigurationForProject))] - [TestCase(typeof(ExecuteCommandCreateNewFileBasedProcessTask))] - [TestCase(typeof(ExecuteCommandCreateNewFilter))] - [TestCase(typeof(ExecuteCommandCreateNewGovernancePeriod))] - [TestCase(typeof(ExecuteCommandCreateNewLoadMetadata))] - [TestCase(typeof(ExecuteCommandCreateNewLoadProgress))] - [TestCase(typeof(ExecuteCommandCreateNewPermissionWindow))] - [TestCase(typeof(ExecuteCommandCreateNewRemoteRDMP))] - [TestCase(typeof(ExecuteCommandCreateNewStandardRegex))] - [TestCase(typeof(ExecuteCommandCreatePrivateKey))] - [TestCase(typeof(ExecuteCommandDelete))] - [TestCase(typeof(ExecuteCommandDeprecate))] - [TestCase(typeof(ExecuteCommandDescribe))] - [TestCase(typeof(ExecuteCommandDisableOrEnable))] - [TestCase(typeof(ExecuteCommandExecuteAggregateGraph))] - [TestCase(typeof(ExecuteCommandExportLoggedDataToCsv))] - [TestCase(typeof(ExecuteCommandExportObjectsToFile))] - [TestCase(typeof(ExecuteCommandExportPlugins))] - [TestCase(typeof(ExecuteCommandExtractMetadata))] - [TestCase(typeof(ExecuteCommandFreezeCohortIdentificationConfiguration))] - [TestCase(typeof(ExecuteCommandFreezeExtractionConfiguration))] - [TestCase(typeof(ExecuteCommandGenerateReleaseDocument))] - [TestCase(typeof(ExecuteCommandGuessAssociatedColumns))] - 
[TestCase(typeof(ExecuteCommandImportCatalogueItemDescription))] - [TestCase(typeof(ExecuteCommandImportCatalogueItemDescriptions))] - [TestCase(typeof(ExecuteCommandImportCohortIdentificationConfiguration))] - [TestCase(typeof(ExecuteCommandImportFilterContainerTree))] - [TestCase(typeof(ExecuteCommandImportTableInfo))] - [TestCase(typeof(ExecuteCommandLinkCatalogueItemToColumnInfo))] - [TestCase(typeof(ExecuteCommandList))] - [TestCase(typeof(ExecuteCommandListSupportedCommands))] - [TestCase(typeof(ExecuteCommandListUserSettings))] - [TestCase(typeof(ExecuteCommandMakeCatalogueItemExtractable))] - [TestCase(typeof(ExecuteCommandMakeCatalogueProjectSpecific))] - [TestCase(typeof(ExecuteCommandMakePatientIndexTableIntoRegularCohortIdentificationSetAgain))] - [TestCase(typeof(ExecuteCommandMakeProjectSpecificCatalogueNormalAgain))] - [TestCase(typeof(ExecuteCommandMergeCohortIdentificationConfigurations))] - [TestCase(typeof(ExecuteCommandMoveAggregateIntoContainer))] - [TestCase(typeof(ExecuteCommandMoveCohortAggregateContainerIntoSubContainer))] - [TestCase(typeof(ExecuteCommandMoveContainerIntoContainer))] - [TestCase(typeof(ExecuteCommandMoveFilterIntoContainer))] - [TestCase(typeof(ExecuteCommandNewObject))] - [TestCase(typeof(ExecuteCommandOverrideRawServer))] - [TestCase(typeof(ExecuteCommandPrunePlugin))] - [TestCase(typeof(ExecuteCommandQueryPlatformDatabase))] - [TestCase(typeof(ExecuteCommandRefreshBrokenCohorts))] - [TestCase(typeof(ExecuteCommandRename))] - [TestCase(typeof(ExecuteCommandResetExtractionProgress))] - [TestCase(typeof(ExecuteCommandRunSupportingSql))] - [TestCase(typeof(ExecuteCommandScriptTable))] - [TestCase(typeof(ExecuteCommandScriptTables))] - [TestCase(typeof(ExecuteCommandSet))] - [TestCase(typeof(ExecuteCommandSetAggregateDimension))] - [TestCase(typeof(ExecuteCommandSetArgument))] - [TestCase(typeof(ExecuteCommandSetAxis))] - [TestCase(typeof(ExecuteCommandSetContainerOperation))] - [TestCase(typeof(ExecuteCommandSetExtractionIdentifier))] - [TestCase(typeof(ExecuteCommandSetFilterTreeShortcut))] - [TestCase(typeof(ExecuteCommandSetGlobalDleIgnorePattern))] - [TestCase(typeof(ExecuteCommandSetIgnoredColumns))] - [TestCase(typeof(ExecuteCommandSetPermissionWindow))] - [TestCase(typeof(ExecuteCommandSetPivot))] - [TestCase(typeof(ExecuteCommandSetProjectExtractionDirectory))] - [TestCase(typeof(ExecuteCommandSetQueryCachingDatabase))] - [TestCase(typeof(ExecuteCommandSetUserSetting))] - [TestCase(typeof(ExecuteCommandShow))] - [TestCase(typeof(ExecuteCommandShowRelatedObject))] - [TestCase(typeof(ExecuteCommandSimilar))] - [TestCase(typeof(ExecuteCommandSyncTableInfo))] - [TestCase(typeof(ExecuteCommandUnfreezeExtractionConfiguration))] - [TestCase(typeof(ExecuteCommandUnMergeCohortIdentificationConfiguration))] - [TestCase(typeof(ExecuteCommandUseCredentialsToAccessTableInfoData))] - [TestCase(typeof(ExecuteCommandViewData))] - [TestCase(typeof(ExecuteCommandViewExtractionSql))] - [TestCase(typeof(ExecuteCommandViewFilterMatchData))] - [TestCase(typeof(ExecuteCommandViewLogs))] - [TestCase(typeof(ExecuteCommandExportInDublinCoreFormat))] - [TestCase(typeof(ExecuteCommandImportCatalogueDescriptionsFromShare))] - [TestCase(typeof(ExecuteCommandImportDublinCoreFormat))] - [TestCase(typeof(ExecuteCommandImportFilterDescriptionsFromShare))] - [TestCase(typeof(ExecuteCommandImportShareDefinitionList))] - // [TestCase(typeof(ExecuteCommandSetDataAccessContextForCredentials))] // Not currently CLI compatible - public void TestIsSupported(Type t) - { - 
Assert.IsTrue(invoker.IsSupported(t), $"Type {t} was not supported by CommandInvoker"); - } + [TestCase(typeof(ExecuteCommandAlterColumnType))] + [TestCase(typeof(ExecuteCommandAlterTableAddArchiveTrigger))] + [TestCase(typeof(ExecuteCommandAlterTableCreatePrimaryKey))] + [TestCase(typeof(ExecuteCommandAlterTableMakeDistinct))] + [TestCase(typeof(ExecuteCommandAlterTableName))] + [TestCase(typeof(ExecuteCommandCreateNewCatalogueByExecutingAnAggregateConfiguration))] + [TestCase(typeof(ExecuteCommandCreateNewCatalogueByImportingExistingDataTable))] + [TestCase(typeof(ExecuteCommandCreateNewCatalogueByImportingFile))] + [TestCase(typeof(ExecuteCommandCreateNewCatalogueFromTableInfo))] + [TestCase(typeof(ExecuteCommandCreateNewCohortByExecutingACohortIdentificationConfiguration))] + [TestCase(typeof(ExecuteCommandCreateNewCohortFromCatalogue))] + [TestCase(typeof(ExecuteCommandCreateNewCohortFromFile))] + [TestCase(typeof(ExecuteCommandCreateNewCohortFromTable))] + [TestCase(typeof(ExecuteCommandImportAlreadyExistingCohort))] + [TestCase(typeof(ExecuteCommandAddAggregateConfigurationToCohortIdentificationSetContainer))] + [TestCase(typeof(ExecuteCommandAddCatalogueToCohortIdentificationAsPatientIndexTable))] + [TestCase(typeof(ExecuteCommandAddCatalogueToCohortIdentificationSetContainer))] + [TestCase(typeof(ExecuteCommandAddCatalogueToGovernancePeriod))] + [TestCase(typeof(ExecuteCommandAddCohortSubContainer))] + [TestCase(typeof(ExecuteCommandAddCohortToExtractionConfiguration))] + [TestCase(typeof(ExecuteCommandAddDatasetsToConfiguration))] + [TestCase(typeof(ExecuteCommandAddDimension))] + [TestCase(typeof(ExecuteCommandAddExtractionProgress))] + [TestCase(typeof(ExecuteCommandAddFavourite))] + [TestCase(typeof(ExecuteCommandAddMissingParameters))] + [TestCase(typeof(ExecuteCommandAddNewAggregateGraph))] + [TestCase(typeof(ExecuteCommandAddNewCatalogueItem))] + [TestCase(typeof(ExecuteCommandAddNewExtractionFilterParameterSet))] + [TestCase(typeof(ExecuteCommandAddNewFilterContainer))] + [TestCase(typeof(ExecuteCommandAddNewGovernanceDocument))] + [TestCase(typeof(ExecuteCommandAddNewSupportingDocument))] + [TestCase(typeof(ExecuteCommandAddNewSupportingSqlTable))] + [TestCase(typeof(ExecuteCommandAddPackageToConfiguration))] + [TestCase(typeof(ExecuteCommandAddParameter))] + [TestCase(typeof(ExecuteCommandAddPipelineComponent))] + [TestCase(typeof(ExecuteCommandAddPlugins))] + [TestCase(typeof(ExecuteCommandAssociateCatalogueWithLoadMetadata))] + [TestCase(typeof(ExecuteCommandAssociateCohortIdentificationConfigurationWithProject))] + [TestCase(typeof(ExecuteCommandBulkImportTableInfos))] + [TestCase(typeof(ExecuteCommandChangeExtractability))] + [TestCase(typeof(ExecuteCommandChangeExtractionCategory))] + [TestCase(typeof(ExecuteCommandChangeLoadStage))] + [TestCase(typeof(ExecuteCommandCheck))] + [TestCase(typeof(ExecuteCommandChooseCohort))] + [TestCase(typeof(ExecuteCommandClearQueryCache))] + [TestCase(typeof(ExecuteCommandCloneCohortIdentificationConfiguration))] + [TestCase(typeof(ExecuteCommandCloneExtractionConfiguration))] + [TestCase(typeof(ExecuteCommandClonePipeline))] + [TestCase(typeof(ExecuteCommandConfirmLogs))] + [TestCase(typeof(ExecuteCommandConvertAggregateConfigurationToPatientIndexTable))] + [TestCase(typeof(ExecuteCommandCreateLookup))] + [TestCase(typeof(ExecuteCommandCreateNewANOTable))] + [TestCase(typeof(ExecuteCommandCreateNewCacheProgress))] + [TestCase(typeof(ExecuteCommandCreateNewClassBasedProcessTask))] + 
[TestCase(typeof(ExecuteCommandCreateNewCohortIdentificationConfiguration))] + [TestCase(typeof(ExecuteCommandCreateNewCohortStore))] + [TestCase(typeof(ExecuteCommandCreateNewDataLoadDirectory))] + [TestCase(typeof(ExecuteCommandCreateNewEmptyCatalogue))] + [TestCase(typeof(ExecuteCommandCreateNewExternalDatabaseServer))] + [TestCase(typeof(ExecuteCommandCreateNewExtractableDataSetPackage))] + [TestCase(typeof(ExecuteCommandCreateNewExtractionConfigurationForProject))] + [TestCase(typeof(ExecuteCommandCreateNewFileBasedProcessTask))] + [TestCase(typeof(ExecuteCommandCreateNewFilter))] + [TestCase(typeof(ExecuteCommandCreateNewGovernancePeriod))] + [TestCase(typeof(ExecuteCommandCreateNewLoadMetadata))] + [TestCase(typeof(ExecuteCommandCreateNewLoadProgress))] + [TestCase(typeof(ExecuteCommandCreateNewPermissionWindow))] + [TestCase(typeof(ExecuteCommandCreateNewRemoteRDMP))] + [TestCase(typeof(ExecuteCommandCreateNewStandardRegex))] + [TestCase(typeof(ExecuteCommandCreatePrivateKey))] + [TestCase(typeof(ExecuteCommandDelete))] + [TestCase(typeof(ExecuteCommandDeprecate))] + [TestCase(typeof(ExecuteCommandDescribe))] + [TestCase(typeof(ExecuteCommandDisableOrEnable))] + [TestCase(typeof(ExecuteCommandExecuteAggregateGraph))] + [TestCase(typeof(ExecuteCommandExportLoggedDataToCsv))] + [TestCase(typeof(ExecuteCommandExportObjectsToFile))] + [TestCase(typeof(ExecuteCommandExportPlugins))] + [TestCase(typeof(ExecuteCommandExtractMetadata))] + [TestCase(typeof(ExecuteCommandFreezeCohortIdentificationConfiguration))] + [TestCase(typeof(ExecuteCommandFreezeExtractionConfiguration))] + [TestCase(typeof(ExecuteCommandGenerateReleaseDocument))] + [TestCase(typeof(ExecuteCommandGuessAssociatedColumns))] + [TestCase(typeof(ExecuteCommandImportCatalogueItemDescription))] + [TestCase(typeof(ExecuteCommandImportCatalogueItemDescriptions))] + [TestCase(typeof(ExecuteCommandImportCohortIdentificationConfiguration))] + [TestCase(typeof(ExecuteCommandImportFilterContainerTree))] + [TestCase(typeof(ExecuteCommandImportTableInfo))] + [TestCase(typeof(ExecuteCommandLinkCatalogueItemToColumnInfo))] + [TestCase(typeof(ExecuteCommandList))] + [TestCase(typeof(ExecuteCommandListSupportedCommands))] + [TestCase(typeof(ExecuteCommandListUserSettings))] + [TestCase(typeof(ExecuteCommandMakeCatalogueItemExtractable))] + [TestCase(typeof(ExecuteCommandMakeCatalogueProjectSpecific))] + [TestCase(typeof(ExecuteCommandMakePatientIndexTableIntoRegularCohortIdentificationSetAgain))] + [TestCase(typeof(ExecuteCommandMakeProjectSpecificCatalogueNormalAgain))] + [TestCase(typeof(ExecuteCommandMergeCohortIdentificationConfigurations))] + [TestCase(typeof(ExecuteCommandMoveAggregateIntoContainer))] + [TestCase(typeof(ExecuteCommandMoveCohortAggregateContainerIntoSubContainer))] + [TestCase(typeof(ExecuteCommandMoveContainerIntoContainer))] + [TestCase(typeof(ExecuteCommandMoveFilterIntoContainer))] + [TestCase(typeof(ExecuteCommandNewObject))] + [TestCase(typeof(ExecuteCommandOverrideRawServer))] + [TestCase(typeof(ExecuteCommandPrunePlugin))] + [TestCase(typeof(ExecuteCommandQueryPlatformDatabase))] + [TestCase(typeof(ExecuteCommandRefreshBrokenCohorts))] + [TestCase(typeof(ExecuteCommandRename))] + [TestCase(typeof(ExecuteCommandResetExtractionProgress))] + [TestCase(typeof(ExecuteCommandRunSupportingSql))] + [TestCase(typeof(ExecuteCommandScriptTable))] + [TestCase(typeof(ExecuteCommandScriptTables))] + [TestCase(typeof(ExecuteCommandSet))] + [TestCase(typeof(ExecuteCommandSetAggregateDimension))] + 
[TestCase(typeof(ExecuteCommandSetArgument))] + [TestCase(typeof(ExecuteCommandSetAxis))] + [TestCase(typeof(ExecuteCommandSetContainerOperation))] + [TestCase(typeof(ExecuteCommandSetExtractionIdentifier))] + [TestCase(typeof(ExecuteCommandSetFilterTreeShortcut))] + [TestCase(typeof(ExecuteCommandSetGlobalDleIgnorePattern))] + [TestCase(typeof(ExecuteCommandSetIgnoredColumns))] + [TestCase(typeof(ExecuteCommandSetPermissionWindow))] + [TestCase(typeof(ExecuteCommandSetPivot))] + [TestCase(typeof(ExecuteCommandSetProjectExtractionDirectory))] + [TestCase(typeof(ExecuteCommandSetQueryCachingDatabase))] + [TestCase(typeof(ExecuteCommandSetUserSetting))] + [TestCase(typeof(ExecuteCommandShow))] + [TestCase(typeof(ExecuteCommandShowRelatedObject))] + [TestCase(typeof(ExecuteCommandSimilar))] + [TestCase(typeof(ExecuteCommandSyncTableInfo))] + [TestCase(typeof(ExecuteCommandUnfreezeExtractionConfiguration))] + [TestCase(typeof(ExecuteCommandUnMergeCohortIdentificationConfiguration))] + [TestCase(typeof(ExecuteCommandUseCredentialsToAccessTableInfoData))] + [TestCase(typeof(ExecuteCommandViewData))] + [TestCase(typeof(ExecuteCommandViewExtractionSql))] + [TestCase(typeof(ExecuteCommandViewFilterMatchData))] + [TestCase(typeof(ExecuteCommandViewLogs))] + [TestCase(typeof(ExecuteCommandExportInDublinCoreFormat))] + [TestCase(typeof(ExecuteCommandImportCatalogueDescriptionsFromShare))] + [TestCase(typeof(ExecuteCommandImportDublinCoreFormat))] + [TestCase(typeof(ExecuteCommandImportFilterDescriptionsFromShare))] + [TestCase(typeof(ExecuteCommandImportShareDefinitionList))] + // [TestCase(typeof(ExecuteCommandSetDataAccessContextForCredentials))] // Not currently CLI compatible + public void TestIsSupported(Type t) + { + Assert.IsNull(invoker.WhyCommandNotSupported(t), $"Type {t} was not supported by CommandInvoker"); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/CommandExecution/TestExecuteCommandAssociateCatalogueWithLoadMetadata.cs b/Rdmp.Core.Tests/CommandExecution/TestExecuteCommandAssociateCatalogueWithLoadMetadata.cs index a5880a7140..02b623e25e 100644 --- a/Rdmp.Core.Tests/CommandExecution/TestExecuteCommandAssociateCatalogueWithLoadMetadata.cs +++ b/Rdmp.Core.Tests/CommandExecution/TestExecuteCommandAssociateCatalogueWithLoadMetadata.cs @@ -9,34 +9,29 @@ using Rdmp.Core.CommandLine.Interactive.Picking; using Rdmp.Core.Curation.Data; using Rdmp.Core.Curation.Data.DataLoad; -using System; -using System.Collections.Generic; -using System.Text; -namespace Rdmp.Core.Tests.CommandExecution +namespace Rdmp.Core.Tests.CommandExecution; + +internal class TestExecuteCommandAssociateCatalogueWithLoadMetadata : CommandCliTests { - class TestExecuteCommandAssociateCatalogueWithLoadMetadata : CommandCliTests + [Test] + public void TestExecuteCommandAssociateCatalogueWithLoadMetadata_Simple() { + var cata1 = new Catalogue(RepositoryLocator.CatalogueRepository, "fff"); + var cata2 = new Catalogue(RepositoryLocator.CatalogueRepository, "bbb"); - [Test] - public void TestExecuteCommandAssociateCatalogueWithLoadMetadata_Simple() - { - var cata1 = new Catalogue(RepositoryLocator.CatalogueRepository,"fff"); - var cata2 = new Catalogue(RepositoryLocator.CatalogueRepository,"bbb"); - - Assert.IsNull(cata1.LoadMetadata); - Assert.IsNull(cata2.LoadMetadata); + Assert.IsNull(cata1.LoadMetadata); + Assert.IsNull(cata2.LoadMetadata); - var lmd = new LoadMetadata(RepositoryLocator.CatalogueRepository,"mylmd"); + var lmd = new LoadMetadata(RepositoryLocator.CatalogueRepository, "mylmd"); - 
GetInvoker().ExecuteCommand(typeof(ExecuteCommandAssociateCatalogueWithLoadMetadata),
-                new CommandLineObjectPicker(new[]{$"LoadMetadata:{lmd.ID}", "Catalogue:fff"}, GetActivator()));
+        GetInvoker().ExecuteCommand(typeof(ExecuteCommandAssociateCatalogueWithLoadMetadata),
+            new CommandLineObjectPicker(new[] { $"LoadMetadata:{lmd.ID}", "Catalogue:fff" }, GetActivator()));
-            cata1.RevertToDatabaseState();
-            cata2.RevertToDatabaseState();
+        cata1.RevertToDatabaseState();
+        cata2.RevertToDatabaseState();
-            Assert.AreEqual(lmd.ID,cata1.LoadMetadata_ID);
-            Assert.IsNull(cata2.LoadMetadata);
-        }
+        Assert.AreEqual(lmd.ID, cata1.LoadMetadata_ID);
+        Assert.IsNull(cata2.LoadMetadata);
     }
-}
+}
\ No newline at end of file
diff --git a/Rdmp.Core.Tests/CommandExecution/TestExecuteCommandClearUserSettings.cs b/Rdmp.Core.Tests/CommandExecution/TestExecuteCommandClearUserSettings.cs
new file mode 100644
index 0000000000..eb2e3d0b6f
--- /dev/null
+++ b/Rdmp.Core.Tests/CommandExecution/TestExecuteCommandClearUserSettings.cs
@@ -0,0 +1,34 @@
+// Copyright (c) The University of Dundee 2018-2023
+// This file is part of the Research Data Management Platform (RDMP).
+// RDMP is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
+// RDMP is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+// You should have received a copy of the GNU General Public License along with RDMP. If not, see <https://www.gnu.org/licenses/>.
+
+using NUnit.Framework;
+using Rdmp.Core.CommandExecution.AtomicCommands;
+using Rdmp.Core.CommandLine.Interactive.Picking;
+using Rdmp.Core.ReusableLibraryCode.Checks;
+using Rdmp.Core.ReusableLibraryCode.Settings;
+
+namespace Rdmp.Core.Tests.CommandExecution;
+
+internal sealed class TestExecuteCommandClearUserSettings : CommandCliTests
+{
+    [Test]
+    public void Test_ClearUserSettings()
+    {
+        var invoker = GetInvoker();
+        var activator = GetActivator();
+
+        UserSettings.Wait5SecondsAfterStartupUI = false;
+
+        invoker.ExecuteCommand(typeof(ExecuteCommandSetUserSetting), new CommandLineObjectPicker(new[] { nameof(UserSettings.Wait5SecondsAfterStartupUI), "true" }, activator));
+
+        Assert.IsTrue(UserSettings.Wait5SecondsAfterStartupUI);
+        invoker.ExecuteCommand(typeof(ExecuteCommandSetUserSetting), new CommandLineObjectPicker(new[] { nameof(UserSettings.Wait5SecondsAfterStartupUI), "false" }, activator));
+        Assert.IsFalse(UserSettings.Wait5SecondsAfterStartupUI);
+        invoker.ExecuteCommand(typeof(ExecuteCommandClearUserSettings), new CommandLineObjectPicker(System.Array.Empty<string>(), activator));
+
+        Assert.IsTrue(UserSettings.Wait5SecondsAfterStartupUI);
+    }
+}
\ No newline at end of file
diff --git a/Rdmp.Core.Tests/CommandExecution/TestExecuteCommandDescribe.cs b/Rdmp.Core.Tests/CommandExecution/TestExecuteCommandDescribe.cs
index f2c88dc903..608893fd1d 100644
--- a/Rdmp.Core.Tests/CommandExecution/TestExecuteCommandDescribe.cs
+++ b/Rdmp.Core.Tests/CommandExecution/TestExecuteCommandDescribe.cs
@@ -4,36 +4,32 @@
 // RDMP is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
// You should have received a copy of the GNU General Public License along with RDMP. If not, see . -using System; -using System.Collections.Generic; -using System.Text; -using Moq; +using NSubstitute; using NUnit.Framework; using Rdmp.Core.CommandExecution; using Rdmp.Core.CommandExecution.AtomicCommands; using Rdmp.Core.Curation.Data; using Tests.Common; -namespace Rdmp.Core.Tests.CommandExecution +namespace Rdmp.Core.Tests.CommandExecution; + +internal class TestExecuteCommandDescribe : UnitTests { - class TestExecuteCommandDescribe : UnitTests - { [Test] public void TestDescribeCatalogue() { - var mock = new Mock(); - mock.Setup(m => m.Show(It.IsAny())); - - var c = WhenIHaveA(); - c.Description = "fish"; - - var describe = new ExecuteCommandDescribe(mock.Object,new []{c}); - Assert.IsFalse(describe.IsImpossible,describe.ReasonCommandImpossible); + var mock = Substitute.For(); + mock.When(x => x.Show(Arg.Any())).Do(x => { }); + + var c = WhenIHaveA(); + c.Description = "fish"; + + var describe = new ExecuteCommandDescribe(mock, new[] { c }); + Assert.IsFalse(describe.IsImpossible, describe.ReasonCommandImpossible); - describe.Execute(); + describe.Execute(); - // Called once - mock.Verify(m => m.Show(It.IsRegex(".*Description:fish.*")), Times.Once()); + // Called once + mock.Received(1).Show(Arg.Is(i => i.Contains("Description:fish"))); } - } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/CommandExecution/TestExecuteCommandDescribeCommand.cs b/Rdmp.Core.Tests/CommandExecution/TestExecuteCommandDescribeCommand.cs index f411c5b240..801ffce86a 100644 --- a/Rdmp.Core.Tests/CommandExecution/TestExecuteCommandDescribeCommand.cs +++ b/Rdmp.Core.Tests/CommandExecution/TestExecuteCommandDescribeCommand.cs @@ -5,79 +5,73 @@ // You should have received a copy of the GNU General Public License along with RDMP. If not, see . using System; -using System.Collections.Generic; -using System.Text.RegularExpressions; -using Moq; using NUnit.Framework; -using Rdmp.Core.CommandExecution; using Rdmp.Core.CommandExecution.AtomicCommands; using Rdmp.Core.CommandLine.Interactive.Picking; -using Tests.Common; -namespace Rdmp.Core.Tests.CommandExecution +namespace Rdmp.Core.Tests.CommandExecution; + +internal class TestExecuteCommandDescribeCommand : CommandCliTests { - class TestExecuteCommandDescribeCommand : CommandCliTests + /// + /// Asserts that the help text matches your text + /// + /// + /// + private void AssertHelpIs(string expectedHelp, Type forCommand) { - - /// - /// Asserts that the help text matches your text - /// - /// - /// - private void AssertHelpIs(string expectedHelp, Type forCommand) - { - var activator = GetMockActivator().Object; + var activator = GetMockActivator(); - var cmd = new ExecuteCommandDescribe(activator, new CommandLineObjectPicker(new []{forCommand.Name},activator)); - Assert.IsFalse(cmd.IsImpossible,cmd.ReasonCommandImpossible); + var cmd = new ExecuteCommandDescribe(activator, + new CommandLineObjectPicker(new[] { forCommand.Name }, activator)); + Assert.IsFalse(cmd.IsImpossible, cmd.ReasonCommandImpossible); - cmd.Execute(); - StringAssert.Contains(expectedHelp, cmd.HelpShown); - } + cmd.Execute(); + StringAssert.Contains(expectedHelp, cmd.HelpShown); + } - [Test] - public void Test_DescribeDeleteCommand() - { - AssertHelpIs(@" Delete + [Test] + public void Test_DescribeDeleteCommand() + { + AssertHelpIs(@" Delete PARAMETERS: -deletables IDeleteable[] The object(s) you want to delete. 
If multiple you must set deleteMany to true", typeof(ExecuteCommandDelete)); - } +deletables IDeleteable[] The object(s) you want to delete. If multiple you must set deleteMany to true", + typeof(ExecuteCommandDelete)); + } - [Test] - public void Test_ImportTableInfo_CommandHelp() - { - AssertHelpIs( -@" ImportTableInfo + [Test] + public void Test_ImportTableInfo_CommandHelp() + { + AssertHelpIs( + @" ImportTableInfo
PARAMETERS: table DiscoveredTable The table or view you want to reference from RDMP. See PickTable for syntax createCatalogue Boolean True to create a Catalogue as well as a TableInfo" - ,typeof(ExecuteCommandImportTableInfo)); - - } + , typeof(ExecuteCommandImportTableInfo)); + } - [Test] - public void Test_DescribeCommand_ExecuteCommandNewObject() - { - AssertHelpIs( @" NewObject + [Test] + public void Test_DescribeCommand_ExecuteCommandNewObject() + { + AssertHelpIs(@" NewObject PARAMETERS: type The object to create e.g. Catalogue -args Dynamic list of values to satisfy the types constructor",typeof(ExecuteCommandNewObject)); - } +args Dynamic list of values to satisfy the types constructor", typeof(ExecuteCommandNewObject)); + } - [Test] - public void Test_DescribeCommand_ExecuteCommandSetArgument() - { - AssertHelpIs( @" SetArgument + [Test] + public void Test_DescribeCommand_ExecuteCommandSetArgument() + { + AssertHelpIs(@" SetArgument PARAMETERS: component Module to set value on e.g. ProcessTask:1 argName Name of an argument to set on the component e.g. Retry argValue New value for argument e.g. Null, True, Catalogue:5 etc -",typeof(ExecuteCommandSetArgument)); - } +", typeof(ExecuteCommandSetArgument)); } -} \ No newline at end of file +} diff --git a/Rdmp.Core.Tests/CommandExecution/TestExecuteCommandImportFilterContainerTree.cs b/Rdmp.Core.Tests/CommandExecution/TestExecuteCommandImportFilterContainerTree.cs index 90657a68a5..1acda692f4 100644 --- a/Rdmp.Core.Tests/CommandExecution/TestExecuteCommandImportFilterContainerTree.cs +++ b/Rdmp.Core.Tests/CommandExecution/TestExecuteCommandImportFilterContainerTree.cs @@ -5,151 +5,147 @@ // You should have received a copy of the GNU General Public License along with RDMP. If not, see . using NUnit.Framework; -using Rdmp.Core.CommandExecution; using Rdmp.Core.CommandExecution.AtomicCommands; using Rdmp.Core.CommandLine.Interactive; using Rdmp.Core.Curation.Data; using Rdmp.Core.Curation.Data.Aggregation; using Rdmp.Core.Curation.Data.Cohort; using Rdmp.Core.DataExport.Data; -using ReusableLibraryCode.Checks; -using System; -using System.Collections.Generic; using System.Linq; -using System.Text; -using Tests.Common; +using Rdmp.Core.ReusableLibraryCode.Checks; -namespace Rdmp.Core.Tests.CommandExecution +namespace Rdmp.Core.Tests.CommandExecution; + +internal class TestExecuteCommandImportFilterContainerTree : CommandInvokerTests { - class TestExecuteCommandImportFilterContainerTree : CommandInvokerTests + [Test] + public void TestImportTree_FromCohortIdentificationConfiguration_ToSelectedDatasets() { - [Test] - public void TestImportTree_FromCohortIdentificationConfiguration_ToSelectedDatasets() - { - var sds = WhenIHaveA(); - - var cata = sds.ExtractableDataSet.Catalogue; - - var cic = new CohortIdentificationConfiguration(Repository,"my cic"); - cic.CreateRootContainerIfNotExists(); - - var ac = new AggregateConfiguration(Repository,cata,"myagg"); - ac.CreateRootContainerIfNotExists(); - cic.RootCohortAggregateContainer.AddChild(ac,1); - - var filterToImport = new AggregateFilter(Repository,"MyFilter"){WhereSQL = "true" }; - ac.RootFilterContainer.AddChild(filterToImport); - - //there should be no root container - Assert.IsNull(sds.RootFilterContainer); - - //run the command - var mgr = new ConsoleInputManager(RepositoryLocator,new ThrowImmediatelyCheckNotifier()); - mgr.DisallowInput = true; - var cmd = new ExecuteCommandImportFilterContainerTree(mgr,sds,ac); - - Assert.IsFalse(cmd.IsImpossible,cmd.ReasonCommandImpossible); - 
cmd.Execute(); - - sds.ClearAllInjections(); - Assert.IsNotNull(sds.RootFilterContainer); - Assert.AreEqual(1,sds.RootFilterContainer.GetFilters().Length); - Assert.AreEqual("MyFilter",sds.RootFilterContainer.GetFilters()[0].Name); - Assert.AreEqual("true",sds.RootFilterContainer.GetFilters()[0].WhereSQL); - - Assert.AreNotEqual(filterToImport.GetType(),sds.RootFilterContainer.GetFilters()[0].GetType()); - } - - [Test] - public void TestImportTree_FromSelectedDatasets_ToCohortIdentificationConfiguration() + var sds = WhenIHaveA(); + + var cata = sds.ExtractableDataSet.Catalogue; + + var cic = new CohortIdentificationConfiguration(Repository, "my cic"); + cic.CreateRootContainerIfNotExists(); + + var ac = new AggregateConfiguration(Repository, cata, "myagg"); + ac.CreateRootContainerIfNotExists(); + cic.RootCohortAggregateContainer.AddChild(ac, 1); + + var filterToImport = new AggregateFilter(Repository, "MyFilter") { WhereSQL = "true" }; + ac.RootFilterContainer.AddChild(filterToImport); + + //there should be no root container + Assert.IsNull(sds.RootFilterContainer); + + //run the command + var mgr = new ConsoleInputManager(RepositoryLocator, ThrowImmediatelyCheckNotifier.Quiet) { + DisallowInput = true + }; + var cmd = new ExecuteCommandImportFilterContainerTree(mgr, sds, ac); + + Assert.IsFalse(cmd.IsImpossible, cmd.ReasonCommandImpossible); + cmd.Execute(); + + sds.ClearAllInjections(); + Assert.IsNotNull(sds.RootFilterContainer); + Assert.AreEqual(1, sds.RootFilterContainer.GetFilters().Length); + Assert.AreEqual("MyFilter", sds.RootFilterContainer.GetFilters()[0].Name); + Assert.AreEqual("true", sds.RootFilterContainer.GetFilters()[0].WhereSQL); + + Assert.AreNotEqual(filterToImport.GetType(), sds.RootFilterContainer.GetFilters()[0].GetType()); + } + + [Test] + public void TestImportTree_FromSelectedDatasets_ToCohortIdentificationConfiguration() + { + // Import From Selected Dataset + var sds = WhenIHaveA(); + sds.CreateRootContainerIfNotExists(); + + var filterToImport = + new DeployedExtractionFilter(Repository, "MyFilter", (FilterContainer)sds.RootFilterContainer) + { WhereSQL = "true" }; + filterToImport.SaveToDatabase(); + + var cata = sds.ExtractableDataSet.Catalogue; + + // Into an Aggregate Configuration + var cic = new CohortIdentificationConfiguration(Repository, "my cic"); + cic.CreateRootContainerIfNotExists(); + var ac = new AggregateConfiguration(Repository, cata, "myagg"); - // Import From Selected Dataset - var sds = WhenIHaveA(); - sds.CreateRootContainerIfNotExists(); - - var filterToImport = new DeployedExtractionFilter(Repository,"MyFilter", (FilterContainer)sds.RootFilterContainer){WhereSQL = "true" }; - filterToImport.SaveToDatabase(); - - var cata = sds.ExtractableDataSet.Catalogue; - - // Into an Aggregate Configuration - var cic = new CohortIdentificationConfiguration(Repository,"my cic"); - cic.CreateRootContainerIfNotExists(); - var ac = new AggregateConfiguration(Repository,cata,"myagg"); - - cic.RootCohortAggregateContainer.AddChild(ac,1); - - //there should be no root container - Assert.IsNull(ac.RootFilterContainer); - - //run the command - var mgr = new ConsoleInputManager(RepositoryLocator,new ThrowImmediatelyCheckNotifier()); - mgr.DisallowInput = true; - var cmd = new ExecuteCommandImportFilterContainerTree(mgr,ac,sds); - - Assert.IsFalse(cmd.IsImpossible,cmd.ReasonCommandImpossible); - cmd.Execute(); - - ac.ClearAllInjections(); - Assert.IsNotNull(ac.RootFilterContainer); - Assert.AreEqual(1,ac.RootFilterContainer.GetFilters().Length); - 
Assert.AreEqual("MyFilter",ac.RootFilterContainer.GetFilters()[0].Name); - Assert.AreEqual("true",ac.RootFilterContainer.GetFilters()[0].WhereSQL); - - Assert.AreNotEqual(filterToImport.GetType(),ac.RootFilterContainer.GetFilters()[0].GetType()); - - - } - - - [Test] - public void TestImportTree_FromCohortIdentificationConfiguration_ToSelectedDatasets_PreserveOperation() + cic.RootCohortAggregateContainer.AddChild(ac, 1); + + //there should be no root container + Assert.IsNull(ac.RootFilterContainer); + + //run the command + var mgr = new ConsoleInputManager(RepositoryLocator, ThrowImmediatelyCheckNotifier.Quiet) { - var sds = WhenIHaveA(); - - var cata = sds.ExtractableDataSet.Catalogue; - - var cic = new CohortIdentificationConfiguration(Repository,"my cic"); - cic.CreateRootContainerIfNotExists(); - - var ac = new AggregateConfiguration(Repository,cata,"myagg"); - ac.CreateRootContainerIfNotExists(); - cic.RootCohortAggregateContainer.AddChild(ac,1); - - var filterToImport = new AggregateFilter(Repository,"MyFilter"){WhereSQL = "true" }; - var root = ac.RootFilterContainer; - root.AddChild(filterToImport); - root.Operation = FilterContainerOperation.OR; - root.SaveToDatabase(); - - // add 2 subcontainers, these should also get cloned and should preserve the Operation correctly - root.AddChild(new AggregateFilterContainer(Repository, FilterContainerOperation.AND)); - root.AddChild(new AggregateFilterContainer(Repository, FilterContainerOperation.OR)); - - //there should be no root container - Assert.IsNull(sds.RootFilterContainer); - - //run the command - var mgr = new ConsoleInputManager(RepositoryLocator,new ThrowImmediatelyCheckNotifier()); - mgr.DisallowInput = true; - var cmd = new ExecuteCommandImportFilterContainerTree(mgr,sds,ac); - - Assert.IsFalse(cmd.IsImpossible,cmd.ReasonCommandImpossible); - cmd.Execute(); - - sds.ClearAllInjections(); - Assert.AreEqual(FilterContainerOperation.OR, sds.RootFilterContainer.Operation); - Assert.IsNotNull(sds.RootFilterContainer); - Assert.AreEqual(1,sds.RootFilterContainer.GetFilters().Length); - - var subContainers = sds.RootFilterContainer.GetSubContainers(); - Assert.AreEqual(2, subContainers.Length); - Assert.AreEqual(1, subContainers.Count(e=>e.Operation == FilterContainerOperation.AND)); - Assert.AreEqual(1, subContainers.Count(e => e.Operation == FilterContainerOperation.OR)); - - - } + DisallowInput = true + }; + var cmd = new ExecuteCommandImportFilterContainerTree(mgr, ac, sds); + + Assert.IsFalse(cmd.IsImpossible, cmd.ReasonCommandImpossible); + cmd.Execute(); + + ac.ClearAllInjections(); + Assert.IsNotNull(ac.RootFilterContainer); + Assert.AreEqual(1, ac.RootFilterContainer.GetFilters().Length); + Assert.AreEqual("MyFilter", ac.RootFilterContainer.GetFilters()[0].Name); + Assert.AreEqual("true", ac.RootFilterContainer.GetFilters()[0].WhereSQL); + + Assert.AreNotEqual(filterToImport.GetType(), ac.RootFilterContainer.GetFilters()[0].GetType()); + } + + + [Test] + public void TestImportTree_FromCohortIdentificationConfiguration_ToSelectedDatasets_PreserveOperation() + { + var sds = WhenIHaveA(); + + var cata = sds.ExtractableDataSet.Catalogue; + + var cic = new CohortIdentificationConfiguration(Repository, "my cic"); + cic.CreateRootContainerIfNotExists(); + + var ac = new AggregateConfiguration(Repository, cata, "myagg"); + ac.CreateRootContainerIfNotExists(); + cic.RootCohortAggregateContainer.AddChild(ac, 1); + var filterToImport = new AggregateFilter(Repository, "MyFilter") { WhereSQL = "true" }; + var root = 
ac.RootFilterContainer; + root.AddChild(filterToImport); + root.Operation = FilterContainerOperation.OR; + root.SaveToDatabase(); + + // add 2 subcontainers, these should also get cloned and should preserve the Operation correctly + root.AddChild(new AggregateFilterContainer(Repository, FilterContainerOperation.AND)); + root.AddChild(new AggregateFilterContainer(Repository, FilterContainerOperation.OR)); + + //there should be no root container + Assert.IsNull(sds.RootFilterContainer); + + //run the command + var mgr = new ConsoleInputManager(RepositoryLocator, ThrowImmediatelyCheckNotifier.Quiet) + { + DisallowInput = true + }; + var cmd = new ExecuteCommandImportFilterContainerTree(mgr, sds, ac); + + Assert.IsFalse(cmd.IsImpossible, cmd.ReasonCommandImpossible); + cmd.Execute(); + + sds.ClearAllInjections(); + Assert.AreEqual(FilterContainerOperation.OR, sds.RootFilterContainer.Operation); + Assert.IsNotNull(sds.RootFilterContainer); + Assert.AreEqual(1, sds.RootFilterContainer.GetFilters().Length); + + var subContainers = sds.RootFilterContainer.GetSubContainers(); + Assert.AreEqual(2, subContainers.Length); + Assert.AreEqual(1, subContainers.Count(e => e.Operation == FilterContainerOperation.AND)); + Assert.AreEqual(1, subContainers.Count(e => e.Operation == FilterContainerOperation.OR)); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/CommandExecution/TestExecuteCommandImportTableInfo.cs b/Rdmp.Core.Tests/CommandExecution/TestExecuteCommandImportTableInfo.cs index e7b46c04d2..410d05d1ed 100644 --- a/Rdmp.Core.Tests/CommandExecution/TestExecuteCommandImportTableInfo.cs +++ b/Rdmp.Core.Tests/CommandExecution/TestExecuteCommandImportTableInfo.cs @@ -9,39 +9,41 @@ using Rdmp.Core.CommandExecution.AtomicCommands; using Rdmp.Core.CommandLine.Interactive.Picking; -namespace Rdmp.Core.Tests.CommandExecution +namespace Rdmp.Core.Tests.CommandExecution; + +internal class TestExecuteCommandImportTableInfo : CommandCliTests { - class TestExecuteCommandImportTableInfo : CommandCliTests + [Test] + public void Test_ImportTableInfo_NoArguments() + { + var ex = Assert.Throws(() => GetInvoker().ExecuteCommand(typeof(ExecuteCommandImportTableInfo), + new CommandLineObjectPicker(Array.Empty(), GetActivator()))); + + StringAssert.StartsWith( + "Expected parameter at index 0 to be a FAnsi.Discovery.DiscoveredTable (for parameter 'table') but it was Missing", + ex.Message); + } + + [Test] + public void Test_ImportTableInfo_MalformedArgument() { - [Test] - public void Test_ImportTableInfo_NoArguments() - { - - var ex = Assert.Throws(() => GetInvoker().ExecuteCommand(typeof(ExecuteCommandImportTableInfo), - new CommandLineObjectPicker(new string[0], GetActivator()))); - - StringAssert.StartsWith("Expected parameter at index 0 to be a FAnsi.Discovery.DiscoveredTable (for parameter 'table') but it was Missing",ex.Message); - } - - [Test] - public void Test_ImportTableInfo_MalformedArgument() - { - var ex = Assert.Throws(() => GetInvoker().ExecuteCommand(typeof(ExecuteCommandImportTableInfo), - new CommandLineObjectPicker(new string[]{ "MyTable"}, GetActivator()))); - - StringAssert.StartsWith("Expected parameter at index 0 to be a FAnsi.Discovery.DiscoveredTable (for parameter 'table') but it was MyTable",ex.Message); - } - - [Test] - public void Test_ImportTableInfo_NoTable() - { - var tbl = "Table:MyTable:DatabaseType:MicrosoftSQLServer:Server=myServerAddress;Database=myDataBase;Trusted_Connection=True"; - - var ex = Assert.Throws(() => 
GetInvoker().ExecuteCommand(typeof(ExecuteCommandImportTableInfo), - new CommandLineObjectPicker(new string[]{ tbl,"true"}, GetActivator()))); - - StringAssert.StartsWith("Could not reach server myServerAddress",ex.Message); - } + var ex = Assert.Throws(() => GetInvoker().ExecuteCommand(typeof(ExecuteCommandImportTableInfo), + new CommandLineObjectPicker(new string[] { "MyTable" }, GetActivator()))); + + StringAssert.StartsWith( + "Expected parameter at index 0 to be a FAnsi.Discovery.DiscoveredTable (for parameter 'table') but it was MyTable", + ex.Message); + } + + [Test] + public void Test_ImportTableInfo_NoTable() + { + var tbl = + "Table:MyTable:DatabaseType:MicrosoftSQLServer:Server=myServerAddress;Database=myDataBase;Trusted_Connection=True"; + + var ex = Assert.Throws(() => GetInvoker().ExecuteCommand(typeof(ExecuteCommandImportTableInfo), + new CommandLineObjectPicker(new string[] { tbl, "true" }, GetActivator()))); + StringAssert.StartsWith("Could not reach server myServerAddress", ex.Message); } } \ No newline at end of file diff --git a/Rdmp.Core.Tests/CommandExecution/TestExecuteCommandNewObject.cs b/Rdmp.Core.Tests/CommandExecution/TestExecuteCommandNewObject.cs index 3e1d92e30c..1205cf0f7f 100644 --- a/Rdmp.Core.Tests/CommandExecution/TestExecuteCommandNewObject.cs +++ b/Rdmp.Core.Tests/CommandExecution/TestExecuteCommandNewObject.cs @@ -5,69 +5,67 @@ // You should have received a copy of the GNU General Public License along with RDMP. If not, see . using System; -using System.Collections.Generic; using System.Linq; -using System.Text; using NUnit.Framework; using Rdmp.Core.CommandExecution.AtomicCommands; using Rdmp.Core.CommandLine.Interactive.Picking; using Rdmp.Core.Curation.Data; using Tests.Common; -namespace Rdmp.Core.Tests.CommandExecution +namespace Rdmp.Core.Tests.CommandExecution; + +internal class TestExecuteCommandNewObject : CommandCliTests { - class TestExecuteCommandNewObject : CommandCliTests + [Test] + public void Test_NewObjectCommand_NoArguments() { - [Test] - public void Test_NewObjectCommand_NoArguments() - { - - var ex = Assert.Throws(() => GetInvoker().ExecuteCommand(typeof(ExecuteCommandNewObject), - new CommandLineObjectPicker(new string[0], GetActivator()))); + var ex = Assert.Throws(() => GetInvoker().ExecuteCommand(typeof(ExecuteCommandNewObject), + new CommandLineObjectPicker(Array.Empty(), GetActivator()))); - StringAssert.StartsWith("First parameter must be a Type",ex.Message); - } + StringAssert.StartsWith("First parameter must be a Type", ex.Message); + } - [Test] - public void Test_NewObjectCommand_NonExistentTypeArgument() - { - var ex = Assert.Throws(() => GetInvoker().ExecuteCommand(typeof(ExecuteCommandNewObject), - new CommandLineObjectPicker(new[]{"Fissdlkfldfj"}, GetActivator()))); + [Test] + public void Test_NewObjectCommand_NonExistentTypeArgument() + { + var ex = Assert.Throws(() => GetInvoker().ExecuteCommand(typeof(ExecuteCommandNewObject), + new CommandLineObjectPicker(new[] { "Fissdlkfldfj" }, GetActivator()))); + + StringAssert.StartsWith("First parameter must be a Type", ex.Message); + } - StringAssert.StartsWith("First parameter must be a Type",ex.Message); - } + [Test] + public void Test_NewObjectCommand_WrongTypeArgument() + { + var picker = new CommandLineObjectPicker(new[] { "UnitTests" }, GetActivator()); + Assert.AreEqual(typeof(UnitTests), picker[0].Type); - [Test] - public void Test_NewObjectCommand_WrongTypeArgument() - { - var picker = new CommandLineObjectPicker(new[] {"UnitTests"}, GetActivator()); - 
Assert.AreEqual(typeof(UnitTests),picker[0].Type); + var ex = Assert.Throws(() => GetInvoker().ExecuteCommand(typeof(ExecuteCommandNewObject), picker)); - var ex = Assert.Throws(() => GetInvoker().ExecuteCommand(typeof(ExecuteCommandNewObject),picker)); + StringAssert.StartsWith("Type must be derived from DatabaseEntity", ex.Message); + } - StringAssert.StartsWith("Type must be derived from DatabaseEntity",ex.Message); - } + [Test] + public void Test_NewObjectCommand_MissingNameArgument() + { + var picker = new CommandLineObjectPicker(new[] { "Catalogue" }, GetActivator()); + Assert.AreEqual(typeof(Catalogue), picker[0].Type); - [Test] - public void Test_NewObjectCommand_MissingNameArgument() - { - var picker = new CommandLineObjectPicker(new[] {"Catalogue"}, GetActivator()); - Assert.AreEqual(typeof(Catalogue),picker[0].Type); + var ex = Assert.Throws(() => + GetInvoker().ExecuteCommand(typeof(ExecuteCommandNewObject), picker)); - var ex = Assert.Throws(() => GetInvoker().ExecuteCommand(typeof(ExecuteCommandNewObject),picker)); + StringAssert.StartsWith("Value needed for parameter 'name' (of type 'System.String')", ex.Message); + } - StringAssert.StartsWith("Value needed for parameter 'name' (of type 'System.String')",ex.Message); - } + [Test] + public void Test_NewObjectCommand_Success() + { + var picker = new CommandLineObjectPicker(new[] { "Catalogue", "lolzeeeyeahyeah" }, GetActivator()); + Assert.AreEqual(typeof(Catalogue), picker[0].Type); - [Test] - public void Test_NewObjectCommand_Success() - { - var picker = new CommandLineObjectPicker(new[] {"Catalogue","lolzeeeyeahyeah"}, GetActivator()); - Assert.AreEqual(typeof(Catalogue),picker[0].Type); + Assert.DoesNotThrow(() => GetInvoker().ExecuteCommand(typeof(ExecuteCommandNewObject), picker)); - Assert.DoesNotThrow(() => GetInvoker().ExecuteCommand(typeof(ExecuteCommandNewObject),picker)); - - Assert.Contains("lolzeeeyeahyeah",RepositoryLocator.CatalogueRepository.GetAllObjects().Select(c=>c.Name).ToArray()); - } + Assert.Contains("lolzeeeyeahyeah", + RepositoryLocator.CatalogueRepository.GetAllObjects().Select(c => c.Name).ToArray()); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/CommandExecution/TestExecuteCommandSet.cs b/Rdmp.Core.Tests/CommandExecution/TestExecuteCommandSet.cs index ef40491646..f878ddfe6f 100644 --- a/Rdmp.Core.Tests/CommandExecution/TestExecuteCommandSet.cs +++ b/Rdmp.Core.Tests/CommandExecution/TestExecuteCommandSet.cs @@ -4,73 +4,74 @@ // RDMP is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. // You should have received a copy of the GNU General Public License along with RDMP. If not, see . 
-using System; -using System.Collections.Generic; -using System.Text; -using NPOI.Util; using NUnit.Framework; using Rdmp.Core.CommandExecution.AtomicCommands; using Rdmp.Core.CommandLine.Interactive.Picking; using Rdmp.Core.Curation.Data; using Rdmp.Core.Curation.Data.DataLoad; -using Tests.Common; -namespace Rdmp.Core.Tests.CommandExecution +namespace Rdmp.Core.Tests.CommandExecution; + +internal class TestExecuteCommandSet : CommandCliTests { - class TestExecuteCommandSet : CommandCliTests + [Test] + public void Test_CatalogueDescription_Normal() { - [Test] - public void Test_CatalogueDescription_Normal() - { - var cata = new Catalogue(Repository.CatalogueRepository, "Bob"); - - GetInvoker().ExecuteCommand(typeof(ExecuteCommandSet),new CommandLineObjectPicker(new []{"Catalogue:" + cata.ID,"Description","Some long description"}, GetActivator())); + var cata = new Catalogue(Repository.CatalogueRepository, "Bob"); - cata.RevertToDatabaseState(); - Assert.AreEqual("Some long description",cata.Description); + GetInvoker().ExecuteCommand(typeof(ExecuteCommandSet), new CommandLineObjectPicker(new[] + { + $"Catalogue:{cata.ID}", "Description", "Some long description" + }, GetActivator())); - } + cata.RevertToDatabaseState(); + Assert.AreEqual("Some long description", cata.Description); + } - [Test] - public void Test_CatalogueDescription_Null() + [Test] + public void Test_CatalogueDescription_Null() + { + var cata = new Catalogue(Repository.CatalogueRepository, "Bob") { - var cata = new Catalogue(Repository.CatalogueRepository, "Bob"); - cata.Description = "something cool"; - cata.SaveToDatabase(); + Description = "something cool" + }; + cata.SaveToDatabase(); - GetInvoker().ExecuteCommand(typeof(ExecuteCommandSet),new CommandLineObjectPicker(new []{"Catalogue:" + cata.ID,"Description","NULL"}, GetActivator())); - - cata.RevertToDatabaseState(); - Assert.IsNull(cata.Description); + GetInvoker().ExecuteCommand(typeof(ExecuteCommandSet), new CommandLineObjectPicker(new[] + { + $"Catalogue:{cata.ID}", "Description", "NULL" + }, GetActivator())); - } + cata.RevertToDatabaseState(); + Assert.IsNull(cata.Description); + } - [Test] - public void TestExecuteCommandSet_SetArrayValueFromCLI() - { - var pta = WhenIHaveA(); - pta.SetType(typeof(TableInfo[])); - pta.Name = "TablesToIsolate"; - pta.SaveToDatabase(); + [Test] + public void TestExecuteCommandSet_SetArrayValueFromCLI() + { + var pta = WhenIHaveA(); + pta.SetType(typeof(TableInfo[])); + pta.Name = "TablesToIsolate"; + pta.SaveToDatabase(); - var t1 = WhenIHaveA(); - var t2 = WhenIHaveA(); - var t3 = WhenIHaveA(); - var t4 = WhenIHaveA(); + var t1 = WhenIHaveA(); + var t2 = WhenIHaveA(); + var t3 = WhenIHaveA(); + var t4 = WhenIHaveA(); - var ids = t1.ID + "," + t2.ID + "," + t3.ID + "," + t4.ID; + var ids = $"{t1.ID},{t2.ID},{t3.ID},{t4.ID}"; - Assert.IsNull(pta.Value); - Assert.IsNull(pta.GetValueAsSystemType()); + Assert.IsNull(pta.Value); + Assert.IsNull(pta.GetValueAsSystemType()); - GetInvoker().ExecuteCommand(typeof(ExecuteCommandSet),new CommandLineObjectPicker(new []{"ProcessTaskArgument:TablesToIsolate" ,"Value",ids}, GetActivator())); + GetInvoker().ExecuteCommand(typeof(ExecuteCommandSet), + new CommandLineObjectPicker(new[] { "ProcessTaskArgument:TablesToIsolate", "Value", ids }, GetActivator())); - Assert.AreEqual(ids,pta.Value); + Assert.AreEqual(ids, pta.Value); - Assert.Contains(t1,(TableInfo[])pta.GetValueAsSystemType()); - Assert.Contains(t2,(TableInfo[])pta.GetValueAsSystemType()); - 
Assert.Contains(t3,(TableInfo[])pta.GetValueAsSystemType()); - Assert.Contains(t4,(TableInfo[])pta.GetValueAsSystemType()); - } + Assert.Contains(t1, (TableInfo[])pta.GetValueAsSystemType()); + Assert.Contains(t2, (TableInfo[])pta.GetValueAsSystemType()); + Assert.Contains(t3, (TableInfo[])pta.GetValueAsSystemType()); + Assert.Contains(t4, (TableInfo[])pta.GetValueAsSystemType()); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/CommandExecution/TestExecuteCommandSetUserSetting.cs b/Rdmp.Core.Tests/CommandExecution/TestExecuteCommandSetUserSetting.cs index 7b974462f4..e95324837e 100644 --- a/Rdmp.Core.Tests/CommandExecution/TestExecuteCommandSetUserSetting.cs +++ b/Rdmp.Core.Tests/CommandExecution/TestExecuteCommandSetUserSetting.cs @@ -7,55 +7,53 @@ using NUnit.Framework; using Rdmp.Core.CommandExecution.AtomicCommands; using Rdmp.Core.CommandLine.Interactive.Picking; -using ReusableLibraryCode.Checks; -using ReusableLibraryCode.Settings; -using System; -using System.Collections.Generic; -using System.Text; +using Rdmp.Core.ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Settings; -namespace Rdmp.Core.Tests.CommandExecution +namespace Rdmp.Core.Tests.CommandExecution; + +internal class TestExecuteCommandSetUserSetting : CommandCliTests { - class TestExecuteCommandSetUserSetting : CommandCliTests + [Test] + public void Test_CatalogueDescription_Normal() + { + UserSettings.Wait5SecondsAfterStartupUI = false; + + GetInvoker().ExecuteCommand(typeof(ExecuteCommandSetUserSetting), + new CommandLineObjectPicker(new[] { "Wait5SecondsAfterStartupUI", "true" }, GetActivator())); + + Assert.IsTrue(UserSettings.Wait5SecondsAfterStartupUI); + + GetInvoker().ExecuteCommand(typeof(ExecuteCommandSetUserSetting), + new CommandLineObjectPicker(new[] { "Wait5SecondsAfterStartupUI", "false" }, GetActivator())); + + Assert.IsFalse(UserSettings.Wait5SecondsAfterStartupUI); + } + + [Test] + public void TestSettingErrorCodeValue_InvalidValue() + { + var cmd = new ExecuteCommandSetUserSetting(GetActivator(), "R001", "foo"); + Assert.IsTrue(cmd.IsImpossible); + Assert.AreEqual(cmd.ReasonCommandImpossible, + "Invalid enum value. When setting an error code you must supply a value of one of :Success,Warning,Fail"); + } + + [Test] + public void TestSettingErrorCodeValue_Success() { - [Test] - public void Test_CatalogueDescription_Normal() - { - UserSettings.Wait5SecondsAfterStartupUI = false; - - GetInvoker().ExecuteCommand(typeof(ExecuteCommandSetUserSetting),new CommandLineObjectPicker(new []{ "Wait5SecondsAfterStartupUI", "true"}, GetActivator())); - - Assert.IsTrue(UserSettings.Wait5SecondsAfterStartupUI); - - GetInvoker().ExecuteCommand(typeof(ExecuteCommandSetUserSetting),new CommandLineObjectPicker(new []{ "Wait5SecondsAfterStartupUI", "false"}, GetActivator())); - - Assert.IsFalse(UserSettings.Wait5SecondsAfterStartupUI); - - } - - [Test] - public void TestSettingErrorCodeValue_InvalidValue() - { - var cmd = new ExecuteCommandSetUserSetting(GetActivator(), "R001", "foo"); - Assert.IsTrue(cmd.IsImpossible); - Assert.AreEqual(cmd.ReasonCommandImpossible, "Invalid enum value. 
When setting an error code you must supply a value of one of :Success,Warning,Fail"); - } - - [Test] - public void TestSettingErrorCodeValue_Success() - { - Assert.AreEqual("R001", ErrorCodes.ExistingExtractionTableInDatabase.Code); - var before = UserSettings.GetErrorReportingLevelFor(ErrorCodes.ExistingExtractionTableInDatabase); - Assert.AreNotEqual(CheckResult.Success, before); - - var cmd = new ExecuteCommandSetUserSetting(GetActivator(), "R001", "Success"); - Assert.IsFalse(cmd.IsImpossible,cmd.ReasonCommandImpossible); - cmd.Execute(); - - var after = UserSettings.GetErrorReportingLevelFor(ErrorCodes.ExistingExtractionTableInDatabase); - Assert.AreEqual(CheckResult.Success, after); - - //reset the original state of the system (the default) - UserSettings.SetErrorReportingLevelFor(ErrorCodes.ExistingExtractionTableInDatabase,before); - } + Assert.AreEqual("R001", ErrorCodes.ExistingExtractionTableInDatabase.Code); + var before = UserSettings.GetErrorReportingLevelFor(ErrorCodes.ExistingExtractionTableInDatabase); + Assert.AreNotEqual(CheckResult.Success, before); + + var cmd = new ExecuteCommandSetUserSetting(GetActivator(), "R001", "Success"); + Assert.IsFalse(cmd.IsImpossible, cmd.ReasonCommandImpossible); + cmd.Execute(); + + var after = UserSettings.GetErrorReportingLevelFor(ErrorCodes.ExistingExtractionTableInDatabase); + Assert.AreEqual(CheckResult.Success, after); + + //reset the original state of the system (the default) + UserSettings.SetErrorReportingLevelFor(ErrorCodes.ExistingExtractionTableInDatabase, before); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/CommandExecution/TestStartup.cs b/Rdmp.Core.Tests/CommandExecution/TestStartup.cs index 7983d4b281..c3d46281b9 100644 --- a/Rdmp.Core.Tests/CommandExecution/TestStartup.cs +++ b/Rdmp.Core.Tests/CommandExecution/TestStartup.cs @@ -5,21 +5,18 @@ // You should have received a copy of the GNU General Public License along with RDMP. If not, see . using NUnit.Framework; -using Rdmp.Core.Startup; -using ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Checks; using Tests.Common; -namespace Rdmp.Core.Tests.CommandExecution +namespace Rdmp.Core.Tests.CommandExecution; + +internal class TestStartup : UnitTests { - class TestStartup : UnitTests + [Test] + public void TestStartupWithMemoryRepository() { - [Test] - public void TestStartupWithMemoryRepository() - { - var s = new Rdmp.Core.Startup.Startup(new EnvironmentInfo(),RepositoryLocator); - Assert.DoesNotThrow(()=>s.DoStartup(new ThrowImmediatelyCheckNotifier())); - } - + var s = new Startup.Startup(RepositoryLocator); + Assert.DoesNotThrow(() => s.DoStartup(ThrowImmediatelyCheckNotifier.Quiet)); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/CommandLine/AbstractBaseRunnerTests.cs b/Rdmp.Core.Tests/CommandLine/AbstractBaseRunnerTests.cs index 28bddaa225..1b0dccbbac 100644 --- a/Rdmp.Core.Tests/CommandLine/AbstractBaseRunnerTests.cs +++ b/Rdmp.Core.Tests/CommandLine/AbstractBaseRunnerTests.cs @@ -4,177 +4,166 @@ // RDMP is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. // You should have received a copy of the GNU General Public License along with RDMP. If not, see . 
-using MapsDirectlyToDatabaseTable; using NUnit.Framework; using Rdmp.Core.CommandLine.Runners; using Rdmp.Core.Curation.Data; using Rdmp.Core.DataExport.Data; using Rdmp.Core.DataFlowPipeline; using Rdmp.Core.Repositories; -using ReusableLibraryCode.Checks; -using ReusableLibraryCode.Progress; using System.Collections.Generic; using System.Linq; +using Rdmp.Core.MapsDirectlyToDatabaseTable; +using Rdmp.Core.ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Progress; using Tests.Common; -namespace Rdmp.Core.Tests.CommandLine +namespace Rdmp.Core.Tests.CommandLine; + +public class AbstractBaseRunnerTests : UnitTests { - public class AbstractBaseRunnerTests : UnitTests + [SetUp] + public void CleanRemnants() + { + foreach (var o in Repository.GetAllObjectsInDatabase()) + o.DeleteInDatabase(); + } + + [Test] + public void GetObjectFromCommandLineString_CatalogueByID() + { + var c = WhenIHaveA(); + WhenIHaveA(); + WhenIHaveA(); + var r = new TestRunner(); + Assert.AreEqual(c, TestRunner.GetObjectFromCommandLineString(RepositoryLocator, c.ID.ToString())); + } + + [Test] + public void GetObjectFromCommandLineString_CatalogueByPattern() + { + var c = WhenIHaveA(); + c.Name = "gogogo"; + c.SaveToDatabase(); + + WhenIHaveA(); + WhenIHaveA(); + var r = new TestRunner(); + Assert.AreEqual(c, TestRunner.GetObjectFromCommandLineString(RepositoryLocator, "Catalogue:*go*")); + } + + [Test] + public void GetObjectFromCommandLineString_ProjectByID() + { + var c = WhenIHaveA(); + WhenIHaveA(); + WhenIHaveA(); + var r = new TestRunner(); + Assert.AreEqual(c, TestRunner.GetObjectFromCommandLineString(RepositoryLocator, c.ID.ToString())); + } + + [Test] + public void GetObjectFromCommandLineString_ProjectByPattern() + { + var c = WhenIHaveA(); + c.Name = "gogogo"; + c.SaveToDatabase(); + + WhenIHaveA(); + WhenIHaveA(); + var r = new TestRunner(); + Assert.AreEqual(c, TestRunner.GetObjectFromCommandLineString(RepositoryLocator, "Project:*go*")); + } + + /// + /// Tests that things the user might enter for a parameter (or default parameter values specified in RDMP + /// are going to be interpreted as null correctly + /// + /// + [TestCase(null)] + [TestCase("")] + [TestCase("0")] + [TestCase("null")] + public void GetObjectFromCommandLineString_Null(string expression) + { + var c = WhenIHaveA(); + c.Name = "gogogo"; + c.SaveToDatabase(); + + WhenIHaveA(); + WhenIHaveA(); + var r = new TestRunner(); + Assert.IsNull(TestRunner.GetObjectFromCommandLineString(RepositoryLocator, expression)); + } + + /// + /// This test is for the IEnumerable version + /// + /// + [TestCase(null)] + [TestCase("")] + [TestCase("0")] + [TestCase("null")] + public void GetObjectsFromCommandLineString_Null(string expression) { - [OneTimeSetUp] - public void SetupMef() - { - SetupMEF(); - } - - [SetUp] - public void CleanRemnants() - { - foreach (var o in Repository.GetAllObjectsInDatabase()) - o.DeleteInDatabase(); - } - - [Test] - public void GetObjectFromCommandLineString_CatalogueByID() - { - var c = WhenIHaveA(); - WhenIHaveA(); - WhenIHaveA(); - var r = new TestRunner(); - Assert.AreEqual(c,r.GetObjectFromCommandLineString(RepositoryLocator,c.ID.ToString())); - } - - [Test] - public void GetObjectFromCommandLineString_CatalogueByPattern() - { - var c = WhenIHaveA(); - c.Name = "gogogo"; - c.SaveToDatabase(); - - WhenIHaveA(); - WhenIHaveA(); - var r = new TestRunner(); - Assert.AreEqual(c, r.GetObjectFromCommandLineString(RepositoryLocator, "Catalogue:*go*")); - } - - [Test] - public void 
GetObjectFromCommandLineString_ProjectByID() - { - var c = WhenIHaveA(); - WhenIHaveA(); - WhenIHaveA(); - var r = new TestRunner(); - Assert.AreEqual(c, r.GetObjectFromCommandLineString(RepositoryLocator, c.ID.ToString())); - } - - [Test] - public void GetObjectFromCommandLineString_ProjectByPattern() - { - var c = WhenIHaveA(); - c.Name = "gogogo"; - c.SaveToDatabase(); - - WhenIHaveA(); - WhenIHaveA(); - var r = new TestRunner(); - Assert.AreEqual(c, r.GetObjectFromCommandLineString(RepositoryLocator, "Project:*go*")); - } - - /// - /// Tests that things the user might enter for a parameter (or default parameter values specified in RDMP - /// are going to be interpreted as null correctly - /// - /// - [TestCase(null)] - [TestCase("")] - [TestCase("0")] - [TestCase("null")] - public void GetObjectFromCommandLineString_Null(string expression) - { - var c = WhenIHaveA(); - c.Name = "gogogo"; - c.SaveToDatabase(); - - WhenIHaveA(); - WhenIHaveA(); - var r = new TestRunner(); - Assert.IsNull(r.GetObjectFromCommandLineString(RepositoryLocator, expression)); - } - - /// - /// This test is for the IEnumerable version - /// - /// - [TestCase(null)] - [TestCase("")] - [TestCase("0")] - [TestCase("null")] - public void GetObjectsFromCommandLineString_Null(string expression) - { - var c = WhenIHaveA(); - c.Name = "gogogo"; - c.SaveToDatabase(); - - WhenIHaveA(); - WhenIHaveA(); - var r = new TestRunner(); - Assert.IsEmpty(r.GetObjectsFromCommandLineString(RepositoryLocator, expression)); - } - - - [Test] - public void GetObjectsFromCommandLineString_CatalogueByID() - { - var c = WhenIHaveA(); - var c2 = WhenIHaveA(); - WhenIHaveA(); - var r = new TestRunner(); - - var results = r.GetObjectsFromCommandLineString(RepositoryLocator,$"{c.ID},{c2.ID}").ToArray(); - - Assert.AreEqual(2, results.Length); - Assert.AreSame(c, results[0]); - Assert.AreSame(c2, results[1]); - } - - [Test] - public void GetObjectsFromCommandLineString_CatalogueByPattern() - { - var c = WhenIHaveA(); - c.Name = "go long"; - c.SaveToDatabase(); - - var c2 = WhenIHaveA(); - c2.Name = "go hard"; - c2.SaveToDatabase(); - - WhenIHaveA(); - - var r = new TestRunner(); - var results = r.GetObjectsFromCommandLineString(RepositoryLocator, "Catalogue:*go*").ToArray(); - - Assert.AreEqual(2, results.Length); - Assert.Contains(c,results); - Assert.Contains(c2, results); - } - - class TestRunner : Runner - { - new public T GetObjectFromCommandLineString(IRDMPPlatformRepositoryServiceLocator locator, string arg) where T : IMapsDirectlyToDatabaseTable - { - return base.GetObjectFromCommandLineString(locator, arg); - } - - new public IEnumerable GetObjectsFromCommandLineString(IRDMPPlatformRepositoryServiceLocator locator, string arg) where T : IMapsDirectlyToDatabaseTable - { - return base.GetObjectsFromCommandLineString(locator, arg); - } - - public override int Run(IRDMPPlatformRepositoryServiceLocator repositoryLocator, IDataLoadEventListener listener, ICheckNotifier checkNotifier, GracefulCancellationToken token) - { - - return 0; - } - } + var c = WhenIHaveA(); + c.Name = "gogogo"; + c.SaveToDatabase(); + + WhenIHaveA(); + WhenIHaveA(); + var r = new TestRunner(); + Assert.IsEmpty(TestRunner.GetObjectsFromCommandLineString(RepositoryLocator, expression)); + } + + + [Test] + public void GetObjectsFromCommandLineString_CatalogueByID() + { + var c = WhenIHaveA(); + var c2 = WhenIHaveA(); + WhenIHaveA(); + var r = new TestRunner(); + + var results = TestRunner.GetObjectsFromCommandLineString(RepositoryLocator, $"{c.ID},{c2.ID}") + 
.ToArray(); + + Assert.AreEqual(2, results.Length); + Assert.AreSame(c, results[0]); + Assert.AreSame(c2, results[1]); + } + + [Test] + public void GetObjectsFromCommandLineString_CatalogueByPattern() + { + var c = WhenIHaveA(); + c.Name = "go long"; + c.SaveToDatabase(); + + var c2 = WhenIHaveA(); + c2.Name = "go hard"; + c2.SaveToDatabase(); + + WhenIHaveA(); + + var r = new TestRunner(); + var results = TestRunner.GetObjectsFromCommandLineString(RepositoryLocator, "Catalogue:*go*") + .ToArray(); + + Assert.AreEqual(2, results.Length); + Assert.Contains(c, results); + Assert.Contains(c2, results); + } + + private class TestRunner : Runner + { + public new static T GetObjectFromCommandLineString(IRDMPPlatformRepositoryServiceLocator locator, string arg) + where T : IMapsDirectlyToDatabaseTable => Runner.GetObjectFromCommandLineString(locator, arg); + + public new static IEnumerable + GetObjectsFromCommandLineString(IRDMPPlatformRepositoryServiceLocator locator, string arg) + where T : IMapsDirectlyToDatabaseTable => Runner.GetObjectsFromCommandLineString(locator, arg); + + public override int Run(IRDMPPlatformRepositoryServiceLocator repositoryLocator, + IDataLoadEventListener listener, ICheckNotifier checkNotifier, GracefulCancellationToken token) => 0; } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/CommandLine/AutomationLoopTests/EndToEndCacheTest.cs b/Rdmp.Core.Tests/CommandLine/AutomationLoopTests/EndToEndCacheTest.cs index 70527f7ccb..f214a8bafc 100644 --- a/Rdmp.Core.Tests/CommandLine/AutomationLoopTests/EndToEndCacheTest.cs +++ b/Rdmp.Core.Tests/CommandLine/AutomationLoopTests/EndToEndCacheTest.cs @@ -17,96 +17,104 @@ using Rdmp.Core.Curation.Data.Cache; using Rdmp.Core.Curation.Data.DataLoad; using Rdmp.Core.DataFlowPipeline; -using ReusableLibraryCode.Checks; -using ReusableLibraryCode.Progress; +using Rdmp.Core.Repositories; +using Rdmp.Core.ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Progress; using Tests.Common; using Tests.Common.Helpers; -namespace Rdmp.Core.Tests.CommandLine.AutomationLoopTests +namespace Rdmp.Core.Tests.CommandLine.AutomationLoopTests; + +public class EndToEndCacheTest : DatabaseTests { - public class EndToEndCacheTest : DatabaseTests + private Catalogue _cata; + private LoadMetadata _lmd; + private LoadProgress _lp; + private CacheProgress _cp; + + private const int NumDaysToCache = 5; + + private TestDataPipelineAssembler _testPipeline; + private LoadDirectory _LoadDirectory; + + [SetUp] + protected override void SetUp() { + base.SetUp(); - private Catalogue _cata; - private LoadMetadata _lmd; - private LoadProgress _lp; - private CacheProgress _cp; + MEF.AddTypeToCatalogForTesting(typeof(TestDataWriter)); + MEF.AddTypeToCatalogForTesting(typeof(TestDataInventor)); - private TestDataPipelineAssembler _testPipeline; - private LoadDirectory _LoadDirectory; + _lmd = new LoadMetadata(CatalogueRepository, "Ive got a lovely bunch o' coconuts"); + _LoadDirectory = + LoadDirectory.CreateDirectoryStructure(new DirectoryInfo(TestContext.CurrentContext.TestDirectory), + @"EndToEndCacheTest", true); + _lmd.LocationOfFlatFiles = _LoadDirectory.RootPath.FullName; + _lmd.SaveToDatabase(); - const int NumDaysToCache = 5; + Clear(_LoadDirectory); - [SetUp] - protected override void SetUp() + _cata = new Catalogue(CatalogueRepository, "EndToEndCacheTest") { - base.SetUp(); + LoadMetadata_ID = _lmd.ID + }; + _cata.SaveToDatabase(); - RepositoryLocator.CatalogueRepository.MEF.AddTypeToCatalogForTesting(typeof(TestDataWriter)); - 
RepositoryLocator.CatalogueRepository.MEF.AddTypeToCatalogForTesting(typeof(TestDataInventor)); - - _lmd = new LoadMetadata(CatalogueRepository, "Ive got a lovely bunch o' coconuts"); - _LoadDirectory = LoadDirectory.CreateDirectoryStructure(new DirectoryInfo(TestContext.CurrentContext.TestDirectory), @"EndToEndCacheTest", true); - _lmd.LocationOfFlatFiles = _LoadDirectory.RootPath.FullName; - _lmd.SaveToDatabase(); + _lp = new LoadProgress(CatalogueRepository, _lmd); + _cp = new CacheProgress(CatalogueRepository, _lp); - Clear(_LoadDirectory); + _lp.OriginDate = new DateTime(2001, 1, 1); + _lp.SaveToDatabase(); - _cata = new Catalogue(CatalogueRepository, "EndToEndCacheTest"); - _cata.LoadMetadata_ID = _lmd.ID; - _cata.SaveToDatabase(); + _testPipeline = + new TestDataPipelineAssembler($"EndToEndCacheTestPipeline{Guid.NewGuid()}", CatalogueRepository); + _testPipeline.ConfigureCacheProgressToUseThePipeline(_cp); - _lp = new LoadProgress(CatalogueRepository, _lmd); - _cp = new CacheProgress(CatalogueRepository, _lp); - - _lp.OriginDate = new DateTime(2001,1,1); - _lp.SaveToDatabase(); + _cp.CacheFillProgress = DateTime.Now.AddDays(-NumDaysToCache); + _cp.SaveToDatabase(); - _testPipeline = new TestDataPipelineAssembler("EndToEndCacheTestPipeline" + Guid.NewGuid(),CatalogueRepository); - _testPipeline.ConfigureCacheProgressToUseThePipeline(_cp); + _cp.SaveToDatabase(); + } - _cp.CacheFillProgress = DateTime.Now.AddDays(-NumDaysToCache); - _cp.SaveToDatabase(); - _cp.SaveToDatabase(); - } + [Test] + public void FireItUpManually() + { + MEF.AddTypeToCatalogForTesting(typeof(TestDataWriter)); + MEF.AddTypeToCatalogForTesting(typeof(TestDataInventor)); + var cachingHost = new CachingHost(CatalogueRepository) + { + CacheProgress = _cp + }; + + cachingHost.Start(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); + + // should be numDaysToCache days in cache + Assert.AreEqual(NumDaysToCache, _LoadDirectory.Cache.GetFiles("*.csv").Length); - [Test] - public void FireItUpManually() + // make sure each file is named as expected + var cacheFiles = _LoadDirectory.Cache.GetFiles().Select(fi => fi.Name).ToArray(); + for (var i = -NumDaysToCache; i < 0; i++) { - RepositoryLocator.CatalogueRepository.MEF.AddTypeToCatalogForTesting(typeof(TestDataWriter)); - RepositoryLocator.CatalogueRepository.MEF.AddTypeToCatalogForTesting(typeof(TestDataInventor)); - - var cachingHost = new CachingHost(CatalogueRepository); - - cachingHost.CacheProgress = _cp; - cachingHost.Start(new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()); - - // should be numDaysToCache days in cache - Assert.AreEqual(NumDaysToCache, _LoadDirectory.Cache.GetFiles("*.csv").Count()); - - // make sure each file is named as expected - var cacheFiles = _LoadDirectory.Cache.GetFiles().Select(fi => fi.Name).ToArray(); - for (var i = -NumDaysToCache; i < 0; i++) - { - var filename = DateTime.Now.AddDays(i).ToString("yyyyMMdd") + ".csv"; - Assert.IsTrue(cacheFiles.Contains(filename), filename + " not found"); - } + var filename = $"{DateTime.Now.AddDays(i):yyyyMMdd}.csv"; + Assert.IsTrue(cacheFiles.Contains(filename), filename + " not found"); } + } - [Test] - public void RunEndToEndCacheTest() + [Test] + public void RunEndToEndCacheTest() + { + var t = Task.Factory.StartNew(() => { - var t = Task.Factory.StartNew(() => - { - Assert.AreEqual(0, _LoadDirectory.Cache.GetFiles("*.csv").Count()); + Assert.AreEqual(0, _LoadDirectory.Cache.GetFiles("*.csv").Length); - var auto = new CacheRunner(new 
CacheOptions(){CacheProgress = _cp.ID.ToString(), Command = CommandLineActivity.run}); - auto.Run(RepositoryLocator, new ThrowImmediatelyDataLoadEventListener(),new ThrowImmediatelyCheckNotifier(), new GracefulCancellationToken()); - }); + var auto = new CacheRunner(new CacheOptions + { CacheProgress = _cp.ID.ToString(), Command = CommandLineActivity.run }); + auto.Run(RepositoryLocator, ThrowImmediatelyDataLoadEventListener.Quiet, + ThrowImmediatelyCheckNotifier.Quiet, new GracefulCancellationToken()); + }); - Assert.True(t.Wait(60000)); - } + Assert.True(t.Wait(60000)); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/CommandLine/AutomationLoopTests/EndToEndDLECacheTest.cs b/Rdmp.Core.Tests/CommandLine/AutomationLoopTests/EndToEndDLECacheTest.cs index 7b7cfa3aee..59bbe968c4 100644 --- a/Rdmp.Core.Tests/CommandLine/AutomationLoopTests/EndToEndDLECacheTest.cs +++ b/Rdmp.Core.Tests/CommandLine/AutomationLoopTests/EndToEndDLECacheTest.cs @@ -8,78 +8,83 @@ using System.IO; using System.Linq; using NUnit.Framework; -using Rdmp.Core.Curation; using Rdmp.Core.Curation.Data; using Rdmp.Core.Curation.Data.Cache; using Rdmp.Core.Curation.Data.DataLoad; using Rdmp.Core.DataLoad.Engine.DataProvider.FromCache; +using Rdmp.Core.Repositories; using Tests.Common.Helpers; using Tests.Common.Scenarios; -namespace Rdmp.Core.Tests.CommandLine.AutomationLoopTests +namespace Rdmp.Core.Tests.CommandLine.AutomationLoopTests; + +public class EndToEndDLECacheTest : TestsRequiringADle { - public class EndToEndDLECacheTest:TestsRequiringADle + [Test] + public void RunEndToEndDLECacheTest() { - [Test] - public void RunEndToEndDLECacheTest() + MEF.AddTypeToCatalogForTesting(typeof(TestDataWriter)); + MEF.AddTypeToCatalogForTesting(typeof(TestDataInventor)); + + const int timeoutInMilliseconds = 120000; + + var lmd = TestLoadMetadata; + + var lp = new LoadProgress(CatalogueRepository, lmd) { - RepositoryLocator.CatalogueRepository.MEF.AddTypeToCatalogForTesting(typeof(TestDataWriter)); - RepositoryLocator.CatalogueRepository.MEF.AddTypeToCatalogForTesting(typeof(TestDataInventor)); + DataLoadProgress = new DateTime(2001, 1, 1), + DefaultNumberOfDaysToLoadEachTime = 10 + }; + lp.SaveToDatabase(); - int timeoutInMilliseconds = 120000; - - var lmd = TestLoadMetadata; + var cp = new CacheProgress(CatalogueRepository, lp) + { + CacheFillProgress = new DateTime(2001, 1, 11) //10 days available to load + }; + cp.SaveToDatabase(); - LoadProgress lp = new LoadProgress(CatalogueRepository,lmd); - lp.DataLoadProgress = new DateTime(2001,1,1); - lp.DefaultNumberOfDaysToLoadEachTime = 10; - lp.SaveToDatabase(); + var assembler = new TestDataPipelineAssembler("RunEndToEndDLECacheTest pipe", CatalogueRepository); + assembler.ConfigureCacheProgressToUseThePipeline(cp); - var cp = new CacheProgress(CatalogueRepository, lp); - cp.CacheFillProgress = new DateTime(2001,1,11); //10 days available to load - cp.SaveToDatabase(); + //setup the cache process task + var pt = new ProcessTask(CatalogueRepository, lmd, LoadStage.GetFiles) + { + Path = typeof(BasicCacheDataProvider).FullName, + ProcessTaskType = ProcessTaskType.DataProvider + }; + pt.SaveToDatabase(); + pt.CreateArgumentsForClassIfNotExists(); - var assembler = new TestDataPipelineAssembler("RunEndToEndDLECacheTest pipe", CatalogueRepository); - assembler.ConfigureCacheProgressToUseThePipeline(cp); + var attacher = lmd.ProcessTasks.Single(p => p.ProcessTaskType == ProcessTaskType.Attacher); + var patternArgument = 
(ProcessTaskArgument)attacher.GetAllArguments().Single(a => a.Name.Equals("FilePattern")); + patternArgument.SetValue("*.csv"); + patternArgument.SaveToDatabase(); - //setup the cache process task - var pt = new ProcessTask(CatalogueRepository, lmd, LoadStage.GetFiles); - pt.Path = typeof (BasicCacheDataProvider).FullName; - pt.ProcessTaskType = ProcessTaskType.DataProvider; - pt.SaveToDatabase(); - pt.CreateArgumentsForClassIfNotExists(); + //take the forLoading file + var csvFile = CreateFileInForLoading("bob.csv", 10, new Random(5000)); - var attacher = lmd.ProcessTasks.Single(p => p.ProcessTaskType == ProcessTaskType.Attacher); - var patternArgument = (ProcessTaskArgument)attacher.GetAllArguments().Single(a => a.Name.Equals("FilePattern")); - patternArgument.SetValue("*.csv"); - patternArgument.SaveToDatabase(); + //and move it to the cache and give it a date in the range we expect for the cached data + csvFile.MoveTo(Path.Combine(LoadDirectory.Cache.FullName, "2001-01-09.csv")); - //take the forLoading file - var csvFile = CreateFileInForLoading("bob.csv",10,new Random(5000)); + RunDLE(timeoutInMilliseconds); - //and move it to the cache and give it a date in the range we expect for the cached data - csvFile.MoveTo(Path.Combine(LoadDirectory.Cache.FullName,"2001-01-09.csv")); - - RunDLE(timeoutInMilliseconds); + Assert.AreEqual(10, RowsNow - RowsBefore); - Assert.AreEqual(10,RowsNow - RowsBefore); + Assert.AreEqual(0, LoadDirectory.Cache.GetFiles().Length); + Assert.AreEqual(0, LoadDirectory.ForLoading.GetFiles().Length); + Assert.AreEqual(1, LoadDirectory.ForArchiving.GetFiles().Length); - Assert.AreEqual(0,LoadDirectory.Cache.GetFiles().Count()); - Assert.AreEqual(0, LoadDirectory.ForLoading.GetFiles().Count()); - Assert.AreEqual(1, LoadDirectory.ForArchiving.GetFiles().Count()); - - var archiveFile = LoadDirectory.ForArchiving.GetFiles()[0]; - Assert.AreEqual(".zip",archiveFile.Extension); + var archiveFile = LoadDirectory.ForArchiving.GetFiles()[0]; + Assert.AreEqual(".zip", archiveFile.Extension); - //load progress should be updated to the largest date in the cache (2001-01-09) - lp.RevertToDatabaseState(); - Assert.AreEqual(lp.DataLoadProgress, new DateTime(2001,01,09)); + //load progress should be updated to the largest date in the cache (2001-01-09) + lp.RevertToDatabaseState(); + Assert.AreEqual(lp.DataLoadProgress, new DateTime(2001, 01, 09)); - cp.DeleteInDatabase(); - lp.DeleteInDatabase(); + cp.DeleteInDatabase(); + lp.DeleteInDatabase(); - assembler.Destroy(); - } + assembler.Destroy(); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/CommandLine/AutomationLoopTests/EndToEndDLETest.cs b/Rdmp.Core.Tests/CommandLine/AutomationLoopTests/EndToEndDLETest.cs index 1e202d7dd8..0c36ecc140 100644 --- a/Rdmp.Core.Tests/CommandLine/AutomationLoopTests/EndToEndDLETest.cs +++ b/Rdmp.Core.Tests/CommandLine/AutomationLoopTests/EndToEndDLETest.cs @@ -6,7 +6,6 @@ using NUnit.Framework; using System; -using System.Data; using System.Linq; using FAnsi; using FAnsi.Discovery; @@ -15,51 +14,52 @@ using Tests.Common.Scenarios; using TypeGuesser; -namespace Rdmp.Core.Tests.CommandLine.AutomationLoopTests +namespace Rdmp.Core.Tests.CommandLine.AutomationLoopTests; + +public class EndToEndDLETest : TestsRequiringADle { - public class EndToEndDLETest : TestsRequiringADle + [Test] + public void RunEndToEndDLETest() { - [Test] - public void RunEndToEndDLETest() - { - const int timeoutInMilliseconds = 120000; - CreateFileInForLoading("loadmeee.csv",500,new Random(500)); - 
RunDLE(timeoutInMilliseconds); - } + const int timeoutInMilliseconds = 120000; + CreateFileInForLoading("loadmeee.csv", 500, new Random(500)); + RunDLE(timeoutInMilliseconds); + } - [TestCaseSource(typeof(All),nameof(All.DatabaseTypes))] - public void TestDle_DodgyColumnNames(DatabaseType dbType) - { - var db = GetCleanedServer(dbType); + [TestCaseSource(typeof(All), nameof(All.DatabaseTypes))] + public void TestDle_DodgyColumnNames(DatabaseType dbType) + { + var db = GetCleanedServer(dbType); - var tbl = db.CreateTable("Troll Select * Loll",new DatabaseColumnRequest[] - { - new DatabaseColumnRequest("group by",new DatabaseTypeRequest(typeof(string),100)){IsPrimaryKey = true}, - new DatabaseColumnRequest(",,,,",new DatabaseTypeRequest(typeof(string))), - }); + var tbl = db.CreateTable("Troll Select * Loll", new DatabaseColumnRequest[] + { + new("group by", new DatabaseTypeRequest(typeof(string), 100)) { IsPrimaryKey = true }, + new(",,,,", new DatabaseTypeRequest(typeof(string))) + }); - CreateFileInForLoading("Troll.csv", new string[] - { - "group by,\",,,,\"", - "fish,fishon" - }); + CreateFileInForLoading("Troll.csv", new string[] + { + "group by,\",,,,\"", + "fish,fishon" + }); - var cata = Import(tbl); - var lmd = new LoadMetadata(CatalogueRepository, nameof(TestDle_DodgyColumnNames)); - lmd.LocationOfFlatFiles = LoadDirectory.RootPath.FullName; - lmd.SaveToDatabase(); + var cata = Import(tbl); + var lmd = new LoadMetadata(CatalogueRepository, nameof(TestDle_DodgyColumnNames)) + { + LocationOfFlatFiles = LoadDirectory.RootPath.FullName + }; + lmd.SaveToDatabase(); - CreateFlatFileAttacher(lmd,"Troll.csv",cata.GetTableInfoList(false).Single()); + CreateFlatFileAttacher(lmd, "Troll.csv", cata.GetTableInfoList(false).Single()); - cata.LoadMetadata_ID = lmd.ID; - cata.SaveToDatabase(); + cata.LoadMetadata_ID = lmd.ID; + cata.SaveToDatabase(); - Assert.AreEqual(0,tbl.GetRowCount()); + Assert.AreEqual(0, tbl.GetRowCount()); - RunDLE(lmd,30000,true); + RunDLE(lmd, 30000, true); - Assert.AreEqual(1,tbl.GetRowCount()); - Assert.AreEqual("fishon",tbl.GetDataTable().Rows[0][",,,,"]); - } + Assert.AreEqual(1, tbl.GetRowCount()); + Assert.AreEqual("fishon", tbl.GetDataTable().Rows[0][",,,,"]); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/CommandLine/AutomationLoopTests/FictionalCache/Reading/TestDataCacheReader.cs b/Rdmp.Core.Tests/CommandLine/AutomationLoopTests/FictionalCache/Reading/TestDataCacheReader.cs index 84c02bb07c..fc07a17e9f 100644 --- a/Rdmp.Core.Tests/CommandLine/AutomationLoopTests/FictionalCache/Reading/TestDataCacheReader.cs +++ b/Rdmp.Core.Tests/CommandLine/AutomationLoopTests/FictionalCache/Reading/TestDataCacheReader.cs @@ -14,42 +14,35 @@ using Rdmp.Core.DataLoad; using Rdmp.Core.DataLoad.Engine.DataProvider.FromCache; using Rdmp.Core.DataLoad.Engine.Job; -using ReusableLibraryCode.Checks; -using ReusableLibraryCode.Progress; +using Rdmp.Core.ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Progress; -namespace Rdmp.Core.Tests.CommandLine.AutomationLoopTests.FictionalCache.Reading +namespace Rdmp.Core.Tests.CommandLine.AutomationLoopTests.FictionalCache.Reading; + +public class TestDataCacheReader : ICachedDataProvider { - public class TestDataCacheReader : ICachedDataProvider + public ILoadProgress LoadProgress { get; set; } + + public void LoadCompletedSoDispose(ExitCodeType exitCode, IDataLoadEventListener postLoadEventsListener) { - public ILoadProgress LoadProgress { get; set; } - - public void LoadCompletedSoDispose(ExitCodeType 
exitCode, IDataLoadEventListener postLoadEventsListener) - { - throw new NotImplementedException(); - } - - public void Check(ICheckNotifier notifier) - { - throw new NotImplementedException(); - } - - public void Initialize(ILoadDirectory directory, DiscoveredDatabase dbInfo) - { - throw new NotImplementedException(); - } - - public ExitCodeType Fetch(IDataLoadJob job, GracefulCancellationToken cancellationToken) - { - throw new NotImplementedException(); - } - - public CacheArchiveType CacheArchiveType { get; set; } - public string CacheDateFormat { get; set; } - public Type CacheLayoutType { get; set; } - public ILoadCachePathResolver CreateResolver(ILoadProgress loadProgress) - { - throw new NotImplementedException(); - } + throw new NotImplementedException(); + } + public void Check(ICheckNotifier notifier) + { + throw new NotImplementedException(); } -} + + public void Initialize(ILoadDirectory directory, DiscoveredDatabase dbInfo) + { + throw new NotImplementedException(); + } + + public ExitCodeType Fetch(IDataLoadJob job, GracefulCancellationToken cancellationToken) => + throw new NotImplementedException(); + + public CacheArchiveType CacheArchiveType { get; set; } + public string CacheDateFormat { get; set; } + public Type CacheLayoutType { get; set; } + public ILoadCachePathResolver CreateResolver(ILoadProgress loadProgress) => throw new NotImplementedException(); +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/CommandLine/CommandLineObjectPickerTests.cs b/Rdmp.Core.Tests/CommandLine/CommandLineObjectPickerTests.cs index 612bec0d65..5d652bf868 100644 --- a/Rdmp.Core.Tests/CommandLine/CommandLineObjectPickerTests.cs +++ b/Rdmp.Core.Tests/CommandLine/CommandLineObjectPickerTests.cs @@ -6,304 +6,295 @@ using System; using System.Linq; -using MapsDirectlyToDatabaseTable; using NUnit.Framework; using Rdmp.Core.CommandLine.Interactive.Picking; using Rdmp.Core.Curation.Data; +using Rdmp.Core.MapsDirectlyToDatabaseTable; using Rdmp.Core.Repositories; using Rdmp.Core.Repositories.Construction; -using Rdmp.Core.Startup; using Tests.Common; -namespace Rdmp.Core.Tests.CommandLine +namespace Rdmp.Core.Tests.CommandLine; + +internal class CommandLineObjectPickerTests : UnitTests { - class CommandLineObjectPickerTests : UnitTests + [SetUp] + protected override void SetUp() { + base.SetUp(); - [OneTimeSetUp] - protected override void OneTimeSetUp() - { - base.OneTimeSetUp(); - - SetupMEF(); - } + foreach (var c in Repository.GetAllObjects()) c.DeleteInDatabase(); + } - [SetUp] - protected override void SetUp() - { - base.SetUp(); - foreach(var c in Repository.GetAllObjects()) - { - c.DeleteInDatabase(); - } - } + [Test] + public void Test_RandomGarbage_GeneratesRawValueOnly() + { + const string str = "Shiver me timbers"; + var picker = new CommandLineObjectPicker(new[] { str }, GetActivator()); + + Assert.AreEqual(str, picker[0].RawValue); + Assert.IsNull(picker[0].DatabaseEntities); + Assert.IsNull(picker[0].Database); + Assert.IsNull(picker[0].Table); + Assert.IsNull(picker[0].Type); + } + [Test] + public void Test_PickCatalogueByID_PickOne() + { + var cata = WhenIHaveA(); - [Test] - public void Test_RandomGarbage_GeneratesRawValueOnly() - { - string str = $"Shiver me timbers"; - var picker = new CommandLineObjectPicker(new []{str}, GetActivator()); - - Assert.AreEqual(str,picker[0].RawValue); - Assert.IsNull(picker[0].DatabaseEntities); - Assert.IsNull(picker[0].Database); - Assert.IsNull(picker[0].Table); - Assert.IsNull(picker[0].Type); - } + var picker = new 
CommandLineObjectPicker(new[] { $"Catalogue:{cata.ID}" }, GetActivator()); - [Test] - public void Test_PickCatalogueByID_PickOne() - { - var cata = WhenIHaveA<Catalogue>(); + Assert.AreEqual(cata, picker[0].DatabaseEntities.Single()); - var picker = new CommandLineObjectPicker(new []{$"Catalogue:{cata.ID}"}, GetActivator()); - Assert.AreEqual(cata,picker[0].DatabaseEntities.Single()); + //specifying the same ID twice shouldn't return duplicate objects + picker = new CommandLineObjectPicker(new[] { $"Catalogue:{cata.ID},{cata.ID}" }, GetActivator()); - - //specifying the same ID twice shouldn't return duplicate objects - picker = new CommandLineObjectPicker(new []{$"Catalogue:{cata.ID},{cata.ID}"}, GetActivator()); + Assert.AreEqual(cata, picker[0].DatabaseEntities.Single()); + } - Assert.AreEqual(cata,picker[0].DatabaseEntities.Single()); - } + /// <summary> + /// Tests behaviour of picker when user passes an explicit empty string e.g. ./rdmp.exe DoSomething " " + /// </summary> + [TestCase(" ")] + [TestCase("\t")] + [TestCase("\r\n")] + public void Test_PickerForWhitespace(string val) + { + var picker = new CommandLineObjectPicker(new[] { val }, GetActivator()); - /// <summary> - /// Tests behaviour of picker when user passes an explicit empty string e.g. ./rdmp.exe DoSomething " " - /// </summary> - [TestCase(" ")] - [TestCase("\t")] - [TestCase("\r\n")] - public void Test_PickerForWhitespace(string val) - { - var picker = new CommandLineObjectPicker(new []{val }, GetActivator()); - - Assert.AreEqual(1,picker.Length); - - Assert.IsNull(picker[0].Database); - Assert.IsNull(picker[0].DatabaseEntities); - Assert.IsFalse(picker[0].ExplicitNull); - Assert.AreEqual(val,picker[0].RawValue); - Assert.IsNull(picker[0].Type); - - Assert.AreEqual(val,picker[0].GetValueForParameterOfType(typeof(string))); - Assert.IsTrue(picker.HasArgumentOfType(0, typeof(string))); - } - - [Test] - public void Test_PickCatalogueByID_PickTwo() - { - var cata1 = WhenIHaveA<Catalogue>(); - var cata2 = WhenIHaveA<Catalogue>(); + Assert.AreEqual(1, picker.Length); - var picker = new CommandLineObjectPicker(new []{$"Catalogue:{cata1.ID},{cata2.ID}"}, GetActivator()); + Assert.IsNull(picker[0].Database); + Assert.IsNull(picker[0].DatabaseEntities); + Assert.IsFalse(picker[0].ExplicitNull); + Assert.AreEqual(val, picker[0].RawValue); + Assert.IsNull(picker[0].Type); - Assert.AreEqual(2, picker[0].DatabaseEntities.Count); - Assert.Contains(cata1,picker[0].DatabaseEntities); - Assert.Contains(cata2,
picker[0].DatabaseEntities); - } + [Test] + public void Test_PickCatalogueByName_PickTwo() + { + var cata1 = WhenIHaveA(); + var cata2 = WhenIHaveA(); + var cata3 = WhenIHaveA(); - [Test] - public void TestPicker_TypeYieldsEmptyArrayOfObjects() - { - foreach(var cat in RepositoryLocator.CatalogueRepository.GetAllObjects()) - cat.DeleteInDatabase(); + cata1.Name = "lolzy"; + cata2.Name = "lolxy"; + cata3.Name = "trollolxy"; //does not match pattern - Assert.IsEmpty(RepositoryLocator.CatalogueRepository.GetAllObjects()); + cata1.SaveToDatabase(); + cata2.SaveToDatabase(); + cata3.SaveToDatabase(); - //when interpreting the string "Catalogue" for a command - var picker = new CommandLineObjectPicker(new []{"Catalogue" }, GetActivator()); + var picker = new CommandLineObjectPicker(new[] { $"Catalogue:lol*" }, GetActivator()); - //we can pick it as either a Catalogue or a collection of all the Catalogues - Assert.AreEqual(typeof(Catalogue),picker.Arguments.Single().Type); - Assert.IsEmpty(picker.Arguments.Single().DatabaseEntities); + Assert.AreEqual(2, picker[0].DatabaseEntities.Count); + Assert.Contains(cata1, picker[0].DatabaseEntities); + Assert.Contains(cata2, picker[0].DatabaseEntities); + } - //when interpretting as a Type we get Catalogue - Assert.IsTrue(picker.Arguments.First().HasValueOfType(typeof(Type))); - Assert.AreEqual(typeof(Catalogue),picker.Arguments.Single().GetValueForParameterOfType(typeof(Type))); + [Test] + public void TestPicker_TypeYieldsEmptyArrayOfObjects() + { + foreach (var cat in RepositoryLocator.CatalogueRepository.GetAllObjects()) + cat.DeleteInDatabase(); - //if it is looking for an ienumerable of objects - Assert.IsTrue(picker.Arguments.First().HasValueOfType(typeof(IMapsDirectlyToDatabaseTable[]))); - Assert.IsEmpty((IMapsDirectlyToDatabaseTable[])picker.Arguments.First().GetValueForParameterOfType(typeof(IMapsDirectlyToDatabaseTable[]))); + Assert.IsEmpty(RepositoryLocator.CatalogueRepository.GetAllObjects()); - Assert.IsTrue(picker.Arguments.First().HasValueOfType(typeof(Catalogue[]))); - Assert.IsEmpty(((Catalogue[])picker.Arguments.First().GetValueForParameterOfType(typeof(Catalogue[]))).ToArray()); + //when interpreting the string "Catalogue" for a command + var picker = new CommandLineObjectPicker(new[] { "Catalogue" }, GetActivator()); - } + //we can pick it as either a Catalogue or a collection of all the Catalogues + Assert.AreEqual(typeof(Catalogue), picker.Arguments.Single().Type); + Assert.IsEmpty(picker.Arguments.Single().DatabaseEntities); - [TestCase(typeof(PickDatabase))] - [TestCase(typeof(PickTable))] - [TestCase(typeof(PickObjectByID))] - [TestCase(typeof(PickObjectByName))] - public void Pickers_ShouldAllHaveValidExamples_MatchingRegex(Type pickerType) - { - var oc = new ObjectConstructor(); + //when interpretting as a Type we get Catalogue + Assert.IsTrue(picker.Arguments.First().HasValueOfType(typeof(Type))); + Assert.AreEqual(typeof(Catalogue), picker.Arguments.Single().GetValueForParameterOfType(typeof(Type))); - var mem = new MemoryDataExportRepository(); - mem.MEF = MEF; + //if it is looking for an ienumerable of objects + Assert.IsTrue(picker.Arguments.First().HasValueOfType(typeof(IMapsDirectlyToDatabaseTable[]))); + Assert.IsEmpty((IMapsDirectlyToDatabaseTable[])picker.Arguments.First() + .GetValueForParameterOfType(typeof(IMapsDirectlyToDatabaseTable[]))); - //create some objects that the examples can successfully reference - new Catalogue(mem.CatalogueRepository, "mycata1"); //ID = 1 - new Catalogue(mem.CatalogueRepository, 
"mycata2"); //ID = 2 - new Catalogue(mem.CatalogueRepository, "mycata3"); //ID = 3 + Assert.IsTrue(picker.Arguments.First().HasValueOfType(typeof(Catalogue[]))); + Assert.IsEmpty( + ((Catalogue[])picker.Arguments.First().GetValueForParameterOfType(typeof(Catalogue[]))).ToArray()); + } - PickObjectBase picker = (PickObjectBase) oc.Construct(pickerType, GetActivator(new RepositoryProvider(mem))); + [TestCase(typeof(PickDatabase))] + [TestCase(typeof(PickTable))] + [TestCase(typeof(PickObjectByID))] + [TestCase(typeof(PickObjectByName))] + public void Pickers_ShouldAllHaveValidExamples_MatchingRegex(Type pickerType) + { + var mem = new MemoryDataExportRepository(); - Assert.IsNotEmpty(picker.Help,"No Help for picker {0}",picker); - Assert.IsNotEmpty(picker.Format,"No Format for picker {0}",picker); - Assert.IsNotNull(picker.Examples,"No Examples for picker {0}",picker); - Assert.IsNotEmpty(picker.Examples,"No Examples for picker {0}",picker); + //create some objects that the examples can successfully reference + new Catalogue(mem.CatalogueRepository, "mycata1"); //ID = 1 + new Catalogue(mem.CatalogueRepository, "mycata2"); //ID = 2 + new Catalogue(mem.CatalogueRepository, "mycata3"); //ID = 3 - foreach (var example in picker.Examples) - { - //examples should be matched by the picker! - Assert.IsTrue(picker.IsMatch(example,0),"Example of picker '{0}' did not match the regex,listed example is '{1}'",picker,example); + var picker = (PickObjectBase)ObjectConstructor.Construct(pickerType, GetActivator(new RepositoryProvider(mem))); - var result = picker.Parse(example, 0); + Assert.IsNotEmpty(picker.Help, "No Help for picker {0}", picker); + Assert.IsNotEmpty(picker.Format, "No Format for picker {0}", picker); + Assert.IsNotNull(picker.Examples, "No Examples for picker {0}", picker); + Assert.IsNotEmpty(picker.Examples, "No Examples for picker {0}", picker); - Assert.IsNotNull(result); - } - } - - [Test] - public void PickTypeName() + foreach (var example in picker.Examples) { - var picker = new CommandLineObjectPicker(new []{"Name"}, GetActivator()); - - Assert.IsNull(picker[0].Type); - Assert.AreEqual("Name",picker[0].RawValue); - } + //examples should be matched by the picker! 
+ Assert.IsTrue(picker.IsMatch(example, 0), + "Example of picker '{0}' did not match the regex,listed example is '{1}'", picker, example); - [TestCase("null")] - [TestCase("NULL")] - public void PickNull(string nullString) - { - var picker = new CommandLineObjectPicker(new []{nullString}, GetActivator()); - Assert.IsTrue(picker[0].ExplicitNull); + var result = picker.Parse(example, 0); + + Assert.IsNotNull(result); } - [Test] - public void Test_PickCatalogueByName_WithShortCode() - { - var cata1 = WhenIHaveA<Catalogue>(); - var cata2 = WhenIHaveA<Catalogue>(); + } - cata1.Name = "Biochem"; - cata2.Name = "Haematology"; + [Test] + public void PickTypeName() + { + var picker = new CommandLineObjectPicker(new[] { "Name" }, GetActivator()); - var picker = new CommandLineObjectPicker(new[] { $"c:*io*" }, GetActivator()); + Assert.IsNull(picker[0].Type); + Assert.AreEqual("Name", picker[0].RawValue); + } - Assert.AreEqual(cata1, picker[0].DatabaseEntities[0]); - Assert.AreEqual(1, picker[0].DatabaseEntities.Count); - } + [TestCase("null")] + [TestCase("NULL")] + public void PickNull(string nullString) + { + var picker = new CommandLineObjectPicker(new[] { nullString }, GetActivator()); + Assert.IsTrue(picker[0].ExplicitNull); + } - [Test] - public void Test_PickCatalogueByID_WithShortCode() - { - var cata1 = WhenIHaveA<Catalogue>(); - var cata2 = WhenIHaveA<Catalogue>(); + [Test] + public void Test_PickCatalogueByName_WithShortCode() + { + var cata1 = WhenIHaveA<Catalogue>(); + var cata2 = WhenIHaveA<Catalogue>(); - var picker = new CommandLineObjectPicker(new[] { $"c:{cata1.ID},{cata2.ID}" }, GetActivator()); + cata1.Name = "Biochem"; + cata2.Name = "Haematology"; - Assert.AreEqual(2, picker[0].DatabaseEntities.Count); - Assert.Contains(cata1, picker[0].DatabaseEntities); - Assert.Contains(cata2, picker[0].DatabaseEntities); - } + var picker = new CommandLineObjectPicker(new[] { $"c:*io*" }, GetActivator()); - [Test] - public void Test_PickCatalogueByTypeOnly_WithShortCode() - { - var cata1 = WhenIHaveA<Catalogue>(); - var cata2 = WhenIHaveA<Catalogue>(); + Assert.AreEqual(cata1, picker[0].DatabaseEntities[0]); + Assert.AreEqual(1, picker[0].DatabaseEntities.Count); + } - // c is short for Catalogue - // so this would be the use case 'rdmp cmd list Catalogue' where user can instead write 'rdmp cmd list c' - var picker = new CommandLineObjectPicker(new[] { $"c" }, GetActivator()); + [Test] + public void Test_PickCatalogueByID_WithShortCode() + { + var cata1 = WhenIHaveA<Catalogue>(); + var cata2 = WhenIHaveA<Catalogue>(); - Assert.AreEqual(2, picker[0].DatabaseEntities.Count); - Assert.Contains(cata1, picker[0].DatabaseEntities); - Assert.Contains(cata2, picker[0].DatabaseEntities); - } + var picker = new CommandLineObjectPicker(new[] { $"c:{cata1.ID},{cata2.ID}" }, GetActivator()); - [Test] - public void Test_PickWithPropertyQuery_CatalogueItemsByCatalogue() - { - // these two belong to the same catalogue - var ci = WhenIHaveA<CatalogueItem>(); - var ci2 = new CatalogueItem(ci.CatalogueRepository, ci.Catalogue, "My item 2"); - - // this one belongs to a different catalogue - var ci3 = WhenIHaveA<CatalogueItem>(); - - var cataId = ci.Catalogue.ID; - var picker = new CommandLineObjectPicker(new[] { $"CatalogueItem?Catalogue_ID:{cataId}" }, GetActivator()); - - Assert.AreEqual(2, picker[0].DatabaseEntities.Count); - Assert.Contains(ci, picker[0].DatabaseEntities); - Assert.Contains(ci2, picker[0].DatabaseEntities); - Assert.IsFalse(picker[0].DatabaseEntities.Contains(ci3)); - } - [Test] - public void Test_PickWithPropertyQuery_CatalogueByFolder() - { - // Catalogues - var c1 = WhenIHaveA<Catalogue>(); - var c2 = WhenIHaveA<Catalogue>(); - var c3 =
WhenIHaveA(); + Assert.AreEqual(2, picker[0].DatabaseEntities.Count); + Assert.Contains(cata1, picker[0].DatabaseEntities); + Assert.Contains(cata2, picker[0].DatabaseEntities); + } - c1.Folder = "\\datasets\\hi\\"; - c2.Folder = "\\datasets\\no\\"; - c3.Folder = "\\datasets\\hi\\"; + [Test] + public void Test_PickCatalogueByTypeOnly_WithShortCode() + { + var cata1 = WhenIHaveA(); + var cata2 = WhenIHaveA(); - var picker = new CommandLineObjectPicker(new[] { $"Catalogue?Folder:*hi*" }, GetActivator()); + // c is short for Catalogue + // so this would be the use case 'rdmp cmd list Catalogue' where user can instead write 'rdmp cmd list c' + var picker = new CommandLineObjectPicker(new[] { $"c" }, GetActivator()); - Assert.AreEqual(2, picker[0].DatabaseEntities.Count); - Assert.Contains(c1, picker[0].DatabaseEntities); - Assert.Contains(c3, picker[0].DatabaseEntities); - Assert.IsFalse(picker[0].DatabaseEntities.Contains(c2)); - } - [Test] - public void Test_PickWithPropertyQuery_PeriodicityNull() - { - // Catalogues - var c1 = WhenIHaveA(); - var c2 = WhenIHaveA(); + Assert.AreEqual(2, picker[0].DatabaseEntities.Count); + Assert.Contains(cata1, picker[0].DatabaseEntities); + Assert.Contains(cata2, picker[0].DatabaseEntities); + } - c1.PivotCategory_ExtractionInformation_ID = 10; - c2.PivotCategory_ExtractionInformation_ID = null; + [Test] + public void Test_PickWithPropertyQuery_CatalogueItemsByCatalogue() + { + // these two belong to the same catalogue + var ci = WhenIHaveA(); + var ci2 = new CatalogueItem(ci.CatalogueRepository, ci.Catalogue, "My item 2"); - var picker = new CommandLineObjectPicker(new[] { $"Catalogue?PivotCategory_ExtractionInformation_ID:null" }, GetActivator()); + // this one belongs to a different catalogue + var ci3 = WhenIHaveA(); - Assert.AreEqual(1, picker[0].DatabaseEntities.Count); - Assert.Contains(c2, picker[0].DatabaseEntities); - } - [Test] - public void Test_PickWithPropertyQuery_UnknownProperty() - { - var ex = Assert.Throws(()=>new CommandLineObjectPicker(new[] { $"Catalogue?Blarg:null" }, GetActivator())); - Assert.AreEqual("Unknown property 'Blarg'. 
Did not exist on Type 'Catalogue'", ex.Message); - } + var cataId = ci.Catalogue.ID; + var picker = new CommandLineObjectPicker(new[] { $"CatalogueItem?Catalogue_ID:{cataId}" }, GetActivator()); + + Assert.AreEqual(2, picker[0].DatabaseEntities.Count); + Assert.Contains(ci, picker[0].DatabaseEntities); + Assert.Contains(ci2, picker[0].DatabaseEntities); + Assert.IsFalse(picker[0].DatabaseEntities.Contains(ci3)); + } + + [Test] + public void Test_PickWithPropertyQuery_CatalogueByFolder() + { + // Catalogues + var c1 = WhenIHaveA(); + var c2 = WhenIHaveA(); + var c3 = WhenIHaveA(); + + c1.Folder = "\\datasets\\hi\\"; + c2.Folder = "\\datasets\\no\\"; + c3.Folder = "\\datasets\\hi\\"; + + var picker = new CommandLineObjectPicker(new[] { $"Catalogue?Folder:*hi*" }, GetActivator()); + + Assert.AreEqual(2, picker[0].DatabaseEntities.Count); + Assert.Contains(c1, picker[0].DatabaseEntities); + Assert.Contains(c3, picker[0].DatabaseEntities); + Assert.IsFalse(picker[0].DatabaseEntities.Contains(c2)); + } + + [Test] + public void Test_PickWithPropertyQuery_PeriodicityNull() + { + // Catalogues + var c1 = WhenIHaveA(); + var c2 = WhenIHaveA(); + + c1.PivotCategory_ExtractionInformation_ID = 10; + c2.PivotCategory_ExtractionInformation_ID = null; + + var picker = new CommandLineObjectPicker(new[] { $"Catalogue?PivotCategory_ExtractionInformation_ID:null" }, + GetActivator()); + + Assert.AreEqual(1, picker[0].DatabaseEntities.Count); + Assert.Contains(c2, picker[0].DatabaseEntities); + } + + [Test] + public void Test_PickWithPropertyQuery_UnknownProperty() + { + var ex = Assert.Throws(() => + new CommandLineObjectPicker(new[] { $"Catalogue?Blarg:null" }, GetActivator())); + Assert.AreEqual("Unknown property 'Blarg'. Did not exist on Type 'Catalogue'", ex.Message); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/CommandLine/ExampleDatasetsCreationTests.cs b/Rdmp.Core.Tests/CommandLine/ExampleDatasetsCreationTests.cs index e2fc010f8f..2e6469c49b 100644 --- a/Rdmp.Core.Tests/CommandLine/ExampleDatasetsCreationTests.cs +++ b/Rdmp.Core.Tests/CommandLine/ExampleDatasetsCreationTests.cs @@ -10,39 +10,39 @@ using Rdmp.Core.CommandLine.DatabaseCreation; using Rdmp.Core.Curation.Data; using Rdmp.Core.Curation.Data.Aggregation; -using ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Checks; using Tests.Common; -namespace Rdmp.Core.Tests.CommandLine +namespace Rdmp.Core.Tests.CommandLine; + +internal class ExampleDatasetsCreationTests : DatabaseTests { - class ExampleDatasetsCreationTests:DatabaseTests + /// + /// Tests the creation of example datasets during first installation of RDMP or when running "rdmp.exe install [...] -e" from the CLI + /// + [Test] + public void Test_ExampleDatasetsCreation() { - /// - /// Tests the creation of example datasets during first installation of RDMP or when running "rdmp.exe install [...] 
-e" from the CLI - /// - [Test] - public void Test_ExampleDatasetsCreation() - { - //Should be empty RDMP metadata database - Assert.AreEqual(0,CatalogueRepository.GetAllObjects().Length); - Assert.AreEqual(0,CatalogueRepository.GetAllObjects().Length); + //Should be empty RDMP metadata database + Assert.AreEqual(0, CatalogueRepository.GetAllObjects().Length); + Assert.AreEqual(0, CatalogueRepository.GetAllObjects().Length); - //create the pipelines - var pipes = new CataloguePipelinesAndReferencesCreation(RepositoryLocator,null,null); - pipes.CreatePipelines(); + //create the pipelines + var pipes = new CataloguePipelinesAndReferencesCreation(RepositoryLocator, null, null); + pipes.CreatePipelines(new PlatformDatabaseCreationOptions()); - //create all the stuff - var db = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer); - var creator = new ExampleDatasetsCreation(new ThrowImmediatelyActivator(RepositoryLocator),RepositoryLocator); - creator.Create(db,new ThrowImmediatelyCheckNotifier(),new PlatformDatabaseCreationOptions(){Seed = 500,DropDatabases = true }); + //create all the stuff + var db = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer); + var creator = new ExampleDatasetsCreation(new ThrowImmediatelyActivator(RepositoryLocator), RepositoryLocator); + creator.Create(db, ThrowImmediatelyCheckNotifier.Quiet, + new PlatformDatabaseCreationOptions { Seed = 500, DropDatabases = true }); - //should be at least 2 views (marked as view) - var views = CatalogueRepository.GetAllObjects().Count(ti => ti.IsView); - Assert.GreaterOrEqual(views,2); + //should be at least 2 views (marked as view) + var views = CatalogueRepository.GetAllObjects().Count(ti => ti.IsView); + Assert.GreaterOrEqual(views, 2); - //should have at least created some catalogues, graphs etc - Assert.GreaterOrEqual(CatalogueRepository.GetAllObjects().Length,4); - Assert.GreaterOrEqual(CatalogueRepository.GetAllObjects().Length,4); - } + //should have at least created some catalogues, graphs etc + Assert.GreaterOrEqual(CatalogueRepository.GetAllObjects().Length, 4); + Assert.GreaterOrEqual(CatalogueRepository.GetAllObjects().Length, 4); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/CommandLine/Interactive/ConsoleInputManagerTests.cs b/Rdmp.Core.Tests/CommandLine/Interactive/ConsoleInputManagerTests.cs index f09abc8731..0a4d60005c 100644 --- a/Rdmp.Core.Tests/CommandLine/Interactive/ConsoleInputManagerTests.cs +++ b/Rdmp.Core.Tests/CommandLine/Interactive/ConsoleInputManagerTests.cs @@ -4,26 +4,24 @@ // RDMP is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. // You should have received a copy of the GNU General Public License along with RDMP. If not, see . 
-using System; -using System.Collections.Generic; -using System.Text; using NUnit.Framework; using Rdmp.Core.CommandExecution; using Rdmp.Core.CommandLine.Interactive; -using ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Checks; using Tests.Common; -namespace Rdmp.Core.Tests.CommandLine.Interactive +namespace Rdmp.Core.Tests.CommandLine.Interactive; + +internal class ConsoleInputManagerTests : UnitTests { - class ConsoleInputManagerTests : UnitTests + [Test] + public void TestDisallowInput() { - [Test] - public void TestDisallowInput() + var manager = new ConsoleInputManager(RepositoryLocator, ThrowImmediatelyCheckNotifier.Quiet) { - var manager = new ConsoleInputManager(RepositoryLocator, new ThrowImmediatelyCheckNotifier()); - manager.DisallowInput = true; - - Assert.Throws(()=>manager.GetString(new DialogArgs { WindowTitle = "bob" }, null)); - } + DisallowInput = true + }; + + Assert.Throws(() => manager.GetString(new DialogArgs { WindowTitle = "bob" }, null)); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/CommandLine/NewObjectPoolTests.cs b/Rdmp.Core.Tests/CommandLine/NewObjectPoolTests.cs index b83e43802c..140780c124 100644 --- a/Rdmp.Core.Tests/CommandLine/NewObjectPoolTests.cs +++ b/Rdmp.Core.Tests/CommandLine/NewObjectPoolTests.cs @@ -4,60 +4,51 @@ // RDMP is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. // You should have received a copy of the GNU General Public License along with RDMP. If not, see . -using MapsDirectlyToDatabaseTable; using NUnit.Framework; using Rdmp.Core.CommandLine.Interactive.Picking; using Rdmp.Core.Curation.Data; -using System; -using System.Collections.Generic; using System.Linq; -using System.Text; -using System.Threading.Tasks; +using Rdmp.Core.MapsDirectlyToDatabaseTable; using Tests.Common; -namespace Rdmp.Core.Tests.CommandLine +namespace Rdmp.Core.Tests.CommandLine; + +internal class NewObjectPoolTests : UnitTests { - class NewObjectPoolTests : UnitTests + [Test] + public void TwoCataloguesWithSameName_NoSession() { - [Test] - public void TwoCataloguesWithSameName_NoSession() - { - SetupMEF(); + var cata1 = new Catalogue(Repository, "Hey"); - var cata1 = new Catalogue(Repository,"Hey"); + // When there is only one object we can pick it by name + var picker = new CommandLineObjectPicker(new string[] { "Catalogue:Hey" }, GetActivator()); + Assert.IsTrue(picker.HasArgumentOfType(0, typeof(Catalogue))); + Assert.AreEqual(cata1, picker.Arguments.First().GetValueForParameterOfType(typeof(Catalogue))); + + // But when there are 2 objects we don't know which to pick so cannot pick a Catalogue + new Catalogue(Repository, "Hey"); + var picker2 = new CommandLineObjectPicker(new string[] { "Catalogue:Hey" }, GetActivator()); + Assert.IsFalse(picker2.HasArgumentOfType(0, typeof(Catalogue))); + } + + [Test] + public void TwoCataloguesWithSameName_WithSession() + { + using (NewObjectPool.StartSession()) + { + var cata1 = new Catalogue(Repository, "Hey"); // When there is only one object we can pick it by name var picker = new CommandLineObjectPicker(new string[] { "Catalogue:Hey" }, GetActivator()); Assert.IsTrue(picker.HasArgumentOfType(0, typeof(Catalogue))); Assert.AreEqual(cata1, picker.Arguments.First().GetValueForParameterOfType(typeof(Catalogue))); - // But when there are 2 objects we don't know which to pick so cannot pick a Catalogue - new 
Catalogue(Repository, "Hey"); + // There are now 2 objects with the same name but because we are in a session we can pick the latest + var cata2 = new Catalogue(Repository, "Hey"); var picker2 = new CommandLineObjectPicker(new string[] { "Catalogue:Hey" }, GetActivator()); - Assert.IsFalse(picker2.HasArgumentOfType(0, typeof(Catalogue))); - } - - [Test] - public void TwoCataloguesWithSameName_WithSession() - { - SetupMEF(); - - using(NewObjectPool.StartSession()) - { - var cata1 = new Catalogue(Repository, "Hey"); - - // When there is only one object we can pick it by name - var picker = new CommandLineObjectPicker(new string[] { "Catalogue:Hey" }, GetActivator()); - Assert.IsTrue(picker.HasArgumentOfType(0, typeof(Catalogue))); - Assert.AreEqual(cata1, picker.Arguments.First().GetValueForParameterOfType(typeof(Catalogue))); - - // There are now 2 objects with the same name but because we are in a session we can pick the latest - var cata2 = new Catalogue(Repository, "Hey"); - var picker2 = new CommandLineObjectPicker(new string[] { "Catalogue:Hey" }, GetActivator()); - Assert.IsTrue(picker2.HasArgumentOfType(0, typeof(Catalogue))); - Assert.AreEqual(cata2, picker2.Arguments.First().GetValueForParameterOfType(typeof(Catalogue))); - } + Assert.IsTrue(picker2.HasArgumentOfType(0, typeof(Catalogue))); + Assert.AreEqual(cata2, picker2.Arguments.First().GetValueForParameterOfType(typeof(Catalogue))); } } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/CommandLine/PickTableTests.cs b/Rdmp.Core.Tests/CommandLine/PickTableTests.cs index 3849c8ae2e..3b424bf03e 100644 --- a/Rdmp.Core.Tests/CommandLine/PickTableTests.cs +++ b/Rdmp.Core.Tests/CommandLine/PickTableTests.cs @@ -6,36 +6,33 @@ using FAnsi; using FAnsi.Discovery; -using FAnsi.Implementation; using NUnit.Framework; using Rdmp.Core.CommandLine.Interactive.Picking; -using System; -using System.Collections.Generic; -using System.Text; using Tests.Common; -namespace Rdmp.Core.Tests.CommandLine +namespace Rdmp.Core.Tests.CommandLine; + +internal class PickTableTests : UnitTests { - class PickTableTests : UnitTests + [Test] + public void TestPickTable() { + var pick = new PickTable(); + var result = + pick.Parse( + @"Table:v_cool:Schema:dbo:IsView:True:DatabaseType:MicrosoftSQLServer:Name:MyDb:Server=localhost\sqlexpress;Trusted_Connection=True;", + 0); - [Test] - public void TestPickTable() - { - var pick = new PickTable(); - var result = pick.Parse(@"Table:v_cool:Schema:dbo:IsView:True:DatabaseType:MicrosoftSQLServer:Name:MyDb:Server=localhost\sqlexpress;Trusted_Connection=True;",0); + Assert.IsNotNull(result.Table); - Assert.IsNotNull(result.Table); - - Assert.AreEqual(TableType.View,result.Table.TableType); - Assert.AreEqual("dbo",result.Table.Schema); + Assert.AreEqual(TableType.View, result.Table.TableType); + Assert.AreEqual("dbo", result.Table.Schema); - Assert.AreEqual("v_cool",result.Table.GetRuntimeName()); - Assert.AreEqual("MyDb",result.Table.Database.GetRuntimeName()); - Assert.AreEqual("localhost\\sqlexpress",result.Table.Database.Server.Name); - Assert.AreEqual(DatabaseType.MicrosoftSQLServer,result.Table.Database.Server.DatabaseType); - Assert.IsNull(result.Table.Database.Server.ExplicitPasswordIfAny); - Assert.IsNull(result.Table.Database.Server.ExplicitUsernameIfAny); - } + Assert.AreEqual("v_cool", result.Table.GetRuntimeName()); + Assert.AreEqual("MyDb", result.Table.Database.GetRuntimeName()); + Assert.AreEqual("localhost\\sqlexpress", result.Table.Database.Server.Name); + 
Assert.AreEqual(DatabaseType.MicrosoftSQLServer, result.Table.Database.Server.DatabaseType); + Assert.IsNull(result.Table.Database.Server.ExplicitPasswordIfAny); + Assert.IsNull(result.Table.Database.Server.ExplicitUsernameIfAny); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/CommandLine/RdmpScriptTests.cs b/Rdmp.Core.Tests/CommandLine/RdmpScriptTests.cs index fe8d892a1d..ff58860664 100644 --- a/Rdmp.Core.Tests/CommandLine/RdmpScriptTests.cs +++ b/Rdmp.Core.Tests/CommandLine/RdmpScriptTests.cs @@ -4,101 +4,96 @@ // RDMP is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. // You should have received a copy of the GNU General Public License along with RDMP. If not, see . -using System; -using System.Collections.Generic; using System.Linq; -using System.Text; using NUnit.Framework; using Rdmp.Core.CommandLine.Options; using Rdmp.Core.CommandLine.Runners; using Rdmp.Core.Curation.Data; using Rdmp.Core.DataFlowPipeline; -using ReusableLibraryCode.Checks; -using ReusableLibraryCode.Progress; +using Rdmp.Core.ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Progress; using Tests.Common; -namespace Rdmp.Core.Tests.CommandLine +namespace Rdmp.Core.Tests.CommandLine; + +internal class RdmpScriptTests : UnitTests { - class RdmpScriptTests : UnitTests + [TestCase("NewObject Catalogue 'trog dor'", "trog dor")] + [TestCase("NewObject Catalogue \"trog dor\"", "trog dor")] + [TestCase("NewObject Catalogue \"'trog dor'\"", "'trog dor'")] + [TestCase("NewObject Catalogue '\"trog dor\"'", "\"trog dor\"")] + public void RdmpScript_NewObject_Catalogue(string command, string expectedName) { - [TestCase("NewObject Catalogue 'trog dor'","trog dor")] - [TestCase("NewObject Catalogue \"trog dor\"","trog dor")] - [TestCase("NewObject Catalogue \"'trog dor'\"","'trog dor'")] - [TestCase("NewObject Catalogue '\"trog dor\"'","\"trog dor\"")] + foreach (var c in RepositoryLocator.CatalogueRepository.GetAllObjects()) + c.DeleteInDatabase(); - public void RdmpScript_NewObject_Catalogue(string command, string expectedName) + var runner = new ExecuteCommandRunner(new ExecuteCommandOptions { - foreach(var c in RepositoryLocator.CatalogueRepository.GetAllObjects()) - c.DeleteInDatabase(); - - var runner = new ExecuteCommandRunner(new ExecuteCommandOptions() + Script = new RdmpScript { - Script = new RdmpScript() - { - Commands = new[] {command} - } - }); - - SetupMEF(); + Commands = new[] { command } + } + }); - var exitCode = runner.Run(RepositoryLocator, new ThrowImmediatelyDataLoadEventListener(), new ThrowImmediatelyCheckNotifier(), new GracefulCancellationToken()); + var exitCode = runner.Run(RepositoryLocator, ThrowImmediatelyDataLoadEventListener.Quiet, + ThrowImmediatelyCheckNotifier.Quiet, new GracefulCancellationToken()); - Assert.AreEqual(0,exitCode); - Assert.AreEqual(1,RepositoryLocator.CatalogueRepository.GetAllObjects().Length); + Assert.AreEqual(0, exitCode); + Assert.AreEqual(1, RepositoryLocator.CatalogueRepository.GetAllObjects().Length); - Assert.AreEqual(expectedName,RepositoryLocator.CatalogueRepository.GetAllObjects().Single().Name); - } + Assert.AreEqual(expectedName, RepositoryLocator.CatalogueRepository.GetAllObjects().Single().Name); + } - [TestCase("NewObject Catalogue 'fffff'","NewObject CatalogueItem Catalogue:*fff* 'bbbb'","bbbb")] - [TestCase("NewObject Catalogue '\"fff\"'","NewObject 
CatalogueItem 'Catalogue:\"fff\"' 'bbbb'","bbbb")] - [TestCase("NewObject Catalogue '\"ff ff\"'","NewObject CatalogueItem 'Catalogue:\"ff ff\"' 'bb bb'","bb bb")] - [TestCase("NewObject Catalogue '\"ff ff\"'","NewObject CatalogueItem 'Catalogue:\"ff ff\"' bb'bb","bb'bb")] - [TestCase("NewObject Catalogue '\"ff ff\"'","NewObject CatalogueItem 'Catalogue:\"ff ff\"' b\"b'bb'","b\"b'bb'")] - public void RdmpScript_NewObject_CatalogueItem(string cataCommand,string cataItemCommand, string expectedCataItemName) - { - foreach(var c in RepositoryLocator.CatalogueRepository.GetAllObjects()) - c.DeleteInDatabase(); + [TestCase("NewObject Catalogue 'fffff'", "NewObject CatalogueItem Catalogue:*fff* 'bbbb'", "bbbb")] + [TestCase("NewObject Catalogue '\"fff\"'", "NewObject CatalogueItem 'Catalogue:\"fff\"' 'bbbb'", "bbbb")] + [TestCase("NewObject Catalogue '\"ff ff\"'", "NewObject CatalogueItem 'Catalogue:\"ff ff\"' 'bb bb'", "bb bb")] + [TestCase("NewObject Catalogue '\"ff ff\"'", "NewObject CatalogueItem 'Catalogue:\"ff ff\"' bb'bb", "bb'bb")] + [TestCase("NewObject Catalogue '\"ff ff\"'", "NewObject CatalogueItem 'Catalogue:\"ff ff\"' b\"b'bb'", "b\"b'bb'")] + public void RdmpScript_NewObject_CatalogueItem(string cataCommand, string cataItemCommand, + string expectedCataItemName) + { + foreach (var c in RepositoryLocator.CatalogueRepository.GetAllObjects()) + c.DeleteInDatabase(); - var runner = new ExecuteCommandRunner(new ExecuteCommandOptions() + var runner = new ExecuteCommandRunner(new ExecuteCommandOptions + { + Script = new RdmpScript { - Script = new RdmpScript() + Commands = new[] { - Commands = new[] - { - cataCommand, - cataItemCommand - } + cataCommand, + cataItemCommand } - }); - - SetupMEF(); + } + }); - var exitCode = runner.Run(RepositoryLocator, new ThrowImmediatelyDataLoadEventListener(), new ThrowImmediatelyCheckNotifier(), new GracefulCancellationToken()); + var exitCode = runner.Run(RepositoryLocator, ThrowImmediatelyDataLoadEventListener.Quiet, + ThrowImmediatelyCheckNotifier.Quiet, new GracefulCancellationToken()); - Assert.AreEqual(0,exitCode); - Assert.AreEqual(1,RepositoryLocator.CatalogueRepository.GetAllObjects().Length); - var ci = RepositoryLocator.CatalogueRepository.GetAllObjects().Single().CatalogueItems.Single(); + Assert.AreEqual(0, exitCode); + Assert.AreEqual(1, RepositoryLocator.CatalogueRepository.GetAllObjects().Length); + var ci = RepositoryLocator.CatalogueRepository.GetAllObjects().Single().CatalogueItems.Single(); - Assert.AreEqual(expectedCataItemName,ci.Name); - - } + Assert.AreEqual(expectedCataItemName, ci.Name); + } - [Test] - public void Test_SplitCommandLine() - { - var vals = ExecuteCommandRunner.SplitCommandLine("NewObject CatalogueItem 'Catalogue:\"fff\"' 'bbbb'").ToArray(); - Assert.AreEqual("NewObject",vals[0]); - Assert.AreEqual("CatalogueItem",vals[1]); - Assert.AreEqual("Catalogue:\"fff\"",vals[2]); - Assert.AreEqual("bbbb",vals[3]); - } - [Test] - public void Test_SplitCommandLine_QuotesInStrings() - { - var vals = ExecuteCommandRunner.SplitCommandLine("NewObject CatalogueItem bb\"'bb'").ToArray(); - Assert.AreEqual("NewObject",vals[0]); - Assert.AreEqual("CatalogueItem",vals[1]); - Assert.AreEqual("bb\"'bb'",vals[2]); - } + [Test] + public void Test_SplitCommandLine() + { + var vals = ExecuteCommandRunner.SplitCommandLine("NewObject CatalogueItem 'Catalogue:\"fff\"' 'bbbb'") + .ToArray(); + Assert.AreEqual("NewObject", vals[0]); + Assert.AreEqual("CatalogueItem", vals[1]); + Assert.AreEqual("Catalogue:\"fff\"", vals[2]); + 
Assert.AreEqual("bbbb", vals[3]); + } + + [Test] + public void Test_SplitCommandLine_QuotesInStrings() + { + var vals = ExecuteCommandRunner.SplitCommandLine("NewObject CatalogueItem bb\"'bb'").ToArray(); + Assert.AreEqual("NewObject", vals[0]); + Assert.AreEqual("CatalogueItem", vals[1]); + Assert.AreEqual("bb\"'bb'", vals[2]); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/CommentStoreTests.cs b/Rdmp.Core.Tests/CommentStoreTests.cs index f01ed40a1f..8c56debef5 100644 --- a/Rdmp.Core.Tests/CommentStoreTests.cs +++ b/Rdmp.Core.Tests/CommentStoreTests.cs @@ -6,41 +6,40 @@ using System.Xml; using NUnit.Framework; -using ReusableLibraryCode.Comments; +using Rdmp.Core.ReusableLibraryCode.Comments; -namespace Rdmp.Core.Tests +namespace Rdmp.Core.Tests; + +internal class CommentStoreTests { - class CommentStoreTests + [Test] + public void Test_CommentStoreXmlDoc_Basic() { - [Test] - public void Test_CommentStoreXmlDoc_Basic() - { - var store = new CommentStore(); - - XmlDocument doc = new XmlDocument(); - doc.LoadXml( - @" + var store = new CommentStore(); + + var doc = new XmlDocument(); + doc.LoadXml( + @" Does some stuff "); - store.AddXmlDoc(doc.FirstChild); + store.AddXmlDoc(doc.FirstChild); - Assert.AreEqual( - @"Does some stuff" - ,store["WindowFactory"]); + Assert.AreEqual( + @"Does some stuff" + , store["WindowFactory"]); + } - } + [Test] + public void Test_CommentStoreXmlDoc_OnePara() + { + var store = new CommentStore(); - [Test] - public void Test_CommentStoreXmlDoc_OnePara() - { - var store = new CommentStore(); - - XmlDocument doc = new XmlDocument(); - doc.LoadXml( - @" + var doc = new XmlDocument(); + doc.LoadXml( + @" Does some stuff @@ -48,22 +47,21 @@ Does some stuff "); - store.AddXmlDoc(doc.FirstChild); + store.AddXmlDoc(doc.FirstChild); - Assert.AreEqual( - @"Does some stuff" - ,store["WindowFactory"]); + Assert.AreEqual( + @"Does some stuff" + , store["WindowFactory"]); + } - } + [Test] + public void Test_CommentStoreXmlDoc_TwoPara() + { + var store = new CommentStore(); - [Test] - public void Test_CommentStoreXmlDoc_TwoPara() - { - var store = new CommentStore(); - - XmlDocument doc = new XmlDocument(); - doc.LoadXml( - @" + var doc = new XmlDocument(); + doc.LoadXml( + @" Does some stuff This is still one para @@ -73,65 +71,64 @@ this is next para "); - store.AddXmlDoc(doc.FirstChild); + store.AddXmlDoc(doc.FirstChild); - Assert.AreEqual( - @"Does some stuff This is still one para + Assert.AreEqual( + @"Does some stuff This is still one para this is next para" - ,store["WindowFactory"]); + , store["WindowFactory"]); + } - } + [Test] + public void Test_CommentStoreXmlDoc_EmptyElements() + { + var store = new CommentStore(); - [Test] - public void Test_CommentStoreXmlDoc_EmptyElements() - { - var store = new CommentStore(); - - XmlDocument doc = new XmlDocument(); - doc.LoadXml( - @" + var doc = new XmlDocument(); + doc.LoadXml( + @" "); - //shouldn't bomb - store.AddXmlDoc(null); - //also shouldn't bomb but should be 0 - store.AddXmlDoc(doc.FirstChild.FirstChild); - - Assert.IsEmpty(store); + //shouldn't bomb + store.AddXmlDoc(null); + //also shouldn't bomb but should be 0 + store.AddXmlDoc(doc.FirstChild.FirstChild); - store.AddXmlDoc(doc.FirstChild); - Assert.IsEmpty(store); + Assert.IsEmpty(store); - doc.LoadXml( - @" + store.AddXmlDoc(doc.FirstChild); + Assert.IsEmpty(store); + + doc.LoadXml( + @" "); - store.AddXmlDoc(doc.FirstChild); - Assert.IsEmpty(store); + store.AddXmlDoc(doc.FirstChild); + Assert.IsEmpty(store); + - - doc.LoadXml( - @" + 
doc.LoadXml( + @" a "); - store.AddXmlDoc(doc.FirstChild); - Assert.IsNotEmpty(store); - } + store.AddXmlDoc(doc.FirstChild); + Assert.IsNotEmpty(store); + } + + [Test] + public void Test_CommentStoreXmlDoc_TwoParaBothFormatted() + { + var store = new CommentStore(); - [Test] - public void Test_CommentStoreXmlDoc_TwoParaBothFormatted() - { - var store = new CommentStore(); - - XmlDocument doc = new XmlDocument(); - doc.LoadXml( - @" + var doc = new XmlDocument(); + doc.LoadXml( + @" Does some stuff @@ -143,25 +140,24 @@ this is next para "); - store.AddXmlDoc(doc.FirstChild); + store.AddXmlDoc(doc.FirstChild); - Assert.AreEqual( - @"Does some stuff This is still one para + Assert.AreEqual( + @"Does some stuff This is still one para this is next para" - ,store["WindowFactory"]); + , store["WindowFactory"]); + } - } + [Test] + public void Test_CommentStoreXmlDoc_WithCrefAndTwoPara() + { + var store = new CommentStore(); - [Test] - public void Test_CommentStoreXmlDoc_WithCrefAndTwoPara() - { - var store = new CommentStore(); - - XmlDocument doc = new XmlDocument(); - doc.LoadXml( - @" + var doc = new XmlDocument(); + doc.LoadXml( + @" Does some stuff And some more stuff @@ -174,14 +170,12 @@ got it? This text shouldn't appear "); - store.AddXmlDoc(doc.FirstChild); + store.AddXmlDoc(doc.FirstChild); - Assert.AreEqual( - @"Does some stuff And some more stuff IObjectCollectionControl (for RDMPCollectionUI see WindowManager ). + Assert.AreEqual( + @"Does some stuff And some more stuff IObjectCollectionControl (for RDMPCollectionUI see WindowManager ). paragraph 2 got it?" - ,store["WindowFactory"]); - - } + , store["WindowFactory"]); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Anonymisation/ANOMigrationTests.cs b/Rdmp.Core.Tests/Curation/Anonymisation/ANOMigrationTests.cs index 3db0cb6e3a..3e6a2855b7 100644 --- a/Rdmp.Core.Tests/Curation/Anonymisation/ANOMigrationTests.cs +++ b/Rdmp.Core.Tests/Curation/Anonymisation/ANOMigrationTests.cs @@ -5,51 +5,50 @@ // You should have received a copy of the GNU General Public License along with RDMP. If not, see . 
using System; -using System.Collections.Generic; -using System.Data.Common; using System.Linq; using NUnit.Framework; using Rdmp.Core.Curation; using Rdmp.Core.Curation.Data; using Rdmp.Core.Curation.Data.DataLoad; -using Rdmp.Core.Curation.Data.Defaults; using Rdmp.Core.DataLoad.Engine.Pipeline.Components.Anonymisation; -using ReusableLibraryCode.Checks; -using ReusableLibraryCode.DataAccess; +using Rdmp.Core.ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.DataAccess; using Tests.Common.Scenarios; -namespace Rdmp.Core.Tests.Curation.Anonymisation +namespace Rdmp.Core.Tests.Curation.Anonymisation; + +public class ANOMigrationTests : TestsRequiringANOStore { - public class ANOMigrationTests : TestsRequiringANOStore + private const string TableName = "ANOMigration"; + + private ITableInfo _tableInfo; + private ColumnInfo[] _columnInfos; + private ANOTable _anoConditionTable; + + #region setup + + [SetUp] + protected override void SetUp() { - private const string TableName = "ANOMigration"; + base.SetUp(); - private ITableInfo _tableInfo; - private ColumnInfo[] _columnInfos; - private ANOTable _anoConditionTable; + var db = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer); - #region setup - [SetUp] - protected override void SetUp() - { - base.SetUp(); + BlitzMainDataTables(); - var db = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer); - - BlitzMainDataTables(); - - DeleteANOEndpoint(); - - ANOTable remnantANO = CatalogueRepository.GetAllObjects().SingleOrDefault(a => a.TableName.Equals("ANOCondition")); + DeleteANOEndpoint(); - if (remnantANO != null) - remnantANO.DeleteInDatabase(); + var remnantANO = CatalogueRepository.GetAllObjects() + .SingleOrDefault(a => a.TableName.Equals("ANOCondition")); - //cleanup - foreach (var remnant in CatalogueRepository.GetAllObjects().Where(t => t.GetRuntimeName().Equals(TableName))) - remnant.DeleteInDatabase(); + remnantANO?.DeleteInDatabase(); - const string sql = @" + //cleanup + foreach (var remnant in CatalogueRepository.GetAllObjects() + .Where(t => t.GetRuntimeName().Equals(TableName))) + remnant.DeleteInDatabase(); + + const string sql = @" CREATE TABLE [ANOMigration]( [AdmissionDate] [datetime] NOT NULL, [DischargeDate] [datetime] NOT NULL, @@ -79,66 +78,75 @@ INSERT [ANOMigration] ([AdmissionDate], [DischargeDate], [Condition1], [Conditio INSERT [ANOMigration] ([AdmissionDate], [DischargeDate], [Condition1], [Condition2], [Condition3], [Condition4], [CHI]) VALUES (CAST(0x0000088A00000000 AS DateTime), CAST(0x0000089300000000 AS DateTime), N'G009', NULL, NULL, NULL, N'0706013071') INSERT [ANOMigration] ([AdmissionDate], [DischargeDate], [Condition1], [Condition2], [Condition3], [Condition4], [CHI]) VALUES (CAST(0x000008CA00000000 AS DateTime), CAST(0x000008D100000000 AS DateTime), N'T47', N'H311', N'O037', NULL, N'1204057592')"; - var server = db.Server; - using (var con = server.GetConnection()) - { - con.Open(); - server.GetCommand(sql,con).ExecuteNonQuery(); - } - - var table = db.ExpectTable(TableName); - TableInfoImporter importer = new TableInfoImporter(CatalogueRepository, table); - importer.DoImport(out _tableInfo,out _columnInfos); - - //Configure the structure of the ANO transform we want - identifiers should have 3 characters and 2 ints and end with _C - _anoConditionTable = new ANOTable(CatalogueRepository, ANOStore_ExternalDatabaseServer, "ANOCondition","C"); - _anoConditionTable.NumberOfCharactersToUseInAnonymousRepresentation = 3; - _anoConditionTable.NumberOfIntegersToUseInAnonymousRepresentation = 
2; - _anoConditionTable.SaveToDatabase(); - _anoConditionTable.PushToANOServerAsNewTable("varchar(4)", new ThrowImmediatelyCheckNotifier()); + var server = db.Server; + using (var con = server.GetConnection()) + { + con.Open(); + server.GetCommand(sql, con).ExecuteNonQuery(); } - private void DeleteANOEndpoint() + var table = db.ExpectTable(TableName); + var importer = new TableInfoImporter(CatalogueRepository, table); + importer.DoImport(out _tableInfo, out _columnInfos); + + //Configure the structure of the ANO transform we want - identifiers should have 3 characters and 2 ints and end with _C + _anoConditionTable = new ANOTable(CatalogueRepository, ANOStore_ExternalDatabaseServer, "ANOCondition", "C") { - var remnantEndpointANOTable = DataAccessPortal.GetInstance() - .ExpectDatabase(ANOStore_ExternalDatabaseServer, DataAccessContext.InternalDataProcessing) - .ExpectTable("ANOCondition"); + NumberOfCharactersToUseInAnonymousRepresentation = 3, + NumberOfIntegersToUseInAnonymousRepresentation = 2 + }; + _anoConditionTable.SaveToDatabase(); + _anoConditionTable.PushToANOServerAsNewTable("varchar(4)", ThrowImmediatelyCheckNotifier.Quiet); + } - if (remnantEndpointANOTable.Exists()) - remnantEndpointANOTable.Drop(); - } + private void DeleteANOEndpoint() + { + var remnantEndpointANOTable = DataAccessPortal + .ExpectDatabase(ANOStore_ExternalDatabaseServer, DataAccessContext.InternalDataProcessing) + .ExpectTable("ANOCondition"); - #endregion + if (remnantEndpointANOTable.Exists()) + remnantEndpointANOTable.Drop(); + } - - [Test,Order(1)] - public void PKsAreCorrect() - { - Assert.IsTrue(_columnInfos.Single(c=>c.GetRuntimeName().Equals("AdmissionDate")).IsPrimaryKey); - Assert.IsTrue(_columnInfos.Single(c => c.GetRuntimeName().Equals("Condition1")).IsPrimaryKey); - Assert.IsTrue(_columnInfos.Single(c => c.GetRuntimeName().Equals("CHI")).IsPrimaryKey); - } + #endregion - [Test,Order(2)] - public void ConvertPrimaryKeyColumn() - { - //The table we created above should have a column called Condition2 in it, we will migrate this data to ANO land - ColumnInfo condition = _columnInfos.Single(c => c.GetRuntimeName().Equals("Condition1")); - ColumnInfoToANOTableConverter converter = new ColumnInfoToANOTableConverter(condition, _anoConditionTable); - var ex = Assert.Throws(()=>converter.ConvertFullColumnInfo((s) => true, new ThrowImmediatelyCheckNotifier())); //say yes to everything it proposes - StringAssert.IsMatch(@"Could not perform transformation because column \[(.*)\]\.\[dbo\]\.\[.*\]\.\[Condition1\] is not droppable",ex.Message); - } + [Test] + [Order(1)] + public void PKsAreCorrect() + { + Assert.IsTrue(_columnInfos.Single(c => c.GetRuntimeName().Equals("AdmissionDate")).IsPrimaryKey); + Assert.IsTrue(_columnInfos.Single(c => c.GetRuntimeName().Equals("Condition1")).IsPrimaryKey); + Assert.IsTrue(_columnInfos.Single(c => c.GetRuntimeName().Equals("CHI")).IsPrimaryKey); + } + [Test] + [Order(2)] + public void ConvertPrimaryKeyColumn() + { + //The table we created above should have a column called Condition2 in it, we will migrate this data to ANO land + var condition = _columnInfos.Single(c => c.GetRuntimeName().Equals("Condition1")); + var converter = new ColumnInfoToANOTableConverter(condition, _anoConditionTable); + var ex = Assert.Throws(() => + converter.ConvertFullColumnInfo(s => true, + ThrowImmediatelyCheckNotifier.Quiet)); //say yes to everything it proposes + + StringAssert.IsMatch( + @"Could not perform transformation because column \[(.*)\]\.\[dbo\]\.\[.*\]\.\[Condition1\] is not 
droppable", + ex.Message); + } - [Test,Order(3)] - [TestCase("Condition2")] - [TestCase("Condition3")] - [TestCase("Condition4")] - public void ConvertNonPrimaryKeyColumn(string conditionColumn) - { - // TODO: This test doesn't ever seem to work! - return; + + [Test] + [Order(3)] + [TestCase("Condition2")] + [TestCase("Condition3")] + [TestCase("Condition4")] + public void ConvertNonPrimaryKeyColumn(string conditionColumn) + { + // TODO: This test doesn't ever seem to work! + return; /* //Value and a list of the rows in which it was found on (e.g. the value 'Fish' was found on row 11, 31, 52 and 501 @@ -160,11 +168,11 @@ public void ConvertNonPrimaryKeyColumn(string conditionColumn) else rowsObjectFoundIn.Add(r[conditionColumn], new List(new[] {row})); } - + //The table we created above should have a column called Condition2 in it, we will migrate this data to ANO land ColumnInfo condition = _columnInfos.Single(c => c.GetRuntimeName().Equals(conditionColumn)); ColumnInfoToANOTableConverter converter = new ColumnInfoToANOTableConverter(condition, _anoConditionTable); - converter.ConvertFullColumnInfo((s) => true,new AcceptAllCheckNotifier()); //say yes to everything it proposes + converter.ConvertFullColumnInfo((s) => true,new AcceptAllCheckNotifier()); //say yes to everything it proposes //refresh the column infos ColumnInfo[] columnInfos = _tableInfo.ColumnInfos; @@ -187,7 +195,7 @@ public void ConvertNonPrimaryKeyColumn(string conditionColumn) DbCommand cmd = server.GetCommand("Select * from " + TableName, con); var r = cmd.ExecuteReader(); - + List objectsFound = new List(); while (r.Read()) @@ -208,6 +216,5 @@ public void ConvertNonPrimaryKeyColumn(string conditionColumn) } } */ - } } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Anonymisation/ANOStoreFunctionalityTests.cs b/Rdmp.Core.Tests/Curation/Anonymisation/ANOStoreFunctionalityTests.cs index 2650b6ea53..f88acb705e 100644 --- a/Rdmp.Core.Tests/Curation/Anonymisation/ANOStoreFunctionalityTests.cs +++ b/Rdmp.Core.Tests/Curation/Anonymisation/ANOStoreFunctionalityTests.cs @@ -5,83 +5,75 @@ // You should have received a copy of the GNU General Public License along with RDMP. If not, see . 
using System; -using System.Data.Common; using NUnit.Framework; -using ReusableLibraryCode; -using ReusableLibraryCode.DataAccess; +using Rdmp.Core.ReusableLibraryCode; +using Rdmp.Core.ReusableLibraryCode.DataAccess; using Tests.Common.Scenarios; -namespace Rdmp.Core.Tests.Curation.Anonymisation +namespace Rdmp.Core.Tests.Curation.Anonymisation; + +public class ANOStoreFunctionalityTests : TestsRequiringFullAnonymisationSuite { - public class ANOStoreFunctionalityTests:TestsRequiringFullAnonymisationSuite + [Test] + public void CanAccessANODatabase_Directly() { - [Test] - public void CanAccessANODatabase_Directly() - { - var server = ANOStore_Database.Server; - using (var con = server.GetConnection()) - { - con.Open(); + var server = ANOStore_Database.Server; + using var con = server.GetConnection(); + con.Open(); - var cmd = server.GetCommand("Select version from RoundhousE.Version", con); - var version = new Version(cmd.ExecuteScalar().ToString()); + var cmd = server.GetCommand("Select version from RoundhousE.Version", con); + var version = new Version(cmd.ExecuteScalar().ToString()); - Assert.GreaterOrEqual(version, new Version("0.0.0.0")); + Assert.GreaterOrEqual(version, new Version("0.0.0.0")); - con.Close(); - } - } + con.Close(); + } - [Test] - public void CanAccessANODatabase_ViaExternalServerPointer() + [Test] + public void CanAccessANODatabase_ViaExternalServerPointer() + { + using var connection = DataAccessPortal + .ExpectServer(ANOStore_ExternalDatabaseServer, DataAccessContext.DataLoad).GetConnection(); + connection.Open(); + + using (var cmd = + DatabaseCommandHelper.GetCommand("Select version from RoundhousE.Version", connection)) { - using (var connection = DataAccessPortal.GetInstance().ExpectServer(ANOStore_ExternalDatabaseServer, DataAccessContext.DataLoad).GetConnection()) - { - connection.Open(); - - using (DbCommand cmd = - DatabaseCommandHelper.GetCommand("Select version from RoundhousE.Version", connection)) - { - var version = new Version(cmd.ExecuteScalar().ToString()); - Assert.GreaterOrEqual(version, new Version("0.0.0.0")); - } - - connection.Close(); - } + var version = new Version(cmd.ExecuteScalar().ToString()); + Assert.GreaterOrEqual(version, new Version("0.0.0.0")); } - [Test] - public void CanAccessIdentifierDumpDatabase_Directly() - { - using (var con = IdentifierDump_Database.Server.GetConnection()) - { - con.Open(); + connection.Close(); + } - var cmd = IdentifierDump_Database.Server.GetCommand("Select version from RoundhousE.Version", con); - var version = new Version(cmd.ExecuteScalar().ToString()); + [Test] + public void CanAccessIdentifierDumpDatabase_Directly() + { + using var con = IdentifierDump_Database.Server.GetConnection(); + con.Open(); - Assert.GreaterOrEqual(version, new Version("0.0.0.0")); + var cmd = IdentifierDump_Database.Server.GetCommand("Select version from RoundhousE.Version", con); + var version = new Version(cmd.ExecuteScalar().ToString()); - con.Close(); - } - } + Assert.GreaterOrEqual(version, new Version("0.0.0.0")); + + con.Close(); + } - [Test] - public void CanAccessIdentifierDumpDatabase_ViaExternalServerPointer() + [Test] + public void CanAccessIdentifierDumpDatabase_ViaExternalServerPointer() + { + using var connection = DataAccessPortal + .ExpectServer(IdentifierDump_ExternalDatabaseServer, DataAccessContext.DataLoad).GetConnection(); + connection.Open(); + + using (var cmd = DatabaseCommandHelper.GetCommand("Select version from RoundhousE.Version", connection)) { - using(var connection = 
DataAccessPortal.GetInstance().ExpectServer(IdentifierDump_ExternalDatabaseServer, DataAccessContext.DataLoad).GetConnection()) - { - connection.Open(); - - using (DbCommand cmd = DatabaseCommandHelper.GetCommand("Select version from RoundhousE.Version", connection)) - { - var version = new Version(cmd.ExecuteScalar().ToString()); - Assert.GreaterOrEqual(version, new Version("0.0.0.0")); - } - - - connection.Close(); - } + var version = new Version(cmd.ExecuteScalar().ToString()); + Assert.GreaterOrEqual(version, new Version("0.0.0.0")); } + + + connection.Close(); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Anonymisation/ANOTableTests.cs b/Rdmp.Core.Tests/Curation/Anonymisation/ANOTableTests.cs index 09e7394976..09fd40ac5f 100644 --- a/Rdmp.Core.Tests/Curation/Anonymisation/ANOTableTests.cs +++ b/Rdmp.Core.Tests/Curation/Anonymisation/ANOTableTests.cs @@ -7,334 +7,332 @@ using System; using System.Collections.Generic; using System.Data; -using Microsoft.Data.SqlClient; using System.Diagnostics; +using System.Globalization; using System.Linq; using System.Text.RegularExpressions; -using FAnsi.Discovery; using NUnit.Framework; using Rdmp.Core.Curation.Data.DataLoad; using Rdmp.Core.DataLoad.Engine.Pipeline.Components.Anonymisation; -using ReusableLibraryCode.Checks; -using ReusableLibraryCode.Progress; +using Rdmp.Core.ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Progress; using Tests.Common.Scenarios; -namespace Rdmp.Core.Tests.Curation.Anonymisation +namespace Rdmp.Core.Tests.Curation.Anonymisation; + +public class ANOTableTests : TestsRequiringANOStore { - public class ANOTableTests:TestsRequiringANOStore + private Regex _anochiPattern = new(@"\d{10}_A"); + + #region Create New ANOTables + + [Test] + [TestCase("varchar(1)")] + [TestCase("int")] + [TestCase("tinyint")] + [TestCase("bit")] + public void CreateAnANOTable_PushAs(string datatypeForPush) { - Regex _anochiPattern = new Regex(@"\d{10}_A"); - - #region Create New ANOTables - [Test] - [TestCase("varchar(1)")] - [TestCase("int")] - [TestCase("tinyint")] - [TestCase("bit")] - public void CreateAnANOTable_PushAs(string datatypeForPush) - { - - var anoTable = GetANOTable(); - Assert.AreEqual("ANOMyTable", anoTable.TableName); - anoTable.NumberOfCharactersToUseInAnonymousRepresentation =20; - anoTable.NumberOfIntegersToUseInAnonymousRepresentation = 20; - anoTable.PushToANOServerAsNewTable(datatypeForPush, new ThrowImmediatelyCheckNotifier()); + var anoTable = GetANOTable(); + Assert.AreEqual("ANOMyTable", anoTable.TableName); + anoTable.NumberOfCharactersToUseInAnonymousRepresentation = 20; + anoTable.NumberOfIntegersToUseInAnonymousRepresentation = 20; + anoTable.PushToANOServerAsNewTable(datatypeForPush, ThrowImmediatelyCheckNotifier.Quiet); - var discoveredTable = ANOStore_Database.DiscoverTables(false).SingleOrDefault(t => t.GetRuntimeName().Equals("ANOMyTable")); - - //server should have - Assert.NotNull(discoveredTable); - Assert.IsTrue(discoveredTable.Exists()); + var discoveredTable = ANOStore_Database.DiscoverTables(false) + .SingleOrDefault(t => t.GetRuntimeName().Equals("ANOMyTable")); - //yes that's right hte table name and column name are the same here \|/ - Assert.AreEqual(datatypeForPush, discoveredTable.DiscoverColumn("MyTable").DataType.SQLType); + //server should have + Assert.NotNull(discoveredTable); + Assert.IsTrue(discoveredTable.Exists()); - //20 + 20 + _ + A - Assert.AreEqual("varchar(42)", discoveredTable.DiscoverColumn("ANOMyTable").DataType.SQLType); + //yes 
that's right hte table name and column name are the same here \|/ + Assert.AreEqual(datatypeForPush, discoveredTable.DiscoverColumn("MyTable").DataType.SQLType); - anoTable.DeleteInDatabase(); - } + //20 + 20 + _ + A + Assert.AreEqual("varchar(42)", discoveredTable.DiscoverColumn("ANOMyTable").DataType.SQLType); - [Test] - public void CreateAnANOTable_Revertable() - { - var anoTable = GetANOTable(); + anoTable.DeleteInDatabase(); + } - anoTable.NumberOfCharactersToUseInAnonymousRepresentation = 63; - anoTable.RevertToDatabaseState(); - Assert.AreEqual(1,anoTable.NumberOfCharactersToUseInAnonymousRepresentation); - anoTable.DeleteInDatabase(); - } + [Test] + public void CreateAnANOTable_Revertable() + { + var anoTable = GetANOTable(); + + anoTable.NumberOfCharactersToUseInAnonymousRepresentation = 63; + anoTable.RevertToDatabaseState(); + Assert.AreEqual(1, anoTable.NumberOfCharactersToUseInAnonymousRepresentation); + anoTable.DeleteInDatabase(); + } + + [Test] + public void CreateAnANOTable_Check() + { + var anoTable = GetANOTable(); + Assert.AreEqual("ANOMyTable", anoTable.TableName); + anoTable.Check(new AcceptAllCheckNotifier()); + anoTable.DeleteInDatabase(); + } - [Test] - public void CreateAnANOTable_Check() + [Test] + public void DuplicateSuffix_Throws() + { + var anoTable = GetANOTable(); + try + { + var ex = Assert.Throws(() => + new ANOTable(CatalogueRepository, anoTable.Server, "DuplicateSuffix", anoTable.Suffix)); + Assert.AreEqual("There is already another ANOTable with the suffix 'A'", ex.Message); + } + finally { - var anoTable = GetANOTable(); - Assert.AreEqual("ANOMyTable", anoTable.TableName); - anoTable.Check(new AcceptAllCheckNotifier()); anoTable.DeleteInDatabase(); } + } - [Test] - public void DuplicateSuffix_Throws() + [Test] + public void CreateAnANOTable_CharCountNegative() + { + var anoTable = GetANOTable(); + try { - var anoTable = GetANOTable(); - try - { - var ex = Assert.Throws(()=>new ANOTable(CatalogueRepository, anoTable.Server, "DuplicateSuffix", anoTable.Suffix)); - Assert.AreEqual("There is already another ANOTable with the suffix 'A'", ex.Message); - } - finally - { - anoTable.DeleteInDatabase(); - } + anoTable.NumberOfCharactersToUseInAnonymousRepresentation = -500; + var ex = Assert.Throws(anoTable.SaveToDatabase); + Assert.AreEqual("NumberOfCharactersToUseInAnonymousRepresentation cannot be negative", ex.Message); } - - [Test] - public void CreateAnANOTable_CharCountNegative() + finally { - var anoTable = GetANOTable(); - try - { - anoTable.NumberOfCharactersToUseInAnonymousRepresentation = -500; - var ex = Assert.Throws(anoTable.SaveToDatabase); - Assert.AreEqual("NumberOfCharactersToUseInAnonymousRepresentation cannot be negative",ex.Message); - } - finally - { - anoTable.DeleteInDatabase(); - } - + anoTable.DeleteInDatabase(); } - - [Test] - public void CreateAnANOTable_IntCountNegative() + } + + [Test] + public void CreateAnANOTable_IntCountNegative() + { + var anoTable = GetANOTable(); + + try { - ANOTable anoTable = GetANOTable(); - - try - { - anoTable.NumberOfIntegersToUseInAnonymousRepresentation = -500; - var ex = Assert.Throws(anoTable.SaveToDatabase); - Assert.AreEqual("NumberOfIntegersToUseInAnonymousRepresentation cannot be negative", ex.Message); - } - finally - { - anoTable.DeleteInDatabase(); - } - + anoTable.NumberOfIntegersToUseInAnonymousRepresentation = -500; + var ex = Assert.Throws(anoTable.SaveToDatabase); + Assert.AreEqual("NumberOfIntegersToUseInAnonymousRepresentation cannot be negative", ex.Message); } - - [Test] - public 
void CreateAnANOTable_TotalCountZero() + finally { - var anoTable = GetANOTable(); - try - { - anoTable.NumberOfIntegersToUseInAnonymousRepresentation = 0; - anoTable.NumberOfCharactersToUseInAnonymousRepresentation = 0; - var ex = Assert.Throws(anoTable.SaveToDatabase); - Assert.AreEqual("Anonymous representations must have at least 1 integer or character",ex.Message); - } - finally - { - anoTable.DeleteInDatabase(); - } + anoTable.DeleteInDatabase(); } - #endregion + } - [Test] - public void SubstituteANOIdentifiers_2CHINumbers() + [Test] + public void CreateAnANOTable_TotalCountZero() + { + var anoTable = GetANOTable(); + try { - var anoTable = GetANOTable(); + anoTable.NumberOfIntegersToUseInAnonymousRepresentation = 0; anoTable.NumberOfCharactersToUseInAnonymousRepresentation = 0; - anoTable.NumberOfIntegersToUseInAnonymousRepresentation = 10; - anoTable.PushToANOServerAsNewTable("varchar(10)",new ThrowImmediatelyCheckNotifier()); + var ex = Assert.Throws(anoTable.SaveToDatabase); + Assert.AreEqual("Anonymous representations must have at least 1 integer or character", ex.Message); + } + finally + { + anoTable.DeleteInDatabase(); + } + } + #endregion - DataTable dt = new DataTable(); - dt.Columns.Add("CHI"); - dt.Columns.Add("ANOCHI"); + [Test] + public void SubstituteANOIdentifiers_2CHINumbers() + { + var anoTable = GetANOTable(); + anoTable.NumberOfCharactersToUseInAnonymousRepresentation = 0; + anoTable.NumberOfIntegersToUseInAnonymousRepresentation = 10; + anoTable.PushToANOServerAsNewTable("varchar(10)", ThrowImmediatelyCheckNotifier.Quiet); - dt.Rows.Add("0101010101",DBNull.Value);//duplicates - dt.Rows.Add("0101010102",DBNull.Value); - dt.Rows.Add("0101010101",DBNull.Value);//duplicates - ANOTransformer transformer = new ANOTransformer(anoTable, new ThrowImmediatelyDataLoadEventListener()); - transformer.Transform(dt,dt.Columns["CHI"],dt.Columns["ANOCHI"]); + var dt = new DataTable(); + dt.Columns.Add("CHI"); + dt.Columns.Add("ANOCHI"); - Assert.IsTrue((string) dt.Rows[0][0] == "0101010101"); - Assert.IsTrue(_anochiPattern.IsMatch((string) dt.Rows[0][1]));//should be 10 digits and then _A - Assert.AreEqual(dt.Rows[0][1], dt.Rows[2][1]);//because of duplication these should both be the same + dt.Rows.Add("0101010101", DBNull.Value); //duplicates + dt.Rows.Add("0101010102", DBNull.Value); + dt.Rows.Add("0101010101", DBNull.Value); //duplicates - Console.WriteLine("ANO identifiers created were:" + dt.Rows[0][1] + "," +dt.Rows[1][1]); + var transformer = new ANOTransformer(anoTable, ThrowImmediatelyDataLoadEventListener.Quiet); + transformer.Transform(dt, dt.Columns["CHI"], dt.Columns["ANOCHI"]); - TruncateANOTable(anoTable); + Assert.IsTrue((string)dt.Rows[0][0] == "0101010101"); + Assert.IsTrue(_anochiPattern.IsMatch((string)dt.Rows[0][1])); //should be 10 digits and then _A + Assert.AreEqual(dt.Rows[0][1], dt.Rows[2][1]); //because of duplication these should both be the same - //now test previews - transformer.Transform(dt,dt.Columns["CHI"],dt.Columns["ANOCHI"], true); - var val1 = dt.Rows[0][1]; + Console.WriteLine($"ANO identifiers created were:{dt.Rows[0][1]},{dt.Rows[1][1]}"); - transformer.Transform(dt, dt.Columns["CHI"], dt.Columns["ANOCHI"], true); - var val2 = dt.Rows[0][1]; + TruncateANOTable(anoTable); - transformer.Transform(dt, dt.Columns["CHI"], dt.Columns["ANOCHI"], true); - var val3 = dt.Rows[0][1]; + //now test previews + transformer.Transform(dt, dt.Columns["CHI"], dt.Columns["ANOCHI"], true); + var val1 = dt.Rows[0][1]; - //should always be different - 
Assert.AreNotEqual(val1,val2); - Assert.AreNotEqual(val1, val3); + transformer.Transform(dt, dt.Columns["CHI"], dt.Columns["ANOCHI"], true); + var val2 = dt.Rows[0][1]; - //now test repeatability - transformer.Transform(dt, dt.Columns["CHI"], dt.Columns["ANOCHI"], false); - var val4 = dt.Rows[0][1]; + transformer.Transform(dt, dt.Columns["CHI"], dt.Columns["ANOCHI"], true); + var val3 = dt.Rows[0][1]; - transformer.Transform(dt, dt.Columns["CHI"], dt.Columns["ANOCHI"], false); - var val5 = dt.Rows[0][1]; + //should always be different + Assert.AreNotEqual(val1, val2); + Assert.AreNotEqual(val1, val3); - transformer.Transform(dt, dt.Columns["CHI"], dt.Columns["ANOCHI"], false); - var val6 = dt.Rows[0][1]; - Assert.AreEqual(val4,val5); - Assert.AreEqual(val4, val6); + //now test repeatability + transformer.Transform(dt, dt.Columns["CHI"], dt.Columns["ANOCHI"], false); + var val4 = dt.Rows[0][1]; - TruncateANOTable(anoTable); - - anoTable.DeleteInDatabase(); - } + transformer.Transform(dt, dt.Columns["CHI"], dt.Columns["ANOCHI"], false); + var val5 = dt.Rows[0][1]; - [Test] - public void SubstituteANOIdentifiers_PreviewWithoutPush() - { - - var anoTable = GetANOTable(); - anoTable.NumberOfCharactersToUseInAnonymousRepresentation = 0; - anoTable.NumberOfIntegersToUseInAnonymousRepresentation = 10; + transformer.Transform(dt, dt.Columns["CHI"], dt.Columns["ANOCHI"], false); + var val6 = dt.Rows[0][1]; + Assert.AreEqual(val4, val5); + Assert.AreEqual(val4, val6); - DiscoveredTable ANOtable = ANOStore_Database.ExpectTable(anoTable.TableName); + TruncateANOTable(anoTable); - //should not exist yet - Assert.False(ANOtable.Exists()); - - DataTable dt = new DataTable(); - dt.Columns.Add("CHI"); - dt.Columns.Add("ANOCHI"); - dt.Rows.Add("0101010101", DBNull.Value); - ANOTransformer transformer = new ANOTransformer(anoTable, new ThrowImmediatelyDataLoadEventListener()); - transformer.Transform(dt, dt.Columns["CHI"], dt.Columns["ANOCHI"], true); + anoTable.DeleteInDatabase(); + } - Assert.IsTrue(_anochiPattern.IsMatch((string)dt.Rows[0][1]));//should be 10 digits and then _A - - //still not exist yet - Assert.False(ANOtable.Exists()); + [Test] + public void SubstituteANOIdentifiers_PreviewWithoutPush() + { + var anoTable = GetANOTable(); + anoTable.NumberOfCharactersToUseInAnonymousRepresentation = 0; + anoTable.NumberOfIntegersToUseInAnonymousRepresentation = 10; - anoTable.DeleteInDatabase(); - } + var ANOtable = ANOStore_Database.ExpectTable(anoTable.TableName); + //should not exist yet + Assert.False(ANOtable.Exists()); - [Test] - public void SubstituteANOIdentifiers_BulkTest() - { - const int batchSize = 10000; + using var dt = new DataTable(); + dt.Columns.Add("CHI"); + dt.Columns.Add("ANOCHI"); + dt.Rows.Add("0101010101", DBNull.Value); + var transformer = new ANOTransformer(anoTable, ThrowImmediatelyDataLoadEventListener.Quiet); + transformer.Transform(dt, dt.Columns["CHI"], dt.Columns["ANOCHI"], true); - var anoTable = GetANOTable(); - anoTable.NumberOfCharactersToUseInAnonymousRepresentation = 0; - anoTable.NumberOfIntegersToUseInAnonymousRepresentation = 10; - anoTable.PushToANOServerAsNewTable("varchar(10)", new ThrowImmediatelyCheckNotifier()); + Assert.IsTrue(_anochiPattern.IsMatch((string)dt.Rows[0][1])); //should be 10 digits and then _A - - Stopwatch sw = new Stopwatch(); - sw.Start(); + //still not exist yet + Assert.False(ANOtable.Exists()); - DataTable dt = new DataTable(); - dt.Columns.Add("CHI"); - dt.Columns.Add("ANOCHI"); + anoTable.DeleteInDatabase(); + } - Random r = new 
Random(); - HashSet uniqueSourceSet = new HashSet(); + [Test] + public void SubstituteANOIdentifiers_BulkTest() + { + const int batchSize = 10000; + var anoTable = GetANOTable(); + anoTable.NumberOfCharactersToUseInAnonymousRepresentation = 0; + anoTable.NumberOfIntegersToUseInAnonymousRepresentation = 10; + anoTable.PushToANOServerAsNewTable("varchar(10)", ThrowImmediatelyCheckNotifier.Quiet); - for (int i = 0; i < batchSize; i++) - { - var val = r.NextDouble() * 9999999999; - val = Math.Round(val); - string valAsString = val.ToString(); - - while (valAsString.Length < 10) - valAsString = "0" + valAsString; - if (!uniqueSourceSet.Contains(valAsString)) - uniqueSourceSet.Add(valAsString); + var sw = new Stopwatch(); + sw.Start(); - dt.Rows.Add(valAsString, DBNull.Value);//duplicates - } - Console.WriteLine("Time to allocate in C# memory:"+sw.Elapsed); - Console.WriteLine("Allocated " + dt.Rows.Count + " identifiers (" + uniqueSourceSet.Count() + " unique ones)"); + using var dt = new DataTable(); + dt.BeginLoadData(); + dt.Columns.Add("CHI"); + dt.Columns.Add("ANOCHI"); - sw.Reset(); - sw.Start(); + var r = new Random(); - ANOTransformer transformer = new ANOTransformer(anoTable, new ThrowImmediatelyDataLoadEventListener()); - transformer.Transform(dt, dt.Columns["CHI"], dt.Columns["ANOCHI"]); - Console.WriteLine("Time to perform SQL transform and allocation:" + sw.Elapsed); + var uniqueSourceSet = new HashSet(); - sw.Reset(); - sw.Start(); - HashSet uniqueSet = new HashSet(); - foreach (DataRow row in dt.Rows) - { - var ANOid= row["ANOCHI"].ToString(); - if (!uniqueSet.Contains(ANOid)) - uniqueSet.Add(ANOid); + for (var i = 0; i < batchSize; i++) + { + var val = r.NextDouble() * 9999999999; + val = Math.Round(val); + var valAsString = val.ToString(CultureInfo.InvariantCulture); - Assert.IsTrue(_anochiPattern.IsMatch(ANOid)); - } + while (valAsString.Length < 10) + valAsString = $"0{valAsString}"; - Console.WriteLine("Allocated " + uniqueSet.Count + " anonymous identifiers"); + uniqueSourceSet.Add(valAsString); + dt.Rows.Add(valAsString, DBNull.Value); //duplicates + } - var server = ANOStore_Database.Server; - using (var con = server.GetConnection()) - { - con.Open(); + Console.WriteLine($"Time to allocate in C# memory:{sw.Elapsed}"); + Console.WriteLine($"Allocated {dt.Rows.Count} identifiers ({uniqueSourceSet.Count} unique ones)"); - var cmd = server.GetCommand("Select count(*) from ANOMyTable", con); - int numberOfRows = Convert.ToInt32(cmd.ExecuteScalar()); + sw.Reset(); + sw.Start(); - //should be the same number of unique identifiers in memory as in the database - Assert.AreEqual(uniqueSet.Count,numberOfRows); - Console.WriteLine("Found " + numberOfRows + " unique ones"); + var transformer = new ANOTransformer(anoTable, ThrowImmediatelyDataLoadEventListener.Quiet); + transformer.Transform(dt, dt.Columns["CHI"], dt.Columns["ANOCHI"]); + Console.WriteLine($"Time to perform SQL transform and allocation:{sw.Elapsed}"); - var cmdNulls = server.GetCommand("select count(*) from ANOMyTable where ANOMyTable is null", con); - int nulls = Convert.ToInt32(cmdNulls.ExecuteScalar()); - Assert.AreEqual(0,nulls); - Console.WriteLine("Found " + nulls + " null ANO identifiers"); + sw.Reset(); + sw.Start(); + var uniqueSet = new HashSet(); - con.Close(); - } - sw.Stop(); - Console.WriteLine("Time to evaluate results:" + sw.Elapsed); - TruncateANOTable(anoTable); + foreach (DataRow row in dt.Rows) + { + var ANOid = row["ANOCHI"].ToString(); + uniqueSet.Add(ANOid); - anoTable.DeleteInDatabase(); + 
Assert.IsTrue(_anochiPattern.IsMatch(ANOid)); } - /// - /// Creates a new ANOTable called ANOMyTable in the Data Catalogue (and cleans SetUp any old copy kicking around), you will need to set its properties and - /// call PushToANOServerAsNewTable if you want to use it with an ANOTransformer - /// - /// - protected ANOTable GetANOTable() + Console.WriteLine($"Allocated {uniqueSet.Count} anonymous identifiers"); + + + var server = ANOStore_Database.Server; + using (var con = server.GetConnection()) { - const string name = "ANOMyTable"; + con.Open(); - var toCleanup = CatalogueRepository.GetAllObjects().SingleOrDefault(a => a.TableName.Equals(name)); + var cmd = server.GetCommand("Select count(*) from ANOMyTable", con); + var numberOfRows = Convert.ToInt32(cmd.ExecuteScalar()); - if (toCleanup != null) - toCleanup.DeleteInDatabase(); + //should be the same number of unique identifiers in memory as in the database + Assert.AreEqual(uniqueSet.Count, numberOfRows); + Console.WriteLine($"Found {numberOfRows} unique ones"); - return new ANOTable(CatalogueRepository, ANOStore_ExternalDatabaseServer, name, "A"); + var cmdNulls = server.GetCommand("select count(*) from ANOMyTable where ANOMyTable is null", con); + var nulls = Convert.ToInt32(cmdNulls.ExecuteScalar()); + Assert.AreEqual(0, nulls); + Console.WriteLine($"Found {nulls} null ANO identifiers"); + + con.Close(); } + + sw.Stop(); + Console.WriteLine($"Time to evaluate results:{sw.Elapsed}"); + TruncateANOTable(anoTable); + + anoTable.DeleteInDatabase(); + } + + /// + /// Creates a new ANOTable called ANOMyTable in the Data Catalogue (and cleans SetUp any old copy kicking around), you will need to set its properties and + /// call PushToANOServerAsNewTable if you want to use it with an ANOTransformer + /// + /// + protected ANOTable GetANOTable() + { + const string name = "ANOMyTable"; + + var toCleanup = CatalogueRepository.GetAllObjects().SingleOrDefault(a => a.TableName.Equals(name)); + + toCleanup?.DeleteInDatabase(); + + return new ANOTable(CatalogueRepository, ANOStore_ExternalDatabaseServer, name, "A"); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Anonymisation/ForwardEngineerANOCatalogueTwoTableTests.cs b/Rdmp.Core.Tests/Curation/Anonymisation/ForwardEngineerANOCatalogueTwoTableTests.cs index 7326e673e8..131e06b861 100644 --- a/Rdmp.Core.Tests/Curation/Anonymisation/ForwardEngineerANOCatalogueTwoTableTests.cs +++ b/Rdmp.Core.Tests/Curation/Anonymisation/ForwardEngineerANOCatalogueTwoTableTests.cs @@ -13,41 +13,38 @@ using Rdmp.Core.Curation.Data; using Rdmp.Core.Curation.Data.DataLoad; using Rdmp.Core.QueryBuilding; -using ReusableLibraryCode; -using ReusableLibraryCode.Checks; -using Tests.Common; +using Rdmp.Core.ReusableLibraryCode; +using Rdmp.Core.ReusableLibraryCode.Checks; using Tests.Common.Scenarios; -namespace Rdmp.Core.Tests.Curation.Anonymisation -{ - public class ForwardEngineerANOCatalogueTwoTableTests : TestsRequiringANOStore - { +namespace Rdmp.Core.Tests.Curation.Anonymisation; - ITableInfo t1; - ColumnInfo[] c1; +public class ForwardEngineerANOCatalogueTwoTableTests : TestsRequiringANOStore +{ + private ITableInfo t1; + private ColumnInfo[] c1; - ITableInfo t2; - ColumnInfo[] c2; + private ITableInfo t2; + private ColumnInfo[] c2; - private ICatalogue cata1; - private ICatalogue cata2; + private ICatalogue cata1; + private ICatalogue cata2; - private CatalogueItem[] cataItems1; - private CatalogueItem[] cataItems2; + private CatalogueItem[] cataItems1; + private CatalogueItem[] 
cataItems2; - private ExtractionInformation[] eis1; - private ExtractionInformation[] eis2; - private ANOTable _anoTable; - private Catalogue _comboCata; - private DiscoveredDatabase _destinationDatabase; + private ExtractionInformation[] eis1; + private ExtractionInformation[] eis2; + private ANOTable _anoTable; + private Catalogue _comboCata; + private DiscoveredDatabase _destinationDatabase; - [SetUp] - protected override void SetUp() - { - base.SetUp(); + [SetUp] + protected override void SetUp() + { + base.SetUp(); - string sql = - @"CREATE TABLE [dbo].[Tests]( + const string sql = @"CREATE TABLE [dbo].[Tests]( [chi] [varchar](10) NULL, [Date] [datetime] NULL, [hb_extract] [varchar](1) NULL, @@ -76,123 +73,126 @@ [Measure] ASC ALTER TABLE [dbo].[Results] WITH CHECK ADD CONSTRAINT [FK_Results_Tests] FOREIGN KEY([TestId]) REFERENCES [dbo].[Tests] ([TestId]) GO"; - - var server = From.Server; - using (var con = server.GetConnection()) - { - con.Open(); - UsefulStuff.ExecuteBatchNonQuery(sql,con); - } - - var importer1 = new TableInfoImporter(CatalogueRepository, From.ExpectTable("Tests")); - var importer2 = new TableInfoImporter(CatalogueRepository, From.ExpectTable("Results")); - - importer1.DoImport(out t1,out c1); - - importer2.DoImport(out t2, out c2); - - var engineer1 = new ForwardEngineerCatalogue(t1, c1); - var engineer2 = new ForwardEngineerCatalogue(t2, c2); - - engineer1.ExecuteForwardEngineering(out cata1,out cataItems1,out eis1); - engineer2.ExecuteForwardEngineering(out cata2, out cataItems2, out eis2); - - new JoinInfo(CatalogueRepository, - c1.Single(e => e.GetRuntimeName().Equals("TestId")), - c2.Single(e => e.GetRuntimeName().Equals("TestId")), - ExtractionJoinType.Left,null); - - _anoTable = new ANOTable(CatalogueRepository, ANOStore_ExternalDatabaseServer, "ANOTes", "T"); - _anoTable.NumberOfCharactersToUseInAnonymousRepresentation = 10; - _anoTable.SaveToDatabase(); - _anoTable.PushToANOServerAsNewTable("int",new ThrowImmediatelyCheckNotifier()); - - _comboCata = new Catalogue(CatalogueRepository, "Combo Catalogue"); - - //pk - var ciTestId = new CatalogueItem(CatalogueRepository, _comboCata, "TestId"); - var colTestId = c1.Single(c => c.GetRuntimeName().Equals("TestId")); - ciTestId.ColumnInfo_ID = colTestId.ID; - ciTestId.SaveToDatabase(); - var eiTestId = new ExtractionInformation(CatalogueRepository, ciTestId, colTestId, colTestId.Name); - - //Measure - var ciMeasure = new CatalogueItem(CatalogueRepository, _comboCata, "Measuree"); - var colMeasure = c2.Single(c => c.GetRuntimeName().Equals("Measure")); - ciMeasure.ColumnInfo_ID = colMeasure.ID; - ciMeasure.SaveToDatabase(); - var eiMeasure = new ExtractionInformation(CatalogueRepository, ciMeasure,colMeasure, colMeasure.Name); - - //Date - var ciDate = new CatalogueItem(CatalogueRepository, _comboCata, "Dat"); - - var colDate = c1.Single(c => c.GetRuntimeName().Equals("Date")); - ciDate.ColumnInfo_ID = colDate.ID; - ciDate.SaveToDatabase(); - var eiDate = new ExtractionInformation(CatalogueRepository, ciDate, colDate, colDate.Name); - - _destinationDatabase = To; + + var server = From.Server; + using (var con = server.GetConnection()) + { + con.Open(); + UsefulStuff.ExecuteBatchNonQuery(sql, con); } + var importer1 = new TableInfoImporter(CatalogueRepository, From.ExpectTable("Tests")); + var importer2 = new TableInfoImporter(CatalogueRepository, From.ExpectTable("Results")); + + importer1.DoImport(out t1, out c1); + + importer2.DoImport(out t2, out c2); + + var engineer1 = new ForwardEngineerCatalogue(t1, c1); 
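// The importer/engineer pairs in this SetUp follow the pattern that recurs throughout these tests:
// import the physical table's structure, then forward-engineer a Catalogue from it. A minimal sketch
// of one such pair, assuming the TestsRequiringANOStore base class supplies CatalogueRepository and
// the From scratch database (the table name and "Sketch" variable names are illustrative only):
var importerSketch = new TableInfoImporter(CatalogueRepository, From.ExpectTable("Tests"));
importerSketch.DoImport(out ITableInfo tableInfoSketch, out ColumnInfo[] columnInfosSketch);   // catalogue the physical table
var engineerSketch = new ForwardEngineerCatalogue(tableInfoSketch, columnInfosSketch);
engineerSketch.ExecuteForwardEngineering(out var cataSketch, out var itemsSketch, out var eisSketch); // build Catalogue, CatalogueItems and ExtractionInformations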
+ var engineer2 = new ForwardEngineerCatalogue(t2, c2); - [Test] - public void TestAnonymisingJoinKey() + engineer1.ExecuteForwardEngineering(out cata1, out cataItems1, out eis1); + engineer2.ExecuteForwardEngineering(out cata2, out cataItems2, out eis2); + + new JoinInfo(CatalogueRepository, + c1.Single(e => e.GetRuntimeName().Equals("TestId")), + c2.Single(e => e.GetRuntimeName().Equals("TestId")), + ExtractionJoinType.Left, null); + + _anoTable = new ANOTable(CatalogueRepository, ANOStore_ExternalDatabaseServer, "ANOTes", "T") { - //Create a plan for the first Catlogue (Tests) - single Table dataset - var plan1 = new ForwardEngineerANOCataloguePlanManager(RepositoryLocator, cata1); - var testIdHeadPlan = plan1.GetPlanForColumnInfo(c1.Single(c => c.GetRuntimeName().Equals("TestId"))); - plan1.TargetDatabase = _destinationDatabase; + NumberOfCharactersToUseInAnonymousRepresentation = 10 + }; + _anoTable.SaveToDatabase(); + _anoTable.PushToANOServerAsNewTable("int", ThrowImmediatelyCheckNotifier.Quiet); + + _comboCata = new Catalogue(CatalogueRepository, "Combo Catalogue"); + + //pk + var ciTestId = new CatalogueItem(CatalogueRepository, _comboCata, "TestId"); + var colTestId = c1.Single(c => c.GetRuntimeName().Equals("TestId")); + ciTestId.ColumnInfo_ID = colTestId.ID; + ciTestId.SaveToDatabase(); + var eiTestId = new ExtractionInformation(CatalogueRepository, ciTestId, colTestId, colTestId.Name); + + //Measure + var ciMeasure = new CatalogueItem(CatalogueRepository, _comboCata, "Measuree"); + var colMeasure = c2.Single(c => c.GetRuntimeName().Equals("Measure")); + ciMeasure.ColumnInfo_ID = colMeasure.ID; + ciMeasure.SaveToDatabase(); + var eiMeasure = new ExtractionInformation(CatalogueRepository, ciMeasure, colMeasure, colMeasure.Name); + + //Date + var ciDate = new CatalogueItem(CatalogueRepository, _comboCata, "Dat"); + + var colDate = c1.Single(c => c.GetRuntimeName().Equals("Date")); + ciDate.ColumnInfo_ID = colDate.ID; + ciDate.SaveToDatabase(); + var eiDate = new ExtractionInformation(CatalogueRepository, ciDate, colDate, colDate.Name); + + _destinationDatabase = To; + } - //the plan is that the column TestId should be anonymised - where its name will become ANOTestId - testIdHeadPlan.Plan = Plan.ANO; - testIdHeadPlan.ANOTable = _anoTable; - plan1.Check(new ThrowImmediatelyCheckNotifier()); - - var engine1 = new ForwardEngineerANOCatalogueEngine(RepositoryLocator, plan1); - engine1.Execute(); + [Test] + public void TestAnonymisingJoinKey() + { + //Create a plan for the first Catalogue (Tests) - single Table dataset + var plan1 = new ForwardEngineerANOCataloguePlanManager(RepositoryLocator, cata1); + var testIdHeadPlan = plan1.GetPlanForColumnInfo(c1.Single(c => c.GetRuntimeName().Equals("TestId"))); + plan1.TargetDatabase = _destinationDatabase; + + //the plan is that the column TestId should be anonymised - where its name will become ANOTestId + testIdHeadPlan.Plan = Plan.ANO; + testIdHeadPlan.ANOTable = _anoTable; - var plan1ExtractionInformationsAtDestination = engine1.NewCatalogue.GetAllExtractionInformation(ExtractionCategory.Any); + plan1.Check(ThrowImmediatelyCheckNotifier.Quiet); - var ei1 = plan1ExtractionInformationsAtDestination.Single(e => e.GetRuntimeName().Equals("ANOTestId")); - Assert.IsTrue(ei1.Exists()); + var engine1 = new ForwardEngineerANOCatalogueEngine(RepositoryLocator, plan1); + engine1.Execute(); - //Now create a plan for the combo Catalogue which contains references to both tables (Tests and Results). 
Remember Tests has already been migrated as part of plan1 - var plan2 = new ForwardEngineerANOCataloguePlanManager(RepositoryLocator, _comboCata); - - //tell it to skip table 1 (Tests) and only anonymise Results - plan2.SkippedTables.Add(t1); - plan2.TargetDatabase = _destinationDatabase; - plan2.Check(new ThrowImmediatelyCheckNotifier()); + var plan1ExtractionInformationsAtDestination = + engine1.NewCatalogue.GetAllExtractionInformation(ExtractionCategory.Any); - //Run the anonymisation - var engine2 = new ForwardEngineerANOCatalogueEngine(RepositoryLocator,plan2); - engine2.Execute(); + var ei1 = plan1ExtractionInformationsAtDestination.Single(e => e.GetRuntimeName().Equals("ANOTestId")); + Assert.IsTrue(ei1.Exists()); - //Did it succesfully pick SetUp the correct ANO column - var plan2ExtractionInformationsAtDestination = engine2.NewCatalogue.GetAllExtractionInformation(ExtractionCategory.Any); + //Now create a plan for the combo Catalogue which contains references to both tables (Tests and Results). Remember Tests has already been migrated as part of plan1 + var plan2 = new ForwardEngineerANOCataloguePlanManager(RepositoryLocator, _comboCata); - var ei2 = plan2ExtractionInformationsAtDestination.Single(e => e.GetRuntimeName().Equals("ANOTestId")); - Assert.IsTrue(ei2.Exists()); + //tell it to skip table 1 (Tests) and only anonymise Results + plan2.SkippedTables.Add(t1); + plan2.TargetDatabase = _destinationDatabase; + plan2.Check(ThrowImmediatelyCheckNotifier.Quiet); - //and can the query be executed succesfully - var qb = new QueryBuilder(null, null); - qb.AddColumnRange(plan2ExtractionInformationsAtDestination); + //Run the anonymisation + var engine2 = new ForwardEngineerANOCatalogueEngine(RepositoryLocator, plan2); + engine2.Execute(); - using (var con = _destinationDatabase.Server.GetConnection()) - { - con.Open(); + //Did it successfully pick SetUp the correct ANO column + var plan2ExtractionInformationsAtDestination = + engine2.NewCatalogue.GetAllExtractionInformation(ExtractionCategory.Any); - var cmd = _destinationDatabase.Server.GetCommand(qb.SQL, con); - - Assert.DoesNotThrow(()=>cmd.ExecuteNonQuery()); - } + var ei2 = plan2ExtractionInformationsAtDestination.Single(e => e.GetRuntimeName().Equals("ANOTestId")); + Assert.IsTrue(ei2.Exists()); - Console.WriteLine("Final migrated combo dataset SQL was:" + qb.SQL); + //and can the query be executed successfully + var qb = new QueryBuilder(null, null); + qb.AddColumnRange(plan2ExtractionInformationsAtDestination); - Assert.IsTrue(_comboCata.CatalogueItems.Any(ci => ci.Name.Equals("Measuree"))); - Assert.IsTrue(engine2.NewCatalogue.CatalogueItems.Any(ci => ci.Name.Equals("Measuree")),"ANO Catalogue did not respect the original CatalogueItem Name"); + using (var con = _destinationDatabase.Server.GetConnection()) + { + con.Open(); + + var cmd = _destinationDatabase.Server.GetCommand(qb.SQL, con); + + Assert.DoesNotThrow(() => cmd.ExecuteNonQuery()); } + Console.WriteLine($"Final migrated combo dataset SQL was:{qb.SQL}"); + + Assert.IsTrue(_comboCata.CatalogueItems.Any(ci => ci.Name.Equals("Measuree"))); + Assert.IsTrue(engine2.NewCatalogue.CatalogueItems.Any(ci => ci.Name.Equals("Measuree")), + "ANO Catalogue did not respect the original CatalogueItem Name"); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Anonymisation/IdentifierDumpFunctionalityTests.cs b/Rdmp.Core.Tests/Curation/Anonymisation/IdentifierDumpFunctionalityTests.cs index 528a5f1fa7..4ea7992673 100644 --- 
a/Rdmp.Core.Tests/Curation/Anonymisation/IdentifierDumpFunctionalityTests.cs +++ b/Rdmp.Core.Tests/Curation/Anonymisation/IdentifierDumpFunctionalityTests.cs @@ -8,370 +8,384 @@ using System.Collections.Generic; using System.Data; using System.Linq; -using FAnsi.Discovery; using NUnit.Framework; using Rdmp.Core.Curation; using Rdmp.Core.Curation.Data; using Rdmp.Core.Curation.Data.DataLoad; using Rdmp.Core.DataLoad.Engine.Pipeline.Components.Anonymisation; -using ReusableLibraryCode.Checks; -using ReusableLibraryCode.DataAccess; +using Rdmp.Core.ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.DataAccess; using Tests.Common.Scenarios; -namespace Rdmp.Core.Tests.Curation.Anonymisation +namespace Rdmp.Core.Tests.Curation.Anonymisation; + +public class IdentifierDumpFunctionalityTests : TestsRequiringFullAnonymisationSuite { - public class IdentifierDumpFunctionalityTests:TestsRequiringFullAnonymisationSuite + private ITableInfo tableInfoCreated; + private ColumnInfo[] columnInfosCreated; + + private BulkTestsData _bulkData; + + [OneTimeSetUp] + protected override void OneTimeSetUp() { - private ITableInfo tableInfoCreated; - ColumnInfo[] columnInfosCreated; - - BulkTestsData _bulkData; + base.OneTimeSetUp(); - [OneTimeSetUp] - protected override void OneTimeSetUp() - { - base.OneTimeSetUp(); + Console.WriteLine("Setting SetUp bulk test data"); + _bulkData = new BulkTestsData(RepositoryLocator.CatalogueRepository, + GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer)); + _bulkData.SetupTestData(); - Console.WriteLine("Setting SetUp bulk test data"); - _bulkData = new BulkTestsData(RepositoryLocator.CatalogueRepository, GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer)); - _bulkData.SetupTestData(); - - Console.WriteLine("Importing to Catalogue"); - var tbl = _bulkData.Table; - TableInfoImporter importer = new TableInfoImporter(CatalogueRepository, tbl); + Console.WriteLine("Importing to Catalogue"); + var tbl = _bulkData.Table; + var importer = new TableInfoImporter(CatalogueRepository, tbl); - importer.DoImport(out tableInfoCreated,out columnInfosCreated); - - Console.WriteLine("Imported TableInfo " + tableInfoCreated); - Console.WriteLine("Imported ColumnInfos " + string.Join(",",columnInfosCreated.Select(c=>c.GetRuntimeName()))); + importer.DoImport(out tableInfoCreated, out columnInfosCreated); - Assert.NotNull(tableInfoCreated); + Console.WriteLine($"Imported TableInfo {tableInfoCreated}"); + Console.WriteLine( + $"Imported ColumnInfos {string.Join(",", columnInfosCreated.Select(c => c.GetRuntimeName()))}"); - ColumnInfo chi = columnInfosCreated.Single(c => c.GetRuntimeName().Equals("chi")); + Assert.NotNull(tableInfoCreated); - Console.WriteLine("CHI is primary key? (expecting true):" + chi.IsPrimaryKey); - Assert.IsTrue(chi.IsPrimaryKey); + var chi = columnInfosCreated.Single(c => c.GetRuntimeName().Equals("chi")); + Console.WriteLine($"CHI is primary key? 
(expecting true):{chi.IsPrimaryKey}"); + Assert.IsTrue(chi.IsPrimaryKey); - tableInfoCreated.ColumnInfos.Single(c => c.GetRuntimeName().Equals("surname")).DeleteInDatabase(); - tableInfoCreated.ColumnInfos.Single(c => c.GetRuntimeName().Equals("forename")).DeleteInDatabase(); - tableInfoCreated.ClearAllInjections(); - } + + tableInfoCreated.ColumnInfos.Single(c => c.GetRuntimeName().Equals("surname")).DeleteInDatabase(); + tableInfoCreated.ColumnInfos.Single(c => c.GetRuntimeName().Equals("forename")).DeleteInDatabase(); + tableInfoCreated.ClearAllInjections(); + } - #region tests that pass - [Test] - public void DumpAllIdentifiersInTable_Passes() + #region tests that pass + + [Test] + public void DumpAllIdentifiersInTable_Passes() + { + var preDiscardedColumn1 = new PreLoadDiscardedColumn(CatalogueRepository, tableInfoCreated, "surname") { - var preDiscardedColumn1 = new PreLoadDiscardedColumn(CatalogueRepository, tableInfoCreated, "surname") - { - Destination = DiscardedColumnDestination.StoreInIdentifiersDump, - SqlDataType = "varchar(20)" - }; - preDiscardedColumn1.SaveToDatabase(); + Destination = DiscardedColumnDestination.StoreInIdentifiersDump, + SqlDataType = "varchar(20)" + }; + preDiscardedColumn1.SaveToDatabase(); - //give it the correct server - tableInfoCreated.IdentifierDumpServer_ID = IdentifierDump_ExternalDatabaseServer.ID; - tableInfoCreated.SaveToDatabase(); + //give it the correct server + tableInfoCreated.IdentifierDumpServer_ID = IdentifierDump_ExternalDatabaseServer.ID; + tableInfoCreated.SaveToDatabase(); - IdentifierDumper dumper = new IdentifierDumper(tableInfoCreated); + var dumper = new IdentifierDumper(tableInfoCreated); - var chiToSurnameDictionary = new Dictionary>(); - try - { - dumper.Check(new AcceptAllCheckNotifier()); - - DataTable dt = _bulkData.GetDataTable(1000); - - Assert.AreEqual(1000,dt.Rows.Count); - Assert.IsTrue(dt.Columns.Contains("surname")); - - //for checking the final ID table has the correct values in - foreach (DataRow row in dt.Rows) - { - var chi = row["chi"].ToString(); - - if(!chiToSurnameDictionary.ContainsKey(chi)) - chiToSurnameDictionary.Add(chi,new HashSet()); - - chiToSurnameDictionary[chi].Add(row["surname"] as string); - } - - dumper.CreateSTAGINGTable(); - dumper.DumpAllIdentifiersInTable(dt); - dumper.DropStaging(); - - //confirm that the surname column is no longer in the pipeline - Assert.IsFalse(dt.Columns.Contains("surname")); - - //now look at the ids in the identifier dump and make sure they match what was in the pipeline before we sent it - var server = IdentifierDump_Database.Server; - using (var con = server.GetConnection()) - { - con.Open(); - - var cmd = server.GetCommand("Select * from " + "ID_" + BulkTestsData.BulkDataTable, con); - var r = cmd.ExecuteReader(); - - //make sure the values in the ID table match the ones we originally had in the pipeline - while (r.Read()) - if (!chiToSurnameDictionary[r["chi"].ToString()].Any()) - Assert.IsTrue(r["surname"] == DBNull.Value); - else - Assert.IsTrue(chiToSurnameDictionary[r["chi"].ToString()].Contains(r["surname"] as string),"Dictionary did not contain expected surname:" + r["surname"]); - r.Close(); - - //leave the identifier dump in the way we found it (empty) - var tbl = IdentifierDump_Database.ExpectTable("ID_" + BulkTestsData.BulkDataTable); - - if(tbl.Exists()) - tbl.Drop(); - - tbl = IdentifierDump_Database.ExpectTable("ID_" + BulkTestsData.BulkDataTable + "_Archive"); - - if (tbl.Exists()) - tbl.Drop(); - } - } - finally + var chiToSurnameDictionary = new 
Dictionary>(); + try + { + dumper.Check(new AcceptAllCheckNotifier()); + + var dt = _bulkData.GetDataTable(1000); + + Assert.AreEqual(1000, dt.Rows.Count); + Assert.IsTrue(dt.Columns.Contains("surname")); + + //for checking the final ID table has the correct values in + foreach (DataRow row in dt.Rows) { - preDiscardedColumn1.DeleteInDatabase(); - tableInfoCreated.IdentifierDumpServer_ID = null;//reset it back to how it was when we found it - tableInfoCreated.SaveToDatabase(); + var chi = row["chi"].ToString(); + + if (!chiToSurnameDictionary.ContainsKey(chi)) + chiToSurnameDictionary.Add(chi, new HashSet()); + + chiToSurnameDictionary[chi].Add(row["surname"] as string); } + dumper.CreateSTAGINGTable(); + dumper.DumpAllIdentifiersInTable(dt); + dumper.DropStaging(); + + //confirm that the surname column is no longer in the pipeline + Assert.IsFalse(dt.Columns.Contains("surname")); + + //now look at the ids in the identifier dump and make sure they match what was in the pipeline before we sent it + var server = IdentifierDump_Database.Server; + using var con = server.GetConnection(); + con.Open(); + + var cmd = server.GetCommand($"Select * from ID_{BulkTestsData.BulkDataTable}", con); + var r = cmd.ExecuteReader(); + + //make sure the values in the ID table match the ones we originally had in the pipeline + while (r.Read()) + if (!chiToSurnameDictionary[r["chi"].ToString()].Any()) + Assert.IsTrue(r["surname"] == DBNull.Value); + else + Assert.IsTrue(chiToSurnameDictionary[r["chi"].ToString()].Contains(r["surname"] as string), + "Dictionary did not contain expected surname:" + r["surname"]); + r.Close(); + + //leave the identifier dump in the way we found it (empty) + var tbl = IdentifierDump_Database.ExpectTable($"ID_{BulkTestsData.BulkDataTable}"); + + if (tbl.Exists()) + tbl.Drop(); + + tbl = IdentifierDump_Database.ExpectTable($"ID_{BulkTestsData.BulkDataTable}_Archive"); + + if (tbl.Exists()) + tbl.Drop(); } + finally + { + preDiscardedColumn1.DeleteInDatabase(); + tableInfoCreated.IdentifierDumpServer_ID = null; //reset it back to how it was when we found it + tableInfoCreated.SaveToDatabase(); + } + } + #endregion - #endregion + #region tests that throw - #region tests that throw - [Test] - public void DumpAllIdentifiersInTable_UnexpectedColumnFoundInIdentifierDumpTable() + [Test] + public void DumpAllIdentifiersInTable_UnexpectedColumnFoundInIdentifierDumpTable() + { + var preDiscardedColumn1 = new PreLoadDiscardedColumn(CatalogueRepository, tableInfoCreated, "surname") { - var preDiscardedColumn1 = new PreLoadDiscardedColumn(CatalogueRepository, tableInfoCreated, "surname"); - preDiscardedColumn1.Destination = DiscardedColumnDestination.StoreInIdentifiersDump; - preDiscardedColumn1.SqlDataType = "varchar(20)"; - preDiscardedColumn1.SaveToDatabase(); + Destination = DiscardedColumnDestination.StoreInIdentifiersDump, + SqlDataType = "varchar(20)" + }; + preDiscardedColumn1.SaveToDatabase(); - var preDiscardedColumn2 = new PreLoadDiscardedColumn(CatalogueRepository, tableInfoCreated, "forename"); - preDiscardedColumn2.Destination = DiscardedColumnDestination.StoreInIdentifiersDump; - preDiscardedColumn2.SqlDataType = "varchar(50)"; - preDiscardedColumn2.SaveToDatabase(); - - //give it the correct server - tableInfoCreated.IdentifierDumpServer_ID = IdentifierDump_ExternalDatabaseServer.ID; - tableInfoCreated.SaveToDatabase(); + var preDiscardedColumn2 = new PreLoadDiscardedColumn(CatalogueRepository, tableInfoCreated, "forename") + { + Destination = 
DiscardedColumnDestination.StoreInIdentifiersDump, + SqlDataType = "varchar(50)" + }; + preDiscardedColumn2.SaveToDatabase(); - IdentifierDumper dumper = new IdentifierDumper(tableInfoCreated); - dumper.Check(new AcceptAllCheckNotifier()); + //give it the correct server + tableInfoCreated.IdentifierDumpServer_ID = IdentifierDump_ExternalDatabaseServer.ID; + tableInfoCreated.SaveToDatabase(); - DiscoveredTable tableInDump = IdentifierDump_Database.ExpectTable("ID_" + BulkTestsData.BulkDataTable); - Assert.IsTrue(tableInDump.Exists(), "ID table did not exist"); + var dumper = new IdentifierDumper(tableInfoCreated); + dumper.Check(new AcceptAllCheckNotifier()); + var tableInDump = IdentifierDump_Database.ExpectTable($"ID_{BulkTestsData.BulkDataTable}"); + Assert.IsTrue(tableInDump.Exists(), "ID table did not exist"); - var columnsInDump = tableInDump.DiscoverColumns().Select(c=>c.GetRuntimeName()).ToArray(); - //works and creates table on server - Assert.Contains("hic_validFrom",columnsInDump); - Assert.Contains("forename", columnsInDump); - Assert.Contains("chi", columnsInDump); - Assert.Contains("surname", columnsInDump); - //now delete it! - preDiscardedColumn2.DeleteInDatabase(); + var columnsInDump = tableInDump.DiscoverColumns().Select(c => c.GetRuntimeName()).ToArray(); + //works and creates table on server + Assert.Contains("hic_validFrom", columnsInDump); + Assert.Contains("forename", columnsInDump); + Assert.Contains("chi", columnsInDump); + Assert.Contains("surname", columnsInDump); - //now create a new dumper and watch it go crazy - IdentifierDumper dumper2 = new IdentifierDumper(tableInfoCreated); + //now delete it! + preDiscardedColumn2.DeleteInDatabase(); - var thrower = new ThrowImmediatelyCheckNotifier(); - thrower.ThrowOnWarning = true; + //now create a new dumper and watch it go crazy + var dumper2 = new IdentifierDumper(tableInfoCreated); - try - { - var ex = Assert.Throws(()=>dumper2.Check(thrower)); - Assert.AreEqual("Column forename was found in the IdentifierDump table ID_BulkData but was not one of the primary keys or a PreLoadDiscardedColumn",ex.Message); - } - finally + try + { + var ex = Assert.Throws(() => dumper2.Check(ThrowImmediatelyCheckNotifier.QuietPicky)); + Assert.AreEqual( + "Column forename was found in the IdentifierDump table ID_BulkData but was not one of the primary keys or a PreLoadDiscardedColumn", + ex?.Message); + } + finally + { + //Drop all this stuff + var server = IdentifierDump_Database.Server; + using (var con = server.GetConnection()) { - //Drop all this stuff - var server = IdentifierDump_Database.Server; - using (var con = server.GetConnection()) - { - con.Open(); - - //leave the identifier dump in the way we found it (empty) - var cmdDrop = server.GetCommand("DROP TABLE ID_" + BulkTestsData.BulkDataTable, con); - cmdDrop.ExecuteNonQuery(); - - var cmdDropArchive = server.GetCommand("DROP TABLE ID_" + BulkTestsData.BulkDataTable + "_Archive", con); - cmdDropArchive.ExecuteNonQuery(); - } - - preDiscardedColumn1.DeleteInDatabase(); - tableInfoCreated.IdentifierDumpServer_ID = null;//reset it back to how it was when we found it - tableInfoCreated.SaveToDatabase(); + con.Open(); + + //leave the identifier dump in the way we found it (empty) + var cmdDrop = server.GetCommand($"DROP TABLE ID_{BulkTestsData.BulkDataTable}", con); + cmdDrop.ExecuteNonQuery(); + var cmdDropArchive = server.GetCommand($"DROP TABLE ID_{BulkTestsData.BulkDataTable}_Archive", con); + cmdDropArchive.ExecuteNonQuery(); } + preDiscardedColumn1.DeleteInDatabase(); + 
tableInfoCreated.IdentifierDumpServer_ID = null; //reset it back to how it was when we found it + tableInfoCreated.SaveToDatabase(); } + } - [Test] - public void IdentifierDumperCheckFails_StagingNotCalled() + [Test] + public void IdentifierDumperCheckFails_StagingNotCalled() + { + var preDiscardedColumn1 = new PreLoadDiscardedColumn(CatalogueRepository, tableInfoCreated, "forename") { - var preDiscardedColumn1 = new PreLoadDiscardedColumn(CatalogueRepository, tableInfoCreated, "forename"); - preDiscardedColumn1.Destination = DiscardedColumnDestination.StoreInIdentifiersDump; - preDiscardedColumn1.SqlDataType = "varchar(50)"; - preDiscardedColumn1.SaveToDatabase(); + Destination = DiscardedColumnDestination.StoreInIdentifiersDump, + SqlDataType = "varchar(50)" + }; + preDiscardedColumn1.SaveToDatabase(); - //give it the correct server - tableInfoCreated.IdentifierDumpServer_ID = IdentifierDump_ExternalDatabaseServer.ID; - tableInfoCreated.SaveToDatabase(); + //give it the correct server + tableInfoCreated.IdentifierDumpServer_ID = IdentifierDump_ExternalDatabaseServer.ID; + tableInfoCreated.SaveToDatabase(); - IdentifierDumper dumper = new IdentifierDumper(tableInfoCreated); - try - { - dumper.Check(new AcceptAllCheckNotifier()); - var ex = Assert.Throws(()=>dumper.DumpAllIdentifiersInTable(_bulkData.GetDataTable(10))); - Assert.AreEqual("IdentifierDumper STAGING insert (ID_BulkData_STAGING) failed, make sure you have called CreateSTAGINGTable() before trying to Dump identifiers (also you should call DropStagging() when you are done)",ex.Message); - } - finally - { - preDiscardedColumn1.DeleteInDatabase(); - tableInfoCreated.IdentifierDumpServer_ID = null;//reset it back to how it was when we found it - tableInfoCreated.SaveToDatabase(); - } + var dumper = new IdentifierDumper(tableInfoCreated); + try + { + dumper.Check(new AcceptAllCheckNotifier()); + var ex = Assert.Throws(() => dumper.DumpAllIdentifiersInTable(_bulkData.GetDataTable(10))); + Assert.AreEqual( + "IdentifierDumper STAGING insert (ID_BulkData_STAGING) failed, make sure you have called CreateSTAGINGTable() before trying to Dump identifiers (also you should call DropStaging() when you are done)", + ex?.Message); } + finally + { + preDiscardedColumn1.DeleteInDatabase(); + tableInfoCreated.IdentifierDumpServer_ID = null; //reset it back to how it was when we found it + tableInfoCreated.SaveToDatabase(); + } + } - [Test] - public void IdentifierDumperCheckFails_NoTableExists() + [Test] + public void IdentifierDumperCheckFails_NoTableExists() + { + var preDiscardedColumn1 = new PreLoadDiscardedColumn(CatalogueRepository, tableInfoCreated, "forename") { - var preDiscardedColumn1 = new PreLoadDiscardedColumn(CatalogueRepository, tableInfoCreated, "forename"); - preDiscardedColumn1.Destination = DiscardedColumnDestination.StoreInIdentifiersDump; - preDiscardedColumn1.SqlDataType = "varchar(50)"; - preDiscardedColumn1.SaveToDatabase(); + Destination = DiscardedColumnDestination.StoreInIdentifiersDump, + SqlDataType = "varchar(50)" + }; + preDiscardedColumn1.SaveToDatabase(); - //give it the correct server - tableInfoCreated.IdentifierDumpServer_ID = IdentifierDump_ExternalDatabaseServer.ID; - tableInfoCreated.SaveToDatabase(); + //give it the correct server + tableInfoCreated.IdentifierDumpServer_ID = IdentifierDump_ExternalDatabaseServer.ID; + tableInfoCreated.SaveToDatabase(); - var existingTable = DataAccessPortal.GetInstance() - .ExpectDatabase(IdentifierDump_ExternalDatabaseServer, DataAccessContext.InternalDataProcessing) - 
.ExpectTable("ID_BulkData"); + var existingTable = DataAccessPortal + .ExpectDatabase(IdentifierDump_ExternalDatabaseServer, DataAccessContext.InternalDataProcessing) + .ExpectTable("ID_BulkData"); - if(existingTable.Exists()) - existingTable.Drop(); + if (existingTable.Exists()) + existingTable.Drop(); - IdentifierDumper dumper = new IdentifierDumper(tableInfoCreated); + var dumper = new IdentifierDumper(tableInfoCreated); - try - { - ToMemoryCheckNotifier notifier = new ToMemoryCheckNotifier(new AcceptAllCheckNotifier()); - dumper.Check(notifier); + try + { + var notifier = new ToMemoryCheckNotifier(new AcceptAllCheckNotifier()); + dumper.Check(notifier); - Assert.IsTrue(notifier.Messages.Any(m=> - m.Result == CheckResult.Warning - && - m.Message.Contains("Table ID_BulkData was not found"))); - } - finally + Assert.IsTrue(notifier.Messages.Any(m => + m.Result == CheckResult.Warning + && + m.Message.Contains("Table ID_BulkData was not found"))); + } + finally + { + preDiscardedColumn1.DeleteInDatabase(); + tableInfoCreated.IdentifierDumpServer_ID = null; //reset it back to how it was when we found it + tableInfoCreated.SaveToDatabase(); + } + } + + [Test] + public void IdentifierDumperCheckFails_ServerIsNotADumpServer() + { + var preDiscardedColumn1 = + new PreLoadDiscardedColumn(CatalogueRepository, tableInfoCreated, "NationalSecurityNumber") { - preDiscardedColumn1.DeleteInDatabase(); - tableInfoCreated.IdentifierDumpServer_ID = null;//reset it back to how it was when we found it - tableInfoCreated.SaveToDatabase(); - } + Destination = DiscardedColumnDestination.StoreInIdentifiersDump, + SqlDataType = "varchar(10)" + }; + preDiscardedColumn1.SaveToDatabase(); + + //give it the WRONG server + tableInfoCreated.IdentifierDumpServer_ID = ANOStore_ExternalDatabaseServer.ID; + tableInfoCreated.SaveToDatabase(); + + var dumper = new IdentifierDumper(tableInfoCreated); + try + { + dumper.Check(ThrowImmediatelyCheckNotifier.Quiet); + Assert.Fail("Expected it to crash before now"); + } + catch (Exception ex) + { + Assert.IsTrue( + ex.Message.StartsWith( + "Exception occurred when trying to find stored procedure sp_createIdentifierDump")); + Assert.IsTrue(ex.InnerException?.Message.StartsWith("Connected successfully to server")); + Assert.IsTrue(ex.InnerException?.Message.EndsWith( + " but did not find the stored procedure sp_createIdentifierDump in the database (Possibly the ExternalDatabaseServer is not an IdentifierDump database?)")); + } + finally + { + preDiscardedColumn1.DeleteInDatabase(); + tableInfoCreated.IdentifierDumpServer_ID = null; //reset it back to how it was when we found it + tableInfoCreated.SaveToDatabase(); } + } - [Test] - public void IdentifierDumperCheckFails_ServerIsNotADumpServer() + [Test] + public void IdentifierDumperCheckFails_NoTableOnServerRejectChange() + { + var preDiscardedColumn1 = + new PreLoadDiscardedColumn(CatalogueRepository, tableInfoCreated, "NationalSecurityNumber"); + try { - var preDiscardedColumn1 = new PreLoadDiscardedColumn(CatalogueRepository, tableInfoCreated, "NationalSecurityNumber"); preDiscardedColumn1.Destination = DiscardedColumnDestination.StoreInIdentifiersDump; preDiscardedColumn1.SqlDataType = "varchar(10)"; preDiscardedColumn1.SaveToDatabase(); - - //give it the WRONG server - tableInfoCreated.IdentifierDumpServer_ID = ANOStore_ExternalDatabaseServer.ID; - tableInfoCreated.SaveToDatabase(); - - IdentifierDumper dumper = new IdentifierDumper(tableInfoCreated); - try - { - dumper.Check(new ThrowImmediatelyCheckNotifier()); - 
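// For reference, the happy-path lifecycle exercised by DumpAllIdentifiersInTable_Passes above is
// Check, CreateSTAGINGTable, DumpAllIdentifiersInTable, DropStaging. A minimal sketch, assuming the
// TestsRequiringFullAnonymisationSuite context that supplies tableInfoCreated and _bulkData
// (the row count and "Sketch" names are illustrative only):
var dumperSketch = new IdentifierDumper(tableInfoCreated);
dumperSketch.Check(new AcceptAllCheckNotifier());   // provisions/validates the ID_BulkData table on the dump server
var dtSketch = _bulkData.GetDataTable(100);
dumperSketch.CreateSTAGINGTable();                  // must be called before dumping
dumperSketch.DumpAllIdentifiersInTable(dtSketch);   // strips discarded columns (e.g. surname) out of the DataTable into the dump
dumperSketch.DropStaging();                         // clean up when done
// Calling DumpAllIdentifiersInTable without CreateSTAGINGTable first throws, as asserted in
// IdentifierDumperCheckFails_StagingNotCalled.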
Assert.Fail("Expected it to crash before now"); - } - catch (Exception ex) - { - Assert.IsTrue(ex.Message.StartsWith("Exception occurred when trying to find stored procedure sp_createIdentifierDump")); - Assert.IsTrue(ex.InnerException.Message.StartsWith("Connected successfully to server")); - Assert.IsTrue(ex.InnerException.Message.EndsWith(" but did not find the stored procedure sp_createIdentifierDump in the database (Possibly the ExternalDatabaseServer is not an IdentifierDump database?)")); - } - finally - { - preDiscardedColumn1.DeleteInDatabase(); - tableInfoCreated.IdentifierDumpServer_ID = null;//reset it back to how it was when we found it - tableInfoCreated.SaveToDatabase(); - } + var ex = Assert.Throws(() => new IdentifierDumper(tableInfoCreated)); + StringAssert.Contains("does not have a listed IdentifierDump ExternalDatabaseServer", ex.Message); } - - [Test] - public void IdentifierDumperCheckFails_NoTableOnServerRejectChange() + finally { - var preDiscardedColumn1 = new PreLoadDiscardedColumn(CatalogueRepository, tableInfoCreated, "NationalSecurityNumber"); - try - { - preDiscardedColumn1.Destination = DiscardedColumnDestination.StoreInIdentifiersDump; - preDiscardedColumn1.SqlDataType = "varchar(10)"; - preDiscardedColumn1.SaveToDatabase(); - - var ex = Assert.Throws(()=> new IdentifierDumper(tableInfoCreated)); - StringAssert.Contains("does not have a listed IdentifierDump ExternalDatabaseServer",ex.Message); - } - finally - { - preDiscardedColumn1.DeleteInDatabase(); - } + preDiscardedColumn1.DeleteInDatabase(); } + } - [Test] - public void IdentifierDumperCheckFails_LieAboutDatatype() + [Test] + public void IdentifierDumperCheckFails_LieAboutDatatype() + { + var preDiscardedColumn1 = new PreLoadDiscardedColumn(CatalogueRepository, tableInfoCreated, "forename") { - var preDiscardedColumn1 = new PreLoadDiscardedColumn(CatalogueRepository, tableInfoCreated, "forename"); - preDiscardedColumn1.Destination = DiscardedColumnDestination.StoreInIdentifiersDump; - preDiscardedColumn1.SqlDataType = "varchar(50)"; + Destination = DiscardedColumnDestination.StoreInIdentifiersDump, + SqlDataType = "varchar(50)" + }; + preDiscardedColumn1.SaveToDatabase(); + try + { + //give it the correct server + tableInfoCreated.IdentifierDumpServer_ID = IdentifierDump_ExternalDatabaseServer.ID; + tableInfoCreated.SaveToDatabase(); + + var dumper = new IdentifierDumper(tableInfoCreated); + + //table doesnt exist yet it should work + dumper.Check(new AcceptAllCheckNotifier()); + + //now it is varbinary + preDiscardedColumn1.SqlDataType = "varbinary(200)"; preDiscardedColumn1.SaveToDatabase(); - try - { - //give it the correct server - tableInfoCreated.IdentifierDumpServer_ID = IdentifierDump_ExternalDatabaseServer.ID; - tableInfoCreated.SaveToDatabase(); - - IdentifierDumper dumper = new IdentifierDumper(tableInfoCreated); - - //table doesnt exist yet it should work - dumper.Check(new AcceptAllCheckNotifier()); - - //now it is varbinary - preDiscardedColumn1.SqlDataType = "varbinary(200)"; - preDiscardedColumn1.SaveToDatabase(); - - //get a new dumper because we have changed the pre load discarded column - dumper = new IdentifierDumper(tableInfoCreated); - //table doesnt exist yet it should work - Exception ex = Assert.Throws(()=>dumper.Check(new ThrowImmediatelyCheckNotifier())); - - Assert.IsTrue(ex.Message.Contains("has data type varbinary(200) in the Catalogue but appears as varchar(50) in the actual IdentifierDump")); - } - finally - { - preDiscardedColumn1.DeleteInDatabase(); - 
tableInfoCreated.IdentifierDumpServer_ID = null;//reset it back to how it was when we found it - tableInfoCreated.SaveToDatabase(); - } - - } - #endregion + //get a new dumper because we have changed the pre load discarded column + dumper = new IdentifierDumper(tableInfoCreated); + //table doesnt exist yet it should work + var ex = Assert.Throws(() => dumper.Check(ThrowImmediatelyCheckNotifier.Quiet)); + + Assert.IsTrue(ex?.Message.Contains( + "has data type varbinary(200) in the Catalogue but appears as varchar(50) in the actual IdentifierDump")); + } + finally + { + preDiscardedColumn1.DeleteInDatabase(); + tableInfoCreated.IdentifierDumpServer_ID = null; //reset it back to how it was when we found it + tableInfoCreated.SaveToDatabase(); + } } -} + + #endregion +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/CrossPlatformParameterTests/BasicParameterUseTests.cs b/Rdmp.Core.Tests/Curation/CrossPlatformParameterTests/BasicParameterUseTests.cs index 02f6faf692..61c8193197 100644 --- a/Rdmp.Core.Tests/Curation/CrossPlatformParameterTests/BasicParameterUseTests.cs +++ b/Rdmp.Core.Tests/Curation/CrossPlatformParameterTests/BasicParameterUseTests.cs @@ -19,87 +19,87 @@ using Rdmp.Core.Repositories; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.CrossPlatformParameterTests +namespace Rdmp.Core.Tests.Curation.CrossPlatformParameterTests; + +public class BasicParameterUseTests : DatabaseTests { - public class BasicParameterUseTests:DatabaseTests + [Test] + [TestCase(DatabaseType.MySql)] + [TestCase(DatabaseType.MicrosoftSQLServer)] + public void Test_DatabaseTypeQueryWithParameter_IntParameter(DatabaseType dbType) { - [Test] - [TestCase(DatabaseType.MySql)] - [TestCase(DatabaseType.MicrosoftSQLServer)] - public void Test_DatabaseTypeQueryWithParameter_IntParameter(DatabaseType dbType) + //Pick the destination server + var tableName = TestDatabaseNames.GetConsistentName("tbl"); + + //make sure there's a database ready to receive the data + var db = GetCleanedServer(dbType); + db.Create(true); + + + //this is the table we are uploading + var dt = new DataTable(); + dt.Columns.Add("numbercol"); + dt.Rows.Add(10); + dt.Rows.Add(15); + dt.Rows.Add(20); + dt.Rows.Add(25); + dt.TableName = tableName; + try { - //Pick the destination server - var tableName = TestDatabaseNames.GetConsistentName("tbl"); - - //make sure there's a database ready to receive the data - var db = GetCleanedServer(dbType); - db.Create(true); - - - //this is the table we are uploading - var dt = new DataTable(); - dt.Columns.Add("numbercol"); - dt.Rows.Add(10); - dt.Rows.Add(15); - dt.Rows.Add(20); - dt.Rows.Add(25); - dt.TableName = tableName; - try - { - ///////////////////////UPLOAD THE DataTable TO THE DESTINATION//////////////////////////////////////////// - var uploader = new DataTableUploadDestination(); - uploader.PreInitialize(db,new ThrowImmediatelyDataLoadJob()); - uploader.ProcessPipelineData(dt, new ThrowImmediatelyDataLoadJob(), new GracefulCancellationToken()); - uploader.Dispose(new ThrowImmediatelyDataLoadJob(),null ); - - var tbl = db.ExpectTable(tableName); - - var importer = new TableInfoImporter(CatalogueRepository, tbl); - importer.DoImport(out var ti,out var ci); - - var engineer = new ForwardEngineerCatalogue(ti, ci); - engineer.ExecuteForwardEngineering(out var cata, out var cis, out var ei); - ///////////////////////////////////////////////////////////////////////////////////////////////////////// - - /////////////////////////////////THE ACTUAL PROPER 
TEST//////////////////////////////////// - //create an extraction filter - var extractionInformation = ei.Single(); - var filter = new ExtractionFilter(CatalogueRepository, "Filter by numbers", extractionInformation); - filter.WhereSQL = extractionInformation.SelectSQL + " = @n"; - filter.SaveToDatabase(); - - //create the parameters for filter (no globals, masters or scope adjacent parameters) - new ParameterCreator(filter.GetFilterFactory(), null, null).CreateAll(filter,null); - - var p = filter.GetAllParameters().Single(); - Assert.AreEqual("@n",p.ParameterName); - p.ParameterSQL = p.ParameterSQL.Replace("varchar(50)", "int"); //make it int - p.Value = "20"; - p.SaveToDatabase(); - - var qb = new QueryBuilder(null, null); - qb.AddColumn(extractionInformation); - qb.RootFilterContainer = new SpontaneouslyInventedFilterContainer(new MemoryCatalogueRepository(), null, new[] { filter }, FilterContainerOperation.AND); - - using(var con = db.Server.GetConnection()) - { - con.Open(); - - string sql = qb.SQL; - - var cmd = db.Server.GetCommand(sql, con); - var r = cmd.ExecuteReader(); - Assert.IsTrue(r.Read()); - Assert.AreEqual( - 20, - r[extractionInformation.GetRuntimeName()]); - } - /////////////////////////////////////////////////////////////////////////////////////// - } - finally + ///////////////////////UPLOAD THE DataTable TO THE DESTINATION//////////////////////////////////////////// + var uploader = new DataTableUploadDestination(); + uploader.PreInitialize(db, new ThrowImmediatelyDataLoadJob()); + uploader.ProcessPipelineData(dt, new ThrowImmediatelyDataLoadJob(), new GracefulCancellationToken()); + uploader.Dispose(new ThrowImmediatelyDataLoadJob(), null); + + var tbl = db.ExpectTable(tableName); + + var importer = new TableInfoImporter(CatalogueRepository, tbl); + importer.DoImport(out var ti, out var ci); + + var engineer = new ForwardEngineerCatalogue(ti, ci); + engineer.ExecuteForwardEngineering(out var cata, out var cis, out var ei); + ///////////////////////////////////////////////////////////////////////////////////////////////////////// + + /////////////////////////////////THE ACTUAL PROPER TEST//////////////////////////////////// + //create an extraction filter + var extractionInformation = ei.Single(); + var filter = new ExtractionFilter(CatalogueRepository, "Filter by numbers", extractionInformation) { - db.Drop(); - } + WhereSQL = $"{extractionInformation.SelectSQL} = @n" + }; + filter.SaveToDatabase(); + + //create the parameters for filter (no globals, masters or scope adjacent parameters) + new ParameterCreator(filter.GetFilterFactory(), null, null).CreateAll(filter, null); + + var p = filter.GetAllParameters().Single(); + Assert.AreEqual("@n", p.ParameterName); + p.ParameterSQL = p.ParameterSQL.Replace("varchar(50)", "int"); //make it int + p.Value = "20"; + p.SaveToDatabase(); + + var qb = new QueryBuilder(null, null); + qb.AddColumn(extractionInformation); + qb.RootFilterContainer = new SpontaneouslyInventedFilterContainer(new MemoryCatalogueRepository(), null, + new[] { filter }, FilterContainerOperation.AND); + + using var con = db.Server.GetConnection(); + con.Open(); + + var sql = qb.SQL; + + var cmd = db.Server.GetCommand(sql, con); + var r = cmd.ExecuteReader(); + Assert.IsTrue(r.Read()); + Assert.AreEqual( + 20, + r[extractionInformation.GetRuntimeName()]); + /////////////////////////////////////////////////////////////////////////////////////// + } + finally + { + db.Drop(); } } -} +} \ No newline at end of file diff --git 
a/Rdmp.Core.Tests/Curation/DublinCoreTests.cs b/Rdmp.Core.Tests/Curation/DublinCoreTests.cs index e088140e4b..1051964909 100644 --- a/Rdmp.Core.Tests/Curation/DublinCoreTests.cs +++ b/Rdmp.Core.Tests/Curation/DublinCoreTests.cs @@ -6,73 +6,71 @@ using System; using System.IO; -using System.Text; using System.Xml.Linq; using NUnit.Framework; -using Rdmp.Core.Curation.Data; using Rdmp.Core.Reports.DublinCore; using Tests.Common; -namespace Rdmp.Core.Tests.Curation +namespace Rdmp.Core.Tests.Curation; + +[Category("Unit")] +internal class DublinCoreTests { - [Category("Unit")] - class DublinCoreTests + [Test] + public void TestWritingDocument() { - [Test] - public void TestWrittingDocument() + var def = new DublinCoreDefinition { - var def = new DublinCoreDefinition() - { - Title = "ssssshh", - Alternative = "O'Rly", - Description = "Description of stuff", - Format = "text/html", - Identifier = new Uri("http://foo.com"), - Publisher = "University of Dundee", - IsPartOf = new Uri("http://foo2.com"), - Modified = new DateTime(2001,1,1), - Subject = "Interesting, PayAttention, HighPriority, Omg" - }; - - var f = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, "dublinTest.xml")); - - using(var fw = f.OpenWrite()) - def.WriteXml(fw); - - var contents = File.ReadAllText(f.FullName); - StringAssert.Contains(def.Title, contents); - StringAssert.Contains(def.Alternative, contents); - StringAssert.Contains(def.Description, contents); - StringAssert.Contains(def.Format, contents); - StringAssert.Contains(def.Publisher, contents); - StringAssert.Contains(def.Subject, contents); - - StringAssert.Contains("2001-01-01", contents); - StringAssert.Contains("http://foo.com", contents); - StringAssert.Contains("http://foo2.com", contents); - - var def2 = new DublinCoreDefinition(); - def2.LoadFrom(XDocument.Load(f.FullName).Root); - - Assert.AreEqual(def.Title, def2.Title); - Assert.AreEqual(def.Alternative, def2.Alternative); - Assert.AreEqual(def.Description, def2.Description); - Assert.AreEqual(def.Format, def2.Format); - Assert.AreEqual(def.Publisher, def2.Publisher); - Assert.AreEqual(def.Subject, def2.Subject); - - Assert.AreEqual(def.Modified, def2.Modified); - Assert.AreEqual(def.IsPartOf.ToString(), def2.IsPartOf.ToString()); - Assert.AreEqual(def.Identifier.ToString(), def2.Identifier.ToString()); - - + Title = "ssssshh", + Alternative = "O'Rly", + Description = "Description of stuff", + Format = "text/html", + Identifier = new Uri("http://foo.com"), + Publisher = "University of Dundee", + IsPartOf = new Uri("http://foo2.com"), + Modified = new DateTime(2001, 1, 1), + Subject = "Interesting, PayAttention, HighPriority, Omg" + }; + + var f = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, "dublinTest.xml")); + + using (var fw = f.OpenWrite()) + { + def.WriteXml(fw); } - [Test] - public void TestReadingDocument() - { - string xml = - @" + var contents = File.ReadAllText(f.FullName); + StringAssert.Contains(def.Title, contents); + StringAssert.Contains(def.Alternative, contents); + StringAssert.Contains(def.Description, contents); + StringAssert.Contains(def.Format, contents); + StringAssert.Contains(def.Publisher, contents); + StringAssert.Contains(def.Subject, contents); + + StringAssert.Contains("2001-01-01", contents); + StringAssert.Contains("http://foo.com", contents); + StringAssert.Contains("http://foo2.com", contents); + + var def2 = new DublinCoreDefinition(); + def2.LoadFrom(XDocument.Load(f.FullName).Root); + + Assert.AreEqual(def.Title, def2.Title); + 
Assert.AreEqual(def.Alternative, def2.Alternative); + Assert.AreEqual(def.Description, def2.Description); + Assert.AreEqual(def.Format, def2.Format); + Assert.AreEqual(def.Publisher, def2.Publisher); + Assert.AreEqual(def.Subject, def2.Subject); + + Assert.AreEqual(def.Modified, def2.Modified); + Assert.AreEqual(def.IsPartOf.ToString(), def2.IsPartOf.ToString()); + Assert.AreEqual(def.Identifier.ToString(), def2.Identifier.ToString()); + } + + [Test] + public void TestReadingDocument() + { + var xml = + @" "; - var fi = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, "dublinTestReading.xml")); - File.WriteAllText(fi.FullName,xml); + var fi = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, "dublinTestReading.xml")); + File.WriteAllText(fi.FullName, xml); - var doc = XDocument.Load(fi.FullName); + var doc = XDocument.Load(fi.FullName); - var def = new DublinCoreDefinition(); + var def = new DublinCoreDefinition(); - def.LoadFrom(doc.Root); + def.LoadFrom(doc.Root); - Assert.IsTrue(DatabaseTests.AreBasicallyEquals("UKOLN",def.Title)); - Assert.IsTrue(DatabaseTests.AreBasicallyEquals("UK Office for Library and Information Networking", def.Alternative)); - Assert.IsTrue(DatabaseTests.AreBasicallyEquals(@"national centre, network information support, library + Assert.IsTrue(DatabaseTests.AreBasicallyEquals("UKOLN", def.Title)); + Assert.IsTrue(DatabaseTests.AreBasicallyEquals("UK Office for Library and Information Networking", + def.Alternative)); + Assert.IsTrue(DatabaseTests.AreBasicallyEquals(@"national centre, network information support, library community, awareness, research, information services,public library networking, bibliographic management, distributed library systems, metadata, resource discovery, conferences,lectures, workshops", def.Subject)); - Assert.IsTrue(DatabaseTests.AreBasicallyEquals(@"UKOLN is a national focus of expertise in digital information + Assert.IsTrue(DatabaseTests.AreBasicallyEquals(@"UKOLN is a national focus of expertise in digital information management. It provides policy, research and awareness services to the UK library, information and cultural heritage communities. UKOLN is based at the University of Bath.", def.Description)); - Assert.IsTrue(DatabaseTests.AreBasicallyEquals("UKOLN, University of Bath", def.Publisher)); - StringAssert.AreEqualIgnoringCase("http://www.bath.ac.uk/", def.IsPartOf.AbsoluteUri); - StringAssert.AreEqualIgnoringCase("http://www.ukoln.ac.uk/", def.Identifier.AbsoluteUri); - Assert.IsTrue(DatabaseTests.AreBasicallyEquals(new DateTime(2001,07,18),def.Modified)); - } + Assert.IsTrue(DatabaseTests.AreBasicallyEquals("UKOLN, University of Bath", def.Publisher)); + StringAssert.AreEqualIgnoringCase("http://www.bath.ac.uk/", def.IsPartOf.AbsoluteUri); + StringAssert.AreEqualIgnoringCase("http://www.ukoln.ac.uk/", def.Identifier.AbsoluteUri); + Assert.IsTrue(DatabaseTests.AreBasicallyEquals(new DateTime(2001, 07, 18), def.Modified)); + } - /// - /// This test also appears in the Rdmp.UI.Tests project since it behaves differently in different runtime. - /// - [Test] - public void Test_DublinCore_WriteReadFile_NetCore() + /// + /// This test also appears in the Rdmp.UI.Tests project since it behaves differently in different runtime. 
+ /// + [Test] + public void Test_DublinCore_WriteReadFile_NetCore() + { + var def1 = new DublinCoreDefinition + { + Title = "ssssshh", + Alternative = "O'Rly", + Description = "Description of stuff", + Format = "text/html", + Identifier = new Uri("http://foo.com"), + Publisher = "University of Dundee", + IsPartOf = new Uri("http://foo2.com"), + Modified = new DateTime(2001, 1, 1), + Subject = "Interesting, PayAttention, HighPriority, Omg" + }; + + var fi = new FileInfo(Path.Combine(TestContext.CurrentContext.WorkDirectory, "dublin.xml")); + + using (var outStream = fi.OpenWrite()) { - var def1 = new DublinCoreDefinition() - { - Title = "ssssshh", - Alternative = "O'Rly", - Description = "Description of stuff", - Format = "text/html", - Identifier = new Uri("http://foo.com"), - Publisher = "University of Dundee", - IsPartOf = new Uri("http://foo2.com"), - Modified = new DateTime(2001,1,1), - Subject = "Interesting, PayAttention, HighPriority, Omg" - }; - - var fi = new FileInfo(Path.Combine(TestContext.CurrentContext.WorkDirectory, "dublin.xml")); - - using(var outStream = fi.OpenWrite()) - def1.WriteXml(outStream); - - using (var inStream = fi.OpenRead()) - { - var def2 = new DublinCoreDefinition(); - var doc = XDocument.Load(inStream); - def2.LoadFrom(doc.Root); - } + def1.WriteXml(outStream); } + + using var inStream = fi.OpenRead(); + var def2 = new DublinCoreDefinition(); + var doc = XDocument.Load(inStream); + def2.LoadFrom(doc.Root); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/ExtendedPropertyTests.cs b/Rdmp.Core.Tests/Curation/ExtendedPropertyTests.cs index 2bc9f24fac..b710ccbdd6 100644 --- a/Rdmp.Core.Tests/Curation/ExtendedPropertyTests.cs +++ b/Rdmp.Core.Tests/Curation/ExtendedPropertyTests.cs @@ -8,34 +8,32 @@ using Rdmp.Core.Curation.Data; using Tests.Common; -namespace Rdmp.Core.Tests.Curation +namespace Rdmp.Core.Tests.Curation; + +internal class ExtendedPropertyTests : DatabaseTests { - class ExtendedPropertyTests : DatabaseTests + [Test] + public void ExtendedProperty_Catalogue() { - [Test] - public void ExtendedProperty_Catalogue() - { - var cata = new Catalogue(CatalogueRepository,"My cata"); - var prop = new ExtendedProperty(CatalogueRepository,cata,"Fish",5); + var cata = new Catalogue(CatalogueRepository, "My cata"); + var prop = new ExtendedProperty(CatalogueRepository, cata, "Fish", 5); - Assert.AreEqual(5,prop.GetValueAsSystemType()); - Assert.IsTrue(prop.IsReferenceTo(cata)); + Assert.AreEqual(5, prop.GetValueAsSystemType()); + Assert.IsTrue(prop.IsReferenceTo(cata)); - prop.SetValue(10); - prop.SaveToDatabase(); - - Assert.AreEqual(10,prop.GetValueAsSystemType()); - Assert.IsTrue(prop.IsReferenceTo(cata)); + prop.SetValue(10); + prop.SaveToDatabase(); - prop.RevertToDatabaseState(); + Assert.AreEqual(10, prop.GetValueAsSystemType()); + Assert.IsTrue(prop.IsReferenceTo(cata)); - Assert.AreEqual(10,prop.GetValueAsSystemType()); - Assert.IsTrue(prop.IsReferenceTo(cata)); + prop.RevertToDatabaseState(); - var prop2 = CatalogueRepository.GetObjectByID(prop.ID); - Assert.AreEqual(10,prop.GetValueAsSystemType()); - Assert.IsTrue(prop.IsReferenceTo(cata)); + Assert.AreEqual(10, prop.GetValueAsSystemType()); + Assert.IsTrue(prop.IsReferenceTo(cata)); - } + var prop2 = CatalogueRepository.GetObjectByID(prop.ID); + Assert.AreEqual(10, prop.GetValueAsSystemType()); + Assert.IsTrue(prop.IsReferenceTo(cata)); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/ImportTests/GatherAndShareTests.cs 
b/Rdmp.Core.Tests/Curation/ImportTests/GatherAndShareTests.cs index c1fca7800c..a8ec16f1cf 100644 --- a/Rdmp.Core.Tests/Curation/ImportTests/GatherAndShareTests.cs +++ b/Rdmp.Core.Tests/Curation/ImportTests/GatherAndShareTests.cs @@ -8,8 +8,6 @@ using System.Collections.Generic; using System.IO; using System.Linq; -using MapsDirectlyToDatabaseTable.Attributes; -using MapsDirectlyToDatabaseTable.Revertable; using NUnit.Framework; using Rdmp.Core.CommandLine.Runners; using Rdmp.Core.Curation.Data; @@ -17,284 +15,302 @@ using Rdmp.Core.Curation.Data.ImportExport; using Rdmp.Core.Curation.Data.Serialization; using Rdmp.Core.Databases; +using Rdmp.Core.MapsDirectlyToDatabaseTable.Attributes; using Rdmp.Core.Sharing.Dependency.Gathering; using Tests.Common; using TypeGuesser; -namespace Rdmp.Core.Tests.Curation.ImportTests +namespace Rdmp.Core.Tests.Curation.ImportTests; + +public class GatherAndShareTests : DatabaseTests { - public class GatherAndShareTests:DatabaseTests + [Test] + public void Test_SerializeObject_ShareAttribute() { - [Test] - public void Test_SerializeObject_ShareAttribute() - { - Dictionary d = new Dictionary(); - - var json = JsonConvertExtensions.SerializeObject(d,RepositoryLocator); - var obj = (Dictionary)JsonConvertExtensions.DeserializeObject(json, typeof(Dictionary),RepositoryLocator); + var d = new Dictionary(); - Assert.AreEqual(0,obj.Count); + var json = JsonConvertExtensions.SerializeObject(d, RepositoryLocator); + var obj = (Dictionary)JsonConvertExtensions.DeserializeObject(json, + typeof(Dictionary), RepositoryLocator); - //now add a key - d.Add(new RelationshipAttribute(typeof(string),RelationshipType.SharedObject,"fff"),Guid.Empty); - - json = JsonConvertExtensions.SerializeObject(d,RepositoryLocator); - obj = (Dictionary)JsonConvertExtensions.DeserializeObject(json, typeof(Dictionary),RepositoryLocator); + Assert.AreEqual(0, obj.Count); - Assert.AreEqual(1,obj.Count); - } + //now add a key + d.Add(new RelationshipAttribute(typeof(string), RelationshipType.SharedObject, "fff"), Guid.Empty); + + json = JsonConvertExtensions.SerializeObject(d, RepositoryLocator); + obj = (Dictionary)JsonConvertExtensions.DeserializeObject(json, + typeof(Dictionary), RepositoryLocator); + + Assert.AreEqual(1, obj.Count); + } + + [TestCase(true)] + [TestCase(false)] + public void GatherAndShare_ANOTable_Test(bool goViaJson) + { + var anoserver = + new ExternalDatabaseServer(CatalogueRepository, "MyGatherAndShareTestANOServer", new ANOStorePatcher()); + var anoTable = new ANOTable(CatalogueRepository, anoserver, "ANOMagad", "N"); + + Assert.AreEqual(anoTable.Server_ID, anoserver.ID); - [TestCase(true)] - [TestCase(false)] - public void GatherAndShare_ANOTable_Test(bool goViaJson) + var g = new Gatherer(RepositoryLocator); + Assert.IsTrue(g.CanGatherDependencies(anoTable)); + + var gObj = Gatherer.GatherDependencies(anoTable); + + //root should be the server + Assert.AreEqual(gObj.Object, anoserver); + Assert.AreEqual(gObj.Children.Single().Object, anoTable); + + //get the sharing definitions + var shareManager = new ShareManager(RepositoryLocator); + var defParent = gObj.ToShareDefinition(shareManager, new List()); + var defChild = gObj.Children.Single() + .ToShareDefinition(shareManager, new List(new[] { defParent })); + + //make it look like we never had it in the first place + shareManager.GetNewOrExistingExportFor(anoserver).DeleteInDatabase(); + shareManager.GetNewOrExistingExportFor(anoTable).DeleteInDatabase(); + anoTable.DeleteInDatabase(); + anoserver.DeleteInDatabase(); + 
+ if (goViaJson) { - var anoserver = new ExternalDatabaseServer(CatalogueRepository, "MyGatherAndShareTestANOServer", new ANOStorePatcher()); - var anoTable = new ANOTable(CatalogueRepository, anoserver, "ANOMagad", "N"); - - Assert.AreEqual(anoTable.Server_ID,anoserver.ID); - - Gatherer g = new Gatherer(RepositoryLocator); - Assert.IsTrue(g.CanGatherDependencies(anoTable)); - - var gObj = g.GatherDependencies(anoTable); - - //root should be the server - Assert.AreEqual(gObj.Object,anoserver); - Assert.AreEqual(gObj.Children.Single().Object, anoTable); - - //get the sharing definitions - var shareManager = new ShareManager(RepositoryLocator); - ShareDefinition defParent = gObj.ToShareDefinition(shareManager,new List()); - ShareDefinition defChild = gObj.Children.Single().ToShareDefinition(shareManager, new List(new []{defParent})); - - //make it look like we never had it in the first place - shareManager.GetNewOrExistingExportFor(anoserver).DeleteInDatabase(); - shareManager.GetNewOrExistingExportFor(anoTable).DeleteInDatabase(); - anoTable.DeleteInDatabase(); - anoserver.DeleteInDatabase(); - - if(goViaJson) - { - var sParent = JsonConvertExtensions.SerializeObject(defParent,RepositoryLocator); - var sChild = JsonConvertExtensions.SerializeObject(defChild, RepositoryLocator); - - defParent = (ShareDefinition)JsonConvertExtensions.DeserializeObject(sParent, typeof(ShareDefinition),RepositoryLocator); - defChild = (ShareDefinition)JsonConvertExtensions.DeserializeObject(sChild, typeof(ShareDefinition), RepositoryLocator); - } - - var anoserverAfter = new ExternalDatabaseServer(shareManager, defParent); - - Assert.IsTrue(anoserverAfter.Exists()); - - //new instance - Assert.AreNotEqual(anoserverAfter.ID, anoserver.ID); - - //same properties - Assert.AreEqual(anoserverAfter.Name, anoserver.Name); - Assert.AreEqual(anoserverAfter.CreatedByAssembly, anoserver.CreatedByAssembly); - Assert.AreEqual(anoserverAfter.Database, anoserver.Database); - Assert.AreEqual(anoserverAfter.DatabaseType, anoserver.DatabaseType); - Assert.AreEqual(anoserverAfter.Username, anoserver.Username); - Assert.AreEqual(anoserverAfter.Password, anoserver.Password); - - var anoTableAfter = new ANOTable(shareManager, defChild); - - //new instance - Assert.AreNotEqual(anoTableAfter.ID, anoTable.ID); - Assert.AreNotEqual(anoTableAfter.Server_ID, anoTable.Server_ID); - - //same properties - Assert.AreEqual(anoTableAfter.NumberOfCharactersToUseInAnonymousRepresentation, anoTable.NumberOfCharactersToUseInAnonymousRepresentation); - Assert.AreEqual(anoTableAfter.Suffix, anoTable.Suffix); - - //change a property and save it - anoTableAfter.Suffix = "CAMMELS!"; - CatalogueRepository.SaveToDatabase(anoTableAfter); - //anoTableAfter.SaveToDatabase(); <- this decides to go check the ANOTable exists on the server refernced which is immaginary btw >< thats why we have the above line instead - - //reimport (this time it should be an update, we import the share definitions and it overrdies our database copy (sharing is UPSERT) - var anoTableAfter2 = new ANOTable(shareManager, defChild); - - Assert.AreEqual(anoTableAfter.ID, anoTableAfter2.ID); - Assert.AreEqual("N", anoTableAfter2.Suffix); - - anoTableAfter.DeleteInDatabase(); - anoserverAfter.DeleteInDatabase(); - - foreach (ObjectImport o in RepositoryLocator.CatalogueRepository.GetAllObjects()) - o.DeleteInDatabase(); + var sParent = JsonConvertExtensions.SerializeObject(defParent, RepositoryLocator); + var sChild = JsonConvertExtensions.SerializeObject(defChild, RepositoryLocator); + 
+ defParent = (ShareDefinition)JsonConvertExtensions.DeserializeObject(sParent, typeof(ShareDefinition), + RepositoryLocator); + defChild = (ShareDefinition)JsonConvertExtensions.DeserializeObject(sChild, typeof(ShareDefinition), + RepositoryLocator); } - [Test] - public void GatherAndShare_Plugin_Test() + var anoserverAfter = new ExternalDatabaseServer(shareManager, defParent); + + Assert.IsTrue(anoserverAfter.Exists()); + + //new instance + Assert.AreNotEqual(anoserverAfter.ID, anoserver.ID); + + //same properties + Assert.AreEqual(anoserverAfter.Name, anoserver.Name); + Assert.AreEqual(anoserverAfter.CreatedByAssembly, anoserver.CreatedByAssembly); + Assert.AreEqual(anoserverAfter.Database, anoserver.Database); + Assert.AreEqual(anoserverAfter.DatabaseType, anoserver.DatabaseType); + Assert.AreEqual(anoserverAfter.Username, anoserver.Username); + Assert.AreEqual(anoserverAfter.Password, anoserver.Password); + + var anoTableAfter = new ANOTable(shareManager, defChild); + + //new instance + Assert.AreNotEqual(anoTableAfter.ID, anoTable.ID); + Assert.AreNotEqual(anoTableAfter.Server_ID, anoTable.Server_ID); + + //same properties + Assert.AreEqual(anoTableAfter.NumberOfCharactersToUseInAnonymousRepresentation, + anoTable.NumberOfCharactersToUseInAnonymousRepresentation); + Assert.AreEqual(anoTableAfter.Suffix, anoTable.Suffix); + + //change a property and save it + anoTableAfter.Suffix = "CAMMELS!"; + CatalogueRepository.SaveToDatabase(anoTableAfter); + //anoTableAfter.SaveToDatabase(); <- this decides to go check the ANOTable exists on the server refernced which is immaginary btw >< thats why we have the above line instead + + //reimport (this time it should be an update, we import the share definitions and it overrdies our database copy (sharing is UPSERT) + var anoTableAfter2 = new ANOTable(shareManager, defChild); + + Assert.AreEqual(anoTableAfter.ID, anoTableAfter2.ID); + Assert.AreEqual("N", anoTableAfter2.Suffix); + + anoTableAfter.DeleteInDatabase(); + anoserverAfter.DeleteInDatabase(); + + foreach (var o in RepositoryLocator.CatalogueRepository.GetAllObjects()) + o.DeleteInDatabase(); + } + + [Test] + public void GatherAndShare_Plugin_Test() + { + var f1 = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, + $"Imaginary1{PackPluginRunner.PluginPackageSuffix}")); + File.WriteAllBytes(f1.FullName, new byte[] { 0x1, 0x2 }); + + var plugin = new Plugin(CatalogueRepository, new FileInfo( + $"Imaginary{PackPluginRunner.PluginPackageSuffix}"), new Version(1, 1, 1), new Version(1, 1, 1)); + var lma1 = new LoadModuleAssembly(CatalogueRepository, f1, plugin); + + Assert.AreEqual(lma1.Plugin_ID, plugin.ID); + + var g = new Gatherer(RepositoryLocator); + Assert.IsTrue(g.CanGatherDependencies(plugin)); + + var gObj = Gatherer.GatherDependencies(plugin); + + //root should be the server + Assert.AreEqual(gObj.Object, plugin); + Assert.IsTrue(gObj.Children.Any(d => d.Object.Equals(lma1))); + } + + + [TestCase(true)] + [TestCase(false)] + public void GatherAndShare_Catalogue_Test(bool goViaJson) + { + //Setup some objects under Catalogue that we can share + var cata = new Catalogue(CatalogueRepository, "Cata") { - var f1 = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory,"Imaginary1"+ PackPluginRunner.PluginPackageSuffix)); - File.WriteAllBytes(f1.FullName,new byte[]{0x1,0x2}); - - var plugin = new Core.Curation.Data.Plugin(CatalogueRepository,new FileInfo("Imaginary"+ PackPluginRunner.PluginPackageSuffix),new System.Version(1,1,1),new System.Version(1,1,1)); - 
var lma1 = new LoadModuleAssembly(CatalogueRepository,f1,plugin); + Periodicity = Catalogue.CataloguePeriodicity.BiMonthly + }; + cata.SaveToDatabase(); - Assert.AreEqual(lma1.Plugin_ID, plugin.ID); + var catalogueItem1 = new CatalogueItem(CatalogueRepository, cata, "Ci1"); + var catalogueItem2 = new CatalogueItem(CatalogueRepository, cata, "Ci2"); - Gatherer g = new Gatherer(RepositoryLocator); - Assert.IsTrue(g.CanGatherDependencies(plugin)); + var tableInfo = new TableInfo(CatalogueRepository, "Myt"); + var colInfo = new ColumnInfo(CatalogueRepository, "[Mt].[C1]", "varchar(10)", tableInfo); - var gObj = g.GatherDependencies(plugin); + catalogueItem1.ColumnInfo_ID = colInfo.ID; + catalogueItem1.SaveToDatabase(); - //root should be the server - Assert.AreEqual(gObj.Object, plugin); - Assert.IsTrue(gObj.Children.Any(d=>d.Object.Equals(lma1))); - } + var ei = new ExtractionInformation(CatalogueRepository, catalogueItem1, colInfo, "UPPER(C1) as Fish"); + + //the logging server has a system default so should have been populated + Assert.IsNotNull(cata.LiveLoggingServer_ID); + + //Catalogue sharing should be allowed + var g = new Gatherer(RepositoryLocator); + Assert.IsTrue(g.CanGatherDependencies(cata)); + //gather the objects depending on Catalogue as a tree + var gObj = Gatherer.GatherDependencies(cata); + Assert.AreEqual(2, gObj.Children.Count); //both cata items - [TestCase(true)] - [TestCase(false)] - public void GatherAndShare_Catalogue_Test(bool goViaJson) + var lmd = new LoadMetadata(CatalogueRepository); + cata.LoadMetadata_ID = lmd.ID; + cata.SaveToDatabase(); + + //get the share definition + var shareManager = new ShareManager(RepositoryLocator); + var shareDefinition = gObj.ToShareDefinitionWithChildren(shareManager); + + + if (goViaJson) { - //Setup some objects under Catalogue that we can share - var cata = new Catalogue(CatalogueRepository, "Cata"); - cata.Periodicity = Catalogue.CataloguePeriodicity.BiMonthly; - cata.SaveToDatabase(); - - var catalogueItem1 = new CatalogueItem(CatalogueRepository, cata, "Ci1"); - var catalogueItem2 = new CatalogueItem(CatalogueRepository, cata, "Ci2"); - - var tableInfo = new TableInfo(CatalogueRepository, "Myt"); - var colInfo = new ColumnInfo(CatalogueRepository, "[Mt].[C1]", "varchar(10)", tableInfo); - - catalogueItem1.ColumnInfo_ID = colInfo.ID; - catalogueItem1.SaveToDatabase(); - - var ei = new ExtractionInformation(CatalogueRepository, catalogueItem1, colInfo, "UPPER(C1) as Fish"); - - //the logging server has a system default so should have been populated - Assert.IsNotNull(cata.LiveLoggingServer_ID); - - //Catalogue sharing should be allowed - Gatherer g = new Gatherer(RepositoryLocator); - Assert.IsTrue(g.CanGatherDependencies(cata)); - - //gather the objects depending on Catalogue as a tree - var gObj = g.GatherDependencies(cata); - Assert.AreEqual(2, gObj.Children.Count); //both cata items - - var lmd = new LoadMetadata(CatalogueRepository); - cata.LoadMetadata_ID = lmd.ID; - cata.SaveToDatabase(); - - //get the share definition - var shareManager = new ShareManager(RepositoryLocator); - var shareDefinition = gObj.ToShareDefinitionWithChildren(shareManager); - - - if (goViaJson) - { - var json = - shareDefinition.Select(s => JsonConvertExtensions.SerializeObject(s, RepositoryLocator)).ToList(); - shareDefinition = - json.Select( - j => JsonConvertExtensions.DeserializeObject(j, typeof (ShareDefinition), RepositoryLocator)) - .Cast() - .ToList(); - } - - //make a local change - cata.Name = "fishfish"; - cata.SubjectNumbers = 
"123"; - cata.LoadMetadata_ID = null; - cata.Periodicity = Catalogue.CataloguePeriodicity.Unknown; - cata.SaveToDatabase(); - - lmd.DeleteInDatabase(); - - //import the saved copy - shareManager.ImportSharedObject(shareDefinition); - - //revert the memory copy and check it got overwritten with the original saved values - cata = CatalogueRepository.GetObjectByID(cata.ID); - Assert.AreEqual("Cata", cata.Name); - - var exports = CatalogueRepository.GetAllObjects(); - Assert.IsTrue(exports.Any()); - - //now delete and report - foreach (var d in exports) - d.DeleteInDatabase(); - - //make a local change including Name - cata.Name = "fishfish"; - cata.SaveToDatabase(); - - //test importing the Catalogue properties only - shareManager.ImportPropertiesOnly(cata,shareDefinition[0]); - - //import the defined properties but not name - Assert.AreEqual("fishfish",cata.Name); - Assert.AreEqual(Catalogue.CataloguePeriodicity.BiMonthly,cata.Periodicity); //reset this though - Assert.IsNull(cata.LoadMetadata_ID); - cata.SaveToDatabase(); - - cata.DeleteInDatabase(); - - //none of these should now exist thanks to cascade deletes - Assert.IsFalse(cata.Exists()); - Assert.IsFalse(catalogueItem1.Exists()); - Assert.IsFalse(catalogueItem2.Exists()); - - //import the saved copy - var newObjects = shareManager.ImportSharedObject(shareDefinition).ToArray(); - - Assert.AreEqual("Cata", ((Catalogue) newObjects[0]).Name); - Assert.AreEqual("Ci1", ((CatalogueItem) newObjects[1]).Name); - Assert.AreEqual("Ci2", ((CatalogueItem) newObjects[2]).Name); + var json = + shareDefinition.Select(s => JsonConvertExtensions.SerializeObject(s, RepositoryLocator)).ToList(); + shareDefinition = + json.Select( + j => JsonConvertExtensions.DeserializeObject(j, typeof(ShareDefinition), RepositoryLocator)) + .Cast() + .ToList(); } - [Test] - public void GatherAndShare_ExtractionFilter_Test() + //make a local change + cata.Name = "fishfish"; + cata.SubjectNumbers = "123"; + cata.LoadMetadata_ID = null; + cata.Periodicity = Catalogue.CataloguePeriodicity.Unknown; + cata.SaveToDatabase(); + + lmd.DeleteInDatabase(); + + //import the saved copy + shareManager.ImportSharedObject(shareDefinition); + + //revert the memory copy and check it got overwritten with the original saved values + cata = CatalogueRepository.GetObjectByID(cata.ID); + Assert.AreEqual("Cata", cata.Name); + + var exports = CatalogueRepository.GetAllObjects(); + Assert.IsTrue(exports.Any()); + + //now delete and report + foreach (var d in exports) + d.DeleteInDatabase(); + + //make a local change including Name + cata.Name = "fishfish"; + cata.SaveToDatabase(); + + //test importing the Catalogue properties only + ShareManager.ImportPropertiesOnly(cata, shareDefinition[0]); + + //import the defined properties but not name + Assert.AreEqual("fishfish", cata.Name); + Assert.AreEqual(Catalogue.CataloguePeriodicity.BiMonthly, cata.Periodicity); //reset this though + Assert.IsNull(cata.LoadMetadata_ID); + cata.SaveToDatabase(); + + cata.DeleteInDatabase(); + + //none of these should now exist thanks to cascade deletes + Assert.IsFalse(cata.Exists()); + Assert.IsFalse(catalogueItem1.Exists()); + Assert.IsFalse(catalogueItem2.Exists()); + + //import the saved copy + var newObjects = shareManager.ImportSharedObject(shareDefinition).ToArray(); + + Assert.AreEqual("Cata", ((Catalogue)newObjects[0]).Name); + Assert.AreEqual("Ci1", ((CatalogueItem)newObjects[1]).Name); + Assert.AreEqual("Ci2", ((CatalogueItem)newObjects[2]).Name); + } + + [Test] + public void 
GatherAndShare_ExtractionFilter_Test() + { + //Setup some objects under Catalogue + var cata = new Catalogue(CatalogueRepository, "Cata") { - //Setup some objects under Catalogue - var cata = new Catalogue(CatalogueRepository, "Cata"); - cata.Periodicity = Catalogue.CataloguePeriodicity.BiMonthly; - cata.SaveToDatabase(); - - var catalogueItem1 = new CatalogueItem(CatalogueRepository, cata, "Ci1"); - - var tableInfo = new TableInfo(CatalogueRepository, "Myt"); - var colInfo = new ColumnInfo(CatalogueRepository, "[Mt].[C1]", "varchar(10)", tableInfo); - - catalogueItem1.ColumnInfo_ID = colInfo.ID; - catalogueItem1.SaveToDatabase(); - - //Setup a Filter under this extractable column (the filter is what we will share) - var ei = new ExtractionInformation(CatalogueRepository, catalogueItem1, colInfo, "UPPER(C1) as Fish"); - - var filter = new ExtractionFilter(CatalogueRepository, "My Filter", ei); - filter.Description = "amagad"; - filter.WhereSQL = "UPPER(C1) = @a"; - - //Give the filter a parameter @a just to make things interesting - var declaration = filter.GetQuerySyntaxHelper().GetParameterDeclaration("@a", new DatabaseTypeRequest(typeof (string), 1)); - var param = filter.GetFilterFactory().CreateNewParameter(filter, declaration); - - //Also create a 'known good value' set i.e. recommended value for the parameter to achive some goal (you can have multiple of these - this will not be shared) - var set = new ExtractionFilterParameterSet(CatalogueRepository, filter, "Fife"); - var val = new ExtractionFilterParameterSetValue(CatalogueRepository, set, (ExtractionFilterParameter) param); - val.Value = "'FISH'"; - - //Gather the dependencies (this is what we are testing) - var gatherer = new Gatherer(RepositoryLocator); - - Assert.IsTrue(gatherer.CanGatherDependencies(filter)); - var gathered = gatherer.GatherDependencies(filter); - - //gatherer should have gathered the filter and the parameter (but not the ExtractionFilterParameterSet sets) - Assert.AreEqual(1,gathered.Children.Count); - Assert.AreEqual(param,gathered.Children[0].Object); - - //Cleanup - val.DeleteInDatabase(); - set.DeleteInDatabase(); - cata.DeleteInDatabase(); - } + Periodicity = Catalogue.CataloguePeriodicity.BiMonthly + }; + cata.SaveToDatabase(); + + var catalogueItem1 = new CatalogueItem(CatalogueRepository, cata, "Ci1"); + + var tableInfo = new TableInfo(CatalogueRepository, "Myt"); + var colInfo = new ColumnInfo(CatalogueRepository, "[Mt].[C1]", "varchar(10)", tableInfo); + + catalogueItem1.ColumnInfo_ID = colInfo.ID; + catalogueItem1.SaveToDatabase(); + + //Setup a Filter under this extractable column (the filter is what we will share) + var ei = new ExtractionInformation(CatalogueRepository, catalogueItem1, colInfo, "UPPER(C1) as Fish"); + + var filter = new ExtractionFilter(CatalogueRepository, "My Filter", ei) + { + Description = "amagad", + WhereSQL = "UPPER(C1) = @a" + }; + + //Give the filter a parameter @a just to make things interesting + var declaration = filter.GetQuerySyntaxHelper() + .GetParameterDeclaration("@a", new DatabaseTypeRequest(typeof(string), 1)); + var param = filter.GetFilterFactory().CreateNewParameter(filter, declaration); + + //Also create a 'known good value' set i.e. 
recommended value for the parameter to achive some goal (you can have multiple of these - this will not be shared) + var set = new ExtractionFilterParameterSet(CatalogueRepository, filter, "Fife"); + var val = new ExtractionFilterParameterSetValue(CatalogueRepository, set, (ExtractionFilterParameter)param) + { + Value = "'FISH'" + }; + + //Gather the dependencies (this is what we are testing) + var gatherer = new Gatherer(RepositoryLocator); + + Assert.IsTrue(gatherer.CanGatherDependencies(filter)); + var gathered = Gatherer.GatherDependencies(filter); + + //gatherer should have gathered the filter and the parameter (but not the ExtractionFilterParameterSet sets) + Assert.AreEqual(1, gathered.Children.Count); + Assert.AreEqual(param, gathered.Children[0].Object); + + //Cleanup + val.DeleteInDatabase(); + set.DeleteInDatabase(); + cata.DeleteInDatabase(); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/ImportTests/PluginClassTests.cs b/Rdmp.Core.Tests/Curation/ImportTests/PluginClassTests.cs index 80d25d9b70..9985d97131 100644 --- a/Rdmp.Core.Tests/Curation/ImportTests/PluginClassTests.cs +++ b/Rdmp.Core.Tests/Curation/ImportTests/PluginClassTests.cs @@ -16,115 +16,108 @@ using Rdmp.Core.Sharing.Dependency.Gathering; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.ImportTests +namespace Rdmp.Core.Tests.Curation.ImportTests; + +public class PluginClassTests : UnitTests { + [SetUp] + protected override void SetUp() + { + base.SetUp(); + + Repository.Clear(); + } + + [Test] + public void Catalogue_returns_latest_compatible_plugin() + { + var fi = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, "Blah.zip")); + File.WriteAllBytes(fi.FullName, new byte[] { 0x1, 0x2 }); + + var version = FileVersionInfo.GetVersionInfo(typeof(PluginClassTests).Assembly.Location).FileVersion ?? 
+ throw new Exception($"No file version in {typeof(PluginClassTests).Assembly.Location}"); + + var lma1 = WhenIHaveA(); + var lma2 = WhenIHaveA(); + + + lma1.Plugin.Name = "MyPlugin"; + lma1.Plugin.RdmpVersion = new Version(version); //the version of Rdmp.Core targeted + lma1.Plugin.PluginVersion = new Version(1, 1, 1, 1); //the version of the plugin + lma1.Plugin.SaveToDatabase(); + + lma2.Plugin.Name = "MyPlugin"; + lma2.Plugin.RdmpVersion = new Version(version); //the version of Rdmp.Core targeted (same as above) + lma2.Plugin.PluginVersion = new Version(1, 1, 1, 2); //the version of the plugin (higher) + lma2.SaveToDatabase(); + + var plugins = Repository.PluginManager.GetCompatiblePlugins(); + Assert.That(plugins, Has.Length.EqualTo(1)); + Assert.That(plugins[0], Is.EqualTo(lma2.Plugin)); + } + + [Test] + public void TestPlugin_OrphanImport_Sharing() + { + //Setup the load module we want to test (with plugin parent) + var fi = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, + $"Blah2.{PackPluginRunner.PluginPackageSuffix}")); + File.WriteAllBytes(fi.FullName, new byte[] { 0x1, 0x2 }); + + var fi2 = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, + $"Blah2.{PackPluginRunner.PluginPackageSuffix}")); + File.WriteAllBytes(fi2.FullName, new byte[] { 0x1, 0x2 }); + + var fi3 = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, + $"Blah3.{PackPluginRunner.PluginPackageSuffix}")); + File.WriteAllBytes(fi3.FullName, new byte[] { 0x3, 0x4 }); + + var p = new Plugin(Repository, fi, new Version(1, 1, 1), new Version(1, 1, 1, 1)); + var lma = new LoadModuleAssembly(Repository, fi2, p); + var lma2 = new LoadModuleAssembly(Repository, fi3, p); + + //gather dependencies of the plugin (plugin[0] + lma[1]) + var g = new Gatherer(RepositoryLocator); + var sm = new ShareManager(RepositoryLocator); + var list = Gatherer.GatherDependencies(p).ToShareDefinitionWithChildren(sm); + + //Delete export definitions + foreach (var e in Repository.GetAllObjects()) + e.DeleteInDatabase(); + + //and delete plugin (CASCADE deletes lma too) + p.DeleteInDatabase(); + + //import them + var created = sm.ImportSharedObject(list).ToArray(); + + //There should be 3 + Assert.AreEqual(3, created.Length); + + Assert.AreEqual(3, Repository.GetAllObjects().Length); + + lma2 = (LoadModuleAssembly)created[2]; + + //now delete lma2 only + lma2.DeleteInDatabase(); + + Assert.AreEqual(2, Repository.GetAllObjects().Length); + + //import them + var created2 = sm.ImportSharedObject(list); + + //There should still be 3 + Assert.AreEqual(3, created2.Count()); + } - public class PluginClassTests:UnitTests + [TestCase("Rdmp.1.2.3.nupkg", "Rdmp")] + [TestCase("Rdmp.Dicom.1.2.3.nupkg", "Rdmp.Dicom")] + [TestCase("Rdmp.Dicom.nupkg", "Rdmp.Dicom")] + [TestCase("Rdmp.Dicom", "Rdmp.Dicom")] + public void Test_Plugin_ShortName(string fullname, string expected) { - [OneTimeSetUp] - protected override void OneTimeSetUp() - { - base.OneTimeSetUp(); - - SetupMEF(); - } - - [SetUp] - protected override void SetUp() - { - base.SetUp(); - - Repository.Clear(); - } - - [Test] - public void Catalogue_returns_latest_compatible_plugin() - { - var fi = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, "Blah.zip")); - File.WriteAllBytes(fi.FullName, new byte[] { 0x1, 0x2 }); - - var version = FileVersionInfo.GetVersionInfo(Assembly.GetExecutingAssembly().Location).FileVersion; - var tripart = new Version(version); - - var lma1 = WhenIHaveA(); - var lma2 = WhenIHaveA(); - - - 
lma1.Plugin.Name = "MyPlugin"; - lma1.Plugin.RdmpVersion = new Version(version); //the version of Rdmp.Core targetted - lma1.Plugin.PluginVersion = new Version(1, 1, 1, 1); //the version of the plugin - lma1.Plugin.SaveToDatabase(); - - lma2.Plugin.Name = "MyPlugin"; - lma2.Plugin.RdmpVersion = new Version(version);//the version of Rdmp.Core targetted (same as above) - lma2.Plugin.PluginVersion = new Version(1, 1, 1, 2);//the version of the plugin (higher) - lma2.SaveToDatabase(); - - var plugins = Repository.PluginManager.GetCompatiblePlugins(); - Assert.That(plugins, Has.Length.EqualTo(1)); - Assert.That(plugins[0], Is.EqualTo(lma2.Plugin)); - } - - [Test] - public void TestPlugin_OrphanImport_Sharing() - { - //Setup the load module we want to test (with plugin parent) - var fi = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory,"Blah2." + PackPluginRunner.PluginPackageSuffix)); - File.WriteAllBytes(fi.FullName, new byte[] { 0x1, 0x2 }); - - var fi2 = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory,"Blah2."+ PackPluginRunner.PluginPackageSuffix)); - File.WriteAllBytes(fi2.FullName, new byte[] { 0x1, 0x2 }); - - var fi3 = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory,"Blah3."+ PackPluginRunner.PluginPackageSuffix)); - File.WriteAllBytes(fi3.FullName, new byte[] { 0x3, 0x4 }); - - Core.Curation.Data.Plugin p = new Core.Curation.Data.Plugin(Repository, fi,new Version(1,1,1),new Version(1,1,1,1)); - var lma = new LoadModuleAssembly(Repository, fi2, p); - var lma2 = new LoadModuleAssembly(Repository, fi3, p); - - //gather dependencies of the plugin (plugin[0] + lma[1]) - Gatherer g = new Gatherer(RepositoryLocator); - ShareManager sm = new ShareManager(RepositoryLocator); - var list = g.GatherDependencies(p).ToShareDefinitionWithChildren(sm); - - //Delete export definitions - foreach (var e in Repository.GetAllObjects()) - e.DeleteInDatabase(); - - //and delete pluing (CASCADE deletes lma too) - p.DeleteInDatabase(); - - //import them - var created = sm.ImportSharedObject(list).ToArray(); - - //There should be 3 - Assert.AreEqual(3, created.Count()); - - Assert.AreEqual(3,Repository.GetAllObjects().Count()); - - lma2 = (LoadModuleAssembly) created[2]; - - //now delete lma2 only - lma2.DeleteInDatabase(); - - Assert.AreEqual(2, Repository.GetAllObjects().Count()); - - //import them - var created2 = sm.ImportSharedObject(list); - - //There should still be 3 - Assert.AreEqual(3, created2.Count()); - } - - [TestCase("Rdmp.1.2.3.nupkg","Rdmp")] - [TestCase("Rdmp.Dicom.1.2.3.nupkg","Rdmp.Dicom")] - [TestCase("Rdmp.Dicom.nupkg","Rdmp.Dicom")] - [TestCase("Rdmp.Dicom","Rdmp.Dicom")] - public void Test_Plugin_ShortName(string fullname, string expected) - { - var p = WhenIHaveA(); - p.Name = fullname; - Assert.AreEqual(expected,p.GetShortName()); - } - } -} + var p = WhenIHaveA(); + p.Name = fullname; + Assert.AreEqual(expected, p.GetShortName()); + } +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/ImportTests/ShareLoadMetadataTests.cs b/Rdmp.Core.Tests/Curation/ImportTests/ShareLoadMetadataTests.cs index 2a59599511..f7598ce18d 100644 --- a/Rdmp.Core.Tests/Curation/ImportTests/ShareLoadMetadataTests.cs +++ b/Rdmp.Core.Tests/Curation/ImportTests/ShareLoadMetadataTests.cs @@ -7,7 +7,7 @@ using Microsoft.Data.SqlClient; using System.Linq; using FAnsi.Discovery; -using Moq; +using NSubstitute; using NUnit.Framework; using Rdmp.Core.Curation.Data; using Rdmp.Core.Curation.Data.DataLoad; @@ -20,169 +20,167 @@ using 
Rdmp.Core.Sharing.Dependency.Gathering; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.ImportTests +namespace Rdmp.Core.Tests.Curation.ImportTests; + +public class ShareLoadMetadataTests : UnitTests { - public class ShareLoadMetadataTests : UnitTests + [Test] + public void GatherAndShare_LoadMetadata_EmptyLoadMetadata() { - [Test] - public void GatherAndShare_LoadMetadata_EmptyLoadMetadata() - { - //create an object - LoadMetadata lmd = new LoadMetadata(Repository, "MyLmd"); - - var lmd2 = ShareToNewRepository(lmd); - - //different repos so not identical - Assert.IsFalse(ReferenceEquals(lmd,lmd2)); - Assert.AreEqual(lmd.Name, lmd2.Name); - } - - [Test] - public void GatherAndShare_LoadMetadata_WithCatalogue() - { - //create an object - LoadMetadata lmd1; - var lmd2 = ShareToNewRepository(lmd1 = WhenIHaveA()); - - var cata1 = lmd1.GetAllCatalogues().Single(); - var cata2 = lmd2.GetAllCatalogues().Single(); - - //different repos so not identical - Assert.IsFalse(ReferenceEquals(lmd1,lmd2)); - Assert.IsFalse(ReferenceEquals(cata1,cata2)); - - Assert.AreEqual(lmd1.Name, lmd2.Name); - Assert.AreEqual(cata1.Name, cata2.Name); - } - - /// - /// Tests sharing a basic process task load metadata - /// - [Test] - public void GatherAndShare_LoadMetadata_WithProcessTask() - { - //create an object - LoadMetadata lmd1; - var lmd2 = ShareToNewRepository(lmd1=WhenIHaveA().ProcessTask.LoadMetadata); - - var pt1 = lmd1.ProcessTasks.Single(); - var pt2 = lmd2.ProcessTasks.Single(); - - //different repos so not identical - Assert.IsFalse(ReferenceEquals(lmd1,lmd2)); - AssertAreEqual(lmd1,lmd2); - - Assert.IsFalse(ReferenceEquals(pt1,pt2)); - AssertAreEqual(pt1,pt2); - - Assert.IsFalse(ReferenceEquals(pt1.ProcessTaskArguments.Single(),pt2.ProcessTaskArguments.Single())); - AssertAreEqual(pt1.ProcessTaskArguments.Single(),pt2.ProcessTaskArguments.Single()); - } - - /// - /// Tests sharing a more advanced loadmetadata with an actual class behind the ProcessTask - /// - [Test] - public void GatherAndShare_LoadMetadata_WithRealProcessTask() - { - //create an object - LoadMetadata lmd1 = WhenIHaveA(); + //create an object + var lmd = new LoadMetadata(Repository, "MyLmd"); - SetupMEF(); - - var pt1 = new ProcessTask(Repository, lmd1, LoadStage.Mounting); - pt1.ProcessTaskType = ProcessTaskType.Attacher; - pt1.LoadStage = LoadStage.Mounting; - pt1.Path = typeof(AnySeparatorFileAttacher).FullName; - pt1.SaveToDatabase(); + var lmd2 = ShareToNewRepository(lmd); + + //different repos so not identical + Assert.IsFalse(ReferenceEquals(lmd, lmd2)); + Assert.AreEqual(lmd.Name, lmd2.Name); + } + + [Test] + public void GatherAndShare_LoadMetadata_WithCatalogue() + { + //create an object + LoadMetadata lmd1; + var lmd2 = ShareToNewRepository(lmd1 = WhenIHaveA()); - pt1.CreateArgumentsForClassIfNotExists(typeof(AnySeparatorFileAttacher)); - var pta = pt1.ProcessTaskArguments.Single(pt => pt.Name == "Separator"); - pta.SetValue(","); - pta.SaveToDatabase(); + var cata1 = lmd1.GetAllCatalogues().Single(); + var cata2 = lmd2.GetAllCatalogues().Single(); + //different repos so not identical + Assert.IsFalse(ReferenceEquals(lmd1, lmd2)); + Assert.IsFalse(ReferenceEquals(cata1, cata2)); - var lmd2 = ShareToNewRepository(lmd1); - - //different repos so not identical - Assert.IsFalse(ReferenceEquals(lmd1,lmd2)); - AssertAreEqual(lmd1,lmd2); + Assert.AreEqual(lmd1.Name, lmd2.Name); + Assert.AreEqual(cata1.Name, cata2.Name); + } - var pt2 = lmd2.ProcessTasks.Single(); + /// + /// Tests sharing a basic process task load metadata + 
/// + [Test] + public void GatherAndShare_LoadMetadata_WithProcessTask() + { + //create an object + LoadMetadata lmd1; + var lmd2 = ShareToNewRepository(lmd1 = WhenIHaveA().ProcessTask.LoadMetadata); - Assert.IsFalse(ReferenceEquals(pt1,pt2)); - AssertAreEqual(pt1,pt2); + var pt1 = lmd1.ProcessTasks.Single(); + var pt2 = lmd2.ProcessTasks.Single(); - AssertAreEqual(pt1.GetAllArguments(),pt2.GetAllArguments()); + //different repos so not identical + Assert.IsFalse(ReferenceEquals(lmd1, lmd2)); + AssertAreEqual(lmd1, lmd2); - RuntimeTaskFactory f = new RuntimeTaskFactory(Repository); + Assert.IsFalse(ReferenceEquals(pt1, pt2)); + AssertAreEqual(pt1, pt2); - var stg = Mock.Of(x => x.LoadStage==LoadStage.Mounting); + Assert.IsFalse(ReferenceEquals(pt1.ProcessTaskArguments.Single(), pt2.ProcessTaskArguments.Single())); + AssertAreEqual(pt1.ProcessTaskArguments.Single(), pt2.ProcessTaskArguments.Single()); + } - f.Create(pt1, stg); - } + /// + /// Tests sharing a more advanced loadmetadata with an actual class behind the ProcessTask + /// + [Test] + public void GatherAndShare_LoadMetadata_WithRealProcessTask() + { + //create an object + var lmd1 = WhenIHaveA(); - - /// - /// Tests sharing a with a which has a reference argument (to - /// another object in the database. - /// - [Test] - public void GatherAndShare_LoadMetadata_WithReferenceProcessTaskArgument() + var pt1 = new ProcessTask(Repository, lmd1, LoadStage.Mounting) { - //create an object - LoadMetadata lmd1 = WhenIHaveA(); - - //setup Reflection / MEF - SetupMEF(); - RuntimeTaskFactory f = new RuntimeTaskFactory(Repository); - var stg = Mock.Of(x => - x.LoadStage==LoadStage.Mounting && - x.DbInfo == new DiscoveredServer(new SqlConnectionStringBuilder()).ExpectDatabase("d")); - - //create a single process task for the load - var pt1 = new ProcessTask(Repository, lmd1, LoadStage.Mounting); - pt1.ProcessTaskType = ProcessTaskType.MutilateDataTable; - pt1.LoadStage = LoadStage.AdjustRaw; - pt1.Path = typeof(SafePrimaryKeyCollisionResolverMutilation).FullName; - pt1.SaveToDatabase(); - - //give it a reference to an (unshared) object (ColumnInfo) - pt1.CreateArgumentsForClassIfNotExists(typeof(SafePrimaryKeyCollisionResolverMutilation)); - var pta = pt1.ProcessTaskArguments.Single(pt => pt.Name == "ColumnToResolveOn"); - pta.SetValue(WhenIHaveA()); - pta.SaveToDatabase(); - - //check that reflection can assemble the master ProcessTask - MutilateDataTablesRuntimeTask t = (MutilateDataTablesRuntimeTask) f.Create(pt1, stg); - Assert.IsNotNull(((SafePrimaryKeyCollisionResolverMutilation)t.MEFPluginClassInstance).ColumnToResolveOn); - - //share to the second repository (which won't have that ColumnInfo) - var lmd2 = ShareToNewRepository(lmd1); - - //create a new reflection factory for the new repo - RuntimeTaskFactory f2 = new RuntimeTaskFactory(lmd2.CatalogueRepository); - lmd2.CatalogueRepository.MEF = MEF; - - //when we create the shared instance it should not have a valid value for ColumnInfo (since it wasn't - and shouldn't be shared) - MutilateDataTablesRuntimeTask t2 = (MutilateDataTablesRuntimeTask) f2.Create(lmd2.ProcessTasks.Single(), stg); - Assert.IsNull(((SafePrimaryKeyCollisionResolverMutilation)t2.MEFPluginClassInstance).ColumnToResolveOn); - - } - - private LoadMetadata ShareToNewRepository(LoadMetadata lmd) + ProcessTaskType = ProcessTaskType.Attacher, + LoadStage = LoadStage.Mounting, + Path = typeof(AnySeparatorFileAttacher).FullName + }; + pt1.SaveToDatabase(); + + 
pt1.CreateArgumentsForClassIfNotExists(typeof(AnySeparatorFileAttacher)); + var pta = pt1.ProcessTaskArguments.Single(pt => pt.Name == "Separator"); + pta.SetValue(","); + pta.SaveToDatabase(); + + + var lmd2 = ShareToNewRepository(lmd1); + + //different repos so not identical + Assert.IsFalse(ReferenceEquals(lmd1, lmd2)); + AssertAreEqual(lmd1, lmd2); + + var pt2 = lmd2.ProcessTasks.Single(); + + Assert.IsFalse(ReferenceEquals(pt1, pt2)); + AssertAreEqual(pt1, pt2); + + AssertAreEqual(pt1.GetAllArguments(), pt2.GetAllArguments()); + + var f = new RuntimeTaskFactory(Repository); + + var stg = Substitute.For(); + stg.LoadStage.Returns(LoadStage.Mounting); + + RuntimeTaskFactory.Create(pt1, stg); + } + + + /// + /// Tests sharing a with a which has a reference argument (to + /// another object in the database. + /// + [Test] + public void GatherAndShare_LoadMetadata_WithReferenceProcessTaskArgument() + { + //create an object + var lmd1 = WhenIHaveA(); + + var f = new RuntimeTaskFactory(Repository); + var stg = Substitute.For(); + stg.LoadStage.Returns(LoadStage.Mounting); + stg.DbInfo.Returns(new DiscoveredServer(new SqlConnectionStringBuilder()).ExpectDatabase("d")); + + //create a single process task for the load + var pt1 = new ProcessTask(Repository, lmd1, LoadStage.Mounting) { - var gatherer = new Gatherer(RepositoryLocator); - - Assert.IsTrue(gatherer.CanGatherDependencies(lmd)); - var rootObj = gatherer.GatherDependencies(lmd); - - var sm = new ShareManager(RepositoryLocator,null); - var shareDefinition = rootObj.ToShareDefinitionWithChildren(sm); - - var repo2 = new MemoryDataExportRepository(); - var sm2 = new ShareManager(new RepositoryProvider(repo2)); - return sm2.ImportSharedObject(shareDefinition).OfType().Single(); - } + ProcessTaskType = ProcessTaskType.MutilateDataTable, + LoadStage = LoadStage.AdjustRaw, + Path = typeof(SafePrimaryKeyCollisionResolverMutilation).FullName + }; + pt1.SaveToDatabase(); + + //give it a reference to an (unshared) object (ColumnInfo) + pt1.CreateArgumentsForClassIfNotExists(typeof(SafePrimaryKeyCollisionResolverMutilation)); + var pta = pt1.ProcessTaskArguments.Single(pt => pt.Name == "ColumnToResolveOn"); + pta.SetValue(WhenIHaveA()); + pta.SaveToDatabase(); + + //check that reflection can assemble the master ProcessTask + var t = (MutilateDataTablesRuntimeTask)RuntimeTaskFactory.Create(pt1, stg); + Assert.IsNotNull(((SafePrimaryKeyCollisionResolverMutilation)t.MEFPluginClassInstance).ColumnToResolveOn); + + //share to the second repository (which won't have that ColumnInfo) + var lmd2 = ShareToNewRepository(lmd1); + + //create a new reflection factory for the new repo + var f2 = new RuntimeTaskFactory(lmd2.CatalogueRepository); + + //when we create the shared instance it should not have a valid value for ColumnInfo (since it wasn't - and shouldn't be shared) + var t2 = (MutilateDataTablesRuntimeTask)RuntimeTaskFactory.Create(lmd2.ProcessTasks.Single(), stg); + Assert.IsNull(((SafePrimaryKeyCollisionResolverMutilation)t2.MEFPluginClassInstance).ColumnToResolveOn); + } + + private LoadMetadata ShareToNewRepository(LoadMetadata lmd) + { + var gatherer = new Gatherer(RepositoryLocator); + + Assert.IsTrue(gatherer.CanGatherDependencies(lmd)); + var rootObj = gatherer.GatherDependencies(lmd); + + var sm = new ShareManager(RepositoryLocator, null); + var shareDefinition = rootObj.ToShareDefinitionWithChildren(sm); + + var repo2 = new MemoryDataExportRepository(); + var sm2 = new ShareManager(new RepositoryProvider(repo2)); + return 
sm2.ImportSharedObject(shareDefinition).OfType().Single(); } } \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/ImportTests/TestImportingAnObject.cs b/Rdmp.Core.Tests/Curation/ImportTests/TestImportingAnObject.cs index 395a55a7d4..51fb870e05 100644 --- a/Rdmp.Core.Tests/Curation/ImportTests/TestImportingAnObject.cs +++ b/Rdmp.Core.Tests/Curation/ImportTests/TestImportingAnObject.cs @@ -6,161 +6,160 @@ using Tests.Common; -namespace Rdmp.Core.Tests.Curation.ImportTests +namespace Rdmp.Core.Tests.Curation.ImportTests; + +public class TestImportingAnObject : DatabaseTests { - public class TestImportingAnObject : DatabaseTests + /* + [Test] + public void ImportACatalogue() { - /* - [Test] - public void ImportACatalogue() - { - var c = new Catalogue(CatalogueRepository, "omg cata"); - Assert.AreEqual(CatalogueRepository.GetAllObjects().Count(), 1); + var c = new Catalogue(CatalogueRepository, "omg cata"); + Assert.AreEqual(CatalogueRepository.GetAllObjects().Count(), 1); - var shareManager = new ShareManager(RepositoryLocator); + var shareManager = new ShareManager(RepositoryLocator); - var c2 = (Catalogue)new ShareDefinition(c).ImportObject(RepositoryLocator); + var c2 = (Catalogue)new ShareDefinition(c).ImportObject(RepositoryLocator); - Assert.AreEqual(c.Name, c2.Name); - Assert.AreNotEqual(c.ID,c2.ID); + Assert.AreEqual(c.Name, c2.Name); + Assert.AreNotEqual(c.ID,c2.ID); - Assert.AreEqual(CatalogueRepository.GetAllObjects().Count(),2); + Assert.AreEqual(CatalogueRepository.GetAllObjects().Count(),2); - } + } - [Test] - public void TestSharingAPluginIgnoringCollisions() - { - foreach (var oldP in CatalogueRepository.GetAllObjects()) - oldP.DeleteInDatabase(); - - var fi = new FileInfo("CommitAssemblyEmptyAssembly.dll"); - Assert.IsTrue(fi.Exists); - - var p = new Plugin(CatalogueRepository, fi); - var lma = new LoadModuleAssembly(CatalogueRepository, fi, p); - - var n = new SharedPluginImporter(p); - - //reject the reuse of an existing one - var p2 = n.Import(RepositoryLocator, new ThrowImmediatelyCheckNotifier()); - - Assert.AreEqual(p.LoadModuleAssemblies.Count(),p2.LoadModuleAssemblies.Count()); - Assert.AreEqual(p.LoadModuleAssemblies.First().Dll,p2.LoadModuleAssemblies.First().Dll); - Assert.AreNotEqual(p.ID, p2.ID); - } + [Test] + public void TestSharingAPluginIgnoringCollisions() + { + foreach (var oldP in CatalogueRepository.GetAllObjects()) + oldP.DeleteInDatabase(); - [Test] - public void TestSharingAPluginReplaceBinary() - { - foreach (var oldP in CatalogueRepository.GetAllObjects()) - oldP.DeleteInDatabase(); - - var fi = new FileInfo("CommitAssemblyEmptyAssembly.dll"); - Assert.IsTrue(fi.Exists); - - var p = new Plugin(CatalogueRepository, fi); - var lma = new LoadModuleAssembly(CatalogueRepository, fi, p); - - Assert.IsTrue(lma.Exists()); - - Dictionary newDll = new Dictionary(); - newDll.Add("Name","AmagadNewPluginLma"); - newDll.Add("Dll",new byte[]{0,1,0,1}); - newDll.Add("Pdb",new byte[]{0,1,0,1}); - newDll.Add("Committer","Frankenstine"); - newDll.Add("Description","Stuff"); - newDll.Add("Plugin_ID",999); - newDll.Add("DllFileVersion","5.0.0.1"); - newDll.Add("UploadDate",new DateTime(2001,1,1)); - newDll.Add("SoftwareVersion", "2.5.0.1"); - - var n = new SharedPluginImporter(new ShareDefinition(p), new[] { new ShareDefinition(newDll) }); - - //accept that it is an update - var p2 = n.Import(RepositoryLocator, new AcceptAllCheckNotifier()); - - Assert.IsFalse(lma.Exists()); - Assert.AreEqual(p,p2); - Assert.AreEqual(p.LoadModuleAssemblies.Single().Dll, new 
byte[] { 0, 1, 0, 1 }); - } + var fi = new FileInfo("CommitAssemblyEmptyAssembly.dll"); + Assert.IsTrue(fi.Exists); + + var p = new Plugin(CatalogueRepository, fi); + var lma = new LoadModuleAssembly(CatalogueRepository, fi, p); + + var n = new SharedPluginImporter(p); + + //reject the reuse of an existing one + var p2 = n.Import(RepositoryLocator, ThrowImmediatelyCheckNotifier.Quiet); + + Assert.AreEqual(p.LoadModuleAssemblies.Count(),p2.LoadModuleAssemblies.Count()); + Assert.AreEqual(p.LoadModuleAssemblies.First().Dll,p2.LoadModuleAssemblies.First().Dll); + Assert.AreNotEqual(p.ID, p2.ID); + } + + [Test] + public void TestSharingAPluginReplaceBinary() + { + foreach (var oldP in CatalogueRepository.GetAllObjects()) + oldP.DeleteInDatabase(); + + var fi = new FileInfo("CommitAssemblyEmptyAssembly.dll"); + Assert.IsTrue(fi.Exists); + + var p = new Plugin(CatalogueRepository, fi); + var lma = new LoadModuleAssembly(CatalogueRepository, fi, p); + + Assert.IsTrue(lma.Exists()); + + Dictionary newDll = new Dictionary(); + newDll.Add("Name","AmagadNewPluginLma"); + newDll.Add("Dll",new byte[]{0,1,0,1}); + newDll.Add("Pdb",new byte[]{0,1,0,1}); + newDll.Add("Committer","Frankenstine"); + newDll.Add("Description","Stuff"); + newDll.Add("Plugin_ID",999); + newDll.Add("DllFileVersion","5.0.0.1"); + newDll.Add("UploadDate",new DateTime(2001,1,1)); + newDll.Add("SoftwareVersion", "2.5.0.1"); + + var n = new SharedPluginImporter(new ShareDefinition(p), new[] { new ShareDefinition(newDll) }); - [Test] - [TestCase(false)] - [TestCase(true)] - public void TestSharingAPluginReplaceDllBinary(bool fiddleIds) + //accept that it is an update + var p2 = n.Import(RepositoryLocator, new AcceptAllCheckNotifier()); + + Assert.IsFalse(lma.Exists()); + Assert.AreEqual(p,p2); + Assert.AreEqual(p.LoadModuleAssemblies.Single().Dll, new byte[] { 0, 1, 0, 1 }); + } + + [Test] + [TestCase(false)] + [TestCase(true)] + public void TestSharingAPluginReplaceDllBinary(bool fiddleIds) + { + foreach (var oldP in CatalogueRepository.GetAllObjects()) + oldP.DeleteInDatabase(); + + var fi = new FileInfo("CommitAssemblyEmptyAssembly.dll"); + Assert.IsTrue(fi.Exists); + + var p = new Plugin(CatalogueRepository, fi); + var lma = new LoadModuleAssembly(CatalogueRepository, fi, p); + + var pStateless = new ShareDefinition(p); + var lmaStateless = new ShareDefinition(lma); + + if (fiddleIds) { - foreach (var oldP in CatalogueRepository.GetAllObjects()) - oldP.DeleteInDatabase(); - - var fi = new FileInfo("CommitAssemblyEmptyAssembly.dll"); - Assert.IsTrue(fi.Exists); - - var p = new Plugin(CatalogueRepository, fi); - var lma = new LoadModuleAssembly(CatalogueRepository, fi, p); - - var pStateless = new ShareDefinition(p); - var lmaStateless = new ShareDefinition(lma); - - if (fiddleIds) - { - //make it look like a new object we haven't seen before (but which collides on Name) this is the case when sharing with someone that isn't yourself - pStateless.Properties["ID"] = -80; - lmaStateless.Properties["Plugin_ID"] = -80; - } - - //edit the binary data to represent a new version of the dll that should be imported - lmaStateless.Properties["Dll"] = new byte[] { 0, 1, 0, 1 }; - - var n = new SharedPluginImporter(pStateless,new []{lmaStateless}); - - //accept that it is an update - var p2 = n.Import(RepositoryLocator, new AcceptAllCheckNotifier()); - - Assert.AreEqual(p, p2); - Assert.AreEqual(new byte[] { 0, 1, 0, 1 }, p2.LoadModuleAssemblies.Single().Dll); + //make it look like a new object we haven't seen before (but which collides on Name) 
this is the case when sharing with someone that isn't yourself + pStateless.Properties["ID"] = -80; + lmaStateless.Properties["Plugin_ID"] = -80; } + //edit the binary data to represent a new version of the dll that should be imported + lmaStateless.Properties["Dll"] = new byte[] { 0, 1, 0, 1 }; - [Test] - public void JsonTest() - { - foreach (var oldP in CatalogueRepository.GetAllObjects()) - oldP.DeleteInDatabase(); + var n = new SharedPluginImporter(pStateless,new []{lmaStateless}); + + //accept that it is an update + var p2 = n.Import(RepositoryLocator, new AcceptAllCheckNotifier()); - var fi = new FileInfo("CommitAssemblyEmptyAssembly.dll"); - Assert.IsTrue(fi.Exists); + Assert.AreEqual(p, p2); + Assert.AreEqual(new byte[] { 0, 1, 0, 1 }, p2.LoadModuleAssemblies.Single().Dll); + } + + + [Test] + public void JsonTest() + { + foreach (var oldP in CatalogueRepository.GetAllObjects()) + oldP.DeleteInDatabase(); + + var fi = new FileInfo("CommitAssemblyEmptyAssembly.dll"); + Assert.IsTrue(fi.Exists); - var p = new Plugin(CatalogueRepository, fi); - var lma = new LoadModuleAssembly(CatalogueRepository, fi, p); + var p = new Plugin(CatalogueRepository, fi); + var lma = new LoadModuleAssembly(CatalogueRepository, fi, p); - var pStateless = new ShareDefinition(p); - var lmaStatelessArray = new[] {new ShareDefinition(lma)}; + var pStateless = new ShareDefinition(p); + var lmaStatelessArray = new[] {new ShareDefinition(lma)}; - BinaryFormatter bf = new BinaryFormatter(); - string s; - string sa; + BinaryFormatter bf = new BinaryFormatter(); + string s; + string sa; - using (MemoryStream ms = new MemoryStream()) - { - bf.Serialize(ms, pStateless); - s = Convert.ToBase64String(ms.ToArray()); - } + using (MemoryStream ms = new MemoryStream()) + { + bf.Serialize(ms, pStateless); + s = Convert.ToBase64String(ms.ToArray()); + } - using (MemoryStream ms = new MemoryStream()) - { - bf.Serialize(ms, lmaStatelessArray); - sa = Convert.ToBase64String(ms.ToArray()); - } + using (MemoryStream ms = new MemoryStream()) + { + bf.Serialize(ms, lmaStatelessArray); + sa = Convert.ToBase64String(ms.ToArray()); + } - var import = new SharedPluginImporter(s, sa); + var import = new SharedPluginImporter(s, sa); - var p2 = import.Import(RepositoryLocator, new AcceptAllCheckNotifier()); + var p2 = import.Import(RepositoryLocator, new AcceptAllCheckNotifier()); - Assert.AreEqual(p.LoadModuleAssemblies.Count(),p2.LoadModuleAssemblies.Count()); - Assert.AreEqual(p.LoadModuleAssemblies.First().Dll,p2.LoadModuleAssemblies.First().Dll); - Assert.AreEqual(p.ID, p2.ID); + Assert.AreEqual(p.LoadModuleAssemblies.Count(),p2.LoadModuleAssemblies.Count()); + Assert.AreEqual(p.LoadModuleAssemblies.First().Dll,p2.LoadModuleAssemblies.First().Dll); + Assert.AreEqual(p.ID, p2.ID); - }*/ - } -} + }*/ +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/AllKeywordsDescribedTest.cs b/Rdmp.Core.Tests/Curation/Integration/AllKeywordsDescribedTest.cs index 828e168823..dcab4d9ef6 100644 --- a/Rdmp.Core.Tests/Curation/Integration/AllKeywordsDescribedTest.cs +++ b/Rdmp.Core.Tests/Curation/Integration/AllKeywordsDescribedTest.cs @@ -8,128 +8,120 @@ using System.Collections.Generic; using System.Linq; using FAnsi.Discovery; -using MapsDirectlyToDatabaseTable; using NUnit.Framework; using Rdmp.Core.Curation.Data; +using Rdmp.Core.MapsDirectlyToDatabaseTable; using Rdmp.Core.Repositories; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Integration +namespace Rdmp.Core.Tests.Curation.Integration; + +[TestFixture] 
+public class AllKeywordsDescribedTest : DatabaseTests { - [TestFixture] - public class AllKeywordsDescribedTest :DatabaseTests + [OneTimeSetUp] + protected override void OneTimeSetUp() { + base.OneTimeSetUp(); - [OneTimeSetUp] - protected override void OneTimeSetUp() - { - base.OneTimeSetUp(); - - CatalogueRepository.CommentStore.ReadComments(TestContext.CurrentContext.TestDirectory); - } + CatalogueRepository.CommentStore.ReadComments(TestContext.CurrentContext.TestDirectory); + } - [Test] - public void AllTablesDescribed() - { - //ensures the DQERepository gets a chance to add its help text - new DQERepository(CatalogueRepository); + [Test] + public void AllTablesDescribed() + { + //ensures the DQERepository gets a chance to add its help text + new DQERepository(CatalogueRepository); - List problems = new List(); + var databaseTypes = typeof(Catalogue).Assembly.GetTypes().Where(t => + typeof(IMapsDirectlyToDatabaseTable).IsAssignableFrom(t) && !t.IsInterface && !t.IsAbstract && + !t.Name.StartsWith("Spontaneous") && !t.Name.Contains("Proxy")).ToArray(); - var databaseTypes = typeof(Catalogue).Assembly.GetTypes().Where(t => typeof(IMapsDirectlyToDatabaseTable).IsAssignableFrom(t) && !t.IsInterface && !t.IsAbstract && !t.Name.StartsWith("Spontaneous") && !t.Name.Contains("Proxy")).ToArray(); - - foreach (var type in databaseTypes) + var problems = databaseTypes + .Select(type => new { - var docs = CatalogueRepository.CommentStore[type.Name]??CatalogueRepository.CommentStore["I"+type.Name]; - - if(string.IsNullOrWhiteSpace(docs)) - problems.Add("Type " + type.Name + " does not have an entry in the help dictionary (maybe the class doesn't have documentation? - try adding /// style comments to the class)"); - - } - foreach (string problem in problems) - Console.WriteLine("Fatal Problem:" + problem); - - Assert.AreEqual(0,problems.Count); - } - - [Test] - public void AllForeignKeysDescribed() - { - List allKeys = new List(); - - //ensures the DQERepository gets a chance to add its help text - new DQERepository(CatalogueRepository); - - allKeys.AddRange(GetForeignKeys(CatalogueTableRepository.DiscoveredServer)); - allKeys.AddRange(GetForeignKeys(DataExportTableRepository.DiscoveredServer)); - allKeys.AddRange(GetForeignKeys(new DiscoveredServer(DataQualityEngineConnectionString))); - - List problems = new List(); - foreach (string fkName in allKeys) - { - if (!CatalogueRepository.CommentStore.ContainsKey(fkName)) - problems.Add(fkName + " is a foreign Key (which does not CASCADE) but does not have any HelpText"); - } - - foreach (string problem in problems) - Console.WriteLine("Fatal Problem:" + problem); - - Assert.AreEqual(0, problems.Count, @"Add a description for each of these to KeywordHelp.txt"); - } - - [Test] - public void AllUserIndexesDescribed() - { - List allIndexes = new List(); - - //ensures the DQERepository gets a chance to add its help text - new DQERepository(CatalogueRepository); - - allIndexes.AddRange(GetIndexes(CatalogueTableRepository.DiscoveredServer)); - allIndexes.AddRange(GetIndexes(DataExportTableRepository.DiscoveredServer)); - allIndexes.AddRange(GetIndexes(new DiscoveredServer(DataQualityEngineConnectionString))); - - List problems = new List(); - foreach (string idx in allIndexes) - { - if (!CatalogueRepository.CommentStore.ContainsKey(idx)) - problems.Add(idx + " is an index but does not have any HelpText"); - } - - foreach (string problem in problems) - Console.WriteLine("Fatal Problem:" + problem); - - Assert.AreEqual(0,problems.Count,@"Add a description for 
each of these to KeywordHelp.txt");
-
-        }
-
-        private IEnumerable<string> GetForeignKeys(DiscoveredServer server)
-        {
-            using (var con = server.GetConnection())
-            {
-                con.Open();
-                var r = server.GetCommand(@"select name from sys.foreign_keys where delete_referential_action = 0", con).ExecuteReader();
+                type,
+                docs = CatalogueRepository.CommentStore[type.Name] ??
+                       CatalogueRepository.CommentStore[$"I{type.Name}"]
+            })
+            .Where(t => string.IsNullOrWhiteSpace(t.docs))
+            .Select(t =>
+                $"Type {t.type.Name} does not have an entry in the help dictionary (maybe the class doesn't have documentation? - try adding /// style comments to the class)")
+            .ToList();
+        foreach (var problem in problems)
+            Console.WriteLine($"Fatal Problem:{problem}");
+
+        Assert.AreEqual(0, problems.Count);
+    }
-                while (r.Read())
-                    yield return (string)r["name"];
-            }
-        }
+    [Test]
+    public void AllForeignKeysDescribed()
+    {
+        var allKeys = new List<string>();
-        private IEnumerable<string> GetIndexes(DiscoveredServer server)
-        {
-            using (var con = server.GetConnection())
-            {
-                con.Open();
-                var r = server.GetCommand(@"select si.name from sys.indexes si
+        //ensures the DQERepository gets a chance to add its help text
+        new DQERepository(CatalogueRepository);
+
+        allKeys.AddRange(GetForeignKeys(CatalogueTableRepository.DiscoveredServer));
+        allKeys.AddRange(GetForeignKeys(DataExportTableRepository.DiscoveredServer));
+        allKeys.AddRange(GetForeignKeys(new DiscoveredServer(DataQualityEngineConnectionString)));
+
+        var problems = allKeys.Where(fkName => !CatalogueRepository.CommentStore.ContainsKey(fkName))
+            .Select(fkName => $"{fkName} is a foreign Key (which does not CASCADE) but does not have any HelpText")
+            .ToList();
+
+        foreach (var problem in problems)
+            Console.WriteLine($"Fatal Problem:{problem}");
+
+        Assert.AreEqual(0, problems.Count, @"Add a description for each of these to KeywordHelp.txt");
+    }
+
+    [Test]
+    public void AllUserIndexesDescribed()
+    {
+        var allIndexes = new List<string>();
+
+        //ensures the DQERepository gets a chance to add its help text
+        new DQERepository(CatalogueRepository);
+
+        allIndexes.AddRange(GetIndexes(CatalogueTableRepository.DiscoveredServer));
+        allIndexes.AddRange(GetIndexes(DataExportTableRepository.DiscoveredServer));
+        allIndexes.AddRange(GetIndexes(new DiscoveredServer(DataQualityEngineConnectionString)));
+
+        var problems = new List<string>();
+        foreach (var idx in allIndexes)
+            if (!CatalogueRepository.CommentStore.ContainsKey(idx))
+                problems.Add($"{idx} is an index but does not have any HelpText");
+
+        foreach (var problem in problems)
+            Console.WriteLine($"Fatal Problem:{problem}");
+
+        Assert.AreEqual(0, problems.Count, @"Add a description for each of these to KeywordHelp.txt");
+    }
+
+    private static IEnumerable<string> GetForeignKeys(DiscoveredServer server)
+    {
+        using var con = server.GetConnection();
+        con.Open();
+        var r = server.GetCommand(@"select name from sys.foreign_keys where delete_referential_action = 0", con)
+            .ExecuteReader();
+
+        while (r.Read())
+            yield return (string)r["name"];
+    }
+
+    private static IEnumerable<string> GetIndexes(DiscoveredServer server)
+    {
+        using var con = server.GetConnection();
+        con.Open();
+        var r = server.GetCommand(@"select si.name from sys.indexes si
JOIN sys.objects so ON si.[object_id] = so.[object_id] WHERE so.type = 'U' AND is_primary_key = 0 and si.name is not null and so.name <> 'sysdiagrams'", con).ExecuteReader();
-                while (r.Read())
-                    yield return (string)r["name"];
-            }
-        }
+        while (r.Read())
+            yield return (string)r["name"];
    }
-}
+}
\ No newline at end of file
diff --git 
a/Rdmp.Core.Tests/Curation/Integration/ArgumentTests/ArgumentTypeTests.cs b/Rdmp.Core.Tests/Curation/Integration/ArgumentTests/ArgumentTypeTests.cs index 1290507b9e..bd6914a824 100644 --- a/Rdmp.Core.Tests/Curation/Integration/ArgumentTests/ArgumentTypeTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/ArgumentTests/ArgumentTypeTests.cs @@ -12,50 +12,41 @@ using Rdmp.Core.Curation.Data.DataLoad; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Integration.ArgumentTests +namespace Rdmp.Core.Tests.Curation.Integration.ArgumentTests; + +public class ArgumentTypeTests : UnitTests { - public class ArgumentTypeTests:UnitTests + private object[] _expectedAnswers = + { + 5, + new CultureInfo("en-us"), + CultureInfo.CurrentCulture + }; + + [TestCase(typeof(int), "5", 0)] + [TestCase(typeof(CultureInfo), "en-us", 1)] + public void Test_Type_WithStringValue(Type t, string val, int expectedAnswerIdx) + { + var arg = WhenIHaveA(); + + arg.SetType(t); + arg.Value = val; + + Assert.AreEqual(_expectedAnswers[expectedAnswerIdx], arg.GetValueAsSystemType()); + } + + [Test] + public void TestClassDemandingDouble_CreateArgumentsForClassIfNotExists() + { + var args = WhenIHaveA().CreateArgumentsForClassIfNotExists(); + + Assert.AreEqual(1.0, args.Single().GetValueAsSystemType()); + Assert.AreEqual("1", args.Single().Value); + } + + private class TestClassDemandingDouble { - [OneTimeSetUp] - protected override void OneTimeSetUp() - { - base.OneTimeSetUp(); - base.SetupMEF(); - } - - object[] _expectedAnswers = new object[] - { - 5, - new CultureInfo("en-us"), - CultureInfo.CurrentCulture - }; - - [TestCase(typeof(int),"5",0)] - [TestCase(typeof(CultureInfo),"en-us",1)] - public void Test_Type_WithStringValue(Type t, string val, int expectedAnswerIdx) - { - var arg = WhenIHaveA(); - - arg.SetType(t); - arg.Value = val; - - Assert.AreEqual(_expectedAnswers[expectedAnswerIdx],arg.GetValueAsSystemType()); - } - - [Test] - public void TestClassDemandingDouble_CreateArgumentsForClassIfNotExists() - { - var args = WhenIHaveA().CreateArgumentsForClassIfNotExists(); - - Assert.AreEqual(1.0,args.Single().GetValueAsSystemType()); - Assert.AreEqual("1",args.Single().Value); - - } - - class TestClassDemandingDouble - { - [DemandsInitialization("some field",defaultValue:1)] - public double MyVar { get; set; } - } + [DemandsInitialization("some field", defaultValue: 1)] + public double MyVar { get; set; } } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/ArgumentTests/ProcessTaskArgumentTests.cs b/Rdmp.Core.Tests/Curation/Integration/ArgumentTests/ProcessTaskArgumentTests.cs index 61594a3f89..9eccce8937 100644 --- a/Rdmp.Core.Tests/Curation/Integration/ArgumentTests/ProcessTaskArgumentTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/ArgumentTests/ProcessTaskArgumentTests.cs @@ -17,393 +17,390 @@ using Rdmp.Core.DataLoad; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Integration.ArgumentTests +namespace Rdmp.Core.Tests.Curation.Integration.ArgumentTests; + +public class ProcessTaskArgumentTests : DatabaseTests { - public class ProcessTaskArgumentTests:DatabaseTests + [Test] + [TestCase(true)] + [TestCase(false)] + public void TypeOfTableInfo(bool declareAsInterface) { - [Test] - [TestCase(true)] - [TestCase(false)] - public void TypeOfTableInfo(bool declareAsInterface) - { - string tableInfoName = "TableInfoFor_" + new StackTrace().GetFrame(0).GetMethod().Name; + var tableInfoName = $"TableInfoFor_{new StackTrace().GetFrame(0).GetMethod().Name}"; + + var toCleanup = 
CatalogueRepository.GetAllObjects() + .SingleOrDefault(t => t.Name.Equals(tableInfoName)); + + toCleanup?.DeleteInDatabase(); - TableInfo toCleanup = CatalogueRepository.GetAllObjects().SingleOrDefault(t => t.Name.Equals(tableInfoName)); - - if(toCleanup != null) - toCleanup.DeleteInDatabase(); + var loadMetadata = new LoadMetadata(CatalogueRepository); - var loadMetadata = new LoadMetadata(CatalogueRepository); + try + { + var pt = new ProcessTask(CatalogueRepository, loadMetadata, LoadStage.AdjustStaging); + var pta = new ProcessTaskArgument(CatalogueRepository, pt); + + pta.SetType(declareAsInterface ? typeof(ITableInfo) : typeof(TableInfo)); + var tableInfo = new TableInfo(CatalogueRepository, tableInfoName); try - { - var pt = new ProcessTask(CatalogueRepository, loadMetadata, LoadStage.AdjustStaging); - var pta = new ProcessTaskArgument(CatalogueRepository, pt); - - if(declareAsInterface) - pta.SetType(typeof(ITableInfo)); - else - pta.SetType(typeof (TableInfo)); - - var tableInfo = new TableInfo(CatalogueRepository, tableInfoName); - try - { - pta.SetValue(tableInfo); - pta.SaveToDatabase(); - - var newInstanceOfPTA = CatalogueRepository.GetObjectByID(pta.ID); - - Assert.AreEqual(newInstanceOfPTA.Value,pta.Value); - - TableInfo t1 = (TableInfo) pta.GetValueAsSystemType(); - TableInfo t2 = (TableInfo)newInstanceOfPTA.GetValueAsSystemType(); - - Assert.AreEqual(t1.ID,t2.ID); - } - finally - { - tableInfo.DeleteInDatabase(); - } + { + pta.SetValue(tableInfo); + pta.SaveToDatabase(); + + var newInstanceOfPTA = CatalogueRepository.GetObjectByID(pta.ID); + + Assert.AreEqual(newInstanceOfPTA.Value, pta.Value); + + var t1 = (TableInfo)pta.GetValueAsSystemType(); + var t2 = (TableInfo)newInstanceOfPTA.GetValueAsSystemType(); + + Assert.AreEqual(t1.ID, t2.ID); } finally { - loadMetadata.DeleteInDatabase(); + tableInfo.DeleteInDatabase(); } } - [Test] - public void TypeOfPreLoadDiscardedColumn() + finally { - string methodName = new StackTrace().GetFrame(0).GetMethod().Name; - string tableInfoName = "TableInfoFor_" + methodName; - string preLoadDiscardedColumnName = "PreLoadDiscardedColumnFor_" + methodName; + loadMetadata.DeleteInDatabase(); + } + } - TableInfo toCleanup = CatalogueRepository.GetAllObjects().SingleOrDefault(t => t.Name.Equals(tableInfoName)); - PreLoadDiscardedColumn toCleanupCol = CatalogueRepository.GetAllObjects() - .SingleOrDefault(c => c.RuntimeColumnName.Equals(preLoadDiscardedColumnName)); - - //must delete pre load discarded first - if (toCleanupCol != null) - toCleanupCol.DeleteInDatabase(); + [Test] + public void TypeOfPreLoadDiscardedColumn() + { + var methodName = new StackTrace().GetFrame(0).GetMethod().Name; + var tableInfoName = $"TableInfoFor_{methodName}"; + var preLoadDiscardedColumnName = $"PreLoadDiscardedColumnFor_{methodName}"; - if (toCleanup != null) - toCleanup.DeleteInDatabase(); + var toCleanup = CatalogueRepository.GetAllObjects() + .SingleOrDefault(t => t.Name.Equals(tableInfoName)); + var toCleanupCol = CatalogueRepository.GetAllObjects() + .SingleOrDefault(c => c.RuntimeColumnName.Equals(preLoadDiscardedColumnName)); - var lmd = new LoadMetadata(CatalogueRepository); + //must delete pre load discarded first + toCleanupCol?.DeleteInDatabase(); - try - { - var pt = new ProcessTask(CatalogueRepository, lmd, LoadStage.AdjustStaging); - var pta = new ProcessTaskArgument(CatalogueRepository, pt); + toCleanup?.DeleteInDatabase(); - pta.SetType(typeof(PreLoadDiscardedColumn)); + var lmd = new LoadMetadata(CatalogueRepository); + + try + { + var pt = 
new ProcessTask(CatalogueRepository, lmd, LoadStage.AdjustStaging); + var pta = new ProcessTaskArgument(CatalogueRepository, pt); + + pta.SetType(typeof(PreLoadDiscardedColumn)); - var tableInfo = new TableInfo(CatalogueRepository, tableInfoName); + var tableInfo = new TableInfo(CatalogueRepository, tableInfoName); - PreLoadDiscardedColumn preloadDiscardedColumn = new PreLoadDiscardedColumn(CatalogueRepository, tableInfo, preLoadDiscardedColumnName); - try - { - pta.SetValue(preloadDiscardedColumn); - pta.SaveToDatabase(); + var preloadDiscardedColumn = + new PreLoadDiscardedColumn(CatalogueRepository, tableInfo, preLoadDiscardedColumnName); + try + { + pta.SetValue(preloadDiscardedColumn); + pta.SaveToDatabase(); - var newInstanceOfPTA = CatalogueRepository.GetObjectByID(pta.ID); - Assert.AreEqual(newInstanceOfPTA.Value, pta.Value); + var newInstanceOfPTA = CatalogueRepository.GetObjectByID(pta.ID); + Assert.AreEqual(newInstanceOfPTA.Value, pta.Value); - PreLoadDiscardedColumn p1 = (PreLoadDiscardedColumn)pta.GetValueAsSystemType(); - PreLoadDiscardedColumn p2 = (PreLoadDiscardedColumn)newInstanceOfPTA.GetValueAsSystemType(); + var p1 = (PreLoadDiscardedColumn)pta.GetValueAsSystemType(); + var p2 = (PreLoadDiscardedColumn)newInstanceOfPTA.GetValueAsSystemType(); - Assert.AreEqual(p1.ID, p2.ID); - } - finally - { - preloadDiscardedColumn.DeleteInDatabase(); - tableInfo.DeleteInDatabase(); - } + Assert.AreEqual(p1.ID, p2.ID); } finally { - lmd.DeleteInDatabase(); + preloadDiscardedColumn.DeleteInDatabase(); + tableInfo.DeleteInDatabase(); } } - - [Test] - public void TableInfoType_FetchAfterDelete_ReturnsNull() + finally { - string tableInfoName = "TableInfoFor_" + new StackTrace().GetFrame(0).GetMethod().Name; + lmd.DeleteInDatabase(); + } + } + + [Test] + public void TableInfoType_FetchAfterDelete_ReturnsNull() + { + var tableInfoName = $"TableInfoFor_{new StackTrace().GetFrame(0).GetMethod().Name}"; - TableInfo toCleanup = CatalogueRepository.GetAllObjects().SingleOrDefault(t => t.Name.Equals(tableInfoName)); + var toCleanup = CatalogueRepository.GetAllObjects() + .SingleOrDefault(t => t.Name.Equals(tableInfoName)); - if (toCleanup != null) - toCleanup.DeleteInDatabase(); + toCleanup?.DeleteInDatabase(); - var lmd = new LoadMetadata(CatalogueRepository); + var lmd = new LoadMetadata(CatalogueRepository); - try - { - var pt = new ProcessTask(CatalogueRepository, lmd, LoadStage.AdjustStaging); - var pta = new ProcessTaskArgument(CatalogueRepository, pt); + try + { + var pt = new ProcessTask(CatalogueRepository, lmd, LoadStage.AdjustStaging); + var pta = new ProcessTaskArgument(CatalogueRepository, pt); - //Prepare to receive a TableInfo object - pta.SetType(typeof(TableInfo)); + //Prepare to receive a TableInfo object + pta.SetType(typeof(TableInfo)); - var tableInfo = new TableInfo(CatalogueRepository, tableInfoName); - - //Heres the TableInfo object - pta.SetValue(tableInfo); - pta.SaveToDatabase(); + var tableInfo = new TableInfo(CatalogueRepository, tableInfoName); - //Lolz I just deleted it out of the database - tableInfo.DeleteInDatabase(); + //Heres the TableInfo object + pta.SetValue(tableInfo); + pta.SaveToDatabase(); - //give the object back now please? 
- returns null because it's gone (new behaviour) - Assert.IsNull(pta.GetValueAsSystemType()); + //Lolz I just deleted it out of the database + tableInfo.DeleteInDatabase(); - //old behaviour - /*var ex = Assert.Throws(()=>pta.GetValueAsSystemType()); - StringAssert.Contains("Could not find TableInfo with ID",ex.Message);*/ - } - finally - { - lmd.DeleteInDatabase(); - } - } + //give the object back now please? - returns null because it's gone (new behaviour) + Assert.IsNull(pta.GetValueAsSystemType()); - [Test] - public void LieToProcessTaskArgumentAboutWhatTypeIs_Throws() + //old behaviour + /*var ex = Assert.Throws(()=>pta.GetValueAsSystemType()); + StringAssert.Contains("Could not find TableInfo with ID",ex.Message);*/ + } + finally { - string tableInfoName = "TableInfoFor_" + new StackTrace().GetFrame(0).GetMethod().Name; + lmd.DeleteInDatabase(); + } + } - TableInfo toCleanup = CatalogueRepository.GetAllObjects().SingleOrDefault(t => t.Name.Equals(tableInfoName)); + [Test] + public void LieToProcessTaskArgumentAboutWhatTypeIs_Throws() + { + var tableInfoName = $"TableInfoFor_{new StackTrace().GetFrame(0).GetMethod().Name}"; + + var toCleanup = CatalogueRepository.GetAllObjects() + .SingleOrDefault(t => t.Name.Equals(tableInfoName)); - if (toCleanup != null) - toCleanup.DeleteInDatabase(); + toCleanup?.DeleteInDatabase(); - var lmd = new LoadMetadata(CatalogueRepository); + var lmd = new LoadMetadata(CatalogueRepository); + try + { + var pt = new ProcessTask(CatalogueRepository, lmd, LoadStage.AdjustStaging); + var pta = new ProcessTaskArgument(CatalogueRepository, pt); + var tableInfo = new TableInfo(CatalogueRepository, tableInfoName); try { - var pt = new ProcessTask(CatalogueRepository, lmd, LoadStage.AdjustStaging); - var pta = new ProcessTaskArgument(CatalogueRepository, pt); - var tableInfo = new TableInfo(CatalogueRepository, tableInfoName); - try - { - //tell it that we are going to give it a PreLoadDiscardedColumn - pta.SetType(typeof(PreLoadDiscardedColumn)); - //then surprise! heres a TableInfo! - var ex = Assert.Throws(()=>pta.SetValue(tableInfo)); - StringAssert.Contains("has an incompatible Type specified (Rdmp.Core.Curation.Data.DataLoad.PreLoadDiscardedColumn)",ex.Message); - - } - finally - { - tableInfo.DeleteInDatabase(); - } + //tell it that we are going to give it a PreLoadDiscardedColumn + pta.SetType(typeof(PreLoadDiscardedColumn)); + //then surprise! heres a TableInfo! 
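+                //SetValue is expected to reject a value whose runtime type does not match the Type declared
+                //via SetType above; the assertion below only inspects the error message, which should name
+                //the incompatible declared type (PreLoadDiscardedColumn)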
+ var ex = Assert.Throws(() => pta.SetValue(tableInfo)); + StringAssert.Contains( + "has an incompatible Type specified (Rdmp.Core.Curation.Data.DataLoad.PreLoadDiscardedColumn)", + ex.Message); } finally { - - lmd.DeleteInDatabase(); + tableInfo.DeleteInDatabase(); } } - - private ProcessTaskArgument CreateNewProcessTaskArgumentInDatabase(out LoadMetadata lmd) + finally { - lmd = new LoadMetadata(CatalogueRepository); - - var pt = new ProcessTask(CatalogueRepository, lmd, LoadStage.AdjustStaging); - return new ProcessTaskArgument(CatalogueRepository, pt); + lmd.DeleteInDatabase(); } + } - [Test] - public void TestEncryptedPasswordHostArgumentType() - { - LoadMetadata lmd = null; - ProcessTaskArgument pta = null; + private ProcessTaskArgument CreateNewProcessTaskArgumentInDatabase(out LoadMetadata lmd) + { + lmd = new LoadMetadata(CatalogueRepository); - try - { - pta = CreateNewProcessTaskArgumentInDatabase(out lmd); - pta.SetType(typeof(EncryptedString)); - pta.SetValue(new EncryptedString(CatalogueRepository) { Value = "test123" }); - pta.SaveToDatabase(); + var pt = new ProcessTask(CatalogueRepository, lmd, LoadStage.AdjustStaging); + return new ProcessTaskArgument(CatalogueRepository, pt); + } - var loadedPta = CatalogueRepository.GetObjectByID(pta.ID); - var value = loadedPta.GetValueAsSystemType() as EncryptedString; - Assert.NotNull(value); - Assert.AreEqual("test123", value.GetDecryptedValue()); - } - finally + [Test] + public void TestEncryptedPasswordHostArgumentType() + { + LoadMetadata lmd = null; + ProcessTaskArgument pta = null; + + try + { + pta = CreateNewProcessTaskArgumentInDatabase(out lmd); + pta.SetType(typeof(EncryptedString)); + pta.SetValue(new EncryptedString(CatalogueRepository) { Value = "test123" }); + pta.SaveToDatabase(); + + var loadedPta = CatalogueRepository.GetObjectByID(pta.ID); + var value = loadedPta.GetValueAsSystemType() as EncryptedString; + Assert.NotNull(value); + Assert.AreEqual("test123", value.GetDecryptedValue()); + } + finally + { + if (pta != null) { - if (pta != null) - { - var processTask = CatalogueRepository.GetObjectByID(pta.ProcessTask_ID); - processTask.DeleteInDatabase(); - } - - if (lmd != null) - lmd.DeleteInDatabase(); + var processTask = CatalogueRepository.GetObjectByID(pta.ProcessTask_ID); + processTask.DeleteInDatabase(); } + + lmd?.DeleteInDatabase(); } + } - [Test] - public void TestArgumentCreation() + [Test] + public void TestArgumentCreation() + { + var lmd = new LoadMetadata(CatalogueRepository, "TestArgumentCreation"); + var pt = new ProcessTask(CatalogueRepository, lmd, LoadStage.AdjustRaw); + pt.CreateArgumentsForClassIfNotExists(); + try { + var arg = pt.ProcessTaskArguments.Single(); - LoadMetadata lmd = new LoadMetadata(CatalogueRepository,"TestArgumentCreation"); - var pt = new ProcessTask(CatalogueRepository, lmd, LoadStage.AdjustRaw); - pt.CreateArgumentsForClassIfNotExists(); - try - { - var arg = pt.ProcessTaskArguments.Single(); + Assert.AreEqual("MyBool", arg.Name); + Assert.AreEqual("System.Boolean", arg.Type); + Assert.AreEqual("Fishes", arg.Description); + Assert.AreEqual("True", arg.Value); + Assert.AreEqual(true, arg.GetValueAsSystemType()); + } + finally + { + pt.DeleteInDatabase(); + lmd.DeleteInDatabase(); + } + } - Assert.AreEqual("MyBool", arg.Name); - Assert.AreEqual("System.Boolean", arg.Type); - Assert.AreEqual("Fishes", arg.Description); - Assert.AreEqual("True",arg.Value); - Assert.AreEqual(true, arg.GetValueAsSystemType()); + [Test] + public void 
TestNestedDemandsGetPutIntoDatabaseAndCanBeBroughtBack() + { + var pipe = new Pipeline(CatalogueRepository, "NestedPipe"); + var pc = new PipelineComponent(CatalogueRepository, pipe, typeof(BasicDataReleaseDestination), -1, + "Coconuts"); + pipe.DestinationPipelineComponent_ID = pc.ID; + pipe.SaveToDatabase(); - } - finally - { - pt.DeleteInDatabase(); - lmd.DeleteInDatabase(); - } - } + //some of the DemandsInitialization on BasicDataReleaseDestination should be nested + var f = new ArgumentFactory(); + Assert.True( + ArgumentFactory.GetRequiredProperties(typeof(BasicDataReleaseDestination)) + .Any(r => r.ParentPropertyInfo != null)); - [Test] - public void TestNestedDemandsGetPutIntoDatabaseAndCanBeBroughtBack() - { - var pipe = new Pipeline(CatalogueRepository, "NestedPipe"); - var pc = new PipelineComponent(CatalogueRepository, pipe, typeof (BasicDataReleaseDestination), -1, - "Coconuts"); - pipe.DestinationPipelineComponent_ID = pc.ID; - pipe.SaveToDatabase(); + //new pc should have no arguments + Assert.That(pc.GetAllArguments(), Is.Empty); + + //we create them (the root and nested ones!) + var args = pc.CreateArgumentsForClassIfNotExists(); - //some of the DemandsInitialization on BasicDataReleaseDestination should be nested - var f = new ArgumentFactory(); - Assert.True( - f.GetRequiredProperties(typeof(BasicDataReleaseDestination)).Any(r => r.ParentPropertyInfo != null)); + //and get all arguments / create arguments for class should have handled that + Assert.That(pc.GetAllArguments().Any()); - //new pc should have no arguments - Assert.That(pc.GetAllArguments(), Is.Empty); + var match = args.Single(a => a.Name == "ReleaseSettings.DeleteFilesOnSuccess"); + match.SetValue(true); + match.SaveToDatabase(); - //we create them (the root and nested ones!) 
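+        //nested [DemandsInitialization] properties are expected to surface as dot-separated argument names
+        //(e.g. "ReleaseSettings.DeleteFilesOnSuccess", which is the name looked up further down)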
- var args = pc.CreateArgumentsForClassIfNotExists(); - - //and get all arguments / create arguments for class should have handled that - Assert.That(pc.GetAllArguments().Any()); + var useCase = ReleaseUseCase.DesignTime(); - var match = args.Single(a => a.Name == "ReleaseSettings.DeleteFilesOnSuccess"); - match.SetValue(true); - match.SaveToDatabase(); + var destInstance = DataFlowPipelineEngineFactory.CreateDestinationIfExists(pipe); - var useCase = ReleaseUseCase.DesignTime(); + Assert.AreEqual(true, ((BasicDataReleaseDestination)destInstance).ReleaseSettings.DeleteFilesOnSuccess); + } - var factory = new DataFlowPipelineEngineFactory(useCase,RepositoryLocator.CatalogueRepository.MEF); - var destInstance = factory.CreateDestinationIfExists(pipe); - Assert.AreEqual(true, ((BasicDataReleaseDestination)destInstance).ReleaseSettings.DeleteFilesOnSuccess); - } + [Test] + public void TestArgumentWithTypeThatIsEnum() + { + var pipe = new Pipeline(CatalogueRepository, "p"); + var pc = new PipelineComponent(CatalogueRepository, pipe, typeof(BasicDataReleaseDestination), -1, + "c"); + var arg = new PipelineComponentArgument(CatalogueRepository, pc); - [Test] - public void TestArgumentWithTypeThatIsEnum() + try { - var pipe = new Pipeline(CatalogueRepository, "p"); - var pc = new PipelineComponent(CatalogueRepository, pipe, typeof(BasicDataReleaseDestination), -1, - "c"); - - var arg = new PipelineComponentArgument(CatalogueRepository, pc); - - try - { - arg.SetType(typeof(ExitCodeType)); - arg.SetValue(ExitCodeType.OperationNotRequired); + arg.SetType(typeof(ExitCodeType)); + arg.SetValue(ExitCodeType.OperationNotRequired); - //should have set Value string to the ID of the object - Assert.AreEqual(arg.Value, ExitCodeType.OperationNotRequired.ToString()); + //should have set Value string to the ID of the object + Assert.AreEqual(arg.Value, ExitCodeType.OperationNotRequired.ToString()); - arg.SaveToDatabase(); + arg.SaveToDatabase(); - //but as system Type should return the server - Assert.AreEqual(arg.GetValueAsSystemType(), ExitCodeType.OperationNotRequired); - } - finally - { - pipe.DeleteInDatabase(); - } + //but as system Type should return the server + Assert.AreEqual(arg.GetValueAsSystemType(), ExitCodeType.OperationNotRequired); } - - [Test] - [TestCase(true)] - [TestCase(false)] - public void TestArgumentWithTypeThatIsInterface(bool useInterfaceDeclaration) + finally { - var pipe = new Pipeline(CatalogueRepository, "p"); - var pc = new PipelineComponent(CatalogueRepository, pipe, typeof(BasicDataReleaseDestination), -1, - "c"); + pipe.DeleteInDatabase(); + } + } - var arg = new PipelineComponentArgument(CatalogueRepository, pc); + [Test] + [TestCase(true)] + [TestCase(false)] + public void TestArgumentWithTypeThatIsInterface(bool useInterfaceDeclaration) + { + var pipe = new Pipeline(CatalogueRepository, "p"); + var pc = new PipelineComponent(CatalogueRepository, pipe, typeof(BasicDataReleaseDestination), -1, + "c"); - var server = new ExternalDatabaseServer(CatalogueRepository, "fish",null); + var arg = new PipelineComponentArgument(CatalogueRepository, pc); - try - { - arg.SetType(useInterfaceDeclaration ? typeof (IExternalDatabaseServer) : typeof (ExternalDatabaseServer)); + var server = new ExternalDatabaseServer(CatalogueRepository, "fish", null); - arg.SetValue(server); + try + { + arg.SetType(useInterfaceDeclaration ? 
typeof(IExternalDatabaseServer) : typeof(ExternalDatabaseServer)); - //should have set Value string to the ID of the object - Assert.AreEqual(arg.Value,server.ID.ToString()); + arg.SetValue(server); - arg.SaveToDatabase(); + //should have set Value string to the ID of the object + Assert.AreEqual(arg.Value, server.ID.ToString()); - //but as system Type should return the server - Assert.AreEqual(arg.GetValueAsSystemType(),server); - } - finally - { - pipe.DeleteInDatabase(); - server.DeleteInDatabase(); - } - } + arg.SaveToDatabase(); - [Test] - public void TestArgumentThatIsDictionary() + //but as system Type should return the server + Assert.AreEqual(arg.GetValueAsSystemType(), server); + } + finally { - var pipe = new Pipeline(CatalogueRepository, "p"); - var pc = new PipelineComponent(CatalogueRepository, pipe, typeof(BasicDataReleaseDestination), -1, - "c"); + pipe.DeleteInDatabase(); + server.DeleteInDatabase(); + } + } - try + [Test] + public void TestArgumentThatIsDictionary() + { + var pipe = new Pipeline(CatalogueRepository, "p"); + var pc = new PipelineComponent(CatalogueRepository, pipe, typeof(BasicDataReleaseDestination), -1, + "c"); + + try + { + var arg = new PipelineComponentArgument(CatalogueRepository, pc) { - var arg = new PipelineComponentArgument(CatalogueRepository, pc); - arg.Name = "MyNames"; - arg.SetType(typeof(Dictionary)); - arg.SaveToDatabase(); - - Assert.AreEqual(typeof(Dictionary), arg.GetConcreteSystemType()); - - var ti1 = new TableInfo(CatalogueRepository, "test1"); - var ti2 = new TableInfo(CatalogueRepository, "test2"); - - var val = new Dictionary(); - val.Add(ti1,"Fish"); - val.Add(ti2,"Fish"); - - arg.SetValue(val); - - arg.SaveToDatabase(); - - var val2 = (Dictionary) arg.GetValueAsSystemType(); - Assert.AreEqual(2,val2.Count); - Assert.AreEqual("Fish", val2[ti1]); - Assert.AreEqual("Fish", val2[ti2]); - } - finally + Name = "MyNames" + }; + arg.SetType(typeof(Dictionary)); + arg.SaveToDatabase(); + + Assert.AreEqual(typeof(Dictionary), arg.GetConcreteSystemType()); + + var ti1 = new TableInfo(CatalogueRepository, "test1"); + var ti2 = new TableInfo(CatalogueRepository, "test2"); + + var val = new Dictionary { - pipe.DeleteInDatabase(); - } - } + { ti1, "Fish" }, + { ti2, "Fish" } + }; + + arg.SetValue(val); + arg.SaveToDatabase(); + + var val2 = (Dictionary)arg.GetValueAsSystemType(); + Assert.AreEqual(2, val2.Count); + Assert.AreEqual("Fish", val2[ti1]); + Assert.AreEqual("Fish", val2[ti2]); + } + finally + { + pipe.DeleteInDatabase(); + } } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/ArgumentTests/ProcessTaskTests.cs b/Rdmp.Core.Tests/Curation/Integration/ArgumentTests/ProcessTaskTests.cs index 937927107f..3cad82713f 100644 --- a/Rdmp.Core.Tests/Curation/Integration/ArgumentTests/ProcessTaskTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/ArgumentTests/ProcessTaskTests.cs @@ -9,121 +9,121 @@ using Rdmp.Core.Curation.Data.DataLoad; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Integration.ArgumentTests +namespace Rdmp.Core.Tests.Curation.Integration.ArgumentTests; + +public class ProcessTaskTests : DatabaseTests { - public class ProcessTaskTests : DatabaseTests + [Test] + public void CloneProcessTask_ToSameLoadMetadataWithoutArguments() { + var test = new LoadMetadata(CatalogueRepository); + var processTask1 = new ProcessTask(CatalogueRepository, test, LoadStage.AdjustRaw) + { + Name = "Franky", + Order = 999 + }; - [Test] - public void CloneProcessTask_ToSameLoadMetadataWithoutArguments() + try 
{ - LoadMetadata test = new LoadMetadata(CatalogueRepository); - ProcessTask processTask1 = new ProcessTask(CatalogueRepository, test, LoadStage.AdjustRaw) - { - Name = "Franky", - Order = 999 - }; - - try - { - processTask1.SaveToDatabase(); - - var clone = processTask1.CloneToNewLoadMetadataStage(test,LoadStage.GetFiles); - Assert.AreNotSame(clone.ID, processTask1.ID); - Assert.IsFalse(clone.ID == processTask1.ID); - - //get fresh copy out of database to ensure it is still there - ProcessTask orig = CatalogueRepository.GetObjectByID(processTask1.ID); - clone = CatalogueRepository.GetObjectByID(clone.ID); - - Assert.IsFalse(orig.ID == clone.ID); - Assert.AreEqual(LoadStage.AdjustRaw,orig.LoadStage); - Assert.AreEqual(LoadStage.GetFiles, clone.LoadStage); - - Assert.AreEqual(orig.Order,clone.Order); - Assert.AreEqual(orig.Path, clone.Path); - Assert.AreEqual(orig.ProcessTaskType, clone.ProcessTaskType); - Assert.AreEqual(orig.LoadMetadata_ID, clone.LoadMetadata_ID); - - clone.DeleteInDatabase(); - } - finally - { - processTask1.DeleteInDatabase(); - test.DeleteInDatabase(); - } + processTask1.SaveToDatabase(); + + var clone = processTask1.CloneToNewLoadMetadataStage(test, LoadStage.GetFiles); + Assert.AreNotSame(clone.ID, processTask1.ID); + Assert.IsFalse(clone.ID == processTask1.ID); + + //get fresh copy out of database to ensure it is still there + var orig = CatalogueRepository.GetObjectByID(processTask1.ID); + clone = CatalogueRepository.GetObjectByID(clone.ID); + + Assert.IsFalse(orig.ID == clone.ID); + Assert.AreEqual(LoadStage.AdjustRaw, orig.LoadStage); + Assert.AreEqual(LoadStage.GetFiles, clone.LoadStage); + + Assert.AreEqual(orig.Order, clone.Order); + Assert.AreEqual(orig.Path, clone.Path); + Assert.AreEqual(orig.ProcessTaskType, clone.ProcessTaskType); + Assert.AreEqual(orig.LoadMetadata_ID, clone.LoadMetadata_ID); + + clone.DeleteInDatabase(); } + finally + { + processTask1.DeleteInDatabase(); + test.DeleteInDatabase(); + } + } - [Test] - public void CloneProcessTask_ToNewLoadMetadataWithArguments() + [Test] + public void CloneProcessTask_ToNewLoadMetadataWithArguments() + { + //setup parents + var parent1 = new LoadMetadata(CatalogueRepository); + var parent2 = new LoadMetadata(CatalogueRepository); + + //make sure we didn't magically create the same ID somehow + Assert.AreNotEqual(parent1.ID, parent2.ID); + + //setup things to clone in parent1 + var processTask1 = new ProcessTask(CatalogueRepository, parent1, LoadStage.AdjustRaw); + var arg = new ProcessTaskArgument(CatalogueRepository, processTask1) { - //setup parents - LoadMetadata parent1 = new LoadMetadata(CatalogueRepository); - LoadMetadata parent2 = new LoadMetadata(CatalogueRepository); - - //make sure we didn't magically create the same ID somehow - Assert.AreNotEqual(parent1.ID,parent2.ID); - - //setup things to clone in parent1 - ProcessTask processTask1 = new ProcessTask(CatalogueRepository, parent1, LoadStage.AdjustRaw); - ProcessTaskArgument arg = new ProcessTaskArgument(CatalogueRepository, processTask1); - arg.Name = "TestArg"; - arg.SetType(typeof (System.String)); - arg.SetValue("TestValue"); - arg.SaveToDatabase(); - - processTask1.Name = "Franky"; - processTask1.Order = 999; - processTask1.SaveToDatabase(); - - try - { - //clone to parent 2 - var clone = processTask1.CloneToNewLoadMetadataStage(parent2, LoadStage.GetFiles); - Assert.AreNotSame(clone.ID, processTask1.ID); - Assert.IsFalse(clone.ID == processTask1.ID); - - //////////////////////////////////////////////////////////////////CHECK CLONAGE OF 
PROCESS TASK //////////////////////////////////////////////////////////// - //get fresh copy out of database to ensure it is still there - ProcessTask orig = CatalogueRepository.GetObjectByID(processTask1.ID); - clone = CatalogueRepository.GetObjectByID(clone.ID); - - //ids must have changed - Assert.IsFalse(orig.ID == clone.ID); - - //load stages must be correct per what we requested - Assert.AreEqual(LoadStage.AdjustRaw, orig.LoadStage); - Assert.AreEqual(LoadStage.GetFiles, clone.LoadStage); - - //all regular values must have been cloned successfully - Assert.AreEqual(orig.Order, clone.Order); - Assert.AreEqual(orig.Path, clone.Path); - Assert.AreEqual(orig.ProcessTaskType, clone.ProcessTaskType); - - Assert.AreEqual(parent1.ID, orig.LoadMetadata_ID); - Assert.AreEqual(parent2.ID, clone.LoadMetadata_ID); - /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// - - //////////////////////////////////////////////////////////////////CHECK CLONAGE OF ARGUMENTS //////////////////////////////////////////////////////////// - - ProcessTaskArgument clonearg= clone.ProcessTaskArguments.SingleOrDefault(); - Assert.NotNull(clonearg); - - Assert.AreNotEqual(clonearg.ID,arg.ID); - Assert.AreEqual(clonearg.GetType(),arg.GetType()); - Assert.AreEqual(clonearg.Name,arg.Name); - Assert.AreEqual(clonearg.Value,arg.Value); - - /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// - clone.DeleteInDatabase(); - } - finally - { - processTask1.DeleteInDatabase(); - - parent1.DeleteInDatabase(); - parent2.DeleteInDatabase(); - } + Name = "TestArg" + }; + arg.SetType(typeof(string)); + arg.SetValue("TestValue"); + arg.SaveToDatabase(); + + processTask1.Name = "Franky"; + processTask1.Order = 999; + processTask1.SaveToDatabase(); + + try + { + //clone to parent 2 + var clone = processTask1.CloneToNewLoadMetadataStage(parent2, LoadStage.GetFiles); + Assert.AreNotSame(clone.ID, processTask1.ID); + Assert.IsFalse(clone.ID == processTask1.ID); + + //////////////////////////////////////////////////////////////////CHECK CLONAGE OF PROCESS TASK //////////////////////////////////////////////////////////// + //get fresh copy out of database to ensure it is still there + var orig = CatalogueRepository.GetObjectByID(processTask1.ID); + clone = CatalogueRepository.GetObjectByID(clone.ID); + + //ids must have changed + Assert.IsFalse(orig.ID == clone.ID); + + //load stages must be correct per what we requested + Assert.AreEqual(LoadStage.AdjustRaw, orig.LoadStage); + Assert.AreEqual(LoadStage.GetFiles, clone.LoadStage); + + //all regular values must have been cloned successfully + Assert.AreEqual(orig.Order, clone.Order); + Assert.AreEqual(orig.Path, clone.Path); + Assert.AreEqual(orig.ProcessTaskType, clone.ProcessTaskType); + + Assert.AreEqual(parent1.ID, orig.LoadMetadata_ID); + Assert.AreEqual(parent2.ID, clone.LoadMetadata_ID); + /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + + //////////////////////////////////////////////////////////////////CHECK CLONAGE OF ARGUMENTS //////////////////////////////////////////////////////////// + + var clonearg = clone.ProcessTaskArguments.SingleOrDefault(); + Assert.NotNull(clonearg); + + Assert.AreNotEqual(clonearg.ID, arg.ID); + Assert.AreEqual(clonearg.GetType(), 
arg.GetType()); + Assert.AreEqual(clonearg.Name, arg.Name); + Assert.AreEqual(clonearg.Value, arg.Value); + + /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + clone.DeleteInDatabase(); + } + finally + { + processTask1.DeleteInDatabase(); + + parent1.DeleteInDatabase(); + parent2.DeleteInDatabase(); } } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/ArgumentTests/TestArgumentedClass.cs b/Rdmp.Core.Tests/Curation/Integration/ArgumentTests/TestArgumentedClass.cs index 3971b0e222..b38d416a2e 100644 --- a/Rdmp.Core.Tests/Curation/Integration/ArgumentTests/TestArgumentedClass.cs +++ b/Rdmp.Core.Tests/Curation/Integration/ArgumentTests/TestArgumentedClass.cs @@ -6,11 +6,10 @@ using Rdmp.Core.Curation.Data; -namespace Rdmp.Core.Tests.Curation.Integration.ArgumentTests +namespace Rdmp.Core.Tests.Curation.Integration.ArgumentTests; + +public class TestArgumentedClass { - public class TestArgumentedClass - { - [DemandsInitialization("Fishes", DemandType.Unspecified,true)] - public bool MyBool { get; set; } - } + [DemandsInitialization("Fishes", DemandType.Unspecified, true)] + public bool MyBool { get; set; } } \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/BundledLookupTableTests.cs b/Rdmp.Core.Tests/Curation/Integration/BundledLookupTableTests.cs index f129cba050..cc1d94d16d 100644 --- a/Rdmp.Core.Tests/Curation/Integration/BundledLookupTableTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/BundledLookupTableTests.cs @@ -11,30 +11,31 @@ using Rdmp.Core.QueryBuilding; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Integration +namespace Rdmp.Core.Tests.Curation.Integration; + +public class BundledLookupTableTests : UnitTests { - public class BundledLookupTableTests : UnitTests + [Test] + public void TestLookupGetDataTableFetchSql() + { + var l = WhenIHaveA(); + var t = l.PrimaryKey.TableInfo; + + var bundle = new BundledLookupTable(t); + Assert.AreEqual("select * from [MyDb]..[ChildTable]", bundle.GetDataTableFetchSql()); + } + + [Test] + public void TestLookupGetDataTableFetchSql_WithCatalogue() { - [Test] - public void TestLookupGetDataTableFetchSql() - { - var l = WhenIHaveA(); - var t =l.PrimaryKey.TableInfo; - - var bundle = new BundledLookupTable(t); - Assert.AreEqual("select * from [MyDb]..[ChildTable]", bundle.GetDataTableFetchSql()); - } - [Test] - public void TestLookupGetDataTableFetchSql_WithCatalogue() - { - var l = WhenIHaveA(); - var t = l.PrimaryKey.TableInfo; + var l = WhenIHaveA(); + var t = l.PrimaryKey.TableInfo; - var engineer = new ForwardEngineerCatalogue(t, t.ColumnInfos); - engineer.ExecuteForwardEngineering(out var cata,out _, out var eis); + var engineer = new ForwardEngineerCatalogue(t, t.ColumnInfos); + engineer.ExecuteForwardEngineering(out var cata, out _, out var eis); - var bundle = new BundledLookupTable(t); - Assert.AreEqual(@" + var bundle = new BundledLookupTable(t); + Assert.AreEqual(@" SELECT ChildCol, @@ -43,11 +44,11 @@ public void TestLookupGetDataTableFetchSql_WithCatalogue() ChildTable", bundle.GetDataTableFetchSql()); - // ei 1 is suplemental now - eis[1].ExtractionCategory = ExtractionCategory.Supplemental; - eis[1].SaveToDatabase(); + // ei 1 is suplemental now + eis[1].ExtractionCategory = ExtractionCategory.Supplemental; + eis[1].SaveToDatabase(); - Assert.AreEqual(@" + Assert.AreEqual(@" SELECT ChildCol @@ -55,15 +56,16 @@ public void 
TestLookupGetDataTableFetchSql_WithCatalogue() ChildTable", bundle.GetDataTableFetchSql()); - // ei 0 is marked IsExtractionIdentifier - so is also not a valid - // lookup extractable column (Lookups shouldn't have patient linkage - // identifiers in them so) - eis[0].IsExtractionIdentifier = true; - eis[0].SaveToDatabase(); + // ei 0 is marked IsExtractionIdentifier - so is also not a valid + // lookup extractable column (Lookups shouldn't have patient linkage + // identifiers in them so) + eis[0].IsExtractionIdentifier = true; + eis[0].SaveToDatabase(); - // so now there are no columns at all that are extractable - var ex = Assert.Throws(() => bundle.GetDataTableFetchSql()); - Assert.AreEqual("Lookup table 'ChildTable' has a Catalogue defined 'ChildTable' but it has no Core extractable columns", ex.Message); - } + // so now there are no columns at all that are extractable + var ex = Assert.Throws(() => bundle.GetDataTableFetchSql()); + Assert.AreEqual( + "Lookup table 'ChildTable' has a Catalogue defined 'ChildTable' but it has no Core extractable columns", + ex.Message); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/CatalogueCheckTests.cs b/Rdmp.Core.Tests/Curation/Integration/CatalogueCheckTests.cs index 25bfff9fc0..5eff4ab016 100644 --- a/Rdmp.Core.Tests/Curation/Integration/CatalogueCheckTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/CatalogueCheckTests.cs @@ -9,61 +9,60 @@ using FAnsi; using NUnit.Framework; using Rdmp.Core.Curation.Data; -using ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Checks; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Integration +namespace Rdmp.Core.Tests.Curation.Integration; + +public class CatalogueCheckTests : DatabaseTests { - public class CatalogueCheckTests:DatabaseTests + [Test] + public void CatalogueCheck_DodgyName() { - [Test] - public void CatalogueCheck_DodgyName() + var cata = new Catalogue(CatalogueRepository, "fish") { - var cata = new Catalogue(CatalogueRepository, "fish"); - //name broken - cata.Name = @"c:\bob.txt#"; - var ex = Assert.Throws(()=>cata.Check(new ThrowImmediatelyCheckNotifier())); - Assert.IsTrue(ex.Message.Contains("The following invalid characters were found:'\\','.','#'")); - - cata.DeleteInDatabase(); - } + Name = @"c:\bob.txt#" + }; - [TestCase(DatabaseType.MicrosoftSQLServer)] - [TestCase(DatabaseType.MySql)] - public void CatalogueCheck_FetchData(DatabaseType databaseType) - { - DataTable dt = new DataTable(); - dt.Columns.Add("Name"); - dt.Rows.Add("Frank"); - dt.Rows.Add("Peter"); + var ex = Assert.Throws(() => cata.Check(ThrowImmediatelyCheckNotifier.Quiet)); + Assert.IsTrue(ex.Message.Contains("The following invalid characters were found:'\\','.','#'")); - var database = GetCleanedServer(databaseType); - var tbl = database.CreateTable("CatalogueCheck_CanReadText",dt); + cata.DeleteInDatabase(); + } - var cata = Import(tbl); + [TestCase(DatabaseType.MicrosoftSQLServer)] + [TestCase(DatabaseType.MySql)] + public void CatalogueCheck_FetchData(DatabaseType databaseType) + { + var dt = new DataTable(); + dt.Columns.Add("Name"); + dt.Rows.Add("Frank"); + dt.Rows.Add("Peter"); - //shouldn't be any errors - var tomemory = new ToMemoryCheckNotifier(); - cata.Check(tomemory); - Assert.AreEqual(CheckResult.Success,tomemory.GetWorst()); + var database = GetCleanedServer(databaseType); + var tbl = database.CreateTable("CatalogueCheck_CanReadText", dt); - //delete all the records in the table - tbl.Truncate(); - cata.Check(tomemory); + var cata = 
Import(tbl); - //now it should warn us that it is empty - Assert.AreEqual(CheckResult.Warning, tomemory.GetWorst()); + //shouldn't be any errors + var tomemory = new ToMemoryCheckNotifier(); + cata.Check(tomemory); + Assert.AreEqual(CheckResult.Success, tomemory.GetWorst()); - tbl.Drop(); + //delete all the records in the table + tbl.Truncate(); + cata.Check(tomemory); + //now it should warn us that it is empty + Assert.AreEqual(CheckResult.Warning, tomemory.GetWorst()); - cata.Check(tomemory); + tbl.Drop(); - //now it should fail checks - Assert.AreEqual(CheckResult.Fail, tomemory.GetWorst()); + cata.Check(tomemory); - } + //now it should fail checks + Assert.AreEqual(CheckResult.Fail, tomemory.GetWorst()); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/CatalogueItemTests.cs b/Rdmp.Core.Tests/Curation/Integration/CatalogueItemTests.cs index 4f775ad9b5..c0c3f2d43a 100644 --- a/Rdmp.Core.Tests/Curation/Integration/CatalogueItemTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/CatalogueItemTests.cs @@ -8,178 +8,168 @@ using Rdmp.Core.Curation.Data; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Integration +namespace Rdmp.Core.Tests.Curation.Integration; + +internal class CatalogueItemTests : DatabaseTests { - class CatalogueItemTests : DatabaseTests + [Test] + public void constructor_newTestCatalogueItem_pass() { + var parent = new Catalogue(CatalogueRepository, "GROG"); - [Test] - public void constructor_newTestCatalogueItem_pass() - { - - var parent = new Catalogue(CatalogueRepository, "GROG"); + var child1 = new CatalogueItem(CatalogueRepository, parent, "GROG_ITEM1"); + var child2 = new CatalogueItem(CatalogueRepository, parent, "GROG_ITEM2"); - CatalogueItem child1 = new CatalogueItem(CatalogueRepository, parent, "GROG_ITEM1"); - CatalogueItem child2 = new CatalogueItem(CatalogueRepository, parent, "GROG_ITEM2"); + Assert.IsTrue(child1.Catalogue_ID == parent.ID); + Assert.IsTrue(child2.Catalogue_ID == parent.ID); - Assert.IsTrue(child1.Catalogue_ID == parent.ID); - Assert.IsTrue(child2.Catalogue_ID == parent.ID); + Assert.IsTrue(child1.ID != child2.ID); - Assert.IsTrue(child1.ID != child2.ID); - - child1.DeleteInDatabase(); - child2.DeleteInDatabase(); - parent.DeleteInDatabase(); - } + child1.DeleteInDatabase(); + child2.DeleteInDatabase(); + parent.DeleteInDatabase(); + } - [Test] - public void TestSettingColumnInfoToNull() - { + [Test] + public void TestSettingColumnInfoToNull() + { + var parent = new Catalogue(CatalogueRepository, "GROG"); - var parent = new Catalogue(CatalogueRepository, "GROG"); + var child1 = new CatalogueItem(CatalogueRepository, parent, "GROG_ITEM1"); + child1.SetColumnInfo(null); - CatalogueItem child1 = new CatalogueItem(CatalogueRepository, parent, "GROG_ITEM1"); - child1.SetColumnInfo(null); - - Assert.IsNull(child1.ColumnInfo_ID); - child1.DeleteInDatabase(); - parent.DeleteInDatabase(); - } + Assert.IsNull(child1.ColumnInfo_ID); + child1.DeleteInDatabase(); + parent.DeleteInDatabase(); + } - [Test] - public void GetAllCatalogueItemsForCatalogueID_NewCatalogue_pass() - { - Catalogue parent = new Catalogue(CatalogueRepository, "ZOMBIEMAN"); + [Test] + public void GetAllCatalogueItemsForCatalogueID_NewCatalogue_pass() + { + var parent = new Catalogue(CatalogueRepository, "ZOMBIEMAN"); - var child1 = new CatalogueItem(CatalogueRepository, parent, "ZOMBIEMAN_ITEM1"); - var child2 = new CatalogueItem(CatalogueRepository, parent, "ZOMBIEMAN_ITEM2"); + var child1 = new CatalogueItem(CatalogueRepository, parent, 
"ZOMBIEMAN_ITEM1"); + var child2 = new CatalogueItem(CatalogueRepository, parent, "ZOMBIEMAN_ITEM2"); - CatalogueItem[] children = parent.CatalogueItems; + var children = parent.CatalogueItems; - Assert.AreEqual(children.Length,2); - Assert.IsTrue(children[0].ID == child1.ID || children[1].ID == child1.ID); - Assert.IsTrue(children[0].ID == child2.ID || children[1].ID == child2.ID); - Assert.IsTrue(children[0].ID != children[1].ID); + Assert.AreEqual(children.Length, 2); + Assert.IsTrue(children[0].ID == child1.ID || children[1].ID == child1.ID); + Assert.IsTrue(children[0].ID == child2.ID || children[1].ID == child2.ID); + Assert.IsTrue(children[0].ID != children[1].ID); - children[0].DeleteInDatabase(); - children[1].DeleteInDatabase(); - parent.DeleteInDatabase(); - } + children[0].DeleteInDatabase(); + children[1].DeleteInDatabase(); + parent.DeleteInDatabase(); + } - [Test] - public void update_changeAllPropertiesOfCatalogueItem_passes() + [Test] + public void update_changeAllPropertiesOfCatalogueItem_passes() + { + var parent = new Catalogue(CatalogueRepository, "KONGOR"); + var child = new CatalogueItem(CatalogueRepository, parent, "KONGOR_SUPERKING") { - Catalogue parent = new Catalogue(CatalogueRepository, "KONGOR"); - CatalogueItem child = new CatalogueItem(CatalogueRepository, parent, "KONGOR_SUPERKING") - { - Agg_method = "Adding SetUp", - Comments = "do not change amagad super secret!", - Limitations = "Extreme limitaitons", - Description = - "Exciting things are going down in the streets of new your this time of year it would be a great idea if you were to go there", - Name = "KONGOR_MINIMAN", - Periodicity = Catalogue.CataloguePeriodicity.Monthly, - Research_relevance = "Highly relevant to all fields of subatomic particle study", - Statistical_cons = "Dangerous cons frequent the areas that this stats is happening, be afraid", - Topic = "nothing much, lots of stuff" - }; + Agg_method = "Adding SetUp", + Comments = "do not change amagad super secret!", + Limitations = "Extreme limitaitons", + Description = + "Exciting things are going down in the streets of new your this time of year it would be a great idea if you were to go there", + Name = "KONGOR_MINIMAN", + Periodicity = Catalogue.CataloguePeriodicity.Monthly, + Research_relevance = "Highly relevant to all fields of subatomic particle study", + Statistical_cons = "Dangerous cons frequent the areas that this stats is happening, be afraid", + Topic = "nothing much, lots of stuff" + }; + + child.SaveToDatabase(); + + var childAfter = CatalogueRepository.GetObjectByID(child.ID); + + Assert.IsTrue(child.Name == childAfter.Name); + Assert.IsTrue(child.Agg_method == childAfter.Agg_method); + Assert.IsTrue(child.Comments == childAfter.Comments); + Assert.IsTrue(child.Limitations == childAfter.Limitations); + Assert.IsTrue(child.Description == childAfter.Description); + Assert.IsTrue(child.Periodicity == childAfter.Periodicity); + Assert.IsTrue(child.Research_relevance == childAfter.Research_relevance); + Assert.IsTrue(child.Statistical_cons == childAfter.Statistical_cons); + Assert.IsTrue(child.Topic == childAfter.Topic); + + child.DeleteInDatabase(); + parent.DeleteInDatabase(); + } + + [Test] + public void clone_CloneCatalogueItemWithIDIntoCatalogue_passes() + { + var parent = new Catalogue(CatalogueRepository, "KONGOR"); + var parent2 = new Catalogue(CatalogueRepository, "KONGOR2"); + var child = new CatalogueItem(CatalogueRepository, parent, "KONGOR_SUPERKING") + { + Agg_method = "Adding SetUp", + Comments = "do not change 
amagad super secret!", + Limitations = "Extreme limitaitons", + Description = + "Exciting things are going down in the streets of new your this time of year it would be a great idea if you were to go there", + Name = "KONGOR_MINIMAN", + Periodicity = Catalogue.CataloguePeriodicity.Monthly, + Research_relevance = "Highly relevant to all fields of subatomic particle study", + Statistical_cons = "Dangerous cons frequent the areas that this stats is happening, be afraid", + Topic = "nothing much, lots of stuff" + }; + + CatalogueItem cloneChild = null; + try + { child.SaveToDatabase(); - - CatalogueItem childAfter = CatalogueRepository.GetObjectByID(child.ID); - - Assert.IsTrue(child.Name == childAfter.Name); - Assert.IsTrue(child.Agg_method == childAfter.Agg_method); - Assert.IsTrue(child.Comments == childAfter.Comments); - Assert.IsTrue(child.Limitations == childAfter.Limitations); - Assert.IsTrue(child.Description == childAfter.Description); - Assert.IsTrue(child.Periodicity == childAfter.Periodicity); - Assert.IsTrue(child.Research_relevance == childAfter.Research_relevance); - Assert.IsTrue(child.Statistical_cons == childAfter.Statistical_cons); - Assert.IsTrue(child.Topic == childAfter.Topic); + cloneChild = child.CloneCatalogueItemWithIDIntoCatalogue(parent2); + + //get the clone that was returned + Assert.AreEqual(cloneChild.Catalogue_ID, parent2.ID); //it is in the second one + Assert.AreNotEqual(cloneChild.Catalogue_ID, parent.ID); //it is not in the first one + Assert.AreNotEqual(cloneChild.ID, child.ID); //it has a new ID + + Assert.AreEqual(cloneChild.Limitations, child.Limitations); + Assert.AreEqual(cloneChild.Description, child.Description); + Assert.AreEqual(cloneChild.Name, child.Name); + Assert.AreEqual(cloneChild.Periodicity, child.Periodicity); + Assert.AreEqual(cloneChild.Research_relevance, child.Research_relevance); + Assert.AreEqual(cloneChild.Statistical_cons, child.Statistical_cons); + Assert.AreEqual(cloneChild.Topic, child.Topic); + } + finally + { + cloneChild?.DeleteInDatabase(); child.DeleteInDatabase(); parent.DeleteInDatabase(); + parent2.DeleteInDatabase(); } + } - [Test] - public void clone_CloneCatalogueItemWithIDIntoCatalogue_passes() - { - Catalogue parent = new Catalogue(CatalogueRepository,"KONGOR"); - Catalogue parent2 = new Catalogue(CatalogueRepository, "KONGOR2"); - - CatalogueItem child = new CatalogueItem(CatalogueRepository, parent, "KONGOR_SUPERKING") - { - Agg_method = "Adding SetUp", - Comments = "do not change amagad super secret!", - Limitations = "Extreme limitaitons", - Description = - "Exciting things are going down in the streets of new your this time of year it would be a great idea if you were to go there", - Name = "KONGOR_MINIMAN", - Periodicity = Catalogue.CataloguePeriodicity.Monthly, - Research_relevance = "Highly relevant to all fields of subatomic particle study", - Statistical_cons = "Dangerous cons frequent the areas that this stats is happening, be afraid", - Topic = "nothing much, lots of stuff" - }; - - CatalogueItem cloneChild = null; - try - { - child.SaveToDatabase(); - cloneChild = child.CloneCatalogueItemWithIDIntoCatalogue(parent2); - - //get the clone that was returned - Assert.AreEqual(cloneChild.Catalogue_ID, parent2.ID); //it is in the second one - Assert.AreNotEqual(cloneChild.Catalogue_ID, parent.ID); //it is not in the first one - Assert.AreNotEqual(cloneChild.ID, child.ID); //it has a new ID - - Assert.AreEqual(cloneChild.Limitations, child.Limitations); - Assert.AreEqual(cloneChild.Description, 
child.Description); - Assert.AreEqual(cloneChild.Name, child.Name); - Assert.AreEqual(cloneChild.Periodicity, child.Periodicity); - Assert.AreEqual(cloneChild.Research_relevance, child.Research_relevance); - Assert.AreEqual(cloneChild.Statistical_cons, child.Statistical_cons); - Assert.AreEqual(cloneChild.Topic, child.Topic); - } - finally - { - if (cloneChild != null) - cloneChild.DeleteInDatabase(); - - child.DeleteInDatabase(); - parent.DeleteInDatabase(); - parent2.DeleteInDatabase(); - - } - } - - [TestCase(true)] - [TestCase(false)] - public void TestDeleting_CascadesToExtractionInformations(bool makeOrphanFirst) - { - var c = new Catalogue(CatalogueRepository,"My new cata"); - var ci = new CatalogueItem(CatalogueRepository, c, "myci"); - - var t = new TableInfo(CatalogueRepository, "myt"); - var col = new ColumnInfo(CatalogueRepository, "mycol", "varchar(10)", t); + [TestCase(true)] + [TestCase(false)] + public void TestDeleting_CascadesToExtractionInformations(bool makeOrphanFirst) + { + var c = new Catalogue(CatalogueRepository, "My new cata"); + var ci = new CatalogueItem(CatalogueRepository, c, "myci"); - var ei = new ExtractionInformation(CatalogueRepository, ci, col,"fff"); + var t = new TableInfo(CatalogueRepository, "myt"); + var col = new ColumnInfo(CatalogueRepository, "mycol", "varchar(10)", t); - if(makeOrphanFirst) - { - col.DeleteInDatabase(); - } + var ei = new ExtractionInformation(CatalogueRepository, ci, col, "fff"); - c.DeleteInDatabase(); + if (makeOrphanFirst) col.DeleteInDatabase(); - Assert.IsFalse(c.Exists()); - Assert.IsFalse(ci.Exists()); - Assert.IsFalse(ei.Exists()); + c.DeleteInDatabase(); - Assert.IsTrue(t.Exists()); - Assert.AreEqual(!makeOrphanFirst,col.Exists()); + Assert.IsFalse(c.Exists()); + Assert.IsFalse(ci.Exists()); + Assert.IsFalse(ei.Exists()); - } + Assert.IsTrue(t.Exists()); + Assert.AreEqual(!makeOrphanFirst, col.Exists()); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/CatalogueTests.cs b/Rdmp.Core.Tests/Curation/Integration/CatalogueTests.cs index 278766bf92..57b197b80d 100644 --- a/Rdmp.Core.Tests/Curation/Integration/CatalogueTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/CatalogueTests.cs @@ -11,570 +11,553 @@ using Rdmp.Core.Curation.Data; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Integration +namespace Rdmp.Core.Tests.Curation.Integration; + +public class CatalogueTests : UnitTests { - public class CatalogueTests : UnitTests + [Test] + public void Test_GetObjects_Catalogue() { - [Test] - public void Test_GetObjects_Catalogue() - { - Catalogue catalogueWithId = new Catalogue(Repository, "bob"); - Catalogue[] catas = Repository.GetAllObjects(); + var catalogueWithId = new Catalogue(Repository, "bob"); + var catas = Repository.GetAllObjects(); - Assert.IsTrue(catas.Length > 0); + Assert.IsTrue(catas.Length > 0); - catalogueWithId.DeleteInDatabase(); - } + catalogueWithId.DeleteInDatabase(); + } - [Test] - public void SettingPropertyViaRelationshipDoesntSave_NoticeHowYouHaveToCacheThePropertyCatalogueToSetIt() - { - Catalogue c = new Catalogue(Repository,"frank"); - CatalogueItem ci = new CatalogueItem(Repository,c,"bob"); + [Test] + public void SettingPropertyViaRelationshipDoesntSave_NoticeHowYouHaveToCacheThePropertyCatalogueToSetIt() + { + var c = new Catalogue(Repository, "frank"); + var ci = new CatalogueItem(Repository, c, "bob"); - var cata = ci.Catalogue; - cata.Name = "fish2"; - cata.SaveToDatabase(); - Assert.AreEqual("fish2", ci.Catalogue.Name); + var cata = 
ci.Catalogue; + cata.Name = "fish2"; + cata.SaveToDatabase(); + Assert.AreEqual("fish2", ci.Catalogue.Name); - //now thanks to lazy this works... but it's ambiguous (it works if the property is referenced via IInjectKnown) - ci.Catalogue.Name = "fish"; - ci.Catalogue.SaveToDatabase(); - Assert.AreEqual("fish",ci.Catalogue.Name); + //now thanks to lazy this works... but it's ambiguous (it works if the property is referenced via IInjectKnown) + ci.Catalogue.Name = "fish"; + ci.Catalogue.SaveToDatabase(); + Assert.AreEqual("fish", ci.Catalogue.Name); - c.DeleteInDatabase(); - } - - - [Test] - public void update_changeNameOfCatalogue_passes() - { - //create a new one - var cata = new Catalogue(Repository, "fishing"); - int expectedID = cata.ID; + c.DeleteInDatabase(); + } + + + [Test] + public void update_changeNameOfCatalogue_passes() + { + //create a new one + var cata = new Catalogue(Repository, "fishing"); + var expectedID = cata.ID; - //find it and change its name - Catalogue[] catas = Repository.GetAllObjects().ToArray(); + //find it and change its name + var catas = Repository.GetAllObjects().ToArray(); - foreach (var catalogue in catas) + foreach (var catalogue in catas) + if (catalogue.ID == expectedID) { - if (catalogue.ID == expectedID) - { - catalogue.Name = "fish"; - catalogue.SaveToDatabase(); - } + catalogue.Name = "fish"; + catalogue.SaveToDatabase(); } - //find it again and see if its name has changed - then delete it so we don't polute the db - Catalogue[] catasAfter = Repository.GetAllObjects().ToArray(); + //find it again and see if its name has changed - then delete it so we don't polute the db + var catasAfter = Repository.GetAllObjects().ToArray(); - foreach (var catalogue in catasAfter) + foreach (var catalogue in catasAfter) + if (catalogue.ID == expectedID) { - if (catalogue.ID == expectedID) - { - Assert.AreEqual(catalogue.Name, "fish"); - catalogue.DeleteInDatabase(); - } + Assert.AreEqual(catalogue.Name, "fish"); + catalogue.DeleteInDatabase(); } - } + } - [Test] - public void update_changeAllProperties_pass() - { - //create a new one - var cata = new Catalogue(Repository, "fishing"); - int expectedID = cata.ID; + [Test] + public void update_changeAllProperties_pass() + { + //create a new one + var cata = new Catalogue(Repository, "fishing"); + var expectedID = cata.ID; - //find it and change its name - Catalogue[] catas = Repository.GetAllObjects().ToArray(); + //find it and change its name + var catas = Repository.GetAllObjects().ToArray(); - foreach (var catalogue in catas) + foreach (var catalogue in catas) + if (catalogue.ID == expectedID) { - if (catalogue.ID == expectedID) - { - catalogue.Access_options = "backwards,frontwards"; - catalogue.API_access_URL = new Uri("http://API.html"); - catalogue.Acronym = "abc"; - catalogue.Attribution_citation = "belongs to dave"; - catalogue.Browse_URL = new Uri("http://browse.html"); - catalogue.Bulk_Download_URL = new Uri("http://bulk.html"); - catalogue.Contact_details = "thomasnind"; - catalogue.Geographical_coverage = "fullspectrum"; - catalogue.Resource_owner = "blackhole"; - catalogue.Description = "exciting stuff of great excitement"; - catalogue.Detail_Page_URL = new Uri("http://detail.html"); - catalogue.Last_revision_date = DateTime.Parse("01/01/01"); - catalogue.Name = "kaptainshield"; - catalogue.Background_summary = "£50 preferred"; - catalogue.Periodicity = Catalogue.CataloguePeriodicity.Monthly; - catalogue.Query_tool_URL = new Uri("http://querier.html"); - catalogue.Source_URL = new 
Uri("http://blackholeSun.html"); - catalogue.Time_coverage = "comprehensive"; - catalogue.Search_keywords = "excitement,fishmongery"; - catalogue.Type = Catalogue.CatalogueType.ResearchStudy; - catalogue.Update_freq = "Every darmn second!"; - catalogue.Update_sched = "periodically on request"; - - catalogue.Country_of_origin = "United Kingdom"; - catalogue.Data_standards = "Highly Standardised"; - catalogue.Administrative_contact_address = "Candyland"; - catalogue.Administrative_contact_email = "big@brother.com"; - catalogue.Administrative_contact_name = "Uncle Sam"; - catalogue.Administrative_contact_telephone = "12345 67890"; - catalogue.Explicit_consent = true; - catalogue.Ethics_approver = "Tayside Supernatural Department"; - catalogue.Source_of_data_collection = "Invented by Unit Test"; - catalogue.SubjectNumbers = "100,000,000"; - - catalogue.SaveToDatabase(); - } + catalogue.Access_options = "backwards,frontwards"; + catalogue.API_access_URL = new Uri("http://API.html"); + catalogue.Acronym = "abc"; + catalogue.Attribution_citation = "belongs to dave"; + catalogue.Browse_URL = new Uri("http://browse.html"); + catalogue.Bulk_Download_URL = new Uri("http://bulk.html"); + catalogue.Contact_details = "thomasnind"; + catalogue.Geographical_coverage = "fullspectrum"; + catalogue.Resource_owner = "blackhole"; + catalogue.Description = "exciting stuff of great excitement"; + catalogue.Detail_Page_URL = new Uri("http://detail.html"); + catalogue.Last_revision_date = DateTime.Parse("01/01/01"); + catalogue.Name = "kaptainshield"; + catalogue.Background_summary = "£50 preferred"; + catalogue.Periodicity = Catalogue.CataloguePeriodicity.Monthly; + catalogue.Query_tool_URL = new Uri("http://querier.html"); + catalogue.Source_URL = new Uri("http://blackholeSun.html"); + catalogue.Time_coverage = "comprehensive"; + catalogue.Search_keywords = "excitement,fishmongery"; + catalogue.Type = Catalogue.CatalogueType.ResearchStudy; + catalogue.Update_freq = "Every darmn second!"; + catalogue.Update_sched = "periodically on request"; + + catalogue.Country_of_origin = "United Kingdom"; + catalogue.Data_standards = "Highly Standardised"; + catalogue.Administrative_contact_address = "Candyland"; + catalogue.Administrative_contact_email = "big@brother.com"; + catalogue.Administrative_contact_name = "Uncle Sam"; + catalogue.Administrative_contact_telephone = "12345 67890"; + catalogue.Explicit_consent = true; + catalogue.Ethics_approver = "Tayside Supernatural Department"; + catalogue.Source_of_data_collection = "Invented by Unit Test"; + catalogue.SubjectNumbers = "100,000,000"; + + catalogue.SaveToDatabase(); } + //find it again and see if it has changed - then delete it so we don't polute the db + var catasAfter = Repository.GetAllObjects().ToArray(); - //find it again and see if it has changed - then delete it so we don't polute the db - Catalogue[] catasAfter = Repository.GetAllObjects().ToArray(); - - foreach (var catalogue in catasAfter) + foreach (var catalogue in catasAfter) + if (catalogue.ID == expectedID) { - if (catalogue.ID == expectedID) - { - Assert.AreEqual(catalogue.Access_options , "backwards,frontwards"); - Assert.AreEqual(catalogue.API_access_URL , new Uri("http://API.html")); - Assert.AreEqual(catalogue.Acronym , "abc"); - Assert.AreEqual(catalogue.Attribution_citation , "belongs to dave"); - Assert.AreEqual(catalogue.Browse_URL , new Uri("http://browse.html")); - Assert.AreEqual(catalogue.Bulk_Download_URL , new Uri("http://bulk.html")); - Assert.AreEqual(catalogue.Contact_details , 
"thomasnind"); - Assert.AreEqual(catalogue.Geographical_coverage, "fullspectrum"); - Assert.AreEqual(catalogue.Resource_owner, "blackhole"); - Assert.AreEqual(catalogue.Description , "exciting stuff of great excitement"); - Assert.AreEqual(catalogue.Detail_Page_URL , new Uri("http://detail.html")); - Assert.AreEqual(catalogue.Last_revision_date , DateTime.Parse("01/01/01")); - Assert.AreEqual(catalogue.Name , "kaptainshield"); - Assert.AreEqual(catalogue.Background_summary, "£50 preferred"); - Assert.AreEqual(catalogue.Periodicity , Catalogue.CataloguePeriodicity.Monthly); - Assert.AreEqual(catalogue.Query_tool_URL , new Uri("http://querier.html")); - Assert.AreEqual(catalogue.Source_URL , new Uri("http://blackholeSun.html")); - Assert.AreEqual(catalogue.Time_coverage , "comprehensive"); - Assert.AreEqual(catalogue.Search_keywords, "excitement,fishmongery"); - Assert.AreEqual(catalogue.Type , Catalogue.CatalogueType.ResearchStudy); - Assert.AreEqual(catalogue.Update_freq , "Every darmn second!"); - Assert.AreEqual(catalogue.Update_sched , "periodically on request"); - - - Assert.AreEqual(catalogue.Country_of_origin , "United Kingdom"); - Assert.AreEqual(catalogue.Data_standards , "Highly Standardised"); - Assert.AreEqual(catalogue.Administrative_contact_address , "Candyland"); - Assert.AreEqual(catalogue.Administrative_contact_email , "big@brother.com"); - Assert.AreEqual(catalogue.Administrative_contact_name , "Uncle Sam"); - Assert.AreEqual(catalogue.Administrative_contact_telephone , "12345 67890"); - Assert.AreEqual(catalogue.Explicit_consent , true); - Assert.AreEqual(catalogue.Ethics_approver , "Tayside Supernatural Department"); - Assert.AreEqual(catalogue.Source_of_data_collection , "Invented by Unit Test"); - Assert.AreEqual(catalogue.SubjectNumbers, "100,000,000"); - - - catalogue.DeleteInDatabase(); - } + Assert.AreEqual(catalogue.Access_options, "backwards,frontwards"); + Assert.AreEqual(catalogue.API_access_URL, new Uri("http://API.html")); + Assert.AreEqual(catalogue.Acronym, "abc"); + Assert.AreEqual(catalogue.Attribution_citation, "belongs to dave"); + Assert.AreEqual(catalogue.Browse_URL, new Uri("http://browse.html")); + Assert.AreEqual(catalogue.Bulk_Download_URL, new Uri("http://bulk.html")); + Assert.AreEqual(catalogue.Contact_details, "thomasnind"); + Assert.AreEqual(catalogue.Geographical_coverage, "fullspectrum"); + Assert.AreEqual(catalogue.Resource_owner, "blackhole"); + Assert.AreEqual(catalogue.Description, "exciting stuff of great excitement"); + Assert.AreEqual(catalogue.Detail_Page_URL, new Uri("http://detail.html")); + Assert.AreEqual(catalogue.Last_revision_date, DateTime.Parse("01/01/01")); + Assert.AreEqual(catalogue.Name, "kaptainshield"); + Assert.AreEqual(catalogue.Background_summary, "£50 preferred"); + Assert.AreEqual(catalogue.Periodicity, Catalogue.CataloguePeriodicity.Monthly); + Assert.AreEqual(catalogue.Query_tool_URL, new Uri("http://querier.html")); + Assert.AreEqual(catalogue.Source_URL, new Uri("http://blackholeSun.html")); + Assert.AreEqual(catalogue.Time_coverage, "comprehensive"); + Assert.AreEqual(catalogue.Search_keywords, "excitement,fishmongery"); + Assert.AreEqual(catalogue.Type, Catalogue.CatalogueType.ResearchStudy); + Assert.AreEqual(catalogue.Update_freq, "Every darmn second!"); + Assert.AreEqual(catalogue.Update_sched, "periodically on request"); + + + Assert.AreEqual(catalogue.Country_of_origin, "United Kingdom"); + Assert.AreEqual(catalogue.Data_standards, "Highly Standardised"); + 
Assert.AreEqual(catalogue.Administrative_contact_address, "Candyland"); + Assert.AreEqual(catalogue.Administrative_contact_email, "big@brother.com"); + Assert.AreEqual(catalogue.Administrative_contact_name, "Uncle Sam"); + Assert.AreEqual(catalogue.Administrative_contact_telephone, "12345 67890"); + Assert.AreEqual(catalogue.Explicit_consent, true); + Assert.AreEqual(catalogue.Ethics_approver, "Tayside Supernatural Department"); + Assert.AreEqual(catalogue.Source_of_data_collection, "Invented by Unit Test"); + Assert.AreEqual(catalogue.SubjectNumbers, "100,000,000"); + + + catalogue.DeleteInDatabase(); } - } + } - [Test] - public void create_blankConstructorCatalogue_createsNewInDatabase() - { - int before = Repository.GetAllObjects().Count(); + [Test] + public void create_blankConstructorCatalogue_createsNewInDatabase() + { + var before = Repository.GetAllObjects().Length; - var newCatalogue = new Catalogue(Repository, "fishing"); - int expectedID = newCatalogue.ID; + var newCatalogue = new Catalogue(Repository, "fishing"); + var expectedID = newCatalogue.ID; - Assert.IsTrue(expectedID > 1); + Assert.IsTrue(expectedID > 1); - Catalogue[] catasAfter = Repository.GetAllObjects().ToArray(); - int after = catasAfter.Count(); + var catasAfter = Repository.GetAllObjects().ToArray(); + var after = catasAfter.Length; - Assert.AreEqual(before, after - 1); + Assert.AreEqual(before, after - 1); - int numberDeleted = 0; - foreach (Catalogue cata in catasAfter) + var numberDeleted = 0; + foreach (var cata in catasAfter) + if (cata.ID == expectedID) { - if (cata.ID == expectedID) - { - cata.DeleteInDatabase(); - numberDeleted++; - } + cata.DeleteInDatabase(); + numberDeleted++; } - Assert.AreEqual(numberDeleted, 1); + Assert.AreEqual(numberDeleted, 1); + } + + [Test] + public void GetCatalogueWithID_InvalidID_throwsException() + { + Assert.Throws(() => Repository.GetObjectByID(-1)); + } + + [Test] + public void GetCatalogueWithID_validID_pass() + { + var c = new Catalogue(Repository, "TEST"); + + Assert.NotNull(c); + Assert.True(c.Name == "TEST"); + + c.DeleteInDatabase(); + } + + + [Test] + public void TestGetTablesAndLookupTables() + { + //One catalogue + var cata = new Catalogue(Repository, "TestGetTablesAndLookupTables"); + + //6 virtual columns + var ci1 = new CatalogueItem(Repository, cata, "Col1"); + var ci2 = new CatalogueItem(Repository, cata, "Col2"); + var ci3 = new CatalogueItem(Repository, cata, "Col3"); + var ci4 = new CatalogueItem(Repository, cata, "Col4"); + var ci5 = new CatalogueItem(Repository, cata, "Description"); + var ci6 = new CatalogueItem(Repository, cata, "Code"); + + //2 columns come from table 1 + var t1 = new TableInfo(Repository, "Table1"); + var t1_c1 = new ColumnInfo(Repository, "Col1", "varchar(10)", t1); + var t1_c2 = new ColumnInfo(Repository, "Col2", "int", t1); + + //2 columns come from table 2 + var t2 = new TableInfo(Repository, "Table2"); + var t2_c1 = new ColumnInfo(Repository, "Col3", "varchar(10)", t2); + var t2_c2 = new ColumnInfo(Repository, "Col4", "int", t2); + + //2 columns come from the lookup table + var t3 = new TableInfo(Repository, "Table3"); + var t3_c1 = new ColumnInfo(Repository, "Description", "varchar(10)", t3); + var t3_c2 = new ColumnInfo(Repository, "Code", "int", t3); + + //wire SetUp virtual columns to underlying columns + ci1.SetColumnInfo(t1_c1); + ci2.SetColumnInfo(t1_c2); + ci3.SetColumnInfo(t2_c1); + ci4.SetColumnInfo(t2_c2); + ci5.SetColumnInfo(t3_c1); + ci6.SetColumnInfo(t3_c2); + + //configure the lookup relationship + var lookup 
= new Lookup(Repository, t3_c1, t1_c2, t3_c2, ExtractionJoinType.Left, ""); + try + { + var allTables = cata.GetTableInfoList(true).ToArray(); + Assert.Contains(t1, allTables); + Assert.Contains(t2, allTables); + Assert.Contains(t3, allTables); + + var normalTablesOnly = cata.GetTableInfoList(false).ToArray(); + Assert.AreEqual(2, normalTablesOnly.Length); + Assert.Contains(t1, normalTablesOnly); + Assert.Contains(t2, normalTablesOnly); + + var lookupTablesOnly = cata.GetLookupTableInfoList(); + Assert.AreEqual(1, lookupTablesOnly.Length); + Assert.Contains(t3, lookupTablesOnly); + + cata.GetTableInfos(out var normalTables, out var lookupTables); + Assert.AreEqual(2, normalTables.Count); + Assert.AreEqual(1, lookupTables.Count); + + Assert.Contains(t1, normalTables); + Assert.Contains(t2, normalTables); + Assert.Contains(t3, lookupTables); } + finally + { + lookup.DeleteInDatabase(); - [Test] - public void GetCatalogueWithID_InvalidID_throwsException() + t1.DeleteInDatabase(); + t2.DeleteInDatabase(); + t3.DeleteInDatabase(); + + cata.DeleteInDatabase(); + } + } + + [Test] + public void CatalogueFolder_DefaultIsRoot() + { + var c = new Catalogue(Repository, "bob"); + try { - Assert.Throws(() => Repository.GetObjectByID(-1)); + Assert.AreEqual("\\", c.Folder); } + finally + { + c.DeleteInDatabase(); + } + } - [Test] - public void GetCatalogueWithID_validID_pass() + [Test] + public void CatalogueFolder_ChangeAndSave() + { + var c = new Catalogue(Repository, "bob"); + try { - Catalogue c = new Catalogue(Repository, "TEST"); + c.Folder = "\\Research\\Important"; + Assert.AreEqual("\\research\\important", c.Folder); + c.SaveToDatabase(); - Assert.NotNull(c); - Assert.True(c.Name == "TEST"); - + var c2 = Repository.GetObjectByID(c.ID); + Assert.AreEqual("\\research\\important", c2.Folder); + } + finally + { c.DeleteInDatabase(); } + } - [Test] - public void TestGetTablesAndLookupTables() + [Test] + public void CatalogueFolder_CannotSetToNonRoot() + { + var c = new Catalogue(Repository, "bob"); + try { - //One catalogue - Catalogue cata = new Catalogue(Repository, "TestGetTablesAndLookupTables"); - - //6 virtual columns - CatalogueItem ci1 = new CatalogueItem(Repository, cata, "Col1"); - CatalogueItem ci2 = new CatalogueItem(Repository, cata, "Col2"); - CatalogueItem ci3 = new CatalogueItem(Repository, cata, "Col3"); - CatalogueItem ci4 = new CatalogueItem(Repository, cata, "Col4"); - CatalogueItem ci5 = new CatalogueItem(Repository, cata, "Description"); - CatalogueItem ci6 = new CatalogueItem(Repository, cata, "Code"); - - //2 columns come from table 1 - TableInfo t1 = new TableInfo(Repository, "Table1"); - ColumnInfo t1_c1 = new ColumnInfo(Repository, "Col1","varchar(10)",t1); - ColumnInfo t1_c2 = new ColumnInfo(Repository, "Col2", "int", t1); - - //2 columns come from table 2 - TableInfo t2 = new TableInfo(Repository, "Table2"); - ColumnInfo t2_c1 = new ColumnInfo(Repository, "Col3", "varchar(10)", t2); - ColumnInfo t2_c2 = new ColumnInfo(Repository, "Col4", "int", t2); - - //2 columns come from the lookup table - TableInfo t3 = new TableInfo(Repository, "Table3"); - ColumnInfo t3_c1 = new ColumnInfo(Repository, "Description", "varchar(10)", t3); - ColumnInfo t3_c2 = new ColumnInfo(Repository, "Code", "int", t3); - - //wire SetUp virtual columns to underlying columns - ci1.SetColumnInfo(t1_c1); - ci2.SetColumnInfo( t1_c2); - ci3.SetColumnInfo( t2_c1); - ci4.SetColumnInfo( t2_c2); - ci5.SetColumnInfo( t3_c1); - ci6.SetColumnInfo( t3_c2); - - //configure the lookup relationship - var lookup = 
new Lookup(Repository, t3_c1, t1_c2, t3_c2,ExtractionJoinType.Left, ""); - try - { - var allTables = cata.GetTableInfoList(true).ToArray(); - Assert.Contains(t1,allTables); - Assert.Contains(t2, allTables); - Assert.Contains(t3, allTables); - - var normalTablesOnly = cata.GetTableInfoList(false).ToArray(); - Assert.AreEqual(2,normalTablesOnly.Length); - Assert.Contains(t1,normalTablesOnly); - Assert.Contains(t2, normalTablesOnly); - - var lookupTablesOnly = cata.GetLookupTableInfoList(); - Assert.AreEqual(1,lookupTablesOnly.Length); - Assert.Contains(t3,lookupTablesOnly); - - List normalTables, lookupTables; - cata.GetTableInfos(out normalTables, out lookupTables); - Assert.AreEqual(2,normalTables.Count); - Assert.AreEqual(1, lookupTables.Count); - - Assert.Contains(t1,normalTables); - Assert.Contains(t2, normalTables); - Assert.Contains(t3,lookupTables); - } - finally - { - lookup.DeleteInDatabase(); - - t1.DeleteInDatabase(); - t2.DeleteInDatabase(); - t3.DeleteInDatabase(); - - cata.DeleteInDatabase(); - } + var ex = Assert.Throws(() => c.Folder = "fish"); + Assert.AreEqual(@"All catalogue paths must start with \. Invalid path was:fish", ex.Message); + } + finally + { + c.DeleteInDatabase(); } + } - [Test] - public void CatalogueFolder_DefaultIsRoot() + [Test] + public void CatalogueFolder_CannotSetToNull() + { + var c = new Catalogue(Repository, "bob"); + try { - var c = new Catalogue(Repository, "bob"); - try - { - Assert.AreEqual("\\",c.Folder); - } - finally - { - c.DeleteInDatabase(); - } + var ex = Assert.Throws(() => c.Folder = null); + Assert.AreEqual( + @"An attempt was made to set Catalogue Folder to null, every Catalogue must have a folder, set it to \ if you want the root", + ex.Message); } - [Test] - public void CatalogueFolder_ChangeAndSave() + finally { - var c = new Catalogue(Repository, "bob"); - try - { - c.Folder = "\\Research\\Important"; - Assert.AreEqual("\\research\\important", c.Folder); - c.SaveToDatabase(); - - var c2 = Repository.GetObjectByID(c.ID); - Assert.AreEqual("\\research\\important", c2.Folder); - } - finally - { - c.DeleteInDatabase(); - } + c.DeleteInDatabase(); } + } - - [Test] - public void CatalogueFolder_CannotSetToNonRoot() + [Test] + public void CatalogueFolder_CannotHaveDoubleSlashes() + { + var c = new Catalogue(Repository, "bob"); + try { - var c = new Catalogue(Repository, "bob"); - try - { - var ex = Assert.Throws(()=>c.Folder = "fish"); - Assert.AreEqual(@"All catalogue paths must start with \. 
Invalid path was:fish",ex.Message); - } - finally - { - c.DeleteInDatabase(); - } + //notice the @ symbol that makes the double slashes actual double slashes - common error we might make and what this test is designed to prevent + var ex = Assert.Throws(() => c.Folder = @"\\bob\\"); + Assert.AreEqual(@"Catalogue paths cannot contain double slashes '\\', Invalid path was:\\bob\\", + ex.Message); + } + finally + { + c.DeleteInDatabase(); } + } - [Test] - public void CatalogueFolder_CannotSetToNull() + [Test] + public void RelatedCatalogueTest_NoCatalogues() + { + var t = new TableInfo(Repository, "MyTable"); + try { - var c = new Catalogue(Repository, "bob"); - try - { - var ex = Assert.Throws(()=>c.Folder = null); - Assert.AreEqual(@"An attempt was made to set Catalogue Folder to null, every Catalogue must have a folder, set it to \ if you want the root", ex.Message); - } - finally - { - c.DeleteInDatabase(); - } + Assert.AreEqual(0, t.GetAllRelatedCatalogues().Length); } - - [Test] - public void CatalogueFolder_CannotHaveDoubleSlashes() + finally { - var c = new Catalogue(Repository, "bob"); - try - { - //notice the @ symbol that makes the double slashes actual double slashes - common error we might make and what this test is designed to prevent - var ex = Assert.Throws(()=>c.Folder = @"\\bob\\"); - Assert.AreEqual(@"Catalogue paths cannot contain double slashes '\\', Invalid path was:\\bob\\", ex.Message); - } - finally - { - c.DeleteInDatabase(); - } + t.DeleteInDatabase(); } + } - [Test] - public void RelatedCatalogueTest_NoCatalogues() + [Test] + [TestCase(true)] + [TestCase(false)] + public void RelatedCatalogueTest_OneCatalogue(bool createExtractionInformation) + { + var t = new TableInfo(Repository, "MyTable"); + var c = new ColumnInfo(Repository, "MyCol", "varchar(10)", t); + + var cata = new Catalogue(Repository, "MyCata"); + var ci = new CatalogueItem(Repository, cata, "MyCataItem"); + + try { - TableInfo t = new TableInfo(Repository,"MyTable"); - try - { - Assert.AreEqual(0,t.GetAllRelatedCatalogues().Length); - } - finally - { - t.DeleteInDatabase(); - } - + if (createExtractionInformation) + new ExtractionInformation(Repository, ci, c, "dbo.SomeFunc('Bob') as MySelectLine"); + else + ci.SetColumnInfo(c); + + var catas = t.GetAllRelatedCatalogues(); + Assert.AreEqual(1, catas.Length); + Assert.AreEqual(cata, catas[0]); } - - [Test] - [TestCase(true)] - [TestCase(false)] - public void RelatedCatalogueTest_OneCatalogue(bool createExtractionInformation) + finally { - TableInfo t = new TableInfo(Repository, "MyTable"); - ColumnInfo c = new ColumnInfo(Repository,"MyCol","varchar(10)",t); - - Catalogue cata = new Catalogue(Repository,"MyCata"); - CatalogueItem ci = new CatalogueItem(Repository,cata,"MyCataItem"); - - try - { - if (createExtractionInformation) - new ExtractionInformation(Repository, ci, c, "dbo.SomeFunc('Bob') as MySelectLine"); - else - ci.SetColumnInfo(c); - - var catas = t.GetAllRelatedCatalogues(); - Assert.AreEqual(1, catas.Length); - Assert.AreEqual(cata,catas[0]); - } - finally - { - ci.DeleteInDatabase(); - cata.DeleteInDatabase(); - t.DeleteInDatabase(); - } + ci.DeleteInDatabase(); + cata.DeleteInDatabase(); + t.DeleteInDatabase(); } + } - [Test] - [TestCase(true)] - [TestCase(false)] - public void RelatedCatalogueTest_TwoCatalogues_TwoColumnsEach(bool createExtractionInformation) + [Test] + [TestCase(true)] + [TestCase(false)] + public void RelatedCatalogueTest_TwoCatalogues_TwoColumnsEach(bool createExtractionInformation) + { + var t = new 
TableInfo(Repository, "MyTable"); + var c1 = new ColumnInfo(Repository, "MyCol1", "varchar(10)", t); + var c2 = new ColumnInfo(Repository, "MyCol2", "varchar(10)", t); + + var cata1 = new Catalogue(Repository, "cata1"); + var ci1_1 = new CatalogueItem(Repository, cata1, "MyCataItem1_1"); + var ci1_2 = new CatalogueItem(Repository, cata1, "MyCataItem1_2"); + + var cata2 = new Catalogue(Repository, "cata2"); + var ci2_1 = new CatalogueItem(Repository, cata2, "MyCataItem2_1"); + var ci2_2 = new CatalogueItem(Repository, cata2, "MyCataItem2_2"); + try { - TableInfo t = new TableInfo(Repository, "MyTable"); - ColumnInfo c1 = new ColumnInfo(Repository, "MyCol1", "varchar(10)", t); - ColumnInfo c2 = new ColumnInfo(Repository, "MyCol2", "varchar(10)", t); - - Catalogue cata1 = new Catalogue(Repository, "cata1"); - CatalogueItem ci1_1 = new CatalogueItem(Repository, cata1, "MyCataItem1_1"); - CatalogueItem ci1_2 = new CatalogueItem(Repository, cata1, "MyCataItem1_2"); - - Catalogue cata2 = new Catalogue(Repository, "cata2"); - CatalogueItem ci2_1 = new CatalogueItem(Repository, cata2, "MyCataItem2_1"); - CatalogueItem ci2_2 = new CatalogueItem(Repository, cata2, "MyCataItem2_2"); - try + if (createExtractionInformation) { - if (createExtractionInformation) - { - new ExtractionInformation(Repository, ci1_1, c1, "dbo.SomeFunc('Bob') as MySelectLine"); - new ExtractionInformation(Repository, ci1_2, c2, "dbo.SomeFunc('Bob') as MySelectLine"); - new ExtractionInformation(Repository, ci2_1, c2, "dbo.SomeFunc('Bob') as MySelectLine"); - new ExtractionInformation(Repository, ci2_2, c1, "dbo.SomeFunc('Bob') as MySelectLine"); - } - else - { - ci1_1.SetColumnInfo(c1); - ci1_2.SetColumnInfo(c2); - ci2_1.SetColumnInfo(c2); - ci2_2.SetColumnInfo(c1); - - } - - - - var catas = t.GetAllRelatedCatalogues(); - Assert.AreEqual(2, catas.Length); - Assert.IsTrue(catas.Contains(cata1)); - Assert.IsTrue(catas.Contains(cata2)); + new ExtractionInformation(Repository, ci1_1, c1, "dbo.SomeFunc('Bob') as MySelectLine"); + new ExtractionInformation(Repository, ci1_2, c2, "dbo.SomeFunc('Bob') as MySelectLine"); + new ExtractionInformation(Repository, ci2_1, c2, "dbo.SomeFunc('Bob') as MySelectLine"); + new ExtractionInformation(Repository, ci2_2, c1, "dbo.SomeFunc('Bob') as MySelectLine"); } - finally + else { - cata1.DeleteInDatabase(); - cata2.DeleteInDatabase(); - t.DeleteInDatabase(); + ci1_1.SetColumnInfo(c1); + ci1_2.SetColumnInfo(c2); + ci2_1.SetColumnInfo(c2); + ci2_2.SetColumnInfo(c1); } + + var catas = t.GetAllRelatedCatalogues(); + Assert.AreEqual(2, catas.Length); + Assert.IsTrue(catas.Contains(cata1)); + Assert.IsTrue(catas.Contains(cata2)); } + finally + { + cata1.DeleteInDatabase(); + cata2.DeleteInDatabase(); + t.DeleteInDatabase(); + } + } - [TestCase("\\","\\")] - [TestCase("\\fish", "fish")] - [TestCase("\\fish\\dog\\cat", "cat")] - public void TestTreeNode_FullName_CleanPaths(string fullName,string expectedName) - { - var r1 = WhenIHaveA(); - r1.Folder = fullName; + [TestCase("\\", "\\")] + [TestCase("\\fish", "fish")] + [TestCase("\\fish\\dog\\cat", "cat")] + public void TestTreeNode_FullName_CleanPaths(string fullName, string expectedName) + { + var r1 = WhenIHaveA(); + r1.Folder = fullName; - var tree = FolderHelper.BuildFolderTree(new[] { r1 }); + var tree = FolderHelper.BuildFolderTree(new[] { r1 }); - var bottomFolder = tree; - - while(bottomFolder.ChildFolders.Any()) - { - bottomFolder = bottomFolder.ChildFolders.Single(); - } + var bottomFolder = tree; - Assert.AreEqual(expectedName, 
bottomFolder.Name); - Assert.AreEqual(fullName, bottomFolder.FullName); - } + while (bottomFolder.ChildFolders.Any()) bottomFolder = bottomFolder.ChildFolders.Single(); - [TestCase("\\admissions\\", "\\admissions")] - [TestCase("\\ADMissions\\", "\\admissions")] - public void TestFolderHelperAdjust(string input, string expectedOutput) - { - Assert.AreEqual(expectedOutput, FolderHelper.Adjust(input)); - } + Assert.AreEqual(expectedName, bottomFolder.Name); + Assert.AreEqual(fullName, bottomFolder.FullName); + } - [Test] - public void TestBuildFolderTree() - { - var r1 = WhenIHaveA(); - r1.Folder = "\\"; + [TestCase("\\admissions\\", "\\admissions")] + [TestCase("\\ADMissions\\", "\\admissions")] + public void TestFolderHelperAdjust(string input, string expectedOutput) + { + Assert.AreEqual(expectedOutput, FolderHelper.Adjust(input)); + } + + [Test] + public void TestBuildFolderTree() + { + var r1 = WhenIHaveA(); + r1.Folder = "\\"; - var r2 = WhenIHaveA(); - r2.Folder = "\\"; + var r2 = WhenIHaveA(); + r2.Folder = "\\"; - var cat = WhenIHaveA(); - cat.Folder = "\\dog\\fish\\cat"; + var cat = WhenIHaveA(); + cat.Folder = "\\dog\\fish\\cat"; - // give it some malformed ones too - var fun = WhenIHaveA(); - fun.Folder = "\\fun"; + // give it some malformed ones too + var fun = WhenIHaveA(); + fun.Folder = "\\fun"; - var morefun = WhenIHaveA(); - morefun.Folder = "\\fun"; + var morefun = WhenIHaveA(); + morefun.Folder = "\\fun"; - var objects = new IHasFolder[] - { - r1,r2,cat,fun,morefun - }; + var objects = new IHasFolder[] + { + r1, r2, cat, fun, morefun + }; - var tree = FolderHelper.BuildFolderTree(objects); - Assert.Contains(r1, tree.ChildObjects); - Assert.Contains(r2, tree.ChildObjects); + var tree = FolderHelper.BuildFolderTree(objects); + Assert.Contains(r1, tree.ChildObjects); + Assert.Contains(r2, tree.ChildObjects); - Assert.Contains(cat, tree["dog"]["fish"]["cat"].ChildObjects); + Assert.Contains(cat, tree["dog"]["fish"]["cat"].ChildObjects); - Assert.Contains(fun, tree["fun"].ChildObjects); - Assert.Contains(morefun, tree["fun"].ChildObjects); - } + Assert.Contains(fun, tree["fun"].ChildObjects); + Assert.Contains(morefun, tree["fun"].ChildObjects); + } - /// - /// Tests when you have - /// \ - /// \ somefolder - /// +cata1 - /// \ somesub - /// +cata2 - /// - [Test] - public void TestBuildFolderTree_MiddleBranches() - { - var cata1 = WhenIHaveA(); - cata1.Folder = "\\somefolder"; + /// + /// Tests when you have + /// \ + /// \ somefolder + /// +cata1 + /// \ somesub + /// +cata2 + /// + [Test] + public void TestBuildFolderTree_MiddleBranches() + { + var cata1 = WhenIHaveA(); + cata1.Folder = "\\somefolder"; - var cata2 = WhenIHaveA(); - cata2.Folder = "\\somefolder\\somesub"; + var cata2 = WhenIHaveA(); + cata2.Folder = "\\somefolder\\somesub"; - var objects = new IHasFolder[] - { - cata1,cata2 - }; + var objects = new IHasFolder[] + { + cata1, cata2 + }; - var tree = FolderHelper.BuildFolderTree(objects); - Assert.IsEmpty(tree.ChildObjects, "Should be no Catalogues on the root"); + var tree = FolderHelper.BuildFolderTree(objects); + Assert.IsEmpty(tree.ChildObjects, "Should be no Catalogues on the root"); - Assert.AreEqual(1, tree.ChildFolders.Count()); - Assert.AreEqual(1, tree["somefolder"].ChildFolders.Count()); - Assert.IsEmpty(tree["somefolder"]["somesub"].ChildFolders); + Assert.AreEqual(1, tree.ChildFolders.Count); + Assert.AreEqual(1, tree["somefolder"].ChildFolders.Count); + Assert.IsEmpty(tree["somefolder"]["somesub"].ChildFolders); - Assert.Contains(cata1, 
tree["somefolder"].ChildObjects); - Assert.Contains(cata2, tree["somefolder"]["somesub"].ChildObjects); - } + Assert.Contains(cata1, tree["somefolder"].ChildObjects); + Assert.Contains(cata2, tree["somefolder"]["somesub"].ChildObjects); } -} - \ No newline at end of file +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/ColumnInfoTests.cs b/Rdmp.Core.Tests/Curation/Integration/ColumnInfoTests.cs index f31c156f0f..bf34a0f17f 100644 --- a/Rdmp.Core.Tests/Curation/Integration/ColumnInfoTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/ColumnInfoTests.cs @@ -11,136 +11,130 @@ using Rdmp.Core.Curation.Data.DataLoad; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Integration +namespace Rdmp.Core.Tests.Curation.Integration; + +internal class ColumnInfoTests : DatabaseTests { - class ColumnInfoTests : DatabaseTests + [Test] + public void CreateNewColumnInfoInDatabase_NewColumns_NewColumnsAreEqualAfterSave() { + TableInfo parent = null; + ColumnInfo child = null; - - - [Test] - public void CreateNewColumnInfoInDatabase_NewColumns_NewColumnsAreEqualAfterSave() + try { - TableInfo parent = null; - ColumnInfo child=null; - - try + parent = new TableInfo(CatalogueRepository, "CHI"); + child = new ColumnInfo(CatalogueRepository, "chi", "varchar(10)", parent) { - parent = new TableInfo(CatalogueRepository, "CHI"); - child = new ColumnInfo(CatalogueRepository, "chi", "varchar(10)", parent) - { - Description = "The community health index, 10 digits of which the first 6 are date of birth", - Status = ColumnInfo.ColumnStatus.Active, - RegexPattern = "\\d*", - ValidationRules = "Last digit must be odd for gents and even for ladies" - }; - - child.SaveToDatabase(); - - ColumnInfo childAfter = CatalogueRepository.GetObjectByID(child.ID); + Description = "The community health index, 10 digits of which the first 6 are date of birth", + Status = ColumnInfo.ColumnStatus.Active, + RegexPattern = "\\d*", + ValidationRules = "Last digit must be odd for gents and even for ladies" + }; - Assert.AreEqual(child.Name, childAfter.Name); - Assert.AreEqual(child.Description, childAfter.Description); - Assert.AreEqual(child.Status, childAfter.Status); - Assert.AreEqual(child.RegexPattern, childAfter.RegexPattern); - Assert.AreEqual(child.ValidationRules, childAfter.ValidationRules); + child.SaveToDatabase(); - } - finally - { - child.DeleteInDatabase(); - parent.DeleteInDatabase(); - } - + var childAfter = CatalogueRepository.GetObjectByID(child.ID); + Assert.AreEqual(child.Name, childAfter.Name); + Assert.AreEqual(child.Description, childAfter.Description); + Assert.AreEqual(child.Status, childAfter.Status); + Assert.AreEqual(child.RegexPattern, childAfter.RegexPattern); + Assert.AreEqual(child.ValidationRules, childAfter.ValidationRules); } - - [Test] - public void GetAllColumnInfos_moreThan1_pass() + finally { + child.DeleteInDatabase(); + parent.DeleteInDatabase(); + } + } - TableInfo parent = new TableInfo(CatalogueRepository, "Slalom"); + [Test] + public void GetAllColumnInfos_moreThan1_pass() + { + var parent = new TableInfo(CatalogueRepository, "Slalom"); + + try + { + var ci = new ColumnInfo(CatalogueRepository, "MyAwesomeColumn", "varchar(1000)", parent); try { - var ci = new ColumnInfo(CatalogueRepository, "MyAwesomeColumn","varchar(1000)", parent); - - try - { - Assert.IsTrue(CatalogueRepository.GetAllObjectsWithParent(parent).Count() ==1); - } - finally - { - ci.DeleteInDatabase(); - } + Assert.IsTrue(CatalogueRepository.GetAllObjectsWithParent(parent).Length == 1); } 
finally { - parent.DeleteInDatabase(); + ci.DeleteInDatabase(); } } - - [Test] - public void CreateNewColumnInfoInDatabase_valid_pass() + finally { - TableInfo parent = new TableInfo(CatalogueRepository, "Lazors"); - ColumnInfo columnInfo = new ColumnInfo(CatalogueRepository, "Lazor Reflection Vol","varchar(1000)",parent); + parent.DeleteInDatabase(); + } + } - Assert.NotNull(columnInfo); + [Test] + public void CreateNewColumnInfoInDatabase_valid_pass() + { + var parent = new TableInfo(CatalogueRepository, "Lazors"); + var columnInfo = new ColumnInfo(CatalogueRepository, "Lazor Reflection Vol", "varchar(1000)", parent); - columnInfo.DeleteInDatabase(); + Assert.NotNull(columnInfo); - var ex = Assert.Throws(() => CatalogueRepository.GetObjectByID(columnInfo.ID)); - Assert.IsTrue(ex.Message.StartsWith("Could not find ColumnInfo with ID " + columnInfo.ID), ex.Message); + columnInfo.DeleteInDatabase(); - parent.DeleteInDatabase(); - } + var ex = Assert.Throws(() => + CatalogueRepository.GetObjectByID(columnInfo.ID)); + Assert.IsTrue(ex.Message.StartsWith($"Could not find ColumnInfo with ID {columnInfo.ID}"), ex.Message); - [Test] - public void update_changeAllProperties_pass() + parent.DeleteInDatabase(); + } + + [Test] + public void update_changeAllProperties_pass() + { + var parent = new TableInfo(CatalogueRepository, "Rokkits"); + var column = new ColumnInfo(CatalogueRepository, "ExplosiveVol", "varchar(1000)", parent) { - TableInfo parent = new TableInfo(CatalogueRepository, "Rokkits"); - ColumnInfo column = new ColumnInfo(CatalogueRepository, "ExplosiveVol","varchar(1000)", parent) - { - Digitisation_specs = "Highly digitizable", - Format = "Jpeg", - Name = "mycol", - Source = "Bazooka", - Data_type = "Whatever" - }; + Digitisation_specs = "Highly digitizable", + Format = "Jpeg", + Name = "mycol", + Source = "Bazooka", + Data_type = "Whatever" + }; - column.SaveToDatabase(); + column.SaveToDatabase(); - ColumnInfo columnAfter = CatalogueRepository.GetObjectByID(column.ID); + var columnAfter = CatalogueRepository.GetObjectByID(column.ID); - Assert.IsTrue(columnAfter.Digitisation_specs == "Highly digitizable"); - Assert.IsTrue(columnAfter.Format == "Jpeg"); - Assert.IsTrue(columnAfter.Name == "mycol"); - Assert.IsTrue(columnAfter.Source == "Bazooka"); - Assert.IsTrue(columnAfter.Data_type == "Whatever"); + Assert.IsTrue(columnAfter.Digitisation_specs == "Highly digitizable"); + Assert.IsTrue(columnAfter.Format == "Jpeg"); + Assert.IsTrue(columnAfter.Name == "mycol"); + Assert.IsTrue(columnAfter.Source == "Bazooka"); + Assert.IsTrue(columnAfter.Data_type == "Whatever"); - columnAfter.DeleteInDatabase(); - parent.DeleteInDatabase(); - } + columnAfter.DeleteInDatabase(); + parent.DeleteInDatabase(); + } - [Test] - public void Test_GetRAWStageTypeWhenPreLoadDiscardedDilution() - { - TableInfo parent = new TableInfo(CatalogueRepository, "Rokkits"); - ColumnInfo column = new ColumnInfo(CatalogueRepository, "MyCol", "varchar(4)", parent); + [Test] + public void Test_GetRAWStageTypeWhenPreLoadDiscardedDilution() + { + var parent = new TableInfo(CatalogueRepository, "Rokkits"); + var column = new ColumnInfo(CatalogueRepository, "MyCol", "varchar(4)", parent); - var discard = new PreLoadDiscardedColumn(CatalogueRepository, parent, "MyCol"); - discard.SqlDataType = "varchar(10)"; - discard.Destination = DiscardedColumnDestination.Dilute; - discard.SaveToDatabase(); + var discard = new PreLoadDiscardedColumn(CatalogueRepository, parent, "MyCol") + { + SqlDataType = "varchar(10)", + Destination = 
DiscardedColumnDestination.Dilute + }; + discard.SaveToDatabase(); - Assert.AreEqual("varchar(4)", column.GetRuntimeDataType(LoadStage.PostLoad)); - Assert.AreEqual("varchar(4)", column.GetRuntimeDataType(LoadStage.AdjustStaging)); - Assert.AreEqual("varchar(10)", column.GetRuntimeDataType(LoadStage.AdjustRaw)); + Assert.AreEqual("varchar(4)", column.GetRuntimeDataType(LoadStage.PostLoad)); + Assert.AreEqual("varchar(4)", column.GetRuntimeDataType(LoadStage.AdjustStaging)); + Assert.AreEqual("varchar(10)", column.GetRuntimeDataType(LoadStage.AdjustRaw)); - discard.DeleteInDatabase(); - parent.DeleteInDatabase(); - - } + discard.DeleteInDatabase(); + parent.DeleteInDatabase(); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/CommitInProgressTests.cs b/Rdmp.Core.Tests/Curation/Integration/CommitInProgressTests.cs index b37cedb707..548c0ce1ce 100644 --- a/Rdmp.Core.Tests/Curation/Integration/CommitInProgressTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/CommitInProgressTests.cs @@ -4,84 +4,81 @@ // RDMP is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. // You should have received a copy of the GNU General Public License along with RDMP. If not, see . -using MapsDirectlyToDatabaseTable; -using MapsDirectlyToDatabaseTable.Revertable; using NUnit.Framework; using Rdmp.Core.CommandExecution; using Rdmp.Core.Curation.Data; +using Rdmp.Core.MapsDirectlyToDatabaseTable.Revertable; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Integration +namespace Rdmp.Core.Tests.Curation.Integration; + +public class CommitInProgressTests : DatabaseTests { - public class CommitInProgressTests : DatabaseTests + [Test] + public void CommitInProgress_CatalogueModify() { - [Test] - public void CommitInProgress_CatalogueModify() - { - var c = new Catalogue(CatalogueRepository, "Hey"); - - using var start = new CommitInProgress(RepositoryLocator, new CommitInProgressSettings(c)); - - // no changes but let's spam this for added complexity - c.SaveToDatabase(); - c.SaveToDatabase(); - c.SaveToDatabase(); - - var activator = new ThrowImmediatelyActivator(RepositoryLocator); - - Assert.IsNull(start.TryFinish(activator),"No changes made to Catalogue so expected no commit"); - - c.Name = "abadaba"; - c.IsDeprecated = true; - c.SaveToDatabase(); - - var commit = start.TryFinish(activator); - Assert.IsNotNull(commit); - - Assert.AreEqual(1, commit.Mementos.Length); - Assert.AreNotEqual(commit.Mementos[0].BeforeYaml, commit.Mementos[0].AfterYaml); - } - - /// - /// Tests that when there is a on object(s) e.g. - /// that uses transactions. 
Cancelling the will leave everything back - /// how it was - /// - [Test] - public void CommitInProgress_TestCancellation() - { - var c = new Catalogue(CatalogueRepository, "Hey"); + var c = new Catalogue(CatalogueRepository, "Hey"); + + using var start = new CommitInProgress(RepositoryLocator, new CommitInProgressSettings(c)); + + // no changes but let's spam this for added complexity + c.SaveToDatabase(); + c.SaveToDatabase(); + c.SaveToDatabase(); + + var activator = new ThrowImmediatelyActivator(RepositoryLocator); + + Assert.IsNull(start.TryFinish(activator), "No changes made to Catalogue so expected no commit"); - Assert.AreEqual(ChangeDescription.NoChanges,c.HasLocalChanges().Evaluation, - "We just created this Catalogue, how can db copy be different?!"); + c.Name = "abadaba"; + c.IsDeprecated = true; + c.SaveToDatabase(); - var start = new CommitInProgress(RepositoryLocator, new CommitInProgressSettings(c) - { - UseTransactions = true - }); + var commit = start.TryFinish(activator); + Assert.IsNotNull(commit); + + Assert.AreEqual(1, commit.Mementos.Length); + Assert.AreNotEqual(commit.Mementos[0].BeforeYaml, commit.Mementos[0].AfterYaml); + } + + /// + /// Tests that when there is a on object(s) e.g. + /// that uses transactions. Cancelling the will leave everything back + /// how it was + /// + [Test] + public void CommitInProgress_TestCancellation() + { + var c = new Catalogue(CatalogueRepository, "Hey"); + + Assert.AreEqual(ChangeDescription.NoChanges, c.HasLocalChanges().Evaluation, + "We just created this Catalogue, how can db copy be different?!"); + + var start = new CommitInProgress(RepositoryLocator, new CommitInProgressSettings(c) + { + UseTransactions = true + }); - // there is a CommitInProgress on c so db should not have - c.Name = "abadaba"; - c.IsDeprecated = true; + // there is a CommitInProgress on c so db should not have + c.Name = "abadaba"; + c.IsDeprecated = true; - Assert.AreEqual(ChangeDescription.DatabaseCopyDifferent, c.HasLocalChanges().Evaluation, - "We have local changes"); + Assert.AreEqual(ChangeDescription.DatabaseCopyDifferent, c.HasLocalChanges().Evaluation, + "We have local changes"); - c.SaveToDatabase(); + c.SaveToDatabase(); - Assert.AreEqual(ChangeDescription.NoChanges, c.HasLocalChanges().Evaluation, - "Should be saved inside the transaction"); + Assert.AreEqual(ChangeDescription.NoChanges, c.HasLocalChanges().Evaluation, + "Should be saved inside the transaction"); - // abandon the commit - start.Dispose(); + // abandon the commit + start.Dispose(); - Assert.AreEqual(ChangeDescription.DatabaseCopyDifferent, c.HasLocalChanges().Evaluation, - "With transaction rolled back the Catalogue should now no longer match db state - i.e. be unsaved"); + Assert.AreEqual(ChangeDescription.DatabaseCopyDifferent, c.HasLocalChanges().Evaluation, + "With transaction rolled back the Catalogue should now no longer match db state - i.e. 
be unsaved"); - c.RevertToDatabaseState(); + c.RevertToDatabaseState(); - Assert.AreEqual("Hey", c.Name); - } + Assert.AreEqual("Hey", c.Name); } -} - \ No newline at end of file +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/ComprehensiveQueryPerformanceCounterTests.cs b/Rdmp.Core.Tests/Curation/Integration/ComprehensiveQueryPerformanceCounterTests.cs index 1f81db1202..ef637d7f68 100644 --- a/Rdmp.Core.Tests/Curation/Integration/ComprehensiveQueryPerformanceCounterTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/ComprehensiveQueryPerformanceCounterTests.cs @@ -7,40 +7,38 @@ using System.Linq; using NUnit.Framework; using Rdmp.Core.Curation.Data; -using ReusableLibraryCode; -using ReusableLibraryCode.Performance; +using Rdmp.Core.ReusableLibraryCode; +using Rdmp.Core.ReusableLibraryCode.Performance; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Integration +namespace Rdmp.Core.Tests.Curation.Integration; + +public class ComprehensiveQueryPerformanceCounterTests : DatabaseTests { - public class ComprehensiveQueryPerformanceCounterTests : DatabaseTests + [Test] + public void TestPerformance() { - [Test] - public void TestPerformance() - { - if (TestDatabaseSettings.UseFileSystemRepo) - Assert.Inconclusive("No queries are run when using file back repository"); + if (TestDatabaseSettings.UseFileSystemRepo) + Assert.Inconclusive("No queries are run when using file back repository"); - var pCounter = new ComprehensiveQueryPerformanceCounter(); - //enable performance counting - DatabaseCommandHelper.PerformanceCounter = pCounter; - try - { + var pCounter = new ComprehensiveQueryPerformanceCounter(); + //enable performance counting + DatabaseCommandHelper.PerformanceCounter = pCounter; + try + { + //send some queries + var cata = new Catalogue(CatalogueRepository, "Fish"); - //send some queries - var cata = new Catalogue(CatalogueRepository, "Fish"); + Assert.IsTrue(cata.Name.Equals("Fish")); - Assert.IsTrue(cata.Name.Equals("Fish")); + var commands = pCounter.DictionaryOfQueries.Values.ToArray(); + Assert.IsTrue(commands.Any(c => c.QueryText.Contains("SELECT * FROM [Catalogue] WHERE ID="))); - var commands = pCounter.DictionaryOfQueries.Values.ToArray(); - Assert.IsTrue(commands.Any(c => c.QueryText.Contains("SELECT * FROM [Catalogue] WHERE ID="))); - - cata.DeleteInDatabase(); - } - finally - { - DatabaseCommandHelper.PerformanceCounter = null; - } + cata.DeleteInDatabase(); + } + finally + { + DatabaseCommandHelper.PerformanceCounter = null; } } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/CredentialsTests.cs b/Rdmp.Core.Tests/Curation/Integration/CredentialsTests.cs index 707ba6566f..ed1d188bab 100644 --- a/Rdmp.Core.Tests/Curation/Integration/CredentialsTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/CredentialsTests.cs @@ -10,22 +10,20 @@ using NUnit.Framework; using Rdmp.Core.Curation.Data; using Rdmp.Core.Repositories.Managers; -using ReusableLibraryCode.DataAccess; +using Rdmp.Core.ReusableLibraryCode.DataAccess; using Tests.Common; -using MapsDirectlyToDatabaseTable; -namespace Rdmp.Core.Tests.Curation.Integration +namespace Rdmp.Core.Tests.Curation.Integration; + +public class CredentialsTests : DatabaseTests { - public class CredentialsTests : DatabaseTests + [OneTimeSetUp] + protected override void OneTimeSetUp() { - [OneTimeSetUp] - protected override void OneTimeSetUp() - { - base.OneTimeSetUp(); + base.OneTimeSetUp(); - foreach (TableInfo table in CatalogueRepository.GetAllObjects()) - { - if 
(table.Name.Equals("GetCredentialsFromATableInfo") + foreach (var table in CatalogueRepository.GetAllObjects()) + if (table.Name.Equals("GetCredentialsFromATableInfo") || table.Name.Equals("Create2TableInfosThatShareTheSameCredentialAndTestDeletingIt1") || @@ -40,346 +38,350 @@ protected override void OneTimeSetUp() table.Name.Equals("Test") || table.Name.Equals("Tableinfo1") - ) - table.DeleteInDatabase(); - } + ) + table.DeleteInDatabase(); - foreach (DataAccessCredentials cred in CatalogueRepository.GetAllObjects()) - { - if(cred.Name.Equals("bob") - || - cred.Name.Equals("Test") - ) - cred.DeleteInDatabase(); - } + foreach (var cred in CatalogueRepository.GetAllObjects()) + if (cred.Name.Equals("bob") + || + cred.Name.Equals("Test") + ) + cred.DeleteInDatabase(); + } + [Test] + public void CreateNewCredentials() + { + var newCredentials = new DataAccessCredentials(CatalogueRepository, "bob"); - + try + { + Assert.AreEqual("bob", newCredentials.Name); + Assert.AreNotEqual(0, newCredentials.ID); } - - [Test] - public void CreateNewCredentials() + finally { - var newCredentials = new DataAccessCredentials(CatalogueRepository, "bob"); - - try - { - Assert.AreEqual("bob", newCredentials.Name); - Assert.AreNotEqual(0, newCredentials.ID); - } - finally - { - newCredentials.DeleteInDatabase(); - } + newCredentials.DeleteInDatabase(); } + } - [Test] - public void CreateNewCredentialsThenGetByUsernamePasswordCombo() + [Test] + public void CreateNewCredentialsThenGetByUsernamePasswordCombo() + { + var newCredentials = new DataAccessCredentials(CatalogueRepository, "bob") { - var newCredentials = new DataAccessCredentials(CatalogueRepository, "bob"); + Username = "myusername", + Password = "mypassword" + }; - newCredentials.Username = "myusername"; - newCredentials.Password = "mypassword"; - newCredentials.SaveToDatabase(); + newCredentials.SaveToDatabase(); - var newCopy = CatalogueRepository.GetAllObjects().SingleOrDefault(c=>c.Username == "myusername"); - Assert.IsNotNull(newCopy); - - try - { - Assert.NotNull(newCopy); - Assert.AreEqual(newCredentials.ID, newCopy.ID); - Assert.AreEqual(newCredentials.Username, newCopy.Username); - Assert.AreEqual(newCredentials.GetDecryptedPassword(), newCopy.GetDecryptedPassword()); - Assert.AreEqual(newCredentials.Password, newCopy.Password); - } - finally - { - newCredentials.DeleteInDatabase(); - - } - } + var newCopy = CatalogueRepository.GetAllObjects() + .SingleOrDefault(c => c.Username == "myusername"); + Assert.IsNotNull(newCopy); - [Test] - public void TestThe_Any_EnumValue_CannotRequestAnyCredentials() + try { - TableInfo tableInfo = new TableInfo(CatalogueRepository, "GetCredentialsFromATableInfo"); - tableInfo.Name = "My Exciting Table"; - - var creds = new DataAccessCredentials(CatalogueRepository); - try - { - creds.Name = "Test"; - creds.SaveToDatabase(); + Assert.NotNull(newCopy); + Assert.AreEqual(newCredentials.ID, newCopy.ID); + Assert.AreEqual(newCredentials.Username, newCopy.Username); + Assert.AreEqual(newCredentials.GetDecryptedPassword(), newCopy.GetDecryptedPassword()); + Assert.AreEqual(newCredentials.Password, newCopy.Password); + } + finally + { + newCredentials.DeleteInDatabase(); + } + } - tableInfo.SetCredentials(creds, DataAccessContext.InternalDataProcessing); - tableInfo.SaveToDatabase(); + [Test] + public void TestThe_Any_EnumValue_CannotRequestAnyCredentials() + { + var tableInfo = new TableInfo(CatalogueRepository, "GetCredentialsFromATableInfo") + { + Name = "My Exciting Table" + }; - //attempt to request ANY credentials 
-                var ex = Assert.Throws(()=> tableInfo.GetCredentialsIfExists(DataAccessContext.Any));
-                Assert.AreEqual("You cannot ask for any credentials, you must supply a usage context.",ex.Message);
+        var creds = new DataAccessCredentials(CatalogueRepository);
+        try
+        {
+            creds.Name = "Test";
+            creds.SaveToDatabase();
+            tableInfo.SetCredentials(creds, DataAccessContext.InternalDataProcessing);
+            tableInfo.SaveToDatabase();
-            }
-            finally
-            {
-                tableInfo.DeleteInDatabase();
-                creds.DeleteInDatabase();
-            }
+            //attempt to request ANY credentials
+            var ex = Assert.Throws(() => tableInfo.GetCredentialsIfExists(DataAccessContext.Any));
+            Assert.AreEqual("You cannot ask for any credentials, you must supply a usage context.", ex.Message);
         }
-        [Test]
-        public void TestThe_Any_EnumValue()
+        finally
         {
-            TableInfo tableInfo = new TableInfo(CatalogueRepository, "GetCredentialsFromATableInfo");
-            tableInfo.Name = "My Exciting Table";
-            tableInfo.SaveToDatabase();
-
-            var creds = new DataAccessCredentials(CatalogueRepository);
-            try
-            {
-                creds.Name = "Test";
-                creds.SaveToDatabase();
-
-                //now create the association as Any
-                tableInfo.SetCredentials(creds, DataAccessContext.Any);
-
-                //because the credential is liscenced to be used under ANY context, you can make requests under any of the specific contexts and be served the Any result
-                var creds2 = tableInfo.GetCredentialsIfExists(DataAccessContext.InternalDataProcessing);
-                Assert.NotNull(creds2);
-                creds2 = tableInfo.GetCredentialsIfExists(DataAccessContext.DataExport);
-                Assert.NotNull(creds2);
-                creds2 = tableInfo.GetCredentialsIfExists(DataAccessContext.DataLoad);
-                Assert.NotNull(creds2);
-
-            }
-            finally
-            {
-                tableInfo.DeleteInDatabase();
-                creds.DeleteInDatabase();
-            }
+            tableInfo.DeleteInDatabase();
+            creds.DeleteInDatabase();
         }
+    }

-        [Test]
-        public void Test_Any_PrioritisingTheMoreAppropriateCredential()
+    [Test]
+    public void TestThe_Any_EnumValue()
+    {
+        var tableInfo = new TableInfo(CatalogueRepository, "GetCredentialsFromATableInfo")
         {
-            TableInfo tableInfo = new TableInfo(CatalogueRepository, "GetCredentialsFromATableInfo");
-            tableInfo.Name = "Tableinfo1";
-            tableInfo.SaveToDatabase();
+            Name = "My Exciting Table"
+        };
+        tableInfo.SaveToDatabase();

-            var creds = new DataAccessCredentials(CatalogueRepository);
-            var creds2 = new DataAccessCredentials(CatalogueRepository);
-
-            try
-            {
-                creds.Name = "Test";
-                creds.SaveToDatabase();
-
-                //now create the association as Any
-                tableInfo.SetCredentials(creds, DataAccessContext.DataLoad);
-                tableInfo.SetCredentials(creds2, DataAccessContext.Any);
-
-
-                Assert.AreEqual(creds, tableInfo.GetCredentialsIfExists(DataAccessContext.DataLoad));
-
-            }
-            finally
-            {
-                tableInfo.DeleteInDatabase();
-                creds.DeleteInDatabase();
-                creds2.DeleteInDatabase();
-            }
+        var creds = new DataAccessCredentials(CatalogueRepository);
+        try
+        {
+            creds.Name = "Test";
+            creds.SaveToDatabase();
+
+            //now create the association as Any
+            tableInfo.SetCredentials(creds, DataAccessContext.Any);
+
+            //because the credential is licensed to be used under ANY context, you can make requests under any of the specific contexts and be served the Any result
+            var creds2 = tableInfo.GetCredentialsIfExists(DataAccessContext.InternalDataProcessing);
+            Assert.NotNull(creds2);
+            creds2 = tableInfo.GetCredentialsIfExists(DataAccessContext.DataExport);
+            Assert.NotNull(creds2);
+            creds2 = tableInfo.GetCredentialsIfExists(DataAccessContext.DataLoad);
+            Assert.NotNull(creds2);
         }
+        finally
+        {
+            tableInfo.DeleteInDatabase();
+            creds.DeleteInDatabase();
+ } + } - [Test] - public void SaveAndReloadCredentials() + [Test] + public void Test_Any_PrioritisingTheMoreAppropriateCredential() + { + var tableInfo = new TableInfo(CatalogueRepository, "GetCredentialsFromATableInfo") { - var originalCredentials = new DataAccessCredentials(CatalogueRepository, "bob"); + Name = "Tableinfo1" + }; + tableInfo.SaveToDatabase(); - try - { - originalCredentials.Name = "bob1"; - originalCredentials.Username = "user"; - originalCredentials.Password = "pass"; - originalCredentials.SaveToDatabase(); - - var newCopy = CatalogueRepository.GetObjectByID(originalCredentials.ID); - Assert.AreEqual(originalCredentials.Name, newCopy.Name); - Assert.AreEqual(originalCredentials.Username, newCopy.Username); - Assert.AreEqual(originalCredentials.Password, newCopy.Password); - - //test overridden Equals - Assert.AreEqual(originalCredentials,newCopy); - originalCredentials.Password = "fish"; - Assert.AreEqual(originalCredentials, newCopy);//they are still equal because IDs are the same - - } - finally - { - originalCredentials.DeleteInDatabase(); - } - } + var creds = new DataAccessCredentials(CatalogueRepository); + var creds2 = new DataAccessCredentials(CatalogueRepository); - [Test] - public void GetCredentialsFromATableInfo() + try { + creds.Name = "Test"; + creds.SaveToDatabase(); - TableInfo tableInfo = new TableInfo(CatalogueRepository, "GetCredentialsFromATableInfo"); - tableInfo.Name = "My Exciting Table"; + //now create the association as Any + tableInfo.SetCredentials(creds, DataAccessContext.DataLoad); + tableInfo.SetCredentials(creds2, DataAccessContext.Any); - var creds = new DataAccessCredentials(CatalogueRepository); - try - { - creds.Name = "Test"; - creds.SaveToDatabase(); - tableInfo.SetCredentials(creds, DataAccessContext.InternalDataProcessing); - tableInfo.SaveToDatabase(); + Assert.AreEqual(creds, tableInfo.GetCredentialsIfExists(DataAccessContext.DataLoad)); + } + finally + { + tableInfo.DeleteInDatabase(); + creds.DeleteInDatabase(); + creds2.DeleteInDatabase(); + } + } - //Go via TableInfo and get credentials - DataAccessCredentials creds2 = (DataAccessCredentials)tableInfo.GetCredentialsIfExists(DataAccessContext.InternalDataProcessing); - Assert.AreEqual(creds2.Name, creds.Name); - } - finally - { - tableInfo.DeleteInDatabase(); - creds.DeleteInDatabase(); - } + [Test] + public void SaveAndReloadCredentials() + { + var originalCredentials = new DataAccessCredentials(CatalogueRepository, "bob"); + + try + { + originalCredentials.Name = "bob1"; + originalCredentials.Username = "user"; + originalCredentials.Password = "pass"; + originalCredentials.SaveToDatabase(); + + var newCopy = CatalogueRepository.GetObjectByID(originalCredentials.ID); + Assert.AreEqual(originalCredentials.Name, newCopy.Name); + Assert.AreEqual(originalCredentials.Username, newCopy.Username); + Assert.AreEqual(originalCredentials.Password, newCopy.Password); + + //test overridden Equals + Assert.AreEqual(originalCredentials, newCopy); + originalCredentials.Password = "fish"; + Assert.AreEqual(originalCredentials, newCopy); //they are still equal because IDs are the same + } + finally + { + originalCredentials.DeleteInDatabase(); } + } - [Test] - public void Create2TableInfosThatShareTheSameCredentialAndTestDeletingIt_ThrowsThatCredentialsHasDependencies() + [Test] + public void GetCredentialsFromATableInfo() + { + var tableInfo = new TableInfo(CatalogueRepository, "GetCredentialsFromATableInfo") { - //Get all TableInfos that share this credential - TableInfo tableInfo1 = new 
TableInfo(CatalogueRepository, "Dependency1"); - TableInfo tableInfo2 = new TableInfo(CatalogueRepository, "Dependency2"); - var creds = new DataAccessCredentials(CatalogueRepository, "bob"); + Name = "My Exciting Table" + }; - try - { - - tableInfo1.SetCredentials(creds,DataAccessContext.InternalDataProcessing); - tableInfo2.SetCredentials(creds, DataAccessContext.InternalDataProcessing); - tableInfo1.SaveToDatabase(); - tableInfo2.SaveToDatabase(); - - var ex = Assert.Throws(creds.DeleteInDatabase);//the bit that fails (because tables are there) - Assert.AreEqual("Cannot delete credentials bob because it is in use by one or more TableInfo objects(Dependency1,Dependency2)",ex.Message); - } - finally - { - tableInfo1.DeleteInDatabase();//will work - tableInfo2.DeleteInDatabase();//will work - creds.DeleteInDatabase();//will work - } + var creds = new DataAccessCredentials(CatalogueRepository); + try + { + creds.Name = "Test"; + creds.SaveToDatabase(); - + tableInfo.SetCredentials(creds, DataAccessContext.InternalDataProcessing); + tableInfo.SaveToDatabase(); + //Go via TableInfo and get credentials + var creds2 = + (DataAccessCredentials)tableInfo.GetCredentialsIfExists(DataAccessContext.InternalDataProcessing); + Assert.AreEqual(creds2.Name, creds.Name); } - - [Test] - public void GetAllUsersOfACredential() + finally { + tableInfo.DeleteInDatabase(); + creds.DeleteInDatabase(); + } + } - //Get all TableInfos that share this credential - TableInfo tableInfo1 = new TableInfo(CatalogueRepository, "Create2TableInfosThatShareTheSameCredentialAndTestDeletingIt1"); - TableInfo tableInfo2 = new TableInfo(CatalogueRepository, "Create2TableInfosThatShareTheSameCredentialAndTestDeletingIt2"); - var creds = new DataAccessCredentials(CatalogueRepository, "bob"); + [Test] + public void Create2TableInfosThatShareTheSameCredentialAndTestDeletingIt_ThrowsThatCredentialsHasDependencies() + { + //Get all TableInfos that share this credential + var tableInfo1 = new TableInfo(CatalogueRepository, "Dependency1"); + var tableInfo2 = new TableInfo(CatalogueRepository, "Dependency2"); + var creds = new DataAccessCredentials(CatalogueRepository, "bob"); + try + { tableInfo1.SetCredentials(creds, DataAccessContext.InternalDataProcessing); tableInfo2.SetCredentials(creds, DataAccessContext.InternalDataProcessing); tableInfo1.SaveToDatabase(); tableInfo2.SaveToDatabase(); + var ex = Assert.Throws(creds + .DeleteInDatabase); //the bit that fails (because tables are there) + Assert.AreEqual( + "Cannot delete credentials bob because it is in use by one or more TableInfo objects(Dependency1,Dependency2)", + ex.Message); + } + finally + { + tableInfo1.DeleteInDatabase(); //will work + tableInfo2.DeleteInDatabase(); //will work + creds.DeleteInDatabase(); //will work + } + } - ITableInfo[] TablesThatUseCredential = creds.GetAllTableInfosThatUseThis()[DataAccessContext.InternalDataProcessing].ToArray(); + [Test] + public void GetAllUsersOfACredential() + { + //Get all TableInfos that share this credential + var tableInfo1 = new TableInfo(CatalogueRepository, + "Create2TableInfosThatShareTheSameCredentialAndTestDeletingIt1"); + var tableInfo2 = new TableInfo(CatalogueRepository, + "Create2TableInfosThatShareTheSameCredentialAndTestDeletingIt2"); + var creds = new DataAccessCredentials(CatalogueRepository, "bob"); - Assert.Contains(tableInfo1, TablesThatUseCredential); - Assert.Contains(tableInfo2, TablesThatUseCredential); + tableInfo1.SetCredentials(creds, DataAccessContext.InternalDataProcessing); + 
tableInfo2.SetCredentials(creds, DataAccessContext.InternalDataProcessing); + tableInfo1.SaveToDatabase(); + tableInfo2.SaveToDatabase(); - tableInfo1.DeleteInDatabase(); - tableInfo2.DeleteInDatabase(); - creds.DeleteInDatabase(); - } - [Test] - public void GetConnectionStringFromCatalogueWhereOneTableInfoUsesACredentialsOverride() + var TablesThatUseCredential = + creds.GetAllTableInfosThatUseThis()[DataAccessContext.InternalDataProcessing].ToArray(); + + Assert.Contains(tableInfo1, TablesThatUseCredential); + Assert.Contains(tableInfo2, TablesThatUseCredential); + + tableInfo1.DeleteInDatabase(); + tableInfo2.DeleteInDatabase(); + creds.DeleteInDatabase(); + } + + [Test] + public void GetConnectionStringFromCatalogueWhereOneTableInfoUsesACredentialsOverride() + { + var c = new Catalogue(CatalogueRepository, + "GetConnectionStringFromCatalogueWhereOneTableInfoUsesACredentialsOverride"); + var ci = new CatalogueItem(CatalogueRepository, c, + "GetConnectionStringFromCatalogueWhereOneTableInfoUsesACredentialsOverride"); + var t = new TableInfo(CatalogueRepository, "Test"); + var col = new ColumnInfo(CatalogueRepository, "[mydatabase].[dbo].test.col", "varchar(10)", t); + + var extractionInformation = new ExtractionInformation(CatalogueRepository, ci, col, col.Name); + + DataAccessCredentials cred = null; + try { - Catalogue c = new Catalogue(CatalogueRepository, "GetConnectionStringFromCatalogueWhereOneTableInfoUsesACredentialsOverride"); - CatalogueItem ci = new CatalogueItem(CatalogueRepository, c,"GetConnectionStringFromCatalogueWhereOneTableInfoUsesACredentialsOverride"); - TableInfo t = new TableInfo(CatalogueRepository, "Test"); - ColumnInfo col = new ColumnInfo(CatalogueRepository, "[mydatabase].[dbo].test.col","varchar(10)", t); - - var extractionInformation = new ExtractionInformation(CatalogueRepository, ci, col, col.Name); - - DataAccessCredentials cred = null; - try - { - t.Server = "myserver"; - t.Database = "mydatabase"; - - cred = new DataAccessCredentials(CatalogueRepository, "bob"); - cred.Username = "bob"; - cred.Password = "pass"; - - Assert.AreNotEqual("pass",cred.Password); - Assert.AreEqual("pass", cred.GetDecryptedPassword()); - - - cred.SaveToDatabase(); - t.SetCredentials(cred, DataAccessContext.InternalDataProcessing); - t.SaveToDatabase(); - - var constr = (SqlConnectionStringBuilder)c.GetDistinctLiveDatabaseServer(DataAccessContext.InternalDataProcessing,false).Builder; - Assert.AreEqual("myserver",constr.DataSource); - Assert.False(constr.IntegratedSecurity); - Assert.AreEqual("bob", constr.UserID); - Assert.AreEqual("pass", constr.Password); - - - } - finally + t.Server = "myserver"; + t.Database = "mydatabase"; + + cred = new DataAccessCredentials(CatalogueRepository, "bob") { - t.DeleteInDatabase(); - if(cred != null) - cred.DeleteInDatabase(); - c.DeleteInDatabase();//no need to delete ci because of cascades - - } + Username = "bob", + Password = "pass" + }; + + Assert.AreNotEqual("pass", cred.Password); + Assert.AreEqual("pass", cred.GetDecryptedPassword()); + + cred.SaveToDatabase(); + t.SetCredentials(cred, DataAccessContext.InternalDataProcessing); + t.SaveToDatabase(); + + var constr = + (SqlConnectionStringBuilder)c + .GetDistinctLiveDatabaseServer(DataAccessContext.InternalDataProcessing, false).Builder; + Assert.AreEqual("myserver", constr.DataSource); + Assert.False(constr.IntegratedSecurity); + Assert.AreEqual("bob", constr.UserID); + Assert.AreEqual("pass", constr.Password); + } + finally + { + t.DeleteInDatabase(); + cred?.DeleteInDatabase(); + 
c.DeleteInDatabase(); //no need to delete ci because of cascades } + } - [Test] - public void Test_BlankPasswords() + [Test] + public void Test_BlankPasswords() + { + var creds = new DataAccessCredentials(CatalogueRepository, "blankpwdCreds") { - var creds = new DataAccessCredentials(CatalogueRepository, "blankpwdCreds"); - creds.Username = "Root"; - creds.Password = ""; + Username = "Root", + Password = "" + }; - creds.SaveToDatabase(); + creds.SaveToDatabase(); - var manager = new TableInfoCredentialsManager(CatalogueTableRepository); - Assert.AreEqual(creds,manager.GetCredentialByUsernameAndPasswordIfExists("Root",null)); - Assert.AreEqual(creds,manager.GetCredentialByUsernameAndPasswordIfExists("Root","")); - } + var manager = new TableInfoCredentialsManager(CatalogueTableRepository); + Assert.AreEqual(creds, manager.GetCredentialByUsernameAndPasswordIfExists("Root", null)); + Assert.AreEqual(creds, manager.GetCredentialByUsernameAndPasswordIfExists("Root", "")); + } - [Test] - public void Test_NoDuplicatePasswords() - { - var t1 = new TableInfo(CatalogueRepository, "tbl1"); - var t2 = new TableInfo(CatalogueRepository, "tbl2"); + [Test] + public void Test_NoDuplicatePasswords() + { + var t1 = new TableInfo(CatalogueRepository, "tbl1"); + var t2 = new TableInfo(CatalogueRepository, "tbl2"); - var credCount = CatalogueRepository.GetAllObjects().Length; + var credCount = CatalogueRepository.GetAllObjects().Length; - //if there is a username then we need to associate it with the TableInfo we just created - DataAccessCredentialsFactory credentialsFactory = new DataAccessCredentialsFactory(CatalogueRepository); - var cred = credentialsFactory.Create(t1, "blarg", "flarg",DataAccessContext.Any); - var cred2 = credentialsFactory.Create(t2, "blarg", "flarg", DataAccessContext.Any); + //if there is a username then we need to associate it with the TableInfo we just created + var credentialsFactory = new DataAccessCredentialsFactory(CatalogueRepository); + var cred = credentialsFactory.Create(t1, "blarg", "flarg", DataAccessContext.Any); + var cred2 = credentialsFactory.Create(t2, "blarg", "flarg", DataAccessContext.Any); - Assert.AreEqual(credCount + 1, CatalogueRepository.GetAllObjects().Length); - - Assert.AreEqual(cred, cred2, $"Expected {nameof(DataAccessCredentialsFactory)} to reuse existing credentials for both tables as they have the same username/password - e.g. bulk insert"); - } + Assert.AreEqual(credCount + 1, CatalogueRepository.GetAllObjects().Length); + + Assert.AreEqual(cred, cred2, + $"Expected {nameof(DataAccessCredentialsFactory)} to reuse existing credentials for both tables as they have the same username/password - e.g. 
bulk insert"); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/CrossDatabaseTriggerTests.cs b/Rdmp.Core.Tests/Curation/Integration/CrossDatabaseTriggerTests.cs index a13106ce10..0d0449ea3c 100644 --- a/Rdmp.Core.Tests/Curation/Integration/CrossDatabaseTriggerTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/CrossDatabaseTriggerTests.cs @@ -13,128 +13,135 @@ using Rdmp.Core.DataLoad.Triggers; using Rdmp.Core.DataLoad.Triggers.Exceptions; using Rdmp.Core.DataLoad.Triggers.Implementations; -using ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Checks; using Tests.Common; using TypeGuesser; -namespace Rdmp.Core.Tests.Curation.Integration +namespace Rdmp.Core.Tests.Curation.Integration; + +public class CrossDatabaseTriggerTests : DatabaseTests { - public class CrossDatabaseTriggerTests : DatabaseTests + [TestCase(DatabaseType.MicrosoftSQLServer)] + [TestCase(DatabaseType.MySql)] + public void TriggerImplementationTest(DatabaseType type) { - [TestCase(DatabaseType.MicrosoftSQLServer)] - [TestCase(DatabaseType.MySql)] - public void TriggerImplementationTest(DatabaseType type) + var db = GetCleanedServer(type); + var tbl = db.CreateTable("MyTable", new[] { - var db = GetCleanedServer(type); - var tbl = db.CreateTable("MyTable", new[] - { - new DatabaseColumnRequest("name", new DatabaseTypeRequest(typeof (string), 30),false), - new DatabaseColumnRequest("bubbles", new DatabaseTypeRequest(typeof (int))) - }); + new DatabaseColumnRequest("name", new DatabaseTypeRequest(typeof(string), 30), false), + new DatabaseColumnRequest("bubbles", new DatabaseTypeRequest(typeof(int))) + }); + + var factory = new TriggerImplementerFactory(type); + var implementer = factory.Create(tbl); + + Assert.AreEqual(TriggerStatus.Missing, implementer.GetTriggerStatus()); - var factory = new TriggerImplementerFactory(type); - var implementer = factory.Create(tbl); - - Assert.AreEqual(TriggerStatus.Missing,implementer.GetTriggerStatus()); + Assert.AreEqual(2, tbl.DiscoverColumns().Length); - Assert.AreEqual(2,tbl.DiscoverColumns().Length); + implementer = factory.Create(tbl); - implementer = factory.Create(tbl); + //no primary keys + Assert.Throws(() => implementer.CreateTrigger(ThrowImmediatelyCheckNotifier.Quiet)); - //no primary keys - Assert.Throws(()=>implementer.CreateTrigger(new ThrowImmediatelyCheckNotifier())); + tbl.CreatePrimaryKey(tbl.DiscoverColumn("name")); - tbl.CreatePrimaryKey(tbl.DiscoverColumn("name")); + implementer = factory.Create(tbl); - implementer = factory.Create(tbl); + implementer.CreateTrigger(ThrowImmediatelyCheckNotifier.Quiet); - implementer.CreateTrigger(new ThrowImmediatelyCheckNotifier()); + Assert.AreEqual(4, tbl.DiscoverColumns().Length); - Assert.AreEqual(4, tbl.DiscoverColumns().Length); + var archiveTable = tbl.Database.ExpectTable($"{tbl.GetRuntimeName()}_Archive"); + Assert.IsTrue(archiveTable.Exists()); - var archiveTable = tbl.Database.ExpectTable(tbl.GetRuntimeName() + "_Archive"); - Assert.IsTrue(archiveTable.Exists()); + Assert.AreEqual(7, archiveTable.DiscoverColumns().Length); - Assert.AreEqual(7,archiveTable.DiscoverColumns().Count()); + Assert.AreEqual(1, archiveTable.DiscoverColumns().Count(c => c.GetRuntimeName().Equals("name"))); + Assert.AreEqual(1, archiveTable.DiscoverColumns().Count(c => c.GetRuntimeName().Equals("bubbles"))); + Assert.AreEqual(1, + archiveTable.DiscoverColumns().Count(c => + c.GetRuntimeName().Equals("hic_dataLoadrunID", StringComparison.CurrentCultureIgnoreCase))); + Assert.AreEqual(1, + 
archiveTable.DiscoverColumns().Count(c => + c.GetRuntimeName().Equals("hic_validFrom", StringComparison.CurrentCultureIgnoreCase))); + Assert.AreEqual(1, + archiveTable.DiscoverColumns().Count(c => + c.GetRuntimeName().Equals("hic_validTo", StringComparison.CurrentCultureIgnoreCase))); + Assert.AreEqual(1, + archiveTable.DiscoverColumns().Count(c => + c.GetRuntimeName().Equals("hic_userID", StringComparison.CurrentCultureIgnoreCase))); + Assert.AreEqual(1, archiveTable.DiscoverColumns().Count(c => c.GetRuntimeName().Equals("hic_status"))); - Assert.AreEqual(1, archiveTable.DiscoverColumns().Count(c => c.GetRuntimeName().Equals("name"))); - Assert.AreEqual(1, archiveTable.DiscoverColumns().Count(c => c.GetRuntimeName().Equals("bubbles"))); - Assert.AreEqual(1, archiveTable.DiscoverColumns().Count(c => c.GetRuntimeName().Equals("hic_dataLoadrunID",StringComparison.CurrentCultureIgnoreCase))); - Assert.AreEqual(1, archiveTable.DiscoverColumns().Count(c => c.GetRuntimeName().Equals("hic_validFrom",StringComparison.CurrentCultureIgnoreCase))); - Assert.AreEqual(1, archiveTable.DiscoverColumns().Count(c => c.GetRuntimeName().Equals("hic_validTo",StringComparison.CurrentCultureIgnoreCase))); - Assert.AreEqual(1, archiveTable.DiscoverColumns().Count(c => c.GetRuntimeName().Equals("hic_userID",StringComparison.CurrentCultureIgnoreCase))); - Assert.AreEqual(1, archiveTable.DiscoverColumns().Count(c => c.GetRuntimeName().Equals("hic_status"))); - - //is the trigger now existing - Assert.AreEqual(TriggerStatus.Enabled, implementer.GetTriggerStatus()); + //is the trigger now existing + Assert.AreEqual(TriggerStatus.Enabled, implementer.GetTriggerStatus()); - //does it function as expected - using(var con = tbl.Database.Server.GetConnection()) - { - con.Open(); - var cmd = tbl.Database.Server.GetCommand(string.Format("INSERT INTO {0}(name,bubbles) VALUES('bob',1)",tbl.GetRuntimeName()),con); - cmd.ExecuteNonQuery(); - - Assert.AreEqual(1,tbl.GetRowCount()); - Assert.AreEqual(0,archiveTable.GetRowCount()); + //does it function as expected + using (var con = tbl.Database.Server.GetConnection()) + { + con.Open(); + var cmd = tbl.Database.Server.GetCommand( + $"INSERT INTO {tbl.GetRuntimeName()}(name,bubbles) VALUES('bob',1)", con); + cmd.ExecuteNonQuery(); - cmd = tbl.Database.Server.GetCommand(string.Format("UPDATE {0} set bubbles=2",tbl.GetRuntimeName()), con); - cmd.ExecuteNonQuery(); - - Assert.AreEqual(1, tbl.GetRowCount()); - Assert.AreEqual(1, archiveTable.GetRowCount()); + Assert.AreEqual(1, tbl.GetRowCount()); + Assert.AreEqual(0, archiveTable.GetRowCount()); - var archive = archiveTable.GetDataTable(); - var dr = archive.Rows.Cast().Single(); - - Assert.AreEqual(((DateTime)dr["hic_validTo"]).Date,DateTime.Now.Date); - } - - //do the strict check too - Assert.IsTrue(implementer.CheckUpdateTriggerIsEnabledAndHasExpectedBody()); + cmd = tbl.Database.Server.GetCommand($"UPDATE {tbl.GetRuntimeName()} set bubbles=2", con); + cmd.ExecuteNonQuery(); - tbl.AddColumn("amagad",new DatabaseTypeRequest(typeof(float),null,new DecimalSize(2,2)),true,30); - implementer = factory.Create(tbl); + Assert.AreEqual(1, tbl.GetRowCount()); + Assert.AreEqual(1, archiveTable.GetRowCount()); - Assert.Throws(() => implementer.CheckUpdateTriggerIsEnabledAndHasExpectedBody()); + var archive = archiveTable.GetDataTable(); + var dr = archive.Rows.Cast().Single(); - archiveTable.AddColumn("amagad", new DatabaseTypeRequest(typeof(float), null, new DecimalSize(2, 2)), true, 30); + 
Assert.AreEqual(((DateTime)dr["hic_validTo"]).Date, DateTime.Now.Date); + } - var checks = new TriggerChecks(tbl); - checks.Check(new AcceptAllCheckNotifier()); + //do the strict check too + Assert.IsTrue(implementer.CheckUpdateTriggerIsEnabledAndHasExpectedBody()); - Assert.IsTrue(implementer.CheckUpdateTriggerIsEnabledAndHasExpectedBody()); + tbl.AddColumn("amagad", new DatabaseTypeRequest(typeof(float), null, new DecimalSize(2, 2)), true, 30); + implementer = factory.Create(tbl); - - //does it function as expected - using (var con = tbl.Database.Server.GetConnection()) - { - con.Open(); + Assert.Throws(() => + implementer.CheckUpdateTriggerIsEnabledAndHasExpectedBody()); - Assert.AreEqual(1, tbl.GetRowCount()); - Assert.AreEqual(1, archiveTable.GetRowCount()); + archiveTable.AddColumn("amagad", new DatabaseTypeRequest(typeof(float), null, new DecimalSize(2, 2)), true, 30); - var cmd = tbl.Database.Server.GetCommand(string.Format("UPDATE {0} set amagad=1.0", tbl.GetRuntimeName()), con); - cmd.ExecuteNonQuery(); + var checks = new TriggerChecks(tbl); + checks.Check(new AcceptAllCheckNotifier()); - cmd = tbl.Database.Server.GetCommand(string.Format("UPDATE {0} set amagad=.09", tbl.GetRuntimeName()), con); - cmd.ExecuteNonQuery(); + Assert.IsTrue(implementer.CheckUpdateTriggerIsEnabledAndHasExpectedBody()); - Assert.AreEqual(1, tbl.GetRowCount()); - Assert.AreEqual(3, archiveTable.GetRowCount()); - var archive = archiveTable.GetDataTable(); - Assert.AreEqual(1,archive.Rows.Cast().Count(r=>Equals(r["amagad"],(decimal)1.00))); - Assert.AreEqual(2, archive.Rows.Cast().Count(r => r["amagad"] == DBNull.Value)); - } + //does it function as expected + using (var con = tbl.Database.Server.GetConnection()) + { + con.Open(); - string problems; - string worked; - implementer.DropTrigger(out problems,out worked); + Assert.AreEqual(1, tbl.GetRowCount()); + Assert.AreEqual(1, archiveTable.GetRowCount()); - Assert.IsTrue(string.IsNullOrEmpty(problems)); + var cmd = tbl.Database.Server.GetCommand($"UPDATE {tbl.GetRuntimeName()} set amagad=1.0", con); + cmd.ExecuteNonQuery(); - Assert.AreEqual(TriggerStatus.Missing, implementer.GetTriggerStatus()); + cmd = tbl.Database.Server.GetCommand($"UPDATE {tbl.GetRuntimeName()} set amagad=.09", con); + cmd.ExecuteNonQuery(); + + Assert.AreEqual(1, tbl.GetRowCount()); + Assert.AreEqual(3, archiveTable.GetRowCount()); + + var archive = archiveTable.GetDataTable(); + Assert.AreEqual(1, archive.Rows.Cast().Count(r => Equals(r["amagad"], (decimal)1.00))); + Assert.AreEqual(2, archive.Rows.Cast().Count(r => r["amagad"] == DBNull.Value)); } + + implementer.DropTrigger(out var problems, out _); + + Assert.IsTrue(string.IsNullOrEmpty(problems)); + + Assert.AreEqual(TriggerStatus.Missing, implementer.GetTriggerStatus()); } } \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/DataAccess/SuperMultiThreadedVolumeAccess.cs b/Rdmp.Core.Tests/Curation/Integration/DataAccess/SuperMultiThreadedVolumeAccess.cs index 5e192776f5..a04e7d098e 100644 --- a/Rdmp.Core.Tests/Curation/Integration/DataAccess/SuperMultiThreadedVolumeAccess.cs +++ b/Rdmp.Core.Tests/Curation/Integration/DataAccess/SuperMultiThreadedVolumeAccess.cs @@ -10,158 +10,150 @@ using System.Linq; using System.Threading; using FAnsi.Connections; -using MapsDirectlyToDatabaseTable; -using MapsDirectlyToDatabaseTable.Revertable; using NUnit.Framework; using Rdmp.Core.Curation.Data; -using ReusableLibraryCode; +using Rdmp.Core.MapsDirectlyToDatabaseTable; +using 
Rdmp.Core.MapsDirectlyToDatabaseTable.Revertable; +using Rdmp.Core.ReusableLibraryCode; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Integration.DataAccess +namespace Rdmp.Core.Tests.Curation.Integration.DataAccess; + +public class SuperMultiThreadedVolumeAccess : DatabaseTests { - public class SuperMultiThreadedVolumeAccess:DatabaseTests + [OneTimeSetUp] + protected override void SetUp() { - int _timeoutBefore; + base.SetUp(); - [SetUp] - protected override void SetUp() - { - base.SetUp(); - - _timeoutBefore = DatabaseCommandHelper.GlobalTimeout; - DatabaseCommandHelper.GlobalTimeout = 60; - DeleteRemants(); - } + var timeoutBefore = DatabaseCommandHelper.GlobalTimeout; + DatabaseCommandHelper.GlobalTimeout = 60; - private void DeleteRemants() - { - foreach ( - Catalogue catalogue in - CatalogueRepository.GetAllObjects() - .Where(c => c.Name.StartsWith("SuperMultiThreadedTestCatalogue"))) - catalogue.DeleteInDatabase(); - } + foreach ( + var catalogue in + CatalogueRepository.GetAllObjects() + .Where(c => c.Name.StartsWith("SuperMultiThreadedTestCatalogue", StringComparison.Ordinal))) + catalogue.DeleteInDatabase(); + DatabaseCommandHelper.GlobalTimeout = timeoutBefore; + } - [Test] - [TestCase(true)] - [TestCase(false)] - public void SingleThreadedBulkCatalogueCreation(bool useTransactions) - { - IManagedConnection c= null; + [OneTimeTearDown] + protected void DeleteRemnants() + { + foreach ( + var catalogue in + CatalogueRepository.GetAllObjects() + .Where(c => c.Name.StartsWith("SuperMultiThreadedTestCatalogue", StringComparison.Ordinal))) + catalogue.DeleteInDatabase(); + } - if (CatalogueRepository is not TableRepository && useTransactions) - Assert.Inconclusive("YamlRepository does not support transactions so don't test this"); + [Test] + [TestCase(true)] + [TestCase(false)] + public void SingleThreadedBulkCatalogueCreation(bool useTransactions) + { + IManagedConnection c = null; - if (useTransactions) - c = CatalogueTableRepository.BeginNewTransactedConnection(); - using (c) - { - //create lots of catalogues - for (int i = 0; i < 30; i++) - { - var cata = new Catalogue(CatalogueRepository, "SuperMultiThreadedTestCatalogue" + Guid.NewGuid()); - var copy = CatalogueRepository.GetObjectByID(cata.ID); + if (CatalogueRepository is not TableRepository && useTransactions) + Assert.Inconclusive("YamlRepository does not support transactions so don't test this"); - copy.Description = "fish"; - Assert.IsTrue(copy.HasLocalChanges().Evaluation == ChangeDescription.DatabaseCopyDifferent); + if (useTransactions) c = CatalogueTableRepository.BeginNewTransactedConnection(); - copy.SaveToDatabase(); - Assert.IsTrue(copy.HasLocalChanges().Evaluation == ChangeDescription.NoChanges); - } + using (c) + { + //create lots of catalogues + for (var i = 0; i < 30; i++) + { + var cata = new Catalogue(CatalogueRepository, $"SuperMultiThreadedTestCatalogue{Guid.NewGuid()}"); + var copy = CatalogueRepository.GetObjectByID(cata.ID); - //now fetch them out of database lots of times - for (int i = 0; i < 100; i++) - CatalogueRepository.GetAllObjects(); + copy.Description = "fish"; + Assert.IsTrue(copy.HasLocalChanges().Evaluation == ChangeDescription.DatabaseCopyDifferent); - if (useTransactions) - CatalogueTableRepository.EndTransactedConnection(false); + copy.SaveToDatabase(); + Assert.IsTrue(copy.HasLocalChanges().Evaluation == ChangeDescription.NoChanges); } - - - } + //now fetch them out of database lots of times + for (var i = 0; i < 100; i++) + CatalogueRepository.GetAllObjects(); - [Test] - 
[TestCase(true)] - [TestCase(false)] - public void MultiThreaded(bool useTransactions) - { - FireMultiThreaded(SingleThreadedBulkCatalogueCreation, 5, useTransactions); + if (useTransactions) + CatalogueTableRepository.EndTransactedConnection(false); } + } - [Test] - [TestCase(true)] - [TestCase(false)] - public void SimpleCaseSingleThreaded(bool useTransaction) - { + [Test] + [TestCase(true)] + [TestCase(false)] + public void MultiThreaded(bool useTransactions) + { + FireMultiThreaded(SingleThreadedBulkCatalogueCreation, 5, useTransactions); + } + [Test] + [TestCase(true)] + [TestCase(false)] + public void SimpleCaseSingleThreaded(bool useTransaction) + { + using var con = useTransaction + ? CatalogueTableRepository.BeginNewTransactedConnection() + : CatalogueTableRepository.GetConnection(); + Assert.AreEqual(ConnectionState.Open, con.Connection.State); + Thread.Sleep(1000); + + if (useTransaction) + CatalogueTableRepository.EndTransactedConnection(false); + else + con.Connection.Close(); + + Assert.AreEqual(ConnectionState.Closed, con.Connection.State); + } - using ( - var con = useTransaction - ? CatalogueTableRepository.BeginNewTransactedConnection() - : CatalogueTableRepository.GetConnection()) - { + [Test] + [TestCase(true)] + [TestCase(false)] + public void SimpleCaseMultiThreaded(bool useTransactions) + { + FireMultiThreaded(SimpleCaseSingleThreaded, 50, useTransactions); + } - Assert.AreEqual(ConnectionState.Open, con.Connection.State); - Thread.Sleep(1000); + private void FireMultiThreaded(Action method, int numberToFire, bool useTransactions) + { + if (CatalogueRepository is not TableRepository) + Assert.Inconclusive("We don't have to test this for yaml repos"); - if (useTransaction) - CatalogueTableRepository.EndTransactedConnection(false); - else - con.Connection.Close(); - Assert.AreEqual(ConnectionState.Closed, con.Connection.State); - } + var exes = new List(); - - } + var ts = new List(); - [Test] - [TestCase(true)] - [TestCase(false)] - public void SimpleCaseMultiThreaded(bool useTransactions) + for (var i = 0; i < numberToFire; i++) { - FireMultiThreaded(SimpleCaseSingleThreaded, 50, useTransactions); + var i1 = i; + ts.Add(new Thread(() => + { + try + { + method(useTransactions && i1 == 0); + } + catch (Exception ex) + { + exes.Add(ex); + } + })); } - private void FireMultiThreaded(Action method, int numberToFire, bool useTransactions) - { - if (CatalogueRepository is not TableRepository) - Assert.Inconclusive("We dont have to test this for yaml repos"); - - - List exes = new List(); - + foreach (var thread in ts) + thread.Start(); - List ts = new List(); + while (ts.Any(t => t.IsAlive)) + ts.FirstOrDefault(t => t.IsAlive)?.Join(100); - for (int i = 0; i < numberToFire; i++) - { - int i1 = i; - ts.Add(new Thread(() => { - try - { - - method(useTransactions && i1 == 0); - } - catch (Exception ex) - { - exes.Add(ex); - } - })); - } - - foreach (Thread thread in ts) - thread.Start(); - - while(ts.Any(t=>t.IsAlive)) - Thread.Sleep(100); - - Assert.IsEmpty(exes); - } + Assert.IsEmpty(exes); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/DataAccess/TestDataAccess.cs b/Rdmp.Core.Tests/Curation/Integration/DataAccess/TestDataAccess.cs index fb86429cdb..62cebe110f 100644 --- a/Rdmp.Core.Tests/Curation/Integration/DataAccess/TestDataAccess.cs +++ b/Rdmp.Core.Tests/Curation/Integration/DataAccess/TestDataAccess.cs @@ -14,282 +14,286 @@ using FAnsi.Implementations.MicrosoftSQL; using NUnit.Framework; using Rdmp.Core.Curation.Data; +using 
Rdmp.Core.MapsDirectlyToDatabaseTable; using Rdmp.Core.Repositories; using Rdmp.Core.Repositories.Managers; -using ReusableLibraryCode; -using ReusableLibraryCode.DataAccess; -using ReusableLibraryCode.Exceptions; +using Rdmp.Core.ReusableLibraryCode; +using Rdmp.Core.ReusableLibraryCode.DataAccess; +using Rdmp.Core.ReusableLibraryCode.Exceptions; using Tests.Common; -using MapsDirectlyToDatabaseTable; -namespace Rdmp.Core.Tests.Curation.Integration.DataAccess +namespace Rdmp.Core.Tests.Curation.Integration.DataAccess; + +public class TestDataAccess : DatabaseTests { - public class TestDataAccess:DatabaseTests - { - - #region Distinct Connection String (from Collection tests - Failing) + #region Distinct Connection String (from Collection tests - Failing) - [Test] - public void TestDistinctCredentials_PasswordMismatch() + [Test] + public void TestDistinctCredentials_PasswordMismatch() + { + var testPoints = new List { - List testPoints = new List(); - - testPoints.Add(new TestAccessPoint("frank","bob","username","mypas")); - testPoints.Add(new TestAccessPoint("frank","bob","username","mydifferentPass")); - - //call this - var ex = Assert.Throws(()=>DataAccessPortal.GetInstance().ExpectDistinctServer(testPoints.ToArray(), DataAccessContext.InternalDataProcessing, true)); - StringAssert.Contains("collection could not agree on a single Password",ex.Message); - - } + new("frank", "bob", "username", "mypas"), + new("frank", "bob", "username", "mydifferentPass") + }; + + //call this + var ex = Assert.Throws(() => + DataAccessPortal.ExpectDistinctServer(testPoints.ToArray(), DataAccessContext.InternalDataProcessing, + true)); + StringAssert.Contains("collection could not agree on a single Password", ex.Message); + } - [Test] - public void TestDistinctCredentials_UsernamePasswordAreNull() + [Test] + public void TestDistinctCredentials_UsernamePasswordAreNull() + { + var testPoints = new List { - List testPoints = new List(); - - testPoints.Add(new TestAccessPoint("frank", "bob", null, null)); - testPoints.Add(new TestAccessPoint("frank", "bob", "username", "mydifferentPass")); - - //call this - var ex = Assert.Throws(()=>DataAccessPortal.GetInstance().ExpectDistinctServer(testPoints.ToArray(), DataAccessContext.InternalDataProcessing, true)); - StringAssert.Contains("collection could not agree whether to use Credentials",ex.Message); - - } + new("frank", "bob", null, null), + new("frank", "bob", "username", "mydifferentPass") + }; + + //call this + var ex = Assert.Throws(() => + DataAccessPortal.ExpectDistinctServer(testPoints.ToArray(), DataAccessContext.InternalDataProcessing, + true)); + StringAssert.Contains("collection could not agree whether to use Credentials", ex.Message); + } - [Test] - public void TestDistinctCredentials_UsernameMismatch() + [Test] + public void TestDistinctCredentials_UsernameMismatch() + { + var testPoints = new List { - List testPoints = new List(); + new("frank", "bob", "usernameasdasd", "mydifferentpass"), + new("frank", "bob", "username", "mydifferentPass") + }; - testPoints.Add(new TestAccessPoint("frank", "bob", "usernameasdasd", "mydifferentpass")); - testPoints.Add(new TestAccessPoint("frank", "bob", "username", "mydifferentPass")); + //call this - //call this - - var ex = Assert.Throws(()=>DataAccessPortal.GetInstance().ExpectDistinctServer(testPoints.ToArray(), DataAccessContext.InternalDataProcessing, true)); - StringAssert.Contains("collection could not agree on a single Username",ex.Message); - - } + var ex = Assert.Throws(() => + 
DataAccessPortal.ExpectDistinctServer(testPoints.ToArray(), DataAccessContext.InternalDataProcessing, + true)); + StringAssert.Contains("collection could not agree on a single Username", ex.Message); + } - [Test] - public void TestDistinctCredentials_ServerMixedCapitalization_Allowed() + [Test] + public void TestDistinctCredentials_ServerMixedCapitalization_Allowed() + { + var testPoints = new List { - List testPoints = new List(); - - testPoints.Add(new TestAccessPoint("frank", "bob", null,null)); - testPoints.Add(new TestAccessPoint("FRANK", "bob", null, null)); + new("frank", "bob", null, null), + new("FRANK", "bob", null, null) + }; - var server = DataAccessPortal.GetInstance().ExpectDistinctServer(testPoints.ToArray(), DataAccessContext.InternalDataProcessing, true); - Assert.AreEqual("frank", server.Name); - } + var server = + DataAccessPortal.ExpectDistinctServer(testPoints.ToArray(), DataAccessContext.InternalDataProcessing, true); + Assert.AreEqual("frank", server.Name); + } - [Test] - public void TestDistinctCredentials_DatabaseMixedCapitalization_NotAllowed() + [Test] + public void TestDistinctCredentials_DatabaseMixedCapitalization_NotAllowed() + { + var testPoints = new List { - List testPoints = new List(); - - testPoints.Add(new TestAccessPoint("frank", "bob", null, null)); - testPoints.Add(new TestAccessPoint("frank", "BOB", null, null)); + new("frank", "bob", null, null), + new("frank", "BOB", null, null) + }; + + var ex = Assert.Throws(() => + DataAccessPortal.ExpectDistinctServer(testPoints.ToArray(), DataAccessContext.InternalDataProcessing, + true)); + StringAssert.Contains( + "All data access points must be into the same database, access points 'frankbob' and 'frankBOB' are into different databases", + ex.Message); + } - var ex = Assert.Throws(() => DataAccessPortal.GetInstance().ExpectDistinctServer(testPoints.ToArray(), DataAccessContext.InternalDataProcessing, true)); - StringAssert.Contains("All data access points must be into the same database, access points 'frankbob' and 'frankBOB' are into different databases", ex.Message); - } - #endregion + #endregion - #region Distinct Connection String (from Collection tests - Passing) + #region Distinct Connection String (from Collection tests - Passing) - [Test] - public void TestDistinctCredentials_WrappedDatabaseName() + [Test] + public void TestDistinctCredentials_WrappedDatabaseName() + { + var testPoints = new List { - List testPoints = new List(); + new("frank", "[bob's Database]", "username", "mypas"), + new("frank", "bob's Database", "username", "mypas") + }; - testPoints.Add(new TestAccessPoint("frank", "[bob's Database]", "username", "mypas")); - testPoints.Add(new TestAccessPoint("frank", "bob's Database", "username", "mypas")); - //call this - var result = DataAccessPortal.GetInstance().ExpectDistinctServer(testPoints.ToArray(), DataAccessContext.InternalDataProcessing, true); + //call this + var result = + DataAccessPortal.ExpectDistinctServer(testPoints.ToArray(), DataAccessContext.InternalDataProcessing, true); - //test result - Assert.AreEqual("bob's Database", result.Builder["Initial Catalog"]); - } + //test result + Assert.AreEqual("bob's Database", result.Builder["Initial Catalog"]); + } - [Test] - public void TestDistinctCredentials_PasswordMatch() + [Test] + public void TestDistinctCredentials_PasswordMatch() + { + var testPoints = new List { - List testPoints = new List(); + new("frank", "bob", "username", "mypas"), + new("frank", "bob", "username", "mypas") + }; - testPoints.Add(new 
TestAccessPoint("frank", "bob", "username", "mypas")); - testPoints.Add(new TestAccessPoint("frank", "bob", "username", "mypas")); + //call this + var result = + DataAccessPortal.ExpectDistinctServer(testPoints.ToArray(), DataAccessContext.InternalDataProcessing, true); - //call this - var result = DataAccessPortal.GetInstance().ExpectDistinctServer(testPoints.ToArray(), DataAccessContext.InternalDataProcessing, true); - - //test result - Assert.AreEqual("mypas", result.Builder["Password"]); + //test result + Assert.AreEqual("mypas", result.Builder["Password"]); + } - } - #endregion + #endregion - [Test] - public void AsyncTest() - { - if (CatalogueRepository is not TableRepository) - Assert.Inconclusive("Test only applies to database repositories"); + [Test] + public void AsyncTest() + { + if (CatalogueRepository is not TableRepository) + Assert.Inconclusive("Test only applies to database repositories"); - List threads = new List(); + var threads = new List(); - for (int i = 0; i < 30; i++) - threads.Add(new Thread(MessWithCatalogue)); + for (var i = 0; i < 30; i++) + threads.Add(new Thread(MessWithCatalogue)); - foreach (Thread t in threads) - t.Start(); + foreach (var t in threads) + t.Start(); - while(threads.Any(t=>t.ThreadState != ThreadState.Stopped)) - Thread.Sleep(100); + while (threads.Any(t => t.ThreadState != ThreadState.Stopped)) + Thread.Sleep(100); - for (int index = 0; index < asyncExceptions.Count; index++) - { - Console.WriteLine("Exception " + index); - Exception asyncException = asyncExceptions[index]; - Console.WriteLine(ExceptionHelper.ExceptionToListOfInnerMessages(asyncException, true)); - } - Assert.IsEmpty(asyncExceptions); + for (var index = 0; index < asyncExceptions.Count; index++) + { + Console.WriteLine($"Exception {index}"); + var asyncException = asyncExceptions[index]; + Console.WriteLine(ExceptionHelper.ExceptionToListOfInnerMessages(asyncException, true)); } - private List asyncExceptions = new List(); + Assert.IsEmpty(asyncExceptions); + } + + private List asyncExceptions = new(); - private void MessWithCatalogue() + private void MessWithCatalogue() + { + try { - try - { - var repository = new CatalogueRepository(CatalogueTableRepository.ConnectionStringBuilder); - var cata = new Catalogue(repository, "bob"); - cata.Name = "Fuss"; - cata.SaveToDatabase(); - cata.DeleteInDatabase(); - } - catch (Exception ex) + var repository = new CatalogueRepository(CatalogueTableRepository.ConnectionStringBuilder); + var cata = new Catalogue(repository, "bob") { - asyncExceptions.Add(ex); - } + Name = "Fuss" + }; + cata.SaveToDatabase(); + cata.DeleteInDatabase(); + } + catch (Exception ex) + { + asyncExceptions.Add(ex); } + } + + /// + /// Real life test case where TableInfo is the IDataAccessPoint not just the test class + /// + [Test] + public void TestGettingConnectionStrings() + { + foreach (var tbl in CatalogueRepository.GetAllObjects() + .Where(table => table.Name.ToLower().Equals("bob"))) + tbl.DeleteInDatabase(); + + foreach (var c in CatalogueRepository.GetAllObjects() + .Where(cred => cred.Name.ToLower().Equals("bob"))) + c.DeleteInDatabase(); - /// - /// Real life test case where TableInfo is the IDataAccessPoint not just the test class - /// - [Test] - public void TestGettingConnectionStrings() + //test it with TableInfos + var t = new TableInfo(CatalogueRepository, "Bob"); + try { - foreach (TableInfo tbl in CatalogueRepository.GetAllObjects().Where(table => table.Name.ToLower().Equals("bob"))) - tbl.DeleteInDatabase(); - - foreach (var c in 
CatalogueRepository.GetAllObjects().Where(cred=>cred.Name.ToLower().Equals("bob"))) - c.DeleteInDatabase(); - - //test it with TableInfos - TableInfo t = new TableInfo(CatalogueRepository, "Bob"); + t.Server = "fish"; + t.Database = "bobsDatabase"; + t.SaveToDatabase(); + + //t has no credentials + var server = DataAccessPortal.ExpectServer(t, DataAccessContext.InternalDataProcessing); + + Assert.AreEqual(typeof(SqlConnectionStringBuilder), server.Builder.GetType()); + Assert.AreEqual("fish", ((SqlConnectionStringBuilder)server.Builder).DataSource); + Assert.AreEqual("bobsDatabase", ((SqlConnectionStringBuilder)server.Builder).InitialCatalog); + Assert.AreEqual(true, ((SqlConnectionStringBuilder)server.Builder).IntegratedSecurity); + + var creds = new DataAccessCredentials(CatalogueRepository, "Bob"); try { - t.Server = "fish"; - t.Database = "bobsDatabase"; - t.SaveToDatabase(); + t.SetCredentials(creds, DataAccessContext.InternalDataProcessing, true); + creds.Username = "frank"; + creds.Password = "bobsPassword"; + creds.SaveToDatabase(); - //t has no credentials - var server = DataAccessPortal.GetInstance().ExpectServer(t, DataAccessContext.InternalDataProcessing); + //credentials are cached + t.ClearAllInjections(); + + ////t has some credentials now + server = DataAccessPortal.ExpectServer(t, DataAccessContext.InternalDataProcessing); Assert.AreEqual(typeof(SqlConnectionStringBuilder), server.Builder.GetType()); Assert.AreEqual("fish", ((SqlConnectionStringBuilder)server.Builder).DataSource); Assert.AreEqual("bobsDatabase", ((SqlConnectionStringBuilder)server.Builder).InitialCatalog); - Assert.AreEqual(true, ((SqlConnectionStringBuilder)server.Builder).IntegratedSecurity); - - var creds = new DataAccessCredentials(CatalogueRepository, "Bob"); - try - { - t.SetCredentials(creds, DataAccessContext.InternalDataProcessing, true); - creds.Username = "frank"; - creds.Password = "bobsPassword"; - creds.SaveToDatabase(); - - //credentials are cached - t.ClearAllInjections(); - - ////t has some credentials now - server = DataAccessPortal.GetInstance().ExpectServer(t, DataAccessContext.InternalDataProcessing); - - Assert.AreEqual(typeof(SqlConnectionStringBuilder), server.Builder.GetType()); - Assert.AreEqual("fish", ((SqlConnectionStringBuilder)server.Builder).DataSource); - Assert.AreEqual("bobsDatabase", ((SqlConnectionStringBuilder)server.Builder).InitialCatalog); - Assert.AreEqual("frank", ((SqlConnectionStringBuilder)server.Builder).UserID); - Assert.AreEqual("bobsPassword", ((SqlConnectionStringBuilder)server.Builder).Password); - Assert.AreEqual(false, ((SqlConnectionStringBuilder)server.Builder).IntegratedSecurity); - } - finally - { - var linker = new TableInfoCredentialsManager(CatalogueTableRepository); - linker.BreakAllLinksBetween(creds, t); - creds.DeleteInDatabase(); - } - + Assert.AreEqual("frank", ((SqlConnectionStringBuilder)server.Builder).UserID); + Assert.AreEqual("bobsPassword", ((SqlConnectionStringBuilder)server.Builder).Password); + Assert.AreEqual(false, ((SqlConnectionStringBuilder)server.Builder).IntegratedSecurity); } finally { - t.DeleteInDatabase(); - + var linker = new TableInfoCredentialsManager(CatalogueTableRepository); + linker.BreakAllLinksBetween(creds, t); + creds.DeleteInDatabase(); } } - - - internal class TestAccessPoint:IDataAccessPoint,IDataAccessCredentials + finally { - public string Server { get; set; } - public string Database { get; set; } - public DatabaseType DatabaseType { get; set; } + t.DeleteInDatabase(); + } + } - public string Username { 
get; set; } - public string Password { get; set; } - public TestAccessPoint(string server, string database, string username, string password) - { - Server = server; - Database = database; - Username = username; - Password = password; - } + internal class TestAccessPoint : IDataAccessPoint, IDataAccessCredentials + { + public string Server { get; set; } + public string Database { get; set; } + public DatabaseType DatabaseType { get; set; } - public IDataAccessCredentials GetCredentialsIfExists(DataAccessContext context) - { - if (Username != null) - return this; + public string Username { get; set; } + public string Password { get; set; } - return null; - } + public TestAccessPoint(string server, string database, string username, string password) + { + Server = server; + Database = database; + Username = username; + Password = password; + } - - public string GetDecryptedPassword() - { - return Password?? ""; - } + public IDataAccessCredentials GetCredentialsIfExists(DataAccessContext context) => + Username != null ? this : (IDataAccessCredentials)null; - public override string ToString() - { - return Server + Database; - } - public IQuerySyntaxHelper GetQuerySyntaxHelper() - { - return new MicrosoftQuerySyntaxHelper(); - } + public string GetDecryptedPassword() => Password ?? ""; - public bool DiscoverExistence(DataAccessContext context, out string reason) - { - reason = "TestDataAccess never finds anything, it's a test"; - return false; - } - } + public override string ToString() => Server + Database; + public IQuerySyntaxHelper GetQuerySyntaxHelper() => MicrosoftQuerySyntaxHelper.Instance; + public bool DiscoverExistence(DataAccessContext context, out string reason) + { + reason = "TestDataAccess never finds anything, it's a test"; + return false; + } } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/Dependencies/DependencyTests.cs b/Rdmp.Core.Tests/Curation/Integration/Dependencies/DependencyTests.cs index bcd15a2ebd..eb17a931b2 100644 --- a/Rdmp.Core.Tests/Curation/Integration/Dependencies/DependencyTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/Dependencies/DependencyTests.cs @@ -8,43 +8,41 @@ using Rdmp.Core.Curation.Data; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Integration.Dependencies +namespace Rdmp.Core.Tests.Curation.Integration.Dependencies; + +public class DependencyTests : DatabaseTests { - public class DependencyTests : DatabaseTests + [Test] + public void ExtractionInformationTriangle() { - [Test] - public void ExtractionInformationTriangle() - { - var t = new TableInfo(CatalogueRepository, "t"); - var col = new ColumnInfo(CatalogueRepository, "mycol", "varchar(10)", t); - - var cat = new Catalogue(CatalogueRepository, "MyCat"); - var ci = new CatalogueItem(CatalogueRepository, cat, "myci"); + var t = new TableInfo(CatalogueRepository, "t"); + var col = new ColumnInfo(CatalogueRepository, "mycol", "varchar(10)", t); + var cat = new Catalogue(CatalogueRepository, "MyCat"); + var ci = new CatalogueItem(CatalogueRepository, cat, "myci"); - try - { - //col depends on tr - Assert.Contains(t,col.GetObjectsThisDependsOn()); - Assert.Contains(col,t.GetObjectsDependingOnThis()); - //catalogue depends on catalogue items existing (slightly counter intuitive but think of it as data flow out of technical low level data through transforms into datasets - and then into researchers and research projects) - Assert.Contains(cat,ci.GetObjectsDependingOnThis()); - Assert.Contains(ci,cat.GetObjectsThisDependsOn()); + try + { + //col 
depends on tr + Assert.Contains(t, col.GetObjectsThisDependsOn()); + Assert.Contains(col, t.GetObjectsDependingOnThis()); - //catalogue item should not be relying on anything currently (no link to underlying technical data) - Assert.IsNull(ci.GetObjectsThisDependsOn()); + //catalogue depends on catalogue items existing (slightly counter intuitive but think of it as data flow out of technical low level data through transforms into datasets - and then into researchers and research projects) + Assert.Contains(cat, ci.GetObjectsDependingOnThis()); + Assert.Contains(ci, cat.GetObjectsThisDependsOn()); - //now they are associated so the ci should be dependent on the col - ci.SetColumnInfo(col); - Assert.Contains(col, ci.GetObjectsDependingOnThis()); + //catalogue item should not be relying on anything currently (no link to underlying technical data) + Assert.IsNull(ci.GetObjectsThisDependsOn()); - } - finally - { - t.DeleteInDatabase(); - cat.DeleteInDatabase(); - } + //now they are associated so the ci should be dependent on the col + ci.SetColumnInfo(col); + Assert.Contains(col, ci.GetObjectsDependingOnThis()); + } + finally + { + t.DeleteInDatabase(); + cat.DeleteInDatabase(); } } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/DitaExtractorTests.cs b/Rdmp.Core.Tests/Curation/Integration/DitaExtractorTests.cs index cd26d7487f..ec09ec9b09 100644 --- a/Rdmp.Core.Tests/Curation/Integration/DitaExtractorTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/DitaExtractorTests.cs @@ -10,144 +10,141 @@ using NUnit.Framework; using Rdmp.Core.Curation.Data; using Rdmp.Core.Reports; -using ReusableLibraryCode.Progress; +using Rdmp.Core.ReusableLibraryCode.Progress; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Integration +namespace Rdmp.Core.Tests.Curation.Integration; + +internal class DitaExtractorTests : DatabaseTests { - class DitaExtractorTests : DatabaseTests - { - private Exception _setupException = null; + private Exception _setupException; - private TestDirectoryHelper _directoryHelper; + private TestDirectoryHelper _directoryHelper; - [OneTimeSetUp] - protected override void OneTimeSetUp() + [OneTimeSetUp] + protected override void OneTimeSetUp() + { + base.OneTimeSetUp(); + + try { - base.OneTimeSetUp(); + _directoryHelper = new TestDirectoryHelper(GetType()); - try - { - _directoryHelper = new TestDirectoryHelper(GetType()); - - _directoryHelper.SetUp(); - - Random random = new Random(); - - //delete all catalogues with duplicate names - Catalogue[] catalogues = CatalogueRepository.GetAllObjects().ToArray(); - - foreach (var cata in catalogues) - if (catalogues.Count(c => c.Name.Equals(cata.Name)) > 1) - { - Console.WriteLine( - $"Deleting Catalogue Called {cata.Name} (because there are multiple Catalogues with this name) in Catalogue database"); - cata.DeleteInDatabase(); - } - - //make sure all Catalogues have acroynms, if they dont then assign them a super random one - foreach (Catalogue cata in CatalogueRepository.GetAllObjects()) - if (string.IsNullOrWhiteSpace(cata.Acronym)) - { - cata.Acronym = "RANDOMACRONYM_" + random.Next(10000); - cata.SaveToDatabase(); - } - } - catch (Exception e) + _directoryHelper.SetUp(); + + var random = new Random(); + + //delete all catalogues with duplicate names + var catalogues = CatalogueRepository.GetAllObjects().ToArray(); + + foreach (var cata in catalogues.GroupBy(c => c.Name).Where(g => g.Count() > 1).SelectMany(y => y)) + cata.DeleteInDatabase(); + + //make sure all Catalogues have acronyms, if they 
don't then assign them a super random one + foreach (var cata in CatalogueRepository.GetAllObjects() + .Where(c => string.IsNullOrWhiteSpace(c.Acronym))) { - _setupException = e; + cata.Acronym = $"RANDOMACRONYM_{random.Next(10000)}"; + cata.SaveToDatabase(); } } - - [SetUp] - protected override void SetUp() + catch (Exception e) { - base.SetUp(); - if (_setupException != null) - { - Console.WriteLine("TestFixtureSetUp failed in {0} - {1}", GetType(), _setupException.Message); - throw _setupException; - } - - _directoryHelper.DeleteAllEntriesInDir(); + _setupException = e; } + } - [Test] - public void DitaExtractorConstructor_ExtractTestCatalogue_FilesExist() + [SetUp] + protected override void SetUp() + { + base.SetUp(); + if (_setupException != null) { - var testDir = _directoryHelper.Directory; - - //get rid of any old copies lying around - Catalogue oldCatalogueVersion = CatalogueRepository.GetAllObjects().SingleOrDefault(c => c.Name.Equals("DitaExtractorConstructor_ExtractTestCatalogue_FilesExist")); - if(oldCatalogueVersion != null) - oldCatalogueVersion.DeleteInDatabase(); + Console.WriteLine("TestFixtureSetUp failed in {0} - {1}", GetType(), _setupException.Message); + throw _setupException; + } - Catalogue ditaTestCatalogue = new Catalogue(CatalogueRepository, "DitaExtractorConstructor_ExtractTestCatalogue_FilesExist");//name of Catalogue + _directoryHelper.DeleteAllEntriesInDir(); + } - ditaTestCatalogue.Acronym = "DITA_TEST"; - ditaTestCatalogue.Description = - "Test catalogue for the unit test DitaExtractorConstructor_ExtractTestCatalogue_FilesExist in file " + - typeof (DitaExtractorTests).FullName + ".cs"; - ditaTestCatalogue.SaveToDatabase(); + [Test] + public void DitaExtractorConstructor_ExtractTestCatalogue_FilesExist() + { + var testDir = _directoryHelper.Directory; + //get rid of any old copies lying around + var oldCatalogueVersion = CatalogueRepository.GetAllObjects() + .SingleOrDefault(c => c.Name.Equals("DitaExtractorConstructor_ExtractTestCatalogue_FilesExist")); + oldCatalogueVersion?.DeleteInDatabase(); - try + var ditaTestCatalogue = + new Catalogue(CatalogueRepository, "DitaExtractorConstructor_ExtractTestCatalogue_FilesExist") { - DitaCatalogueExtractor extractor = new DitaCatalogueExtractor(CatalogueRepository, testDir); + Acronym = "DITA_TEST", + Description = + $"Test catalogue for the unit test DitaExtractorConstructor_ExtractTestCatalogue_FilesExist in file {typeof(DitaExtractorTests).FullName}.cs" + }; //name of Catalogue - extractor.Extract(new ThrowImmediatelyDataLoadEventListener()); + ditaTestCatalogue.SaveToDatabase(); - //make sure the root mapping files exist for navigating around - Assert.IsTrue(File.Exists(Path.Combine(testDir.FullName, "hic_data_catalogue.ditamap"))); - Assert.IsTrue(File.Exists(Path.Combine(testDir.FullName, "introduction.dita"))); - Assert.IsTrue(File.Exists(Path.Combine(testDir.FullName, "dataset.dita"))); - //make sure the catalogue we created is there - FileInfo ditaCatalogueAsDotDitaFile = new FileInfo(Path.Combine(testDir.FullName, "ditaextractorconstructor_extracttestcatalogue_filesexist.dita"));//name of Dita file (for the Catalogue we just created) - Assert.IsTrue(ditaCatalogueAsDotDitaFile.Exists); - Assert.IsTrue(File.ReadAllText(ditaCatalogueAsDotDitaFile.FullName).Contains(ditaTestCatalogue.Description)); + try + { + var extractor = new DitaCatalogueExtractor(CatalogueRepository, testDir); - } - finally - { - ditaTestCatalogue.DeleteInDatabase(); - foreach (var file in testDir.GetFiles()) - file.Delete(); - } + 
extractor.Extract(ThrowImmediatelyDataLoadEventListener.Quiet);
+
+ //make sure the root mapping files exist for navigating around
+ Assert.IsTrue(File.Exists(Path.Combine(testDir.FullName, "hic_data_catalogue.ditamap")));
+ Assert.IsTrue(File.Exists(Path.Combine(testDir.FullName, "introduction.dita")));
+ Assert.IsTrue(File.Exists(Path.Combine(testDir.FullName, "dataset.dita")));
+
+ //make sure the catalogue we created is there
+ var ditaCatalogueAsDotDitaFile = new FileInfo(Path.Combine(testDir.FullName,
+ "ditaextractorconstructor_extracttestcatalogue_filesexist.dita")); //name of Dita file (for the Catalogue we just created)
+ Assert.IsTrue(ditaCatalogueAsDotDitaFile.Exists);
+ Assert.IsTrue(File.ReadAllText(ditaCatalogueAsDotDitaFile.FullName)
+ .Contains(ditaTestCatalogue.Description));
+ }
+ finally
+ {
+ ditaTestCatalogue.DeleteInDatabase();
+ foreach (var file in testDir.GetFiles())
+ file.Delete();
}
+ }

- [Test]
- public void CreateCatalogueWithNoAcronym_CrashesDITAExtractor()
+ [Test]
+ public void CreateCatalogueWithNoAcronym_CrashesDITAExtractor()
+ {
+ var testDir = _directoryHelper.Directory;
+
+ try
{
- var testDir = _directoryHelper.Directory;
+ //create a new Catalogue in the test database that doesn't have an acronym (should crash Dita Extractor)
+ var myNewCatalogue = new Catalogue(CatalogueRepository, "UnitTestCatalogue")
+ {
+ Acronym = ""
+ };
+ myNewCatalogue.SaveToDatabase();
try
{
- //create a new Catalogue in the test datbaase that doesnt have a acronym (should crash Dita Extractor)
- Catalogue myNewCatalogue = new Catalogue(CatalogueRepository, "UnitTestCatalogue");
- myNewCatalogue.Acronym = "";
- myNewCatalogue.SaveToDatabase();
-
- try
- {
- DitaCatalogueExtractor extractor = new DitaCatalogueExtractor(CatalogueRepository, testDir);
- var ex = Assert.Throws(()=>extractor.Extract(new ThrowImmediatelyDataLoadEventListener()));
- Assert.AreEqual("Dita Extraction requires that each catalogue have a unique Acronym, the catalogue UnitTestCatalogue is missing an Acronym",ex.Message);
-
- }
- finally
- {
- myNewCatalogue.DeleteInDatabase();
- }
-
+ var extractor = new DitaCatalogueExtractor(CatalogueRepository, testDir);
+ var ex = Assert.Throws(() => extractor.Extract(ThrowImmediatelyDataLoadEventListener.Quiet));
+ Assert.AreEqual(
+ "Dita Extraction requires that each catalogue have a unique Acronym, the catalogue UnitTestCatalogue is missing an Acronym",
+ ex.Message);
}
finally
{
- foreach (var file in testDir.GetFiles())
- file.Delete();
+ myNewCatalogue.DeleteInDatabase();
}
- }
-
+ finally
+ {
+ foreach (var file in testDir.GetFiles())
+ file.Delete();
+ }
}
-}
+} \ No newline at end of file
diff --git a/Rdmp.Core.Tests/Curation/Integration/EncryptionTests.cs b/Rdmp.Core.Tests/Curation/Integration/EncryptionTests.cs index 3448660c63..c25ee7e0e4 100644 --- a/Rdmp.Core.Tests/Curation/Integration/EncryptionTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/EncryptionTests.cs @@ -9,212 +9,220 @@ using NUnit.Framework; using Rdmp.Core.Curation.Data; using Rdmp.Core.Repositories.Managers; -using ReusableLibraryCode; +using Rdmp.Core.ReusableLibraryCode; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Integration +namespace Rdmp.Core.Tests.Curation.Integration; + +public class EncryptionTests : DatabaseTests { - public class EncryptionTests : DatabaseTests + [Test] + public void EncryptAndThenDecryptString() { - [Test] - public void EncryptAndThenDecryptString() - { + var encrypter = CatalogueRepository.EncryptionManager.GetEncrypter(); - var encrypter = 
CatalogueRepository.EncryptionManager.GetEncrypter(); + const string toEncrypt = "Amagad"; + var encryptedBinaryString = encrypter.Encrypt(toEncrypt); - string toEncrypt = "Amagad"; - string encrytpedBinaryString = encrypter.Encrypt(toEncrypt); + Assert.AreNotEqual(toEncrypt, encryptedBinaryString); + Assert.AreEqual(toEncrypt, encrypter.Decrypt(encryptedBinaryString)); + } - Console.WriteLine("Encrypted password was:" +encrytpedBinaryString); - Assert.AreNotEqual(toEncrypt, encrytpedBinaryString); - Assert.AreEqual(toEncrypt,encrypter.Decrypt(encrytpedBinaryString)); - } + [Test] + public void CheckIfThingsAreEncryptedOrNot() + { + var encrypter = CatalogueRepository.EncryptionManager.GetEncrypter(); - [Test] - public void CheckIfThingsAreEncryptedOrNot() - { + const string toEncrypt = "Amagad"; + var encryptedBinaryString = encrypter.Encrypt(toEncrypt); - var encrypter = CatalogueRepository.EncryptionManager.GetEncrypter(); + Console.WriteLine($"Encrypted password was:{encryptedBinaryString}"); - string toEncrypt = "Amagad"; - string encrytpedBinaryString = encrypter.Encrypt(toEncrypt); + Assert.True(encrypter.IsStringEncrypted(encryptedBinaryString)); + Assert.False(encrypter.IsStringEncrypted(toEncrypt)); + } - Console.WriteLine("Encrypted password was:" + encrytpedBinaryString); - - Assert.True(encrypter.IsStringEncrypted(encrytpedBinaryString)); - Assert.False(encrypter.IsStringEncrypted(toEncrypt)); - } + [Test] + public void MultiEncryptingShouldntBreakIt() + { + //cleanup + foreach (var c in CatalogueRepository.GetAllObjects() + .Where(c => c.Name.Equals("frankieFran"))) + c.DeleteInDatabase(); - [Test] - public void MultiEncryptingShouldntBreakIt() + var creds = new DataAccessCredentials(CatalogueRepository, "frankieFran"); + try { - //cleanup - foreach (var c in CatalogueRepository.GetAllObjects().Where(c => c.Name.Equals("frankieFran"))) - c.DeleteInDatabase(); + //as soon as you set a password it should be encrypted by the credentials class in memory + creds.Password = "fish"; - DataAccessCredentials creds = new DataAccessCredentials(CatalogueRepository, "frankieFran"); - try - { - //as soon as you set a password it should be encrypted by the credentials class in memory - creds.Password = "fish"; - - Assert.AreNotEqual("fish", creds.Password); - Assert.AreEqual("fish", creds.GetDecryptedPassword()); //but we should still be able to decrypt it - - //set the password to the encrypted password - creds.Password = creds.Password; - - //should still work - Assert.AreNotEqual("fish", creds.Password); - Assert.AreEqual("fish", creds.GetDecryptedPassword()); //but we should still be able to decrypt it - } - finally - { - creds.DeleteInDatabase(); - } - } + Assert.AreNotEqual("fish", creds.Password); + Assert.AreEqual("fish", creds.GetDecryptedPassword()); //but we should still be able to decrypt it + //set the password to the encrypted password + creds.Password = creds.Password; - [Test] - public void DataAccessCredentialsEncryption() + //should still work + Assert.AreNotEqual("fish", creds.Password); + Assert.AreEqual("fish", creds.GetDecryptedPassword()); //but we should still be able to decrypt it + } + finally { - //cleanup - foreach (var c in CatalogueRepository.GetAllObjects().Where(c => c.Name.Equals("frankieFran"))) - c.DeleteInDatabase(); - - DataAccessCredentials creds = new DataAccessCredentials(CatalogueRepository, "frankieFran"); - try - { - //as soon as you set a password it should be encrypted by the credentials class in memory - creds.Password = "fish"; - 
Assert.AreNotEqual("fish",creds.Password); - Assert.AreEqual("fish", creds.GetDecryptedPassword());//but we should still be able to decrypt it - - //save it - creds.SaveToDatabase(); - using (var con = CatalogueTableRepository.GetConnection()) - { - string value; - using(var cmd = DatabaseCommandHelper.GetCommand("Select Password from DataAccessCredentials where Name='frankieFran'", con.Connection, con.Transaction)) - value = (string) cmd.ExecuteScalar(); - - //ensure password in database is encrypted - Assert.AreNotEqual("fish",value); - Assert.AreEqual(creds.Password,value);//does value in database match value in memory (encrypted) - } + creds.DeleteInDatabase(); + } + } - //get a new copy out of the database - DataAccessCredentials newCopy = CatalogueRepository.GetObjectByID(creds.ID); - Assert.AreEqual(creds.Password,newCopy.Password);//passwords should match - Assert.AreNotEqual("fish",creds.Password);//neither should be fish - Assert.AreNotEqual("fish", newCopy.Password); - - //both should decrypt to the same value (fish - Assert.AreEqual("fish",creds.GetDecryptedPassword()); - Assert.AreEqual("fish", newCopy.GetDecryptedPassword()); - } - finally - { - creds.DeleteInDatabase(); - } - } + [Test] + public void DataAccessCredentialsEncryption() + { + //cleanup + foreach (var c in CatalogueRepository.GetAllObjects() + .Where(c => c.Name.Equals("frankieFran"))) + c.DeleteInDatabase(); - [Test] - [TestCase("bob")] - [TestCase(" bob ")] - [TestCase(" b@!#*$(!#W$999sdf0------------ob ")] - public void TestFreakyPasswordValues(string freakyPassword) + var creds = new DataAccessCredentials(CatalogueRepository, "frankieFran"); + try { - //cleanup - foreach (var c in CatalogueRepository.GetAllObjects().Where(c => c.Name.Equals("frankieFran"))) - c.DeleteInDatabase(); - - DataAccessCredentials creds = new DataAccessCredentials(CatalogueRepository, "frankieFran"); - try + //as soon as you set a password it should be encrypted by the credentials class in memory + creds.Password = "fish"; + Assert.AreNotEqual("fish", creds.Password); + Assert.AreEqual("fish", creds.GetDecryptedPassword()); //but we should still be able to decrypt it + + //save it + creds.SaveToDatabase(); + using (var con = CatalogueTableRepository.GetConnection()) { - //as soon as you set a password it should be encrypted by the credentials class in memory - creds.Password = freakyPassword; - Assert.AreNotEqual(freakyPassword, creds.Password); - Assert.AreEqual(freakyPassword, creds.GetDecryptedPassword());//but we should still be able to decrypt it - - //save it - creds.SaveToDatabase(); - using (var con = CatalogueTableRepository.GetConnection()) + string value; + using (var cmd = DatabaseCommandHelper.GetCommand( + "Select Password from DataAccessCredentials where Name='frankieFran'", con.Connection, + con.Transaction)) { - string value; - using(var cmd = DatabaseCommandHelper.GetCommand("Select Password from DataAccessCredentials where Name='frankieFran'", con.Connection, con.Transaction)) - value = (string) cmd.ExecuteScalar(); - - //ensure password in database is encrypted - Assert.AreNotEqual(freakyPassword, value); - Assert.AreEqual(creds.Password, value);//does value in database match value in memory (encrypted) + value = (string)cmd.ExecuteScalar(); } - //get a new copy out of the database - DataAccessCredentials newCopy = CatalogueRepository.GetObjectByID(creds.ID); - Assert.AreEqual(creds.Password, newCopy.Password);//passwords should match - Assert.AreNotEqual(freakyPassword, creds.Password);//neither should be fish - 
Assert.AreNotEqual(freakyPassword, newCopy.Password); + //ensure password in database is encrypted + Assert.AreNotEqual("fish", value); + Assert.AreEqual(creds.Password, value); //does value in database match value in memory (encrypted) + } - //both should decrypt to the same value (fish - Assert.AreEqual(freakyPassword, creds.GetDecryptedPassword()); - Assert.AreEqual(freakyPassword, newCopy.GetDecryptedPassword()); + //get a new copy out of the database + var newCopy = CatalogueRepository.GetObjectByID(creds.ID); + Assert.AreEqual(creds.Password, newCopy.Password); //passwords should match + Assert.AreNotEqual("fish", creds.Password); //neither should be fish + Assert.AreNotEqual("fish", newCopy.Password); - } - finally - { - creds.DeleteInDatabase(); - } + //both should decrypt to the same value (fish + Assert.AreEqual("fish", creds.GetDecryptedPassword()); + Assert.AreEqual("fish", newCopy.GetDecryptedPassword()); + } + finally + { + creds.DeleteInDatabase(); } + } + [Test] + [TestCase("bob")] + [TestCase(" bob ")] + [TestCase(" b@!#*$(!#W$999sdf0------------ob ")] + public void TestFreakyPasswordValues(string freakyPassword) + { + //cleanup + foreach (var c in CatalogueRepository.GetAllObjects() + .Where(c => c.Name.Equals("frankieFran"))) + c.DeleteInDatabase(); - [Test] - public void MigrationOfOldPasswordsTest() + var creds = new DataAccessCredentials(CatalogueRepository, "frankieFran"); + try { - //cleanup - foreach (var c in CatalogueRepository.GetAllObjects().Where(c => c.Name.Equals("frankieFran"))) - c.DeleteInDatabase(); - - //create a new credentials - DataAccessCredentials creds = new DataAccessCredentials(CatalogueRepository, "frankieFran"); - try + //as soon as you set a password it should be encrypted by the credentials class in memory + creds.Password = freakyPassword; + Assert.AreNotEqual(freakyPassword, creds.Password); + Assert.AreEqual(freakyPassword, creds.GetDecryptedPassword()); //but we should still be able to decrypt it + + //save it + creds.SaveToDatabase(); + using (var con = CatalogueTableRepository.GetConnection()) { - //update the database to an unencrypted password (like would be the case before software patch) - using (var con = CatalogueTableRepository.GetConnection()) + string value; + using (var cmd = DatabaseCommandHelper.GetCommand( + "Select Password from DataAccessCredentials where Name='frankieFran'", con.Connection, + con.Transaction)) { - using (var cmd = DatabaseCommandHelper.GetCommand( - "UPDATE DataAccessCredentials set Password = 'fish' where Name='frankieFran'", con.Connection, - con.Transaction)) - Assert.AreEqual(1, cmd.ExecuteNonQuery()); - + value = (string)cmd.ExecuteScalar(); } - DataAccessCredentials newCopy = CatalogueRepository.GetObjectByID(creds.ID); - - Assert.AreEqual("fish",newCopy.GetDecryptedPassword()); - Assert.AreNotEqual("fish", newCopy.Password); - } - finally - { - creds.DeleteInDatabase(); + //ensure password in database is encrypted + Assert.AreNotEqual(freakyPassword, value); + Assert.AreEqual(creds.Password, value); //does value in database match value in memory (encrypted) } + + //get a new copy out of the database + var newCopy = CatalogueRepository.GetObjectByID(creds.ID); + Assert.AreEqual(creds.Password, newCopy.Password); //passwords should match + Assert.AreNotEqual(freakyPassword, creds.Password); //neither should be fish + Assert.AreNotEqual(freakyPassword, newCopy.Password); + + //both should decrypt to the same value (fish + Assert.AreEqual(freakyPassword, creds.GetDecryptedPassword()); + 
Assert.AreEqual(freakyPassword, newCopy.GetDecryptedPassword()); + } + finally + { + creds.DeleteInDatabase(); } + } - [Test] - public void PasswordTooLong() + + [Test] + public void MigrationOfOldPasswordsTest() + { + //cleanup + foreach (var c in CatalogueRepository.GetAllObjects() + .Where(c => c.Name.Equals("frankieFran"))) + c.DeleteInDatabase(); + + //create a new credentials + var creds = new DataAccessCredentials(CatalogueRepository, "frankieFran"); + try { - if(RepositoryLocator.CatalogueRepository.EncryptionManager is PasswordEncryptionKeyLocation em && !string.IsNullOrWhiteSpace(em.GetKeyFileLocation())) - Assert.Inconclusive("Could not run test because there is already an encryption key set up. Likely one that handles very long passwords"); + //update the database to an unencrypted password (like would be the case before software patch) + using (var con = CatalogueTableRepository.GetConnection()) + { + using var cmd = DatabaseCommandHelper.GetCommand( + "UPDATE DataAccessCredentials set Password = 'fish' where Name='frankieFran'", con.Connection, + con.Transaction); + Assert.AreEqual(1, cmd.ExecuteNonQuery()); + } - string password = "a"; - for (int i = 0; i < 200; i++) - password += "a"; + var newCopy = CatalogueRepository.GetObjectByID(creds.ID); - var ex = Assert.Throws(() => TestFreakyPasswordValues(password)); - Assert.AreEqual("The free text Value supplied to this class was too long to be encrypted (Length of string was 201)", ex.Message); + Assert.AreEqual("fish", newCopy.GetDecryptedPassword()); + Assert.AreNotEqual("fish", newCopy.Password); + } + finally + { + creds.DeleteInDatabase(); } - } -} + + [Test] + public void PasswordTooLong() + { + if (RepositoryLocator.CatalogueRepository.EncryptionManager is PasswordEncryptionKeyLocation em && + !string.IsNullOrWhiteSpace(em.GetKeyFileLocation())) + Assert.Inconclusive( + "Could not run test because there is already an encryption key set up. 
Likely one that handles very long passwords"); + + var password = "a"; + for (var i = 0; i < 200; i++) + password += "a"; + + var ex = Assert.Throws(() => TestFreakyPasswordValues(password)); + Assert.AreEqual( + "The free text Value supplied to this class was too long to be encrypted (Length of string was 201)", + ex.Message); + } +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/ExtractionFilterParameterSetTests.cs b/Rdmp.Core.Tests/Curation/Integration/ExtractionFilterParameterSetTests.cs index 9601d1a0e4..7dd68d31d3 100644 --- a/Rdmp.Core.Tests/Curation/Integration/ExtractionFilterParameterSetTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/ExtractionFilterParameterSetTests.cs @@ -6,40 +6,36 @@ using NUnit.Framework; using Rdmp.Core.Curation.Data; -using System; -using System.Collections.Generic; -using System.Linq; -using System.Text; -using System.Threading.Tasks; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Integration +namespace Rdmp.Core.Tests.Curation.Integration; + +internal class ExtractionFilterParameterSetTests : DatabaseTests { - class ExtractionFilterParameterSetTests : DatabaseTests + [Test] + public void ExtractionFilterParameterSet_Deleting() { - [Test] - public void ExtractionFilterParameterSet_Deleting() - { - var cata = new Catalogue(CatalogueRepository, "myCata"); - var cataItem = new CatalogueItem(CatalogueRepository, cata, "MyCol"); + var cata = new Catalogue(CatalogueRepository, "myCata"); + var cataItem = new CatalogueItem(CatalogueRepository, cata, "MyCol"); - var table = new TableInfo(CatalogueRepository, "myTbl"); - var col = new ColumnInfo(CatalogueRepository, "myCol", "varchar(10)", table); + var table = new TableInfo(CatalogueRepository, "myTbl"); + var col = new ColumnInfo(CatalogueRepository, "myCol", "varchar(10)", table); - var ei = new ExtractionInformation(CatalogueRepository, cataItem, col, "[myTbl].[mycol]"); - var filter = new ExtractionFilter(CatalogueRepository, "Age", ei); - filter.WhereSQL = "Age >= @age"; - new ExtractionFilterParameter(CatalogueRepository, "DECLARE @age int", filter); + var ei = new ExtractionInformation(CatalogueRepository, cataItem, col, "[myTbl].[mycol]"); + var filter = new ExtractionFilter(CatalogueRepository, "Age", ei) + { + WhereSQL = "Age >= @age" + }; + new ExtractionFilterParameter(CatalogueRepository, "DECLARE @age int", filter); - var paramSet = new ExtractionFilterParameterSet(CatalogueRepository, filter, "Old"); - var vals = paramSet.CreateNewValueEntries(); + var paramSet = new ExtractionFilterParameterSet(CatalogueRepository, filter, "Old"); + var vals = paramSet.CreateNewValueEntries(); - Assert.AreEqual(1, vals.Length); - Assert.IsTrue(vals[0].Exists()); + Assert.AreEqual(1, vals.Length); + Assert.IsTrue(vals[0].Exists()); - paramSet.DeleteInDatabase(); - Assert.IsFalse(paramSet.Exists()); - Assert.IsFalse(vals[0].Exists()); - } + paramSet.DeleteInDatabase(); + Assert.IsFalse(paramSet.Exists()); + Assert.IsFalse(vals[0].Exists()); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/ExtractionFilterTests.cs b/Rdmp.Core.Tests/Curation/Integration/ExtractionFilterTests.cs index dcf9c93742..97dbf9cf24 100644 --- a/Rdmp.Core.Tests/Curation/Integration/ExtractionFilterTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/ExtractionFilterTests.cs @@ -4,41 +4,42 @@ // RDMP is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU General Public License for more details. // You should have received a copy of the GNU General Public License along with RDMP. If not, see . -using Microsoft.Data.SqlClient; using NUnit.Framework; using Rdmp.Core.Curation.Data; using System; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Integration +namespace Rdmp.Core.Tests.Curation.Integration; + +public class ExtractionFilterTests : DatabaseTests { - public class ExtractionFilterTests : DatabaseTests + [Test] + public void TestExtractionFilterDeleting_WhenItHas_ExtractionFilterParameterSet_DirectlyFails() { - [Test] - public void TestExtractionFilterDeleting_WhenItHas_ExtractionFilterParameterSet_DirectlyFails() - { - var filter = GetFilterWithParameterSet(); - var ex = Assert.Throws(()=>filter.DeleteInDatabase()); - Assert.AreEqual("Cannot delete 'Age' because there are one or more ExtractionFilterParameterSet declared on it", ex.Message); - } + var filter = GetFilterWithParameterSet(); + var ex = Assert.Throws(() => filter.DeleteInDatabase()); + Assert.AreEqual("Cannot delete 'Age' because there are one or more ExtractionFilterParameterSet declared on it", + ex.Message); + } + + private ExtractionFilter GetFilterWithParameterSet() + { + var cata = new Catalogue(CatalogueRepository, "myCata"); + var cataItem = new CatalogueItem(CatalogueRepository, cata, "MyCol"); + + var table = new TableInfo(CatalogueRepository, "myTbl"); + var col = new ColumnInfo(CatalogueRepository, "myCol", "varchar(10)", table); - private ExtractionFilter GetFilterWithParameterSet() + var ei = new ExtractionInformation(CatalogueRepository, cataItem, col, "[myTbl].[mycol]"); + var filter = new ExtractionFilter(CatalogueRepository, "Age", ei) { - var cata = new Catalogue(CatalogueRepository, "myCata"); - var cataItem = new CatalogueItem(CatalogueRepository, cata, "MyCol"); - - var table = new TableInfo(CatalogueRepository, "myTbl"); - var col = new ColumnInfo(CatalogueRepository, "myCol", "varchar(10)", table); - - var ei = new ExtractionInformation(CatalogueRepository, cataItem, col, "[myTbl].[mycol]"); - var filter = new ExtractionFilter(CatalogueRepository, "Age", ei); - filter.WhereSQL = "Age >= @age"; - new ExtractionFilterParameter(CatalogueRepository, "DECLARE @age int", filter); - - var paramSet = new ExtractionFilterParameterSet(CatalogueRepository, filter, "Old"); - paramSet.CreateNewValueEntries(); - - return filter; - } + WhereSQL = "Age >= @age" + }; + new ExtractionFilterParameter(CatalogueRepository, "DECLARE @age int", filter); + + var paramSet = new ExtractionFilterParameterSet(CatalogueRepository, filter, "Old"); + paramSet.CreateNewValueEntries(); + + return filter; } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/ExtractionInformationTests.cs b/Rdmp.Core.Tests/Curation/Integration/ExtractionInformationTests.cs index 5d4f13a75b..043f06bace 100644 --- a/Rdmp.Core.Tests/Curation/Integration/ExtractionInformationTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/ExtractionInformationTests.cs @@ -8,148 +8,133 @@ using Rdmp.Core.Curation.Data; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Integration +namespace Rdmp.Core.Tests.Curation.Integration; + +public class ExtractionInformationTests : DatabaseTests { - public class ExtractionInformationTests : DatabaseTests - { - ///////////////Create the things that we are going to create relationships between ///////////////// + ///////////////Create the things that we are going to create relationships between ///////////////// - 
Catalogue cata; - CatalogueItem cataItem; - TableInfo ti; - ColumnInfo columnInfo; + private Catalogue cata; + private CatalogueItem cataItem; + private TableInfo ti; + private ColumnInfo columnInfo; - [SetUp] - protected override void SetUp() - { - base.SetUp(); + [SetUp] + protected override void SetUp() + { + base.SetUp(); - cata = new Catalogue(CatalogueRepository, "ExtractionInformationTestsCatalogue"); - cataItem = new CatalogueItem(CatalogueRepository, cata, "QuadlzorVelocity"); - ti = new TableInfo(CatalogueRepository, "HighEnergyShizzle"); - columnInfo = new ColumnInfo(CatalogueRepository, "VelocityOfMatter", "int", ti); + cata = new Catalogue(CatalogueRepository, "ExtractionInformationTestsCatalogue"); + cataItem = new CatalogueItem(CatalogueRepository, cata, "QuadlzorVelocity"); + ti = new TableInfo(CatalogueRepository, "HighEnergyShizzle"); + columnInfo = new ColumnInfo(CatalogueRepository, "VelocityOfMatter", "int", ti); - ////////////Check the creation worked ok - Assert.IsNotNull(cata); //catalogue - Assert.IsNotNull(cataItem); + ////////////Check the creation worked ok + Assert.IsNotNull(cata); //catalogue + Assert.IsNotNull(cataItem); - Assert.IsNotNull(ti); //underlying table stuff - Assert.IsNotNull(columnInfo); + Assert.IsNotNull(ti); //underlying table stuff + Assert.IsNotNull(columnInfo); - ////////////// Create links between stuff and check they were created successfully ////////////// + ////////////// Create links between stuff and check they were created successfully ////////////// - //create a link between catalogue item lazor and velocity column - cataItem.SetColumnInfo(columnInfo); - - } + //create a link between catalogue item lazor and velocity column + cataItem.SetColumnInfo(columnInfo); + } - [Test] - public void BasicIDsAreCorrect() - { - ColumnInfo firstLinked = cataItem.ColumnInfo; - Assert.IsTrue(firstLinked != null); - Assert.IsTrue(firstLinked.ID == columnInfo.ID); - } + [Test] + public void BasicIDsAreCorrect() + { + var firstLinked = cataItem.ColumnInfo; + Assert.IsTrue(firstLinked != null); + Assert.IsTrue(firstLinked.ID == columnInfo.ID); + } - [Test] - public void test_creating_ExtractionFilter() - { + [Test] + public void test_creating_ExtractionFilter() + { + ExtractionInformation extractInfo = null; + ExtractionFilter filterFastThings = null; + ExtractionFilterParameter parameter = null; - ExtractionInformation extractInfo = null; - ExtractionFilter filterFastThings = null; - ExtractionFilterParameter parameter = null; + try + { + //define extraction information + extractInfo = new ExtractionInformation(CatalogueRepository, cataItem, columnInfo, + "ROUND(VelocityOfMatter,2) VelocityOfMatterRounded"); - try + //define filter and parameter + filterFastThings = new ExtractionFilter(CatalogueRepository, "FastThings", extractInfo) { - //define extraction information - extractInfo = new ExtractionInformation(CatalogueRepository, cataItem, columnInfo, "ROUND(VelocityOfMatter,2) VelocityOfMatterRounded"); - - //define filter and parameter - filterFastThings = new ExtractionFilter(CatalogueRepository, "FastThings", extractInfo) - { - WhereSQL = "VelocityOfMatter > @X", - Description = "Query to identify things that travel faster than X miles per hour!" - }; - filterFastThings.SaveToDatabase(); - Assert.AreEqual(filterFastThings.Name, "FastThings"); + WhereSQL = "VelocityOfMatter > @X", + Description = "Query to identify things that travel faster than X miles per hour!" 
+ };
+ filterFastThings.SaveToDatabase();
+ Assert.AreEqual(filterFastThings.Name, "FastThings");

- parameter = new ExtractionFilterParameter(CatalogueRepository, "DECLARE @X INT", filterFastThings);
+ parameter = new ExtractionFilterParameter(CatalogueRepository, "DECLARE @X INT", filterFastThings);

- Assert.IsNotNull(parameter);
- Assert.AreEqual(parameter.ParameterName ,"@X");
+ Assert.IsNotNull(parameter);
+ Assert.AreEqual(parameter.ParameterName, "@X");

- parameter.Value = "500";
- parameter.SaveToDatabase();
+ parameter.Value = "500";
+ parameter.SaveToDatabase();

- ExtractionFilterParameter afterSave = CatalogueRepository.GetObjectByID(parameter.ID);
- Assert.AreEqual(afterSave.Value ,"500");
+ var afterSave = CatalogueRepository.GetObjectByID(parameter.ID);
+ Assert.AreEqual(afterSave.Value, "500");

- ExtractionFilter filterFastThings_NewCopyFromDB = CatalogueRepository.GetObjectByID(filterFastThings.ID);
+ var filterFastThings_NewCopyFromDB =
+ CatalogueRepository.GetObjectByID(filterFastThings.ID);

- Assert.AreEqual(filterFastThings.ID, filterFastThings_NewCopyFromDB.ID);
- Assert.AreEqual(filterFastThings.Description, filterFastThings_NewCopyFromDB.Description);
- Assert.AreEqual(filterFastThings.Name, filterFastThings_NewCopyFromDB.Name);
- Assert.AreEqual(filterFastThings.WhereSQL, filterFastThings_NewCopyFromDB.WhereSQL);
- }
- finally
- {
-
- if (parameter != null)
- parameter.DeleteInDatabase();
-
- //filters are children of extraction info with CASCADE DELETE so have to delete this one first if we want to test it programatically (although we could just skip deleting it since SQL will handle it anyway)
- if (filterFastThings != null)
- filterFastThings.DeleteInDatabase();
-
- if(extractInfo != null)
- extractInfo.DeleteInDatabase();
- }
-
+ Assert.AreEqual(filterFastThings.ID, filterFastThings_NewCopyFromDB.ID);
+ Assert.AreEqual(filterFastThings.Description, filterFastThings_NewCopyFromDB.Description);
+ Assert.AreEqual(filterFastThings.Name, filterFastThings_NewCopyFromDB.Name);
+ Assert.AreEqual(filterFastThings.WhereSQL, filterFastThings_NewCopyFromDB.WhereSQL);
+ }
+ finally
+ {
+ parameter?.DeleteInDatabase();
+ //filters are children of extraction info with CASCADE DELETE so have to delete this one first if we want to test it programmatically (although we could just skip deleting it since SQL will handle it anyway)
+ filterFastThings?.DeleteInDatabase();
+ extractInfo?.DeleteInDatabase();
}
}

- [Test]
- public void test_creating_ExtractionInformation()
- {
-
-
- ExtractionInformation extractInfo =null;
+ [Test]
+ public void test_creating_ExtractionInformation()
+ {
+ ExtractionInformation extractInfo = null;

- try
- {
-
- //define extraction information
- //change some values and then save it
- extractInfo = new ExtractionInformation(CatalogueRepository, cataItem, columnInfo, "dave")
- {
- Order = 123,
- ExtractionCategory = ExtractionCategory.Supplemental
- };
- extractInfo.SaveToDatabase();
-
- //confirm the insert worked
- Assert.AreEqual(extractInfo.SelectSQL,"dave");
-
- //fetch the extraction information via the linked CatalogueItem - ColumnInfo pair (i.e. we are testing the alternate route to fetch ExtractionInformation - by ID or by colum/item pair)
- ExtractionInformation extractInfo2_CameFromLinker = cataItem.ExtractionInformation;
- Assert.AreEqual(extractInfo.ID, extractInfo2_CameFromLinker.ID);
- Assert.AreEqual(extractInfo.SelectSQL, extractInfo2_CameFromLinker.SelectSQL);
-
- //make sure it saves properly
- Assert.AreEqual(extractInfo2_CameFromLinker.Order,123 );
- Assert.AreEqual( extractInfo2_CameFromLinker.ExtractionCategory,ExtractionCategory.Supplemental);
-
- }
- finally
+ try
+ {
+ //define extraction information
+ //change some values and then save it
+ extractInfo = new ExtractionInformation(CatalogueRepository, cataItem, columnInfo, "dave")
{
-
- if (extractInfo != null)
- extractInfo.DeleteInDatabase();
-
- }
+ Order = 123,
+ ExtractionCategory = ExtractionCategory.Supplemental
+ };
+ extractInfo.SaveToDatabase();
+
+ //confirm the insert worked
+ Assert.AreEqual(extractInfo.SelectSQL, "dave");
+
+ //fetch the extraction information via the linked CatalogueItem - ColumnInfo pair (i.e. we are testing the alternate route to fetch ExtractionInformation - by ID or by column/item pair)
+ var extractInfo2_CameFromLinker = cataItem.ExtractionInformation;
+ Assert.AreEqual(extractInfo.ID, extractInfo2_CameFromLinker.ID);
+ Assert.AreEqual(extractInfo.SelectSQL, extractInfo2_CameFromLinker.SelectSQL);
+
+ //make sure it saves properly
+ Assert.AreEqual(extractInfo2_CameFromLinker.Order, 123);
+ Assert.AreEqual(extractInfo2_CameFromLinker.ExtractionCategory, ExtractionCategory.Supplemental);
+ }
+ finally
+ {
+ extractInfo?.DeleteInDatabase();
}
}
-}
+} \ No newline at end of file
diff --git a/Rdmp.Core.Tests/Curation/Integration/ExtractionInformationUnitTests.cs b/Rdmp.Core.Tests/Curation/Integration/ExtractionInformationUnitTests.cs index 09775e46e8..31cdf00bf7 100644 --- a/Rdmp.Core.Tests/Curation/Integration/ExtractionInformationUnitTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/ExtractionInformationUnitTests.cs @@ -8,57 +8,56 @@ using Rdmp.Core.Curation.Data; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Integration +namespace Rdmp.Core.Tests.Curation.Integration; + +public class ExtractionInformationUnitTests : UnitTests { - public class ExtractionInformationUnitTests : UnitTests + [TestCase(null)] + [TestCase(4)] + [TestCase(-3)] + [TestCase(99)] + public void NewExtractionInformation_OrderShouldBeContiguous(int? explicitOrder) { - [TestCase(null)] - [TestCase(4)] - [TestCase(-3)] - [TestCase(99)] - public void NewExtractionInformation_OrderShouldBeContiguous(int? explicitOrder) + //When we have an ExtractionInformation + var ei = WhenIHaveA(); + + if (explicitOrder.HasValue) { - //When we have an ExtractionInformation - var ei = WhenIHaveA(); + ei.Order = explicitOrder.Value; + ei.SaveToDatabase(); + } - if (explicitOrder.HasValue) - { - ei.Order = explicitOrder.Value; - ei.SaveToDatabase(); - } - - Assert.AreEqual(explicitOrder ?? 1,ei.Order); - - // Newly created ones should have the right Order to not collide - var cata = ei.CatalogueItem.Catalogue; - var cataItem = new CatalogueItem(RepositoryLocator.CatalogueRepository, cata, "ci"); - var ei2 = new ExtractionInformation(RepositoryLocator.CatalogueRepository, cataItem, ei.ColumnInfo, "fff"); + Assert.AreEqual(explicitOrder ?? 1, ei.Order); - Assert.AreEqual((explicitOrder ?? 
1) + 1,ei2.Order); - } + // Newly created ones should have the right Order to not collide + var cata = ei.CatalogueItem.Catalogue; + var cataItem = new CatalogueItem(RepositoryLocator.CatalogueRepository, cata, "ci"); + var ei2 = new ExtractionInformation(RepositoryLocator.CatalogueRepository, cataItem, ei.ColumnInfo, "fff"); - [Test] - public void NewExtractionInformation_OrderShouldBeContiguous_ManyCalls() - { - //When we have an ExtractionInformation - var ei1 = WhenIHaveA(); - - // Newly created ones should have the right Order to not collide - var cata = ei1.CatalogueItem.Catalogue; - var cataItem2 = new CatalogueItem(RepositoryLocator.CatalogueRepository, cata, "ci"); - var ei2 = new ExtractionInformation(RepositoryLocator.CatalogueRepository, cataItem2, ei1.ColumnInfo, "fff"); + Assert.AreEqual((explicitOrder ?? 1) + 1, ei2.Order); + } - Assert.AreEqual(2,ei2.Order); - - var cataItem3 = new CatalogueItem(RepositoryLocator.CatalogueRepository, cata, "ci"); - var ei3 = new ExtractionInformation(RepositoryLocator.CatalogueRepository, cataItem3, ei1.ColumnInfo, "fff"); + [Test] + public void NewExtractionInformation_OrderShouldBeContiguous_ManyCalls() + { + //When we have an ExtractionInformation + var ei1 = WhenIHaveA(); - Assert.AreEqual(3,ei3.Order); - - var cataItem4 = new CatalogueItem(RepositoryLocator.CatalogueRepository, cata, "ci"); - var ei4 = new ExtractionInformation(RepositoryLocator.CatalogueRepository, cataItem4, ei1.ColumnInfo, "fff"); + // Newly created ones should have the right Order to not collide + var cata = ei1.CatalogueItem.Catalogue; + var cataItem2 = new CatalogueItem(RepositoryLocator.CatalogueRepository, cata, "ci"); + var ei2 = new ExtractionInformation(RepositoryLocator.CatalogueRepository, cataItem2, ei1.ColumnInfo, "fff"); - Assert.AreEqual(4,ei4.Order); - } + Assert.AreEqual(2, ei2.Order); + + var cataItem3 = new CatalogueItem(RepositoryLocator.CatalogueRepository, cata, "ci"); + var ei3 = new ExtractionInformation(RepositoryLocator.CatalogueRepository, cataItem3, ei1.ColumnInfo, "fff"); + + Assert.AreEqual(3, ei3.Order); + + var cataItem4 = new CatalogueItem(RepositoryLocator.CatalogueRepository, cata, "ci"); + var ei4 = new ExtractionInformation(RepositoryLocator.CatalogueRepository, cataItem4, ei1.ColumnInfo, "fff"); + + Assert.AreEqual(4, ei4.Order); } } \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/FilterImportingTests/FilterImporterTests.cs b/Rdmp.Core.Tests/Curation/Integration/FilterImportingTests/FilterImporterTests.cs index b0cd5cdee7..38c1abf636 100644 --- a/Rdmp.Core.Tests/Curation/Integration/FilterImportingTests/FilterImporterTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/FilterImportingTests/FilterImporterTests.cs @@ -4,8 +4,9 @@ // RDMP is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. // You should have received a copy of the GNU General Public License along with RDMP. If not, see . 
+using System; using FAnsi.Implementations.MicrosoftSQL; -using Moq; +using NSubstitute; using NUnit.Framework; using Rdmp.Core.Curation.Data; using Rdmp.Core.Curation.Data.Aggregation; @@ -13,223 +14,243 @@ using Rdmp.Core.Curation.FilterImporting.Construction; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Integration.FilterImportingTests +namespace Rdmp.Core.Tests.Curation.Integration.FilterImportingTests; + +[Category("Unit")] +public class FilterImporterTests : UnitTests { - [Category("Unit")] - public class FilterImporterTests : UnitTests - { - [Test] public void FilterCreated_NewFilterGetsSameName() { - //Thing we will be cloning - var master = Mock.Of(x => - x.GetQuerySyntaxHelper() == new MicrosoftQuerySyntaxHelper() && - x.Name == "Space Odyssey"); - - //The factory will return this value - var constructed = Mock.Of(x => x.GetQuerySyntaxHelper()==new MicrosoftQuerySyntaxHelper()); - - //The factory Mock - var factory = new Mock(); - factory.Setup(m => m.CreateNewFilter("Space Odyssey")).Returns(constructed); - - //The thing we are testing - var filterCreator = new FilterImporter(factory.Object,null); - - //The method we are testing - filterCreator.ImportFilter(WhenIHaveA(),master,null); - - //Did the factory mock get ordered to create a filter called "Space Odyssey"? - factory.Verify(f=>f.CreateNewFilter(It.IsAny()),Times.Once); + //Thing we will be cloning + var master = Substitute.For(); + master.GetQuerySyntaxHelper().Returns(MicrosoftQuerySyntaxHelper.Instance); + + master.Name.Returns("Space Odyssey"); + + //The factory will return this value + var constructed = Substitute.For(); + constructed.GetQuerySyntaxHelper().Returns(MicrosoftQuerySyntaxHelper.Instance); + + + //The factory Mock + var factory = Substitute.For(); + factory.CreateNewFilter("Space Odyssey").Returns(constructed); + + //The thing we are testing + var filterCreator = new FilterImporter(factory, null); + + //The method we are testing + filterCreator.ImportFilter(WhenIHaveA(), master, null); + + //Did the factory mock get ordered to create a filter called "Space Odyssey"? + factory.Received(1).CreateNewFilter(Arg.Any()); } [Test] public void FilterCreated_CopyBecauseExistsAlready() { - //The thing we will be importing - var master = Mock.Of(x => x.GetQuerySyntaxHelper()==new MicrosoftQuerySyntaxHelper()); - master.Name = "Space Odyssey"; + //The thing we will be importing + var master = Substitute.For(); + master.GetQuerySyntaxHelper().Returns(MicrosoftQuerySyntaxHelper.Instance); + + master.Name = "Space Odyssey"; - //An existing IFilter that is in the scope that is being imported into (e.g. a data extract configuration) - var existing = Mock.Of(f=> - f.Name == "Space Odyssey" && - f.GetAllParameters()==new ISqlParameter[0]);// has no parameters + //An existing IFilter that is in the scope that is being imported into (e.g. 
a data extract configuration) + var existing = Substitute.For(); // has no parameters + existing.GetAllParameters().Returns(Array.Empty()); + existing.Name.Returns("Space Odyssey"); + //The factory will return this value + var constructed = Substitute.For(); + constructed.GetQuerySyntaxHelper().Returns(MicrosoftQuerySyntaxHelper.Instance); - //The factory will return this value - var constructed = Mock.Of(x => x.GetQuerySyntaxHelper()==new MicrosoftQuerySyntaxHelper()); - //The factory Mock - var factory = new Mock(); - factory.Setup(m => m.CreateNewFilter("Copy of Space Odyssey")).Returns(constructed); + //The factory Mock + var factory = Substitute.For(); + factory.CreateNewFilter("Copy of Space Odyssey").Returns(constructed); - //The thing we are testing - var filterCreator = new FilterImporter(factory.Object, null); + //The thing we are testing + var filterCreator = new FilterImporter(factory, null); - //The method we are testing - filterCreator.ImportFilter(WhenIHaveA(), master,new []{existing}); + //The method we are testing + filterCreator.ImportFilter(WhenIHaveA(), master, new[] { existing }); - //Did the factory mock get ordered to create a filter called "Copy of Space Odyssey" (because there was already one called "Space Odyssey" in the same scope) - factory.Verify(); + //Did the factory mock get ordered to create a filter called "Copy of Space Odyssey" (because there was already one called "Space Odyssey" in the same scope) + factory.Received(1); } [Test] public void FilterCreated_Parameters() { - var master = Mock.Of(x => x.GetQuerySyntaxHelper()==new MicrosoftQuerySyntaxHelper()); - master.Name = "Space Odyssey"; - master.WhereSQL = "@hall = 'active'"; - - var constructed = Mock.Of(x => x.GetQuerySyntaxHelper()==new MicrosoftQuerySyntaxHelper()); - var constructedParameter = Mock.Of(); - - var factory = new Mock(); - factory.Setup(m => m.CreateNewFilter("Space Odyssey")).Returns(constructed); - factory.Setup(m => m.CreateNewParameter(constructed, "DECLARE @hall AS varchar(50);")).Returns(constructedParameter); - - var filterCreator = new FilterImporter(factory.Object, null); - //Returns constructed - filterCreator.ImportFilter(WhenIHaveA(), master, null); - - factory.Verify(m => m.CreateNewFilter("Space Odyssey"),Times.Once); - factory.Verify(m=>m.CreateNewParameter(constructed, "DECLARE @hall AS varchar(50);"), Times.Once); + var master = Substitute.For(); + master.GetQuerySyntaxHelper().Returns(MicrosoftQuerySyntaxHelper.Instance); + + master.Name = "Space Odyssey"; + master.WhereSQL = "@hall = 'active'"; + + var constructed = Substitute.For(); + constructed.GetQuerySyntaxHelper().Returns(MicrosoftQuerySyntaxHelper.Instance); + + var constructedParameter = Substitute.For(); + + var factory = Substitute.For(); + factory.CreateNewFilter("Space Odyssey").Returns(constructed); + factory.CreateNewParameter(constructed, "DECLARE @hall AS varchar(50);") + .Returns(constructedParameter); + + var filterCreator = new FilterImporter(factory, null); + //Returns constructed + filterCreator.ImportFilter(WhenIHaveA(), master, null); + + factory.Received(1).CreateNewFilter("Space Odyssey"); + factory.Received(1).CreateNewParameter(constructed, "DECLARE @hall AS varchar(50);"); } + [Test] public void FilterCreated_ParametersWithMasterExplicitTyping() { - //The filter we are cloning - var master = Mock.Of(x => - x.GetQuerySyntaxHelper()==new MicrosoftQuerySyntaxHelper() && - x.Name == "Space Odyssey" && - x.WhereSQL == "@hall = 'active'"); - - //The existing parameter declared on the filter 
we are cloning - var masterParameter = Mock.Of( - x => x.GetQuerySyntaxHelper() == new MicrosoftQuerySyntaxHelper() && - x.ParameterName=="@hall" && - x.Comment == "SomeComment" && - x.Value == "500" && - x.ParameterSQL == "DECLARE @hall AS int" - ); - - Mock.Get(master).Setup(m=> m.GetAllParameters()).Returns(new[] {masterParameter}); - //We expect that the filter we are cloning will be asked what its parameters are once (and we tell them the param above) - - - //The return values for our Mock factory - var constructed = Mock.Of(x => x.GetQuerySyntaxHelper()==new MicrosoftQuerySyntaxHelper()); - var constructedParameter = Mock.Of(); - constructedParameter.ParameterSQL = "DECLARE @hall AS int"; - - //The mock factory will return the above instances for the new cloned objects - var factory = Mock.Of( m=> - m.CreateNewFilter("Space Odyssey")==constructed && - m.CreateNewParameter(constructed, "DECLARE @hall AS int")==constructedParameter ); - - //The thing we are actually testing - var filterCreator = new FilterImporter(factory, null); - filterCreator.ImportFilter(WhenIHaveA(), master, null);//Import it brah - - //Master filter should have been asked what its parameters are - Mock.Get(master).Verify(); - - //factory should have been asked to create a new filter called "Space Odyssey" and a parameter with a declaration that matches the master filter SQL (i.e. 'AS int') - Mock.Get(factory).Verify(m=>m.CreateNewFilter("Space Odyssey"),Times.Once); - - //The master filter parameters should have been copied to the child - Assert.AreEqual(constructedParameter.Comment, masterParameter.Comment); - Assert.AreEqual(constructedParameter.ParameterSQL, masterParameter.ParameterSQL); //We actually manually set this above because that's the contract with "CreateNewParameter" - Assert.AreEqual(constructedParameter.Value, masterParameter.Value); + //The filter we are cloning + var master = Substitute.For(); + master.GetQuerySyntaxHelper().Returns(MicrosoftQuerySyntaxHelper.Instance); + master.Name.Returns("Space Odyssey"); + master.WhereSQL.Returns("@hall = 'active'"); + + //The existing parameter declared on the filter we are cloning + var masterParameter = Substitute.For(); + masterParameter.GetQuerySyntaxHelper().Returns(MicrosoftQuerySyntaxHelper.Instance); + masterParameter.ParameterName.Returns("@hall"); + masterParameter.Comment.Returns("SomeComment"); + masterParameter.Value.Returns("500"); + masterParameter.ParameterSQL.Returns("DECLARE @hall AS int"); + + master.GetAllParameters().Returns(new[] { masterParameter }); + //We expect that the filter we are cloning will be asked what its parameters are once (and we tell them the param above) + + + //The return values for our Mock factory + var constructed = Substitute.For(); + constructed.GetQuerySyntaxHelper().Returns(MicrosoftQuerySyntaxHelper.Instance); + var constructedParameter = Substitute.For(); + constructedParameter.ParameterSQL = "DECLARE @hall AS int"; + + //The mock factory will return the above instances for the new cloned objects + var factory = Substitute.For(); + factory.CreateNewFilter("Space Odyssey").Returns(constructed); + factory.CreateNewParameter(constructed, "DECLARE @hall AS int").Returns(constructedParameter); + + //The thing we are actually testing + var filterCreator = new FilterImporter(factory, null); + filterCreator.ImportFilter(WhenIHaveA(), master, null); //Import it brah + + //Master filter should have been asked what its parameters are + master.Received(1); + + //factory should have been asked to create a new filter 
called "Space Odyssey" and a parameter with a declaration that matches the master filter SQL (i.e. 'AS int') + factory.Received(1).CreateNewFilter("Space Odyssey"); + + //The master filter parameters should have been copied to the child + Assert.AreEqual(constructedParameter.Comment, masterParameter.Comment); + Assert.AreEqual(constructedParameter.ParameterSQL, + masterParameter + .ParameterSQL); //We actually manually set this above because that's the contract with "CreateNewParameter" + Assert.AreEqual(constructedParameter.Value, masterParameter.Value); } [Test] public void FilterCreated_ParametersRenamingDueToExistingParameterInScopeWithSameName() { - //The filter we are cloning - var master = Mock.Of(x => x.GetQuerySyntaxHelper()==new MicrosoftQuerySyntaxHelper()); - master.Name = "Space Odyssey"; - master.WhereSQL = "@hall = 'active'"; - - //An existing parameter that is in the scope that is being imported into - var existingParameter = Mock.Of(x => x.ParameterName=="@hall"); - - //The filter to which the above existing parameter belongs - var existing = Mock.Of(x => - x.GetQuerySyntaxHelper()==new MicrosoftQuerySyntaxHelper()&& - x.GetAllParameters()==new[] { existingParameter }); - existing.Name = "Space Odyssey"; - - //The return value for our Mock factory - var constructed = Mock.Of(x => x.GetQuerySyntaxHelper()==new MicrosoftQuerySyntaxHelper()); - var constructedParameter = Mock.Of(); - - //The mocked factory - var factory = new Mock(); - factory.Setup(m => m.CreateNewFilter("Copy of Space Odyssey")).Returns(constructed); - factory.Setup(m => m.CreateNewParameter(constructed,"DECLARE @hall2 AS varchar(50);")).Returns(constructedParameter); - - //The thing we are testing - var filterCreator = new FilterImporter(factory.Object, null); - filterCreator.ImportFilter(WhenIHaveA(), master, new []{existing}); - - //Existing filter in the scope should have been asked what its parameters are - Mock.Get(existing).Verify(x=>x.GetAllParameters(),Times.Once); - - //The factory should have been asked to create a filter called "Copy of Space Odyssey" and a parameter "@hall2" (because @hall already exists in the import into scope) - factory.Verify(); + //The filter we are cloning + var master = Substitute.For(); + master.GetQuerySyntaxHelper().Returns(MicrosoftQuerySyntaxHelper.Instance); + master.Name = "Space Odyssey"; + master.WhereSQL = "@hall = 'active'"; + + //An existing parameter that is in the scope that is being imported into + var existingParameter = Substitute.For(); + existingParameter.ParameterName.Returns("@hall"); + + //The filter to which the above existing parameter belongs + var existing = Substitute.For(); + existing.GetQuerySyntaxHelper().Returns(MicrosoftQuerySyntaxHelper.Instance); + existing.GetAllParameters().Returns(new[] { existingParameter }); + + existing.Name = "Space Odyssey"; + + //The return value for our Mock factory + var constructed = Substitute.For(); + constructed.GetQuerySyntaxHelper().Returns(MicrosoftQuerySyntaxHelper.Instance); + var constructedParameter = Substitute.For(); + + //The mocked factory + var factory = Substitute.For(); + factory.CreateNewFilter("Copy of Space Odyssey").Returns(constructed); + factory.CreateNewParameter(constructed, "DECLARE @hall2 AS varchar(50);") + .Returns(constructedParameter); + + //The thing we are testing + var filterCreator = new FilterImporter(factory, null); + filterCreator.ImportFilter(WhenIHaveA(), master, new[] { existing }); + + //Existing filter in the scope should have been asked what its parameters are + 
existing.Received(1).GetAllParameters(); + + //The factory should have been asked to create a filter called "Copy of Space Odyssey" and a parameter "@hall2" (because @hall already exists in the import into scope) + factory.Received(1); } [Test] - public void FilterCreated_ParametersRenamingDueToExistingParameterInScopeWithSameName_MasterContainsMasterParameter() + public void + FilterCreated_ParametersRenamingDueToExistingParameterInScopeWithSameName_MasterContainsMasterParameter() { - //The filter we are cloning - var master = Mock.Of(x => x.GetQuerySyntaxHelper()==new MicrosoftQuerySyntaxHelper()); - master.Name = "Space Odyssey"; - master.WhereSQL = "@hall = 'active'"; - - //The existing parameter declared on the filter we are cloning - var masterParameter = Mock.Of(p => p.ParameterName=="@hall"); - masterParameter.Comment = "SomeComment"; - masterParameter.Value = "500"; - masterParameter.ParameterSQL = "DECLARE @hall AS int"; - - //We expect that the filter we are cloning will be asked what its parameters are once (and we tell them the param above) - Mock.Get(master).Setup(m => m.GetAllParameters()).Returns(new[] { masterParameter }); - - //An existing parameter that is in the scope that is being imported into - var existingParameter = Mock.Of(x => x.ParameterName=="@hall"); - - //The filter to which the above existing parameter belongs - var existing = Mock.Of(x => - x.GetQuerySyntaxHelper()==new MicrosoftQuerySyntaxHelper() && - x.GetAllParameters()==new[] { existingParameter }); - existing.Name = "Space Odyssey"; - - //The return value for our Mock factory - var constructed = Mock.Of(x => x.GetQuerySyntaxHelper()==new MicrosoftQuerySyntaxHelper()); - var constructedParameter = Mock.Of(); - - //The mocked factory - var factory = new Mock(); - factory.Setup(m => m.CreateNewFilter("Copy of Space Odyssey")).Returns(constructed); - factory.Setup(m => m.CreateNewParameter(constructed, "DECLARE @hall2 AS int")).Returns(constructedParameter); - - //The thing we are testing - var filterCreator = new FilterImporter(factory.Object, null); - filterCreator.ImportFilter(WhenIHaveA(), master, new[] { existing }); - - Assert.AreEqual("@hall2 = 'active'",constructed.WhereSQL); - - //Master filter should have been asked what its parameters are - Mock.Get(master).Verify(m => m.GetAllParameters(),Times.Once); - - //Existing filter in the scope should have been asked what its parameters are - Mock.Get(existing).Verify(); - - //The factory should have been asked to create a filter called "Copy of Space Odyssey" and a parameter "@hall2" (because @hall already exists in the import into scope) with type int because master parameter is type int - factory.Verify(); + //The filter we are cloning + var master = Substitute.For(); + master.GetQuerySyntaxHelper().Returns(MicrosoftQuerySyntaxHelper.Instance); + master.Name = "Space Odyssey"; + master.WhereSQL = "@hall = 'active'"; + + //The existing parameter declared on the filter we are cloning + var masterParameter = Substitute.For(); + masterParameter.ParameterName.Returns("@hall"); + masterParameter.Comment.Returns("SomeComment"); + masterParameter.Value.Returns("400"); + masterParameter.ParameterSQL.Returns("DECLARE @hall AS int"); + + //We expect that the filter we are cloning will be asked what its parameters are once (and we tell them the param above) + master.GetAllParameters().Returns(new[] { masterParameter }); + + //An existing parameter that is in the scope that is being imported into + var existingParameter = Substitute.For(); + 
existingParameter.ParameterName.Returns("@hall"); + + //The filter to which the above existing parameter belongs + var existing = Substitute.For(); + existing.GetQuerySyntaxHelper().Returns(MicrosoftQuerySyntaxHelper.Instance); + existing.GetAllParameters().Returns(new[] { existingParameter }); + existing.Name.Returns("Space Odyssey"); + + //The return value for our Mock factory + var constructed = Substitute.For(); + constructed.GetQuerySyntaxHelper().Returns(MicrosoftQuerySyntaxHelper.Instance); + var constructedParameter = Substitute.For(); + + //The mocked factory + var factory = Substitute.For(); + factory.CreateNewFilter("Copy of Space Odyssey").Returns(constructed); + factory.CreateNewParameter(constructed, "DECLARE @hall2 AS int").Returns(constructedParameter); + + //The thing we are testing + var filterCreator = new FilterImporter(factory, null); + filterCreator.ImportFilter(WhenIHaveA(), master, new[] { existing }); + + Assert.AreEqual("@hall2 = 'active'", constructed.WhereSQL); + + //Master filter should have been asked what its parameters are + master.Received(1).GetAllParameters(); + + //Existing filter in the scope should have been asked what its parameters are + existing.Received(1); + + //The factory should have been asked to create a filter called "Copy of Space Odyssey" and a parameter "@hall2" (because @hall already exists in the import into scope) with type int because master parameter is type int + factory.Received(1); } - } -} - - +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/FilterImportingTests/ParameterCreatorTests.cs b/Rdmp.Core.Tests/Curation/Integration/FilterImportingTests/ParameterCreatorTests.cs index 14388b75f2..fab9d64bd9 100644 --- a/Rdmp.Core.Tests/Curation/Integration/FilterImportingTests/ParameterCreatorTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/FilterImportingTests/ParameterCreatorTests.cs @@ -6,166 +6,171 @@ using System; using FAnsi.Implementations.MicrosoftSQL; -using Moq; +using NSubstitute; using NUnit.Framework; using Rdmp.Core.Curation.Data; using Rdmp.Core.Curation.FilterImporting; using Rdmp.Core.Curation.FilterImporting.Construction; -namespace Rdmp.Core.Tests.Curation.Integration.FilterImportingTests +namespace Rdmp.Core.Tests.Curation.Integration.FilterImportingTests; + +[Category("Unit")] +public class ParameterCreatorTests { - [Category("Unit")] - public class ParameterCreatorTests - { [Test] public void NoParametersTest_CreateNotCalled() { - var f = Mock.Of(x => x.GetQuerySyntaxHelper()==new MicrosoftQuerySyntaxHelper()); + var f = Substitute.For(); + f.GetQuerySyntaxHelper().Returns(MicrosoftQuerySyntaxHelper.Instance); - var factory = new Mock(); - factory.Verify(m => m.CreateNewParameter(It.IsAny(), It.IsAny()),Times.Never); + var factory = Substitute.For(); + factory.DidNotReceive().CreateNewParameter(Arg.Any(), Arg.Any()); - var creator = new ParameterCreator(factory.Object, new ISqlParameter[0], null); - creator.CreateAll(f,null); + var creator = new ParameterCreator(factory, Array.Empty(), null); + creator.CreateAll(f, null); - factory.Verify(); + factory.Received(1); } - + [Test] public void SingleParameterTest_NullReturnFromConstruct_Throws() { - var f = Mock.Of(x => x.GetQuerySyntaxHelper()==new MicrosoftQuerySyntaxHelper()); - f.WhereSQL = "@bob = 'bob'"; - - var factory = Mock.Of(m => m.CreateNewParameter(f,"DECLARE @bob AS varchar(50);")==null); + var f = Substitute.For(); + f.GetQuerySyntaxHelper().Returns(MicrosoftQuerySyntaxHelper.Instance); + f.WhereSQL = "@bob = 'bob'"; + + 
var factory = Substitute.For(); + factory.CreateNewParameter(f, "DECLARE @bob AS varchar(50);").Returns(l => null); - var creator = new ParameterCreator(factory, null, null); + var creator = new ParameterCreator(factory, null, null); - var ex = Assert.Throws(()=>creator.CreateAll(f,null)); + var ex = Assert.Throws(() => creator.CreateAll(f, null)); - Assert.IsTrue(ex.Message.StartsWith("Parameter construction method returned null")); + Assert.IsTrue(ex.Message.StartsWith("Parameter construction method returned null")); } - + [Test] public void SingleParameterTest_OneParameter_CreateCalled() { - var p = new Mock();//save should be called because there is no VAlue on the parameter - p.Setup(m => m.SaveToDatabase()); + var p = Substitute.For(); //save should be called because there is no VAlue on the parameter + p.SaveToDatabase(); - var f = Mock.Of(x => x.GetQuerySyntaxHelper()==new MicrosoftQuerySyntaxHelper()); - f.WhereSQL = "@bob = 'bob'"; + var f = Substitute.For(); + f.WhereSQL = "@bob = 'bob'"; + f.GetQuerySyntaxHelper().Returns(MicrosoftQuerySyntaxHelper.Instance); + var factory = Substitute.For(); + factory.CreateNewParameter(f, "DECLARE @bob AS varchar(50);").Returns(p); - var factory = new Mock(); - factory.Setup(m => m.CreateNewParameter(f,"DECLARE @bob AS varchar(50);")).Returns(p.Object); - - var creator = new ParameterCreator(factory.Object, null, null); - creator.CreateAll(f,null); + var creator = new ParameterCreator(factory, null, null); + creator.CreateAll(f, null); - p.Verify(m => m.SaveToDatabase(),Times.Once); - p.Verify(); - factory.Verify(m => m.CreateNewParameter(f,"DECLARE @bob AS varchar(50);"),Times.Once); + p.Received(2).SaveToDatabase(); + p.Received(1); + factory.Received(1).CreateNewParameter(f, "DECLARE @bob AS varchar(50);"); } + [Test] public void SingleParameterTest_ParameterAlreadyExists_CreateNotCalled() { - var p = new Mock();//save should be called because there is no VAlue on the parameter - p.Setup(m => m.SaveToDatabase()); - - var existingParameter = Mock.Of(x => - x.GetQuerySyntaxHelper()==new MicrosoftQuerySyntaxHelper() && - x.ParameterName=="@bob" - ); - - var f = Mock.Of(x => - x.GetQuerySyntaxHelper() == new MicrosoftQuerySyntaxHelper() && - x.WhereSQL == "@bob = 'bob'" && - x.GetAllParameters() == new[] {existingParameter}); - - var factory = new Mock(); - - var creator = new ParameterCreator(factory.Object, null, null); - creator.CreateAll(f,null); - creator.CreateAll(f, null); - creator.CreateAll(f, null);//no matter how many times we call create it shouldn't make more because there is already one - - p.Verify(m=> m.SaveToDatabase(),Times.Never); - factory.Verify(m=> m.CreateNewParameter(f, It.IsAny()),Times.Never); //should never be called because the filter already has + var p = Substitute.For(); //save should be called because there is no VAlue on the parameter + + var existingParameter = Substitute.For(); + existingParameter.GetQuerySyntaxHelper().Returns(MicrosoftQuerySyntaxHelper.Instance); + existingParameter.ParameterName.Returns("@bob"); + + var f = Substitute.For(); + f.GetQuerySyntaxHelper().Returns(MicrosoftQuerySyntaxHelper.Instance); + f.WhereSQL.Returns("@bob = 'bob'"); + f.GetAllParameters().Returns(new[] { existingParameter }); + var factory = Substitute.For(); + + var creator = new ParameterCreator(factory, null, null); + creator.CreateAll(f, null); + creator.CreateAll(f, null); + creator.CreateAll(f, + null); //no matter how many times we call create it shouldn't make more because there is already one + + 
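The assertions immediately below use both of NSubstitute's "never called" forms; DidNotReceive() and Received(0) are interchangeable ways of asserting zero received calls. A self-contained illustration, with the interface names assumed as above:

var factory = Substitute.For<IFilterFactory>();

// Nothing has been called on the fresh substitute, so both assertions pass:
factory.DidNotReceive().CreateNewParameter(Arg.Any<IFilter>(), Arg.Any<string>());
factory.Received(0).CreateNewParameter(Arg.Any<IFilter>(), Arg.Any<string>());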
p.DidNotReceive().SaveToDatabase(); + factory.Received(0).CreateNewParameter(f, Arg.Any()); //should never be called because the filter already has } - + [Test] public void SingleParameterTest_GlobalOverrides_CreateNotCalled() { - var f = Mock.Of(x => x.GetQuerySyntaxHelper()==new MicrosoftQuerySyntaxHelper()); - f.WhereSQL = "@bob = 'bob'"; - - var global = Mock.Of(x=>x.ParameterName=="@bob"); - - var factory = new Mock(); - factory - .Setup(m => m.CreateNewParameter(It.IsAny(), It.IsAny())) - .Throws(); - - var creator = new ParameterCreator(factory.Object, new[] { global }, null); - creator.CreateAll(f,null); - - factory.Verify(); + var f = Substitute.For(); + f.WhereSQL = "@bob = 'bob'"; + f.GetQuerySyntaxHelper().Returns(MicrosoftQuerySyntaxHelper.Instance); + var global = Substitute.For(); + global.ParameterName.Returns("@bob"); + var factory = Substitute.For(); + factory.CreateNewParameter(Arg.Any(), Arg.Any()).Returns(x => { throw new InvalidOperationException(); }); + + var creator = new ParameterCreator(factory, new[] { global }, null); + creator.CreateAll(f, null); + + factory.Received(1); } [Test] public void SingleParameterTest_GlobalButNotSameName_CreateCalled() { - var f = Mock.Of(x => x.GetQuerySyntaxHelper()==new MicrosoftQuerySyntaxHelper()); - f.WhereSQL = "@bob = 'bob'"; + var f = Substitute.For(); + f.WhereSQL = "@bob = 'bob'"; + f.GetQuerySyntaxHelper().Returns(MicrosoftQuerySyntaxHelper.Instance); + var global = Substitute.For(); + global.ParameterName.Returns("@bob"); - var global = Mock.Of(x => x.ParameterName=="@bob"); - - var factory = new Mock(); - factory.Setup(m => m.CreateNewParameter(f, "DECLARE @bob AS varchar(50);")).Returns(Mock.Of); - - var creator = new ParameterCreator(factory.Object, null, null); - creator.CreateAll(f,null); + var factory = Substitute.For(); + factory.CreateNewParameter(f, "DECLARE @bob AS varchar(50);") + .Returns(Substitute.For()); - factory.Verify(m => m.CreateNewParameter(f, "DECLARE @bob AS varchar(50);"),Times.Once); + var creator = new ParameterCreator(factory, null, null); + creator.CreateAll(f, null); + factory.Received(1).CreateNewParameter(f, "DECLARE @bob AS varchar(50);"); } [Test] public void SingleParameterTest_Template_TemplateValuesUsed() { - //The constructor returns - var pstub = Mock.Of(); - - //The filter that requires that the parameters be created - var f = Mock.Of(x => x.GetQuerySyntaxHelper()==new MicrosoftQuerySyntaxHelper()); - f.WhereSQL = "@bob = 'bob'"; - - //The template which is an existing known about parameter from the master filter that is being duplicated. 
This template will be spotted and used to make the new parameter match the cloned filter's one - var template = Mock.Of(x => x.ParameterName=="@bob"); - - template.ParameterSQL = "DECLARE @bob AS int"; - template.Value = "5"; - template.Comment = "fish"; - - var factory = new Mock(); - factory.Setup(m => m.CreateNewParameter(f, "DECLARE @bob AS int")).Returns(pstub); - - var creator = new ParameterCreator(factory.Object, null, new []{template}); - creator.CreateAll(f,null); - - Assert.AreEqual("5", pstub.Value); - Assert.AreEqual("fish", pstub.Comment); - - factory.Verify(m => m.CreateNewParameter(f, "DECLARE @bob AS int"),Times.Once); + //The constructor returns + var pstub = Substitute.For(); + + //The filter that requires that the parameters be created + var f = Substitute.For(); + f.GetQuerySyntaxHelper().Returns(MicrosoftQuerySyntaxHelper.Instance); + f.WhereSQL = "@bob = 'bob'"; + + //The template which is an existing known about parameter from the master filter that is being duplicated. This template will be spotted and used to make the new parameter match the cloned filter's one + var template = Substitute.For(); + template.ParameterName.Returns("@bob"); + + template.ParameterSQL = "DECLARE @bob AS int"; + template.Value = "5"; + template.Comment = "fish"; + + var factory = Substitute.For(); + factory.CreateNewParameter(f, "DECLARE @bob AS int").Returns(pstub); + + var creator = new ParameterCreator(factory, null, new[] { template }); + creator.CreateAll(f, null); + + Assert.AreEqual("5", pstub.Value); + Assert.AreEqual("fish", pstub.Comment); + + factory.Received(1).CreateNewParameter(f, "DECLARE @bob AS int"); } [TestCase("[MyTable].[MyCol] = @name", "@name", "@name2", "[MyTable].[MyCol] = @name2")] [TestCase("[Col]=@name OR [Col]=@name2", "@name", "@chtulhu", "[Col]=@chtulhu OR [Col]=@name2")] [TestCase("([MyTable].[MyCol] = @name) OR ...", "@name", "@name2", "([MyTable].[MyCol] = @name2) OR ...")] - [TestCase("[MyTable].[MyCol] = @name2", "@name", "@cthulhu", "[MyTable].[MyCol] = @name2")]//No match since it is a substring + [TestCase("[MyTable].[MyCol] = @name2", "@name", "@cthulhu", + "[MyTable].[MyCol] = @name2")] //No match since it is a substring [TestCase("[MyTable].[MyCol] = @name_2", "@name", "@cthulhu", "[MyTable].[MyCol] = @name_2")] - [TestCase("[MyTable].[MyCol] = @name@@coconuts", "@name", "@cthulhu", "[MyTable].[MyCol] = @name@@coconuts")]//No match since @ is a legit word to use in a parameter name making @name@coconuts legal name for a + [TestCase("[MyTable].[MyCol] = @name@@coconuts", "@name", "@cthulhu", + "[MyTable].[MyCol] = @name@@coconuts")] //No match since @ is a legit word to use in a parameter name making @name@coconuts legal name for a [TestCase("@a=a", "@a", "@b", "@b=a")] [TestCase(@"a=@a OR @@ -174,15 +179,15 @@ public void SingleParameterTest_Template_TemplateValuesUsed() b=@b")] public void ReplaceParametersSQL(string haystack, string needle, string replacement, string expectedOutput) { - var output = ParameterCreator.RenameParameterInSQL(haystack, needle, replacement); - Assert.AreEqual(expectedOutput,output); + var output = ParameterCreator.RenameParameterInSQL(haystack, needle, replacement); + Assert.AreEqual(expectedOutput, output); } [Test] public void SequentialReplacementSQL() { - var haystack = - @"/*Paracetamol*/ + var haystack = + @"/*Paracetamol*/ [test]..[prescribing].[approved_name] LIKE @drugName OR /*Ketamine*/ @@ -192,13 +197,13 @@ [test]..[prescribing].[approved_name] LIKE @drugName2 [test]..[prescribing].[approved_name] LIKE 
@drugName3"; - var newString = ParameterCreator.RenameParameterInSQL(haystack, "@drugName", "@drugName_2"); - newString = ParameterCreator.RenameParameterInSQL(newString, "@drugName2", "@drugName2_2"); - newString = ParameterCreator.RenameParameterInSQL(newString, "@drugName3", "@drugName3_2"); + var newString = ParameterCreator.RenameParameterInSQL(haystack, "@drugName", "@drugName_2"); + newString = ParameterCreator.RenameParameterInSQL(newString, "@drugName2", "@drugName2_2"); + newString = ParameterCreator.RenameParameterInSQL(newString, "@drugName3", "@drugName3_2"); - var expectedoutput = - @"/*Paracetamol*/ + var expectedoutput = + @"/*Paracetamol*/ [test]..[prescribing].[approved_name] LIKE @drugName_2 OR /*Ketamine*/ @@ -208,7 +213,6 @@ [test]..[prescribing].[approved_name] LIKE @drugName2_2 [test]..[prescribing].[approved_name] LIKE @drugName3_2"; - Assert.AreEqual(expectedoutput,newString); + Assert.AreEqual(expectedoutput, newString); } - } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/ForwardEngineerANOCatalogueTests.cs b/Rdmp.Core.Tests/Curation/Integration/ForwardEngineerANOCatalogueTests.cs index 9265150b34..9eb7063222 100644 --- a/Rdmp.Core.Tests/Curation/Integration/ForwardEngineerANOCatalogueTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/ForwardEngineerANOCatalogueTests.cs @@ -18,480 +18,521 @@ using Rdmp.Core.DataLoad.Modules.Mutilators.Dilution.Operations; using Rdmp.Core.DataLoad.Triggers; using Rdmp.Core.QueryBuilding; -using ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Checks; using Tests.Common; using Tests.Common.Scenarios; using TypeGuesser; -namespace Rdmp.Core.Tests.Curation.Integration +namespace Rdmp.Core.Tests.Curation.Integration; + +public class ForwardEngineerANOCatalogueTests : TestsRequiringFullAnonymisationSuite { - public class ForwardEngineerANOCatalogueTests : TestsRequiringFullAnonymisationSuite + [SetUp] + protected override void SetUp() { - [SetUp] - protected override void SetUp() - { - base.SetUp(); + base.SetUp(); - BlitzMainDataTables(); + BlitzMainDataTables(); - if(ANOStore_Database.Exists()) - DeleteTables(ANOStore_Database); - } + if (ANOStore_Database.Exists()) + DeleteTables(ANOStore_Database); + } - [Test] - public void PlanManagementTest() - { - var dbName = TestDatabaseNames.GetConsistentName("PlanManagementTests"); + [Test] + public void PlanManagementTest() + { + var dbName = TestDatabaseNames.GetConsistentName("PlanManagementTests"); - var db = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(dbName); + var db = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(dbName); - db.Create(true); + db.Create(true); - BulkTestsData bulk = new BulkTestsData(CatalogueRepository, GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer), 100); - bulk.SetupTestData(); - bulk.ImportAsCatalogue(); + var bulk = new BulkTestsData(CatalogueRepository, GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer), 100); + bulk.SetupTestData(); + bulk.ImportAsCatalogue(); - var planManager = new ForwardEngineerANOCataloguePlanManager(RepositoryLocator, bulk.catalogue); - planManager.TargetDatabase = db; + var planManager = new ForwardEngineerANOCataloguePlanManager(RepositoryLocator, bulk.catalogue) + { + TargetDatabase = db + }; - //no operations are as yet configured - Assert.DoesNotThrow(() => planManager.Check(new ThrowImmediatelyCheckNotifier())); + //no operations are as yet configured + Assert.DoesNotThrow(() => 
planManager.Check(ThrowImmediatelyCheckNotifier.Quiet)); - //create a table with the same name in the endpoint database to confirm that that's a problem - db.CreateTable(bulk.tableInfo.GetRuntimeName(), new DatabaseColumnRequest[] - { - new DatabaseColumnRequest("fish", "varchar(100)") - }); + //create a table with the same name in the endpoint database to confirm that that's a problem + db.CreateTable(bulk.tableInfo.GetRuntimeName(), new DatabaseColumnRequest[] + { + new("fish", "varchar(100)") + }); - //throws because table already exists - Assert.Throws(() => planManager.Check(new ThrowImmediatelyCheckNotifier())); + //throws because table already exists + Assert.Throws(() => planManager.Check(ThrowImmediatelyCheckNotifier.Quiet)); - db.ExpectTable(bulk.tableInfo.GetRuntimeName()).Drop(); + db.ExpectTable(bulk.tableInfo.GetRuntimeName()).Drop(); - //back to being fine again - Assert.DoesNotThrow(() => planManager.Check(new ThrowImmediatelyCheckNotifier())); + //back to being fine again + Assert.DoesNotThrow(() => planManager.Check(ThrowImmediatelyCheckNotifier.Quiet)); - //setup test rules for migrator - CreateMigrationRules(planManager, bulk); + //setup test rules for migrator + CreateMigrationRules(planManager, bulk); - //rules should pass - Assert.DoesNotThrow(() => planManager.Check(new ThrowImmediatelyCheckNotifier())); + //rules should pass + Assert.DoesNotThrow(() => planManager.Check(ThrowImmediatelyCheckNotifier.Quiet)); - var chi = bulk.GetColumnInfo("chi"); - Assert.Throws(() => + var chi = bulk.GetColumnInfo("chi"); + Assert.Throws(() => { planManager.GetPlanForColumnInfo(chi).Plan = Plan.Drop; - planManager.GetPlanForColumnInfo(chi).Check(new ThrowImmediatelyCheckNotifier()); - + planManager.GetPlanForColumnInfo(chi).Check(ThrowImmediatelyCheckNotifier.Quiet); } + , "Should not be able to drop primary key column"); - ,"Should not be able to drop primary key column"); + db.Drop(); + } - db.Drop(); - } + [Test] + public void CreateANOVersionTest() + { + var dbName = TestDatabaseNames.GetConsistentName("CreateANOVersionTest"); + + var db = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(dbName); + + db.Create(true); - [Test] - public void CreateANOVersionTest() + var bulk = new BulkTestsData(CatalogueRepository, GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer), 100); + bulk.SetupTestData(); + bulk.ImportAsCatalogue(); + + var planManager = new ForwardEngineerANOCataloguePlanManager(RepositoryLocator, bulk.catalogue) { - var dbName = TestDatabaseNames.GetConsistentName("CreateANOVersionTest"); + TargetDatabase = db + }; + + //setup test rules for migrator + CreateMigrationRules(planManager, bulk); - var db = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(dbName); + //rules should pass checks + Assert.DoesNotThrow(() => planManager.Check(ThrowImmediatelyCheckNotifier.Quiet)); - db.Create(true); + var engine = new ForwardEngineerANOCatalogueEngine(RepositoryLocator, planManager); + engine.Execute(); - BulkTestsData bulk = new BulkTestsData(CatalogueRepository, GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer), 100); - bulk.SetupTestData(); - bulk.ImportAsCatalogue(); + var anoCatalogue = CatalogueRepository.GetAllObjects().Single(c => c.Folder.StartsWith("\\ano")); + Assert.IsTrue(anoCatalogue.Exists()); - var planManager = new ForwardEngineerANOCataloguePlanManager(RepositoryLocator,bulk.catalogue); - planManager.TargetDatabase = db; + db.Drop(); - //setup test rules for migrator - CreateMigrationRules(planManager, bulk); + 
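Throughout this file the patch also swaps new ThrowImmediatelyCheckNotifier() for the shared ThrowImmediatelyCheckNotifier.Quiet instance, and the { ThrowOnWarning = true } variant for QuietPicky (see the GovernanceTests changes further down). The sketch below illustrates the cached-instance pattern these call sites rely on; it is an illustration only, not the actual Rdmp.Core.ReusableLibraryCode.Checks implementation, and the real class additionally implements RDMP's check-notifier interface, omitted here.

// Hypothetical shape, for illustration of the Quiet/QuietPicky call sites above and below.
public class ThrowImmediatelyCheckNotifierSketch
{
    public bool ThrowOnWarning { get; init; }

    // Shared instances so call sites no longer allocate a fresh notifier per check:
    public static readonly ThrowImmediatelyCheckNotifierSketch Quiet = new();
    public static readonly ThrowImmediatelyCheckNotifierSketch QuietPicky = new() { ThrowOnWarning = true };
}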
var exports = CatalogueRepository.GetAllObjects().Length; + var imports = CatalogueRepository.GetAllObjects().Length; - //rules should pass checks - Assert.DoesNotThrow(() => planManager.Check(new ThrowImmediatelyCheckNotifier())); + Assert.AreEqual(exports, imports); + Assert.IsTrue(exports > 0); + } - var engine = new ForwardEngineerANOCatalogueEngine(RepositoryLocator, planManager); - engine.Execute(); + [Test] + public void CreateANOVersionTest_IntIdentity() + { + var dbName = TestDatabaseNames.GetConsistentName("CreateANOVersionTest"); - var anoCatalogue = CatalogueRepository.GetAllObjects().Single(c => c.Folder.StartsWith("\\ano")); - Assert.IsTrue(anoCatalogue.Exists()); + //setup the anonymisation database (destination) + var db = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(dbName); - db.Drop(); + db.Create(true); - var exports = CatalogueRepository.GetAllObjects().Count(); - var imports = CatalogueRepository.GetAllObjects().Count(); + //Create this table in the scratch database + var tbl = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer).CreateTable("MyTable", new[] + { + new DatabaseColumnRequest("id", "int identity(1,1)", false) { IsPrimaryKey = true }, + new DatabaseColumnRequest("Name", new DatabaseTypeRequest(typeof(string), 10), false) + }); - Assert.AreEqual(exports, imports); - Assert.IsTrue(exports > 0); - } + var cata = Import(tbl, out var ti, out var cols); - [Test] - public void CreateANOVersionTest_IntIdentity() + var planManager = new ForwardEngineerANOCataloguePlanManager(RepositoryLocator, cata) { - var dbName = TestDatabaseNames.GetConsistentName("CreateANOVersionTest"); + TargetDatabase = db + }; - //setup the anonymisation database (destination) - var db = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(dbName); + var nameCol = cols.Single(c => c.GetRuntimeName().Equals("Name")); - db.Create(true); + //setup test rules for migrator + planManager.Plans[nameCol].Plan = Plan.Drop; - //Create this table in the scratch database - var tbl = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer).CreateTable("MyTable", new[] - { - new DatabaseColumnRequest("id", "int identity(1,1)", false) {IsPrimaryKey = true}, - new DatabaseColumnRequest("Name", new DatabaseTypeRequest(typeof (string), 10), false) - }); + //rules should pass checks + planManager.Check(ThrowImmediatelyCheckNotifier.Quiet); - var cata = Import(tbl,out var ti,out var cols); + var engine = new ForwardEngineerANOCatalogueEngine(RepositoryLocator, planManager); + engine.Execute(); - var planManager = new ForwardEngineerANOCataloguePlanManager(RepositoryLocator, cata); - planManager.TargetDatabase = db; + var anoCatalogue = CatalogueRepository.GetAllObjects().Single(c => c.Folder.StartsWith("\\ano")); + Assert.IsTrue(anoCatalogue.Exists()); - var nameCol = cols.Single(c => c.GetRuntimeName().Equals("Name")); + //should only be one (the id column + Assert.AreEqual(1, anoCatalogue.CatalogueItems.Length); + var idColInAnoDatabase = anoCatalogue.CatalogueItems[0].ColumnInfo; + Assert.AreEqual("int", idColInAnoDatabase.Data_type); - //setup test rules for migrator - planManager.Plans[nameCol].Plan = Plan.Drop; + db.Drop(); - //rules should pass checks - planManager.Check(new ThrowImmediatelyCheckNotifier()); + var exports = CatalogueRepository.GetAllObjects().Length; + var imports = CatalogueRepository.GetAllObjects().Length; - var engine = new ForwardEngineerANOCatalogueEngine(RepositoryLocator, planManager); - engine.Execute(); + Assert.AreEqual(exports, 
imports); + Assert.IsTrue(exports > 0); + } - var anoCatalogue = CatalogueRepository.GetAllObjects().Single(c => c.Folder.StartsWith("\\ano")); - Assert.IsTrue(anoCatalogue.Exists()); - //should only be one (the id column - Assert.AreEqual(1,anoCatalogue.CatalogueItems.Length); - var idColInAnoDatabase = anoCatalogue.CatalogueItems[0].ColumnInfo; - Assert.AreEqual("int", idColInAnoDatabase.Data_type); - - db.Drop(); + [Test] + [TestCase(false, false)] + [TestCase(false, true)] + [TestCase(true, false)] + [TestCase(true, true)] + public void CreateANOVersion_TestSkippingTables(bool tableInfoAlreadyExistsForSkippedTable, + bool putPlanThroughSerialization) + { + var dbFrom = From; + var dbTo = To; + + dbFrom.Create(true); + dbTo.Create(true); + + var tblFromHeads = dbFrom.CreateTable("Heads", new[] + { + new DatabaseColumnRequest("SkullColor", "varchar(10)"), + new DatabaseColumnRequest("Vertebrae", "varchar(25)") + }); + + var cols = new[] + { + new DatabaseColumnRequest("SpineColor", "varchar(10)"), + new DatabaseColumnRequest("Vertebrae", "varchar(25)") + }; - var exports = CatalogueRepository.GetAllObjects().Count(); - var imports = CatalogueRepository.GetAllObjects().Count(); + var tblFromNeck = dbFrom.CreateTable("Necks", cols); - Assert.AreEqual(exports, imports); - Assert.IsTrue(exports > 0); + //Necks table already exists in the destination so will be skipped for migration but still needs to be imported + var tblToNeck = dbTo.CreateTable("Necks", cols); + + var i1 = new TableInfoImporter(CatalogueRepository, tblFromHeads); + i1.DoImport(out var fromHeadsTableInfo, out var fromHeadsColumnInfo); + + var i2 = new TableInfoImporter(CatalogueRepository, tblFromNeck); + i2.DoImport(out var fromNeckTableInfo, out var fromNeckColumnInfo); + + //Table already exists but does the in Catalogue reference exist? 
+ if (tableInfoAlreadyExistsForSkippedTable) + { + var i3 = new TableInfoImporter(CatalogueRepository, tblToNeck); + i3.DoImport(out var toNecksTableInfo, out var toNecksColumnInfo); } - + //Create a JoinInfo so the query builder knows how to connect the tables + new JoinInfo(CatalogueRepository, + fromHeadsColumnInfo.Single(c => c.GetRuntimeName().Equals("Vertebrae")), + fromNeckColumnInfo.Single(c => c.GetRuntimeName().Equals("Vertebrae")), ExtractionJoinType.Inner, null + ); + + var cataEngineer = new ForwardEngineerCatalogue(fromHeadsTableInfo, fromHeadsColumnInfo); + cataEngineer.ExecuteForwardEngineering(out var cata, out var cataItems, out var extractionInformations); + + var cataEngineer2 = new ForwardEngineerCatalogue(fromNeckTableInfo, fromNeckColumnInfo); + cataEngineer2.ExecuteForwardEngineering(cata); - [Test] - [TestCase(false,false)] - [TestCase(false, true)] - [TestCase(true,false)] - [TestCase(true,true)] - public void CreateANOVersion_TestSkippingTables(bool tableInfoAlreadyExistsForSkippedTable,bool putPlanThroughSerialization) + //4 extraction informations in from Catalogue (2 from Heads and 2 from Necks) + Assert.AreEqual(cata.GetAllExtractionInformation(ExtractionCategory.Any).Length, 4); + + //setup ANOTable on head + var anoTable = new ANOTable(CatalogueRepository, ANOStore_ExternalDatabaseServer, "ANOSkullColor", "C") + { + NumberOfCharactersToUseInAnonymousRepresentation = 10 + }; + anoTable.SaveToDatabase(); + anoTable.PushToANOServerAsNewTable("varchar(10)", ThrowImmediatelyCheckNotifier.Quiet); + + //////////////////The actual test!///////////////// + var planManager = new ForwardEngineerANOCataloguePlanManager(RepositoryLocator, cata); + + //ano the table SkullColor + var scPlan = + planManager.GetPlanForColumnInfo( + fromHeadsColumnInfo.Single(col => col.GetRuntimeName().Equals("SkullColor"))); + scPlan.ANOTable = anoTable; + scPlan.Plan = Plan.ANO; + + if (putPlanThroughSerialization) { - var dbFrom = From; - var dbTo = To; + var asString = JsonConvertExtensions.SerializeObject(planManager, RepositoryLocator); - dbFrom.Create(true); - dbTo.Create(true); + planManager = (ForwardEngineerANOCataloguePlanManager)JsonConvertExtensions.DeserializeObject(asString, + typeof(ForwardEngineerANOCataloguePlanManager), RepositoryLocator); + } - var tblFromHeads = dbFrom.CreateTable("Heads", new[] - { - new DatabaseColumnRequest("SkullColor", "varchar(10)"), - new DatabaseColumnRequest("Vertebrae", "varchar(25)") - }); + //not part of serialization + planManager.TargetDatabase = dbTo; + planManager.SkippedTables + .Add(fromNeckTableInfo); //skip the necks table because it already exists (ColumnInfos may or may not exist but physical table definetly does) - var cols = new[] - { - new DatabaseColumnRequest("SpineColor", "varchar(10)"), - new DatabaseColumnRequest("Vertebrae", "varchar(25)") - }; + var engine = new ForwardEngineerANOCatalogueEngine(RepositoryLocator, planManager); - var tblFromNeck = dbFrom.CreateTable("Necks",cols); + if (!tableInfoAlreadyExistsForSkippedTable) + { + var ex = Assert.Throws(engine.Execute); + Assert.IsTrue(Regex.IsMatch(ex.InnerException.Message, "Found '0' ColumnInfos called")); + Assert.IsTrue(Regex.IsMatch(ex.InnerException.Message, "[Necks].[SpineColor]")); - //Necks table already exists in the destination so will be skipped for migration but still needs to be imported - var tblToNeck = dbTo.CreateTable("Necks", cols); + return; + } - TableInfoImporter i1 = new TableInfoImporter(CatalogueRepository, tblFromHeads); - i1.DoImport(out var 
fromHeadsTableInfo,out var fromHeadsColumnInfo); + engine.Execute(); - TableInfoImporter i2 = new TableInfoImporter(CatalogueRepository, tblFromNeck); - i2.DoImport(out var fromNeckTableInfo,out var fromNeckColumnInfo); - - //Table already exists but does the in Catalogue reference exist? - if(tableInfoAlreadyExistsForSkippedTable) - { - TableInfoImporter i3 = new TableInfoImporter(CatalogueRepository, tblToNeck); - i3.DoImport(out var toNecksTableInfo,out var toNecksColumnInfo); - } + var newCata = CatalogueRepository.GetAllObjects().Single(c => c.Name.Equals("ANOHeads")); + Assert.IsTrue(newCata.Exists()); - //Create a JoinInfo so the query builder knows how to connect the tables - new JoinInfo(CatalogueRepository, - fromHeadsColumnInfo.Single(c => c.GetRuntimeName().Equals("Vertebrae")), - fromNeckColumnInfo.Single(c => c.GetRuntimeName().Equals("Vertebrae")), ExtractionJoinType.Inner, null - ); - - var cataEngineer = new ForwardEngineerCatalogue(fromHeadsTableInfo, fromHeadsColumnInfo); - cataEngineer.ExecuteForwardEngineering(out var cata,out var cataItems,out var extractionInformations); - - var cataEngineer2 = new ForwardEngineerCatalogue(fromNeckTableInfo, fromNeckColumnInfo); - cataEngineer2.ExecuteForwardEngineering(cata); - - //4 extraction informations in from Catalogue (2 from Heads and 2 from Necks) - Assert.AreEqual(cata.GetAllExtractionInformation(ExtractionCategory.Any).Count(),4); - - //setup ANOTable on head - var anoTable = new ANOTable(CatalogueRepository, ANOStore_ExternalDatabaseServer, "ANOSkullColor", "C"); - anoTable.NumberOfCharactersToUseInAnonymousRepresentation = 10; - anoTable.SaveToDatabase(); - anoTable.PushToANOServerAsNewTable("varchar(10)",new ThrowImmediatelyCheckNotifier()); - - //////////////////The actual test!///////////////// - var planManager = new ForwardEngineerANOCataloguePlanManager(RepositoryLocator,cata); - - //ano the table SkullColor - var scPlan = planManager.GetPlanForColumnInfo(fromHeadsColumnInfo.Single(col => col.GetRuntimeName().Equals("SkullColor"))); - scPlan.ANOTable = anoTable; - scPlan.Plan = Plan.ANO; + var newCataItems = newCata.CatalogueItems; + Assert.AreEqual(newCataItems.Length, 4); - if (putPlanThroughSerialization) - { - var asString = JsonConvertExtensions.SerializeObject(planManager, RepositoryLocator); + //should be extraction informations + //all extraction informations should point to the new table location + Assert.IsTrue(newCataItems.All(ci => ci.ExtractionInformation.SelectSQL.Contains(dbTo.GetRuntimeName()))); - planManager = (ForwardEngineerANOCataloguePlanManager)JsonConvertExtensions.DeserializeObject( asString, typeof(ForwardEngineerANOCataloguePlanManager), RepositoryLocator); - } - - //not part of serialization - planManager.TargetDatabase = dbTo; - planManager.SkippedTables.Add(fromNeckTableInfo);//skip the necks table because it already exists (ColumnInfos may or may not exist but physical table definetly does) + //these columns should all exist + Assert.IsTrue(newCataItems.Any(ci => ci.ExtractionInformation.SelectSQL.Contains("SkullColor"))); + Assert.IsTrue(newCataItems.Any(ci => ci.ExtractionInformation.SelectSQL.Contains("SpineColor"))); + Assert.IsTrue(newCataItems.Any(ci => + ci.ExtractionInformation.SelectSQL + .Contains("Vertebrae"))); //actually there will be 2 copies of this one from Necks one from Heads - var engine = new ForwardEngineerANOCatalogueEngine(RepositoryLocator, planManager); + //new ColumnInfo should have a reference to the anotable + Assert.IsTrue(newCataItems.Single(ci => 
ci.Name.Equals("ANOSkullColor")).ColumnInfo.ANOTable_ID == anoTable.ID); - if (!tableInfoAlreadyExistsForSkippedTable) - { - var ex = Assert.Throws(engine.Execute); - Assert.IsTrue(Regex.IsMatch(ex.InnerException.Message, "Found '0' ColumnInfos called")); - Assert.IsTrue(Regex.IsMatch(ex.InnerException.Message, "[Necks].[SpineColor]")); - - return; - } - else - engine.Execute(); - var newCata = CatalogueRepository.GetAllObjects().Single(c => c.Name.Equals("ANOHeads")); - Assert.IsTrue(newCata.Exists()); + var newSpineColorColumnInfo = newCataItems.Single(ci => ci.Name.Equals("ANOSkullColor")).ColumnInfo; + + //table info already existed, make sure the new CatalogueItems point to the same columninfos / table infos + Assert.IsTrue(newCataItems.Select(ci => ci.ColumnInfo).Contains(newSpineColorColumnInfo)); + } + + [Test] + public void CreateANOVersionTest_LookupsAndExtractionInformations() + { + var dbName = TestDatabaseNames.GetConsistentName("CreateANOVersionTest"); - var newCataItems = newCata.CatalogueItems; - Assert.AreEqual(newCataItems.Count(),4); + var db = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(dbName); - //should be extraction informations - //all extraction informations should point to the new table location - Assert.IsTrue(newCataItems.All(ci => ci.ExtractionInformation.SelectSQL.Contains(dbTo.GetRuntimeName()))); + db.Create(true); - //these columns should all exist - Assert.IsTrue(newCataItems.Any(ci => ci.ExtractionInformation.SelectSQL.Contains("SkullColor"))); - Assert.IsTrue(newCataItems.Any(ci => ci.ExtractionInformation.SelectSQL.Contains("SpineColor"))); - Assert.IsTrue(newCataItems.Any(ci => ci.ExtractionInformation.SelectSQL.Contains("Vertebrae"))); //actually there will be 2 copies of this one from Necks one from Heads + var bulk = new BulkTestsData(CatalogueRepository, GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer), 100); + bulk.SetupTestData(); + bulk.ImportAsCatalogue(); - //new ColumnInfo should have a reference to the anotable - Assert.IsTrue(newCataItems.Single(ci => ci.Name.Equals("ANOSkullColor")).ColumnInfo.ANOTable_ID == anoTable.ID); + //Create a lookup table on the server + var lookupTbl = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer).CreateTable("z_sexLookup", new[] + { + new DatabaseColumnRequest("Code", "varchar(1)") { IsPrimaryKey = true }, + new DatabaseColumnRequest("hb_Code", "varchar(1)") { IsPrimaryKey = true }, + new DatabaseColumnRequest("Description", "varchar(100)") + }); + //import a reference to the table + var importer = new TableInfoImporter(CatalogueRepository, lookupTbl); + importer.DoImport(out var lookupTableInfo, out var lookupColumnInfos); - var newSpineColorColumnInfo = newCataItems.Single(ci => ci.Name.Equals("ANOSkullColor")).ColumnInfo; + //Create a Lookup reference + var ciSex = bulk.catalogue.CatalogueItems.Single(c => c.Name == "sex"); + var ciHb = bulk.catalogue.CatalogueItems.Single(c => c.Name == "hb_extract"); - //table info already existed, make sure the new CatalogueItems point to the same columninfos / table infos - Assert.IsTrue(newCataItems.Select(ci=>ci.ColumnInfo).Contains(newSpineColorColumnInfo)); - } - - [Test] - public void CreateANOVersionTest_LookupsAndExtractionInformations() + var eiChi = bulk.extractionInformations.Single(ei => ei.GetRuntimeName() == "chi"); + eiChi.IsExtractionIdentifier = true; + eiChi.SaveToDatabase(); + + var eiCentury = bulk.extractionInformations.Single(ei => ei.GetRuntimeName() == "century"); + eiCentury.HashOnDataRelease = true; + 
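The SelectSQL assignment a few lines below is one of several places where the patch rewrites string.Format calls as interpolated strings; the two forms produce identical output. A trivial sketch with a made-up table name:

var tableName = "[MyDb]..[BulkData]";   // hypothetical value, for illustration only
var oldStyle = string.Format("LEFT(10,{0}.[current_postcode])", tableName);
var newStyle = $"LEFT(10,{tableName}.[current_postcode])";
System.Diagnostics.Debug.Assert(oldStyle == newStyle);   // both yield LEFT(10,[MyDb]..[BulkData].[current_postcode])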
eiCentury.ExtractionCategory = ExtractionCategory.Internal; + eiCentury.SaveToDatabase(); + + //add a transform + var eiPostcode = bulk.extractionInformations.Single(ei => ei.GetRuntimeName() == "current_postcode"); + + eiPostcode.SelectSQL = $"LEFT(10,{eiPostcode.ColumnInfo.TableInfo.Name}.[current_postcode])"; + eiPostcode.Alias = "MyMutilatedColumn"; + eiPostcode.SaveToDatabase(); + + //add a combo transform + var ciComboCol = new CatalogueItem(CatalogueRepository, bulk.catalogue, "ComboColumn"); + + var colForename = bulk.columnInfos.Single(c => c.GetRuntimeName() == "forename"); + var colSurname = bulk.columnInfos.Single(c => c.GetRuntimeName() == "surname"); + + var eiComboCol = new ExtractionInformation(CatalogueRepository, ciComboCol, colForename, + $"{colForename} + ' ' + {colSurname}") { - var dbName = TestDatabaseNames.GetConsistentName("CreateANOVersionTest"); + Alias = "ComboColumn" + }; + eiComboCol.SaveToDatabase(); - var db = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(dbName); + var eiDataLoadRunId = + bulk.extractionInformations.Single(ei => ei.GetRuntimeName().Equals(SpecialFieldNames.DataLoadRunID)); + eiDataLoadRunId.DeleteInDatabase(); - db.Create(true); - BulkTestsData bulk = new BulkTestsData(CatalogueRepository, GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer), 100); - bulk.SetupTestData(); - bulk.ImportAsCatalogue(); + var lookup = new Lookup(CatalogueRepository, lookupColumnInfos[2], ciSex.ColumnInfo, lookupColumnInfos[0], + ExtractionJoinType.Left, null); - //Create a lookup table on the server - var lookupTbl = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer).CreateTable("z_sexLookup", new[] - { - new DatabaseColumnRequest("Code", "varchar(1)"){IsPrimaryKey = true}, - new DatabaseColumnRequest("hb_Code", "varchar(1)"){IsPrimaryKey = true}, - new DatabaseColumnRequest("Description", "varchar(100)") - }); - - //import a reference to the table - TableInfoImporter importer = new TableInfoImporter(CatalogueRepository,lookupTbl); - importer.DoImport(out var lookupTableInfo,out var lookupColumnInfos); - - //Create a Lookup reference - var ciSex = bulk.catalogue.CatalogueItems.Single(c => c.Name == "sex"); - var ciHb = bulk.catalogue.CatalogueItems.Single(c => c.Name == "hb_extract"); - - var eiChi = bulk.extractionInformations.Single(ei => ei.GetRuntimeName() == "chi"); - eiChi.IsExtractionIdentifier = true; - eiChi.SaveToDatabase(); + //now let's make it worse, let's assume the sex code changes per healthboard therefore the join to the lookup requires both fields sex and hb_extract + var compositeLookup = + new LookupCompositeJoinInfo(CatalogueRepository, lookup, ciHb.ColumnInfo, lookupColumnInfos[1]); - var eiCentury = bulk.extractionInformations.Single(ei => ei.GetRuntimeName() == "century"); - eiCentury.HashOnDataRelease = true; - eiCentury.ExtractionCategory = ExtractionCategory.Internal; - eiCentury.SaveToDatabase(); + //now let's make the _Desc field in the original Catalogue + var orderToInsertDescriptionFieldAt = ciSex.ExtractionInformation.Order; - //add a transform - var eiPostcode = bulk.extractionInformations.Single(ei => ei.GetRuntimeName() == "current_postcode"); + //bump everyone down 1 + foreach (var toBumpDown in bulk.catalogue.CatalogueItems.Select(ci => ci.ExtractionInformation) + .Where(e => e != null && e.Order > orderToInsertDescriptionFieldAt)) + { + toBumpDown.Order++; + toBumpDown.SaveToDatabase(); + } - eiPostcode.SelectSQL = string.Format("LEFT(10,{0}.[current_postcode])", 
eiPostcode.ColumnInfo.TableInfo.Name); - eiPostcode.Alias = "MyMutilatedColumn"; - eiPostcode.SaveToDatabase(); + var ciDescription = new CatalogueItem(CatalogueRepository, bulk.catalogue, "Sex_Desc"); + var eiDescription = new ExtractionInformation(CatalogueRepository, ciDescription, lookupColumnInfos[2], + lookupColumnInfos[2].Name) + { + Alias = "Sex_Desc", + Order = orderToInsertDescriptionFieldAt + 1, + ExtractionCategory = ExtractionCategory.Supplemental + }; + eiDescription.SaveToDatabase(); - //add a combo transform - var ciComboCol = new CatalogueItem(CatalogueRepository, bulk.catalogue, "ComboColumn"); + bulk.catalogue.ClearAllInjections(); - var colForename = bulk.columnInfos.Single(c => c.GetRuntimeName() == "forename"); - var colSurname = bulk.columnInfos.Single(c => c.GetRuntimeName() == "surname"); + //check it worked + var qb = new QueryBuilder(null, null); + qb.AddColumnRange(bulk.catalogue.GetAllExtractionInformation(ExtractionCategory.Any)); - var eiComboCol = new ExtractionInformation(CatalogueRepository, ciComboCol, colForename,colForename + " + ' ' + " + colSurname ); - eiComboCol.Alias = "ComboColumn"; - eiComboCol.SaveToDatabase(); + //The query builder should be able to succesfully create SQL + Console.WriteLine(qb.SQL); - var eiDataLoadRunId = bulk.extractionInformations.Single(ei => ei.GetRuntimeName().Equals(SpecialFieldNames.DataLoadRunID)); - eiDataLoadRunId.DeleteInDatabase(); - + //there should be 2 tables involved in the query [z_sexLookup] and [BulkData] + Assert.AreEqual(2, qb.TablesUsedInQuery.Count); - var lookup = new Lookup(CatalogueRepository, lookupColumnInfos[2], ciSex.ColumnInfo, lookupColumnInfos[0],ExtractionJoinType.Left, null); - - //now let's make it worse, let's assume the sex code changes per healthboard therefore the join to the lookup requires both fields sex and hb_extract - var compositeLookup = new LookupCompositeJoinInfo(CatalogueRepository, lookup, ciHb.ColumnInfo, lookupColumnInfos[1]); + //the query builder should have identified the lookup + Assert.AreEqual(lookup, qb.GetDistinctRequiredLookups().Single()); - //now let's make the _Desc field in the original Catalogue - int orderToInsertDescriptionFieldAt = ciSex.ExtractionInformation.Order; + //////////////////////////////////////////////////////////////////////////////////////The Actual Bit Being Tested//////////////////////////////////////////////////// + var planManager = new ForwardEngineerANOCataloguePlanManager(RepositoryLocator, bulk.catalogue) + { + TargetDatabase = db + }; - //bump everyone down 1 - foreach (var toBumpDown in bulk.catalogue.CatalogueItems.Select(ci=>ci.ExtractionInformation).Where(e =>e != null && e.Order > orderToInsertDescriptionFieldAt)) - { - toBumpDown.Order++; - toBumpDown.SaveToDatabase(); - } - - var ciDescription = new CatalogueItem(CatalogueRepository, bulk.catalogue, "Sex_Desc"); - var eiDescription = new ExtractionInformation(CatalogueRepository, ciDescription, lookupColumnInfos[2],lookupColumnInfos[2].Name); - eiDescription.Alias = "Sex_Desc"; - eiDescription.Order = orderToInsertDescriptionFieldAt +1; - eiDescription.ExtractionCategory = ExtractionCategory.Supplemental; - eiDescription.SaveToDatabase(); - - bulk.catalogue.ClearAllInjections(); - - //check it worked - QueryBuilder qb = new QueryBuilder(null,null); - qb.AddColumnRange(bulk.catalogue.GetAllExtractionInformation(ExtractionCategory.Any)); - - //The query builder should be able to succesfully create SQL - Console.WriteLine(qb.SQL); - - //there should be 2 tables involved in the 
query [z_sexLookup] and [BulkData] - Assert.AreEqual(2,qb.TablesUsedInQuery.Count); + //setup test rules for migrator + CreateMigrationRules(planManager, bulk); - //the query builder should have identified the lookup - Assert.AreEqual(lookup,qb.GetDistinctRequiredLookups().Single()); - - //////////////////////////////////////////////////////////////////////////////////////The Actual Bit Being Tested//////////////////////////////////////////////////// - var planManager = new ForwardEngineerANOCataloguePlanManager(RepositoryLocator,bulk.catalogue); - planManager.TargetDatabase = db; + //rules should pass checks + Assert.DoesNotThrow(() => planManager.Check(ThrowImmediatelyCheckNotifier.Quiet)); - //setup test rules for migrator - CreateMigrationRules(planManager, bulk); - - //rules should pass checks - Assert.DoesNotThrow(() => planManager.Check(new ThrowImmediatelyCheckNotifier())); + var engine = new ForwardEngineerANOCatalogueEngine(RepositoryLocator, planManager); + engine.Execute(); + //////////////////////////////////////////////////////////////////////////////////////End The Actual Bit Being Tested//////////////////////////////////////////////////// - var engine = new ForwardEngineerANOCatalogueEngine(RepositoryLocator, planManager); - engine.Execute(); - //////////////////////////////////////////////////////////////////////////////////////End The Actual Bit Being Tested//////////////////////////////////////////////////// + var anoCatalogue = CatalogueRepository.GetAllObjects().Single(c => c.Folder.StartsWith("\\ano")); + Assert.IsTrue(anoCatalogue.Exists()); - var anoCatalogue = CatalogueRepository.GetAllObjects().Single(c => c.Folder.StartsWith("\\ano")); - Assert.IsTrue(anoCatalogue.Exists()); + //The new Catalogue should have the same number of ExtractionInformations + var eiSource = bulk.catalogue.GetAllExtractionInformation(ExtractionCategory.Any).OrderBy(ei => ei.Order) + .ToArray(); + var eiDestination = anoCatalogue.GetAllExtractionInformation(ExtractionCategory.Any).OrderBy(ei => ei.Order) + .ToArray(); - //The new Catalogue should have the same number of ExtractionInformations - var eiSource = bulk.catalogue.GetAllExtractionInformation(ExtractionCategory.Any).OrderBy(ei=>ei.Order).ToArray(); - var eiDestination = anoCatalogue.GetAllExtractionInformation(ExtractionCategory.Any).OrderBy(ei=>ei.Order).ToArray(); - - Assert.AreEqual(eiSource.Length,eiDestination.Length,"Both the new and the ANO catalogue should have the same number of ExtractionInformations (extractable columns)"); + Assert.AreEqual(eiSource.Length, eiDestination.Length, + "Both the new and the ANO catalogue should have the same number of ExtractionInformations (extractable columns)"); - for (int i = 0; i < eiSource.Length; i++) - { - Assert.AreEqual(eiSource[i].Order , eiDestination[i].Order,"ExtractionInformations in the source and destination Catalogue should have the same order"); - - Assert.AreEqual(eiSource[i].GetRuntimeName(), - eiDestination[i].GetRuntimeName().Replace("ANO",""), "ExtractionInformations in the source and destination Catalogue should have the same names (excluding ANO prefix)"); - - Assert.AreEqual(eiSource[i].ExtractionCategory, eiDestination[i].ExtractionCategory, "Old / New ANO ExtractionInformations did not match on ExtractionCategory"); - Assert.AreEqual(eiSource[i].IsExtractionIdentifier, eiDestination[i].IsExtractionIdentifier, "Old / New ANO ExtractionInformations did not match on IsExtractionIdentifier"); - Assert.AreEqual(eiSource[i].HashOnDataRelease, 
eiDestination[i].HashOnDataRelease, "Old / New ANO ExtractionInformations did not match on HashOnDataRelease"); - Assert.AreEqual(eiSource[i].IsPrimaryKey, eiDestination[i].IsPrimaryKey, "Old / New ANO ExtractionInformations did not match on IsPrimaryKey"); - } + for (var i = 0; i < eiSource.Length; i++) + { + Assert.AreEqual(eiSource[i].Order, eiDestination[i].Order, + "ExtractionInformations in the source and destination Catalogue should have the same order"); + + Assert.AreEqual(eiSource[i].GetRuntimeName(), + eiDestination[i].GetRuntimeName().Replace("ANO", ""), + "ExtractionInformations in the source and destination Catalogue should have the same names (excluding ANO prefix)"); + + Assert.AreEqual(eiSource[i].ExtractionCategory, eiDestination[i].ExtractionCategory, + "Old / New ANO ExtractionInformations did not match on ExtractionCategory"); + Assert.AreEqual(eiSource[i].IsExtractionIdentifier, eiDestination[i].IsExtractionIdentifier, + "Old / New ANO ExtractionInformations did not match on IsExtractionIdentifier"); + Assert.AreEqual(eiSource[i].HashOnDataRelease, eiDestination[i].HashOnDataRelease, + "Old / New ANO ExtractionInformations did not match on HashOnDataRelease"); + Assert.AreEqual(eiSource[i].IsPrimaryKey, eiDestination[i].IsPrimaryKey, + "Old / New ANO ExtractionInformations did not match on IsPrimaryKey"); + } - //check it worked - QueryBuilder qbdestination = new QueryBuilder(null, null); - qbdestination.AddColumnRange(anoCatalogue.GetAllExtractionInformation(ExtractionCategory.Any)); + //check it worked + var qbdestination = new QueryBuilder(null, null); + qbdestination.AddColumnRange(anoCatalogue.GetAllExtractionInformation(ExtractionCategory.Any)); - //The query builder should be able to succesfully create SQL - Console.WriteLine(qbdestination.SQL); + //The query builder should be able to succesfully create SQL + Console.WriteLine(qbdestination.SQL); - var anoEiPostcode = anoCatalogue.GetAllExtractionInformation(ExtractionCategory.Any).Single(ei => ei.GetRuntimeName().Equals("MyMutilatedColumn")); - - //The transform on postcode should have been refactored to the new table name and preserve the scalar function LEFT... - Assert.AreEqual(string.Format("LEFT(10,{0}.[current_postcode])", anoEiPostcode.ColumnInfo.TableInfo.GetFullyQualifiedName()),anoEiPostcode.SelectSQL); + var anoEiPostcode = anoCatalogue.GetAllExtractionInformation(ExtractionCategory.Any) + .Single(ei => ei.GetRuntimeName().Equals("MyMutilatedColumn")); - var anoEiComboCol = anoCatalogue.GetAllExtractionInformation(ExtractionCategory.Any).Single(ei => ei.GetRuntimeName().Equals("ComboColumn")); + //The transform on postcode should have been refactored to the new table name and preserve the scalar function LEFT... + Assert.AreEqual($"LEFT(10,{anoEiPostcode.ColumnInfo.TableInfo.GetFullyQualifiedName()}.[current_postcode])", + anoEiPostcode.SelectSQL); - //The transform on postcode should have been refactored to the new table name and preserve the scalar function LEFT... 
- Assert.AreEqual(string.Format("{0}.[forename] + ' ' + {0}.[surname]", anoEiPostcode.ColumnInfo.TableInfo.GetFullyQualifiedName()), anoEiComboCol.SelectSQL); + var anoEiComboCol = anoCatalogue.GetAllExtractionInformation(ExtractionCategory.Any) + .Single(ei => ei.GetRuntimeName().Equals("ComboColumn")); - //there should be 2 tables involved in the query [z_sexLookup] and [BulkData] - Assert.AreEqual(2, qbdestination.TablesUsedInQuery.Count); + //The transform on postcode should have been refactored to the new table name and preserve the scalar function LEFT... + Assert.AreEqual( + $"{anoEiPostcode.ColumnInfo.TableInfo.GetFullyQualifiedName()}.[forename] + ' ' + {anoEiPostcode.ColumnInfo.TableInfo.GetFullyQualifiedName()}.[surname]", + anoEiComboCol.SelectSQL); - //the query builder should have identified the lookup but it should be the new one not the old one - Assert.AreEqual(1, qbdestination.GetDistinctRequiredLookups().Count(), "New query builder for ano catalogue did not correctly identify that there was a Lookup"); - Assert.AreNotEqual(lookup, qbdestination.GetDistinctRequiredLookups().Single(), "New query builder for ano catalogue identified the OLD Lookup!"); - - Assert.AreEqual(1, qbdestination.GetDistinctRequiredLookups().Single().GetSupplementalJoins().Count(),"The new Lookup did not have the composite join key (sex/hb_extract)"); - Assert.AreNotEqual(compositeLookup, qbdestination.GetDistinctRequiredLookups().Single().GetSupplementalJoins(), "New query builder for ano catalogue identified the OLD LookupCompositeJoinInfo!"); + //there should be 2 tables involved in the query [z_sexLookup] and [BulkData] + Assert.AreEqual(2, qbdestination.TablesUsedInQuery.Count); - db.Drop(); + //the query builder should have identified the lookup but it should be the new one not the old one + Assert.AreEqual(1, qbdestination.GetDistinctRequiredLookups().Count(), + "New query builder for ano catalogue did not correctly identify that there was a Lookup"); + Assert.AreNotEqual(lookup, qbdestination.GetDistinctRequiredLookups().Single(), + "New query builder for ano catalogue identified the OLD Lookup!"); - var exports = CatalogueRepository.GetAllObjects().Count(); - var imports = CatalogueRepository.GetAllObjects().Count(); + Assert.AreEqual(1, qbdestination.GetDistinctRequiredLookups().Single().GetSupplementalJoins().Count(), + "The new Lookup did not have the composite join key (sex/hb_extract)"); + Assert.AreNotEqual(compositeLookup, qbdestination.GetDistinctRequiredLookups().Single().GetSupplementalJoins(), + "New query builder for ano catalogue identified the OLD LookupCompositeJoinInfo!"); - Assert.AreEqual(exports, imports); - Assert.IsTrue(exports > 0); + db.Drop(); - - } + var exports = CatalogueRepository.GetAllObjects().Length; + var imports = CatalogueRepository.GetAllObjects().Length; + Assert.AreEqual(exports, imports); + Assert.IsTrue(exports > 0); + } - private void CreateMigrationRules(ForwardEngineerANOCataloguePlanManager planManager, BulkTestsData bulk) - { - var chi = bulk.GetColumnInfo("chi"); + + private void CreateMigrationRules(ForwardEngineerANOCataloguePlanManager planManager, BulkTestsData bulk) + { + var chi = bulk.GetColumnInfo("chi"); - var anoChi = new ANOTable(CatalogueRepository, ANOStore_ExternalDatabaseServer, "ANOCHI", "C"); - anoChi.NumberOfIntegersToUseInAnonymousRepresentation = 9; - anoChi.NumberOfCharactersToUseInAnonymousRepresentation = 1; - anoChi.SaveToDatabase(); - anoChi.PushToANOServerAsNewTable(chi.Data_type,new ThrowImmediatelyCheckNotifier()); - 
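The ANOTable setup in the removed lines above, and its replacement in the added lines below, show the other purely mechanical refactor in this release: property assignments made after the constructor are folded into an object initializer (the same change is applied to the plan managers and DatabaseColumnRequest instances earlier in this file). Behaviour is unchanged; a minimal before/after sketch:

// Before: construct, then assign properties one at a time.
var anoChiOld = new ANOTable(CatalogueRepository, ANOStore_ExternalDatabaseServer, "ANOCHI", "C");
anoChiOld.NumberOfIntegersToUseInAnonymousRepresentation = 9;
anoChiOld.NumberOfCharactersToUseInAnonymousRepresentation = 1;

// After: the same assignments expressed as an object initializer.
var anoChiNew = new ANOTable(CatalogueRepository, ANOStore_ExternalDatabaseServer, "ANOCHI", "C")
{
    NumberOfIntegersToUseInAnonymousRepresentation = 9,
    NumberOfCharactersToUseInAnonymousRepresentation = 1
};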
- planManager.GetPlanForColumnInfo(chi).Plan = Plan.ANO; - planManager.GetPlanForColumnInfo(chi).ANOTable = anoChi; - - var dob = bulk.GetColumnInfo("date_of_birth"); - planManager.GetPlanForColumnInfo(dob).Plan = Plan.Dilute; - planManager.GetPlanForColumnInfo(dob).Dilution = new RoundDateToMiddleOfQuarter(); - - var postcode = bulk.GetColumnInfo("current_postcode"); - planManager.GetPlanForColumnInfo(postcode).Plan = Plan.Dilute; - planManager.GetPlanForColumnInfo(postcode).Dilution = new ExcludeRight3OfUKPostcodes(); - } + var anoChi = new ANOTable(CatalogueRepository, ANOStore_ExternalDatabaseServer, "ANOCHI", "C") + { + NumberOfIntegersToUseInAnonymousRepresentation = 9, + NumberOfCharactersToUseInAnonymousRepresentation = 1 + }; + anoChi.SaveToDatabase(); + anoChi.PushToANOServerAsNewTable(chi.Data_type, ThrowImmediatelyCheckNotifier.Quiet); + + planManager.GetPlanForColumnInfo(chi).Plan = Plan.ANO; + planManager.GetPlanForColumnInfo(chi).ANOTable = anoChi; + + var dob = bulk.GetColumnInfo("date_of_birth"); + planManager.GetPlanForColumnInfo(dob).Plan = Plan.Dilute; + planManager.GetPlanForColumnInfo(dob).Dilution = new RoundDateToMiddleOfQuarter(); + + var postcode = bulk.GetColumnInfo("current_postcode"); + planManager.GetPlanForColumnInfo(postcode).Plan = Plan.Dilute; + planManager.GetPlanForColumnInfo(postcode).Dilution = new ExcludeRight3OfUKPostcodes(); } } \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/GetDatabaseDiagramBinaryTest.cs b/Rdmp.Core.Tests/Curation/Integration/GetDatabaseDiagramBinaryTest.cs index 9ce4958459..9f8062c16c 100644 --- a/Rdmp.Core.Tests/Curation/Integration/GetDatabaseDiagramBinaryTest.cs +++ b/Rdmp.Core.Tests/Curation/Integration/GetDatabaseDiagramBinaryTest.cs @@ -5,43 +5,35 @@ // You should have received a copy of the GNU General Public License along with RDMP. If not, see . 
using System; -using System.Data.Common; using NUnit.Framework; -using ReusableLibraryCode; +using Rdmp.Core.ReusableLibraryCode; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Integration +namespace Rdmp.Core.Tests.Curation.Integration; + +public class GetDatabaseDiagramBinaryTest : DatabaseTests { - - public class GetDatabaseDiagramBinaryTest:DatabaseTests + [Test] + public void GetBinaryText() { - [Test] - public void GetBinaryText() - { - using (var con = CatalogueTableRepository.GetConnection()) - { - using(DbCommand cmd = DatabaseCommandHelper.GetCommand( - "SELECT definition FROM sysdiagrams where name = 'Catalogue_Data_Diagram' ", - con.Connection, con.Transaction)) - using (var reader = cmd.ExecuteReader()) - { - //The system diagram exists - Assert.IsTrue(reader.Read()); + using var con = CatalogueTableRepository.GetConnection(); + using var cmd = DatabaseCommandHelper.GetCommand( + "SELECT definition FROM sysdiagrams where name = 'Catalogue_Data_Diagram' ", + con.Connection, con.Transaction); + using var reader = cmd.ExecuteReader(); + //The system diagram exists + Assert.IsTrue(reader.Read()); - var bytes = (byte[]) reader[0]; - var bytesAsString = ByteArrayToString(bytes); - - Console.WriteLine(bytesAsString); - Assert.Greater(bytesAsString.Length,100000); - } - } - } + var bytes = (byte[])reader[0]; + var bytesAsString = ByteArrayToString(bytes); - public static string ByteArrayToString(byte[] ba) - { - string hex = BitConverter.ToString(ba); - return hex.Replace("-", ""); - } + Console.WriteLine(bytesAsString); + Assert.Greater(bytesAsString.Length, 100000); } -} + public static string ByteArrayToString(byte[] ba) + { + var hex = BitConverter.ToString(ba); + return hex.Replace("-", ""); + } +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/GovernanceTests.cs b/Rdmp.Core.Tests/Curation/Integration/GovernanceTests.cs index 30e36ec157..63e1e565e0 100644 --- a/Rdmp.Core.Tests/Curation/Integration/GovernanceTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/GovernanceTests.cs @@ -11,141 +11,159 @@ using NUnit.Framework; using Rdmp.Core.Curation.Data; using Rdmp.Core.Curation.Data.Governance; +using Rdmp.Core.MapsDirectlyToDatabaseTable; using Rdmp.Core.Repositories; -using ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Checks; using Tests.Common; -using MapsDirectlyToDatabaseTable; -namespace Rdmp.Core.Tests.Curation.Integration +namespace Rdmp.Core.Tests.Curation.Integration; + +public class GovernanceTests : DatabaseTests { - public class GovernanceTests:DatabaseTests + [OneTimeTearDown] + protected void OneTimeTearDown() { - [Test] - public void TestCreatingGovernance_StartsAtToday() - { - var gov = GetGov(); - - Assert.NotNull(gov); - Assert.AreEqual(gov.StartDate,DateTime.Now.Date); - } - [Test] - public void TestCreatingGovernance_ChangeName() - { - if (CatalogueRepository is not TableRepository) - Assert.Inconclusive("This test for stale objects only applies to database repositories"); - - var gov = GetGov(); - gov.Name = "Fish"; - GovernancePeriod freshCopy = CatalogueRepository.GetObjectByID(gov.ID); - - //local change not applied yet - Assert.AreNotEqual(gov.Name,freshCopy.Name); - - //comitted change to database - gov.SaveToDatabase(); - - //notice that this fresh copy is still desynced - Assert.AreNotEqual(gov.Name,freshCopy.Name); - - //sync it - freshCopy = CatalogueRepository.GetObjectByID(gov.ID); - Assert.AreEqual(gov.Name ,freshCopy.Name); + //delete all governance periods + foreach (var governancePeriod 
in toCleanup) + try + { + governancePeriod.DeleteInDatabase(); + } + catch (Exception e) + { + Console.WriteLine($"Ignoring exception {e.Message} during clean up"); + } + } - } + [Test] + public void TestCreatingGovernance_StartsAtToday() + { + var gov = GetGov(); - [Test] - public void TestCreatingGovernance_CannotHaveSameNames() - { - var gov1 = GetGov(); - var gov2 = GetGov(); + Assert.NotNull(gov); + Assert.AreEqual(gov.StartDate, DateTime.Now.Date); + } - gov1.Name = "HiDuplicate"; - gov1.SaveToDatabase(); + [Test] + public void TestCreatingGovernance_ChangeName() + { + if (CatalogueRepository is not TableRepository) + Assert.Inconclusive("This test for stale objects only applies to database repositories"); - gov2.Name = "HiDuplicate"; + var gov = GetGov(); + gov.Name = "Fish"; + var freshCopy = CatalogueRepository.GetObjectByID(gov.ID); - if(CatalogueRepository is TableRepository) - { - var ex = Assert.Throws(gov2.SaveToDatabase); - StringAssert.StartsWith("Cannot insert duplicate key row in object 'dbo.GovernancePeriod' with unique index 'idxGovernancePeriodNameMustBeUnique'. The duplicate key value is (HiDuplicate)", ex.Message); - } + //local change not applied yet + Assert.AreNotEqual(gov.Name, freshCopy.Name); - } + //comitted change to database + gov.SaveToDatabase(); - [Test] - public void Checkability_ExpiresBeforeStarts() - { - var gov = GetGov(); - gov.Name = "TestExpiryBeforeStarting"; + //notice that this fresh copy is still desynced + Assert.AreNotEqual(gov.Name, freshCopy.Name); - //valid to start with - gov.Check(new ThrowImmediatelyCheckNotifier()); + //sync it + freshCopy = CatalogueRepository.GetObjectByID(gov.ID); + Assert.AreEqual(gov.Name, freshCopy.Name); + } - gov.EndDate = DateTime.MinValue; - var ex = Assert.Throws(()=>gov.Check(new ThrowImmediatelyCheckNotifier()));//no longer valid - notice there is no SaveToDatabase because we can shouldnt be going back to db anyway - Assert.AreEqual("GovernancePeriod TestExpiryBeforeStarting expires before it begins!", ex.Message); - } + [Test] + public void TestCreatingGovernance_CannotHaveSameNames() + { + var gov1 = GetGov(); + var gov2 = GetGov(); - [Test] - public void Checkability_NoExpiryDateWarning() - { - var gov = GetGov(); - gov.Name = "NeverExpires"; + gov1.Name = "HiDuplicate"; + gov1.SaveToDatabase(); - //valid to start with - var ex = Assert.Throws(()=>gov.Check(new ThrowImmediatelyCheckNotifier(){ThrowOnWarning = true})); - Assert.AreEqual("There is no end date for GovernancePeriod NeverExpires",ex.Message); + gov2.Name = "HiDuplicate"; + if (CatalogueRepository is TableRepository) + { + var ex = Assert.Throws(gov2.SaveToDatabase); + StringAssert.StartsWith( + "Cannot insert duplicate key row in object 'dbo.GovernancePeriod' with unique index 'idxGovernancePeriodNameMustBeUnique'. The duplicate key value is (HiDuplicate)", + ex?.Message); } + } - [TestCase(true)] - [TestCase(false)] - public void GovernsCatalogue(bool memoryRepository) - { - ICatalogueRepository repo = memoryRepository ? 
(ICatalogueRepository) new MemoryCatalogueRepository() : CatalogueRepository; + [Test] + public void Checkability_ExpiresBeforeStarts() + { + var gov = GetGov(); + gov.Name = "TestExpiryBeforeStarting"; - var gov = GetGov(repo); - Catalogue c = new Catalogue(repo, "GovernedCatalogue"); - try - { - Assert.AreEqual(gov.GovernedCatalogues.Count(), 0); + //valid to start with + gov.Check(ThrowImmediatelyCheckNotifier.Quiet); - //should be no governanced catalogues for this governancer yet - gov.CreateGovernanceRelationshipTo(c); + gov.EndDate = DateTime.MinValue; + var ex = Assert.Throws(() => + gov.Check(ThrowImmediatelyCheckNotifier + .Quiet)); //no longer valid - notice there is no SaveToDatabase because we can shouldn't be going back to db anyway + Assert.AreEqual("GovernancePeriod TestExpiryBeforeStarting expires before it begins!", ex?.Message); + } - var allCatalogues = gov.GovernedCatalogues.ToArray(); - var governedCatalogue = allCatalogues[0]; - Assert.AreEqual(governedCatalogue, c); //we now govern C - } - finally - { - gov.DeleteGovernanceRelationshipTo(c); - Assert.AreEqual(gov.GovernedCatalogues.Count(), 0); //we govern c nevermore! + [Test] + public void Checkability_NoExpiryDateWarning() + { + var gov = GetGov(); + gov.Name = "NeverExpires"; - c.DeleteInDatabase(); - } - } + //valid to start with + var ex = Assert.Throws(() => gov.Check(ThrowImmediatelyCheckNotifier.QuietPicky)); + Assert.AreEqual("There is no end date for GovernancePeriod NeverExpires", ex?.Message); + } + + [TestCase(true)] + [TestCase(false)] + public void GovernsCatalogue(bool memoryRepository) + { + var repo = memoryRepository ? (ICatalogueRepository)new MemoryCatalogueRepository() : CatalogueRepository; - [Test] - public void GovernsSameCatalogueTwice() + var gov = GetGov(repo); + var c = new Catalogue(repo, "GovernedCatalogue"); + try { - Catalogue c = new Catalogue(CatalogueRepository, "GovernedCatalogue"); - - var gov = GetGov(); - Assert.AreEqual(gov.GovernedCatalogues.Count(), 0);//should be no governanced catalogues for this governancer yet + Assert.AreEqual(gov.GovernedCatalogues.Count(), 0); + //should be no governanced catalogues for this governancer yet gov.CreateGovernanceRelationshipTo(c); - gov.CreateGovernanceRelationshipTo(c); - } - - List toCleanup = new List(); - private GovernancePeriod GetGov(ICatalogueRepository repo = null) + var allCatalogues = gov.GovernedCatalogues.ToArray(); + var governedCatalogue = allCatalogues[0]; + Assert.AreEqual(governedCatalogue, c); //we now govern C + } + finally { - GovernancePeriod gov = new GovernancePeriod(repo??CatalogueRepository); - toCleanup.Add(gov); + gov.DeleteGovernanceRelationshipTo(c); + Assert.AreEqual(gov.GovernedCatalogues.Count(), 0); //we govern c nevermore! - return gov; + c.DeleteInDatabase(); } } -} + + [Test] + public void GovernsSameCatalogueTwice() + { + var c = new Catalogue(CatalogueRepository, "GovernedCatalogue"); + + var gov = GetGov(); + Assert.AreEqual(gov.GovernedCatalogues.Count(), + 0); //should be no governanced catalogues for this governancer yet + + gov.CreateGovernanceRelationshipTo(c); + gov.CreateGovernanceRelationshipTo(c); + } + + + private List toCleanup = new(); + + private GovernancePeriod GetGov(ICatalogueRepository repo = null) + { + var gov = new GovernancePeriod(repo ?? 
CatalogueRepository); + toCleanup.Add(gov); + + return gov; + } +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/HangingConnectionTest.cs b/Rdmp.Core.Tests/Curation/Integration/HangingConnectionTest.cs index 7d7a50fd6c..ef77c4f3a6 100644 --- a/Rdmp.Core.Tests/Curation/Integration/HangingConnectionTest.cs +++ b/Rdmp.Core.Tests/Curation/Integration/HangingConnectionTest.cs @@ -11,74 +11,73 @@ using NUnit.Framework; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Integration +namespace Rdmp.Core.Tests.Curation.Integration; + +public class HangingConnectionTest : DatabaseTests { - - public class HangingConnectionTest:DatabaseTests - { - string testDbName = "HangingConnectionTest"; + private string testDbName = "HangingConnectionTest"; - [Test] - [TestCase(true)] - [TestCase(false)] - public void TestConnection(bool explicitClose) - { - //drop it if it existed - if (DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(testDbName).Exists()) - DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(testDbName).Drop(); - - DiscoveredServerICanCreateRandomDatabasesAndTablesOn.CreateDatabase(testDbName); - Thread.Sleep(500); + [Test] + [TestCase(true)] + [TestCase(false)] + public void TestConnection(bool explicitClose) + { + //drop it if it existed + if (DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(testDbName).Exists()) + DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(testDbName).Drop(); - ThrowIfDatabaseLock(); - - var db = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(testDbName); + DiscoveredServerICanCreateRandomDatabasesAndTablesOn.CreateDatabase(testDbName); + Thread.Sleep(500); - ThrowIfDatabaseLock(); + ThrowIfDatabaseLock(); - using (var con = db.Server.GetConnection()) - { - con.Open(); + var db = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(testDbName); - //we are currently connected so this should throw - Assert.Throws(ThrowIfDatabaseLock); + ThrowIfDatabaseLock(); - } - Thread.Sleep(500); + using (var con = db.Server.GetConnection()) + { + con.Open(); - if (explicitClose) - { - SqlConnection.ClearAllPools(); - Thread.Sleep(500); - Assert.DoesNotThrow(ThrowIfDatabaseLock);//in this case we told .net to clear the pools which leaves the server free of locks/hanging connections - } - else - { - Assert.Throws(ThrowIfDatabaseLock);//despite us closing the connection and using the 'using' block .net still keeps a connection in sleep state to the server >< - } - - db.Drop(); + //we are currently connected so this should throw + Assert.Throws(ThrowIfDatabaseLock); } - void ThrowIfDatabaseLock() + Thread.Sleep(500); + + if (explicitClose) + { + SqlConnection.ClearAllPools(); + Thread.Sleep(500); + Assert.DoesNotThrow( + ThrowIfDatabaseLock); //in this case we told .net to clear the pools which leaves the server free of locks/hanging connections + } + else { - var serverCopy = new DiscoveredServer(new SqlConnectionStringBuilder(DiscoveredServerICanCreateRandomDatabasesAndTablesOn.Builder.ConnectionString)); - serverCopy.ChangeDatabase("master"); - using (var con = serverCopy.GetConnection()) + Assert.Throws( + ThrowIfDatabaseLock); //despite us closing the connection and using the 'using' block .net still keeps a connection in sleep state to the server >< + } + + db.Drop(); + } + + private void ThrowIfDatabaseLock() + { + var serverCopy = + new DiscoveredServer(new SqlConnectionStringBuilder(DiscoveredServerICanCreateRandomDatabasesAndTablesOn 
+ .Builder.ConnectionString)); + serverCopy.ChangeDatabase("master"); + using var con = serverCopy.GetConnection(); + con.Open(); + var r = serverCopy.GetCommand("exec sp_who2", con).ExecuteReader(); + while (r.Read()) + if (r["DBName"].Equals(testDbName)) { - con.Open(); - var r = serverCopy.GetCommand("exec sp_who2", con).ExecuteReader(); - while (r.Read()) - if (r["DBName"].Equals(testDbName)) - { - object[] vals = new object[r.VisibleFieldCount]; - r.GetValues(vals); - throw new Exception("Someone is locking " + testDbName + ":" + Environment.NewLine + string.Join(",", vals)); - - } + var vals = new object[r.VisibleFieldCount]; + r.GetValues(vals); + throw new Exception( + $"Someone is locking {testDbName}:{Environment.NewLine}{string.Join(",", vals)}"); } - - } } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/LinkerTests.cs b/Rdmp.Core.Tests/Curation/Integration/LinkerTests.cs index 8e89c18069..967102ec6b 100644 --- a/Rdmp.Core.Tests/Curation/Integration/LinkerTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/LinkerTests.cs @@ -8,75 +8,71 @@ using Rdmp.Core.Curation.Data; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Integration +namespace Rdmp.Core.Tests.Curation.Integration; + +internal class LinkerTests : DatabaseTests { - class LinkerTests : DatabaseTests + [Test] + public void AddSameLinkTwice() { - [Test] - public void AddSameLinkTwice() + Catalogue predator = null; + CatalogueItem lazor = null; + TableInfo highEnergyTable = null; + ColumnInfo velocityColumn = null; + try { - Catalogue predator = null; - CatalogueItem lazor = null; - TableInfo highEnergyTable = null; - ColumnInfo velocityColumn = null; - try - { - ///////////////Create the things that we are going to create relationships between ///////////////// - predator = new Catalogue(CatalogueRepository, "Predator"); - lazor = new CatalogueItem(CatalogueRepository, predator, "QuadlzorVelocity"); - highEnergyTable = new TableInfo(CatalogueRepository, "HighEnergyShizzle"); - velocityColumn = new ColumnInfo(CatalogueRepository, "Velocity Of Matter", "int", highEnergyTable); + ///////////////Create the things that we are going to create relationships between ///////////////// + predator = new Catalogue(CatalogueRepository, "Predator"); + lazor = new CatalogueItem(CatalogueRepository, predator, "QuadlzorVelocity"); + highEnergyTable = new TableInfo(CatalogueRepository, "HighEnergyShizzle"); + velocityColumn = new ColumnInfo(CatalogueRepository, "Velocity Of Matter", "int", highEnergyTable); - //now you can add as many links as you want, it just skips them - lazor.SetColumnInfo(velocityColumn); - Assert.AreEqual(lazor.ColumnInfo,velocityColumn); - - } - finally - { - lazor.DeleteInDatabase(); //delete child - predator.DeleteInDatabase(); //delete parent - - velocityColumn.DeleteInDatabase();//delete child - highEnergyTable.DeleteInDatabase();//delete parent - } - - + //now you can add as many links as you want, it just skips them + lazor.SetColumnInfo(velocityColumn); + Assert.AreEqual(lazor.ColumnInfo, velocityColumn); } - - [Test] - public void AddLinkBetween_createNewLink_pass() + finally { + lazor.DeleteInDatabase(); //delete child + predator.DeleteInDatabase(); //delete parent - ///////////////Create the things that we are going to create relationships between ///////////////// - var predator = new Catalogue(CatalogueRepository, "Predator"); - var lazor = new CatalogueItem(CatalogueRepository, predator, "QuadlzorVelocity"); - var highEnergyTable = new 
TableInfo(CatalogueRepository, "HighEnergyShizzle"); - var velocityColumn = new ColumnInfo(CatalogueRepository, "Velocity Of Matter", "int", highEnergyTable); + velocityColumn.DeleteInDatabase(); //delete child + highEnergyTable.DeleteInDatabase(); //delete parent + } + } - ////////////Check the creation worked ok - Assert.IsNotNull(predator); //catalogue - Assert.IsNotNull(lazor); + [Test] + public void AddLinkBetween_createNewLink_pass() + { + ///////////////Create the things that we are going to create relationships between ///////////////// + var predator = new Catalogue(CatalogueRepository, "Predator"); + var lazor = new CatalogueItem(CatalogueRepository, predator, "QuadlzorVelocity"); + var highEnergyTable = new TableInfo(CatalogueRepository, "HighEnergyShizzle"); + var velocityColumn = new ColumnInfo(CatalogueRepository, "Velocity Of Matter", "int", highEnergyTable); - Assert.IsNotNull(highEnergyTable); //underlying table stuff - Assert.IsNotNull(velocityColumn); + ////////////Check the creation worked ok + Assert.IsNotNull(predator); //catalogue + Assert.IsNotNull(lazor); - ////////////// Create links between stuff and check they were created successfully ////////////// + Assert.IsNotNull(highEnergyTable); //underlying table stuff + Assert.IsNotNull(velocityColumn); - //create a link between catalogue item lazor and velocity column - lazor.SetColumnInfo(velocityColumn); - Assert.IsTrue(lazor.ColumnInfo.ID == velocityColumn.ID); - - ////////////////cleanup ---- Delete everything that we created -------- ////////////// - velocityColumn.DeleteInDatabase(); //delete causes CASCADE: CatalogueItem no longer associated with ColumnInfo because ColumnInfo died - - lazor.RevertToDatabaseState(); + ////////////// Create links between stuff and check they were created successfully ////////////// - Assert.IsNull(lazor.ColumnInfo);//involves a database query so won't actually invalidate the below + //create a link between catalogue item lazor and velocity column + lazor.SetColumnInfo(velocityColumn); + Assert.IsTrue(lazor.ColumnInfo.ID == velocityColumn.ID); - predator.DeleteInDatabase(); + ////////////////cleanup ---- Delete everything that we created -------- ////////////// + velocityColumn + .DeleteInDatabase(); //delete causes CASCADE: CatalogueItem no longer associated with ColumnInfo because ColumnInfo died - highEnergyTable.DeleteInDatabase(); - } + lazor.RevertToDatabaseState(); + + Assert.IsNull(lazor.ColumnInfo); //involves a database query so won't actually invalidate the below + + predator.DeleteInDatabase(); + + highEnergyTable.DeleteInDatabase(); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/LoadMetadataTests.cs b/Rdmp.Core.Tests/Curation/Integration/LoadMetadataTests.cs index 1dba3eba0f..49a9b113ed 100644 --- a/Rdmp.Core.Tests/Curation/Integration/LoadMetadataTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/LoadMetadataTests.cs @@ -9,94 +9,93 @@ using Rdmp.Core.Curation.Data.DataLoad; using Rdmp.Core.DataLoad.Engine.Checks.Checkers; using Rdmp.Core.DataLoad.Engine.DatabaseManagement.EntityNaming; -using ReusableLibraryCode.Checks; -using ReusableLibraryCode.DataAccess; +using Rdmp.Core.ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.DataAccess; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Integration +namespace Rdmp.Core.Tests.Curation.Integration; + +public class LoadMetadataTests : DatabaseTests { - public class LoadMetadataTests : DatabaseTests + [Test] + public void CreateNewAndGetBackFromDatabase() { - 
[Test] - public void CreateNewAndGetBackFromDatabase() + var loadMetadata = new LoadMetadata(CatalogueRepository); + + try { - var loadMetadata = new LoadMetadata(CatalogueRepository); - - try - { - - loadMetadata.LocationOfFlatFiles = TestContext.CurrentContext.TestDirectory; - loadMetadata.SaveToDatabase(); - - var loadMetadataWithIdAfterwards = CatalogueRepository.GetObjectByID(loadMetadata.ID); - Assert.AreEqual(loadMetadataWithIdAfterwards.LocationOfFlatFiles, TestContext.CurrentContext.TestDirectory); - } - finally - { - loadMetadata.DeleteInDatabase(); - } - } + loadMetadata.LocationOfFlatFiles = TestContext.CurrentContext.TestDirectory; + loadMetadata.SaveToDatabase(); - [Test] - public void Test_IgnoreTrigger_GetSet() + var loadMetadataWithIdAfterwards = CatalogueRepository.GetObjectByID(loadMetadata.ID); + Assert.AreEqual(loadMetadataWithIdAfterwards.LocationOfFlatFiles, TestContext.CurrentContext.TestDirectory); + } + finally { - var loadMetadata = new LoadMetadata(CatalogueRepository); - - try - { - //default - Assert.IsFalse(loadMetadata.IgnoreTrigger); - loadMetadata.SaveToDatabase(); - Assert.IsFalse(loadMetadata.IgnoreTrigger); - loadMetadata.SaveToDatabase(); - - loadMetadata.IgnoreTrigger = true; - Assert.IsTrue(loadMetadata.IgnoreTrigger); - loadMetadata.RevertToDatabaseState(); - Assert.IsFalse(loadMetadata.IgnoreTrigger); - - - loadMetadata.IgnoreTrigger = true; - Assert.IsTrue(loadMetadata.IgnoreTrigger); - loadMetadata.SaveToDatabase(); - var lmd2 = RepositoryLocator.CatalogueRepository.GetObjectByID(loadMetadata.ID); - Assert.IsTrue(lmd2.IgnoreTrigger); - } - finally - { - loadMetadata.DeleteInDatabase(); - } + loadMetadata.DeleteInDatabase(); } + } - [Test] - public void TestPreExecutionChecker_TablesDontExist() + [Test] + public void Test_IgnoreTrigger_GetSet() + { + var loadMetadata = new LoadMetadata(CatalogueRepository); + + try { - var db = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer); - var tbl = db.ExpectTable("Imaginary"); + //default + Assert.IsFalse(loadMetadata.IgnoreTrigger); + loadMetadata.SaveToDatabase(); + Assert.IsFalse(loadMetadata.IgnoreTrigger); + loadMetadata.SaveToDatabase(); - Assert.IsFalse(tbl.Exists()); + loadMetadata.IgnoreTrigger = true; + Assert.IsTrue(loadMetadata.IgnoreTrigger); + loadMetadata.RevertToDatabaseState(); + Assert.IsFalse(loadMetadata.IgnoreTrigger); - var lmd = RdmpMockFactory.Mock_LoadMetadataLoadingTable(tbl); - var checker = new PreExecutionChecker(lmd, new HICDatabaseConfiguration(db.Server)); - var ex = Assert.Throws(()=>checker.Check(new ThrowImmediatelyCheckNotifier())); - StringAssert.IsMatch("Table '.*Imaginary.*' does not exist", ex.Message); + loadMetadata.IgnoreTrigger = true; + Assert.IsTrue(loadMetadata.IgnoreTrigger); + loadMetadata.SaveToDatabase(); + var lmd2 = RepositoryLocator.CatalogueRepository.GetObjectByID(loadMetadata.ID); + Assert.IsTrue(lmd2.IgnoreTrigger); } - [Test] - public void TestPreExecutionChecker_TableIsTableValuedFunction() + finally { - var db = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer); - TestableTableValuedFunction f = new TestableTableValuedFunction(); - f.Create(db,CatalogueRepository); + loadMetadata.DeleteInDatabase(); + } + } - var tbl = f.TableInfoCreated.Discover(DataAccessContext.DataLoad); - Assert.IsTrue(tbl.Exists()); + [Test] + public void TestPreExecutionChecker_TablesDontExist() + { + var db = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer); + var tbl = db.ExpectTable("Imaginary"); - var lmd = 
RdmpMockFactory.Mock_LoadMetadataLoadingTable(f.TableInfoCreated); - var checker = new PreExecutionChecker(lmd, new HICDatabaseConfiguration(db.Server)); - var ex = Assert.Throws(() => checker.Check(new ThrowImmediatelyCheckNotifier())); + Assert.IsFalse(tbl.Exists()); - StringAssert.IsMatch("Table '.*MyAwesomeFunction.*' is a TableValuedFunction", ex.Message); - } - } -} + var lmd = RdmpMockFactory.Mock_LoadMetadataLoadingTable(tbl); + var checker = new PreExecutionChecker(lmd, new HICDatabaseConfiguration(db.Server)); + var ex = Assert.Throws(() => checker.Check(ThrowImmediatelyCheckNotifier.Quiet)); + + StringAssert.IsMatch("Table '.*Imaginary.*' does not exist", ex.Message); + } + + [Test] + public void TestPreExecutionChecker_TableIsTableValuedFunction() + { + var db = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer); + var f = new TestableTableValuedFunction(); + f.Create(db, CatalogueRepository); + + var tbl = f.TableInfoCreated.Discover(DataAccessContext.DataLoad); + Assert.IsTrue(tbl.Exists()); + + var lmd = RdmpMockFactory.Mock_LoadMetadataLoadingTable(f.TableInfoCreated); + var checker = new PreExecutionChecker(lmd, new HICDatabaseConfiguration(db.Server)); + var ex = Assert.Throws(() => checker.Check(ThrowImmediatelyCheckNotifier.Quiet)); + + StringAssert.IsMatch("Table '.*MyAwesomeFunction.*' is a TableValuedFunction", ex.Message); + } +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/LoadProgressTest.cs b/Rdmp.Core.Tests/Curation/Integration/LoadProgressTest.cs index d6bdce8bf5..90a810658f 100644 --- a/Rdmp.Core.Tests/Curation/Integration/LoadProgressTest.cs +++ b/Rdmp.Core.Tests/Curation/Integration/LoadProgressTest.cs @@ -5,66 +5,63 @@ // You should have received a copy of the GNU General Public License along with RDMP. If not, see . 
using System; -using MapsDirectlyToDatabaseTable; using NUnit.Framework; using Rdmp.Core.Curation.Data; using Rdmp.Core.Curation.Data.DataLoad; +using Rdmp.Core.MapsDirectlyToDatabaseTable; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Integration -{ +namespace Rdmp.Core.Tests.Curation.Integration; - public class LoadProgressTest : DatabaseTests +public class LoadProgressTest : DatabaseTests +{ + [Test] + public void CreateNewScheduleTest() { - [Test] - public void CreateNewScheduleTest() - { - var loadMetadata = new LoadMetadata(CatalogueRepository); - var loadProgress = new LoadProgress(CatalogueRepository, loadMetadata); + var loadMetadata = new LoadMetadata(CatalogueRepository); + var loadProgress = new LoadProgress(CatalogueRepository, loadMetadata); - Assert.AreEqual(loadProgress.LoadMetadata_ID, loadMetadata.ID); + Assert.AreEqual(loadProgress.LoadMetadata_ID, loadMetadata.ID); - loadProgress.DeleteInDatabase(); - loadMetadata.DeleteInDatabase(); - } + loadProgress.DeleteInDatabase(); + loadMetadata.DeleteInDatabase(); + } - /// - /// This tests that when fetching 2 copies (references) to the same database - /// record that the instances are considered 'Equal' (for purposes of ) - /// - [Test] - public void LoadProgress_Equals() - { - // only applies to databases - if (CatalogueRepository is not TableRepository) - return; + /// + /// This tests that when fetching 2 copies (references) to the same database + /// record that the instances are considered 'Equal' (for purposes of ) + /// + [Test] + public void LoadProgress_Equals() + { + // only applies to databases + if (CatalogueRepository is not TableRepository) + return; + + var loadMetadata = new LoadMetadata(CatalogueRepository); - var loadMetadata = new LoadMetadata(CatalogueRepository); + var progress = new LoadProgress(CatalogueRepository, loadMetadata); + var progressCopy = CatalogueRepository.GetObjectByID(progress.ID); - LoadProgress progress = new LoadProgress(CatalogueRepository, loadMetadata); - LoadProgress progressCopy = CatalogueRepository.GetObjectByID(progress.ID); - - progressCopy.Name = "fish"; - progressCopy.OriginDate = new DateTime(2001,01,01); - - try - { - //values are different - Assert.AreNotEqual(progressCopy.OriginDate, progress.OriginDate); - Assert.AreNotEqual(progressCopy.Name, progress.Name); + progressCopy.Name = "fish"; + progressCopy.OriginDate = new DateTime(2001, 01, 01); - //IDs are the same - Assert.AreEqual(progressCopy.ID, progress.ID); + try + { + //values are different + Assert.AreNotEqual(progressCopy.OriginDate, progress.OriginDate); + Assert.AreNotEqual(progressCopy.Name, progress.Name); - //therefore objects are the same - Assert.IsTrue(progressCopy.Equals(progress)); + //IDs are the same + Assert.AreEqual(progressCopy.ID, progress.ID); - } - finally - { - progress.DeleteInDatabase(); - loadMetadata.DeleteInDatabase(); - } + //therefore objects are the same + Assert.IsTrue(progressCopy.Equals(progress)); + } + finally + { + progress.DeleteInDatabase(); + loadMetadata.DeleteInDatabase(); } } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/LoadProgressUnitTests.cs b/Rdmp.Core.Tests/Curation/Integration/LoadProgressUnitTests.cs index cb154d0ff0..874f723b6c 100644 --- a/Rdmp.Core.Tests/Curation/Integration/LoadProgressUnitTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/LoadProgressUnitTests.cs @@ -9,89 +9,89 @@ using Rdmp.Core.Curation.Data; using Rdmp.Core.DataLoad.Engine.Job.Scheduling; using 
Rdmp.Core.DataLoad.Engine.LoadProcess.Scheduling.Strategy; -using ReusableLibraryCode.Checks; -using ReusableLibraryCode.Progress; using System; using System.IO; +using Rdmp.Core.ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Progress; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Integration +namespace Rdmp.Core.Tests.Curation.Integration; + +public class LoadProgressUnitTests : UnitTests { - public class LoadProgressUnitTests : UnitTests + [Test] + public void LoadProgress_Checks_BadDates() { - [Test] - public void LoadProgress_Checks_BadDates() - { - var lp = WhenIHaveA(); - - lp.Check(new ThrowImmediatelyCheckNotifier()); - - //Bad Origin Date - lp.OriginDate = DateTime.Now.AddDays(1); - Assert.Throws(()=>lp.Check(new ThrowImmediatelyCheckNotifier())); - - //Back to normal - lp.RevertToDatabaseState(); - lp.Check(new ThrowImmediatelyCheckNotifier()); - - //Bad ProgressDate - lp.DataLoadProgress = DateTime.Now.AddDays(1); - Assert.Throws(()=>lp.Check(new ThrowImmediatelyCheckNotifier())); - - //Back to normal - lp.RevertToDatabaseState(); - lp.Check(new ThrowImmediatelyCheckNotifier()); - - // valid progress (1 year) - lp.OriginDate = new DateTime(2001,1,1); - lp.DataLoadProgress = new DateTime(2002,1,1); - lp.Check(new ThrowImmediatelyCheckNotifier()); - } + var lp = WhenIHaveA(); + + lp.Check(ThrowImmediatelyCheckNotifier.Quiet); + + //Bad Origin Date + lp.OriginDate = DateTime.Now.AddDays(1); + Assert.Throws(() => lp.Check(ThrowImmediatelyCheckNotifier.Quiet)); + + //Back to normal + lp.RevertToDatabaseState(); + lp.Check(ThrowImmediatelyCheckNotifier.Quiet); + + //Bad ProgressDate + lp.DataLoadProgress = DateTime.Now.AddDays(1); + Assert.Throws(() => lp.Check(ThrowImmediatelyCheckNotifier.Quiet)); + + //Back to normal + lp.RevertToDatabaseState(); + lp.Check(ThrowImmediatelyCheckNotifier.Quiet); + + // valid progress (1 year) + lp.OriginDate = new DateTime(2001, 1, 1); + lp.DataLoadProgress = new DateTime(2002, 1, 1); + lp.Check(ThrowImmediatelyCheckNotifier.Quiet); + } + + [Test] + public void LoadProgress_JobFactory_NoDates() + { + var lp = WhenIHaveA(); + - [Test] - public void LoadProgress_JobFactory_NoDates() + lp.OriginDate = new DateTime(2001, 1, 1); + + // We are fully up-to-date + lp.DataLoadProgress = DateTime.Now; + + lp.Check(ThrowImmediatelyCheckNotifier.Quiet); + + var stratFactory = + new JobDateGenerationStrategyFactory(new AnyAvailableLoadProgressSelectionStrategy(lp.LoadMetadata)); + var strat = stratFactory.Create(lp, ThrowImmediatelyDataLoadEventListener.Quiet); + + var dir = LoadDirectory.CreateDirectoryStructure(new DirectoryInfo(TestContext.CurrentContext.WorkDirectory), + "LoadProgress_JobFactory_NoDates", true); + + var lmd = lp.LoadMetadata; + lmd.LocationOfFlatFiles = dir.RootPath.FullName; + + foreach (var cata in lmd.GetAllCatalogues()) { - var lp = WhenIHaveA(); - - - - lp.OriginDate = new DateTime(2001,1,1); - - // We are fully up-to-date - lp.DataLoadProgress = DateTime.Now; - - lp.Check(new ThrowImmediatelyCheckNotifier()); - - var stratFactory = new JobDateGenerationStrategyFactory(new AnyAvailableLoadProgressSelectionStrategy(lp.LoadMetadata)); - var strat = stratFactory.Create(lp,new ThrowImmediatelyDataLoadEventListener()); - - var dir = LoadDirectory.CreateDirectoryStructure(new DirectoryInfo(TestContext.CurrentContext.WorkDirectory),"LoadProgress_JobFactory_NoDates",true); - - var lmd = lp.LoadMetadata; - lmd.LocationOfFlatFiles = dir.RootPath.FullName; - - foreach(var cata in lmd.GetAllCatalogues()) - { - 
cata.LoggingDataTask = "ff"; - cata.SaveToDatabase(); - } - - lmd.SaveToDatabase(); - - var jobFactory = new SingleScheduledJobFactory(lp,strat,999,lp.LoadMetadata,null); - var job = jobFactory.Create(RepositoryLocator, new ThrowImmediatelyDataLoadEventListener(), null); - - Assert.IsNull(job); - - // We have 1 day to load (date is the last fully loaded date) - lp.DataLoadProgress = DateTime.Now.AddDays(-2); - lp.SaveToDatabase(); - - strat = stratFactory.Create(lp,new ThrowImmediatelyDataLoadEventListener()); - jobFactory = new SingleScheduledJobFactory(lp,strat,999,lp.LoadMetadata,null); - - job = jobFactory.Create(RepositoryLocator,new ThrowImmediatelyDataLoadEventListener(),null); - Assert.AreEqual(1,((ScheduledDataLoadJob)job).DatesToRetrieve.Count); + cata.LoggingDataTask = "ff"; + cata.SaveToDatabase(); } + + lmd.SaveToDatabase(); + + var jobFactory = new SingleScheduledJobFactory(lp, strat, 999, lp.LoadMetadata, null); + var job = jobFactory.Create(RepositoryLocator, ThrowImmediatelyDataLoadEventListener.Quiet, null); + + Assert.IsNull(job); + + // We have 1 day to load (date is the last fully loaded date) + lp.DataLoadProgress = DateTime.Now.AddDays(-2); + lp.SaveToDatabase(); + + strat = stratFactory.Create(lp, ThrowImmediatelyDataLoadEventListener.Quiet); + jobFactory = new SingleScheduledJobFactory(lp, strat, 999, lp.LoadMetadata, null); + + job = jobFactory.Create(RepositoryLocator, ThrowImmediatelyDataLoadEventListener.Quiet, null); + Assert.AreEqual(1, ((ScheduledDataLoadJob)job).DatesToRetrieve.Count); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/LookupTest.cs b/Rdmp.Core.Tests/Curation/Integration/LookupTest.cs index 15c4b642f2..6a51c11d52 100644 --- a/Rdmp.Core.Tests/Curation/Integration/LookupTest.cs +++ b/Rdmp.Core.Tests/Curation/Integration/LookupTest.cs @@ -16,435 +16,498 @@ using Rdmp.Core.Providers; using Rdmp.Core.QueryBuilding; using Rdmp.Core.Repositories; -using ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Checks; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Integration +namespace Rdmp.Core.Tests.Curation.Integration; + +public class LookupTest : DatabaseTests { - public class LookupTest : DatabaseTests + [Test] + public void Test_MultipleLookupReferences() { + BlitzMainDataTables(); - [Test] - public void Test_MultipleLookupReferences() - { - BlitzMainDataTables(); - - var tiHeader = new TableInfo(CatalogueRepository,"Head"); - var tiHeader_Code = new ColumnInfo(CatalogueRepository,"code","",tiHeader); - - var tiLookup = new TableInfo(CatalogueRepository,"z_HeadLookup"); - var tiLookup_Code = new ColumnInfo(CatalogueRepository,"code","",tiLookup); - var tiLookup_Desc = new ColumnInfo(CatalogueRepository,"desc","",tiLookup); - - var lookup = new Lookup(CatalogueRepository,tiLookup_Desc,tiHeader_Code,tiLookup_Code,ExtractionJoinType.Left,null); - - var cata1 = new Catalogue(CatalogueRepository,"Catalogue1"); - var cata2 = new Catalogue(CatalogueRepository,"Catalogue2"); - - var cata1_code = new CatalogueItem(CatalogueRepository,cata1,"code"); - var cata1_desc = new CatalogueItem(CatalogueRepository,cata1,"desc"); - new ExtractionInformation(CatalogueRepository,cata1_code,tiHeader_Code,"[tbl]..[code]"); - new ExtractionInformation(CatalogueRepository,cata1_desc,tiLookup_Desc,"[lookup]..[desc]"); - - var cata2_code = new CatalogueItem(CatalogueRepository,cata2,"code"); - var cata2_desc = new CatalogueItem(CatalogueRepository,cata2,"desc"); - new 
ExtractionInformation(CatalogueRepository,cata2_code,tiHeader_Code,"[tbl]..[code]"); - new ExtractionInformation(CatalogueRepository,cata2_desc,tiLookup_Desc,"[lookup]..[desc]"); - - new CatalogueChildProvider(CatalogueRepository,null, new ThrowImmediatelyCheckNotifier(){ThrowOnWarning=true},null); - - } + var tiHeader = new TableInfo(CatalogueRepository, "Head"); + var tiHeader_Code = new ColumnInfo(CatalogueRepository, "code", "", tiHeader); - [Test] - public void CreateLookup_linkWithSelfThrowsException() - { + var tiLookup = new TableInfo(CatalogueRepository, "z_HeadLookup"); + var tiLookup_Code = new ColumnInfo(CatalogueRepository, "code", "", tiLookup); + var tiLookup_Desc = new ColumnInfo(CatalogueRepository, "desc", "", tiLookup); - TableInfo parent=null; - ColumnInfo child=null; - ColumnInfo child2=null; - ColumnInfo child3=null; + var lookup = new Lookup(CatalogueRepository, tiLookup_Desc, tiHeader_Code, tiLookup_Code, + ExtractionJoinType.Left, null); - try - { - parent = new TableInfo(CatalogueRepository, "unit_test_CreateLookup"); - child = new ColumnInfo(CatalogueRepository, "unit_test_CreateLookup", "int", parent); - child2 = new ColumnInfo(CatalogueRepository, "unit_test_CreateLookup", "int", parent); - child3 = new ColumnInfo(CatalogueRepository, "unit_test_CreateLookup", "int", parent); + var cata1 = new Catalogue(CatalogueRepository, "Catalogue1"); + var cata2 = new Catalogue(CatalogueRepository, "Catalogue2"); - Assert.Throws(()=>new Lookup(CatalogueRepository, child, child2, child3, ExtractionJoinType.Left, null)); - } - finally - { - //cleanup - try{child.DeleteInDatabase();}catch (Exception){} - try{child2.DeleteInDatabase();}catch (Exception){} - try{child3.DeleteInDatabase();}catch (Exception){} - try{parent.DeleteInDatabase();}catch (Exception){} - - } - } + var cata1_code = new CatalogueItem(CatalogueRepository, cata1, "code"); + var cata1_desc = new CatalogueItem(CatalogueRepository, cata1, "desc"); + new ExtractionInformation(CatalogueRepository, cata1_code, tiHeader_Code, "[tbl]..[code]"); + new ExtractionInformation(CatalogueRepository, cata1_desc, tiLookup_Desc, "[lookup]..[desc]"); - [TestCase(true)] - [TestCase(false)] - public void CreateLookup_linkWithOtherTable(bool memoryRepo) - { - var repo = memoryRepo? 
(ICatalogueRepository)new MemoryCatalogueRepository():CatalogueRepository; + var cata2_code = new CatalogueItem(CatalogueRepository, cata2, "code"); + var cata2_desc = new CatalogueItem(CatalogueRepository, cata2, "desc"); + new ExtractionInformation(CatalogueRepository, cata2_code, tiHeader_Code, "[tbl]..[code]"); + new ExtractionInformation(CatalogueRepository, cata2_desc, tiLookup_Desc, "[lookup]..[desc]"); + + new CatalogueChildProvider(CatalogueRepository, null, ThrowImmediatelyCheckNotifier.QuietPicky, null); + } - TableInfo parent = null; - TableInfo parent2 = null; + [Test] + public void CreateLookup_linkWithSelfThrowsException() + { + TableInfo parent = null; + ColumnInfo child = null; + ColumnInfo child2 = null; + ColumnInfo child3 = null; - ColumnInfo child = null; - ColumnInfo child2 = null; - ColumnInfo child3 = null; + try + { + parent = new TableInfo(CatalogueRepository, "unit_test_CreateLookup"); + child = new ColumnInfo(CatalogueRepository, "unit_test_CreateLookup", "int", parent); + child2 = new ColumnInfo(CatalogueRepository, "unit_test_CreateLookup", "int", parent); + child3 = new ColumnInfo(CatalogueRepository, "unit_test_CreateLookup", "int", parent); + Assert.Throws(() => + new Lookup(CatalogueRepository, child, child2, child3, ExtractionJoinType.Left, null)); + } + finally + { + //cleanup try { - parent = new TableInfo(repo, "unit_test_CreateLookup"); - parent2 = new TableInfo(repo, "unit_test_CreateLookupOther"); - child = new ColumnInfo(repo, "unit_test_CreateLookup", "int", parent); //lookup desc - child2 = new ColumnInfo(repo, "unit_test_CreateLookup", "int", parent2); //fk in data table - child3 = new ColumnInfo(repo, "unit_test_CreateLookup", "int", parent); //pk in lookup - - new Lookup(repo, child, child2, child3, ExtractionJoinType.Left, null); - - Assert.AreEqual(child.GetAllLookupForColumnInfoWhereItIsA(LookupType.Description).Length, 1); - Assert.AreEqual(child2.GetAllLookupForColumnInfoWhereItIsA(LookupType.Description).Length, 0); - Assert.AreEqual(child.GetAllLookupForColumnInfoWhereItIsA(LookupType.AnyKey).Length, 0); - Assert.AreEqual(child2.GetAllLookupForColumnInfoWhereItIsA(LookupType.AnyKey).Length, 1); - Assert.AreEqual(child3.GetAllLookupForColumnInfoWhereItIsA(LookupType.AnyKey).Length, 1); + child.DeleteInDatabase(); + } + catch (Exception) + { + } + try + { + child2.DeleteInDatabase(); + } + catch (Exception) + { + } - Assert.IsTrue(parent.IsLookupTable()); - Assert.IsFalse(parent2.IsLookupTable()); + try + { + child3.DeleteInDatabase(); } - finally + catch (Exception) { - //cleanup - try { child.DeleteInDatabase(); }catch (Exception) { } - try { child2.DeleteInDatabase(); }catch (Exception) { } - try { child3.DeleteInDatabase(); }catch (Exception) { } - try { parent.DeleteInDatabase(); }catch (Exception) { } - try { parent2.DeleteInDatabase(); }catch (Exception) { } + } + try + { + parent.DeleteInDatabase(); + } + catch (Exception) + { } } - [Test] - public void CompositeLookupTest() + try { - - - TableInfo fkTable = null; - TableInfo pkTable = null; - ColumnInfo desc = null; - ColumnInfo fk = null; - ColumnInfo pk = null; - - ColumnInfo fk2 = null; - ColumnInfo pk2 = null; - - Lookup lookup = null; - LookupCompositeJoinInfo composite=null; - - try - { + parent.DeleteInDatabase(); + } + catch (Exception) + { + } + } - //table 1 - the dataset table, it has 2 foreign keys e.g. 
TestCode, Healthboard - fkTable = new TableInfo(CatalogueRepository, "UnitTest_Biochemistry"); - fk = new ColumnInfo(CatalogueRepository, "UnitTest_BCTestCode", "int", fkTable); - fk2 = new ColumnInfo(CatalogueRepository, "UnitTest_BCHealthBoard", "int", fkTable); + [TestCase(true)] + [TestCase(false)] + public void CreateLookup_linkWithOtherTable(bool memoryRepo) + { + var repo = memoryRepo ? (ICatalogueRepository)new MemoryCatalogueRepository() : CatalogueRepository; - //table 2 - the lookup table, it has 2 primary keys e.g. TestCode,Healthboard and 1 description e.g. TestDescription (the Healthboard makes it a composite JOIN which allows for the same TestCode being mapped to a different discription in Tayside vs Fife (healthboard) - pkTable = new TableInfo(CatalogueRepository, "UnitTest_BiochemistryLookup"); - pk = new ColumnInfo(CatalogueRepository, "UnitTest_TestCode", "int", pkTable); - pk2 = new ColumnInfo(CatalogueRepository, "UnitTest_Healthboard", "int", pkTable); - desc = new ColumnInfo(CatalogueRepository, "UnitTest_TestDescription", "int", pkTable); - lookup = new Lookup(CatalogueRepository, desc, fk, pk, ExtractionJoinType.Left, null); + TableInfo parent = null; + TableInfo parent2 = null; - Assert.AreEqual(lookup.PrimaryKey.Name, pk.Name); - Assert.AreEqual(lookup.PrimaryKey.ID, pk.ID); + ColumnInfo child = null; + ColumnInfo child2 = null; + ColumnInfo child3 = null; - Assert.AreEqual(lookup.ForeignKey.Name, fk.Name); - Assert.AreEqual(lookup.ForeignKey.ID, fk.ID); + try + { + parent = new TableInfo(repo, "unit_test_CreateLookup"); + parent2 = new TableInfo(repo, "unit_test_CreateLookupOther"); + child = new ColumnInfo(repo, "unit_test_CreateLookup", "int", parent); //lookup desc + child2 = new ColumnInfo(repo, "unit_test_CreateLookup", "int", parent2); //fk in data table + child3 = new ColumnInfo(repo, "unit_test_CreateLookup", "int", parent); //pk in lookup - Assert.AreEqual(lookup.Description.Name, desc.Name); - Assert.AreEqual(lookup.Description.ID, desc.ID); + new Lookup(repo, child, child2, child3, ExtractionJoinType.Left, null); - //Create the composite lookup - composite = new LookupCompositeJoinInfo(CatalogueRepository, lookup, fk2, pk2); + Assert.AreEqual(child.GetAllLookupForColumnInfoWhereItIsA(LookupType.Description).Length, 1); + Assert.AreEqual(child2.GetAllLookupForColumnInfoWhereItIsA(LookupType.Description).Length, 0); + Assert.AreEqual(child.GetAllLookupForColumnInfoWhereItIsA(LookupType.AnyKey).Length, 0); + Assert.AreEqual(child2.GetAllLookupForColumnInfoWhereItIsA(LookupType.AnyKey).Length, 1); + Assert.AreEqual(child3.GetAllLookupForColumnInfoWhereItIsA(LookupType.AnyKey).Length, 1); - Assert.AreEqual(composite.OriginalLookup_ID, lookup.ID); - - Assert.AreEqual(composite.PrimaryKey.ID, pk2.ID); - Assert.AreEqual(composite.PrimaryKey_ID, pk2.ID); - Assert.AreEqual(composite.PrimaryKey.Name, pk2.Name); - Assert.AreEqual(composite.ForeignKey.ID, fk2.ID); - Assert.AreEqual(composite.ForeignKey_ID, fk2.ID); - Assert.AreEqual(composite.ForeignKey.Name, fk2.Name); + Assert.IsTrue(parent.IsLookupTable()); + Assert.IsFalse(parent2.IsLookupTable()); + } + finally + { + //cleanup + try + { + child.DeleteInDatabase(); + } + catch (Exception) + { + } - //get a fresh copy out of memory now that we have created the Lookup composite key, confirm the integrity of that relationship - Assert.AreEqual(lookup.GetSupplementalJoins().Count() , 1); - Assert.AreEqual(lookup.GetSupplementalJoins().Cast().First().ID, composite.ID); + try + { + child2.DeleteInDatabase(); + } + 
catch (Exception) + { + } - composite.DeleteInDatabase(); - composite = null; + try + { + child3.DeleteInDatabase(); + } + catch (Exception) + { + } - Assert.AreEqual(lookup.GetSupplementalJoins().Count(), 0); + try + { + parent.DeleteInDatabase(); + } + catch (Exception) + { } - catch (Exception ex) + + try { - Console.Write(ex.ToString()); - throw; + parent2.DeleteInDatabase(); } - finally + catch (Exception) { - //cleanup - if(composite != null) - composite.DeleteInDatabase(); - - lookup?.DeleteInDatabase(); - - desc?.DeleteInDatabase(); - fk?.DeleteInDatabase(); - pk?.DeleteInDatabase(); - fk2?.DeleteInDatabase(); - pk2?.DeleteInDatabase(); - fkTable?.DeleteInDatabase(); - pkTable?.DeleteInDatabase(); } } + } + [Test] + public void CompositeLookupTest() + { + TableInfo fkTable = null; + TableInfo pkTable = null; + ColumnInfo desc = null; + ColumnInfo fk = null; + ColumnInfo pk = null; + + ColumnInfo fk2 = null; + ColumnInfo pk2 = null; - [Test] - public void CompositeLookupTest_SQL() + Lookup lookup = null; + LookupCompositeJoinInfo composite = null; + + try { - - //this only works for MSSQL Servers - if (CatalogueTableRepository.DiscoveredServer.DatabaseType != DatabaseType.MicrosoftSQLServer) - Assert.Ignore("This test only targets Microsft SQL Servers"); + //table 1 - the dataset table, it has 2 foreign keys e.g. TestCode, Healthboard + fkTable = new TableInfo(CatalogueRepository, "UnitTest_Biochemistry"); + fk = new ColumnInfo(CatalogueRepository, "UnitTest_BCTestCode", "int", fkTable); + fk2 = new ColumnInfo(CatalogueRepository, "UnitTest_BCHealthBoard", "int", fkTable); - TableInfo fkTable = null; - TableInfo pkTable = null; - ColumnInfo desc = null; - ColumnInfo fk = null; - ColumnInfo pk = null; + //table 2 - the lookup table, it has 2 primary keys e.g. TestCode,Healthboard and 1 description e.g. TestDescription (the Healthboard makes it a composite JOIN which allows for the same TestCode being mapped to a different discription in Tayside vs Fife (healthboard) + pkTable = new TableInfo(CatalogueRepository, "UnitTest_BiochemistryLookup"); + pk = new ColumnInfo(CatalogueRepository, "UnitTest_TestCode", "int", pkTable); + pk2 = new ColumnInfo(CatalogueRepository, "UnitTest_Healthboard", "int", pkTable); + desc = new ColumnInfo(CatalogueRepository, "UnitTest_TestDescription", "int", pkTable); + lookup = new Lookup(CatalogueRepository, desc, fk, pk, ExtractionJoinType.Left, null); - ColumnInfo fk2 = null; - ColumnInfo pk2 = null; + Assert.AreEqual(lookup.PrimaryKey.Name, pk.Name); + Assert.AreEqual(lookup.PrimaryKey.ID, pk.ID); - Lookup lookup = null; - LookupCompositeJoinInfo composite = null; + Assert.AreEqual(lookup.ForeignKey.Name, fk.Name); + Assert.AreEqual(lookup.ForeignKey.ID, fk.ID); - try - { + Assert.AreEqual(lookup.Description.Name, desc.Name); + Assert.AreEqual(lookup.Description.ID, desc.ID); - //table 1 - the dataset table, it has 2 foreign keys e.g. TestCode, Healthboard - fkTable = new TableInfo(CatalogueRepository, "UnitTest_Biochemistry"); - fk = new ColumnInfo(CatalogueRepository, "UnitTest_BCTestCode", "int", fkTable); - fk2 = new ColumnInfo(CatalogueRepository, "UnitTest_BCHealthBoard", "int", fkTable); + //Create the composite lookup + composite = new LookupCompositeJoinInfo(CatalogueRepository, lookup, fk2, pk2); - //table 2 - the lookup table, it has 2 primary keys e.g. TestCode,Healthboard and 1 description e.g. 
TestDescription (the Healthboard makes it a composite JOIN which allows for the same TestCode being mapped to a different discription in Tayside vs Fife (healthboard) - pkTable = new TableInfo(CatalogueRepository, "UnitTest_BiochemistryLookup"); - pk = new ColumnInfo(CatalogueRepository, "UnitTest_TestCode", "int", pkTable); - pk2 = new ColumnInfo(CatalogueRepository, "UnitTest_Healthboard", "int", pkTable); - desc = new ColumnInfo(CatalogueRepository, "UnitTest_TestDescription", "int", pkTable); - lookup = new Lookup(CatalogueRepository, desc, fk, pk, ExtractionJoinType.Left, null); + Assert.AreEqual(composite.OriginalLookup_ID, lookup.ID); - string joinSQL = JoinHelper.GetJoinSQL(lookup); + Assert.AreEqual(composite.PrimaryKey.ID, pk2.ID); + Assert.AreEqual(composite.PrimaryKey_ID, pk2.ID); + Assert.AreEqual(composite.PrimaryKey.Name, pk2.Name); - Assert.AreEqual(joinSQL,"UnitTest_Biochemistry Left JOIN UnitTest_BiochemistryLookup ON UnitTest_BCTestCode = UnitTest_TestCode"); + Assert.AreEqual(composite.ForeignKey.ID, fk2.ID); + Assert.AreEqual(composite.ForeignKey_ID, fk2.ID); + Assert.AreEqual(composite.ForeignKey.Name, fk2.Name); - //Create the composite lookup - composite = new LookupCompositeJoinInfo(CatalogueRepository, lookup, fk2, pk2); + //get a fresh copy out of memory now that we have created the Lookup composite key, confirm the integrity of that relationship + Assert.AreEqual(lookup.GetSupplementalJoins().Count(), 1); + Assert.AreEqual(lookup.GetSupplementalJoins().Cast().First().ID, composite.ID); - string joinSQL_AfterAddingCompositeKey = JoinHelper.GetJoinSQL(lookup); + composite.DeleteInDatabase(); + composite = null; - Assert.AreEqual(joinSQL_AfterAddingCompositeKey, "UnitTest_Biochemistry Left JOIN UnitTest_BiochemistryLookup ON UnitTest_BCTestCode = UnitTest_TestCode AND UnitTest_BCHealthBoard = UnitTest_Healthboard"); - } - catch (Exception ex) - { - Console.Write(ex.ToString()); - throw; - } - finally - { - //cleanup - if (composite != null) - composite.DeleteInDatabase(); - - lookup?.DeleteInDatabase(); - - desc?.DeleteInDatabase(); - fk?.DeleteInDatabase(); - pk?.DeleteInDatabase(); - fk2?.DeleteInDatabase(); - pk2?.DeleteInDatabase(); - fkTable?.DeleteInDatabase(); - pkTable?.DeleteInDatabase(); - } + Assert.AreEqual(lookup.GetSupplementalJoins().Count(), 0); + } + catch (Exception ex) + { + Console.Write(ex.ToString()); + throw; } + finally + { + //cleanup + composite?.DeleteInDatabase(); + + lookup?.DeleteInDatabase(); + + desc?.DeleteInDatabase(); + fk?.DeleteInDatabase(); + pk?.DeleteInDatabase(); + fk2?.DeleteInDatabase(); + pk2?.DeleteInDatabase(); + fkTable?.DeleteInDatabase(); + pkTable?.DeleteInDatabase(); + } + } - [Test] - public void LookupTest_CustomSql() + [Test] + public void CompositeLookupTest_SQL() + { + //this only works for MSSQL Servers + if (CatalogueTableRepository.DiscoveredServer.DatabaseType != DatabaseType.MicrosoftSQLServer) + Assert.Ignore("This test only targets Microsft SQL Servers"); + + TableInfo fkTable = null; + TableInfo pkTable = null; + ColumnInfo desc = null; + ColumnInfo fk = null; + ColumnInfo pk = null; + + ColumnInfo fk2 = null; + ColumnInfo pk2 = null; + + Lookup lookup = null; + LookupCompositeJoinInfo composite = null; + + try { + //table 1 - the dataset table, it has 2 foreign keys e.g. 
TestCode, Healthboard + fkTable = new TableInfo(CatalogueRepository, "UnitTest_Biochemistry"); + fk = new ColumnInfo(CatalogueRepository, "UnitTest_BCTestCode", "int", fkTable); + fk2 = new ColumnInfo(CatalogueRepository, "UnitTest_BCHealthBoard", "int", fkTable); - //this only works for MSSQL Servers - if (CatalogueTableRepository.DiscoveredServer.DatabaseType != DatabaseType.MicrosoftSQLServer) - Assert.Ignore("This test only targets Microsft SQL Servers"); + //table 2 - the lookup table, it has 2 primary keys e.g. TestCode,Healthboard and 1 description e.g. TestDescription (the Healthboard makes it a composite JOIN which allows for the same TestCode being mapped to a different discription in Tayside vs Fife (healthboard) + pkTable = new TableInfo(CatalogueRepository, "UnitTest_BiochemistryLookup"); + pk = new ColumnInfo(CatalogueRepository, "UnitTest_TestCode", "int", pkTable); + pk2 = new ColumnInfo(CatalogueRepository, "UnitTest_Healthboard", "int", pkTable); + desc = new ColumnInfo(CatalogueRepository, "UnitTest_TestDescription", "int", pkTable); + lookup = new Lookup(CatalogueRepository, desc, fk, pk, ExtractionJoinType.Left, null); - TableInfo fkTable = null; - TableInfo pkTable = null; - ColumnInfo desc = null; - ColumnInfo fk = null; - ColumnInfo pk = null; + var joinSQL = JoinHelper.GetJoinSQL(lookup); - ColumnInfo fk2 = null; - ColumnInfo pk2 = null; + Assert.AreEqual(joinSQL, + "UnitTest_Biochemistry Left JOIN UnitTest_BiochemistryLookup ON UnitTest_BCTestCode = UnitTest_TestCode"); - Lookup lookup = null; + //Create the composite lookup + composite = new LookupCompositeJoinInfo(CatalogueRepository, lookup, fk2, pk2); - try - { + var joinSQL_AfterAddingCompositeKey = JoinHelper.GetJoinSQL(lookup); + + Assert.AreEqual(joinSQL_AfterAddingCompositeKey, + "UnitTest_Biochemistry Left JOIN UnitTest_BiochemistryLookup ON UnitTest_BCTestCode = UnitTest_TestCode AND UnitTest_BCHealthBoard = UnitTest_Healthboard"); + } + catch (Exception ex) + { + Console.Write(ex.ToString()); + throw; + } + finally + { + //cleanup + composite?.DeleteInDatabase(); + + lookup?.DeleteInDatabase(); + + desc?.DeleteInDatabase(); + fk?.DeleteInDatabase(); + pk?.DeleteInDatabase(); + fk2?.DeleteInDatabase(); + pk2?.DeleteInDatabase(); + fkTable?.DeleteInDatabase(); + pkTable?.DeleteInDatabase(); + } + } - //table 1 - the dataset table, it has 2 foreign keys e.g. TestCode, Healthboard - fkTable = new TableInfo(CatalogueRepository, "UnitTest_Biochemistry"); - fk = new ColumnInfo(CatalogueRepository, "One", "int", fkTable); - fk2 = new ColumnInfo(CatalogueRepository, "Two", "int", fkTable); - //table 2 - the lookup table, it has 2 primary keys e.g. TestCode,Healthboard and 1 description e.g. 
TestDescription (the Healthboard makes it a composite JOIN which allows for the same TestCode being mapped to a different discription in Tayside vs Fife (healthboard) - pkTable = new TableInfo(CatalogueRepository, "UnitTest_BiochemistryLookup"); - pk = new ColumnInfo(CatalogueRepository, "One", "int", pkTable); - pk2 = new ColumnInfo(CatalogueRepository, "Two", "int", pkTable); - desc = new ColumnInfo(CatalogueRepository, "UnitTest_TestDescription", "int", pkTable); - lookup = new Lookup(CatalogueRepository, desc, fk, pk, ExtractionJoinType.Left, null); + [Test] + public void LookupTest_CustomSql() + { + //this only works for MSSQL Servers + if (CatalogueTableRepository.DiscoveredServer.DatabaseType != DatabaseType.MicrosoftSQLServer) + Assert.Ignore("This test only targets Microsft SQL Servers"); + + TableInfo fkTable = null; + TableInfo pkTable = null; + ColumnInfo desc = null; + ColumnInfo fk = null; + ColumnInfo pk = null; - string joinSQL = JoinHelper.GetJoinSQL(lookup); + ColumnInfo fk2 = null; + ColumnInfo pk2 = null; - Assert.AreEqual(joinSQL, "UnitTest_Biochemistry Left JOIN UnitTest_BiochemistryLookup ON One = One"); + Lookup lookup = null; - //Create the custom lookup - var cmd = new ExecuteCommandSetExtendedProperty(new ThrowImmediatelyActivator(RepositoryLocator), - new[] { lookup }, - ExtendedProperty.CustomJoinSql, -@"{0}.One={1}.One AND + try + { + //table 1 - the dataset table, it has 2 foreign keys e.g. TestCode, Healthboard + fkTable = new TableInfo(CatalogueRepository, "UnitTest_Biochemistry"); + fk = new ColumnInfo(CatalogueRepository, "One", "int", fkTable); + fk2 = new ColumnInfo(CatalogueRepository, "Two", "int", fkTable); + + //table 2 - the lookup table, it has 2 primary keys e.g. TestCode,Healthboard and 1 description e.g. 
TestDescription (the Healthboard makes it a composite JOIN which allows for the same TestCode being mapped to a different discription in Tayside vs Fife (healthboard) + pkTable = new TableInfo(CatalogueRepository, "UnitTest_BiochemistryLookup"); + pk = new ColumnInfo(CatalogueRepository, "One", "int", pkTable); + pk2 = new ColumnInfo(CatalogueRepository, "Two", "int", pkTable); + desc = new ColumnInfo(CatalogueRepository, "UnitTest_TestDescription", "int", pkTable); + lookup = new Lookup(CatalogueRepository, desc, fk, pk, ExtractionJoinType.Left, null); + + var joinSQL = JoinHelper.GetJoinSQL(lookup); + + Assert.AreEqual(joinSQL, "UnitTest_Biochemistry Left JOIN UnitTest_BiochemistryLookup ON One = One"); + + //Create the custom lookup + var cmd = new ExecuteCommandSetExtendedProperty(new ThrowImmediatelyActivator(RepositoryLocator), + new[] { lookup }, + ExtendedProperty.CustomJoinSql, + @"{0}.One={1}.One AND ({0}.Two = {0}.Two OR ({0}.{Two} is null AND {1}.Two is null) )"); - cmd.Execute(); + cmd.Execute(); - string joinSQL_AfterAddingCompositeKey = JoinHelper.GetJoinSQL(lookup); + var joinSQL_AfterAddingCompositeKey = JoinHelper.GetJoinSQL(lookup); - Assert.AreEqual(joinSQL_AfterAddingCompositeKey, - "UnitTest_Biochemistry Left JOIN UnitTest_BiochemistryLookup ON UnitTest_Biochemistry.One=UnitTest_BiochemistryLookup.One AND (UnitTest_Biochemistry.Two = UnitTest_Biochemistry.Two OR (UnitTest_Biochemistry.{Two} is null AND UnitTest_BiochemistryLookup.Two is null) )"); - } - catch (Exception ex) - { - Console.Write(ex.ToString()); - throw; - } - finally - { - lookup?.DeleteInDatabase(); - - desc?.DeleteInDatabase(); - fk?.DeleteInDatabase(); - pk?.DeleteInDatabase(); - fk2?.DeleteInDatabase(); - pk2?.DeleteInDatabase(); - fkTable?.DeleteInDatabase(); - pkTable?.DeleteInDatabase(); - } + Assert.AreEqual(joinSQL_AfterAddingCompositeKey, + "UnitTest_Biochemistry Left JOIN UnitTest_BiochemistryLookup ON UnitTest_Biochemistry.One=UnitTest_BiochemistryLookup.One AND (UnitTest_Biochemistry.Two = UnitTest_Biochemistry.Two OR (UnitTest_Biochemistry.{Two} is null AND UnitTest_BiochemistryLookup.Two is null) )"); } - - [TestCase(LookupTestCase.SingleKeySingleDescriptionNoVirtualColumn)] - [TestCase(LookupTestCase.SingleKeySingleDescription)] - public void TestLookupCommand(LookupTestCase testCase) + catch (Exception ex) { - var db = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer); + Console.Write(ex.ToString()); + throw; + } + finally + { + lookup?.DeleteInDatabase(); + + desc?.DeleteInDatabase(); + fk?.DeleteInDatabase(); + pk?.DeleteInDatabase(); + fk2?.DeleteInDatabase(); + pk2?.DeleteInDatabase(); + fkTable?.DeleteInDatabase(); + pkTable?.DeleteInDatabase(); + } + } - DataTable dt = new DataTable(); - dt.Columns.Add("ID"); - dt.Columns.Add("SendingLocation"); - dt.Columns.Add("DischargeLocation"); - dt.Columns.Add("Country"); + [TestCase(LookupTestCase.SingleKeySingleDescriptionNoVirtualColumn)] + [TestCase(LookupTestCase.SingleKeySingleDescription)] + public void TestLookupCommand(LookupTestCase testCase) + { + var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); - var maintbl = db.CreateTable("MainDataset", dt); + var dt = new DataTable(); + dt.Columns.Add("ID"); + dt.Columns.Add("SendingLocation"); + dt.Columns.Add("DischargeLocation"); + dt.Columns.Add("Country"); - var mainCata = Import(maintbl); + var maintbl = db.CreateTable("MainDataset", dt); - DataTable dtLookup = new DataTable(); - dtLookup.Columns.Add("LocationCode"); - dtLookup.Columns.Add("Line1"); - 
dtLookup.Columns.Add("Line2"); - dtLookup.Columns.Add("Postcode"); - dtLookup.Columns.Add("Country"); + var mainCata = Import(maintbl); - var lookuptbl = db.CreateTable("Lookup", dtLookup); + var dtLookup = new DataTable(); + dtLookup.Columns.Add("LocationCode"); + dtLookup.Columns.Add("Line1"); + dtLookup.Columns.Add("Line2"); + dtLookup.Columns.Add("Postcode"); + dtLookup.Columns.Add("Country"); - var lookupCata = Import(lookuptbl); + var lookuptbl = db.CreateTable("Lookup", dtLookup); - ExtractionInformation fkEi = mainCata.GetAllExtractionInformation(ExtractionCategory.Any).Single(n => n.GetRuntimeName() == "SendingLocation"); - ColumnInfo fk = mainCata.GetTableInfoList(false).Single().ColumnInfos.Single(n => n.GetRuntimeName() == "SendingLocation"); - ColumnInfo pk = lookupCata.GetTableInfoList(false).Single().ColumnInfos.Single(n => n.GetRuntimeName() == "LocationCode"); + var lookupCata = Import(lookuptbl); - ColumnInfo descLine1 = lookupCata.GetTableInfoList(false).Single().ColumnInfos.Single(n => n.GetRuntimeName() == "Line1"); - ColumnInfo descLine2 = lookupCata.GetTableInfoList(false).Single().ColumnInfos.Single(n => n.GetRuntimeName() == "Line2"); + var fkEi = mainCata.GetAllExtractionInformation(ExtractionCategory.Any) + .Single(n => n.GetRuntimeName() == "SendingLocation"); + var fk = mainCata.GetTableInfoList(false).Single().ColumnInfos + .Single(n => n.GetRuntimeName() == "SendingLocation"); + var pk = lookupCata.GetTableInfoList(false).Single().ColumnInfos + .Single(n => n.GetRuntimeName() == "LocationCode"); - ExecuteCommandCreateLookup cmd = null; + var descLine1 = lookupCata.GetTableInfoList(false).Single().ColumnInfos + .Single(n => n.GetRuntimeName() == "Line1"); + var descLine2 = lookupCata.GetTableInfoList(false).Single().ColumnInfos + .Single(n => n.GetRuntimeName() == "Line2"); - var sqlBefore = GetSql(mainCata); + ExecuteCommandCreateLookup cmd = null; - switch (testCase) - { - case LookupTestCase.SingleKeySingleDescriptionNoVirtualColumn: - cmd = new ExecuteCommandCreateLookup(CatalogueRepository, fkEi, descLine1, pk,null, false); - cmd.Execute(); - - //sql should not have changed because we didn't create an new ExtractionInformation virtual column - Assert.AreEqual(sqlBefore,GetSql(mainCata)); - break; - case LookupTestCase.SingleKeySingleDescription: - cmd = new ExecuteCommandCreateLookup(CatalogueRepository, fkEi, descLine1, pk,null, true); - cmd.Execute(); - - //should have the lookup join and the virtual column _Desc - var sqlAfter = GetSql(mainCata); - Assert.IsTrue(sqlAfter.Contains("JOIN")); - Assert.IsTrue(sqlAfter.Contains("SendingLocation_Desc")); - break; - default: - throw new ArgumentOutOfRangeException("testCase"); - } - - foreach (var d in CatalogueRepository.GetAllObjects()) - d.DeleteInDatabase(); - foreach (var d in CatalogueRepository.GetAllObjects()) - d.DeleteInDatabase(); - foreach (var d in CatalogueRepository.GetAllObjects()) - d.DeleteInDatabase(); - foreach (var d in CatalogueRepository.GetAllObjects()) - d.DeleteInDatabase(); - - maintbl.Drop(); - lookuptbl.Drop(); - } + var sqlBefore = GetSql(mainCata); - private string GetSql(ICatalogue mainCata) + switch (testCase) { - mainCata.ClearAllInjections(); + case LookupTestCase.SingleKeySingleDescriptionNoVirtualColumn: + cmd = new ExecuteCommandCreateLookup(CatalogueRepository, fkEi, descLine1, pk, null, false); + cmd.Execute(); + + //sql should not have changed because we didn't create an new ExtractionInformation virtual column + Assert.AreEqual(sqlBefore, GetSql(mainCata)); + break; + 
case LookupTestCase.SingleKeySingleDescription: + cmd = new ExecuteCommandCreateLookup(CatalogueRepository, fkEi, descLine1, pk, null, true); + cmd.Execute(); - var qb = new QueryBuilder(null, null); - qb.AddColumnRange(mainCata.GetAllExtractionInformation(ExtractionCategory.Any)); - return qb.SQL; + //should have the lookup join and the virtual column _Desc + var sqlAfter = GetSql(mainCata); + Assert.IsTrue(sqlAfter.Contains("JOIN")); + Assert.IsTrue(sqlAfter.Contains("SendingLocation_Desc")); + break; + default: + throw new ArgumentOutOfRangeException(nameof(testCase)); } + + foreach (var d in CatalogueRepository.GetAllObjects()) + d.DeleteInDatabase(); + foreach (var d in CatalogueRepository.GetAllObjects()) + d.DeleteInDatabase(); + foreach (var d in CatalogueRepository.GetAllObjects()) + d.DeleteInDatabase(); + foreach (var d in CatalogueRepository.GetAllObjects()) + d.DeleteInDatabase(); + + maintbl.Drop(); + lookuptbl.Drop(); } - public enum LookupTestCase + private static string GetSql(ICatalogue mainCata) { - SingleKeySingleDescriptionNoVirtualColumn, - SingleKeySingleDescription, + mainCata.ClearAllInjections(); + + var qb = new QueryBuilder(null, null); + qb.AddColumnRange(mainCata.GetAllExtractionInformation(ExtractionCategory.Any)); + return qb.SQL; } } + +public enum LookupTestCase +{ + SingleKeySingleDescriptionNoVirtualColumn, + SingleKeySingleDescription +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/MEFCheckerTests.cs b/Rdmp.Core.Tests/Curation/Integration/MEFCheckerTests.cs index 26ef26efea..31717b5486 100644 --- a/Rdmp.Core.Tests/Curation/Integration/MEFCheckerTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/MEFCheckerTests.cs @@ -6,99 +6,55 @@ using System; using System.IO; -using System.Linq; using NUnit.Framework; using Rdmp.Core.Curation.Checks; using Rdmp.Core.Curation.Data; -using ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Checks; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Integration +namespace Rdmp.Core.Tests.Curation.Integration; + +public class MEFCheckerTests : UnitTests { - - public class MEFCheckerTests:UnitTests + [Test] + public void FindClass_WrongCase_FoundAnyway() { - [OneTimeSetUp] - protected override void OneTimeSetUp() - { - base.OneTimeSetUp(); - - SetupMEF(); - } - - [Test] - public void FindClass_WrongCase_FoundAnyway() - { - Assert.AreEqual(typeof(Catalogue),Repository.MEF.GetType("catalogue")); - } - - [Test] - public void FindClass_EmptyString() - { - MEFChecker m = new MEFChecker(Repository.MEF, "", s => Assert.Fail()); - var ex = Assert.Throws(()=>m.Check(new ThrowImmediatelyCheckNotifier())); - Assert.AreEqual("MEFChecker was asked to check for the existence of an Export class but the _classToFind string was empty",ex.Message); - } - - [Test] - public void FindClass_CorrectNamespace() - { - MEFChecker m = new MEFChecker(Repository.MEF, "Rdmp.Core.DataLoad.Modules.Attachers.AnySeparatorFileAttacher", s => Assert.Fail()); - m.Check(new ThrowImmediatelyCheckNotifier()); - } - - [Test] - public void FindClass_WrongNamespace() - { - MEFChecker m = new MEFChecker(Repository.MEF, "CatalogueLibrary.AnySeparatorFileAttacher", s => Assert.Pass()); - m.Check(new AcceptAllCheckNotifier()); - - Assert.Fail("Expected the class not to be found but to be identified under the correct namespace (above)"); - } - - [Test] - public void FindClass_NonExistant() - { - MEFChecker m = new MEFChecker(Repository.MEF, "CatalogueLibrary.UncleSam", s => Assert.Fail()); - var ex = 
Assert.Throws(()=>m.Check(new ThrowImmediatelyCheckNotifier())); - StringAssert.Contains("Could not find MEF class called CatalogueLibrary.UncleSam in LoadModuleAssembly.GetAllTypes() and couldn't even find any with the same basic name",ex.Message); - } - - [Test] - public void DllFileDuplication_Ignored() - { - // Setup 2 directories that will contain duplicate copies of the same dll - var badDir1 = new DirectoryInfo(Path.Combine(TestContext.CurrentContext.TestDirectory,"Bad1")); - var badDir2 = new DirectoryInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, "Bad2")); - - if (badDir1.Exists) - badDir1.Delete(true); - - badDir1.Create(); - - if (badDir2.Exists) - badDir2.Delete(true); - - badDir2.Create(); - - var dllToCopy = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory,"Rdmp.Core.dll")); - - // copy the dll to both folders - File.Copy(dllToCopy.FullName, Path.Combine(badDir1.FullName,"Rdmp.Core.dll")); - File.Copy(dllToCopy.FullName, Path.Combine(badDir2.FullName, "Rdmp.Core.dll")); - - var tomem = new ToMemoryCheckNotifier(); - - var sdc = new SafeDirectoryCatalog(tomem, badDir1.FullName,badDir2.FullName); + Assert.AreEqual(typeof(Catalogue), Core.Repositories.MEF.GetType("catalogue")); + } - Assert.AreEqual(sdc .DuplicateDllsIgnored, 1); - - badDir1.Delete(true); - badDir2.Delete(true); + [Test] + public void FindClass_EmptyString() + { + var m = new MEFChecker("", s => Assert.Fail()); + var ex = Assert.Throws(() => m.Check(ThrowImmediatelyCheckNotifier.Quiet)); + Assert.AreEqual( + "MEFChecker was asked to check for the existence of an Export class but the _classToFind string was empty", + ex.Message); + } - } + [Test] + public void FindClass_CorrectNamespace() + { + var m = new MEFChecker("Rdmp.Core.DataLoad.Modules.Attachers.AnySeparatorFileAttacher", s => Assert.Fail()); + m.Check(ThrowImmediatelyCheckNotifier.Quiet); + } + [Test] + public void FindClass_WrongNamespace() + { + var m = new MEFChecker("CatalogueLibrary.AnySeparatorFileAttacher", s => Assert.Pass()); + m.Check(new AcceptAllCheckNotifier()); + Assert.Fail("Expected the class not to be found but to be identified under the correct namespace (above)"); + } + [Test] + public void FindClass_NonExistent() + { + var m = new MEFChecker("CatalogueLibrary.UncleSam", s => Assert.Fail()); + var ex = Assert.Throws(() => m.Check(ThrowImmediatelyCheckNotifier.Quiet)); + StringAssert.Contains( + "Could not find MEF class called CatalogueLibrary.UncleSam in LoadModuleAssembly.GetAllTypes() and couldn't even find any with the same basic name", + ex?.Message); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/MementoTests.cs b/Rdmp.Core.Tests/Curation/Integration/MementoTests.cs index b95914469b..7998386c7d 100644 --- a/Rdmp.Core.Tests/Curation/Integration/MementoTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/MementoTests.cs @@ -9,80 +9,78 @@ using Rdmp.Core.Curation.Data; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Integration +namespace Rdmp.Core.Tests.Curation.Integration; + +public class MementoTests : DatabaseTests { - public class MementoTests : DatabaseTests + [Test] + public void FakeMemento_Catalogue_Modify() { - [Test] - public void FakeMemento_Catalogue_Modify() - { - var c = new Catalogue(CatalogueRepository,"Hey"); + var c = new Catalogue(CatalogueRepository, "Hey"); - var g = Guid.NewGuid(); - var commit = new Commit(CatalogueRepository, g, "Breaking stuff!"); + var g = Guid.NewGuid(); + var commit = new Commit(CatalogueRepository, g, 
"Breaking stuff!"); - var mem = new Memento(CatalogueRepository,commit,MementoType.Modify,c,"yar","blerg"); - mem.SaveToDatabase(); + var mem = new Memento(CatalogueRepository, commit, MementoType.Modify, c, "yar", "blerg"); + mem.SaveToDatabase(); - mem.BeforeYaml = "haha"; - Assert.AreEqual("haha", mem.BeforeYaml); - mem.RevertToDatabaseState(); - Assert.AreEqual("yar", mem.BeforeYaml); + mem.BeforeYaml = "haha"; + Assert.AreEqual("haha", mem.BeforeYaml); + mem.RevertToDatabaseState(); + Assert.AreEqual("yar", mem.BeforeYaml); - var mem2 = CatalogueRepository.GetObjectByID(mem.ID); + var mem2 = CatalogueRepository.GetObjectByID(mem.ID); - Assert.AreEqual(g, new Guid(mem2.Commit.Transaction)); - Assert.AreEqual("blerg", mem2.AfterYaml); - Assert.AreEqual("yar", mem2.BeforeYaml); - Assert.AreEqual(MementoType.Modify, mem2.Type); - Assert.AreEqual(Environment.UserName, mem2.Commit.Username); - Assert.AreEqual(c, mem2.GetReferencedObject(RepositoryLocator)); - } + Assert.AreEqual(g, new Guid(mem2.Commit.Transaction)); + Assert.AreEqual("blerg", mem2.AfterYaml); + Assert.AreEqual("yar", mem2.BeforeYaml); + Assert.AreEqual(MementoType.Modify, mem2.Type); + Assert.AreEqual(Environment.UserName, mem2.Commit.Username); + Assert.AreEqual(c, mem2.GetReferencedObject(RepositoryLocator)); + } + + + [Test] + public void FakeMemento_Catalogue_Add() + { + var c = new Catalogue(CatalogueRepository, "Hey"); + var g = Guid.NewGuid(); + var commit = new Commit(CatalogueRepository, g, "Breaking stuff!"); - [Test] - public void FakeMemento_Catalogue_Add() + var mem = new Memento(CatalogueRepository, commit, MementoType.Add, c, null, "blerg"); + mem.SaveToDatabase(); + + foreach (var check in new[] { mem, CatalogueRepository.GetObjectByID(mem.ID) }) { - var c = new Catalogue(CatalogueRepository, "Hey"); - - var g = Guid.NewGuid(); - var commit = new Commit(CatalogueRepository, g, "Breaking stuff!"); - - var mem = new Memento(CatalogueRepository, commit, MementoType.Add, c, null, "blerg"); - mem.SaveToDatabase(); - - foreach(var check in new[] { mem, CatalogueRepository.GetObjectByID(mem.ID) }) - { - Assert.IsNull(check.BeforeYaml); - Assert.AreEqual(g, new Guid(check.Commit.Transaction)); - Assert.AreEqual("blerg", check.AfterYaml); - Assert.AreEqual(MementoType.Add, check.Type); - Assert.AreEqual(Environment.UserName, check.Commit.Username); - Assert.AreEqual(c, check.GetReferencedObject(RepositoryLocator)); - } + Assert.IsNull(check.BeforeYaml); + Assert.AreEqual(g, new Guid(check.Commit.Transaction)); + Assert.AreEqual("blerg", check.AfterYaml); + Assert.AreEqual(MementoType.Add, check.Type); + Assert.AreEqual(Environment.UserName, check.Commit.Username); + Assert.AreEqual(c, check.GetReferencedObject(RepositoryLocator)); } + } + + [Test] + public void FakeMemento_Catalogue_Delete() + { + var c = new Catalogue(CatalogueRepository, "Hey"); + + var g = Guid.NewGuid(); + var commit = new Commit(CatalogueRepository, g, "Breaking stuff!"); + + var mem = new Memento(CatalogueRepository, commit, MementoType.Delete, c, "blah", null); + mem.SaveToDatabase(); - [Test] - public void FakeMemento_Catalogue_Delete() + foreach (var check in new[] { mem, CatalogueRepository.GetObjectByID(mem.ID) }) { - var c = new Catalogue(CatalogueRepository, "Hey"); - - var g = Guid.NewGuid(); - var commit = new Commit(CatalogueRepository, g, "Breaking stuff!"); - - var mem = new Memento(CatalogueRepository, commit, MementoType.Delete, c, "blah", null); - mem.SaveToDatabase(); - - foreach (var check in new[] { mem, 
CatalogueRepository.GetObjectByID(mem.ID) }) - { - Assert.IsNull(check.AfterYaml); - Assert.AreEqual(g, new Guid(check.Commit.Transaction)); - Assert.AreEqual("blah", check.BeforeYaml); - Assert.AreEqual(MementoType.Delete, check.Type); - Assert.AreEqual(Environment.UserName, check.Commit.Username); - Assert.AreEqual(c, check.GetReferencedObject(RepositoryLocator)); - } + Assert.IsNull(check.AfterYaml); + Assert.AreEqual(g, new Guid(check.Commit.Transaction)); + Assert.AreEqual("blah", check.BeforeYaml); + Assert.AreEqual(MementoType.Delete, check.Type); + Assert.AreEqual(Environment.UserName, check.Commit.Username); + Assert.AreEqual(c, check.GetReferencedObject(RepositoryLocator)); } } -} - \ No newline at end of file +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/MetadataLoggingConfigurationChecksTests.cs b/Rdmp.Core.Tests/Curation/Integration/MetadataLoggingConfigurationChecksTests.cs index 2d659c830d..e5ecbfce88 100644 --- a/Rdmp.Core.Tests/Curation/Integration/MetadataLoggingConfigurationChecksTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/MetadataLoggingConfigurationChecksTests.cs @@ -9,107 +9,109 @@ using Rdmp.Core.Curation.Data.DataLoad; using Rdmp.Core.Curation.Data.Defaults; using Rdmp.Core.DataLoad.Engine.Checks.Checkers; -using ReusableLibraryCode.Checks; -using System; using System.Linq; +using Rdmp.Core.ReusableLibraryCode.Checks; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Integration +namespace Rdmp.Core.Tests.Curation.Integration; + +public class MetadataLoggingConfigurationChecksTests : UnitTests { - public class MetadataLoggingConfigurationChecksTests : UnitTests + [Test] + public void Test_NoLoggingTask() + { + var lmd = WhenIHaveA(); + var cata1 = lmd.GetAllCatalogues().Single(); + var cata2 = WhenIHaveA(); + cata2.LoadMetadata_ID = lmd.ID; + + Assert.AreEqual(2, lmd.GetAllCatalogues().Count()); + + var checks = new MetadataLoggingConfigurationChecks(lmd); + var toMem = new ToMemoryCheckNotifier(); + checks.Check(toMem); + + AssertFailWithFix("Catalogues Mycata,Mycata do not have a logging task specified", + "Create a new Logging Task called 'MyLoad'?", toMem); + } + + [Test] + public void Test_MismatchedLoggingTask() + { + var lmd = WhenIHaveA(); + var cata1 = lmd.GetAllCatalogues().Single(); + var cata2 = WhenIHaveA(); + cata2.LoadMetadata_ID = lmd.ID; + + cata1.LoggingDataTask = "OMG YEAGH"; + + Assert.AreEqual(2, lmd.GetAllCatalogues().Count()); + + var checks = new MetadataLoggingConfigurationChecks(lmd); + var toMem = new ToMemoryCheckNotifier(); + checks.Check(toMem); + + AssertFailWithFix("Some catalogues have NULL LoggingDataTasks", "Set task to OMG YEAGH", toMem); + } + + [Test] + public void Test_MissingLoggingServer() + { + var lmd = WhenIHaveA(); + var cata1 = lmd.GetAllCatalogues().Single(); + var cata2 = WhenIHaveA(); + cata2.LoadMetadata_ID = lmd.ID; + + cata1.LoggingDataTask = "OMG YEAGH"; + cata1.LiveLoggingServer_ID = 2; + cata2.LoggingDataTask = "OMG YEAGH"; + cata2.LiveLoggingServer_ID = null; + + Assert.AreEqual(2, lmd.GetAllCatalogues().Count()); + + var checks = new MetadataLoggingConfigurationChecks(lmd); + var toMem = new ToMemoryCheckNotifier(); + checks.Check(toMem); + + AssertFailWithFix("Some catalogues have NULL LiveLoggingServer_ID", "Set LiveLoggingServer_ID to 2", toMem); + } + + [Test] + public void Test_MissingLoggingServer_UseDefault() + { + var lmd = WhenIHaveA(); + var cata1 = lmd.GetAllCatalogues().Single(); + var cata2 = WhenIHaveA(); + + var eds = WhenIHaveA(); + eds.Name = 
"My Logging Server"; + eds.SaveToDatabase(); + + cata2.LoadMetadata_ID = lmd.ID; + + cata1.LoggingDataTask = "OMG YEAGH"; + cata1.LiveLoggingServer_ID = null; + cata2.LoggingDataTask = "OMG YEAGH"; + cata2.LiveLoggingServer_ID = null; + + var defaults = RepositoryLocator.CatalogueRepository; + defaults.SetDefault(PermissableDefaults.LiveLoggingServer_ID, eds); + + Assert.AreEqual(2, lmd.GetAllCatalogues().Count()); + + var checks = new MetadataLoggingConfigurationChecks(lmd); + var toMem = new ToMemoryCheckNotifier(); + checks.Check(toMem); + + AssertFailWithFix("Some catalogues have NULL LiveLoggingServer_ID", + $"Set LiveLoggingServer_ID to 'My Logging Server' (the default)", toMem); + } + + private static void AssertFailWithFix(string expectedMessage, string expectedFix, ToMemoryCheckNotifier toMem) { - [Test] - public void Test_NoLoggingTask() - { - var lmd = WhenIHaveA(); - var cata1 = lmd.GetAllCatalogues().Single(); - var cata2 = WhenIHaveA(); - cata2.LoadMetadata_ID = lmd.ID; - - Assert.AreEqual(2,lmd.GetAllCatalogues().Count()); - - var checks = new MetadataLoggingConfigurationChecks(lmd); - var toMem = new ToMemoryCheckNotifier(); - checks.Check(toMem); - - AssertFailWithFix("Catalogues Mycata,Mycata do not have a logging task specified","Create a new Logging Task called 'MyLoad'?",toMem); - } - - [Test] - public void Test_MismatchedLoggingTask() - { - var lmd = WhenIHaveA(); - var cata1 = lmd.GetAllCatalogues().Single(); - var cata2 = WhenIHaveA(); - cata2.LoadMetadata_ID = lmd.ID; - - cata1.LoggingDataTask = "OMG YEAGH"; - - Assert.AreEqual(2,lmd.GetAllCatalogues().Count()); - - var checks = new MetadataLoggingConfigurationChecks(lmd); - var toMem = new ToMemoryCheckNotifier(); - checks.Check(toMem); - - AssertFailWithFix("Some catalogues have NULL LoggingDataTasks","Set task to OMG YEAGH",toMem); - } - - [Test] - public void Test_MissingLoggingServer() - { - var lmd = WhenIHaveA(); - var cata1 = lmd.GetAllCatalogues().Single(); - var cata2 = WhenIHaveA(); - cata2.LoadMetadata_ID = lmd.ID; - - cata1.LoggingDataTask = "OMG YEAGH"; - cata1.LiveLoggingServer_ID = 2; - cata2.LoggingDataTask = "OMG YEAGH"; - cata2.LiveLoggingServer_ID = null; - - Assert.AreEqual(2,lmd.GetAllCatalogues().Count()); - - var checks = new MetadataLoggingConfigurationChecks(lmd); - var toMem = new ToMemoryCheckNotifier(); - checks.Check(toMem); - - AssertFailWithFix("Some catalogues have NULL LiveLoggingServer_ID","Set LiveLoggingServer_ID to 2",toMem); - } - [Test] - public void Test_MissingLoggingServer_UseDefault() - { - var lmd = WhenIHaveA(); - var cata1 = lmd.GetAllCatalogues().Single(); - var cata2 = WhenIHaveA(); - - var eds = WhenIHaveA(); - eds.Name = "My Logging Server"; - eds.SaveToDatabase(); - - cata2.LoadMetadata_ID = lmd.ID; - - cata1.LoggingDataTask = "OMG YEAGH"; - cata1.LiveLoggingServer_ID = null; - cata2.LoggingDataTask = "OMG YEAGH"; - cata2.LiveLoggingServer_ID = null; - - var defaults = RepositoryLocator.CatalogueRepository; - defaults.SetDefault(PermissableDefaults.LiveLoggingServer_ID,eds); - - Assert.AreEqual(2,lmd.GetAllCatalogues().Count()); - - var checks = new MetadataLoggingConfigurationChecks(lmd); - var toMem = new ToMemoryCheckNotifier(); - checks.Check(toMem); - - AssertFailWithFix("Some catalogues have NULL LiveLoggingServer_ID",$"Set LiveLoggingServer_ID to 'My Logging Server' (the default)",toMem); - } - private void AssertFailWithFix(string expectedMessage, string expectedFix, ToMemoryCheckNotifier toMem) - { - var msg = toMem.Messages.Where(m => m.Result == 
CheckResult.Fail).First(); - - Assert.AreEqual(expectedMessage,msg.Message,"Expected error message was wrong"); - Assert.AreEqual(expectedFix,msg.ProposedFix,"Expected proposed fix was wrong"); - } - } -} + var msg = toMem.Messages.First(m => m.Result == CheckResult.Fail); + + Assert.AreEqual(expectedMessage, msg.Message, "Expected error message was wrong"); + Assert.AreEqual(expectedFix, msg.ProposedFix, "Expected proposed fix was wrong"); + } +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/MySqlTriggerImplementerTests.cs b/Rdmp.Core.Tests/Curation/Integration/MySqlTriggerImplementerTests.cs index 8a80870267..5163722df7 100644 --- a/Rdmp.Core.Tests/Curation/Integration/MySqlTriggerImplementerTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/MySqlTriggerImplementerTests.cs @@ -4,24 +4,20 @@ // RDMP is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. // You should have received a copy of the GNU General Public License along with RDMP. If not, see . -using FAnsi; -using FAnsi.Discovery; using NUnit.Framework; using Rdmp.Core.DataLoad.Triggers.Implementations; -using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Integration +namespace Rdmp.Core.Tests.Curation.Integration; + +public class MySqlTriggerImplementerTests { - public class MySqlTriggerImplementerTests + [TestCase("4.0", true)] + [TestCase("5.1", true)] + [TestCase("8.5", false)] + [TestCase("5.5.64-MariaDB", true)] + [TestCase("10.5.64-MariaDB", false)] + public void TestOldNew(string versionString, bool expectToUseOldMethod) { - [TestCase("4.0",true)] - [TestCase("5.1",true)] - [TestCase("8.5",false)] - [TestCase("5.5.64-MariaDB",true)] - [TestCase("10.5.64-MariaDB",false)] - public void TestOldNew(string versionString, bool expectToUseOldMethod) - { - Assert.AreEqual(expectToUseOldMethod,MySqlTriggerImplementer.UseOldDateTimeDefaultMethod(versionString)); - } + Assert.AreEqual(expectToUseOldMethod, MySqlTriggerImplementer.UseOldDateTimeDefaultMethod(versionString)); } } \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/ObscureDependencyTests/BetweenCatalogueAndDataExportObscureDependencyFinderTests.cs b/Rdmp.Core.Tests/Curation/Integration/ObscureDependencyTests/BetweenCatalogueAndDataExportObscureDependencyFinderTests.cs index 336e443590..cb86dda2fc 100644 --- a/Rdmp.Core.Tests/Curation/Integration/ObscureDependencyTests/BetweenCatalogueAndDataExportObscureDependencyFinderTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/ObscureDependencyTests/BetweenCatalogueAndDataExportObscureDependencyFinderTests.cs @@ -5,74 +5,75 @@ // You should have received a copy of the GNU General Public License along with RDMP. If not, see . 
using System; -using MapsDirectlyToDatabaseTable.Revertable; using NUnit.Framework; using Rdmp.Core.Curation.Data; using Rdmp.Core.DataExport; using Rdmp.Core.DataExport.Data; +using Rdmp.Core.MapsDirectlyToDatabaseTable.Revertable; using Rdmp.Core.Startup; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Integration.ObscureDependencyTests +namespace Rdmp.Core.Tests.Curation.Integration.ObscureDependencyTests; + +public class BetweenCatalogueAndDataExportObscureDependencyFinderTests : DatabaseTests { - public class BetweenCatalogueAndDataExportObscureDependencyFinderTests : DatabaseTests + [Test] + public void PreventDeletingCatalogueBecauseOfLinkedDatasetTest() { - [Test] - public void PreventDeletingCatalogueBecauseOfLinkedDatasetTest() - { - var obscura = new BetweenCatalogueAndDataExportObscureDependencyFinder(RepositoryLocator); - var cata = new Catalogue(CatalogueRepository, "MyCata"); - - //catalogue exists in isolation so is deletable - Assert.DoesNotThrow(()=>obscura.ThrowIfDeleteDisallowed(cata)); - - //there is a new dataset which is linked to Catalogue - var dataset = new ExtractableDataSet(DataExportRepository,cata); - - //and suddenly we cannot delete the catalogue - var ex = Assert.Throws(() => obscura.ThrowIfDeleteDisallowed(cata)); - Assert.IsTrue(ex.Message.Contains("Cannot delete Catalogue MyCata because there are ExtractableDataSets which depend on them ")); - - //also if we try to force through a delete it should behave in identical manner - var ex2 = Assert.Throws(cata.DeleteInDatabase); - Assert.IsTrue(ex2.Message.Contains("Cannot delete Catalogue MyCata because there are ExtractableDataSets which depend on them ")); - - //now we delete the linked dataset - dataset.DeleteInDatabase(); - - //and because there is now no longer a dataset dependency on the catalogue we can delete it - Assert.DoesNotThrow(() => obscura.ThrowIfDeleteDisallowed(cata)); - - //and the delete works too - cata.DeleteInDatabase(); - - //both objects still exist in memory of course but we should be able to see they have disapeared - Assert.IsTrue(dataset.HasLocalChanges().Evaluation == ChangeDescription.DatabaseCopyWasDeleted); - Assert.IsTrue(cata.HasLocalChanges().Evaluation == ChangeDescription.DatabaseCopyWasDeleted); - } - - [Test] - public void AllowDeletingWhenDataExportManagerIsNotSet() - { - var noDataExportManagerExists = new LinkedRepositoryProvider(CatalogueTableRepository.ConnectionString,null); - - var obscura1 = new BetweenCatalogueAndDataExportObscureDependencyFinder(RepositoryLocator); - var obscura2 = new BetweenCatalogueAndDataExportObscureDependencyFinder(noDataExportManagerExists); - - var cata = new Catalogue(CatalogueRepository, "MyCata"); - var dataset = new ExtractableDataSet(DataExportRepository,cata); - - //we cannot delete it because there is a dependency - var ex = Assert.Throws(() => obscura1.ThrowIfDeleteDisallowed(cata)); - Assert.IsTrue(ex.Message.Contains("Cannot delete Catalogue MyCata because there are ExtractableDataSets which depend on them ")); - - //the second finder simulates when the repository locator doesn't have a record of the data export repository so it is unable to check it so it will let you delete it just fine - Assert.DoesNotThrow(() => obscura2.ThrowIfDeleteDisallowed(cata)); - - //now delete them in the correct order - dataset.DeleteInDatabase(); - cata.DeleteInDatabase(); - - } + var obscura = new BetweenCatalogueAndDataExportObscureDependencyFinder(RepositoryLocator); + var cata = new Catalogue(CatalogueRepository, "MyCata"); + + 
//catalogue exists in isolation so is deletable + Assert.DoesNotThrow(() => obscura.ThrowIfDeleteDisallowed(cata)); + + //there is a new dataset which is linked to Catalogue + var dataset = new ExtractableDataSet(DataExportRepository, cata); + + //and suddenly we cannot delete the catalogue + var ex = Assert.Throws(() => obscura.ThrowIfDeleteDisallowed(cata)); + Assert.IsTrue(ex.Message.Contains( + "Cannot delete Catalogue MyCata because there are ExtractableDataSets which depend on them ")); + + //also if we try to force through a delete it should behave in an identical manner + var ex2 = Assert.Throws(cata.DeleteInDatabase); + Assert.IsTrue(ex2.Message.Contains( + "Cannot delete Catalogue MyCata because there are ExtractableDataSets which depend on them ")); + + //now we delete the linked dataset + dataset.DeleteInDatabase(); + + //and because there is now no longer a dataset dependency on the catalogue we can delete it + Assert.DoesNotThrow(() => obscura.ThrowIfDeleteDisallowed(cata)); + + //and the delete works too + cata.DeleteInDatabase(); + + //both objects still exist in memory of course but we should be able to see they have disappeared + Assert.IsTrue(dataset.HasLocalChanges().Evaluation == ChangeDescription.DatabaseCopyWasDeleted); + Assert.IsTrue(cata.HasLocalChanges().Evaluation == ChangeDescription.DatabaseCopyWasDeleted); + } + + [Test] + public void AllowDeletingWhenDataExportManagerIsNotSet() + { + var noDataExportManagerExists = new LinkedRepositoryProvider(CatalogueTableRepository.ConnectionString, null); + + var obscura1 = new BetweenCatalogueAndDataExportObscureDependencyFinder(RepositoryLocator); + var obscura2 = new BetweenCatalogueAndDataExportObscureDependencyFinder(noDataExportManagerExists); + + var cata = new Catalogue(CatalogueRepository, "MyCata"); + var dataset = new ExtractableDataSet(DataExportRepository, cata); + + //we cannot delete it because there is a dependency + var ex = Assert.Throws(() => obscura1.ThrowIfDeleteDisallowed(cata)); + Assert.IsTrue(ex.Message.Contains( + "Cannot delete Catalogue MyCata because there are ExtractableDataSets which depend on them ")); + + //the second finder simulates when the repository locator doesn't have a record of the data export repository so it is unable to check it, so it will let you delete it just fine + Assert.DoesNotThrow(() => obscura2.ThrowIfDeleteDisallowed(cata)); + + //now delete them in the correct order + dataset.DeleteInDatabase(); + cata.DeleteInDatabase(); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/ObscureDependencyTests/ObjectSharingObscureDependencyFinderTests.cs b/Rdmp.Core.Tests/Curation/Integration/ObscureDependencyTests/ObjectSharingObscureDependencyFinderTests.cs index 63253845a2..7d3806b528 100644 --- a/Rdmp.Core.Tests/Curation/Integration/ObscureDependencyTests/ObjectSharingObscureDependencyFinderTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/ObscureDependencyTests/ObjectSharingObscureDependencyFinderTests.cs @@ -12,101 +12,102 @@ using Rdmp.Core.DataExport.Data; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Integration.ObscureDependencyTests +namespace Rdmp.Core.Tests.Curation.Integration.ObscureDependencyTests; + +public class ObjectSharingObscureDependencyFinderTests : DatabaseTests { - public class ObjectSharingObscureDependencyFinderTests: DatabaseTests + private ShareManager _share; + + [SetUp] + protected override void SetUp() { - private ShareManager _share; - 
[SetUp] - protected override void SetUp() - { - base.SetUp(); - _share = new ShareManager(RepositoryLocator); - } + [Test] + public void TestPruning() + { + var c = new Catalogue(CatalogueRepository, "Catapault"); + var ci = new CatalogueItem(CatalogueRepository, c, "string"); - [Test] - public void TestPruning() - { - Catalogue c = new Catalogue(CatalogueRepository,"Catapault"); - var ci = new CatalogueItem(CatalogueRepository, c, "string"); - - Catalogue c2 = new Catalogue(CatalogueRepository,"Catapault (Import)"); - var ci2 = new CatalogueItem(CatalogueRepository, c2, "string (Import)"); + var c2 = new Catalogue(CatalogueRepository, "Catapault (Import)"); + var ci2 = new CatalogueItem(CatalogueRepository, c2, "string (Import)"); - Assert.AreEqual(CatalogueRepository.GetAllObjects().Count(), 0); - var ec = _share.GetNewOrExistingExportFor(c); - var eci = _share.GetNewOrExistingExportFor(ci); + Assert.AreEqual(CatalogueRepository.GetAllObjects().Length, 0); + var ec = _share.GetNewOrExistingExportFor(c); + var eci = _share.GetNewOrExistingExportFor(ci); - _share.GetImportAs(ec.SharingUID, c2); - _share.GetImportAs(eci.SharingUID, ci2); - - Assert.AreEqual(2,CatalogueRepository.GetAllObjects().Count()); - Assert.AreEqual(2,CatalogueRepository.GetAllObjects().Count()); - Assert.AreEqual(2,CatalogueRepository.GetAllObjects().Count());//successive calls shouldhn't generate extra entries since they are same obj - Assert.AreEqual(2,CatalogueRepository.GetAllObjects().Count()); + _share.GetImportAs(ec.SharingUID, c2); + _share.GetImportAs(eci.SharingUID, ci2); - //cannot delete the shared object - Assert.Throws(c.DeleteInDatabase); + Assert.AreEqual(2, CatalogueRepository.GetAllObjects().Length); + Assert.AreEqual(2, CatalogueRepository.GetAllObjects().Length); + Assert.AreEqual(2, + CatalogueRepository.GetAllObjects() + .Length); //successive calls shouldn't generate extra entries since they are the same obj + Assert.AreEqual(2, CatalogueRepository.GetAllObjects().Length); - //can delete the import because that's ok - Assert.DoesNotThrow(c2.DeleteInDatabase); + //cannot delete the shared object + Assert.Throws(c.DeleteInDatabase); - //now that we deleted the import it should have deleted everything else including the CatalogueItem import which magically disapeared when we deleted the Catalogue via database level cascade events - Assert.AreEqual(0,CatalogueRepository.GetAllObjects().Count()); + //can delete the import because that's ok + Assert.DoesNotThrow(c2.DeleteInDatabase); - _share.GetImportAs(eci.SharingUID, ci2); - } + //now that we deleted the import it should have deleted everything else including the CatalogueItem import which magically disappeared when we deleted the Catalogue via database level cascade events + Assert.AreEqual(0, CatalogueRepository.GetAllObjects().Length); - [Test] - public void CannotDeleteSharedObjectTest() - { - //create a test catalogue - Catalogue c = new Catalogue(CatalogueRepository,"blah"); + _share.GetImportAs(eci.SharingUID, ci2); + } - Assert.IsFalse(_share.IsExportedObject(c)); + [Test] + public void CannotDeleteSharedObjectTest() + { + //create a test catalogue + var c = new Catalogue(CatalogueRepository, "blah"); + + Assert.IsFalse(_share.IsExportedObject(c)); - //make it exportable - var exportDefinition = _share.GetNewOrExistingExportFor(c); + //make it exportable + var exportDefinition = _share.GetNewOrExistingExportFor(c); - Assert.IsTrue(_share.IsExportedObject(c)); + Assert.IsTrue(_share.IsExportedObject(c)); - //cannot delete because object is 
shared externally - Assert.Throws(c.DeleteInDatabase); + //cannot delete because object is shared externally + Assert.Throws(c.DeleteInDatabase); - //no longer exportable - exportDefinition.DeleteInDatabase(); + //no longer exportable + exportDefinition.DeleteInDatabase(); - //no longer shared - Assert.IsFalse(_share.IsExportedObject(c)); + //no longer shared + Assert.IsFalse(_share.IsExportedObject(c)); - //now we can delete it - c.DeleteInDatabase(); - } + //now we can delete it + c.DeleteInDatabase(); + } - [Test] - public void CascadeDeleteImportDefinitions() - { - Project p = new Project(DataExportRepository, "prah"); + [Test] + public void CascadeDeleteImportDefinitions() + { + var p = new Project(DataExportRepository, "prah"); - var exportDefinition = _share.GetNewOrExistingExportFor(p); + var exportDefinition = _share.GetNewOrExistingExportFor(p); - Project p2 = new Project(DataExportRepository, "prah2"); + var p2 = new Project(DataExportRepository, "prah2"); - var importDefinition = _share.GetImportAs(exportDefinition.SharingUID, p2); + var importDefinition = _share.GetImportAs(exportDefinition.SharingUID, p2); - //import definition exists - Assert.IsTrue(importDefinition.Exists()); + //import definition exists + Assert.IsTrue(importDefinition.Exists()); - //delete local import - p2.DeleteInDatabase(); + //delete local import + p2.DeleteInDatabase(); - //cascade should have deleted the import definition since the imported object version is gone - Assert.IsFalse(importDefinition.Exists()); + //cascade should have deleted the import definition since the imported object version is gone + Assert.IsFalse(importDefinition.Exists()); - //clear SetUp the exported version too - exportDefinition.DeleteInDatabase(); - p.DeleteInDatabase(); - } + //clear up the exported version too + exportDefinition.DeleteInDatabase(); + p.DeleteInDatabase(); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/ObscureDependencyTests/ValidationXMLObscureDependencyFinderTests.cs b/Rdmp.Core.Tests/Curation/Integration/ObscureDependencyTests/ValidationXMLObscureDependencyFinderTests.cs index 700f6b0827..617c66c6aa 100644 --- a/Rdmp.Core.Tests/Curation/Integration/ObscureDependencyTests/ValidationXMLObscureDependencyFinderTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/ObscureDependencyTests/ValidationXMLObscureDependencyFinderTests.cs @@ -7,34 +7,33 @@ using System; using System.Linq; using System.Text.RegularExpressions; -using MapsDirectlyToDatabaseTable.Versioning; using NUnit.Framework; using Rdmp.Core.Curation.Data; -using Rdmp.Core.Startup; +using Rdmp.Core.MapsDirectlyToDatabaseTable.Versioning; +using Rdmp.Core.ReusableLibraryCode.Checks; using Rdmp.Core.Validation; using Rdmp.Core.Validation.Constraints.Secondary; using Rdmp.Core.Validation.Dependency; using Rdmp.Core.Validation.Dependency.Exceptions; -using ReusableLibraryCode.Checks; using Tests.Common; using Tests.Common.Scenarios; -namespace Rdmp.Core.Tests.Curation.Integration.ObscureDependencyTests +namespace Rdmp.Core.Tests.Curation.Integration.ObscureDependencyTests; + +public class ValidationXMLObscureDependencyFinderTests : DatabaseTests { - public class ValidationXMLObscureDependencyFinderTests: DatabaseTests + [Test] + public void TestGettingTheUsualSuspects() { - [Test] - public void TestGettingTheUsualSuspects() - { - ValidationXMLObscureDependencyFinder finder = new ValidationXMLObscureDependencyFinder( RepositoryLocator); - - //forces call to initialize - finder.ThrowIfDeleteDisallowed(null); + 
var finder = new ValidationXMLObscureDependencyFinder(RepositoryLocator); + + //forces call to initialize + finder.ThrowIfDeleteDisallowed(null); - //this guy should be a usual suspect! - Assert.IsTrue(finder.TheUsualSuspects.Any(s => s.Type == typeof(ReferentialIntegrityConstraint))); + //this guy should be a usual suspect! + Assert.IsTrue(finder.TheUsualSuspects.Any(s => s.Type == typeof(ReferentialIntegrityConstraint))); - var testXML = + var testXML = @" @@ -51,151 +50,140 @@ public void TestGettingTheUsualSuspects() "; - bool kaizerSoze = false; - foreach (Suspect suspect in finder.TheUsualSuspects) - { - string pattern = string.Format(suspect.Pattern, 10029); + Assert.IsTrue(finder.TheUsualSuspects.Select(suspect => string.Format(suspect.Pattern, 10029)) + .Any(pattern => Regex.IsMatch(testXML, pattern, RegexOptions.Singleline))); + } + + + [Test] + public void DeleteAReferencedValidationXML() + { + var testData = SetupTestData(out var l2ColumnInfo); + try + { + Validator.LocatorForXMLDeserialization = RepositoryLocator; + + var worked = Validator.LoadFromXml(testData.catalogue.ValidatorXML); + + //notice that it is the ID of the referenced column that is maintained not the name of it! that is because we need to use a data access portal to get the contents of the column which might be in a different table (and normally would be) + Assert.IsFalse(testData.catalogue.ValidatorXML.Contains("previous_address_L2")); + Assert.IsTrue(testData.catalogue.ValidatorXML.Contains(l2ColumnInfo.ID.ToString())); - kaizerSoze = Regex.IsMatch(testXML, pattern,RegexOptions.Singleline); + Assert.IsTrue(testData.catalogue.ValidatorXML.Contains("previous_address_L1")); - if (kaizerSoze) - break; - } - - Assert.IsTrue(kaizerSoze); + //we expect the validation XML to find the reference + var finder = new ValidationXMLObscureDependencyFinder(RepositoryLocator); + + //and explode + Assert.Throws(() => finder.ThrowIfDeleteDisallowed(l2ColumnInfo)); + + Assert.AreEqual(0, finder.CataloguesWithBrokenValidationXml.Count); + + //now clear the validation XML + testData.catalogue.ValidatorXML = + testData.catalogue.ValidatorXML.Insert(100, "I've got a lovely bunch of coconuts!"); + testData.catalogue.SaveToDatabase(); + + //column info should be deleteable but only because we got ourselves onto the forbidlist + Assert.DoesNotThrow(() => finder.ThrowIfDeleteDisallowed(l2ColumnInfo)); + Assert.AreEqual(1, finder.CataloguesWithBrokenValidationXml.Count); + + testData.catalogue.ValidatorXML = ""; + testData.catalogue.SaveToDatabase(); + + //column info should be deleteable now that we cleared the XML + Assert.DoesNotThrow(() => finder.ThrowIfDeleteDisallowed(l2ColumnInfo)); + } + finally + { + testData.DeleteCatalogue(); } + } + + [Test] + public void Test_DeleteAColumnInfoThatIsReferenced() + { + var startup = new Startup.Startup(RepositoryLocator); + startup.DoStartup(IgnoreAllErrorsCheckNotifier.Instance); + var testData = SetupTestData(out var l2ColumnInfo); - [Test] - public void DeleteAReferencedValidationXML() + try { - ColumnInfo l2ColumnInfo; - BulkTestsData testData = SetupTestData(out l2ColumnInfo); - try - { - Validator.LocatorForXMLDeserialization = RepositoryLocator; - - var worked = Validator.LoadFromXml(testData.catalogue.ValidatorXML); - - //notice that it is the ID of the referenced column that is maintained not the name of it! 
that is because we need to use a data access portal to get the contents of the column which might be in a different table (and normally would be) - Assert.IsFalse(testData.catalogue.ValidatorXML.Contains("previous_address_L2")); - Assert.IsTrue(testData.catalogue.ValidatorXML.Contains(l2ColumnInfo.ID.ToString())); - - Assert.IsTrue(testData.catalogue.ValidatorXML.Contains("previous_address_L1")); - - //we expect the validation XML to find the reference - ValidationXMLObscureDependencyFinder finder = new ValidationXMLObscureDependencyFinder(RepositoryLocator); - - //and explode - Assert.Throws(() => finder.ThrowIfDeleteDisallowed(l2ColumnInfo)); - - Assert.AreEqual(0,finder.CataloguesWithBrokenValidationXml.Count); - - //now clear the validation XML - testData.catalogue.ValidatorXML = testData.catalogue.ValidatorXML.Insert(100,"I've got a lovely bunch of coconuts!"); - testData.catalogue.SaveToDatabase(); - - //column info should be deleteable but only because we got ourselves onto the forbidlist - Assert.DoesNotThrow(() => finder.ThrowIfDeleteDisallowed(l2ColumnInfo)); - Assert.AreEqual(1, finder.CataloguesWithBrokenValidationXml.Count); - - testData.catalogue.ValidatorXML = ""; - testData.catalogue.SaveToDatabase(); - - //column info should be deleteable now that we cleared the XML - Assert.DoesNotThrow(() => finder.ThrowIfDeleteDisallowed(l2ColumnInfo)); - } - finally - { - testData.DeleteCatalogue(); - } + //should fail because of the validation constraint being dependent on it + Assert.Throws(() => l2ColumnInfo.DeleteInDatabase()); } - - [Test] - public void Test_DeleteAColumnInfoThatIsReferenced() + finally { - var startup = new Startup.Startup(new EnvironmentInfo(),RepositoryLocator); - startup.DoStartup(new IgnoreAllErrorsCheckNotifier()); - - ColumnInfo l2ColumnInfo; - var testData = SetupTestData(out l2ColumnInfo); - - try - { - //should fail because of the validation constraint being dependent on it - Assert.Throws(()=>l2ColumnInfo.DeleteInDatabase()); - } - finally - { - testData.catalogue.ValidatorXML = null; - testData.catalogue.SaveToDatabase(); - - testData.DeleteCatalogue(); - } + testData.catalogue.ValidatorXML = null; + testData.catalogue.SaveToDatabase(); + + testData.DeleteCatalogue(); } + } - [Test] - public void TestRunningSetupMultipleTimes() + [Test] + public void TestRunningSetupMultipleTimes() + { + var startup = new Startup.Startup(RepositoryLocator); + try { - - var startup = new Startup.Startup(new EnvironmentInfo(),RepositoryLocator); - try - { - startup.DoStartup(new IgnoreAllErrorsCheckNotifier()); - } - catch (InvalidPatchException patchException) - { - throw new Exception("Problem in patch " + patchException.ScriptName ,patchException); - } - //there should be all the obscure dependencies we need done with only the first call to this function - int numberAfterFirstRun = - ((CatalogueObscureDependencyFinder) CatalogueRepository.ObscureDependencyFinder) - .OtherDependencyFinders.Count; - - startup.DoStartup(new IgnoreAllErrorsCheckNotifier()); - startup.DoStartup(new IgnoreAllErrorsCheckNotifier()); - startup.DoStartup(new IgnoreAllErrorsCheckNotifier()); - - //there should not be any replication! and doubling SetUp! 
- Assert.AreEqual(numberAfterFirstRun, - ((CatalogueObscureDependencyFinder) CatalogueRepository.ObscureDependencyFinder) - .OtherDependencyFinders.Count); - - + startup.DoStartup(IgnoreAllErrorsCheckNotifier.Instance); + } + catch (InvalidPatchException patchException) + { + throw new Exception($"Problem in patch {patchException.ScriptName}", patchException); } - #region setup test data with some validation rule + //there should be all the obscure dependencies we need done with only the first call to this function + var numberAfterFirstRun = + ((CatalogueObscureDependencyFinder)CatalogueRepository.ObscureDependencyFinder) + .OtherDependencyFinders.Count; - private BulkTestsData SetupTestData(out ColumnInfo l2ColumnInfo) - { - //Setup test data - var testData = new BulkTestsData(CatalogueRepository, GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer)); - testData.SetupTestData(); - testData.ImportAsCatalogue(); + startup.DoStartup(IgnoreAllErrorsCheckNotifier.Instance); + startup.DoStartup(IgnoreAllErrorsCheckNotifier.Instance); + startup.DoStartup(IgnoreAllErrorsCheckNotifier.Instance); - //Setup some validation rules - Validator v = new Validator(); + //there should not be any replication or doubling up! + Assert.AreEqual(numberAfterFirstRun, + ((CatalogueObscureDependencyFinder)CatalogueRepository.ObscureDependencyFinder) + .OtherDependencyFinders.Count); + } - //rule is that previous address line 1 cannot be the same as previous address line 2 - var iv = new ItemValidator("previous_address_L1"); - l2ColumnInfo = testData.columnInfos.Single(c => c.GetRuntimeName().Equals("previous_address_L2")); + #region setup test data with some validation rule - //define the secondary constraint - var referentialConstraint = new ReferentialIntegrityConstraint(CatalogueRepository); - referentialConstraint.InvertLogic = true; - referentialConstraint.OtherColumnInfo = l2ColumnInfo; + private BulkTestsData SetupTestData(out ColumnInfo l2ColumnInfo) + { + //Setup test data + var testData = new BulkTestsData(CatalogueRepository, GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer)); + testData.SetupTestData(); + testData.ImportAsCatalogue(); - //add it to the item validator for previous_address_L1 - iv.SecondaryConstraints.Add(referentialConstraint); + //Setup some validation rules + var v = new Validator(); - //add the completed item validator to the validator (normally there would be 1 item validator per column with validation but in this test we only have 1) - v.ItemValidators.Add(iv); + //rule is that previous address line 1 cannot be the same as previous address line 2 + var iv = new ItemValidator("previous_address_L1"); + l2ColumnInfo = testData.columnInfos.Single(c => c.GetRuntimeName().Equals("previous_address_L2")); + + //define the secondary constraint + var referentialConstraint = new ReferentialIntegrityConstraint(CatalogueRepository) + { + InvertLogic = true, + OtherColumnInfo = l2ColumnInfo + }; - testData.catalogue.ValidatorXML = v.SaveToXml(); - testData.catalogue.SaveToDatabase(); + //add it to the item validator for previous_address_L1 + iv.SecondaryConstraints.Add(referentialConstraint); - return testData; - } - #endregion + //add the completed item validator to the validator (normally there would be 1 item validator per column with validation but in this test we only have 1) + v.ItemValidators.Add(iv); + + testData.catalogue.ValidatorXML = v.SaveToXml(); + testData.catalogue.SaveToDatabase(); + + return testData; } -} + + #endregion +} \ No newline at end of file diff --git 
a/Rdmp.Core.Tests/Curation/Integration/PasswordEncryptionKeyLocationTests.cs b/Rdmp.Core.Tests/Curation/Integration/PasswordEncryptionKeyLocationTests.cs index 826bd6df34..d1b1f1559b 100644 --- a/Rdmp.Core.Tests/Curation/Integration/PasswordEncryptionKeyLocationTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/PasswordEncryptionKeyLocationTests.cs @@ -12,87 +12,85 @@ using Rdmp.Core.Repositories.Managers; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Integration +namespace Rdmp.Core.Tests.Curation.Integration; + +public class PasswordEncryptionKeyLocationTests : DatabaseTests { - public class PasswordEncryptionKeyLocationTests:DatabaseTests + [SetUp] + protected override void SetUp() { - [SetUp] - protected override void SetUp() - { - base.SetUp(); - - var keyLocation = new PasswordEncryptionKeyLocation(CatalogueRepository); - - if(keyLocation.GetKeyFileLocation() != null) - Assert.Inconclusive(); - } + base.SetUp(); - [Test] - public void NoKeyFileToStartWith() - { - var keyLocation = new PasswordEncryptionKeyLocation(CatalogueRepository); + var keyLocation = new PasswordEncryptionKeyLocation(CatalogueRepository); - //there shouldn't already be a key - Assert.IsNull(keyLocation.GetKeyFileLocation()); + if (keyLocation.GetKeyFileLocation() != null) + Assert.Inconclusive(); + } - var e = Assert.Throws(keyLocation.DeleteKey); - Assert.AreEqual("Cannot delete key because there is no key file configured", e.Message); + [Test] + public void NoKeyFileToStartWith() + { + var keyLocation = new PasswordEncryptionKeyLocation(CatalogueRepository); - } + //there shouldn't already be a key + Assert.IsNull(keyLocation.GetKeyFileLocation()); - [Test] - public void CreateKeyFile() - { - var keyLocation = new PasswordEncryptionKeyLocation(CatalogueRepository); - var file = keyLocation.CreateNewKeyFile(Path.Combine(TestContext.CurrentContext.TestDirectory,"my.key")); + var e = Assert.Throws(keyLocation.DeleteKey); + Assert.AreEqual("Cannot delete key because there is no key file configured", e.Message); + } - Console.WriteLine("Key file location is:" + file.FullName); - Console.WriteLine("Text put into file is:" + Environment.NewLine + File.ReadAllText(file.FullName)); + [Test] + public void CreateKeyFile() + { + var keyLocation = new PasswordEncryptionKeyLocation(CatalogueRepository); + var file = keyLocation.CreateNewKeyFile(Path.Combine(TestContext.CurrentContext.TestDirectory, "my.key")); - Assert.IsTrue(file.FullName.EndsWith("my.key")); + Console.WriteLine($"Key file location is:{file.FullName}"); + Console.WriteLine($"Text put into file is:{Environment.NewLine}{File.ReadAllText(file.FullName)}"); - Assert.AreEqual(file.FullName, keyLocation.GetKeyFileLocation()); - keyLocation.DeleteKey(); + Assert.IsTrue(file.FullName.EndsWith("my.key")); - Assert.IsNull(keyLocation.GetKeyFileLocation()); - } + Assert.AreEqual(file.FullName, keyLocation.GetKeyFileLocation()); + keyLocation.DeleteKey(); - [Test] - public void Encrypt() - { - string value = "MyPieceOfText"; + Assert.IsNull(keyLocation.GetKeyFileLocation()); + } - Console.WriteLine("String is:" + value); + [Test] + public void Encrypt() + { + var value = "MyPieceOfText"; - EncryptedString encrypter = new EncryptedString(CatalogueRepository); - Assert.IsFalse(encrypter.IsStringEncrypted(value)); + Console.WriteLine($"String is:{value}"); - //should do pass through encryption - encrypter.Value = value; - Assert.AreNotEqual(value,encrypter.Value); - Assert.AreEqual(value,encrypter.GetDecryptedValue()); + var encrypter = new 
EncryptedString(CatalogueRepository); + Assert.IsFalse(encrypter.IsStringEncrypted(value)); - Console.WriteLine("Encrypted (stock) is:" + encrypter.Value); - Console.WriteLine("Decrypted (stock) is:" + encrypter.GetDecryptedValue()); + //should do pass through encryption + encrypter.Value = value; + Assert.AreNotEqual(value, encrypter.Value); + Assert.AreEqual(value, encrypter.GetDecryptedValue()); - var keyLocation = new PasswordEncryptionKeyLocation(CatalogueRepository); - keyLocation.CreateNewKeyFile(Path.Combine(TestContext.CurrentContext.TestDirectory, "my.key")); - var p = keyLocation.OpenKeyFile(); + Console.WriteLine($"Encrypted (stock) is:{encrypter.Value}"); + Console.WriteLine($"Decrypted (stock) is:{encrypter.GetDecryptedValue()}"); - CatalogueRepository.EncryptionManager.ClearAllInjections(); + var keyLocation = new PasswordEncryptionKeyLocation(CatalogueRepository); + keyLocation.CreateNewKeyFile(Path.Combine(TestContext.CurrentContext.TestDirectory, "my.key")); + var p = keyLocation.OpenKeyFile(); - var s = CatalogueRepository.EncryptionManager.GetEncrypter(); - var exception = Assert.Throws(()=>s.Decrypt(encrypter.Value)); - Assert.IsTrue(exception.Message.StartsWith("Could not decrypt an encrypted string, possibly you are trying to decrypt it after having changed the PrivateKey ")); + CatalogueRepository.EncryptionManager.ClearAllInjections(); - string encrypted = s.Encrypt(value); - Console.WriteLine("Encrypted (with key) is:" + encrypted); - Console.WriteLine("Decrypted (with key) is:" + s.Decrypt(encrypted)); + var s = CatalogueRepository.EncryptionManager.GetEncrypter(); + var exception = Assert.Throws(() => s.Decrypt(encrypter.Value)); + Assert.IsTrue(exception.Message.StartsWith( + "Could not decrypt an encrypted string, possibly you are trying to decrypt it after having changed the PrivateKey ")); - Assert.IsTrue(encrypter.IsStringEncrypted(encrypted)); + var encrypted = s.Encrypt(value); + Console.WriteLine($"Encrypted (with key) is:{encrypted}"); + Console.WriteLine($"Decrypted (with key) is:{s.Decrypt(encrypted)}"); - keyLocation.DeleteKey(); - } + Assert.IsTrue(encrypter.IsStringEncrypted(encrypted)); + keyLocation.DeleteKey(); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/PipelineTests.cs b/Rdmp.Core.Tests/Curation/Integration/PipelineTests.cs index af0f960e24..a2904766d2 100644 --- a/Rdmp.Core.Tests/Curation/Integration/PipelineTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/PipelineTests.cs @@ -6,7 +6,6 @@ using System; using System.Linq; -using MapsDirectlyToDatabaseTable; using NUnit.Framework; using Rdmp.Core.Curation.Data.Pipelines; using Rdmp.Core.DataLoad.Engine.Pipeline.Components.Anonymisation; @@ -15,221 +14,232 @@ using Rdmp.Core.DataLoad.Modules.DataFlowOperations; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Integration +namespace Rdmp.Core.Tests.Curation.Integration; + +public class PipelineTests : DatabaseTests { - public class PipelineTests : DatabaseTests + [Test] + public void SetupAndSaveAPipeline() { - [Test] - public void SetupAndSaveAPipeline() + var pipeline = new Pipeline(CatalogueRepository, "Bob"); + + try { - Pipeline pipeline = new Pipeline(CatalogueRepository, "Bob"); + Assert.AreEqual(pipeline.Name, "Bob"); + + var pipelineComponent = + new PipelineComponent(CatalogueRepository, pipeline, typeof(BasicAnonymisationEngine), 0); try { - Assert.AreEqual(pipeline.Name,"Bob"); + Assert.AreEqual(pipelineComponent.Class, typeof(BasicAnonymisationEngine).FullName); + + var argument1 = 
(PipelineComponentArgument)pipelineComponent.CreateNewArgument(); + var argument2 = new PipelineComponentArgument(CatalogueRepository, pipelineComponent); - PipelineComponent pipelineComponent = new PipelineComponent(CatalogueRepository, pipeline, typeof (BasicAnonymisationEngine), 0); - try { - Assert.AreEqual(pipelineComponent.Class,typeof(BasicAnonymisationEngine).FullName); - - PipelineComponentArgument argument1 = (PipelineComponentArgument) pipelineComponent.CreateNewArgument(); - PipelineComponentArgument argument2 = new PipelineComponentArgument(CatalogueRepository, pipelineComponent); - - try - { - argument1.SetType(typeof(string)); - argument1.SetValue("bob"); - argument1.SaveToDatabase(); - - var dt = DateTime.Now ; - dt = new DateTime(dt.Ticks - (dt.Ticks % TimeSpan.TicksPerSecond),dt.Kind);//get rid of the milliseconds - - argument2.SetType(typeof(DateTime)); - argument2.SetValue(dt); - argument2.SaveToDatabase(); - - PipelineComponentArgument argument2Copy = CatalogueRepository.GetObjectByID(argument2.ID); - Assert.AreEqual(dt,argument2Copy.GetValueAsSystemType()); - } - finally - { - - argument1.DeleteInDatabase(); - argument2.DeleteInDatabase(); - } + argument1.SetType(typeof(string)); + argument1.SetValue("bob"); + argument1.SaveToDatabase(); + + var dt = DateTime.Now; + dt = new DateTime(dt.Ticks - dt.Ticks % TimeSpan.TicksPerSecond, + dt.Kind); //get rid of the milliseconds + + argument2.SetType(typeof(DateTime)); + argument2.SetValue(dt); + argument2.SaveToDatabase(); + + var argument2Copy = CatalogueRepository.GetObjectByID(argument2.ID); + Assert.AreEqual(dt, argument2Copy.GetValueAsSystemType()); } finally { - pipelineComponent.DeleteInDatabase(); + argument1.DeleteInDatabase(); + argument2.DeleteInDatabase(); } - } - finally + finally { - pipeline.DeleteInDatabase(); + pipelineComponent.DeleteInDatabase(); } } - - [Test] - public void ClonePipelineNaming() + finally { - Pipeline p = new Pipeline(CatalogueRepository); - p.Name = "My Pipe"; - p.SaveToDatabase(); - - var clone1 = p.Clone(); - var clone2 = p.Clone(); - var clone3 = p.Clone(); - - Assert.AreEqual("My Pipe (Clone)",clone1.Name); - Assert.AreEqual("My Pipe (Clone2)",clone2.Name); - Assert.AreEqual("My Pipe (Clone3)",clone3.Name); - - var cloneOfClone1 = clone3.Clone(); - var cloneOfClone2 = clone3.Clone(); - - Assert.AreEqual("My Pipe (Clone3) (Clone)",cloneOfClone1.Name); - Assert.AreEqual("My Pipe (Clone3) (Clone2)",cloneOfClone2.Name); + pipeline.DeleteInDatabase(); } + } - - /// - /// Tests the ability to clone a including all - /// components and arguments. 
- /// - /// - [Test] - public void CloneAPipeline() + [Test] + public void ClonePipelineNaming() + { + var p = new Pipeline(CatalogueRepository) { - Pipeline p = new Pipeline(CatalogueRepository); - - var source = new PipelineComponent(CatalogueRepository, p, typeof (DelimitedFlatFileAttacher), 0); - source.CreateArgumentsForClassIfNotExists(); - - var middle = new PipelineComponent(CatalogueRepository, p, typeof (ColumnRenamer), 1); - middle.CreateArgumentsForClassIfNotExists(); - - var middle2 = new PipelineComponent(CatalogueRepository, p, typeof(ColumnForbidder), 1); - middle2.CreateArgumentsForClassIfNotExists(); - - var destination = new PipelineComponent(CatalogueRepository, p, typeof (DataTableUploadDestination), 2); - destination.CreateArgumentsForClassIfNotExists(); - - p.SourcePipelineComponent_ID = source.ID; - p.DestinationPipelineComponent_ID = destination.ID; - p.SaveToDatabase(); - - int componentsBefore = RepositoryLocator.CatalogueRepository.GetAllObjects().Count(); - int argumentsBefore = RepositoryLocator.CatalogueRepository.GetAllObjects().Count(); - - var arg = p.PipelineComponents.Single(c => c.Class == typeof (ColumnRenamer).ToString()).PipelineComponentArguments.Single(a => a.Name == "ColumnNameToFind"); - arg.SetValue("MyMostCoolestColumnEver"); - arg.SaveToDatabase(); - - //Execute the cloning process - var p2 = p.Clone(); - - Assert.AreNotEqual(p2, p); - Assert.AreNotEqual(p2.ID,p.ID); - - Assert.AreEqual(p2.Name, p.Name + " (Clone)"); - - Assert.AreEqual(componentsBefore *2, RepositoryLocator.CatalogueRepository.GetAllObjects().Count()); - Assert.AreEqual(argumentsBefore *2, RepositoryLocator.CatalogueRepository.GetAllObjects().Count()); - - //p the original should have a pipeline component that has the value we set earlier - Assert.AreEqual( - p.PipelineComponents.Single(c => c.Class == typeof(ColumnRenamer).ToString()).PipelineComponentArguments.Single(a => a.Name == "ColumnNameToFind").Value, - "MyMostCoolestColumnEver" - ); - - //p2 the clone should have a pipeline component too since it's a clone - Assert.AreEqual( - p2.PipelineComponents.Single(c => c.Class == typeof(ColumnRenamer).ToString()).PipelineComponentArguments.Single(a => a.Name == "ColumnNameToFind").Value, - "MyMostCoolestColumnEver" - ); - - //both should have source and destination components - Assert.NotNull(p2.DestinationPipelineComponent_ID); - Assert.NotNull(p2.SourcePipelineComponent_ID); - - //but with different IDs because they are clones - Assert.AreNotEqual(p.DestinationPipelineComponent_ID, p2.DestinationPipelineComponent_ID); - Assert.AreNotEqual(p.SourcePipelineComponent_ID, p2.SourcePipelineComponent_ID); - - p.DeleteInDatabase(); - p2.DeleteInDatabase(); - } + Name = "My Pipe" + }; + p.SaveToDatabase(); - [Test] - public void CloneAPipeline_BrokenPipes() - { - Pipeline p = new Pipeline(CatalogueRepository); + var clone1 = p.Clone(); + var clone2 = p.Clone(); + var clone3 = p.Clone(); + + Assert.AreEqual("My Pipe (Clone)", clone1.Name); + Assert.AreEqual("My Pipe (Clone2)", clone2.Name); + Assert.AreEqual("My Pipe (Clone3)", clone3.Name); - //Setup a pipeline with a source component type that doesn't exist - var source = new PipelineComponent(CatalogueRepository, p, typeof (DelimitedFlatFileAttacher), 0); - source.Class = "Trollololol"; - source.SaveToDatabase(); + var cloneOfClone1 = clone3.Clone(); + var cloneOfClone2 = clone3.Clone(); - var arg = source.CreateNewArgument(); + Assert.AreEqual("My Pipe (Clone3) (Clone)", cloneOfClone1.Name); + Assert.AreEqual("My Pipe (Clone3) 
(Clone2)", cloneOfClone2.Name); + } - //Also give the source component a non existent argument - arg.GetType().GetProperty("Type").SetValue(arg,"fffffzololz"); - arg.SaveToDatabase(); - p.SourcePipelineComponent_ID = source.ID; - p.SaveToDatabase(); - - Assert.AreEqual("fffffzololz",p.Source.GetAllArguments().Single().Type); + /// + /// Tests the ability to clone a including all + /// components and arguments. + /// + /// + [Test] + public void CloneAPipeline() + { + var p = new Pipeline(CatalogueRepository); - var clone = p.Clone(); + var source = new PipelineComponent(CatalogueRepository, p, typeof(DelimitedFlatFileAttacher), 0); + source.CreateArgumentsForClassIfNotExists(); - Assert.AreEqual(clone.Source.Class,p.Source.Class); - Assert.AreEqual("fffffzololz",clone.Source.GetAllArguments().Single().Type); + var middle = new PipelineComponent(CatalogueRepository, p, typeof(ColumnRenamer), 1); + middle.CreateArgumentsForClassIfNotExists(); - p.DeleteInDatabase(); - clone.DeleteInDatabase(); + var middle2 = new PipelineComponent(CatalogueRepository, p, typeof(ColumnForbidder), 1); + middle2.CreateArgumentsForClassIfNotExists(); + var destination = new PipelineComponent(CatalogueRepository, p, typeof(DataTableUploadDestination), 2); + destination.CreateArgumentsForClassIfNotExists(); - } - [Test] - public void DeletePipelineSource_ClearsReference() + p.SourcePipelineComponent_ID = source.ID; + p.DestinationPipelineComponent_ID = destination.ID; + p.SaveToDatabase(); + + var componentsBefore = RepositoryLocator.CatalogueRepository.GetAllObjects().Length; + var argumentsBefore = RepositoryLocator.CatalogueRepository.GetAllObjects().Length; + + var arg = p.PipelineComponents.Single(c => c.Class == typeof(ColumnRenamer).ToString()) + .PipelineComponentArguments.Single(a => a.Name == "ColumnNameToFind"); + arg.SetValue("MyMostCoolestColumnEver"); + arg.SaveToDatabase(); + + //Execute the cloning process + var p2 = p.Clone(); + + Assert.AreNotEqual(p2, p); + Assert.AreNotEqual(p2.ID, p.ID); + + Assert.AreEqual(p2.Name, $"{p.Name} (Clone)"); + + Assert.AreEqual(componentsBefore * 2, + RepositoryLocator.CatalogueRepository.GetAllObjects().Length); + Assert.AreEqual(argumentsBefore * 2, + RepositoryLocator.CatalogueRepository.GetAllObjects().Length); + + //p the original should have a pipeline component that has the value we set earlier + Assert.AreEqual( + p.PipelineComponents.Single(c => c.Class == typeof(ColumnRenamer).ToString()).PipelineComponentArguments + .Single(a => a.Name == "ColumnNameToFind").Value, + "MyMostCoolestColumnEver" + ); + + //p2 the clone should have a pipeline component too since it's a clone + Assert.AreEqual( + p2.PipelineComponents.Single(c => c.Class == typeof(ColumnRenamer).ToString()).PipelineComponentArguments + .Single(a => a.Name == "ColumnNameToFind").Value, + "MyMostCoolestColumnEver" + ); + + //both should have source and destination components + Assert.NotNull(p2.DestinationPipelineComponent_ID); + Assert.NotNull(p2.SourcePipelineComponent_ID); + + //but with different IDs because they are clones + Assert.AreNotEqual(p.DestinationPipelineComponent_ID, p2.DestinationPipelineComponent_ID); + Assert.AreNotEqual(p.SourcePipelineComponent_ID, p2.SourcePipelineComponent_ID); + + p.DeleteInDatabase(); + p2.DeleteInDatabase(); + } + + [Test] + public void CloneAPipeline_BrokenPipes() + { + var p = new Pipeline(CatalogueRepository); + + //Setup a pipeline with a source component type that doesn't exist + var source = new PipelineComponent(CatalogueRepository, p, 
typeof(DelimitedFlatFileAttacher), 0) { - Pipeline p = new Pipeline(CatalogueRepository); + Class = "Trollololol" + }; + source.SaveToDatabase(); - //Setup a pipeline with a source component - var source = new PipelineComponent(CatalogueRepository, p, typeof(DelimitedFlatFileAttacher), 0); - source.Class = "Trollololol"; - p.SourcePipelineComponent_ID = source.ID; - p.SaveToDatabase(); + var arg = source.CreateNewArgument(); - // delete the source - source.DeleteInDatabase(); - p.RevertToDatabaseState(); + //Also give the source component a non existent argument + arg.GetType().GetProperty("Type").SetValue(arg, "fffffzololz"); + arg.SaveToDatabase(); - // should also clear the reference - Assert.IsNull(p.SourcePipelineComponent_ID); - } + p.SourcePipelineComponent_ID = source.ID; + p.SaveToDatabase(); + + Assert.AreEqual("fffffzololz", p.Source.GetAllArguments().Single().Type); + + var clone = p.Clone(); - [Test] - public void DeletePipelineDestination_ClearsReference() + Assert.AreEqual(clone.Source.Class, p.Source.Class); + Assert.AreEqual("fffffzololz", clone.Source.GetAllArguments().Single().Type); + + p.DeleteInDatabase(); + clone.DeleteInDatabase(); + } + + [Test] + public void DeletePipelineSource_ClearsReference() + { + var p = new Pipeline(CatalogueRepository); + + //Setup a pipeline with a source component + var source = new PipelineComponent(CatalogueRepository, p, typeof(DelimitedFlatFileAttacher), 0) { - Pipeline p = new Pipeline(CatalogueRepository); + Class = "Trollololol" + }; + p.SourcePipelineComponent_ID = source.ID; + p.SaveToDatabase(); + + // delete the source + source.DeleteInDatabase(); + p.RevertToDatabaseState(); - //Setup a pipeline with a source component - var dest = new PipelineComponent(CatalogueRepository, p, typeof(DelimitedFlatFileAttacher), 0); - dest.Class = "Trollololol"; - p.DestinationPipelineComponent_ID = dest.ID; - p.SaveToDatabase(); + // should also clear the reference + Assert.IsNull(p.SourcePipelineComponent_ID); + } - // delete the dest - dest.DeleteInDatabase(); - p.RevertToDatabaseState(); + [Test] + public void DeletePipelineDestination_ClearsReference() + { + var p = new Pipeline(CatalogueRepository); - // should also clear the reference - Assert.IsNull(p.DestinationPipelineComponent_ID); - } + //Setup a pipeline with a source component + var dest = new PipelineComponent(CatalogueRepository, p, typeof(DelimitedFlatFileAttacher), 0) + { + Class = "Trollololol" + }; + p.DestinationPipelineComponent_ID = dest.ID; + p.SaveToDatabase(); + + // delete the dest + dest.DeleteInDatabase(); + p.RevertToDatabaseState(); + + // should also clear the reference + Assert.IsNull(p.DestinationPipelineComponent_ID); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/QueryBuildingTests/AggregateBuilderTests/AggregateBuilderTestsBase.cs b/Rdmp.Core.Tests/Curation/Integration/QueryBuildingTests/AggregateBuilderTests/AggregateBuilderTestsBase.cs index 09ef3fe1ba..bdb6dab974 100644 --- a/Rdmp.Core.Tests/Curation/Integration/QueryBuildingTests/AggregateBuilderTests/AggregateBuilderTestsBase.cs +++ b/Rdmp.Core.Tests/Curation/Integration/QueryBuildingTests/AggregateBuilderTests/AggregateBuilderTestsBase.cs @@ -9,48 +9,46 @@ using Rdmp.Core.Curation.Data.Aggregation; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Integration.QueryBuildingTests.AggregateBuilderTests +namespace Rdmp.Core.Tests.Curation.Integration.QueryBuildingTests.AggregateBuilderTests; + +public class AggregateBuilderTestsBase : DatabaseTests { - public 
class AggregateBuilderTestsBase : DatabaseTests + protected Catalogue _c; + protected CatalogueItem _cataItem1; + protected CatalogueItem _cataItem2; + protected TableInfo _ti; + protected ColumnInfo _columnInfo1; + protected ColumnInfo _columnInfo2; + protected ExtractionInformation _ei1; + protected ExtractionInformation _ei2; + protected AggregateConfiguration _configuration; + protected AggregateDimension _dimension1; + protected AggregateDimension _dimension2; + + [SetUp] + protected override void SetUp() { - protected Catalogue _c; - protected CatalogueItem _cataItem1; - protected CatalogueItem _cataItem2; - protected TableInfo _ti; - protected ColumnInfo _columnInfo1; - protected ColumnInfo _columnInfo2; - protected ExtractionInformation _ei1; - protected ExtractionInformation _ei2; - protected AggregateConfiguration _configuration; - protected AggregateDimension _dimension1; - protected AggregateDimension _dimension2; - - [SetUp] - protected override void SetUp() - { - base.SetUp(); - - _c = new Catalogue(CatalogueRepository, "AggregateBuilderTests"); - _cataItem1 = new CatalogueItem(CatalogueRepository, _c, "Col1"); - _cataItem2 = new CatalogueItem(CatalogueRepository, _c, "Col2"); - - _ti = new TableInfo(CatalogueRepository, "T1"); - _columnInfo1 = new ColumnInfo(CatalogueRepository, "Col1", "varchar(100)", _ti); - _columnInfo2 = new ColumnInfo(CatalogueRepository, "Col2", "date", _ti); - - _ei1 = new ExtractionInformation(CatalogueRepository, _cataItem1, _columnInfo1, _columnInfo1.Name); - _ei2 = new ExtractionInformation(CatalogueRepository, _cataItem2, _columnInfo2, _columnInfo2.Name); - - _configuration = new AggregateConfiguration(CatalogueRepository, _c, "MyConfig"); - - _dimension1 = new AggregateDimension(CatalogueRepository, _ei1, _configuration); - _dimension2 = new AggregateDimension(CatalogueRepository, _ei2, _configuration); - - _dimension1.Order = 1; - _dimension1.SaveToDatabase(); - _dimension2.Order = 2; - _dimension2.SaveToDatabase(); - } + base.SetUp(); + + _c = new Catalogue(CatalogueRepository, "AggregateBuilderTests"); + _cataItem1 = new CatalogueItem(CatalogueRepository, _c, "Col1"); + _cataItem2 = new CatalogueItem(CatalogueRepository, _c, "Col2"); + + _ti = new TableInfo(CatalogueRepository, "T1"); + _columnInfo1 = new ColumnInfo(CatalogueRepository, "Col1", "varchar(100)", _ti); + _columnInfo2 = new ColumnInfo(CatalogueRepository, "Col2", "date", _ti); + + _ei1 = new ExtractionInformation(CatalogueRepository, _cataItem1, _columnInfo1, _columnInfo1.Name); + _ei2 = new ExtractionInformation(CatalogueRepository, _cataItem2, _columnInfo2, _columnInfo2.Name); + + _configuration = new AggregateConfiguration(CatalogueRepository, _c, "MyConfig"); + + _dimension1 = new AggregateDimension(CatalogueRepository, _ei1, _configuration); + _dimension2 = new AggregateDimension(CatalogueRepository, _ei2, _configuration); + _dimension1.Order = 1; + _dimension1.SaveToDatabase(); + _dimension2.Order = 2; + _dimension2.SaveToDatabase(); } } \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/QueryBuildingTests/AggregateBuilderTests/AggregateDataBasedTests.cs b/Rdmp.Core.Tests/Curation/Integration/QueryBuildingTests/AggregateBuilderTests/AggregateDataBasedTests.cs index c779834b33..88c2f6051a 100644 --- a/Rdmp.Core.Tests/Curation/Integration/QueryBuildingTests/AggregateBuilderTests/AggregateDataBasedTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/QueryBuildingTests/AggregateBuilderTests/AggregateDataBasedTests.cs @@ -11,687 +11,715 @@ using 
FAnsi.Discovery; using FAnsi.Discovery.QuerySyntax; using FAnsi.Discovery.QuerySyntax.Aggregation; -using MapsDirectlyToDatabaseTable; using NUnit.Framework; -using Rdmp.Core.Curation; using Rdmp.Core.Curation.Data; using Rdmp.Core.Curation.Data.Aggregation; using Rdmp.Core.Curation.Data.Spontaneous; using Rdmp.Core.DataFlowPipeline; using Rdmp.Core.DataLoad.Engine.Pipeline.Destinations; +using Rdmp.Core.MapsDirectlyToDatabaseTable; using Rdmp.Core.QueryBuilding; using Rdmp.Core.Repositories; -using ReusableLibraryCode.Progress; +using Rdmp.Core.ReusableLibraryCode.Progress; using Tests.Common; using TypeGuesser; -namespace Rdmp.Core.Tests.Curation.Integration.QueryBuildingTests.AggregateBuilderTests +namespace Rdmp.Core.Tests.Curation.Integration.QueryBuildingTests.AggregateBuilderTests; + +public class AggregateDataBasedTests : DatabaseTests { - public class AggregateDataBasedTests:DatabaseTests + private static DataTable GetTestDataTable() { - private DataTable GetTestDataTable() + var dt = new DataTable { - DataTable dt = new DataTable(); - dt.TableName = "AggregateDataBasedTests"; + TableName = "AggregateDataBasedTests" + }; - dt.Columns.Add("EventDate"); - dt.Columns.Add("Category"); - dt.Columns.Add("NumberInTrouble"); + dt.Columns.Add("EventDate"); + dt.Columns.Add("Category"); + dt.Columns.Add("NumberInTrouble"); - dt.Rows.Add("2001-01-01", "T", "7"); - dt.Rows.Add("2001-01-02", "T", "11"); - dt.Rows.Add("2001-01-01", "T", "49"); + dt.Rows.Add("2001-01-01", "T", "7"); + dt.Rows.Add("2001-01-02", "T", "11"); + dt.Rows.Add("2001-01-01", "T", "49"); - dt.Rows.Add("2002-02-01", "T", "13"); - dt.Rows.Add("2002-03-02", "T", "17"); - dt.Rows.Add("2003-01-01", "T", "19"); - dt.Rows.Add("2003-04-02", "T", "23"); - + dt.Rows.Add("2002-02-01", "T", "13"); + dt.Rows.Add("2002-03-02", "T", "17"); + dt.Rows.Add("2003-01-01", "T", "19"); + dt.Rows.Add("2003-04-02", "T", "23"); - dt.Rows.Add("2002-01-01", "F", "29"); - dt.Rows.Add("2002-01-01", "F", "31"); - dt.Rows.Add("2001-01-01", "E&, %a' mp;E", "37"); - dt.Rows.Add("2002-01-01", "E&, %a' mp;E", "41"); - dt.Rows.Add("2005-01-01", "E&, %a' mp;E", "59"); //note there are no records in 2004 it is important for axis tests (axis involves you having to build a calendar table) + dt.Rows.Add("2002-01-01", "F", "29"); + dt.Rows.Add("2002-01-01", "F", "31"); - dt.Rows.Add(null, "G", "47"); - dt.Rows.Add("2001-01-01", "G", "53"); + dt.Rows.Add("2001-01-01", "E&, %a' mp;E", "37"); + dt.Rows.Add("2002-01-01", "E&, %a' mp;E", "41"); + dt.Rows.Add("2005-01-01", "E&, %a' mp;E", + "59"); //note there are no records in 2004 it is important for axis tests (axis involves you having to build a calendar table) - return dt; - } + dt.Rows.Add(null, "G", "47"); + dt.Rows.Add("2001-01-01", "G", "53"); + + return dt; + } + + #region Helper methods + + private DiscoveredTable UploadTestDataAsTableToServer(DatabaseType type, out ICatalogue catalogue, + out ExtractionInformation[] extractionInformations, out ITableInfo tableinfo) + { + var listener = ThrowImmediatelyDataLoadEventListener.Quiet; + + var db = GetCleanedServer(type); + + var data = GetTestDataTable(); + + var uploader = new DataTableUploadDestination(); + uploader.PreInitialize(db, listener); + uploader.ProcessPipelineData(data, listener, new GracefulCancellationToken()); + uploader.Dispose(listener, null); + var tbl = db.ExpectTable(uploader.TargetTableName); + + Assert.IsTrue(tbl.Exists()); + + catalogue = Import(tbl, out tableinfo, out _, out _, out extractionInformations); + + return tbl; + } + + private 
static void Destroy(DiscoveredTable tbl, params IDeleteable[] deletablesInOrderOfDeletion) + { + tbl.Drop(); + foreach (var deleteable in deletablesInOrderOfDeletion) + deleteable.DeleteInDatabase(); + } + + private static DataTable GetResultForBuilder(AggregateBuilder builder, DiscoveredTable tbl) + { + var sql = builder.SQL; - #region Helper methods + using var con = tbl.Database.Server.GetConnection(); + con.Open(); + var da = tbl.Database.Server.GetDataAdapter(sql, con); + var toReturn = new DataTable(); + da.Fill(toReturn); - private DiscoveredTable UploadTestDataAsTableToServer(DatabaseType type, out ICatalogue catalogue, out ExtractionInformation[] extractionInformations, out ITableInfo tableinfo) + return toReturn; + } + + + + private static void AddWHEREToBuilder_CategoryIsTOrNumberGreaterThan42(AggregateBuilder builder, DatabaseType type) + { + var syntaxHelper = new QuerySyntaxHelperFactory().Create(type); + var declaration = syntaxHelper.GetParameterDeclaration("@category", new DatabaseTypeRequest(typeof(string), 1)); + + var repo = new MemoryCatalogueRepository(); + + var ORContainer = new SpontaneouslyInventedFilterContainer(repo, null, null, FilterContainerOperation.OR); + var constParam = new ConstantParameter(declaration, "'T'", "T Category Only", syntaxHelper); + + //this is deliberately duplication, it tests that the parameter compiles as well as that any dynamic sql doesn't get thrown by quotes + var filter1 = new SpontaneouslyInventedFilter(repo, ORContainer, "(Category=@category OR Category = 'T')", + "Category Is @category", + "ensures the records belong to the category @category", new ISqlParameter[] { constParam }); + var filter2 = new SpontaneouslyInventedFilter(repo, ORContainer, "NumberInTrouble > 42", + "number in trouble greater than 42", "See above", null); + + ORContainer.AddChild(filter1); + ORContainer.AddChild(filter2); + + builder.RootFilterContainer = ORContainer; + } + + private AggregateConfiguration SetupAggregateWithAxis(DatabaseType type, + ExtractionInformation[] extractionInformations, + ICatalogue catalogue, out AggregateDimension axisDimension) + { + var dateDimension = + extractionInformations.Single( + e => e.GetRuntimeName().Equals("EventDate", StringComparison.CurrentCultureIgnoreCase)); + var configuration = new AggregateConfiguration(CatalogueRepository, catalogue, "GroupBy_Category"); + axisDimension = new AggregateDimension(CatalogueRepository, dateDimension, configuration); + + var axis = new AggregateContinuousDateAxis(CatalogueRepository, axisDimension) { - var listener = new ThrowImmediatelyDataLoadEventListener(); - - var db = GetCleanedServer(type); + StartDate = "'2000-01-01'", + AxisIncrement = AxisIncrement.Year + }; + axis.SaveToDatabase(); + return configuration; + } + + private AggregateConfiguration SetupAggregateWithPivot(DatabaseType type, + ExtractionInformation[] extractionInformations, + ICatalogue catalogue, out AggregateDimension axisDimension, out AggregateDimension pivotDimension) + { + var axisCol = + extractionInformations.Single( + e => e.GetRuntimeName().Equals("EventDate", StringComparison.CurrentCultureIgnoreCase)); + var categoryCol = + extractionInformations.Single( + e => e.GetRuntimeName().Equals("Category", StringComparison.CurrentCultureIgnoreCase)); + - var data = GetTestDataTable(); + var configuration = new AggregateConfiguration(CatalogueRepository, catalogue, "GroupBy_Category"); + axisDimension = new AggregateDimension(CatalogueRepository, axisCol, configuration); + pivotDimension = new 
AggregateDimension(CatalogueRepository, categoryCol, configuration); - var uploader = new DataTableUploadDestination(); - uploader.PreInitialize(db, listener); - uploader.ProcessPipelineData(data, listener, new GracefulCancellationToken()); - uploader.Dispose(listener, null); - var tbl = db.ExpectTable(uploader.TargetTableName); + var axis = new AggregateContinuousDateAxis(CatalogueRepository, axisDimension) + { + StartDate = "'2000-01-01'", + AxisIncrement = AxisIncrement.Year + }; + axis.SaveToDatabase(); + return configuration; + } - Assert.IsTrue(tbl.Exists()); + #endregion - catalogue = Import(tbl,out tableinfo,out _,out _, out extractionInformations); + [Test] + [TestCase(DatabaseType.MicrosoftSQLServer)] + [TestCase(DatabaseType.MySql)] + //[TestCase(DatabaseType.Oracle)]// doesn't quite work yet :) needs full implementation of database abstraction layer for Oracle to work + public void Count_CorrectNumberOfRowsCalculated(DatabaseType type) + { + var tbl = UploadTestDataAsTableToServer(type, out var catalogue, out var extractionInformations, + out var tableInfo); - return tbl; + var builder = new AggregateBuilder(null, "count(*)", null, new[] { tableInfo }); + var resultTable = GetResultForBuilder(builder, tbl); + try + { + //table has 14 rows + VerifyRowExist(resultTable, 14); } - private void Destroy(DiscoveredTable tbl, params IDeleteable[] deletablesInOrderOfDeletion) + finally { - tbl.Drop(); - foreach (IDeleteable deleteable in deletablesInOrderOfDeletion) - deleteable.DeleteInDatabase(); + Destroy(tbl, catalogue, tableInfo); } + } - private DataTable GetResultForBuilder(AggregateBuilder builder, DiscoveredTable tbl) - { - string sql = builder.SQL; + [Test] + [TestCase(DatabaseType.MicrosoftSQLServer)] + [TestCase(DatabaseType.MySql)] + public void GroupBy_CategoryWithCount_Correct(DatabaseType type) + { + var tbl = UploadTestDataAsTableToServer(type, out var catalogue, out var extractionInformations, + out var tableInfo); + + //setup the aggregate + var categoryDimension = extractionInformations.Single(e => + e.GetRuntimeName().Equals("Category", StringComparison.CurrentCultureIgnoreCase)); + var configuration = new AggregateConfiguration(CatalogueRepository, catalogue, "GroupBy_Category"); + var dimension = new AggregateDimension(CatalogueRepository, categoryDimension, configuration); - using (var con = tbl.Database.Server.GetConnection()) - { - con.Open(); - var da = tbl.Database.Server.GetDataAdapter(sql, con); - var toReturn = new DataTable(); - da.Fill(toReturn); + try + { + //get the result of the aggregate + var builder = new AggregateBuilder(null, configuration.CountSQL, configuration); + builder.AddColumn(dimension); + var resultTable = GetResultForBuilder(builder, tbl); - return toReturn; - } + VerifyRowExist(resultTable, "T", 7); + VerifyRowExist(resultTable, "F", 2); + VerifyRowExist(resultTable, "E&, %a' mp;E", 3); + VerifyRowExist(resultTable, "G", 2); } - + finally + { + Destroy(tbl, configuration, catalogue, tableInfo); + } + } - private void AddWHEREToBuilder_CategoryIsTOrNumberGreaterThan42(AggregateBuilder builder, DatabaseType type) + [Test] + [TestCase(DatabaseType.MicrosoftSQLServer)] + [TestCase(DatabaseType.MySql)] + public void GroupBy_CategoryWithSum_Correct(DatabaseType type) + { + var tbl = UploadTestDataAsTableToServer(type, out var catalogue, out var extractionInformations, + out var tableInfo); + + //setup the aggregate + var categoryDimension = extractionInformations.Single(e => + e.GetRuntimeName().Equals("Category", 
StringComparison.CurrentCultureIgnoreCase)); + var configuration = new AggregateConfiguration(CatalogueRepository, catalogue, "GroupBy_Category"); + var dimension = new AggregateDimension(CatalogueRepository, categoryDimension, configuration); + + configuration.CountSQL = "sum(NumberInTrouble)"; + configuration.SaveToDatabase(); + try { - var syntaxHelper = new QuerySyntaxHelperFactory().Create(type); - var declaration = syntaxHelper.GetParameterDeclaration("@category", new DatabaseTypeRequest(typeof(string), 1)); + //get the result of the aggregate + var builder = new AggregateBuilder(null, configuration.CountSQL, configuration); + builder.AddColumn(dimension); + var resultTable = GetResultForBuilder(builder, tbl); - var repo = new MemoryCatalogueRepository(); + VerifyRowExist(resultTable, "T", 139); + VerifyRowExist(resultTable, "F", 60); + VerifyRowExist(resultTable, "E&, %a' mp;E", 137); + VerifyRowExist(resultTable, "G", 100); + Assert.AreEqual(4, resultTable.Rows.Count); + } + finally + { + Destroy(tbl, configuration, catalogue, tableInfo); + } + } - var ORContainer = new SpontaneouslyInventedFilterContainer(repo,null, null, FilterContainerOperation.OR); - var constParam = new ConstantParameter(declaration, "'T'", "T Category Only", syntaxHelper); - - //this is deliberately duplication, it tests that the parameter compiles as well as that any dynamic sql doesn't get thrown by quotes - var filter1 = new SpontaneouslyInventedFilter(repo,ORContainer, "(Category=@category OR Category = 'T')", "Category Is @category", - "ensures the records belong to the category @category", new ISqlParameter[] { constParam }); - var filter2 = new SpontaneouslyInventedFilter(repo,ORContainer, "NumberInTrouble > 42", - "number in trouble greater than 42", "See above", null); + [Test] + [TestCase(DatabaseType.MicrosoftSQLServer)] + [TestCase(DatabaseType.MySql)] + public void GroupBy_CategoryWithSum_WHEREStatement(DatabaseType type) + { + var tbl = UploadTestDataAsTableToServer(type, out var catalogue, out var extractionInformations, + out var tableInfo); - ORContainer.AddChild(filter1); - ORContainer.AddChild(filter2); + //setup the aggregate + var categoryDimension = extractionInformations.Single(e => + e.GetRuntimeName().Equals("Category", StringComparison.CurrentCultureIgnoreCase)); + var configuration = new AggregateConfiguration(CatalogueRepository, catalogue, "GroupBy_Category"); + var dimension = new AggregateDimension(CatalogueRepository, categoryDimension, configuration); - builder.RootFilterContainer = ORContainer; - } + configuration.CountSQL = "sum(NumberInTrouble)"; + configuration.SaveToDatabase(); - private AggregateConfiguration SetupAggregateWithAxis(DatabaseType type, ExtractionInformation[] extractionInformations, - ICatalogue catalogue, out AggregateDimension axisDimension) + try { - var dateDimension = - extractionInformations.Single( - e => e.GetRuntimeName().Equals("EventDate", StringComparison.CurrentCultureIgnoreCase)); - var configuration = new AggregateConfiguration(CatalogueRepository, catalogue, "GroupBy_Category"); - axisDimension = new AggregateDimension(CatalogueRepository, dateDimension, configuration); - - var axis = new AggregateContinuousDateAxis(CatalogueRepository, axisDimension); - axis.StartDate = "'2000-01-01'"; - axis.AxisIncrement = AxisIncrement.Year; - axis.SaveToDatabase(); - return configuration; - } + //get the result of the aggregate + var builder = new AggregateBuilder(null, configuration.CountSQL, configuration); + builder.AddColumn(dimension); - private 
AggregateConfiguration SetupAggregateWithPivot(DatabaseType type, ExtractionInformation[] extractionInformations, - ICatalogue catalogue, out AggregateDimension axisDimension, out AggregateDimension pivotDimension) + AddWHEREToBuilder_CategoryIsTOrNumberGreaterThan42(builder, type); + + var resultTable = GetResultForBuilder(builder, tbl); + + //T is matched on all records so they are summed + VerifyRowExist(resultTable, "T", 139); + //VerifyRowExist(resultTable, "F", 60); //F does not have any records over 42 and isn't T so shouldnt be matched + VerifyRowExist(resultTable, "E&, %a' mp;E", 59); //E has 1 records over 42 + VerifyRowExist(resultTable, "G", 100); //47 + 53 + Assert.AreEqual(3, resultTable.Rows.Count); + } + finally { - var axisCol = - extractionInformations.Single( - e => e.GetRuntimeName().Equals("EventDate", StringComparison.CurrentCultureIgnoreCase)); - var categoryCol = - extractionInformations.Single( - e => e.GetRuntimeName().Equals("Category", StringComparison.CurrentCultureIgnoreCase)); - - - var configuration = new AggregateConfiguration(CatalogueRepository, catalogue, "GroupBy_Category"); - axisDimension = new AggregateDimension(CatalogueRepository, axisCol, configuration); - pivotDimension = new AggregateDimension(CatalogueRepository, categoryCol, configuration); - - var axis = new AggregateContinuousDateAxis(CatalogueRepository, axisDimension); - axis.StartDate = "'2000-01-01'"; - axis.AxisIncrement = AxisIncrement.Year; - axis.SaveToDatabase(); - return configuration; + Destroy(tbl, configuration, catalogue, tableInfo); } + } + + [Test] + [TestCase(DatabaseType.MicrosoftSQLServer)] + [TestCase(DatabaseType.MySql)] + public void GroupBy_AxisWithSum_Correct(DatabaseType type) + { + var tbl = UploadTestDataAsTableToServer(type, out var catalogue, out var extractionInformations, + out var tableInfo); + + //setup the aggregate with axis + var configuration = SetupAggregateWithAxis(type, extractionInformations, catalogue, out var dimension); - #endregion + configuration.CountSQL = "sum(NumberInTrouble)"; + configuration.SaveToDatabase(); - [Test] - [TestCase(DatabaseType.MicrosoftSQLServer)] - [TestCase(DatabaseType.MySql)] - //[TestCase(DatabaseType.Oracle)]// doesn't quite work yet :) needs full implementation of database abstraction layer for Oracle to work - public void Count_CorrectNumberOfRowsCalculated(DatabaseType type) + try { - var tbl = UploadTestDataAsTableToServer(type,out var catalogue, out var extractionInformations, out var tableInfo); + //get the result of the aggregate + var builder = new AggregateBuilder(null, configuration.CountSQL, configuration); + builder.AddColumn(dimension); - var builder = new AggregateBuilder(null, "count(*)", null,new []{tableInfo}); var resultTable = GetResultForBuilder(builder, tbl); - try - { - //table has 14 rows - VerifyRowExist(resultTable, 14); - } - finally - { - Destroy(tbl, catalogue, tableInfo); - } - } - [Test] - [TestCase(DatabaseType.MicrosoftSQLServer)] - [TestCase(DatabaseType.MySql)] - public void GroupBy_CategoryWithCount_Correct(DatabaseType type) + //axis is ordered ascending by date starting in 2000 so that row should come first + Assert.IsTrue(AreBasicallyEquals("2000", resultTable.Rows[0][0])); + + VerifyRowExist(resultTable, "2000", + null); //because it is a SUM the ANSI return should be null not 0 since it is a sum of no records + VerifyRowExist(resultTable, "2001", 157); + VerifyRowExist(resultTable, "2002", 131); + VerifyRowExist(resultTable, "2003", 42); + VerifyRowExist(resultTable, "2004", 
null); + VerifyRowExist(resultTable, "2005", 59); + VerifyRowExist(resultTable, "2006", null); + VerifyRowExist(resultTable, "2007", null); + } + finally { - var tbl = UploadTestDataAsTableToServer(type, out var catalogue, out var extractionInformations, out var tableInfo); - - //setup the aggregate - var categoryDimension = extractionInformations.Single(e => e.GetRuntimeName().Equals("Category", StringComparison.CurrentCultureIgnoreCase)); - var configuration = new AggregateConfiguration(CatalogueRepository,catalogue,"GroupBy_Category"); - var dimension = new AggregateDimension(CatalogueRepository, categoryDimension, configuration); - - try - { - //get the result of the aggregate - var builder = new AggregateBuilder(null, configuration.CountSQL, configuration); - builder.AddColumn(dimension); - var resultTable = GetResultForBuilder(builder, tbl); - - VerifyRowExist(resultTable, "T", 7); - VerifyRowExist(resultTable, "F", 2); - VerifyRowExist(resultTable, "E&, %a' mp;E", 3); - VerifyRowExist(resultTable, "G", 2); - } - finally - { - Destroy(tbl, configuration, catalogue, tableInfo); - } + Destroy(tbl, configuration, catalogue, tableInfo); } + } + + [Test] + [TestCase(DatabaseType.MicrosoftSQLServer)] + [TestCase(DatabaseType.MySql)] + public void GroupBy_AxisWithCount_HAVING(DatabaseType type) + { + var tbl = UploadTestDataAsTableToServer(type, out var catalogue, out var extractionInformations, + out var tableInfo); + + //setup the aggregate with axis + var configuration = SetupAggregateWithAxis(type, extractionInformations, catalogue, out var dimension); + + configuration.CountSQL = "count(*)"; + configuration.HavingSQL = "count(*)>3"; //matches only years with more than 3 records + configuration.SaveToDatabase(); - [Test] - [TestCase(DatabaseType.MicrosoftSQLServer)] - [TestCase(DatabaseType.MySql)] - public void GroupBy_CategoryWithSum_Correct(DatabaseType type) + try { - var tbl = UploadTestDataAsTableToServer(type, out var catalogue, out var extractionInformations, out var tableInfo); - - //setup the aggregate - var categoryDimension = extractionInformations.Single(e => e.GetRuntimeName().Equals("Category", StringComparison.CurrentCultureIgnoreCase)); - var configuration = new AggregateConfiguration(CatalogueRepository, catalogue, "GroupBy_Category"); - var dimension = new AggregateDimension(CatalogueRepository, categoryDimension, configuration); - - configuration.CountSQL = "sum(NumberInTrouble)"; - configuration.SaveToDatabase(); - try - { - //get the result of the aggregate - var builder = new AggregateBuilder(null, configuration.CountSQL, configuration); - builder.AddColumn(dimension); - var resultTable = GetResultForBuilder(builder, tbl); - - VerifyRowExist(resultTable, "T", 139); - VerifyRowExist(resultTable, "F", 60); - VerifyRowExist(resultTable, "E&, %a' mp;E", 137); - VerifyRowExist(resultTable, "G", 100); - Assert.AreEqual(4,resultTable.Rows.Count); - } - finally - { - Destroy(tbl, configuration, catalogue, tableInfo); - } + //get the result of the aggregate + var builder = new AggregateBuilder(null, configuration.CountSQL, configuration); + builder.AddColumn(dimension); + + var resultTable = GetResultForBuilder(builder, tbl); + + //axis is ordered ascending by date starting in 2000 so that row should come first + Assert.IsTrue(AreBasicallyEquals("2000", resultTable.Rows[0][0])); + + VerifyRowExist(resultTable, "2000", + null); //records only showing where there are more than 3 records (HAVING refers to the year since there's no pivot) + VerifyRowExist(resultTable, "2001", 
5); + VerifyRowExist(resultTable, "2002", 5); + VerifyRowExist(resultTable, "2003", null); + VerifyRowExist(resultTable, "2004", null); + VerifyRowExist(resultTable, "2005", null); + VerifyRowExist(resultTable, "2006", null); + VerifyRowExist(resultTable, "2007", null); + } + finally + { + Destroy(tbl, configuration, catalogue, tableInfo); } + } + + + [Test] + [TestCase(DatabaseType.MicrosoftSQLServer)] + [TestCase(DatabaseType.MySql)] + public void GroupBy_AxisWithCount_WHERECorrect(DatabaseType type) + { + var tbl = UploadTestDataAsTableToServer(type, out var catalogue, out var extractionInformations, + out var tableInfo); + + //setup the aggregate with axis + var configuration = SetupAggregateWithAxis(type, extractionInformations, catalogue, out var dimension); - [Test] - [TestCase(DatabaseType.MicrosoftSQLServer)] - [TestCase(DatabaseType.MySql)] - public void GroupBy_CategoryWithSum_WHEREStatement(DatabaseType type) + configuration.CountSQL = "count(NumberInTrouble)"; + configuration.SaveToDatabase(); + try { - var tbl = UploadTestDataAsTableToServer(type, out var catalogue, out var extractionInformations, out var tableInfo); - - //setup the aggregate - var categoryDimension = extractionInformations.Single(e => e.GetRuntimeName().Equals("Category", StringComparison.CurrentCultureIgnoreCase)); - var configuration = new AggregateConfiguration(CatalogueRepository, catalogue, "GroupBy_Category"); - var dimension = new AggregateDimension(CatalogueRepository, categoryDimension, configuration); - - configuration.CountSQL = "sum(NumberInTrouble)"; - configuration.SaveToDatabase(); - - try - { - //get the result of the aggregate - var builder = new AggregateBuilder(null, configuration.CountSQL, configuration); - builder.AddColumn(dimension); - - AddWHEREToBuilder_CategoryIsTOrNumberGreaterThan42(builder,type); - - var resultTable = GetResultForBuilder(builder, tbl); - - //T is matched on all records so they are summed - VerifyRowExist(resultTable, "T", 139); - //VerifyRowExist(resultTable, "F", 60); //F does not have any records over 42 and isn't T so shouldnt be matched - VerifyRowExist(resultTable, "E&, %a' mp;E", 59); //E has 1 records over 42 - VerifyRowExist(resultTable, "G", 100); //47 + 53 - Assert.AreEqual(3, resultTable.Rows.Count); - } - finally - { - Destroy(tbl, configuration, catalogue, tableInfo); - } + //get the result of the aggregate + var builder = new AggregateBuilder(null, configuration.CountSQL, configuration); + builder.AddColumn(dimension); + + AddWHEREToBuilder_CategoryIsTOrNumberGreaterThan42(builder, type); + + var resultTable = GetResultForBuilder(builder, tbl); + + //axis is ordered ascending by date starting in 2000 so that row should come first + Assert.IsTrue(AreBasicallyEquals("2000", resultTable.Rows[0][0])); + + VerifyRowExist(resultTable, "2000", null); + VerifyRowExist(resultTable, "2001", 4); //4 are T or > 42 + VerifyRowExist(resultTable, "2002", 2); + VerifyRowExist(resultTable, "2003", 2); //only the first date in the test data is <= 2003-01-01 } - [Test] - [TestCase(DatabaseType.MicrosoftSQLServer)] - [TestCase(DatabaseType.MySql)] - public void GroupBy_AxisWithSum_Correct(DatabaseType type) + finally { - var tbl = UploadTestDataAsTableToServer(type, out var catalogue, out var extractionInformations, out var tableInfo); - - //setup the aggregate with axis - AggregateDimension dimension; - var configuration = SetupAggregateWithAxis(type, extractionInformations, catalogue, out dimension); - - configuration.CountSQL = "sum(NumberInTrouble)"; - 
configuration.SaveToDatabase(); - - try - { - //get the result of the aggregate - var builder = new AggregateBuilder(null, configuration.CountSQL, configuration); - builder.AddColumn(dimension); - - var resultTable = GetResultForBuilder(builder, tbl); - - //axis is ordered ascending by date starting in 2000 so that row should come first - Assert.IsTrue(AreBasicallyEquals( "2000",resultTable.Rows[0][0])); - - VerifyRowExist(resultTable, "2000", null); //because it is a SUM the ANSI return should be null not 0 since it is a sum of no records - VerifyRowExist(resultTable, "2001", 157); - VerifyRowExist(resultTable, "2002", 131); - VerifyRowExist(resultTable, "2003", 42); - VerifyRowExist(resultTable, "2004", null); - VerifyRowExist(resultTable, "2005", 59); - VerifyRowExist(resultTable, "2006", null); - VerifyRowExist(resultTable, "2007", null); - } - finally - { - Destroy(tbl, configuration, catalogue, tableInfo); - } - + Destroy(tbl, configuration, catalogue, tableInfo); } - [Test] - [TestCase(DatabaseType.MicrosoftSQLServer)] - [TestCase(DatabaseType.MySql)] - public void GroupBy_AxisWithCount_HAVING(DatabaseType type) + } + + [Test] + [TestCase(DatabaseType.MicrosoftSQLServer)] + [TestCase(DatabaseType.MySql)] + public void GroupBy_PivotWithSum_Correct(DatabaseType type) + { + var tbl = UploadTestDataAsTableToServer(type, out var catalogue, out var extractionInformations, + out var tableInfo); + + //setup the aggregate pivot (and axis) + var configuration = SetupAggregateWithPivot(type, extractionInformations, catalogue, out var axisDimension, + out var pivotDimension); + + configuration.CountSQL = "sum(NumberInTrouble)"; + configuration.PivotOnDimensionID = pivotDimension.ID; //pivot on the Category + + configuration.SaveToDatabase(); + try { - var tbl = UploadTestDataAsTableToServer(type, out var catalogue, out var extractionInformations, out var tableInfo); - - //setup the aggregate with axis - AggregateDimension dimension; - var configuration = SetupAggregateWithAxis(type, extractionInformations, catalogue, out dimension); - - configuration.CountSQL = "count(*)"; - configuration.HavingSQL = "count(*)>3"; //matches only years with more than 3 records - configuration.SaveToDatabase(); - - try - { - //get the result of the aggregate - var builder = new AggregateBuilder(null, configuration.CountSQL, configuration); - builder.AddColumn(dimension); - - var resultTable = GetResultForBuilder(builder, tbl); - - //axis is ordered ascending by date starting in 2000 so that row should come first - Assert.IsTrue(AreBasicallyEquals("2000", resultTable.Rows[0][0])); - - VerifyRowExist(resultTable, "2000", null); //records only showing where there are more than 3 records (HAVING refers to the year since there's no pivot) - VerifyRowExist(resultTable, "2001", 5); - VerifyRowExist(resultTable, "2002", 5); - VerifyRowExist(resultTable, "2003", null); - VerifyRowExist(resultTable, "2004", null); - VerifyRowExist(resultTable, "2005", null); - VerifyRowExist(resultTable, "2006", null); - VerifyRowExist(resultTable, "2007", null); - } - finally - { - Destroy(tbl, configuration, catalogue, tableInfo); - } + //get the result of the aggregate + var builder = new AggregateBuilder(null, configuration.CountSQL, configuration); + builder.AddColumn(axisDimension); + builder.AddColumn(pivotDimension); + builder.SetPivotToDimensionID(pivotDimension); + var resultTable = GetResultForBuilder(builder, tbl); + + //axis is ordered ascending by date starting in 2000 so that row should come first + 
Assert.IsTrue(AreBasicallyEquals("2000", resultTable.Rows[0][0])); + + Assert.AreEqual("T", resultTable.Columns[1].ColumnName); + Assert.AreEqual("E&, %a' mp;E", resultTable.Columns[2].ColumnName); + Assert.AreEqual("F", resultTable.Columns[3].ColumnName); + Assert.AreEqual("G", resultTable.Columns[4].ColumnName); + + //T,E,F,G + VerifyRowExist(resultTable, "2000", null, null, null, + null); //no records in 2000 but it is important it appears still because that is what the axis says + VerifyRowExist(resultTable, "2001", 67, 37, null, 53); + VerifyRowExist(resultTable, "2002", 30, 41, 60, null); + VerifyRowExist(resultTable, "2003", 42, null, null, null); + VerifyRowExist(resultTable, "2004", null, null, null, null); + VerifyRowExist(resultTable, "2005", null, 59, null, null); + VerifyRowExist(resultTable, "2006", null, null, null, null); + VerifyRowExist(resultTable, "2007", null, null, null, null); } + finally + { + Destroy(tbl, configuration, catalogue, tableInfo); + } + } + + + [Test] + [TestCase(DatabaseType.MicrosoftSQLServer)] + [TestCase(DatabaseType.MySql)] + public void GroupBy_PivotWithSum_WHEREStatement(DatabaseType type) + { + var tbl = UploadTestDataAsTableToServer(type, out var catalogue, out var extractionInformations, + out var tableInfo); + + //setup the aggregate pivot (and axis) + var configuration = SetupAggregateWithPivot(type, extractionInformations, catalogue, out var axisDimension, + out var pivotDimension); + configuration.CountSQL = "sum(NumberInTrouble)"; + configuration.PivotOnDimensionID = pivotDimension.ID; //pivot on the Category - [Test] - [TestCase(DatabaseType.MicrosoftSQLServer)] - [TestCase(DatabaseType.MySql)] - public void GroupBy_AxisWithCount_WHERECorrect(DatabaseType type) + configuration.SaveToDatabase(); + try { - var tbl = UploadTestDataAsTableToServer(type, out var catalogue, out var extractionInformations, out var tableInfo); - - //setup the aggregate with axis - AggregateDimension dimension; - var configuration = SetupAggregateWithAxis(type, extractionInformations, catalogue, out dimension); - - configuration.CountSQL = "count(NumberInTrouble)"; - configuration.SaveToDatabase(); - try - { - //get the result of the aggregate - var builder = new AggregateBuilder(null, configuration.CountSQL, configuration); - builder.AddColumn(dimension); - - AddWHEREToBuilder_CategoryIsTOrNumberGreaterThan42(builder,type); - - var resultTable = GetResultForBuilder(builder, tbl); - - //axis is ordered ascending by date starting in 2000 so that row should come first - Assert.IsTrue(AreBasicallyEquals("2000", resultTable.Rows[0][0])); - - VerifyRowExist(resultTable, "2000", null); - VerifyRowExist(resultTable, "2001", 4); //4 are T or > 42 - VerifyRowExist(resultTable, "2002", 2); - VerifyRowExist(resultTable, "2003", 2); //only the first date in the test data is <= 2003-01-01 - } - finally - { - Destroy(tbl, configuration, catalogue, tableInfo); - } - } + //get the result of the aggregate + var builder = new AggregateBuilder(null, configuration.CountSQL, configuration); + builder.AddColumn(axisDimension); + builder.AddColumn(pivotDimension); + builder.SetPivotToDimensionID(pivotDimension); - [Test] - [TestCase(DatabaseType.MicrosoftSQLServer)] - [TestCase(DatabaseType.MySql)] - public void GroupBy_PivotWithSum_Correct(DatabaseType type) + AddWHEREToBuilder_CategoryIsTOrNumberGreaterThan42(builder, type); + + var resultTable = GetResultForBuilder(builder, tbl); + + //axis is ordered ascending by date starting in 2000 so that row should come first + 
Assert.IsTrue(AreBasicallyEquals("2000", resultTable.Rows[0][0])); + + Assert.AreEqual("T", resultTable.Columns[1].ColumnName); + Assert.AreEqual("E&, %a' mp;E", resultTable.Columns[2].ColumnName); + Assert.AreEqual("G", resultTable.Columns[3].ColumnName); + + //T,E,G - F does not appear because WHERE throws it out (both counts are below 42) + VerifyRowExist(resultTable, "2000", null, null, + null); //no records in 2000 but it is important it appears still because that is what the axis says + VerifyRowExist(resultTable, "2001", 67, null, 53); + VerifyRowExist(resultTable, "2002", 30, null, null); + VerifyRowExist(resultTable, "2003", 42, null, null); + VerifyRowExist(resultTable, "2004", null, null, null); + VerifyRowExist(resultTable, "2005", null, 59, null); + VerifyRowExist(resultTable, "2006", null, null, null); + VerifyRowExist(resultTable, "2007", null, null, null); + } + finally { - var tbl = UploadTestDataAsTableToServer(type, out var catalogue, out var extractionInformations, out var tableInfo); - - //setup the aggregate pivot (and axis) - AggregateDimension axisDimension; - AggregateDimension pivotDimension; - var configuration = SetupAggregateWithPivot(type, extractionInformations, catalogue, out axisDimension, out pivotDimension); - - configuration.CountSQL = "sum(NumberInTrouble)"; - configuration.PivotOnDimensionID = pivotDimension.ID; //pivot on the Category - - configuration.SaveToDatabase(); - try - { - //get the result of the aggregate - var builder = new AggregateBuilder(null, configuration.CountSQL, configuration); - builder.AddColumn(axisDimension); - builder.AddColumn(pivotDimension); - builder.SetPivotToDimensionID(pivotDimension); - - var resultTable = GetResultForBuilder(builder, tbl); - - //axis is ordered ascending by date starting in 2000 so that row should come first - Assert.IsTrue(AreBasicallyEquals("2000", resultTable.Rows[0][0])); - - Assert.AreEqual("T",resultTable.Columns[1].ColumnName); - Assert.AreEqual("E&, %a' mp;E", resultTable.Columns[2].ColumnName); - Assert.AreEqual("F", resultTable.Columns[3].ColumnName); - Assert.AreEqual("G", resultTable.Columns[4].ColumnName); - - //T,E,F,G - VerifyRowExist(resultTable, "2000", null, null, null, null);//no records in 2000 but it is important it appears still because that is what the axis says - VerifyRowExist(resultTable, "2001", 67, 37, null, 53); - VerifyRowExist(resultTable, "2002", 30, 41, 60, null); - VerifyRowExist(resultTable, "2003", 42, null, null, null); - VerifyRowExist(resultTable, "2004", null, null, null, null); - VerifyRowExist(resultTable, "2005", null, 59,null , null); - VerifyRowExist(resultTable, "2006", null, null, null, null); - VerifyRowExist(resultTable, "2007", null, null, null, null); - } - finally - { - Destroy(tbl, configuration, catalogue, tableInfo); - } - + Destroy(tbl, configuration, catalogue, tableInfo); } + } + + /// + /// A test which checks the behaviour of Aggregate Building when there is an axis, a pivot and a TopX in which the TopX selection is the 'Top 2 count column' + /// This translates as 'identify the top 2 pivot values which have the highest counts matching the WHERE condition and pivot those categories only (for all data)' + /// + /// + [Test] + [TestCase(DatabaseType.MicrosoftSQLServer)] + [TestCase(DatabaseType.MySql)] + public void GroupBy_PivotWithSum_Top2BasedonCountColumnDesc(DatabaseType type) + { + var tbl = UploadTestDataAsTableToServer(type, out var catalogue, out var extractionInformations, + out var tableInfo); + + //setup the aggregate pivot (and 
axis) + var configuration = SetupAggregateWithPivot(type, extractionInformations, catalogue, out var axisDimension, + out var pivotDimension); + + configuration.CountSQL = "sum(NumberInTrouble)"; + configuration.PivotOnDimensionID = pivotDimension.ID; //pivot on the Category - [Test] - [TestCase(DatabaseType.MicrosoftSQLServer)] - [TestCase(DatabaseType.MySql)] - public void GroupBy_PivotWithSum_WHEREStatement(DatabaseType type) + configuration.SaveToDatabase(); + + var topx = new AggregateTopX(CatalogueRepository, configuration, 2) { - var tbl = UploadTestDataAsTableToServer(type, out var catalogue, out var extractionInformations, out var tableInfo); - - //setup the aggregate pivot (and axis) - AggregateDimension axisDimension; - AggregateDimension pivotDimension; - var configuration = SetupAggregateWithPivot(type, extractionInformations, catalogue, out axisDimension, out pivotDimension); - - configuration.CountSQL = "sum(NumberInTrouble)"; - configuration.PivotOnDimensionID = pivotDimension.ID; //pivot on the Category - - configuration.SaveToDatabase(); - try - { - //get the result of the aggregate - var builder = new AggregateBuilder(null, configuration.CountSQL, configuration); - builder.AddColumn(axisDimension); - builder.AddColumn(pivotDimension); - builder.SetPivotToDimensionID(pivotDimension); - - AddWHEREToBuilder_CategoryIsTOrNumberGreaterThan42(builder,type); - - var resultTable = GetResultForBuilder(builder, tbl); - - //axis is ordered ascending by date starting in 2000 so that row should come first - Assert.IsTrue(AreBasicallyEquals("2000", resultTable.Rows[0][0])); - - Assert.AreEqual("T", resultTable.Columns[1].ColumnName); - Assert.AreEqual("E&, %a' mp;E", resultTable.Columns[2].ColumnName); - Assert.AreEqual("G", resultTable.Columns[3].ColumnName); - - //T,E,G - F does not appear because WHERE throws it out (both counts are below 42) - VerifyRowExist(resultTable, "2000", null, null, null); //no records in 2000 but it is important it appears still because that is what the axis says - VerifyRowExist(resultTable, "2001", 67, null, 53); - VerifyRowExist(resultTable, "2002", 30, null, null); - VerifyRowExist(resultTable, "2003", 42, null, null); - VerifyRowExist(resultTable, "2004", null, null, null); - VerifyRowExist(resultTable, "2005", null, 59, null); - VerifyRowExist(resultTable, "2006", null, null, null); - VerifyRowExist(resultTable, "2007", null, null, null); - } - finally - { - Destroy(tbl, configuration, catalogue, tableInfo); - } - } + OrderByDirection = AggregateTopXOrderByDirection.Descending + }; + topx.SaveToDatabase(); + try + { + //get the result of the aggregate + var builder = new AggregateBuilder(null, configuration.CountSQL, configuration); + builder.AddColumn(axisDimension); + builder.AddColumn(pivotDimension); + builder.SetPivotToDimensionID(pivotDimension); + + var resultTable = GetResultForBuilder(builder, tbl); - /// - /// A test which checks the behaviour of Aggregate Building when there is an axis, a pivot and a TopX in which the TopX selection is the 'Top 2 count column' - /// This translates as 'identify the top 2 pivot values which have the highest counts matching the WHERE condition and pivot those categories only (for all data)' - /// - /// - [Test] - [TestCase(DatabaseType.MicrosoftSQLServer)] - [TestCase(DatabaseType.MySql)] - public void GroupBy_PivotWithSum_Top2BasedonCountColumnDesc(DatabaseType type) + //axis is ordered ascending by date starting in 2000 so that row should come first + Assert.IsTrue(AreBasicallyEquals("2000", 
resultTable.Rows[0][0])); + + Assert.AreEqual("T", resultTable.Columns[1].ColumnName); + Assert.AreEqual("E&, %a' mp;E", resultTable.Columns[2].ColumnName); + + //T,E,G - F does not appear because WHERE throws it out (both counts are below 42) + VerifyRowExist(resultTable, "2000", null, + null); //no records in 2000 but it is important it appears still because that is what the axis says + VerifyRowExist(resultTable, "2001", 67, 37); + VerifyRowExist(resultTable, "2002", 30, 41); + VerifyRowExist(resultTable, "2003", 42, null); + VerifyRowExist(resultTable, "2004", null, null); + VerifyRowExist(resultTable, "2005", null, 59); + VerifyRowExist(resultTable, "2006", null, null); + VerifyRowExist(resultTable, "2007", null, null); + } + finally { - var tbl = UploadTestDataAsTableToServer(type, out var catalogue, out var extractionInformations, out var tableInfo); - - //setup the aggregate pivot (and axis) - AggregateDimension axisDimension; - AggregateDimension pivotDimension; - var configuration = SetupAggregateWithPivot(type, extractionInformations, catalogue, out axisDimension, out pivotDimension); - - configuration.CountSQL = "sum(NumberInTrouble)"; - configuration.PivotOnDimensionID = pivotDimension.ID; //pivot on the Category - - configuration.SaveToDatabase(); - - var topx = new AggregateTopX(CatalogueRepository, configuration, 2); - topx.OrderByDirection = AggregateTopXOrderByDirection.Descending; - topx.SaveToDatabase(); - - try - { - //get the result of the aggregate - var builder = new AggregateBuilder(null, configuration.CountSQL, configuration); - builder.AddColumn(axisDimension); - builder.AddColumn(pivotDimension); - builder.SetPivotToDimensionID(pivotDimension); - - var resultTable = GetResultForBuilder(builder, tbl); - - //axis is ordered ascending by date starting in 2000 so that row should come first - Assert.IsTrue(AreBasicallyEquals("2000", resultTable.Rows[0][0])); - - Assert.AreEqual("T", resultTable.Columns[1].ColumnName); - Assert.AreEqual("E&, %a' mp;E", resultTable.Columns[2].ColumnName); - - //T,E,G - F does not appear because WHERE throws it out (both counts are below 42) - VerifyRowExist(resultTable, "2000", null, null); //no records in 2000 but it is important it appears still because that is what the axis says - VerifyRowExist(resultTable, "2001", 67, 37); - VerifyRowExist(resultTable, "2002", 30, 41); - VerifyRowExist(resultTable, "2003", 42, null); - VerifyRowExist(resultTable, "2004", null, null); - VerifyRowExist(resultTable, "2005", null, 59); - VerifyRowExist(resultTable, "2006", null, null); - VerifyRowExist(resultTable, "2007", null, null); - } - finally - { - Destroy(tbl, topx, configuration, catalogue, tableInfo); - } + Destroy(tbl, topx, configuration, catalogue, tableInfo); } + } - /// - /// A test which checks the behaviour of Aggregate Building when there is an axis, a pivot and a TopX in which the TopX selection is the 'Top 2 count column' - /// This translates as 'identify the top 2 pivot values which have the highest counts matching the WHERE condition and pivot those categories only (for all data)' - /// - /// - [Test] - [TestCase(DatabaseType.MicrosoftSQLServer)] - [TestCase(DatabaseType.MySql)] - public void GroupBy_PivotWithSum_Top2AlphabeticalAsc_WHEREStatement(DatabaseType type) + /// + /// A test which checks the behaviour of Aggregate Building when there is an axis, a pivot and a TopX in which the TopX selection is the 'Top 2 count column' + /// This translates as 'identify the top 2 pivot values which have the highest counts matching the 
WHERE condition and pivot those categories only (for all data)' + /// + /// + [Test] + [TestCase(DatabaseType.MicrosoftSQLServer)] + [TestCase(DatabaseType.MySql)] + public void GroupBy_PivotWithSum_Top2AlphabeticalAsc_WHEREStatement(DatabaseType type) + { + var tbl = UploadTestDataAsTableToServer(type, out var catalogue, out var extractionInformations, + out var tableInfo); + + //setup the aggregate pivot (and axis) + var configuration = SetupAggregateWithPivot(type, extractionInformations, catalogue, out var axisDimension, + out var pivotDimension); + + configuration.CountSQL = "sum(NumberInTrouble)"; + configuration.PivotOnDimensionID = pivotDimension.ID; //pivot on the Category + configuration.SaveToDatabase(); + + var topx = new AggregateTopX(CatalogueRepository, configuration, 2) { - var tbl = UploadTestDataAsTableToServer(type, out var catalogue, out var extractionInformations, out var tableInfo); - - //setup the aggregate pivot (and axis) - AggregateDimension axisDimension; - AggregateDimension pivotDimension; - var configuration = SetupAggregateWithPivot(type, extractionInformations, catalogue, out axisDimension, out pivotDimension); - - configuration.CountSQL = "sum(NumberInTrouble)"; - configuration.PivotOnDimensionID = pivotDimension.ID; //pivot on the Category - configuration.SaveToDatabase(); - - var topx = new AggregateTopX(CatalogueRepository, configuration, 2); - topx.OrderByDirection = AggregateTopXOrderByDirection.Descending; - topx.OrderByDimensionIfAny_ID = pivotDimension.ID; - topx.OrderByDirection = AggregateTopXOrderByDirection.Ascending; - topx.SaveToDatabase(); - - try - { - //get the result of the aggregate - var builder = new AggregateBuilder(null, configuration.CountSQL, configuration); - builder.AddColumn(axisDimension); - builder.AddColumn(pivotDimension); - builder.SetPivotToDimensionID(pivotDimension); - - AddWHEREToBuilder_CategoryIsTOrNumberGreaterThan42(builder,type); - - var resultTable = GetResultForBuilder(builder, tbl); - - //axis is ordered ascending by date starting in 2000 so that row should come first - Assert.IsTrue(AreBasicallyEquals("2000", resultTable.Rows[0][0])); - - //sort in AggregateTopX is the pivot dimension asc (i.e. order alphabetically) - Assert.AreEqual("E&, %a' mp;E", resultTable.Columns[1].ColumnName); - Assert.AreEqual("G", resultTable.Columns[2].ColumnName); - - //E,G (note that only 1 value appears for E because WHERE throws out rest). 
Also note the two columns are E and G because that is Top 2 when alphabetically sorted of the pivot values (E,F,G,T) that match the filter (F doesn't) - VerifyRowExist(resultTable, "2000", null, null); //no records in 2000 but it is important it appears still because that is what the axis says - VerifyRowExist(resultTable, "2001", null, 53); - VerifyRowExist(resultTable, "2002", null, null); - VerifyRowExist(resultTable, "2003", null, null); - VerifyRowExist(resultTable, "2004", null, null); - VerifyRowExist(resultTable, "2005", 59, null); - VerifyRowExist(resultTable, "2006", null, null); - VerifyRowExist(resultTable, "2007", null, null); - } - finally - { - Destroy(tbl, topx, configuration, catalogue, tableInfo); - } + OrderByDirection = AggregateTopXOrderByDirection.Descending, + OrderByDimensionIfAny_ID = pivotDimension.ID + }; + topx.OrderByDirection = AggregateTopXOrderByDirection.Ascending; + topx.SaveToDatabase(); + + try + { + //get the result of the aggregate + var builder = new AggregateBuilder(null, configuration.CountSQL, configuration); + builder.AddColumn(axisDimension); + builder.AddColumn(pivotDimension); + builder.SetPivotToDimensionID(pivotDimension); + + AddWHEREToBuilder_CategoryIsTOrNumberGreaterThan42(builder, type); + + var resultTable = GetResultForBuilder(builder, tbl); + + //axis is ordered ascending by date starting in 2000 so that row should come first + Assert.IsTrue(AreBasicallyEquals("2000", resultTable.Rows[0][0])); + + //sort in AggregateTopX is the pivot dimension asc (i.e. order alphabetically) + Assert.AreEqual("E&, %a' mp;E", resultTable.Columns[1].ColumnName); + Assert.AreEqual("G", resultTable.Columns[2].ColumnName); + + //E,G (note that only 1 value appears for E because WHERE throws out rest). Also note the two columns are E and G because that is Top 2 when alphabetically sorted of the pivot values (E,F,G,T) that match the filter (F doesn't) + VerifyRowExist(resultTable, "2000", null, + null); //no records in 2000 but it is important it appears still because that is what the axis says + VerifyRowExist(resultTable, "2001", null, 53); + VerifyRowExist(resultTable, "2002", null, null); + VerifyRowExist(resultTable, "2003", null, null); + VerifyRowExist(resultTable, "2004", null, null); + VerifyRowExist(resultTable, "2005", 59, null); + VerifyRowExist(resultTable, "2006", null, null); + VerifyRowExist(resultTable, "2007", null, null); + } + finally + { + Destroy(tbl, topx, configuration, catalogue, tableInfo); } + } + + /// + /// Assemble an aggregate which returns the top 1 pivot dimension HAVING count(*) less than 2 + /// + /// + [Test] + [TestCase(DatabaseType.MicrosoftSQLServer)] + [TestCase(DatabaseType.MySql)] + public void GroupBy_PivotWithSum_HAVING_Top1_WHERE(DatabaseType type) + { + var tbl = UploadTestDataAsTableToServer(type, out var catalogue, out var extractionInformations, + out var tableInfo); + + //setup the aggregate pivot (and axis) + var configuration = SetupAggregateWithPivot(type, extractionInformations, catalogue, out var axisDimension, + out var pivotDimension); + + configuration.CountSQL = "sum(NumberInTrouble)"; + configuration.PivotOnDimensionID = pivotDimension.ID; //pivot on the Category + + configuration.HavingSQL = "count(*)<5"; //throws out 'T' - /// - /// Assemble an aggregate which returns the top 1 pivot dimension HAVING count(*) less than 2 - /// - /// - [Test] - [TestCase(DatabaseType.MicrosoftSQLServer)] - [TestCase(DatabaseType.MySql)] - public void GroupBy_PivotWithSum_HAVING_Top1_WHERE(DatabaseType type) + 
configuration.SaveToDatabase(); + + var topx = new AggregateTopX(CatalogueRepository, configuration, 1) + { + OrderByDirection = AggregateTopXOrderByDirection.Descending + }; //Top 1 (highest count columns should be used for pivot) + topx.SaveToDatabase(); + + try + { + //get the result of the aggregate + var builder = new AggregateBuilder(null, configuration.CountSQL, configuration); + builder.AddColumn(axisDimension); + builder.AddColumn(pivotDimension); + builder.SetPivotToDimensionID(pivotDimension); + + AddWHEREToBuilder_CategoryIsTOrNumberGreaterThan42(builder, type); + + var resultTable = GetResultForBuilder(builder, tbl); + + //axis is ordered ascending by date starting in 2000 so that row should come first + Assert.IsTrue(AreBasicallyEquals("2000", resultTable.Rows[0][0])); + + //where logic matches T in spades but HAVING statement throws it out for having more than 4 records total + Assert.AreEqual("E&, %a' mp;E", resultTable.Columns[1].ColumnName); + + //Only E appears because of Top 1 pivot statement + VerifyRowExist(resultTable, "2000", + null); //all E records are discarded except 59 because that is the WHERE logic + VerifyRowExist(resultTable, "2001", null); + VerifyRowExist(resultTable, "2002", null); + VerifyRowExist(resultTable, "2003", null); + VerifyRowExist(resultTable, "2004", null); + VerifyRowExist(resultTable, "2005", 59); + VerifyRowExist(resultTable, "2006", null); + VerifyRowExist(resultTable, "2007", null); + } + finally { - var tbl = UploadTestDataAsTableToServer(type, out var catalogue, out var extractionInformations, out var tableInfo); - - //setup the aggregate pivot (and axis) - AggregateDimension axisDimension; - AggregateDimension pivotDimension; - var configuration = SetupAggregateWithPivot(type, extractionInformations, catalogue, out axisDimension, out pivotDimension); - - configuration.CountSQL = "sum(NumberInTrouble)"; - configuration.PivotOnDimensionID = pivotDimension.ID; //pivot on the Category - - configuration.HavingSQL = "count(*)<5"; //throws out 'T' - - configuration.SaveToDatabase(); - - var topx = new AggregateTopX(CatalogueRepository, configuration, 1); //Top 1 (highest count columns should be used for pivot) - topx.OrderByDirection = AggregateTopXOrderByDirection.Descending; - topx.SaveToDatabase(); - - try - { - //get the result of the aggregate - var builder = new AggregateBuilder(null, configuration.CountSQL, configuration); - builder.AddColumn(axisDimension); - builder.AddColumn(pivotDimension); - builder.SetPivotToDimensionID(pivotDimension); - - AddWHEREToBuilder_CategoryIsTOrNumberGreaterThan42(builder,type); - - var resultTable = GetResultForBuilder(builder, tbl); - - //axis is ordered ascending by date starting in 2000 so that row should come first - Assert.IsTrue(AreBasicallyEquals("2000", resultTable.Rows[0][0])); - - //where logic matches T in spades but HAVING statement throws it out for having more than 4 records total - Assert.AreEqual("E&, %a' mp;E", resultTable.Columns[1].ColumnName); - - //Only E appears because of Top 1 pivot statement - VerifyRowExist(resultTable, "2000", null); //all E records are discarded except 59 because that is the WHERE logic - VerifyRowExist(resultTable, "2001", null); - VerifyRowExist(resultTable, "2002", null); - VerifyRowExist(resultTable, "2003", null); - VerifyRowExist(resultTable, "2004", null); - VerifyRowExist(resultTable, "2005", 59); - VerifyRowExist(resultTable, "2006", null); - VerifyRowExist(resultTable, "2007", null); - } - finally - { - Destroy(tbl, topx, configuration, 
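// ---- Illustrative sketch, not part of the patch ----
// The recurring refactor in this release replaces construct-then-assign sequences with C#
// object initializers (as with AggregateTopX above). The type below is invented purely to
// show the shape of the change; it is not an RDMP class.
public class TopXSettings
{
    public int Count { get; set; }
    public string Direction { get; set; } = "Ascending";
}

public static class InitializerSketch
{
    // old style: assign properties one statement at a time after construction
    public static TopXSettings Before()
    {
        var settings = new TopXSettings();
        settings.Count = 1;
        settings.Direction = "Descending";
        return settings;
    }

    // new style: a single expression with an object initializer, same end state
    public static TopXSettings After() =>
        new TopXSettings { Count = 1, Direction = "Descending" };
}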
catalogue, tableInfo); - } + Destroy(tbl, topx, configuration, catalogue, tableInfo); } } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/QueryBuildingTests/AggregateBuilderTests/MicrosoftAggregateBuilderTests.cs b/Rdmp.Core.Tests/Curation/Integration/QueryBuildingTests/AggregateBuilderTests/MicrosoftAggregateBuilderTests.cs index 7c3b7b2409..1933221130 100644 --- a/Rdmp.Core.Tests/Curation/Integration/QueryBuildingTests/AggregateBuilderTests/MicrosoftAggregateBuilderTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/QueryBuildingTests/AggregateBuilderTests/MicrosoftAggregateBuilderTests.cs @@ -4,25 +4,22 @@ // RDMP is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. // You should have received a copy of the GNU General Public License along with RDMP. If not, see . -using Microsoft.Data.SqlClient; using NUnit.Framework; using Rdmp.Core.Curation.Data; using Rdmp.Core.QueryBuilding; using System; -namespace Rdmp.Core.Tests.Curation.Integration.QueryBuildingTests.AggregateBuilderTests +namespace Rdmp.Core.Tests.Curation.Integration.QueryBuildingTests.AggregateBuilderTests; + +public class MicrosoftAggregateBuilderTests : AggregateBuilderTestsBase { - public class MicrosoftAggregateBuilderTests:AggregateBuilderTestsBase + [Test] + public void TestAggregateBuilding_NoConfigurationOneDimension() { - - - [Test] - public void TestAggregateBuilding_NoConfigurationOneDimension() - { - var builder = new AggregateBuilder(null, "count(*)", null); - builder.AddColumn(_dimension1); + var builder = new AggregateBuilder(null, "count(*)", null); + builder.AddColumn(_dimension1); - Assert.AreEqual(CollapseWhitespace(@"/**/ + Assert.AreEqual(CollapseWhitespace(@"/**/ SELECT Col1, count(*) AS MyCount @@ -31,19 +28,19 @@ public void TestAggregateBuilding_NoConfigurationOneDimension() group by Col1 order by -Col1"),CollapseWhitespace(builder.SQL)); - } - - /// - /// Tests the systems ability to figure out the alias of the count column when it has " AS " (e.g. in a cast scalar function) - /// - [Test] - public void TestAggregateBuilding_AS_InCount() - { - var builder = new AggregateBuilder(null, "count(cast(1 AS int))", null); - builder.AddColumn(_dimension1); +Col1"), CollapseWhitespace(builder.SQL)); + } - Assert.AreEqual(CollapseWhitespace(@"/**/ + /// + /// Tests the systems ability to figure out the alias of the count column when it has " AS " (e.g. 
in a cast scalar function) + /// + [Test] + public void TestAggregateBuilding_AS_InCount() + { + var builder = new AggregateBuilder(null, "count(cast(1 AS int))", null); + builder.AddColumn(_dimension1); + + Assert.AreEqual(CollapseWhitespace(@"/**/ SELECT Col1, count(cast(1 AS int)) AS MyCount @@ -52,17 +49,17 @@ public void TestAggregateBuilding_AS_InCount() group by Col1 order by -Col1"),CollapseWhitespace(builder.SQL)); - } +Col1"), CollapseWhitespace(builder.SQL)); + } - [Test] - public void TestAggregateBuilding_NoConfigurationTwoDimension() - { - var builder = new AggregateBuilder(null, "count(*)", null); - builder.AddColumn(_dimension1); - builder.AddColumn(_dimension2); + [Test] + public void TestAggregateBuilding_NoConfigurationTwoDimension() + { + var builder = new AggregateBuilder(null, "count(*)", null); + builder.AddColumn(_dimension1); + builder.AddColumn(_dimension2); - Assert.AreEqual(CollapseWhitespace(CollapseWhitespace(@"/**/ + Assert.AreEqual(CollapseWhitespace(CollapseWhitespace(@"/**/ SELECT Col1, Col2, @@ -74,17 +71,17 @@ group by Col2 order by Col1, -Col2")),CollapseWhitespace(builder.SQL)); - } +Col2")), CollapseWhitespace(builder.SQL)); + } - [Test] - public void TestAggregateBuilding_ConfigurationTwoDimension() - { - var builder = new AggregateBuilder(null, "count(*)", _configuration); - builder.AddColumn(_dimension1); - builder.AddColumn(_dimension2); + [Test] + public void TestAggregateBuilding_ConfigurationTwoDimension() + { + var builder = new AggregateBuilder(null, "count(*)", _configuration); + builder.AddColumn(_dimension1); + builder.AddColumn(_dimension2); - Assert.AreEqual(CollapseWhitespace(@"/*MyConfig*/ + Assert.AreEqual(CollapseWhitespace(@"/*MyConfig*/ SELECT Col1, Col2, @@ -97,68 +94,72 @@ group by order by Col1, Col2"), CollapseWhitespace(builder.SQL)); - } + } - [Test] - public void TwoTopXObjects() - { - var topX1 = new AggregateTopX(CatalogueRepository, _configuration, 10); - var ex = Assert.Throws(() => new AggregateTopX(CatalogueRepository, _configuration, 10)); - - Assert.AreEqual("AggregateConfiguration MyConfig already has a TopX",ex.Message); - topX1.DeleteInDatabase(); - } - - [TestCase("count(*)",true)] - [TestCase("count(*)", false)] - [TestCase("max(Col1)",true)] - [TestCase("max(Col2)", false)] - public void TestAggregateBuilding_NoConfigurationTwoDimension_Top10(string countColField,bool asc) + [Test] + public void TwoTopXObjects() + { + var topX1 = new AggregateTopX(CatalogueRepository, _configuration, 10); + var ex = Assert.Throws(() => new AggregateTopX(CatalogueRepository, _configuration, 10)); + + Assert.AreEqual("AggregateConfiguration MyConfig already has a TopX", ex.Message); + topX1.DeleteInDatabase(); + } + + [TestCase("count(*)", true)] + [TestCase("count(*)", false)] + [TestCase("max(Col1)", true)] + [TestCase("max(Col2)", false)] + public void TestAggregateBuilding_NoConfigurationTwoDimension_Top10(string countColField, bool asc) + { + var topX = new AggregateTopX(CatalogueRepository, _configuration, 10) { - var topX = new AggregateTopX(CatalogueRepository, _configuration, 10); - topX.OrderByDirection = asc + OrderByDirection = asc ? 
AggregateTopXOrderByDirection.Ascending - : AggregateTopXOrderByDirection.Descending; - topX.SaveToDatabase(); + : AggregateTopXOrderByDirection.Descending + }; + topX.SaveToDatabase(); - var beforeCountSQL = _configuration.CountSQL; - _configuration.CountSQL = countColField; + var beforeCountSQL = _configuration.CountSQL; + _configuration.CountSQL = countColField; - var builder = _configuration.GetQueryBuilder(); - - Assert.AreEqual(CollapseWhitespace(@"/*MyConfig*/ + var builder = _configuration.GetQueryBuilder(); + + Assert.AreEqual(CollapseWhitespace($@"/*MyConfig*/ SELECT TOP 10 Col1, Col2, -"+countColField+@" AS MyCount +{countColField} AS MyCount FROM T1 group by Col1, Col2 order by -"+countColField+" " + (asc?"asc":"desc")),CollapseWhitespace(builder.SQL)); +{countColField} {(asc ? "asc" : "desc")}"), CollapseWhitespace(builder.SQL)); - _configuration.CountSQL = beforeCountSQL; - topX.DeleteInDatabase(); - } + _configuration.CountSQL = beforeCountSQL; + topX.DeleteInDatabase(); + } - [TestCase(true)] - [TestCase(false)] - public void TestAggregateBuilding_NoConfigurationTwoDimension_Top10DimensionOrder(bool asc) + [TestCase(true)] + [TestCase(false)] + public void TestAggregateBuilding_NoConfigurationTwoDimension_Top10DimensionOrder(bool asc) + { + var topX = new AggregateTopX(CatalogueRepository, _configuration, 10) { - var topX = new AggregateTopX(CatalogueRepository, _configuration, 10); - topX.OrderByDimensionIfAny_ID = _dimension1.ID; - topX.OrderByDirection = asc + OrderByDimensionIfAny_ID = _dimension1.ID, + OrderByDirection = asc ? AggregateTopXOrderByDirection.Ascending - : AggregateTopXOrderByDirection.Descending; - topX.SaveToDatabase(); - - var builder = _configuration.GetQueryBuilder(); + : AggregateTopXOrderByDirection.Descending + }; + topX.SaveToDatabase(); - Assert.AreEqual(CollapseWhitespace(@"/*MyConfig*/ + var builder = _configuration.GetQueryBuilder(); + + Assert.AreEqual(CollapseWhitespace($@"/*MyConfig*/ SELECT TOP 10 Col1, @@ -170,22 +171,20 @@ group by Col1, Col2 order by -Col1 " + (asc ? "asc" : "desc")), CollapseWhitespace(builder.SQL)); - - topX.DeleteInDatabase(); - } +Col1 {(asc ? 
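// ---- Illustrative sketch, not part of the patch ----
// The expected-SQL assertions above move from string concatenation to interpolated verbatim
// strings ($@"..."), embedding countColField and the asc/desc choice inline. The variables
// here are hypothetical stand-ins for the test parameters.
var countColField = "count(*)";
var asc = true;

// old style: verbatim literal broken up by concatenation
var before = @"order by
" + countColField + " " + (asc ? "asc" : "desc");

// new style: one interpolated verbatim literal, expressions in braces
var after = $@"order by
{countColField} {(asc ? "asc" : "desc")}";

System.Console.WriteLine(before == after); // True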
"asc" : "desc")}"), CollapseWhitespace(builder.SQL)); - [Test] - public void TestAggregateBuilding_NoConfigurationNoDimensions() - { - var builder = new AggregateBuilder(null, "count(*)", null,new []{_ti}); - - Assert.AreEqual(CollapseWhitespace(@"/**/ + topX.DeleteInDatabase(); + } + + [Test] + public void TestAggregateBuilding_NoConfigurationNoDimensions() + { + var builder = new AggregateBuilder(null, "count(*)", null, new[] { _ti }); + + Assert.AreEqual(CollapseWhitespace(@"/**/ SELECT count(*) AS MyCount FROM T1"), CollapseWhitespace(builder.SQL)); - } - } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/QueryBuildingTests/AggregateBuilderTests/MySqlAggregateBuilderTests.cs b/Rdmp.Core.Tests/Curation/Integration/QueryBuildingTests/AggregateBuilderTests/MySqlAggregateBuilderTests.cs index 5dcda80250..ff628881a7 100644 --- a/Rdmp.Core.Tests/Curation/Integration/QueryBuildingTests/AggregateBuilderTests/MySqlAggregateBuilderTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/QueryBuildingTests/AggregateBuilderTests/MySqlAggregateBuilderTests.cs @@ -8,29 +8,31 @@ using NUnit.Framework; using Rdmp.Core.Curation.Data; using Rdmp.Core.QueryBuilding; -using ReusableLibraryCode.Settings; +using Rdmp.Core.ReusableLibraryCode.Settings; -namespace Rdmp.Core.Tests.Curation.Integration.QueryBuildingTests.AggregateBuilderTests +namespace Rdmp.Core.Tests.Curation.Integration.QueryBuildingTests.AggregateBuilderTests; + +public class MySqlAggregateBuilderTests : AggregateBuilderTestsBase { - public class MySqlAggregateBuilderTests : AggregateBuilderTestsBase + [Test] + public void Test_AggregateBuilder_MySql_Top32() { - [Test] - public void Test_AggregateBuilder_MySql_Top32() + _ti.DatabaseType = DatabaseType.MySql; + _ti.SaveToDatabase(); + + var builder = new AggregateBuilder(null, "count(*)", null); + builder.AddColumn(_dimension1); + + var topx = new AggregateTopX(CatalogueRepository, _configuration, 32) { - _ti.DatabaseType = DatabaseType.MySql; - _ti.SaveToDatabase(); + OrderByDimensionIfAny_ID = _dimension1.ID + }; + topx.SaveToDatabase(); - var builder = new AggregateBuilder(null, "count(*)", null); - builder.AddColumn(_dimension1); + builder.AggregateTopX = topx; - var topx = new AggregateTopX(CatalogueRepository, _configuration, 32); - topx.OrderByDimensionIfAny_ID = _dimension1.ID; - topx.SaveToDatabase(); - builder.AggregateTopX = topx; - - - Assert.AreEqual(CollapseWhitespace(@"/**/ + Assert.AreEqual(CollapseWhitespace(@"/**/ SELECT Col1, count(*) AS MyCount @@ -40,30 +42,32 @@ group by Col1 order by Col1 desc -LIMIT 32"),CollapseWhitespace(builder.SQL.Trim())); +LIMIT 32"), CollapseWhitespace(builder.SQL.Trim())); - topx.DeleteInDatabase(); - } - [TestCase(true)] - [TestCase(false)] - public void Test_AggregateBuilder_MySql_Top31OrderByCountAsc(bool useAliasForGroupBy) - { - _ti.DatabaseType = DatabaseType.MySql; - _ti.SaveToDatabase(); + topx.DeleteInDatabase(); + } - UserSettings.UseAliasInsteadOfTransformInGroupByAggregateGraphs = useAliasForGroupBy; + [TestCase(true)] + [TestCase(false)] + public void Test_AggregateBuilder_MySql_Top31OrderByCountAsc(bool useAliasForGroupBy) + { + _ti.DatabaseType = DatabaseType.MySql; + _ti.SaveToDatabase(); + + UserSettings.UseAliasInsteadOfTransformInGroupByAggregateGraphs = useAliasForGroupBy; - var builder = new AggregateBuilder(null, "count(*)", null); - builder.AddColumn(_dimension1); + var builder = new AggregateBuilder(null, "count(*)", null); + builder.AddColumn(_dimension1); - var topx = new 
AggregateTopX(CatalogueRepository, _configuration, 31); - topx.OrderByDirection = AggregateTopXOrderByDirection.Ascending; - builder.AggregateTopX = topx; + var topx = new AggregateTopX(CatalogueRepository, _configuration, 31) + { + OrderByDirection = AggregateTopXOrderByDirection.Ascending + }; + builder.AggregateTopX = topx; - if (useAliasForGroupBy) - { - Assert.AreEqual(CollapseWhitespace(@"/**/ + Assert.AreEqual(useAliasForGroupBy + ? CollapseWhitespace(@"/**/ SELECT Col1, count(*) AS MyCount @@ -73,11 +77,8 @@ group by Col1 order by MyCount asc -LIMIT 31"), CollapseWhitespace(builder.SQL)); - } - else - { - Assert.AreEqual(CollapseWhitespace(@"/**/ +LIMIT 31") + : CollapseWhitespace(@"/**/ SELECT Col1, count(*) AS MyCount @@ -88,13 +89,10 @@ group by order by count(*) asc LIMIT 31"), CollapseWhitespace(builder.SQL)); - } - - topx.DeleteInDatabase(); + topx.DeleteInDatabase(); - UserSettings.UseAliasInsteadOfTransformInGroupByAggregateGraphs = false; - } + UserSettings.UseAliasInsteadOfTransformInGroupByAggregateGraphs = false; } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/QueryBuildingTests/QueryBuilderTests/MicrosoftQueryBuilderTests.cs b/Rdmp.Core.Tests/Curation/Integration/QueryBuildingTests/QueryBuilderTests/MicrosoftQueryBuilderTests.cs index a1128ce66d..04be62e17b 100644 --- a/Rdmp.Core.Tests/Curation/Integration/QueryBuildingTests/QueryBuilderTests/MicrosoftQueryBuilderTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/QueryBuildingTests/QueryBuilderTests/MicrosoftQueryBuilderTests.cs @@ -10,48 +10,50 @@ using Rdmp.Core.QueryBuilding; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Integration.QueryBuildingTests.QueryBuilderTests +namespace Rdmp.Core.Tests.Curation.Integration.QueryBuildingTests.QueryBuilderTests; + +internal class MicrosoftQueryBuilderTests : DatabaseTests { - class MicrosoftQueryBuilderTests:DatabaseTests + [Test] + public void TestQueryBuilder_MicrosoftSQLServer_Top35() { - [Test] - public void TestQueryBuilder_MicrosoftSQLServer_Top35() + var t = new TableInfo(CatalogueRepository, "[db]..[tbl]") + { + DatabaseType = DatabaseType.MicrosoftSQLServer + }; + t.SaveToDatabase(); + + var col = new ColumnInfo(CatalogueRepository, "[db]..[tbl].[col]", "varchar(10)", t); + Assert.AreEqual("col", col.GetRuntimeName()); + + var cata = new Catalogue(CatalogueRepository, "cata"); + var catalogueItem = new CatalogueItem(CatalogueRepository, cata, "col"); + var extractionInfo = new ExtractionInformation(CatalogueRepository, catalogueItem, col, col.Name); + + var qb = new QueryBuilder(null, null) { - var t = new TableInfo(CatalogueRepository, "[db]..[tbl]"); - t.DatabaseType = DatabaseType.MicrosoftSQLServer; - t.SaveToDatabase(); - - var col = new ColumnInfo(CatalogueRepository, "[db]..[tbl].[col]", "varchar(10)", t); - Assert.AreEqual("col", col.GetRuntimeName()); - - var cata = new Catalogue(CatalogueRepository, "cata"); - var catalogueItem = new CatalogueItem(CatalogueRepository, cata, "col"); - var extractionInfo = new ExtractionInformation(CatalogueRepository, catalogueItem, col, col.Name); - - var qb = new QueryBuilder(null, null); - qb.TopX = 35; - qb.AddColumn(extractionInfo); - Assert.AreEqual( - CollapseWhitespace( + TopX = 35 + }; + qb.AddColumn(extractionInfo); + Assert.AreEqual( + CollapseWhitespace( @"SELECT TOP 35 [db]..[tbl].[col] FROM [db]..[tbl]") - , CollapseWhitespace(qb.SQL)); + , CollapseWhitespace(qb.SQL)); - //editting the topX should invalidate the SQL automatically - qb.TopX = 50; - 
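// ---- Illustrative sketch, not part of the patch ----
// The MySql Top31 test above folds an if/else pair of near-identical Assert.AreEqual calls
// into a single assertion whose expected string is chosen by a conditional expression. The
// method and literals below are hypothetical stand-ins, not the real generated SQL.
using NUnit.Framework;

public static class TernaryAssertSketch
{
    public static void CheckOrderBy(string actualSql, bool useAliasForGroupBy)
    {
        // one assertion; only the expected value varies with the flag
        Assert.AreEqual(useAliasForGroupBy
                ? "order by MyCount asc"   // order by the alias
                : "order by count(*) asc", // order by the transform itself
            actualSql);
    }
}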
Assert.AreEqual( - CollapseWhitespace( + //editting the topX should invalidate the SQL automatically + qb.TopX = 50; + Assert.AreEqual( + CollapseWhitespace( @"SELECT TOP 50 [db]..[tbl].[col] FROM [db]..[tbl]") - ,CollapseWhitespace(qb.SQL)); - - } + , CollapseWhitespace(qb.SQL)); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/QueryBuildingTests/QueryBuilderTests/MySqlQueryBuilderTests.cs b/Rdmp.Core.Tests/Curation/Integration/QueryBuildingTests/QueryBuilderTests/MySqlQueryBuilderTests.cs index 0c78fc487e..98c2dcaa2c 100644 --- a/Rdmp.Core.Tests/Curation/Integration/QueryBuildingTests/QueryBuilderTests/MySqlQueryBuilderTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/QueryBuildingTests/QueryBuilderTests/MySqlQueryBuilderTests.cs @@ -10,72 +10,76 @@ using Rdmp.Core.QueryBuilding; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Integration.QueryBuildingTests.QueryBuilderTests +namespace Rdmp.Core.Tests.Curation.Integration.QueryBuildingTests.QueryBuilderTests; + +public class MySqlQueryBuilderTests : DatabaseTests { - public class MySqlQueryBuilderTests : DatabaseTests + [Test] + public void TestQueryBuilder_MySql_Normal() { - [Test] - public void TestQueryBuilder_MySql_Normal() + var t = new TableInfo(CatalogueRepository, "`db`.`tbl`") { - var t = new TableInfo(CatalogueRepository, "`db`.`tbl`"); - t.DatabaseType = DatabaseType.MySql; - t.SaveToDatabase(); + DatabaseType = DatabaseType.MySql + }; + t.SaveToDatabase(); - var col = new ColumnInfo(CatalogueRepository, "`db`.`tbl`.`col`","varchar(10)",t); - Assert.AreEqual("col",col.GetRuntimeName()); + var col = new ColumnInfo(CatalogueRepository, "`db`.`tbl`.`col`", "varchar(10)", t); + Assert.AreEqual("col", col.GetRuntimeName()); - var cata = new Catalogue(CatalogueRepository,"cata"); - var catalogueItem = new CatalogueItem(CatalogueRepository, cata, "col"); - var extractionInfo = new ExtractionInformation(CatalogueRepository, catalogueItem, col, col.Name); + var cata = new Catalogue(CatalogueRepository, "cata"); + var catalogueItem = new CatalogueItem(CatalogueRepository, cata, "col"); + var extractionInfo = new ExtractionInformation(CatalogueRepository, catalogueItem, col, col.Name); - var qb = new QueryBuilder(null, null); - qb.AddColumn(extractionInfo); - Assert.AreEqual(CollapseWhitespace( - @"SELECT + var qb = new QueryBuilder(null, null); + qb.AddColumn(extractionInfo); + Assert.AreEqual(CollapseWhitespace( + @"SELECT `db`.`tbl`.`col` FROM `db`.`tbl`" - ),CollapseWhitespace(qb.SQL)); + ), CollapseWhitespace(qb.SQL)); + } - } - [Test] - public void TestQueryBuilder_MySql_Top35() + [Test] + public void TestQueryBuilder_MySql_Top35() + { + var t = new TableInfo(CatalogueRepository, "`db`.`tbl`") { - var t = new TableInfo(CatalogueRepository, "`db`.`tbl`"); - t.DatabaseType = DatabaseType.MySql; - t.SaveToDatabase(); + DatabaseType = DatabaseType.MySql + }; + t.SaveToDatabase(); - var col = new ColumnInfo(CatalogueRepository, "`db`.`tbl`.`col`", "varchar(10)", t); - Assert.AreEqual("col", col.GetRuntimeName()); + var col = new ColumnInfo(CatalogueRepository, "`db`.`tbl`.`col`", "varchar(10)", t); + Assert.AreEqual("col", col.GetRuntimeName()); - var cata = new Catalogue(CatalogueRepository, "cata"); - var catalogueItem = new CatalogueItem(CatalogueRepository, cata, "col"); - var extractionInfo = new ExtractionInformation(CatalogueRepository, catalogueItem, col, col.Name); + var cata = new Catalogue(CatalogueRepository, "cata"); + var catalogueItem = new CatalogueItem(CatalogueRepository, 
cata, "col"); + var extractionInfo = new ExtractionInformation(CatalogueRepository, catalogueItem, col, col.Name); - var qb = new QueryBuilder(null, null); - qb.TopX = 35; - qb.AddColumn(extractionInfo); - Assert.AreEqual( - CollapseWhitespace( + var qb = new QueryBuilder(null, null) + { + TopX = 35 + }; + qb.AddColumn(extractionInfo); + Assert.AreEqual( + CollapseWhitespace( @"SELECT `db`.`tbl`.`col` FROM `db`.`tbl` LIMIT 35") - , CollapseWhitespace(qb.SQL)); + , CollapseWhitespace(qb.SQL)); - //editting the topX should invalidate the SQL automatically - qb.TopX = 50; - Assert.AreEqual( - CollapseWhitespace( + //editting the topX should invalidate the SQL automatically + qb.TopX = 50; + Assert.AreEqual( + CollapseWhitespace( @"SELECT `db`.`tbl`.`col` FROM `db`.`tbl` LIMIT 50") - , CollapseWhitespace(qb.SQL)); - - } + , CollapseWhitespace(qb.SQL)); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/QueryBuildingTests/QueryBuilderTests/QueryBuilderUnitTests.cs b/Rdmp.Core.Tests/Curation/Integration/QueryBuildingTests/QueryBuilderTests/QueryBuilderUnitTests.cs index b4cad32a0b..4db76ed05f 100644 --- a/Rdmp.Core.Tests/Curation/Integration/QueryBuildingTests/QueryBuilderTests/QueryBuilderUnitTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/QueryBuildingTests/QueryBuilderTests/QueryBuilderUnitTests.cs @@ -8,116 +8,116 @@ using Rdmp.Core.Curation.Data; using Rdmp.Core.QueryBuilding; using System; -using System.Collections.Generic; -using System.Text; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Integration.QueryBuildingTests.QueryBuilderTests +namespace Rdmp.Core.Tests.Curation.Integration.QueryBuildingTests.QueryBuilderTests; + +internal class QueryBuilderUnitTests : UnitTests { - class QueryBuilderUnitTests:UnitTests + [Test] + public void Test_IsPrimaryExtractionTable_TwoTables() { - [Test] - public void Test_IsPrimaryExtractionTable_TwoTables() - { - var c1 = WhenIHaveA(); - var c2 = WhenIHaveA(); + var c1 = WhenIHaveA(); + var c2 = WhenIHaveA(); - c1.TableInfo.IsPrimaryExtractionTable = true; - c1.TableInfo.SaveToDatabase(); + c1.TableInfo.IsPrimaryExtractionTable = true; + c1.TableInfo.SaveToDatabase(); - c2.TableInfo.IsPrimaryExtractionTable = true; - c2.TableInfo.SaveToDatabase(); + c2.TableInfo.IsPrimaryExtractionTable = true; + c2.TableInfo.SaveToDatabase(); - QueryBuilder builder = new QueryBuilder(null, null); - builder.AddColumn(new ColumnInfoToIColumn(Repository,c1)); - builder.AddColumn(new ColumnInfoToIColumn(Repository, c2)); + var builder = new QueryBuilder(null, null); + builder.AddColumn(new ColumnInfoToIColumn(Repository, c1)); + builder.AddColumn(new ColumnInfoToIColumn(Repository, c2)); - var ex = Assert.Throws(()=>{var s = builder.SQL;}); + var ex = Assert.Throws(() => + { + var s = builder.SQL; + }); - StringAssert.Contains("There are multiple tables marked as IsPrimaryExtractionTable",ex.Message); - } + StringAssert.Contains("There are multiple tables marked as IsPrimaryExtractionTable", ex.Message); + } - [Test] - public void Test_TwoTables_JoinFound() - { - //4 tables - var c1 = WhenIHaveA(); - var c2 = WhenIHaveA(); + [Test] + public void Test_TwoTables_JoinFound() + { + //4 tables + var c1 = WhenIHaveA(); + var c2 = WhenIHaveA(); - //1 is primary - c1.TableInfo.IsPrimaryExtractionTable = true; - c1.TableInfo.SaveToDatabase(); + //1 is primary + c1.TableInfo.IsPrimaryExtractionTable = true; + c1.TableInfo.SaveToDatabase(); - var j1 = new JoinInfo(Repository, c2, c1, ExtractionJoinType.Inner, null); + var j1 = new 
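// ---- Illustrative sketch, not part of the patch ----
// Every file in this diff is converted from a block-scoped to a C# 10 file-scoped namespace,
// which removes one indentation level from the whole file. Placeholder names below; only the
// namespace syntax is the point.
//
// Before:
//   namespace Example.Tests
//   {
//       internal class SomeTests { }
//   }
//
// After:
namespace Example.Tests;

internal class SomeTests
{
}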
JoinInfo(Repository, c2, c1, ExtractionJoinType.Inner, null); - QueryBuilder builder = new QueryBuilder(null, null); - builder.AddColumn(new ColumnInfoToIColumn(Repository, c1)); - builder.AddColumn(new ColumnInfoToIColumn(Repository, c2)); + var builder = new QueryBuilder(null, null); + builder.AddColumn(new ColumnInfoToIColumn(Repository, c1)); + builder.AddColumn(new ColumnInfoToIColumn(Repository, c2)); - StringAssert.Contains("JOIN", builder.SQL); + StringAssert.Contains("JOIN", builder.SQL); - //we have 1 legit join go go team! - Assert.AreEqual(1, builder.JoinsUsedInQuery.Count); - Assert.AreEqual(j1, builder.JoinsUsedInQuery[0]); - } + //we have 1 legit join go go team! + Assert.AreEqual(1, builder.JoinsUsedInQuery.Count); + Assert.AreEqual(j1, builder.JoinsUsedInQuery[0]); + } - [Test] - public void Test_FourTables_MultipleRoutes() - { - //4 tables - var c1 = WhenIHaveA(); - c1.Name = "c1"; - c1.SaveToDatabase(); - c1.TableInfo.Name = "t1"; - c1.TableInfo.IsPrimaryExtractionTable = true; //t1 is primary - c1.TableInfo.SaveToDatabase(); - - var c2 = WhenIHaveA(); - c2.Name = "c2"; - c2.SaveToDatabase(); - c2.TableInfo.Name = "t2"; - c2.TableInfo.SaveToDatabase(); - - var c3 = WhenIHaveA(); - c3.Name = "c3"; - c3.SaveToDatabase(); - c3.TableInfo.Name = "t3"; - c3.TableInfo.SaveToDatabase(); - - var c4 = WhenIHaveA(); - c4.Name = "c4"; - c4.SaveToDatabase(); - c4.TableInfo.Name = "t4"; - c4.TableInfo.SaveToDatabase(); - - - /* c2 - * / \ - * c1 c4 - * \ / - * c3 - * - * */ - - var j1 = new JoinInfo(Repository,c2,c1,ExtractionJoinType.Inner,null); - var j2 = new JoinInfo(Repository, c3, c1, ExtractionJoinType.Inner, null); - var j3 = new JoinInfo(Repository, c4, c2, ExtractionJoinType.Inner, null); - var j4 = new JoinInfo(Repository, c4, c3, ExtractionJoinType.Inner, null); - - - QueryBuilder builder = new QueryBuilder(null, null); - builder.AddColumn(new ColumnInfoToIColumn(Repository, c1)); - builder.AddColumn(new ColumnInfoToIColumn(Repository, c2)); - builder.AddColumn(new ColumnInfoToIColumn(Repository, c3)); - builder.AddColumn(new ColumnInfoToIColumn(Repository, c4)); - - Console.WriteLine(builder.SQL); - - //should be using only 3 of the 4 joins because we already have a route to c4 without a fourth join - Assert.AreEqual(3, builder.JoinsUsedInQuery.Count); - Assert.Contains(j1,builder.JoinsUsedInQuery); - Assert.Contains(j2, builder.JoinsUsedInQuery); - Assert.Contains(j3, builder.JoinsUsedInQuery); - } + [Test] + public void Test_FourTables_MultipleRoutes() + { + //4 tables + var c1 = WhenIHaveA(); + c1.Name = "c1"; + c1.SaveToDatabase(); + c1.TableInfo.Name = "t1"; + c1.TableInfo.IsPrimaryExtractionTable = true; //t1 is primary + c1.TableInfo.SaveToDatabase(); + + var c2 = WhenIHaveA(); + c2.Name = "c2"; + c2.SaveToDatabase(); + c2.TableInfo.Name = "t2"; + c2.TableInfo.SaveToDatabase(); + + var c3 = WhenIHaveA(); + c3.Name = "c3"; + c3.SaveToDatabase(); + c3.TableInfo.Name = "t3"; + c3.TableInfo.SaveToDatabase(); + + var c4 = WhenIHaveA(); + c4.Name = "c4"; + c4.SaveToDatabase(); + c4.TableInfo.Name = "t4"; + c4.TableInfo.SaveToDatabase(); + + + /* c2 + * / \ + * c1 c4 + * \ / + * c3 + * + * */ + + var j1 = new JoinInfo(Repository, c2, c1, ExtractionJoinType.Inner, null); + var j2 = new JoinInfo(Repository, c3, c1, ExtractionJoinType.Inner, null); + var j3 = new JoinInfo(Repository, c4, c2, ExtractionJoinType.Inner, null); + var j4 = new JoinInfo(Repository, c4, c3, ExtractionJoinType.Inner, null); + + + var builder = new QueryBuilder(null, null); + builder.AddColumn(new 
ColumnInfoToIColumn(Repository, c1)); + builder.AddColumn(new ColumnInfoToIColumn(Repository, c2)); + builder.AddColumn(new ColumnInfoToIColumn(Repository, c3)); + builder.AddColumn(new ColumnInfoToIColumn(Repository, c4)); + + Console.WriteLine(builder.SQL); + + //should be using only 3 of the 4 joins because we already have a route to c4 without a fourth join + Assert.AreEqual(3, builder.JoinsUsedInQuery.Count); + Assert.Contains(j1, builder.JoinsUsedInQuery); + Assert.Contains(j2, builder.JoinsUsedInQuery); + Assert.Contains(j3, builder.JoinsUsedInQuery); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/ServerDefaultsTests.cs b/Rdmp.Core.Tests/Curation/Integration/ServerDefaultsTests.cs index 70f00ef209..68ac9c50b6 100644 --- a/Rdmp.Core.Tests/Curation/Integration/ServerDefaultsTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/ServerDefaultsTests.cs @@ -10,68 +10,68 @@ using Rdmp.Core.Databases; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Integration +namespace Rdmp.Core.Tests.Curation.Integration; + +public class ServerDefaultsTests : DatabaseTests { - public class ServerDefaultsTests : DatabaseTests + [Test] + public void TestClearSameDefaultTwice() { - [Test] - public void TestClearSameDefaultTwice() - { - Assert.IsNotNull(CatalogueRepository.GetDefaultFor(PermissableDefaults.LiveLoggingServer_ID)); - CatalogueRepository.ClearDefault(PermissableDefaults.LiveLoggingServer_ID); - CatalogueRepository.ClearDefault(PermissableDefaults.LiveLoggingServer_ID); - CatalogueRepository.ClearDefault(PermissableDefaults.LiveLoggingServer_ID); - Assert.IsNull(CatalogueRepository.GetDefaultFor(PermissableDefaults.LiveLoggingServer_ID)); - } + Assert.IsNotNull(CatalogueRepository.GetDefaultFor(PermissableDefaults.LiveLoggingServer_ID)); + CatalogueRepository.ClearDefault(PermissableDefaults.LiveLoggingServer_ID); + CatalogueRepository.ClearDefault(PermissableDefaults.LiveLoggingServer_ID); + CatalogueRepository.ClearDefault(PermissableDefaults.LiveLoggingServer_ID); + Assert.IsNull(CatalogueRepository.GetDefaultFor(PermissableDefaults.LiveLoggingServer_ID)); + } - [Test] - public void CreateNewExternalServerAndConfigureItAsDefault() - { - var databaseServer = new ExternalDatabaseServer(CatalogueRepository, "Deleteme",null); + [Test] + public void CreateNewExternalServerAndConfigureItAsDefault() + { + var databaseServer = new ExternalDatabaseServer(CatalogueRepository, "Deleteme", null); - try - { - Assert.AreEqual("Deleteme",databaseServer.Name); - databaseServer.Password = "nothing"; //automatically encrypts password + try + { + Assert.AreEqual("Deleteme", databaseServer.Name); + databaseServer.Password = "nothing"; //automatically encrypts password - Assert.AreNotEqual("nothing",databaseServer.Password);//should not match what we just set it to - Assert.AreEqual("nothing", databaseServer.GetDecryptedPassword());//should match what we set it to because of explicit call to decrypt + Assert.AreNotEqual("nothing", databaseServer.Password); //should not match what we just set it to + Assert.AreEqual("nothing", + databaseServer + .GetDecryptedPassword()); //should match what we set it to because of explicit call to decrypt - databaseServer.Server = "Bob"; - databaseServer.Database = "TEST"; - databaseServer.SaveToDatabase(); - - Catalogue cata = new Catalogue(CatalogueRepository, "TestCatalogueFor_CreateNewExternalServerAndConfigureItAsDefault"); - cata.DeleteInDatabase(); + databaseServer.Server = "Bob"; + databaseServer.Database = "TEST"; + 
databaseServer.SaveToDatabase(); - } - finally - { - databaseServer.DeleteInDatabase(); - } + var cata = new Catalogue(CatalogueRepository, + "TestCatalogueFor_CreateNewExternalServerAndConfigureItAsDefault"); + cata.DeleteInDatabase(); } - - [Test] - public void TestDeletingClearsDefault() + finally { - var eds = new ExternalDatabaseServer(CatalogueRepository, "mydb", new LoggingDatabasePatcher()); - var old = CatalogueRepository.GetDefaultFor(PermissableDefaults.LiveLoggingServer_ID); + databaseServer.DeleteInDatabase(); + } + } - try - { - //make the new server the default for logging - CatalogueRepository.SetDefault(PermissableDefaults.LiveLoggingServer_ID,eds); + [Test] + public void TestDeletingClearsDefault() + { + var eds = new ExternalDatabaseServer(CatalogueRepository, "mydb", new LoggingDatabasePatcher()); + var old = CatalogueRepository.GetDefaultFor(PermissableDefaults.LiveLoggingServer_ID); - //now we deleted it! - eds.DeleteInDatabase(); + try + { + //make the new server the default for logging + CatalogueRepository.SetDefault(PermissableDefaults.LiveLoggingServer_ID, eds); - Assert.IsNull(CatalogueRepository.GetDefaultFor(PermissableDefaults.LiveLoggingServer_ID)); - } - finally - { + //now we deleted it! + eds.DeleteInDatabase(); - CatalogueRepository.SetDefault(PermissableDefaults.LiveLoggingServer_ID, old); - } + Assert.IsNull(CatalogueRepository.GetDefaultFor(PermissableDefaults.LiveLoggingServer_ID)); + } + finally + { + CatalogueRepository.SetDefault(PermissableDefaults.LiveLoggingServer_ID, old); } } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/SupportingDocumentTests.cs b/Rdmp.Core.Tests/Curation/Integration/SupportingDocumentTests.cs index 119e9fa889..1d9cc9cf09 100644 --- a/Rdmp.Core.Tests/Curation/Integration/SupportingDocumentTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/SupportingDocumentTests.cs @@ -8,39 +8,40 @@ using Rdmp.Core.Curation.Data; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Integration +namespace Rdmp.Core.Tests.Curation.Integration; + +public class SupportingDocumentTests : DatabaseTests { - public class SupportingDocumentTests : DatabaseTests + [Test] + public void test_SupportingDocument_CreateAndDestroy() { - [Test] - public void test_SupportingDocument_CreateAndDestroy() - { - Catalogue cata = new Catalogue(CatalogueRepository, "deleteme"); - SupportingDocument doc = new SupportingDocument(CatalogueRepository, cata,"davesFile"); + var cata = new Catalogue(CatalogueRepository, "deleteme"); + var doc = new SupportingDocument(CatalogueRepository, cata, "davesFile"); - Assert.AreEqual(doc.Name ,"davesFile"); + Assert.AreEqual(doc.Name, "davesFile"); - doc.DeleteInDatabase(); - cata.DeleteInDatabase(); - } + doc.DeleteInDatabase(); + cata.DeleteInDatabase(); + } - [Test] - public void test_SupportingDocument_CreateChangeSaveDestroy() + [Test] + public void test_SupportingDocument_CreateChangeSaveDestroy() + { + var cata = new Catalogue(CatalogueRepository, "deleteme"); + var doc = new SupportingDocument(CatalogueRepository, cata, "davesFile") { - Catalogue cata = new Catalogue(CatalogueRepository, "deleteme"); - SupportingDocument doc = new SupportingDocument(CatalogueRepository, cata, "davesFile"); - doc.Description = "some exciting file that dave loves"; - doc.SaveToDatabase(); + Description = "some exciting file that dave loves" + }; + doc.SaveToDatabase(); - Assert.AreEqual(doc.Name, "davesFile"); - Assert.AreEqual(doc.Description, "some exciting file that dave loves"); + 
Assert.AreEqual(doc.Name, "davesFile"); + Assert.AreEqual(doc.Description, "some exciting file that dave loves"); - SupportingDocument docAfterCommit = CatalogueRepository.GetObjectByID(doc.ID); + var docAfterCommit = CatalogueRepository.GetObjectByID(doc.ID); - Assert.AreEqual(docAfterCommit.Description,doc.Description); + Assert.AreEqual(docAfterCommit.Description, doc.Description); - doc.DeleteInDatabase(); - cata.DeleteInDatabase(); - } + doc.DeleteInDatabase(); + cata.DeleteInDatabase(); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/TableInfoSynchronizerTests.cs b/Rdmp.Core.Tests/Curation/Integration/TableInfoSynchronizerTests.cs index 157e82b15f..cfb970809e 100644 --- a/Rdmp.Core.Tests/Curation/Integration/TableInfoSynchronizerTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/TableInfoSynchronizerTests.cs @@ -10,179 +10,178 @@ using NUnit.Framework; using Rdmp.Core.Curation; using Rdmp.Core.Curation.Data; -using ReusableLibraryCode.Checks; -using ReusableLibraryCode.DataAccess; +using Rdmp.Core.ReusableLibraryCode.Checks; using Tests.Common; using TypeGuesser; -namespace Rdmp.Core.Tests.Curation.Integration +namespace Rdmp.Core.Tests.Curation.Integration; + +public class TableInfoSynchronizerTests : DatabaseTests { - public class TableInfoSynchronizerTests:DatabaseTests + private DiscoveredServer _server; + private ITableInfo tableInfoCreated; + private ColumnInfo[] columnInfosCreated; + private DiscoveredDatabase _database; + + private const string TABLE_NAME = "TableInfoSynchronizerTests"; + + [SetUp] + protected override void SetUp() { - private DiscoveredServer _server; - private ITableInfo tableInfoCreated; - private ColumnInfo[] columnInfosCreated; - private DiscoveredDatabase _database; + base.SetUp(); - private const string TABLE_NAME = "TableInfoSynchronizerTests"; + _database = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer); + _server = _database.Server; - [SetUp] - protected override void SetUp() + using (var con = _server.GetConnection()) { - base.SetUp(); + con.Open(); + _server.GetCommand($"CREATE TABLE {TABLE_NAME}(Name varchar(10), Address varchar(500))", con) + .ExecuteNonQuery(); + } - _database = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer); - _server = _database.Server; + var tbl = _database.ExpectTable("TableInfoSynchronizerTests"); - using (var con = _server.GetConnection()) - { - con.Open(); - _server.GetCommand("CREATE TABLE " + TABLE_NAME + "(Name varchar(10), Address varchar(500))",con).ExecuteNonQuery(); - } + var importer = new TableInfoImporter(CatalogueRepository, tbl); + importer.DoImport(out tableInfoCreated, out columnInfosCreated); + } - var tbl = _database.ExpectTable("TableInfoSynchronizerTests"); - - TableInfoImporter importer = new TableInfoImporter(CatalogueRepository,tbl); - importer.DoImport(out tableInfoCreated,out columnInfosCreated); - } + [Test] + public void SynchronizationTests_NoChanges() + { + Assert.AreEqual(TABLE_NAME, tableInfoCreated.GetRuntimeName()); - [Test] - public void SynchronizationTests_NoChanges() + var synchronizer = new TableInfoSynchronizer(tableInfoCreated); + Assert.AreEqual(true, synchronizer.Synchronize(ThrowImmediatelyCheckNotifier.Quiet)); + } + + [Test] + [TestCase(true)] + [TestCase(false)] + public void SynchronizationTests_ColumnDropped(bool acceptChanges) + { + Assert.AreEqual(TABLE_NAME, tableInfoCreated.GetRuntimeName()); + + var table = _database.ExpectTable(TABLE_NAME); + var colToDrop = table.DiscoverColumn("Address"); + 
table.DropColumn(colToDrop); + + var synchronizer = new TableInfoSynchronizer(tableInfoCreated); + + if (acceptChanges) { - Assert.AreEqual(TABLE_NAME , tableInfoCreated.GetRuntimeName()); - - TableInfoSynchronizer synchronizer = new TableInfoSynchronizer(tableInfoCreated); - Assert.AreEqual(true,synchronizer.Synchronize(new ThrowImmediatelyCheckNotifier())); + //accept changes should result in a synchronized table + Assert.AreEqual(true, synchronizer.Synchronize(new AcceptAllCheckNotifier())); + Assert.AreEqual(1, tableInfoCreated.ColumnInfos.Length); //should only be 1 remaining } + else + { + var ex = Assert.Throws(() => synchronizer.Synchronize(ThrowImmediatelyCheckNotifier.Quiet)); + Assert.AreEqual("The ColumnInfo Address no longer appears in the live table.", ex.Message); + } + } - [Test] - [TestCase(true)] - [TestCase(false)] - public void SynchronizationTests_ColumnDropped(bool acceptChanges) + [Test] + [TestCase(true)] + [TestCase(false)] + public void SynchronizationTests_ColumnAdded(bool acceptChanges) + { + using (var con = _database.Server.GetConnection()) { - Assert.AreEqual(TABLE_NAME, tableInfoCreated.GetRuntimeName()); + con.Open(); + _server.GetCommand($"ALTER TABLE {TABLE_NAME} ADD Birthday datetime not null", con).ExecuteNonQuery(); + } - var table = _database.ExpectTable(TABLE_NAME); - var colToDrop = table.DiscoverColumn("Address"); - table.DropColumn(colToDrop); - - TableInfoSynchronizer synchronizer = new TableInfoSynchronizer(tableInfoCreated); - if (acceptChanges) - { - //accept changes should result in a synchronized table - Assert.AreEqual(true,synchronizer.Synchronize(new AcceptAllCheckNotifier())); - Assert.AreEqual(1,tableInfoCreated.ColumnInfos.Length);//should only be 1 remaining - } - else - { - var ex = Assert.Throws(() => synchronizer.Synchronize(new ThrowImmediatelyCheckNotifier())); - Assert.AreEqual("The ColumnInfo Address no longer appears in the live table.", ex.Message); - } - + var synchronizer = new TableInfoSynchronizer(tableInfoCreated); + + if (acceptChanges) + { + //accept changes should result in a synchronized table + Assert.AreEqual(true, synchronizer.Synchronize(new AcceptAllCheckNotifier())); + Assert.AreEqual(3, tableInfoCreated.ColumnInfos.Length); //should 3 now } + else + { + var ex = Assert.Throws(() => synchronizer.Synchronize(ThrowImmediatelyCheckNotifier.Quiet)); + Assert.AreEqual("The following columns are missing from the TableInfo:Birthday", ex.Message); + } + } - [Test] - [TestCase(true)] - [TestCase(false)] - public void SynchronizationTests_ColumnAdded(bool acceptChanges) + [Test] + [TestCase(true)] + [TestCase(false)] + public void SynchronizationTests_ColumnAddedWithCatalogue(bool acceptChanges) + { + var cataEngineer = new ForwardEngineerCatalogue(tableInfoCreated, columnInfosCreated); + cataEngineer.ExecuteForwardEngineering(out var cata, out var cataItems, out var extractionInformations); + + try { - - using (var con = _database.Server.GetConnection()) + Assert.AreEqual(TABLE_NAME, cata.Name); + Assert.AreEqual(2, cataItems.Length); + Assert.AreEqual(2, extractionInformations.Length); + + using (var con = _server.GetConnection()) { con.Open(); - _server.GetCommand("ALTER TABLE " + TABLE_NAME + " ADD Birthday datetime not null", con).ExecuteNonQuery(); + _server.GetCommand($"ALTER TABLE {TABLE_NAME} ADD Birthday datetime not null", con).ExecuteNonQuery(); } - - TableInfoSynchronizer synchronizer = new TableInfoSynchronizer(tableInfoCreated); + var synchronizer = new TableInfoSynchronizer(tableInfoCreated); if 
(acceptChanges) { //accept changes should result in a synchronized table Assert.AreEqual(true, synchronizer.Synchronize(new AcceptAllCheckNotifier())); - Assert.AreEqual(3, tableInfoCreated.ColumnInfos.Length);//should 3 now + Assert.AreEqual(3, tableInfoCreated.ColumnInfos.Length); //should 3 now + Assert.AreEqual(3, cata.CatalogueItems.Length); //should 3 now + Assert.AreEqual(3, cata.GetAllExtractionInformation(ExtractionCategory.Any).Length); //should 3 now + + Assert.AreEqual(1, + cata.GetAllExtractionInformation(ExtractionCategory.Any) + .Count(e => e.SelectSQL.Contains("Birthday"))); + Assert.AreEqual(1, cata.CatalogueItems.Count(ci => ci.Name.Contains("Birthday"))); } else { - var ex = Assert.Throws(() => synchronizer.Synchronize(new ThrowImmediatelyCheckNotifier())); + var ex = Assert.Throws(() => synchronizer.Synchronize(ThrowImmediatelyCheckNotifier.Quiet)); Assert.AreEqual("The following columns are missing from the TableInfo:Birthday", ex.Message); } } - - [Test] - [TestCase(true)] - [TestCase(false)] - public void SynchronizationTests_ColumnAddedWithCatalogue(bool acceptChanges) + finally { - ForwardEngineerCatalogue cataEngineer = new ForwardEngineerCatalogue(tableInfoCreated, columnInfosCreated); - cataEngineer.ExecuteForwardEngineering(out var cata, out var cataItems, out var extractionInformations); - - try - { - Assert.AreEqual(TABLE_NAME,cata.Name); - Assert.AreEqual(2, cataItems.Length); - Assert.AreEqual(2, extractionInformations.Length); - - using (var con = _server.GetConnection()) - { - con.Open(); - _server.GetCommand("ALTER TABLE " + TABLE_NAME + " ADD Birthday datetime not null", con).ExecuteNonQuery(); - } - - TableInfoSynchronizer synchronizer = new TableInfoSynchronizer(tableInfoCreated); - - if (acceptChanges) - { - //accept changes should result in a synchronized table - Assert.AreEqual(true, synchronizer.Synchronize(new AcceptAllCheckNotifier())); - Assert.AreEqual(3, tableInfoCreated.ColumnInfos.Length);//should 3 now - Assert.AreEqual(3, cata.CatalogueItems.Length);//should 3 now - Assert.AreEqual(3, cata.GetAllExtractionInformation(ExtractionCategory.Any).Length);//should 3 now - - Assert.AreEqual(1,cata.GetAllExtractionInformation(ExtractionCategory.Any).Count(e=>e.SelectSQL.Contains("Birthday"))); - Assert.AreEqual(1,cata.CatalogueItems.Count(ci => ci.Name.Contains("Birthday"))); - } - else - { - var ex = Assert.Throws(() => synchronizer.Synchronize(new ThrowImmediatelyCheckNotifier())); - Assert.AreEqual("The following columns are missing from the TableInfo:Birthday", ex.Message); - } - } - finally - { - cata.DeleteInDatabase(); - } + cata.DeleteInDatabase(); } + } - /// - /// RDMPDEV-1548 This test explores an issue in v3.1 RDMP where synchronization of a TableInfo would fail if there were other tables - /// in the database which contained brackets in the table name - /// - [Test] - public void Test_SynchronizeTable_BracketsInTableName() + /// + /// RDMPDEV-1548 This test explores an issue in v3.1 RDMP where synchronization of a TableInfo would fail if there were other tables + /// in the database which contained brackets in the table name + /// + [Test] + public void Test_SynchronizeTable_BracketsInTableName() + { + var db = _database; + + //FAnsi doesn't let you create tables with brackets in the names so we have to do it manually + using (var con = db.Server.GetConnection()) { - var db = _database; + con.Open(); + var cmd = db.Server.GetCommand("CREATE TABLE [BB (ff)] (A int not null)", con); + cmd.ExecuteNonQuery(); + } - //FAnsi doesn't let you 
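// ---- Illustrative sketch, not part of the patch ----
// These tests swap new ThrowImmediatelyCheckNotifier() for the shared
// ThrowImmediatelyCheckNotifier.Quiet instance. The minimal pattern is a reusable static
// field on a stateless type; the class below is hypothetical, not the RDMP implementation.
public class ThrowingNotifier
{
    // one reusable instance instead of a fresh allocation at every call site
    public static readonly ThrowingNotifier Quiet = new();

    public void OnCheckPerformed(string message, bool isError)
    {
        if (isError)
            throw new System.InvalidOperationException(message);
    }
}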
create tables with brackets in the names so we have to do it manually - using(var con = db.Server.GetConnection()) + var tbl = db.CreateTable("FF", + new DatabaseColumnRequest[] { - con.Open(); - var cmd = db.Server.GetCommand("CREATE TABLE [BB (ff)] (A int not null)",con); - cmd.ExecuteNonQuery(); - } - - var tbl = db.CreateTable("FF", - new DatabaseColumnRequest[] - { - new DatabaseColumnRequest("F",new DatabaseTypeRequest(typeof(int))) - }); + new("F", new DatabaseTypeRequest(typeof(int))) + }); - Import(tbl,out var ti,out _); + Import(tbl, out var ti, out _); - var s = new TableInfoSynchronizer(ti); - s.Synchronize(new ThrowImmediatelyCheckNotifier()); - } + var s = new TableInfoSynchronizer(ti); + s.Synchronize(ThrowImmediatelyCheckNotifier.Quiet); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/TableInfoTests.cs b/Rdmp.Core.Tests/Curation/Integration/TableInfoTests.cs index 9bcadaaba8..258e80eb3b 100644 --- a/Rdmp.Core.Tests/Curation/Integration/TableInfoTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/TableInfoTests.cs @@ -16,216 +16,217 @@ using Rdmp.Core.Curation.Data.EntityNaming; using Rdmp.Core.DataLoad.Triggers; using Rdmp.Core.DataLoad.Triggers.Implementations; -using ReusableLibraryCode.Checks; -using ReusableLibraryCode.DataAccess; +using Rdmp.Core.ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.DataAccess; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Integration +namespace Rdmp.Core.Tests.Curation.Integration; + +internal class TableInfoTests : DatabaseTests { - class TableInfoTests : DatabaseTests + [Test] + public void GetAllTableInfos_moreThan1_pass() { - [Test] - public void GetAllTableInfos_moreThan1_pass() - { - var tableInfo = new TableInfo(CatalogueRepository, "AMAGAD!!!"); - Assert.IsTrue(CatalogueRepository.GetAllObjects().Any()); - tableInfo.DeleteInDatabase(); - } + var tableInfo = new TableInfo(CatalogueRepository, "AMAGAD!!!"); + Assert.IsTrue(CatalogueRepository.GetAllObjects().Any()); + tableInfo.DeleteInDatabase(); + } - [Test] - public void CreateNewTableInfoInDatabase_valid_pass() - { - TableInfo table = new TableInfo(CatalogueRepository, "TestDB..TestTableName"); + [Test] + public void CreateNewTableInfoInDatabase_valid_pass() + { + var table = new TableInfo(CatalogueRepository, "TestDB..TestTableName"); - Assert.NotNull(table); + Assert.NotNull(table); - table.DeleteInDatabase(); + table.DeleteInDatabase(); - var ex = Assert.Throws(() => CatalogueRepository.GetObjectByID(table.ID)); - Assert.AreEqual(ex.Message, "Could not find TableInfo with ID " + table.ID); - } + var ex = Assert.Throws(() => CatalogueRepository.GetObjectByID(table.ID)); + Assert.AreEqual(ex.Message, $"Could not find TableInfo with ID {table.ID}"); + } - [Test] - public void update_changeAllProperties_pass() + [Test] + public void update_changeAllProperties_pass() + { + var table = new TableInfo(CatalogueRepository, "CHI_AMALG..SearchStuff") { - TableInfo table = new TableInfo(CatalogueRepository, "CHI_AMALG..SearchStuff") - { - Database = "CHI_AMALG", - Server = "Highly restricted", - Name = "Fishmongery!", - DatabaseType = DatabaseType.Oracle - }; + Database = "CHI_AMALG", + Server = "Highly restricted", + Name = "Fishmongery!", + DatabaseType = DatabaseType.Oracle + }; - table.SaveToDatabase(); + table.SaveToDatabase(); - TableInfo tableAfter = CatalogueRepository.GetObjectByID(table.ID); + var tableAfter = CatalogueRepository.GetObjectByID(table.ID); - Assert.IsTrue(tableAfter.Database == "CHI_AMALG"); - 
Assert.IsTrue(tableAfter.Server == "Highly restricted"); - Assert.IsTrue(tableAfter.Name == "Fishmongery!"); - Assert.IsTrue(tableAfter.DatabaseType == DatabaseType.Oracle); + Assert.IsTrue(tableAfter.Database == "CHI_AMALG"); + Assert.IsTrue(tableAfter.Server == "Highly restricted"); + Assert.IsTrue(tableAfter.Name == "Fishmongery!"); + Assert.IsTrue(tableAfter.DatabaseType == DatabaseType.Oracle); - tableAfter.DeleteInDatabase(); - } + tableAfter.DeleteInDatabase(); + } + [Test] + [TestCase("[TestDB]..[TestTableName]", "[TestDB]..[TestTableName].[ANOMyCol]")] + [TestCase("TestDB..TestTableName", "TestDB..TestTableName.ANOMyCol")] + public void CreateNewTableInfoInDatabase_Naming(string tableName, string columnName) + { + var table = new TableInfo(CatalogueRepository, tableName) + { + Database = "TestDB" + }; + table.SaveToDatabase(); - [Test] - [TestCase("[TestDB]..[TestTableName]", "[TestDB]..[TestTableName].[ANOMyCol]")] - [TestCase("TestDB..TestTableName", "TestDB..TestTableName.ANOMyCol")] - public void CreateNewTableInfoInDatabase_Naming(string tableName, string columnName) + var c = new ColumnInfo(CatalogueRepository, columnName, "varchar(100)", table) { - TableInfo table = new TableInfo(CatalogueRepository, tableName); - table.Database = "TestDB"; - table.SaveToDatabase(); - - ColumnInfo c = new ColumnInfo(CatalogueRepository, columnName, "varchar(100)", table); - c.ANOTable_ID = -100; - - try - { - Assert.AreEqual("ANOMyCol",c.GetRuntimeName()); - Assert.AreEqual("MyCol", c.GetRuntimeName(LoadStage.AdjustRaw)); - Assert.AreEqual("ANOMyCol", c.GetRuntimeName(LoadStage.PostLoad)); - - Assert.AreEqual("TestTableName", table.GetRuntimeName()); - Assert.AreEqual("TestTableName", table.GetRuntimeName(LoadBubble.Raw)); - Assert.AreEqual("TestDB_TestTableName_STAGING", table.GetRuntimeName(LoadBubble.Staging)); - - Assert.AreEqual("TestTableName_STAGING", table.GetRuntimeName(LoadBubble.Staging, new SuffixBasedNamer())); - Assert.AreEqual("TestDB_TestTableName_STAGING", table.GetRuntimeName(LoadBubble.Staging, new FixedStagingDatabaseNamer("TestDB"))); - - Assert.AreEqual("TestTableName", table.GetRuntimeName(LoadBubble.Live)); - - } - finally - { - c.DeleteInDatabase(); - table.DeleteInDatabase(); - } - } + ANOTable_ID = -100 + }; - [Test] - public void TestCreateTableInSchemaAndImportAsTableInfo() + try { - var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); + Assert.AreEqual("ANOMyCol", c.GetRuntimeName()); + Assert.AreEqual("MyCol", c.GetRuntimeName(LoadStage.AdjustRaw)); + Assert.AreEqual("ANOMyCol", c.GetRuntimeName(LoadStage.PostLoad)); - using (var con = db.Server.GetConnection()) - { - con.Open(); + Assert.AreEqual("TestTableName", table.GetRuntimeName()); + Assert.AreEqual("TestTableName", table.GetRuntimeName(LoadBubble.Raw)); + Assert.AreEqual("TestDB_TestTableName_STAGING", table.GetRuntimeName(LoadBubble.Staging)); - db.Server.GetCommand("CREATE SCHEMA Omg", con).ExecuteNonQuery(); + Assert.AreEqual("TestTableName_STAGING", table.GetRuntimeName(LoadBubble.Staging, new SuffixBasedNamer())); + Assert.AreEqual("TestDB_TestTableName_STAGING", + table.GetRuntimeName(LoadBubble.Staging, new FixedStagingDatabaseNamer("TestDB"))); - var tbl = db.CreateTable("Fish", new [] {new DatabaseColumnRequest("MyCol", "int"){IsPrimaryKey = true}},schema: "Omg"); + Assert.AreEqual("TestTableName", table.GetRuntimeName(LoadBubble.Live)); + } + finally + { + c.DeleteInDatabase(); + table.DeleteInDatabase(); + } + } - Assert.AreEqual("Fish", tbl.GetRuntimeName()); - Assert.AreEqual( 
"Omg", tbl.Schema); - Assert.IsTrue(tbl.GetFullyQualifiedName().EndsWith("[Omg].[Fish]")); + [Test] + public void TestCreateTableInSchemaAndImportAsTableInfo() + { + var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); - Assert.IsTrue(tbl.Exists()); + using var con = db.Server.GetConnection(); + con.Open(); - Import(tbl,out var ti,out var cols); + db.Server.GetCommand("CREATE SCHEMA Omg", con).ExecuteNonQuery(); - Assert.AreEqual("Omg",ti.Schema); - var tbl2 = ti.Discover(DataAccessContext.InternalDataProcessing); - Assert.AreEqual("Omg",tbl2.Schema); - Assert.IsTrue(tbl2.Exists()); + var tbl = db.CreateTable("Fish", new[] { new DatabaseColumnRequest("MyCol", "int") { IsPrimaryKey = true } }, + "Omg"); - Assert.IsTrue(ti.Name.EndsWith("[Omg].[Fish]")); + Assert.AreEqual("Fish", tbl.GetRuntimeName()); + Assert.AreEqual("Omg", tbl.Schema); + Assert.IsTrue(tbl.GetFullyQualifiedName().EndsWith("[Omg].[Fish]")); - Assert.IsTrue(ti.GetFullyQualifiedName().EndsWith("[Omg].[Fish]")); + Assert.IsTrue(tbl.Exists()); - var c = cols.Single(); + Import(tbl, out var ti, out var cols); - Assert.AreEqual("MyCol",c.GetRuntimeName()); - StringAssert.Contains("[Omg].[Fish]",c.GetFullyQualifiedName()); + Assert.AreEqual("Omg", ti.Schema); + var tbl2 = ti.Discover(DataAccessContext.InternalDataProcessing); + Assert.AreEqual("Omg", tbl2.Schema); + Assert.IsTrue(tbl2.Exists()); - //should be primary key - Assert.IsTrue(c.IsPrimaryKey); + Assert.IsTrue(ti.Name.EndsWith("[Omg].[Fish]")); - var triggerFactory = new TriggerImplementerFactory(DatabaseType.MicrosoftSQLServer); - var impl = triggerFactory.Create(tbl); - - Assert.AreEqual(TriggerStatus.Missing,impl.GetTriggerStatus()); + Assert.IsTrue(ti.GetFullyQualifiedName().EndsWith("[Omg].[Fish]")); - impl.CreateTrigger(new ThrowImmediatelyCheckNotifier()); + var c = cols.Single(); - Assert.AreEqual(TriggerStatus.Enabled, impl.GetTriggerStatus()); + Assert.AreEqual("MyCol", c.GetRuntimeName()); + StringAssert.Contains("[Omg].[Fish]", c.GetFullyQualifiedName()); - Assert.IsTrue( impl.CheckUpdateTriggerIsEnabledAndHasExpectedBody()); + //should be primary key + Assert.IsTrue(c.IsPrimaryKey); - //should be synced - var sync = new TableInfoSynchronizer(ti); - sync.Synchronize(new AcceptAllCheckNotifier()); + var triggerFactory = new TriggerImplementerFactory(DatabaseType.MicrosoftSQLServer); + var impl = triggerFactory.Create(tbl); - //Test importing the _Legacy table valued function that should be created in the Omg schema and test synching that too. 
- var tvf = ti.Discover(DataAccessContext.InternalDataProcessing).Database.ExpectTableValuedFunction("Fish_Legacy", "Omg"); - Assert.IsTrue(tvf.Exists()); + Assert.AreEqual(TriggerStatus.Missing, impl.GetTriggerStatus()); - var importerTvf = new TableValuedFunctionImporter(CatalogueRepository, tvf); - importerTvf.DoImport(out var tvfTi,out var tvfCols); + impl.CreateTrigger(ThrowImmediatelyCheckNotifier.Quiet); - Assert.AreEqual("Omg",tvfTi.Schema); + Assert.AreEqual(TriggerStatus.Enabled, impl.GetTriggerStatus()); - var syncTvf = new TableInfoSynchronizer(tvfTi); - syncTvf.Synchronize(new ThrowImmediatelyCheckNotifier()); + Assert.IsTrue(impl.CheckUpdateTriggerIsEnabledAndHasExpectedBody()); - StringAssert.EndsWith("[Omg].Fish_Legacy(@index) AS Fish_Legacy",tvfTi.Name); - } - } + //should be synced + var sync = new TableInfoSynchronizer(ti); + sync.Synchronize(new AcceptAllCheckNotifier()); - [TestCaseSource(typeof(All),nameof(All.DatabaseTypes))] - public void TestView(DatabaseType dbType) - { - var db = GetCleanedServer(dbType); - var syntax = db.Server.GetQuerySyntaxHelper(); + //Test importing the _Legacy table valued function that should be created in the Omg schema and test synching that too. + var tvf = ti.Discover(DataAccessContext.InternalDataProcessing).Database + .ExpectTableValuedFunction("Fish_Legacy", "Omg"); + Assert.IsTrue(tvf.Exists()); + + var importerTvf = new TableValuedFunctionImporter(CatalogueRepository, tvf); + importerTvf.DoImport(out var tvfTi, out var tvfCols); + + Assert.AreEqual("Omg", tvfTi.Schema); + + var syncTvf = new TableInfoSynchronizer(tvfTi); + syncTvf.Synchronize(ThrowImmediatelyCheckNotifier.Quiet); + + StringAssert.EndsWith("[Omg].Fish_Legacy(@index) AS Fish_Legacy", tvfTi.Name); + } + + [TestCaseSource(typeof(All), nameof(All.DatabaseTypes))] + public void TestView(DatabaseType dbType) + { + var db = GetCleanedServer(dbType); + var syntax = db.Server.GetQuerySyntaxHelper(); - DataTable dt = new DataTable(); - dt.Columns.Add("FF"); + var dt = new DataTable(); + dt.Columns.Add("FF"); - var tbl = db.CreateTable("MyTable",dt); - Import(tbl,out var tblInfo,out _); - - Assert.IsTrue(tblInfo.Discover(DataAccessContext.InternalDataProcessing).Exists()); - Assert.AreEqual(TableType.Table,tblInfo.Discover(DataAccessContext.InternalDataProcessing).TableType); + var tbl = db.CreateTable("MyTable", dt); + Import(tbl, out var tblInfo, out _); - var viewName = "MyView"; + Assert.IsTrue(tblInfo.Discover(DataAccessContext.InternalDataProcessing).Exists()); + Assert.AreEqual(TableType.Table, tblInfo.Discover(DataAccessContext.InternalDataProcessing).TableType); - //oracle likes to create stuff under your user account not the database your actually using! - if(dbType == DatabaseType.Oracle) - viewName = syntax.EnsureFullyQualified(tbl.Database.GetRuntimeName(), null, "MyView"); - - //postgres hates upper case tables (unless they are wrapped) - if (dbType == DatabaseType.PostgreSql) - viewName = syntax.EnsureWrapped(viewName); + var viewName = "MyView"; - var sql = string.Format(@"CREATE VIEW {0} AS + //oracle likes to create stuff under your user account not the database your actually using! 
+ if (dbType == DatabaseType.Oracle) + viewName = syntax.EnsureFullyQualified(tbl.Database.GetRuntimeName(), null, "MyView"); + + //postgres hates upper case tables (unless they are wrapped) + if (dbType == DatabaseType.PostgreSql) + viewName = syntax.EnsureWrapped(viewName); + + var sql = string.Format(@"CREATE VIEW {0} AS SELECT {2} FROM {1}", -viewName, - tbl.GetFullyQualifiedName(), -syntax.EnsureWrapped("FF")); - - using(var con = tbl.Database.Server.GetConnection()) - { - con.Open(); - - var cmd = tbl.GetCommand(sql,con); - cmd.ExecuteNonQuery(); - } - - var view = tbl.Database.ExpectTable("MyView",null,TableType.View); - Import(view,out var viewInfo,out _); - - var sync = new TableInfoSynchronizer(viewInfo); - sync.Synchronize(new ThrowImmediatelyCheckNotifier()); - - Assert.IsTrue(viewInfo.Discover(DataAccessContext.InternalDataProcessing).Exists()); - Assert.AreEqual(TableType.View,viewInfo.Discover(DataAccessContext.InternalDataProcessing).TableType); - - view.Drop(); - Assert.IsFalse(view.Exists()); - + viewName, + tbl.GetFullyQualifiedName(), + syntax.EnsureWrapped("FF")); + + using (var con = tbl.Database.Server.GetConnection()) + { + con.Open(); + + var cmd = tbl.GetCommand(sql, con); + cmd.ExecuteNonQuery(); } + + var view = tbl.Database.ExpectTable("MyView", null, TableType.View); + Import(view, out var viewInfo, out _); + + var sync = new TableInfoSynchronizer(viewInfo); + sync.Synchronize(ThrowImmediatelyCheckNotifier.Quiet); + + Assert.IsTrue(viewInfo.Discover(DataAccessContext.InternalDataProcessing).Exists()); + Assert.AreEqual(TableType.View, viewInfo.Discover(DataAccessContext.InternalDataProcessing).TableType); + + view.Drop(); + Assert.IsFalse(view.Exists()); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/TableNamingConventionTests.cs b/Rdmp.Core.Tests/Curation/Integration/TableNamingConventionTests.cs index c0995dd0f3..7f1cf105f4 100644 --- a/Rdmp.Core.Tests/Curation/Integration/TableNamingConventionTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/TableNamingConventionTests.cs @@ -11,55 +11,52 @@ using Rdmp.Core.Curation.Data.EntityNaming; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Integration +namespace Rdmp.Core.Tests.Curation.Integration; + +internal class TableNamingConventionTests : DatabaseTests { - class TableNamingConventionTests : DatabaseTests + [Test] + public void GetAllTableInfos_moreThan1_pass() { - [Test] - public void GetAllTableInfos_moreThan1_pass() - { - var ti = new TableInfo(CatalogueRepository, "AMAGAD!!!"); - Assert.IsTrue(CatalogueRepository.GetAllObjects().Any()); - ti.DeleteInDatabase(); - } + var ti = new TableInfo(CatalogueRepository, "AMAGAD!!!"); + Assert.IsTrue(CatalogueRepository.GetAllObjects().Any()); + ti.DeleteInDatabase(); + } - [Test] - public void update_changeAllProperties_pass() + [Test] + public void update_changeAllProperties_pass() + { + var tableInfo = new TableInfo(CatalogueRepository, "CHI_AMALG..SearchStuff") { - var tableInfo = new TableInfo(CatalogueRepository, "CHI_AMALG..SearchStuff") - { - Database = "CHI_AMALG", - Server = "Highly restricted", - Name = "Fishmongery!", - DatabaseType = DatabaseType.Oracle - }; + Database = "CHI_AMALG", + Server = "Highly restricted", + Name = "Fishmongery!", + DatabaseType = DatabaseType.Oracle + }; - tableInfo.SaveToDatabase(); + tableInfo.SaveToDatabase(); - var tableInfoAfter = CatalogueRepository.GetObjectByID(tableInfo.ID); + var tableInfoAfter = CatalogueRepository.GetObjectByID(tableInfo.ID); - 
Assert.IsTrue(tableInfoAfter.Database == "CHI_AMALG"); - Assert.IsTrue(tableInfoAfter.Server == "Highly restricted"); - Assert.IsTrue(tableInfoAfter.Name == "Fishmongery!"); - Assert.IsTrue(tableInfoAfter.DatabaseType == DatabaseType.Oracle); + Assert.IsTrue(tableInfoAfter.Database == "CHI_AMALG"); + Assert.IsTrue(tableInfoAfter.Server == "Highly restricted"); + Assert.IsTrue(tableInfoAfter.Name == "Fishmongery!"); + Assert.IsTrue(tableInfoAfter.DatabaseType == DatabaseType.Oracle); - tableInfoAfter.DeleteInDatabase(); - - } - - [Test] - public void SuffixBasedTableNamingConventionHelper() - { - const string baseTableName = "MyTable"; - var namingScheme = new SuffixBasedNamer(); + tableInfoAfter.DeleteInDatabase(); + } - var stagingTable = namingScheme.GetName(baseTableName, LoadBubble.Staging); - Assert.AreEqual("MyTable_STAGING", stagingTable); + [Test] + public void SuffixBasedTableNamingConventionHelper() + { + const string baseTableName = "MyTable"; + var namingScheme = new SuffixBasedNamer(); - var newLookupTable = namingScheme.GetName(baseTableName, LoadBubble.Live); - Assert.AreEqual("MyTable", newLookupTable); - } + var stagingTable = namingScheme.GetName(baseTableName, LoadBubble.Staging); + Assert.AreEqual("MyTable_STAGING", stagingTable); + var newLookupTable = namingScheme.GetName(baseTableName, LoadBubble.Live); + Assert.AreEqual("MyTable", newLookupTable); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/TableValuedFunctionTests/AggregationTests.cs b/Rdmp.Core.Tests/Curation/Integration/TableValuedFunctionTests/AggregationTests.cs index a60826b8e8..2ffb6f0238 100644 --- a/Rdmp.Core.Tests/Curation/Integration/TableValuedFunctionTests/AggregationTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/TableValuedFunctionTests/AggregationTests.cs @@ -12,58 +12,59 @@ using Rdmp.Core.Repositories; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Integration.TableValuedFunctionTests +namespace Rdmp.Core.Tests.Curation.Integration.TableValuedFunctionTests; + +public class AggregationTests : DatabaseTests { - public class AggregationTests :DatabaseTests + private TestableTableValuedFunction _function; + + private void CreateFunction(ICatalogueRepository repo) { - private TestableTableValuedFunction _function; + _function = new TestableTableValuedFunction(); + _function.Create(GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer), repo); + } - private void CreateFunction(ICatalogueRepository repo) - { - _function = new TestableTableValuedFunction(); - _function.Create(GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer), repo); - } + [Test] + public void GenerateAggregateManuallyTest() + { + CreateFunction(CatalogueRepository); - [Test] - public void GenerateAggregateManuallyTest() - { - CreateFunction(CatalogueRepository); + //do a count * on the query builder + var queryBuilder = new AggregateBuilder("", "count(*)", null, new[] { _function.TableInfoCreated }); - //do a count * on the query builder - AggregateBuilder queryBuilder = new AggregateBuilder("", "count(*)", null,new[] { _function.TableInfoCreated }); + Assert.IsTrue(queryBuilder.SQL.Contains(@"SELECT")); + Assert.IsTrue(queryBuilder.SQL.Contains(@"count(*)")); - Assert.IsTrue(queryBuilder.SQL.Contains(@"SELECT")); - Assert.IsTrue(queryBuilder.SQL.Contains(@"count(*)")); + Assert.IsTrue(queryBuilder.SQL.Contains(@"DECLARE @name AS varchar(50);")); + Assert.IsTrue(queryBuilder.SQL.Contains(@"SET @name='fish';")); - Assert.IsTrue(queryBuilder.SQL.Contains(@"DECLARE @name AS 
varchar(50);")); - Assert.IsTrue(queryBuilder.SQL.Contains(@"SET @name='fish';")); + Assert.IsTrue( + queryBuilder.SQL.Contains("..MyAwesomeFunction(@startNumber,@stopNumber,@name) AS MyAwesomeFunction")); - Assert.IsTrue(queryBuilder.SQL.Contains("..MyAwesomeFunction(@startNumber,@stopNumber,@name) AS MyAwesomeFunction")); + Console.WriteLine(queryBuilder.SQL); + } - Console.WriteLine(queryBuilder.SQL); - } - - [TestCase(false)] - [TestCase(true)] - public void GenerateAggregateViaAggregateConfigurationTest(bool memoryRepo) + [TestCase(false)] + [TestCase(true)] + public void GenerateAggregateViaAggregateConfigurationTest(bool memoryRepo) + { + var repo = memoryRepo ? (ICatalogueRepository)new MemoryCatalogueRepository() : CatalogueRepository; + CreateFunction(repo); + + var agg = new AggregateConfiguration(repo, _function.Cata, "MyExcitingAggregate"); + + try { - ICatalogueRepository repo = memoryRepo ? (ICatalogueRepository) new MemoryCatalogueRepository() : CatalogueRepository; - CreateFunction(repo); + agg.HavingSQL = "count(*)>1"; + agg.SaveToDatabase(); - var agg = new AggregateConfiguration(repo, _function.Cata, "MyExcitingAggregate"); - - try - { - agg.HavingSQL = "count(*)>1"; - agg.SaveToDatabase(); + var aggregateForcedJoin = repo.AggregateForcedJoinManager; + aggregateForcedJoin.CreateLinkBetween(agg, _function.TableInfoCreated); - var aggregateForcedJoin = repo.AggregateForcedJoinManager; - aggregateForcedJoin.CreateLinkBetween(agg, _function.TableInfoCreated); + var queryBuilder = agg.GetQueryBuilder(); - AggregateBuilder queryBuilder = agg.GetQueryBuilder(); - - Assert.AreEqual( - @"DECLARE @startNumber AS int; + Assert.AreEqual( + $@"DECLARE @startNumber AS int; SET @startNumber=5; DECLARE @stopNumber AS int; SET @stopNumber=10; @@ -73,54 +74,55 @@ public void GenerateAggregateViaAggregateConfigurationTest(bool memoryRepo) SELECT count(*) AS MyCount FROM -[" + TestDatabaseNames.Prefix +@"ScratchArea]..MyAwesomeFunction(@startNumber,@stopNumber,@name) AS MyAwesomeFunction +[{TestDatabaseNames.Prefix}ScratchArea]..MyAwesomeFunction(@startNumber,@stopNumber,@name) AS MyAwesomeFunction HAVING count(*)>1", queryBuilder.SQL); - - } - finally - { - agg.DeleteInDatabase(); - } } - - [Test] - public void GenerateAggregateUsingOverridenParametersTest() + finally { - CreateFunction(CatalogueRepository); + agg.DeleteInDatabase(); + } + } + + [Test] + public void GenerateAggregateUsingOverridenParametersTest() + { + CreateFunction(CatalogueRepository); - var agg = new AggregateConfiguration(CatalogueRepository, _function.Cata, "MyExcitingAggregate"); + var agg = new AggregateConfiguration(CatalogueRepository, _function.Cata, "MyExcitingAggregate"); - try + try + { + var param = new AnyTableSqlParameter(CatalogueRepository, agg, "DECLARE @name AS varchar(50);") { - var param = new AnyTableSqlParameter(CatalogueRepository, agg, "DECLARE @name AS varchar(50);"); - param.Value = "'lobster'"; - param.SaveToDatabase(); + Value = "'lobster'" + }; + param.SaveToDatabase(); - var aggregateForcedJoin = new AggregateForcedJoin(CatalogueTableRepository); - aggregateForcedJoin.CreateLinkBetween(agg, _function.TableInfoCreated); + var aggregateForcedJoin = new AggregateForcedJoin(CatalogueTableRepository); + aggregateForcedJoin.CreateLinkBetween(agg, _function.TableInfoCreated); - //do a count * on the query builder - AggregateBuilder queryBuilder = agg.GetQueryBuilder(); + //do a count * on the query builder + var queryBuilder = agg.GetQueryBuilder(); - 
Assert.IsTrue(queryBuilder.SQL.Contains(@"SELECT")); - Assert.IsTrue(queryBuilder.SQL.Contains(@"count(*)")); + Assert.IsTrue(queryBuilder.SQL.Contains(@"SELECT")); + Assert.IsTrue(queryBuilder.SQL.Contains(@"count(*)")); - //should have this version of things - Assert.IsTrue(queryBuilder.SQL.Contains(@"DECLARE @name AS varchar(50);")); - Assert.IsTrue(queryBuilder.SQL.Contains(@"SET @name='lobster';")); + //should have this version of things + Assert.IsTrue(queryBuilder.SQL.Contains(@"DECLARE @name AS varchar(50);")); + Assert.IsTrue(queryBuilder.SQL.Contains(@"SET @name='lobster';")); - //isntead of this verison of things - Assert.IsFalse(queryBuilder.SQL.Contains(@"SET @name='fish';")); + //instead of this version of things + Assert.IsFalse(queryBuilder.SQL.Contains(@"SET @name='fish';")); - Assert.IsTrue(queryBuilder.SQL.Contains("..MyAwesomeFunction(@startNumber,@stopNumber,@name) AS MyAwesomeFunction")); + Assert.IsTrue( + queryBuilder.SQL.Contains("..MyAwesomeFunction(@startNumber,@stopNumber,@name) AS MyAwesomeFunction")); - Console.WriteLine(queryBuilder.SQL); - } - finally - { - agg.DeleteInDatabase(); - } + Console.WriteLine(queryBuilder.SQL); + } + finally + { + agg.DeleteInDatabase(); } } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/TableValuedFunctionTests/ImportAndTestTests.cs b/Rdmp.Core.Tests/Curation/Integration/TableValuedFunctionTests/ImportAndTestTests.cs index 47d6cb649d..2c902d3fcd 100644 --- a/Rdmp.Core.Tests/Curation/Integration/TableValuedFunctionTests/ImportAndTestTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/TableValuedFunctionTests/ImportAndTestTests.cs @@ -7,231 +7,227 @@ using System; using System.Linq; using FAnsi.Discovery; -using MapsDirectlyToDatabaseTable; -using MapsDirectlyToDatabaseTable.Revertable; using NUnit.Framework; using Rdmp.Core.Curation; using Rdmp.Core.Curation.Data.Cohort; -using ReusableLibraryCode.Checks; +using Rdmp.Core.MapsDirectlyToDatabaseTable; +using Rdmp.Core.MapsDirectlyToDatabaseTable.Revertable; +using Rdmp.Core.ReusableLibraryCode.Checks; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Integration.TableValuedFunctionTests +namespace Rdmp.Core.Tests.Curation.Integration.TableValuedFunctionTests; + +public class ImportAndTestTests : DatabaseTests { - public class ImportAndTestTests : DatabaseTests + private TestableTableValuedFunction _function = new(); + private DiscoveredDatabase _database; + + [SetUp] + protected override void SetUp() { - private TestableTableValuedFunction _function = new TestableTableValuedFunction(); - private DiscoveredDatabase _database; + base.SetUp(); - [SetUp] - protected override void SetUp() - { - base.SetUp(); + _database = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer); + _function.Create(_database, CatalogueRepository); + } - _database = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer); - _function.Create(_database, CatalogueRepository); - } + [Test] + public void FunctionWorks() + { + var server = _database.Server; + using var con = server.GetConnection(); + con.Open(); + var r = server.GetCommand("Select * from dbo.MyAwesomeFunction(5,10,'Fish')", con).ExecuteReader(); - [Test] - public void FunctionWorks() - { - var server = _database.Server; - using (var con = server.GetConnection()) - { - con.Open(); - var r = server.GetCommand("Select * from dbo.MyAwesomeFunction(5,10,'Fish')",con).ExecuteReader(); + r.Read(); + Assert.AreEqual(5, r["Number"]); + Assert.AreEqual("Fish", r["Name"]); - r.Read(); - Assert.AreEqual(5,
r["Number"]); - Assert.AreEqual("Fish", r["Name"]); + r.Read(); + Assert.AreEqual(6, r["Number"]); + Assert.AreEqual("Fish", r["Name"]); - r.Read(); - Assert.AreEqual(6, r["Number"]); - Assert.AreEqual("Fish", r["Name"]); + r.Read(); + Assert.AreEqual(7, r["Number"]); + Assert.AreEqual("Fish", r["Name"]); - r.Read(); - Assert.AreEqual(7, r["Number"]); - Assert.AreEqual("Fish", r["Name"]); + r.Read(); + Assert.AreEqual(8, r["Number"]); + Assert.AreEqual("Fish", r["Name"]); - r.Read(); - Assert.AreEqual(8, r["Number"]); - Assert.AreEqual("Fish", r["Name"]); + r.Read(); + Assert.AreEqual(9, r["Number"]); + Assert.AreEqual("Fish", r["Name"]); - r.Read(); - Assert.AreEqual(9, r["Number"]); - Assert.AreEqual("Fish", r["Name"]); + Assert.IsFalse(r.Read()); + } - Assert.IsFalse(r.Read()); - } - } - - [Test] - public void ImportFunctionIntoCatalogue() - { - Assert.AreEqual(2, _function.ColumnInfosCreated.Length); - Assert.IsTrue(_function.TableInfoCreated.Name.Contains("MyAwesomeFunction(@startNumber,@stopNumber,@name)")); - } + [Test] + public void ImportFunctionIntoCatalogue() + { + Assert.AreEqual(2, _function.ColumnInfosCreated.Length); + Assert.IsTrue(_function.TableInfoCreated.Name.Contains("MyAwesomeFunction(@startNumber,@stopNumber,@name)")); + } - [Test] - public void TestDiscovery() - { - var db = _database; - - using (var con = db.Server.BeginNewTransactedConnection()) - { - - //drop function - outside of transaction - db.Server.GetCommand("drop function MyAwesomeFunction", con).ExecuteNonQuery(); + [Test] + public void TestDiscovery() + { + var db = _database; - //create it within the scope of the transaction - var cmd = db.Server.GetCommand(_function.CreateFunctionSQL.Substring(_function.CreateFunctionSQL.IndexOf("GO") + 3), con); - cmd.ExecuteNonQuery(); + using var con = db.Server.BeginNewTransactedConnection(); + //drop function - outside of transaction + db.Server.GetCommand("drop function MyAwesomeFunction", con).ExecuteNonQuery(); - Assert.IsTrue(db.DiscoverTableValuedFunctions(con.ManagedTransaction).Any(tbv => tbv.GetRuntimeName().Equals("MyAwesomeFunction"))); - Assert.IsTrue(db.ExpectTableValuedFunction("MyAwesomeFunction").Exists(con.ManagedTransaction)); + //create it within the scope of the transaction + var cmd = db.Server.GetCommand( + _function.CreateFunctionSQL[(_function.CreateFunctionSQL.IndexOf("GO", StringComparison.Ordinal) + 3)..], + con); + cmd.ExecuteNonQuery(); - var cols = db.ExpectTableValuedFunction("MyAwesomeFunction").DiscoverColumns(con.ManagedTransaction); + Assert.IsTrue(db.DiscoverTableValuedFunctions(con.ManagedTransaction) + .Any(tbv => tbv.GetRuntimeName().Equals("MyAwesomeFunction"))); + Assert.IsTrue(db.ExpectTableValuedFunction("MyAwesomeFunction").Exists(con.ManagedTransaction)); - Assert.AreEqual(2, cols.Length); - Assert.IsTrue(cols[0].GetFullyQualifiedName().Contains("MyAwesomeFunction.[Number]")); - Assert.IsTrue(cols[1].GetFullyQualifiedName().Contains("MyAwesomeFunction.[Name]")); + var cols = db.ExpectTableValuedFunction("MyAwesomeFunction").DiscoverColumns(con.ManagedTransaction); - Assert.AreEqual("int", cols[0].DataType.SQLType); - Assert.AreEqual("varchar(50)", cols[1].DataType.SQLType); + Assert.AreEqual(2, cols.Length); + Assert.IsTrue(cols[0].GetFullyQualifiedName().Contains("MyAwesomeFunction.[Number]")); + Assert.IsTrue(cols[1].GetFullyQualifiedName().Contains("MyAwesomeFunction.[Name]")); - con.ManagedTransaction.CommitAndCloseConnection(); - } - } + Assert.AreEqual("int", cols[0].DataType.SQLType); + 
Assert.AreEqual("varchar(50)", cols[1].DataType.SQLType); - [Test] - public void Synchronization_ExtraParameter() - { - string expectedMessage = - "MyAwesomeFunction is a Table Valued Function, in the Catalogue it has a parameter called @fish but this parameter no longer appears in the underlying database"; + con.ManagedTransaction.CommitAndCloseConnection(); + } - var excessParameter = new AnyTableSqlParameter(CatalogueRepository, _function.TableInfoCreated, "DECLARE @fish as int"); - var checker = new ToMemoryCheckNotifier(); - _function.TableInfoCreated.Check(checker); - - Assert.IsTrue(checker.Messages.Any(m=>m.Result == CheckResult.Fail - && - m.Message.Contains(expectedMessage))); + [Test] + public void Synchronization_ExtraParameter() + { + var expectedMessage = + "MyAwesomeFunction is a Table Valued Function, in the Catalogue it has a parameter called @fish but this parameter no longer appears in the underlying database"; - var syncer = new TableInfoSynchronizer(_function.TableInfoCreated); + var excessParameter = + new AnyTableSqlParameter(CatalogueRepository, _function.TableInfoCreated, "DECLARE @fish as int"); + var checker = new ToMemoryCheckNotifier(); + _function.TableInfoCreated.Check(checker); - var ex = Assert.Throws(()=>syncer.Synchronize(new ThrowImmediatelyCheckNotifier())); - Assert.IsTrue(ex.Message.Contains(expectedMessage)); + Assert.IsTrue(checker.Messages.Any(m => m.Result == CheckResult.Fail + && + m.Message.Contains(expectedMessage))); - //no changes yet - Assert.IsTrue(excessParameter.HasLocalChanges().Evaluation == ChangeDescription.NoChanges); + var syncer = new TableInfoSynchronizer(_function.TableInfoCreated); - //sync should have proposed to drop the excess parameter (see above), accept the change - Assert.IsTrue(syncer.Synchronize(new AcceptAllCheckNotifier())); + var ex = Assert.Throws(() => syncer.Synchronize(ThrowImmediatelyCheckNotifier.Quiet)); + Assert.IsTrue(ex.Message.Contains(expectedMessage)); - //now parameter shouldnt be there - Assert.IsTrue(excessParameter.HasLocalChanges().Evaluation == ChangeDescription.DatabaseCopyWasDeleted); + //no changes yet + Assert.IsTrue(excessParameter.HasLocalChanges().Evaluation == ChangeDescription.NoChanges); - } + //sync should have proposed to drop the excess parameter (see above), accept the change + Assert.IsTrue(syncer.Synchronize(new AcceptAllCheckNotifier())); - [Test] - public void Synchronization_MissingParameter() - { - string expectedMessage = "MyAwesomeFunction is a Table Valued Function but it does not have a record of the parameter @startNumber which appears in the underlying database"; + //now parameter shouldnt be there + Assert.IsTrue(excessParameter.HasLocalChanges().Evaluation == ChangeDescription.DatabaseCopyWasDeleted); + } - AnyTableSqlParameter parameter = (AnyTableSqlParameter)_function.TableInfoCreated.GetAllParameters().Single(p => p.ParameterName.Equals("@startNumber")); - parameter.DeleteInDatabase(); + [Test] + public void Synchronization_MissingParameter() + { + var expectedMessage = + "MyAwesomeFunction is a Table Valued Function but it does not have a record of the parameter @startNumber which appears in the underlying database"; - var syncer = new TableInfoSynchronizer(_function.TableInfoCreated); + var parameter = (AnyTableSqlParameter)_function.TableInfoCreated.GetAllParameters() + .Single(p => p.ParameterName.Equals("@startNumber")); + parameter.DeleteInDatabase(); - var ex = Assert.Throws(() => syncer.Synchronize(new ThrowImmediatelyCheckNotifier())); - 
Assert.IsTrue(ex.Message.Contains(expectedMessage)); + var syncer = new TableInfoSynchronizer(_function.TableInfoCreated); - //no parameter called @startNumber (because we deleted it right!) - Assert.IsFalse(_function.TableInfoCreated.GetAllParameters().Any(p => p.ParameterName.Equals("@startNumber"))); + var ex = Assert.Throws(() => syncer.Synchronize(ThrowImmediatelyCheckNotifier.Quiet)); + Assert.IsTrue(ex.Message.Contains(expectedMessage)); - //sync should have proposed to create the missing parameter (see above), accept the change - Assert.IsTrue(syncer.Synchronize(new AcceptAllCheckNotifier())); + //no parameter called @startNumber (because we deleted it right!) + Assert.IsFalse(_function.TableInfoCreated.GetAllParameters().Any(p => p.ParameterName.Equals("@startNumber"))); - //now parameter should have reappeared due to accepthing change - Assert.IsTrue(_function.TableInfoCreated.GetAllParameters().Any(p => p.ParameterName.Equals("@startNumber"))); - - } + //sync should have proposed to create the missing parameter (see above), accept the change + Assert.IsTrue(syncer.Synchronize(new AcceptAllCheckNotifier())); - [Test] - public void Synchronization_ParameterDefinitionChanged() - { - string expectedMessage = - "Parameter @startNumber is declared as 'DECLARE @startNumber AS int;' but in the Catalogue it appears as 'DECLARE @startNumber AS datetime;'"; + //now parameter should have reappeared due to accepthing change + Assert.IsTrue(_function.TableInfoCreated.GetAllParameters().Any(p => p.ParameterName.Equals("@startNumber"))); + } - AnyTableSqlParameter parameter = (AnyTableSqlParameter)_function.TableInfoCreated.GetAllParameters().Single(p => p.ParameterName.Equals("@startNumber")); - parameter.ParameterSQL = "DECLARE @startNumber AS datetime;"; - parameter.SaveToDatabase(); + [Test] + public void Synchronization_ParameterDefinitionChanged() + { + var expectedMessage = + "Parameter @startNumber is declared as 'DECLARE @startNumber AS int;' but in the Catalogue it appears as 'DECLARE @startNumber AS datetime;'"; - var syncer = new TableInfoSynchronizer(_function.TableInfoCreated); + var parameter = (AnyTableSqlParameter)_function.TableInfoCreated.GetAllParameters() + .Single(p => p.ParameterName.Equals("@startNumber")); + parameter.ParameterSQL = "DECLARE @startNumber AS datetime;"; + parameter.SaveToDatabase(); - var ex = Assert.Throws(() => syncer.Synchronize(new ThrowImmediatelyCheckNotifier())); - StringAssert.Contains(expectedMessage,ex.Message); + var syncer = new TableInfoSynchronizer(_function.TableInfoCreated); - //no changes should yet have taken place since we didn't accept it yet - Assert.IsTrue(parameter.HasLocalChanges().Evaluation == ChangeDescription.NoChanges); + var ex = Assert.Throws(() => syncer.Synchronize(ThrowImmediatelyCheckNotifier.Quiet)); + StringAssert.Contains(expectedMessage, ex?.Message); - //sync should have proposed to adjusting the datatype - Assert.IsTrue(syncer.Synchronize(new AcceptAllCheckNotifier())); + //no changes should yet have taken place since we didn't accept it yet + Assert.IsTrue(parameter.HasLocalChanges().Evaluation == ChangeDescription.NoChanges); - if(CatalogueRepository is not TableRepository) - { - // with a Yaml repository there is only one copy of the object so no need - // to check for differences in db - return; - } + //sync should have proposed to adjusting the datatype + Assert.IsTrue(syncer.Synchronize(new AcceptAllCheckNotifier())); - //now parameter should have the correct datatype - 
Assert.IsTrue(parameter.HasLocalChanges().Evaluation == ChangeDescription.DatabaseCopyDifferent); - var diff = parameter.HasLocalChanges().Differences.Single(); + if (CatalogueRepository is not TableRepository) + // with a Yaml repository there is only one copy of the object so no need + // to check for differences in db + return; - Assert.AreEqual("DECLARE @startNumber AS datetime;",diff.LocalValue); - Assert.AreEqual("DECLARE @startNumber AS int;", diff.DatabaseValue); + //now parameter should have the correct datatype + Assert.IsTrue(parameter.HasLocalChanges().Evaluation == ChangeDescription.DatabaseCopyDifferent); + var diff = parameter.HasLocalChanges().Differences.Single(); - } + Assert.AreEqual("DECLARE @startNumber AS datetime;", diff.LocalValue); + Assert.AreEqual("DECLARE @startNumber AS int;", diff.DatabaseValue); + } - [Test] - public void Synchronization_ParameterRenamed() - { - AnyTableSqlParameter parameter = (AnyTableSqlParameter)_function.TableInfoCreated.GetAllParameters().Single(p => p.ParameterName.Equals("@startNumber")); - parameter.ParameterSQL = "DECLARE @startNum AS int"; - parameter.SaveToDatabase(); + [Test] + public void Synchronization_ParameterRenamed() + { + var parameter = (AnyTableSqlParameter)_function.TableInfoCreated.GetAllParameters() + .Single(p => p.ParameterName.Equals("@startNumber")); + parameter.ParameterSQL = "DECLARE @startNum AS int"; + parameter.SaveToDatabase(); - var syncer = new TableInfoSynchronizer(_function.TableInfoCreated); + var syncer = new TableInfoSynchronizer(_function.TableInfoCreated); - //shouldn't be any - Assert.IsFalse(_function.TableInfoCreated.GetAllParameters().Any(p => p.ParameterName.Equals("@startNumber"))); - syncer.Synchronize(new AcceptAllCheckNotifier()); + //shouldn't be any + Assert.IsFalse(_function.TableInfoCreated.GetAllParameters().Any(p => p.ParameterName.Equals("@startNumber"))); + syncer.Synchronize(new AcceptAllCheckNotifier()); - var after = _function.TableInfoCreated.GetAllParameters(); - //now there should be recreated (actually it will suggest deleting the excess one and creating the underlying one as 2 separate suggestions one after the other) - Assert.IsTrue(after.Any(p => p.ParameterName.Equals("@startNumber"))); + var after = _function.TableInfoCreated.GetAllParameters(); + //now there should be recreated (actually it will suggest deleting the excess one and creating the underlying one as 2 separate suggestions one after the other) + Assert.IsTrue(after.Any(p => p.ParameterName.Equals("@startNumber"))); - //still there should only be 3 parameters - Assert.AreEqual(3,after.Length); + //still there should only be 3 parameters + Assert.AreEqual(3, after.Length); + } - } + [Test] + public void TableInfoCheckingWorks() + { + _function.TableInfoCreated.Check(ThrowImmediatelyCheckNotifier.QuietPicky); + } - [Test] - public void TableInfoCheckingWorks() - { - _function.TableInfoCreated.Check(new ThrowImmediatelyCheckNotifier() { ThrowOnWarning = true }); - } - - [Test] - public void CatalogueCheckingWorks() - { - _function.Cata.Check(new ThrowImmediatelyCheckNotifier() { ThrowOnWarning = true }); - } + [Test] + public void CatalogueCheckingWorks() + { + _function.Cata.Check(ThrowImmediatelyCheckNotifier.QuietPicky); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/TriggerTests.cs b/Rdmp.Core.Tests/Curation/Integration/TriggerTests.cs index ed49ca6b3e..b5c4686a67 100644 --- a/Rdmp.Core.Tests/Curation/Integration/TriggerTests.cs +++ 
b/Rdmp.Core.Tests/Curation/Integration/TriggerTests.cs @@ -9,245 +9,248 @@ using FAnsi; using FAnsi.Discovery; using NUnit.Framework; -using Rdmp.Core.Curation.Data; using Rdmp.Core.DataLoad.Triggers; using Rdmp.Core.DataLoad.Triggers.Exceptions; using Rdmp.Core.DataLoad.Triggers.Implementations; -using ReusableLibraryCode.Checks; -using ReusableLibraryCode.Exceptions; using Tests.Common; using System.Collections.Generic; using TypeGuesser; using System.Linq; using System.Data; +using Rdmp.Core.ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Exceptions; -namespace Rdmp.Core.Tests.Curation.Integration +namespace Rdmp.Core.Tests.Curation.Integration; + +public class TriggerTests : DatabaseTests { - public class TriggerTests :DatabaseTests + private DiscoveredTable _table; + private DiscoveredTable _archiveTable; + private DiscoveredDatabase _database; + + + public void CreateTable(DatabaseType dbType) { - private DiscoveredTable _table; - private DiscoveredTable _archiveTable; - private DiscoveredDatabase _database; - + _database = GetCleanedServer(dbType); - public void CreateTable(DatabaseType dbType) + _table = _database.CreateTable("TriggerTests", new DatabaseColumnRequest[] { - _database = GetCleanedServer(dbType); + new("name", new DatabaseTypeRequest(typeof(string), 30)) { AllowNulls = false }, + new("bubbles", new DatabaseTypeRequest(typeof(int))) + }); - _table =_database.CreateTable("TriggerTests",new DatabaseColumnRequest[]{ - new DatabaseColumnRequest("name",new DatabaseTypeRequest(typeof(string),30)){AllowNulls = false }, - new DatabaseColumnRequest("bubbles",new DatabaseTypeRequest(typeof(int))), - }); + _archiveTable = _database.ExpectTable("TriggerTests_Archive"); + } - _archiveTable = _database.ExpectTable("TriggerTests_Archive"); - } + private ITriggerImplementer GetImplementer() => + new TriggerImplementerFactory(_database.Server.DatabaseType).Create(_table); - private ITriggerImplementer GetImplementer() - { - return new TriggerImplementerFactory(_database.Server.DatabaseType).Create(_table); - } - [TestCaseSource(typeof(All),nameof(All.DatabaseTypes))] - public void NoTriggerExists(DatabaseType dbType) - { - CreateTable(dbType); + [TestCaseSource(typeof(All), nameof(All.DatabaseTypes))] + public void NoTriggerExists(DatabaseType dbType) + { + CreateTable(dbType); - var implementer = GetImplementer(); + var implementer = GetImplementer(); - //most likely doesn't exist but may do - implementer.DropTrigger(out string _, out string _); + //most likely doesn't exist but may do + implementer.DropTrigger(out _, out _); - Assert.AreEqual(TriggerStatus.Missing, implementer.GetTriggerStatus()); - } + Assert.AreEqual(TriggerStatus.Missing, implementer.GetTriggerStatus()); + } - [TestCaseSource(typeof(All),nameof(All.DatabaseTypes))] - public void CreateWithNoPks_Complain(DatabaseType dbType) - { - CreateTable(dbType); + [TestCaseSource(typeof(All), nameof(All.DatabaseTypes))] + public void CreateWithNoPks_Complain(DatabaseType dbType) + { + CreateTable(dbType); - var ex = Assert.Throws(() => GetImplementer().CreateTrigger(new ThrowImmediatelyCheckNotifier())); - Assert.AreEqual("There must be at least 1 primary key", ex.Message); - } + var ex = Assert.Throws(() => + GetImplementer().CreateTrigger(ThrowImmediatelyCheckNotifier.Quiet)); + Assert.AreEqual("There must be at least 1 primary key", ex.Message); + } - [TestCaseSource(typeof(All),nameof(All.DatabaseTypes))] - public void CreateWithPks_Valid(DatabaseType dbType) - { - CreateTable(dbType); + 
[TestCaseSource(typeof(All), nameof(All.DatabaseTypes))] + public void CreateWithPks_Valid(DatabaseType dbType) + { + CreateTable(dbType); - _table.CreatePrimaryKey(new []{_table.DiscoverColumn("name")}); - GetImplementer().CreateTrigger(new ThrowImmediatelyCheckNotifier()); + _table.CreatePrimaryKey(new[] { _table.DiscoverColumn("name") }); + GetImplementer().CreateTrigger(ThrowImmediatelyCheckNotifier.Quiet); - Assert.AreEqual(TriggerStatus.Enabled, GetImplementer().GetTriggerStatus()); - Assert.AreEqual(true, GetImplementer().CheckUpdateTriggerIsEnabledAndHasExpectedBody()); - } + Assert.AreEqual(TriggerStatus.Enabled, GetImplementer().GetTriggerStatus()); + Assert.AreEqual(true, GetImplementer().CheckUpdateTriggerIsEnabledAndHasExpectedBody()); + } + + [TestCaseSource(typeof(All), nameof(All.DatabaseTypes))] + public void Create_WithDodgyColumnNames(DatabaseType dbType) + { + _database = GetCleanedServer(dbType); - [TestCaseSource(typeof(All),nameof(All.DatabaseTypes))] - public void Create_WithDodgyColumnNames(DatabaseType dbType) + _table = _database.CreateTable("Trol lol My Table Select * from Group by fish", new DatabaseColumnRequest[] { - _database = GetCleanedServer(dbType); + new("My Lovely Column Select * From Lolz", new DatabaseTypeRequest(typeof(string), 30)) + { AllowNulls = false, IsPrimaryKey = true }, + new("ANormalColumnName", new DatabaseTypeRequest(typeof(int))), + new("Group By Meeee Colll trollolol", new DatabaseTypeRequest(typeof(int))) + }); - _table =_database.CreateTable("Trol lol My Table Select * from Group by fish",new DatabaseColumnRequest[]{ - new DatabaseColumnRequest("My Lovely Column Select * From Lolz",new DatabaseTypeRequest(typeof(string),30)){AllowNulls = false,IsPrimaryKey = true}, - new DatabaseColumnRequest("ANormalColumnName",new DatabaseTypeRequest(typeof(int))), - new DatabaseColumnRequest("Group By Meeee Colll trollolol",new DatabaseTypeRequest(typeof(int))), - }); + GetImplementer().CreateTrigger(ThrowImmediatelyCheckNotifier.Quiet); - GetImplementer().CreateTrigger(new ThrowImmediatelyCheckNotifier()); + Assert.AreEqual(TriggerStatus.Enabled, GetImplementer().GetTriggerStatus()); + Assert.AreEqual(true, GetImplementer().CheckUpdateTriggerIsEnabledAndHasExpectedBody()); + } - Assert.AreEqual(TriggerStatus.Enabled, GetImplementer().GetTriggerStatus()); - Assert.AreEqual(true, GetImplementer().CheckUpdateTriggerIsEnabledAndHasExpectedBody()); - } + [TestCaseSource(typeof(All), nameof(All.DatabaseTypes))] + public void AlterTest_InvalidThenRecreateItAndItsValidAgain(DatabaseType dbType) + { + CreateWithPks_Valid(dbType); - [TestCaseSource(typeof(All),nameof(All.DatabaseTypes))] - public void AlterTest_InvalidThenRecreateItAndItsValidAgain(DatabaseType dbType) - { - CreateWithPks_Valid(dbType); + _table.AddColumn("fish", new DatabaseTypeRequest(typeof(int)), true, 500); + _archiveTable.AddColumn("fish", new DatabaseTypeRequest(typeof(int)), true, 500); - _table.AddColumn("fish",new DatabaseTypeRequest(typeof(int)),true,500); - _archiveTable.AddColumn("fish",new DatabaseTypeRequest(typeof(int)),true,500); - - //still not valid because trigger SQL is missing it in the column list - var ex = Assert.Throws(() => GetImplementer().CheckUpdateTriggerIsEnabledAndHasExpectedBody()); - Assert.IsNotNull(ex.Message); + //still not valid because trigger SQL is missing it in the column list + var ex = Assert.Throws(() => + GetImplementer().CheckUpdateTriggerIsEnabledAndHasExpectedBody()); + Assert.IsNotNull(ex.Message); - var implementer = GetImplementer(); - 
implementer.DropTrigger(out var problemsDroppingTrigger, out _); - Assert.IsEmpty(problemsDroppingTrigger); + var implementer = GetImplementer(); + implementer.DropTrigger(out var problemsDroppingTrigger, out _); + Assert.IsEmpty(problemsDroppingTrigger); - implementer.CreateTrigger(new ThrowImmediatelyCheckNotifier()); + implementer.CreateTrigger(ThrowImmediatelyCheckNotifier.Quiet); - Assert.AreEqual(true, implementer.CheckUpdateTriggerIsEnabledAndHasExpectedBody()); - } + Assert.AreEqual(true, implementer.CheckUpdateTriggerIsEnabledAndHasExpectedBody()); + } - [TestCaseSource(typeof(All),nameof(All.DatabaseTypes))] - public void NowTestDataInsertion(DatabaseType dbType) - { - AlterTest_InvalidThenRecreateItAndItsValidAgain(dbType); - - _table.Insert(new Dictionary - { - {"name","Franky" } , - {"bubbles",3 } , - {"hic_validFrom",new DateTime(2001,1,2)} , - {"hic_dataLoadRunID",7 } - }); - - var liveOldRow = _table.GetDataTable().Rows.Cast().Single(r=>r["bubbles"] as int? ==3); - Assert.AreEqual(new DateTime(2001,1,2),((DateTime)liveOldRow[SpecialFieldNames.ValidFrom])); - - RunSQL("UPDATE {0} set bubbles =99",_table.GetFullyQualifiedName()); - - //new value is 99 - Assert.AreEqual(99,ExecuteScalar("Select bubbles FROM {0} where name = 'Franky'",_table.GetFullyQualifiedName())); - //archived value is 3 - Assert.AreEqual(3, ExecuteScalar("Select bubbles FROM {0} where name = 'Franky'", _archiveTable.GetFullyQualifiedName())); - - //Legacy table valued function only works for MicrosoftSQLServer - if(dbType == DatabaseType.MicrosoftSQLServer) - { - //legacy in 2001-01-01 it didn't exist - Assert.IsNull( ExecuteScalar("Select bubbles FROM TriggerTests_Legacy('2001-01-01') where name = 'Franky'")); - //legacy in 2001-01-03 it did exist and was 3 - Assert.AreEqual(3, ExecuteScalar("Select bubbles FROM TriggerTests_Legacy('2001-01-03') where name = 'Franky'")); - //legacy boundary case? - Assert.AreEqual(3, ExecuteScalar("Select bubbles FROM TriggerTests_Legacy('2001-01-02') where name = 'Franky'")); - - //legacy today it is 99 - Assert.AreEqual(99, ExecuteScalar("Select bubbles FROM TriggerTests_Legacy(GETDATE()) where name = 'Franky'")); - } - - // Live row should now reflect that it is validFrom today - var liveNewRow = _table.GetDataTable().Rows.Cast().Single(r=>r["bubbles"] as int? ==99); - Assert.AreEqual(DateTime.Now.Date,((DateTime)liveNewRow[SpecialFieldNames.ValidFrom]).Date); - - // Archived row should not have had its validFrom field broken - var archivedRow = _archiveTable.GetDataTable().Rows.Cast().Single(r=>r["bubbles"] as int? ==3); - Assert.AreEqual(new DateTime(2001,1,2),((DateTime)archivedRow[SpecialFieldNames.ValidFrom])); - } + [TestCaseSource(typeof(All), nameof(All.DatabaseTypes))] + public void NowTestDataInsertion(DatabaseType dbType) + { + AlterTest_InvalidThenRecreateItAndItsValidAgain(dbType); - [TestCaseSource(typeof(All),nameof(All.DatabaseTypes))] - public void DiffDatabaseDataFetcherTest(DatabaseType dbType) + _table.Insert(new Dictionary + { + { "name", "Franky" }, + { "bubbles", 3 }, + { "hic_validFrom", new DateTime(2001, 1, 2) }, + { "hic_dataLoadRunID", 7 } + }); + + var liveOldRow = _table.GetDataTable().Rows.Cast().Single(r => r["bubbles"] as int? 
== 3); + Assert.AreEqual(new DateTime(2001, 1, 2), (DateTime)liveOldRow[SpecialFieldNames.ValidFrom]); + + RunSQL("UPDATE {0} set bubbles =99", _table.GetFullyQualifiedName()); + + //new value is 99 + Assert.AreEqual(99, + ExecuteScalar("Select bubbles FROM {0} where name = 'Franky'", _table.GetFullyQualifiedName())); + //archived value is 3 + Assert.AreEqual(3, + ExecuteScalar("Select bubbles FROM {0} where name = 'Franky'", _archiveTable.GetFullyQualifiedName())); + + //Legacy table valued function only works for MicrosoftSQLServer + if (dbType == DatabaseType.MicrosoftSQLServer) { - CreateTable(dbType); - - _table.CreatePrimaryKey(_table.DiscoverColumn("name")); - - GetImplementer().CreateTrigger(new ThrowImmediatelyCheckNotifier()); - - _table.Insert(new Dictionary - { - {"name","Franky" } , - {"bubbles",3 } , - {"hic_validFrom",new DateTime(2001,1,2)} , - {"hic_dataLoadRunID",7 } - }); - - Thread.Sleep(1000); - RunSQL("UPDATE {0} SET bubbles=1",_table.GetFullyQualifiedName()); - - Thread.Sleep(1000); - RunSQL("UPDATE {0} SET bubbles=2",_table.GetFullyQualifiedName()); - - Thread.Sleep(1000); - RunSQL("UPDATE {0} SET bubbles=3",_table.GetFullyQualifiedName()); - - Thread.Sleep(1000); - RunSQL("UPDATE {0} SET bubbles=4",_table.GetFullyQualifiedName()); - - Thread.Sleep(1000); - - Assert.AreEqual(1,_table.GetRowCount()); - Assert.AreEqual(4,_archiveTable.GetRowCount()); - - Import(_table,out var ti,out var cols); - DiffDatabaseDataFetcher fetcher = new DiffDatabaseDataFetcher(1,ti,7,100); - - fetcher.FetchData(new AcceptAllCheckNotifier()); - Assert.AreEqual(4,fetcher.Updates_New.Rows[0]["bubbles"]); - Assert.AreEqual(3, fetcher.Updates_Replaced.Rows[0]["bubbles"]); - - Assert.AreEqual(1,fetcher.Updates_New.Rows.Count); - Assert.AreEqual(1, fetcher.Updates_Replaced.Rows.Count); + //legacy in 2001-01-01 it didn't exist + Assert.IsNull(ExecuteScalar("Select bubbles FROM TriggerTests_Legacy('2001-01-01') where name = 'Franky'")); + //legacy in 2001-01-03 it did exist and was 3 + Assert.AreEqual(3, + ExecuteScalar("Select bubbles FROM TriggerTests_Legacy('2001-01-03') where name = 'Franky'")); + //legacy boundary case? + Assert.AreEqual(3, + ExecuteScalar("Select bubbles FROM TriggerTests_Legacy('2001-01-02') where name = 'Franky'")); + + //legacy today it is 99 + Assert.AreEqual(99, + ExecuteScalar("Select bubbles FROM TriggerTests_Legacy(GETDATE()) where name = 'Franky'")); } + // Live row should now reflect that it is validFrom today + var liveNewRow = _table.GetDataTable().Rows.Cast().Single(r => r["bubbles"] as int? == 99); + Assert.AreEqual(DateTime.Now.Date, ((DateTime)liveNewRow[SpecialFieldNames.ValidFrom]).Date); - [Test] - public void IdentityTest() - { - CreateTable(DatabaseType.MicrosoftSQLServer); - - RunSQL("Alter TABLE TriggerTests ADD myident int identity(1,1) PRIMARY KEY"); + // Archived row should not have had its validFrom field broken + var archivedRow = _archiveTable.GetDataTable().Rows.Cast().Single(r => r["bubbles"] as int? 
== 3); + Assert.AreEqual(new DateTime(2001, 1, 2), (DateTime)archivedRow[SpecialFieldNames.ValidFrom]); + } - var implementer = new MicrosoftSQLTriggerImplementer(_table); + [TestCaseSource(typeof(All), nameof(All.DatabaseTypes))] + public void DiffDatabaseDataFetcherTest(DatabaseType dbType) + { + CreateTable(dbType); - implementer.CreateTrigger(new ThrowImmediatelyCheckNotifier()); - implementer.CheckUpdateTriggerIsEnabledAndHasExpectedBody(); - } + _table.CreatePrimaryKey(_table.DiscoverColumn("name")); - private object ExecuteScalar(string sql, params string[] args) - { - if(args.Length != 0) - sql = string.Format(sql,args); - - var svr = _database.Server; - using (var con = svr.GetConnection()) - { - con.Open(); - return svr.GetCommand(sql, con).ExecuteScalar(); - } - } + GetImplementer().CreateTrigger(ThrowImmediatelyCheckNotifier.Quiet); - private void RunSQL(string sql, params string[] args) + _table.Insert(new Dictionary { - if(args.Length != 0) - sql = string.Format(sql,args); - if (_database == null) - throw new Exception("You must call CreateTable first"); - - using (var con = _database.Server.GetConnection()) - { - con.Open(); - _database.Server.GetCommand(sql, con).ExecuteNonQuery(); - } - } + { "name", "Franky" }, + { "bubbles", 3 }, + { "hic_validFrom", new DateTime(2001, 1, 2) }, + { "hic_dataLoadRunID", 7 } + }); + + Thread.Sleep(1000); + RunSQL("UPDATE {0} SET bubbles=1", _table.GetFullyQualifiedName()); + + Thread.Sleep(1000); + RunSQL("UPDATE {0} SET bubbles=2", _table.GetFullyQualifiedName()); + + Thread.Sleep(1000); + RunSQL("UPDATE {0} SET bubbles=3", _table.GetFullyQualifiedName()); + + Thread.Sleep(1000); + RunSQL("UPDATE {0} SET bubbles=4", _table.GetFullyQualifiedName()); + + Thread.Sleep(1000); + + Assert.AreEqual(1, _table.GetRowCount()); + Assert.AreEqual(4, _archiveTable.GetRowCount()); + + Import(_table, out var ti, out var cols); + var fetcher = new DiffDatabaseDataFetcher(1, ti, 7, 100); + + fetcher.FetchData(new AcceptAllCheckNotifier()); + Assert.AreEqual(4, fetcher.Updates_New.Rows[0]["bubbles"]); + Assert.AreEqual(3, fetcher.Updates_Replaced.Rows[0]["bubbles"]); + + Assert.AreEqual(1, fetcher.Updates_New.Rows.Count); + Assert.AreEqual(1, fetcher.Updates_Replaced.Rows.Count); + } + + + [Test] + public void IdentityTest() + { + CreateTable(DatabaseType.MicrosoftSQLServer); + + RunSQL("Alter TABLE TriggerTests ADD myident int identity(1,1) PRIMARY KEY"); + + var implementer = new MicrosoftSQLTriggerImplementer(_table); + + implementer.CreateTrigger(ThrowImmediatelyCheckNotifier.Quiet); + implementer.CheckUpdateTriggerIsEnabledAndHasExpectedBody(); + } + + private object ExecuteScalar(string sql, params string[] args) + { + if (args.Length != 0) + sql = string.Format(sql, args); + + var svr = _database.Server; + using var con = svr.GetConnection(); + con.Open(); + return svr.GetCommand(sql, con).ExecuteScalar(); + } + + private void RunSQL(string sql, params string[] args) + { + if (args.Length != 0) + sql = string.Format(sql, args); + if (_database == null) + throw new Exception("You must call CreateTable first"); + + using var con = _database.Server.GetConnection(); + con.Open(); + _database.Server.GetCommand(sql, con).ExecuteNonQuery(); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/Validation/ReferentialIntegrityConstraintTests.cs b/Rdmp.Core.Tests/Curation/Integration/Validation/ReferentialIntegrityConstraintTests.cs index 710aaf9da0..a509e1d228 100644 --- 
a/Rdmp.Core.Tests/Curation/Integration/Validation/ReferentialIntegrityConstraintTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/Validation/ReferentialIntegrityConstraintTests.cs @@ -8,89 +8,90 @@ using NUnit.Framework; using Rdmp.Core.Curation; using Rdmp.Core.Curation.Data; -using Rdmp.Core.Validation; using Rdmp.Core.Validation.Constraints.Secondary; -using ReusableLibraryCode.DataAccess; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Integration.Validation -{ - public class ReferentialIntegrityConstraintTests :DatabaseTests - { - private ITableInfo _tableInfo; - private ColumnInfo[] _columnInfo; - private ReferentialIntegrityConstraint _constraint; +namespace Rdmp.Core.Tests.Curation.Integration.Validation; - [OneTimeSetUp] - protected override void OneTimeSetUp() - { - base.OneTimeSetUp(); +public class ReferentialIntegrityConstraintTests : DatabaseTests +{ + private ITableInfo _tableInfo; + private ColumnInfo[] _columnInfo; + private ReferentialIntegrityConstraint _constraint; - var tbl = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer).ExpectTable("ReferentialIntegrityConstraintTests"); + [OneTimeSetUp] + protected override void OneTimeSetUp() + { + base.OneTimeSetUp(); - if(tbl.Exists()) - tbl.Drop(); + var tbl = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer) + .ExpectTable("ReferentialIntegrityConstraintTests"); - var server = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer).Server; - - using (var con = server.GetConnection()) - { - con.Open(); + if (tbl.Exists()) + tbl.Drop(); - server.GetCommand("CREATE TABLE ReferentialIntegrityConstraintTests(MyValue int)", con).ExecuteNonQuery(); - server.GetCommand("INSERT INTO ReferentialIntegrityConstraintTests (MyValue) VALUES (5)", con).ExecuteNonQuery(); - } + var server = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer).Server; - TableInfoImporter importer = new TableInfoImporter(CatalogueRepository, tbl); - importer.DoImport(out _tableInfo,out _columnInfo); + using (var con = server.GetConnection()) + { + con.Open(); - _constraint = new ReferentialIntegrityConstraint(CatalogueRepository); - _constraint.OtherColumnInfo = _columnInfo.Single(); + server.GetCommand("CREATE TABLE ReferentialIntegrityConstraintTests(MyValue int)", con).ExecuteNonQuery(); + server.GetCommand("INSERT INTO ReferentialIntegrityConstraintTests (MyValue) VALUES (5)", con) + .ExecuteNonQuery(); } - [Test] - [TestCase(5, false)] - [TestCase("5", false)] - [TestCase(4, true)] - [TestCase(6, true)] - [TestCase(-5, true)] - public void NormalLogic(object value, bool expectFailure) + var importer = new TableInfoImporter(CatalogueRepository, tbl); + importer.DoImport(out _tableInfo, out _columnInfo); + + _constraint = new ReferentialIntegrityConstraint(CatalogueRepository) { - _constraint.InvertLogic = false; - ValidationFailure failure = _constraint.Validate(value, null, null); + OtherColumnInfo = _columnInfo.Single() + }; + } - //if it did not fail validation and we expected failure - if(failure == null && expectFailure) - Assert.Fail(); + [Test] + [TestCase(5, false)] + [TestCase("5", false)] + [TestCase(4, true)] + [TestCase(6, true)] + [TestCase(-5, true)] + public void NormalLogic(object value, bool expectFailure) + { + _constraint.InvertLogic = false; + var failure = _constraint.Validate(value, null, null); - //or it did fail validation and we did not expect failure - if(failure != null && !expectFailure) - Assert.Fail(); + //if it did not fail validation and we expected failure + if (failure == null && expectFailure) + 
Assert.Fail(); - Assert.Pass(); - } + //or it did fail validation and we did not expect failure + if (failure != null && !expectFailure) + Assert.Fail(); + Assert.Pass(); + } - [Test] - [TestCase(5, true)] - [TestCase("5", true)] - [TestCase(4, false)] - [TestCase(6, false)] - [TestCase(-5, false)] - public void InvertedLogic(object value, bool expectFailure) - { - _constraint.InvertLogic = true; - ValidationFailure failure = _constraint.Validate(value, null, null); - //if it did not fail validation and we expected failure - if (failure == null && expectFailure) - Assert.Fail(); + [Test] + [TestCase(5, true)] + [TestCase("5", true)] + [TestCase(4, false)] + [TestCase(6, false)] + [TestCase(-5, false)] + public void InvertedLogic(object value, bool expectFailure) + { + _constraint.InvertLogic = true; + var failure = _constraint.Validate(value, null, null); - //or it did fail validation and we did not expect failure - if (failure != null && !expectFailure) - Assert.Fail(); + //if it did not fail validation and we expected failure + if (failure == null && expectFailure) + Assert.Fail(); - Assert.Pass(); - } + //or it did fail validation and we did not expect failure + if (failure != null && !expectFailure) + Assert.Fail(); + + Assert.Pass(); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Integration/Validation/StandardRegexTests.cs b/Rdmp.Core.Tests/Curation/Integration/Validation/StandardRegexTests.cs index f40f913dac..888d0d8a35 100644 --- a/Rdmp.Core.Tests/Curation/Integration/Validation/StandardRegexTests.cs +++ b/Rdmp.Core.Tests/Curation/Integration/Validation/StandardRegexTests.cs @@ -7,42 +7,42 @@ using System.Linq; using NUnit.Framework; using Rdmp.Core.Curation.Data; -using Rdmp.Core.Validation; using Rdmp.Core.Validation.Constraints.Secondary; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Integration.Validation +namespace Rdmp.Core.Tests.Curation.Integration.Validation; + +public class StandardRegexTests : DatabaseTests { - public class StandardRegexTests:DatabaseTests + [Test] + public void CreateNew_UseConstraint() { - [Test] - public void CreateNew_UseConstraint() - { - // Clean SetUp any existing regexes - CatalogueRepository.GetAllObjects().Where(r=>r.ConceptName == "Fish").ToList().ForEach(r => r.DeleteInDatabase()); + // Clean SetUp any existing regexes + CatalogueRepository.GetAllObjects().Where(r => r.ConceptName == "Fish").ToList() + .ForEach(r => r.DeleteInDatabase()); - var regex = new StandardRegex(CatalogueRepository); - try - { - Assert.IsNotNull(regex.ConceptName); - Assert.IsTrue(string.IsNullOrEmpty(regex.Description)); + var regex = new StandardRegex(CatalogueRepository); + try + { + Assert.IsNotNull(regex.ConceptName); + Assert.IsTrue(string.IsNullOrEmpty(regex.Description)); - regex.ConceptName = "Fish"; - regex.Regex = "^(Fish)$"; - regex.SaveToDatabase(); + regex.ConceptName = "Fish"; + regex.Regex = "^(Fish)$"; + regex.SaveToDatabase(); - StandardRegexConstraint constraint = new StandardRegexConstraint(CatalogueRepository); - - constraint.CatalogueStandardRegex = regex; - - Assert.IsNull(constraint.Validate("Fish",null,null)); - ValidationFailure failure = constraint.Validate("FishFingers", null, null); - Assert.IsNotNull(failure); - } - finally + var constraint = new StandardRegexConstraint(CatalogueRepository) { - regex.DeleteInDatabase(); - } + CatalogueStandardRegex = regex + }; + + Assert.IsNull(constraint.Validate("Fish", null, null)); + var failure = constraint.Validate("FishFingers", null, null); + 
Assert.IsNotNull(failure); + } + finally + { + regex.DeleteInDatabase(); } } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/JsonSerializationTests/JsonSerializationTests.cs b/Rdmp.Core.Tests/Curation/JsonSerializationTests/JsonSerializationTests.cs index c21a4627f7..7793b3f28c 100644 --- a/Rdmp.Core.Tests/Curation/JsonSerializationTests/JsonSerializationTests.cs +++ b/Rdmp.Core.Tests/Curation/JsonSerializationTests/JsonSerializationTests.cs @@ -4,69 +4,69 @@ // RDMP is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. // You should have received a copy of the GNU General Public License along with RDMP. If not, see . -using MapsDirectlyToDatabaseTable; using Newtonsoft.Json; using NUnit.Framework; using Rdmp.Core.Curation.Data; using Rdmp.Core.Curation.Data.ImportExport; using Rdmp.Core.Curation.Data.Serialization; +using Rdmp.Core.MapsDirectlyToDatabaseTable; using Rdmp.Core.Repositories; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.JsonSerializationTests +namespace Rdmp.Core.Tests.Curation.JsonSerializationTests; + +public class JsonSerializationTests : DatabaseTests { - public class JsonSerializationTests:DatabaseTests + [Test] + public void TestSerialization_Catalogue() { - [Test] - public void TestSerialization_Catalogue() + if (CatalogueRepository is not TableRepository) + Assert.Inconclusive("This test does not apply for non db repos"); + + var c = new Catalogue(RepositoryLocator.CatalogueRepository, "Fish"); + + var mySerializeable = new MySerializeableTestClass(new ShareManager(RepositoryLocator)) { - if (CatalogueRepository is not TableRepository) - Assert.Inconclusive("This test does not apply for non db repos"); - - Catalogue c = new Catalogue(RepositoryLocator.CatalogueRepository,"Fish"); - - MySerializeableTestClass mySerializeable = new MySerializeableTestClass(new ShareManager(RepositoryLocator)); - mySerializeable.SelectedCatalogue = c; - mySerializeable.Title = "War and Pieces"; - - var dbConverter = new DatabaseEntityJsonConverter(RepositoryLocator); - var lazyConverter = new PickAnyConstructorJsonConverter(RepositoryLocator); - - - var asString = JsonConvert.SerializeObject(mySerializeable, dbConverter,lazyConverter); - var mySerializeableAfter = (MySerializeableTestClass)JsonConvert.DeserializeObject(asString, typeof(MySerializeableTestClass), new JsonConverter[] { dbConverter, lazyConverter }); - - Assert.AreNotEqual(mySerializeable, mySerializeableAfter); - Assert.AreEqual(mySerializeable.SelectedCatalogue, mySerializeableAfter.SelectedCatalogue); - Assert.AreEqual(mySerializeable.SelectedCatalogue.Name, mySerializeableAfter.SelectedCatalogue.Name); - Assert.AreEqual("War and Pieces", mySerializeableAfter.Title); - mySerializeableAfter.SelectedCatalogue.Name = "Cannon balls"; - mySerializeableAfter.SelectedCatalogue.SaveToDatabase(); - - Assert.AreNotEqual(mySerializeable.SelectedCatalogue.Name, mySerializeableAfter.SelectedCatalogue.Name); - } - - //todo null Catalogue test case - } - - - public class MySerializeableTestClass - { - public string Title { get; set; } + SelectedCatalogue = c, + Title = "War and Pieces" + }; - public Catalogue SelectedCatalogue { get; set; } + var dbConverter = new DatabaseEntityJsonConverter(RepositoryLocator); + var lazyConverter = new PickAnyConstructorJsonConverter(RepositoryLocator); - private readonly ShareManager _sm; - public 
MySerializeableTestClass(IRDMPPlatformRepositoryServiceLocator locator) - { - _sm = new ShareManager(locator); - } - - public MySerializeableTestClass(ShareManager sm) - { - _sm = sm; - } + var asString = JsonConvert.SerializeObject(mySerializeable, dbConverter, lazyConverter); + var mySerializeableAfter = (MySerializeableTestClass)JsonConvert.DeserializeObject(asString, + typeof(MySerializeableTestClass), new JsonConverter[] { dbConverter, lazyConverter }); + + Assert.AreNotEqual(mySerializeable, mySerializeableAfter); + Assert.AreEqual(mySerializeable.SelectedCatalogue, mySerializeableAfter.SelectedCatalogue); + Assert.AreEqual(mySerializeable.SelectedCatalogue.Name, mySerializeableAfter.SelectedCatalogue.Name); + Assert.AreEqual("War and Pieces", mySerializeableAfter.Title); + mySerializeableAfter.SelectedCatalogue.Name = "Cannon balls"; + mySerializeableAfter.SelectedCatalogue.SaveToDatabase(); + + Assert.AreNotEqual(mySerializeable.SelectedCatalogue.Name, mySerializeableAfter.SelectedCatalogue.Name); } + //todo null Catalogue test case } + +public class MySerializeableTestClass +{ + public string Title { get; set; } + + public Catalogue SelectedCatalogue { get; set; } + + private readonly ShareManager _sm; + + public MySerializeableTestClass(IRDMPPlatformRepositoryServiceLocator locator) + { + _sm = new ShareManager(locator); + } + + public MySerializeableTestClass(ShareManager sm) + { + _sm = sm; + } +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/MemoryRepositoryTests/MemoryRepositoryTests.cs b/Rdmp.Core.Tests/Curation/MemoryRepositoryTests/MemoryRepositoryTests.cs index aa72880761..038dd4d822 100644 --- a/Rdmp.Core.Tests/Curation/MemoryRepositoryTests/MemoryRepositoryTests.cs +++ b/Rdmp.Core.Tests/Curation/MemoryRepositoryTests/MemoryRepositoryTests.cs @@ -4,7 +4,6 @@ // RDMP is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. // You should have received a copy of the GNU General Public License along with RDMP. If not, see . 
-using System.Reflection; using FAnsi.Implementation; using FAnsi.Implementations.MicrosoftSQL; using FAnsi.Implementations.MySql; @@ -15,53 +14,52 @@ using Rdmp.Core.QueryBuilding; using Rdmp.Core.Repositories; -namespace Rdmp.Core.Tests.Curation.MemoryRepositoryTests +namespace Rdmp.Core.Tests.Curation.MemoryRepositoryTests; + +[Category("Unit")] +internal class MemoryRepositoryTests { - [Category("Unit")] - class MemoryRepositoryTests - { - readonly MemoryCatalogueRepository _repo = new MemoryCatalogueRepository(); + private readonly MemoryCatalogueRepository _repo = new(); - [OneTimeSetUp] - public virtual void OneTimeSetUp() - { - ImplementationManager.Load(); - ImplementationManager.Load(); - ImplementationManager.Load(); - ImplementationManager.Load(); - } + [OneTimeSetUp] + public virtual void OneTimeSetUp() + { + ImplementationManager.Load(); + ImplementationManager.Load(); + ImplementationManager.Load(); + ImplementationManager.Load(); + } - [Test] - public void TestMemoryRepository_CatalogueConstructor() - { - Catalogue memCatalogue = new Catalogue(_repo, "My New Catalogue"); + [Test] + public void TestMemoryRepository_CatalogueConstructor() + { + var memCatalogue = new Catalogue(_repo, "My New Catalogue"); - Assert.AreEqual(memCatalogue, _repo.GetObjectByID(memCatalogue.ID)); - } + Assert.AreEqual(memCatalogue, _repo.GetObjectByID(memCatalogue.ID)); + } - [Test] - public void TestMemoryRepository_QueryBuilder() - { - Catalogue memCatalogue = new Catalogue(_repo, "My New Catalogue"); + [Test] + public void TestMemoryRepository_QueryBuilder() + { + var memCatalogue = new Catalogue(_repo, "My New Catalogue"); - CatalogueItem myCol = new CatalogueItem(_repo,memCatalogue,"MyCol1"); + var myCol = new CatalogueItem(_repo, memCatalogue, "MyCol1"); - var ti = new TableInfo(_repo, "My table"); - var col = new ColumnInfo(_repo, "Mycol", "varchar(10)", ti); + var ti = new TableInfo(_repo, "My table"); + var col = new ColumnInfo(_repo, "Mycol", "varchar(10)", ti); - ExtractionInformation ei = new ExtractionInformation(_repo, myCol, col, col.Name); + var ei = new ExtractionInformation(_repo, myCol, col, col.Name); - Assert.AreEqual(memCatalogue, _repo.GetObjectByID(memCatalogue.ID)); + Assert.AreEqual(memCatalogue, _repo.GetObjectByID(memCatalogue.ID)); - var qb = new QueryBuilder(null,null); - qb.AddColumnRange(memCatalogue.GetAllExtractionInformation(ExtractionCategory.Any)); + var qb = new QueryBuilder(null, null); + qb.AddColumnRange(memCatalogue.GetAllExtractionInformation(ExtractionCategory.Any)); - Assert.AreEqual(@" + Assert.AreEqual(@" SELECT Mycol FROM My table", qb.SQL); - } } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/MemoryRepositoryTests/MemoryRepositoryVsDatabaseRepository.cs b/Rdmp.Core.Tests/Curation/MemoryRepositoryTests/MemoryRepositoryVsDatabaseRepository.cs index b03e6a6e11..623cd20528 100644 --- a/Rdmp.Core.Tests/Curation/MemoryRepositoryTests/MemoryRepositoryVsDatabaseRepository.cs +++ b/Rdmp.Core.Tests/Curation/MemoryRepositoryTests/MemoryRepositoryVsDatabaseRepository.cs @@ -14,108 +14,106 @@ using Rdmp.Core.Curation.Data.DataLoad; using Rdmp.Core.Curation.Data.Defaults; using Rdmp.Core.Repositories; -using ReusableLibraryCode.DataAccess; +using Rdmp.Core.ReusableLibraryCode.DataAccess; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.MemoryRepositoryTests +namespace Rdmp.Core.Tests.Curation.MemoryRepositoryTests; + +internal class MemoryRepositoryVsDatabaseRepository : DatabaseTests { - class 
MemoryRepositoryVsDatabaseRepository:DatabaseTests + [Test] + public void TestMemoryVsDatabaseRepository_CatalogueConstructor() { - [Test] - public void TestMemoryVsDatabaseRepository_CatalogueConstructor() - { - var memoryRepository = new MemoryCatalogueRepository(CatalogueRepository); + var memoryRepository = new MemoryCatalogueRepository(CatalogueRepository); - Catalogue memCatalogue = new Catalogue(memoryRepository, "My New Catalogue"); - Catalogue dbCatalogue = new Catalogue(CatalogueRepository,"My New Catalogue"); - - UnitTests.AssertAreEqual(memCatalogue,dbCatalogue); - } + var memCatalogue = new Catalogue(memoryRepository, "My New Catalogue"); + var dbCatalogue = new Catalogue(CatalogueRepository, "My New Catalogue"); - [Test] - public void TestMemoryVsDatabaseRepository_ProcessTaskConstructor() - { - var memoryRepository = new MemoryCatalogueRepository(CatalogueRepository); + UnitTests.AssertAreEqual(memCatalogue, dbCatalogue); + } - var memLmd = new LoadMetadata(memoryRepository, "My New Load"); - var dbLmd = new LoadMetadata(CatalogueRepository, "My New Load"); + [Test] + public void TestMemoryVsDatabaseRepository_ProcessTaskConstructor() + { + var memoryRepository = new MemoryCatalogueRepository(CatalogueRepository); - UnitTests.AssertAreEqual(memLmd, dbLmd); + var memLmd = new LoadMetadata(memoryRepository, "My New Load"); + var dbLmd = new LoadMetadata(CatalogueRepository, "My New Load"); - var memPt = new ProcessTask(memoryRepository, memLmd, LoadStage.AdjustRaw) { Name = "MyPt" }; - var dbPt = new ProcessTask(CatalogueRepository, dbLmd, LoadStage.AdjustRaw) { Name = "MyPt" }; - - UnitTests.AssertAreEqual(memPt, dbPt); - } + UnitTests.AssertAreEqual(memLmd, dbLmd); + var memPt = new ProcessTask(memoryRepository, memLmd, LoadStage.AdjustRaw) { Name = "MyPt" }; + var dbPt = new ProcessTask(CatalogueRepository, dbLmd, LoadStage.AdjustRaw) { Name = "MyPt" }; + + UnitTests.AssertAreEqual(memPt, dbPt); + } - [Test] - public void TestMemoryRepository_AggregateConfigurationConstructor() - { - var memoryRepository = new MemoryCatalogueRepository(CatalogueRepository); - Catalogue memCatalogue = new Catalogue(memoryRepository, "My New Catalogue"); - Catalogue dbCatalogue = new Catalogue(CatalogueRepository, "My New Catalogue"); + [Test] + public void TestMemoryRepository_AggregateConfigurationConstructor() + { + var memoryRepository = new MemoryCatalogueRepository(CatalogueRepository); - var memAggregate = new AggregateConfiguration(memoryRepository, memCatalogue, "My New Aggregate"); - var dbAggregate = new AggregateConfiguration(CatalogueRepository, dbCatalogue, "My New Aggregate"); + var memCatalogue = new Catalogue(memoryRepository, "My New Catalogue"); + var dbCatalogue = new Catalogue(CatalogueRepository, "My New Catalogue"); - UnitTests.AssertAreEqual(memAggregate, dbAggregate); - } - - [Test] - public void TestMemoryRepository_LiveLogging() - { - var memoryRepository = new MemoryCatalogueRepository(); + var memAggregate = new AggregateConfiguration(memoryRepository, memCatalogue, "My New Aggregate"); + var dbAggregate = new AggregateConfiguration(CatalogueRepository, dbCatalogue, "My New Aggregate"); - var loggingServer = new ExternalDatabaseServer(memoryRepository, "My Logging Server",null); - memoryRepository.SetDefault(PermissableDefaults.LiveLoggingServer_ID, loggingServer); + UnitTests.AssertAreEqual(memAggregate, dbAggregate); + } - Catalogue memCatalogue = new Catalogue(memoryRepository, "My New Catalogue"); - 
Assert.AreEqual(memCatalogue.LiveLoggingServer_ID,loggingServer.ID); - } + [Test] + public void TestMemoryRepository_LiveLogging() + { + var memoryRepository = new MemoryCatalogueRepository(); - [TestCase(DatabaseType.MicrosoftSQLServer)] - [TestCase(DatabaseType.MySql)] - public void TestImportingATable(DatabaseType dbType) - { - DataTable dt = new DataTable(); - dt.Columns.Add("Do"); - dt.Columns.Add("Ray"); - dt.Columns.Add("Me"); - dt.Columns.Add("Fa"); - dt.Columns.Add("So"); + var loggingServer = new ExternalDatabaseServer(memoryRepository, "My Logging Server", null); + memoryRepository.SetDefault(PermissableDefaults.LiveLoggingServer_ID, loggingServer); - var db = GetCleanedServer(dbType); - var tbl = db.CreateTable("OmgTables",dt); + var memCatalogue = new Catalogue(memoryRepository, "My New Catalogue"); + Assert.AreEqual(memCatalogue.LiveLoggingServer_ID, loggingServer.ID); + } - var memoryRepository = new MemoryCatalogueRepository(CatalogueRepository); - - var importer1 = new TableInfoImporter(memoryRepository, tbl, DataAccessContext.Any); + [TestCase(DatabaseType.MicrosoftSQLServer)] + [TestCase(DatabaseType.MySql)] + public void TestImportingATable(DatabaseType dbType) + { + var dt = new DataTable(); + dt.Columns.Add("Do"); + dt.Columns.Add("Ray"); + dt.Columns.Add("Me"); + dt.Columns.Add("Fa"); + dt.Columns.Add("So"); - importer1.DoImport(out var memTableInfo,out var memColumnInfos); - var forwardEngineer1 = new ForwardEngineerCatalogue(memTableInfo, memColumnInfos); - forwardEngineer1.ExecuteForwardEngineering(out var memCatalogue,out var memCatalogueItems,out var memExtractionInformations); + var db = GetCleanedServer(dbType); + var tbl = db.CreateTable("OmgTables", dt); + var memoryRepository = new MemoryCatalogueRepository(CatalogueRepository); - var importerdb = new TableInfoImporter(CatalogueRepository, tbl, DataAccessContext.Any); - importerdb.DoImport(out var dbTableInfo, out var dbColumnInfos); - var forwardEngineer2 = new ForwardEngineerCatalogue(dbTableInfo, dbColumnInfos); - forwardEngineer2.ExecuteForwardEngineering(out var dbCatalogue, out var dbCatalogueItems, out var dbExtractionInformations); + var importer1 = new TableInfoImporter(memoryRepository, tbl, DataAccessContext.Any); + importer1.DoImport(out var memTableInfo, out var memColumnInfos); + var forwardEngineer1 = new ForwardEngineerCatalogue(memTableInfo, memColumnInfos); + forwardEngineer1.ExecuteForwardEngineering(out var memCatalogue, out var memCatalogueItems, + out var memExtractionInformations); - UnitTests.AssertAreEqual(memCatalogue,dbCatalogue); - UnitTests.AssertAreEqual(memTableInfo,dbTableInfo); - UnitTests.AssertAreEqual(memCatalogue.CatalogueItems,dbCatalogue.CatalogueItems); - UnitTests.AssertAreEqual(memCatalogue.GetAllExtractionInformation(ExtractionCategory.Any), dbCatalogue.GetAllExtractionInformation(ExtractionCategory.Any)); + var importerdb = new TableInfoImporter(CatalogueRepository, tbl, DataAccessContext.Any); + importerdb.DoImport(out var dbTableInfo, out var dbColumnInfos); + var forwardEngineer2 = new ForwardEngineerCatalogue(dbTableInfo, dbColumnInfos); + forwardEngineer2.ExecuteForwardEngineering(out var dbCatalogue, out var dbCatalogueItems, + out var dbExtractionInformations); - UnitTests.AssertAreEqual(memCatalogue.CatalogueItems.Select(ci => ci.ColumnInfo), dbCatalogue.CatalogueItems.Select(ci => ci.ColumnInfo)); - } + UnitTests.AssertAreEqual(memCatalogue, dbCatalogue); + UnitTests.AssertAreEqual(memTableInfo, dbTableInfo); - + 
UnitTests.AssertAreEqual(memCatalogue.CatalogueItems, dbCatalogue.CatalogueItems); + UnitTests.AssertAreEqual(memCatalogue.GetAllExtractionInformation(ExtractionCategory.Any), + dbCatalogue.GetAllExtractionInformation(ExtractionCategory.Any)); - + UnitTests.AssertAreEqual(memCatalogue.CatalogueItems.Select(ci => ci.ColumnInfo), + dbCatalogue.CatalogueItems.Select(ci => ci.ColumnInfo)); } } \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/RefactoringTests/SelectSQLRefactorerTests.cs b/Rdmp.Core.Tests/Curation/RefactoringTests/SelectSQLRefactorerTests.cs index 2d7975cbc5..07ede43d6a 100644 --- a/Rdmp.Core.Tests/Curation/RefactoringTests/SelectSQLRefactorerTests.cs +++ b/Rdmp.Core.Tests/Curation/RefactoringTests/SelectSQLRefactorerTests.cs @@ -4,156 +4,157 @@ // RDMP is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. // You should have received a copy of the GNU General Public License along with RDMP. If not, see . -using MapsDirectlyToDatabaseTable; using NUnit.Framework; using Rdmp.Core.Curation.Data; +using Rdmp.Core.MapsDirectlyToDatabaseTable; using Rdmp.Core.Sharing.Refactoring; using Rdmp.Core.Sharing.Refactoring.Exceptions; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.RefactoringTests +namespace Rdmp.Core.Tests.Curation.RefactoringTests; + +public class SelectSQLRefactorerTests : UnitTests { - public class SelectSQLRefactorerTests:UnitTests + [Test] + public void RefactorTableName_TestValidReplacement_ColumnInfo() + { + var columnInfo = WhenIHaveA(); + columnInfo.Name = "[database]..[table].[column]"; + + var tableInfo = columnInfo.TableInfo; + tableInfo.Database = "database"; + tableInfo.Name = "[database]..[table]"; + + SelectSQLRefactorer.RefactorTableName(columnInfo, tableInfo, "[database]..[table2]"); + + Assert.AreEqual("[database]..[table2].[column]", columnInfo.Name); + } + + [Test] + public void RefactorTableName_TestValidReplacement_ExtractionInformation() + { + var ei = WhenIHaveA(); + ei.SelectSQL = "UPPER([database]..[table].[column])"; + ei.Alias = "MyCatalogueItem"; + ei.SaveToDatabase(); + + var ci = ei.ColumnInfo; + ci.Name = "[database]..[table].[column]"; + ci.SaveToDatabase(); + + var tableInfo = ei.ColumnInfo.TableInfo; + tableInfo.Database = "database"; + tableInfo.Name = "[database]..[table]"; + tableInfo.SaveToDatabase(); + + SelectSQLRefactorer.RefactorTableName(ei, tableInfo, "[database]..[table2]"); + + Assert.AreEqual("UPPER([database]..[table2].[column])", ei.SelectSQL); + } + + [Test] + [TestCase("UPPER([database]..[table].[column])", true)] + [TestCase( + "dbo.MyScalarFunction([database]..[table].[column]) in Select(distinct [database]..[table].[column] from bob)", + true)] + [TestCase("dbo.MyNewRand()", false)] + [TestCase( + "[dbo].MyScalarFunction([database]..[table].[column]) in Select(distinct [database]..[table].[column] from bob)", + true)] + [TestCase("[dbo].MyNewRand()", false)] + public void RefactorTableName_IsRefactorable_ExtractionInformation(string transformSql, + bool expectedToBeRefactorable) + { + var ei = WhenIHaveA(); + ei.SelectSQL = transformSql; + ei.Alias = "MyCatalogueItem"; + ei.SaveToDatabase(); + + var ci = ei.ColumnInfo; + ci.Name = "[database]..[table].[column]"; + ci.SaveToDatabase(); + + var tableInfo = ei.ColumnInfo.TableInfo; + tableInfo.Database = "database"; + tableInfo.Name = "[database]..[table]"; + 
tableInfo.SaveToDatabase(); + + var refactorer = new SelectSQLRefactorer(); + + Assert.AreEqual(expectedToBeRefactorable, SelectSQLRefactorer.IsRefactorable(ei)); + + if (expectedToBeRefactorable) + SelectSQLRefactorer.RefactorTableName(ei, tableInfo, "[database]..[table2]"); + else + Assert.Throws(() => + SelectSQLRefactorer.RefactorTableName(ei, tableInfo, "[database]..[table2]")); + } + + [TestCase("[Fish]..[MyTbl]", "[Fish]..[MyTbl2]")] + public void RefactorTableName_IsRefactorable_TableInfoWithNoColumnInfos(string oldName, string newName) + { + var ti = WhenIHaveA(); + ti.Name = oldName; + ti.Database = "Fish"; + ti.SaveToDatabase(); + + foreach (IDeleteable d in ti.ColumnInfos) + d.DeleteInDatabase(); + + Assert.IsTrue(SelectSQLRefactorer.IsRefactorable(ti)); + + Assert.AreEqual(1, SelectSQLRefactorer.RefactorTableName(ti, newName)); + Assert.AreEqual(newName, ti.Name); + } + + [TestCase("[Donkey]..[MyTbl]", "[Fish]..[MyTbl2]", "'[Donkey]..[MyTbl]' has incorrect database property 'Fish'")] + public void RefactorTableName_IsNotRefactorable_TableInfoWithNoColumnInfos(string oldName, string newName, + string expectedReason) { - [Test] - public void RefactorTableName_TestValidReplacement_ColumnInfo() - { - var columnInfo = WhenIHaveA(); - columnInfo.Name = "[database]..[table].[column]"; - - var tableInfo = columnInfo.TableInfo; - tableInfo.Database = "database"; - tableInfo.Name = "[database]..[table]"; - - var refactorer = new SelectSQLRefactorer(); - refactorer.RefactorTableName(columnInfo,tableInfo,"[database]..[table2]"); - - Assert.AreEqual("[database]..[table2].[column]", columnInfo.Name); - } - - [Test] - public void RefactorTableName_TestValidReplacement_ExtractionInformation() - { - var ei = WhenIHaveA(); - ei.SelectSQL = "UPPER([database]..[table].[column])"; - ei.Alias = "MyCatalogueItem"; - ei.SaveToDatabase(); - - var ci = ei.ColumnInfo; - ci.Name = "[database]..[table].[column]"; - ci.SaveToDatabase(); - - var tableInfo = ei.ColumnInfo.TableInfo; - tableInfo.Database = "database"; - tableInfo.Name = "[database]..[table]"; - tableInfo.SaveToDatabase(); - - var refactorer = new SelectSQLRefactorer(); - refactorer.RefactorTableName(ei, tableInfo, "[database]..[table2]"); - - Assert.AreEqual("UPPER([database]..[table2].[column])", ei.SelectSQL); - } - - [Test] - [TestCase("UPPER([database]..[table].[column])",true)] - [TestCase("dbo.MyScalarFunction([database]..[table].[column]) in Select(distinct [database]..[table].[column] from bob)", true)] - [TestCase("dbo.MyNewRand()", false)] - [TestCase("[dbo].MyScalarFunction([database]..[table].[column]) in Select(distinct [database]..[table].[column] from bob)", true)] - [TestCase("[dbo].MyNewRand()", false)] - public void RefactorTableName_IsRefactorable_ExtractionInformation(string transformSql,bool expectedToBeRefactorable) - { - var ei = WhenIHaveA(); - ei.SelectSQL = transformSql; - ei.Alias = "MyCatalogueItem"; - ei.SaveToDatabase(); - - var ci = ei.ColumnInfo; - ci.Name = "[database]..[table].[column]"; - ci.SaveToDatabase(); - - var tableInfo = ei.ColumnInfo.TableInfo; - tableInfo.Database = "database"; - tableInfo.Name = "[database]..[table]"; - tableInfo.SaveToDatabase(); - - var refactorer = new SelectSQLRefactorer(); - - Assert.AreEqual(expectedToBeRefactorable,refactorer.IsRefactorable(ei)); - - if (expectedToBeRefactorable) - refactorer.RefactorTableName(ei, tableInfo, "[database]..[table2]"); - else - Assert.Throws(() => refactorer.RefactorTableName(ei, tableInfo, "[database]..[table2]")); - } - - 
[TestCase("[Fish]..[MyTbl]","[Fish]..[MyTbl2]")] - public void RefactorTableName_IsRefactorable_TableInfoWithNoColumnInfos(string oldName, string newName) - { - var ti = WhenIHaveA(); - ti.Name = oldName; - ti.Database = "Fish"; - ti.SaveToDatabase(); - - foreach(IDeleteable d in ti.ColumnInfos) - d.DeleteInDatabase(); - - var refactorer = new SelectSQLRefactorer(); - Assert.IsTrue(refactorer.IsRefactorable(ti)); - - Assert.AreEqual(1,refactorer.RefactorTableName(ti,newName)); - Assert.AreEqual(newName,ti.Name); - } - - [TestCase("[Donkey]..[MyTbl]","[Fish]..[MyTbl2]","'[Donkey]..[MyTbl]' has incorrect database propery 'Fish'")] - public void RefactorTableName_IsNotRefactorable_TableInfoWithNoColumnInfos(string oldName, string newName,string expectedReason) - { - var ti = WhenIHaveA(); - ti.Name = oldName; - ti.Database = "Fish"; - ti.SaveToDatabase(); - - foreach(IDeleteable d in ti.ColumnInfos) - d.DeleteInDatabase(); - - var refactorer = new SelectSQLRefactorer(); - Assert.IsFalse(refactorer.IsRefactorable(ti)); - - var ex = Assert.Throws(()=>refactorer.RefactorTableName(ti,newName)); - StringAssert.Contains(expectedReason,ex.Message); - } - - - /// - /// Tests when the Column name does not exactly match the search/replace table name pattern during refactoring. - /// - /// - /// Refactor columns belonging to [Fish].[dbo].[TableA] to now belong in [Fish].[dbo].[TableB] - /// - /// BUT column name is [Fish]..[TableA].[MyCol]. In this case it should be refactored to [Fish].[dbo].[TableB].[MyCol] - /// - /// A column that belongs to which should be refactored even if its name isn't an exact match to the table name - /// The table being renamed, will be renamed MyTbl to MyNewTbl - [TestCase("[Fish]..[MyTbl].[A]", "[Fish]..[MyTbl]")] - [TestCase("[Fish].[dbo].[MyTbl].[A]","[Fish]..[MyTbl]")] - [TestCase("[Fish]..[MyTbl].[A]", "[Fish].[dbo].[MyTbl]")] - [TestCase("[Fish].[dbo].[MyTbl].[A]","[Fish].[dbo].[MyTbl]")] - [TestCase("[Fish].dbo.[MyTbl].[A]", "[Fish]..[MyTbl]")] - [TestCase("[Fish]..[MyTbl].[A]", "[Fish].dbo.[MyTbl]")] - [TestCase("[Fish].dbo.[MyTbl].[A]", "[Fish].dbo.[MyTbl]")] - public void RefactorTableName_IsRefactorable_ColumnInfo(string columnName,string findTableName) - { - var col = WhenIHaveA(); - col.Name = columnName; - col.SaveToDatabase(); - - var refactorer = new SelectSQLRefactorer(); - - var oldName = findTableName; - var newName = oldName.Replace("MyTbl","MyNewTbl"); - - Assert.AreEqual(1,refactorer.RefactorTableName(col,oldName,newName)); - - Assert.AreEqual( newName + ".[A]",col.Name); - } + var ti = WhenIHaveA(); + ti.Name = oldName; + ti.Database = "Fish"; + ti.SaveToDatabase(); + + foreach (IDeleteable d in ti.ColumnInfos) + d.DeleteInDatabase(); + + var refactorer = new SelectSQLRefactorer(); + Assert.IsFalse(SelectSQLRefactorer.IsRefactorable(ti)); + + var ex = Assert.Throws(() => SelectSQLRefactorer.RefactorTableName(ti, newName)); + StringAssert.Contains(expectedReason, ex.Message); + } + + + /// + /// Tests when the Column name does not exactly match the search/replace table name pattern during refactoring. + /// + /// + /// Refactor columns belonging to [Fish].[dbo].[TableA] to now belong in [Fish].[dbo].[TableB] + /// + /// BUT column name is [Fish]..[TableA].[MyCol]. 
In this case it should be refactored to [Fish].[dbo].[TableB].[MyCol] + /// + /// A column that belongs to which should be refactored even if its name isn't an exact match to the table name + /// The table being renamed, will be renamed MyTbl to MyNewTbl + [TestCase("[Fish]..[MyTbl].[A]", "[Fish]..[MyTbl]")] + [TestCase("[Fish].[dbo].[MyTbl].[A]", "[Fish]..[MyTbl]")] + [TestCase("[Fish]..[MyTbl].[A]", "[Fish].[dbo].[MyTbl]")] + [TestCase("[Fish].[dbo].[MyTbl].[A]", "[Fish].[dbo].[MyTbl]")] + [TestCase("[Fish].dbo.[MyTbl].[A]", "[Fish]..[MyTbl]")] + [TestCase("[Fish]..[MyTbl].[A]", "[Fish].dbo.[MyTbl]")] + [TestCase("[Fish].dbo.[MyTbl].[A]", "[Fish].dbo.[MyTbl]")] + public void RefactorTableName_IsRefactorable_ColumnInfo(string columnName, string findTableName) + { + var col = WhenIHaveA(); + col.Name = columnName; + col.SaveToDatabase(); + + var oldName = findTableName; + var newName = oldName.Replace("MyTbl", "MyNewTbl"); + + Assert.AreEqual(1, SelectSQLRefactorer.RefactorTableName(col, oldName, newName)); + + Assert.AreEqual($"{newName}.[A]", col.Name); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/SimpleExampleTests.cs b/Rdmp.Core.Tests/Curation/SimpleExampleTests.cs index 4fd903886d..1149ba3fbe 100644 --- a/Rdmp.Core.Tests/Curation/SimpleExampleTests.cs +++ b/Rdmp.Core.Tests/Curation/SimpleExampleTests.cs @@ -9,73 +9,72 @@ using FAnsi; using NUnit.Framework; using Rdmp.Core.Curation.Data; -using ReusableLibraryCode.DataAccess; +using Rdmp.Core.ReusableLibraryCode.DataAccess; using Tests.Common; -namespace Rdmp.Core.Tests.Curation +namespace Rdmp.Core.Tests.Curation; + +public class SimpleExampleTests : DatabaseTests { - public class SimpleExampleTests : DatabaseTests + [Test] + public void Test1() + { + var cata = new Catalogue(CatalogueRepository, "My Test Cata"); + Assert.IsTrue(cata.Exists()); + } + + [TestCaseSource(typeof(All), nameof(All.DatabaseTypes))] + public void Test2(DatabaseType type) + { + var database = GetCleanedServer(type); + + Assert.IsTrue(database.Exists()); + Assert.IsEmpty(database.DiscoverTables(true)); + Assert.IsNotNull(database.GetRuntimeName()); + } + + [TestCaseSource(typeof(All), nameof(All.DatabaseTypes))] + public void TestReadDataLowPrivileges(DatabaseType type) { - [Test] - public void Test1() - { - var cata = new Catalogue(CatalogueRepository, "My Test Cata"); - Assert.IsTrue(cata.Exists()); - } - - [TestCaseSource(typeof(All),nameof(All.DatabaseTypes))] - public void Test2(DatabaseType type) - { - var database = GetCleanedServer(type); - - Assert.IsTrue(database.Exists()); - Assert.IsEmpty(database.DiscoverTables(true)); - Assert.IsNotNull(database.GetRuntimeName()); - } - - [TestCaseSource(typeof(All),nameof(All.DatabaseTypes))] - public void TestReadDataLowPrivileges(DatabaseType type) - { - var database = GetCleanedServer(type); - - //create a table on the server - var dt = new DataTable(); - dt.Columns.Add("MyCol"); - dt.Rows.Add("Hi"); - dt.PrimaryKey = new[] {dt.Columns[0]}; - - var tbl = database.CreateTable("MyTable", dt); - - //at this point we are reading it with the credentials setup by GetCleanedServer - Assert.AreEqual(1, tbl.GetRowCount()); - Assert.AreEqual(1, tbl.DiscoverColumns().Count()); - Assert.IsTrue(tbl.DiscoverColumn("MyCol").IsPrimaryKey); - - //create a reference to the table in RMDP - Import(tbl, out var tableInfo, out var columnInfos); - - //setup credentials for the table in RDMP (this will be Inconclusive if you have not enabled it in TestDatabases.txt - 
SetupLowPrivilegeUserRightsFor(tableInfo,TestLowPrivilegePermissions.Reader); - - //request access to the database using DataLoad context - var newDatabase = DataAccessPortal.GetInstance().ExpectDatabase(tableInfo, DataAccessContext.DataLoad); - - //get new reference to the table - var newTbl = newDatabase.ExpectTable(tableInfo.GetRuntimeName()); - - //the credentials should be different - Assert.AreNotEqual(tbl.Database.Server.ExplicitUsernameIfAny, newTbl.Database.Server.ExplicitUsernameIfAny); - - //try re-reading the data - Assert.AreEqual(1, newTbl.GetRowCount()); - Assert.AreEqual(1, newTbl.DiscoverColumns().Count()); - Assert.IsTrue(newTbl.DiscoverColumn("MyCol").IsPrimaryKey); - - //low priority user shouldn't be able to drop tables - Assert.That(newTbl.Drop,Throws.Exception); - - //normal testing user should be able to - tbl.Drop(); - } + var database = GetCleanedServer(type); + + //create a table on the server + var dt = new DataTable(); + dt.Columns.Add("MyCol"); + dt.Rows.Add("Hi"); + dt.PrimaryKey = new[] { dt.Columns[0] }; + + var tbl = database.CreateTable("MyTable", dt); + + //at this point we are reading it with the credentials setup by GetCleanedServer + Assert.AreEqual(1, tbl.GetRowCount()); + Assert.AreEqual(1, tbl.DiscoverColumns().Length); + Assert.IsTrue(tbl.DiscoverColumn("MyCol").IsPrimaryKey); + + //create a reference to the table in RDMP + Import(tbl, out var tableInfo, out var columnInfos); + + //setup credentials for the table in RDMP (this will be Inconclusive if you have not enabled it in TestDatabases.txt) + SetupLowPrivilegeUserRightsFor(tableInfo, TestLowPrivilegePermissions.Reader); + + //request access to the database using DataLoad context + var newDatabase = DataAccessPortal.ExpectDatabase(tableInfo, DataAccessContext.DataLoad); + + //get new reference to the table + var newTbl = newDatabase.ExpectTable(tableInfo.GetRuntimeName()); + + //the credentials should be different + Assert.AreNotEqual(tbl.Database.Server.ExplicitUsernameIfAny, newTbl.Database.Server.ExplicitUsernameIfAny); + + //try re-reading the data + Assert.AreEqual(1, newTbl.GetRowCount()); + Assert.AreEqual(1, newTbl.DiscoverColumns().Length); + Assert.IsTrue(newTbl.DiscoverColumn("MyCol").IsPrimaryKey); + + //low privilege user shouldn't be able to drop tables + Assert.That(newTbl.Drop, Throws.Exception); + + //normal testing user should be able to + tbl.Drop(); } } \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Unit/AggregateConfigurationTests.cs b/Rdmp.Core.Tests/Curation/Unit/AggregateConfigurationTests.cs index 001973af30..aae025d9a3 100644 --- a/Rdmp.Core.Tests/Curation/Unit/AggregateConfigurationTests.cs +++ b/Rdmp.Core.Tests/Curation/Unit/AggregateConfigurationTests.cs @@ -6,59 +6,58 @@ using NUnit.Framework; using Rdmp.Core.Curation.Data.Aggregation; -using ReusableLibraryCode.Settings; using System.Data; +using Rdmp.Core.ReusableLibraryCode.Settings; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Unit +namespace Rdmp.Core.Tests.Curation.Unit; + +internal class AggregateConfigurationTests : UnitTests { - class AggregateConfigurationTests : UnitTests + [Test] + public void TestStripZeroSeries_EmptyTable() { - [Test] - public void TestStripZeroSeries_EmptyTable() - { - var dt = new DataTable(); - dt.Columns.Add("col1"); - dt.Columns.Add("col2"); + var dt = new DataTable(); + dt.Columns.Add("col1"); + dt.Columns.Add("col2"); - UserSettings.IncludeZeroSeriesInGraphs = false; + UserSettings.IncludeZeroSeriesInGraphs = false; - // empty tables should not get 
nuked - AggregateConfiguration.AdjustGraphDataTable(dt); - Assert.AreEqual(2, dt.Columns.Count); + // empty tables should not get nuked + AggregateConfiguration.AdjustGraphDataTable(dt); + Assert.AreEqual(2, dt.Columns.Count); - dt.Dispose(); - } - - [TestCase(true)] - [TestCase(false)] - public void TestStripZeroSeries_Nulls(bool includeZeroSeries) - { - var dt = new DataTable(); - dt.Columns.Add("date"); - dt.Columns.Add("col1"); - dt.Columns.Add("col2"); + dt.Dispose(); + } - dt.Rows.Add("2001",0, 12); - dt.Rows.Add("2002",null, 333); + [TestCase(true)] + [TestCase(false)] + public void TestStripZeroSeries_Nulls(bool includeZeroSeries) + { + var dt = new DataTable(); + dt.Columns.Add("date"); + dt.Columns.Add("col1"); + dt.Columns.Add("col2"); - UserSettings.IncludeZeroSeriesInGraphs = includeZeroSeries; + dt.Rows.Add("2001", 0, 12); + dt.Rows.Add("2002", null, 333); - AggregateConfiguration.AdjustGraphDataTable(dt); + UserSettings.IncludeZeroSeriesInGraphs = includeZeroSeries; - if(includeZeroSeries) - { - Assert.AreEqual(3,dt.Columns.Count); - } - else - { - // col1 should have been gotten rid of - Assert.AreEqual(2, dt.Columns.Count); - dt.Columns.Contains("date"); - dt.Columns.Contains("col2"); - } + AggregateConfiguration.AdjustGraphDataTable(dt); - dt.Dispose(); + if (includeZeroSeries) + { + Assert.AreEqual(3, dt.Columns.Count); + } + else + { + // col1 should have been gotten rid of + Assert.AreEqual(2, dt.Columns.Count); + dt.Columns.Contains("date"); + dt.Columns.Contains("col2"); } + + dt.Dispose(); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Unit/CacheFetchRequestProviderTests.cs b/Rdmp.Core.Tests/Curation/Unit/CacheFetchRequestProviderTests.cs index 214bcf213f..7fc7c8bdb3 100644 --- a/Rdmp.Core.Tests/Curation/Unit/CacheFetchRequestProviderTests.cs +++ b/Rdmp.Core.Tests/Curation/Unit/CacheFetchRequestProviderTests.cs @@ -6,123 +6,124 @@ using System; using System.Collections.Generic; -using Moq; +using NSubstitute; using NUnit.Framework; using Rdmp.Core.Caching.Requests; using Rdmp.Core.Caching.Requests.FetchRequestProvider; using Rdmp.Core.Curation.Data; using Rdmp.Core.Curation.Data.Cache; -using ReusableLibraryCode.Progress; +using Rdmp.Core.ReusableLibraryCode.Progress; -namespace Rdmp.Core.Tests.Curation.Unit +namespace Rdmp.Core.Tests.Curation.Unit; + +[Category("Unit")] +public class CacheFetchRequestProviderTests { - [Category("Unit")] - public class CacheFetchRequestProviderTests + /// + /// Test that the fetch request created from the failure request by the provider is valid + /// + [Test] + public void TestFailedFetchRequestProvider_CreationOfFetchRequest() { - /// - /// Test that the fetch request created from the failure request by the provider is valid - /// - [Test] - public void TestFailedFetchRequestProvider_CreationOfFetchRequest() + var failure = Substitute.For(); + failure.FetchRequestStart = new DateTime(2009, 8, 5, 8, 0, 0); + failure.FetchRequestEnd = new DateTime(2009, 8, 5, 16, 0, 0); + failure.LastAttempt = new DateTime(2016, 1, 1, 12, 0, 0); + failure.ResolvedOn = null; + + var failures = new List { - var failure = Mock.Of(); - failure.FetchRequestStart = new DateTime(2009, 8, 5, 8, 0, 0); - failure.FetchRequestEnd = new DateTime(2009, 8, 5, 16, 0, 0); - failure.LastAttempt = new DateTime(2016, 1, 1, 12, 0, 0); - failure.ResolvedOn = null; - - var failures = new List - { - failure - }; - - var cacheProgress = new Mock(); - cacheProgress.Setup(c => c.FetchPage(It.IsAny(), It.IsAny())).Returns(failures); - - var provider 
= new FailedCacheFetchRequestProvider(cacheProgress.Object, 2); - var fetchRequest = provider.GetNext(new ThrowImmediatelyDataLoadEventListener()); - Assert.IsNotNull(fetchRequest); - Assert.AreEqual(fetchRequest.ChunkPeriod, new TimeSpan(8, 0, 0)); - Assert.AreEqual(fetchRequest.Start, failure.FetchRequestStart); - Assert.IsTrue(fetchRequest.IsRetry); - cacheProgress.Verify(); - } - - /// - /// Test that the provider iterates through multiple batches of data retrieved from a repository correctly - /// - [Test] - public void TestFailedFetchRequestProvider_MultiplePages() + failure + }; + + var cacheProgress = Substitute.For(); + cacheProgress.FetchPage(Arg.Any(), Arg.Any()).Returns(failures); + + var provider = new FailedCacheFetchRequestProvider(cacheProgress, 2); + var fetchRequest = provider.GetNext(ThrowImmediatelyDataLoadEventListener.Quiet); + Assert.IsNotNull(fetchRequest); + Assert.AreEqual(fetchRequest.ChunkPeriod, new TimeSpan(8, 0, 0)); + Assert.AreEqual(fetchRequest.Start, failure.FetchRequestStart); + Assert.IsTrue(fetchRequest.IsRetry); + cacheProgress.Received(1); + } + + /// + /// Test that the provider iterates through multiple batches of data retrieved from a repository correctly + /// + [Test] + public void TestFailedFetchRequestProvider_MultiplePages() + { + // Our set of CacheFetchFailures + var failuresPage1 = new List { - // Our set of CacheFetchFailures - var failuresPage1 = new List - { - Mock.Of(), - Mock.Of() - }; - - var failuresPage2 = new List - { - Mock.Of() - }; - - // Stub this so the 'repository' will return the first page, second page then empty page - var cacheProgress = new Mock(); - cacheProgress.SetupSequence>(c => c.FetchPage(It.IsAny(), It.IsAny())) - .Returns(failuresPage1) - .Returns(failuresPage2) - .Returns(new List()) - .Throws(); - - - var provider = new FailedCacheFetchRequestProvider(cacheProgress.Object, 2); - - // We should get three ICacheFetchRequests in total, followed by a null to signify that there are no more ICacheFetchRequests - Assert.IsNotNull(provider.GetNext(new ThrowImmediatelyDataLoadEventListener())); - Assert.IsNotNull(provider.GetNext(new ThrowImmediatelyDataLoadEventListener())); - Assert.IsNotNull(provider.GetNext(new ThrowImmediatelyDataLoadEventListener())); - Assert.IsNull(provider.GetNext(new ThrowImmediatelyDataLoadEventListener())); - } - - /// - /// If we construct the request with a previous failure, then there should be a save operation when the updated failure is persisted to the database - /// - [Test] - public void FailedCacheFetchRequest_SavesPreviousFailure() + Substitute.For(), + Substitute.For() + }; + + var failuresPage2 = new List { - var previousFailure = GetFailureMock(); + Substitute.For() + }; - var cacheProgress = Mock.Of(c => c.PermissionWindow==Mock.Of()); + // Stub this so the 'repository' will return the first page, second page then empty page + var cacheProgress = Substitute.For(); + // cacheProgress.SetupSequence>(); + cacheProgress.FetchPage(Arg.Any(), Arg.Any()) + .Returns(failuresPage1, + failuresPage2, + new List());//, x => { throw new InvalidOperationException(); }); - var request = new CacheFetchRequest(previousFailure.Object, cacheProgress); - request.RequestFailed(new Exception()); - previousFailure.Verify(); - } + var provider = new FailedCacheFetchRequestProvider(cacheProgress, 2); - /// - /// If we construct the request with a previous failure, then Resolve should be called on it when successful - /// - [Test] - public void FailedCacheFetchRequest_ResolveCalled() - { - var 
previousFailure = GetFailureMock(); + // We should get three ICacheFetchRequests in total, followed by a null to signify that there are no more ICacheFetchRequests + Assert.IsNotNull(provider.GetNext(ThrowImmediatelyDataLoadEventListener.Quiet)); + Assert.IsNotNull(provider.GetNext(ThrowImmediatelyDataLoadEventListener.Quiet)); + Assert.IsNotNull(provider.GetNext(ThrowImmediatelyDataLoadEventListener.Quiet)); + Assert.IsNull(provider.GetNext(ThrowImmediatelyDataLoadEventListener.Quiet)); + } - var cacheProgress = Mock.Of(c => c.PermissionWindow==Mock.Of()); + /// + /// If we construct the request with a previous failure, then there should be a save operation when the updated failure is persisted to the database + /// + [Test] + public void FailedCacheFetchRequest_SavesPreviousFailure() + { + var previousFailure = GetFailureMock(); - var request = new CacheFetchRequest(previousFailure.Object, cacheProgress); - request.RequestSucceeded(); + var cacheProgress = Substitute.For(); + cacheProgress.PermissionWindow.Returns(Substitute.For()); - previousFailure.Verify(); - } + var request = new CacheFetchRequest(previousFailure, cacheProgress); + request.RequestFailed(new Exception()); - private Mock GetFailureMock() - { - var failure = Mock.Of(f=> - f.FetchRequestEnd == DateTime.Now && - f.FetchRequestStart == DateTime.Now.Subtract(new TimeSpan(1, 0, 0))); - - return Mock.Get(failure); - } + previousFailure.Received(1); + } + + /// + /// If we construct the request with a previous failure, then Resolve should be called on it when successful + /// + [Test] + public void FailedCacheFetchRequest_ResolveCalled() + { + var previousFailure = GetFailureMock(); + + var cacheProgress = Substitute.For(); + cacheProgress.PermissionWindow.Returns(Substitute.For()); + + var request = new CacheFetchRequest(previousFailure, cacheProgress); + request.RequestSucceeded(); + + previousFailure.Received(1); + } + + private static ICacheFetchFailure GetFailureMock() + { + var failure = Substitute.For(); + failure.FetchRequestEnd.Returns(DateTime.Now); + failure.FetchRequestStart.Returns(DateTime.Now.Subtract(new TimeSpan(1, 0, 0))); + + return failure; } } \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Unit/CacheLagPeriodUnitTests.cs b/Rdmp.Core.Tests/Curation/Unit/CacheLagPeriodUnitTests.cs index f0eded14c1..bf4fe77d9d 100644 --- a/Rdmp.Core.Tests/Curation/Unit/CacheLagPeriodUnitTests.cs +++ b/Rdmp.Core.Tests/Curation/Unit/CacheLagPeriodUnitTests.cs @@ -8,25 +8,24 @@ using NUnit.Framework; using Rdmp.Core.Curation.Data.Cache; -namespace Rdmp.Core.Tests.Curation.Unit +namespace Rdmp.Core.Tests.Curation.Unit; + +[Category("Unit")] +public class CacheLagPeriodUnitTests { - [Category("Unit")] - public class CacheLagPeriodUnitTests + [Test] + public void TestOperator() { - [Test] - public void TestOperator() - { - Assert.IsTrue(new TimeSpan(32, 0, 0, 0) > new CacheLagPeriod("1m")); - Assert.IsTrue(new TimeSpan(24, 0, 0, 0) < new CacheLagPeriod("1m")); + Assert.IsTrue(new TimeSpan(32, 0, 0, 0) > new CacheLagPeriod("1m")); + Assert.IsTrue(new TimeSpan(24, 0, 0, 0) < new CacheLagPeriod("1m")); - Assert.IsTrue(new TimeSpan(3, 0, 0, 0) > new CacheLagPeriod("2d")); - Assert.IsFalse(new TimeSpan(3, 0, 0, 0) > new CacheLagPeriod("3d")); - Assert.IsFalse(new TimeSpan(2, 0, 0, 0) < new CacheLagPeriod("2d")); - Assert.IsTrue(new TimeSpan(1, 0, 0, 0) < new CacheLagPeriod("2d")); + Assert.IsTrue(new TimeSpan(3, 0, 0, 0) > new CacheLagPeriod("2d")); + Assert.IsFalse(new TimeSpan(3, 0, 0, 0) > new 
CacheLagPeriod("3d")); + Assert.IsFalse(new TimeSpan(2, 0, 0, 0) < new CacheLagPeriod("2d")); + Assert.IsTrue(new TimeSpan(1, 0, 0, 0) < new CacheLagPeriod("2d")); - Assert.IsFalse(new TimeSpan(2, 0, 0, 1) < new CacheLagPeriod("2d")); - Assert.IsFalse(new TimeSpan(2, 0, 0, 0) < new CacheLagPeriod("2d")); - Assert.IsTrue(new TimeSpan(2, 0, 0, 1) > new CacheLagPeriod("2d")); - } + Assert.IsFalse(new TimeSpan(2, 0, 0, 1) < new CacheLagPeriod("2d")); + Assert.IsFalse(new TimeSpan(2, 0, 0, 0) < new CacheLagPeriod("2d")); + Assert.IsTrue(new TimeSpan(2, 0, 0, 1) > new CacheLagPeriod("2d")); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Unit/CatalogueNamingTests.cs b/Rdmp.Core.Tests/Curation/Unit/CatalogueNamingTests.cs index 8c65ceaf34..4771256035 100644 --- a/Rdmp.Core.Tests/Curation/Unit/CatalogueNamingTests.cs +++ b/Rdmp.Core.Tests/Curation/Unit/CatalogueNamingTests.cs @@ -7,30 +7,29 @@ using NUnit.Framework; using Rdmp.Core.Curation.Data; -namespace Rdmp.Core.Tests.Curation.Unit +namespace Rdmp.Core.Tests.Curation.Unit; + +[Category("Unit")] +public class CatalogueNamingTests { - [Category("Unit")] - public class CatalogueNamingTests + [Test] + [TestCase("###")] + [TestCase("Bob\\bob")] + [TestCase("Frank.txt")] + [TestCase("WTF?")] + public void StupidCatalogueNames(string name) { + Assert.IsFalse(Catalogue.IsAcceptableName(name)); + } - [Test] - [TestCase("###")] - [TestCase("Bob\\bob")] - [TestCase("Frank.txt")] - [TestCase("WTF?")] - public void StupidCatalogueNames(string name) - { - Assert.IsFalse(Catalogue.IsAcceptableName(name)); - } - [Test] - [TestCase("Hi")] - [TestCase("MyhExchiting dAtaset")] - [TestCase("Bobs dataset (123)")] - [TestCase("(Break in case of emergency)")] - [TestCase("Bob&Betty")] - public void SensibleCatalogueNames(string name) - { - Assert.IsTrue(Catalogue.IsAcceptableName(name)); - } + [Test] + [TestCase("Hi")] + [TestCase("MyhExchiting dAtaset")] + [TestCase("Bobs dataset (123)")] + [TestCase("(Break in case of emergency)")] + [TestCase("Bob&Betty")] + public void SensibleCatalogueNames(string name) + { + Assert.IsTrue(Catalogue.IsAcceptableName(name)); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Unit/CommitAssemblyTest.cs b/Rdmp.Core.Tests/Curation/Unit/CommitAssemblyTest.cs index e05ada425c..27e6c4aa70 100644 --- a/Rdmp.Core.Tests/Curation/Unit/CommitAssemblyTest.cs +++ b/Rdmp.Core.Tests/Curation/Unit/CommitAssemblyTest.cs @@ -6,29 +6,28 @@ using System; using NUnit.Framework; -using Rdmp.Core.DataLoad.Modules; -namespace Rdmp.Core.Tests.Curation.Unit +namespace Rdmp.Core.Tests.Curation.Unit; + +[Category("Unit")] +public class CommitAssemblyTest { - [Category("Unit")] - public class CommitAssemblyTest + [Test] + public void TestGetTypeByName() + { + const ScheduleStrategy s = ScheduleStrategy.Test; + Console.Write(s.GetType().FullName); + + var t = Type.GetType(s.GetType().AssemblyQualifiedName); + + Assert.AreEqual(s.GetType(), t); + } + + public enum ScheduleStrategy { - [Test] - public void TestGetTypeByName() - { - const ScheduleStrategy s = ScheduleStrategy.Test; - Console.Write(s.GetType().FullName); - - var t = Type.GetType(s.GetType().AssemblyQualifiedName); - - Assert.AreEqual(s.GetType(),t); - } - public enum ScheduleStrategy - { - NotSet, - Override, - Test, - Continuous - }; + NotSet, + Override, + Test, + Continuous } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Unit/ExerciseData/TestBiochemistryCreation.cs 
b/Rdmp.Core.Tests/Curation/Unit/ExerciseData/TestBiochemistryCreation.cs index 73459233dc..c5e48e64e4 100644 --- a/Rdmp.Core.Tests/Curation/Unit/ExerciseData/TestBiochemistryCreation.cs +++ b/Rdmp.Core.Tests/Curation/Unit/ExerciseData/TestBiochemistryCreation.cs @@ -10,43 +10,43 @@ using BadMedicine.Datasets; using NUnit.Framework; -namespace Rdmp.Core.Tests.Curation.Unit.ExerciseData +namespace Rdmp.Core.Tests.Curation.Unit.ExerciseData; + +[Category("Unit")] +public class TestBiochemistryCreation { - [Category("Unit")] - public class TestBiochemistryCreation + [Test] + [TestCase(1000)] + [TestCase(321)] + [TestCase(100000)] + public void CreateCSV(int numberOfRecords) { - [Test] - [TestCase(1000)] - [TestCase(321)] - [TestCase(100000)] - public void CreateCSV(int numberOfRecords) - { - var r = new Random(500); - var people = new PersonCollection(); - people.GeneratePeople(100,r); + var r = new Random(500); + var people = new PersonCollection(); + people.GeneratePeople(100, r); - var f = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory,"DeleteMeTestBiochemistry.csv")); + var f = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, "DeleteMeTestBiochemistry.csv")); - bool finished = false; - int finishedWithRecords = -1; + var finished = false; + var finishedWithRecords = -1; - var biochem = new Biochemistry(new Random(500)); - biochem.RowsGenerated += (s, e) => - { - finished = e.IsFinished; - finishedWithRecords = e.RowsWritten; - }; + var biochem = new Biochemistry(new Random(500)); + biochem.RowsGenerated += (s, e) => + { + finished = e.IsFinished; + finishedWithRecords = e.RowsWritten; + }; - biochem.GenerateTestDataFile(people, f, numberOfRecords); + biochem.GenerateTestDataFile(people, f, numberOfRecords); - //one progress task only, should have reported creating the correct number of rows - Assert.IsTrue(finished); - Assert.AreEqual(numberOfRecords, finishedWithRecords); + //one progress task only, should have reported creating the correct number of rows + Assert.IsTrue(finished); + Assert.AreEqual(numberOfRecords, finishedWithRecords); - Assert.GreaterOrEqual(File.ReadAllLines(f.FullName).Length, numberOfRecords);//can be newlines in middle of file + Assert.GreaterOrEqual(File.ReadAllLines(f.FullName).Length, + numberOfRecords); //can be newlines in middle of file - Console.WriteLine("Created file: " + f.FullName); - f.Delete(); - } + Console.WriteLine($"Created file: {f.FullName}"); + f.Delete(); } } \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Unit/ExerciseData/TestDemographyCreation.cs b/Rdmp.Core.Tests/Curation/Unit/ExerciseData/TestDemographyCreation.cs index 9bfcb7566c..e1efc2deea 100644 --- a/Rdmp.Core.Tests/Curation/Unit/ExerciseData/TestDemographyCreation.cs +++ b/Rdmp.Core.Tests/Curation/Unit/ExerciseData/TestDemographyCreation.cs @@ -10,46 +10,45 @@ using BadMedicine.Datasets; using NUnit.Framework; -namespace Rdmp.Core.Tests.Curation.Unit.ExerciseData +namespace Rdmp.Core.Tests.Curation.Unit.ExerciseData; + +[Category("Unit")] +public class TestDemographyCreation { - [Category("Unit")] - public class TestDemographyCreation + [Test] + [TestCase(1000)] + [TestCase(321)] + [TestCase(100000)] + public void CreateCSV(int numberOfRecords) { - [Test] - [TestCase(1000)] - [TestCase(321)] - [TestCase(100000)] - public void CreateCSV(int numberOfRecords) - { - var r = new Random(500); + var r = new Random(500); + + var people = new PersonCollection(); + people.GeneratePeople(100, r); - PersonCollection people = new 
PersonCollection(); - people.GeneratePeople(100,r); + var f = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, "DeleteMeTestPeople.csv")); - var f =new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory,"DeleteMeTestPeople.csv")); - - bool finished = false; - int finishedWithRecords = -1; - - - Demography demog = new Demography(r); - demog.RowsGenerated += (s, e) => - { - finished = e.IsFinished; - finishedWithRecords = e.RowsWritten; - }; + var finished = false; + var finishedWithRecords = -1; - demog.GenerateTestDataFile(people, f, numberOfRecords); - //one progress task only, should have reported craeting 10,000 rows - //one progress task only, should have reported creating the correct number of rows - Assert.IsTrue(finished); - Assert.AreEqual(numberOfRecords, finishedWithRecords); + var demog = new Demography(r); + demog.RowsGenerated += (s, e) => + { + finished = e.IsFinished; + finishedWithRecords = e.RowsWritten; + }; + + demog.GenerateTestDataFile(people, f, numberOfRecords); - Assert.GreaterOrEqual(File.ReadAllLines(f.FullName).Length, numberOfRecords);//can be newlines in middle of file + //one progress task only, should have reported creating 10,000 rows + //one progress task only, should have reported creating the correct number of rows + Assert.IsTrue(finished); + Assert.AreEqual(numberOfRecords, finishedWithRecords); - f.Delete(); - } + Assert.GreaterOrEqual(File.ReadAllLines(f.FullName).Length, + numberOfRecords);//can be newlines in middle of file + f.Delete(); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Unit/ExerciseData/TestPrescribingCreation.cs b/Rdmp.Core.Tests/Curation/Unit/ExerciseData/TestPrescribingCreation.cs index 4e04f75566..061fc37c31 100644 --- a/Rdmp.Core.Tests/Curation/Unit/ExerciseData/TestPrescribingCreation.cs +++ b/Rdmp.Core.Tests/Curation/Unit/ExerciseData/TestPrescribingCreation.cs @@ -6,50 +6,46 @@ using System; using System.IO; +using System.Linq; using BadMedicine; using BadMedicine.Datasets; using NUnit.Framework; -namespace Rdmp.Core.Tests.Curation.Unit.ExerciseData +namespace Rdmp.Core.Tests.Curation.Unit.ExerciseData; + +[Category("Unit")] +public class TestPrescribingCreation { - [Category("Unit")] - public class TestPrescribingCreation + [Test] + [TestCase(1000)] + [TestCase(321)] + [TestCase(100000)] + public void CreateCSV(int numberOfRecords) { - [Test] - [TestCase(1000)] - [TestCase(321)] - [TestCase(100000)] - public void CreateCSV(int numberOfRecords) - { - var r = new Random(500); - - var people = new PersonCollection(); - people.GeneratePeople(100,r); + var r = new Random(500); - var f = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory,"DeleteMeTestPrescribing.csv")); + var people = new PersonCollection(); + people.GeneratePeople(100, r); - bool finished = false; - int finishedWithRecords = -1; - - var prescribing = new Prescribing(r); - prescribing.RowsGenerated += (s, e) => - { - finished = e.IsFinished; - finishedWithRecords = e.RowsWritten; - }; + var f = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, "DeleteMeTestPrescribing.csv")); - prescribing.GenerateTestDataFile(people, f, numberOfRecords); + var finished = false; + var finishedWithRecords = -1; - //one progress task only, should have reported creating the correct number of rows - Assert.IsTrue(finished); - Assert.AreEqual(numberOfRecords, finishedWithRecords); - - Assert.GreaterOrEqual(File.ReadAllLines(f.FullName).Length, numberOfRecords);//can be newlines in middle of file 
+ var prescribing = new Prescribing(r); + prescribing.RowsGenerated += (s, e) => + { + finished = e.IsFinished; + finishedWithRecords = e.RowsWritten; + }; - Console.WriteLine("Created file: " + f.FullName); - f.Delete(); - } + prescribing.GenerateTestDataFile(people, f, numberOfRecords); + //one progress task only, should have reported creating the correct number of rows + Assert.IsTrue(finished); + Assert.AreEqual(numberOfRecords, finishedWithRecords); + Assert.GreaterOrEqual(File.ReadLines(f.FullName).Count(), numberOfRecords); //can be newlines in middle of file + f.Delete(); } } \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Unit/IColumnTests.cs b/Rdmp.Core.Tests/Curation/Unit/IColumnTests.cs index 418c682eae..e33c21ff4b 100644 --- a/Rdmp.Core.Tests/Curation/Unit/IColumnTests.cs +++ b/Rdmp.Core.Tests/Curation/Unit/IColumnTests.cs @@ -7,147 +7,154 @@ using System.Data; using FAnsi.Discovery.QuerySyntax; using FAnsi.Implementations.MicrosoftSQL; -using MapsDirectlyToDatabaseTable; -using MapsDirectlyToDatabaseTable.Attributes; using NUnit.Framework; using Rdmp.Core.Curation.Data; using Rdmp.Core.Curation.Data.Spontaneous; +using Rdmp.Core.MapsDirectlyToDatabaseTable; +using Rdmp.Core.MapsDirectlyToDatabaseTable.Attributes; using Rdmp.Core.QueryBuilding; using Rdmp.Core.QueryBuilding.SyntaxChecking; -using ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Checks; -namespace Rdmp.Core.Tests.Curation.Unit +namespace Rdmp.Core.Tests.Curation.Unit; + +[Category("Unit")] +internal class IColumnTests { - [Category("Unit")] - class IColumnTests + /// + /// For tests + /// + private class TestColumn : SpontaneousObject, IColumn { - - /// - /// For tests - /// - private class TestColumn:SpontaneousObject,IColumn + public TestColumn() : base(new MemoryRepository()) { - public TestColumn():base(new MemoryRepository()) - { - - } - public string GetRuntimeName() - { - var helper = new MicrosoftQuerySyntaxHelper(); - - return Alias ?? helper.GetRuntimeName(SelectSQL); - } - - public ColumnInfo ColumnInfo { get; private set; } - public int Order { get; set; } - - [Sql] - public string SelectSQL { get; set; } - public string Alias { get; set; } - public bool HashOnDataRelease { get; private set; } - public bool IsExtractionIdentifier { get; private set; } - public bool IsPrimaryKey { get; private set; } - public void Check(ICheckNotifier notifier) - { - new ColumnSyntaxChecker(this).Check(notifier); - } } - [Test] - public void GetRuntimeName_Strings_Pass() + public string GetRuntimeName() { - var syntax = new MicrosoftQuerySyntaxHelper(); - - Assert.AreEqual(syntax.GetRuntimeName("[test]"), "test"); - Assert.AreEqual(syntax.GetRuntimeName("`test`"), "`test`"); - Assert.AreEqual(syntax.GetRuntimeName("`[test]`"), "`[test]`"); - Assert.AreEqual(syntax.GetRuntimeName("[mydb].[test]"), "test"); - Assert.AreEqual(syntax.GetRuntimeName("`mymysqldb`.`test`"), "`test`"); - Assert.AreEqual(syntax.GetRuntimeName("[mydb]..[test]"), "test"); - Assert.AreEqual(syntax.GetRuntimeName("[SERVER].[mydb]..[test]"), "test"); - } + var helper = MicrosoftQuerySyntaxHelper.Instance; - [Test] - public void GetRuntimeName_IColumns_Pass() - { - TestColumn tc = new TestColumn(); + return Alias ?? 
helper.GetRuntimeName(SelectSQL); + } - tc.Alias = "test"; - Assert.AreEqual(tc.GetRuntimeName(),"test"); + public ColumnInfo ColumnInfo { get; private set; } + public int Order { get; set; } - tc.SelectSQL = "MangleQuery([mydb]..[myExcitingField])"; //still has Alias - Assert.AreEqual(tc.GetRuntimeName(),"test"); + [Sql] public string SelectSQL { get; set; } + public string Alias { get; set; } + public bool HashOnDataRelease { get; private set; } + public bool IsExtractionIdentifier { get; private set; } + public bool IsPrimaryKey { get; private set; } - tc.Alias = null; - tc.SelectSQL = "[mydb]..[myExcitingField]"; - Assert.AreEqual(tc.GetRuntimeName(), "myExcitingField"); - + public void Check(ICheckNotifier notifier) + { + new ColumnSyntaxChecker(this).Check(notifier); } + } - [Test] - public void GetRuntimeName_IColumns_ThrowBecauseMissingAliasOnScalarValueFunction() + [Test] + public void GetRuntimeName_Strings_Pass() + { + var syntax = MicrosoftQuerySyntaxHelper.Instance; + + Assert.AreEqual(syntax.GetRuntimeName("[test]"), "test"); + Assert.AreEqual(syntax.GetRuntimeName("`test`"), "`test`"); + Assert.AreEqual(syntax.GetRuntimeName("`[test]`"), "`[test]`"); + Assert.AreEqual(syntax.GetRuntimeName("[mydb].[test]"), "test"); + Assert.AreEqual(syntax.GetRuntimeName("`mymysqldb`.`test`"), "`test`"); + Assert.AreEqual(syntax.GetRuntimeName("[mydb]..[test]"), "test"); + Assert.AreEqual(syntax.GetRuntimeName("[SERVER].[mydb]..[test]"), "test"); + } + + [Test] + public void GetRuntimeName_IColumns_Pass() + { + var tc = new TestColumn { - TestColumn tc = new TestColumn(); + Alias = "test" + }; - tc.SelectSQL = "MangleQuery([mydb]..[myExcitingField])"; - var ex = Assert.Throws(()=> tc.GetRuntimeName()); - } + Assert.AreEqual(tc.GetRuntimeName(), "test"); + + tc.SelectSQL = "MangleQuery([mydb]..[myExcitingField])"; //still has Alias + Assert.AreEqual(tc.GetRuntimeName(), "test"); + tc.Alias = null; + tc.SelectSQL = "[mydb]..[myExcitingField]"; + Assert.AreEqual(tc.GetRuntimeName(), "myExcitingField"); + } - [Test] - public void CheckSyntax_IColumn_Valid() + [Test] + public void GetRuntimeName_IColumns_ThrowBecauseMissingAliasOnScalarValueFunction() + { + var tc = new TestColumn { - TestColumn tc = new TestColumn(); - - tc.Alias = "[bob smith]"; - tc.Check(new ThrowImmediatelyCheckNotifier()); - tc.Alias = "`bob smith`"; - tc.Check(new ThrowImmediatelyCheckNotifier()); - tc.Alias = "`[bob smith]`"; - tc.Check(new ThrowImmediatelyCheckNotifier()); + SelectSQL = "MangleQuery([mydb]..[myExcitingField])" + }; - } + var ex = Assert.Throws(() => tc.GetRuntimeName()); + } - [Test] - public void CheckSyntax_IColumn_ThrowBecauseInvalidAlias1() + [Test] + public void CheckSyntax_IColumn_Valid() + { + var tc = new TestColumn { - TestColumn tc = new TestColumn(); - tc.Alias = "bob smith"; - var ex = Assert.Throws(()=>tc.Check(new ThrowImmediatelyCheckNotifier())); - Assert.AreEqual("Whitespace found in unwrapped Alias \"bob smith\"",ex.Message); + Alias = "[bob smith]" + }; + + tc.Check(ThrowImmediatelyCheckNotifier.Quiet); + tc.Alias = "`bob smith`"; + tc.Check(ThrowImmediatelyCheckNotifier.Quiet); + tc.Alias = "`[bob smith]`"; + tc.Check(ThrowImmediatelyCheckNotifier.Quiet); + } - } - [Test] - public void CheckSyntax_IColumn_ThrowBecauseInvalidAlias2() + [Test] + public void CheckSyntax_IColumn_ThrowBecauseInvalidAlias1() + { + var tc = new TestColumn { - TestColumn tc = new TestColumn(); - tc.Alias = "`bob"; + Alias = "bob smith" + }; + var ex = Assert.Throws(() => 
tc.Check(ThrowImmediatelyCheckNotifier.Quiet)); + Assert.AreEqual("Whitespace found in unwrapped Alias \"bob smith\"", ex.Message); + } - var ex = Assert.Throws(() => tc.Check(new ThrowImmediatelyCheckNotifier())); - Assert.AreEqual("Invalid characters found in Alias \"`bob\"",ex.Message); - - } - [Test] - public void CheckSyntax_IColumn_ThrowBecauseInvalidAlias3() + [Test] + public void CheckSyntax_IColumn_ThrowBecauseInvalidAlias2() + { + var tc = new TestColumn { - TestColumn tc = new TestColumn(); - tc.Alias = "bob]"; - var ex = Assert.Throws(() => tc.Check(new ThrowImmediatelyCheckNotifier())); - Assert.AreEqual("Invalid characters found in Alias \"bob]\"",ex.Message); - - } + Alias = "`bob" + }; - [Test] - - public void CheckSyntax_IColumn_ThrowBecauseInvalidSelectSQL() + var ex = Assert.Throws(() => tc.Check(ThrowImmediatelyCheckNotifier.Quiet)); + Assert.AreEqual("Invalid characters found in Alias \"`bob\"", ex.Message); + } + + [Test] + public void CheckSyntax_IColumn_ThrowBecauseInvalidAlias3() + { + var tc = new TestColumn { - TestColumn tc = new TestColumn(); - tc.Alias = "bob"; - tc.SelectSQL = "GetSomething('here'"; - var ex = Assert.Throws(() => tc.Check(new ThrowImmediatelyCheckNotifier())); - Assert.AreEqual("Mismatch in the number of opening '(' and closing ')'",ex.Message); - } + Alias = "bob]" + }; + var ex = Assert.Throws(() => tc.Check(ThrowImmediatelyCheckNotifier.Quiet)); + Assert.AreEqual("Invalid characters found in Alias \"bob]\"", ex.Message); + } + + [Test] + public void CheckSyntax_IColumn_ThrowBecauseInvalidSelectSQL() + { + var tc = new TestColumn + { + Alias = "bob", + SelectSQL = "GetSomething('here'" + }; + var ex = Assert.Throws(() => tc.Check(ThrowImmediatelyCheckNotifier.Quiet)); + Assert.AreEqual("Mismatch in the number of opening '(' and closing ')'", ex.Message); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Unit/IMightBeReadOnlyTests.cs b/Rdmp.Core.Tests/Curation/Unit/IMightBeReadOnlyTests.cs index c4eced53e2..a42e97f7b3 100644 --- a/Rdmp.Core.Tests/Curation/Unit/IMightBeReadOnlyTests.cs +++ b/Rdmp.Core.Tests/Curation/Unit/IMightBeReadOnlyTests.cs @@ -13,91 +13,90 @@ using Rdmp.Core.Repositories; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Unit +namespace Rdmp.Core.Tests.Curation.Unit; + +internal class IMightBeReadOnlyTests : UnitTests { - class IMightBeReadOnlyTests : UnitTests + [Test] + public void IsReadonly_AggregateFilterContainer() + { + //im probably an orphan + var c = WhenIHaveA(); + Assert.IsFalse(c.ShouldBeReadOnly(out _)); + + //now I am in a cic + var cic = WhenIHaveA(); + cic.Name = "fff"; + cic.CreateRootContainerIfNotExists(); + cic.RootCohortAggregateContainer.AddChild(c.GetAggregate(), 0); + + Assert.IsFalse(c.ShouldBeReadOnly(out _)); + + cic.Frozen = true; + Assert.IsTrue(c.ShouldBeReadOnly(out var reason)); + + Assert.AreEqual("fff is Frozen", reason); + } + + [Test] + public void IsReadonly_ExtractionFilterContainer() + { + var c = WhenIHaveA(); + Assert.IsFalse(c.ShouldBeReadOnly(out _)); + + var ec = c.GetSelectedDataSetIfAny().ExtractionConfiguration; + + Assert.IsFalse(c.ShouldBeReadOnly(out _)); + + ec.Name = "lll"; + ec.IsReleased = true; + Assert.IsTrue(c.ShouldBeReadOnly(out var reason)); + + Assert.AreEqual("lll has already been released", reason); + } + + [Test] + public void IsReadonly_SpontaneousContainer() + { + var memoryrepo = new MemoryCatalogueRepository(); + var c = new SpontaneouslyInventedFilterContainer(memoryrepo, null, null, FilterContainerOperation.AND); + 
Assert.IsFalse(c.ShouldBeReadOnly(out _), + "Spont containers should never be in UI but let's not tell the programmer they shouldn't be edited"); + } + + + [Test] + public void IsReadonly_AggregateFilter() + { + //im probably an orphan + var f = WhenIHaveA(); + Assert.IsFalse(f.ShouldBeReadOnly(out _)); + + //now I am in a cic + var cic = WhenIHaveA(); + cic.Name = "fff"; + cic.CreateRootContainerIfNotExists(); + cic.RootCohortAggregateContainer.AddChild(f.GetAggregate(), 0); + + Assert.IsFalse(f.ShouldBeReadOnly(out _)); + + cic.Frozen = true; + Assert.IsTrue(f.ShouldBeReadOnly(out var reason)); + + Assert.AreEqual("fff is Frozen", reason); + } + + [Test] + public void IsReadonly_DeployedExtractionFilter() { - [Test] - public void IsReadonly_AggregateFilterContainer() - { - //im probably an orphan - var c = WhenIHaveA(); - Assert.IsFalse(c.ShouldBeReadOnly(out _)); - - //now I am in a cic - var cic = WhenIHaveA(); - cic.Name = "fff"; - cic.CreateRootContainerIfNotExists(); - cic.RootCohortAggregateContainer.AddChild(c.GetAggregate(),0); - - Assert.IsFalse(c.ShouldBeReadOnly(out _)); - - cic.Frozen = true; - Assert.IsTrue(c.ShouldBeReadOnly(out string reason)); - - Assert.AreEqual("fff is Frozen",reason); - } - - [Test] - public void IsReadonly_ExtractionFilterContainer() - { - var c = WhenIHaveA(); - Assert.IsFalse(c.ShouldBeReadOnly(out _)); - - var ec = c.GetSelectedDataSetIfAny().ExtractionConfiguration; - - Assert.IsFalse(c.ShouldBeReadOnly(out _)); - - ec.Name = "lll"; - ec.IsReleased = true; - Assert.IsTrue(c.ShouldBeReadOnly(out string reason)); - - Assert.AreEqual("lll has already been released",reason); - } - - [Test] - public void IsReadonly_SpontaneousContainer() - { - var memoryrepo = new MemoryCatalogueRepository(); - var c = new SpontaneouslyInventedFilterContainer(memoryrepo,null,null,FilterContainerOperation.AND); - Assert.IsFalse(c.ShouldBeReadOnly(out _),"Spont containers should never be in UI but let's not tell the programmer they shouldn't be edited"); - } - - - - [Test] - public void IsReadonly_AggregateFilter() - { - //im probably an orphan - var f = WhenIHaveA(); - Assert.IsFalse(f.ShouldBeReadOnly(out _)); - - //now I am in a cic - var cic = WhenIHaveA(); - cic.Name = "fff"; - cic.CreateRootContainerIfNotExists(); - cic.RootCohortAggregateContainer.AddChild(f.GetAggregate(),0); - - Assert.IsFalse(f.ShouldBeReadOnly(out _)); - - cic.Frozen = true; - Assert.IsTrue(f.ShouldBeReadOnly(out string reason)); - - Assert.AreEqual("fff is Frozen",reason); - } - - [Test] - public void IsReadonly_DeployedExtractionFilter() - { - var f = WhenIHaveA(); - Assert.IsFalse(f.ShouldBeReadOnly(out _)); - - var ec = ((FilterContainer) f.FilterContainer).GetSelectedDataSetIfAny().ExtractionConfiguration; - ec.Name = "lll"; - ec.IsReleased = true; - Assert.IsTrue(f.ShouldBeReadOnly(out string reason)); - - Assert.AreEqual("lll has already been released",reason); - } + var f = WhenIHaveA(); + Assert.IsFalse(f.ShouldBeReadOnly(out _)); + + var ec = ((FilterContainer)f.FilterContainer).GetSelectedDataSetIfAny().ExtractionConfiguration; + ec.Name = "lll"; + ec.IsReleased = true; + Assert.IsTrue(f.ShouldBeReadOnly(out var reason)); + + Assert.AreEqual("lll has already been released", reason); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Unit/ObjectConstructorTests.cs b/Rdmp.Core.Tests/Curation/Unit/ObjectConstructorTests.cs index e4bf2fd3b3..6130dbfec2 100644 --- a/Rdmp.Core.Tests/Curation/Unit/ObjectConstructorTests.cs +++ 
b/Rdmp.Core.Tests/Curation/Unit/ObjectConstructorTests.cs @@ -9,164 +9,157 @@ using System.Linq; using NUnit.Framework; using Rdmp.Core.Curation.Data; -using Rdmp.Core.Repositories; using Rdmp.Core.Repositories.Construction; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Unit +namespace Rdmp.Core.Tests.Curation.Unit; + +[Category("Unit")] +public class ObjectConstructorTests : UnitTests { - [Category("Unit")] - public class ObjectConstructorTests : UnitTests + [Test] + public void ConstructValidTests() { - [Test] - public void ConstructValidTests() - { - var constructor =new ObjectConstructor(); - var testarg = new TestArg(){Text = "amagad"}; - var testarg2 = new TestArg2() { Text = "amagad" }; - - //anyone can construct on object! - constructor.Construct(typeof(TestClass1),testarg); - constructor.Construct(typeof(TestClass1), testarg2); - - //basic case - identical Type parameter - var instance = (TestClass2)constructor.Construct(typeof(TestClass2), testarg); - Assert.AreEqual(instance.A.Text, "amagad"); - //also allowed because testarg2 is a testarg derrived class - constructor.Construct(typeof(TestClass2), testarg2); - - //not allowed because class 3 explicitly requires a TestArg2 - Assert.Throws(()=>constructor.Construct(typeof(TestClass3), testarg)); - - //allowed - constructor.Construct(typeof(TestClass3), testarg2); - - //valid because even though both constructors are valid there is one that matches EXACTLY on Type - constructor.Construct(typeof(TestClass4), testarg2); - - var testarg3 = new TestArg3(); - - //not valid because there are 2 constructors that are both base classes of TestArg3 so ObjectConstructor doesn't know which to invoke - var ex = Assert.Throws(()=>constructor.Construct(typeof (TestClass4), testarg3)); - Assert.IsTrue(ex.Message.Contains("Could not pick the correct constructor between")); - - //exactly the same as the above case but one constructor has been decorated with [UseWithObjectConstructor] attribute - constructor.Construct(typeof (TestClass5), testarg3); - } + var testarg = new TestArg { Text = "amagad" }; + var testarg2 = new TestArg2 { Text = "amagad" }; - [Test] - public void ConstructIfPossibleTests_BlankConstructors() - { - var constructor = new ObjectConstructor(); - - //blank constructors are only used if no params are specified - Assert.IsNotNull(constructor.ConstructIfPossible(typeof(TestClassDefaultConstructor))); - - //no constructor taking an int - Assert.IsNull(constructor.ConstructIfPossible(typeof(TestClassDefaultConstructor),8)); - } + //anyone can construct on object! 
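This file also drops the `var constructor = new ObjectConstructor()` instances in favour of static `ObjectConstructor.Construct(...)` calls. As a rough illustration of that refactor on a stateless helper (hypothetical `Builder` type, and deliberately without the exact-match or [UseWithObjectConstructor] rules the real class applies):

    // Sketch only; requires: using System; using System.Linq;
    public static class Builder
    {
        // Picks the first public constructor whose single parameter accepts 'arg'.
        public static object Build(Type t, object arg)
        {
            var ctor = t.GetConstructors()
                .FirstOrDefault(c => c.GetParameters().Length == 1 &&
                                     c.GetParameters()[0].ParameterType.IsInstanceOfType(arg));
            if (ctor == null)
                throw new ArgumentException($"No single-argument constructor on {t.Name} accepts {arg.GetType().Name}");
            return ctor.Invoke(new[] { arg });
        }
    }

Because no per-call state is kept, making the helper static removes an allocation at every call site without changing behaviour.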
+ ObjectConstructor.Construct(typeof(TestClass1), testarg); + ObjectConstructor.Construct(typeof(TestClass1), testarg2); - [Test] - public void GetRepositoryConstructor_AllDatabaseEntities_OneWinningConstructor() - { - SetupMEF(); - - int countCompatible = 0; + //basic case - identical Type parameter + var instance = (TestClass2)ObjectConstructor.Construct(typeof(TestClass2), testarg); + Assert.AreEqual(instance.A.Text, "amagad"); + //also allowed because testarg2 is a testarg derived class + ObjectConstructor.Construct(typeof(TestClass2), testarg2); - var badTypes = new Dictionary(); - foreach (Type t in MEF.GetAllTypes().Where(typeof(DatabaseEntity).IsAssignableFrom)) - { - try - { - var oc = new ObjectConstructor(); - Assert.IsNotNull(oc.GetRepositoryConstructor(typeof(Catalogue))); - countCompatible++; - } - catch (Exception e) - { - badTypes.Add(t,e); - } - } + //not allowed because class 3 explicitly requires a TestArg2 + Assert.Throws(() => + ObjectConstructor.Construct(typeof(TestClass3), testarg)); - Assert.IsEmpty(badTypes); - Assert.GreaterOrEqual(countCompatible,10); - Console.WriteLine("Found compatible constructors on " + countCompatible + " objects"); - } + //allowed + ObjectConstructor.Construct(typeof(TestClass3), testarg2); - class TestClassDefaultConstructor - { - - } + //valid because even though both constructors are valid there is one that matches EXACTLY on Type + ObjectConstructor.Construct(typeof(TestClass4), testarg2); + var testarg3 = new TestArg3(); - class TestClass1 - { - public TestClass1(object o) - { - - } - } - class TestClass2 - { - public TestArg A { get; set; } + //not valid because there are 2 constructors that are both base classes of TestArg3 so ObjectConstructor doesn't know which to invoke + var ex = Assert.Throws(() => + ObjectConstructor.Construct(typeof(TestClass4), testarg3)); + Assert.IsTrue(ex?.Message.Contains("Could not pick the correct constructor between")); - public TestClass2(TestArg a) - { - A = a; - } - } - class TestClass3 - { - public TestArg2 A { get; set; } + //exactly the same as the above case but one constructor has been decorated with [UseWithObjectConstructor] attribute + ObjectConstructor.Construct(typeof(TestClass5), testarg3); + } - public TestClass3(TestArg2 a) - { - A = a; - } - } + [Test] + public void ConstructIfPossibleTests_BlankConstructors() + { + //blank constructors are only used if no params are specified + Assert.IsNotNull(ObjectConstructor.ConstructIfPossible(typeof(TestClassDefaultConstructor))); - class TestClass4 - { - public TestArg A { get; set; } + //no constructor taking an int + Assert.IsNull(ObjectConstructor.ConstructIfPossible(typeof(TestClassDefaultConstructor), 8)); + } - public TestClass4(TestArg a) + [Test] + public void GetRepositoryConstructor_AllDatabaseEntities_OneWinningConstructor() + { + var countCompatible = 0; + + var badTypes = new Dictionary(); + foreach (var t in Core.Repositories.MEF.GetAllTypes().Where(typeof(DatabaseEntity).IsAssignableFrom)) + try { - A = a; + Assert.IsNotNull(ObjectConstructor.GetRepositoryConstructor(typeof(Catalogue))); + countCompatible++; } - - public TestClass4(TestArg2 a) + catch (Exception e) { - A = a; + badTypes.Add(t, e); } + + Assert.IsEmpty(badTypes); + Assert.GreaterOrEqual(countCompatible, 10); + Console.WriteLine($"Found compatible constructors on {countCompatible} objects"); + } + + private class TestClassDefaultConstructor + { + } + + + private class TestClass1 + { + public TestClass1(object o) + { } + } + private class TestClass2 + { + public 
TestArg A { get; set; } - class TestClass5 + public TestClass2(TestArg a) { - public TestArg A { get; set; } + A = a; + } + } - public TestClass5(TestArg a) - { - A = a; - } - [UseWithObjectConstructor] - public TestClass5(TestArg2 a) - { - A = a; - } + private class TestClass3 + { + public TestArg2 A { get; set; } + + public TestClass3(TestArg2 a) + { + A = a; } - class TestArg + } + + private class TestClass4 + { + public TestArg A { get; set; } + + public TestClass4(TestArg a) { - public string Text { get; set; } + A = a; } - class TestArg2:TestArg + public TestClass4(TestArg2 a) { - + A = a; } - class TestArg3 : TestArg2 + } + + + private class TestClass5 + { + public TestArg A { get; set; } + + public TestClass5(TestArg a) { + A = a; + } + [UseWithObjectConstructor] + public TestClass5(TestArg2 a) + { + A = a; } } -} + + private class TestArg + { + public string Text { get; set; } + } + + private class TestArg2 : TestArg + { + } + + private class TestArg3 : TestArg2 + { + } +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Unit/ParameterManagerTests.cs b/Rdmp.Core.Tests/Curation/Unit/ParameterManagerTests.cs index b9b9331986..0ff0b8e98c 100644 --- a/Rdmp.Core.Tests/Curation/Unit/ParameterManagerTests.cs +++ b/Rdmp.Core.Tests/Curation/Unit/ParameterManagerTests.cs @@ -4,137 +4,144 @@ // RDMP is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. // You should have received a copy of the GNU General Public License along with RDMP. If not, see . -using System.Collections.Generic; using System.Linq; using FAnsi.Implementations.MicrosoftSQL; using NUnit.Framework; using Rdmp.Core.QueryBuilding; using Rdmp.Core.QueryBuilding.Parameters; -namespace Rdmp.Core.Tests.Curation.Unit +namespace Rdmp.Core.Tests.Curation.Unit; + +[Category("Unit")] +public class ParameterManagerTests { - [Category("Unit")] - public class ParameterManagerTests + [Test] + public void Test_ParameterManager_SimpleRename() { - [Test] - public void Test_ParameterManager_SimpleRename() - { - var p1 = new ConstantParameter("DECLARE @fish as int", "1", "fishes be here",new MicrosoftQuerySyntaxHelper()); - var p2 = new ConstantParameter("DECLARE @fish as int", "2", "fishes be here",new MicrosoftQuerySyntaxHelper()); - - var pm1 = new ParameterManager(); - var pm2 = new ParameterManager(); - var pm3 = new ParameterManager(); - - pm1.ParametersFoundSoFarInQueryGeneration[ParameterLevel.QueryLevel].Add(p1); - pm2.ParametersFoundSoFarInQueryGeneration[ParameterLevel.QueryLevel].Add(p2); + var p1 = new ConstantParameter("DECLARE @fish as int", "1", "fishes be here", + MicrosoftQuerySyntaxHelper.Instance); + var p2 = new ConstantParameter("DECLARE @fish as int", "2", "fishes be here", + MicrosoftQuerySyntaxHelper.Instance); + + var pm1 = new ParameterManager(); + var pm2 = new ParameterManager(); + var pm3 = new ParameterManager(); - pm3.ImportAndElevateResolvedParametersFromSubquery(pm1, out Dictionary renames1); - pm3.ImportAndElevateResolvedParametersFromSubquery(pm2, out Dictionary renames2); + pm1.ParametersFoundSoFarInQueryGeneration[ParameterLevel.QueryLevel].Add(p1); + pm2.ParametersFoundSoFarInQueryGeneration[ParameterLevel.QueryLevel].Add(p2); - var final = pm3.GetFinalResolvedParametersList().ToArray(); + pm3.ImportAndElevateResolvedParametersFromSubquery(pm1, out var renames1); + pm3.ImportAndElevateResolvedParametersFromSubquery(pm2, out 
var renames2); - //the final composite parameters should have a rename in them - Assert.AreEqual("@fish",final[0].ParameterName); - Assert.AreEqual("@fish_2",final[1].ParameterName); + var final = pm3.GetFinalResolvedParametersList().ToArray(); - Assert.IsEmpty(renames1); + //the final composite parameters should have a rename in them + Assert.AreEqual("@fish", final[0].ParameterName); + Assert.AreEqual("@fish_2", final[1].ParameterName); + + Assert.IsEmpty(renames1); + + Assert.AreEqual("@fish", renames2.Single().Key); + Assert.AreEqual("@fish_2", renames2.Single().Value); + } - Assert.AreEqual("@fish",renames2.Single().Key); - Assert.AreEqual("@fish_2",renames2.Single().Value); - } + [Test] + [TestCase(ParameterLevel.TableInfo, ParameterLevel.Global)] + [TestCase(ParameterLevel.QueryLevel, ParameterLevel.Global)] + [TestCase(ParameterLevel.TableInfo, ParameterLevel.CompositeQueryLevel)] + [TestCase(ParameterLevel.TableInfo, ParameterLevel.QueryLevel)] + public void FindOverridenParameters_OneOnlyTest(ParameterLevel addAt, ParameterLevel overridingLevel) + { + var myParameter = new ConstantParameter("DECLARE @fish as int", "1", "fishes be here", + MicrosoftQuerySyntaxHelper.Instance); + var overridingParameter = new ConstantParameter("DECLARE @fish as int", "999", "overriding value", + MicrosoftQuerySyntaxHelper.Instance); - [Test] - [TestCase(ParameterLevel.TableInfo,ParameterLevel.Global)] - [TestCase(ParameterLevel.QueryLevel, ParameterLevel.Global)] - [TestCase(ParameterLevel.TableInfo,ParameterLevel.CompositeQueryLevel)] - [TestCase(ParameterLevel.TableInfo,ParameterLevel.QueryLevel)] - public void FindOverridenParameters_OneOnlyTest(ParameterLevel addAt, ParameterLevel overridingLevel) - { - var myParameter = new ConstantParameter("DECLARE @fish as int", "1", "fishes be here",new MicrosoftQuerySyntaxHelper()); - var overridingParameter = new ConstantParameter("DECLARE @fish as int", "999", "overriding value",new MicrosoftQuerySyntaxHelper()); + var pm = new ParameterManager(); + pm.ParametersFoundSoFarInQueryGeneration[ParameterLevel.TableInfo].Add(myParameter); + pm.ParametersFoundSoFarInQueryGeneration[overridingLevel].Add(overridingParameter); - var pm = new ParameterManager(); - pm.ParametersFoundSoFarInQueryGeneration[ParameterLevel.TableInfo].Add(myParameter); - pm.ParametersFoundSoFarInQueryGeneration[overridingLevel].Add(overridingParameter); + var overrides = pm.GetOverridenParameters().ToArray(); - var overrides = pm.GetOverridenParameters().ToArray(); + Assert.IsNull(pm.GetOverrideIfAnyFor(overridingParameter)); + Assert.AreEqual(pm.GetOverrideIfAnyFor(myParameter), overridingParameter); - Assert.IsNull(pm.GetOverrideIfAnyFor(overridingParameter)); - Assert.AreEqual(pm.GetOverrideIfAnyFor(myParameter), overridingParameter); + Assert.AreEqual(1, overrides.Length); + Assert.AreEqual(myParameter, overrides[0]); + var final = pm.GetFinalResolvedParametersList().ToArray(); - Assert.AreEqual(1,overrides.Length); - Assert.AreEqual(myParameter, overrides[0]); - var final = pm.GetFinalResolvedParametersList().ToArray(); + Assert.AreEqual(1, final.Length); + Assert.AreEqual(overridingParameter, final[0]); + } - Assert.AreEqual(1, final.Length); - Assert.AreEqual(overridingParameter, final[0]); - } - - [Test] - public void FindOverridenParameters_CaseSensitivityTest() - { - var baseParameter = new ConstantParameter("DECLARE @fish as int", "1", "fishes be here", new MicrosoftQuerySyntaxHelper()); - var overridingParameter = new ConstantParameter("DECLARE @Fish as int", "3", 
"overriding value", new MicrosoftQuerySyntaxHelper()); + [Test] + public void FindOverridenParameters_CaseSensitivityTest() + { + var baseParameter = new ConstantParameter("DECLARE @fish as int", "1", "fishes be here", + MicrosoftQuerySyntaxHelper.Instance); + var overridingParameter = new ConstantParameter("DECLARE @Fish as int", "3", "overriding value", + MicrosoftQuerySyntaxHelper.Instance); - var pm = new ParameterManager(); - pm.ParametersFoundSoFarInQueryGeneration[ParameterLevel.TableInfo].Add(baseParameter); - pm.ParametersFoundSoFarInQueryGeneration[ParameterLevel.QueryLevel].Add(overridingParameter); + var pm = new ParameterManager(); + pm.ParametersFoundSoFarInQueryGeneration[ParameterLevel.TableInfo].Add(baseParameter); + pm.ParametersFoundSoFarInQueryGeneration[ParameterLevel.QueryLevel].Add(overridingParameter); - var parameters = pm.GetFinalResolvedParametersList().ToArray(); + var parameters = pm.GetFinalResolvedParametersList().ToArray(); - Assert.AreEqual(1,parameters.Count()); + Assert.AreEqual(1, parameters.Length); - var final = parameters.Single(); - Assert.AreEqual("@Fish",final.ParameterName); - Assert.AreEqual("3", final.Value); - } - - [Test] - public void FindOverridenParameters_TwoTest() - { - var myParameter1 = new ConstantParameter("DECLARE @fish as int", "1", "fishes be here",new MicrosoftQuerySyntaxHelper()); - var myParameter2 = new ConstantParameter("DECLARE @fish as int", "2", "fishes be here",new MicrosoftQuerySyntaxHelper()); - - var overridingParameter = new ConstantParameter("DECLARE @fish as int", "3", "overriding value",new MicrosoftQuerySyntaxHelper()); - - var pm = new ParameterManager(); - pm.ParametersFoundSoFarInQueryGeneration[ParameterLevel.TableInfo].Add(myParameter1); - pm.ParametersFoundSoFarInQueryGeneration[ParameterLevel.CompositeQueryLevel].Add(myParameter2); - pm.ParametersFoundSoFarInQueryGeneration[ParameterLevel.Global].Add(overridingParameter); - - var overrides = pm.GetOverridenParameters().ToArray(); - - Assert.IsNull(pm.GetOverrideIfAnyFor(overridingParameter)); - Assert.AreEqual(pm.GetOverrideIfAnyFor(myParameter1), overridingParameter); - Assert.AreEqual(pm.GetOverrideIfAnyFor(myParameter2), overridingParameter); - - Assert.AreEqual(2, overrides.Length); - Assert.AreEqual(myParameter1, overrides[0]); - Assert.AreEqual(myParameter2, overrides[1]); - - var final = pm.GetFinalResolvedParametersList().ToArray(); - Assert.AreEqual(1,final.Length); - Assert.AreEqual(overridingParameter, final[0]); - } - - [Test] - public void ParameterDeclarationAndDeconstruction() - { - var param = new ConstantParameter("DECLARE @Fish as int;","3","I've got a lovely bunch of coconuts",new MicrosoftQuerySyntaxHelper()); - var sql = QueryBuilder.GetParameterDeclarationSQL(param); - - Assert.AreEqual(@"/*I've got a lovely bunch of coconuts*/ + var final = parameters.Single(); + Assert.AreEqual("@Fish", final.ParameterName); + Assert.AreEqual("3", final.Value); + } + + [Test] + public void FindOverridenParameters_TwoTest() + { + var myParameter1 = new ConstantParameter("DECLARE @fish as int", "1", "fishes be here", + MicrosoftQuerySyntaxHelper.Instance); + var myParameter2 = new ConstantParameter("DECLARE @fish as int", "2", "fishes be here", + MicrosoftQuerySyntaxHelper.Instance); + + var overridingParameter = new ConstantParameter("DECLARE @fish as int", "3", "overriding value", + MicrosoftQuerySyntaxHelper.Instance); + + var pm = new ParameterManager(); + pm.ParametersFoundSoFarInQueryGeneration[ParameterLevel.TableInfo].Add(myParameter1); + 
pm.ParametersFoundSoFarInQueryGeneration[ParameterLevel.CompositeQueryLevel].Add(myParameter2); + pm.ParametersFoundSoFarInQueryGeneration[ParameterLevel.Global].Add(overridingParameter); + + var overrides = pm.GetOverridenParameters().ToArray(); + + Assert.IsNull(pm.GetOverrideIfAnyFor(overridingParameter)); + Assert.AreEqual(pm.GetOverrideIfAnyFor(myParameter1), overridingParameter); + Assert.AreEqual(pm.GetOverrideIfAnyFor(myParameter2), overridingParameter); + + Assert.AreEqual(2, overrides.Length); + Assert.AreEqual(myParameter1, overrides[0]); + Assert.AreEqual(myParameter2, overrides[1]); + + var final = pm.GetFinalResolvedParametersList().ToArray(); + Assert.AreEqual(1, final.Length); + Assert.AreEqual(overridingParameter, final[0]); + } + + [Test] + public void ParameterDeclarationAndDeconstruction() + { + var param = new ConstantParameter("DECLARE @Fish as int;", "3", "I've got a lovely bunch of coconuts", + MicrosoftQuerySyntaxHelper.Instance); + var sql = QueryBuilder.GetParameterDeclarationSQL(param); + + Assert.AreEqual(@"/*I've got a lovely bunch of coconuts*/ DECLARE @Fish as int; SET @Fish=3; ", sql); - var after = ConstantParameter.Parse(sql, new MicrosoftQuerySyntaxHelper()); - - Assert.AreEqual(param.ParameterSQL,after.ParameterSQL); - Assert.AreEqual(param.Value, after.Value); - Assert.AreEqual(param.Comment, after.Comment); - } + var after = ConstantParameter.Parse(sql, MicrosoftQuerySyntaxHelper.Instance); + Assert.AreEqual(param.ParameterSQL, after.ParameterSQL); + Assert.AreEqual(param.Value, after.Value); + Assert.AreEqual(param.Comment, after.Comment); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Unit/PermissionWindowTests.cs b/Rdmp.Core.Tests/Curation/Unit/PermissionWindowTests.cs index ff20ceb3a7..66fc32a907 100644 --- a/Rdmp.Core.Tests/Curation/Unit/PermissionWindowTests.cs +++ b/Rdmp.Core.Tests/Curation/Unit/PermissionWindowTests.cs @@ -10,75 +10,77 @@ using Rdmp.Core.Curation.Data; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Unit +namespace Rdmp.Core.Tests.Curation.Unit; + +public class PermissionWindowTests : DatabaseTests { - - public class PermissionWindowTests:DatabaseTests + [Test] + public void TestSerialisation() { + var period1 = new PermissionWindowPeriod((int)DayOfWeek.Monday, new TimeSpan(0, 0, 0), new TimeSpan(6, 0, 0)); + var period2 = + new PermissionWindowPeriod((int)DayOfWeek.Monday, new TimeSpan(17, 0, 0), new TimeSpan(23, 59, 59)); - [Test] - public void TestSerialisation() + var permissionWindow = new PermissionWindow(CatalogueRepository); + permissionWindow.SetPermissionWindowPeriods(new List { - var period1 = new PermissionWindowPeriod((int) DayOfWeek.Monday, new TimeSpan(0, 0, 0), new TimeSpan(6, 0, 0)); - var period2 = new PermissionWindowPeriod((int) DayOfWeek.Monday, new TimeSpan(17, 0, 0), new TimeSpan(23, 59, 59)); + period1, + period2 + }); - var permissionWindow = new PermissionWindow(CatalogueRepository); - permissionWindow.SetPermissionWindowPeriods(new List - { - period1, - period2 - }); + var permissionPeriodConfig = permissionWindow.PermissionPeriodConfig; - var permissionPeriodConfig = permissionWindow.PermissionPeriodConfig; + var newPermissionWindow = new PermissionWindow(CatalogueRepository) + { + PermissionPeriodConfig = permissionPeriodConfig + }; - var newPermissionWindow = new PermissionWindow(CatalogueRepository); - newPermissionWindow.PermissionPeriodConfig = permissionPeriodConfig; + var periods = newPermissionWindow.PermissionWindowPeriods; + Assert.AreEqual(2, 
periods.Count); - var periods = newPermissionWindow.PermissionWindowPeriods; - Assert.AreEqual(2, periods.Count); + var newPeriod1 = periods[0]; + Assert.AreEqual((int)DayOfWeek.Monday, newPeriod1.DayOfWeek); - var newPeriod1 = periods[0]; - Assert.AreEqual((int) DayOfWeek.Monday, newPeriod1.DayOfWeek); + Assert.AreEqual(6, newPeriod1.End.Hours); - Assert.AreEqual(6, newPeriod1.End.Hours); + var newPeriod2 = periods[1]; + Assert.AreEqual(17, newPeriod2.Start.Hours); + } - var newPeriod2 = periods[1]; - Assert.AreEqual(17, newPeriod2.Start.Hours); - } - - [Test] - public void TestCurrentlyWithinPermissionPeriod() - { - var dtNow = DateTime.UtcNow; + [Test] + public void TestCurrentlyWithinPermissionPeriod() + { + var dtNow = DateTime.UtcNow; - if ((dtNow.Hour == 23 && dtNow.Minute >= 40) || (dtNow.Hour == 0 && dtNow.Minute <= 5)) - Assert.Inconclusive("This test cannot run at midnight since it is afraid of the dark"); - - var fiveMinutes = new TimeSpan(0, 5, 0); + if (dtNow is { Hour: 23, Minute: >= 40 } or { Hour: 0, Minute: <= 5 }) + Assert.Inconclusive("This test cannot run at midnight since it is afraid of the dark"); - var utcTime = new TimeSpan(dtNow.Hour, dtNow.Minute, dtNow.Second); - var period1 = new PermissionWindowPeriod((int)DateTime.Now.DayOfWeek, utcTime.Subtract(fiveMinutes), utcTime.Add(fiveMinutes)); + var fiveMinutes = new TimeSpan(0, 5, 0); - var permissionWindow = new PermissionWindow(CatalogueRepository); - permissionWindow.SetPermissionWindowPeriods(new List { period1 }); - Assert.IsTrue(permissionWindow.WithinPermissionWindow()); - } + var utcTime = new TimeSpan(dtNow.Hour, dtNow.Minute, dtNow.Second); + var period1 = new PermissionWindowPeriod((int)DateTime.UtcNow.DayOfWeek, utcTime.Subtract(fiveMinutes), + utcTime.Add(fiveMinutes)); - [Test] - public void TestCurrentlyOutsidePermissionPeriod() - { - var dtNow = DateTime.UtcNow; - - if ((dtNow.Hour == 23 && dtNow.Minute >= 50) || (dtNow.Hour == 0 && dtNow.Minute <= 3)) - Assert.Inconclusive("This test cannot run at midnight since it is afraid of the dark"); - - var oneMinute = new TimeSpan(0, 1, 0); - var utcTime = new TimeSpan(dtNow.Hour, dtNow.Minute, dtNow.Second); - var period1 = new PermissionWindowPeriod((int)DateTime.Now.DayOfWeek, utcTime.Add(oneMinute), utcTime.Add(oneMinute)); - - var permissionWindow = new PermissionWindow(CatalogueRepository); - permissionWindow.SetPermissionWindowPeriods(new List { period1 }); - Assert.IsFalse(permissionWindow.WithinPermissionWindow()); - } + var permissionWindow = new PermissionWindow(CatalogueRepository); + permissionWindow.SetPermissionWindowPeriods(new List { period1 }); + Assert.IsTrue(permissionWindow.WithinPermissionWindow()); + } + + [Test] + public void TestCurrentlyOutsidePermissionPeriod() + { + var dtNow = DateTime.UtcNow; + + if (dtNow is { Hour: 23, Minute: >= 50 } or { Hour: 0, Minute: <= 3 }) + Assert.Inconclusive("This test cannot run at midnight since it is afraid of the dark"); + + var oneMinute = new TimeSpan(0, 1, 0); + var utcTime = new TimeSpan(dtNow.Hour, dtNow.Minute, dtNow.Second); + var period1 = + new PermissionWindowPeriod((int)DateTime.Now.DayOfWeek, utcTime.Add(oneMinute), utcTime.Add(oneMinute)); + + var permissionWindow = new PermissionWindow(CatalogueRepository); + permissionWindow.SetPermissionWindowPeriods(new List { period1 }); + Assert.IsFalse(permissionWindow.WithinPermissionWindow()); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Unit/PreInitializeTests.cs 
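The midnight guard in the permission window tests above is rewritten from chained comparisons into a C# property pattern with relational patterns; the two forms are equivalent, for example:

    // Fragment; requires: using System;
    var now = DateTime.UtcNow;

    // Original style: explicit boolean algebra over the components.
    var nearMidnightOld = (now.Hour == 23 && now.Minute >= 40) || (now.Hour == 0 && now.Minute <= 5);

    // Pattern style used in the updated test: match on properties with relational patterns.
    var nearMidnightNew = now is { Hour: 23, Minute: >= 40 } or { Hour: 0, Minute: <= 5 };

    System.Diagnostics.Debug.Assert(nearMidnightOld == nearMidnightNew);

The pattern form needs C# 9 or later but reads closer to the intent ("between 23:40 and 00:05") than the hand-rolled boolean expression.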
b/Rdmp.Core.Tests/Curation/Unit/PreInitializeTests.cs index 1e14ea8418..3ff3837892 100644 --- a/Rdmp.Core.Tests/Curation/Unit/PreInitializeTests.cs +++ b/Rdmp.Core.Tests/Curation/Unit/PreInitializeTests.cs @@ -9,152 +9,146 @@ using NUnit.Framework; using Rdmp.Core.DataFlowPipeline; using Rdmp.Core.DataFlowPipeline.Requirements; -using ReusableLibraryCode.Progress; +using Rdmp.Core.ReusableLibraryCode.Progress; -namespace Rdmp.Core.Tests.Curation.Unit +namespace Rdmp.Core.Tests.Curation.Unit; + +[Category("Unit")] +public class PreInitializeTests { - [Category("Unit")] - public class PreInitializeTests + private DataFlowPipelineContext context = new(); + private Fish fish = new(); + + [Test] + public void TestNormal() { + var fishUser = new FishUser(); - DataFlowPipelineContext context = new DataFlowPipelineContext(); - Fish fish = new Fish(); + Assert.AreNotEqual(fishUser.IFish, fish); + context.PreInitialize(ThrowImmediatelyDataLoadEventListener.Quiet, fishUser, fish); + Assert.AreEqual(fishUser.IFish, fish); + } - [Test] - public void TestNormal() - { + [Test] + public void TestOneOFMany() + { + var fishUser = new FishUser(); - FishUser fishUser = new FishUser(); + Assert.AreNotEqual(fishUser.IFish, fish); + context.PreInitialize(ThrowImmediatelyDataLoadEventListener.Quiet, fishUser, new object(), fish); + Assert.AreEqual(fishUser.IFish, fish); + } - Assert.AreNotEqual(fishUser.IFish, fish); - context.PreInitialize(new ThrowImmediatelyDataLoadEventListener(),fishUser, fish); - Assert.AreEqual(fishUser.IFish, fish); - } + [Test] + public void TestCasting() + { + var fishUser = new FishUser(); - [Test] - public void TestOneOFMany() - { + Assert.AreNotEqual(fishUser.IFish, fish); + context.PreInitialize(ThrowImmediatelyDataLoadEventListener.Quiet, fishUser, (IFish)fish); + Assert.AreEqual(fishUser.IFish, fish); + } - FishUser fishUser = new FishUser(); - - Assert.AreNotEqual(fishUser.IFish, fish); - context.PreInitialize(new ThrowImmediatelyDataLoadEventListener(), fishUser,new object(), fish); - Assert.AreEqual(fishUser.IFish, fish); - } - [Test] - public void TestCasting() - { + [Test] + public void TestDownCasting() + { + var fishUser = new SpecificFishUser(); - FishUser fishUser = new FishUser(); + IFish f = fish; + Assert.AreNotEqual(fishUser.IFish, fish); + context.PreInitialize(ThrowImmediatelyDataLoadEventListener.Quiet, fishUser, f); + Assert.AreEqual(fishUser.IFish, fish); + } - Assert.AreNotEqual(fishUser.IFish, fish); - context.PreInitialize(new ThrowImmediatelyDataLoadEventListener(), fishUser, (IFish)fish); - Assert.AreEqual(fishUser.IFish, fish); - } - - [Test] - public void TestDownCasting() - { - SpecificFishUser fishUser = new SpecificFishUser(); + [Test] + public void TestNoObjects() + { + var fishUser = new SpecificFishUser(); + var ex = Assert.Throws(() => + context.PreInitialize(ThrowImmediatelyDataLoadEventListener.Quiet, fishUser, Array.Empty())); + Assert.IsTrue(ex.Message.Contains("The following expected types were not passed to PreInitialize:Fish")); + } - IFish f = fish; - Assert.AreNotEqual(fishUser.IFish, fish); - context.PreInitialize(new ThrowImmediatelyDataLoadEventListener(), fishUser, f); - Assert.AreEqual(fishUser.IFish, fish); - } - [Test] - public void TestNoObjects() - { - SpecificFishUser fishUser = new SpecificFishUser(); - var ex = Assert.Throws(()=>context.PreInitialize(new ThrowImmediatelyDataLoadEventListener(), fishUser, new object[0])); - Assert.IsTrue(ex.Message.Contains("The following expected types were not passed to PreInitialize:Fish")); - 
} + [Test] + public void TestWrongObjects() + { + var fishUser = new SpecificFishUser(); + var ex = Assert.Throws(() => + context.PreInitialize(ThrowImmediatelyDataLoadEventListener.Quiet, fishUser, new Penguin())); + Assert.IsTrue(ex.Message.Contains("The following expected types were not passed to PreInitialize:Fish")); + Assert.IsTrue(ex.Message.Contains("The object types passed were:")); + Assert.IsTrue(ex.Message.Contains("Penguin")); + } - [Test] - public void TestWrongObjects() + + private class FishUser : IPipelineRequirement, IDataFlowComponent + { + public IFish IFish; + + public void PreInitialize(IFish value, IDataLoadEventListener listener) { - SpecificFishUser fishUser = new SpecificFishUser(); - var ex = Assert.Throws(() => context.PreInitialize(new ThrowImmediatelyDataLoadEventListener(), fishUser, new Penguin())); - Assert.IsTrue(ex.Message.Contains("The following expected types were not passed to PreInitialize:Fish")); - Assert.IsTrue(ex.Message.Contains("The object types passed were:")); - Assert.IsTrue(ex.Message.Contains("Penguin")); + IFish = value; } + #region boiler plate + public DataTable ProcessPipelineData(DataTable toProcess, IDataLoadEventListener listener, + GracefulCancellationToken cancellationToken) => + throw new NotImplementedException(); - - private class FishUser:IPipelineRequirement, IDataFlowComponent + public void Dispose(IDataLoadEventListener listener, Exception pipelineFailureExceptionIfAny) { - public IFish IFish; - - public void PreInitialize(IFish value, IDataLoadEventListener listener) - { - IFish = value; - - } - #region boiler plate - public DataTable ProcessPipelineData(DataTable toProcess, IDataLoadEventListener listener, - GracefulCancellationToken cancellationToken) - { - throw new NotImplementedException(); - } - - public void Dispose(IDataLoadEventListener listener, Exception pipelineFailureExceptionIfAny) - { - throw new NotImplementedException(); - } - - public void Abort(IDataLoadEventListener listener) - { - throw new NotImplementedException(); - } - #endregion + throw new NotImplementedException(); } - private class SpecificFishUser : IPipelineRequirement, IDataFlowComponent + + public void Abort(IDataLoadEventListener listener) { - public IFish IFish; - - public void PreInitialize(Fish value, IDataLoadEventListener listener) - { - IFish = value; - - } - #region boiler plate - public DataTable ProcessPipelineData(DataTable toProcess, IDataLoadEventListener listener, - GracefulCancellationToken cancellationToken) - { - throw new NotImplementedException(); - } - - public void Dispose(IDataLoadEventListener listener, Exception pipelineFailureExceptionIfAny) - { - throw new NotImplementedException(); - } - - public void Abort(IDataLoadEventListener listener) - { - throw new NotImplementedException(); - } - #endregion + throw new NotImplementedException(); } - private interface IFish + + #endregion + } + + private class SpecificFishUser : IPipelineRequirement, IDataFlowComponent + { + public IFish IFish; + + public void PreInitialize(Fish value, IDataLoadEventListener listener) { - string GetFish(); + IFish = value; } - private class Fish:IFish + #region boiler plate + + public DataTable ProcessPipelineData(DataTable toProcess, IDataLoadEventListener listener, + GracefulCancellationToken cancellationToken) => + throw new NotImplementedException(); + + public void Dispose(IDataLoadEventListener listener, Exception pipelineFailureExceptionIfAny) { - public string GetFish() - { - return "fish"; - } + throw new NotImplementedException(); 
} - private class Penguin + + public void Abort(IDataLoadEventListener listener) { - public string GetPenguin() - { - return "Penguin"; - } + throw new NotImplementedException(); } + + #endregion + } + + private interface IFish + { + string GetFish(); + } + + private class Fish : IFish + { + public string GetFish() => "fish"; + } + + private class Penguin + { + public static string GetPenguin() => "Penguin"; } } \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Unit/SimpleColumnInfoTests.cs b/Rdmp.Core.Tests/Curation/Unit/SimpleColumnInfoTests.cs index 9f078fc2a6..15064a041d 100644 --- a/Rdmp.Core.Tests/Curation/Unit/SimpleColumnInfoTests.cs +++ b/Rdmp.Core.Tests/Curation/Unit/SimpleColumnInfoTests.cs @@ -8,35 +8,34 @@ using FAnsi; using FAnsi.Discovery; using NUnit.Framework; -using Rdmp.Core.Curation.Data; -using ReusableLibraryCode.DataAccess; +using Rdmp.Core.ReusableLibraryCode.DataAccess; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Unit +namespace Rdmp.Core.Tests.Curation.Unit; + +public class SimpleColumnInfoTests : DatabaseTests { - public class SimpleColumnInfoTests : DatabaseTests + [Test] + [TestCase("varchar(5)", 5)] + [TestCase("int", -1)] + [TestCase("datetime2", -1)] + [TestCase("nchar(100)", 100)] + [TestCase("char(11)", 11)] + [TestCase("text", int.MaxValue)] + [TestCase("varchar(max)", int.MaxValue)] + public void GetColumnLength(string type, int? expectedLength) { - [Test] - [TestCase("varchar(5)",5)] - [TestCase("int", -1)] - [TestCase("datetime2", -1)] - [TestCase("nchar(100)", 100)] - [TestCase("char(11)", 11)] - [TestCase("text", int.MaxValue)] - [TestCase("varchar(max)", int.MaxValue)] - public void GetColumnLength(string type, int? expectedLength) + var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); + var t = db.CreateTable("MyTable", new[] { - var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); - var t = db.CreateTable("MyTable", new[] - { - new DatabaseColumnRequest("MyCol", type) - }); + new DatabaseColumnRequest("MyCol", type) + }); + + Import(t, out var ti, out var cis); - Import(t, out var ti, out var cis); - - Assert.AreEqual(expectedLength,cis.Single().Discover(DataAccessContext.InternalDataProcessing).DataType.GetLengthIfString()); + Assert.AreEqual(expectedLength, + cis.Single().Discover(DataAccessContext.InternalDataProcessing).DataType.GetLengthIfString()); - ti.DeleteInDatabase(); - } + ti.DeleteInDatabase(); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Unit/SqlSyntaxHelperTests.cs b/Rdmp.Core.Tests/Curation/Unit/SqlSyntaxHelperTests.cs index 44469a3db0..0dc171f0b0 100644 --- a/Rdmp.Core.Tests/Curation/Unit/SqlSyntaxHelperTests.cs +++ b/Rdmp.Core.Tests/Curation/Unit/SqlSyntaxHelperTests.cs @@ -5,59 +5,58 @@ // You should have received a copy of the GNU General Public License along with RDMP. If not, see . 
using FAnsi.Implementations.MicrosoftSQL; -using Moq; +using NSubstitute; using NUnit.Framework; using Rdmp.Core.Curation.Data; using Rdmp.Core.QueryBuilding; -namespace Rdmp.Core.Tests.Curation.Unit +namespace Rdmp.Core.Tests.Curation.Unit; + +[Category("Unit")] +public class SqlSyntaxHelperTests { - [Category("Unit")] - public class SqlSyntaxHelperTests + [Test] + public void GetNullSubstituteTests() { - [Test] - public void GetNullSubstituteTests() - { - - var ti = Mock.Of(t=>t.GetQuerySyntaxHelper() == new MicrosoftQuerySyntaxHelper()); - - var pk = new PrimaryKeyCollisionResolver(ti); - - Assert.AreEqual("-999",pk.GetNullSubstituteForComparisonsWithDataType("decimal(3)", true)); - Assert.AreEqual("-9999999999", pk.GetNullSubstituteForComparisonsWithDataType("decimal(10)", true)); - Assert.AreEqual("-99.9", pk.GetNullSubstituteForComparisonsWithDataType("decimal(3,1)", true)); - Assert.AreEqual("-.9999", pk.GetNullSubstituteForComparisonsWithDataType("decimal(4,4)", true)); - - - Assert.AreEqual("999", pk.GetNullSubstituteForComparisonsWithDataType("decimal(3)", false)); - Assert.AreEqual("9999999999", pk.GetNullSubstituteForComparisonsWithDataType("decimal(10)", false)); - Assert.AreEqual("99.9", pk.GetNullSubstituteForComparisonsWithDataType("decimal(3,1)", false)); - Assert.AreEqual(".9999", pk.GetNullSubstituteForComparisonsWithDataType("decimal(4,4)", false)); + Assert.AreEqual("-999", + PrimaryKeyCollisionResolver.GetNullSubstituteForComparisonsWithDataType("decimal(3)", true)); + Assert.AreEqual("-9999999999", + PrimaryKeyCollisionResolver.GetNullSubstituteForComparisonsWithDataType("decimal(10)", true)); + Assert.AreEqual("-99.9", + PrimaryKeyCollisionResolver.GetNullSubstituteForComparisonsWithDataType("decimal(3,1)", true)); + Assert.AreEqual("-.9999", + PrimaryKeyCollisionResolver.GetNullSubstituteForComparisonsWithDataType("decimal(4,4)", true)); + + + Assert.AreEqual("999", + PrimaryKeyCollisionResolver.GetNullSubstituteForComparisonsWithDataType("decimal(3)", false)); + Assert.AreEqual("9999999999", + PrimaryKeyCollisionResolver.GetNullSubstituteForComparisonsWithDataType("decimal(10)", false)); + Assert.AreEqual("99.9", + PrimaryKeyCollisionResolver.GetNullSubstituteForComparisonsWithDataType("decimal(3,1)", false)); + Assert.AreEqual(".9999", + PrimaryKeyCollisionResolver.GetNullSubstituteForComparisonsWithDataType("decimal(4,4)", false)); + } - } + [Test] + public void SplitMethod() + { + var syntaxHelper = MicrosoftQuerySyntaxHelper.Instance; - [Test] - public void SplitMethod() - { - var syntaxHelper = new MicrosoftQuerySyntaxHelper(); + syntaxHelper.SplitLineIntoOuterMostMethodAndContents("count(*)", out var method, out var contents); - string contents; - string method; - syntaxHelper.SplitLineIntoOuterMostMethodAndContents("count(*)",out method,out contents); - - Assert.AreEqual("count",method); - Assert.AreEqual("*",contents); + Assert.AreEqual("count", method); + Assert.AreEqual("*", contents); - syntaxHelper.SplitLineIntoOuterMostMethodAndContents("count()", out method, out contents); + syntaxHelper.SplitLineIntoOuterMostMethodAndContents("count()", out method, out contents); - Assert.AreEqual("count", method); - Assert.AreEqual("", contents); + Assert.AreEqual("count", method); + Assert.AreEqual("", contents); - syntaxHelper.SplitLineIntoOuterMostMethodAndContents("LTRIM(RTRIM([Fish]))", out method, out contents); + syntaxHelper.SplitLineIntoOuterMostMethodAndContents("LTRIM(RTRIM([Fish]))", out method, out contents); - Assert.AreEqual("LTRIM", method); - 
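This file trades `using Moq` for `using NSubstitute` (the stub is no longer needed at all once the null-substitute methods become static). For reference, the general shape of that translation, shown on a hypothetical interface rather than the RDMP ITableInfo:

    // Sketch only; requires: using NSubstitute; using NUnit.Framework;
    public interface ISyntaxSource
    {
        string GetDatabaseType();
    }

    // Moq style (the pattern being removed by this patch):
    //   var source = Mock.Of<ISyntaxSource>(s => s.GetDatabaseType() == "MicrosoftSQLServer");

    // NSubstitute style:
    var source = Substitute.For<ISyntaxSource>();
    source.GetDatabaseType().Returns("MicrosoftSQLServer");

    Assert.AreEqual("MicrosoftSQLServer", source.GetDatabaseType());

NSubstitute configures return values by calling the member on the substitute and chaining Returns, so most Moq setup expressions map across one for one.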
Assert.AreEqual("RTRIM([Fish])", contents); - } + Assert.AreEqual("LTRIM", method); + Assert.AreEqual("RTRIM([Fish])", contents); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/Unit/TestAcronymGeneration.cs b/Rdmp.Core.Tests/Curation/Unit/TestAcronymGeneration.cs index e28e555811..26b988e34a 100644 --- a/Rdmp.Core.Tests/Curation/Unit/TestAcronymGeneration.cs +++ b/Rdmp.Core.Tests/Curation/Unit/TestAcronymGeneration.cs @@ -8,25 +8,23 @@ using Rdmp.Core.Reports; using Tests.Common; -namespace Rdmp.Core.Tests.Curation.Unit +namespace Rdmp.Core.Tests.Curation.Unit; + +public class TestAcronymGeneration : DatabaseTests { - public class TestAcronymGeneration : DatabaseTests + [Test] + [TestCase("bob", "bob")] + [TestCase("Demography", "Demog")] + [TestCase("DMPCatalogue", "DMPC")] + [TestCase("Datasheet1", "D1")] + [TestCase("Frank Bettie Cardinality", "FBC")] + [TestCase("Datashet DMP 32", "DDMP32")] + public void Predict(string name, string predictedAcronym) { + var extractor = new DitaCatalogueExtractor(CatalogueRepository, null); - [Test] - [TestCase("bob","bob")] - [TestCase("Demography", "Demog")] - [TestCase("DMPCatalogue", "DMPC")] - [TestCase("Datasheet1", "D1")] - [TestCase("Frank Bettie Cardinality", "FBC")] - [TestCase("Datashet DMP 32", "DDMP32")] - public void Predict(string name, string predictedAcronym) - { - DitaCatalogueExtractor extractor = new DitaCatalogueExtractor(CatalogueRepository, null); - - string suggestion = extractor.GetAcronymSuggestionFromCatalogueName(name); + var suggestion = DitaCatalogueExtractor.GetAcronymSuggestionFromCatalogueName(name); - Assert.AreEqual(predictedAcronym,suggestion); - } + Assert.AreEqual(predictedAcronym, suggestion); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/UnitTestsAllObjectsSupported.cs b/Rdmp.Core.Tests/Curation/UnitTestsAllObjectsSupported.cs index 771cebb753..35403837c1 100644 --- a/Rdmp.Core.Tests/Curation/UnitTestsAllObjectsSupported.cs +++ b/Rdmp.Core.Tests/Curation/UnitTestsAllObjectsSupported.cs @@ -8,77 +8,73 @@ using System.Collections.Generic; using System.Linq; using System.Reflection; -using MapsDirectlyToDatabaseTable.Revertable; using NUnit.Framework; using Rdmp.Core.Curation.Data; using Rdmp.Core.Curation.Data.Spontaneous; +using Rdmp.Core.MapsDirectlyToDatabaseTable.Revertable; using Rdmp.Core.Repositories; using Tests.Common; -namespace Rdmp.Core.Tests.Curation +namespace Rdmp.Core.Tests.Curation; + +internal class UnitTestsAllObjectsSupported : UnitTests { - class UnitTestsAllObjectsSupported:UnitTests + /// + /// Who tests the tester? this method does! It makes sure that supports all classes (except + /// those listed in ) and returns a valid value. + /// + [Test] + public void TestAllSupported() { - /// - /// Who tests the tester? this method does! It makes sure that supports all classes (except - /// those listed in ) and returns a valid value. 
- /// - [Test] - public void TestAllSupported() + //load all DatabaseEntity types + var types = MEF.GetAllTypes() + .Where(static t => typeof(DatabaseEntity).IsAssignableFrom(t) && !t.IsAbstract && !t.IsInterface).ToArray(); + + var methods = typeof(UnitTests).GetMethods(BindingFlags.NonPublic | BindingFlags.Instance); + var method = methods.Single(m => m.Name.Equals("WhenIHaveA") && !m.GetParameters().Any()); + + var notSupported = new List(); + + foreach (var t in types) { - //load all DatabaseEntity types - MEF mef = new MEF(); - mef.Setup(new SafeDirectoryCatalog(TestContext.CurrentContext.TestDirectory)); + //ignore these types too + if (SkipTheseTypes.Contains(t.Name) || t.Name.StartsWith("Spontaneous", StringComparison.Ordinal) || + typeof(SpontaneousObject).IsAssignableFrom(t)) + continue; - var types = mef.GetAllTypes() - .Where(t => typeof (DatabaseEntity).IsAssignableFrom(t) && !t.IsAbstract && !t.IsInterface).ToArray(); + DatabaseEntity instance = null; - var methods = typeof(UnitTests).GetMethods(BindingFlags.NonPublic | BindingFlags.Instance); - var method = methods.Single(m => m.Name.Equals("WhenIHaveA") && !m.GetParameters().Any()); - - List notSupported = new List(); - - foreach (Type t in types) + try { - //ignore these types too - if (SkipTheseTypes.Contains(t.Name) || t.Name.StartsWith("Spontaneous") || typeof(SpontaneousObject).IsAssignableFrom(t)) - continue; - - DatabaseEntity instance = null; + //ensure that the method supports the Type + var generic = method.MakeGenericMethod(t); + instance = (DatabaseEntity)generic.Invoke(this, null); + } + catch (TargetInvocationException exception) + { + if (exception.InnerException is TestCaseNotWrittenYetException) + notSupported.Add(t); + else + throw; + } + //if the instance returned by MakeGenericMethod does not pass checks that's a dealbreaker! + if (instance != null) try { - //ensure that the method supports the Type - var generic = method.MakeGenericMethod(t); - instance = (DatabaseEntity)generic.Invoke(this, null); + //and that it returns an instance + Assert.IsNotNull(instance); + Assert.IsTrue(instance.Exists()); + Assert.AreEqual(ChangeDescription.NoChanges, instance.HasLocalChanges().Evaluation, + "Type was '" + t.Name + "'"); } - catch (TargetInvocationException exception) + catch (Exception e) { - if (exception.InnerException is TestCaseNotWrittenYetException) - notSupported.Add(t); - else - throw; + throw new Exception($"Implementation of WhenIHaveA<{t.Name}> is flawed", e); } - - //if the instance returned by MakeGenericMethod does not pass checks that's a dealbreaker! 
- if (instance != null) - { - try - { - //and that it returns an instance - Assert.IsNotNull(instance); - Assert.IsTrue(instance.Exists()); - Assert.AreEqual(ChangeDescription.NoChanges, instance.HasLocalChanges().Evaluation,"Type was '" + t.Name+"'"); - } - catch (Exception e) - { - throw new Exception("Implementation of WhenIHaveA<" + t.Name + "> is flawed",e); - } - } - - } - - Assert.IsEmpty(notSupported, "The following Types were not supported by WhenIHaveA:" +Environment.NewLine + string.Join(Environment.NewLine,notSupported.Select(t=>t.Name))); } + + Assert.IsEmpty(notSupported, + $"The following Types were not supported by WhenIHaveA:{Environment.NewLine}{string.Join(Environment.NewLine, notSupported.Select(t => t.Name))}"); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/Curation/YamlRepositoryTests.cs b/Rdmp.Core.Tests/Curation/YamlRepositoryTests.cs index 32825fe2b4..c3b09d93f1 100644 --- a/Rdmp.Core.Tests/Curation/YamlRepositoryTests.cs +++ b/Rdmp.Core.Tests/Curation/YamlRepositoryTests.cs @@ -21,330 +21,326 @@ using System.Text; using Tests.Common; -namespace Rdmp.Core.Tests.Curation +namespace Rdmp.Core.Tests.Curation; + +internal class YamlRepositoryTests { - internal class YamlRepositoryTests + [Test] + public void BlankConstructorsForEveryone() { - [Test] - public void BlankConstructorsForEveryone() - { - StringBuilder sb = new StringBuilder(); - - foreach(var t in new YamlRepository(GetUniqueDirectory()).GetCompatibleTypes()) - { - var blankConstructor = t.GetConstructor(Type.EmptyTypes); - - if (blankConstructor == null) - sb.AppendLine(t.Name); - } + var sb = new StringBuilder(); - if(sb.Length > 0) - { - Assert.Fail($"All data classes must have a blank constructor. The following did not:{Environment.NewLine}{sb}"); - } - } - - [Test] - public void PersistDefaults() + foreach (var t in new YamlRepository(GetUniqueDirectory()).GetCompatibleTypes()) { - var dir = GetUniqueDirectory(); + var blankConstructor = t.GetConstructor(Type.EmptyTypes); - var repo1 = new YamlRepository(dir); - var eds = new ExternalDatabaseServer(repo1,"myServer",null); - repo1.SetDefault(PermissableDefaults.LiveLoggingServer_ID, eds); - - var repo2 = new YamlRepository(dir); - Assert.AreEqual(eds, repo2.GetDefaultFor(PermissableDefaults.LiveLoggingServer_ID)); + if (blankConstructor == null) + sb.AppendLine(t.Name); } - [Test] - public void PersistPackageContents() - { - var dir = GetUniqueDirectory(); + if (sb.Length > 0) + Assert.Fail( + $"All data classes must have a blank constructor. 
The following did not:{Environment.NewLine}{sb}"); + } - var repo1 = new YamlRepository(dir); + [Test] + public void PersistDefaults() + { + var dir = GetUniqueDirectory(); - var ds = UnitTests.WhenIHaveA(repo1); + var repo1 = new YamlRepository(dir); + var eds = new ExternalDatabaseServer(repo1, "myServer", null); + repo1.SetDefault(PermissableDefaults.LiveLoggingServer_ID, eds); - var package = UnitTests.WhenIHaveA(repo1); + var repo2 = new YamlRepository(dir); + Assert.AreEqual(eds, repo2.GetDefaultFor(PermissableDefaults.LiveLoggingServer_ID)); + } - Assert.IsEmpty(repo1.GetPackageContentsDictionary()); - repo1.PackageManager.AddDataSetToPackage(package, ds); - Assert.IsNotEmpty(repo1.GetPackageContentsDictionary()); + [Test] + public void PersistPackageContents() + { + var dir = GetUniqueDirectory(); - // A fresh repo loaded from the same directory should have persisted object relationships - var repo2 = new YamlRepository(dir); - Assert.IsNotEmpty(repo2.GetPackageContentsDictionary()); - } + var repo1 = new YamlRepository(dir); - [Test] - public void PersistDataExportPropertyManagerValues() - { - var dir = GetUniqueDirectory(); - - var repo1 = new YamlRepository(dir); - repo1.DataExportPropertyManager.SetValue(DataExportProperty.HashingAlgorithmPattern,"yarg"); - - // A fresh repo loaded from the same directory should have persisted object relationships - var repo2 = new YamlRepository(dir); - Assert.AreEqual("yarg", repo2.DataExportPropertyManager.GetValue(DataExportProperty.HashingAlgorithmPattern)); - } + var ds = UnitTests.WhenIHaveA(repo1); - [Test] - public void PersistGovernancePeriods() - { - var dir = GetUniqueDirectory(); + var package = UnitTests.WhenIHaveA(repo1); - var repo1 = new YamlRepository(dir); + Assert.IsEmpty(repo1.GetPackageContentsDictionary()); + repo1.PackageManager.AddDataSetToPackage(package, ds); + Assert.IsNotEmpty(repo1.GetPackageContentsDictionary()); - var period = UnitTests.WhenIHaveA(repo1); - var cata = UnitTests.WhenIHaveA(repo1); + // A fresh repo loaded from the same directory should have persisted object relationships + var repo2 = new YamlRepository(dir); + Assert.IsNotEmpty(repo2.GetPackageContentsDictionary()); + } - Assert.IsEmpty(repo1.GetAllGovernedCatalogues(period)); - repo1.Link(period, cata); - Assert.IsNotEmpty(repo1.GetAllGovernedCatalogues(period)); + [Test] + public void PersistDataExportPropertyManagerValues() + { + var dir = GetUniqueDirectory(); - // A fresh repo loaded from the same directory should have persisted object relationships - var repo2 = new YamlRepository(dir); - Assert.IsNotEmpty(repo2.GetAllGovernedCatalogues(period)); - } + var repo1 = new YamlRepository(dir); + repo1.DataExportPropertyManager.SetValue(DataExportProperty.HashingAlgorithmPattern, "yarg"); + // A fresh repo loaded from the same directory should have persisted object relationships + var repo2 = new YamlRepository(dir); + Assert.AreEqual("yarg", repo2.DataExportPropertyManager.GetValue(DataExportProperty.HashingAlgorithmPattern)); + } - [Test] - public void PersistForcedJoins() - { - var dir = GetUniqueDirectory(); + [Test] + public void PersistGovernancePeriods() + { + var dir = GetUniqueDirectory(); - var repo1 = new YamlRepository(dir); + var repo1 = new YamlRepository(dir); - var ac = UnitTests.WhenIHaveA(repo1); - var t = UnitTests.WhenIHaveA(repo1); + var period = UnitTests.WhenIHaveA(repo1); + var cata = UnitTests.WhenIHaveA(repo1); - Assert.IsEmpty(ac.ForcedJoins); - Assert.IsEmpty(repo1.AggregateForcedJoinManager.GetAllForcedJoinsFor(ac)); 
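The YamlRepository tests in this file all follow the same save-then-reload shape: mutate state through one repository, construct a second repository over the same directory, and assert the relationship survived the round trip. A stripped-down sketch of that test pattern against a hypothetical file-backed store (only the round-trip shape mirrors the diff):

    // Sketch only; requires: using System; using System.IO; using System.Linq; using NUnit.Framework;
    public sealed class FileStore
    {
        private readonly string _path;
        public FileStore(string directory) => _path = Path.Combine(directory, "store.txt");

        public void Set(string key, string value) =>
            File.AppendAllLines(_path, new[] { $"{key}={value}" });

        public string Get(string key) =>
            File.Exists(_path)
                ? File.ReadLines(_path)
                      .Where(l => l.StartsWith(key + "="))
                      .Select(l => l[(key.Length + 1)..])
                      .LastOrDefault()
                : null;
    }

    [Test]
    public void PersistsAcrossInstances()
    {
        var dir = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString());
        Directory.CreateDirectory(dir);

        new FileStore(dir).Set("HashingAlgorithmPattern", "yarg");

        // A fresh store over the same directory should see the persisted value.
        Assert.AreEqual("yarg", new FileStore(dir).Get("HashingAlgorithmPattern"));
    }

Reloading through a brand-new object is the point of the pattern: it proves the state was actually written to disk rather than merely cached on the first instance.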
- repo1.AggregateForcedJoinManager.CreateLinkBetween(ac,t); - Assert.IsNotEmpty(ac.ForcedJoins); - Assert.IsNotEmpty(repo1.AggregateForcedJoinManager.GetAllForcedJoinsFor(ac)); + Assert.IsEmpty(repo1.GetAllGovernedCatalogues(period)); + repo1.Link(period, cata); + Assert.IsNotEmpty(repo1.GetAllGovernedCatalogues(period)); - // A fresh repo loaded from the same directory should have persisted object relationships - var repo2 = new YamlRepository(dir); - Assert.IsNotEmpty(ac.ForcedJoins); - Assert.IsNotEmpty(repo2.AggregateForcedJoinManager.GetAllForcedJoinsFor(ac)); - } + // A fresh repo loaded from the same directory should have persisted object relationships + var repo2 = new YamlRepository(dir); + Assert.IsNotEmpty(repo2.GetAllGovernedCatalogues(period)); + } - [Test] - public void PersistCohortSubcontainers() - { - var dir = GetUniqueDirectory(); + [Test] + public void PersistForcedJoins() + { + var dir = GetUniqueDirectory(); - var repo1 = new YamlRepository(dir); + var repo1 = new YamlRepository(dir); - var root = UnitTests.WhenIHaveA(repo1); - var sub1 = new CohortAggregateContainer(repo1,SetOperation.INTERSECT); - var ac = UnitTests.WhenIHaveA(repo1); - - sub1.Order = 2; - sub1.SaveToDatabase(); + var ac = UnitTests.WhenIHaveA(repo1); + var t = UnitTests.WhenIHaveA(repo1); - root.AddChild(sub1); - root.AddChild(ac, 0); + Assert.IsEmpty(ac.ForcedJoins); + Assert.IsEmpty(repo1.AggregateForcedJoinManager.GetAllForcedJoinsFor(ac)); + repo1.AggregateForcedJoinManager.CreateLinkBetween(ac, t); + Assert.IsNotEmpty(ac.ForcedJoins); + Assert.IsNotEmpty(repo1.AggregateForcedJoinManager.GetAllForcedJoinsFor(ac)); - Assert.IsNotEmpty(root.GetOrderedContents()); - Assert.AreEqual(ac,root.GetOrderedContents().ToArray()[0]); - Assert.AreEqual(sub1, root.GetOrderedContents().ToArray()[1]); + // A fresh repo loaded from the same directory should have persisted object relationships + var repo2 = new YamlRepository(dir); + Assert.IsNotEmpty(ac.ForcedJoins); + Assert.IsNotEmpty(repo2.AggregateForcedJoinManager.GetAllForcedJoinsFor(ac)); + } - // A fresh repo loaded from the same directory should have persisted object relationships - var repo2 = new YamlRepository(dir); - root = repo2.GetObjectByID(root.ID); - Assert.IsNotEmpty(root.GetOrderedContents()); - Assert.AreEqual(ac, root.GetOrderedContents().ToArray()[0]); - Assert.AreEqual(sub1, root.GetOrderedContents().ToArray()[1]); - } + [Test] + public void PersistCohortSubcontainers() + { + var dir = GetUniqueDirectory(); - [Test] - public void PersistFilterContainers() - { - var dir = GetUniqueDirectory(); + var repo1 = new YamlRepository(dir); - var repo1 = new YamlRepository(dir); + var root = UnitTests.WhenIHaveA(repo1); + var sub1 = new CohortAggregateContainer(repo1, SetOperation.INTERSECT); + var ac = UnitTests.WhenIHaveA(repo1); - var ac = UnitTests.WhenIHaveA(repo1); - ac.CreateRootContainerIfNotExists(); + sub1.Order = 2; + sub1.SaveToDatabase(); - var f = new AggregateFilter(repo1, "my filter"); - ac.RootFilterContainer.AddChild(f); - var sub = new AggregateFilterContainer(repo1,FilterContainerOperation.AND); - ac.RootFilterContainer.AddChild(sub); + root.AddChild(sub1); + root.AddChild(ac, 0); - Assert.AreEqual(sub,ac.RootFilterContainer.GetSubContainers().Single()); - Assert.AreEqual(f, ac.RootFilterContainer.GetFilters().Single()); + Assert.IsNotEmpty(root.GetOrderedContents()); + Assert.AreEqual(ac, root.GetOrderedContents().ToArray()[0]); + Assert.AreEqual(sub1, root.GetOrderedContents().ToArray()[1]); - // A fresh repo loaded from the 
same directory should have persisted object relationships - var repo2 = new YamlRepository(dir); - ac = repo2.GetObjectByID(ac.ID); + // A fresh repo loaded from the same directory should have persisted object relationships + var repo2 = new YamlRepository(dir); + root = repo2.GetObjectByID(root.ID); - Assert.AreEqual(sub, ac.RootFilterContainer.GetSubContainers().Single()); - Assert.AreEqual(f, ac.RootFilterContainer.GetFilters().Single()); - } + Assert.IsNotEmpty(root.GetOrderedContents()); + Assert.AreEqual(ac, root.GetOrderedContents().ToArray()[0]); + Assert.AreEqual(sub1, root.GetOrderedContents().ToArray()[1]); + } - [Test] - public void PersistFilterContainers_Orphans() - { - var dir = GetUniqueDirectory(); + [Test] + public void PersistFilterContainers() + { + var dir = GetUniqueDirectory(); - var repo1 = new YamlRepository(dir); + var repo1 = new YamlRepository(dir); - var ac = UnitTests.WhenIHaveA(repo1); - ac.CreateRootContainerIfNotExists(); - var root = ac.RootFilterContainer; + var ac = UnitTests.WhenIHaveA(repo1); + ac.CreateRootContainerIfNotExists(); - var f = new AggregateFilter(repo1, "my filter"); - ac.RootFilterContainer.AddChild(f); - var sub = new AggregateFilterContainer(repo1,FilterContainerOperation.AND); - ac.RootFilterContainer.AddChild(sub); + var f = new AggregateFilter(repo1, "my filter"); + ac.RootFilterContainer.AddChild(f); + var sub = new AggregateFilterContainer(repo1, FilterContainerOperation.AND); + ac.RootFilterContainer.AddChild(sub); - Assert.AreEqual(sub,ac.RootFilterContainer.GetSubContainers().Single()); - Assert.AreEqual(f, ac.RootFilterContainer.GetFilters().Single()); + Assert.AreEqual(sub, ac.RootFilterContainer.GetSubContainers().Single()); + Assert.AreEqual(f, ac.RootFilterContainer.GetFilters().Single()); - // A fresh repo loaded from the same directory should have persisted object relationships - var repo2 = new YamlRepository(dir); - ac = repo2.GetObjectByID(ac.ID); + // A fresh repo loaded from the same directory should have persisted object relationships + var repo2 = new YamlRepository(dir); + ac = repo2.GetObjectByID(ac.ID); - Assert.AreEqual(sub, ac.RootFilterContainer.GetSubContainers().Single()); - Assert.AreEqual(f, ac.RootFilterContainer.GetFilters().Single()); + Assert.AreEqual(sub, ac.RootFilterContainer.GetSubContainers().Single()); + Assert.AreEqual(f, ac.RootFilterContainer.GetFilters().Single()); + } - // Make an orphan container by deleting the root + [Test] + public void PersistFilterContainers_Orphans() + { + var dir = GetUniqueDirectory(); - // don't check before deleting stuff - ((CatalogueObscureDependencyFinder)ac.CatalogueRepository.ObscureDependencyFinder).OtherDependencyFinders.Clear(); + var repo1 = new YamlRepository(dir); - // delete the root filter - ac.RootFilterContainer.DeleteInDatabase(); + var ac = UnitTests.WhenIHaveA(repo1); + ac.CreateRootContainerIfNotExists(); + var root = ac.RootFilterContainer; - // A fresh repo loaded from the same directory - var repo3 = new YamlRepository(dir); + var f = new AggregateFilter(repo1, "my filter"); + ac.RootFilterContainer.AddChild(f); + var sub = new AggregateFilterContainer(repo1, FilterContainerOperation.AND); + ac.RootFilterContainer.AddChild(sub); - // all these things should be gone - Assert.IsFalse(repo3.StillExists(sub)); - Assert.IsFalse(repo3.StillExists(root)); - Assert.IsFalse(repo3.StillExists(f)); + Assert.AreEqual(sub, ac.RootFilterContainer.GetSubContainers().Single()); + Assert.AreEqual(f, ac.RootFilterContainer.GetFilters().Single()); - } + // A 
fresh repo loaded from the same directory should have persisted object relationships + var repo2 = new YamlRepository(dir); + ac = repo2.GetObjectByID(ac.ID); - [Test] - public void PersistCredentials() - { - var dir = GetUniqueDirectory(); + Assert.AreEqual(sub, ac.RootFilterContainer.GetSubContainers().Single()); + Assert.AreEqual(f, ac.RootFilterContainer.GetFilters().Single()); - var repo1 = new YamlRepository(dir); + // Make an orphan container by deleting the root - var creds = UnitTests.WhenIHaveA(repo1); - var t = UnitTests.WhenIHaveA(repo1); + // don't check before deleting stuff + ((CatalogueObscureDependencyFinder)ac.CatalogueRepository.ObscureDependencyFinder).OtherDependencyFinders + .Clear(); - Assert.IsEmpty(creds.GetAllTableInfosThatUseThis().SelectMany(v=>v.Value)); - Assert.IsNull(t.GetCredentialsIfExists(ReusableLibraryCode.DataAccess.DataAccessContext.DataLoad)); - Assert.IsNull(t.GetCredentialsIfExists(ReusableLibraryCode.DataAccess.DataAccessContext.InternalDataProcessing)); + // delete the root filter + ac.RootFilterContainer.DeleteInDatabase(); - repo1.TableInfoCredentialsManager.CreateLinkBetween(creds,t,ReusableLibraryCode.DataAccess.DataAccessContext.DataLoad); + // A fresh repo loaded from the same directory + var repo3 = new YamlRepository(dir); - Assert.AreEqual(t,creds.GetAllTableInfosThatUseThis().SelectMany(v => v.Value).Single()); - Assert.AreEqual(creds,t.GetCredentialsIfExists(ReusableLibraryCode.DataAccess.DataAccessContext.DataLoad)); - Assert.IsNull(t.GetCredentialsIfExists(ReusableLibraryCode.DataAccess.DataAccessContext.InternalDataProcessing)); + // all these things should be gone + Assert.IsFalse(repo3.StillExists(sub)); + Assert.IsFalse(repo3.StillExists(root)); + Assert.IsFalse(repo3.StillExists(f)); + } + [Test] + public void PersistCredentials() + { + var dir = GetUniqueDirectory(); - // A fresh repo loaded from the same directory should have persisted object relationships - var repo2 = new YamlRepository(dir); - t = repo2.GetObjectByID(t.ID); + var repo1 = new YamlRepository(dir); - Assert.AreEqual(t, creds.GetAllTableInfosThatUseThis().SelectMany(v => v.Value).Single()); - Assert.AreEqual(creds, t.GetCredentialsIfExists(ReusableLibraryCode.DataAccess.DataAccessContext.DataLoad)); - Assert.IsNull(t.GetCredentialsIfExists(ReusableLibraryCode.DataAccess.DataAccessContext.InternalDataProcessing)); + var creds = UnitTests.WhenIHaveA(repo1); + var t = UnitTests.WhenIHaveA(repo1); + Assert.IsEmpty(creds.GetAllTableInfosThatUseThis().SelectMany(v => v.Value)); + Assert.IsNull(t.GetCredentialsIfExists(ReusableLibraryCode.DataAccess.DataAccessContext.DataLoad)); + Assert.IsNull( + t.GetCredentialsIfExists(ReusableLibraryCode.DataAccess.DataAccessContext.InternalDataProcessing)); - } + repo1.TableInfoCredentialsManager.CreateLinkBetween(creds, t, + ReusableLibraryCode.DataAccess.DataAccessContext.DataLoad); + Assert.AreEqual(t, creds.GetAllTableInfosThatUseThis().SelectMany(v => v.Value).Single()); + Assert.AreEqual(creds, t.GetCredentialsIfExists(ReusableLibraryCode.DataAccess.DataAccessContext.DataLoad)); + Assert.IsNull( + t.GetCredentialsIfExists(ReusableLibraryCode.DataAccess.DataAccessContext.InternalDataProcessing)); - [Test] - public void YamlRepository_LoadSavePluginClass() - { - var dir = GetUniqueDirectory(); + // A fresh repo loaded from the same directory should have persisted object relationships + var repo2 = new YamlRepository(dir); + t = repo2.GetObjectByID(t.ID); - var repo1 = new YamlRepository(dir); - var fi = new 
FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, "Blah.zip")); - File.WriteAllBytes(fi.FullName, new byte[] { 0x1, 0x2 }); + Assert.AreEqual(t, creds.GetAllTableInfosThatUseThis().SelectMany(v => v.Value).Single()); + Assert.AreEqual(creds, t.GetCredentialsIfExists(ReusableLibraryCode.DataAccess.DataAccessContext.DataLoad)); + Assert.IsNull( + t.GetCredentialsIfExists(ReusableLibraryCode.DataAccess.DataAccessContext.InternalDataProcessing)); + } - var version = FileVersionInfo.GetVersionInfo(Assembly.GetExecutingAssembly().Location).FileVersion; - var lma1 = UnitTests.WhenIHaveA<LoadModuleAssembly>(repo1); - var lma2 = UnitTests.WhenIHaveA<LoadModuleAssembly>(repo1); + [Test] + public void YamlRepository_LoadSavePluginClass() + { + var dir = GetUniqueDirectory(); + var repo1 = new YamlRepository(dir); + var fi = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, "Blah.zip")); + File.WriteAllBytes(fi.FullName, new byte[] { 0x1, 0x2 }); - lma1.Plugin.Name = "MyPlugin1.1.1.1.nupkg"; - lma1.Plugin.RdmpVersion = new Version(version); //the version of Rdmp.Core targetted - lma1.Plugin.PluginVersion = new Version(1, 1, 1, 1); //the version of the plugin - lma1.Plugin.SaveToDatabase(); - lma1.SaveToDatabase(); + var version = FileVersionInfo.GetVersionInfo(Assembly.GetExecutingAssembly().Location).FileVersion; - lma2.Plugin.Name = "MyPlugin1.1.1.2.nupkg"; - lma2.Plugin.RdmpVersion = new Version(version);//the version of Rdmp.Core targetted (same as above) - lma2.Plugin.PluginVersion = new Version(1, 1, 1, 2);//the version of the plugin (higher) - lma2.Plugin.SaveToDatabase(); - lma2.SaveToDatabase(); + var lma1 = UnitTests.WhenIHaveA<LoadModuleAssembly>(repo1); + var lma2 = UnitTests.WhenIHaveA<LoadModuleAssembly>(repo1); - var plugins = repo1.PluginManager.GetCompatiblePlugins(); - Assert.That(plugins, Has.Length.EqualTo(1)); - Assert.That(plugins[0], Is.EqualTo(lma2.Plugin)); + lma1.Plugin.Name = "MyPlugin1.1.1.1.nupkg"; + lma1.Plugin.RdmpVersion = new Version(version); //the version of Rdmp.Core targeted + lma1.Plugin.PluginVersion = new Version(1, 1, 1, 1); //the version of the plugin + lma1.Plugin.SaveToDatabase(); + lma1.SaveToDatabase(); - // A fresh repo loaded from the same directory should have persisted object relationships - var repo2 = new YamlRepository(dir); - plugins = repo2.PluginManager.GetCompatiblePlugins(); - Assert.That(plugins, Has.Length.EqualTo(1)); - Assert.That(plugins.Single(), Is.EqualTo(lma2.Plugin)); - } + lma2.Plugin.Name = "MyPlugin1.1.1.2.nupkg"; + lma2.Plugin.RdmpVersion = new Version(version); //the version of Rdmp.Core targeted (same as above) + lma2.Plugin.PluginVersion = new Version(1, 1, 1, 2); //the version of the plugin (higher) + lma2.Plugin.SaveToDatabase(); + lma2.SaveToDatabase(); - [Test] - public void TestYamlRepository_LoadObjects() - { - var dir = new DirectoryInfo(GetUniqueDirectoryName()); - var repo = new YamlRepository(dir); + var plugins = repo1.PluginManager.GetCompatiblePlugins(); + Assert.That(plugins, Has.Length.EqualTo(1)); + Assert.That(plugins[0], Is.EqualTo(lma2.Plugin)); - var c = new Catalogue(repo, "yar"); - Assert.Contains(c, repo.AllObjects.ToArray()); + // A fresh repo loaded from the same directory should have persisted object relationships + var repo2 = new YamlRepository(dir); + plugins = repo2.PluginManager.GetCompatiblePlugins(); + Assert.That(plugins, Has.Length.EqualTo(1)); + Assert.That(plugins.Single(), Is.EqualTo(lma2.Plugin)); + } - [Test] - public void TestYamlRepository_LoadObjects() - { - var dir = new DirectoryInfo(GetUniqueDirectoryName()); - var repo = new YamlRepository(dir); - var c = new Catalogue(repo, "yar"); - Assert.Contains(c, repo.AllObjects.ToArray()); - // creating a new repo should load the same object back - var repo2 = new YamlRepository(dir); - Assert.Contains(c, 
repo2.AllObjects.ToArray()); - } + [Test] + public void TestYamlRepository_LoadObjects() + { + var dir = new DirectoryInfo(GetUniqueDirectoryName()); + var repo = new YamlRepository(dir); - [Test] - public void TestYamlRepository_Save() - { - var dir = new DirectoryInfo(GetUniqueDirectoryName()); - var repo = new YamlRepository(dir); + var c = new Catalogue(repo, "yar"); - var c = new Catalogue(repo, "yar"); - c.Name = "ffff"; - c.SaveToDatabase(); + Assert.Contains(c, repo.AllObjects.ToArray()); - // creating a new repo should load the same object back - var repo2 = new YamlRepository(dir); - Assert.Contains(c, repo2.AllObjects.ToArray()); - Assert.AreEqual("ffff", repo2.AllObjects.OfType<Catalogue>().Single().Name); - } + // creating a new repo should load the same object back + var repo2 = new YamlRepository(dir); + Assert.Contains(c, repo2.AllObjects.ToArray()); + } - private string GetUniqueDirectoryName() - { - return Path.Combine(TestContext.CurrentContext.WorkDirectory, Guid.NewGuid().ToString().Replace("-", "")); - } + [Test] + public void TestYamlRepository_Save() + { + var dir = new DirectoryInfo(GetUniqueDirectoryName()); + var repo = new YamlRepository(dir); - private DirectoryInfo GetUniqueDirectory() + var c = new Catalogue(repo, "yar") { - return new DirectoryInfo(GetUniqueDirectoryName()); - } + Name = "ffff" + }; + c.SaveToDatabase(); + + // creating a new repo should load the same object back + var repo2 = new YamlRepository(dir); + Assert.Contains(c, repo2.AllObjects.ToArray()); + Assert.AreEqual("ffff", repo2.AllObjects.OfType<Catalogue>().Single().Name); } -} + + private static string GetUniqueDirectoryName() => Path.Combine(TestContext.CurrentContext.WorkDirectory, + Guid.NewGuid().ToString().Replace("-", "")); + + private static DirectoryInfo GetUniqueDirectory() => new(GetUniqueDirectoryName()); +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataExport/Cloning/CloneExtractionConfigurationTests.cs b/Rdmp.Core.Tests/DataExport/Cloning/CloneExtractionConfigurationTests.cs index f91dfe429f..3f938c0a08 100644 --- a/Rdmp.Core.Tests/DataExport/Cloning/CloneExtractionConfigurationTests.cs +++ b/Rdmp.Core.Tests/DataExport/Cloning/CloneExtractionConfigurationTests.cs @@ -16,54 +16,54 @@ using Tests.Common; using Tests.Common.Scenarios; -namespace Rdmp.Core.Tests.DataExport.Cloning +namespace Rdmp.Core.Tests.DataExport.Cloning; + +public class CloneExtractionConfigurationTests : TestsRequiringAnExtractionConfiguration { - public class CloneExtractionConfigurationTests:TestsRequiringAnExtractionConfiguration + [Test] + [TestCase(false)] + [TestCase(true)] + public void CloneWithFilters(bool introduceOrphanExtractionInformation) { + if (introduceOrphanExtractionInformation) + IntroduceOrphan(); - [Test] - [TestCase(false)] - [TestCase(true)] - public void CloneWithFilters(bool introduceOrphanExtractionInformation) - { - if (introduceOrphanExtractionInformation) - IntroduceOrphan(); - - Assert.IsEmpty(_configuration.ReleaseLog); + Assert.IsEmpty(_configuration.ReleaseLog); - var filter = new ExtractionFilter(CatalogueRepository, "FilterByFish", _extractionInformations[0]); - try - { - //setup a filter with a parameter - filter.WhereSQL = "Fish = @fish"; - - new ParameterCreator(new ExtractionFilterFactory(_extractionInformations[0]), null, null).CreateAll(filter,null); - filter.SaveToDatabase(); - - Assert.IsTrue(filter.ExtractionFilterParameters.Count()==1); - - //create a root container - var container = new FilterContainer(DataExportRepository); - 
_selectedDataSet.RootFilterContainer_ID = container.ID; - _selectedDataSet.SaveToDatabase(); - - //create a deployed filter - var importer = new FilterImporter(new DeployedExtractionFilterFactory(DataExportRepository), null); - var deployedFilter = (DeployedExtractionFilter)importer.ImportFilter(container, filter, null); - deployedFilter.FilterContainer_ID = container.ID; - deployedFilter.Name = "FilterByFishDeployed"; - deployedFilter.SaveToDatabase(); - - var param = deployedFilter.ExtractionFilterParameters[0]; - param.Value = "'jormungander'"; - param.SaveToDatabase(); - - ExtractDatasetCommand request = new ExtractDatasetCommand(_configuration,new ExtractableDatasetBundle(_extractableDataSet)); - request.GenerateQueryBuilder(); - Assert.AreEqual( - CollapseWhitespace( -string.Format( -@"DECLARE @fish AS varchar(50); + var filter = new ExtractionFilter(CatalogueRepository, "FilterByFish", _extractionInformations[0]); + try + { + //setup a filter with a parameter + filter.WhereSQL = "Fish = @fish"; + + new ParameterCreator(new ExtractionFilterFactory(_extractionInformations[0]), null, null).CreateAll(filter, + null); + filter.SaveToDatabase(); + + Assert.IsTrue(filter.ExtractionFilterParameters.Count() == 1); + + //create a root container + var container = new FilterContainer(DataExportRepository); + _selectedDataSet.RootFilterContainer_ID = container.ID; + _selectedDataSet.SaveToDatabase(); + + //create a deployed filter + var importer = new FilterImporter(new DeployedExtractionFilterFactory(DataExportRepository), null); + var deployedFilter = (DeployedExtractionFilter)importer.ImportFilter(container, filter, null); + deployedFilter.FilterContainer_ID = container.ID; + deployedFilter.Name = "FilterByFishDeployed"; + deployedFilter.SaveToDatabase(); + + var param = deployedFilter.ExtractionFilterParameters[0]; + param.Value = "'jormungander'"; + param.SaveToDatabase(); + + var request = new ExtractDatasetCommand(_configuration, new ExtractableDatasetBundle(_extractableDataSet)); + request.GenerateQueryBuilder(); + Assert.AreEqual( + CollapseWhitespace( + string.Format( + @"DECLARE @fish AS varchar(50); SET @fish='jormungander'; /*The ID of the cohort in [{0}CohortDatabase]..[Cohort]*/ DECLARE @CohortDefinitionID AS int; @@ -87,80 +87,77 @@ SELECT DISTINCT AND [{0}CohortDatabase]..[Cohort].[cohortDefinition_id]=-599 " - , TestDatabaseNames.Prefix)) - ,CollapseWhitespace(request.QueryBuilder.SQL)); - - ExtractionConfiguration deepClone = _configuration.DeepCloneWithNewIDs(); - Assert.AreEqual(deepClone.Cohort_ID,_configuration.Cohort_ID); - Assert.AreNotEqual(deepClone.ID,_configuration.ID); - try - { - ExtractDatasetCommand request2 = new ExtractDatasetCommand(deepClone, new ExtractableDatasetBundle(_extractableDataSet)); - request2.GenerateQueryBuilder(); - - Assert.AreEqual(request.QueryBuilder.SQL,request2.QueryBuilder.SQL); - - } - finally - { - deepClone.DeleteInDatabase(); - } + , TestDatabaseNames.Prefix)) + , CollapseWhitespace(request.QueryBuilder.SQL)); + + var deepClone = _configuration.DeepCloneWithNewIDs(); + Assert.AreEqual(deepClone.Cohort_ID, _configuration.Cohort_ID); + Assert.AreNotEqual(deepClone.ID, _configuration.ID); + try + { + var request2 = new ExtractDatasetCommand(deepClone, new ExtractableDatasetBundle(_extractableDataSet)); + request2.GenerateQueryBuilder(); + + Assert.AreEqual(request.QueryBuilder.SQL, request2.QueryBuilder.SQL); } - finally + finally { - - filter.DeleteInDatabase(); + deepClone.DeleteInDatabase(); } } + finally + { + 
filter.DeleteInDatabase(); + } + } - [Test] - public void CloneWithExtractionProgress() + [Test] + public void CloneWithExtractionProgress() + { + var sds = _configuration.SelectedDataSets[0]; + var ci = sds.GetCatalogue().CatalogueItems.First(); + var origProgress = new ExtractionProgress(DataExportRepository, sds, null, DateTime.Now, 10, "fff drrr", ci.ID) { - var sds = _configuration.SelectedDataSets[0]; - var ci = sds.GetCatalogue().CatalogueItems.First(); - var origProgress = new ExtractionProgress(DataExportRepository, sds, null, DateTime.Now, 10, "fff drrr", ci.ID); - origProgress.ProgressDate = new DateTime(2001, 01, 01); - origProgress.SaveToDatabase(); + ProgressDate = new DateTime(2001, 01, 01) + }; + origProgress.SaveToDatabase(); - ExtractionConfiguration deepClone = _configuration.DeepCloneWithNewIDs(); - Assert.AreEqual(deepClone.Cohort_ID, _configuration.Cohort_ID); - Assert.AreNotEqual(deepClone.ID, _configuration.ID); + var deepClone = _configuration.DeepCloneWithNewIDs(); + Assert.AreEqual(deepClone.Cohort_ID, _configuration.Cohort_ID); + Assert.AreNotEqual(deepClone.ID, _configuration.ID); - var clonedSds = deepClone.SelectedDataSets.Single(s => s.ExtractableDataSet_ID == sds.ExtractableDataSet_ID); + var clonedSds = deepClone.SelectedDataSets.Single(s => s.ExtractableDataSet_ID == sds.ExtractableDataSet_ID); - var clonedProgress = clonedSds.ExtractionProgressIfAny; + var clonedProgress = clonedSds.ExtractionProgressIfAny; - Assert.IsNotNull(clonedProgress); - Assert.IsNull(clonedProgress.StartDate); - Assert.IsNull(clonedProgress.ProgressDate, "Cloning a ExtractionProgress should reset its ProgressDate back to null in anticipation of it being extracted again"); - - Assert.AreEqual(clonedProgress.EndDate, origProgress.EndDate); - Assert.AreEqual(clonedProgress.NumberOfDaysPerBatch, origProgress.NumberOfDaysPerBatch); - Assert.AreEqual(clonedProgress.Name, origProgress.Name); - Assert.AreEqual(clonedProgress.ExtractionInformation_ID, origProgress.ExtractionInformation_ID); + Assert.IsNotNull(clonedProgress); + Assert.IsNull(clonedProgress.StartDate); + Assert.IsNull(clonedProgress.ProgressDate, + "Cloning a ExtractionProgress should reset its ProgressDate back to null in anticipation of it being extracted again"); + Assert.AreEqual(clonedProgress.EndDate, origProgress.EndDate); + Assert.AreEqual(clonedProgress.NumberOfDaysPerBatch, origProgress.NumberOfDaysPerBatch); + Assert.AreEqual(clonedProgress.Name, origProgress.Name); + Assert.AreEqual(clonedProgress.ExtractionInformation_ID, origProgress.ExtractionInformation_ID); - deepClone.DeleteInDatabase(); - // remove the progress so that it doesn't trip other tests - origProgress.DeleteInDatabase(); - } + deepClone.DeleteInDatabase(); + // remove the progress so that it doesn't trip other tests + origProgress.DeleteInDatabase(); + } - public void IntroduceOrphan() - { - var cols = _configuration.GetAllExtractableColumnsFor(_extractableDataSet).Cast().ToArray(); - var name = cols.Single(c => c.GetRuntimeName().Equals("Name")); + public void IntroduceOrphan() + { + var cols = _configuration.GetAllExtractableColumnsFor(_extractableDataSet).Cast().ToArray(); - using (var con = DataExportTableRepository.GetConnection()) - { - DataExportTableRepository.DiscoveredServer.GetCommand( - "UPDATE ExtractableColumn set CatalogueExtractionInformation_ID = " + int.MaxValue + " where ID = " + - name.ID, con).ExecuteNonQuery(); - } + var name = cols.Single(c => c.GetRuntimeName().Equals("Name")); - } + using var con = 
DataExportTableRepository.GetConnection(); + DataExportTableRepository.DiscoveredServer.GetCommand( + $"UPDATE ExtractableColumn set CatalogueExtractionInformation_ID = {int.MaxValue} where ID = {name.ID}", + con).ExecuteNonQuery(); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataExport/Cohort/CohortTests.cs b/Rdmp.Core.Tests/DataExport/Cohort/CohortTests.cs index b86b4fa79b..4f32a96364 100644 --- a/Rdmp.Core.Tests/DataExport/Cohort/CohortTests.cs +++ b/Rdmp.Core.Tests/DataExport/Cohort/CohortTests.cs @@ -7,91 +7,87 @@ using System.Collections.Generic; using System.Linq; using NUnit.Framework; -using ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Checks; using Tests.Common.Scenarios; -namespace Rdmp.Core.Tests.DataExport.Cohort +namespace Rdmp.Core.Tests.DataExport.Cohort; + +[Category("Database")] +public class CohortTests : TestsRequiringACohort { - [Category("Database")] - public class CohortTests : TestsRequiringACohort + [Test] + public void TestOverridingReleaseIdentifier() { - [Test] - public void TestOverridingReleaseIdentifier() - { - //get cohort without override - Assert.IsNull(_extractableCohort.OverrideReleaseIdentifierSQL); - - //should match global release identifier (from its source because there is no override) - Assert.AreEqual("ReleaseID", _extractableCohort.GetReleaseIdentifier(true)); - - //appy override - _extractableCohort.OverrideReleaseIdentifierSQL = "Fish"; - _extractableCohort.SaveToDatabase(); - - //should now match override - Assert.AreEqual("Fish", _extractableCohort.GetReleaseIdentifier()); - - //now set it back to null (not overriding) - _extractableCohort.OverrideReleaseIdentifierSQL = null; - _extractableCohort.SaveToDatabase(); - - //now check that we are back to the original release identifier - Assert.AreEqual("ReleaseID", _extractableCohort.GetReleaseIdentifier(true)); - - } - - [Test] - public void TestSelf_RecordAllFailures() - { - RecordAllFailures failures = new RecordAllFailures(); - failures.FailureMessages.Add("Hi there Thomas, How's it going?"); + //get cohort without override + Assert.IsNull(_extractableCohort.OverrideReleaseIdentifierSQL); + + //should match global release identifier (from its source because there is no override) + Assert.AreEqual("ReleaseID", _extractableCohort.GetReleaseIdentifier(true)); + + //appy override + _extractableCohort.OverrideReleaseIdentifierSQL = "Fish"; + _extractableCohort.SaveToDatabase(); - Assert.IsFalse(failures.AnyFailMessageLike("Carmageddon")); - - Assert.IsTrue(failures.AnyFailMessageLike("Thomas")); + //should now match override + Assert.AreEqual("Fish", _extractableCohort.GetReleaseIdentifier()); + + //now set it back to null (not overriding) + _extractableCohort.OverrideReleaseIdentifierSQL = null; + _extractableCohort.SaveToDatabase(); + + //now check that we are back to the original release identifier + Assert.AreEqual("ReleaseID", _extractableCohort.GetReleaseIdentifier(true)); + } + + [Test] + public void TestSelf_RecordAllFailures() + { + var failures = new RecordAllFailures(); + failures.FailureMessages.Add("Hi there Thomas, How's it going?"); - Assert.IsTrue(failures.AnyFailMessageLike("Thomas","going")); - Assert.IsTrue(failures.AnyFailMessageLike("Thomas", "going", "Hi")); - Assert.IsTrue(failures.AnyFailMessageLike("thomas", "gOIng", "hi")); + Assert.IsFalse(failures.AnyFailMessageLike("Carmageddon")); - Assert.IsFalse(failures.AnyFailMessageLike("Thomas", "going", "Hi","Fear the babadook")); + Assert.IsTrue(failures.AnyFailMessageLike("Thomas")); + 
Assert.IsTrue(failures.AnyFailMessageLike("Thomas", "going")); + Assert.IsTrue(failures.AnyFailMessageLike("Thomas", "going", "Hi")); + Assert.IsTrue(failures.AnyFailMessageLike("thomas", "gOIng", "hi")); + + Assert.IsFalse(failures.AnyFailMessageLike("Thomas", "going", "Hi", "Fear the babadook")); + } + + private class RecordAllFailures : ICheckNotifier + { + public RecordAllFailures() + { + FailureMessages = new List<string>(); } - private class RecordAllFailures : ICheckNotifier + public List<string> FailureMessages { get; set; } + + public bool AnyFailMessageLike(params string[] bitsTofind) { - public RecordAllFailures() - { - FailureMessages = new List<string>(); - } - public List<string> FailureMessages { get; set; } - - public bool AnyFailMessageLike(params string[] bitsTofind) - { - return FailureMessages.Any(m => + return FailureMessages.Any(m => { - bool found = bitsTofind.Any(); + var found = bitsTofind.Any(); - foreach(string s in bitsTofind) - if(!m.ToLower().Contains(s.ToLower())) + foreach (var s in bitsTofind) + if (!m.ToLower().Contains(s.ToLower())) found = false; return found; } - ); - } + ); + } - public bool OnCheckPerformed(CheckEventArgs args) - { - if(args.Result == CheckResult.Fail) - FailureMessages.Add(args.Message); + public bool OnCheckPerformed(CheckEventArgs args) + { + if (args.Result == CheckResult.Fail) + FailureMessages.Add(args.Message); - //accept all proposed changes - return true; - } + //accept all proposed changes + return true; } - } -} - +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataExport/Cohort/CommittingNewCohortsTests.cs b/Rdmp.Core.Tests/DataExport/Cohort/CommittingNewCohortsTests.cs index 8b340bff81..f0c8f99c8b 100644 --- a/Rdmp.Core.Tests/DataExport/Cohort/CommittingNewCohortsTests.cs +++ b/Rdmp.Core.Tests/DataExport/Cohort/CommittingNewCohortsTests.cs @@ -15,218 +15,239 @@ using Rdmp.Core.DataFlowPipeline; using Rdmp.Core.DataFlowPipeline.Requirements; using Rdmp.Core.DataLoad.Modules.DataFlowSources; -using Rdmp.Core.Repositories; -using ReusableLibraryCode.Checks; -using ReusableLibraryCode.Progress; +using Rdmp.Core.ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Progress; using Tests.Common.Scenarios; -namespace Rdmp.Core.Tests.DataExport.Cohort +namespace Rdmp.Core.Tests.DataExport.Cohort; + +public class CommittingNewCohortsTests : TestsRequiringACohort { - public class CommittingNewCohortsTests : TestsRequiringACohort + private string _filename; + private const string ProjName = "MyProj"; + + [SetUp] + protected override void SetUp() { - private string filename; - private string projName = "MyProj"; + base.SetUp(); - [SetUp] - protected override void SetUp() - { - base.SetUp(); + using var con = _cohortDatabase.Server.GetConnection(); + con.Open(); + EmptyCohortTables(con); - EmptyCohortTables(); + _filename = Path.Combine(TestContext.CurrentContext.TestDirectory, "CommittingNewCohorts.csv"); + + var sw = new StreamWriter(_filename); + sw.WriteLine("PrivateID,ReleaseID,SomeHeader"); + sw.WriteLine("Priv_1111,Pub_1111,Smile buddy"); + sw.WriteLine("Priv_2222,Pub_2222,Your on tv"); + sw.WriteLine("Priv_3333,Pub_3333,Smile buddy"); + sw.Close(); + } - filename = Path.Combine(TestContext.CurrentContext.TestDirectory, "CommittingNewCohorts.csv"); - StreamWriter sw = new StreamWriter(filename); - sw.WriteLine("PrivateID,ReleaseID,SomeHeader"); - sw.WriteLine("Priv_1111,Pub_1111,Smile buddy"); - sw.WriteLine("Priv_2222,Pub_2222,Your on tv"); - sw.WriteLine("Priv_3333,Pub_3333,Smile buddy"); - sw.Close(); - } + [Test] + public void 
CommittingNewCohortFile_IDPopulated_Throws() + { + var proj = new Project(DataExportRepository, ProjName); + + var request = new CohortCreationRequest(proj, + new CohortDefinition(511, "CommittingNewCohorts", 1, 999, _externalCohortTable), DataExportRepository, + "fish"); + var ex = Assert.Throws(() => request.Check(ThrowImmediatelyCheckNotifier.Quiet)); + Assert.AreEqual( + "Expected the cohort definition CommittingNewCohorts(Version 1, ID=511) to have a null ID - we are trying to create this, why would it already exist?", + ex.Message); + } + [Test] + public void CommittingNewCohortFile_ProjectNumberNumberMissing() + { + var proj = new Project(DataExportRepository, ProjName); + + var request = new CohortCreationRequest(proj, + new CohortDefinition(null, "CommittingNewCohorts", 1, 999, _externalCohortTable), DataExportRepository, + "fish"); + var ex = Assert.Throws(() => request.Check(ThrowImmediatelyCheckNotifier.Quiet)); + Assert.AreEqual( + "Project MyProj does not have a ProjectNumber specified, it should have the same number as the CohortCreationRequest (999)", + ex.Message); + } - [Test] - public void CommittingNewCohortFile_IDPopulated_Throws() - { - var proj = new Project(DataExportRepository, projName); + [Test] + public void CommittingNewCohortFile_ProjectNumberMismatch() + { + var proj = new Project(DataExportRepository, ProjName) { ProjectNumber = 321 }; + proj.SaveToDatabase(); + + var request = new CohortCreationRequest(proj, + new CohortDefinition(null, "CommittingNewCohorts", 1, 999, _externalCohortTable), DataExportRepository, + "fish"); + var ex = Assert.Throws(() => request.Check(ThrowImmediatelyCheckNotifier.Quiet)); + Assert.AreEqual("Project MyProj has ProjectNumber=321 but the CohortCreationRequest.ProjectNumber is 999", + ex.Message); + } - CohortCreationRequest request = new CohortCreationRequest(proj, new CohortDefinition(511, "CommittingNewCohorts",1,999,_externalCohortTable), DataExportRepository, "fish"); - var ex = Assert.Throws(()=>request.Check(new ThrowImmediatelyCheckNotifier())); - Assert.AreEqual("Expected the cohort definition CommittingNewCohorts(Version 1, ID=511) to have a null ID - we are trying to create this, why would it already exist?",ex.Message); - } + [Test] + public void CommittingNewCohortFile_CallPipeline() + { + var listener = ThrowImmediatelyDataLoadEventListener.Quiet; - [Test] - public void CommittingNewCohortFile_ProjectNumberNumberMissing() + var proj = new Project(DataExportRepository, ProjName) { - var proj = new Project(DataExportRepository, projName); + ProjectNumber = 999 + }; + proj.SaveToDatabase(); - CohortCreationRequest request = new CohortCreationRequest(proj, new CohortDefinition(null, "CommittingNewCohorts", 1, 999, _externalCohortTable), DataExportRepository, "fish"); - var ex = Assert.Throws(()=>request.Check(new ThrowImmediatelyCheckNotifier())); - Assert.AreEqual("Project MyProj does not have a ProjectNumber specified, it should have the same number as the CohortCreationRequest (999)",ex.Message); - } + var request = new CohortCreationRequest(proj, + new CohortDefinition(null, "CommittingNewCohorts", 1, 999, _externalCohortTable), DataExportRepository, + "fish"); + request.Check(ThrowImmediatelyCheckNotifier.Quiet); - [Test] - public void CommittingNewCohortFile_ProjectNumberMismatch() - { - var proj = new Project(DataExportRepository, projName) {ProjectNumber = 321}; - proj.SaveToDatabase(); + var source = new DelimitedFlatFileDataFlowSource(); + var destination = new BasicCohortDestination(); - 
CohortCreationRequest request = new CohortCreationRequest(proj, new CohortDefinition(null, "CommittingNewCohorts", 1, 999, _externalCohortTable), DataExportRepository, "fish"); - var ex = Assert.Throws(()=>request.Check(new ThrowImmediatelyCheckNotifier())); - Assert.AreEqual("Project MyProj has ProjectNumber=321 but the CohortCreationRequest.ProjectNumber is 999",ex.Message); - } + source.Separator = ","; + source.StronglyTypeInput = true; - [Test] - public void CommittingNewCohortFile_CallPipeline() - { - var listener = new ThrowImmediatelyDataLoadEventListener(); + var pipeline = new DataFlowPipelineEngine((DataFlowPipelineContext)request.GetContext(), + source, destination, listener); + pipeline.Initialize(new FlatFileToLoad(new FileInfo(_filename)), request); + pipeline.ExecutePipeline(new GracefulCancellationToken()); - var proj = new Project(DataExportRepository, projName); - proj.ProjectNumber = 999; - proj.SaveToDatabase(); + //there should be a new ExtractableCohort now + Assert.NotNull(request.NewCohortDefinition.ID); - CohortCreationRequest request = new CohortCreationRequest(proj, new CohortDefinition(null, "CommittingNewCohorts", 1, 999, _externalCohortTable), DataExportRepository, "fish"); - request.Check(new ThrowImmediatelyCheckNotifier()); + var ec = DataExportRepository.GetAllObjects() + .Single(c => c.OriginID == request.NewCohortDefinition.ID); - DelimitedFlatFileDataFlowSource source = new DelimitedFlatFileDataFlowSource(); - BasicCohortDestination destination = new BasicCohortDestination(); - - source.Separator = ","; - source.StronglyTypeInput = true; - - DataFlowPipelineEngine pipeline = new DataFlowPipelineEngine((DataFlowPipelineContext) request.GetContext(),source,destination,listener); - pipeline.Initialize(new FlatFileToLoad(new FileInfo(filename)),request); - pipeline.ExecutePipeline(new GracefulCancellationToken()); + //with the data in it from the test file + Assert.AreEqual(ec.Count, 3); + } - //there should be a new ExtractableCohort now - Assert.NotNull(request.NewCohortDefinition.ID); + [TestCase(true)] + [TestCase(false)] + public void DeprecateOldCohort(bool deprecate) + { + var proj = new Project(DataExportRepository, ProjName) + { + ProjectNumber = 999 + }; + proj.SaveToDatabase(); + + // we are replacing this imaginary cohort + var definition998 = new CohortDefinition(null, "CommittingNewCohorts", 1, 999, _externalCohortTable); + // with this one (v2) + var definition999 = new CohortDefinition(null, "CommittingNewCohorts", 2, 999, _externalCohortTable); + + // Create a basic cohort first + var request1 = new CohortCreationRequest(proj, definition998, DataExportRepository, "fish"); + request1.Check(ThrowImmediatelyCheckNotifier.Quiet); + + using var con = _cohortDatabase.Server.GetManagedConnection(); + request1.PushToServer(con); + request1.ImportAsExtractableCohort(deprecate, false); + + // the definition was imported and should now be a saved ExtractableCohort + var cohort998 = request1.CohortCreatedIfAny; + Assert.IsNotNull(cohort998); + Assert.IsFalse(cohort998.IsDeprecated); + + // define that the new definition attempts to replace the old one + definition999.CohortReplacedIfAny = cohort998; + + var request2 = new CohortCreationRequest(proj, definition999, DataExportRepository, "fish"); + request2.Check(ThrowImmediatelyCheckNotifier.Quiet); + request2.PushToServer(con); + request2.ImportAsExtractableCohort(deprecate, false); + + // after committing the new cohort the old one should be deprecated? 
+ cohort998.RevertToDatabaseState(); + Assert.AreEqual(deprecate, cohort998.IsDeprecated); + } - var ec = DataExportRepository.GetAllObjects().Single(c => c.OriginID == request.NewCohortDefinition.ID); - //with the data in it from the test file - Assert.AreEqual(ec.Count,3); - } + [TestCase(true)] + [TestCase(false)] + public void MigrateUsages(bool migrate) + { + var proj = new Project(DataExportRepository, ProjName) + { + ProjectNumber = 999 + }; + proj.SaveToDatabase(); + + // we are replacing this imaginary cohort + var definition998 = new CohortDefinition(null, "CommittingNewCohorts", 1, 999, _externalCohortTable); + // with this one (v2) + var definition999 = new CohortDefinition(null, "CommittingNewCohorts", 2, 999, _externalCohortTable); + + // Create a basic cohort first + var request1 = new CohortCreationRequest(proj, definition998, DataExportRepository, "fish"); + request1.Check(ThrowImmediatelyCheckNotifier.Quiet); + + using var con = _cohortDatabase.Server.GetManagedConnection(); + request1.PushToServer(con); + request1.ImportAsExtractableCohort(true, migrate); + + // the definition was imported and should now be a saved ExtractableCohort + var cohort998 = request1.CohortCreatedIfAny; + Assert.IsNotNull(cohort998); + Assert.IsFalse(cohort998.IsDeprecated); + + // legit user 1 + var ec1 = new ExtractionConfiguration(DataExportRepository, proj) + { + IsReleased = false, + Cohort_ID = cohort998.ID + }; + ec1.SaveToDatabase(); - [TestCase(true)] - [TestCase(false)] - public void DeprecateOldCohort(bool deprecate) + // legit user 2 + var ec2 = new ExtractionConfiguration(DataExportRepository, proj) { - var proj = new Project(DataExportRepository, projName); - proj.ProjectNumber = 999; - proj.SaveToDatabase(); - - // we are replacing this imaginary cohort - var definition998 = new CohortDefinition(null, "CommittingNewCohorts", 1, 999, _externalCohortTable); - // with this one (v2) - var definition999 = new CohortDefinition(null, "CommittingNewCohorts", 2, 999, _externalCohortTable); - - // Create a basic cohort first - CohortCreationRequest request1 = new CohortCreationRequest(proj, definition998, DataExportRepository, "fish"); - request1.Check(new ThrowImmediatelyCheckNotifier()); - - using var con = _cohortDatabase.Server.GetManagedConnection(); - request1.PushToServer(con); - request1.ImportAsExtractableCohort(deprecate, false); - - // the definition was imported and should now be a saved ExtractableCohort - var cohort998 = request1.CohortCreatedIfAny; - Assert.IsNotNull(cohort998); - Assert.IsFalse(cohort998.IsDeprecated); - - // define that the new definition attempts to replace the old one - definition999.CohortReplacedIfAny = cohort998; - - CohortCreationRequest request2 = new CohortCreationRequest(proj, definition999, DataExportRepository, "fish"); - request2.Check(new ThrowImmediatelyCheckNotifier()); - request2.PushToServer(con); - request2.ImportAsExtractableCohort(deprecate, false); - - // after committing the new cohort the old one should be deprecated? 
- cohort998.RevertToDatabaseState(); - Assert.AreEqual(deprecate, cohort998.IsDeprecated); - } - - - [TestCase(true)] - [TestCase(false)] - public void MigrateUsages(bool migrate) + IsReleased = false, + Cohort_ID = cohort998.ID + }; + ec2.SaveToDatabase(); + + // has no cohort yet defined so should not be migrated + var ec3 = new ExtractionConfiguration(DataExportRepository, proj); + + // is frozen so should not be migrated + var ec4 = new ExtractionConfiguration(DataExportRepository, proj) { - var proj = new Project(DataExportRepository, projName); - proj.ProjectNumber = 999; - proj.SaveToDatabase(); - - // we are replacing this imaginary cohort - var definition998 = new CohortDefinition(null, "CommittingNewCohorts", 1, 999, _externalCohortTable); - // with this one (v2) - var definition999 = new CohortDefinition(null, "CommittingNewCohorts", 2, 999, _externalCohortTable); - - // Create a basic cohort first - CohortCreationRequest request1 = new CohortCreationRequest(proj, definition998, DataExportRepository, "fish"); - request1.Check(new ThrowImmediatelyCheckNotifier()); - - using var con = _cohortDatabase.Server.GetManagedConnection(); - request1.PushToServer(con); - request1.ImportAsExtractableCohort(true, migrate); - - // the definition was imported and should now be a saved ExtractableCohort - var cohort998 = request1.CohortCreatedIfAny; - Assert.IsNotNull(cohort998); - Assert.IsFalse(cohort998.IsDeprecated); - - // legit user 1 - var ec1 = new ExtractionConfiguration(DataExportRepository, proj) - { - IsReleased = false, - Cohort_ID = cohort998.ID - }; - ec1.SaveToDatabase(); - - // legit user 2 - var ec2 = new ExtractionConfiguration(DataExportRepository,proj) - { - IsReleased = false, - Cohort_ID = cohort998.ID - }; - ec2.SaveToDatabase(); - - // has no cohort yet defined so should not be migrated - var ec3 = new ExtractionConfiguration(DataExportRepository,proj); - - // is frozen so should not be migrated - var ec4 = new ExtractionConfiguration(DataExportRepository,proj) - { - IsReleased = true, - Cohort_ID = cohort998.ID - }; - ec4.SaveToDatabase(); - - // define that the new definition attempts to replace the old one - definition999.CohortReplacedIfAny = cohort998; - - CohortCreationRequest request2 = new CohortCreationRequest(proj, definition999, DataExportRepository, "fish"); - request2.Check(new ThrowImmediatelyCheckNotifier()); - request2.PushToServer(con); - request2.ImportAsExtractableCohort(true, migrate); - - // the definition was imported and should now be a saved ExtractableCohort - var cohort999 = request2.CohortCreatedIfAny; - Assert.IsNotNull(cohort999); - - // after committing the new cohort who should be migrated? - ec1.RevertToDatabaseState(); - ec2.RevertToDatabaseState(); - ec3.RevertToDatabaseState(); - ec4.RevertToDatabaseState(); - - // should have been updated to use the new cohort - Assert.AreEqual(ec1.Cohort_ID, migrate ? cohort999.ID : cohort998.ID); - Assert.AreEqual(ec2.Cohort_ID, migrate ? 
cohort999.ID: cohort998.ID); - - // should not have magically gotten a cohort - Assert.IsNull(ec3.Cohort_ID); - - // is frozen so should not have been changed to the new cohort (and therefore still use cohort998) - Assert.AreEqual(ec4.Cohort_ID, cohort998.ID); - } + IsReleased = true, + Cohort_ID = cohort998.ID + }; + ec4.SaveToDatabase(); + + // define that the new definition attempts to replace the old one + definition999.CohortReplacedIfAny = cohort998; + + var request2 = new CohortCreationRequest(proj, definition999, DataExportRepository, "fish"); + request2.Check(ThrowImmediatelyCheckNotifier.Quiet); + request2.PushToServer(con); + request2.ImportAsExtractableCohort(true, migrate); + + // the definition was imported and should now be a saved ExtractableCohort + var cohort999 = request2.CohortCreatedIfAny; + Assert.IsNotNull(cohort999); + + // after committing the new cohort who should be migrated? + ec1.RevertToDatabaseState(); + ec2.RevertToDatabaseState(); + ec3.RevertToDatabaseState(); + ec4.RevertToDatabaseState(); + + // should have been updated to use the new cohort + Assert.AreEqual(ec1.Cohort_ID, migrate ? cohort999.ID : cohort998.ID); + Assert.AreEqual(ec2.Cohort_ID, migrate ? cohort999.ID : cohort998.ID); + + // should not have magically gotten a cohort + Assert.IsNull(ec3.Cohort_ID); + + // is frozen so should not have been changed to the new cohort (and therefore still use cohort998) + Assert.AreEqual(ec4.Cohort_ID, cohort998.ID); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataExport/Cohort/ProjectConsistentGuidReleaseIdentifierAllocatorTests.cs b/Rdmp.Core.Tests/DataExport/Cohort/ProjectConsistentGuidReleaseIdentifierAllocatorTests.cs index a40457120e..7b6f2c1f39 100644 --- a/Rdmp.Core.Tests/DataExport/Cohort/ProjectConsistentGuidReleaseIdentifierAllocatorTests.cs +++ b/Rdmp.Core.Tests/DataExport/Cohort/ProjectConsistentGuidReleaseIdentifierAllocatorTests.cs @@ -12,143 +12,146 @@ using Rdmp.Core.CohortCommitting.Pipeline; using Rdmp.Core.CohortCommitting.Pipeline.Destinations.IdentifierAllocation; using Rdmp.Core.DataExport.Data; -using ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Checks; using Tests.Common; using TypeGuesser; -namespace Rdmp.Core.Tests.DataExport.Cohort +namespace Rdmp.Core.Tests.DataExport.Cohort; + +internal class ProjectConsistentGuidReleaseIdentifierAllocatorTests : DatabaseTests { - class ProjectConsistentGuidReleaseIdentifierAllocatorTests:DatabaseTests + [TestCase(DatabaseType.MicrosoftSQLServer)] + [TestCase(DatabaseType.MySql)] + public void TestPreserveHistoricalReleaseIdentifiers(DatabaseType databaseType) { - [TestCase(DatabaseType.MicrosoftSQLServer)] - [TestCase(DatabaseType.MySql)] - public void TestPreserveHistoricalReleaseIdentifiers(DatabaseType databaseType) + var db = GetCleanedServer(databaseType); + + var privateIdentifierDataType = db.Server.GetQuerySyntaxHelper().TypeTranslater + .GetSQLDBTypeForCSharpType(new DatabaseTypeRequest(typeof(string), 10)); + + var wizard = new CreateNewCohortDatabaseWizard(db, CatalogueRepository, DataExportRepository, false); + var ect = wizard.CreateDatabase(new PrivateIdentifierPrototype("chi", privateIdentifierDataType), + new AcceptAllCheckNotifier()); + + var defTable = ect.DiscoverDefinitionTable(); + var cohortTable = ect.DiscoverCohortTable(); + + var p = new Project(DataExportRepository, "MyProject") + { + ProjectNumber = 10 + }; + p.SaveToDatabase(); + + var req = new CohortCreationRequest(p, new CohortDefinition(null, "TestCohort1", 1, 
p.ProjectNumber.Value, ect), + DataExportRepository, "Ignoreme"); + + var allocator = new ProjectConsistentGuidReleaseIdentifierAllocator(); + allocator.Initialize(req); + + //allocator is being asked to allocate when there are no cohorts at all defined + Assert.AreEqual(0, defTable.GetRowCount()); + Assert.IsNotNull(allocator.AllocateReleaseIdentifier("0101010101")); + + //Now let's define a cohort identifier for someone (0202020202) who is not in our project + defTable.Insert(new Dictionary + { + { "projectNumber", 11 }, //project is not our project + { "version", 1 }, + { "description", "flibble" } + }); + + Assert.AreEqual(1, defTable.GetRowCount()); + + cohortTable.Insert(new Dictionary + { + { ect.GetQuerySyntaxHelper().GetRuntimeName(ect.DefinitionTableForeignKeyField), 1 }, + { "chi", "0202020202" }, + { "ReleaseId", "0x0123" } + }); + + //recreate allocator to clear map + allocator = new ProjectConsistentGuidReleaseIdentifierAllocator(); + allocator.Initialize(req); + + //allocator is being asked to allocate when there are cohorts defined including one with our person 02020202 but that person was in a different project + Assert.AreEqual(1, defTable.GetRowCount()); + Assert.AreEqual(1, cohortTable.GetRowCount()); + Assert.IsNotNull(allocator.AllocateReleaseIdentifier("0202020202")); + Assert.AreNotEqual("0x0123", allocator.AllocateReleaseIdentifier("0202020202")); + + + //Now let's define a cohort identifier for someone (0202020202) who IS in our project + defTable.Insert(new Dictionary + { + { "projectNumber", 10 }, //this is our project number! + { "version", 1 }, + { "description", "flibble" } + }); + + Assert.AreEqual(2, defTable.GetRowCount()); + + cohortTable.Insert(new Dictionary + { + { ect.GetQuerySyntaxHelper().GetRuntimeName(ect.DefinitionTableForeignKeyField), 2 }, + { "chi", "0202020202" }, + { "ReleaseId", "0x0127" } + }); + + //recreate allocator to clear map + allocator = new ProjectConsistentGuidReleaseIdentifierAllocator(); + allocator.Initialize(req); + + //allocator is being asked to allocate when the person 0202020202 has previously appeared under our project (10) + Assert.AreEqual(2, defTable.GetRowCount()); + Assert.AreEqual(2, cohortTable.GetRowCount()); + Assert.IsNotNull(allocator.AllocateReleaseIdentifier("0202020202")); + Assert.AreEqual("0x0127", allocator.AllocateReleaseIdentifier("0202020202")); + + + //finally let's break it by giving it conflicting historical records + //let's pretend that previously we had already got 2 historical batches for the project, batch 1 released 0202020202 as 0x0127 (see above) and batch 2 released 0202020202 as 0x0128 + defTable.Insert(new Dictionary { - var db = GetCleanedServer(databaseType); - - var privateIdentifierDataType = db.Server.GetQuerySyntaxHelper().TypeTranslater.GetSQLDBTypeForCSharpType(new DatabaseTypeRequest(typeof(string),10)); - - var wizard = new CreateNewCohortDatabaseWizard(db,CatalogueRepository,DataExportRepository,false); - var ect = wizard.CreateDatabase(new PrivateIdentifierPrototype("chi", privateIdentifierDataType),new AcceptAllCheckNotifier()); - - var defTable = ect.DiscoverDefinitionTable(); - var cohortTable = ect.DiscoverCohortTable(); - - Project p = new Project(DataExportRepository,"MyProject"); - p.ProjectNumber = 10; - p.SaveToDatabase(); - - var req = new CohortCreationRequest(p,new CohortDefinition(null,"TestCohort1",1,p.ProjectNumber.Value,ect),DataExportRepository,"Ignoreme"); - - var allocator = new ProjectConsistentGuidReleaseIdentifierAllocator(); - allocator.Initialize(req); 
- - //allocator is being asked to allocate when there are no cohorts at all defined - Assert.AreEqual(0, defTable.GetRowCount()); - Assert.IsNotNull(allocator.AllocateReleaseIdentifier("0101010101")); - - //Now let's define a cohort identifier for someone (0202020202) who is not in our project - defTable.Insert(new Dictionary() - { - {"projectNumber", 11}, //project is not our project - {"version", 1}, - {"description","flibble"} - }); - - Assert.AreEqual(1,defTable.GetRowCount()); - - cohortTable.Insert(new Dictionary() - { - {ect.GetQuerySyntaxHelper().GetRuntimeName(ect.DefinitionTableForeignKeyField), 1}, - {"chi", "0202020202"}, - {"ReleaseId", "0x0123"} - }); - - //recreate allocator to clear map - allocator = new ProjectConsistentGuidReleaseIdentifierAllocator(); - allocator.Initialize(req); - - //allocator is being asked to allocate when there are cohorts defined including one with our person 02020202 but that person was in a different project - Assert.AreEqual(1, defTable.GetRowCount()); - Assert.AreEqual(1,cohortTable.GetRowCount()); - Assert.IsNotNull(allocator.AllocateReleaseIdentifier("0202020202")); - Assert.AreNotEqual("0x0123",allocator.AllocateReleaseIdentifier("0202020202")); - - - //Now let's define a cohort identifier for someone (0202020202) who IS in our project - defTable.Insert(new Dictionary() - { - {"projectNumber", 10}, //this is our project number! - {"version", 1}, - {"description","flibble"} - }); - - Assert.AreEqual(2, defTable.GetRowCount()); - - cohortTable.Insert(new Dictionary() - { - {ect.GetQuerySyntaxHelper().GetRuntimeName(ect.DefinitionTableForeignKeyField), 2}, - {"chi", "0202020202"}, - {"ReleaseId", "0x0127"} - }); - - //recreate allocator to clear map - allocator = new ProjectConsistentGuidReleaseIdentifierAllocator(); - allocator.Initialize(req); - - //allocator is being asked to allocate when the person 0202020202 has previously appeared under our project (10) - Assert.AreEqual(2, defTable.GetRowCount()); - Assert.AreEqual(2, cohortTable.GetRowCount()); - Assert.IsNotNull(allocator.AllocateReleaseIdentifier("0202020202")); - Assert.AreEqual("0x0127", allocator.AllocateReleaseIdentifier("0202020202")); - - - //finally let's break it by giving it conflicting historical records - //let's pretend that previously we had already got 2 historical batches for the project, batch 1 released 0202020202 as 0x0127 (see above) and batch 2 released 0202020202 as 0x0128 - defTable.Insert(new Dictionary() - { - {"projectNumber", 10}, //this is our project number! 
- {"version", 2}, - {"description","flibble"} - }); - - Assert.AreEqual(3, defTable.GetRowCount()); - - cohortTable.Insert(new Dictionary() - { - {ect.GetQuerySyntaxHelper().GetRuntimeName(ect.DefinitionTableForeignKeyField), 3}, - {"chi", "0202020202"}, - {"ReleaseId", "0x0128"} - }); - - //recreate allocator to clear map - allocator = new ProjectConsistentGuidReleaseIdentifierAllocator(); - allocator.Initialize(req); - - //allocator is being asked to allocate when the person 0202020202 has previously appeared under our project (10) as release identifiers 0x0127 and 0x0128 - Assert.AreEqual(3, defTable.GetRowCount()); - Assert.AreEqual(3, cohortTable.GetRowCount()); - - var ex = Assert.Throws(() => allocator.AllocateReleaseIdentifier("0202020202")); - - //should be complaining about both of these conflicting release identifiers existing - StringAssert.Contains("0x0127",ex.Message); - StringAssert.Contains("0x0128", ex.Message); - - //fix the problem - using (var con = db.Server.GetConnection()) - { - con.Open(); - db.Server.GetCommand("UPDATE " + cohortTable + " SET ReleaseId='0x0127' WHERE ReleaseId='0x0128'", con).ExecuteScalar(); - } - - //should be happy now again - Assert.AreEqual(3, defTable.GetRowCount()); - Assert.AreEqual(3, cohortTable.GetRowCount()); - Assert.IsNotNull(allocator.AllocateReleaseIdentifier("0202020202")); - Assert.AreEqual("0x0127", allocator.AllocateReleaseIdentifier("0202020202")); + { "projectNumber", 10 }, //this is our project number! + { "version", 2 }, + { "description", "flibble" } + }); + Assert.AreEqual(3, defTable.GetRowCount()); + + cohortTable.Insert(new Dictionary + { + { ect.GetQuerySyntaxHelper().GetRuntimeName(ect.DefinitionTableForeignKeyField), 3 }, + { "chi", "0202020202" }, + { "ReleaseId", "0x0128" } + }); + + //recreate allocator to clear map + allocator = new ProjectConsistentGuidReleaseIdentifierAllocator(); + allocator.Initialize(req); + + //allocator is being asked to allocate when the person 0202020202 has previously appeared under our project (10) as release identifiers 0x0127 and 0x0128 + Assert.AreEqual(3, defTable.GetRowCount()); + Assert.AreEqual(3, cohortTable.GetRowCount()); + + var ex = Assert.Throws(() => allocator.AllocateReleaseIdentifier("0202020202")); + + //should be complaining about both of these conflicting release identifiers existing + StringAssert.Contains("0x0127", ex.Message); + StringAssert.Contains("0x0128", ex.Message); + + //fix the problem + using (var con = db.Server.GetConnection()) + { + con.Open(); + db.Server.GetCommand($"UPDATE {cohortTable} SET ReleaseId='0x0127' WHERE ReleaseId='0x0128'", con) + .ExecuteScalar(); } + //should be happy now again + Assert.AreEqual(3, defTable.GetRowCount()); + Assert.AreEqual(3, cohortTable.GetRowCount()); + Assert.IsNotNull(allocator.AllocateReleaseIdentifier("0202020202")); + Assert.AreEqual("0x0127", allocator.AllocateReleaseIdentifier("0202020202")); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataExport/ConfigurationPropertiesTests.cs b/Rdmp.Core.Tests/DataExport/ConfigurationPropertiesTests.cs index 298a2da474..b22e9e50e9 100644 --- a/Rdmp.Core.Tests/DataExport/ConfigurationPropertiesTests.cs +++ b/Rdmp.Core.Tests/DataExport/ConfigurationPropertiesTests.cs @@ -8,19 +8,20 @@ using Rdmp.Core.Repositories.Managers; using Tests.Common; -namespace Rdmp.Core.Tests.DataExport +namespace Rdmp.Core.Tests.DataExport; + +public class ConfigurationPropertiesTests : DatabaseTests { - public class ConfigurationPropertiesTests : DatabaseTests + [Test] + 
public void CreateNewArgumentAndGetValue() { - [Test] - public void CreateNewArgumentAndGetValue() - { - DataExportRepository.DataExportPropertyManager.SetValue(DataExportProperty.HashingAlgorithmPattern, "hi"); - Assert.AreEqual(DataExportRepository.DataExportPropertyManager.GetValue(DataExportProperty.HashingAlgorithmPattern), "hi"); + DataExportRepository.DataExportPropertyManager.SetValue(DataExportProperty.HashingAlgorithmPattern, "hi"); + Assert.AreEqual( + DataExportRepository.DataExportPropertyManager.GetValue(DataExportProperty.HashingAlgorithmPattern), "hi"); - //make sure delete - DataExportRepository.DataExportPropertyManager.SetValue(DataExportProperty.HashingAlgorithmPattern, null); - Assert.AreEqual(DataExportRepository.DataExportPropertyManager.GetValue(DataExportProperty.HashingAlgorithmPattern), null); - } + //make sure delete + DataExportRepository.DataExportPropertyManager.SetValue(DataExportProperty.HashingAlgorithmPattern, null); + Assert.AreEqual( + DataExportRepository.DataExportPropertyManager.GetValue(DataExportProperty.HashingAlgorithmPattern), null); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataExport/CustomData/CustomDataImportingTests.cs b/Rdmp.Core.Tests/DataExport/CustomData/CustomDataImportingTests.cs index f603b057fd..c1d2a97cae 100644 --- a/Rdmp.Core.Tests/DataExport/CustomData/CustomDataImportingTests.cs +++ b/Rdmp.Core.Tests/DataExport/CustomData/CustomDataImportingTests.cs @@ -16,307 +16,277 @@ using Rdmp.Core.DataExport.DataExtraction.UserPicks; using Tests.Common.Scenarios; -namespace Rdmp.Core.Tests.DataExport.CustomData +namespace Rdmp.Core.Tests.DataExport.CustomData; + +public class CustomDataImportingTests : TestsRequiringAnExtractionConfiguration { - public class CustomDataImportingTests : TestsRequiringAnExtractionConfiguration + [Test] + public void Extract_ProjectSpecificCatalogue_WholeDataset() { - [Test] - public void Extract_ProjectSpecificCatalogue_WholeDataset() - { - //make the catalogue a custom catalogue for this project - CustomExtractableDataSet.Project_ID = _project.ID; - CustomExtractableDataSet.SaveToDatabase(); + //make the catalogue a custom catalogue for this project + CustomExtractableDataSet.Project_ID = _project.ID; + CustomExtractableDataSet.SaveToDatabase(); - var pipe = SetupPipeline(); - pipe.Name = "Extract_ProjectSpecificCatalogue_WholeDataset Pipe"; - pipe.SaveToDatabase(); + var pipe = SetupPipeline(); + pipe.Name = "Extract_ProjectSpecificCatalogue_WholeDataset Pipe"; + pipe.SaveToDatabase(); - _configuration.AddDatasetToConfiguration(CustomExtractableDataSet); + _configuration.AddDatasetToConfiguration(CustomExtractableDataSet); - try - { - _request = new ExtractDatasetCommand(_configuration,new ExtractableDatasetBundle(CustomExtractableDataSet)); - ExtractionPipelineUseCase useCase; - IExecuteDatasetExtractionDestination results; - Execute(out useCase, out results); + try + { + _request = new ExtractDatasetCommand(_configuration, + new ExtractableDatasetBundle(CustomExtractableDataSet)); + Execute(out _, out var results); - var customDataCsv = results.DirectoryPopulated.GetFiles().Single(f => f.Name.Equals("custTable99.csv")); + var customDataCsv = results.DirectoryPopulated.GetFiles().Single(f => f.Name.Equals("custTable99.csv")); - Assert.IsNotNull(customDataCsv); - - var lines = File.ReadAllLines(customDataCsv.FullName); + Assert.IsNotNull(customDataCsv); - Assert.AreEqual("SuperSecretThing,ReleaseID",lines[0]); - Assert.AreEqual("monkeys can all secretly fly,Pub_54321",lines[1]); - 
Assert.AreEqual("the wizard of OZ was a man behind a machine,Pub_11ftw",lines[2]); + var lines = File.ReadAllLines(customDataCsv.FullName); - } - finally - { - _configuration.RemoveDatasetFromConfiguration(CustomExtractableDataSet); - } + Assert.AreEqual("SuperSecretThing,ReleaseID", lines[0]); + Assert.AreEqual("monkeys can all secretly fly,Pub_54321", lines[1]); + Assert.AreEqual("the wizard of OZ was a man behind a machine,Pub_11ftw", lines[2]); } - - - /// - /// Tests that you can add a custom cohort column on the end of an existing dataset as an append. Requires you configure a JoinInfo - /// - [Test] - public void Extract_ProjectSpecificCatalogue_AppendedColumn() + finally { - //make the catalogue a custom catalogue for this project - CustomExtractableDataSet.Project_ID = _project.ID; - CustomExtractableDataSet.SaveToDatabase(); - - var pipe = SetupPipeline(); - pipe.Name = "Extract_ProjectSpecificCatalogue_AppendedColumn Pipe"; - pipe.SaveToDatabase(); + _configuration.RemoveDatasetFromConfiguration(CustomExtractableDataSet); + } + } - var extraColumn = CustomCatalogue.GetAllExtractionInformation(ExtractionCategory.ProjectSpecific).Single(e => e.GetRuntimeName().Equals("SuperSecretThing")); - var asExtractable = new ExtractableColumn(DataExportRepository, _extractableDataSet, _configuration, extraColumn, 10,extraColumn.SelectSQL); - //get rid of any lingering joins - foreach (JoinInfo j in CatalogueRepository.GetAllObjects()) - j.DeleteInDatabase(); + /// + /// Tests that you can add a custom cohort column on the end of an existing dataset as an append. Requires you configure a JoinInfo + /// + [Test] + public void Extract_ProjectSpecificCatalogue_AppendedColumn() + { + //make the catalogue a custom catalogue for this project + CustomExtractableDataSet.Project_ID = _project.ID; + CustomExtractableDataSet.SaveToDatabase(); - //add the ability to join the two tables in the query - var idCol = _extractableDataSet.Catalogue.GetAllExtractionInformation(ExtractionCategory.Core).Single(c => c.IsExtractionIdentifier).ColumnInfo; - var otherIdCol = CustomCatalogue.GetAllExtractionInformation(ExtractionCategory.ProjectSpecific).Single(e => e.GetRuntimeName().Equals("PrivateID")).ColumnInfo; - new JoinInfo(CatalogueRepository,idCol, otherIdCol,ExtractionJoinType.Left,null); + var pipe = SetupPipeline(); + pipe.Name = "Extract_ProjectSpecificCatalogue_AppendedColumn Pipe"; + pipe.SaveToDatabase(); - //generate a new request (this will include the newly created column) - _request = new ExtractDatasetCommand( _configuration, new ExtractableDatasetBundle(_extractableDataSet)); + var extraColumn = CustomCatalogue.GetAllExtractionInformation(ExtractionCategory.ProjectSpecific) + .Single(e => e.GetRuntimeName().Equals("SuperSecretThing")); + var asExtractable = new ExtractableColumn(DataExportRepository, _extractableDataSet, _configuration, + extraColumn, 10, extraColumn.SelectSQL); - var tbl = Database.ExpectTable("TestTable"); - tbl.Truncate(); + //get rid of any lingering joins + foreach (var j in CatalogueRepository.GetAllObjects()) + j.DeleteInDatabase(); - using(var blk = tbl.BeginBulkInsert()) - { - var dt = new DataTable(); - dt.Columns.Add("PrivateID"); - dt.Columns.Add("Name"); - dt.Columns.Add("DateOfBirth"); - - dt.Rows.Add(new object[] {"Priv_12345", "Bob","2001-01-01"}); - dt.Rows.Add(new object[] {"Priv_wtf11", "Frank","2001-10-29"}); - blk.Upload(dt); - } - - ExtractionPipelineUseCase useCase; - IExecuteDatasetExtractionDestination results; - Execute(out useCase, out results); + //add 
the ability to join the two tables in the query + var idCol = _extractableDataSet.Catalogue.GetAllExtractionInformation(ExtractionCategory.Core) + .Single(c => c.IsExtractionIdentifier).ColumnInfo; + var otherIdCol = CustomCatalogue.GetAllExtractionInformation(ExtractionCategory.ProjectSpecific) + .Single(e => e.GetRuntimeName().Equals("PrivateID")).ColumnInfo; + new JoinInfo(CatalogueRepository, idCol, otherIdCol, ExtractionJoinType.Left, null); - var mainDataTableCsv = results.DirectoryPopulated.GetFiles().Single(f => f.Name.Equals("TestTable.csv")); + //generate a new request (this will include the newly created column) + _request = new ExtractDatasetCommand(_configuration, new ExtractableDatasetBundle(_extractableDataSet)); - Assert.IsNotNull(mainDataTableCsv); - Assert.AreEqual("TestTable.csv", mainDataTableCsv.Name); - - var lines = File.ReadAllLines(mainDataTableCsv.FullName); + var tbl = Database.ExpectTable("TestTable"); + tbl.Truncate(); - Assert.AreEqual("ReleaseID,Name,DateOfBirth,SuperSecretThing", lines[0]); + using (var blk = tbl.BeginBulkInsert()) + { + var dt = new DataTable(); + dt.Columns.Add("PrivateID"); + dt.Columns.Add("Name"); + dt.Columns.Add("DateOfBirth"); + + dt.Rows.Add(new object[] { "Priv_12345", "Bob", "2001-01-01" }); + dt.Rows.Add(new object[] { "Priv_wtf11", "Frank", "2001-10-29" }); + blk.Upload(dt); + } - var bobLine = lines.Single(l => l.StartsWith("Pub_54321,Bob")); - var frankLine = lines.Single(l => l.StartsWith("Pub_11ftw,Frank")); + Execute(out _, out var results); - Assert.AreEqual("Pub_54321,Bob,2001-01-01,monkeys can all secretly fly", bobLine); - Assert.AreEqual("Pub_11ftw,Frank,2001-10-29,the wizard of OZ was a man behind a machine", frankLine); + var mainDataTableCsv = results.DirectoryPopulated.GetFiles().Single(f => f.Name.Equals("TestTable.csv")); - asExtractable.DeleteInDatabase(); - } + Assert.IsNotNull(mainDataTableCsv); + Assert.AreEqual("TestTable.csv", mainDataTableCsv.Name); - /// - /// Tests that you can reference a custom cohort column in the WHERE Sql of a core dataset in extraction. 
Requires you configure a JoinInfo and specify a SelectedDataSetsForcedJoin - /// - [Test] - public void Extract_ProjectSpecificCatalogue_FilterReference() - { - //make the catalogue a custom catalogue for this project - CustomExtractableDataSet.Project_ID = _project.ID; - CustomExtractableDataSet.SaveToDatabase(); + var lines = File.ReadAllLines(mainDataTableCsv.FullName); - var pipe = SetupPipeline(); - pipe.Name = "Extract_ProjectSpecificCatalogue_FilterReference Pipe"; - pipe.SaveToDatabase(); + Assert.AreEqual("ReleaseID,Name,DateOfBirth,SuperSecretThing", lines[0]); - var rootContainer = new FilterContainer(DataExportRepository); - _selectedDataSet.RootFilterContainer_ID = rootContainer.ID; - _selectedDataSet.SaveToDatabase(); + var bobLine = lines.Single(l => l.StartsWith("Pub_54321,Bob")); + var frankLine = lines.Single(l => l.StartsWith("Pub_11ftw,Frank")); - var filter = new DeployedExtractionFilter(DataExportRepository, "monkeys only", rootContainer); - filter.WhereSQL = "SuperSecretThing = 'monkeys can all secretly fly'"; - filter.SaveToDatabase(); - rootContainer.AddChild(filter); + Assert.AreEqual("Pub_54321,Bob,2001-01-01,monkeys can all secretly fly", bobLine); + Assert.AreEqual("Pub_11ftw,Frank,2001-10-29,the wizard of OZ was a man behind a machine", frankLine); - //get rid of any lingering joins - foreach (JoinInfo j in CatalogueRepository.GetAllObjects()) - j.DeleteInDatabase(); + asExtractable.DeleteInDatabase(); + } - //add the ability to join the two tables in the query - var idCol = _extractableDataSet.Catalogue.GetAllExtractionInformation(ExtractionCategory.Core).Single(c => c.IsExtractionIdentifier).ColumnInfo; - var otherIdCol = CustomCatalogue.GetAllExtractionInformation(ExtractionCategory.ProjectSpecific).Single(e => e.GetRuntimeName().Equals("PrivateID")).ColumnInfo; - new JoinInfo(CatalogueRepository,idCol, otherIdCol, ExtractionJoinType.Left, null); + /// + Tests that you can reference a custom cohort column in the WHERE Sql of a core dataset in extraction.
Requires you configure a JoinInfo and specify a SelectedDataSetsForcedJoin + /// + [Test] + public void Extract_ProjectSpecificCatalogue_FilterReference() + { + //make the catalogue a custom catalogue for this project + CustomExtractableDataSet.Project_ID = _project.ID; + CustomExtractableDataSet.SaveToDatabase(); - new SelectedDataSetsForcedJoin(DataExportRepository, _selectedDataSet, CustomTableInfo); + var pipe = SetupPipeline(); + pipe.Name = "Extract_ProjectSpecificCatalogue_FilterReference Pipe"; + pipe.SaveToDatabase(); - //generate a new request (this will include the newly created column) - _request = new ExtractDatasetCommand( _configuration, new ExtractableDatasetBundle(_extractableDataSet)); - - var tbl = Database.ExpectTable("TestTable"); - tbl.Truncate(); + var rootContainer = new FilterContainer(DataExportRepository); + _selectedDataSet.RootFilterContainer_ID = rootContainer.ID; + _selectedDataSet.SaveToDatabase(); - using (var blk = tbl.BeginBulkInsert()) - { - var dt = new DataTable(); - dt.Columns.Add("PrivateID"); - dt.Columns.Add("Name"); - dt.Columns.Add("DateOfBirth"); - - dt.Rows.Add(new object[] { "Priv_12345", "Bob", "2001-01-01" }); - dt.Rows.Add(new object[] { "Priv_wtf11", "Frank", "2001-10-29" }); - blk.Upload(dt); - } + var filter = new DeployedExtractionFilter(DataExportRepository, "monkeys only", rootContainer) + { + WhereSQL = "SuperSecretThing = 'monkeys can all secretly fly'" + }; + filter.SaveToDatabase(); + rootContainer.AddChild(filter); - ExtractionPipelineUseCase useCase; - IExecuteDatasetExtractionDestination results; - Execute(out useCase, out results); + //get rid of any lingering joins + foreach (var j in CatalogueRepository.GetAllObjects()) + j.DeleteInDatabase(); - var mainDataTableCsv = results.DirectoryPopulated.GetFiles().Single(f => f.Name.Equals("TestTable.csv")); + //add the ability to join the two tables in the query + var idCol = _extractableDataSet.Catalogue.GetAllExtractionInformation(ExtractionCategory.Core) + .Single(c => c.IsExtractionIdentifier).ColumnInfo; + var otherIdCol = CustomCatalogue.GetAllExtractionInformation(ExtractionCategory.ProjectSpecific) + .Single(e => e.GetRuntimeName().Equals("PrivateID")).ColumnInfo; + new JoinInfo(CatalogueRepository, idCol, otherIdCol, ExtractionJoinType.Left, null); - Assert.IsNotNull(mainDataTableCsv); + new SelectedDataSetsForcedJoin(DataExportRepository, _selectedDataSet, CustomTableInfo); - var lines = File.ReadAllLines(mainDataTableCsv.FullName); + //generate a new request (this will include the newly created column) + _request = new ExtractDatasetCommand(_configuration, new ExtractableDatasetBundle(_extractableDataSet)); - Assert.AreEqual("ReleaseID,Name,DateOfBirth", lines[0]); - Assert.AreEqual("Pub_54321,Bob,2001-01-01", lines[1]); - Assert.AreEqual(2,lines.Length); + var tbl = Database.ExpectTable("TestTable"); + tbl.Truncate(); - + using (var blk = tbl.BeginBulkInsert()) + { + var dt = new DataTable(); + dt.Columns.Add("PrivateID"); + dt.Columns.Add("Name"); + dt.Columns.Add("DateOfBirth"); + + dt.Rows.Add(new object[] { "Priv_12345", "Bob", "2001-01-01" }); + dt.Rows.Add(new object[] { "Priv_wtf11", "Frank", "2001-10-29" }); + blk.Upload(dt); } + Execute(out _, out var results); + var mainDataTableCsv = results.DirectoryPopulated.GetFiles().Single(f => f.Name.Equals("TestTable.csv")); - /* - private List _customTablesToCleanup = new List(); - [Test] - public void CSVImportPipeline() - { - var customData = GetCustomData(); - string filename = "CustomDataImportingTests.csv"; - File.WriteAllText(filename,
customData); + Assert.IsNotNull(mainDataTableCsv); - var engine = GetEnginePointedAtFile(filename); - engine.ExecutePipeline(new GracefulCancellationToken()); + var lines = File.ReadAllLines(mainDataTableCsv.FullName); - var customTableNames = _extractableCohort.GetCustomTableNames().ToArray(); - - Console.WriteLine("Found the following custom tables:"); - foreach (string tableName in customTableNames) - Console.WriteLine(tableName); + Assert.AreEqual("ReleaseID,Name,DateOfBirth", lines[0]); + Assert.AreEqual("Pub_54321,Bob,2001-01-01", lines[1]); + Assert.AreEqual(2, lines.Length); + } - var syntax = _extractableCohort.GetQuerySyntaxHelper(); - Assert.IsTrue(_extractableCohort.GetCustomTableNames().Count(t => syntax.GetRuntimeName(t).Equals(Path.GetFileNameWithoutExtension(filename))) == 1); - _extractableCohort.DeleteCustomData(Path.GetFileNameWithoutExtension(filename)); + /* + private List _customTablesToCleanup = new List(); - File.Delete(filename); - } + [Test] + public void CSVImportPipeline() + { + var customData = GetCustomData(); + string filename = "CustomDataImportingTests.csv"; + File.WriteAllText(filename, customData); + var engine = GetEnginePointedAtFile(filename); + engine.ExecutePipeline(new GracefulCancellationToken()); + var customTableNames = _extractableCohort.GetCustomTableNames().ToArray(); - [Test] - [TestCase(1)] - [TestCase(10)] - public void IterativeBatchLoadingTest(int numberOfBatches) - { + Console.WriteLine("Found the following custom tables:"); + foreach (string tableName in customTableNames) + Console.WriteLine(tableName); - //will actually be ignored in place of us manually firing batches into the destination - var customData = GetCustomData(); - string filename = "fish.txt"; - File.WriteAllText(filename, customData); + var syntax = _extractableCohort.GetQuerySyntaxHelper(); - var engine = GetEnginePointedAtFile("fish.txt"); + Assert.IsTrue(_extractableCohort.GetCustomTableNames().Count(t => syntax.GetRuntimeName(t).Equals(Path.GetFileNameWithoutExtension(filename))) == 1); + _extractableCohort.DeleteCustomData(Path.GetFileNameWithoutExtension(filename)); - ToMemoryDataLoadEventListener listener = new ToMemoryDataLoadEventListener(true); - - Random r = new Random(); - var token = new GracefulCancellationTokenSource(); + File.Delete(filename); + } - for (int i = 0; i < numberOfBatches; i++) - { - DataTable dt = new DataTable(); - dt.TableName = "fish"; - dt.Columns.Add("PrivateID"); - dt.Columns.Add("Age"); - dt.Rows.Add(_cohortKeysGenerated.Keys.First(),r.Next(100)); - engine.Destination.ProcessPipelineData( dt,listener,token.Token); - } - //then give them the null - engine.Destination.ProcessPipelineData( null,listener, token.Token); + [Test] + [TestCase(1)] + [TestCase(10)] + public void IterativeBatchLoadingTest(int numberOfBatches) + { + + //will actually be ignored in place of us manually firing batches into the destination + var customData = GetCustomData(); + string filename = "fish.txt"; + File.WriteAllText(filename, customData); - engine.Source.Dispose(new ThrowImmediatelyDataLoadEventListener(),null ); - engine.Destination.Dispose(new ThrowImmediatelyDataLoadEventListener(), null); + var engine = GetEnginePointedAtFile("fish.txt"); - //batches are 1 record each so - Assert.AreEqual(numberOfBatches, listener.LastProgressRecieivedByTaskName["Comitting rows to cohort 99_unitTestDataForCohort_V1fish"].Progress.Value); - - var customTableNames = _extractableCohort.GetCustomTableNames().ToArray(); - Console.WriteLine("Found the following custom 
tables:"); - foreach (string tableName in customTableNames) - Console.WriteLine(tableName); + ToMemoryDataLoadEventListener listener = new ToMemoryDataLoadEventListener(true); - var syntax = _extractableCohort.GetQuerySyntaxHelper(); + Random r = new Random(); + var token = new GracefulCancellationTokenSource(); - Assert.IsTrue(_extractableCohort.GetCustomTableNames().Count(t => syntax.GetRuntimeName(t).Equals("fish")) == 1); - _extractableCohort.DeleteCustomData("fish"); + for (int i = 0; i < numberOfBatches; i++) + { + DataTable dt = new DataTable(); + dt.TableName = "fish"; + dt.Columns.Add("PrivateID"); + dt.Columns.Add("Age"); - File.Delete("fish.txt"); + dt.Rows.Add(_cohortKeysGenerated.Keys.First(),r.Next(100)); + engine.Destination.ProcessPipelineData( dt,listener,token.Token); } - [Test] - [ExpectedException(ExpectedMessage = "Cohort Private Identifier PrivateID not found in DataTable" )] - public void CSVImportPipeline_MissingPrivateIdentifier() - { - Exception ex = null; - string filename = "CSVImportPipeline_MissingPrivateIdentifier.csv"; + //then give them the null + engine.Destination.ProcessPipelineData( null,listener, token.Token); - File.WriteAllText(filename, GetCustomData().Replace("PrivateID", "NHSNumber")); + engine.Source.Dispose(ThrowImmediatelyDataLoadEventListener.Quiet,null ); + engine.Destination.Dispose(ThrowImmediatelyDataLoadEventListener.Quiet, null); - var engine = GetEnginePointedAtFile(filename); + //batches are 1 record each so + Assert.AreEqual(numberOfBatches, listener.LastProgressRecieivedByTaskName["Comitting rows to cohort 99_unitTestDataForCohort_V1fish"].Progress.Value); - try - { - try - { - engine.ExecutePipeline(new GracefulCancellationToken()); - } - catch (Exception e) - { - ex = e; - Console.WriteLine(e.ToString()); - Assert.IsTrue(e.InnerException.Message.StartsWith("Last minute checks (just before committing to the database) f")); - Assert.NotNull(e.InnerException); - throw e.InnerException.InnerException; - } - } - finally - { - engine.Source.Dispose(new ThrowImmediatelyDataLoadEventListener(), ex); - File.Delete(filename); - } - } - [Test] - public void CSVImportPipeline_ReleaseIdentifiersButNoPrivateIdentifier() - { - Exception ex = null; - string filename = "CSVImportPipeline_MissingPrivateIdentifier.csv"; + var customTableNames = _extractableCohort.GetCustomTableNames().ToArray(); + Console.WriteLine("Found the following custom tables:"); + foreach (string tableName in customTableNames) + Console.WriteLine(tableName); + + var syntax = _extractableCohort.GetQuerySyntaxHelper(); + + Assert.IsTrue(_extractableCohort.GetCustomTableNames().Count(t => syntax.GetRuntimeName(t).Equals("fish")) == 1); + _extractableCohort.DeleteCustomData("fish"); + + File.Delete("fish.txt"); + } + + [Test] + [ExpectedException(ExpectedMessage = "Cohort Private Identifier PrivateID not found in DataTable" )] + public void CSVImportPipeline_MissingPrivateIdentifier() + { + Exception ex = null; + string filename = "CSVImportPipeline_MissingPrivateIdentifier.csv"; - File.WriteAllText(filename, GetCustomData_ButWithReleaseIdentifiers()); + File.WriteAllText(filename, GetCustomData().Replace("PrivateID", "NHSNumber")); - var engine = GetEnginePointedAtFile(filename); + var engine = GetEnginePointedAtFile(filename); + try + { try { engine.ExecutePipeline(new GracefulCancellationToken()); @@ -324,70 +294,98 @@ public void CSVImportPipeline_ReleaseIdentifiersButNoPrivateIdentifier() catch (Exception e) { ex = e; + Console.WriteLine(e.ToString()); + 
Assert.IsTrue(e.InnerException.Message.StartsWith("Last minute checks (just before committing to the database) f")); + Assert.NotNull(e.InnerException); + throw e.InnerException.InnerException; } - finally - { - engine.Source.Dispose(new ThrowImmediatelyDataLoadEventListener(), ex); - File.Delete(filename); - } } + finally + { + engine.Source.Dispose(ThrowImmediatelyDataLoadEventListener.Quiet, ex); + File.Delete(filename); + } + } + [Test] + public void CSVImportPipeline_ReleaseIdentifiersButNoPrivateIdentifier() + { + Exception ex = null; + string filename = "CSVImportPipeline_MissingPrivateIdentifier.csv"; + + File.WriteAllText(filename, GetCustomData_ButWithReleaseIdentifiers()); + + var engine = GetEnginePointedAtFile(filename); - #region Helper methods - private DataFlowPipelineEngine GetEnginePointedAtFile(string filename) + try { - var source = new DelimitedFlatFileDataFlowSource - { - Separator = ",", - IgnoreBlankLines = true, - UnderReadBehaviour = BehaviourOnUnderReadType.AppendNextLineToCurrentRow, - MakeHeaderNamesSane = true, - StronglyTypeInputBatchSize = -1, - StronglyTypeInput = true - }; + engine.ExecutePipeline(new GracefulCancellationToken()); + } + catch (Exception e) + { + ex = e; + } + finally + { + engine.Source.Dispose(ThrowImmediatelyDataLoadEventListener.Quiet, ex); + File.Delete(filename); + } + } - CustomCohortDataDestination destination = new CustomCohortDataDestination(); + #region Helper methods + private DataFlowPipelineEngine GetEnginePointedAtFile(string filename) + { + var source = new DelimitedFlatFileDataFlowSource + { + Separator = ",", + IgnoreBlankLines = true, + UnderReadBehaviour = BehaviourOnUnderReadType.AppendNextLineToCurrentRow, + MakeHeaderNamesSane = true, + StronglyTypeInputBatchSize = -1, + StronglyTypeInput = true + }; - var context = new DataFlowPipelineContextFactory().Create( - PipelineUsage.FixedDestination | - PipelineUsage.LogsToTableLoadInfo | - PipelineUsage.LoadsSingleTableInfo | - PipelineUsage.LoadsSingleFlatFile); + CustomCohortDataDestination destination = new CustomCohortDataDestination(); - DataFlowPipelineEngine engine = new DataFlowPipelineEngine(context, source, destination, new ThrowImmediatelyDataLoadEventListener()); + var context = new DataFlowPipelineContextFactory().Create( + PipelineUsage.FixedDestination | + PipelineUsage.LogsToTableLoadInfo | + PipelineUsage.LoadsSingleTableInfo | + PipelineUsage.LoadsSingleFlatFile); - engine.Initialize(_extractableCohort,new FlatFileToLoad(new FileInfo(filename))); - source.Check(new ThrowImmediatelyCheckNotifier()); + DataFlowPipelineEngine engine = new DataFlowPipelineEngine(context, source, destination, ThrowImmediatelyDataLoadEventListener.Quiet); - return engine; - } + engine.Initialize(_extractableCohort,new FlatFileToLoad(new FileInfo(filename))); + source.Check(ThrowImmediatelyCheckNotifier.Quiet); - private string GetCustomData() - { - string customData = "PrivateID,Age" + Environment.NewLine; + return engine; + } - int[] ages = {30, 35, 40}; + private string GetCustomData() + { + string customData = "PrivateID,Age" + Environment.NewLine; - var privateIdentifiers = _cohortKeysGenerated.Keys.Take(3).ToArray();//keys = privateIDs + int[] ages = {30, 35, 40}; - for (int i = 0; i < privateIdentifiers.Length; i++) - customData += privateIdentifiers[i] + "," + ages[i] + Environment.NewLine; + var privateIdentifiers = _cohortKeysGenerated.Keys.Take(3).ToArray();//keys = privateIDs - return customData; - } - private string GetCustomData_ButWithReleaseIdentifiers() - { - 
string customData = "ReleaseID,Age" + Environment.NewLine; + for (int i = 0; i < privateIdentifiers.Length; i++) + customData += privateIdentifiers[i] + "," + ages[i] + Environment.NewLine; - int[] ages = { 30, 35, 40 }; + return customData; + } + private string GetCustomData_ButWithReleaseIdentifiers() + { + string customData = "ReleaseID,Age" + Environment.NewLine; - var privateIdentifiers = _cohortKeysGenerated.Values.Take(3).ToArray();//note that in this line we take values not keys because values of this dictionary are ReleaseIDs while keys are PrivateIDs + int[] ages = { 30, 35, 40 }; - for (int i = 0; i < privateIdentifiers.Length; i++) - customData += privateIdentifiers[i] + "," + ages[i] + Environment.NewLine; + var privateIdentifiers = _cohortKeysGenerated.Values.Take(3).ToArray();//note that in this line we take values not keys because values of this dictionary are ReleaseIDs while keys are PrivateIDs - return customData; - } + for (int i = 0; i < privateIdentifiers.Length; i++) + customData += privateIdentifiers[i] + "," + ages[i] + Environment.NewLine; - #endregion*/ + return customData; } -} + + #endregion*/ +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataExport/Data/ExternalCohortTableTests.cs b/Rdmp.Core.Tests/DataExport/Data/ExternalCohortTableTests.cs index 5ae259ae1c..28aa290c88 100644 --- a/Rdmp.Core.Tests/DataExport/Data/ExternalCohortTableTests.cs +++ b/Rdmp.Core.Tests/DataExport/Data/ExternalCohortTableTests.cs @@ -10,103 +10,107 @@ using Rdmp.Core.CohortCommitting; using Rdmp.Core.DataExport.Data; using Rdmp.Core.Repositories; -using ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Checks; using Tests.Common; -namespace Rdmp.Core.Tests.DataExport.Data +namespace Rdmp.Core.Tests.DataExport.Data; + +internal class ExternalCohortTableTests : UnitTests { - class ExternalCohortTableTests:UnitTests + /// + /// Demonstrates the minimum properties required to create an ExternalCohortTable. See + /// CreateNewCohortDatabaseWizard for how to create one of these based on the datasets currently held in rdmp. + /// + [Test] + public void Create_ExternalCohortTable_Manually() + { - /// - /// Demonstrates the minimum properties required to create an ExternalCohortTable. See - /// CreateNewCohortDatabaseWizard for how to create one of these based on the datasets currently held in rdmp. - /// - [Test] - public void Create_ExternalCohortTable_Manually() - { - MemoryDataExportRepository repository = new MemoryDataExportRepository(); - var table = new ExternalCohortTable(repository, "My Cohort Database", DatabaseType.MicrosoftSQLServer); - table.Database = "mydb"; - table.PrivateIdentifierField = "chi"; - table.ReleaseIdentifierField = "release"; - table.DefinitionTableForeignKeyField = "c_id"; - table.TableName = "Cohorts"; - table.DefinitionTableName = "InventoryTable"; - table.Server = "superfastdatabaseserver\\sqlexpress"; - table.SaveToDatabase(); - - var ex = Assert.Throws(()=>table.Check(new ThrowImmediatelyCheckNotifier())); - Assert.AreEqual("Could not connect to Cohort database called 'My Cohort Database'",ex.Message); - } - - /// - /// Demonstrates how to get a hydrated instance during unit tests.
This will not map to an actually existing database - /// - [Test] - public void Create_ExternalCohortTable_InTests() - { - var tbl = WhenIHaveA(); - - Assert.IsNotNull(tbl); - Assert.IsNotNull(tbl.PrivateIdentifierField); - Assert.IsNotNull(tbl.ReleaseIdentifierField); - } - - [Test] - public void TestExternalCohortTableProperties_BasicValues() + var repository = new MemoryDataExportRepository(); + var table = new ExternalCohortTable(repository, "My Cohort Database", DatabaseType.MicrosoftSQLServer) { - var table = new ExternalCohortTable(RepositoryLocator.DataExportRepository,"ffff",DatabaseType.MicrosoftSQLServer); - - Assert.IsNull(table.Database); - Assert.IsNull(table.Server); - Assert.IsNull(table.TableName); - Assert.IsNull(table.PrivateIdentifierField); - Assert.IsNull(table.ReleaseIdentifierField); - Assert.IsNull(table.DefinitionTableForeignKeyField); - - table.Database = "mydb"; - table.Server = "myserver"; - table.TableName = "mytbl"; - table.PrivateIdentifierField = "priv"; - table.ReleaseIdentifierField = "rel"; - table.DefinitionTableForeignKeyField = "fk"; - - Assert.AreEqual("mydb",table.Database); - Assert.AreEqual("myserver",table.Server); - Assert.AreEqual("[mydb]..[mytbl]",table.TableName); - Assert.IsNull(table.DefinitionTableName); - - Assert.AreEqual("[mydb]..[mytbl].[priv]",table.PrivateIdentifierField); - Assert.AreEqual("[mydb]..[mytbl].[rel]",table.ReleaseIdentifierField); - Assert.AreEqual("[mydb]..[mytbl].[fk]",table.DefinitionTableForeignKeyField); - } - [Test] - public void TestExternalCohortTableProperties_SetFullValues() - { - var table = new ExternalCohortTable(RepositoryLocator.DataExportRepository,"ffff",DatabaseType.MicrosoftSQLServer); - - Assert.IsNull(table.Database); - Assert.IsNull(table.Server); - Assert.IsNull(table.TableName); - Assert.IsNull(table.PrivateIdentifierField); - Assert.IsNull(table.ReleaseIdentifierField); - Assert.IsNull(table.DefinitionTableForeignKeyField); - - table.PrivateIdentifierField = "[mydb]..[mytbl].[priv]"; - table.ReleaseIdentifierField = "[mydb]..[mytbl].[rel]"; - table.DefinitionTableForeignKeyField = "[mydb]..[mytbl].[fk]"; - table.Database = "mydb"; - table.Server = "myserver"; - table.TableName = "[mydb]..[mytbl]"; - - Assert.AreEqual("mydb",table.Database); - Assert.AreEqual("myserver",table.Server); - Assert.AreEqual("[mydb]..[mytbl]",table.TableName); - Assert.IsNull(table.DefinitionTableName); - - Assert.AreEqual("[mydb]..[mytbl].[priv]",table.PrivateIdentifierField); - Assert.AreEqual("[mydb]..[mytbl].[rel]",table.ReleaseIdentifierField); - Assert.AreEqual("[mydb]..[mytbl].[fk]",table.DefinitionTableForeignKeyField); - } + Database = "mydb", + PrivateIdentifierField = "chi", + ReleaseIdentifierField = "release", + DefinitionTableForeignKeyField = "c_id", + TableName = "Cohorts", + DefinitionTableName = "InventoryTable", + Server = "superfastdatabaseserver\\sqlexpress" + }; + table.SaveToDatabase(); + + var ex = Assert.Throws(() => table.Check(ThrowImmediatelyCheckNotifier.Quiet)); + Assert.AreEqual("Could not connect to Cohort database called 'My Cohort Database'", ex.Message); + } + + /// + /// Demonstrates how to get a hydrated instance during unit tests. 
This will not map to an actually existing database + /// + [Test] + public void Create_ExternalCohortTable_InTests() + { + var tbl = WhenIHaveA(); + + Assert.IsNotNull(tbl); + Assert.IsNotNull(tbl.PrivateIdentifierField); + Assert.IsNotNull(tbl.ReleaseIdentifierField); + } + + [Test] + public void TestExternalCohortTableProperties_BasicValues() + { + var table = new ExternalCohortTable(RepositoryLocator.DataExportRepository, "ffff", + DatabaseType.MicrosoftSQLServer); + + Assert.IsNull(table.Database); + Assert.IsNull(table.Server); + Assert.IsNull(table.TableName); + Assert.IsNull(table.PrivateIdentifierField); + Assert.IsNull(table.ReleaseIdentifierField); + Assert.IsNull(table.DefinitionTableForeignKeyField); + + table.Database = "mydb"; + table.Server = "myserver"; + table.TableName = "mytbl"; + table.PrivateIdentifierField = "priv"; + table.ReleaseIdentifierField = "rel"; + table.DefinitionTableForeignKeyField = "fk"; + + Assert.AreEqual("mydb", table.Database); + Assert.AreEqual("myserver", table.Server); + Assert.AreEqual("[mydb]..[mytbl]", table.TableName); + Assert.IsNull(table.DefinitionTableName); + + Assert.AreEqual("[mydb]..[mytbl].[priv]", table.PrivateIdentifierField); + Assert.AreEqual("[mydb]..[mytbl].[rel]", table.ReleaseIdentifierField); + Assert.AreEqual("[mydb]..[mytbl].[fk]", table.DefinitionTableForeignKeyField); + } + + [Test] + public void TestExternalCohortTableProperties_SetFullValues() + { + var table = new ExternalCohortTable(RepositoryLocator.DataExportRepository, "ffff", + DatabaseType.MicrosoftSQLServer); + + Assert.IsNull(table.Database); + Assert.IsNull(table.Server); + Assert.IsNull(table.TableName); + Assert.IsNull(table.PrivateIdentifierField); + Assert.IsNull(table.ReleaseIdentifierField); + Assert.IsNull(table.DefinitionTableForeignKeyField); + + table.PrivateIdentifierField = "[mydb]..[mytbl].[priv]"; + table.ReleaseIdentifierField = "[mydb]..[mytbl].[rel]"; + table.DefinitionTableForeignKeyField = "[mydb]..[mytbl].[fk]"; + table.Database = "mydb"; + table.Server = "myserver"; + table.TableName = "[mydb]..[mytbl]"; + + Assert.AreEqual("mydb", table.Database); + Assert.AreEqual("myserver", table.Server); + Assert.AreEqual("[mydb]..[mytbl]", table.TableName); + Assert.IsNull(table.DefinitionTableName); + + Assert.AreEqual("[mydb]..[mytbl].[priv]", table.PrivateIdentifierField); + Assert.AreEqual("[mydb]..[mytbl].[rel]", table.ReleaseIdentifierField); + Assert.AreEqual("[mydb]..[mytbl].[fk]", table.DefinitionTableForeignKeyField); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataExport/Data/ExtractableCohortAuditLogBuilderTests.cs b/Rdmp.Core.Tests/DataExport/Data/ExtractableCohortAuditLogBuilderTests.cs index c05d8a6792..daa8f64c25 100644 --- a/Rdmp.Core.Tests/DataExport/Data/ExtractableCohortAuditLogBuilderTests.cs +++ b/Rdmp.Core.Tests/DataExport/Data/ExtractableCohortAuditLogBuilderTests.cs @@ -4,102 +4,96 @@ // RDMP is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. // You should have received a copy of the GNU General Public License along with RDMP. If not, see . 
-using Moq; +using NSubstitute; using NUnit.Framework; using Rdmp.Core.Curation.Data; using Rdmp.Core.Curation.Data.Cohort; using Rdmp.Core.DataExport.Data; -using System; -using System.Collections.Generic; using System.IO; -using System.Linq; -using System.Text; -using System.Threading.Tasks; using Tests.Common; -namespace Rdmp.Core.Tests.DataExport.Data +namespace Rdmp.Core.Tests.DataExport.Data; + +internal class ExtractableCohortAuditLogBuilderTests : UnitTests { - internal class ExtractableCohortAuditLogBuilderTests : UnitTests - { [Test] public void AuditLogReFetch_FileInfo() { - var builder = new ExtractableCohortAuditLogBuilder(); - - var fi = new FileInfo("durdur.txt"); - var desc = builder.GetDescription(fi); - var moqCohort = Mock.Of(e => e.AuditLog == desc); - var fi2 = builder.GetObjectIfAny(moqCohort, RepositoryLocator); + var fi = new FileInfo("durdur.txt"); + var desc = ExtractableCohortAuditLogBuilder.GetDescription(fi); + + var moqCohort = Substitute.For(); + moqCohort.AuditLog.Returns(desc); + var fi2 = ExtractableCohortAuditLogBuilder.GetObjectIfAny(moqCohort, RepositoryLocator); - Assert.IsNotNull(fi2); - Assert.IsInstanceOf(fi2); - Assert.AreEqual(fi.FullName, ((FileInfo)fi2).FullName); + Assert.IsNotNull(fi2); + Assert.IsInstanceOf(fi2); + Assert.AreEqual(fi.FullName, ((FileInfo)fi2).FullName); } [Test] public void AuditLogReFetch_CohortIdentificationConfiguration() { - var builder = new ExtractableCohortAuditLogBuilder(); - - var cic = WhenIHaveA(); - var desc = builder.GetDescription(cic); - var moqCohort = Mock.Of(e => e.AuditLog == desc); - var cic2 = builder.GetObjectIfAny(moqCohort, RepositoryLocator); + var cic = WhenIHaveA(); + var desc = ExtractableCohortAuditLogBuilder.GetDescription(cic); - Assert.IsNotNull(cic2); - Assert.IsInstanceOf(cic2); - Assert.AreEqual(cic,cic2); + var moqCohort = Substitute.For(); + moqCohort.AuditLog.Returns(desc); + var cic2 = ExtractableCohortAuditLogBuilder.GetObjectIfAny(moqCohort, RepositoryLocator); + Assert.IsNotNull(cic2); + Assert.IsInstanceOf(cic2); + Assert.AreEqual(cic, cic2); } [Test] public void AuditLogReFetch_ExtractionInformation() { - var builder = new ExtractableCohortAuditLogBuilder(); - var ei = WhenIHaveA(); - var desc = builder.GetDescription(ei); + var ei = WhenIHaveA(); + var desc = ExtractableCohortAuditLogBuilder.GetDescription(ei); - var moqCohort = Mock.Of(e => e.AuditLog == desc); - var ei2 = builder.GetObjectIfAny(moqCohort, RepositoryLocator); - - Assert.IsNotNull(ei2); - Assert.IsInstanceOf(ei2); - Assert.AreEqual(ei, ei2); + var moqCohort = Substitute.For(); + moqCohort.AuditLog.Returns(desc); + var ei2 = ExtractableCohortAuditLogBuilder.GetObjectIfAny(moqCohort, RepositoryLocator); + Assert.IsNotNull(ei2); + Assert.IsInstanceOf(ei2); + Assert.AreEqual(ei, ei2); } [Test] public void AuditLogReFetch_WhenAuditLogIsNull() { - var builder = new ExtractableCohortAuditLogBuilder(); - var moqCohort = Mock.Of(e => e.AuditLog == null); - Assert.IsNull(builder.GetObjectIfAny(moqCohort, RepositoryLocator)); + var moqCohort = Substitute.For(); + moqCohort.AuditLog.Returns(x => null); + Assert.IsNull(ExtractableCohortAuditLogBuilder.GetObjectIfAny(moqCohort, RepositoryLocator)); } + [Test] public void AuditLogReFetch_WhenAuditLogIsRubbish() { - var builder = new ExtractableCohortAuditLogBuilder(); - var moqCohort = Mock.Of(e => e.AuditLog == "troll doll dur I invented this cohort myself"); - Assert.IsNull(builder.GetObjectIfAny(moqCohort, RepositoryLocator)); + var moqCohort = Substitute.For(); + 
moqCohort.AuditLog.Returns("troll doll dur I invented this cohort myself"); + Assert.IsNull(ExtractableCohortAuditLogBuilder.GetObjectIfAny(moqCohort, RepositoryLocator)); } [Test] public void AuditLogReFetch_WhenSourceIsDeleted() { - var builder = new ExtractableCohortAuditLogBuilder(); - - var ei = WhenIHaveA(); - var desc = builder.GetDescription(ei); - - var moqCohort = Mock.Of(e => e.AuditLog == desc); - - // delete the source - ei.DeleteInDatabase(); - - // should now return null - Assert.IsNull(builder.GetObjectIfAny(moqCohort, RepositoryLocator)); + var builder = new ExtractableCohortAuditLogBuilder(); + + var ei = WhenIHaveA(); + var desc = ExtractableCohortAuditLogBuilder.GetDescription(ei); + + var moqCohort = Substitute.For(); + moqCohort.AuditLog.Returns(desc); + + // delete the source + ei.DeleteInDatabase(); + + // should now return null + Assert.IsNull(ExtractableCohortAuditLogBuilder.GetObjectIfAny(moqCohort, RepositoryLocator)); } - } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataExport/Data/ExtractableCohortTests.cs b/Rdmp.Core.Tests/DataExport/Data/ExtractableCohortTests.cs index b971e2d441..6839416933 100644 --- a/Rdmp.Core.Tests/DataExport/Data/ExtractableCohortTests.cs +++ b/Rdmp.Core.Tests/DataExport/Data/ExtractableCohortTests.cs @@ -5,42 +5,39 @@ // You should have received a copy of the GNU General Public License along with RDMP. If not, see . using NUnit.Framework; -using Rdmp.Core.DataExport.Data; -using ReusableLibraryCode.Checks; -using ReusableLibraryCode.Settings; using System; -using System.Collections.Generic; -using System.Text; -using Tests.Common; +using Rdmp.Core.ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Settings; using Tests.Common.Scenarios; -namespace Rdmp.Core.Tests.DataExport.Data +namespace Rdmp.Core.Tests.DataExport.Data; + +internal class ExtractableCohortTests : TestsRequiringACohort { - class ExtractableCohortTests : TestsRequiringACohort + [Test] + public void TestExtractableCohort_Identifiable() { - [Test] - public void TestExtractableCohort_Identifiable() - { - var cohort = _extractableCohort; + var cohort = _extractableCohort; - Assert.IsNotNull(cohort.GetPrivateIdentifier()); - Assert.AreNotEqual(cohort.GetReleaseIdentifier(),cohort.GetPrivateIdentifier()); + Assert.IsNotNull(cohort.GetPrivateIdentifier()); + Assert.AreNotEqual(cohort.GetReleaseIdentifier(), cohort.GetPrivateIdentifier()); - var ect = cohort.ExternalCohortTable; - ect.ReleaseIdentifierField = ect.PrivateIdentifierField; - ect.SaveToDatabase(); + var ect = cohort.ExternalCohortTable; + ect.ReleaseIdentifierField = ect.PrivateIdentifierField; + ect.SaveToDatabase(); - UserSettings.SetErrorReportingLevelFor(ErrorCodes.ExtractionIsIdentifiable, CheckResult.Fail); + UserSettings.SetErrorReportingLevelFor(ErrorCodes.ExtractionIsIdentifiable, CheckResult.Fail); - var ex = Assert.Throws(()=>cohort.GetReleaseIdentifier()); + var ex = Assert.Throws(() => cohort.GetReleaseIdentifier()); - Assert.AreEqual("R004 PrivateIdentifierField and ReleaseIdentifierField are the same, this means your cohort will extract identifiable data (no cohort identifier substitution takes place)", ex.Message); + Assert.AreEqual( + "R004 PrivateIdentifierField and ReleaseIdentifierField are the same, this means your cohort will extract identifiable data (no cohort identifier substitution takes place)", + ex.Message); - UserSettings.SetErrorReportingLevelFor(ErrorCodes.ExtractionIsIdentifiable, CheckResult.Warning); + 
UserSettings.SetErrorReportingLevelFor(ErrorCodes.ExtractionIsIdentifiable, CheckResult.Warning); - Assert.AreEqual(cohort.GetReleaseIdentifier(),cohort.GetPrivateIdentifier()); + Assert.AreEqual(cohort.GetReleaseIdentifier(), cohort.GetPrivateIdentifier()); - UserSettings.SetErrorReportingLevelFor(ErrorCodes.ExtractionIsIdentifiable, CheckResult.Fail); - } + UserSettings.SetErrorReportingLevelFor(ErrorCodes.ExtractionIsIdentifiable, CheckResult.Fail); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataExport/Data/ExtractionProgressTests.cs b/Rdmp.Core.Tests/DataExport/Data/ExtractionProgressTests.cs index df66922a4e..e97af90839 100644 --- a/Rdmp.Core.Tests/DataExport/Data/ExtractionProgressTests.cs +++ b/Rdmp.Core.Tests/DataExport/Data/ExtractionProgressTests.cs @@ -4,169 +4,169 @@ // RDMP is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. // You should have received a copy of the GNU General Public License along with RDMP. If not, see . -using Microsoft.Data.SqlClient; using NUnit.Framework; using Rdmp.Core.Curation.Data; using Rdmp.Core.DataExport.Data; -using Rdmp.Core.DataExport.DataExtraction.Pipeline.Destinations; using System; using System.IO; using System.Linq; using Tests.Common.Scenarios; -namespace Rdmp.Core.Tests.DataExport.Data +namespace Rdmp.Core.Tests.DataExport.Data; + +internal class ExtractionProgressTests : TestsRequiringAnExtractionConfiguration { - class ExtractionProgressTests : TestsRequiringAnExtractionConfiguration + [Test] + public void ExtractionProgressConstructor_NoTimePeriodicity() { + var cata = new Catalogue(CatalogueRepository, "MyCata"); + var eds = new ExtractableDataSet(DataExportRepository, cata); + var project = new Project(DataExportRepository, "My Proj"); + var config = new ExtractionConfiguration(DataExportRepository, project); + var sds = new SelectedDataSets(DataExportRepository, config, eds, null); + + var ex = Assert.Throws(() => new ExtractionProgress(DataExportRepository, sds)); + Assert.AreEqual( + "Cannot create ExtractionProgress because Catalogue MyCata does not have a time coverage column", + ex.Message); + } - [Test] - public void ExtractionProgressConstructor_NoTimePeriodicity() - { - var cata = new Catalogue(CatalogueRepository, "MyCata"); - var eds = new ExtractableDataSet(DataExportRepository, cata); - var project = new Project(DataExportRepository, "My Proj"); - var config = new ExtractionConfiguration(DataExportRepository, project); - var sds = new SelectedDataSets(DataExportRepository, config,eds,null); + [Test] + public void ExtractionProgressConstructor_Normal() + { + ExtractionProgress progress = null; + Assert.DoesNotThrow(() => progress = CreateAnExtractionProgress()); + progress?.DeleteInDatabase(); + } - var ex = Assert.Throws(()=> new ExtractionProgress(DataExportRepository, sds)); - Assert.AreEqual("Cannot create ExtractionProgress because Catalogue MyCata does not have a time coverage column", ex.Message); + [Test] + public void ExtractionProgressConstructor_CannotHaveTwoAtOnce() + { + var progress = CreateAnExtractionProgress(); - } + var sds = progress.SelectedDataSets; - [Test] - public void ExtractionProgressConstructor_Normal() - { - ExtractionProgress progress = null; ; - Assert.DoesNotThrow(()=> progress = CreateAnExtractionProgress()); - progress?.DeleteInDatabase(); - } + // try to create a second progress for the same dataset 
being extracted + var ex = Assert.Throws(() => new ExtractionProgress(DataExportRepository, sds)); - [Test] - public void ExtractionProgressConstructor_CannotHaveTwoAtOnce() - { - var progress = CreateAnExtractionProgress(); - - var sds = progress.SelectedDataSets; + Assert.AreEqual("There is already an ExtractionProgress associated with MyCata", ex.Message); - // try to create a second progress for the same dataset being extracted - var ex = Assert.Throws(() => new ExtractionProgress(DataExportRepository, sds)); + // now delete the original and make sure we can recreate it ok + progress.DeleteInDatabase(); + Assert.DoesNotThrow(() => progress = new ExtractionProgress(DataExportRepository, sds)); - Assert.AreEqual("There is already an ExtractionProgress associated with MyCata", ex.Message); + // yeah we can great, let's cleanup the test now + progress.DeleteInDatabase(); + } - // now delete the original and make sure we can recreate it ok - progress.DeleteInDatabase(); - Assert.DoesNotThrow(() => progress = new ExtractionProgress(DataExportRepository, sds)); + [Test] + public void ExtractionProgressConstructor_DeleteSdsMustCascade() + { + var progress = CreateAnExtractionProgress(); - // yeah we can great, let's cleanup the test now - progress.DeleteInDatabase(); - } + Assert.IsTrue(progress.Exists()); + progress.SelectedDataSets.DeleteInDatabase(); + Assert.IsFalse(progress.Exists()); + } - [Test] - public void ExtractionProgressConstructor_DeleteSdsMustCascade() - { - var progress = CreateAnExtractionProgress(); + [Test] + public void ExtractionProgress_RetrySave() + { + var progress = CreateAnExtractionProgress(); + Assert.AreEqual(progress.Retry, RetryStrategy.NoRetry); - Assert.IsTrue(progress.Exists()); - progress.SelectedDataSets.DeleteInDatabase(); - Assert.IsFalse(progress.Exists()); - } + progress.Retry = RetryStrategy.IterativeBackoff1Hour; + progress.SaveToDatabase(); - [Test] - public void ExtractionProgress_RetrySave() - { - var progress = CreateAnExtractionProgress(); - Assert.AreEqual(progress.Retry, RetryStrategy.NoRetry); + progress.RevertToDatabaseState(); + Assert.AreEqual(progress.Retry, RetryStrategy.IterativeBackoff1Hour); - progress.Retry = RetryStrategy.IterativeBackoff1Hour; - progress.SaveToDatabase(); + var freshCopy = progress.Repository.GetObjectByID(progress.ID); + Assert.AreEqual(freshCopy.Retry, RetryStrategy.IterativeBackoff1Hour); - progress.RevertToDatabaseState(); - Assert.AreEqual(progress.Retry, RetryStrategy.IterativeBackoff1Hour); + progress.DeleteInDatabase(); + } - var freshCopy = progress.Repository.GetObjectByID(progress.ID); - Assert.AreEqual(freshCopy.Retry, RetryStrategy.IterativeBackoff1Hour); + [Test] + public void TestQueryGeneration_FirstBatch() + { + Reset(); - progress.DeleteInDatabase(); + _catalogue.TimeCoverage_ExtractionInformation_ID = + _extractionInformations.Single(e => e.GetRuntimeName().Equals("DateOfBirth")).ID; + _catalogue.SaveToDatabase(); - } - [Test] - public void TestQueryGeneration_FirstBatch() + var progress = new ExtractionProgress(DataExportRepository, _request.SelectedDataSets) { - Reset(); + StartDate = new DateTime(2001, 01, 01), + EndDate = new DateTime(2001, 01, 10), + NumberOfDaysPerBatch = 10 + }; + progress.SaveToDatabase(); - _catalogue.TimeCoverage_ExtractionInformation_ID = _extractionInformations.Single(e => e.GetRuntimeName().Equals("DateOfBirth")).ID; - _catalogue.SaveToDatabase(); + _request.GenerateQueryBuilder(); - var progress = new ExtractionProgress(DataExportRepository, 
_request.SelectedDataSets); - progress.StartDate = new DateTime(2001, 01, 01); - progress.EndDate = new DateTime(2001, 01, 10); - progress.NumberOfDaysPerBatch = 10; - progress.SaveToDatabase(); + Execute(out _, out var result); - _request.GenerateQueryBuilder(); + Assert.IsTrue(result.GeneratesFiles); + var fileContents = File.ReadAllText(result.OutputFile); - Execute(out _, out IExecuteDatasetExtractionDestination result); + // Headers should be in file because it is a first batch + Assert.AreEqual( + $"ReleaseID,Name,DateOfBirth{Environment.NewLine}Pub_54321,Dave,2001-01-01{Environment.NewLine}", + fileContents); - Assert.IsTrue(result.GeneratesFiles); - var fileContents = File.ReadAllText(result.OutputFile); + File.Delete(result.OutputFile); + progress.DeleteInDatabase(); + } - // Headers should be in file because it is a first batch - Assert.AreEqual($"ReleaseID,Name,DateOfBirth{Environment.NewLine}Pub_54321,Dave,2001-01-01{Environment.NewLine}", fileContents); + [Test] + public void TestCloneResetsProgress() + { + CreateAnExtractionProgress(out var config); - File.Delete(result.OutputFile); - progress.DeleteInDatabase(); - } + // get original objects + var origSds = config.SelectedDataSets.Single(); + var origProgress = origSds.ExtractionProgressIfAny; + origProgress.StartDate = new DateTime(2001, 01, 01); + origProgress.ProgressDate = new DateTime(2005, 01, 01); + origProgress.EndDate = new DateTime(2020, 01, 01); + origProgress.SaveToDatabase(); - [Test] - public void TestCloneResetsProgress() - { - CreateAnExtractionProgress(out var config); + //clone + var clone = config.DeepCloneWithNewIDs(); - // get original objects - var origSds = config.SelectedDataSets.Single(); - var origProgress = origSds.ExtractionProgressIfAny; - origProgress.StartDate = new DateTime(2001, 01, 01); - origProgress.ProgressDate = new DateTime(2005, 01, 01); - origProgress.EndDate = new DateTime(2020, 01, 01); - origProgress.SaveToDatabase(); + // get new objects + var cloneSds = clone.SelectedDataSets.Single(); + var cloneProgress = cloneSds.ExtractionProgressIfAny; - //clone - var clone = config.DeepCloneWithNewIDs(); - // get new objects - var cloneSds = clone.SelectedDataSets.Single(); - var cloneProgress = cloneSds.ExtractionProgressIfAny; + // should be different instances + Assert.AreNotSame(origProgress, cloneProgress); + Assert.AreEqual(cloneProgress.StartDate, new DateTime(2001, 01, 01)); + Assert.IsNull(cloneProgress.ProgressDate, "Expected progress to be reset on clone"); + Assert.AreEqual(cloneProgress.EndDate, new DateTime(2020, 01, 01)); + } - // should be different instances - Assert.AreNotSame(origProgress, cloneProgress); + private ExtractionProgress CreateAnExtractionProgress() => CreateAnExtractionProgress(out _); - Assert.AreEqual(cloneProgress.StartDate , new DateTime(2001, 01, 01)); - Assert.IsNull(cloneProgress.ProgressDate,"Expected progress to be reset on clone"); - Assert.AreEqual(cloneProgress.EndDate , new DateTime(2020, 01, 01)); - } + private ExtractionProgress CreateAnExtractionProgress(out ExtractionConfiguration config) + { + var cata = new Catalogue(CatalogueRepository, "MyCata"); + var cataItem = new CatalogueItem(CatalogueRepository, cata, "MyCol"); + var table = new TableInfo(CatalogueRepository, "MyTable"); + var col = new ColumnInfo(CatalogueRepository, "mycol", "datetime", table); - private ExtractionProgress CreateAnExtractionProgress() - { - return CreateAnExtractionProgress(out _); - } - private ExtractionProgress CreateAnExtractionProgress(out 
ExtractionConfiguration config) - { - var cata = new Catalogue(CatalogueRepository, "MyCata"); - var cataItem = new CatalogueItem(CatalogueRepository, cata, "MyCol"); - var table = new TableInfo(CatalogueRepository, "MyTable"); - var col = new ColumnInfo(CatalogueRepository, "mycol", "datetime", table); - - var ei = new ExtractionInformation(CatalogueRepository, cataItem, col, "mycol"); - cata.TimeCoverage_ExtractionInformation_ID = ei.ID; - cata.SaveToDatabase(); - - var eds = new ExtractableDataSet(DataExportRepository, cata); - var project = new Project(DataExportRepository, "My Proj"); - config = new ExtractionConfiguration(DataExportRepository, project); - var sds = new SelectedDataSets(DataExportRepository, config, eds, null); - - return new ExtractionProgress(DataExportRepository, sds); - } + var ei = new ExtractionInformation(CatalogueRepository, cataItem, col, "mycol"); + cata.TimeCoverage_ExtractionInformation_ID = ei.ID; + cata.SaveToDatabase(); + + var eds = new ExtractableDataSet(DataExportRepository, cata); + var project = new Project(DataExportRepository, "My Proj"); + config = new ExtractionConfiguration(DataExportRepository, project); + var sds = new SelectedDataSets(DataExportRepository, config, eds, null); + + return new ExtractionProgress(DataExportRepository, sds); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataExport/Data/SelectedDataSetsCheckerTests.cs b/Rdmp.Core.Tests/DataExport/Data/SelectedDataSetsCheckerTests.cs index 1cae837279..413fe0368e 100644 --- a/Rdmp.Core.Tests/DataExport/Data/SelectedDataSetsCheckerTests.cs +++ b/Rdmp.Core.Tests/DataExport/Data/SelectedDataSetsCheckerTests.cs @@ -9,98 +9,107 @@ using Rdmp.Core.DataExport.Checks; using Rdmp.Core.DataExport.Data; using Rdmp.Core.DataExport.DataExtraction.Pipeline.Destinations; -using ReusableLibraryCode.Checks; using System; +using Rdmp.Core.ReusableLibraryCode.Checks; +using Tests.Common; using Tests.Common.Scenarios; -namespace Rdmp.Core.Tests.DataExport.Data +namespace Rdmp.Core.Tests.DataExport.Data; + +public class SelectedDataSetsCheckerTests : TestsRequiringAnExtractionConfiguration { - public class SelectedDataSetsCheckerTests : TestsRequiringAnExtractionConfiguration + [Test] + public void NormalUseCasePasses() { + // normal checks pass + var checker = new SelectedDataSetsChecker(new ThrowImmediatelyActivator(RepositoryLocator), _selectedDataSet); + checker.Check(ThrowImmediatelyCheckNotifier.Quiet); + } - [Test] - public void NormalUseCasePasses() - { - // normal checks pass - var checker = new SelectedDataSetsChecker(new ThrowImmediatelyActivator(RepositoryLocator), _selectedDataSet); - checker.Check(new ThrowImmediatelyCheckNotifier()); - } + [Test] + public void TestExtractionProgress_MidwayWithNoAuditRecord() + { + // normal checks pass + var checker = new SelectedDataSetsChecker(new ThrowImmediatelyActivator(RepositoryLocator), _selectedDataSet); - [Test] - public void TestExtractionProgress_MidwayWithNoAuditRecord() + var ep = new ExtractionProgress(DataExportRepository, _selectedDataSet, new DateTime(1990, 1, 1), + new DateTime(2001, 1, 1), 100, "mybatch", + _extractionInformations[0].ID) { - // normal checks pass - var checker = new SelectedDataSetsChecker(new ThrowImmediatelyActivator(RepositoryLocator), _selectedDataSet); + ProgressDate = new DateTime(1995, 1, 1) // we are half way through + }; + + ep.SaveToDatabase(); - var ep = new ExtractionProgress(DataExportRepository, _selectedDataSet, new System.DateTime(1990, 1, 1), new System.DateTime(2001, 1, 
1),100,"mybatch", - _extractionInformations[0].ID); + var ex = Assert.Throws(() => checker.Check(ThrowImmediatelyCheckNotifier.Quiet)); + Assert.AreEqual( + "R0016 ExtractionProgress 'mybatch' is 'in progress' (ProgressDate is not null) but there is no audit of previously extracted SQL (needed for checking cohort changes)", + ex.Message); + } - ep.ProgressDate = new System.DateTime(1995, 1, 1); // we are half way through - ep.SaveToDatabase(); + [Test] + public void TestExtractionProgress_AuditRecordHasDifferentCohort() + { + // normal checks pass + var checker = new SelectedDataSetsChecker(new ThrowImmediatelyActivator(RepositoryLocator), _selectedDataSet); - var ex = Assert.Throws(()=>checker.Check(new ThrowImmediatelyCheckNotifier())); - Assert.AreEqual("R0016 ExtractionProgress 'mybatch' is 'in progress' (ProgressDate is not null) but there is no audit of previously extracted SQL (needed for checking cohort changes)", ex.Message); - } + foreach (var r in DataExportRepository.GetAllObjects()) r.DeleteInDatabase(); - [Test] - public void TestExtractionProgress_AuditRecordHasDifferentCohort() + var ep = new ExtractionProgress(DataExportRepository, _selectedDataSet, new DateTime(1990, 1, 1), + new DateTime(2001, 1, 1), 100, "mybatch", + _extractionInformations[0].ID) { - // normal checks pass - var checker = new SelectedDataSetsChecker(new ThrowImmediatelyActivator(RepositoryLocator), _selectedDataSet); - - foreach (var r in DataExportRepository.GetAllObjects()) - { - r.DeleteInDatabase(); - } + ProgressDate = new DateTime(1995, 1, 1) // we are half way through + }; - var ep = new ExtractionProgress(DataExportRepository, _selectedDataSet, new System.DateTime(1990, 1, 1), new System.DateTime(2001, 1, 1), 100, "mybatch", - _extractionInformations[0].ID); + ep.SaveToDatabase(); - ep.ProgressDate = new System.DateTime(1995, 1, 1); // we are half way through - ep.SaveToDatabase(); + // audit has SQL that does not contain the cohort ID + var audit = new CumulativeExtractionResults(DataExportRepository, _configuration, + _selectedDataSet.ExtractableDataSet, "select * from [yohoho and a bottle of rum]"); + audit.CompleteAudit(typeof(ExecuteFullExtractionToDatabaseMSSql), "[over the hills and far away]", 333, true, + false); + audit.SaveToDatabase(); - // audit has SQL that does not contain the cohort ID - var audit = new CumulativeExtractionResults(DataExportRepository, _configuration, _selectedDataSet.ExtractableDataSet, "select * from [yohoho and a bottle of rum]"); - audit.CompleteAudit(typeof(ExecuteFullExtractionToDatabaseMSSql), "[over the hills and far away]", 333, true, false); - audit.SaveToDatabase(); + var ex = Assert.Throws(() => checker.Check(ThrowImmediatelyCheckNotifier.Quiet)); + Assert.AreEqual( + $"R0017 ExtractionProgress 'mybatch' is 'in progress' (ProgressDate is not null) but we did not find the expected Cohort WHERE Sql in the audit of SQL extracted with the last batch. Did you change the cohort without resetting the ProgressDate? The SQL we expected to find was '[{TestDatabaseNames.Prefix}CohortDatabase]..[Cohort].[cohortDefinition_id]=-599'", + ex.Message); - var ex = Assert.Throws(() => checker.Check(new ThrowImmediatelyCheckNotifier())); - Assert.AreEqual( - $"R0017 ExtractionProgress 'mybatch' is 'in progress' (ProgressDate is not null) but we did not find the expected Cohort WHERE Sql in the audit of SQL extracted with the last batch. Did you change the cohort without resetting the ProgressDate? 
The SQL we expected to find was '[{TestDatabaseSettings.Prefix}CohortDatabase]..[Cohort].[cohortDefinition_id]=-599'",ex.Message); + // tidy up + ep.DeleteInDatabase(); + } - // tidy up - ep.DeleteInDatabase(); - } + [Test] + public void TestExtractionProgress_AuditRecordIsGood_NoProblems() + { + // normal checks pass + var checker = new SelectedDataSetsChecker(new ThrowImmediatelyActivator(RepositoryLocator), _selectedDataSet); - [Test] - public void TestExtractionProgress_AuditRecordIsGood_NoProblems() + var ep = new ExtractionProgress(DataExportRepository, _selectedDataSet, new DateTime(1990, 1, 1), + new DateTime(2001, 1, 1), 100, "mybatch", + _extractionInformations[0].ID) { - // normal checks pass - var checker = new SelectedDataSetsChecker(new ThrowImmediatelyActivator(RepositoryLocator), _selectedDataSet); + ProgressDate = new DateTime(1995, 1, 1) // we are half way through + }; - var ep = new ExtractionProgress(DataExportRepository, _selectedDataSet, new System.DateTime(1990, 1, 1), new System.DateTime(2001, 1, 1), 100, "mybatch", - _extractionInformations[0].ID); + ep.SaveToDatabase(); - ep.ProgressDate = new System.DateTime(1995, 1, 1); // we are half way through - ep.SaveToDatabase(); + foreach (var r in DataExportRepository.GetAllObjects()) r.DeleteInDatabase(); - foreach (var r in DataExportRepository.GetAllObjects()) - { - r.DeleteInDatabase(); - } - - // audit has SQL is good, it contains the correct cohort - var audit = new CumulativeExtractionResults(DataExportRepository, _configuration, _selectedDataSet.ExtractableDataSet, - $"select * from [yohoho and a bottle of rum] WHERE [{TestDatabaseSettings.Prefix}CohortDatabase]..[Cohort].[cohortDefinition_id]=-599'"); + // audit has SQL is good, it contains the correct cohort + var audit = new CumulativeExtractionResults(DataExportRepository, _configuration, + _selectedDataSet.ExtractableDataSet, + $"select * from [yohoho and a bottle of rum] WHERE [{TestDatabaseNames.Prefix}CohortDatabase]..[Cohort].[cohortDefinition_id]=-599'"); - audit.CompleteAudit(typeof(ExecuteFullExtractionToDatabaseMSSql), "[over the hills and far away]", 333, true, false); - audit.SaveToDatabase(); + audit.CompleteAudit(typeof(ExecuteFullExtractionToDatabaseMSSql), "[over the hills and far away]", 333, true, + false); + audit.SaveToDatabase(); - Assert.DoesNotThrow(() => checker.Check(new ThrowImmediatelyCheckNotifier())); + Assert.DoesNotThrow(() => checker.Check(ThrowImmediatelyCheckNotifier.Quiet)); - // tidy up - ep.DeleteInDatabase(); - } + // tidy up + ep.DeleteInDatabase(); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataExport/DataAccess/PackageContentsTests.cs b/Rdmp.Core.Tests/DataExport/DataAccess/PackageContentsTests.cs index 69370b07c8..0c3cdf0382 100644 --- a/Rdmp.Core.Tests/DataExport/DataAccess/PackageContentsTests.cs +++ b/Rdmp.Core.Tests/DataExport/DataAccess/PackageContentsTests.cs @@ -8,50 +8,48 @@ using NUnit.Framework; using Rdmp.Core.Curation.Data; using Rdmp.Core.DataExport.Data; -using Rdmp.Core.Repositories.Managers; using Tests.Common; -namespace Rdmp.Core.Tests.DataExport.DataAccess +namespace Rdmp.Core.Tests.DataExport.DataAccess; + +public class PackageContentsTests : DatabaseTests { - public class PackageContentsTests:DatabaseTests + [Test] + public void AddAndRemove() { - [Test] - public void AddAndRemove() - { - var cata = new Catalogue(CatalogueRepository, "PackageContentsTests"); + var cata = new Catalogue(CatalogueRepository, "PackageContentsTests"); - var ds = new 
ExtractableDataSet(DataExportRepository,cata); + var ds = new ExtractableDataSet(DataExportRepository, cata); - var package = new ExtractableDataSetPackage(DataExportRepository, "My Cool Package"); - try - { - Assert.AreEqual("My Cool Package",package.Name); - package.Name = "FishPackage"; - package.SaveToDatabase(); + var package = new ExtractableDataSetPackage(DataExportRepository, "My Cool Package"); + try + { + Assert.AreEqual("My Cool Package", package.Name); + package.Name = "FishPackage"; + package.SaveToDatabase(); - var packageContents = DataExportRepository; + var packageContents = DataExportRepository; - var results = packageContents.GetAllDataSets(package, null); - Assert.AreEqual(0,results.Length); + var results = packageContents.GetAllDataSets(package, null); + Assert.AreEqual(0, results.Length); - packageContents.AddDataSetToPackage(package,ds); + packageContents.AddDataSetToPackage(package, ds); - results = packageContents.GetAllDataSets(package, DataExportRepository.GetAllObjects()); - Assert.AreEqual(1, results.Length); - Assert.AreEqual(ds,results[0]); + results = packageContents.GetAllDataSets(package, DataExportRepository.GetAllObjects()); + Assert.AreEqual(1, results.Length); + Assert.AreEqual(ds, results[0]); - packageContents.RemoveDataSetFromPackage(package,ds); + packageContents.RemoveDataSetFromPackage(package, ds); - //cannot delete the relationship twice - Assert.Throws(() => packageContents.RemoveDataSetFromPackage(package, ds)); - } - finally - { - ds.DeleteInDatabase(); - package.DeleteInDatabase(); - cata.DeleteInDatabase(); - } + //cannot delete the relationship twice + Assert.Throws(() => packageContents.RemoveDataSetFromPackage(package, ds)); + } + finally + { + ds.DeleteInDatabase(); + package.DeleteInDatabase(); + cata.DeleteInDatabase(); } } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataExport/DataAccess/SelectedColumnsTests.cs b/Rdmp.Core.Tests/DataExport/DataAccess/SelectedColumnsTests.cs index b53eb6d666..2402e4628c 100644 --- a/Rdmp.Core.Tests/DataExport/DataAccess/SelectedColumnsTests.cs +++ b/Rdmp.Core.Tests/DataExport/DataAccess/SelectedColumnsTests.cs @@ -10,57 +10,51 @@ using Rdmp.Core.DataExport.Data; using Tests.Common; -namespace Rdmp.Core.Tests.DataExport.DataAccess +namespace Rdmp.Core.Tests.DataExport.DataAccess; + +public class SelectedColumnsTests : DatabaseTests { - public class SelectedColumnsTests:DatabaseTests + //Simple test SelectedColumns in which an extraction configuration is built for a test dataset with a single column configured for extraction + [Test] + public void CreateAndAssociateColumns() { - //Simple test SelectedColumns in which an extraction configuration is built for a test dataset with a single column configured for extraction - [Test] - public void CreateAndAssociateColumns() - { - var cata = new Catalogue(CatalogueRepository, "MyCat"); - var cataItem = new CatalogueItem(CatalogueRepository, cata,"MyCataItem"); - var TableInfo = new TableInfo(CatalogueRepository, "Cata"); - var ColumnInfo = new ColumnInfo(CatalogueRepository, "Col","varchar(10)",TableInfo); - var ExtractionInfo = new ExtractionInformation(CatalogueRepository, cataItem, ColumnInfo, "fish"); - - var ds = new ExtractableDataSet(DataExportRepository,cata); - - var proj = new Project(DataExportRepository, "MyProj"); - var config = new ExtractionConfiguration(DataExportRepository, proj); - - SelectedDataSets selectedDataSets; + var cata = new Catalogue(CatalogueRepository, "MyCat"); + var cataItem = new 
CatalogueItem(CatalogueRepository, cata, "MyCataItem"); + var TableInfo = new TableInfo(CatalogueRepository, "Cata"); + var ColumnInfo = new ColumnInfo(CatalogueRepository, "Col", "varchar(10)", TableInfo); + var ExtractionInfo = new ExtractionInformation(CatalogueRepository, cataItem, ColumnInfo, "fish"); - var extractableColumn = new ExtractableColumn(DataExportRepository, ds, config, ExtractionInfo, 1, "fish"); + var ds = new ExtractableDataSet(DataExportRepository, cata); - try - { - - selectedDataSets = new SelectedDataSets(DataExportRepository,config, ds,null); + var proj = new Project(DataExportRepository, "MyProj"); + var config = new ExtractionConfiguration(DataExportRepository, proj); - var cols = config.GetAllExtractableColumnsFor(ds); + var extractableColumn = new ExtractableColumn(DataExportRepository, ds, config, ExtractionInfo, 1, "fish"); - Assert.AreEqual(1,cols.Count()); - Assert.AreEqual(extractableColumn, cols.Single()); - - cols = config.GetAllExtractableColumnsFor(ds); + try + { + _ = new SelectedDataSets(DataExportRepository, config, ds, null); - Assert.AreEqual(1, cols.Count()); - Assert.AreEqual(extractableColumn, cols.Single()); - } - finally - { - extractableColumn.DeleteInDatabase(); - config.DeleteInDatabase(); - proj.DeleteInDatabase(); + var cols = config.GetAllExtractableColumnsFor(ds); - ds.DeleteInDatabase(); + Assert.AreEqual(1, cols.Length); + Assert.AreEqual(extractableColumn, cols.Single()); - TableInfo.DeleteInDatabase(); - cata.DeleteInDatabase(); + cols = config.GetAllExtractableColumnsFor(ds); - } + Assert.AreEqual(1, cols.Length); + Assert.AreEqual(extractableColumn, cols.Single()); } + finally + { + extractableColumn.DeleteInDatabase(); + config.DeleteInDatabase(); + proj.DeleteInDatabase(); + + ds.DeleteInDatabase(); + TableInfo.DeleteInDatabase(); + cata.DeleteInDatabase(); + } } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataExport/DataExportRepositoryTests.cs b/Rdmp.Core.Tests/DataExport/DataExportRepositoryTests.cs index 408aef1beb..b7727ecb7e 100644 --- a/Rdmp.Core.Tests/DataExport/DataExportRepositoryTests.cs +++ b/Rdmp.Core.Tests/DataExport/DataExportRepositoryTests.cs @@ -8,49 +8,46 @@ using NUnit.Framework; using Rdmp.Core.Curation.Data; using Rdmp.Core.DataExport.Data; -using Rdmp.Core.Repositories; using Tests.Common; -namespace Rdmp.Core.Tests.DataExport +namespace Rdmp.Core.Tests.DataExport; + +internal class DataExportRepositoryTests : DatabaseTests { - class DataExportRepositoryTests:DatabaseTests + [Test] + public void TestNoIsExtractionIdentifierFinding() { - [Test] - public void TestNoIsExtractionIdentifierFinding() - { - //nothing in database means no dodgy datasets - Assert.IsEmpty(DataExportRepository.GetSelectedDatasetsWithNoExtractionIdentifiers()); - - var cata = new Catalogue(CatalogueRepository, "ommn"); - var ds = new ExtractableDataSet(DataExportRepository, cata); - var proj = new Project(DataExportRepository, "proj"); - var config = new ExtractionConfiguration(DataExportRepository, proj); - var sds = new SelectedDataSets(DataExportRepository, config, ds, null); - - //only one selected dataset - var dodgy = DataExportRepository.GetSelectedDatasetsWithNoExtractionIdentifiers().ToArray(); - Assert.AreEqual(1,dodgy.Count()); - Assert.AreEqual(sds,dodgy[0]); - - //make an extarctable column on that dataset - var col = new ColumnInfo(CatalogueRepository,"ff","varchar(1)",new TableInfo(CatalogueRepository, "fff")); - var ci = new CatalogueItem(CatalogueRepository, cata, "A"); - var ei = new 
ExtractionInformation(CatalogueRepository, ci, col,col.Name); - var ec = new ExtractableColumn(DataExportRepository, ds, config, ei, 0, col.Name); - - //still shouldn't be dodgy - dodgy = DataExportRepository.GetSelectedDatasetsWithNoExtractionIdentifiers().ToArray(); - Assert.AreEqual(1, dodgy.Count()); - Assert.AreEqual(sds, dodgy[0]); - - //now make it non dodgy by being IsExtractionIdentifier - ec.IsExtractionIdentifier = true; - ec.SaveToDatabase(); - - //no longer dodgy because there is an extraction identifier - dodgy = DataExportRepository.GetSelectedDatasetsWithNoExtractionIdentifiers().ToArray(); - Assert.AreEqual(0, dodgy.Count()); - - } + //nothing in database means no dodgy datasets + Assert.IsEmpty(DataExportRepository.GetSelectedDatasetsWithNoExtractionIdentifiers()); + + var cata = new Catalogue(CatalogueRepository, "ommn"); + var ds = new ExtractableDataSet(DataExportRepository, cata); + var proj = new Project(DataExportRepository, "proj"); + var config = new ExtractionConfiguration(DataExportRepository, proj); + var sds = new SelectedDataSets(DataExportRepository, config, ds, null); + + //only one selected dataset + var dodgy = DataExportRepository.GetSelectedDatasetsWithNoExtractionIdentifiers().ToArray(); + Assert.AreEqual(1, dodgy.Length); + Assert.AreEqual(sds, dodgy[0]); + + //make an extarctable column on that dataset + var col = new ColumnInfo(CatalogueRepository, "ff", "varchar(1)", new TableInfo(CatalogueRepository, "fff")); + var ci = new CatalogueItem(CatalogueRepository, cata, "A"); + var ei = new ExtractionInformation(CatalogueRepository, ci, col, col.Name); + var ec = new ExtractableColumn(DataExportRepository, ds, config, ei, 0, col.Name); + + //still shouldn't be dodgy + dodgy = DataExportRepository.GetSelectedDatasetsWithNoExtractionIdentifiers().ToArray(); + Assert.AreEqual(1, dodgy.Length); + Assert.AreEqual(sds, dodgy[0]); + + //now make it non dodgy by being IsExtractionIdentifier + ec.IsExtractionIdentifier = true; + ec.SaveToDatabase(); + + //no longer dodgy because there is an extraction identifier + dodgy = DataExportRepository.GetSelectedDatasetsWithNoExtractionIdentifiers().ToArray(); + Assert.AreEqual(0, dodgy.Length); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataExport/DataExtraction/EmptyDataExtractionTests.cs b/Rdmp.Core.Tests/DataExport/DataExtraction/EmptyDataExtractionTests.cs index 5c818472aa..15c25627da 100644 --- a/Rdmp.Core.Tests/DataExport/DataExtraction/EmptyDataExtractionTests.cs +++ b/Rdmp.Core.Tests/DataExport/DataExtraction/EmptyDataExtractionTests.cs @@ -9,90 +9,82 @@ using System.Linq; using NUnit.Framework; using Rdmp.Core.CommandExecution; -using Rdmp.Core.Curation.Data.Pipelines; using Rdmp.Core.DataExport.DataExtraction.Pipeline; using Rdmp.Core.DataExport.DataExtraction.Pipeline.Destinations; using Rdmp.Core.DataFlowPipeline; using Rdmp.Core.Logging; using Rdmp.Core.QueryBuilding; -using ReusableLibraryCode.Progress; +using Rdmp.Core.ReusableLibraryCode.Progress; using Tests.Common.Scenarios; -namespace Rdmp.Core.Tests.DataExport.DataExtraction +namespace Rdmp.Core.Tests.DataExport.DataExtraction; + +public class EmptyDataExtractionTests : TestsRequiringAnExtractionConfiguration { - public class EmptyDataExtractionTests:TestsRequiringAnExtractionConfiguration + private void TruncateDataTable() { + var server = Database.Server; + using var con = server.GetConnection(); + con.Open(); - private void TruncateDataTable() - { - var server = Database.Server; - using (var con = server.GetConnection()) - { 
- con.Open(); - - var cmdTruncate = server.GetCommand("TRUNCATE TABLE TestTable",con); - cmdTruncate.ExecuteNonQuery(); - - con.Close(); - } + var cmdTruncate = server.GetCommand("TRUNCATE TABLE TestTable", con); + cmdTruncate.ExecuteNonQuery(); - } + con.Close(); + } - [Test] - [TestCase(false)] - [TestCase(true)] - public void TestAllowingEmptyDatasets(bool allowEmptyDatasetExtractions) - { - Pipeline p = SetupPipeline(); - - TruncateDataTable(); + [Test] + [TestCase(false)] + [TestCase(true)] + public void TestAllowingEmptyDatasets(bool allowEmptyDatasetExtractions) + { + var p = SetupPipeline(); - var host = new ExtractionPipelineUseCase(new ThrowImmediatelyActivator(RepositoryLocator),_request.Configuration.Project, _request, p, DataLoadInfo.Empty); + TruncateDataTable(); - var engine = host.GetEngine(p, new ThrowImmediatelyDataLoadEventListener()); - host.Source.AllowEmptyExtractions = allowEmptyDatasetExtractions; + var host = new ExtractionPipelineUseCase(new ThrowImmediatelyActivator(RepositoryLocator), + _request.Configuration.Project, _request, p, DataLoadInfo.Empty); - var token = new GracefulCancellationToken(); - - if(allowEmptyDatasetExtractions) - { + var engine = host.GetEngine(p, ThrowImmediatelyDataLoadEventListener.Quiet); + host.Source.AllowEmptyExtractions = allowEmptyDatasetExtractions; - var dt = host.Source.GetChunk(new ThrowImmediatelyDataLoadEventListener(), token); - Assert.IsNull(host.Source.GetChunk(new ThrowImmediatelyDataLoadEventListener(), token)); + var token = new GracefulCancellationToken(); - Assert.AreEqual(0,dt.Rows.Count); - Assert.AreEqual(3, dt.Columns.Count); - } - else - { - var exception = Assert.Throws(() => host.Source.GetChunk(new ThrowImmediatelyDataLoadEventListener(), token)); + if (allowEmptyDatasetExtractions) + { + var dt = host.Source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, token); + Assert.IsNull(host.Source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, token)); - Assert.IsTrue(exception.Message.StartsWith("There is no data to load, query returned no rows, query was")); - } + Assert.AreEqual(0, dt.Rows.Count); + Assert.AreEqual(3, dt.Columns.Count); + } + else + { + var exception = Assert.Throws(() => + host.Source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, token)); - p.DeleteInDatabase(); + Assert.IsTrue(exception.Message.StartsWith("There is no data to load, query returned no rows, query was")); } - [Test] - public void ProducesEmptyCSV() - { - TruncateDataTable(); - AllowEmptyExtractions = true; + p.DeleteInDatabase(); + } - ExtractionPipelineUseCase execute; - IExecuteDatasetExtractionDestination result; + [Test] + public void ProducesEmptyCSV() + { + TruncateDataTable(); + AllowEmptyExtractions = true; - Assert.AreEqual(1, _request.ColumnsToExtract.Count(c => c.IsExtractionIdentifier)); + Assert.AreEqual(1, _request.ColumnsToExtract.Count(c => c.IsExtractionIdentifier)); - base.Execute(out execute, out result); + Execute(out _, out var result); - var r = (ExecuteDatasetExtractionFlatFileDestination)result; + var r = (ExecuteDatasetExtractionFlatFileDestination)result; - //this should be what is in the file, the private identifier and the 1 that was put into the table in the first place (see parent class for the test data setup) - Assert.AreEqual(@"ReleaseID,Name,DateOfBirth", File.ReadAllText(r.OutputFile).Trim()); + //this should be what is in the file, the private identifier and the 1 that was put into the table in the first place (see parent class for the test data setup) + 
Assert.AreEqual(@"ReleaseID,Name,DateOfBirth", File.ReadAllText(r.OutputFile).Trim()); - Assert.AreEqual(1, _request.QueryBuilder.SelectColumns.Count(c => c.IColumn is ReleaseIdentifierSubstitution)); - File.Delete(r.OutputFile); - } + Assert.AreEqual(1, _request.QueryBuilder.SelectColumns.Count(c => c.IColumn is ReleaseIdentifierSubstitution)); + File.Delete(r.OutputFile); } } \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataExport/DataExtraction/ExecuteCrossServerDatasetExtractionSourceTest.cs b/Rdmp.Core.Tests/DataExport/DataExtraction/ExecuteCrossServerDatasetExtractionSourceTest.cs index 216d7b1b14..797511071c 100644 --- a/Rdmp.Core.Tests/DataExport/DataExtraction/ExecuteCrossServerDatasetExtractionSourceTest.cs +++ b/Rdmp.Core.Tests/DataExport/DataExtraction/ExecuteCrossServerDatasetExtractionSourceTest.cs @@ -13,73 +13,77 @@ using Rdmp.Core.DataExport.DataExtraction.Pipeline; using Rdmp.Core.DataExport.DataExtraction.Pipeline.Destinations; using Rdmp.Core.DataExport.DataExtraction.Pipeline.Sources; -using ReusableLibraryCode.Progress; +using Rdmp.Core.ReusableLibraryCode.Progress; using Tests.Common; using Tests.Common.Scenarios; -namespace Rdmp.Core.Tests.DataExport.DataExtraction +namespace Rdmp.Core.Tests.DataExport.DataExtraction; + +public class ExecuteCrossServerDatasetExtractionSourceTest : TestsRequiringAnExtractionConfiguration { - public class ExecuteCrossServerDatasetExtractionSourceTest : TestsRequiringAnExtractionConfiguration + [Test] + public void CrossServerExtraction() { - [Test] - public void CrossServerExtraction() - { - ExtractionPipelineUseCase execute; - IExecuteDatasetExtractionDestination result; - - base.Execute(out execute, out result); + Execute(out _, out var result); - var r = (ExecuteDatasetExtractionFlatFileDestination)result; + var r = (ExecuteDatasetExtractionFlatFileDestination)result; - //this should be what is in the file, the private identifier and the 1 that was put into the table in the first place (see parent class for the test data setup) - Assert.AreEqual(@"ReleaseID,Name,DateOfBirth -" + _cohortKeysGenerated[_cohortKeysGenerated.Keys.First()] + @",Dave,2001-01-01", File.ReadAllText(r.OutputFile).Trim()); + //this should be what is in the file, the private identifier and the 1 that was put into the table in the first place (see parent class for the test data setup) + Assert.AreEqual($@"ReleaseID,Name,DateOfBirth +{_cohortKeysGenerated[_cohortKeysGenerated.Keys.First()]},Dave,2001-01-01", File.ReadAllText(r.OutputFile).Trim()); - File.Delete(r.OutputFile); - } + File.Delete(r.OutputFile); + } - protected override Pipeline SetupPipeline() - { - var pipeline = new Pipeline(CatalogueRepository, "Empty extraction pipeline"); - var component = new PipelineComponent(CatalogueRepository, pipeline, typeof(ExecuteDatasetExtractionFlatFileDestination), 0, "Destination"); - var arguments = component.CreateArgumentsForClassIfNotExists().ToArray(); - - if (arguments.Length < 3) - throw new Exception("Expected only 2 arguments for type ExecuteDatasetExtractionFlatFileDestination, did somebody add another [DemandsInitialization]? 
if so handle it below"); - - arguments.Single(a => a.Name.Equals("DateFormat")).SetValue("yyyy-MM-dd"); - arguments.Single(a => a.Name.Equals("DateFormat")).SaveToDatabase(); - - arguments.Single(a=>a.Name.Equals("FlatFileType")).SetValue(ExecuteExtractionToFlatFileType.CSV); - arguments.Single(a=>a.Name.Equals("FlatFileType")).SaveToDatabase(); - - AdjustPipelineComponentDelegate?.Invoke(component); - - var component2 = new PipelineComponent(CatalogueRepository, pipeline, typeof(ExecuteCrossServerDatasetExtractionSource), -1, "Source"); - var arguments2 = component2.CreateArgumentsForClassIfNotExists().ToArray(); - arguments2.Single(a=>a.Name.Equals("AllowEmptyExtractions")).SetValue(false); - arguments2.Single(a => a.Name.Equals("AllowEmptyExtractions")).SaveToDatabase(); - arguments2.Single(a => a.Name.Equals(nameof(ExecuteCrossServerDatasetExtractionSource.TemporaryTableName))).SetValue(""); - arguments2.Single(a => a.Name.Equals(nameof(ExecuteCrossServerDatasetExtractionSource.TemporaryTableName))).SaveToDatabase(); - AdjustPipelineComponentDelegate?.Invoke(component2); - - - //configure the component as the destination - pipeline.DestinationPipelineComponent_ID = component.ID; - pipeline.SourcePipelineComponent_ID = component2.ID; - pipeline.SaveToDatabase(); - - return pipeline; - } - - [Test] - public void HackSQLTest_Normal() - { - if (_request.QueryBuilder == null) - _request.GenerateQueryBuilder(); + protected override Pipeline SetupPipeline() + { + var pipeline = new Pipeline(CatalogueRepository, "Empty extraction pipeline"); + var component = new PipelineComponent(CatalogueRepository, pipeline, + typeof(ExecuteDatasetExtractionFlatFileDestination), 0, "Destination"); + var arguments = component.CreateArgumentsForClassIfNotExists() + .ToArray(); + + if (arguments.Length < 3) + throw new Exception( + "Expected only 2 arguments for type ExecuteDatasetExtractionFlatFileDestination, did somebody add another [DemandsInitialization]? 
if so handle it below"); + + arguments.Single(a => a.Name.Equals("DateFormat")).SetValue("yyyy-MM-dd"); + arguments.Single(a => a.Name.Equals("DateFormat")).SaveToDatabase(); + + arguments.Single(a => a.Name.Equals("FlatFileType")).SetValue(ExecuteExtractionToFlatFileType.CSV); + arguments.Single(a => a.Name.Equals("FlatFileType")).SaveToDatabase(); + + AdjustPipelineComponentDelegate?.Invoke(component); + + var component2 = new PipelineComponent(CatalogueRepository, pipeline, + typeof(ExecuteCrossServerDatasetExtractionSource), -1, "Source"); + var arguments2 = component2.CreateArgumentsForClassIfNotExists() + .ToArray(); + arguments2.Single(a => a.Name.Equals("AllowEmptyExtractions")).SetValue(false); + arguments2.Single(a => a.Name.Equals("AllowEmptyExtractions")).SaveToDatabase(); + arguments2.Single(a => a.Name.Equals(nameof(ExecuteCrossServerDatasetExtractionSource.TemporaryTableName))) + .SetValue(""); + arguments2.Single(a => a.Name.Equals(nameof(ExecuteCrossServerDatasetExtractionSource.TemporaryTableName))) + .SaveToDatabase(); + AdjustPipelineComponentDelegate?.Invoke(component2); + + + //configure the component as the destination + pipeline.DestinationPipelineComponent_ID = component.ID; + pipeline.SourcePipelineComponent_ID = component2.ID; + pipeline.SaveToDatabase(); + + return pipeline; + } + + [Test] + public void HackSQLTest_Normal() + { + if (_request.QueryBuilder == null) + _request.GenerateQueryBuilder(); - string expectedOutput = -string.Format(@"/*The ID of the cohort in [tempdb]..[Cohort]*/ + var expectedOutput = + string.Format(@"/*The ID of the cohort in [tempdb]..[Cohort]*/ DECLARE @CohortDefinitionID AS int; SET @CohortDefinitionID=-599; /*The project number of project {0}ExtractionConfiguration*/ @@ -98,34 +102,32 @@ SELECT DISTINCT [tempdb]..[Cohort].[cohortDefinition_id]=-599 ", TestDatabaseNames.Prefix); - //cross server is only used if cohort and dataset are on different servers so pretend the cohort is on bob server - var ect = (ExternalCohortTable)_request.ExtractableCohort.ExternalCohortTable; - ect.Server = "bob"; + //cross server is only used if cohort and dataset are on different servers so pretend the cohort is on bob server + var ect = (ExternalCohortTable)_request.ExtractableCohort.ExternalCohortTable; + ect.Server = "bob"; - var e = DataExportRepository.GetObjectByID(_request.ExtractableCohort.ExternalCohortTable_ID); - string origValue = e.Database; + var e = DataExportRepository.GetObjectByID(_request.ExtractableCohort + .ExternalCohortTable_ID); + var origValue = e.Database; - e.Database = CohortDatabaseName; - e.SaveToDatabase(); - try - { - ExecuteCrossServerDatasetExtractionSource s = new ExecuteCrossServerDatasetExtractionSource(); - s.TemporaryDatabaseName = "tempdb"; - s.PreInitialize(_request, new ThrowImmediatelyDataLoadEventListener()); - string hacked = s.HackExtractionSQL(_request.QueryBuilder.SQL, new ThrowImmediatelyDataLoadEventListener() { ThrowOnWarning = true }); - - Assert.AreEqual(expectedOutput.Trim(),hacked.Trim()); - } - finally + e.Database = CohortDatabaseName; + e.SaveToDatabase(); + try + { + var s = new ExecuteCrossServerDatasetExtractionSource { - e.Database = origValue; - e.SaveToDatabase(); - } - } - - - + TemporaryDatabaseName = "tempdb" + }; + s.PreInitialize(_request, ThrowImmediatelyDataLoadEventListener.Quiet); + var hacked = s.HackExtractionSQL(_request.QueryBuilder.SQL, + ThrowImmediatelyDataLoadEventListener.QuietPicky); - - } -} + Assert.AreEqual(expectedOutput.Trim(), hacked.Trim()); + } + finally + { + 
e.Database = origValue; + e.SaveToDatabase(); + } + } +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataExport/DataExtraction/ExecuteDatasetExtractionFlatFileDestinationTests.cs b/Rdmp.Core.Tests/DataExport/DataExtraction/ExecuteDatasetExtractionFlatFileDestinationTests.cs index 26b8469a5d..62bff5ca5c 100644 --- a/Rdmp.Core.Tests/DataExport/DataExtraction/ExecuteDatasetExtractionFlatFileDestinationTests.cs +++ b/Rdmp.Core.Tests/DataExport/DataExtraction/ExecuteDatasetExtractionFlatFileDestinationTests.cs @@ -4,70 +4,56 @@ // RDMP is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. // You should have received a copy of the GNU General Public License along with RDMP. If not, see . -using FAnsi; using FAnsi.Discovery; -using Microsoft.Data.SqlClient; using NUnit.Framework; using Rdmp.Core.DataExport.DataExtraction.Pipeline.Destinations; using Rdmp.Core.DataFlowPipeline; using Rdmp.Core.Logging; -using ReusableLibraryCode.Progress; using System; -using System.Collections.Generic; using System.Data; using System.IO; -using System.Linq; -using System.Text; -using System.Threading.Tasks; -using Tests.Common; +using Rdmp.Core.ReusableLibraryCode.Progress; using Tests.Common.Scenarios; -namespace Rdmp.Core.Tests.DataExport.DataExtraction +namespace Rdmp.Core.Tests.DataExport.DataExtraction; + +internal class ExecuteDatasetExtractionFlatFileDestinationTests : TestsRequiringAnExtractionConfiguration { - class ExecuteDatasetExtractionFlatFileDestinationTests : TestsRequiringAnExtractionConfiguration + [TestCase(true)] + [TestCase(false)] + public void ExtractionDestination_FloatRounding(bool lotsOfDecimalPlaces) { - [TestCase(true)] - [TestCase(false)] - public void ExtractionDestination_FloatRounding(bool lotsOfDecimalPlaces) - { - var dest = new ExecuteDatasetExtractionFlatFileDestination(); + var dest = new ExecuteDatasetExtractionFlatFileDestination(); + + var dt = new DataTable(); + dt.Columns.Add("Floats", typeof(decimal)); - var dt = new DataTable(); - dt.Columns.Add("Floats", typeof(decimal)); + dt.Rows.Add(Math.PI); - dt.Rows.Add(Math.PI); + var lm = new LogManager(new DiscoveredServer(UnitTestLoggingConnectionString)); + lm.CreateNewLoggingTaskIfNotExists("ExtractionDestination_FloatRounding"); - var lm = new LogManager(new DiscoveredServer(UnitTestLoggingConnectionString)); - lm.CreateNewLoggingTaskIfNotExists("ExtractionDestination_FloatRounding"); + var dli = lm.CreateDataLoadInfo("ExtractionDestination_FloatRounding", + nameof(ExecuteDatasetExtractionFlatFileDestinationTests), "test", "", true); - var dli = lm.CreateDataLoadInfo("ExtractionDestination_FloatRounding", nameof(ExecuteDatasetExtractionFlatFileDestinationTests), "test", "", true); - - if(_request.QueryBuilder == null) - { - _request.GenerateQueryBuilder(); - } - dest.RoundFloatsTo = lotsOfDecimalPlaces ? 10 : 2; + if (_request.QueryBuilder == null) _request.GenerateQueryBuilder(); + dest.RoundFloatsTo = lotsOfDecimalPlaces ? 
10 : 2; - dest.PreInitialize(_request, new ThrowImmediatelyDataLoadEventListener()); - dest.PreInitialize(_project, new ThrowImmediatelyDataLoadEventListener()); - dest.PreInitialize((DataLoadInfo)dli, new ThrowImmediatelyDataLoadEventListener()); + dest.PreInitialize(_request, ThrowImmediatelyDataLoadEventListener.Quiet); + dest.PreInitialize(_project, ThrowImmediatelyDataLoadEventListener.Quiet); + dest.PreInitialize((DataLoadInfo)dli, ThrowImmediatelyDataLoadEventListener.Quiet); - dest.ProcessPipelineData(dt, new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()); - dest.Dispose(new ThrowImmediatelyDataLoadEventListener(),null); + dest.ProcessPipelineData(dt, ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); + dest.Dispose(ThrowImmediatelyDataLoadEventListener.Quiet, null); - Assert.IsNotNull(dest.OutputFile); - FileAssert.Exists(dest.OutputFile); + Assert.IsNotNull(dest.OutputFile); + FileAssert.Exists(dest.OutputFile); - if (lotsOfDecimalPlaces) - { - Assert.AreEqual($"Floats{Environment.NewLine}3.1415926536{Environment.NewLine}", File.ReadAllText(dest.OutputFile)); - } - else - { - Assert.AreEqual($"Floats{Environment.NewLine}3.14{Environment.NewLine}", File.ReadAllText(dest.OutputFile)); - } + Assert.AreEqual( + lotsOfDecimalPlaces + ? $"Floats{Environment.NewLine}3.1415926536{Environment.NewLine}" + : $"Floats{Environment.NewLine}3.14{Environment.NewLine}", File.ReadAllText(dest.OutputFile)); - dt.Dispose(); - } + dt.Dispose(); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataExport/DataExtraction/ExecuteFullExtractionToDatabaseMSSqlChecksTests.cs b/Rdmp.Core.Tests/DataExport/DataExtraction/ExecuteFullExtractionToDatabaseMSSqlChecksTests.cs index 94afce9fe2..70f3674baa 100644 --- a/Rdmp.Core.Tests/DataExport/DataExtraction/ExecuteFullExtractionToDatabaseMSSqlChecksTests.cs +++ b/Rdmp.Core.Tests/DataExport/DataExtraction/ExecuteFullExtractionToDatabaseMSSqlChecksTests.cs @@ -6,150 +6,152 @@ using FAnsi; using FAnsi.Discovery; -using Moq; +using NSubstitute; using NUnit.Framework; using Rdmp.Core.Curation.Data; using Rdmp.Core.DataExport.Data; using Rdmp.Core.DataExport.DataExtraction.Commands; using Rdmp.Core.DataExport.DataExtraction.Pipeline.Destinations; -using ReusableLibraryCode.Checks; -using ReusableLibraryCode.Progress; +using Rdmp.Core.ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Progress; using Tests.Common; -using Tests.Common.Scenarios; -namespace Rdmp.Core.Tests.DataExport.DataExtraction +namespace Rdmp.Core.Tests.DataExport.DataExtraction; + +public class ExecuteFullExtractionToDatabaseMSSqlChecksTests : DatabaseTests { - public class ExecuteFullExtractionToDatabaseMSSqlChecksTests:DatabaseTests + private IProject _projectStub; + private IExtractCommand _commandStub; + + public DiscoveredDatabase Database { get; set; } + + [SetUp] + protected override void SetUp() { - private IProject _projectStub; - private IExtractCommand _commandStub; - - public DiscoveredDatabase Database { get; set; } + base.SetUp(); - [SetUp] - protected override void SetUp() - { - base.SetUp(); + _projectStub = Substitute.For(); + _projectStub.ProjectNumber = -123; + + var cfg = Substitute.For(); - _projectStub = Mock.Of(); - _projectStub.ProjectNumber = -123; + _commandStub = Substitute.For(); + _commandStub.Configuration.Returns(cfg); + Database = GetCleanedServer(DatabaseType.MicrosoftSQLServer); + } - var cfg = Mock.Of(); - - _commandStub = Mock.Of(cmd => cmd.Configuration==cfg); - Database = 
GetCleanedServer(DatabaseType.MicrosoftSQLServer); - } + [Test] + public void NoServer() + { + var destination = new ExecuteFullExtractionToDatabaseMSSql(); - + var tomemory = new ToMemoryCheckNotifier(); + destination.Check(tomemory); + Assert.AreEqual(CheckResult.Fail, tomemory.Messages[0].Result); + Assert.IsTrue(tomemory.Messages[0].Message.StartsWith("Target database server property has not been set")); + } - [Test] - public void NoServer() + [Test] + public void ServerMissingServerName() + { + var server = new ExternalDatabaseServer(CatalogueRepository, "Fiction", null); + try { - var destination = new ExecuteFullExtractionToDatabaseMSSql(); + var destination = new ExecuteFullExtractionToDatabaseMSSql + { + TargetDatabaseServer = server + }; var tomemory = new ToMemoryCheckNotifier(); destination.Check(tomemory); - Assert.AreEqual(CheckResult.Fail,tomemory.Messages[0].Result); - Assert.IsTrue(tomemory.Messages[0].Message.StartsWith("Target database server property has not been set")); + Assert.AreEqual(CheckResult.Fail, tomemory.Messages[0].Result); + Assert.IsTrue(tomemory.Messages[0].Message + .StartsWith("TargetDatabaseServer does not have a .Server specified")); } - [Test] - public void ServerMissingServerName() + finally { - var server = new ExternalDatabaseServer(CatalogueRepository, "Fiction",null); - try - { - var destination = new ExecuteFullExtractionToDatabaseMSSql(); - destination.TargetDatabaseServer = server; - - var tomemory = new ToMemoryCheckNotifier(); - destination.Check(tomemory); - - Assert.AreEqual(CheckResult.Fail, tomemory.Messages[0].Result); - Assert.IsTrue(tomemory.Messages[0].Message.StartsWith("TargetDatabaseServer does not have a .Server specified")); - } - finally - { - server.DeleteInDatabase(); - } + server.DeleteInDatabase(); } + } - [TestCase(false)] - [TestCase(true)] - public void ServerDatabaseIsPresentAndCorrect(bool alreadyExists) + [TestCase(false)] + [TestCase(true)] + public void ServerDatabaseIsPresentAndCorrect(bool alreadyExists) + { + var server = new ExternalDatabaseServer(CatalogueRepository, "Fiction", null) { - var server = new ExternalDatabaseServer(CatalogueRepository, "Fiction",null); - server.Server = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.Name; - server.Username = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExplicitUsernameIfAny; - server.Password = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExplicitPasswordIfAny; - - //server.Database = "FictionalDatabase"; Ignored by the extractor! - - try - { + Server = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.Name, + Username = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExplicitUsernameIfAny, + Password = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExplicitPasswordIfAny + }; - var destination = new ExecuteFullExtractionToDatabaseMSSql(); - destination.PreInitialize(_projectStub, new ThrowImmediatelyDataLoadEventListener()); - destination.PreInitialize(_commandStub, new ThrowImmediatelyDataLoadEventListener()); + //server.Database = "FictionalDatabase"; Ignored by the extractor! 
- destination.TargetDatabaseServer = server; - destination.TableNamingPattern = "$d"; + try + { + var destination = new ExecuteFullExtractionToDatabaseMSSql(); + destination.PreInitialize(_projectStub, ThrowImmediatelyDataLoadEventListener.Quiet); + destination.PreInitialize(_commandStub, ThrowImmediatelyDataLoadEventListener.Quiet); - if (alreadyExists) - destination.DatabaseNamingPattern = Database.GetRuntimeName(); //database that exists - else - destination.DatabaseNamingPattern = "Fictional$nDatabase"; //database does not exist (but server does) + destination.TargetDatabaseServer = server; + destination.TableNamingPattern = "$d"; - var tomemory = new ToMemoryCheckNotifier(new ThrowImmediatelyCheckNotifier()); - destination.Check(tomemory); + destination.DatabaseNamingPattern = alreadyExists + ? Database.GetRuntimeName() + : //database that exists + "Fictional$nDatabase"; //database does not exist (but server does) - Assert.AreEqual(alreadyExists? CheckResult.Warning: CheckResult.Success, tomemory.GetWorst()); + var tomemory = new ToMemoryCheckNotifier(ThrowImmediatelyCheckNotifier.Quiet); + destination.Check(tomemory); - } - finally - { - server.DeleteInDatabase(); - } + Assert.AreEqual(alreadyExists ? CheckResult.Warning : CheckResult.Success, tomemory.GetWorst()); + } + finally + { + server.DeleteInDatabase(); } + } - [Test] - public void ServerDatabaseIsPresentAndCorrectButHasTablesInIt() + [Test] + public void ServerDatabaseIsPresentAndCorrectButHasTablesInIt() + { + var server = new ExternalDatabaseServer(CatalogueRepository, "Fiction", null) { - var server = new ExternalDatabaseServer(CatalogueRepository, "Fiction",null); - server.Server = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.Name; - server.Username = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExplicitUsernameIfAny; - server.Password = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExplicitPasswordIfAny; - //server.Database = "FictionalDatabase"; Ignored by the extractor! - - using (var con = Database.Server.GetConnection()) - { - con.Open(); + Server = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.Name, + Username = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExplicitUsernameIfAny, + Password = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExplicitPasswordIfAny + }; + //server.Database = "FictionalDatabase"; Ignored by the extractor! 
- Database.Server.GetCommand("CREATE TABLE Bob(name varchar(10))", con).ExecuteNonQuery(); - } - - try - { - var destination = new ExecuteFullExtractionToDatabaseMSSql(); - destination.PreInitialize(_projectStub, new ThrowImmediatelyDataLoadEventListener()); - destination.PreInitialize(_commandStub, new ThrowImmediatelyDataLoadEventListener()); - destination.TargetDatabaseServer = server; - destination.TableNamingPattern = "$d"; - destination.DatabaseNamingPattern = "FictionalDatabase"; + using (var con = Database.Server.GetConnection()) + { + con.Open(); - var tomemory = new ToMemoryCheckNotifier(new ThrowImmediatelyCheckNotifier()); - destination.Check(tomemory); + Database.Server.GetCommand("CREATE TABLE Bob(name varchar(10))", con).ExecuteNonQuery(); + } - Assert.AreEqual(CheckResult.Warning, tomemory.GetWorst()); + try + { + var destination = new ExecuteFullExtractionToDatabaseMSSql(); + destination.PreInitialize(_projectStub, ThrowImmediatelyDataLoadEventListener.Quiet); + destination.PreInitialize(_commandStub, ThrowImmediatelyDataLoadEventListener.Quiet); + destination.TargetDatabaseServer = server; + destination.TableNamingPattern = "$d"; + destination.DatabaseNamingPattern = "FictionalDatabase"; - Database.ExpectTable("Bob").Drop(); - } - finally - { - server.DeleteInDatabase(); - } + var tomemory = new ToMemoryCheckNotifier(ThrowImmediatelyCheckNotifier.Quiet); + destination.Check(tomemory); + + Assert.AreEqual(CheckResult.Warning, tomemory.GetWorst()); + + Database.ExpectTable("Bob").Drop(); + } + finally + { + server.DeleteInDatabase(); } } } \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataExport/DataExtraction/ExecuteFullExtractionToDatabaseMSSqlDestinationTest.cs b/Rdmp.Core.Tests/DataExport/DataExtraction/ExecuteFullExtractionToDatabaseMSSqlDestinationTest.cs index 57b1db13fb..87acc4a642 100644 --- a/Rdmp.Core.Tests/DataExport/DataExtraction/ExecuteFullExtractionToDatabaseMSSqlDestinationTest.cs +++ b/Rdmp.Core.Tests/DataExport/DataExtraction/ExecuteFullExtractionToDatabaseMSSqlDestinationTest.cs @@ -12,204 +12,226 @@ using FAnsi.Discovery; using NUnit.Framework; using Rdmp.Core.Curation.Data; -using Rdmp.Core.Curation.Data.DataLoad; using Rdmp.Core.Curation.Data.Pipelines; using Rdmp.Core.DataExport.Data; using Rdmp.Core.DataExport.DataExtraction.Pipeline.Destinations; using Rdmp.Core.DataExport.DataExtraction.Pipeline.Sources; -using Rdmp.Core.Repositories; using Tests.Common; using Tests.Common.Scenarios; -namespace Rdmp.Core.Tests.DataExport.DataExtraction +namespace Rdmp.Core.Tests.DataExport.DataExtraction; + +public class ExecuteFullExtractionToDatabaseMSSqlDestinationTest : TestsRequiringAnExtractionConfiguration { - public class ExecuteFullExtractionToDatabaseMSSqlDestinationTest :TestsRequiringAnExtractionConfiguration + private ExternalDatabaseServer _extractionServer; + + private const string _expectedTableName = "ExecuteFullExtractionToDatabaseMSSqlDestinationTest_TestTable"; + private ColumnInfo _columnToTransform; + private Pipeline _pipeline; + + [Test] + public void SQLServerDestination() { - private ExternalDatabaseServer _extractionServer; - - private readonly string _expectedTableName = "ExecuteFullExtractionToDatabaseMSSqlDestinationTest_TestTable"; - private ColumnInfo _columnToTransform; - private Pipeline _pipeline; - - [Test] - public void SQLServerDestination() - { - DiscoveredDatabase dbToExtractTo = null; + DiscoveredDatabase dbToExtractTo = null; - var ci = new CatalogueItem(CatalogueRepository, _catalogue, "YearOfBirth"); - 
_columnToTransform = _columnInfos.Single(c=>c.GetRuntimeName().Equals("DateOfBirth",StringComparison.CurrentCultureIgnoreCase)); + var ci = new CatalogueItem(CatalogueRepository, _catalogue, "YearOfBirth"); + _columnToTransform = _columnInfos.Single(c => + c.GetRuntimeName().Equals("DateOfBirth", StringComparison.CurrentCultureIgnoreCase)); - string transform = "YEAR(" + _columnToTransform.Name + ")"; + var transform = $"YEAR({_columnToTransform.Name})"; - if (_catalogue.GetAllExtractionInformation(ExtractionCategory.Any).All(ei => ei.GetRuntimeName() != "YearOfBirth")) - { - var ei = new ExtractionInformation(CatalogueRepository, ci, _columnToTransform, transform); - ei.Alias = "YearOfBirth"; - ei.ExtractionCategory = ExtractionCategory.Core; - ei.SaveToDatabase(); - - //make it part of the ExtractionConfiguration - var newColumn = new ExtractableColumn(DataExportRepository, _selectedDataSet.ExtractableDataSet, (ExtractionConfiguration)_selectedDataSet.ExtractionConfiguration, ei, 0, ei.SelectSQL); - newColumn.Alias = ei.Alias; - newColumn.SaveToDatabase(); - - _extractableColumns.Add(newColumn); - } - - CreateLookupsEtc(); - - try + if (_catalogue.GetAllExtractionInformation(ExtractionCategory.Any) + .All(ei => ei.GetRuntimeName() != "YearOfBirth")) + { + var ei = new ExtractionInformation(CatalogueRepository, ci, _columnToTransform, transform) { - _configuration.Name = "ExecuteFullExtractionToDatabaseMSSqlDestinationTest"; - _configuration.SaveToDatabase(); - - var dbname = TestDatabaseNames.GetConsistentName(_project.Name + "_" + _project.ProjectNumber); - dbToExtractTo = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(dbname); - if (dbToExtractTo.Exists()) - dbToExtractTo.Drop(); - - base.ExecuteRunner(); - - var destinationTable = dbToExtractTo.ExpectTable(_expectedTableName); - Assert.IsTrue(destinationTable.Exists()); - - var dt = destinationTable.GetDataTable(); - - Assert.AreEqual(1, dt.Rows.Count); - Assert.AreEqual(_cohortKeysGenerated[_cohortKeysGenerated.Keys.First()].Trim(),dt.Rows[0]["ReleaseID"]); - Assert.AreEqual(new DateTime(2001,1,1), dt.Rows[0]["DateOfBirth"]); - Assert.AreEqual(2001, dt.Rows[0]["YearOfBirth"]); - - Assert.AreEqual(_columnToTransform.Data_type, destinationTable.DiscoverColumn("DateOfBirth").DataType.SQLType); - Assert.AreEqual("int",destinationTable.DiscoverColumn("YearOfBirth").DataType.SQLType); - - AssertLookupsEtcExist(dbToExtractTo); - } - finally + Alias = "YearOfBirth", + ExtractionCategory = ExtractionCategory.Core + }; + ei.SaveToDatabase(); + + //make it part of the ExtractionConfiguration + var newColumn = new ExtractableColumn(DataExportRepository, _selectedDataSet.ExtractableDataSet, + (ExtractionConfiguration)_selectedDataSet.ExtractionConfiguration, ei, 0, ei.SelectSQL) { - if(dbToExtractTo != null && dbToExtractTo.Exists()) - dbToExtractTo.Drop(); + Alias = ei.Alias + }; + newColumn.SaveToDatabase(); - _pipeline?.DeleteInDatabase(); - } + _extractableColumns.Add(newColumn); } - private void AssertLookupsEtcExist(DiscoveredDatabase dbToExtractTo) + CreateLookupsEtc(); + + try { - Assert.IsTrue(dbToExtractTo.ExpectTable("ExecuteFullExtractionToDatabaseMSSqlDestinationTest_TestTable_Biochem").Exists()); - Assert.IsTrue(dbToExtractTo.ExpectTable("ExecuteFullExtractionToDatabaseMSSqlDestinationTest_Globals_Hosp").Exists()); - Assert.IsTrue(dbToExtractTo.ExpectTable("ExecuteFullExtractionToDatabaseMSSqlDestinationTest_TestTable_z_fff").Exists()); - } + _configuration.Name = 
"ExecuteFullExtractionToDatabaseMSSqlDestinationTest"; + _configuration.SaveToDatabase(); + + var dbname = TestDatabaseNames.GetConsistentName($"{_project.Name}_{_project.ProjectNumber}"); + dbToExtractTo = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(dbname); + if (dbToExtractTo.Exists()) + dbToExtractTo.Drop(); + + ExecuteRunner(); + + var destinationTable = dbToExtractTo.ExpectTable(_expectedTableName); + Assert.IsTrue(destinationTable.Exists()); - private void CreateLookupsEtc() + var dt = destinationTable.GetDataTable(); + + Assert.AreEqual(1, dt.Rows.Count); + Assert.AreEqual(_cohortKeysGenerated[_cohortKeysGenerated.Keys.First()].Trim(), dt.Rows[0]["ReleaseID"]); + Assert.AreEqual(new DateTime(2001, 1, 1), dt.Rows[0]["DateOfBirth"]); + Assert.AreEqual(2001, dt.Rows[0]["YearOfBirth"]); + + Assert.AreEqual(_columnToTransform.Data_type, + destinationTable.DiscoverColumn("DateOfBirth").DataType.SQLType); + Assert.AreEqual("int", destinationTable.DiscoverColumn("YearOfBirth").DataType.SQLType); + + AssertLookupsEtcExist(dbToExtractTo); + } + finally { - //an extractable file - var filename = Path.Combine(TestContext.CurrentContext.WorkDirectory, "bob.txt"); - - File.WriteAllText(filename,"fishfishfish"); - var doc = new SupportingDocument(CatalogueRepository, _catalogue, "bob"); - doc.URL = new Uri("file://"+filename); - doc.Extractable = true; - doc.SaveToDatabase(); - - //an extractable global file (comes out regardless of datasets) - var filename2 = Path.Combine(TestContext.CurrentContext.WorkDirectory, "bob2.txt"); - - File.WriteAllText(filename2,"fishfishfish2"); - var doc2 = new SupportingDocument(CatalogueRepository, _catalogue, "bob2"); - doc2.URL = new Uri("file://"+filename2); - doc2.Extractable = true; - doc2.IsGlobal = true; - doc2.SaveToDatabase(); - - //an supplemental table in the database (not linked against cohort) - var tbl = CreateDataset(Database,500, 1000, new Random(50)); - - var sql = new SupportingSQLTable(CatalogueRepository, _catalogue, "Biochem"); - var server = new ExternalDatabaseServer(CatalogueRepository, "myserver", null); - server.SetProperties(tbl.Database); - sql.ExternalDatabaseServer_ID = server.ID; - sql.SQL = "SELECT * FROM " + tbl.GetFullyQualifiedName(); - sql.Extractable = true; - sql.SaveToDatabase(); - - - //an supplemental (global) table in the database (not linked against cohort) - var tbl2 = CreateDataset(Database,500, 1000, new Random(50)); - - var sql2 = new SupportingSQLTable(CatalogueRepository, _catalogue, "Hosp"); - sql2.ExternalDatabaseServer_ID = server.ID; - sql2.SQL = "SELECT * FROM " + tbl2.GetFullyQualifiedName(); - sql2.Extractable = true; - sql2.IsGlobal = true; - sql2.SaveToDatabase(); - - - DataTable dtLookup = new DataTable(); - dtLookup.Columns.Add("C"); - dtLookup.Columns.Add("D"); - - dtLookup.Rows.Add("F", "Female"); - dtLookup.Rows.Add("M", "Male"); - dtLookup.Rows.Add("NB", "Non Binary"); - - var lookupTbl = tbl2.Database.CreateTable("z_fff", dtLookup); - - Import(lookupTbl, out var ti, out ColumnInfo[] columnInfos); - - var lookup = new Lookup(CatalogueRepository, columnInfos[0], - _columnToTransform, - columnInfos[1], - ExtractionJoinType.Left,null); - - //we need a CatalogueItem for the description in order to pick SetUp the Lookup as associated with the Catalogue - var ci = new CatalogueItem(CatalogueRepository, _catalogue, "SomeDesc"); - ci.ColumnInfo_ID = columnInfos[1].ID; - ci.SaveToDatabase(); + if (dbToExtractTo?.Exists() == true) + dbToExtractTo.Drop(); + + 
_pipeline?.DeleteInDatabase(); } + } + private static void AssertLookupsEtcExist(DiscoveredDatabase dbToExtractTo) + { + Assert.IsTrue(dbToExtractTo.ExpectTable("ExecuteFullExtractionToDatabaseMSSqlDestinationTest_TestTable_Biochem") + .Exists()); + Assert.IsTrue(dbToExtractTo.ExpectTable("ExecuteFullExtractionToDatabaseMSSqlDestinationTest_Globals_Hosp") + .Exists()); + Assert.IsTrue(dbToExtractTo.ExpectTable("ExecuteFullExtractionToDatabaseMSSqlDestinationTest_TestTable_z_fff") + .Exists()); + } - protected override Pipeline SetupPipeline() + private void CreateLookupsEtc() + { + //an extractable file + var filename = Path.Combine(TestContext.CurrentContext.WorkDirectory, "bob.txt"); + + File.WriteAllText(filename, "fishfishfish"); + var doc = new SupportingDocument(CatalogueRepository, _catalogue, "bob") { - //create a target server pointer - _extractionServer = new ExternalDatabaseServer(CatalogueRepository, "myserver",null); - _extractionServer.Server = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.Name; - _extractionServer.Username = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExplicitUsernameIfAny; - _extractionServer.Password = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExplicitPasswordIfAny; - _extractionServer.SaveToDatabase(); - - //create a pipeline - _pipeline = new Pipeline(CatalogueRepository, "Empty extraction pipeline"); - - //set the destination pipeline - var component = new PipelineComponent(CatalogueRepository, _pipeline, typeof(ExecuteFullExtractionToDatabaseMSSql), 0, "MS SQL Destination"); - var destinationArguments = component.CreateArgumentsForClassIfNotExists().ToList(); - IArgument argumentServer = destinationArguments.Single(a => a.Name == "TargetDatabaseServer"); - IArgument argumentDbNamePattern = destinationArguments.Single(a => a.Name == "DatabaseNamingPattern"); - IArgument argumentTblNamePattern = destinationArguments.Single(a => a.Name == "TableNamingPattern"); - - Assert.AreEqual("TargetDatabaseServer", argumentServer.Name); - argumentServer.SetValue(_extractionServer); - argumentServer.SaveToDatabase(); - argumentDbNamePattern.SetValue(TestDatabaseNames.Prefix + "$p_$n"); - argumentDbNamePattern.SaveToDatabase(); - argumentTblNamePattern.SetValue("$c_$d"); - argumentTblNamePattern.SaveToDatabase(); - AdjustPipelineComponentDelegate?.Invoke(component); - - var component2 = new PipelineComponent(CatalogueRepository, _pipeline, typeof(ExecuteCrossServerDatasetExtractionSource), -1, "Source"); - var arguments2 = component2.CreateArgumentsForClassIfNotExists().ToArray(); - arguments2.Single(a=>a.Name.Equals("AllowEmptyExtractions")).SetValue(false); - arguments2.Single(a => a.Name.Equals("AllowEmptyExtractions")).SaveToDatabase(); - AdjustPipelineComponentDelegate?.Invoke(component2); - - //configure the component as the destination - _pipeline.DestinationPipelineComponent_ID = component.ID; - _pipeline.SourcePipelineComponent_ID = component2.ID; - _pipeline.SaveToDatabase(); - - return _pipeline; - } + URL = new Uri($"file://{filename}"), + Extractable = true + }; + doc.SaveToDatabase(); + + //an extractable global file (comes out regardless of datasets) + var filename2 = Path.Combine(TestContext.CurrentContext.WorkDirectory, "bob2.txt"); + + File.WriteAllText(filename2, "fishfishfish2"); + var doc2 = new SupportingDocument(CatalogueRepository, _catalogue, "bob2") + { + URL = new Uri($"file://{filename2}"), + Extractable = true, + IsGlobal = true + }; + doc2.SaveToDatabase(); + + //an supplemental table in the database (not linked 
against cohort) + var tbl = CreateDataset(Database, 500, 1000, new Random(50)); + + var sql = new SupportingSQLTable(CatalogueRepository, _catalogue, "Biochem"); + var server = new ExternalDatabaseServer(CatalogueRepository, "myserver", null); + server.SetProperties(tbl.Database); + sql.ExternalDatabaseServer_ID = server.ID; + sql.SQL = $"SELECT * FROM {tbl.GetFullyQualifiedName()}"; + sql.Extractable = true; + sql.SaveToDatabase(); + + + //an supplemental (global) table in the database (not linked against cohort) + var tbl2 = CreateDataset(Database, 500, 1000, new Random(50)); + + var sql2 = new SupportingSQLTable(CatalogueRepository, _catalogue, "Hosp") + { + ExternalDatabaseServer_ID = server.ID, + SQL = $"SELECT * FROM {tbl2.GetFullyQualifiedName()}", + Extractable = true, + IsGlobal = true + }; + sql2.SaveToDatabase(); + + + var dtLookup = new DataTable(); + dtLookup.Columns.Add("C"); + dtLookup.Columns.Add("D"); + + dtLookup.Rows.Add("F", "Female"); + dtLookup.Rows.Add("M", "Male"); + dtLookup.Rows.Add("NB", "Non Binary"); + + var lookupTbl = tbl2.Database.CreateTable("z_fff", dtLookup); + + Import(lookupTbl, out _, out var columnInfos); + + _ = new Lookup(CatalogueRepository, columnInfos[0], + _columnToTransform, + columnInfos[1], + ExtractionJoinType.Left, null); + + //we need a CatalogueItem for the description in order to pick SetUp the Lookup as associated with the Catalogue + var ci = new CatalogueItem(CatalogueRepository, _catalogue, "SomeDesc") + { + ColumnInfo_ID = columnInfos[1].ID + }; + ci.SaveToDatabase(); + } + + + protected override Pipeline SetupPipeline() + { + //create a target server pointer + _extractionServer = new ExternalDatabaseServer(CatalogueRepository, "myserver", null) + { + Server = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.Name, + Username = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExplicitUsernameIfAny, + Password = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExplicitPasswordIfAny + }; + _extractionServer.SaveToDatabase(); + + //create a pipeline + _pipeline = new Pipeline(CatalogueRepository, "Empty extraction pipeline"); + + //set the destination pipeline + var component = new PipelineComponent(CatalogueRepository, _pipeline, + typeof(ExecuteFullExtractionToDatabaseMSSql), 0, "MS SQL Destination"); + var destinationArguments = component.CreateArgumentsForClassIfNotExists() + .ToList(); + var argumentServer = destinationArguments.Single(a => a.Name == "TargetDatabaseServer"); + var argumentDbNamePattern = destinationArguments.Single(a => a.Name == "DatabaseNamingPattern"); + var argumentTblNamePattern = destinationArguments.Single(a => a.Name == "TableNamingPattern"); + + Assert.AreEqual("TargetDatabaseServer", argumentServer.Name); + argumentServer.SetValue(_extractionServer); + argumentServer.SaveToDatabase(); + argumentDbNamePattern.SetValue($"{TestDatabaseNames.Prefix}$p_$n"); + argumentDbNamePattern.SaveToDatabase(); + argumentTblNamePattern.SetValue("$c_$d"); + argumentTblNamePattern.SaveToDatabase(); + AdjustPipelineComponentDelegate?.Invoke(component); + + var component2 = new PipelineComponent(CatalogueRepository, _pipeline, + typeof(ExecuteCrossServerDatasetExtractionSource), -1, "Source"); + var arguments2 = component2.CreateArgumentsForClassIfNotExists() + .ToArray(); + arguments2.Single(a => a.Name.Equals("AllowEmptyExtractions")).SetValue(false); + arguments2.Single(a => a.Name.Equals("AllowEmptyExtractions")).SaveToDatabase(); + AdjustPipelineComponentDelegate?.Invoke(component2); + + //configure the 
component as the destination + _pipeline.DestinationPipelineComponent_ID = component.ID; + _pipeline.SourcePipelineComponent_ID = component2.ID; + _pipeline.SaveToDatabase(); + + return _pipeline; } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataExport/DataExtraction/ExecutePkSynthesizerDatasetExtractionSourceTests.cs b/Rdmp.Core.Tests/DataExport/DataExtraction/ExecutePkSynthesizerDatasetExtractionSourceTests.cs index e0b88be485..3908df8180 100644 --- a/Rdmp.Core.Tests/DataExport/DataExtraction/ExecutePkSynthesizerDatasetExtractionSourceTests.cs +++ b/Rdmp.Core.Tests/DataExport/DataExtraction/ExecutePkSynthesizerDatasetExtractionSourceTests.cs @@ -10,7 +10,6 @@ using System.Text.RegularExpressions; using FAnsi.Discovery; using NUnit.Framework; -using Rdmp.Core.CommandExecution; using Rdmp.Core.CommandExecution.AtomicCommands; using Rdmp.Core.Curation.Data; using Rdmp.Core.DataExport.Data; @@ -19,230 +18,252 @@ using Rdmp.Core.DataExport.DataExtraction.UserPicks; using Rdmp.Core.DataFlowPipeline; using Rdmp.Core.Repositories.Managers; -using ReusableLibraryCode.Progress; +using Rdmp.Core.ReusableLibraryCode.Progress; using Tests.Common.Scenarios; using TypeGuesser; -namespace Rdmp.Core.Tests.DataExport.DataExtraction +namespace Rdmp.Core.Tests.DataExport.DataExtraction; + +public class ExecutePkSynthesizerDatasetExtractionSourceTests : TestsRequiringAnExtractionConfiguration { - public class ExecutePkSynthesizerDatasetExtractionSourceTests : TestsRequiringAnExtractionConfiguration + //C24D365B7C271E2C1BC884B5801C2961 + private Regex reghex = new(@"^HASHED: [A-F\d]{32}"); + + [SetUp] + protected override void SetUp() { - //C24D365B7C271E2C1BC884B5801C2961 - Regex reghex = new Regex(@"^HASHED: [A-F\d]{32}"); - - [SetUp] - protected override void SetUp() - { - base.SetUp(); + base.SetUp(); - DataExportRepository.DataExportPropertyManager.SetValue(DataExportProperty.HashingAlgorithmPattern, "CONCAT('HASHED: ',{0})"); - } + DataExportRepository.DataExportPropertyManager.SetValue(DataExportProperty.HashingAlgorithmPattern, + "CONCAT('HASHED: ',{0})"); + } - [Test] - public void Test_CatalogueItems_ExtractionInformationPrimaryKey_IsRespected() - { - var request = SetupExtractDatasetCommand("ExtractionInformationPrimaryKey_IsRespected", new[] { "DateOfBirth" }); + [Test] + public void Test_CatalogueItems_ExtractionInformationPrimaryKey_IsRespected() + { + var request = + SetupExtractDatasetCommand("ExtractionInformationPrimaryKey_IsRespected", new[] { "DateOfBirth" }); + + var source = new ExecutePkSynthesizerDatasetExtractionSource(); + source.PreInitialize(request, ThrowImmediatelyDataLoadEventListener.Quiet); + var chunk = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); + + Assert.That(chunk.PrimaryKey, Is.Not.Null); + Assert.That(chunk.Columns.Cast().ToList(), + Has.Count.EqualTo(_columnInfos.Length)); // NO new column added + Assert.That(chunk.PrimaryKey, Has.Length.EqualTo(1)); + Assert.That(chunk.PrimaryKey.First().ColumnName, Is.EqualTo("DateOfBirth")); + } - var source = new ExecutePkSynthesizerDatasetExtractionSource(); - source.PreInitialize(request, new ThrowImmediatelyDataLoadEventListener()); - var chunk = source.GetChunk(new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()); + [Test] + public void Test_CatalogueItems_ExtractionInformationMultiPrimaryKey_IsRespected() + { + var request = SetupExtractDatasetCommand("ExtractionInformationMultiPrimaryKey_IsRespected", + new[] { "PrivateID", 
"DateOfBirth" }); - Assert.That(chunk.PrimaryKey, Is.Not.Null); - Assert.That(chunk.Columns.Cast().ToList(), Has.Count.EqualTo(_columnInfos.Count())); // NO new column added - Assert.That(chunk.PrimaryKey, Has.Length.EqualTo(1)); - Assert.That(chunk.PrimaryKey.First().ColumnName, Is.EqualTo("DateOfBirth")); - } + var source = new ExecutePkSynthesizerDatasetExtractionSource(); + source.PreInitialize(request, ThrowImmediatelyDataLoadEventListener.Quiet); + var chunk = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); - [Test] - public void Test_CatalogueItems_ExtractionInformationMultiPrimaryKey_IsRespected() - { - var request = SetupExtractDatasetCommand("ExtractionInformationMultiPrimaryKey_IsRespected", new[] { "PrivateID", "DateOfBirth" }); + Assert.That(chunk.PrimaryKey, Is.Not.Null); + Assert.That(chunk.Columns.Cast().ToList(), Has.Count.EqualTo(_columnInfos.Length)); + Assert.That(chunk.PrimaryKey, Has.Length.EqualTo(2)); + Assert.That(chunk.PrimaryKey.First().ColumnName, Is.EqualTo("ReleaseID")); + } - var source = new ExecutePkSynthesizerDatasetExtractionSource(); - source.PreInitialize(request, new ThrowImmediatelyDataLoadEventListener()); - var chunk = source.GetChunk(new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()); + [Test] + public void Test_CatalogueItems_NonExtractedPrimaryKey_AreRespected() + { + var request = SetupExtractDatasetCommand("NonExtractedPrimaryKey_AreRespected", System.Array.Empty(), + new[] { "DateOfBirth" }); - Assert.That(chunk.PrimaryKey, Is.Not.Null); - Assert.That(chunk.Columns.Cast().ToList(), Has.Count.EqualTo(_columnInfos.Count())); - Assert.That(chunk.PrimaryKey, Has.Length.EqualTo(2)); - Assert.That(chunk.PrimaryKey.First().ColumnName, Is.EqualTo("ReleaseID")); - } + var source = new ExecutePkSynthesizerDatasetExtractionSource(); + source.PreInitialize(request, ThrowImmediatelyDataLoadEventListener.Quiet); + var chunk = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); - [Test] - public void Test_CatalogueItems_NonExtractedPrimaryKey_AreRespected() - { - var request = SetupExtractDatasetCommand("NonExtractedPrimaryKey_AreRespected", new string[] { }, pkColumnInfos: new [] { "DateOfBirth" }); + Assert.That(chunk.PrimaryKey, Is.Not.Null); + Assert.That(chunk.Columns.Cast().ToList(), + Has.Count.EqualTo(_columnInfos.Length + 1)); // synth PK is added + Assert.That(chunk.PrimaryKey, Has.Length.EqualTo(1)); + Assert.That(chunk.PrimaryKey.First().ColumnName, Is.EqualTo("SynthesizedPk")); - var source = new ExecutePkSynthesizerDatasetExtractionSource(); - source.PreInitialize(request, new ThrowImmediatelyDataLoadEventListener()); - var chunk = source.GetChunk(new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()); + var firstvalue = chunk.Rows[0]["SynthesizedPk"].ToString(); + Assert.IsTrue(reghex.IsMatch(firstvalue)); + } - Assert.That(chunk.PrimaryKey, Is.Not.Null); - Assert.That(chunk.Columns.Cast().ToList(), Has.Count.EqualTo(_columnInfos.Count() + 1)); // synth PK is added - Assert.That(chunk.PrimaryKey, Has.Length.EqualTo(1)); - Assert.That(chunk.PrimaryKey.First().ColumnName, Is.EqualTo("SynthesizedPk")); + [Test] + public void Test_CatalogueItems_NonExtractedPrimaryKey_MultiTable_PksAreMerged() + { + var request = SetupExtractDatasetCommand("MultiTable_PksAreMerged", System.Array.Empty(), + new[] { "DateOfBirth" }, true, true); - var firstvalue = chunk.Rows[0]["SynthesizedPk"].ToString(); - 
Assert.IsTrue(reghex.IsMatch(firstvalue)); - } + var source = new ExecutePkSynthesizerDatasetExtractionSource(); + source.PreInitialize(request, ThrowImmediatelyDataLoadEventListener.Quiet); + var chunk = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); - [Test] - public void Test_CatalogueItems_NonExtractedPrimaryKey_MultiTable_PksAreMerged() - { - var request = SetupExtractDatasetCommand("MultiTable_PksAreMerged", new string[] { }, new[] { "DateOfBirth" }, true, true); - - var source = new ExecutePkSynthesizerDatasetExtractionSource(); - source.PreInitialize(request, new ThrowImmediatelyDataLoadEventListener()); - var chunk = source.GetChunk(new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()); + Assert.That(chunk.PrimaryKey, Is.Not.Null); + Assert.That(chunk.Columns.Cast().ToList(), + Has.Count.EqualTo(_columnInfos.Length + 3)); // the "desc" column is added to the existing ones + Assert.That(chunk.PrimaryKey, Has.Length.EqualTo(1)); + Assert.That(chunk.PrimaryKey.First().ColumnName, Is.EqualTo("SynthesizedPk")); - Assert.That(chunk.PrimaryKey, Is.Not.Null); - Assert.That(chunk.Columns.Cast().ToList(), Has.Count.EqualTo(_columnInfos.Count() + 3)); // the "desc" column is added to the existing ones - Assert.That(chunk.PrimaryKey, Has.Length.EqualTo(1)); - Assert.That(chunk.PrimaryKey.First().ColumnName, Is.EqualTo("SynthesizedPk")); + var firstvalue = chunk.Rows[0]["SynthesizedPk"].ToString(); + Assert.IsTrue(reghex.IsMatch(firstvalue)); - var firstvalue = chunk.Rows[0]["SynthesizedPk"].ToString(); - Assert.IsTrue(reghex.IsMatch(firstvalue)); + Database.ExpectTable("SimpleLookup").Drop(); + Database.ExpectTable("SimpleJoin").Drop(); + } - Database.ExpectTable("SimpleLookup").Drop(); - Database.ExpectTable("SimpleJoin").Drop(); - } + [Test] + public void Test_CatalogueItems_NonExtractedPrimaryKey_LookupsOnly_IsRespected() + { + var request = SetupExtractDatasetCommand("LookupsOnly_IsRespected", System.Array.Empty(), + new[] { "DateOfBirth" }, true); - [Test] - public void Test_CatalogueItems_NonExtractedPrimaryKey_LookupsOnly_IsRespected() - { - var request = SetupExtractDatasetCommand("LookupsOnly_IsRespected", new string[] { }, pkColumnInfos: new[] { "DateOfBirth" }, withLookup: true); + var source = new ExecutePkSynthesizerDatasetExtractionSource(); + source.PreInitialize(request, ThrowImmediatelyDataLoadEventListener.Quiet); + var chunk = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); - var source = new ExecutePkSynthesizerDatasetExtractionSource(); - source.PreInitialize(request, new ThrowImmediatelyDataLoadEventListener()); - var chunk = source.GetChunk(new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()); + Assert.That(chunk.PrimaryKey, Is.Not.Null); + Assert.That(chunk.Columns.Cast().ToList(), + Has.Count.EqualTo(_columnInfos.Length + + 2)); // the "desc" column is added to the existing ones + the SynthPk + Assert.That(chunk.PrimaryKey, Has.Length.EqualTo(1)); + Assert.That(chunk.PrimaryKey.First().ColumnName, Is.EqualTo("SynthesizedPk")); - Assert.That(chunk.PrimaryKey, Is.Not.Null); - Assert.That(chunk.Columns.Cast().ToList(), Has.Count.EqualTo(_columnInfos.Count() + 2)); // the "desc" column is added to the existing ones + the SynthPk - Assert.That(chunk.PrimaryKey, Has.Length.EqualTo(1)); - Assert.That(chunk.PrimaryKey.First().ColumnName, Is.EqualTo("SynthesizedPk")); + var firstvalue = chunk.Rows[0]["SynthesizedPk"].ToString(); + 
Assert.IsTrue(reghex.IsMatch(firstvalue)); - var firstvalue = chunk.Rows[0]["SynthesizedPk"].ToString(); - Assert.IsTrue(reghex.IsMatch(firstvalue)); + Database.ExpectTable("SimpleLookup").Drop(); + } - Database.ExpectTable("SimpleLookup").Drop(); - } - - private void SetupJoin() - { - DataTable dt = new DataTable(); + private void SetupJoin() + { + var dt = new DataTable(); - dt.Columns.Add("Name"); - dt.Columns.Add("Description"); - - dt.Rows.Add(new object[] { "Dave", "Is a maniac" }); + dt.Columns.Add("Name"); + dt.Columns.Add("Description"); - var tbl = Database.CreateTable("SimpleJoin", dt, new[] { new DatabaseColumnRequest("Name", new DatabaseTypeRequest(typeof(string), 50)) { IsPrimaryKey = true } }); + dt.Rows.Add(new object[] { "Dave", "Is a maniac" }); - var lookupCata = Import(tbl); + var tbl = Database.CreateTable("SimpleJoin", dt, + new[] + { + new DatabaseColumnRequest("Name", new DatabaseTypeRequest(typeof(string), 50)) { IsPrimaryKey = true } + }); - ExtractionInformation fkEi = _catalogue.GetAllExtractionInformation(ExtractionCategory.Any).Single(n => n.GetRuntimeName() == "Name"); - ColumnInfo pk = lookupCata.GetTableInfoList(false).Single().ColumnInfos.Single(n => n.GetRuntimeName() == "Name"); - - new JoinInfo(CatalogueRepository,fkEi.ColumnInfo, pk, ExtractionJoinType.Left, null); + var lookupCata = Import(tbl); - var ci = new CatalogueItem(CatalogueRepository, _catalogue, "Name_2"); - var ei = new ExtractionInformation(CatalogueRepository, ci, pk, pk.Name) - { - Alias = "Name_2" - }; - ei.SaveToDatabase(); - } + var fkEi = _catalogue.GetAllExtractionInformation(ExtractionCategory.Any) + .Single(n => n.GetRuntimeName() == "Name"); + var pk = lookupCata.GetTableInfoList(false).Single().ColumnInfos.Single(n => n.GetRuntimeName() == "Name"); + + new JoinInfo(CatalogueRepository, fkEi.ColumnInfo, pk, ExtractionJoinType.Left, null); - private void SetupLookupTable() + var ci = new CatalogueItem(CatalogueRepository, _catalogue, "Name_2"); + var ei = new ExtractionInformation(CatalogueRepository, ci, pk, pk.Name) { - DataTable dt = new DataTable(); + Alias = "Name_2" + }; + ei.SaveToDatabase(); + } - dt.Columns.Add("Name"); - dt.Columns.Add("Description"); + private void SetupLookupTable() + { + var dt = new DataTable(); - dt.Rows.Add(new object[] { "Dave", "Is a maniac" }); - - var tbl = Database.CreateTable("SimpleLookup", dt, new[] { new DatabaseColumnRequest("Name", new DatabaseTypeRequest(typeof(string), 50)) }); + dt.Columns.Add("Name"); + dt.Columns.Add("Description"); - var lookupCata = Import(tbl); + dt.Rows.Add(new object[] { "Dave", "Is a maniac" }); - ExtractionInformation fkEi = _catalogue.GetAllExtractionInformation(ExtractionCategory.Any).Single(n => n.GetRuntimeName() == "Name"); - ColumnInfo pk = lookupCata.GetTableInfoList(false).Single().ColumnInfos.Single(n => n.GetRuntimeName() == "Name"); + var tbl = Database.CreateTable("SimpleLookup", dt, + new[] { new DatabaseColumnRequest("Name", new DatabaseTypeRequest(typeof(string), 50)) }); - ColumnInfo descLine1 = lookupCata.GetTableInfoList(false).Single().ColumnInfos.Single(n => n.GetRuntimeName() == "Description"); + var lookupCata = Import(tbl); - var cmd = new ExecuteCommandCreateLookup(CatalogueRepository, fkEi, descLine1, pk, null, true); - cmd.Execute(); - } + var fkEi = _catalogue.GetAllExtractionInformation(ExtractionCategory.Any) + .Single(n => n.GetRuntimeName() == "Name"); + var pk = lookupCata.GetTableInfoList(false).Single().ColumnInfos.Single(n => n.GetRuntimeName() == "Name"); - private 
ExtractDatasetCommand SetupExtractDatasetCommand(string testTableName, string[] pkExtractionColumns, string[] pkColumnInfos = null, bool withLookup = false, bool withJoin = false) - { - DataTable dt = new DataTable(); + var descLine1 = lookupCata.GetTableInfoList(false).Single().ColumnInfos + .Single(n => n.GetRuntimeName() == "Description"); + + var cmd = new ExecuteCommandCreateLookup(CatalogueRepository, fkEi, descLine1, pk, null, true); + cmd.Execute(); + } + + private ExtractDatasetCommand SetupExtractDatasetCommand(string testTableName, string[] pkExtractionColumns, + string[] pkColumnInfos = null, bool withLookup = false, bool withJoin = false) + { + var dt = new DataTable(); - dt.Columns.Add("PrivateID"); - dt.Columns.Add("Name"); - dt.Columns.Add("DateOfBirth"); + dt.Columns.Add("PrivateID"); + dt.Columns.Add("Name"); + dt.Columns.Add("DateOfBirth"); - if (pkColumnInfos != null) - dt.PrimaryKey = - dt.Columns.Cast().Where(col => pkColumnInfos.Contains(col.ColumnName)).ToArray(); + if (pkColumnInfos != null) + dt.PrimaryKey = + dt.Columns.Cast().Where(col => pkColumnInfos.Contains(col.ColumnName)).ToArray(); - dt.Rows.Add(new object[] { _cohortKeysGenerated.Keys.First(), "Dave", "2001-01-01" }); + dt.Rows.Add(new object[] { _cohortKeysGenerated.Keys.First(), "Dave", "2001-01-01" }); - var tbl = Database.CreateTable(testTableName, - dt, - new[] { new DatabaseColumnRequest("Name", new DatabaseTypeRequest(typeof(string), 50))}); + var tbl = Database.CreateTable(testTableName, + dt, + new[] { new DatabaseColumnRequest("Name", new DatabaseTypeRequest(typeof(string), 50)) }); - _catalogue = Import(tbl, out var tableInfo, out var columnInfos, out var cataItems, out var extractionInformations); + _catalogue = Import(tbl, out var tableInfo, out var columnInfos, out var cataItems, + out var extractionInformations); - ExtractionInformation privateID = extractionInformations.First(e => e.GetRuntimeName().Equals("PrivateID")); - privateID.IsExtractionIdentifier = true; - privateID.SaveToDatabase(); + var privateID = extractionInformations.First(e => e.GetRuntimeName().Equals("PrivateID")); + privateID.IsExtractionIdentifier = true; + privateID.SaveToDatabase(); - if (withLookup) - SetupLookupTable(); + if (withLookup) + SetupLookupTable(); - if (withJoin) - SetupJoin(); + if (withJoin) + SetupJoin(); - _catalogue.ClearAllInjections(); - extractionInformations = _catalogue.GetAllExtractionInformation(ExtractionCategory.Any); + _catalogue.ClearAllInjections(); + extractionInformations = _catalogue.GetAllExtractionInformation(ExtractionCategory.Any); - foreach (var pkExtractionColumn in pkExtractionColumns) - { - ExtractionInformation column = extractionInformations.First(e => e.GetRuntimeName().Equals(pkExtractionColumn)); - column.IsPrimaryKey = true; - column.SaveToDatabase(); - } + foreach (var pkExtractionColumn in pkExtractionColumns) + { + var column = extractionInformations.First(e => e.GetRuntimeName().Equals(pkExtractionColumn)); + column.IsPrimaryKey = true; + column.SaveToDatabase(); + } - SetupDataExport(testTableName, _catalogue, - out var configuration, out var extractableDataSet, out var project); + SetupDataExport(testTableName, _catalogue, + out var configuration, out var extractableDataSet, out var project); - configuration.Cohort_ID = _extractableCohort.ID; - configuration.SaveToDatabase(); + configuration.Cohort_ID = _extractableCohort.ID; + configuration.SaveToDatabase(); - return new ExtractDatasetCommand( configuration, new ExtractableDatasetBundle(extractableDataSet)); - } 
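All of the pk-synthesizer tests above drive the source in the same way; a minimal sketch of that call sequence, using only names that appear in the tests themselves (the request object comes from the SetupExtractDatasetCommand helper):

    var source = new ExecutePkSynthesizerDatasetExtractionSource();
    source.PreInitialize(request, ThrowImmediatelyDataLoadEventListener.Quiet);
    var chunk = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken());

    // The assertions then inspect chunk.PrimaryKey: when real key columns are marked for
    // extraction they are reported as-is (e.g. ReleaseID / DateOfBirth); when only
    // non-extracted ColumnInfo keys exist, an extra "SynthesizedPk" column is appended
    // to the chunk and becomes its sole primary key.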
+ return new ExtractDatasetCommand(configuration, new ExtractableDatasetBundle(extractableDataSet)); + } - private void SetupDataExport(string testDbName, ICatalogue catalogue, out ExtractionConfiguration extractionConfiguration, out IExtractableDataSet extractableDataSet, out IProject project) - { - extractableDataSet = new ExtractableDataSet(DataExportRepository, catalogue); + private void SetupDataExport(string testDbName, ICatalogue catalogue, + out ExtractionConfiguration extractionConfiguration, out IExtractableDataSet extractableDataSet, + out IProject project) + { + extractableDataSet = new ExtractableDataSet(DataExportRepository, catalogue); - project = new Project(DataExportRepository, testDbName); - project.ProjectNumber = 1; + project = new Project(DataExportRepository, testDbName) + { + ProjectNumber = 1 + }; - Directory.CreateDirectory(ProjectDirectory); - project.ExtractionDirectory = ProjectDirectory; + Directory.CreateDirectory(ProjectDirectory); + project.ExtractionDirectory = ProjectDirectory; - project.SaveToDatabase(); + project.SaveToDatabase(); - extractionConfiguration = new ExtractionConfiguration(DataExportRepository, project); - extractionConfiguration.AddDatasetToConfiguration(extractableDataSet); + extractionConfiguration = new ExtractionConfiguration(DataExportRepository, project); + extractionConfiguration.AddDatasetToConfiguration(extractableDataSet); - foreach (var ei in _catalogue.GetAllExtractionInformation(ExtractionCategory.Supplemental)) - { - extractionConfiguration.AddColumnToExtraction(extractableDataSet, ei); - } - } + foreach (var ei in _catalogue.GetAllExtractionInformation(ExtractionCategory.Supplemental)) + extractionConfiguration.AddColumnToExtraction(extractableDataSet, ei); } } \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataExport/DataExtraction/ExtractionSubdirectoryPatternTests.cs b/Rdmp.Core.Tests/DataExport/DataExtraction/ExtractionSubdirectoryPatternTests.cs index 9875013926..f8fa30df85 100644 --- a/Rdmp.Core.Tests/DataExport/DataExtraction/ExtractionSubdirectoryPatternTests.cs +++ b/Rdmp.Core.Tests/DataExport/DataExtraction/ExtractionSubdirectoryPatternTests.cs @@ -5,96 +5,95 @@ // You should have received a copy of the GNU General Public License along with RDMP. If not, see . 
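The tests in the next file exercise the ExtractionSubdirectoryPattern tokens documented in the comment block further down ($c configuration name, $i configuration ID, $d dataset name, $a dataset acronym, $n dataset ID). A worked illustration using the example values from that comment; the resolved path is an assumption for illustration only, the patterns actually asserted are in Test_ValidPaths:

    var dest = new ExecuteDatasetExtractionFlatFileDestination { ExtractionSubdirectoryPattern = "$c/$d" };
    // With a configuration named "Cases" and a dataset named "Prescribing", GetDirectoryFor(cmd)
    // would be expected to resolve to a .../Cases/Prescribing subdirectory of the extraction directory.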
using System; -using System.Collections.Generic; -using System.Linq; -using System.Text; -using Moq; using NUnit.Framework; using Rdmp.Core.DataExport.Data; using Rdmp.Core.DataExport.DataExtraction.Commands; using Rdmp.Core.DataExport.DataExtraction.Pipeline.Destinations; using Rdmp.Core.DataExport.DataExtraction.UserPicks; -using ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Checks; using Tests.Common; -namespace Rdmp.Core.Tests.DataExport.DataExtraction +namespace Rdmp.Core.Tests.DataExport.DataExtraction; + +internal class ExtractionSubdirectoryPatternTests : UnitTests { - class ExtractionSubdirectoryPatternTests : UnitTests + [Test] + public void Test_NoRelativePaths() { - - [Test] - public void Test_NoRelativePaths() + var dest = new ExecuteDatasetExtractionFlatFileDestination { - var dest = new ExecuteDatasetExtractionFlatFileDestination(); + ExtractionSubdirectoryPattern = "../../troll" + }; - dest.ExtractionSubdirectoryPattern = "../../troll"; + var ex = Assert.Throws(() => dest.Check(ThrowImmediatelyCheckNotifier.Quiet)); + StringAssert.Contains("ExtractionSubdirectoryPattern cannot contain dots", ex.Message); + } - var ex = Assert.Throws(()=>dest.Check(new ThrowImmediatelyCheckNotifier())); - StringAssert.Contains("ExtractionSubdirectoryPattern cannot contain dots",ex.Message); - } + [TestCase("bad")] + [TestCase("$n")] + [TestCase("$d")] + [TestCase("$a")] + [TestCase("$n")] + public void Test_NoConfigToken(string badString) + { + var dest = new ExecuteDatasetExtractionFlatFileDestination + { + ExtractionSubdirectoryPattern = badString + }; + + var ex = Assert.Throws(() => dest.Check(ThrowImmediatelyCheckNotifier.Quiet)); + StringAssert.Contains("ExtractionSubdirectoryPattern must contain a Configuration element", ex.Message); + } - [TestCase("bad")] - [TestCase("$n")] - [TestCase("$d")] - [TestCase("$a")] - [TestCase("$n")] - public void Test_NoConfigToken(string badString) + [TestCase("$c/fff")] + [TestCase("$i")] + public void Test_NoDatasetToken(string badString) + { + var dest = new ExecuteDatasetExtractionFlatFileDestination { - var dest = new ExecuteDatasetExtractionFlatFileDestination(); + ExtractionSubdirectoryPattern = badString + }; - dest.ExtractionSubdirectoryPattern = badString; + var ex = Assert.Throws(() => dest.Check(ThrowImmediatelyCheckNotifier.Quiet)); + StringAssert.Contains("ExtractionSubdirectoryPattern must contain a Dataset element", ex.Message); + } - var ex = Assert.Throws(()=>dest.Check(new ThrowImmediatelyCheckNotifier())); - StringAssert.Contains("ExtractionSubdirectoryPattern must contain a Configuration element",ex.Message); - } + /* + $c - Configuration Name (e.g. 'Cases') + $i - Configuration ID (e.g. 459) + $d - Dataset name (e.g. 'Prescribing') + $a - Dataset acronym (e.g. 'Presc') + $n - Dataset ID (e.g. 
459) + */ + + + [TestCase("$c/$a", "/AAA/C")] + [TestCase("$c/$d", "/AAA/BBB")] + [TestCase("$c/$n", "/AAA/\\d+")] + [TestCase("$i/$a", "/\\d+/C")] + [TestCase("$i/$d", "/\\d+/BBB")] + [TestCase("$i/$n", "/\\d+/\\d+")] + public void Test_ValidPaths(string goodString, string pattern) + { + var sds = WhenIHaveA(); - [TestCase("$c/fff")] - [TestCase("$i")] - public void Test_NoDatasetToken(string badString) - { - var dest = new ExecuteDatasetExtractionFlatFileDestination(); - - dest.ExtractionSubdirectoryPattern = badString; - - var ex = Assert.Throws(()=>dest.Check(new ThrowImmediatelyCheckNotifier())); - StringAssert.Contains("ExtractionSubdirectoryPattern must contain a Dataset element",ex.Message); - } - - /* - $c - Configuration Name (e.g. 'Cases') - $i - Configuration ID (e.g. 459) - $d - Dataset name (e.g. 'Prescribing') - $a - Dataset acronym (e.g. 'Presc') - $n - Dataset ID (e.g. 459) - */ - - - [TestCase("$c/$a", "/AAA/C")] - [TestCase("$c/$d", "/AAA/BBB")] - [TestCase("$c/$n", "/AAA/\\d+")] - [TestCase("$i/$a", "/\\d+/C")] - [TestCase("$i/$d", "/\\d+/BBB")] - [TestCase("$i/$n", "/\\d+/\\d+")] - public void Test_ValidPaths(string goodString, string pattern) - { - var sds = WhenIHaveA(); - + sds.ExtractionConfiguration.Project.ExtractionDirectory = TestContext.CurrentContext.WorkDirectory; + sds.ExtractionConfiguration.Name = "AAA"; + sds.ExtractableDataSet.Catalogue.Name = "BBB"; + sds.ExtractableDataSet.Catalogue.Acronym = "C"; + - sds.ExtractionConfiguration.Project.ExtractionDirectory = TestContext.CurrentContext.WorkDirectory; - sds.ExtractionConfiguration.Name = "AAA"; - sds.ExtractableDataSet.Catalogue.Name = "BBB"; - sds.ExtractableDataSet.Catalogue.Acronym = "C"; - - - var cmd = new ExtractDatasetCommand(sds.ExtractionConfiguration, new ExtractableDatasetBundle(sds.ExtractableDataSet)); - var dest = new ExecuteDatasetExtractionFlatFileDestination(); + var cmd = new ExtractDatasetCommand(sds.ExtractionConfiguration, + new ExtractableDatasetBundle(sds.ExtractableDataSet)); + var dest = new ExecuteDatasetExtractionFlatFileDestination + { + ExtractionSubdirectoryPattern = goodString + }; - dest.ExtractionSubdirectoryPattern = goodString; - Assert.DoesNotThrow(()=>dest.Check(new ThrowImmediatelyCheckNotifier())); + Assert.DoesNotThrow(() => dest.Check(ThrowImmediatelyCheckNotifier.Quiet)); - var answer = dest.GetDirectoryFor(cmd); - StringAssert.IsMatch(pattern,answer.FullName.Replace('\\','/')); - } + var answer = dest.GetDirectoryFor(cmd); + StringAssert.IsMatch(pattern, answer.FullName.Replace('\\', '/')); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataExport/DataExtraction/HashedDataExtractionTests.cs b/Rdmp.Core.Tests/DataExport/DataExtraction/HashedDataExtractionTests.cs index 28f2947d08..eb5a9e7875 100644 --- a/Rdmp.Core.Tests/DataExport/DataExtraction/HashedDataExtractionTests.cs +++ b/Rdmp.Core.Tests/DataExport/DataExtraction/HashedDataExtractionTests.cs @@ -10,54 +10,52 @@ using Rdmp.Core.DataExport.DataExtraction.Pipeline; using Rdmp.Core.DataExport.DataExtraction.Pipeline.Destinations; using Rdmp.Core.QueryBuilding; -using ReusableLibraryCode.Progress; +using Rdmp.Core.ReusableLibraryCode.Progress; using Tests.Common.Scenarios; -namespace Rdmp.Core.Tests.DataExport.DataExtraction +namespace Rdmp.Core.Tests.DataExport.DataExtraction; + +public class HashedDataExtractionTests : TestsRequiringAnExtractionConfiguration { - public class HashedDataExtractionTests : TestsRequiringAnExtractionConfiguration + [Test] + public void ExtractNormally() { - 
[Test] - public void ExtractNormally() + AdjustPipelineComponentDelegate = p => { - AdjustPipelineComponentDelegate = (p) => + if (p.Class.Contains("ExecuteDatasetExtractionSource")) { - if (p.Class.Contains("ExecuteDatasetExtractionSource")) - { - var hashJoinsArg = p.PipelineComponentArguments.Single(a => a.Name.Equals("UseHashJoins")); - hashJoinsArg.SetValue(true); - hashJoinsArg.SaveToDatabase(); - } - }; - - ExtractionPipelineUseCase execute; - IExecuteDatasetExtractionDestination result; + var hashJoinsArg = p.PipelineComponentArguments.Single(a => a.Name.Equals("UseHashJoins")); + hashJoinsArg.SetValue(true); + hashJoinsArg.SaveToDatabase(); + } + }; - _catalogue.Name = "TestTable"; - _catalogue.SaveToDatabase(); - _request.DatasetBundle.DataSet.RevertToDatabaseState(); + _catalogue.Name = "TestTable"; + _catalogue.SaveToDatabase(); + _request.DatasetBundle.DataSet.RevertToDatabaseState(); - Assert.AreEqual(1, _request.ColumnsToExtract.Count(c => c.IsExtractionIdentifier)); - var listener = new ToMemoryDataLoadEventListener(true); + Assert.AreEqual(1, _request.ColumnsToExtract.Count(c => c.IsExtractionIdentifier)); + var listener = new ToMemoryDataLoadEventListener(true); - base.Execute(out execute,out result,listener); + Execute(out _, out var result, listener); - var messages = - listener.EventsReceivedBySender.SelectMany(m => m.Value) - .Where(m=>m.ProgressEventType == ProgressEventType.Information && m.Message.Contains("/*Decided on extraction SQL:*/")) - .ToArray(); + var messages = + listener.EventsReceivedBySender.SelectMany(m => m.Value) + .Where(m => m.ProgressEventType == ProgressEventType.Information && + m.Message.Contains("/*Decided on extraction SQL:*/")) + .ToArray(); - Assert.AreEqual(1,messages.Length,"Expected a message about what the final extraction SQL was"); - Assert.IsTrue(messages[0].Message.Contains(" HASH JOIN "), "expected use of hash matching was not reported by ExecuteDatasetExtractionSource in the SQL actually executed"); + Assert.AreEqual(1, messages.Length, "Expected a message about what the final extraction SQL was"); + Assert.IsTrue(messages[0].Message.Contains(" HASH JOIN "), + "expected use of hash matching was not reported by ExecuteDatasetExtractionSource in the SQL actually executed"); - var r = (ExecuteDatasetExtractionFlatFileDestination)result; + var r = (ExecuteDatasetExtractionFlatFileDestination)result; - //this should be what is in the file, the private identifier and the 1 that was put into the table in the first place (see parent class for the test data setup) - Assert.AreEqual(@"ReleaseID,Name,DateOfBirth -" + _cohortKeysGenerated[_cohortKeysGenerated.Keys.First()] + @",Dave,2001-01-01", File.ReadAllText(r.OutputFile).Trim()); + //this should be what is in the file, the private identifier and the 1 that was put into the table in the first place (see parent class for the test data setup) + Assert.AreEqual($@"ReleaseID,Name,DateOfBirth +{_cohortKeysGenerated[_cohortKeysGenerated.Keys.First()]},Dave,2001-01-01", File.ReadAllText(r.OutputFile).Trim()); - Assert.AreEqual(1, _request.QueryBuilder.SelectColumns.Count(c => c.IColumn is ReleaseIdentifierSubstitution)); - File.Delete(r.OutputFile); - } + Assert.AreEqual(1, _request.QueryBuilder.SelectColumns.Count(c => c.IColumn is ReleaseIdentifierSubstitution)); + File.Delete(r.OutputFile); } } \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataExport/DataExtraction/NormalDataExtractionTests.cs b/Rdmp.Core.Tests/DataExport/DataExtraction/NormalDataExtractionTests.cs index 
62b15efb8f..0a883fa157 100644 --- a/Rdmp.Core.Tests/DataExport/DataExtraction/NormalDataExtractionTests.cs +++ b/Rdmp.Core.Tests/DataExport/DataExtraction/NormalDataExtractionTests.cs @@ -15,60 +15,61 @@ using Rdmp.Core.QueryBuilding; using Tests.Common.Scenarios; -namespace Rdmp.Core.Tests.DataExport.DataExtraction +namespace Rdmp.Core.Tests.DataExport.DataExtraction; + +public class NormalDataExtractionTests : TestsRequiringAnExtractionConfiguration { - public class NormalDataExtractionTests:TestsRequiringAnExtractionConfiguration + [Test] + public void ExtractNormally() { - [Test] - public void ExtractNormally() - { - ExtractionPipelineUseCase execute; - IExecuteDatasetExtractionDestination result; + _catalogue.Name = "TestTable"; + _catalogue.SaveToDatabase(); + _request.DatasetBundle.DataSet.RevertToDatabaseState(); - _catalogue.Name = "TestTable"; - _catalogue.SaveToDatabase(); - _request.DatasetBundle.DataSet.RevertToDatabaseState(); + Assert.AreEqual(1, _request.ColumnsToExtract.Count(c => c.IsExtractionIdentifier)); - Assert.AreEqual(1, _request.ColumnsToExtract.Count(c => c.IsExtractionIdentifier)); - - base.Execute(out execute,out result); + Execute(out _, out var result); - var r = (ExecuteDatasetExtractionFlatFileDestination)result; + var r = (ExecuteDatasetExtractionFlatFileDestination)result; - //this should be what is in the file, the private identifier and the 1 that was put into the table in the first place (see parent class for the test data setup) - Assert.AreEqual(@"ReleaseID,Name,DateOfBirth -" + _cohortKeysGenerated[_cohortKeysGenerated.Keys.First()] + @",Dave,2001-01-01", File.ReadAllText(r.OutputFile).Trim()); + //this should be what is in the file, the private identifier and the 1 that was put into the table in the first place (see parent class for the test data setup) + Assert.AreEqual($@"ReleaseID,Name,DateOfBirth +{_cohortKeysGenerated[_cohortKeysGenerated.Keys.First()]},Dave,2001-01-01", File.ReadAllText(r.OutputFile).Trim()); - Assert.AreEqual(1, _request.QueryBuilder.SelectColumns.Count(c => c.IColumn is ReleaseIdentifierSubstitution)); - File.Delete(r.OutputFile); - } + Assert.AreEqual(1, _request.QueryBuilder.SelectColumns.Count(c => c.IColumn is ReleaseIdentifierSubstitution)); + File.Delete(r.OutputFile); + } - [Test] - public void DodgyCharactersInCatalogueName() + [Test] + public void DodgyCharactersInCatalogueName() + { + var beforeName = _catalogue.Name; + try { - string beforeName = _catalogue.Name; - try - { - _catalogue.Name = "Fish;#:::FishFish"; - Assert.IsFalse(Catalogue.IsAcceptableName(_catalogue.Name)); - _catalogue.SaveToDatabase(); - _extractableDataSet.RevertToDatabaseState(); + _catalogue.Name = "Fish;#:::FishFish"; + Assert.IsFalse(Catalogue.IsAcceptableName(_catalogue.Name)); + _catalogue.SaveToDatabase(); + _extractableDataSet.RevertToDatabaseState(); + - - var extractionDirectory = new ExtractionDirectory(TestContext.CurrentContext.WorkDirectory, _configuration); + var extractionDirectory = new ExtractionDirectory(TestContext.CurrentContext.WorkDirectory, _configuration); - - var ex = Assert.Throws(() => {var dir = extractionDirectory.GetDirectoryForDataset(_extractableDataSet); }); - Assert.AreEqual("Cannot extract dataset Fish;#:::FishFish because it points at Catalogue with an invalid name, name is invalid because:The following invalid characters were found:'#'", ex.Message); - } - finally + var ex = Assert.Throws(() => { - _catalogue.Name = beforeName; - _catalogue.SaveToDatabase(); - _extractableDataSet.RevertToDatabaseState(); 
- } + var dir = extractionDirectory.GetDirectoryForDataset(_extractableDataSet); + }); + + Assert.AreEqual( + "Cannot extract dataset Fish;#:::FishFish because it points at Catalogue with an invalid name, name is invalid because:The following invalid characters were found:'#'", + ex.Message); + } + finally + { + _catalogue.Name = beforeName; + _catalogue.SaveToDatabase(); + _extractableDataSet.RevertToDatabaseState(); } } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataExport/DataExtraction/RowPeekerTests.cs b/Rdmp.Core.Tests/DataExport/DataExtraction/RowPeekerTests.cs index 9fa2d2ccef..10b194e603 100644 --- a/Rdmp.Core.Tests/DataExport/DataExtraction/RowPeekerTests.cs +++ b/Rdmp.Core.Tests/DataExport/DataExtraction/RowPeekerTests.cs @@ -6,76 +6,73 @@ using System; using System.Data; -using Moq; +using NSubstitute; using NUnit.Framework; using Rdmp.Core.DataExport.DataExtraction.Pipeline.Sources; using Rdmp.Core.DataLoad.Engine.Pipeline.Sources; -namespace Rdmp.Core.Tests.DataExport.DataExtraction +namespace Rdmp.Core.Tests.DataExport.DataExtraction; + +[Category("Unit")] +internal class RowPeekerTests { - [Category("Unit")] - class RowPeekerTests - { [Test] public void Peeker() { - DataTable dt = new DataTable(); - dt.Columns.Add("MyCol"); - dt.Rows.Add("fish"); - dt.Rows.Add("dish"); - dt.Rows.Add("splish"); - - var mock = new Mock(); - mock.SetupSequence(m=>m.ReadOneRow()) - .Returns(dt.Rows[0]) - .Returns(dt.Rows[1]) - .Returns(dt.Rows[2]) - .Returns(()=>{return null;}); - - RowPeeker p = new RowPeeker(); - var dt2 = new DataTable(); - dt2.Columns.Add("MyCol"); - - //Reads fish and peeks dish - p.AddWhile(mock.Object,r=>(string) r["MyCol"] == "fish",dt2); - - //read one row - Assert.AreEqual(1,dt2.Rows.Count); - Assert.AreEqual("fish",dt2.Rows[0]["MyCol"]); - - var dt3 = new DataTable(); - dt3.Columns.Add("MyCol"); - - //cannot add while there is a peek stored - Assert.Throws(() => p.AddWhile(mock.Object, r => (string) r["MyCol"] == "fish", dt2)); - - //clear the peek - //unpeeks dish - p.AddPeekedRowsIfAny(dt3); - Assert.AreEqual(1,dt3.Rows.Count); - Assert.AreEqual("dish",dt3.Rows[0]["MyCol"]); - - //now we can read into dt4 but the condition is false - //Reads nothing but peeks splish - DataTable dt4 = new DataTable(); - dt4.Columns.Add("MyCol"); - p.AddWhile(mock.Object, r => (string) r["MyCol"] == "fish", dt4); - - Assert.AreEqual(0,dt4.Rows.Count); - - //we passed a null chunk and that pulls back the legit data table - var dt5 = p.AddPeekedRowsIfAny(null); - - Assert.IsNotNull(dt5); - Assert.AreEqual("splish",dt5.Rows[0]["MyCol"]); - - DataTable dt6 = new DataTable(); - dt6.Columns.Add("MyCol"); - p.AddWhile(mock.Object, r => (string) r["MyCol"] == "fish", dt6); - - Assert.AreEqual(0,dt6.Rows.Count); - } + using var dt = new DataTable(); + dt.Columns.Add("MyCol"); + dt.Rows.Add("fish"); + dt.Rows.Add("dish"); + dt.Rows.Add("splish"); + + var mock = Substitute.For(); + mock.ReadOneRow() + .Returns(dt.Rows[0], + dt.Rows[1], + dt.Rows[2], + null); + + var p = new RowPeeker(); + using var dt2 = new DataTable(); + dt2.Columns.Add("MyCol"); + + //Reads fish and peeks dish + p.AddWhile(mock, r => (string)r["MyCol"] == "fish", dt2); + + //read one row + Assert.AreEqual(1, dt2.Rows.Count); + Assert.AreEqual("fish", dt2.Rows[0]["MyCol"]); + + using var dt3 = new DataTable(); + dt3.Columns.Add("MyCol"); + + //cannot add while there is a peek stored + Assert.Throws(() => p.AddWhile(mock, r => (string)r["MyCol"] == "fish", dt2)); - - } -} + //clear the peek + //unpeeks dish 
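    // (AddPeekedRowsIfAny is expected to flush the stored peeked row, "dish", into the table
    // passed to it and clear the internal peek, which is why dt3 ends up with exactly one row
    // below and a further AddWhile call becomes legal again.)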
+ p.AddPeekedRowsIfAny(dt3); + Assert.AreEqual(1, dt3.Rows.Count); + Assert.AreEqual("dish", dt3.Rows[0]["MyCol"]); + + //now we can read into dt4 but the condition is false + //Reads nothing but peeks splish + using var dt4 = new DataTable(); + dt4.Columns.Add("MyCol"); + p.AddWhile(mock, r => (string)r["MyCol"] == "fish", dt4); + + Assert.AreEqual(0, dt4.Rows.Count); + + //we passed a null chunk and that pulls back the legit data table + var dt5 = p.AddPeekedRowsIfAny(null); + + Assert.IsNotNull(dt5); + Assert.AreEqual("splish", dt5.Rows[0]["MyCol"]); + + using var dt6 = new DataTable(); + dt6.Columns.Add("MyCol"); + p.AddWhile(mock, r => (string)r["MyCol"] == "fish", dt6); + + Assert.AreEqual(0, dt6.Rows.Count); + } +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataExport/DataExtraction/SimpleFileExtractorTests.cs b/Rdmp.Core.Tests/DataExport/DataExtraction/SimpleFileExtractorTests.cs index 8830a37652..135eb694fa 100644 --- a/Rdmp.Core.Tests/DataExport/DataExtraction/SimpleFileExtractorTests.cs +++ b/Rdmp.Core.Tests/DataExport/DataExtraction/SimpleFileExtractorTests.cs @@ -7,214 +7,228 @@ using NUnit.Framework; using Rdmp.Core.DataExport.DataExtraction.Pipeline; using Rdmp.Core.DataFlowPipeline; -using ReusableLibraryCode.Checks; -using ReusableLibraryCode.Progress; using System; using System.IO; using System.Linq; +using Rdmp.Core.ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Progress; -namespace Rdmp.Core.Tests.DataExport.DataExtraction +namespace Rdmp.Core.Tests.DataExport.DataExtraction; + +internal class SimpleFileExtractorTests { - class SimpleFileExtractorTests + private SimpleFileExtractor _extractor; + private DirectoryInfo _inDir; + private DirectoryInfo _outDir; + private DirectoryInfo _inDirSub1; + private DirectoryInfo _inDirSub2; + + [SetUp] + public void SetUp() + { + _extractor = new SimpleFileExtractor(); + + _inDir = new DirectoryInfo(Path.Combine(TestContext.CurrentContext.WorkDirectory, "In")); + _outDir = new DirectoryInfo(Path.Combine(TestContext.CurrentContext.WorkDirectory, "Out")); + + if (_inDir.Exists) + _inDir.Delete(true); + if (_outDir.Exists) + _outDir.Delete(true); + + _inDir.Create(); + _outDir.Create(); + + _extractor.LocationOfFiles = _inDir; + + File.WriteAllText(Path.Combine(_inDir.FullName, "blah.txt"), "fff"); + File.WriteAllText(Path.Combine(_inDir.FullName, "blah2.txt"), "fff"); + File.WriteAllText(Path.Combine(_inDir.FullName, "Pat1.txt"), "fff"); + + _inDirSub1 = _inDir.CreateSubdirectory("Sub1"); + _inDirSub2 = _inDir.CreateSubdirectory("Sub2"); + + File.WriteAllText(Path.Combine(_inDirSub1.FullName, "subBlah.txt"), "fff"); + File.WriteAllText(Path.Combine(_inDirSub2.FullName, "subBlah2.txt"), "fff"); + } + + [Test] + public void AllFiles() + { + _extractor.Directories = false; + _extractor.Pattern = "*"; + _extractor.OutputDirectoryName = _outDir.FullName; + _extractor.Check(ThrowImmediatelyCheckNotifier.Quiet); + + FileAssert.DoesNotExist(Path.Combine(_outDir.FullName, "blah.txt")); + FileAssert.DoesNotExist(Path.Combine(_outDir.FullName, "blah2.txt")); + + _extractor.MoveAll(_outDir, ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); + + FileAssert.Exists(Path.Combine(_outDir.FullName, "blah.txt")); + FileAssert.Exists(Path.Combine(_outDir.FullName, "blah2.txt")); + } + + [Test] + public void OneFile() + { + _extractor.Directories = false; + _extractor.Pattern = "blah.*"; + _extractor.OutputDirectoryName = _outDir.FullName; + _extractor.Check(ThrowImmediatelyCheckNotifier.Quiet); 
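    // (Reminder of the fixture created by SetUp above: In/blah.txt, In/blah2.txt, In/Pat1.txt,
    // plus In/Sub1/subBlah.txt and In/Sub2/subBlah2.txt, with Out starting empty, hence the
    // DoesNotExist checks before each MoveAll/MovePatient call.)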
+ + FileAssert.DoesNotExist(Path.Combine(_outDir.FullName, "blah.txt")); + FileAssert.DoesNotExist(Path.Combine(_outDir.FullName, "blah2.txt")); + + _extractor.MoveAll(_outDir, ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); + + FileAssert.Exists(Path.Combine(_outDir.FullName, "blah.txt")); + FileAssert.DoesNotExist(Path.Combine(_outDir.FullName, "blah2.txt")); + DirectoryAssert.DoesNotExist(Path.Combine(_outDir.FullName, "Sub1")); + DirectoryAssert.DoesNotExist(Path.Combine(_outDir.FullName, "Sub2")); + } + + [Test] + public void AllDirs() + { + _extractor.Directories = true; + _extractor.Pattern = "*"; + _extractor.OutputDirectoryName = _outDir.FullName; + _extractor.Check(ThrowImmediatelyCheckNotifier.Quiet); + + FileAssert.DoesNotExist(Path.Combine(_outDir.FullName, "blah.txt")); + FileAssert.DoesNotExist(Path.Combine(_outDir.FullName, "blah2.txt")); + + _extractor.MoveAll(_outDir, ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); + + FileAssert.DoesNotExist(Path.Combine(_outDir.FullName, "blah.txt")); + FileAssert.DoesNotExist(Path.Combine(_outDir.FullName, "blah2.txt")); + DirectoryAssert.Exists(Path.Combine(_outDir.FullName, "Sub1")); + DirectoryAssert.Exists(Path.Combine(_outDir.FullName, "Sub2")); + } + + [Test] + public void OneDir() + { + _extractor.Directories = true; + _extractor.Pattern = "*1"; + _extractor.OutputDirectoryName = _outDir.FullName; + _extractor.Check(ThrowImmediatelyCheckNotifier.Quiet); + + FileAssert.DoesNotExist(Path.Combine(_outDir.FullName, "blah.txt")); + FileAssert.DoesNotExist(Path.Combine(_outDir.FullName, "blah2.txt")); + + _extractor.MoveAll(_outDir, ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); + + FileAssert.DoesNotExist(Path.Combine(_outDir.FullName, "blah.txt")); + FileAssert.DoesNotExist(Path.Combine(_outDir.FullName, "blah2.txt")); + DirectoryAssert.Exists(Path.Combine(_outDir.FullName, "Sub1")); + DirectoryAssert.DoesNotExist(Path.Combine(_outDir.FullName, "Sub2")); + } + + [Test] + public void PatientFiles() + { + _extractor.PerPatient = true; + _extractor.Directories = false; + _extractor.Pattern = "$p.txt"; + _extractor.OutputDirectoryName = _outDir.FullName; + _extractor.Check(ThrowImmediatelyCheckNotifier.Quiet); + + FileAssert.DoesNotExist(Path.Combine(_outDir.FullName, "blah.txt")); + FileAssert.DoesNotExist(Path.Combine(_outDir.FullName, "blah2.txt")); + + _extractor.MovePatient("Pat1", "Rel1", _outDir, ThrowImmediatelyDataLoadEventListener.QuietPicky, + new GracefulCancellationToken()); + + FileAssert.DoesNotExist(Path.Combine(_outDir.FullName, "blah.txt")); + FileAssert.DoesNotExist(Path.Combine(_outDir.FullName, "blah2.txt")); + FileAssert.Exists(Path.Combine(_outDir.FullName, "Rel1.txt")); + } + + [Test] + public void PatientFileMissingOne() + { + _extractor.PerPatient = true; + _extractor.Directories = false; + _extractor.Pattern = "$p.txt"; + _extractor.OutputDirectoryName = _outDir.FullName; + _extractor.Check(ThrowImmediatelyCheckNotifier.Quiet); + + FileAssert.DoesNotExist(Path.Combine(_outDir.FullName, "blah.txt")); + FileAssert.DoesNotExist(Path.Combine(_outDir.FullName, "blah2.txt")); + + var mem = new ToMemoryDataLoadEventListener(true); + + _extractor.MovePatient("Pat1", "Rel1", _outDir, mem, new GracefulCancellationToken()); + _extractor.MovePatient("Pat2", "Rel2", _outDir, mem, new GracefulCancellationToken()); + + FileAssert.DoesNotExist(Path.Combine(_outDir.FullName, "blah.txt")); + 
FileAssert.DoesNotExist(Path.Combine(_outDir.FullName, "blah2.txt")); + FileAssert.Exists(Path.Combine(_outDir.FullName, "Rel1.txt")); + + Assert.AreEqual(ProgressEventType.Warning, mem.GetWorst()); + + StringAssert.StartsWith("No Files were found matching Pattern Pat2.txt in ", + mem.GetAllMessagesByProgressEventType()[ProgressEventType.Warning].Single().Message); + } + + [Test] + public void PatientDirs() + { + _extractor.PerPatient = true; + _extractor.Directories = true; + _extractor.Pattern = "$p"; + _extractor.OutputDirectoryName = _outDir.FullName; + _extractor.Check(ThrowImmediatelyCheckNotifier.Quiet); + + FileAssert.DoesNotExist(Path.Combine(_outDir.FullName, "blah.txt")); + FileAssert.DoesNotExist(Path.Combine(_outDir.FullName, "blah2.txt")); + + _extractor.MovePatient("Sub1", "Rel1", _outDir, ThrowImmediatelyDataLoadEventListener.QuietPicky, + new GracefulCancellationToken()); + + FileAssert.DoesNotExist(Path.Combine(_outDir.FullName, "blah.txt")); + FileAssert.DoesNotExist(Path.Combine(_outDir.FullName, "blah2.txt")); + DirectoryAssert.Exists(Path.Combine(_outDir.FullName, "Rel1")); + DirectoryAssert.DoesNotExist(Path.Combine(_outDir.FullName, "Rel2")); + DirectoryAssert.DoesNotExist(Path.Combine(_outDir.FullName, "Sub1")); + DirectoryAssert.DoesNotExist(Path.Combine(_outDir.FullName, "Sub2")); + } + + [Test] + public void PatientBothDirs() { - SimpleFileExtractor _extractor; - DirectoryInfo _inDir; - DirectoryInfo _outDir; - private DirectoryInfo _inDirSub1; - private DirectoryInfo _inDirSub2; - - [SetUp] - public void SetUp() - { - - _extractor = new SimpleFileExtractor(); - - _inDir = new DirectoryInfo(Path.Combine(TestContext.CurrentContext.WorkDirectory,"In")); - _outDir = new DirectoryInfo(Path.Combine(TestContext.CurrentContext.WorkDirectory,"Out")); - - if(_inDir.Exists) - _inDir.Delete(true); - if(_outDir.Exists) - _outDir.Delete(true); - - _inDir.Create(); - _outDir.Create(); - - _extractor.LocationOfFiles = _inDir; - - File.WriteAllText(Path.Combine(_inDir.FullName,"blah.txt"),"fff"); - File.WriteAllText(Path.Combine(_inDir.FullName,"blah2.txt"),"fff"); - File.WriteAllText(Path.Combine(_inDir.FullName,"Pat1.txt"),"fff"); - - _inDirSub1 = _inDir.CreateSubdirectory("Sub1"); - _inDirSub2 = _inDir.CreateSubdirectory("Sub2"); - - File.WriteAllText(Path.Combine(_inDirSub1.FullName,"subBlah.txt"),"fff"); - File.WriteAllText(Path.Combine(_inDirSub2.FullName,"subBlah2.txt"),"fff"); - } - - [Test] - public void AllFiles() - { - _extractor.Directories = false; - _extractor.Pattern = "*"; - _extractor.OutputDirectoryName = _outDir.FullName; - _extractor.Check(new ThrowImmediatelyCheckNotifier()); - - FileAssert.DoesNotExist(Path.Combine(_outDir.FullName,"blah.txt")); - FileAssert.DoesNotExist(Path.Combine(_outDir.FullName,"blah2.txt")); - - _extractor.MoveAll(_outDir,new ThrowImmediatelyDataLoadEventListener(),new GracefulCancellationToken()); - - FileAssert.Exists(Path.Combine(_outDir.FullName,"blah.txt")); - FileAssert.Exists(Path.Combine(_outDir.FullName,"blah2.txt")); - } - [Test] - public void OneFile() - { - _extractor.Directories = false; - _extractor.Pattern = "blah.*"; - _extractor.OutputDirectoryName = _outDir.FullName; - _extractor.Check(new ThrowImmediatelyCheckNotifier()); - - FileAssert.DoesNotExist(Path.Combine(_outDir.FullName,"blah.txt")); - FileAssert.DoesNotExist(Path.Combine(_outDir.FullName,"blah2.txt")); - - _extractor.MoveAll(_outDir,new ThrowImmediatelyDataLoadEventListener(),new GracefulCancellationToken()); - - 
FileAssert.Exists(Path.Combine(_outDir.FullName,"blah.txt")); - FileAssert.DoesNotExist(Path.Combine(_outDir.FullName,"blah2.txt")); - DirectoryAssert.DoesNotExist(Path.Combine(_outDir.FullName,"Sub1")); - DirectoryAssert.DoesNotExist(Path.Combine(_outDir.FullName,"Sub2")); - } - [Test] - public void AllDirs() - { - _extractor.Directories = true; - _extractor.Pattern = "*"; - _extractor.OutputDirectoryName = _outDir.FullName; - _extractor.Check(new ThrowImmediatelyCheckNotifier()); - - FileAssert.DoesNotExist(Path.Combine(_outDir.FullName,"blah.txt")); - FileAssert.DoesNotExist(Path.Combine(_outDir.FullName,"blah2.txt")); - - _extractor.MoveAll(_outDir,new ThrowImmediatelyDataLoadEventListener(),new GracefulCancellationToken()); - - FileAssert.DoesNotExist(Path.Combine(_outDir.FullName,"blah.txt")); - FileAssert.DoesNotExist(Path.Combine(_outDir.FullName,"blah2.txt")); - DirectoryAssert.Exists(Path.Combine(_outDir.FullName,"Sub1")); - DirectoryAssert.Exists(Path.Combine(_outDir.FullName,"Sub2")); - } - [Test] - public void OneDir() - { - _extractor.Directories = true; - _extractor.Pattern = "*1"; - _extractor.OutputDirectoryName = _outDir.FullName; - _extractor.Check(new ThrowImmediatelyCheckNotifier()); - - FileAssert.DoesNotExist(Path.Combine(_outDir.FullName,"blah.txt")); - FileAssert.DoesNotExist(Path.Combine(_outDir.FullName,"blah2.txt")); - - _extractor.MoveAll(_outDir,new ThrowImmediatelyDataLoadEventListener(),new GracefulCancellationToken()); - - FileAssert.DoesNotExist(Path.Combine(_outDir.FullName,"blah.txt")); - FileAssert.DoesNotExist(Path.Combine(_outDir.FullName,"blah2.txt")); - DirectoryAssert.Exists(Path.Combine(_outDir.FullName,"Sub1")); - DirectoryAssert.DoesNotExist(Path.Combine(_outDir.FullName,"Sub2")); - } - [Test] - public void PatientFiles() - { - _extractor.PerPatient = true; - _extractor.Directories = false; - _extractor.Pattern = "$p.txt"; - _extractor.OutputDirectoryName = _outDir.FullName; - _extractor.Check(new ThrowImmediatelyCheckNotifier()); - - FileAssert.DoesNotExist(Path.Combine(_outDir.FullName,"blah.txt")); - FileAssert.DoesNotExist(Path.Combine(_outDir.FullName,"blah2.txt")); - - _extractor.MovePatient("Pat1","Rel1",_outDir,new ThrowImmediatelyDataLoadEventListener(){ThrowOnWarning=true},new GracefulCancellationToken()); - - FileAssert.DoesNotExist(Path.Combine(_outDir.FullName,"blah.txt")); - FileAssert.DoesNotExist(Path.Combine(_outDir.FullName,"blah2.txt")); - FileAssert.Exists(Path.Combine(_outDir.FullName,"Rel1.txt")); - } - [Test] - public void PatientFileMissingOne() - { - _extractor.PerPatient = true; - _extractor.Directories = false; - _extractor.Pattern = "$p.txt"; - _extractor.OutputDirectoryName = _outDir.FullName; - _extractor.Check(new ThrowImmediatelyCheckNotifier()); - - FileAssert.DoesNotExist(Path.Combine(_outDir.FullName,"blah.txt")); - FileAssert.DoesNotExist(Path.Combine(_outDir.FullName,"blah2.txt")); - - var mem = new ToMemoryDataLoadEventListener(true); - - _extractor.MovePatient("Pat1","Rel1",_outDir,mem,new GracefulCancellationToken()); - _extractor.MovePatient("Pat2","Rel2",_outDir,mem,new GracefulCancellationToken()); - - FileAssert.DoesNotExist(Path.Combine(_outDir.FullName,"blah.txt")); - FileAssert.DoesNotExist(Path.Combine(_outDir.FullName,"blah2.txt")); - FileAssert.Exists(Path.Combine(_outDir.FullName,"Rel1.txt")); - - Assert.AreEqual(ProgressEventType.Warning,mem.GetWorst()); - - StringAssert.StartsWith("No Files were found matching Pattern Pat2.txt in 
",mem.GetAllMessagesByProgressEventType()[ProgressEventType.Warning].Single().Message); - } - [Test] - public void PatientDirs() - { - _extractor.PerPatient = true; - _extractor.Directories = true; - _extractor.Pattern = "$p"; - _extractor.OutputDirectoryName = _outDir.FullName; - _extractor.Check(new ThrowImmediatelyCheckNotifier()); - - FileAssert.DoesNotExist(Path.Combine(_outDir.FullName,"blah.txt")); - FileAssert.DoesNotExist(Path.Combine(_outDir.FullName,"blah2.txt")); - - _extractor.MovePatient("Sub1","Rel1",_outDir,new ThrowImmediatelyDataLoadEventListener(){ThrowOnWarning=true},new GracefulCancellationToken()); - - FileAssert.DoesNotExist(Path.Combine(_outDir.FullName,"blah.txt")); - FileAssert.DoesNotExist(Path.Combine(_outDir.FullName,"blah2.txt")); - DirectoryAssert.Exists(Path.Combine(_outDir.FullName,"Rel1")); - DirectoryAssert.DoesNotExist(Path.Combine(_outDir.FullName,"Rel2")); - DirectoryAssert.DoesNotExist(Path.Combine(_outDir.FullName,"Sub1")); - DirectoryAssert.DoesNotExist(Path.Combine(_outDir.FullName,"Sub2")); - } - [Test] - public void PatientBothDirs() - { - _extractor.PerPatient = true; - _extractor.Directories = true; - _extractor.Pattern = "$p"; - _extractor.OutputDirectoryName = _outDir.FullName; - _extractor.Check(new ThrowImmediatelyCheckNotifier()); - - FileAssert.DoesNotExist(Path.Combine(_outDir.FullName,"blah.txt")); - FileAssert.DoesNotExist(Path.Combine(_outDir.FullName,"blah2.txt")); - - _extractor.MovePatient("Sub1","Rel1",_outDir,new ThrowImmediatelyDataLoadEventListener(){ThrowOnWarning=true},new GracefulCancellationToken()); - _extractor.MovePatient("Sub2","Rel2",_outDir,new ThrowImmediatelyDataLoadEventListener(){ThrowOnWarning=true},new GracefulCancellationToken()); - - // does not exist - var ex = Assert.Throws(()=>_extractor.MovePatient("Sub3", "Rel3", _outDir, new ThrowImmediatelyDataLoadEventListener() { ThrowOnWarning = true }, new GracefulCancellationToken())); - Assert.AreEqual($"No Directories were found matching Pattern Sub3 in {_inDir.FullName}. 
For private identifier 'Sub3'", ex.Message); - - // if not throwing on warnings then a missing sub just passes through and is ignored - _extractor.MovePatient("Sub3", "Rel3", _outDir, new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()); - - FileAssert.DoesNotExist(Path.Combine(_outDir.FullName,"blah.txt")); - FileAssert.DoesNotExist(Path.Combine(_outDir.FullName,"blah2.txt")); - DirectoryAssert.Exists(Path.Combine(_outDir.FullName,"Rel1")); - DirectoryAssert.Exists(Path.Combine(_outDir.FullName,"Rel2")); - DirectoryAssert.DoesNotExist(Path.Combine(_outDir.FullName,"Sub1")); - DirectoryAssert.DoesNotExist(Path.Combine(_outDir.FullName,"Sub2")); - } + _extractor.PerPatient = true; + _extractor.Directories = true; + _extractor.Pattern = "$p"; + _extractor.OutputDirectoryName = _outDir.FullName; + _extractor.Check(ThrowImmediatelyCheckNotifier.Quiet); + + FileAssert.DoesNotExist(Path.Combine(_outDir.FullName, "blah.txt")); + FileAssert.DoesNotExist(Path.Combine(_outDir.FullName, "blah2.txt")); + + _extractor.MovePatient("Sub1", "Rel1", _outDir, ThrowImmediatelyDataLoadEventListener.QuietPicky, + new GracefulCancellationToken()); + _extractor.MovePatient("Sub2", "Rel2", _outDir, ThrowImmediatelyDataLoadEventListener.QuietPicky, + new GracefulCancellationToken()); + + // does not exist + var ex = Assert.Throws(() => _extractor.MovePatient("Sub3", "Rel3", _outDir, + ThrowImmediatelyDataLoadEventListener.QuietPicky, new GracefulCancellationToken())); + Assert.AreEqual( + $"No Directories were found matching Pattern Sub3 in {_inDir.FullName}. For private identifier 'Sub3'", + ex.Message); + + // if not throwing on warnings then a missing sub just passes through and is ignored + _extractor.MovePatient("Sub3", "Rel3", _outDir, ThrowImmediatelyDataLoadEventListener.Quiet, + new GracefulCancellationToken()); + + FileAssert.DoesNotExist(Path.Combine(_outDir.FullName, "blah.txt")); + FileAssert.DoesNotExist(Path.Combine(_outDir.FullName, "blah2.txt")); + DirectoryAssert.Exists(Path.Combine(_outDir.FullName, "Rel1")); + DirectoryAssert.Exists(Path.Combine(_outDir.FullName, "Rel2")); + DirectoryAssert.DoesNotExist(Path.Combine(_outDir.FullName, "Sub1")); + DirectoryAssert.DoesNotExist(Path.Combine(_outDir.FullName, "Sub2")); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataExport/DataExtraction/SupplementalExtractionResultsTest.cs b/Rdmp.Core.Tests/DataExport/DataExtraction/SupplementalExtractionResultsTest.cs index d705c0b2ee..c45b91d115 100644 --- a/Rdmp.Core.Tests/DataExport/DataExtraction/SupplementalExtractionResultsTest.cs +++ b/Rdmp.Core.Tests/DataExport/DataExtraction/SupplementalExtractionResultsTest.cs @@ -9,28 +9,26 @@ using Rdmp.Core.DataExport.Data; using Tests.Common; -namespace Rdmp.Core.Tests.DataExport.DataExtraction +namespace Rdmp.Core.Tests.DataExport.DataExtraction; + +internal class SupplementalExtractionResultsTest : DatabaseTests { - class SupplementalExtractionResultsTest:DatabaseTests + [Test] + public void TestCreating() { - [Test] - public void TestCreating() - { - var p = new Project(DataExportRepository, "MyProj"); - - var ec = new ExtractionConfiguration(DataExportRepository, p); + var p = new Project(DataExportRepository, "MyProj"); - var cata = new Catalogue(CatalogueRepository, "MyCata"); - var tbl = new SupportingSQLTable(CatalogueRepository,cata,"Some global data"); + var ec = new ExtractionConfiguration(DataExportRepository, p); - var othertbl = new SupportingSQLTable(CatalogueRepository, cata, "Some global data"); + var 
cata = new Catalogue(CatalogueRepository, "MyCata"); + var tbl = new SupportingSQLTable(CatalogueRepository, cata, "Some global data"); - var result = new SupplementalExtractionResults(DataExportRepository,ec,"select * from Globalsglba",tbl); + var othertbl = new SupportingSQLTable(CatalogueRepository, cata, "Some global data"); - Assert.IsTrue(result.IsReferenceTo(typeof(SupportingSQLTable))); - Assert.IsTrue(result.IsReferenceTo(tbl)); - Assert.IsFalse(result.IsReferenceTo(othertbl)); + var result = new SupplementalExtractionResults(DataExportRepository, ec, "select * from Globalsglba", tbl); - } + Assert.IsTrue(result.IsReferenceTo(typeof(SupportingSQLTable))); + Assert.IsTrue(result.IsReferenceTo(tbl)); + Assert.IsFalse(result.IsReferenceTo(othertbl)); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataExport/DataExtraction/TestCohortRefreshing.cs b/Rdmp.Core.Tests/DataExport/DataExtraction/TestCohortRefreshing.cs index 14f94b2226..5640033088 100644 --- a/Rdmp.Core.Tests/DataExport/DataExtraction/TestCohortRefreshing.cs +++ b/Rdmp.Core.Tests/DataExport/DataExtraction/TestCohortRefreshing.cs @@ -4,10 +4,7 @@ // RDMP is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. // You should have received a copy of the GNU General Public License along with RDMP. If not, see . -using System; using System.Linq; -using FAnsi.Discovery; -using MapsDirectlyToDatabaseTable.Versioning; using NUnit.Framework; using Rdmp.Core.CohortCommitting.Pipeline; using Rdmp.Core.CohortCommitting.Pipeline.Destinations; @@ -19,170 +16,169 @@ using Rdmp.Core.DataExport.DataExtraction.Pipeline; using Rdmp.Core.DataExport.DataExtraction.Pipeline.Destinations; using Rdmp.Core.DataFlowPipeline; -using ReusableLibraryCode.Checks; -using ReusableLibraryCode.DataAccess; -using ReusableLibraryCode.Progress; +using Rdmp.Core.MapsDirectlyToDatabaseTable.Versioning; +using Rdmp.Core.ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.DataAccess; +using Rdmp.Core.ReusableLibraryCode.Progress; using Tests.Common.Scenarios; -namespace Rdmp.Core.Tests.DataExport.DataExtraction +namespace Rdmp.Core.Tests.DataExport.DataExtraction; + +public class TestCohortRefreshing : TestsRequiringAnExtractionConfiguration { - public class TestCohortRefreshing : TestsRequiringAnExtractionConfiguration + [Test] + public void RefreshCohort() { + var pipe = SetupPipeline(); + pipe.Name = "RefreshPipe"; + pipe.SaveToDatabase(); - [Test] - public void RefreshCohort() - { - ExtractionPipelineUseCase useCase; - IExecuteDatasetExtractionDestination results; + Execute(out _, out _); + + var oldcohort = _configuration.Cohort; + + + _configuration.CohortIdentificationConfiguration_ID = + new CohortIdentificationConfiguration(RepositoryLocator.CatalogueRepository, "RefreshCohort.cs").ID; + _configuration.CohortRefreshPipeline_ID = pipe.ID; + _configuration.SaveToDatabase(); + + var engine = new CohortRefreshEngine(ThrowImmediatelyDataLoadEventListener.Quiet, _configuration); + + Assert.NotNull(engine.Request.NewCohortDefinition); + + var oldData = oldcohort.GetExternalData(); + + engine.Request.NewCohortDefinition.CohortReplacedIfAny = oldcohort; + + Assert.AreEqual(oldData.ExternalDescription, engine.Request.NewCohortDefinition.Description); + Assert.AreEqual(oldData.ExternalVersion + 1, engine.Request.NewCohortDefinition.Version); + } + + /// + /// This is a giant 
scenario test in which we create a cohort of 5 people and a dataset with a single row with 1 person in it and a result field (the basic setup for + /// TestsRequiringAnExtractionConfiguration). + /// + /// 1.We run the extraction. + /// 2.We create a cohort refresh query that pulls the 1 dude from the above single row table + /// 3.We configure a query caching server which the cohort query is setup to use so that after executing the sql to identify the person it will cache the identifier list (of 1) + /// 4.We then the ExtractionConfiguration that its refresh pipeline is a cohort query builder query and build a pipeline for executing the cic and using basic cohort destination + /// 5.We then run the refresh pipeline which should execute the cic and cache the record and commit it as a new version of cohort for the ExtractionConfiguration + /// 6.We then truncate the live table, this will result in the cic returning nobody + /// 7.Without touching the cache we run the cohort refresh pipeline again + /// + /// Thing being tested: After 7 we are confirming that the refresh failed because there was nobody identified by the query, furthermore we then test that the progress messages sent + /// included an explicit message about clearing the cache + /// + [Test] + public void RefreshCohort_WithCaching() + { + var pipe = new Pipeline(CatalogueRepository, "RefreshPipeWithCaching"); + + var source = + new PipelineComponent(CatalogueRepository, pipe, typeof(CohortIdentificationConfigurationSource), 0); + var args = source.CreateArgumentsForClassIfNotExists(); + var freezeArg = args.Single(a => a.Name.Equals("FreezeAfterSuccessfulImport")); + freezeArg.SetValue(false); + freezeArg.SaveToDatabase(); + + var dest = new PipelineComponent(CatalogueRepository, pipe, typeof(BasicCohortDestination), 0); + var argsDest = dest.CreateArgumentsForClassIfNotExists(); + var allocatorArg = argsDest.Single(a => a.Name.Equals("ReleaseIdentifierAllocator")); + allocatorArg.SetValue(null); + allocatorArg.SaveToDatabase(); + + pipe.SourcePipelineComponent_ID = source.ID; + pipe.DestinationPipelineComponent_ID = dest.ID; + pipe.SaveToDatabase(); + + Execute(out _, out _); + + var oldcohort = _configuration.Cohort; - var pipe = SetupPipeline(); - pipe.Name = "RefreshPipe"; - pipe.SaveToDatabase(); + //Create a query cache + var p = new QueryCachingPatcher(); + var queryCacheServer = new ExternalDatabaseServer(CatalogueRepository, "TestCohortRefreshing_CacheTest", p); - Execute(out useCase,out results); + var cachedb = + DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase("TestCohortRefreshing_CacheTest"); + if (cachedb.Exists()) + cachedb.Drop(); - var oldcohort = _configuration.Cohort; + new MasterDatabaseScriptExecutor(cachedb).CreateAndPatchDatabase(p, ThrowImmediatelyCheckNotifier.Quiet); + queryCacheServer.SetProperties(cachedb); + //Create a Cohort Identification configuration (query) that will identify the cohort + var cic = new CohortIdentificationConfiguration(RepositoryLocator.CatalogueRepository, "RefreshCohort.cs"); - _configuration.CohortIdentificationConfiguration_ID =new CohortIdentificationConfiguration(RepositoryLocator.CatalogueRepository, "RefreshCohort.cs").ID; + try + { + //make it use the cache + cic.QueryCachingServer_ID = queryCacheServer.ID; + cic.SaveToDatabase(); + + //give it a single table query to fetch distinct chi from test data + var agg = cic.CreateNewEmptyConfigurationForCatalogue(_catalogue, null); + + //add the sub query as the only entry in the cic (in the root 
container) + cic.CreateRootContainerIfNotExists(); + cic.RootCohortAggregateContainer.AddChild(agg, 1); + + //make the ExtractionConfiguration refresh cohort query be the cic + _configuration.CohortIdentificationConfiguration_ID = cic.ID; _configuration.CohortRefreshPipeline_ID = pipe.ID; _configuration.SaveToDatabase(); - var engine = new CohortRefreshEngine(new ThrowImmediatelyDataLoadEventListener(), _configuration); - + //get a refreshing engine + var engine = new CohortRefreshEngine(ThrowImmediatelyDataLoadEventListener.Quiet, _configuration); + engine.Execute(); + Assert.NotNull(engine.Request.NewCohortDefinition); - - var oldData = oldcohort.GetExternalData(); - engine.Request.NewCohortDefinition.CohortReplacedIfAny = oldcohort; + var oldData = oldcohort.GetExternalData(); Assert.AreEqual(oldData.ExternalDescription, engine.Request.NewCohortDefinition.Description); Assert.AreEqual(oldData.ExternalVersion + 1, engine.Request.NewCohortDefinition.Version); - } - /// - /// This is a giant scenario test in which we create a cohort of 5 people and a dataset with a single row with 1 person in it and a result field (the basic setup for - /// TestsRequiringAnExtractionConfiguration). - /// - /// 1.We run the extraction. - /// 2.We create a cohort refresh query that pulls the 1 dude from the above single row table - /// 3.We configure a query caching server which the cohort query is setup to use so that after executing the sql to identify the person it will cache the identifier list (of 1) - /// 4.We then the ExtractionConfiguration that its refresh pipeline is a cohort query builder query and build a pipeline for executing the cic and using basic cohort destination - /// 5.We then run the refresh pipeline which should execute the cic and cache the record and commit it as a new version of cohort for the ExtractionConfiguration - /// 6.We then truncate the live table, this will result in the cic returning nobody - /// 7.Without touching the cache we run the cohort refresh pipeline again - /// - /// Thing being tested: After 7 we are confirming that the refresh failed because there was nobody identified by the query, furthermore we then test that the progress messages sent - /// included an explicit message about clearing the cache - /// - [Test] - public void RefreshCohort_WithCaching() + Assert.AreNotEqual(oldcohort.CountDistinct, engine.Request.CohortCreatedIfAny.CountDistinct); + + //now nuke all data in the catalogue so the cic returns nobody (except that the identifiers are cached eh?) 
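    // (With the live table truncated the cohort identification query identifies nobody; the refresh
    // engine must not reuse the cached identifier list, so the second Execute below is expected to
    // throw with "CohortIdentificationCriteria execution resulted in an empty dataset" and to have
    // sent a "Clearing Cohort Identifier Cache" progress message, which the ToMemory listener verifies.)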
+ DataAccessPortal.ExpectDatabase(_tableInfo, DataAccessContext.InternalDataProcessing) + .ExpectTable(_tableInfo.GetRuntimeName()).Truncate(); + + var toMem = new ToMemoryDataLoadEventListener(false); + + //get a new engine + engine = new CohortRefreshEngine(toMem, _configuration); + + //execute it + var ex = Assert.Throws(() => engine.Execute()); + + Assert.IsTrue( + ex.InnerException.InnerException.Message.Contains( + "CohortIdentificationCriteria execution resulted in an empty dataset")); + + //expected this message to happen + //that it did clear the cache + Assert.AreEqual(1, + toMem.EventsReceivedBySender.SelectMany(kvp => kvp.Value) + .Count(msg => msg.Message.Equals("Clearing Cohort Identifier Cache"))); + } + finally { - ExtractionPipelineUseCase useCase; - IExecuteDatasetExtractionDestination results; - - var pipe = new Pipeline(CatalogueRepository, "RefreshPipeWithCaching"); - - var source = new PipelineComponent(CatalogueRepository, pipe, typeof (CohortIdentificationConfigurationSource), 0); - var args = source.CreateArgumentsForClassIfNotExists(); - var freezeArg = args.Single(a => a.Name.Equals("FreezeAfterSuccessfulImport")); - freezeArg.SetValue(false); - freezeArg.SaveToDatabase(); - - var dest = new PipelineComponent(CatalogueRepository, pipe, typeof (BasicCohortDestination), 0); - var argsDest = dest.CreateArgumentsForClassIfNotExists(); - var allocatorArg = argsDest.Single(a => a.Name.Equals("ReleaseIdentifierAllocator")); - allocatorArg.SetValue(null); - allocatorArg.SaveToDatabase(); - - pipe.SourcePipelineComponent_ID = source.ID; - pipe.DestinationPipelineComponent_ID = dest.ID; - pipe.SaveToDatabase(); - - Execute(out useCase, out results); - - var oldcohort = _configuration.Cohort; - - //Create a query cache - var p = new QueryCachingPatcher(); - ExternalDatabaseServer queryCacheServer = new ExternalDatabaseServer(CatalogueRepository, "TestCohortRefreshing_CacheTest", p); - - DiscoveredDatabase cachedb = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase("TestCohortRefreshing_CacheTest"); - if (cachedb.Exists()) - cachedb.Drop(); - - new MasterDatabaseScriptExecutor(cachedb).CreateAndPatchDatabase(p, new ThrowImmediatelyCheckNotifier()); - queryCacheServer.SetProperties(cachedb); - - //Create a Cohort Identification configuration (query) that will identify the cohort - CohortIdentificationConfiguration cic = new CohortIdentificationConfiguration(RepositoryLocator.CatalogueRepository, "RefreshCohort.cs"); - - try - { - //make it use the cache - cic.QueryCachingServer_ID = queryCacheServer.ID; - cic.SaveToDatabase(); - - //give it a single table query to fetch distinct chi from test data - var agg = cic.CreateNewEmptyConfigurationForCatalogue(_catalogue, null); - - //add the sub query as the only entry in the cic (in the root container) - cic.CreateRootContainerIfNotExists(); - cic.RootCohortAggregateContainer.AddChild(agg,1); - - //make the ExtractionConfiguration refresh cohort query be the cic - _configuration.CohortIdentificationConfiguration_ID = cic.ID; - _configuration.CohortRefreshPipeline_ID = pipe.ID; - _configuration.SaveToDatabase(); - - //get a refreshing engine - var engine = new CohortRefreshEngine(new ThrowImmediatelyDataLoadEventListener(), _configuration); - engine.Execute(); - - Assert.NotNull(engine.Request.NewCohortDefinition); - - var oldData = oldcohort.GetExternalData(); - - Assert.AreEqual(oldData.ExternalDescription, engine.Request.NewCohortDefinition.Description); - Assert.AreEqual(oldData.ExternalVersion + 1, 
engine.Request.NewCohortDefinition.Version); - - Assert.AreNotEqual(oldcohort.CountDistinct,engine.Request.CohortCreatedIfAny.CountDistinct); - - //now nuke all data in the catalogue so the cic returns nobody (except that the identifiers are cached eh?) - DataAccessPortal.GetInstance().ExpectDatabase(_tableInfo,DataAccessContext.InternalDataProcessing).ExpectTable(_tableInfo.GetRuntimeName()).Truncate(); - - var toMem = new ToMemoryDataLoadEventListener(false); - - //get a new engine - engine = new CohortRefreshEngine(toMem, _configuration); - - //execute it - var ex = Assert.Throws(()=>engine.Execute()); - - Assert.IsTrue(ex.InnerException.InnerException.Message.Contains("CohortIdentificationCriteria execution resulted in an empty dataset")); - - //expected this message to happen - //that it did clear the cache - Assert.AreEqual(1,toMem.EventsReceivedBySender.SelectMany(kvp=>kvp.Value).Count(msg=>msg.Message.Equals("Clearing Cohort Identifier Cache"))); - - - } - finally - { - //make the ExtractionConfiguration not use the cic query - _configuration.CohortRefreshPipeline_ID = null; - _configuration.CohortIdentificationConfiguration_ID = null; - _configuration.SaveToDatabase(); - - //delete the cic query - cic.QueryCachingServer_ID = null; - cic.SaveToDatabase(); - cic.DeleteInDatabase(); - - //delete the caching database - queryCacheServer.DeleteInDatabase(); - cachedb.Drop(); - } + //make the ExtractionConfiguration not use the cic query + _configuration.CohortRefreshPipeline_ID = null; + _configuration.CohortIdentificationConfiguration_ID = null; + _configuration.SaveToDatabase(); + + //delete the cic query + cic.QueryCachingServer_ID = null; + cic.SaveToDatabase(); + cic.DeleteInDatabase(); + + //delete the caching database + queryCacheServer.DeleteInDatabase(); + cachedb.Drop(); } } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataExport/ExtractionConfigurationTest.cs b/Rdmp.Core.Tests/DataExport/ExtractionConfigurationTest.cs index ddc0e62c4d..3ac5f49f3b 100644 --- a/Rdmp.Core.Tests/DataExport/ExtractionConfigurationTest.cs +++ b/Rdmp.Core.Tests/DataExport/ExtractionConfigurationTest.cs @@ -9,75 +9,56 @@ using Rdmp.Core.DataExport.Data; using Tests.Common; -namespace Rdmp.Core.Tests.DataExport +namespace Rdmp.Core.Tests.DataExport; + +[Category("Database")] +public class ExtractionConfigurationTest : DatabaseTests { - [Category("Database")] - public class ExtractionConfigurationTest : DatabaseTests + [Test] + public void ExtractableColumnTest() { - [Test] - public void ExtractableColumnTest() - { - ExtractableDataSet dataSet =null; - ExtractionConfiguration configuration = null; - Project project = null; - - Catalogue cata = null; - CatalogueItem cataItem = null; - ColumnInfo column = null; - TableInfo table = null; - - ExtractionInformation extractionInformation=null; - ExtractableColumn extractableColumn=null; - - try - { - //setup catalogue side of things - cata = new Catalogue(CatalogueRepository, "unit_test_ExtractableColumnTest_Cata"); - cataItem = new CatalogueItem(CatalogueRepository, cata, "unit_test_ExtractableColumnTest_CataItem"); - table = new TableInfo(CatalogueRepository, "DaveTable"); - column = new ColumnInfo(CatalogueRepository, "Name", "string", table); - cataItem.SetColumnInfo(column); - - extractionInformation = new ExtractionInformation(CatalogueRepository, cataItem, column, "Hashme(Name)"); - - //setup extractor side of things - dataSet = new ExtractableDataSet(DataExportRepository, cata); - project = new Project(DataExportRepository, 
"unit_test_ExtractableColumnTest_Proj"); + ExtractableDataSet dataSet = null; + ExtractionConfiguration configuration = null; + Project project = null; - configuration = new ExtractionConfiguration(DataExportRepository, project); + Catalogue cata = null; + CatalogueItem cataItem = null; + ColumnInfo column = null; + TableInfo table = null; - extractableColumn = new ExtractableColumn(DataExportRepository, dataSet, configuration, extractionInformation, 0, "Hashme2(Name)"); - Assert.AreEqual(configuration.GetAllExtractableColumnsFor(dataSet).Length, 1); - } - finally - { - if (extractionInformation != null) - extractionInformation.DeleteInDatabase(); + ExtractionInformation extractionInformation = null; - if (column != null) - column.DeleteInDatabase(); - - if (table != null) - table.DeleteInDatabase(); - - if (cataItem != null) - cataItem.DeleteInDatabase(); - - if (configuration != null) - configuration.DeleteInDatabase(); - - if (project != null) - project.DeleteInDatabase(); + try + { + //setup catalogue side of things + cata = new Catalogue(CatalogueRepository, "unit_test_ExtractableColumnTest_Cata"); + cataItem = new CatalogueItem(CatalogueRepository, cata, "unit_test_ExtractableColumnTest_CataItem"); + table = new TableInfo(CatalogueRepository, "DaveTable"); + column = new ColumnInfo(CatalogueRepository, "Name", "string", table); + cataItem.SetColumnInfo(column); - if (dataSet != null) - dataSet.DeleteInDatabase(); + extractionInformation = new ExtractionInformation(CatalogueRepository, cataItem, column, "Hashme(Name)"); - if (cata != null) - cata.DeleteInDatabase(); + //setup extractor side of things + dataSet = new ExtractableDataSet(DataExportRepository, cata); + project = new Project(DataExportRepository, "unit_test_ExtractableColumnTest_Proj"); + configuration = new ExtractionConfiguration(DataExportRepository, project); - - } + _ = new ExtractableColumn(DataExportRepository, dataSet, configuration, extractionInformation, 0, + "Hashme2(Name)"); + Assert.AreEqual(configuration.GetAllExtractableColumnsFor(dataSet).Length, 1); + } + finally + { + extractionInformation?.DeleteInDatabase(); + column?.DeleteInDatabase(); + table?.DeleteInDatabase(); + cataItem?.DeleteInDatabase(); + configuration?.DeleteInDatabase(); + project?.DeleteInDatabase(); + dataSet?.DeleteInDatabase(); + cata?.DeleteInDatabase(); } } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataExport/ImportFileTests.cs b/Rdmp.Core.Tests/DataExport/ImportFileTests.cs index e27f245750..34bad9d1b6 100644 --- a/Rdmp.Core.Tests/DataExport/ImportFileTests.cs +++ b/Rdmp.Core.Tests/DataExport/ImportFileTests.cs @@ -12,120 +12,115 @@ using Rdmp.Core.DataFlowPipeline; using Rdmp.Core.DataFlowPipeline.Requirements; using Rdmp.Core.DataLoad.Modules.DataFlowSources; -using ReusableLibraryCode.Checks; -using ReusableLibraryCode.Progress; +using Rdmp.Core.ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Progress; using Tests.Common; -namespace Rdmp.Core.Tests.DataExport +namespace Rdmp.Core.Tests.DataExport; + +public class ImportFileTests : DatabaseTests { - public class ImportFileTests:DatabaseTests + [Test] + public void ImportFile() { - [Test] - public void ImportFile() - { - string file = Path.GetTempFileName(); - string databaseName = TestDatabaseNames.GetConsistentName(GetType().Name); - - try + var file = Path.GetTempFileName(); + var databaseName = TestDatabaseNames.GetConsistentName(GetType().Name); + + try + { + using (var sw = new StreamWriter(file)) { - using (var sw = new 
StreamWriter(file)) - { - sw.WriteLine("Name,Surname,Age,Healthiness,DateOfImagining"); - sw.WriteLine("Frank,\"Mortus,M\",41,0.00,2005-12-01"); - sw.WriteLine("Bob,Balie,12,1,2013-06-11"); - sw.WriteLine("Munchen,'Smith',43,0.3,2002-01-01"); - sw.WriteLine("Carnage,Here there is,29,0.91,2005-01-01"); - sw.WriteLine("Nathan,Crumble,51,0.78,2005-01-01"); - sw.Close(); - } - - var source = new DelimitedFlatFileDataFlowSource - { - Separator = ",", - IgnoreBlankLines = true, - MakeHeaderNamesSane = true, - StronglyTypeInputBatchSize = -1, - StronglyTypeInput = true - }; - - source.PreInitialize(new FlatFileToLoad(new FileInfo(file)), new ThrowImmediatelyDataLoadEventListener());//this is the file we want to load - source.Check(new ThrowImmediatelyCheckNotifier()); - - var server = DiscoveredServerICanCreateRandomDatabasesAndTablesOn; - var database = server.ExpectDatabase(databaseName); - - //recreate it - database.Create(true); - - server.ChangeDatabase(databaseName); - - var dt = source.GetChunk(new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()); - - var tbl = database.CreateTable(dt.TableName, dt); - string tableName = tbl.GetRuntimeName(); - - source.Dispose(new ThrowImmediatelyDataLoadEventListener(), null); - - var tablesInDatabase = server.ExpectDatabase(databaseName).DiscoverTables(false); - - //there should be 1 table in this database - Assert.AreEqual(1, tablesInDatabase.Length); - - //it should be called the same as the file loaded - Assert.AreEqual(Path.GetFileNameWithoutExtension(file), tablesInDatabase[0].GetRuntimeName()); - - Assert.AreEqual("varchar(7)", GetColumnType(database, tableName, "Name")); - Assert.AreEqual("varchar(13)", GetColumnType(database, tableName, "Surname")); - Assert.AreEqual("int", GetColumnType(database, tableName, "Age")); - Assert.AreEqual("decimal(3,2)", GetColumnType(database, tableName, "Healthiness")); - Assert.AreEqual("datetime2", GetColumnType(database, tableName, "DateOfImagining")); - - using (var con = (SqlConnection) server.GetConnection()) - { - con.Open(); - - SqlCommand cmdReadData = - new SqlCommand( - "Select * from " + tablesInDatabase[0].GetRuntimeName() + " WHERE Name='Frank'", con); - SqlDataReader r = cmdReadData.ExecuteReader(); - - //expected 1 record only - Assert.IsTrue(r.Read()); - - Assert.AreEqual("Frank", r["Name"]); - Assert.AreEqual("Mortus,M", r["Surname"]); - Assert.AreEqual(41, r["Age"]); - Assert.AreEqual(0.0f, r["Healthiness"]); - Assert.AreEqual(new DateTime(2005, 12, 1), r["DateOfImagining"]); - - //and no more records - Assert.IsFalse(r.Read()); - - con.Close(); - } - - server.ExpectDatabase(databaseName).Drop(); - Assert.IsFalse(server.ExpectDatabase(databaseName).Exists()); + sw.WriteLine("Name,Surname,Age,Healthiness,DateOfImagining"); + sw.WriteLine("Frank,\"Mortus,M\",41,0.00,2005-12-01"); + sw.WriteLine("Bob,Balie,12,1,2013-06-11"); + sw.WriteLine("Munchen,'Smith',43,0.3,2002-01-01"); + sw.WriteLine("Carnage,Here there is,29,0.91,2005-01-01"); + sw.WriteLine("Nathan,Crumble,51,0.78,2005-01-01"); + sw.Close(); } - finally + + var source = new DelimitedFlatFileDataFlowSource + { + Separator = ",", + IgnoreBlankLines = true, + MakeHeaderNamesSane = true, + StronglyTypeInputBatchSize = -1, + StronglyTypeInput = true + }; + + source.PreInitialize(new FlatFileToLoad(new FileInfo(file)), + ThrowImmediatelyDataLoadEventListener.Quiet); //this is the file we want to load + source.Check(ThrowImmediatelyCheckNotifier.Quiet); + + var server = 
DiscoveredServerICanCreateRandomDatabasesAndTablesOn; + var database = server.ExpectDatabase(databaseName); + + //recreate it + database.Create(true); + + server.ChangeDatabase(databaseName); + + var dt = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); + + var tbl = database.CreateTable(dt.TableName, dt); + var tableName = tbl.GetRuntimeName(); + + source.Dispose(ThrowImmediatelyDataLoadEventListener.Quiet, null); + + var tablesInDatabase = server.ExpectDatabase(databaseName).DiscoverTables(false); + + //there should be 1 table in this database + Assert.AreEqual(1, tablesInDatabase.Length); + + //it should be called the same as the file loaded + Assert.AreEqual(Path.GetFileNameWithoutExtension(file), tablesInDatabase[0].GetRuntimeName()); + + Assert.AreEqual("varchar(7)", GetColumnType(database, tableName, "Name")); + Assert.AreEqual("varchar(13)", GetColumnType(database, tableName, "Surname")); + Assert.AreEqual("int", GetColumnType(database, tableName, "Age")); + Assert.AreEqual("decimal(3,2)", GetColumnType(database, tableName, "Healthiness")); + Assert.AreEqual("datetime2", GetColumnType(database, tableName, "DateOfImagining")); + + using (var con = (SqlConnection)server.GetConnection()) { - try - { - File.Delete(file); - } - catch (IOException) - { - //Couldn't delete temporary file... oh well - } - + con.Open(); + + var cmdReadData = + new SqlCommand( + $"Select * from {tablesInDatabase[0].GetRuntimeName()} WHERE Name='Frank'", con); + var r = cmdReadData.ExecuteReader(); + + //expected 1 record only + Assert.IsTrue(r.Read()); + + Assert.AreEqual("Frank", r["Name"]); + Assert.AreEqual("Mortus,M", r["Surname"]); + Assert.AreEqual(41, r["Age"]); + Assert.AreEqual(0.0f, r["Healthiness"]); + Assert.AreEqual(new DateTime(2005, 12, 1), r["DateOfImagining"]); + + //and no more records + Assert.IsFalse(r.Read()); + + con.Close(); } + server.ExpectDatabase(databaseName).Drop(); + Assert.IsFalse(server.ExpectDatabase(databaseName).Exists()); } - - private string GetColumnType(DiscoveredDatabase database, string tableName, string colName) + finally { - return - database.ExpectTable(tableName).DiscoverColumn(colName).DataType.SQLType; + try + { + File.Delete(file); + } + catch (IOException) + { + //Couldn't delete temporary file... 
oh well + } } } -} + + private static string GetColumnType(DiscoveredDatabase database, string tableName, string colName) => + database.ExpectTable(tableName).DiscoverColumn(colName).DataType.SQLType; +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataExport/ProjectChecksTestsComplex.cs b/Rdmp.Core.Tests/DataExport/ProjectChecksTestsComplex.cs index 222c62e8b0..93ff3b445e 100644 --- a/Rdmp.Core.Tests/DataExport/ProjectChecksTestsComplex.cs +++ b/Rdmp.Core.Tests/DataExport/ProjectChecksTestsComplex.cs @@ -10,36 +10,41 @@ using Rdmp.Core.DataExport.Checks; using Rdmp.Core.DataExport.DataExtraction.Pipeline.Sources; using Rdmp.Core.DataFlowPipeline; -using ReusableLibraryCode.Checks; -using ReusableLibraryCode.Progress; +using Rdmp.Core.ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Progress; using Tests.Common.Scenarios; -namespace Rdmp.Core.Tests.DataExport +namespace Rdmp.Core.Tests.DataExport; + +public class ProjectChecksTestsComplex : TestsRequiringAnExtractionConfiguration { - public class ProjectChecksTestsComplex:TestsRequiringAnExtractionConfiguration + [Test] + public void CheckBasicConfiguration() { - [Test] - public void CheckBasicConfiguration() - { - new ProjectChecker(new ThrowImmediatelyActivator(RepositoryLocator),_project).Check(new ThrowImmediatelyCheckNotifier { ThrowOnWarning = true }); - } + new ProjectChecker(new ThrowImmediatelyActivator(RepositoryLocator), _project).Check( + ThrowImmediatelyCheckNotifier.QuietPicky); + } - [Test] - public void DatasetIsDisabled() - { - _extractableDataSet.DisableExtraction = true; - _extractableDataSet.SaveToDatabase(); + [Test] + public void DatasetIsDisabled() + { + _extractableDataSet.DisableExtraction = true; + _extractableDataSet.SaveToDatabase(); - //checking should fail - var exception = Assert.Throws(() => new ProjectChecker(new ThrowImmediatelyActivator(RepositoryLocator), _project).Check(new ThrowImmediatelyCheckNotifier { ThrowOnWarning = true })); - Assert.AreEqual("Dataset TestTable is set to DisableExtraction=true, probably someone doesn't want you extracting this dataset at the moment", exception.Message); + //checking should fail + var exception = Assert.Throws(() => + new ProjectChecker(new ThrowImmediatelyActivator(RepositoryLocator), _project).Check( + ThrowImmediatelyCheckNotifier.QuietPicky)); + Assert.AreEqual( + "Dataset TestTable is set to DisableExtraction=true, probably someone doesn't want you extracting this dataset at the moment", + exception.Message); - //but if the user goes ahead and executes the extraction that should fail too - var source = new ExecuteDatasetExtractionSource(); - source.PreInitialize(_request, new ThrowImmediatelyDataLoadEventListener()); - var exception2 = Assert.Throws(() => source.GetChunk(new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken())); + //but if the user goes ahead and executes the extraction that should fail too + var source = new ExecuteDatasetExtractionSource(); + source.PreInitialize(_request, ThrowImmediatelyDataLoadEventListener.Quiet); + var exception2 = Assert.Throws(() => + source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken())); - Assert.AreEqual("Cannot extract TestTable because DisableExtraction is set to true", exception2.Message); - } + Assert.AreEqual("Cannot extract TestTable because DisableExtraction is set to true", exception2?.Message); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataExport/ProjectChecksTestsSimple.cs 
b/Rdmp.Core.Tests/DataExport/ProjectChecksTestsSimple.cs index 1b55164bc3..8ac6874346 100644 --- a/Rdmp.Core.Tests/DataExport/ProjectChecksTestsSimple.cs +++ b/Rdmp.Core.Tests/DataExport/ProjectChecksTestsSimple.cs @@ -7,202 +7,193 @@ using System; using System.IO; using System.Linq; -using System.Reflection; using System.Text.RegularExpressions; using NUnit.Framework; using Rdmp.Core.CommandExecution; using Rdmp.Core.DataExport.Checks; using Rdmp.Core.DataExport.Data; -using ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Checks; using Tests.Common; -namespace Rdmp.Core.Tests.DataExport +namespace Rdmp.Core.Tests.DataExport; + +public class ProjectChecksTestsSimple : DatabaseTests { - public class ProjectChecksTestsSimple:DatabaseTests + [Test] + public void Project_NoConfigurations() { - [Test] - public void Project_NoConfigurations() + var p = new Project(DataExportRepository, "Fish"); + + try { - Project p = new Project(DataExportRepository, "Fish"); - - try - { - var ex = Assert.Throws(()=>new ProjectChecker(new ThrowImmediatelyActivator(RepositoryLocator),p).Check(new ThrowImmediatelyCheckNotifier())); - Assert.AreEqual("Project does not have any ExtractionConfigurations yet",ex.Message); - - } - finally - { - p.DeleteInDatabase(); - } + var ex = Assert.Throws(() => + new ProjectChecker(new ThrowImmediatelyActivator(RepositoryLocator), p).Check( + ThrowImmediatelyCheckNotifier.Quiet)); + Assert.AreEqual("Project does not have any ExtractionConfigurations yet", ex?.Message); } - - [Test] - public void Project_NoDirectory() + finally { - ExtractionConfiguration config; - Project p = GetProjectWithConfig(out config); - var ex = Assert.Throws(()=>RunTestWithCleanup(p, config)); - Assert.AreEqual("Project does not have an ExtractionDirectory", ex.Message); - + p.DeleteInDatabase(); } + } - [Test] - [TestCase(@"C:\asdlfasdjlfhasjldhfljh")] - [TestCase(@"\\MyMakeyUpeyServer\Where")] - [TestCase(@"Z:\WizardOfOz")] - public void Project_NonExistentDirectory(string dir) - { - ExtractionConfiguration config; - Project p = GetProjectWithConfig(out config); - - p.ExtractionDirectory = dir; - var ex = Assert.Throws(()=>RunTestWithCleanup(p, config)); - Assert.IsTrue(Regex.IsMatch(ex.Message,@"Project ExtractionDirectory .* Does Not Exist")); + [Test] + public void Project_NoDirectory() + { + var p = GetProjectWithConfig(out var config); + var ex = Assert.Throws(() => RunTestWithCleanup(p, config)); + Assert.AreEqual("Project does not have an ExtractionDirectory", ex?.Message); + } - } + [Test] + [TestCase(@"C:\asdlfasdjlfhasjldhfljh")] + [TestCase(@"\\MyMakeyUpeyServer\Where")] + [TestCase(@"Z:\WizardOfOz")] + public void Project_NonExistentDirectory(string dir) + { + var p = GetProjectWithConfig(out var config); - [Test] - public void Project_DodgyCharactersInExtractionDirectoryName() - { - ExtractionConfiguration config; - Project p = GetProjectWithConfig(out config); - p.ExtractionDirectory = @"C:\|||"; + p.ExtractionDirectory = dir; + var ex = Assert.Throws(() => RunTestWithCleanup(p, config)); + Assert.IsTrue(Regex.IsMatch(ex.Message, @"Project ExtractionDirectory .* Does Not Exist")); + } - var ex = Assert.Throws(()=>RunTestWithCleanup(p,config)); - Assert.AreEqual(@"Project ExtractionDirectory ('C:\|||') Does Not Exist", ex.Message); - } + [Test] + public void Project_DodgyCharactersInExtractionDirectoryName() + { + var p = GetProjectWithConfig(out var config); + p.ExtractionDirectory = @"C:\|||"; + + var ex = Assert.Throws(() => RunTestWithCleanup(p, config)); + 
Assert.AreEqual(@"Project ExtractionDirectory ('C:\|||') Does Not Exist", ex.Message); + } + + [Test] + public void ConfigurationFrozen_Remnants() + { + var p = GetProjectWithConfigDirectory(out var config, out var dir); + + //create remnant directory (empty) + var remnantDir = dir.CreateSubdirectory($"Extr_{config.ID}20011225"); - [Test] - public void ConfigurationFrozen_Remnants() + //with empty subdirectories + remnantDir.CreateSubdirectory("DMPTestCatalogue").CreateSubdirectory("Lookups"); + + config.IsReleased = true; //make environment think config is released + config.SaveToDatabase(); + + try { - DirectoryInfo dir; - ExtractionConfiguration config; - var p = GetProjectWithConfigDirectory(out config, out dir); - - //create remnant directory (empty) - var remnantDir = dir.CreateSubdirectory("Extr_" + config.ID + "20011225"); - - //with empty subdirectories - remnantDir.CreateSubdirectory("DMPTestCatalogue").CreateSubdirectory("Lookups"); - - config.IsReleased = true;//make environment think config is released - config.SaveToDatabase(); - - try - { - //remnant exists - Assert.IsTrue(dir.Exists); - Assert.IsTrue(remnantDir.Exists); - - //resolve accepting deletion - new ProjectChecker(new ThrowImmediatelyActivator(RepositoryLocator),p).Check(new AcceptAllCheckNotifier()); - - //boom remnant doesnt exist anymore (but parent does obviously) - Assert.IsTrue(dir.Exists); - Assert.IsFalse(Directory.Exists(remnantDir.FullName));//cant use .Exists for some reason, c# caches answer? - - } - finally - { - config.DeleteInDatabase(); - p.DeleteInDatabase(); - } - } + //remnant exists + Assert.IsTrue(dir.Exists); + Assert.IsTrue(remnantDir.Exists); + //resolve accepting deletion + new ProjectChecker(new ThrowImmediatelyActivator(RepositoryLocator), p).Check(new AcceptAllCheckNotifier()); - [Test] - public void ConfigurationFrozen_RemnantsWithFiles() + //boom remnant doesnt exist anymore (but parent does obviously) + Assert.IsTrue(dir.Exists); + Assert.IsFalse(Directory.Exists(remnantDir.FullName)); //cant use .Exists for some reason, c# caches answer? 
+ } + finally { - DirectoryInfo dir; - ExtractionConfiguration config; - var p = GetProjectWithConfigDirectory(out config, out dir); - - //create remnant directory (empty) - var remnantDir = dir.CreateSubdirectory("Extr_" + config.ID + "20011225"); - - //with empty subdirectories - var lookupDir = remnantDir.CreateSubdirectory("DMPTestCatalogue").CreateSubdirectory("Lookups"); - - //this time put a file in - File.AppendAllLines(Path.Combine(lookupDir.FullName,"Text.txt"),new string[]{"Amagad"}); - - config.IsReleased = true;//make environment think config is released - config.SaveToDatabase(); - try - { - var notifier = new ToMemoryCheckNotifier(); - RunTestWithCleanup(p,config,notifier); - - Assert.IsTrue(notifier.Messages.Any( - m=>m.Result == CheckResult.Fail && - Regex.IsMatch(m.Message,@"Found non-empty folder .* which is left over extracted folder after data release \(First file found was '.*[/\\]DMPTestCatalogue[/\\]Lookups[/\\]Text.txt' but there may be others\)"))); - } - finally - { - remnantDir.Delete(true); - } + config.DeleteInDatabase(); + p.DeleteInDatabase(); } + } - [Test] - public void Configuration_NoDatasets() - { - DirectoryInfo dir; - ExtractionConfiguration config; - var p = GetProjectWithConfigDirectory(out config, out dir); - var ex = Assert.Throws(()=>RunTestWithCleanup(p,config)); - StringAssert.StartsWith("There are no datasets selected for open configuration 'New ExtractionConfiguration",ex.Message); - } + [Test] + public void ConfigurationFrozen_RemnantsWithFiles() + { + var p = GetProjectWithConfigDirectory(out var config, out var dir); + + //create remnant directory (empty) + var remnantDir = dir.CreateSubdirectory($"Extr_{config.ID}20011225"); + //with empty subdirectories + var lookupDir = remnantDir.CreateSubdirectory("DMPTestCatalogue").CreateSubdirectory("Lookups"); - [Test] - public void Configuration_NoProjectNumber() + //this time put a file in + File.AppendAllLines(Path.Combine(lookupDir.FullName, "Text.txt"), new string[] { "Amagad" }); + + config.IsReleased = true; //make environment think config is released + config.SaveToDatabase(); + try { - DirectoryInfo dir; - ExtractionConfiguration config; - var p = GetProjectWithConfigDirectory(out config, out dir); - p.ProjectNumber = null; - var ex = Assert.Throws(()=>RunTestWithCleanup(p, config)); - StringAssert.Contains("Project does not have a Project Number, this is a number which is meaningful to you (as opposed to ID which is the ",ex.Message); - } + var notifier = new ToMemoryCheckNotifier(); + RunTestWithCleanup(p, config, notifier); - private void RunTestWithCleanup(Project p,ExtractionConfiguration config, ICheckNotifier notifier = null) + Assert.IsTrue(notifier.Messages.Any( + m => m.Result == CheckResult.Fail && + Regex.IsMatch(m.Message, + @"Found non-empty folder .* which is left over extracted folder after data release \(First file found was '.*[/\\]DMPTestCatalogue[/\\]Lookups[/\\]Text.txt' but there may be others\)"))); + } + finally { - try - { - new ProjectChecker(new ThrowImmediatelyActivator(RepositoryLocator),p).Check(notifier??new ThrowImmediatelyCheckNotifier() { ThrowOnWarning = true }); - } - finally - { - config.DeleteInDatabase(); - p.DeleteInDatabase(); - } + remnantDir.Delete(true); } + } + + [Test] + public void Configuration_NoDatasets() + { + var p = GetProjectWithConfigDirectory(out var config, out _); + var ex = Assert.Throws(() => RunTestWithCleanup(p, config)); + StringAssert.StartsWith("There are no datasets selected for open configuration 'New 
ExtractionConfiguration", + ex.Message); + } + - private Project GetProjectWithConfig(out ExtractionConfiguration config) + [Test] + public void Configuration_NoProjectNumber() + { + var p = GetProjectWithConfigDirectory(out var config, out _); + p.ProjectNumber = null; + var ex = Assert.Throws(() => RunTestWithCleanup(p, config)); + StringAssert.Contains( + "Project does not have a Project Number, this is a number which is meaningful to you (as opposed to ID which is the ", + ex.Message); + } + + private void RunTestWithCleanup(Project p, ExtractionConfiguration config, ICheckNotifier notifier = null) + { + try { - var p = new Project(DataExportRepository, "Fish"); - p.ProjectNumber = -5000; - config = new ExtractionConfiguration(DataExportRepository,p); - return p; + new ProjectChecker(new ThrowImmediatelyActivator(RepositoryLocator), p).Check(notifier ?? + ThrowImmediatelyCheckNotifier.QuietPicky); } + finally + { + config.DeleteInDatabase(); + p.DeleteInDatabase(); + } + } - private Project GetProjectWithConfigDirectory(out ExtractionConfiguration config,out DirectoryInfo dir) + private Project GetProjectWithConfig(out ExtractionConfiguration config) + { + var p = new Project(DataExportRepository, "Fish") { - var p = new Project(DataExportRepository, "Fish"); - config = new ExtractionConfiguration(DataExportRepository, p); + ProjectNumber = -5000 + }; + config = new ExtractionConfiguration(DataExportRepository, p); + return p; + } - var projectFolder = Path.Combine(TestContext.CurrentContext.WorkDirectory, "ProjectCheckerTestDir"); + private Project GetProjectWithConfigDirectory(out ExtractionConfiguration config, out DirectoryInfo dir) + { + var p = new Project(DataExportRepository, "Fish"); + config = new ExtractionConfiguration(DataExportRepository, p); - dir = new DirectoryInfo(projectFolder ); - dir.Create(); + var projectFolder = Path.Combine(TestContext.CurrentContext.WorkDirectory, "ProjectCheckerTestDir"); - p.ExtractionDirectory = projectFolder; - p.ProjectNumber = -5000; - p.SaveToDatabase(); + dir = new DirectoryInfo(projectFolder); + dir.Create(); - return p; - } + p.ExtractionDirectory = projectFolder; + p.ProjectNumber = -5000; + p.SaveToDatabase(); + + return p; } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataExport/ProjectCohortIdentificationConfigurationAssociationTests.cs b/Rdmp.Core.Tests/DataExport/ProjectCohortIdentificationConfigurationAssociationTests.cs index 88d0b3d11e..4dbd479367 100644 --- a/Rdmp.Core.Tests/DataExport/ProjectCohortIdentificationConfigurationAssociationTests.cs +++ b/Rdmp.Core.Tests/DataExport/ProjectCohortIdentificationConfigurationAssociationTests.cs @@ -13,59 +13,62 @@ using Rdmp.Core.Providers; using Rdmp.Core.Providers.Nodes.ProjectCohortNodes; using Rdmp.Core.Repositories; -using ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Checks; -namespace Rdmp.Core.Tests.DataExport +namespace Rdmp.Core.Tests.DataExport; + +[Category("Unit")] +public class ProjectCohortIdentificationConfigurationAssociationTests { - [Category("Unit")] - public class ProjectCohortIdentificationConfigurationAssociationTests + [Test] + public void TestOrphanCic() { - - [Test] - public void TestOrphanCic() - { - var memory = new MemoryDataExportRepository(); - var cic = new CohortIdentificationConfiguration(memory, "Mycic"); - var p = new Project(memory,"my proj"); - p.AssociateWithCohortIdentification(cic); + var memory = new MemoryDataExportRepository(); + var cic = new CohortIdentificationConfiguration(memory, "Mycic"); + var p 
= new Project(memory, "my proj"); + p.AssociateWithCohortIdentification(cic); + + //fetch the instance + var cicAssoc = memory.GetAllObjects().Single(); + + //relationship from p should resolve to the association link + Assert.AreEqual(cicAssoc, p.ProjectCohortIdentificationConfigurationAssociations[0]); + + //relationship from p should resolve to the cic + Assert.AreEqual(cic, p.GetAssociatedCohortIdentificationConfigurations()[0]); + + //in order to make it an orphan we have to suppress the system default behaviour of cascading across the deletion + var obscure = memory.ObscureDependencyFinder as CatalogueObscureDependencyFinder; + obscure?.OtherDependencyFinders.Clear(); + + //make the assoc an orphan + cic.DeleteInDatabase(); + cicAssoc.ClearAllInjections(); - //fetch the instance - var cicAssoc = memory.GetAllObjects().Single(); - - //relationship from p should resolve to the association link - Assert.AreEqual(cicAssoc,p.ProjectCohortIdentificationConfigurationAssociations[0]); - - //relationship from p should resolve to the cic - Assert.AreEqual(cic, p.GetAssociatedCohortIdentificationConfigurations()[0]); + //assoc should still exist + Assert.AreEqual(cicAssoc, p.ProjectCohortIdentificationConfigurationAssociations[0]); + Assert.IsNull(p.ProjectCohortIdentificationConfigurationAssociations[0].CohortIdentificationConfiguration); - //in order to make it an orphan we have to suppress the system default behaviour of cascading across the deletion - var obscure = memory.ObscureDependencyFinder as CatalogueObscureDependencyFinder; - if(obscure != null) - obscure.OtherDependencyFinders.Clear(); + //relationship from p should resolve to the cic + Assert.IsEmpty(p.GetAssociatedCohortIdentificationConfigurations()); - //make the assoc an orphan - cic.DeleteInDatabase(); - cicAssoc.ClearAllInjections(); - - //assoc should still exist - Assert.AreEqual(cicAssoc, p.ProjectCohortIdentificationConfigurationAssociations[0]); - Assert.IsNull(p.ProjectCohortIdentificationConfigurationAssociations[0].CohortIdentificationConfiguration); - - //relationship from p should resolve to the cic - Assert.IsEmpty( p.GetAssociatedCohortIdentificationConfigurations()); + //error should be reported in top right of program + var ex = Assert.Throws(() => + new DataExportChildProvider(new RepositoryProvider(memory), null, ThrowImmediatelyCheckNotifier.Quiet, + null)); + StringAssert.IsMatch( + @"Failed to find Associated Cohort Identification Configuration with ID \d+ which was supposed to be associated with my proj", + ex.Message); - //error should be reported in top right of program - var ex = Assert.Throws(()=>new DataExportChildProvider(new RepositoryProvider(memory), null, new ThrowImmediatelyCheckNotifier(),null)); - StringAssert.IsMatch(@"Failed to find Associated Cohort Identification Configuration with ID \d+ which was supposed to be associated with my proj", ex.Message); + //but UI should still respond + var childProvider = new DataExportChildProvider(new RepositoryProvider(memory), null, + IgnoreAllErrorsCheckNotifier.Instance, null); - //but UI should still respond - var childProvider = new DataExportChildProvider(new RepositoryProvider(memory), null, new IgnoreAllErrorsCheckNotifier(),null); + //the orphan cic should not appear in the tree view under Project=>Cohorts=>Associated Cics + var cohorts = childProvider.GetChildren(p).OfType().Single(); + var cics = childProvider.GetChildren(cohorts).OfType() + .First(); - //the orphan cic should not appear in the tree view under Project=>Cohorts=>Associated 
Cics - var cohorts = childProvider.GetChildren(p).OfType().Single(); - var cics = childProvider.GetChildren(cohorts).OfType().First(); - - Assert.IsEmpty(childProvider.GetChildren(cics)); - } + Assert.IsEmpty(childProvider.GetChildren(cics)); } } \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataExport/TableValuedFunctionTests/EndToEndTableValuedFunction.cs b/Rdmp.Core.Tests/DataExport/TableValuedFunctionTests/EndToEndTableValuedFunction.cs index 3e39ad11d5..4505acc528 100644 --- a/Rdmp.Core.Tests/DataExport/TableValuedFunctionTests/EndToEndTableValuedFunction.cs +++ b/Rdmp.Core.Tests/DataExport/TableValuedFunctionTests/EndToEndTableValuedFunction.cs @@ -24,164 +24,174 @@ using Rdmp.Core.DataExport.DataExtraction.Pipeline.Sources; using Rdmp.Core.DataExport.DataExtraction.UserPicks; using Rdmp.Core.DataFlowPipeline; -using Rdmp.Core.Repositories; -using ReusableLibraryCode.Checks; -using ReusableLibraryCode.DataAccess; -using ReusableLibraryCode.Progress; +using Rdmp.Core.ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.DataAccess; +using Rdmp.Core.ReusableLibraryCode.Progress; using Tests.Common; -namespace Rdmp.Core.Tests.DataExport.TableValuedFunctionTests +namespace Rdmp.Core.Tests.DataExport.TableValuedFunctionTests; + +public class EndToEndTableValuedFunction : DatabaseTests { - public class EndToEndTableValuedFunction:DatabaseTests + private ExtractionInformation _nonTvfExtractionIdentifier; + private ICatalogue _nonTvfCatalogue; + private ITableInfo _nonTvfTableInfo; + private ExternalCohortTable _externalCohortTable; + private ICatalogue _tvfCatalogue; + private ITableInfo _tvfTableInfo; + + //the cohort database + private DiscoveredDatabase _discoveredCohortDatabase; + + //the data database (with the tvf in it) + private DiscoveredDatabase _database; + + private CohortIdentificationConfiguration _cic; + private Project _project; + private Pipeline _pipe; + private AggregateConfiguration _aggregate; + private AggregateConfiguration _cicAggregate; + + [SetUp] + protected override void SetUp() { - private ExtractionInformation _nonTvfExtractionIdentifier; - private ICatalogue _nonTvfCatalogue; - private ITableInfo _nonTvfTableInfo; - private ExternalCohortTable _externalCohortTable; - private ICatalogue _tvfCatalogue; - private ITableInfo _tvfTableInfo; - - //the cohort database - private DiscoveredDatabase _discoveredCohortDatabase; - //the data database (with the tvf in it) - private DiscoveredDatabase _database; - - private CohortIdentificationConfiguration _cic; - private Project _project; - private Pipeline _pipe; - private AggregateConfiguration _aggregate; - private AggregateConfiguration _cicAggregate; - - [SetUp] - protected override void SetUp() - { - base.SetUp(); + base.SetUp(); - _database = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer); - } + _database = GetCleanedServer(DatabaseType.MicrosoftSQLServer); + } - [Test] - public void EndToEndTest() - { - var cohortDatabaseNameWillBe = TestDatabaseNames.GetConsistentName("TbvCohort"); - _discoveredCohortDatabase = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(cohortDatabaseNameWillBe); - - //cleanup - if(_discoveredCohortDatabase.Exists()) - _discoveredCohortDatabase.Drop(); - - //create a normal catalogue - CreateANormalCatalogue(); - - //create a cohort database using wizard - CreateNewCohortDatabaseWizard cohortDatabaseWizard = new CreateNewCohortDatabaseWizard(_discoveredCohortDatabase,CatalogueRepository,DataExportRepository,false); - - _externalCohortTable = 
cohortDatabaseWizard.CreateDatabase( - new PrivateIdentifierPrototype(_nonTvfExtractionIdentifier) - ,new ThrowImmediatelyCheckNotifier()); - - //create a table valued function - CreateTvfCatalogue(cohortDatabaseNameWillBe); - - //Test 1 - TestThatQueryBuilderWithoutParametersBeingSetThrowsQueryBuildingException(); - - PopulateCohortDatabaseWithRecordsFromNonTvfCatalogue(); - - //Test 2 - TestWithParameterValueThatRowsAreReturned(); - - //Test 3 - TestUsingTvfForAggregates(); - - //Test 4 - TestAddingTvfToCIC(); - - //Test 5 - TestDataExportOfTvf(); - - //tear down - DataExportRepository.GetAllObjects().Single().DeleteInDatabase(); - _externalCohortTable.DeleteInDatabase(); - - _database.ExpectTable("NonTVFTable").Drop(); - _database.ExpectTableValuedFunction("GetTopXRandom").Drop(); - - //delete global parameter - ((AnyTableSqlParameter)_aggregate.GetAllParameters().Single()).DeleteInDatabase(); - //delete aggregate - _aggregate.DeleteInDatabase(); - - ((AnyTableSqlParameter)_cicAggregate.GetAllParameters().Single()).DeleteInDatabase(); - //delete aggregate - _cicAggregate.DeleteInDatabase(); - - //get rid of the cohort identification configuration - _cic.DeleteInDatabase(); - _pipe.DeleteInDatabase(); - - //get rid of the cohort database + [Test] + public void EndToEndTest() + { + var cohortDatabaseNameWillBe = TestDatabaseNames.GetConsistentName("TbvCohort"); + _discoveredCohortDatabase = + DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(cohortDatabaseNameWillBe); + + //cleanup + if (_discoveredCohortDatabase.Exists()) _discoveredCohortDatabase.Drop(); - - _nonTvfCatalogue.DeleteInDatabase(); - _nonTvfTableInfo.DeleteInDatabase(); - _tvfCatalogue.DeleteInDatabase(); - _tvfTableInfo.DeleteInDatabase(); - } + //create a normal catalogue + CreateANormalCatalogue(); + + //create a cohort database using wizard + var cohortDatabaseWizard = new CreateNewCohortDatabaseWizard(_discoveredCohortDatabase, CatalogueRepository, + DataExportRepository, false); + + _externalCohortTable = cohortDatabaseWizard.CreateDatabase( + new PrivateIdentifierPrototype(_nonTvfExtractionIdentifier) + , ThrowImmediatelyCheckNotifier.Quiet); + + //create a table valued function + CreateTvfCatalogue(cohortDatabaseNameWillBe); + + //Test 1 + TestThatQueryBuilderWithoutParametersBeingSetThrowsQueryBuildingException(); + + PopulateCohortDatabaseWithRecordsFromNonTvfCatalogue(); + + //Test 2 + TestWithParameterValueThatRowsAreReturned(); + + //Test 3 + TestUsingTvfForAggregates(); + + //Test 4 + TestAddingTvfToCIC(); + + //Test 5 + TestDataExportOfTvf(); + + //tear down + DataExportRepository.GetAllObjects().Single().DeleteInDatabase(); + _externalCohortTable.DeleteInDatabase(); + + _database.ExpectTable("NonTVFTable").Drop(); + _database.ExpectTableValuedFunction("GetTopXRandom").Drop(); + //delete global parameter + ((AnyTableSqlParameter)_aggregate.GetAllParameters().Single()).DeleteInDatabase(); + //delete aggregate + _aggregate.DeleteInDatabase(); - private void PopulateCohortDatabaseWithRecordsFromNonTvfCatalogue() + ((AnyTableSqlParameter)_cicAggregate.GetAllParameters().Single()).DeleteInDatabase(); + //delete aggregate + _cicAggregate.DeleteInDatabase(); + + //get rid of the cohort identification configuration + _cic.DeleteInDatabase(); + _pipe.DeleteInDatabase(); + + //get rid of the cohort database + _discoveredCohortDatabase.Drop(); + + _nonTvfCatalogue.DeleteInDatabase(); + _nonTvfTableInfo.DeleteInDatabase(); + + _tvfCatalogue.DeleteInDatabase(); + _tvfTableInfo.DeleteInDatabase(); + } + + 
+ private void PopulateCohortDatabaseWithRecordsFromNonTvfCatalogue() + { + //create a cohort identification configuration (identifies people from datasets using set operations - see CohortManager) + _cic = new CohortIdentificationConfiguration(CatalogueRepository, "TbvfCIC"); + _cic.CreateRootContainerIfNotExists(); + + //turn the catalogue _nonTvfCatalogue into a cohort set and add it to the root container + var newAggregate = _cic.CreateNewEmptyConfigurationForCatalogue(_nonTvfCatalogue, + (s, e) => throw new Exception("Did not expect there to be more than 1!")); + + var root = _cic.RootCohortAggregateContainer; + root.AddChild(newAggregate, 0); + + //create a pipeline for executing this CIC and turning it into a cohort + _pipe = new Pipeline(CatalogueRepository, "CREATE COHORT:By Executing CIC"); + + var source = new PipelineComponent(CatalogueRepository, _pipe, typeof(CohortIdentificationConfigurationSource), + 0, "CIC Source"); + + _project = new Project(DataExportRepository, "TvfProject") { - //create a cohort identification configuration (identifies people from datasets using set operations - see CohortManager) - _cic = new CohortIdentificationConfiguration(CatalogueRepository, "TbvfCIC"); - _cic.CreateRootContainerIfNotExists(); - - //turn the catalogue _nonTvfCatalogue into a cohort set and add it to the root container - var newAggregate = _cic.CreateNewEmptyConfigurationForCatalogue(_nonTvfCatalogue,(s,e)=> { throw new Exception("Did not expect there to be more than 1!"); }); - - var root = _cic.RootCohortAggregateContainer; - root.AddChild(newAggregate,0); - - //create a pipeline for executing this CIC and turning it into a cohort - _pipe = new Pipeline(CatalogueRepository, "CREATE COHORT:By Executing CIC"); - - var source = new PipelineComponent(CatalogueRepository, _pipe,typeof (CohortIdentificationConfigurationSource), 0, "CIC Source"); - - _project = new Project(DataExportRepository, "TvfProject"); - _project.ProjectNumber = 12; - _project.ExtractionDirectory = TestContext.CurrentContext.TestDirectory; - _project.SaveToDatabase(); - - var destination = new PipelineComponent(CatalogueRepository, _pipe, typeof(BasicCohortDestination), 1, "Destination"); - - _pipe.SourcePipelineComponent_ID = source.ID; - _pipe.DestinationPipelineComponent_ID = destination.ID; - _pipe.SaveToDatabase(); - - //create pipeline arguments - source.CreateArgumentsForClassIfNotExists(); - destination.CreateArgumentsForClassIfNotExists(); - - //create pipeline initialization objects - var request = new CohortCreationRequest(_project, new CohortDefinition(null, "MyFirstCohortForTvfTest", 1, 12, _externalCohortTable), DataExportRepository, "Here goes nothing"); - request.CohortIdentificationConfiguration = _cic; - var engine = request.GetEngine(_pipe,new ThrowImmediatelyDataLoadEventListener()); - engine.ExecutePipeline(new GracefulCancellationToken()); - } + ProjectNumber = 12, + ExtractionDirectory = TestContext.CurrentContext.TestDirectory + }; + _project.SaveToDatabase(); + + var destination = + new PipelineComponent(CatalogueRepository, _pipe, typeof(BasicCohortDestination), 1, "Destination"); + + _pipe.SourcePipelineComponent_ID = source.ID; + _pipe.DestinationPipelineComponent_ID = destination.ID; + _pipe.SaveToDatabase(); + + //create pipeline arguments + source.CreateArgumentsForClassIfNotExists(); + destination.CreateArgumentsForClassIfNotExists(); + + //create pipeline initialization objects + var request = new CohortCreationRequest(_project, + new CohortDefinition(null, 
"MyFirstCohortForTvfTest", 1, 12, _externalCohortTable), DataExportRepository, + "Here goes nothing") + { + CohortIdentificationConfiguration = _cic + }; + var engine = request.GetEngine(_pipe, ThrowImmediatelyDataLoadEventListener.Quiet); + engine.ExecutePipeline(new GracefulCancellationToken()); + } - private void CreateTvfCatalogue(string cohortDatabaseName) + private void CreateTvfCatalogue(string cohortDatabaseName) + { + var svr = _database.Server; + using (var con = svr.GetConnection()) { - var svr = _database.Server; - using (var con = svr.GetConnection()) - { - con.Open(); + con.Open(); - //create the newID view - svr.GetCommand("create view getNewID as select newid() as new_id", con).ExecuteNonQuery(); + //create the newID view + svr.GetCommand("create view getNewID as select newid() as new_id", con).ExecuteNonQuery(); - var sql = string.Format( - @"create function GetTopXRandom (@numberOfRecords int) + var sql = $@"create function GetTopXRandom (@numberOfRecords int) RETURNS @retTable TABLE ( chi varchar(10), @@ -192,262 +202,261 @@ definitionID int while(@numberOfRecords >0) begin -insert into @retTable select top 1 chi,cohortDefinition_id from {0}..Cohort order by (select new_id from getNewID) +insert into @retTable select top 1 chi,cohortDefinition_id from {cohortDatabaseName}..Cohort order by (select new_id from getNewID) set @numberOfRecords = @numberOfRecords - 1 end return end -",cohortDatabaseName); - - svr.GetCommand(sql, con).ExecuteNonQuery(); - } +"; - var tblvf = _database.ExpectTableValuedFunction("GetTopXRandom"); + svr.GetCommand(sql, con).ExecuteNonQuery(); + } - var importer = new TableValuedFunctionImporter(CatalogueRepository, tblvf); - importer.DoImport(out var tbl,out var cols); + var tblvf = _database.ExpectTableValuedFunction("GetTopXRandom"); - var engineer = new ForwardEngineerCatalogue(tbl, cols); - engineer.ExecuteForwardEngineering(out var cata, out var cis, out var eis); + var importer = new TableValuedFunctionImporter(CatalogueRepository, tblvf); + importer.DoImport(out var tbl, out var cols); - Assert.AreEqual("chi", eis[0].GetRuntimeName()); - eis[0].IsExtractionIdentifier = true; - eis[0].SaveToDatabase(); - - _tvfCatalogue = cata; - _tvfTableInfo = tbl; + var engineer = new ForwardEngineerCatalogue(tbl, cols); + engineer.ExecuteForwardEngineering(out var cata, out var cis, out var eis); + Assert.AreEqual("chi", eis[0].GetRuntimeName()); + eis[0].IsExtractionIdentifier = true; + eis[0].SaveToDatabase(); - } + _tvfCatalogue = cata; + _tvfTableInfo = tbl; + } - private void CreateANormalCatalogue() + private void CreateANormalCatalogue() + { + var svr = _database.Server; + using (var con = svr.GetConnection()) { - var svr = _database.Server; - using (var con = svr.GetConnection()) - { - con.Open(); - svr.GetCommand("CREATE TABLE NonTVFTable ( chi varchar(10))",con).ExecuteNonQuery(); - svr.GetCommand("INSERT INTO NonTVFTable VALUES ('0101010101')", con).ExecuteNonQuery(); - svr.GetCommand("INSERT INTO NonTVFTable VALUES ('0202020202')", con).ExecuteNonQuery(); - svr.GetCommand("INSERT INTO NonTVFTable VALUES ('0303030303')", con).ExecuteNonQuery(); - } - - var importer = new TableInfoImporter(CatalogueRepository, svr.Name, - _database.GetRuntimeName(), "NonTVFTable", - DatabaseType.MicrosoftSQLServer,_database.Server.ExplicitUsernameIfAny,_database.Server.ExplicitPasswordIfAny); - - importer.DoImport(out var tbl,out var cols); - - var engineer = new ForwardEngineerCatalogue(tbl, cols); - engineer.ExecuteForwardEngineering(out var cata,out var cis, 
out var eis); - - _nonTvfExtractionIdentifier = eis.Single(); - _nonTvfExtractionIdentifier.IsExtractionIdentifier = true; - _nonTvfExtractionIdentifier.SaveToDatabase(); - - _nonTvfCatalogue = cata; - _nonTvfTableInfo = tbl; + con.Open(); + svr.GetCommand("CREATE TABLE NonTVFTable ( chi varchar(10))", con).ExecuteNonQuery(); + svr.GetCommand("INSERT INTO NonTVFTable VALUES ('0101010101')", con).ExecuteNonQuery(); + svr.GetCommand("INSERT INTO NonTVFTable VALUES ('0202020202')", con).ExecuteNonQuery(); + svr.GetCommand("INSERT INTO NonTVFTable VALUES ('0303030303')", con).ExecuteNonQuery(); } - private void TestThatQueryBuilderWithoutParametersBeingSetThrowsQueryBuildingException() - { - //we should have problems reading from the table valued function - var qb = new QueryBuilder("", ""); + var importer = new TableInfoImporter(CatalogueRepository, svr.Name, + _database.GetRuntimeName(), "NonTVFTable", + DatabaseType.MicrosoftSQLServer, _database.Server.ExplicitUsernameIfAny, + _database.Server.ExplicitPasswordIfAny); - //table valued function should have 2 fields (chi and definitionID) - Assert.AreEqual(2, _tvfCatalogue.GetAllExtractionInformation(ExtractionCategory.Any).Count()); + importer.DoImport(out var tbl, out var cols); - qb.AddColumnRange(_tvfCatalogue.GetAllExtractionInformation(ExtractionCategory.Any)); + var engineer = new ForwardEngineerCatalogue(tbl, cols); + engineer.ExecuteForwardEngineering(out var cata, out var cis, out var eis); - var ex = Assert.Throws(() => Console.WriteLine(qb.SQL)); - Assert.AreEqual("No Value defined for Parameter @numberOfRecords", ex.Message); - } + _nonTvfExtractionIdentifier = eis.Single(); + _nonTvfExtractionIdentifier.IsExtractionIdentifier = true; + _nonTvfExtractionIdentifier.SaveToDatabase(); - private void TestWithParameterValueThatRowsAreReturned() - { - var p = _tvfTableInfo.GetAllParameters().Single(); - p.Value = "5"; - p.SaveToDatabase(); + _nonTvfCatalogue = cata; + _nonTvfTableInfo = tbl; + } - var qb = new QueryBuilder("", ""); - qb.AddColumnRange(_tvfCatalogue.GetAllExtractionInformation(ExtractionCategory.Any)); + private void TestThatQueryBuilderWithoutParametersBeingSetThrowsQueryBuildingException() + { + //we should have problems reading from the table valued function + var qb = new QueryBuilder("", ""); - var sql = qb.SQL; + //table valued function should have 2 fields (chi and definitionID) + Assert.AreEqual(2, _tvfCatalogue.GetAllExtractionInformation(ExtractionCategory.Any).Length); - var db = DataAccessPortal.GetInstance().ExpectDatabase(_tvfTableInfo, DataAccessContext.InternalDataProcessing); - using (var con = db.Server.GetConnection()) - { - con.Open(); - var r = db.Server.GetCommand(sql, con).ExecuteReader(); + qb.AddColumnRange(_tvfCatalogue.GetAllExtractionInformation(ExtractionCategory.Any)); - int rowsReturned = 0; + var ex = Assert.Throws(() => Console.WriteLine(qb.SQL)); + Assert.AreEqual("No Value defined for Parameter @numberOfRecords", ex.Message); + } - while (r.Read()) - { - rowsReturned++; - Assert.NotNull(r["chi"]); - Assert.NotNull(r["definitionID"]); - } + private void TestWithParameterValueThatRowsAreReturned() + { + var p = _tvfTableInfo.GetAllParameters().Single(); + p.Value = "5"; + p.SaveToDatabase(); - Assert.AreEqual(rowsReturned,5); - } - } + var qb = new QueryBuilder("", ""); + qb.AddColumnRange(_tvfCatalogue.GetAllExtractionInformation(ExtractionCategory.Any)); + + var sql = qb.SQL; - private void TestUsingTvfForAggregates() + var db = DataAccessPortal.ExpectDatabase(_tvfTableInfo, 
DataAccessContext.InternalDataProcessing); + using var con = db.Server.GetConnection(); + con.Open(); + var r = db.Server.GetCommand(sql, con).ExecuteReader(); + + var rowsReturned = 0; + + while (r.Read()) { + rowsReturned++; + Assert.NotNull(r["chi"]); + Assert.NotNull(r["definitionID"]); + } + Assert.AreEqual(rowsReturned, 5); + } - _aggregate = new AggregateConfiguration(CatalogueRepository, _tvfCatalogue,"tvfAggregate"); - - var ei = _tvfCatalogue.GetAllExtractionInformation(ExtractionCategory.Any).Single(e => !e.IsExtractionIdentifier); - _aggregate.AddDimension(ei); - - //change the parameter to 10 - var p = _tvfTableInfo.GetAllParameters().Single(); - p.Value = "10"; - p.SaveToDatabase(); - - var qb = _aggregate.GetQueryBuilder(); - - //Query should be something like : - /* - * DECLARE @numberOfRecords AS int; - * SET @numberOfRecords=10; - * tvfAggregate - * SELECT - * GetTopXRandom.[definitionID], - * count(*) - * FROM - * [TestDbName_ScratchArea]..GetTopXRandom(@numberOfRecords) AS GetTopXRandom - * group by - * GetTopXRandom.[definitionID] - * order by - * GetTopXRandom.[definitionID] - * - * --Since we only imported 1 cohort we should have 1 row and the count should be the number we requested - * - * */ - - var sql = qb.SQL; - - var db = DataAccessPortal.GetInstance().ExpectDatabase(_tvfTableInfo, DataAccessContext.InternalDataProcessing); - using (var con = db.Server.GetConnection()) - { - con.Open(); - var r = db.Server.GetCommand(sql, con).ExecuteReader(); + private void TestUsingTvfForAggregates() + { + _aggregate = new AggregateConfiguration(CatalogueRepository, _tvfCatalogue, "tvfAggregate"); + + var ei = _tvfCatalogue.GetAllExtractionInformation(ExtractionCategory.Any) + .Single(e => !e.IsExtractionIdentifier); + _aggregate.AddDimension(ei); + + //change the parameter to 10 + var p = _tvfTableInfo.GetAllParameters().Single(); + p.Value = "10"; + p.SaveToDatabase(); + + var qb = _aggregate.GetQueryBuilder(); + + //Query should be something like : + /* + * DECLARE @numberOfRecords AS int; + * SET @numberOfRecords=10; + * tvfAggregate + * SELECT + * GetTopXRandom.[definitionID], + * count(*) + * FROM + * [TestDbName_ScratchArea]..GetTopXRandom(@numberOfRecords) AS GetTopXRandom + * group by + * GetTopXRandom.[definitionID] + * order by + * GetTopXRandom.[definitionID] + * + * --Since we only imported 1 cohort we should have 1 row and the count should be the number we requested + * + * */ + + var sql = qb.SQL; + + var db = DataAccessPortal.ExpectDatabase(_tvfTableInfo, DataAccessContext.InternalDataProcessing); + using (var con = db.Server.GetConnection()) + { + con.Open(); + var r = db.Server.GetCommand(sql, con).ExecuteReader(); - Assert.IsTrue(r.Read()); + Assert.IsTrue(r.Read()); - Assert.AreEqual(r[1],10); + Assert.AreEqual(r[1], 10); - Assert.IsFalse(r.Read()); - } + Assert.IsFalse(r.Read()); + } - //create a global overriding parameter on the aggregate - var global = new AnyTableSqlParameter(CatalogueRepository, _aggregate, "DECLARE @numberOfRecords AS int;"); - global.Value = "1"; - global.SaveToDatabase(); + //create a global overriding parameter on the aggregate + var global = new AnyTableSqlParameter(CatalogueRepository, _aggregate, "DECLARE @numberOfRecords AS int;") + { + Value = "1" + }; + global.SaveToDatabase(); - //refresh the SQL - sql = _aggregate.GetQueryBuilder().SQL; + //refresh the SQL + sql = _aggregate.GetQueryBuilder().SQL; - using (var con = db.Server.GetConnection()) - { - con.Open(); - var r = db.Server.GetCommand(sql, con).ExecuteReader(); + 
using (var con = db.Server.GetConnection()) + { + con.Open(); + var r = db.Server.GetCommand(sql, con).ExecuteReader(); - Assert.IsTrue(r.Read()); + Assert.IsTrue(r.Read()); - Assert.AreEqual(r[1], 1);//should now only have 1 record being retrned and counted when executing + Assert.AreEqual(r[1], 1); //should now only have 1 record being retrned and counted when executing - Assert.IsFalse(r.Read()); - } + Assert.IsFalse(r.Read()); } + } - private void TestAddingTvfToCIC() - { - var root = _cic.RootCohortAggregateContainer; - root.Operation = SetOperation.EXCEPT; - root.SaveToDatabase(); + private void TestAddingTvfToCIC() + { + var root = _cic.RootCohortAggregateContainer; + root.Operation = SetOperation.EXCEPT; + root.SaveToDatabase(); - //declare a global parameter of 1 on the aggregate - _cicAggregate = _cic.ImportAggregateConfigurationAsIdentifierList(_aggregate, (s, e) => { return null; }); - - //it should have imported the global parameter as part of the import right? - Assert.AreEqual(1,_cicAggregate.GetAllParameters().Count()); + //declare a global parameter of 1 on the aggregate + _cicAggregate = _cic.ImportAggregateConfigurationAsIdentifierList(_aggregate, (s, e) => null); - //add the new cic to the container - root.AddChild(_cicAggregate,2); + //it should have imported the global parameter as part of the import right? + Assert.AreEqual(1, _cicAggregate.GetAllParameters().Length); - //So container is: - // EXCEPT - //People in _nonTvfCatalogue (3) - //People in _tvfCatalogue (with @numberOfRecords = 1) (1) + //add the new cic to the container + root.AddChild(_cicAggregate, 2); - //result should be 2 - var qb = new CohortQueryBuilder(_cic,null); + //So container is: + // EXCEPT + //People in _nonTvfCatalogue (3) + //People in _tvfCatalogue (with @numberOfRecords = 1) (1) - var sql = qb.SQL; + //result should be 2 + var qb = new CohortQueryBuilder(_cic, null); - var db = DataAccessPortal.GetInstance().ExpectDatabase(_tvfTableInfo, DataAccessContext.InternalDataProcessing); - using (var con = db.Server.GetConnection()) - { - con.Open(); - var r = db.Server.GetCommand(sql, con).ExecuteReader(); + var sql = qb.SQL; - //2 chi numbers should be returned - Assert.IsTrue(r.Read()); - Assert.IsTrue(r.Read()); + var db = DataAccessPortal.ExpectDatabase(_tvfTableInfo, DataAccessContext.InternalDataProcessing); + using var con = db.Server.GetConnection(); + con.Open(); + var r = db.Server.GetCommand(sql, con).ExecuteReader(); - Assert.IsFalse(r.Read()); - } - } + //2 chi numbers should be returned + Assert.IsTrue(r.Read()); + Assert.IsTrue(r.Read()); - private void TestDataExportOfTvf() + Assert.IsFalse(r.Read()); + } + + private void TestDataExportOfTvf() + { + var config = new ExtractionConfiguration(DataExportRepository, _project) { - var config = new ExtractionConfiguration(DataExportRepository, _project); - config.Cohort_ID = DataExportRepository.GetAllObjects().Single().ID; - config.SaveToDatabase(); + Cohort_ID = DataExportRepository.GetAllObjects().Single().ID + }; + config.SaveToDatabase(); - var tvfExtractable = new ExtractableDataSet(DataExportRepository, _tvfCatalogue); + var tvfExtractable = new ExtractableDataSet(DataExportRepository, _tvfCatalogue); - var selected = new SelectedDataSets(DataExportRepository, config, tvfExtractable, null); + var selected = new SelectedDataSets(DataExportRepository, config, tvfExtractable, null); - //make all columns part of the extraction - foreach (ExtractionInformation e in _tvfCatalogue.GetAllExtractionInformation(ExtractionCategory.Any)) - 
config.AddColumnToExtraction(tvfExtractable, e); - - //the default value should be 10 - Assert.AreEqual("10",_tvfTableInfo.GetAllParameters().Single().Value); + //make all columns part of the extraction + foreach (var e in _tvfCatalogue.GetAllExtractionInformation(ExtractionCategory.Any)) + config.AddColumnToExtraction(tvfExtractable, e); - //configure an extraction specific global of 1 so that only 1 chi number is fetched (which will be in the cohort) - var globalP = new GlobalExtractionFilterParameter(DataExportRepository, config, "DECLARE @numberOfRecords AS int;"); - globalP.Value = "1"; - globalP.SaveToDatabase(); - - var extractionCommand = new ExtractDatasetCommand(config, new ExtractableDatasetBundle(tvfExtractable)); + //the default value should be 10 + Assert.AreEqual("10", _tvfTableInfo.GetAllParameters().Single().Value); - var source = new ExecuteDatasetExtractionSource(); + //configure an extraction specific global of 1 so that only 1 chi number is fetched (which will be in the cohort) + var globalP = + new GlobalExtractionFilterParameter(DataExportRepository, config, "DECLARE @numberOfRecords AS int;") + { + Value = "1" + }; + globalP.SaveToDatabase(); - source.PreInitialize(extractionCommand, new ThrowImmediatelyDataLoadEventListener()); + var extractionCommand = new ExtractDatasetCommand(config, new ExtractableDatasetBundle(tvfExtractable)); - var dt = source.GetChunk(new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()); - - Assert.AreEqual(1,dt.Rows.Count); + var source = new ExecuteDatasetExtractionSource(); - Assert.AreEqual("ReleaseId",dt.Columns[0].ColumnName); + source.PreInitialize(extractionCommand, ThrowImmediatelyDataLoadEventListener.Quiet); - //should be a guid - Assert.IsTrue(dt.Rows[0][0].ToString().Length>10); - Assert.IsTrue(dt.Rows[0][0].ToString().Contains("-")); + var dt = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); - selected.DeleteInDatabase(); - globalP.DeleteInDatabase(); - config.DeleteInDatabase(); + Assert.AreEqual(1, dt.Rows.Count); - tvfExtractable.DeleteInDatabase(); + Assert.AreEqual("ReleaseId", dt.Columns[0].ColumnName); - } + //should be a guid + Assert.IsTrue(dt.Rows[0][0].ToString().Length > 10); + Assert.IsTrue(dt.Rows[0][0].ToString().Contains('-')); + + selected.DeleteInDatabase(); + globalP.DeleteInDatabase(); + config.DeleteInDatabase(); + + tvfExtractable.DeleteInDatabase(); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataExport/TestExtractableTables.cs b/Rdmp.Core.Tests/DataExport/TestExtractableTables.cs index 856dc89e7f..3c35d94b7e 100644 --- a/Rdmp.Core.Tests/DataExport/TestExtractableTables.cs +++ b/Rdmp.Core.Tests/DataExport/TestExtractableTables.cs @@ -12,120 +12,124 @@ using NUnit.Framework; using Rdmp.Core.Curation.Data; using Rdmp.Core.DataExport.Data; -using ReusableLibraryCode; +using Rdmp.Core.ReusableLibraryCode; using Tests.Common.Scenarios; -namespace Rdmp.Core.Tests.DataExport +namespace Rdmp.Core.Tests.DataExport; + +[Category("Database")] +public class TestExtractableTables : TestsRequiringACohort { - [Category("Database")] - public class TestExtractableTables : TestsRequiringACohort + [Test] + public void DodgyID_CreateCohortDatabaseTable_Fails() { - [Test] - public void DodgyID_CreateCohortDatabaseTable_Fails() + var ex = Assert.Throws(() => + new ExtractableCohort(DataExportRepository, _externalCohortTable, -899)); + Assert.IsTrue(ex.Message.StartsWith("ID -899 does not exist in Cohort Definitions")); + } + + + 
[Test] + public void CreateExtractableDataSet() + { + ExtractableDataSet eds = null; + var cata = new Catalogue(CatalogueRepository, "TestExtractableTables Cata"); + try { - var ex = Assert.Throws(() => new ExtractableCohort(DataExportRepository, _externalCohortTable,-899)); - Assert.IsTrue(ex.Message.StartsWith("ID -899 does not exist in Cohort Definitions")); + //creates with a Null Catalogue until it is associated with a catalogue and saved + eds = new ExtractableDataSet(DataExportRepository, cata); + Assert.AreEqual(cata.ID, eds.Catalogue_ID); } + finally + { + eds?.DeleteInDatabase(); - - [Test] - public void CreateExtractableDataSet() - { - - ExtractableDataSet eds = null; - var cata = new Catalogue(CatalogueRepository,"TestExtractableTables Cata"); - try - { - //creates with a Null Catalogue until it is associated with a catalogue and saved - eds = new ExtractableDataSet(DataExportRepository, cata); - Assert.AreEqual(cata.ID,eds.Catalogue_ID); - - } - finally - { - if (eds != null) - eds.DeleteInDatabase(); - - cata.DeleteInDatabase(); - } + cata.DeleteInDatabase(); } + } + + [Test] + public void UpdateProjectDatabaseTable() + { + var table = new Project(DataExportRepository, "unit_test_UpdateProjectDatabaseTable"); - [Test] - public void UpdateProjectDatabaseTable() + try { - Project table = new Project(DataExportRepository, "unit_test_UpdateProjectDatabaseTable"); - - try - { - Assert.AreEqual(table.Name, "unit_test_UpdateProjectDatabaseTable"); - table.Name = "unit_test_UpdateProjectDatabaseTable2"; - table.SaveToDatabase(); - - //get fresh copy from database and ensure that all fields are the same - var tableAfter = DataExportRepository.GetObjectByID(table.ID); - PropertyValuesAreEquals(table, tableAfter); - } - finally - { - table.DeleteInDatabase(); - } + Assert.AreEqual(table.Name, "unit_test_UpdateProjectDatabaseTable"); + table.Name = "unit_test_UpdateProjectDatabaseTable2"; + table.SaveToDatabase(); + //get fresh copy from database and ensure that all fields are the same + var tableAfter = DataExportRepository.GetObjectByID(table.ID); + PropertyValuesAreEquals(table, tableAfter); } - - [Test] - public void CreateExtractionConfiguration() + finally { - Project parent = new Project(DataExportRepository, "unit_test_CreateExtractionConfiguration"); - - ExtractionConfiguration table = new ExtractionConfiguration(DataExportRepository, parent); - - try - { - Assert.AreEqual(table.Username, Environment.UserName); - } - finally - { - table.DeleteInDatabase();//must delete child before parent to preserve referential integrity - parent.DeleteInDatabase(); - } + table.DeleteInDatabase(); } + } + + [Test] + public void CreateExtractionConfiguration() + { + var parent = new Project(DataExportRepository, "unit_test_CreateExtractionConfiguration"); + var table = new ExtractionConfiguration(DataExportRepository, parent); - #region helper methods - public static void PropertyValuesAreEquals(object actual, object expected) + try { - PropertyInfo[] properties = expected.GetType().GetProperties() - .Where(info => !Attribute.IsDefined(info, typeof (DoNotExtractProperty))).ToArray(); - - foreach (PropertyInfo property in properties) - { - //count goes to the database and works out how many people are in the Cohort table underneath ! 
don't do that for fictional Test tables - if (property.Name.StartsWith("Count")) - continue; - - object expectedValue = property.GetValue(expected, null); - object actualValue = property.GetValue(actual, null); - - if(expectedValue is SqlCommand && actualValue is SqlCommand) //dont compare sql commands they will be subtly different or just refer to different objects iwth the exact same values - continue; - - if (actualValue is IList) - AssertListsAreEquals(property, (IList)actualValue, (IList)expectedValue); - else if (!Equals(expectedValue, actualValue)) - Assert.Fail("Property {0}.{1} does not match. Expected: {2} but was: {3}", property.DeclaringType.Name, property.Name, expectedValue, actualValue); - } + Assert.AreEqual(table.Username, Environment.UserName); } - - private static void AssertListsAreEquals(PropertyInfo property, IList actualList, IList expectedList) + finally { - if (actualList.Count != expectedList.Count) - Assert.Fail("Property {0}.{1} does not match. Expected IList containing {2} elements but was IList containing {3} elements", property.PropertyType.Name, property.Name, expectedList.Count, actualList.Count); + table.DeleteInDatabase(); //must delete child before parent to preserve referential integrity + parent.DeleteInDatabase(); + } + } - for (int i = 0; i < actualList.Count; i++) - if (!Equals(actualList[i], expectedList[i])) - Assert.Fail("Property {0}.{1} does not match. Expected IList with element {1} equals to {2} but was IList with element {1} equals to {3}", property.PropertyType.Name, property.Name, expectedList[i], actualList[i]); + + #region helper methods + + public static void PropertyValuesAreEquals(object actual, object expected) + { + var properties = expected.GetType().GetProperties() + .Where(info => !Attribute.IsDefined(info, typeof(DoNotExtractProperty))).ToArray(); + + foreach (var property in properties) + { + //count goes to the database and works out how many people are in the Cohort table underneath ! don't do that for fictional Test tables + if (property.Name.StartsWith("Count")) + continue; + + var expectedValue = property.GetValue(expected, null); + var actualValue = property.GetValue(actual, null); + + if (expectedValue is SqlCommand && + actualValue is SqlCommand) //don't compare sql commands they will be subtly different or just refer to different objects iwth the exact same values + continue; + + if (actualValue is IList list) + AssertListsAreEquals(property, list, (IList)expectedValue); + else if (!Equals(expectedValue, actualValue)) + Assert.Fail("Property {0}.{1} does not match. Expected: {2} but was: {3}", property.DeclaringType?.Name, + property.Name, expectedValue, actualValue); } - #endregion } -} + + private static void AssertListsAreEquals(PropertyInfo property, IList actualList, IList expectedList) + { + if (actualList.Count != expectedList.Count) + Assert.Fail( + "Property {0}.{1} does not match. Expected IList containing {2} elements but was IList containing {3} elements", + property.PropertyType.Name, property.Name, expectedList.Count, actualList.Count); + + for (var i = 0; i < actualList.Count; i++) + if (!Equals(actualList[i], expectedList[i])) + Assert.Fail( + "Property {0}.{1} does not match. 
Expected IList with element {1} equals to {2} but was IList with element {1} equals to {3}", + property.PropertyType.Name, property.Name, expectedList[i], actualList[i]); + } + + #endregion +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/DatabaseManagement/EntityNaming/HICDatabaseConfigurationTests.cs b/Rdmp.Core.Tests/DataLoad/Engine/DatabaseManagement/EntityNaming/HICDatabaseConfigurationTests.cs index 9fa901fc97..f997c0f00c 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/DatabaseManagement/EntityNaming/HICDatabaseConfigurationTests.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/DatabaseManagement/EntityNaming/HICDatabaseConfigurationTests.cs @@ -8,7 +8,7 @@ using System.Linq; using FAnsi; using FAnsi.Discovery; -using Moq; +using NSubstitute; using NUnit.Framework; using Rdmp.Core.Curation.Data; using Rdmp.Core.Curation.Data.EntityNaming; @@ -16,57 +16,55 @@ using Rdmp.Core.DataLoad.Engine.Job; using Tests.Common; -namespace Rdmp.Core.Tests.DataLoad.Engine.DatabaseManagement.EntityNaming +namespace Rdmp.Core.Tests.DataLoad.Engine.DatabaseManagement.EntityNaming; + +internal class HICDatabaseConfigurationTests : UnitTests { - class HICDatabaseConfigurationTests: UnitTests + /// <summary> + /// Tests the ability of HICDatabaseConfiguration to predict where tables will exist + /// during a load at various stages (RAW, STAGING etc). This is largely controlled by what tables the + /// job says it loads and what the names should be according to + /// the namer. + /// </summary> + /// <param name="testLookup"></param> + [TestCase(true)] + [TestCase(false)] + public void TestHICDatabaseConfiguration_ExpectTables(bool testLookup) { + var conf = new HICDatabaseConfiguration(new DiscoveredServer("localhost", "mydb", + DatabaseType.MicrosoftSQLServer, null, null), new FixedStagingDatabaseNamer("mydb")); - /// <summary> - /// Tests the ability of HICDatabaseConfiguration to predict where tables will exist - /// during a load at various stages (RAW, STAGING etc). This is largely controlled by what tables the - /// job says it loads and what the names should be according to - /// the namer. - /// </summary> - /// <param name="testLookup"></param> - [TestCase(true)] - [TestCase(false)] - public void TestHICDatabaseConfiguration_ExpectTables(bool testLookup) - { - var conf = new HICDatabaseConfiguration(new DiscoveredServer("localhost", "mydb", - DatabaseType.MicrosoftSQLServer, null, null), new FixedStagingDatabaseNamer("mydb")); + var ti = WhenIHaveA(); + var lookup = WhenIHaveA(); + lookup.Name = "MyHeartyLookup"; + lookup.Database = "LookupsDb"; + lookup.SaveToDatabase(); - var ti = WhenIHaveA(); - var lookup = WhenIHaveA(); - lookup.Name = "MyHeartyLookup"; - lookup.Database = "LookupsDb"; - lookup.SaveToDatabase(); + var job = Substitute.For(); + job.RegularTablesToLoad.Returns(new List(new[] { ti })); + job.LookupTablesToLoad.Returns(new List(new[] { lookup })); - var job = Mock.Of(m=> - m.RegularTablesToLoad == new List(new []{ti}) && - m.LookupTablesToLoad == new List(new []{lookup})); - - var result = conf.ExpectTables(job, LoadBubble.Raw, testLookup).ToArray(); + var result = conf.ExpectTables(job, LoadBubble.Raw, testLookup).ToArray(); - Assert.AreEqual(testLookup ? 2 : 1,result.Length); - StringAssert.AreEqualIgnoringCase("mydb_RAW",result[0].Database.GetRuntimeName()); - StringAssert.AreEqualIgnoringCase("My_Table",result[0].GetRuntimeName()); + Assert.AreEqual(testLookup ? 
2 : 1, result.Length); + StringAssert.AreEqualIgnoringCase("mydb_RAW", result[0].Database.GetRuntimeName()); + StringAssert.AreEqualIgnoringCase("My_Table", result[0].GetRuntimeName()); - if (testLookup) - { - StringAssert.AreEqualIgnoringCase("mydb_RAW",result[1].Database.GetRuntimeName()); - StringAssert.AreEqualIgnoringCase("MyHeartyLookup",result[1].GetRuntimeName()); - } + if (testLookup) + { + StringAssert.AreEqualIgnoringCase("mydb_RAW", result[1].Database.GetRuntimeName()); + StringAssert.AreEqualIgnoringCase("MyHeartyLookup", result[1].GetRuntimeName()); + } - result = conf.ExpectTables(job, LoadBubble.Staging, testLookup).ToArray(); - Assert.AreEqual(testLookup ? 2 : 1,result.Length); - StringAssert.AreEqualIgnoringCase("DLE_STAGING",result[0].Database.GetRuntimeName()); - StringAssert.AreEqualIgnoringCase("mydb_My_Table_STAGING",result[0].GetRuntimeName()); + result = conf.ExpectTables(job, LoadBubble.Staging, testLookup).ToArray(); + Assert.AreEqual(testLookup ? 2 : 1, result.Length); + StringAssert.AreEqualIgnoringCase("DLE_STAGING", result[0].Database.GetRuntimeName()); + StringAssert.AreEqualIgnoringCase("mydb_My_Table_STAGING", result[0].GetRuntimeName()); - if (testLookup) - { - StringAssert.AreEqualIgnoringCase("DLE_STAGING",result[1].Database.GetRuntimeName()); - StringAssert.AreEqualIgnoringCase("mydb_MyHeartyLookup_STAGING",result[1].GetRuntimeName()); - } + if (testLookup) + { + StringAssert.AreEqualIgnoringCase("DLE_STAGING", result[1].Database.GetRuntimeName()); + StringAssert.AreEqualIgnoringCase("mydb_MyHeartyLookup_STAGING", result[1].GetRuntimeName()); } } } \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/DatabaseManagement/TableInfoCloneOperationTests.cs b/Rdmp.Core.Tests/DataLoad/Engine/DatabaseManagement/TableInfoCloneOperationTests.cs index a1a937599b..553f1115f4 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/DatabaseManagement/TableInfoCloneOperationTests.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/DatabaseManagement/TableInfoCloneOperationTests.cs @@ -8,37 +8,35 @@ using Rdmp.Core.Curation.Data; using Rdmp.Core.DataLoad.Engine.DatabaseManagement.EntityNaming; using Rdmp.Core.DataLoad.Engine.DatabaseManagement.Operations; -using ReusableLibraryCode.Progress; -using System; -using System.Collections.Generic; using System.Data; -using System.Text; +using Rdmp.Core.ReusableLibraryCode.Progress; using Tests.Common; -namespace Rdmp.Core.Tests.DataLoad.Engine.DatabaseManagement +namespace Rdmp.Core.Tests.DataLoad.Engine.DatabaseManagement; + +internal class TableInfoCloneOperationTests : DatabaseTests { - class TableInfoCloneOperationTests : DatabaseTests + [Test] + public void Test_CloneTable() { - [Test] - public void Test_CloneTable() - { - var dt = new DataTable(); - dt.Columns.Add("FF"); - - var db = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer); - var tbl = db.CreateTable("MyTable",dt); - - Import(tbl,out var ti, out _); - - var config = new HICDatabaseConfiguration(tbl.Database.Server); - - //create a RAW table schema called TableName_Isolation - var cloner = new TableInfoCloneOperation(config,(TableInfo)ti,LoadBubble.Live,new ThrowImmediatelyDataLoadEventListener()); - cloner.CloneTable(tbl.Database, tbl.Database,tbl, tbl.GetRuntimeName() + "_copy", true, true, true, ti.PreLoadDiscardedColumns); - - var tbl2 = tbl.Database.ExpectTable(tbl.GetRuntimeName() + "_copy"); - - Assert.IsTrue(tbl2.Exists()); - } + var dt = new DataTable(); + dt.Columns.Add("FF"); + + var db = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer); + 
var tbl = db.CreateTable("MyTable", dt); + + Import(tbl, out var ti, out _); + + var config = new HICDatabaseConfiguration(tbl.Database.Server); + + //create a RAW table schema called TableName_Isolation + var cloner = new TableInfoCloneOperation(config, (TableInfo)ti, LoadBubble.Live, + ThrowImmediatelyDataLoadEventListener.Quiet); + cloner.CloneTable(tbl.Database, tbl.Database, tbl, $"{tbl.GetRuntimeName()}_copy", true, true, true, + ti.PreLoadDiscardedColumns); + + var tbl2 = tbl.Database.ExpectTable($"{tbl.GetRuntimeName()}_copy"); + + Assert.IsTrue(tbl2.Exists()); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/BackfillSqlHelperTests.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/BackfillSqlHelperTests.cs index 179dac9b2b..a6a161b922 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/BackfillSqlHelperTests.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/BackfillSqlHelperTests.cs @@ -8,162 +8,159 @@ using System.Collections.Generic; using System.Data.Common; using System.Linq; -using FAnsi; using FAnsi.Discovery; using NUnit.Framework; using Rdmp.Core.Curation; using Rdmp.Core.Curation.Data; using Rdmp.Core.DataLoad.Modules.Mutilators.QueryBuilders; using Rdmp.Core.DataLoad.Triggers; -using Tests.Common; using Tests.Common.Scenarios; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration -{ - public class BackfillSqlHelperTests : FromToDatabaseTests - { - private ICatalogue _catalogue; - - #region Housekeeping +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration; - [SetUp] - protected override void SetUp() - { - base.SetUp(); - - DeleteTables(From); - DeleteTables(To); - } - - #endregion - - [Test] - public void TestGenerateSqlForThreeLevelJoinPath_TimePeriodIsGrandparent() - { - ThreeTableSetupWhereTimePeriodIsGrandparent(); +public class BackfillSqlHelperTests : FromToDatabaseTests +{ + private ICatalogue _catalogue; - var ciTimePeriodicity = CatalogueRepository.GetAllObjects().SingleOrDefault(c => c.GetRuntimeName().Equals("HeaderDate")); - if (ciTimePeriodicity == null) - throw new InvalidOperationException("Could not find TimePeriodicity column"); + #region Housekeeping - var sqlHelper = new BackfillSqlHelper(ciTimePeriodicity, From, To); + [SetUp] + protected override void SetUp() + { + base.SetUp(); - var tiHeader = CatalogueRepository.GetAllObjects().Single(t=>t.GetRuntimeName().Equals("Headers")); - var tiSamples = CatalogueRepository.GetAllObjects().Single(t => t.GetRuntimeName().Equals("Samples")); - var tiResults = CatalogueRepository.GetAllObjects().Single(t => t.GetRuntimeName().Equals("Results")); + DeleteTables(From); + DeleteTables(To); + } - var joinInfos = CatalogueRepository.GetAllObjects(); - var joinPath = new List - { - joinInfos.Single(info => info.PrimaryKey.TableInfo_ID == tiHeader.ID), - joinInfos.Single(info => info.PrimaryKey.TableInfo_ID == tiSamples.ID) - }; + #endregion - var sql = sqlHelper.CreateSqlForJoinToTimePeriodicityTable("CurrentTable", tiResults, "TimePeriodicityTable", From, joinPath); + [Test] + public void TestGenerateSqlForThreeLevelJoinPath_TimePeriodIsGrandparent() + { + ThreeTableSetupWhereTimePeriodIsGrandparent(); + + var ciTimePeriodicity = + CatalogueRepository.GetAllObjects() + .SingleOrDefault(c => c.GetRuntimeName().Equals("HeaderDate")) ?? 
+ throw new InvalidOperationException("Could not find TimePeriodicity column"); + var sqlHelper = new BackfillSqlHelper(ciTimePeriodicity, From, To); + + var tiHeader = CatalogueRepository.GetAllObjects().Single(t => t.GetRuntimeName().Equals("Headers")); + var tiSamples = CatalogueRepository.GetAllObjects() + .Single(t => t.GetRuntimeName().Equals("Samples")); + var tiResults = CatalogueRepository.GetAllObjects() + .Single(t => t.GetRuntimeName().Equals("Results")); + + var joinInfos = CatalogueRepository.GetAllObjects(); + var joinPath = new List + { + joinInfos.Single(info => info.PrimaryKey.TableInfo_ID == tiHeader.ID), + joinInfos.Single(info => info.PrimaryKey.TableInfo_ID == tiSamples.ID) + }; + var sql = sqlHelper.CreateSqlForJoinToTimePeriodicityTable("CurrentTable", tiResults, "TimePeriodicityTable", + From, joinPath); - Assert.AreEqual(string.Format(@"SELECT CurrentTable.*, TimePeriodicityTable.HeaderDate AS TimePeriodicityField + Assert.AreEqual(string.Format(@"SELECT CurrentTable.*, TimePeriodicityTable.HeaderDate AS TimePeriodicityField FROM [{0}]..[Results] CurrentTable LEFT JOIN [{0}]..[Samples] j1 ON j1.ID = CurrentTable.SampleID LEFT JOIN [{0}]..[Headers] TimePeriodicityTable ON TimePeriodicityTable.ID = j1.HeaderID", - From.GetRuntimeName()), sql); - } - - private void ThreeTableSetupWhereTimePeriodIsGrandparent() - { - CreateTables("Headers", "ID int NOT NULL, HeaderDate DATETIME, Discipline varchar(32)", "ID"); - CreateTables("Samples", "ID int NOT NULL, HeaderID int NOT NULL, SampleType varchar(32)", "ID", "CONSTRAINT [FK_Headers_Samples] FOREIGN KEY (HeaderID) REFERENCES Headers (ID)"); - CreateTables("Results", "ID int NOT NULL, SampleID int NOT NULL, Result int", "ID", "CONSTRAINT [FK_Samples_Results] FOREIGN KEY (SampleID) REFERENCES Samples (ID)"); + From.GetRuntimeName()), sql); + } - // Set SetUp catalogue entities - ColumnInfo[] ciHeaders; - ColumnInfo[] ciSamples; - ColumnInfo[] ciResults; + private void ThreeTableSetupWhereTimePeriodIsGrandparent() + { + CreateTables("Headers", "ID int NOT NULL, HeaderDate DATETIME, Discipline varchar(32)", "ID"); + CreateTables("Samples", "ID int NOT NULL, HeaderID int NOT NULL, SampleType varchar(32)", "ID", + "CONSTRAINT [FK_Headers_Samples] FOREIGN KEY (HeaderID) REFERENCES Headers (ID)"); + CreateTables("Results", "ID int NOT NULL, SampleID int NOT NULL, Result int", "ID", + "CONSTRAINT [FK_Samples_Results] FOREIGN KEY (SampleID) REFERENCES Samples (ID)"); - var tiHeaders = AddTableToCatalogue(DatabaseName, "Headers", "ID", out ciHeaders, true); - AddTableToCatalogue(DatabaseName, "Samples", "ID", out ciSamples); - AddTableToCatalogue(DatabaseName, "Results", "ID", out ciResults); + // Set SetUp catalogue entities - _catalogue.Time_coverage = "[Headers].[HeaderDate]"; - _catalogue.SaveToDatabase(); + var tiHeaders = AddTableToCatalogue(DatabaseName, "Headers", "ID", out var ciHeaders, true); + AddTableToCatalogue(DatabaseName, "Samples", "ID", out var ciSamples); + AddTableToCatalogue(DatabaseName, "Results", "ID", out var ciResults); - tiHeaders.IsPrimaryExtractionTable = true; - tiHeaders.SaveToDatabase(); + _catalogue.Time_coverage = "[Headers].[HeaderDate]"; + _catalogue.SaveToDatabase(); - Assert.AreEqual(15, _catalogue.CatalogueItems.Count(), "Unexpected number of items in catalogue"); + tiHeaders.IsPrimaryExtractionTable = true; + tiHeaders.SaveToDatabase(); - // Headers (1:M) Samples join - new JoinInfo(CatalogueRepository,ciSamples.Single(ci => ci.GetRuntimeName().Equals("HeaderID")), - ciHeaders.Single(ci => 
ci.GetRuntimeName().Equals("ID")), - ExtractionJoinType.Left, ""); + Assert.AreEqual(15, _catalogue.CatalogueItems.Length, "Unexpected number of items in catalogue"); - // Samples (1:M) Results join - new JoinInfo(CatalogueRepository,ciResults.Single(info => info.GetRuntimeName().Equals("SampleID")), - ciSamples.Single(info => info.GetRuntimeName().Equals("ID")), - ExtractionJoinType.Left, ""); - } + // Headers (1:M) Samples join + new JoinInfo(CatalogueRepository, ciSamples.Single(ci => ci.GetRuntimeName().Equals("HeaderID")), + ciHeaders.Single(ci => ci.GetRuntimeName().Equals("ID")), + ExtractionJoinType.Left, ""); - private void CreateTables(string tableName, string columnDefinitions, string pkColumn, string fkConstraintString = null) - { - // todo: doesn't do combo primary keys yet + // Samples (1:M) Results join + new JoinInfo(CatalogueRepository, ciResults.Single(info => info.GetRuntimeName().Equals("SampleID")), + ciSamples.Single(info => info.GetRuntimeName().Equals("ID")), + ExtractionJoinType.Left, ""); + } - if (pkColumn == null || string.IsNullOrWhiteSpace(pkColumn)) - throw new InvalidOperationException("Primary Key column is required."); + private void CreateTables(string tableName, string columnDefinitions, string pkColumn, + string fkConstraintString = null) + { + // todo: doesn't do combo primary keys yet - var pkConstraint = String.Format("CONSTRAINT PK_{0} PRIMARY KEY ({1})", tableName, pkColumn); - var stagingTableDefinition = columnDefinitions + ", " + pkConstraint; - var liveTableDefinition = columnDefinitions + String.Format(", "+SpecialFieldNames.ValidFrom+" DATETIME, "+SpecialFieldNames.DataLoadRunID+" int, " + pkConstraint); + if (pkColumn == null || string.IsNullOrWhiteSpace(pkColumn)) + throw new InvalidOperationException("Primary Key column is required."); - if (fkConstraintString != null) - { - stagingTableDefinition += ", " + fkConstraintString; - liveTableDefinition += ", " + fkConstraintString; - } + var pkConstraint = $"CONSTRAINT PK_{tableName} PRIMARY KEY ({pkColumn})"; + var stagingTableDefinition = $"{columnDefinitions}, {pkConstraint}"; + var liveTableDefinition = + $"{columnDefinitions}, {SpecialFieldNames.ValidFrom} DATETIME, {SpecialFieldNames.DataLoadRunID} int, {pkConstraint}"; - CreateTableWithColumnDefinitions(From,tableName, stagingTableDefinition); - CreateTableWithColumnDefinitions(To,tableName, liveTableDefinition); + if (fkConstraintString != null) + { + stagingTableDefinition += $", {fkConstraintString}"; + liveTableDefinition += $", {fkConstraintString}"; } - private ITableInfo AddTableToCatalogue(string databaseName, string tableName, string pkName, out ColumnInfo[] ciList, bool createCatalogue = false) - { - var expectedTable = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(databaseName).ExpectTable(tableName); + CreateTableWithColumnDefinitions(From, tableName, stagingTableDefinition); + CreateTableWithColumnDefinitions(To, tableName, liveTableDefinition); + } - var resultsImporter = new TableInfoImporter(CatalogueRepository, expectedTable); + private ITableInfo AddTableToCatalogue(string databaseName, string tableName, string pkName, + out ColumnInfo[] ciList, bool createCatalogue = false) + { + var expectedTable = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(databaseName) + .ExpectTable(tableName); - resultsImporter.DoImport(out var ti, out ciList); + var resultsImporter = new TableInfoImporter(CatalogueRepository, expectedTable); - var pkResult = ciList.Single(info => 
info.GetRuntimeName().Equals(pkName)); - pkResult.IsPrimaryKey = true; - pkResult.SaveToDatabase(); + resultsImporter.DoImport(out var ti, out ciList); - var forwardEngineer = new ForwardEngineerCatalogue(ti, ciList); - if (createCatalogue) - { - CatalogueItem[] cataItems; - ExtractionInformation[] extractionInformations; + var pkResult = ciList.Single(info => info.GetRuntimeName().Equals(pkName)); + pkResult.IsPrimaryKey = true; + pkResult.SaveToDatabase(); - forwardEngineer.ExecuteForwardEngineering(out _catalogue, out cataItems, out extractionInformations); - } - else - forwardEngineer.ExecuteForwardEngineering(_catalogue); + var forwardEngineer = new ForwardEngineerCatalogue(ti, ciList); + if (createCatalogue) + forwardEngineer.ExecuteForwardEngineering(out _catalogue, out _, out _); + else + forwardEngineer.ExecuteForwardEngineering(_catalogue); - return ti; - } + return ti; + } - public void CreateTableWithColumnDefinitions(DiscoveredDatabase db, string tableName, string columnDefinitions) - { - using (var conn = db.Server.GetConnection()) - { - conn.Open(); - CreateTableWithColumnDefinitions(db,tableName, columnDefinitions, conn); - } - } + public static void CreateTableWithColumnDefinitions(DiscoveredDatabase db, string tableName, + string columnDefinitions) + { + using var conn = db.Server.GetConnection(); + conn.Open(); + CreateTableWithColumnDefinitions(db, tableName, columnDefinitions, conn); + } - public void CreateTableWithColumnDefinitions(DiscoveredDatabase db, string tableName, string columnDefinitions, DbConnection conn) - { - var sql = "CREATE TABLE " + tableName + " (" + columnDefinitions + ")"; - db.Server.GetCommand(sql, conn).ExecuteNonQuery(); - } + public static void CreateTableWithColumnDefinitions(DiscoveredDatabase db, string tableName, + string columnDefinitions, DbConnection conn) + { + var sql = $"CREATE TABLE {tableName} ({columnDefinitions})"; + db.Server.GetCommand(sql, conn).ExecuteNonQuery(); } } \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/BackfillTests.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/BackfillTests.cs index 79efbfb5dd..e6a7e1800f 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/BackfillTests.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/BackfillTests.cs @@ -5,7 +5,6 @@ // You should have received a copy of the GNU General Public License along with RDMP. If not, see . 
using System; -using System.Collections.Generic; using Microsoft.Data.SqlClient; using System.Linq; using NUnit.Framework; @@ -16,1069 +15,1115 @@ using Rdmp.Core.DataLoad.Engine.Job; using Rdmp.Core.DataLoad.Modules.Mutilators; using Rdmp.Core.DataLoad.Triggers; -using ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Checks; using Tests.Common.Scenarios; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration -{ - public class BackfillTests : FromToDatabaseTests - { - - private ICatalogue _catalogue; - - - [SetUp] - protected override void SetUp() - { - base.SetUp(); - - BlitzMainDataTables(); - - DeleteTables(From); - DeleteTables(To); - } +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration; +public class BackfillTests : FromToDatabaseTests +{ + private ICatalogue _catalogue; - [Test] - public void Backfill_SingleTable_LoadContainsNewerUpdate() - { - SingleTableSetup(); - #region Insert test data - // add To data - using (var connection = (SqlConnection)To.Server.GetConnection()) - { - connection.Open(); + [SetUp] + protected override void SetUp() + { + base.SetUp(); - var cmd = new SqlCommand("INSERT INTO [Samples] (ID, SampleDate, Description, "+SpecialFieldNames.ValidFrom+", "+SpecialFieldNames.DataLoadRunID+") VALUES " + - "(10, '2016-01-10T12:00:00', 'Earlier than corresponding new data, should be updated', '2016-01-10T12:00:00', 1)", connection); - cmd.ExecuteNonQuery(); - } + BlitzMainDataTables(); - // add From data - using (var connection = (SqlConnection)From.Server.GetConnection()) - { - connection.Open(); + DeleteTables(From); + DeleteTables(To); + } - // newer update - var cmd = new SqlCommand("INSERT INTO [Samples] (ID, SampleDate, Description) VALUES " + - "(10, '2016-01-11T12:00:00', 'Newer than in To, should update To')", connection); - cmd.ExecuteNonQuery(); - } - #endregion - // databases are now represent state after push to From and before migration - Mutilate("[" + DatabaseName + "].[dbo].[Samples].[SampleDate]"); + [Test] + public void Backfill_SingleTable_LoadContainsNewerUpdate() + { + SingleTableSetup(); - // check that From contains the correct data - using (var connection = (SqlConnection)From.Server.GetConnection()) - { - connection.Open(); + #region Insert test data - var cmd = new SqlCommand(@"SELECT COUNT(*) FROM Samples", connection); - var numRows = cmd.ExecuteScalar(); - Assert.AreEqual(1, numRows, "Should still be 1 record, this would be migrated to To"); + // add To data + using (var connection = (SqlConnection)To.Server.GetConnection()) + { + connection.Open(); - cmd = new SqlCommand(@"SELECT Description FROM Samples", connection); - var description = cmd.ExecuteScalar().ToString(); - Assert.AreEqual(description, "Newer than in To, should update To", "Description has been altered but is a valid update to To so should not have been touched."); - } + var cmd = new SqlCommand( + $"INSERT INTO [Samples] (ID, SampleDate, Description, {SpecialFieldNames.ValidFrom}, {SpecialFieldNames.DataLoadRunID}) VALUES (10, '2016-01-10T12:00:00', 'Earlier than corresponding new data, should be updated', '2016-01-10T12:00:00', 1)", + connection); + cmd.ExecuteNonQuery(); } - private void SingleTableSetup() + // add From data + using (var connection = (SqlConnection)From.Server.GetConnection()) { - CreateTables("Samples", "ID int NOT NULL, SampleDate DATETIME, Description varchar(1024)", "ID"); - - // Set SetUp catalogue entities - ColumnInfo[] ciSamples; - AddTableToCatalogue(DatabaseName, "Samples", "ID", out ciSamples, true); + connection.Open(); - 
Assert.AreEqual(5, _catalogue.CatalogueItems.Count(), "Unexpected number of items in catalogue"); + // newer update + var cmd = new SqlCommand("INSERT INTO [Samples] (ID, SampleDate, Description) VALUES " + + "(10, '2016-01-11T12:00:00', 'Newer than in To, should update To')", connection); + cmd.ExecuteNonQuery(); } - private void Mutilate(string timeColumnName) + #endregion + + // databases are now represent state after push to From and before migration + Mutilate($"[{DatabaseName}].[dbo].[Samples].[SampleDate]"); + + // check that From contains the correct data + using (var connection = (SqlConnection)From.Server.GetConnection()) { - var mutilator = new StagingBackfillMutilator - { - TimePeriodicityField = CatalogueRepository.GetAllObjects().Single(c=>c.Name.Equals(timeColumnName)), - TestContext = true, - TableNamingScheme = new IdentityTableNamingScheme() - }; - - mutilator.Initialize(From, LoadStage.AdjustStaging); - mutilator.Check(new ThrowImmediatelyCheckNotifier()); - mutilator.Mutilate(new ThrowImmediatelyDataLoadJob(To.Server)); + connection.Open(); + + var cmd = new SqlCommand(@"SELECT COUNT(*) FROM Samples", connection); + var numRows = cmd.ExecuteScalar(); + Assert.AreEqual(1, numRows, "Should still be 1 record, this would be migrated to To"); + + cmd = new SqlCommand(@"SELECT Description FROM Samples", connection); + var description = cmd.ExecuteScalar().ToString(); + Assert.AreEqual(description, "Newer than in To, should update To", + "Description has been altered but is a valid update to To so should not have been touched."); } + } - [Test] - public void Backfill_SingleTable_LoadContainsOlderUpdate() - { - SingleTableSetup(); + private void SingleTableSetup() + { + CreateTables("Samples", "ID int NOT NULL, SampleDate DATETIME, Description varchar(1024)", "ID"); - #region Insert test data - // add To data - using (var connection = (SqlConnection)To.Server.GetConnection()) - { - connection.Open(); + // Set SetUp catalogue entities + AddTableToCatalogue(DatabaseName, "Samples", "ID", out _, true); - var cmd = new SqlCommand("INSERT INTO [Samples] (ID, SampleDate, Description, " + SpecialFieldNames.ValidFrom + ", " + SpecialFieldNames.DataLoadRunID + ") VALUES " + - "(1, '2016-01-10T12:00:00', 'Later than corresponding new data, should not be updated', '2016-01-10T12:00:00', 1)", connection); - cmd.ExecuteNonQuery(); - } + Assert.AreEqual(5, _catalogue.CatalogueItems.Length, "Unexpected number of items in catalogue"); + } - // add From data - using (var connection = (SqlConnection)From.Server.GetConnection()) - { - connection.Open(); + private void Mutilate(string timeColumnName) + { + var mutilator = new StagingBackfillMutilator + { + TimePeriodicityField = CatalogueRepository.GetAllObjects() + .Single(c => c.Name.Equals(timeColumnName)), + TestContext = true, + TableNamingScheme = new IdentityTableNamingScheme() + }; + + mutilator.Initialize(From, LoadStage.AdjustStaging); + mutilator.Check(ThrowImmediatelyCheckNotifier.Quiet); + mutilator.Mutilate(new ThrowImmediatelyDataLoadJob(To.Server)); + } - // newer update - var cmd = new SqlCommand("INSERT INTO [Samples] (ID, SampleDate, Description) VALUES " + - "(1, '2016-01-09T12:00:00', 'Older than in To, should be deleted by the mutilator')", connection); - cmd.ExecuteNonQuery(); - } - #endregion + [Test] + public void Backfill_SingleTable_LoadContainsOlderUpdate() + { + SingleTableSetup(); - // databases are now represent state after push to From and before migration - Mutilate("[" + DatabaseName + 
"].[dbo].[Samples].[SampleDate]"); + #region Insert test data - // check that From contains the correct data - using (var connection = (SqlConnection)From.Server.GetConnection()) - { - connection.Open(); + // add To data + using (var connection = (SqlConnection)To.Server.GetConnection()) + { + connection.Open(); - var cmd = new SqlCommand(@"SELECT COUNT(*) FROM Samples", connection); - var numRows = cmd.ExecuteScalar(); - Assert.AreEqual(0, numRows, "The record to be loaded is older than the corresponding record in To, should have been deleted"); - } + var cmd = new SqlCommand( + $"INSERT INTO [Samples] (ID, SampleDate, Description, {SpecialFieldNames.ValidFrom}, {SpecialFieldNames.DataLoadRunID}) VALUES (1, '2016-01-10T12:00:00', 'Later than corresponding new data, should not be updated', '2016-01-10T12:00:00', 1)", + connection); + cmd.ExecuteNonQuery(); } - [Test] - public void Backfill_SingleTable_LoadContainsInsert() + // add From data + using (var connection = (SqlConnection)From.Server.GetConnection()) { - SingleTableSetup(); + connection.Open(); - #region Insert test data - // add To data - using (var connection = (SqlConnection)To.Server.GetConnection()) - { - connection.Open(); + // newer update + var cmd = new SqlCommand("INSERT INTO [Samples] (ID, SampleDate, Description) VALUES " + + "(1, '2016-01-09T12:00:00', 'Older than in To, should be deleted by the mutilator')", + connection); + cmd.ExecuteNonQuery(); + } - var cmd = new SqlCommand("INSERT INTO [Samples] (ID, SampleDate, Description, hic_validFrom, hic_dataLoadRunID) VALUES " + - "(1, '2016-01-10T12:00:00', 'Later than corresponding new data, should not be updated', '2016-01-10T12:00:00', 1)", connection); - cmd.ExecuteNonQuery(); - } + #endregion - // add From data - using (var connection = (SqlConnection)From.Server.GetConnection()) - { - connection.Open(); + // databases are now represent state after push to From and before migration + Mutilate($"[{DatabaseName}].[dbo].[Samples].[SampleDate]"); - // newer update - var cmd = new SqlCommand("INSERT INTO [Samples] (ID, SampleDate, Description) VALUES " + - "(2, '2016-01-09T12:00:00', 'Does not exist in To, should remain in From after mutilation.')", connection); - cmd.ExecuteNonQuery(); - } - #endregion + // check that From contains the correct data + using (var connection = (SqlConnection)From.Server.GetConnection()) + { + connection.Open(); - // databases are now represent state after push to From and before migration - Mutilate("[" + DatabaseName + "].[dbo].[Samples].[SampleDate]"); + var cmd = new SqlCommand(@"SELECT COUNT(*) FROM Samples", connection); + var numRows = cmd.ExecuteScalar(); + Assert.AreEqual(0, numRows, + "The record to be loaded is older than the corresponding record in To, should have been deleted"); + } + } - // check that From contains the correct data - using (var connection = (SqlConnection)From.Server.GetConnection()) - { - connection.Open(); + [Test] + public void Backfill_SingleTable_LoadContainsInsert() + { + SingleTableSetup(); - var cmd = new SqlCommand(@"SELECT COUNT(*) FROM Samples", connection); - var numRows = cmd.ExecuteScalar(); - Assert.AreEqual(1, numRows, "The record to be loaded is an insert should not have been deleted"); + #region Insert test data - cmd = new SqlCommand(@"SELECT Description FROM Samples", connection); - var description = cmd.ExecuteScalar().ToString(); - Assert.AreEqual(description, "Does not exist in To, should remain in From after mutilation.", "Description has been altered but is a valid update to To so 
should not have been touched."); + // add To data + using (var connection = (SqlConnection)To.Server.GetConnection()) + { + connection.Open(); - } + var cmd = new SqlCommand( + "INSERT INTO [Samples] (ID, SampleDate, Description, hic_validFrom, hic_dataLoadRunID) VALUES " + + "(1, '2016-01-10T12:00:00', 'Later than corresponding new data, should not be updated', '2016-01-10T12:00:00', 1)", + connection); + cmd.ExecuteNonQuery(); } - [Test] - public void Backfill_SingleTable_Combined() + // add From data + using (var connection = (SqlConnection)From.Server.GetConnection()) { - SingleTableSetup(); + connection.Open(); - #region Insert test data - // add To data - using (var connection = (SqlConnection)To.Server.GetConnection()) - { - connection.Open(); + // newer update + var cmd = new SqlCommand("INSERT INTO [Samples] (ID, SampleDate, Description) VALUES " + + "(2, '2016-01-09T12:00:00', 'Does not exist in To, should remain in From after mutilation.')", + connection); + cmd.ExecuteNonQuery(); + } - var cmd = new SqlCommand("INSERT INTO [Samples] (ID, SampleDate, Description, hic_validFrom, hic_dataLoadRunID) VALUES " + - "(1, '2016-01-10T12:00:00', 'Earlier than corresponding new data, should be updated', '2016-01-10T12:00:00', 1)," + - "(2, '2016-01-15T12:00:00', 'Later than corresponding new data, should not be updated', '2016-01-15T12:00:00', 2)", connection); - cmd.ExecuteNonQuery(); - } + #endregion - // add From data - using (var connection = (SqlConnection)From.Server.GetConnection()) - { - connection.Open(); + // databases are now represent state after push to From and before migration + Mutilate($"[{DatabaseName}].[dbo].[Samples].[SampleDate]"); - var cmd = new SqlCommand("INSERT INTO [Samples] (ID, SampleDate, Description) VALUES " + - "(1, '2016-01-12T12:00:00', 'Later than corresponding new data, should not be updated')," + - "(2, '2016-01-12T12:00:00', 'Earlier than corresponding new data, should be updated')," + - "(3, '2016-01-12T12:00:00', 'New data')", connection); - cmd.ExecuteNonQuery(); - } - #endregion + // check that From contains the correct data + using (var connection = (SqlConnection)From.Server.GetConnection()) + { + connection.Open(); + + var cmd = new SqlCommand(@"SELECT COUNT(*) FROM Samples", connection); + var numRows = cmd.ExecuteScalar(); + Assert.AreEqual(1, numRows, "The record to be loaded is an insert should not have been deleted"); - Mutilate("[" + DatabaseName + "].[dbo].[Samples].[SampleDate]"); + cmd = new SqlCommand(@"SELECT Description FROM Samples", connection); + var description = cmd.ExecuteScalar().ToString(); + Assert.AreEqual(description, "Does not exist in To, should remain in From after mutilation.", + "Description has been altered but is a valid update to To so should not have been touched."); + } + } - // todo: asserts - using (var connection = (SqlConnection)From.Server.GetConnection()) - { - connection.Open(); + [Test] + public void Backfill_SingleTable_Combined() + { + SingleTableSetup(); - var cmd = new SqlCommand(@"SELECT COUNT(*) FROM Samples", connection); - var numRows = cmd.ExecuteScalar(); - Assert.AreEqual(2, numRows, "Record 2 should have been deleted as it is an update to a record for which we have a later version."); - } + #region Insert test data + + // add To data + using (var connection = (SqlConnection)To.Server.GetConnection()) + { + connection.Open(); + var cmd = new SqlCommand( + "INSERT INTO [Samples] (ID, SampleDate, Description, hic_validFrom, hic_dataLoadRunID) VALUES " + + "(1, '2016-01-10T12:00:00', 'Earlier 
than corresponding new data, should be updated', '2016-01-10T12:00:00', 1)," + + "(2, '2016-01-15T12:00:00', 'Later than corresponding new data, should not be updated', '2016-01-15T12:00:00', 2)", + connection); + cmd.ExecuteNonQuery(); } - private void TwoTableSetupWhereTimePeriodIsParent() + // add From data + using (var connection = (SqlConnection)From.Server.GetConnection()) { - CreateTables("Samples", "ID int NOT NULL, SampleDate DATETIME, Description varchar(1024)", "ID"); - CreateTables("Results", "ID int NOT NULL, SampleID int NOT NULL, Result int", "ID", "CONSTRAINT [FK_Samples_Results] FOREIGN KEY (SampleID) REFERENCES Samples (ID)"); - - // Set SetUp catalogue entities - ColumnInfo[] ciSamples; - ColumnInfo[] ciResults; + connection.Open(); - var tiSamples = AddTableToCatalogue(DatabaseName, "Samples", "ID", out ciSamples, true); - AddTableToCatalogue(DatabaseName, "Results", "ID", out ciResults); + var cmd = new SqlCommand("INSERT INTO [Samples] (ID, SampleDate, Description) VALUES " + + "(1, '2016-01-12T12:00:00', 'Later than corresponding new data, should not be updated')," + + "(2, '2016-01-12T12:00:00', 'Earlier than corresponding new data, should be updated')," + + "(3, '2016-01-12T12:00:00', 'New data')", connection); + cmd.ExecuteNonQuery(); + } - _catalogue.Time_coverage = "[Samples].[SampleDate]"; - _catalogue.SaveToDatabase(); + #endregion - tiSamples.IsPrimaryExtractionTable = true; - tiSamples.SaveToDatabase(); + Mutilate($"[{DatabaseName}].[dbo].[Samples].[SampleDate]"); - Assert.AreEqual(10, _catalogue.CatalogueItems.Count(), "Unexpected number of items in catalogue"); + // todo: asserts + using (var connection = (SqlConnection)From.Server.GetConnection()) + { + connection.Open(); - // Samples (1:M) Results join - new JoinInfo(CatalogueRepository,ciResults.Single(info => info.GetRuntimeName().Equals("SampleID")), - ciSamples.Single(info => info.GetRuntimeName().Equals("ID")), - ExtractionJoinType.Left, ""); + var cmd = new SqlCommand(@"SELECT COUNT(*) FROM Samples", connection); + var numRows = cmd.ExecuteScalar(); + Assert.AreEqual(2, numRows, + "Record 2 should have been deleted as it is an update to a record for which we have a later version."); } + } - [Test] - public void Backfill_TwoTables_TimePeriodParent_LoadContainsNewerUpdate() - { - TwoTableSetupWhereTimePeriodIsParent(); + private void TwoTableSetupWhereTimePeriodIsParent() + { + CreateTables("Samples", "ID int NOT NULL, SampleDate DATETIME, Description varchar(1024)", "ID"); + CreateTables("Results", "ID int NOT NULL, SampleID int NOT NULL, Result int", "ID", + "CONSTRAINT [FK_Samples_Results] FOREIGN KEY (SampleID) REFERENCES Samples (ID)"); - #region Insert To test data - const string liveSamplesSql = "INSERT INTO Samples (ID, SampleDate, Description, hic_validFrom, hic_dataLoadRunID) VALUES " + - "(1, '2016-01-10T12:00:00', '', '2016-01-10T12:00:00', 1)"; + // Set SetUp catalogue entities - const string liveResultsSql = "INSERT INTO Results (ID, SampleID, Result, hic_validFrom, hic_dataLoadRunID) VALUES " + - "(10, 1, 123, '2016-01-10T12:00:00', 1), " + - "(11, 1, 234, '2016-01-10T12:00:00', 1)"; + var tiSamples = AddTableToCatalogue(DatabaseName, "Samples", "ID", out var ciSamples, true); + AddTableToCatalogue(DatabaseName, "Results", "ID", out var ciResults); - using (var connection = (SqlConnection)To.Server.GetConnection()) - { - connection.Open(); + _catalogue.Time_coverage = "[Samples].[SampleDate]"; + _catalogue.SaveToDatabase(); - var cmd = new SqlCommand(liveSamplesSql, connection); - 
cmd.ExecuteNonQuery(); + tiSamples.IsPrimaryExtractionTable = true; + tiSamples.SaveToDatabase(); - cmd = new SqlCommand(liveResultsSql, connection); - cmd.ExecuteNonQuery(); - } - #endregion + Assert.AreEqual(10, _catalogue.CatalogueItems.Length, "Unexpected number of items in catalogue"); - #region Add From test data - // add From data - const string stagingSamplesSql = "INSERT INTO Samples (ID, SampleDate, Description) VALUES " + - "(1, '2016-01-15T12:00:00', 'Sample is later than corresponding record in To, contains a child update (ID=11), child insert (ID=12) and this updated description')"; + // Samples (1:M) Results join + new JoinInfo(CatalogueRepository, ciResults.Single(info => info.GetRuntimeName().Equals("SampleID")), + ciSamples.Single(info => info.GetRuntimeName().Equals("ID")), + ExtractionJoinType.Left, ""); + } - const string stagingResultsSql = "INSERT INTO Results (ID, SampleID, Result) VALUES " + - "(10, 1, 123), " + - "(11, 1, 345), " + - "(12, 1, 456)"; + [Test] + public void Backfill_TwoTables_TimePeriodParent_LoadContainsNewerUpdate() + { + TwoTableSetupWhereTimePeriodIsParent(); - using (var connection = (SqlConnection)From.Server.GetConnection()) - { - connection.Open(); + #region Insert To test data - var cmd = new SqlCommand(stagingSamplesSql, connection); - cmd.ExecuteNonQuery(); + const string liveSamplesSql = + "INSERT INTO Samples (ID, SampleDate, Description, hic_validFrom, hic_dataLoadRunID) VALUES " + + "(1, '2016-01-10T12:00:00', '', '2016-01-10T12:00:00', 1)"; - cmd = new SqlCommand(stagingResultsSql, connection); - cmd.ExecuteNonQuery(); - } + const string liveResultsSql = + "INSERT INTO Results (ID, SampleID, Result, hic_validFrom, hic_dataLoadRunID) VALUES " + + "(10, 1, 123, '2016-01-10T12:00:00', 1), " + + "(11, 1, 234, '2016-01-10T12:00:00', 1)"; + using (var connection = (SqlConnection)To.Server.GetConnection()) + { + connection.Open(); - #endregion + var cmd = new SqlCommand(liveSamplesSql, connection); + cmd.ExecuteNonQuery(); - Mutilate("[" + DatabaseName + "].[dbo].[Samples].[SampleDate]"); + cmd = new SqlCommand(liveResultsSql, connection); + cmd.ExecuteNonQuery(); + } - // From should be exactly the same as it was before mutilation as there is a single update - using (var connection = (SqlConnection)From.Server.GetConnection()) - { - connection.Open(); + #endregion - var cmd = new SqlCommand(@"SELECT COUNT(*) FROM Samples", connection); - var numRows = cmd.ExecuteScalar(); - Assert.AreEqual(1, numRows); + #region Add From test data - cmd = new SqlCommand(@"SELECT COUNT(*) FROM Results", connection); - numRows = cmd.ExecuteScalar(); - Assert.AreEqual(3, numRows); - } - } + // add From data + const string stagingSamplesSql = "INSERT INTO Samples (ID, SampleDate, Description) VALUES " + + "(1, '2016-01-15T12:00:00', 'Sample is later than corresponding record in To, contains a child update (ID=11), child insert (ID=12) and this updated description')"; + + const string stagingResultsSql = "INSERT INTO Results (ID, SampleID, Result) VALUES " + + "(10, 1, 123), " + + "(11, 1, 345), " + + "(12, 1, 456)"; - [Test] - public void Backfill_TwoTables_TimePeriodParent_LoadContainsOlderUpdate() + using (var connection = (SqlConnection)From.Server.GetConnection()) { - TwoTableSetupWhereTimePeriodIsParent(); + connection.Open(); - #region Insert To test data - const string liveSamplesSql = "INSERT INTO Samples (ID, SampleDate, Description, hic_validFrom, hic_dataLoadRunID) VALUES " + - "(1, '2016-01-10T12:00:00', '', '2016-01-10T12:00:00', 1)"; + var cmd = 
new SqlCommand(stagingSamplesSql, connection); + cmd.ExecuteNonQuery(); - const string liveResultsSql = "INSERT INTO Results (ID, SampleID, Result, hic_validFrom, hic_dataLoadRunID) VALUES " + - "(10, 1, 123, '2016-01-10T12:00:00', 1), " + - "(11, 1, 234, '2016-01-10T12:00:00', 1)"; + cmd = new SqlCommand(stagingResultsSql, connection); + cmd.ExecuteNonQuery(); + } - using (var connection = (SqlConnection)To.Server.GetConnection()) - { - connection.Open(); + #endregion - var cmd = new SqlCommand(liveSamplesSql, connection); - cmd.ExecuteNonQuery(); + Mutilate($"[{DatabaseName}].[dbo].[Samples].[SampleDate]"); - cmd = new SqlCommand(liveResultsSql, connection); - cmd.ExecuteNonQuery(); - } - #endregion + // From should be exactly the same as it was before mutilation as there is a single update + using (var connection = (SqlConnection)From.Server.GetConnection()) + { + connection.Open(); - #region Add From test data - // add From data - const string stagingSamplesSql = "INSERT INTO Samples (ID, SampleDate, Description) VALUES " + - "(1, '2016-01-09T12:00:00', 'Sample is earlier than corresponding record in To (also contains an item which has apparently been deleted in the set used for a later load)')"; + var cmd = new SqlCommand(@"SELECT COUNT(*) FROM Samples", connection); + var numRows = cmd.ExecuteScalar(); + Assert.AreEqual(1, numRows); - const string stagingResultsSql = "INSERT INTO Results (ID, SampleID, Result) VALUES " + - "(10, 1, 123), " + - "(11, 1, 345), " + - "(12, 1, 456)"; + cmd = new SqlCommand(@"SELECT COUNT(*) FROM Results", connection); + numRows = cmd.ExecuteScalar(); + Assert.AreEqual(3, numRows); + } + } - using (var connection = (SqlConnection)From.Server.GetConnection()) - { - connection.Open(); + [Test] + public void Backfill_TwoTables_TimePeriodParent_LoadContainsOlderUpdate() + { + TwoTableSetupWhereTimePeriodIsParent(); - var cmd = new SqlCommand(stagingSamplesSql, connection); - cmd.ExecuteNonQuery(); + #region Insert To test data - cmd = new SqlCommand(stagingResultsSql, connection); - cmd.ExecuteNonQuery(); - } + const string liveSamplesSql = + "INSERT INTO Samples (ID, SampleDate, Description, hic_validFrom, hic_dataLoadRunID) VALUES " + + "(1, '2016-01-10T12:00:00', '', '2016-01-10T12:00:00', 1)"; + const string liveResultsSql = + "INSERT INTO Results (ID, SampleID, Result, hic_validFrom, hic_dataLoadRunID) VALUES " + + "(10, 1, 123, '2016-01-10T12:00:00', 1), " + + "(11, 1, 234, '2016-01-10T12:00:00', 1)"; - #endregion + using (var connection = (SqlConnection)To.Server.GetConnection()) + { + connection.Open(); - Mutilate("[" + DatabaseName + "].[dbo].[Samples].[SampleDate]"); + var cmd = new SqlCommand(liveSamplesSql, connection); + cmd.ExecuteNonQuery(); - // From should be exactly the same as it was before mutilation as there is a single update - using (var connection = (SqlConnection)From.Server.GetConnection()) - { - connection.Open(); + cmd = new SqlCommand(liveResultsSql, connection); + cmd.ExecuteNonQuery(); + } - var cmd = new SqlCommand(@"SELECT COUNT(*) FROM Samples", connection); - var numRows = cmd.ExecuteScalar(); - Assert.AreEqual(1, numRows, "Item should still remain as there still should be a single result to insert."); + #endregion - cmd = new SqlCommand(@"SELECT * FROM Results", connection); - using (var reader = cmd.ExecuteReader()) - { - Assert.IsTrue(reader.HasRows); + #region Add From test data - reader.Read(); - Assert.AreEqual(12, reader["ID"]); + // add From data + const string stagingSamplesSql = "INSERT INTO Samples (ID, 
SampleDate, Description) VALUES " + + "(1, '2016-01-09T12:00:00', 'Sample is earlier than corresponding record in To (also contains an item which has apparently been deleted in the set used for a later load)')"; - var hasMoreResults = reader.Read(); - Assert.IsFalse(hasMoreResults, "Should only be one Result row left in From"); - } + const string stagingResultsSql = "INSERT INTO Results (ID, SampleID, Result) VALUES " + + "(10, 1, 123), " + + "(11, 1, 345), " + + "(12, 1, 456)"; - cmd = new SqlCommand(@"SELECT * FROM Samples", connection); - using (var reader = cmd.ExecuteReader()) - { - Assert.IsTrue(reader.HasRows); + using (var connection = (SqlConnection)From.Server.GetConnection()) + { + connection.Open(); - reader.Read(); - Assert.AreEqual("", reader["Description"].ToString(), "The To sample had a blank description which should have been copied in to the earlier From record."); + var cmd = new SqlCommand(stagingSamplesSql, connection); + cmd.ExecuteNonQuery(); - var hasMoreResults = reader.Read(); - Assert.IsFalse(hasMoreResults, "Should only be one Samples row in From"); - } - } + cmd = new SqlCommand(stagingResultsSql, connection); + cmd.ExecuteNonQuery(); } - [Test] - public void Backfill_TwoTables_TimePeriodParent_LoadContainInsert() - { - TwoTableSetupWhereTimePeriodIsParent(); + #endregion - #region Insert To test data - const string liveSamplesSql = "INSERT INTO Samples (ID, SampleDate, Description, hic_validFrom, hic_dataLoadRunID) VALUES " + - "(1, '2016-01-10T12:00:00', '', '2016-01-10T12:00:00', 1)"; + Mutilate($"[{DatabaseName}].[dbo].[Samples].[SampleDate]"); - const string liveResultsSql = "INSERT INTO Results (ID, SampleID, Result, hic_validFrom, hic_dataLoadRunID) VALUES " + - "(10, 1, 123, '2016-01-10T12:00:00', 1), " + - "(11, 1, 234, '2016-01-10T12:00:00', 1)"; + // From should be exactly the same as it was before mutilation as there is a single update + using (var connection = (SqlConnection)From.Server.GetConnection()) + { + connection.Open(); + + var cmd = new SqlCommand(@"SELECT COUNT(*) FROM Samples", connection); + var numRows = cmd.ExecuteScalar(); + Assert.AreEqual(1, numRows, "Item should still remain as there still should be a single result to insert."); - using (var connection = (SqlConnection)To.Server.GetConnection()) + cmd = new SqlCommand(@"SELECT * FROM Results", connection); + using (var reader = cmd.ExecuteReader()) { - connection.Open(); + Assert.IsTrue(reader.HasRows); - var cmd = new SqlCommand(liveSamplesSql, connection); - cmd.ExecuteNonQuery(); + reader.Read(); + Assert.AreEqual(12, reader["ID"]); - cmd = new SqlCommand(liveResultsSql, connection); - cmd.ExecuteNonQuery(); + var hasMoreResults = reader.Read(); + Assert.IsFalse(hasMoreResults, "Should only be one Result row left in From"); } - #endregion - #region Add From test data - // add From data - const string stagingSamplesSql = "INSERT INTO Samples (ID, SampleDate, Description) VALUES " + - "(2, '2016-01-15T12:00:00', 'New sample')"; - - const string stagingResultsSql = "INSERT INTO Results (ID, SampleID, Result) VALUES " + - "(13, 2, 333), " + - "(14, 2, 555), " + - "(15, 2, 666)"; - - using (var connection = (SqlConnection)From.Server.GetConnection()) + cmd = new SqlCommand(@"SELECT * FROM Samples", connection); + using (var reader = cmd.ExecuteReader()) { - connection.Open(); + Assert.IsTrue(reader.HasRows); - var cmd = new SqlCommand(stagingSamplesSql, connection); - cmd.ExecuteNonQuery(); + reader.Read(); + Assert.AreEqual("", reader["Description"].ToString(), + "The To sample 
had a blank description which should have been copied in to the earlier From record."); - cmd = new SqlCommand(stagingResultsSql, connection); - cmd.ExecuteNonQuery(); + var hasMoreResults = reader.Read(); + Assert.IsFalse(hasMoreResults, "Should only be one Samples row in From"); } + } + } + [Test] + public void Backfill_TwoTables_TimePeriodParent_LoadContainInsert() + { + TwoTableSetupWhereTimePeriodIsParent(); - #endregion + #region Insert To test data - Mutilate("[" + DatabaseName + "].[dbo].[Samples].[SampleDate]"); + const string liveSamplesSql = + "INSERT INTO Samples (ID, SampleDate, Description, hic_validFrom, hic_dataLoadRunID) VALUES " + + "(1, '2016-01-10T12:00:00', '', '2016-01-10T12:00:00', 1)"; - // From should be exactly the same as it was before mutilation as there is a single update - using (var connection = (SqlConnection)From.Server.GetConnection()) - { - connection.Open(); + const string liveResultsSql = + "INSERT INTO Results (ID, SampleID, Result, hic_validFrom, hic_dataLoadRunID) VALUES " + + "(10, 1, 123, '2016-01-10T12:00:00', 1), " + + "(11, 1, 234, '2016-01-10T12:00:00', 1)"; + + using (var connection = (SqlConnection)To.Server.GetConnection()) + { + connection.Open(); - var cmd = new SqlCommand(@"SELECT COUNT(*) FROM Samples", connection); - var numRows = cmd.ExecuteScalar(); - Assert.AreEqual(1, numRows, "This is an insert, no data should be deleted/altered."); + var cmd = new SqlCommand(liveSamplesSql, connection); + cmd.ExecuteNonQuery(); - cmd = new SqlCommand(@"SELECT COUNT(*) FROM Results", connection); - numRows = cmd.ExecuteScalar(); - Assert.AreEqual(3, numRows, "This is an insert, no data should be deleted/altered."); - } + cmd = new SqlCommand(liveResultsSql, connection); + cmd.ExecuteNonQuery(); } - private void TwoTableSetupWhereTimePeriodIsChild() - { - CreateTables("Headers", "ID int NOT NULL, Discipline varchar(32)", "ID"); - CreateTables("Samples", - "ID int NOT NULL, HeaderID int NOT NULL, SampleDate DATETIME, Description varchar(1024)", "ID", - "CONSTRAINT [FK_Headers_Samples] FOREIGN KEY (HeaderID) REFERENCES Headers (ID)"); + #endregion - // Set SetUp catalogue entities - ColumnInfo[] ciSamples; - ColumnInfo[] ciHeaders; + #region Add From test data - var tiSamples = AddTableToCatalogue(DatabaseName, "Samples", "ID", out ciSamples, true); - AddTableToCatalogue(DatabaseName, "Headers", "ID", out ciHeaders); + // add From data + const string stagingSamplesSql = "INSERT INTO Samples (ID, SampleDate, Description) VALUES " + + "(2, '2016-01-15T12:00:00', 'New sample')"; - _catalogue.Time_coverage = "[Samples].[SampleDate]"; - _catalogue.SaveToDatabase(); + const string stagingResultsSql = "INSERT INTO Results (ID, SampleID, Result) VALUES " + + "(13, 2, 333), " + + "(14, 2, 555), " + + "(15, 2, 666)"; - tiSamples.IsPrimaryExtractionTable = true; - tiSamples.SaveToDatabase(); + using (var connection = (SqlConnection)From.Server.GetConnection()) + { + connection.Open(); - Assert.AreEqual(10, _catalogue.CatalogueItems.Count(), "Unexpected number of items in catalogue"); + var cmd = new SqlCommand(stagingSamplesSql, connection); + cmd.ExecuteNonQuery(); - // Headers (1:M) Samples join - new JoinInfo(CatalogueRepository,ciSamples.Single(info => info.GetRuntimeName().Equals("HeaderID")), - ciHeaders.Single(info => info.GetRuntimeName().Equals("ID")), - ExtractionJoinType.Left, ""); + cmd = new SqlCommand(stagingResultsSql, connection); + cmd.ExecuteNonQuery(); } - [Test] - public void Backfill_TwoTables_TimePeriodChild_LoadContainsOlderUpdate() + 
#endregion + + Mutilate($"[{DatabaseName}].[dbo].[Samples].[SampleDate]"); + + // From should be exactly the same as it was before mutilation as there is a single update + using (var connection = (SqlConnection)From.Server.GetConnection()) { - TwoTableSetupWhereTimePeriodIsChild(); + connection.Open(); - #region Insert To test data - const string liveHeaderSql = "INSERT INTO Headers (ID, Discipline, hic_validFrom, hic_dataLoadRunID) VALUES " + - "(1, 'Biochemistry', '2016-01-10T12:00:00', 1)"; + var cmd = new SqlCommand(@"SELECT COUNT(*) FROM Samples", connection); + var numRows = cmd.ExecuteScalar(); + Assert.AreEqual(1, numRows, "This is an insert, no data should be deleted/altered."); - const string liveSamplesSql = "INSERT INTO Samples (ID, HeaderID, SampleDate, Description, hic_validFrom, hic_dataLoadRunID) VALUES " + - "(10, 1, '2016-01-10T12:00:00', '', '2016-01-10T12:00:00', 1)"; + cmd = new SqlCommand(@"SELECT COUNT(*) FROM Results", connection); + numRows = cmd.ExecuteScalar(); + Assert.AreEqual(3, numRows, "This is an insert, no data should be deleted/altered."); + } + } + private void TwoTableSetupWhereTimePeriodIsChild() + { + CreateTables("Headers", "ID int NOT NULL, Discipline varchar(32)", "ID"); + CreateTables("Samples", + "ID int NOT NULL, HeaderID int NOT NULL, SampleDate DATETIME, Description varchar(1024)", "ID", + "CONSTRAINT [FK_Headers_Samples] FOREIGN KEY (HeaderID) REFERENCES Headers (ID)"); - using (var connection = (SqlConnection)To.Server.GetConnection()) - { - connection.Open(); + // Set SetUp catalogue entities - var cmd = new SqlCommand(liveHeaderSql, connection); - cmd.ExecuteNonQuery(); + var tiSamples = AddTableToCatalogue(DatabaseName, "Samples", "ID", out var ciSamples, true); + AddTableToCatalogue(DatabaseName, "Headers", "ID", out var ciHeaders); - cmd = new SqlCommand(liveSamplesSql, connection); - cmd.ExecuteNonQuery(); - } - #endregion + _catalogue.Time_coverage = "[Samples].[SampleDate]"; + _catalogue.SaveToDatabase(); - #region Add From test data - // add From data - const string stagingHeadersSql = "INSERT INTO Headers (ID, Discipline) VALUES " + - "(1, 'Biochemistry')"; + tiSamples.IsPrimaryExtractionTable = true; + tiSamples.SaveToDatabase(); - const string stagingSamplesSql = "INSERT INTO Samples (ID, HeaderID, SampleDate, Description) VALUES " + - "(10, 1, '2016-01-05T12:00:00', '')"; + Assert.AreEqual(10, _catalogue.CatalogueItems.Length, "Unexpected number of items in catalogue"); - using (var connection = (SqlConnection)From.Server.GetConnection()) - { - connection.Open(); + // Headers (1:M) Samples join + new JoinInfo(CatalogueRepository, ciSamples.Single(info => info.GetRuntimeName().Equals("HeaderID")), + ciHeaders.Single(info => info.GetRuntimeName().Equals("ID")), + ExtractionJoinType.Left, ""); + } - var cmd = new SqlCommand(stagingHeadersSql, connection); - cmd.ExecuteNonQuery(); + [Test] + public void Backfill_TwoTables_TimePeriodChild_LoadContainsOlderUpdate() + { + TwoTableSetupWhereTimePeriodIsChild(); - cmd = new SqlCommand(stagingSamplesSql, connection); - cmd.ExecuteNonQuery(); - } + #region Insert To test data + const string liveHeaderSql = "INSERT INTO Headers (ID, Discipline, hic_validFrom, hic_dataLoadRunID) VALUES " + + "(1, 'Biochemistry', '2016-01-10T12:00:00', 1)"; - #endregion + const string liveSamplesSql = + "INSERT INTO Samples (ID, HeaderID, SampleDate, Description, hic_validFrom, hic_dataLoadRunID) VALUES " + + "(10, 1, '2016-01-10T12:00:00', '', '2016-01-10T12:00:00', 1)"; - Mutilate("[" + DatabaseName + 
"].[dbo].[Samples].[SampleDate]"); - // From should be exactly the same as it was before mutilation as there is a single update - using (var connection = (SqlConnection)From.Server.GetConnection()) - { - connection.Open(); + using (var connection = (SqlConnection)To.Server.GetConnection()) + { + connection.Open(); - var cmd = new SqlCommand(@"SELECT COUNT(*) FROM Samples", connection); - var numRows = cmd.ExecuteScalar(); - Assert.AreEqual(0, numRows, "Sample should be deleted as it is older than corresponding row in To."); + var cmd = new SqlCommand(liveHeaderSql, connection); + cmd.ExecuteNonQuery(); - cmd = new SqlCommand(@"SELECT COUNT(*) FROM Headers", connection); - numRows = cmd.ExecuteScalar(); - Assert.AreEqual(0, numRows, "Header should have been pruned as it no longer has any children in From."); - } + cmd = new SqlCommand(liveSamplesSql, connection); + cmd.ExecuteNonQuery(); } - /// - /// This test has an 'old' child insert, i.e. the date of the insert is before the newest child entry in To. - /// Also, the parent data in To is different from that in From, so we need to ensure the entry in From is updated before we migrate the data, - /// otherwise we will overwrite To with old data - /// - [Test] - public void Backfill_TwoTables_TimePeriodChild_LoadContainsOldInsert_WithOldParentData() - { - TwoTableSetupWhereTimePeriodIsChild(); + #endregion - #region Insert To test data - const string liveHeaderSql = "INSERT INTO Headers (ID, Discipline, hic_validFrom, hic_dataLoadRunID) VALUES " + - "(1, 'Biochemistry', '2016-01-15T12:00:00', 1)"; + #region Add From test data - const string liveSamplesSql = "INSERT INTO Samples (ID, HeaderID, SampleDate, Description, hic_validFrom, hic_dataLoadRunID) VALUES " + - "(11, 1, '2016-01-15T12:00:00', '', '2016-01-15T12:00:00', 1)"; + // add From data + const string stagingHeadersSql = "INSERT INTO Headers (ID, Discipline) VALUES " + + "(1, 'Biochemistry')"; + const string stagingSamplesSql = "INSERT INTO Samples (ID, HeaderID, SampleDate, Description) VALUES " + + "(10, 1, '2016-01-05T12:00:00', '')"; - using (var connection = (SqlConnection)To.Server.GetConnection()) - { - connection.Open(); + using (var connection = (SqlConnection)From.Server.GetConnection()) + { + connection.Open(); - var cmd = new SqlCommand(liveHeaderSql, connection); - cmd.ExecuteNonQuery(); + var cmd = new SqlCommand(stagingHeadersSql, connection); + cmd.ExecuteNonQuery(); - cmd = new SqlCommand(liveSamplesSql, connection); - cmd.ExecuteNonQuery(); - } - #endregion + cmd = new SqlCommand(stagingSamplesSql, connection); + cmd.ExecuteNonQuery(); + } - #region Add From test data - // add From data - const string stagingHeadersSql = "INSERT INTO Headers (ID, Discipline) VALUES " + - "(1, 'Haematology')"; // old and incorrect Discipline value + #endregion - const string stagingSamplesSql = "INSERT INTO Samples (ID, HeaderID, SampleDate, Description) VALUES " + - "(10, 1, '2016-01-05T12:00:00', '')"; // 'old' insert, missing from loaded data. 
Can only be added by including the parent, so need to make parent correct before migration + Mutilate($"[{DatabaseName}].[dbo].[Samples].[SampleDate]"); - using (var connection = (SqlConnection)From.Server.GetConnection()) - { - connection.Open(); + // From should be exactly the same as it was before mutilation as there is a single update + using (var connection = (SqlConnection)From.Server.GetConnection()) + { + connection.Open(); - var cmd = new SqlCommand(stagingHeadersSql, connection); - cmd.ExecuteNonQuery(); + var cmd = new SqlCommand(@"SELECT COUNT(*) FROM Samples", connection); + var numRows = cmd.ExecuteScalar(); + Assert.AreEqual(0, numRows, "Sample should be deleted as it is older than corresponding row in To."); - cmd = new SqlCommand(stagingSamplesSql, connection); - cmd.ExecuteNonQuery(); - } - - #endregion + cmd = new SqlCommand(@"SELECT COUNT(*) FROM Headers", connection); + numRows = cmd.ExecuteScalar(); + Assert.AreEqual(0, numRows, "Header should have been pruned as it no longer has any children in From."); + } + } - Mutilate("[" + DatabaseName + "].[dbo].[Samples].[SampleDate]"); + /// + /// This test has an 'old' child insert, i.e. the date of the insert is before the newest child entry in To. + /// Also, the parent data in To is different from that in From, so we need to ensure the entry in From is updated before we migrate the data, + /// otherwise we will overwrite To with old data + /// + [Test] + public void Backfill_TwoTables_TimePeriodChild_LoadContainsOldInsert_WithOldParentData() + { + TwoTableSetupWhereTimePeriodIsChild(); - // From should be exactly the same as it was before mutilation as there is a single update - using (var connection = (SqlConnection)From.Server.GetConnection()) - { - connection.Open(); + #region Insert To test data - var cmd = new SqlCommand(@"SELECT COUNT(*) FROM Samples", connection); - var numRows = cmd.ExecuteScalar(); - Assert.AreEqual(1, numRows, "Should still be 1 sample"); + const string liveHeaderSql = "INSERT INTO Headers (ID, Discipline, hic_validFrom, hic_dataLoadRunID) VALUES " + + "(1, 'Biochemistry', '2016-01-15T12:00:00', 1)"; - cmd = new SqlCommand(@"SELECT COUNT(*) FROM Headers", connection); - numRows = cmd.ExecuteScalar(); - Assert.AreEqual(1, numRows, "Header should still be there (shouldn't be able to delete it as there should be a FK constraint with Samples)"); + const string liveSamplesSql = + "INSERT INTO Samples (ID, HeaderID, SampleDate, Description, hic_validFrom, hic_dataLoadRunID) VALUES " + + "(11, 1, '2016-01-15T12:00:00', '', '2016-01-15T12:00:00', 1)"; - cmd = new SqlCommand(@"SELECT Discipline FROM Headers WHERE ID=1", connection); - var discipline = cmd.ExecuteScalar().ToString(); - Assert.AreEqual("Biochemistry", discipline, "Header record in From be updated to reflect what is in To: the To record is authoritative as it contains at least one child from a later date."); - } - } - [Test] - public void Backfill_TwoTables_TimePeriodChild_LoadContainsNewInsert_WithNewParentData() + using (var connection = (SqlConnection)To.Server.GetConnection()) { - TwoTableSetupWhereTimePeriodIsChild(); + connection.Open(); - #region Insert To test data - const string liveHeaderSql = "INSERT INTO Headers (ID, Discipline, hic_validFrom, hic_dataLoadRunID) VALUES " + - "(1, 'Biochemistry', '2016-01-15T12:00:00', 1)"; + var cmd = new SqlCommand(liveHeaderSql, connection); + cmd.ExecuteNonQuery(); - const string liveSamplesSql = "INSERT INTO Samples (ID, HeaderID, SampleDate, Description, hic_validFrom, hic_dataLoadRunID) 
VALUES " + - "(10, 1, '2016-01-15T10:00:00', '', '2016-01-15T12:00:00', 1), " + - "(11, 1, '2016-01-15T12:00:00', '', '2016-01-15T12:00:00', 1)"; + cmd = new SqlCommand(liveSamplesSql, connection); + cmd.ExecuteNonQuery(); + } + #endregion - using (var connection = (SqlConnection)To.Server.GetConnection()) - { - connection.Open(); + #region Add From test data - var cmd = new SqlCommand(liveHeaderSql, connection); - cmd.ExecuteNonQuery(); + // add From data + const string stagingHeadersSql = "INSERT INTO Headers (ID, Discipline) VALUES " + + "(1, 'Haematology')"; // old and incorrect Discipline value - cmd = new SqlCommand(liveSamplesSql, connection); - cmd.ExecuteNonQuery(); - } - #endregion + const string stagingSamplesSql = "INSERT INTO Samples (ID, HeaderID, SampleDate, Description) VALUES " + + "(10, 1, '2016-01-05T12:00:00', '')"; // 'old' insert, missing from loaded data. Can only be added by including the parent, so need to make parent correct before migration - #region Add From test data - // add From data - const string stagingHeadersSql = "INSERT INTO Headers (ID, Discipline) VALUES " + - "(1, 'Haematology')"; // old and incorrect Discipline value + using (var connection = (SqlConnection)From.Server.GetConnection()) + { + connection.Open(); - const string stagingSamplesSql = "INSERT INTO Samples (ID, HeaderID, SampleDate, Description) VALUES " + - "(12, 1, '2016-01-16T12:00:00', '')"; // 'new' insert, missing from loaded data. SampleDate is newer than any in To so this means that the updated parent data is 'correct' + var cmd = new SqlCommand(stagingHeadersSql, connection); + cmd.ExecuteNonQuery(); - using (var connection = (SqlConnection)From.Server.GetConnection()) - { - connection.Open(); + cmd = new SqlCommand(stagingSamplesSql, connection); + cmd.ExecuteNonQuery(); + } - var cmd = new SqlCommand(stagingHeadersSql, connection); - cmd.ExecuteNonQuery(); + #endregion - cmd = new SqlCommand(stagingSamplesSql, connection); - cmd.ExecuteNonQuery(); - } + Mutilate($"[{DatabaseName}].[dbo].[Samples].[SampleDate]"); - #endregion + // From should be exactly the same as it was before mutilation as there is a single update + using (var connection = (SqlConnection)From.Server.GetConnection()) + { + connection.Open(); - Mutilate("[" + DatabaseName + "].[dbo].[Samples].[SampleDate]"); + var cmd = new SqlCommand(@"SELECT COUNT(*) FROM Samples", connection); + var numRows = cmd.ExecuteScalar(); + Assert.AreEqual(1, numRows, "Should still be 1 sample"); - // From should be exactly the same as it was before mutilation as there is a single update - using (var connection = (SqlConnection)From.Server.GetConnection()) - { - connection.Open(); + cmd = new SqlCommand(@"SELECT COUNT(*) FROM Headers", connection); + numRows = cmd.ExecuteScalar(); + Assert.AreEqual(1, numRows, + "Header should still be there (shouldn't be able to delete it as there should be a FK constraint with Samples)"); - var cmd = new SqlCommand(@"SELECT COUNT(*) FROM Samples", connection); - var numRows = cmd.ExecuteScalar(); - Assert.AreEqual(1, numRows, "Should still be 1 sample"); + cmd = new SqlCommand(@"SELECT Discipline FROM Headers WHERE ID=1", connection); + var discipline = cmd.ExecuteScalar().ToString(); + Assert.AreEqual("Biochemistry", discipline, + "Header record in From be updated to reflect what is in To: the To record is authoritative as it contains at least one child from a later date."); + } + } - cmd = new SqlCommand(@"SELECT COUNT(*) FROM Headers", connection); - numRows = cmd.ExecuteScalar(); - 
Assert.AreEqual(1, numRows, "Header should still be there (shouldn't be able to delete it as there should be a FK constraint with Samples)"); + [Test] + public void Backfill_TwoTables_TimePeriodChild_LoadContainsNewInsert_WithNewParentData() + { + TwoTableSetupWhereTimePeriodIsChild(); - cmd = new SqlCommand(@"SELECT Discipline FROM Headers WHERE ID=1", connection); - var discipline = cmd.ExecuteScalar().ToString(); - Assert.AreEqual("Haematology", discipline, "Header record in From should not be updated as it is 'correct'."); - } - } + #region Insert To test data - [Test] - public void Backfill_TwoTables_TimePeriodChild_Combined() + const string liveHeaderSql = "INSERT INTO Headers (ID, Discipline, hic_validFrom, hic_dataLoadRunID) VALUES " + + "(1, 'Biochemistry', '2016-01-15T12:00:00', 1)"; + + const string liveSamplesSql = + "INSERT INTO Samples (ID, HeaderID, SampleDate, Description, hic_validFrom, hic_dataLoadRunID) VALUES " + + "(10, 1, '2016-01-15T10:00:00', '', '2016-01-15T12:00:00', 1), " + + "(11, 1, '2016-01-15T12:00:00', '', '2016-01-15T12:00:00', 1)"; + + + using (var connection = (SqlConnection)To.Server.GetConnection()) { - TwoTableSetupWhereTimePeriodIsChild(); + connection.Open(); - #region Insert To test data - const string liveHeaderSql = "INSERT INTO Headers (ID, Discipline, hic_validFrom, hic_dataLoadRunID) VALUES " + - "(1, 'Haematology', '2016-01-15T12:00:00', 2), " + - "(2, 'Haematology', '2016-01-05T12:00:00', 1), " + - "(3, 'Biochemistry', '2016-01-15T12:00:00', 2), " + - "(4, 'Haematology', '2016-01-15T12:00:00', 2)"; + var cmd = new SqlCommand(liveHeaderSql, connection); + cmd.ExecuteNonQuery(); - const string liveSamplesSql = "INSERT INTO Samples (ID, HeaderID, SampleDate, Description, hic_validFrom, hic_dataLoadRunID) VALUES " + - "(12, 1, '2016-01-15T12:00:00', '', '2016-01-15T12:00:00', 2), " + - "(13, 1, '2016-01-15T12:00:00', '', '2016-01-15T12:00:00', 2), " + - "(14, 2, '2016-01-05T12:00:00', '', '2016-01-05T12:00:00', 1), " + - "(15, 3, '2016-01-15T12:00:00', '', '2016-01-15T12:00:00', 2), " + - "(16, 4, '2016-01-15T12:00:00', '', '2016-01-15T12:00:00', 2)"; + cmd = new SqlCommand(liveSamplesSql, connection); + cmd.ExecuteNonQuery(); + } + #endregion - using (var connection = (SqlConnection)To.Server.GetConnection()) - { - connection.Open(); + #region Add From test data - var cmd = new SqlCommand(liveHeaderSql, connection); - cmd.ExecuteNonQuery(); + // add From data + const string stagingHeadersSql = "INSERT INTO Headers (ID, Discipline) VALUES " + + "(1, 'Haematology')"; // old and incorrect Discipline value - cmd = new SqlCommand(liveSamplesSql, connection); - cmd.ExecuteNonQuery(); - } - #endregion - - #region Add From test data - // add From data - const string stagingHeadersSql = "INSERT INTO Headers (ID, Discipline) VALUES " + - "(1, 'Biochemistry'), " + - "(2, 'Biochemistry'), " + - "(3, 'Biochemistry'), " + - "(5, 'Biochemistry')"; - - const string stagingSamplesSql = "INSERT INTO Samples (ID, HeaderID, SampleDate, Description) VALUES " + - "(11, 1, '2016-01-05T12:00:00', ''), " + - "(13, 1, '2016-01-05T12:00:00', ''), " + - "(14, 2, '2016-01-15T12:00:00', ''), " + - "(15, 3, '2016-01-05T12:00:00', ''), " + - "(17, 5, '2016-01-05T12:00:00', '')"; - - using (var connection = (SqlConnection)From.Server.GetConnection()) - { - connection.Open(); + const string stagingSamplesSql = "INSERT INTO Samples (ID, HeaderID, SampleDate, Description) VALUES " + + "(12, 1, '2016-01-16T12:00:00', '')"; // 'new' insert, missing from loaded data. 
SampleDate is newer than any in To so this means that the updated parent data is 'correct' - var cmd = new SqlCommand(stagingHeadersSql, connection); - cmd.ExecuteNonQuery(); + using (var connection = (SqlConnection)From.Server.GetConnection()) + { + connection.Open(); - cmd = new SqlCommand(stagingSamplesSql, connection); - cmd.ExecuteNonQuery(); - } + var cmd = new SqlCommand(stagingHeadersSql, connection); + cmd.ExecuteNonQuery(); - #endregion + cmd = new SqlCommand(stagingSamplesSql, connection); + cmd.ExecuteNonQuery(); + } - Mutilate("[" + DatabaseName + "].[dbo].[Samples].[SampleDate]"); + #endregion - using (var connection = (SqlConnection)From.Server.GetConnection()) - { - connection.Open(); + Mutilate($"[{DatabaseName}].[dbo].[Samples].[SampleDate]"); - var cmd = new SqlCommand(@"SELECT * FROM Samples ORDER BY ID", connection); - using (var reader = cmd.ExecuteReader()) - { - reader.Read(); - Assert.AreEqual(11, reader["ID"]); - Assert.AreEqual("2016-01-05T12:00:00", ((DateTime)reader["SampleDate"]).ToString("s")); + // From should be exactly the same as it was before mutilation as there is a single update + using (var connection = (SqlConnection)From.Server.GetConnection()) + { + connection.Open(); - reader.Read(); - Assert.AreEqual(14, reader["ID"]); - Assert.AreEqual("2016-01-15T12:00:00", ((DateTime)reader["SampleDate"]).ToString("s")); + var cmd = new SqlCommand(@"SELECT COUNT(*) FROM Samples", connection); + var numRows = cmd.ExecuteScalar(); + Assert.AreEqual(1, numRows, "Should still be 1 sample"); - reader.Read(); - Assert.AreEqual(17, reader["ID"]); - Assert.AreEqual("2016-01-05T12:00:00", ((DateTime)reader["SampleDate"]).ToString("s")); + cmd = new SqlCommand(@"SELECT COUNT(*) FROM Headers", connection); + numRows = cmd.ExecuteScalar(); + Assert.AreEqual(1, numRows, + "Header should still be there (shouldn't be able to delete it as there should be a FK constraint with Samples)"); - Assert.IsFalse(reader.Read(), "Should only be three samples"); - } + cmd = new SqlCommand(@"SELECT Discipline FROM Headers WHERE ID=1", connection); + var discipline = cmd.ExecuteScalar().ToString(); + Assert.AreEqual("Haematology", discipline, + "Header record in From should not be updated as it is 'correct'."); + } + } - cmd = new SqlCommand(@"SELECT * FROM Headers ORDER BY ID", connection); - using (var reader = cmd.ExecuteReader()) - { - Assert.IsTrue(reader.HasRows); + [Test] + public void Backfill_TwoTables_TimePeriodChild_Combined() + { + TwoTableSetupWhereTimePeriodIsChild(); - reader.Read(); - Assert.AreEqual(1, reader["ID"]); - Assert.AreEqual("Haematology", reader["Discipline"]); + #region Insert To test data - reader.Read(); - Assert.AreEqual(2, reader["ID"]); - Assert.AreEqual("Biochemistry", reader["Discipline"]); + const string liveHeaderSql = "INSERT INTO Headers (ID, Discipline, hic_validFrom, hic_dataLoadRunID) VALUES " + + "(1, 'Haematology', '2016-01-15T12:00:00', 2), " + + "(2, 'Haematology', '2016-01-05T12:00:00', 1), " + + "(3, 'Biochemistry', '2016-01-15T12:00:00', 2), " + + "(4, 'Haematology', '2016-01-15T12:00:00', 2)"; - reader.Read(); - Assert.AreEqual(5, reader["ID"]); - Assert.AreEqual("Biochemistry", reader["Discipline"]); + const string liveSamplesSql = + "INSERT INTO Samples (ID, HeaderID, SampleDate, Description, hic_validFrom, hic_dataLoadRunID) VALUES " + + "(12, 1, '2016-01-15T12:00:00', '', '2016-01-15T12:00:00', 2), " + + "(13, 1, '2016-01-15T12:00:00', '', '2016-01-15T12:00:00', 2), " + + "(14, 2, '2016-01-05T12:00:00', '', '2016-01-05T12:00:00', 1), " 
+ + "(15, 3, '2016-01-15T12:00:00', '', '2016-01-15T12:00:00', 2), " + + "(16, 4, '2016-01-15T12:00:00', '', '2016-01-15T12:00:00', 2)"; - Assert.IsFalse(reader.Read(), "Should only be three headers"); - } - } - } - private void CreateTables(string tableName, string columnDefinitions, string pkColumn, string fkConstraintString = null) + using (var connection = (SqlConnection)To.Server.GetConnection()) { - // todo: doesn't do combo primary keys yet + connection.Open(); - if (pkColumn == null || string.IsNullOrWhiteSpace(pkColumn)) - throw new InvalidOperationException("Primary Key column is required."); + var cmd = new SqlCommand(liveHeaderSql, connection); + cmd.ExecuteNonQuery(); - var pkConstraint = String.Format("CONSTRAINT PK_{0} PRIMARY KEY ({1})", tableName, pkColumn); - var stagingTableDefinition = columnDefinitions + ", " + pkConstraint; - var liveTableDefinition = columnDefinitions + String.Format(", hic_validFrom DATETIME, hic_dataLoadRunID int, " + pkConstraint); + cmd = new SqlCommand(liveSamplesSql, connection); + cmd.ExecuteNonQuery(); + } - if (fkConstraintString != null) - { - stagingTableDefinition += ", " + fkConstraintString; - liveTableDefinition += ", " + fkConstraintString; - } + #endregion + #region Add From test data - using (var con = (SqlConnection) From.Server.GetConnection()) - { - con.Open(); - new SqlCommand("CREATE TABLE " + tableName + " (" + stagingTableDefinition + ")",con).ExecuteNonQuery(); - } + // add From data + const string stagingHeadersSql = "INSERT INTO Headers (ID, Discipline) VALUES " + + "(1, 'Biochemistry'), " + + "(2, 'Biochemistry'), " + + "(3, 'Biochemistry'), " + + "(5, 'Biochemistry')"; - using(var con = (SqlConnection)To.Server.GetConnection()) - { - con.Open(); - new SqlCommand("CREATE TABLE " + tableName + " (" + liveTableDefinition + ")",con).ExecuteNonQuery(); - } - } + const string stagingSamplesSql = "INSERT INTO Samples (ID, HeaderID, SampleDate, Description) VALUES " + + "(11, 1, '2016-01-05T12:00:00', ''), " + + "(13, 1, '2016-01-05T12:00:00', ''), " + + "(14, 2, '2016-01-15T12:00:00', ''), " + + "(15, 3, '2016-01-05T12:00:00', ''), " + + "(17, 5, '2016-01-05T12:00:00', '')"; - [Test, Ignore("Restructuring tests")] - public void DeleteNewerCollisionsFromTable() + using (var connection = (SqlConnection)From.Server.GetConnection()) { - #region Set SetUp databases - CreateTables("Header", "ID int NOT NULL, Discipline varchar(32) NOT NULL", "ID"); - - CreateTables("Samples", - "ID int NOT NULL, HeaderID int NOT NULL, SampleDate DATETIME, Description varchar(1024)", - "CONSTRAINT FK_Header_Samples FOREIGN KEY (HeaderID) REFERENCES Header (ID)"); - - CreateTables("Results", "ID int NOT NULL, SampleID int NOT NULL, Result int", - "CONSTRAINT [FK_Samples_Results] FOREIGN KEY (SampleID) REFERENCES Samples (ID)"); - - #endregion - - #region Set SetUp catalogue entities - ColumnInfo[] ciSamples; - var tiSamples = AddSamplesTableToCatalogue(DatabaseName, out ciSamples); - var tiResults = AddResultsTableToCatalogue(DatabaseName, ciSamples); - var tiHeaders = AddHeaderTableToCatalogue(DatabaseName, ciSamples); - - // should be all entities set SetUp now - Assert.AreEqual(15, _catalogue.CatalogueItems.Count(), "Unexpected number of items in catalogue"); - #endregion - - // add data - #region Populate Tables - var connection = (SqlConnection)To.Server.GetConnection(); connection.Open(); - const string liveHeaderDataSql = "INSERT INTO Header (ID, Discipline, hic_validFrom, hic_dataLoadRunID) VALUES " + - "(1, 'Biochemistry', 
'2016-01-15T15:00:00', 4), " + - "(3, 'Haematology', '2016-01-15T12:00:00', 3)"; - var liveHeaderDataSqlCommand = new SqlCommand(liveHeaderDataSql, connection); - liveHeaderDataSqlCommand.ExecuteNonQuery(); - - const string liveSampleDataSql = "INSERT INTO Samples (ID, HeaderID, SampleDate, Description, hic_validFrom, hic_dataLoadRunID) VALUES " + - "(10, 1, '2016-01-10T12:00:00', 'Earlier than corresponding new data, should be updated', '2016-01-10T12:00:00', 1), " + - "(11, 1, '2016-01-15T15:00:00', 'Later than corresponding new data, should not be touched', '2016-01-15T15:00:00', 4), " + - "(14, 3, '2016-01-15T12:00:00', 'Header data of newly loaded (but older) sample should *not* update this rows parent header', '2016-01-15T12:00:00', 3)"; - var liveSampleDataSqlCommand = new SqlCommand(liveSampleDataSql, connection); - liveSampleDataSqlCommand.ExecuteNonQuery(); - - const string liveResultDataSql = "INSERT INTO Results (ID, SampleID, Result, hic_validFrom, hic_dataLoadRunID) VALUES " + - "(100, 10, 999, '2016-01-10T12:00:00', 1), " + - "(101, 10, 888, '2016-01-10T12:00:00', 1), " + - "(102, 11, 456, '2016-01-15T15:00:00', 4), " + - "(103, 11, 654, '2016-01-15T15:00:00', 4), " + - "(107, 14, 111, '2016-01-15T12:00:00', 3)"; - var liveResultDataSqlCommand = new SqlCommand(liveResultDataSql, connection); - liveResultDataSqlCommand.ExecuteNonQuery(); - connection.Close(); - - connection = (SqlConnection)From.Server.GetConnection(); + var cmd = new SqlCommand(stagingHeadersSql, connection); + cmd.ExecuteNonQuery(); + + cmd = new SqlCommand(stagingSamplesSql, connection); + cmd.ExecuteNonQuery(); + } + + #endregion + + Mutilate($"[{DatabaseName}].[dbo].[Samples].[SampleDate]"); + + using (var connection = (SqlConnection)From.Server.GetConnection()) + { connection.Open(); - const string stagingHeaderDataSql = "INSERT INTO Header (ID, Discipline) VALUES " + - "(1, 'Haematology')," + - "(2, 'Biochemistry'), " + - "(3, 'Biochemistry')"; - var stagingHeaderDataSqlCommand = new SqlCommand(stagingHeaderDataSql, connection); - stagingHeaderDataSqlCommand.ExecuteNonQuery(); - - const string stagingSampleDataSql = "INSERT INTO Samples (ID, HeaderID, SampleDate, Description) VALUES " + - "(10, 1, '2016-01-12T13:00:00', 'Later than To data, represents an update and should overwrite To'), " + - "(11, 1, '2016-01-12T13:00:00', 'Earlier than To data, should not overwrite To'), " + - "(12, 2, '2016-01-12T13:00:00', 'New data that we did not have before')," + - "(13, 3, '2016-01-14T12:00:00', 'New data that we did not have before, but parent header record is wrong and been corrected in an earlier load for a later timeperiod (is Biochemistry here but To value of Haematology is correct)')"; - var stagingSampleDataSqlCommand = new SqlCommand(stagingSampleDataSql, connection); - stagingSampleDataSqlCommand.ExecuteNonQuery(); - - - const string stagingResultDataSql = "INSERT INTO Results (ID, SampleID, Result) VALUES " + - "(100, 10, 777), " + // changed 999 to 777 - "(101, 10, 888), " + // unchanged - "(104, 10, 666), " + // added this - "(102, 11, 400), " + // earlier data (which is also wrong, 456 in To), To data is newer and corrected - "(103, 11, 654), " + - "(105, 12, 123), " + // new result (from new sample) - "(106, 13, 123)"; // new result (from new sample) - var stagingResultDataSqlCommand = new SqlCommand(stagingResultDataSql, connection); - stagingResultDataSqlCommand.ExecuteNonQuery(); - - connection.Close(); - #endregion - - // databases are now represent state after push to From and before 
migration - var mutilator = new StagingBackfillMutilator + var cmd = new SqlCommand(@"SELECT * FROM Samples ORDER BY ID", connection); + using (var reader = cmd.ExecuteReader()) { - TimePeriodicityField = CatalogueRepository.GetAllObjects().Single(ci=>ci.Name == "[" + DatabaseName + "]..[Samples].[SampleDate]"), - TestContext = true, - TableNamingScheme = new IdentityTableNamingScheme() - }; - - mutilator.Initialize(From, LoadStage.AdjustStaging); - mutilator.Check(new ThrowImmediatelyCheckNotifier()); - mutilator.Mutilate(new ThrowImmediatelyDataLoadJob()); - - #region Assert - // check that From contains the correct data - // Sample ID=2 should have been deleted, along with corresponding results 102 and 103 - connection = (SqlConnection)From.Server.GetConnection(); - connection.Open(); + reader.Read(); + Assert.AreEqual(11, reader["ID"]); + Assert.AreEqual("2016-01-05T12:00:00", ((DateTime)reader["SampleDate"]).ToString("s")); - var cmd = new SqlCommand(@"SELECT COUNT(*) FROM Header", connection); - var numRows = cmd.ExecuteScalar(); - Assert.AreEqual(3, numRows, "Should be 3 header records"); + reader.Read(); + Assert.AreEqual(14, reader["ID"]); + Assert.AreEqual("2016-01-15T12:00:00", ((DateTime)reader["SampleDate"]).ToString("s")); - cmd = new SqlCommand(@"SELECT Discipline FROM Header WHERE ID=1", connection); - var discipline = cmd.ExecuteScalar(); - Assert.AreEqual("Biochemistry", discipline, "The mutilator should **NOT** have updated record 1 from Biochemistry to Haematology. Although the load updates one of the To samples, the most recent To sample is later than the most recent loaded sample so the parent data in To takes precedence over the parent data in From."); + reader.Read(); + Assert.AreEqual(17, reader["ID"]); + Assert.AreEqual("2016-01-05T12:00:00", ((DateTime)reader["SampleDate"]).ToString("s")); - // Not convinced about this test case - //cmd = new SqlCommand(@"SELECT Discipline FROM Header WHERE ID=3", connection); - //discipline = cmd.ExecuteScalar(); - //Assert.AreEqual("Haematology", discipline, "The load should **not** have updated record 3 from Haematology to Biochemistry since the loaded record is from an earlier timeperiod."); + Assert.IsFalse(reader.Read(), "Should only be three samples"); + } - cmd = new SqlCommand(@"SELECT COUNT(*) FROM Samples WHERE ID = 2", connection); - numRows = cmd.ExecuteScalar(); - Assert.AreEqual(0, numRows, "Sample ID = 2 has not been deleted"); + cmd = new SqlCommand(@"SELECT * FROM Headers ORDER BY ID", connection); + using (var reader = cmd.ExecuteReader()) + { + Assert.IsTrue(reader.HasRows); - cmd = new SqlCommand(@"SELECT COUNT(*) FROM Samples", connection); - numRows = cmd.ExecuteScalar(); - Assert.AreEqual(2, numRows, "Sample ID = 2 has been deleted but something has happened to the other samples (should be untouched)"); + reader.Read(); + Assert.AreEqual(1, reader["ID"]); + Assert.AreEqual("Haematology", reader["Discipline"]); - cmd = new SqlCommand(@"SELECT COUNT(*) FROM Results WHERE SampleID = 2", connection); - numRows = cmd.ExecuteScalar(); - Assert.AreEqual(0, numRows, "Results belonging to Sample ID = 2 have not been deleted"); + reader.Read(); + Assert.AreEqual(2, reader["ID"]); + Assert.AreEqual("Biochemistry", reader["Discipline"]); - cmd = new SqlCommand(@"SELECT COUNT(*) FROM Results", connection); - numRows = cmd.ExecuteScalar(); - Assert.AreEqual(4, numRows, "Results belonging to Sample ID = 2 have been deleted but something has happeded to the other results (should be untouched)"); + reader.Read(); + 
Assert.AreEqual(5, reader["ID"]); + Assert.AreEqual("Biochemistry", reader["Discipline"]); - connection.Close(); - #endregion + Assert.IsFalse(reader.Read(), "Should only be three headers"); + } + } + } + + private void CreateTables(string tableName, string columnDefinitions, string pkColumn, + string fkConstraintString = null) + { + // todo: doesn't do combo primary keys yet - tiSamples.DeleteInDatabase(); - tiResults.DeleteInDatabase(); - tiHeaders.DeleteInDatabase(); + if (pkColumn == null || string.IsNullOrWhiteSpace(pkColumn)) + throw new InvalidOperationException("Primary Key column is required."); + + var pkConstraint = $"CONSTRAINT PK_{tableName} PRIMARY KEY ({pkColumn})"; + var stagingTableDefinition = $"{columnDefinitions}, {pkConstraint}"; + var liveTableDefinition = + $"{columnDefinitions}, hic_validFrom DATETIME, hic_dataLoadRunID int, {pkConstraint}"; + + if (fkConstraintString != null) + { + stagingTableDefinition += $", {fkConstraintString}"; + liveTableDefinition += $", {fkConstraintString}"; } - private ITableInfo AddSamplesTableToCatalogue(string databaseName, out ColumnInfo[] ciList) + + using (var con = (SqlConnection)From.Server.GetConnection()) { - var ti = AddTableToCatalogue(databaseName, "Samples", "ID", out ciList, true); - _catalogue.Name = databaseName; - - // todo: what should this text actually look like - _catalogue.Time_coverage = "[Samples].[SampleDate]"; - _catalogue.SaveToDatabase(); - return ti; + con.Open(); + new SqlCommand($"CREATE TABLE {tableName} ({stagingTableDefinition})", con).ExecuteNonQuery(); } - private ITableInfo AddResultsTableToCatalogue(string databaseName, ColumnInfo[] ciSamples) + using (var con = (SqlConnection)To.Server.GetConnection()) { - ColumnInfo[] ciList; - var ti = AddTableToCatalogue(databaseName, "Results", "ID", out ciList); - - // setup join infos - new JoinInfo(CatalogueRepository,ciList.Single(info => info.GetRuntimeName().Equals("SampleID")), - ciSamples.Single(info => info.GetRuntimeName().Equals("ID")), - ExtractionJoinType.Left, ""); - return ti; + con.Open(); + new SqlCommand($"CREATE TABLE {tableName} ({liveTableDefinition})", con).ExecuteNonQuery(); } + } - private ITableInfo AddHeaderTableToCatalogue(string databaseName, ColumnInfo[] ciSamples) + [Test] + [Ignore("Restructuring tests")] + public void DeleteNewerCollisionsFromTable() + { + #region Set SetUp databases + + CreateTables("Header", "ID int NOT NULL, Discipline varchar(32) NOT NULL", "ID"); + + CreateTables("Samples", + "ID int NOT NULL, HeaderID int NOT NULL, SampleDate DATETIME, Description varchar(1024)", + "CONSTRAINT FK_Header_Samples FOREIGN KEY (HeaderID) REFERENCES Header (ID)"); + + CreateTables("Results", "ID int NOT NULL, SampleID int NOT NULL, Result int", + "CONSTRAINT [FK_Samples_Results] FOREIGN KEY (SampleID) REFERENCES Samples (ID)"); + + #endregion + + #region Set SetUp catalogue entities + + var tiSamples = AddSamplesTableToCatalogue(DatabaseName, out var ciSamples); + var tiResults = AddResultsTableToCatalogue(DatabaseName, ciSamples); + var tiHeaders = AddHeaderTableToCatalogue(DatabaseName, ciSamples); + + // should be all entities set SetUp now + Assert.AreEqual(15, _catalogue.CatalogueItems.Length, "Unexpected number of items in catalogue"); + + #endregion + + // add data + + #region Populate Tables + + var connection = (SqlConnection)To.Server.GetConnection(); + connection.Open(); + + const string liveHeaderDataSql = + "INSERT INTO Header (ID, Discipline, hic_validFrom, hic_dataLoadRunID) VALUES " + + "(1, 'Biochemistry', 
'2016-01-15T15:00:00', 4), " + + "(3, 'Haematology', '2016-01-15T12:00:00', 3)"; + var liveHeaderDataSqlCommand = new SqlCommand(liveHeaderDataSql, connection); + liveHeaderDataSqlCommand.ExecuteNonQuery(); + + const string liveSampleDataSql = + "INSERT INTO Samples (ID, HeaderID, SampleDate, Description, hic_validFrom, hic_dataLoadRunID) VALUES " + + "(10, 1, '2016-01-10T12:00:00', 'Earlier than corresponding new data, should be updated', '2016-01-10T12:00:00', 1), " + + "(11, 1, '2016-01-15T15:00:00', 'Later than corresponding new data, should not be touched', '2016-01-15T15:00:00', 4), " + + "(14, 3, '2016-01-15T12:00:00', 'Header data of newly loaded (but older) sample should *not* update this rows parent header', '2016-01-15T12:00:00', 3)"; + var liveSampleDataSqlCommand = new SqlCommand(liveSampleDataSql, connection); + liveSampleDataSqlCommand.ExecuteNonQuery(); + + const string liveResultDataSql = + "INSERT INTO Results (ID, SampleID, Result, hic_validFrom, hic_dataLoadRunID) VALUES " + + "(100, 10, 999, '2016-01-10T12:00:00', 1), " + + "(101, 10, 888, '2016-01-10T12:00:00', 1), " + + "(102, 11, 456, '2016-01-15T15:00:00', 4), " + + "(103, 11, 654, '2016-01-15T15:00:00', 4), " + + "(107, 14, 111, '2016-01-15T12:00:00', 3)"; + var liveResultDataSqlCommand = new SqlCommand(liveResultDataSql, connection); + liveResultDataSqlCommand.ExecuteNonQuery(); + connection.Close(); + + connection = (SqlConnection)From.Server.GetConnection(); + connection.Open(); + + const string stagingHeaderDataSql = "INSERT INTO Header (ID, Discipline) VALUES " + + "(1, 'Haematology')," + + "(2, 'Biochemistry'), " + + "(3, 'Biochemistry')"; + var stagingHeaderDataSqlCommand = new SqlCommand(stagingHeaderDataSql, connection); + stagingHeaderDataSqlCommand.ExecuteNonQuery(); + + const string stagingSampleDataSql = "INSERT INTO Samples (ID, HeaderID, SampleDate, Description) VALUES " + + "(10, 1, '2016-01-12T13:00:00', 'Later than To data, represents an update and should overwrite To'), " + + "(11, 1, '2016-01-12T13:00:00', 'Earlier than To data, should not overwrite To'), " + + "(12, 2, '2016-01-12T13:00:00', 'New data that we did not have before')," + + "(13, 3, '2016-01-14T12:00:00', 'New data that we did not have before, but parent header record is wrong and been corrected in an earlier load for a later timeperiod (is Biochemistry here but To value of Haematology is correct)')"; + var stagingSampleDataSqlCommand = new SqlCommand(stagingSampleDataSql, connection); + stagingSampleDataSqlCommand.ExecuteNonQuery(); + + + const string stagingResultDataSql = "INSERT INTO Results (ID, SampleID, Result) VALUES " + + "(100, 10, 777), " + // changed 999 to 777 + "(101, 10, 888), " + // unchanged + "(104, 10, 666), " + // added this + "(102, 11, 400), " + // earlier data (which is also wrong, 456 in To), To data is newer and corrected + "(103, 11, 654), " + + "(105, 12, 123), " + // new result (from new sample) + "(106, 13, 123)"; // new result (from new sample) + var stagingResultDataSqlCommand = new SqlCommand(stagingResultDataSql, connection); + stagingResultDataSqlCommand.ExecuteNonQuery(); + + connection.Close(); + + #endregion + + // databases are now represent state after push to From and before migration + var mutilator = new StagingBackfillMutilator { - ColumnInfo[] ciList; - var ti = AddTableToCatalogue(databaseName, "Header", "ID", out ciList); + TimePeriodicityField = CatalogueRepository.GetAllObjects().Single(ci => ci.Name == + $"[{DatabaseName}]..[Samples].[SampleDate]"), + TestContext = true, + 
TableNamingScheme = new IdentityTableNamingScheme() + }; + + mutilator.Initialize(From, LoadStage.AdjustStaging); + mutilator.Check(ThrowImmediatelyCheckNotifier.Quiet); + mutilator.Mutilate(new ThrowImmediatelyDataLoadJob()); + + #region Assert + + // check that From contains the correct data + // Sample ID=2 should have been deleted, along with corresponding results 102 and 103 + connection = (SqlConnection)From.Server.GetConnection(); + connection.Open(); + + var cmd = new SqlCommand(@"SELECT COUNT(*) FROM Header", connection); + var numRows = cmd.ExecuteScalar(); + Assert.AreEqual(3, numRows, "Should be 3 header records"); + + cmd = new SqlCommand(@"SELECT Discipline FROM Header WHERE ID=1", connection); + var discipline = cmd.ExecuteScalar(); + Assert.AreEqual("Biochemistry", discipline, + "The mutilator should **NOT** have updated record 1 from Biochemistry to Haematology. Although the load updates one of the To samples, the most recent To sample is later than the most recent loaded sample so the parent data in To takes precedence over the parent data in From."); + + // Not convinced about this test case + //cmd = new SqlCommand(@"SELECT Discipline FROM Header WHERE ID=3", connection); + //discipline = cmd.ExecuteScalar(); + //Assert.AreEqual("Haematology", discipline, "The load should **not** have updated record 3 from Haematology to Biochemistry since the loaded record is from an earlier timeperiod."); + + cmd = new SqlCommand(@"SELECT COUNT(*) FROM Samples WHERE ID = 2", connection); + numRows = cmd.ExecuteScalar(); + Assert.AreEqual(0, numRows, "Sample ID = 2 has not been deleted"); + + cmd = new SqlCommand(@"SELECT COUNT(*) FROM Samples", connection); + numRows = cmd.ExecuteScalar(); + Assert.AreEqual(2, numRows, + "Sample ID = 2 has been deleted but something has happened to the other samples (should be untouched)"); + + cmd = new SqlCommand(@"SELECT COUNT(*) FROM Results WHERE SampleID = 2", connection); + numRows = cmd.ExecuteScalar(); + Assert.AreEqual(0, numRows, "Results belonging to Sample ID = 2 have not been deleted"); + + cmd = new SqlCommand(@"SELECT COUNT(*) FROM Results", connection); + numRows = cmd.ExecuteScalar(); + Assert.AreEqual(4, numRows, + "Results belonging to Sample ID = 2 have been deleted but something has happeded to the other results (should be untouched)"); + + connection.Close(); + + #endregion + + tiSamples.DeleteInDatabase(); + tiResults.DeleteInDatabase(); + tiHeaders.DeleteInDatabase(); + } - // setup join infos - new JoinInfo(CatalogueRepository,ciSamples.Single(info => info.GetRuntimeName().Equals("HeaderID")), - ciList.Single(info => info.GetRuntimeName().Equals("ID")), - ExtractionJoinType.Left, ""); + private ITableInfo AddSamplesTableToCatalogue(string databaseName, out ColumnInfo[] ciList) + { + var ti = AddTableToCatalogue(databaseName, "Samples", "ID", out ciList, true); + _catalogue.Name = databaseName; - ti.IsPrimaryExtractionTable = true; - ti.SaveToDatabase(); + // todo: what should this text actually look like + _catalogue.Time_coverage = "[Samples].[SampleDate]"; + _catalogue.SaveToDatabase(); + return ti; + } - return ti; - } + private ITableInfo AddResultsTableToCatalogue(string databaseName, ColumnInfo[] ciSamples) + { + var ti = AddTableToCatalogue(databaseName, "Results", "ID", out var ciList); - private ITableInfo AddTableToCatalogue(string databaseName, string tableName, string pkName, out ColumnInfo[] ciList, bool createCatalogue = false) - { - var table = 
DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(databaseName).ExpectTable(tableName); - var resultsImporter = new TableInfoImporter(CatalogueRepository, table); - - resultsImporter.DoImport(out var ti, out ciList); - - var pkResult = ciList.Single(info => info.GetRuntimeName().Equals(pkName)); - pkResult.IsPrimaryKey = true; - pkResult.SaveToDatabase(); - - var forwardEngineer = new ForwardEngineerCatalogue(ti, ciList); - if (createCatalogue) - { - CatalogueItem[] cataItems; - ExtractionInformation[] extractionInformations; + // setup join infos + new JoinInfo(CatalogueRepository, ciList.Single(info => info.GetRuntimeName().Equals("SampleID")), + ciSamples.Single(info => info.GetRuntimeName().Equals("ID")), + ExtractionJoinType.Left, ""); + return ti; + } - forwardEngineer.ExecuteForwardEngineering(out _catalogue, out cataItems, out extractionInformations); - } - else - forwardEngineer.ExecuteForwardEngineering(_catalogue); + private ITableInfo AddHeaderTableToCatalogue(string databaseName, ColumnInfo[] ciSamples) + { + var ti = AddTableToCatalogue(databaseName, "Header", "ID", out var ciList); - return ti; - } + // setup join infos + new JoinInfo(CatalogueRepository, ciSamples.Single(info => info.GetRuntimeName().Equals("HeaderID")), + ciList.Single(info => info.GetRuntimeName().Equals("ID")), + ExtractionJoinType.Left, ""); + ti.IsPrimaryExtractionTable = true; + ti.SaveToDatabase(); + return ti; } - internal class IdentityTableNamingScheme : INameDatabasesAndTablesDuringLoads + private ITableInfo AddTableToCatalogue(string databaseName, string tableName, string pkName, + out ColumnInfo[] ciList, bool createCatalogue = false) { - public string GetDatabaseName(string rootDatabaseName, LoadBubble convention) - { - return rootDatabaseName; - } + var table = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(databaseName) + .ExpectTable(tableName); + var resultsImporter = new TableInfoImporter(CatalogueRepository, table); - public string GetName(string tableName, LoadBubble convention) - { - return tableName; - } + resultsImporter.DoImport(out var ti, out ciList); - public bool IsNamedCorrectly(string tableName, LoadBubble convention) - { - return true; - } + var pkResult = ciList.Single(info => info.GetRuntimeName().Equals(pkName)); + pkResult.IsPrimaryKey = true; + pkResult.SaveToDatabase(); + + var forwardEngineer = new ForwardEngineerCatalogue(ti, ciList); + if (createCatalogue) + forwardEngineer.ExecuteForwardEngineering(out _catalogue, out _, out _); + else + forwardEngineer.ExecuteForwardEngineering(_catalogue); + + return ti; } } + +internal class IdentityTableNamingScheme : INameDatabasesAndTablesDuringLoads +{ + public string GetDatabaseName(string rootDatabaseName, LoadBubble convention) => rootDatabaseName; + + public string GetName(string tableName, LoadBubble convention) => tableName; + + public static bool IsNamedCorrectly(string tableName, LoadBubble convention) => true; +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/CachedFileRetrieverTests.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/CachedFileRetrieverTests.cs index 1cccdb8cbc..b497712c1c 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/CachedFileRetrieverTests.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/CachedFileRetrieverTests.cs @@ -8,7 +8,7 @@ using System.Collections.Generic; using System.IO; using FAnsi.Discovery; -using Moq; +using NSubstitute; using NUnit.Framework; using Rdmp.Core.Caching.Layouts; using 
Rdmp.Core.Curation; @@ -21,190 +21,194 @@ using Rdmp.Core.DataLoad.Engine.Job; using Rdmp.Core.DataLoad.Engine.Job.Scheduling; using Rdmp.Core.Logging; -using ReusableLibraryCode.Progress; +using Rdmp.Core.ReusableLibraryCode.Progress; using Tests.Common; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration; + +public class CachedFileRetrieverTests : DatabaseTests { - public class CachedFileRetrieverTests : DatabaseTests + private readonly ILoadProgress _lpMock; + private readonly ICacheProgress _cpMock; + + public CachedFileRetrieverTests() { - private ILoadProgress _lpMock; - private ICacheProgress _cpMock; + _cpMock = Substitute.For(); + _lpMock = Substitute.For(); + _lpMock.CacheProgress.Returns(_cpMock); + } - public CachedFileRetrieverTests() - { - _cpMock = Mock.Of(); - _lpMock = Mock.Of(l=>l.CacheProgress == _cpMock); - } - - [Test(Description = "RDMPDEV-185: Tests the scenario where the files in ForLoading do not match the files that are expected given the job specification. In this case the load process should not continue, otherwise the wrong data will be loaded.")] - public void AttemptToLoadDataWithFilesInForLoading_DisagreementBetweenCacheAndForLoading() + [Test(Description = + "RDMPDEV-185: Tests the scenario where the files in ForLoading do not match the files that are expected given the job specification. In this case the load process should not continue, otherwise the wrong data will be loaded.")] + public void AttemptToLoadDataWithFilesInForLoading_DisagreementBetweenCacheAndForLoading() + { + var tempDirPath = Path.Combine(Path.GetTempPath(), Path.GetRandomFileName()); + var tempDir = Directory.CreateDirectory(tempDirPath); + + try { - var tempDirPath = Path.Combine(Path.GetTempPath(), Path.GetRandomFileName()); - var tempDir = Directory.CreateDirectory(tempDirPath); + // Different file in ForLoading than exists in cache + var loadDirectory = LoadDirectory.CreateDirectoryStructure(tempDir, "CachedFileRetriever"); + var cachedFilePath = Path.Combine(loadDirectory.Cache.FullName, "2016-01-02.zip"); + File.WriteAllText(cachedFilePath, ""); + File.WriteAllText(Path.Combine(loadDirectory.ForLoading.FullName, "2016-01-01.zip"), ""); + + // Set SetUp retriever + var cacheLayout = new ZipCacheLayoutOnePerDay(loadDirectory.Cache, new NoSubdirectoriesCachePathResolver()); - try + var retriever = new TestCachedFileRetriever { - // Different file in ForLoading than exists in cache - var loadDirectory = LoadDirectory.CreateDirectoryStructure(tempDir, "CachedFileRetriever"); - var cachedFilePath = Path.Combine(loadDirectory.Cache.FullName, "2016-01-02.zip"); - File.WriteAllText(cachedFilePath, ""); - File.WriteAllText(Path.Combine(loadDirectory.ForLoading.FullName, "2016-01-01.zip"), ""); - - // Set SetUp retriever - var cacheLayout = new ZipCacheLayoutOnePerDay(loadDirectory.Cache, new NoSubdirectoriesCachePathResolver()); - - var retriever = new TestCachedFileRetriever() - { - ExtractFilesFromArchive = false, - LoadProgress = _lpMock, - Layout = cacheLayout - }; - - // Set SetUp job - var job = CreateTestJob(loadDirectory); - job.DatesToRetrieve = new List - { - new DateTime(2016, 01, 02) - }; - - // Should fail after determining that the files in ForLoading do not match the job specification - var ex = Assert.Throws(() => retriever.Fetch(job, new GracefulCancellationToken())); - Assert.IsTrue(ex.Message.StartsWith("The files in ForLoading do not match what this job expects to be loading from the cache."), ex.Message + 
Environment.NewLine + Environment.NewLine + ex.StackTrace); - } - finally + ExtractFilesFromArchive = false, + LoadProgress = _lpMock, + Layout = cacheLayout + }; + + // Set SetUp job + var job = CreateTestJob(loadDirectory); + job.DatesToRetrieve = new List { - tempDir.Delete(true); - } + new(2016, 01, 02) + }; + + // Should fail after determining that the files in ForLoading do not match the job specification + var ex = Assert.Throws(() => + retriever.Fetch(job, new GracefulCancellationToken())); + Assert.IsTrue( + ex.Message.StartsWith( + "The files in ForLoading do not match what this job expects to be loading from the cache."), + ex.Message + Environment.NewLine + Environment.NewLine + ex.StackTrace); } - - [Test(Description = "RDMPDEV-185: Tests the scenario where the files in ForLoading match the files that are expected given the job specification, e.g. a load has after the cache has been populated and a subsequent load with *exactly the same parameters* has been triggered. In this case the load can proceed.")] - public void AttemptToLoadDataWithFilesInForLoading_AgreementBetweenForLoadingAndCache() + finally { - var tempDirPath = Path.Combine(Path.GetTempPath(), Path.GetRandomFileName()); - var tempDir = Directory.CreateDirectory(tempDirPath); - - try - { - // File in cache is the same file as in ForLoading (20160101.zip) - var loadDirectory = LoadDirectory.CreateDirectoryStructure(tempDir, "CachedFileRetriever"); - var cachedFilePath = Path.Combine(loadDirectory.Cache.FullName, "2016-01-01.zip"); - File.WriteAllText(cachedFilePath, ""); - File.WriteAllText(Path.Combine(loadDirectory.ForLoading.FullName, "2016-01-01.zip"), ""); - - - // Set SetUp retriever - var cacheLayout = new ZipCacheLayoutOnePerDay(loadDirectory.Cache, new NoSubdirectoriesCachePathResolver()); - - var retriever = new TestCachedFileRetriever() - { - ExtractFilesFromArchive = false, - LoadProgress = _lpMock, - Layout = cacheLayout - - }; - - // Set SetUp job - var job = CreateTestJob(loadDirectory); - job.DatesToRetrieve = new List - { - new DateTime(2016, 01, 01) - }; - - // Should complete successfully, the file in ForLoading matches the job specification - retriever.Fetch(job, new GracefulCancellationToken()); - - // And ForLoading should still have the file in it (i.e. it hasn't mysteriously disappeared) - Assert.IsTrue(File.Exists(Path.Combine(loadDirectory.ForLoading.FullName, "2016-01-01.zip"))); - } - finally - { - tempDir.Delete(true); - } + tempDir.Delete(true); } + } - [Test(Description = "RDMPDEV-185: Tests the default scenario where there are no files in ForLoading.")] - public void AttemptToLoadDataWithoutFilesInForLoading() + [Test(Description = + "RDMPDEV-185: Tests the scenario where the files in ForLoading match the files that are expected given the job specification, e.g. a load has after the cache has been populated and a subsequent load with *exactly the same parameters* has been triggered. 
In this case the load can proceed.")] + public void AttemptToLoadDataWithFilesInForLoading_AgreementBetweenForLoadingAndCache() + { + var tempDirPath = Path.Combine(Path.GetTempPath(), Path.GetRandomFileName()); + var tempDir = Directory.CreateDirectory(tempDirPath); + + try { - var tempDirPath = Path.Combine(Path.GetTempPath(), Path.GetRandomFileName()); - var tempDir = Directory.CreateDirectory(tempDirPath); + // File in cache is the same file as in ForLoading (20160101.zip) + var loadDirectory = LoadDirectory.CreateDirectoryStructure(tempDir, "CachedFileRetriever"); + var cachedFilePath = Path.Combine(loadDirectory.Cache.FullName, "2016-01-01.zip"); + File.WriteAllText(cachedFilePath, ""); + File.WriteAllText(Path.Combine(loadDirectory.ForLoading.FullName, "2016-01-01.zip"), ""); + - try + // Set SetUp retriever + var cacheLayout = new ZipCacheLayoutOnePerDay(loadDirectory.Cache, new NoSubdirectoriesCachePathResolver()); + + var retriever = new TestCachedFileRetriever + { + ExtractFilesFromArchive = false, + LoadProgress = _lpMock, + Layout = cacheLayout + }; + + // Set SetUp job + var job = CreateTestJob(loadDirectory); + job.DatesToRetrieve = new List { - // File in cache only, no files in ForLoading - var loadDirectory = LoadDirectory.CreateDirectoryStructure(tempDir, "CachedFileRetriever"); - var cachedFilePath = Path.Combine(loadDirectory.Cache.FullName, "2016-01-01.zip"); - File.WriteAllText(cachedFilePath, ""); + new(2016, 01, 01) + }; + // Should complete successfully, the file in ForLoading matches the job specification + retriever.Fetch(job, new GracefulCancellationToken()); - // Set SetUp retriever - var cacheLayout = new ZipCacheLayoutOnePerDay(loadDirectory.Cache, new NoSubdirectoriesCachePathResolver()); + // And ForLoading should still have the file in it (i.e. 
it hasn't mysteriously disappeared) + Assert.IsTrue(File.Exists(Path.Combine(loadDirectory.ForLoading.FullName, "2016-01-01.zip"))); + } + finally + { + tempDir.Delete(true); + } + } - var retriever = new TestCachedFileRetriever() - { - ExtractFilesFromArchive = false, - LoadProgress = _lpMock, - Layout = cacheLayout + [Test(Description = "RDMPDEV-185: Tests the default scenario where there are no files in ForLoading.")] + public void AttemptToLoadDataWithoutFilesInForLoading() + { + var tempDirPath = Path.Combine(Path.GetTempPath(), Path.GetRandomFileName()); + var tempDir = Directory.CreateDirectory(tempDirPath); - }; + try + { + // File in cache only, no files in ForLoading + var loadDirectory = LoadDirectory.CreateDirectoryStructure(tempDir, "CachedFileRetriever"); + var cachedFilePath = Path.Combine(loadDirectory.Cache.FullName, "2016-01-01.zip"); + File.WriteAllText(cachedFilePath, ""); - // Set SetUp job - var job = CreateTestJob(loadDirectory); - job.DatesToRetrieve = new List - { - new DateTime(2016, 01, 01) - }; - // Should complete successfully, there are no files in ForLoading to worry about - retriever.Fetch(job, new GracefulCancellationToken()); + // Set SetUp retriever + var cacheLayout = new ZipCacheLayoutOnePerDay(loadDirectory.Cache, new NoSubdirectoriesCachePathResolver()); - // And the retriever should have copied the cached archive file into ForLoading - Assert.IsTrue(File.Exists(Path.Combine(loadDirectory.ForLoading.FullName, "2016-01-01.zip"))); - } - finally + var retriever = new TestCachedFileRetriever { - tempDir.Delete(true); - } - } + ExtractFilesFromArchive = false, + LoadProgress = _lpMock, + Layout = cacheLayout + }; + + // Set SetUp job + var job = CreateTestJob(loadDirectory); + job.DatesToRetrieve = new List + { + new(2016, 01, 01) + }; + + // Should complete successfully, there are no files in ForLoading to worry about + retriever.Fetch(job, new GracefulCancellationToken()); - private ScheduledDataLoadJob CreateTestJob(ILoadDirectory directory) + // And the retriever should have copied the cached archive file into ForLoading + Assert.IsTrue(File.Exists(Path.Combine(loadDirectory.ForLoading.FullName, "2016-01-01.zip"))); + } + finally { - var catalogue = Mock.Of(c => - c.GetTableInfoList(false) == new TableInfo[0] && - c.GetLookupTableInfoList()==new TableInfo[0] && - c.LoggingDataTask == "TestLogging" - ); - - var logManager = Mock.Of(); - var loadMetadata = Mock.Of(lm => lm.GetAllCatalogues()==new[] { catalogue }); - - var j = new ScheduledDataLoadJob(RepositoryLocator, "Test job", logManager, loadMetadata, directory, new ThrowImmediatelyDataLoadEventListener(), null); - j.LoadProgress = _lpMock; - return j; + tempDir.Delete(true); } } - internal class TestCachedFileRetriever : CachedFileRetriever + private ScheduledDataLoadJob CreateTestJob(ILoadDirectory directory) { - public ICacheLayout Layout; + var catalogue = Substitute.For(); + catalogue.GetTableInfoList(false).Returns(Array.Empty()); + catalogue.GetLookupTableInfoList().Returns(Array.Empty()); + catalogue.LoggingDataTask.Returns("TestLogging"); - public override void Initialize(ILoadDirectory directory, DiscoveredDatabase dbInfo) - { - - } + var logManager = Substitute.For(); + var loadMetadata = Substitute.For(); + loadMetadata.GetAllCatalogues().Returns(new[] { catalogue }); - public override ExitCodeType Fetch(IDataLoadJob dataLoadJob, GracefulCancellationToken cancellationToken) + var j = new ScheduledDataLoadJob(RepositoryLocator, "Test job", logManager, loadMetadata, directory, + 
ThrowImmediatelyDataLoadEventListener.Quiet, null) { - var scheduledJob = ConvertToScheduledJob(dataLoadJob); - GetDataLoadWorkload(scheduledJob); - ExtractJobs(scheduledJob); - - return ExitCodeType.Success; - } + LoadProgress = _lpMock + }; + return j; + } +} - protected override ICacheLayout CreateCacheLayout(ICacheProgress cacheProgress, IDataLoadEventListener listener) - { - return Layout; - } +internal class TestCachedFileRetriever : CachedFileRetriever +{ + public ICacheLayout Layout; + + public override void Initialize(ILoadDirectory directory, DiscoveredDatabase dbInfo) + { } + + public override ExitCodeType Fetch(IDataLoadJob dataLoadJob, GracefulCancellationToken cancellationToken) + { + var scheduledJob = ConvertToScheduledJob(dataLoadJob); + GetDataLoadWorkload(scheduledJob); + ExtractJobs(scheduledJob); + + return ExitCodeType.Success; + } + + protected override ICacheLayout CreateCacheLayout(ICacheProgress cacheProgress, IDataLoadEventListener listener) => + Layout; } \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/CheckingTests/ProcessTaskCheckingTests.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/CheckingTests/ProcessTaskCheckingTests.cs index 81c429150c..335032c5ca 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/CheckingTests/ProcessTaskCheckingTests.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/CheckingTests/ProcessTaskCheckingTests.cs @@ -8,198 +8,205 @@ using System.IO; using System.Linq; using System.Text.RegularExpressions; -using MapsDirectlyToDatabaseTable; using NUnit.Framework; using Rdmp.Core.Curation; using Rdmp.Core.Curation.Data; using Rdmp.Core.Curation.Data.DataLoad; using Rdmp.Core.DataLoad.Engine.Checks.Checkers; using Rdmp.Core.DataLoad.Modules.Attachers; -using ReusableLibraryCode; -using ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode; +using Rdmp.Core.ReusableLibraryCode.Checks; using Tests.Common; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration.CheckingTests +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration.CheckingTests; + +public class ProcessTaskCheckingTests : DatabaseTests { - public class ProcessTaskCheckingTests:DatabaseTests + private LoadMetadata _lmd; + private ProcessTask _task; + private ProcessTaskChecks _checker; + private DirectoryInfo _dir; + + [SetUp] + public void CreateTask() { - private LoadMetadata _lmd; - private ProcessTask _task; - private ProcessTaskChecks _checker; - private DirectoryInfo _dir; + _lmd = new LoadMetadata(CatalogueRepository); - [SetUp] - public void CreateTask() - { - _lmd = new LoadMetadata(CatalogueRepository); - - _dir = new DirectoryInfo(Path.Combine(TestContext.CurrentContext.TestDirectory,"ProcessTaskCheckingTests")); - _dir.Create(); - - var hicdir = LoadDirectory.CreateDirectoryStructure(_dir, "ProjDir", true); - _lmd.LocationOfFlatFiles = hicdir.RootPath.FullName; - _lmd.SaveToDatabase(); - - Catalogue c = new Catalogue(CatalogueRepository,"c"); - CatalogueItem ci = new CatalogueItem(CatalogueRepository,c,"ci"); - TableInfo t = new TableInfo(CatalogueRepository,"t"); - t.Server = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.Name; - t.Database = "mydb"; - t.SaveToDatabase(); - ColumnInfo col = new ColumnInfo(CatalogueRepository,"col","bit",t); - ci.SetColumnInfo(col); - c.LoadMetadata_ID = _lmd.ID; - c.SaveToDatabase(); - - _task = new ProcessTask(CatalogueRepository, _lmd, LoadStage.GetFiles); - _checker = new ProcessTaskChecks(_lmd); - } - - - [Test] - [TestCase(null,ProcessTaskType.Executable)] - 
[TestCase("",ProcessTaskType.Executable)] - [TestCase(" ",ProcessTaskType.Executable)] - [TestCase(null,ProcessTaskType.SQLFile)] - [TestCase("",ProcessTaskType.SQLFile)] - [TestCase(" ",ProcessTaskType.SQLFile)] - public void EmptyFilePath(string path, ProcessTaskType typeThatRequiresFiles) - { - _task.ProcessTaskType = typeThatRequiresFiles; - _task.Path = path; - _task.SaveToDatabase(); - var ex = Assert.Throws(()=>_checker.Check(new ThrowImmediatelyCheckNotifier())); - StringAssert.Contains("does not have a path specified",ex.Message); - } + _dir = new DirectoryInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, "ProcessTaskCheckingTests")); + _dir.Create(); - [Test] - [TestCase(null, ProcessTaskType.MutilateDataTable,LoadStage.AdjustStaging)] - [TestCase("", ProcessTaskType.MutilateDataTable, LoadStage.AdjustStaging)] - [TestCase(" ", ProcessTaskType.MutilateDataTable, LoadStage.AdjustRaw)] - [TestCase(null, ProcessTaskType.Attacher, LoadStage.Mounting)] - [TestCase(null, ProcessTaskType.DataProvider, LoadStage.GetFiles)] - public void EmptyClassPath(string path, ProcessTaskType typeThatRequiresMEF, LoadStage stage) + var hicdir = LoadDirectory.CreateDirectoryStructure(_dir, "ProjDir", true); + _lmd.LocationOfFlatFiles = hicdir.RootPath.FullName; + _lmd.SaveToDatabase(); + + var c = new Catalogue(CatalogueRepository, "c"); + var ci = new CatalogueItem(CatalogueRepository, c, "ci"); + var t = new TableInfo(CatalogueRepository, "t") { - _task.ProcessTaskType = typeThatRequiresMEF; - _task.Path = path; - _task.LoadStage = stage; - _task.SaveToDatabase(); - var ex = Assert.Throws(()=>_checker.Check(new ThrowImmediatelyCheckNotifier())); - Assert.IsTrue(Regex.IsMatch(ex.Message,"Path is blank for ProcessTask 'New Process.*' - it should be a class name of type")); - } + Server = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.Name, + Database = "mydb" + }; + t.SaveToDatabase(); + var col = new ColumnInfo(CatalogueRepository, "col", "bit", t); + ci.SetColumnInfo(col); + c.LoadMetadata_ID = _lmd.ID; + c.SaveToDatabase(); + + _task = new ProcessTask(CatalogueRepository, _lmd, LoadStage.GetFiles); + _checker = new ProcessTaskChecks(_lmd); + } + - [Test] - public void MEFIncompatibleType() + [Test] + [TestCase(null, ProcessTaskType.Executable)] + [TestCase("", ProcessTaskType.Executable)] + [TestCase(" ", ProcessTaskType.Executable)] + [TestCase(null, ProcessTaskType.SQLFile)] + [TestCase("", ProcessTaskType.SQLFile)] + [TestCase(" ", ProcessTaskType.SQLFile)] + public void EmptyFilePath(string path, ProcessTaskType typeThatRequiresFiles) + { + _task.ProcessTaskType = typeThatRequiresFiles; + _task.Path = path; + _task.SaveToDatabase(); + var ex = Assert.Throws(() => _checker.Check(ThrowImmediatelyCheckNotifier.Quiet)); + StringAssert.Contains("does not have a path specified", ex.Message); + } + + [Test] + [TestCase(null, ProcessTaskType.MutilateDataTable, LoadStage.AdjustStaging)] + [TestCase("", ProcessTaskType.MutilateDataTable, LoadStage.AdjustStaging)] + [TestCase(" ", ProcessTaskType.MutilateDataTable, LoadStage.AdjustRaw)] + [TestCase(null, ProcessTaskType.Attacher, LoadStage.Mounting)] + [TestCase(null, ProcessTaskType.DataProvider, LoadStage.GetFiles)] + public void EmptyClassPath(string path, ProcessTaskType typeThatRequiresMEF, LoadStage stage) + { + _task.ProcessTaskType = typeThatRequiresMEF; + _task.Path = path; + _task.LoadStage = stage; + _task.SaveToDatabase(); + var ex = Assert.Throws(() => _checker.Check(ThrowImmediatelyCheckNotifier.Quiet)); + 
Assert.IsTrue(Regex.IsMatch(ex.Message, + "Path is blank for ProcessTask 'New Process.*' - it should be a class name of type")); + } + + [Test] + public void MEFIncompatibleType() + { + _task.LoadStage = LoadStage.AdjustStaging; + _task.ProcessTaskType = ProcessTaskType.MutilateDataTable; + _task.Path = typeof(object).ToString(); + _task.SaveToDatabase(); + var ex = Assert.Throws(() => _checker.Check(ThrowImmediatelyCheckNotifier.Quiet)); + Assert.AreEqual( + "Requested typeToCreate 'System.Object' was not assignable to the required Type 'IMutilateDataTables'", + ex.Message); + } + + [Test] + public void MEFCompatibleType_NoProjectDirectory() + { + _lmd.LocationOfFlatFiles = null; + _lmd.SaveToDatabase(); + + _task.ProcessTaskType = ProcessTaskType.Attacher; + _task.LoadStage = LoadStage.Mounting; + _task.Path = typeof(AnySeparatorFileAttacher).FullName; + _task.SaveToDatabase(); + _task.CreateArgumentsForClassIfNotExists(); + + var ex = Assert.Throws(() => _checker.Check(ThrowImmediatelyCheckNotifier.QuietPicky)); + Assert.AreEqual($@"No Project Directory (LocationOfFlatFiles) has been configured on LoadMetadata {_lmd.Name}", + ex.InnerException.Message); + } + + [Test] + public void MEFCompatibleType_NoArgs() + { + var projDir = + LoadDirectory.CreateDirectoryStructure(new DirectoryInfo(TestContext.CurrentContext.TestDirectory), + "DelMeProjDir", true); + try { - _task.LoadStage = LoadStage.AdjustStaging; - _task.ProcessTaskType = ProcessTaskType.MutilateDataTable; - _task.Path = typeof(object).ToString(); + _lmd.LocationOfFlatFiles = projDir.RootPath.FullName; + _task.ProcessTaskType = ProcessTaskType.Attacher; + _task.LoadStage = LoadStage.Mounting; + _task.Path = typeof(AnySeparatorFileAttacher).FullName; _task.SaveToDatabase(); - var ex = Assert.Throws(() => _checker.Check(new ThrowImmediatelyCheckNotifier())); - Assert.AreEqual("Requested typeToCreate 'System.Object' was not assignable to the required Type 'IMutilateDataTables'", ex.Message); + + + var ex = Assert.Throws(() => _checker.Check(ThrowImmediatelyCheckNotifier.QuietPicky)); + + Assert.AreEqual( + @"Class AnySeparatorFileAttacher has a Mandatory property 'Separator' marked with DemandsInitialization but no corresponding argument was provided in ArgumentCollection", + ex.Message); } - [Test] - public void MEFCompatibleType_NoProjectDirectory() + finally { - _lmd.LocationOfFlatFiles = null; - _lmd.SaveToDatabase(); + //delete everything for real + projDir.RootPath.Delete(true); + } + } + [Test] + public void MEFCompatibleType_Passes() + { + var projDir = + LoadDirectory.CreateDirectoryStructure(new DirectoryInfo(TestContext.CurrentContext.TestDirectory), + "DelMeProjDir", true); + try + { + _lmd.LocationOfFlatFiles = projDir.RootPath.FullName; _task.ProcessTaskType = ProcessTaskType.Attacher; _task.LoadStage = LoadStage.Mounting; _task.Path = typeof(AnySeparatorFileAttacher).FullName; _task.SaveToDatabase(); - _task.CreateArgumentsForClassIfNotExists(); - var ex = Assert.Throws(()=>_checker.Check(new ThrowImmediatelyCheckNotifier(){ThrowOnWarning = true})); - Assert.AreEqual(@"No Project Directory (LocationOfFlatFiles) has been configured on LoadMetadata " + _lmd.Name, ex.InnerException.Message); - - } - [Test] - public void MEFCompatibleType_NoArgs() - { + //create the arguments + var args = ProcessTaskArgument.CreateArgumentsForClassIfNotExists(_task); - var projDir = LoadDirectory.CreateDirectoryStructure(new DirectoryInfo(TestContext.CurrentContext.TestDirectory), "DelMeProjDir", true); - try - { - 
_lmd.LocationOfFlatFiles = projDir.RootPath.FullName; - _task.ProcessTaskType = ProcessTaskType.Attacher; - _task.LoadStage = LoadStage.Mounting; - _task.Path = typeof(AnySeparatorFileAttacher).FullName; - _task.SaveToDatabase(); + var tblName = (ProcessTaskArgument)args.Single(a => a.Name.Equals("TableName")); + tblName.Value = "MyExcitingTable"; + tblName.SaveToDatabase(); + var filePattern = (ProcessTaskArgument)args.Single(a => a.Name.Equals("FilePattern")); + filePattern.Value = "*.csv"; + filePattern.SaveToDatabase(); - var ex = Assert.Throws(() => _checker.Check(new ThrowImmediatelyCheckNotifier() { ThrowOnWarning = true })); + var separator = (ProcessTaskArgument)args.Single(a => a.Name.Equals("Separator")); + separator.Value = ","; + separator.SaveToDatabase(); - Assert.AreEqual(@"Class AnySeparatorFileAttacher has a Mandatory property 'Separator' marked with DemandsInitialization but no corresponding argument was provided in ArgumentCollection",ex.Message); - + var results = new ToMemoryCheckNotifier(); + _checker.Check(results); - } - finally + foreach (var msg in results.Messages) { - //delete everything for real - projDir.RootPath.Delete(true); - } - } + Console.WriteLine($"({msg.Result}){msg.Message}"); - [Test] - public void MEFCompatibleType_Passes() - { - var projDir = LoadDirectory.CreateDirectoryStructure(new DirectoryInfo(TestContext.CurrentContext.TestDirectory), "DelMeProjDir", true); - try - { - _lmd.LocationOfFlatFiles = projDir.RootPath.FullName; - _task.ProcessTaskType = ProcessTaskType.Attacher; - _task.LoadStage = LoadStage.Mounting; - _task.Path = typeof(AnySeparatorFileAttacher).FullName; - _task.SaveToDatabase(); - - //create the arguments - var args = ProcessTaskArgument.CreateArgumentsForClassIfNotExists(_task); - - var tblName = (ProcessTaskArgument)args.Single(a => a.Name.Equals("TableName")); - tblName.Value = "MyExcitingTable"; - tblName.SaveToDatabase(); - - var filePattern = (ProcessTaskArgument)args.Single(a => a.Name.Equals("FilePattern")); - filePattern.Value = "*.csv"; - filePattern.SaveToDatabase(); - - var separator = (ProcessTaskArgument)args.Single(a => a.Name.Equals("Separator")); - separator.Value = ","; - separator.SaveToDatabase(); - - var results = new ToMemoryCheckNotifier(); - _checker.Check(results); - - foreach (var msg in results.Messages) - { - Console.WriteLine("(" + msg.Result + ")" + msg.Message); - - if (msg.Ex != null) - Console.WriteLine(ExceptionHelper.ExceptionToListOfInnerMessages(msg.Ex)); - } - - Assert.AreEqual( CheckResult.Success,results.GetWorst()); - } - finally - { - //delete everything for real - projDir.RootPath.Delete(true); + if (msg.Ex != null) + Console.WriteLine(ExceptionHelper.ExceptionToListOfInnerMessages(msg.Ex)); } - } - [Test] - [TestCase("bob.exe")] - [TestCase(@"""C:\ProgramFiles\My Software With Spaces\bob.exe""")] - [TestCase(@"""C:\ProgramFiles\My Software With Spaces\bob.exe"" arg1 arg2 -f ""c:\my folder\arg3.exe""")] - public void ImaginaryFile(string path) + Assert.AreEqual(CheckResult.Success, results.GetWorst()); + } + finally { - _task.ProcessTaskType = ProcessTaskType.Executable; - _task.Path = path; - _task.SaveToDatabase(); - var ex = Assert.Throws(()=>_checker.Check(new ThrowImmediatelyCheckNotifier(){ThrowOnWarning=true})); - StringAssert.Contains("bob.exe which does not exist at this time.",ex.Message); + //delete everything for real + projDir.RootPath.Delete(true); } + } + [Test] + [TestCase("bob.exe")] + [TestCase(@"""C:\ProgramFiles\My Software With Spaces\bob.exe""")] + 
[TestCase(@"""C:\ProgramFiles\My Software With Spaces\bob.exe"" arg1 arg2 -f ""c:\my folder\arg3.exe""")] + public void ImaginaryFile(string path) + { + _task.ProcessTaskType = ProcessTaskType.Executable; + _task.Path = path; + _task.SaveToDatabase(); + var ex = Assert.Throws(() => _checker.Check(ThrowImmediatelyCheckNotifier.QuietPicky)); + StringAssert.Contains("bob.exe which does not exist at this time.", ex?.Message); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/CoalescerTests.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/CoalescerTests.cs index b55c97f110..a267bc79f4 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/CoalescerTests.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/CoalescerTests.cs @@ -11,7 +11,6 @@ using FAnsi; using NUnit.Framework; using Rdmp.Core.Curation; -using Rdmp.Core.Curation.Data; using Rdmp.Core.Curation.Data.DataLoad; using Rdmp.Core.Curation.Data.EntityNaming; using Rdmp.Core.DataLoad.Engine.DatabaseManagement.EntityNaming; @@ -19,106 +18,108 @@ using Rdmp.Core.DataLoad.Modules.Mutilators; using Tests.Common; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration; + +public class CoalescerTests : DatabaseTests { - public class CoalescerTests:DatabaseTests + [TestCase(DatabaseType.MicrosoftSQLServer, true)] + [TestCase(DatabaseType.MicrosoftSQLServer, false)] + [TestCase(DatabaseType.MySql, true)] + [TestCase(DatabaseType.MySql, false)] + public void TestCoalescer_RampantNullness(DatabaseType type, bool useCustomNamer) { - [TestCase(DatabaseType.MicrosoftSQLServer,true)] - [TestCase(DatabaseType.MicrosoftSQLServer, false)] - [TestCase(DatabaseType.MySql,true)] - [TestCase(DatabaseType.MySql, false)] - public void TestCoalescer_RampantNullness(DatabaseType type,bool useCustomNamer) + var db = GetCleanedServer(type, "TestCoalescer"); + + const int batchCount = 1000; + + using var dt = new DataTable("TestCoalescer_RampantNullness"); + dt.BeginLoadData(); + dt.Columns.Add("pk"); + dt.Columns.Add("f1"); + dt.Columns.Add("f2"); + dt.Columns.Add("f3"); + dt.Columns.Add("f4"); + + var r = new Random(); + + for (var i = 0; i < batchCount; i++) { - var db = GetCleanedServer(type, "TestCoalescer"); + var randInt = r.Next(250); + var randCompleteness = r.Next(4); - int batchCount = 1000; + dt.Rows.Add(new object[] { randInt, randInt, randInt, randInt, randInt }); + dt.Rows.Add(new object[] { randInt, DBNull.Value, DBNull.Value, DBNull.Value, randInt }); + dt.Rows.Add(new object[] { randInt, DBNull.Value, DBNull.Value, randInt, DBNull.Value }); + dt.Rows.Add(new object[] { randInt, DBNull.Value, DBNull.Value, randInt, randInt }); - DataTable dt = new DataTable("TestCoalescer_RampantNullness"); - dt.Columns.Add("pk"); - dt.Columns.Add("f1"); - dt.Columns.Add("f2"); - dt.Columns.Add("f3"); - dt.Columns.Add("f4"); + if (randCompleteness >= 1) + { + dt.Rows.Add(new object[] { randInt, DBNull.Value, randInt, DBNull.Value, DBNull.Value }); + dt.Rows.Add(new object[] { randInt, DBNull.Value, randInt, DBNull.Value, randInt }); + dt.Rows.Add(new object[] { randInt, DBNull.Value, randInt, randInt, DBNull.Value }); + dt.Rows.Add(new object[] { randInt, DBNull.Value, randInt, randInt, randInt }); + } + + if (randCompleteness >= 2) + { + dt.Rows.Add(new object[] { randInt, randInt, DBNull.Value, DBNull.Value, DBNull.Value }); + dt.Rows.Add(new object[] { randInt, randInt, DBNull.Value, DBNull.Value, randInt }); + dt.Rows.Add(new object[] { randInt, randInt, 
DBNull.Value, randInt, DBNull.Value }); + dt.Rows.Add(new object[] { randInt, randInt, DBNull.Value, randInt, randInt }); + } - Random r = new Random(); - for (int i = 0; i < batchCount; i++) + if (randCompleteness >= 3) { - int randInt = r.Next(250); - int randCompleteness = r.Next(4); - - dt.Rows.Add(new object[] { randInt, randInt, randInt, randInt, randInt }); - dt.Rows.Add(new object[] { randInt, DBNull.Value, DBNull.Value, DBNull.Value, randInt }); - dt.Rows.Add(new object[] { randInt, DBNull.Value, DBNull.Value, randInt, DBNull.Value }); - dt.Rows.Add(new object[] { randInt, DBNull.Value, DBNull.Value, randInt, randInt }); - - if (randCompleteness >=1) - { - dt.Rows.Add(new object[] { randInt, DBNull.Value, randInt, DBNull.Value, DBNull.Value }); - dt.Rows.Add(new object[] { randInt, DBNull.Value, randInt, DBNull.Value, randInt }); - dt.Rows.Add(new object[] { randInt, DBNull.Value, randInt, randInt, DBNull.Value }); - dt.Rows.Add(new object[] { randInt, DBNull.Value, randInt, randInt, randInt }); - } - - if(randCompleteness >=2) - { - dt.Rows.Add(new object[] { randInt, randInt, DBNull.Value, DBNull.Value, DBNull.Value }); - dt.Rows.Add(new object[] { randInt, randInt, DBNull.Value, DBNull.Value, randInt }); - dt.Rows.Add(new object[] { randInt, randInt, DBNull.Value, randInt, DBNull.Value }); - dt.Rows.Add(new object[] { randInt, randInt, DBNull.Value, randInt, randInt }); - } - - - if(randCompleteness >= 3) - { - dt.Rows.Add(new object[] { randInt, randInt, randInt, DBNull.Value, DBNull.Value }); - dt.Rows.Add(new object[] { randInt, randInt, randInt, DBNull.Value, randInt }); - dt.Rows.Add(new object[] { randInt, randInt, randInt, randInt, DBNull.Value }); - dt.Rows.Add(new object[] { randInt, DBNull.Value, DBNull.Value, DBNull.Value, DBNull.Value }); - } + dt.Rows.Add(new object[] { randInt, randInt, randInt, DBNull.Value, DBNull.Value }); + dt.Rows.Add(new object[] { randInt, randInt, randInt, DBNull.Value, randInt }); + dt.Rows.Add(new object[] { randInt, randInt, randInt, randInt, DBNull.Value }); + dt.Rows.Add(new object[] { randInt, DBNull.Value, DBNull.Value, DBNull.Value, DBNull.Value }); } + } - var tbl = db.CreateTable(dt.TableName, dt); + dt.EndLoadData(); + var tbl = db.CreateTable(dt.TableName, dt); - var importer = new TableInfoImporter(CatalogueRepository, tbl); - importer.DoImport(out var tableInfo,out var colInfos); + var importer = new TableInfoImporter(CatalogueRepository, tbl); + importer.DoImport(out var tableInfo, out var colInfos); - //lie about what hte primary key is because this component is designed to run in the RAW environment and we are simulating a LIVE TableInfo (correctly) - var pk = colInfos.Single(c => c.GetRuntimeName().Equals("pk")); - pk.IsPrimaryKey = true; - pk.SaveToDatabase(); + //lie about what hte primary key is because this component is designed to run in the RAW environment and we are simulating a LIVE TableInfo (correctly) + var pk = colInfos.Single(c => c.GetRuntimeName().Equals("pk")); + pk.IsPrimaryKey = true; + pk.SaveToDatabase(); - INameDatabasesAndTablesDuringLoads namer = null; + INameDatabasesAndTablesDuringLoads namer = null; - if (useCustomNamer) - { - tbl.Rename("AAAA"); - namer = RdmpMockFactory.Mock_INameDatabasesAndTablesDuringLoads(db, "AAAA"); - } - - HICDatabaseConfiguration configuration = new HICDatabaseConfiguration(db.Server,namer); - - var coalescer = new Coalescer(); - coalescer.TableRegexPattern = new Regex(".*"); - coalescer.CreateIndex = true; - coalescer.Initialize(db,LoadStage.AdjustRaw); + if 
(useCustomNamer) + { + tbl.Rename("AAAA"); + namer = RdmpMockFactory.Mock_INameDatabasesAndTablesDuringLoads(db, "AAAA"); + } + var configuration = new HICDatabaseConfiguration(db.Server, namer); - var job = new ThrowImmediatelyDataLoadJob(configuration, tableInfo); - coalescer.Mutilate(job); + var coalescer = new Coalescer + { + TableRegexPattern = new Regex(".*"), + CreateIndex = true + }; + coalescer.Initialize(db, LoadStage.AdjustRaw); - var dt2 = tbl.GetDataTable(); - foreach (DataRow row in dt2.Rows) - { - Assert.AreNotEqual(DBNull.Value,row["f1"]); - Assert.AreNotEqual(DBNull.Value, row["f2"]); - Assert.AreNotEqual(DBNull.Value, row["f3"]); - Assert.AreNotEqual(DBNull.Value, row["f4"]); - } + var job = new ThrowImmediatelyDataLoadJob(configuration, tableInfo); + coalescer.Mutilate(job); - db.Drop(); + var dt2 = tbl.GetDataTable(); + + foreach (DataRow row in dt2.Rows) + { + Assert.AreNotEqual(DBNull.Value, row["f1"]); + Assert.AreNotEqual(DBNull.Value, row["f2"]); + Assert.AreNotEqual(DBNull.Value, row["f3"]); + Assert.AreNotEqual(DBNull.Value, row["f4"]); } + db.Drop(); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/CrossDatabaseTypeTests/CrossDatabaseDataLoadTests.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/CrossDatabaseTypeTests/CrossDatabaseDataLoadTests.cs index 1c4b403129..5fa6d848ba 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/CrossDatabaseTypeTests/CrossDatabaseDataLoadTests.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/CrossDatabaseTypeTests/CrossDatabaseDataLoadTests.cs @@ -28,437 +28,451 @@ using Rdmp.Core.DataLoad.Engine.LoadProcess; using Rdmp.Core.DataLoad.Triggers; using Rdmp.Core.Logging; -using ReusableLibraryCode.Checks; -using ReusableLibraryCode.Progress; +using Rdmp.Core.ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Progress; using TypeGuesser; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration.CrossDatabaseTypeTests -{ +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration.CrossDatabaseTypeTests; + +/* + Test currently requires for LowPrivilegeLoaderAccount (e.g. minion) + --------------------------------------------------- - /* - Test currently requires for LowPrivilegeLoaderAccount (e.g. 
minion) - --------------------------------------------------- - - create database DLE_STAGING + create database DLE_STAGING - use DLE_STAGING + use DLE_STAGING - CREATE USER [minion] FOR LOGIN [minion] + CREATE USER [minion] FOR LOGIN [minion] - ALTER ROLE [db_datareader] ADD MEMBER [minion] - ALTER ROLE [db_ddladmin] ADD MEMBER [minion] - ALTER ROLE [db_datawriter] ADD MEMBER [minion] - */ + ALTER ROLE [db_datareader] ADD MEMBER [minion] + ALTER ROLE [db_ddladmin] ADD MEMBER [minion] + ALTER ROLE [db_datawriter] ADD MEMBER [minion] +*/ - class CrossDatabaseDataLoadTests : DataLoadEngineTestsBase +internal class CrossDatabaseDataLoadTests : DataLoadEngineTestsBase +{ + public enum TestCase { - public enum TestCase - { - Normal, - LowPrivilegeLoaderAccount, - ForeignKeyOrphans, - DodgyCollation, - AllPrimaryKeys, - NoTrigger, - WithNonPrimaryKeyIdentityColumn, - - WithCustomTableNamer, - - WithDiffColumnIgnoreRegex //tests ability of the system to skip a given column when doing the DLE diff section - } + Normal, + LowPrivilegeLoaderAccount, + ForeignKeyOrphans, + DodgyCollation, + AllPrimaryKeys, + NoTrigger, + WithNonPrimaryKeyIdentityColumn, + + WithCustomTableNamer, + + WithDiffColumnIgnoreRegex //tests ability of the system to skip a given column when doing the DLE diff section + } + + [TestCase(DatabaseType.Oracle, TestCase.Normal)] + [TestCase(DatabaseType.MicrosoftSQLServer, TestCase.Normal)] + [TestCase(DatabaseType.MicrosoftSQLServer, TestCase.NoTrigger)] + [TestCase(DatabaseType.MicrosoftSQLServer, TestCase.WithCustomTableNamer)] + [TestCase(DatabaseType.MicrosoftSQLServer, TestCase.WithNonPrimaryKeyIdentityColumn)] + [TestCase(DatabaseType.MicrosoftSQLServer, TestCase.DodgyCollation)] + [TestCase(DatabaseType.MicrosoftSQLServer, TestCase.LowPrivilegeLoaderAccount)] + [TestCase(DatabaseType.MicrosoftSQLServer, TestCase.AllPrimaryKeys)] + [TestCase(DatabaseType.MySql, TestCase.Normal)] + //[TestCase(DatabaseType.MySql, TestCase.WithNonPrimaryKeyIdentityColumn)] //Not supported by MySql:Incorrect table definition; there can be only one auto column and it must be defined as a key + [TestCase(DatabaseType.MySql, TestCase.DodgyCollation)] + [TestCase(DatabaseType.MySql, TestCase.WithCustomTableNamer)] + [TestCase(DatabaseType.MySql, TestCase.LowPrivilegeLoaderAccount)] + [TestCase(DatabaseType.MySql, TestCase.AllPrimaryKeys)] + [TestCase(DatabaseType.MySql, TestCase.WithDiffColumnIgnoreRegex)] + public void Load(DatabaseType databaseType, TestCase testCase) + { + var defaults = CatalogueRepository; + var logServer = defaults.GetDefaultFor(PermissableDefaults.LiveLoggingServer_ID); + var logManager = new LogManager(logServer); + + var db = GetCleanedServer(databaseType); + + var raw = db.Server.ExpectDatabase($"{db.GetRuntimeName()}_RAW"); + if (raw.Exists()) + raw.Drop(); + + using var dt = new DataTable("MyTable"); + dt.Columns.Add("Name"); + dt.Columns.Add("DateOfBirth"); + dt.Columns.Add("FavouriteColour"); + dt.Rows.Add("Bob", "2001-01-01", "Pink"); + dt.Rows.Add("Frank", "2001-01-01", "Orange"); + + var nameCol = new DatabaseColumnRequest("Name", new DatabaseTypeRequest(typeof(string), 20), false) + { IsPrimaryKey = true }; - [TestCase(DatabaseType.Oracle,TestCase.Normal)] - [TestCase(DatabaseType.MicrosoftSQLServer,TestCase.Normal)] - [TestCase(DatabaseType.MicrosoftSQLServer,TestCase.NoTrigger)] - [TestCase(DatabaseType.MicrosoftSQLServer, TestCase.WithCustomTableNamer)] - [TestCase(DatabaseType.MicrosoftSQLServer, TestCase.WithNonPrimaryKeyIdentityColumn)] - 
[TestCase(DatabaseType.MicrosoftSQLServer, TestCase.DodgyCollation)] - [TestCase(DatabaseType.MicrosoftSQLServer, TestCase.LowPrivilegeLoaderAccount)] - [TestCase(DatabaseType.MicrosoftSQLServer, TestCase.AllPrimaryKeys)] - [TestCase(DatabaseType.MySql,TestCase.Normal)] - //[TestCase(DatabaseType.MySql, TestCase.WithNonPrimaryKeyIdentityColumn)] //Not supported by MySql:Incorrect table definition; there can be only one auto column and it must be defined as a key - [TestCase(DatabaseType.MySql, TestCase.DodgyCollation)] - [TestCase(DatabaseType.MySql, TestCase.WithCustomTableNamer)] - [TestCase(DatabaseType.MySql, TestCase.LowPrivilegeLoaderAccount)] - [TestCase(DatabaseType.MySql, TestCase.AllPrimaryKeys)] - [TestCase(DatabaseType.MySql, TestCase.WithDiffColumnIgnoreRegex)] - public void Load(DatabaseType databaseType, TestCase testCase) + if (testCase == TestCase.DodgyCollation) + nameCol.Collation = databaseType switch + { + DatabaseType.MicrosoftSQLServer => "Latin1_General_CS_AS_KS_WS", + DatabaseType.MySql => "latin1_german1_ci", + _ => nameCol.Collation + }; + + DiscoveredTable tbl; + switch (testCase) { - var defaults = CatalogueRepository; - var logServer = defaults.GetDefaultFor(PermissableDefaults.LiveLoggingServer_ID); - var logManager = new LogManager(logServer); - - var db = GetCleanedServer(databaseType); - - var raw = db.Server.ExpectDatabase(db.GetRuntimeName() + "_RAW"); - if(raw.Exists()) - raw.Drop(); - - var dt = new DataTable("MyTable"); - dt.Columns.Add("Name"); - dt.Columns.Add("DateOfBirth"); - dt.Columns.Add("FavouriteColour"); - dt.Rows.Add("Bob", "2001-01-01","Pink"); - dt.Rows.Add("Frank", "2001-01-01","Orange"); - - var nameCol = new DatabaseColumnRequest("Name", new DatabaseTypeRequest(typeof (string), 20), false){IsPrimaryKey = true}; - - if (testCase == TestCase.DodgyCollation) - if(databaseType == DatabaseType.MicrosoftSQLServer) - nameCol.Collation = "Latin1_General_CS_AS_KS_WS"; - else if (databaseType == DatabaseType.MySql) - nameCol.Collation = "latin1_german1_ci"; - - - DiscoveredTable tbl; - if (testCase == TestCase.WithNonPrimaryKeyIdentityColumn) + case TestCase.WithNonPrimaryKeyIdentityColumn: { - tbl = db.CreateTable("MyTable",new [] + tbl = db.CreateTable("MyTable", new[] { - new DatabaseColumnRequest("ID",new DatabaseTypeRequest(typeof(int)),false){IsPrimaryKey = false,IsAutoIncrement = true}, - nameCol, - new DatabaseColumnRequest("DateOfBirth",new DatabaseTypeRequest(typeof(DateTime)),false){IsPrimaryKey = true}, - new DatabaseColumnRequest("FavouriteColour",new DatabaseTypeRequest(typeof(string))), + new DatabaseColumnRequest("ID", new DatabaseTypeRequest(typeof(int)), false) + { IsPrimaryKey = false, IsAutoIncrement = true }, + nameCol, + new DatabaseColumnRequest("DateOfBirth", new DatabaseTypeRequest(typeof(DateTime)), false) + { IsPrimaryKey = true }, + new DatabaseColumnRequest("FavouriteColour", new DatabaseTypeRequest(typeof(string))) }); - + using (var blk = tbl.BeginBulkInsert()) + { blk.Upload(dt); + } - Assert.AreEqual(1,tbl.DiscoverColumns().Count(c=>c.GetRuntimeName().Equals("ID",StringComparison.CurrentCultureIgnoreCase)),"Table created did not contain ID column"); - + Assert.AreEqual(1, + tbl.DiscoverColumns().Count(c => + c.GetRuntimeName().Equals("ID", StringComparison.CurrentCultureIgnoreCase)), + "Table created did not contain ID column"); + break; } - else - if (testCase == TestCase.AllPrimaryKeys) - { + case TestCase.AllPrimaryKeys: dt.PrimaryKey = dt.Columns.Cast().ToArray(); - tbl = db.CreateTable("MyTable",dt,new 
[]{nameCol}); //upload the column as is + tbl = db.CreateTable("MyTable", dt, new[] { nameCol }); //upload the column as is Assert.IsTrue(tbl.DiscoverColumns().All(c => c.IsPrimaryKey)); - } - else - { + break; + default: tbl = db.CreateTable("MyTable", dt, new[] { nameCol, - new DatabaseColumnRequest("DateOfBirth",new DatabaseTypeRequest(typeof(DateTime)),false){IsPrimaryKey = true} + new DatabaseColumnRequest("DateOfBirth", new DatabaseTypeRequest(typeof(DateTime)), false) + { IsPrimaryKey = true } }); - } + break; + } - Assert.AreEqual(2, tbl.GetRowCount()); - - //define a new load configuration - var lmd = new LoadMetadata(CatalogueRepository, "MyLoad"); + Assert.AreEqual(2, tbl.GetRowCount()); - if(testCase == TestCase.NoTrigger) - { - lmd.IgnoreTrigger = true; - lmd.SaveToDatabase(); - } - - ITableInfo ti = Import(tbl, lmd,logManager); - - var projectDirectory = SetupLoadDirectory(lmd); - - CreateCSVProcessTask(lmd,ti,"*.csv"); - - //create a text file to load where we update Frank's favourite colour (it's a pk field) and we insert a new record (MrMurder) - File.WriteAllText( - Path.Combine(projectDirectory.ForLoading.FullName, "LoadMe.csv"), -@"Name,DateOfBirth,FavouriteColour + //define a new load configuration + var lmd = new LoadMetadata(CatalogueRepository, "MyLoad"); + + if (testCase == TestCase.NoTrigger) + { + lmd.IgnoreTrigger = true; + lmd.SaveToDatabase(); + } + + var ti = Import(tbl, lmd, logManager); + + var projectDirectory = SetupLoadDirectory(lmd); + + CreateCSVProcessTask(lmd, ti, "*.csv"); + + //create a text file to load where we update Frank's favourite colour (it's a pk field) and we insert a new record (MrMurder) + File.WriteAllText( + Path.Combine(projectDirectory.ForLoading.FullName, "LoadMe.csv"), + @"Name,DateOfBirth,FavouriteColour Frank,2001-01-01,Neon MrMurder,2001-01-01,Yella"); - - //the checks will probably need to be run as ddl admin because it involves creating _Archive table and trigger the first time - //clean SetUp RAW / STAGING etc and generally accept proposed cleanup operations - var checker = new CheckEntireDataLoadProcess(lmd, new HICDatabaseConfiguration(lmd), new HICLoadConfigurationFlags(),CatalogueRepository.MEF); - checker.Check(new AcceptAllCheckNotifier()); + //the checks will probably need to be run as ddl admin because it involves creating _Archive table and trigger the first time - //create a reader - if (testCase == TestCase.LowPrivilegeLoaderAccount) - { - SetupLowPrivilegeUserRightsFor(ti, TestLowPrivilegePermissions.Reader|TestLowPrivilegePermissions.Writer); - SetupLowPrivilegeUserRightsFor(db.Server.ExpectDatabase("DLE_STAGING"),TestLowPrivilegePermissions.All); - } + //clean SetUp RAW / STAGING etc and generally accept proposed cleanup operations + var checker = + new CheckEntireDataLoadProcess(lmd, new HICDatabaseConfiguration(lmd), new HICLoadConfigurationFlags()); + checker.Check(new AcceptAllCheckNotifier()); - Assert.AreEqual(testCase != TestCase.NoTrigger, tbl.DiscoverColumns().Select(c=>c.GetRuntimeName()).Contains(SpecialFieldNames.DataLoadRunID), $"When running with NoTrigger there shouldn't be any additional columns added to table. Test case was {testCase}"); - Assert.AreEqual(testCase != TestCase.NoTrigger, tbl.DiscoverColumns().Select(c=>c.GetRuntimeName()).Contains(SpecialFieldNames.ValidFrom), $"When running with NoTrigger there shouldn't be any additional columns added to table. 
Test case was {testCase}"); + //create a reader + if (testCase == TestCase.LowPrivilegeLoaderAccount) + { + SetupLowPrivilegeUserRightsFor(ti, TestLowPrivilegePermissions.Reader | TestLowPrivilegePermissions.Writer); + SetupLowPrivilegeUserRightsFor(db.Server.ExpectDatabase("DLE_STAGING"), TestLowPrivilegePermissions.All); + } - var dbConfig = new HICDatabaseConfiguration(lmd,testCase == TestCase.WithCustomTableNamer? new CustomINameDatabasesAndTablesDuringLoads():null); + Assert.AreEqual(testCase != TestCase.NoTrigger, + tbl.DiscoverColumns().Select(c => c.GetRuntimeName()).Contains(SpecialFieldNames.DataLoadRunID), + $"When running with NoTrigger there shouldn't be any additional columns added to table. Test case was {testCase}"); + Assert.AreEqual(testCase != TestCase.NoTrigger, + tbl.DiscoverColumns().Select(c => c.GetRuntimeName()).Contains(SpecialFieldNames.ValidFrom), + $"When running with NoTrigger there shouldn't be any additional columns added to table. Test case was {testCase}"); - if(testCase == TestCase.WithCustomTableNamer) - new PreExecutionChecker(lmd, dbConfig).Check(new AcceptAllCheckNotifier()); //handles staging database creation etc + var dbConfig = new HICDatabaseConfiguration(lmd, + testCase == TestCase.WithCustomTableNamer ? new CustomINameDatabasesAndTablesDuringLoads() : null); - if (testCase == TestCase.WithDiffColumnIgnoreRegex) - dbConfig.UpdateButDoNotDiff = new Regex("^FavouriteColour"); //do not diff FavouriteColour - - - var loadFactory = new HICDataLoadFactory( - lmd, - dbConfig, - new HICLoadConfigurationFlags(), - CatalogueRepository, - logManager - ); - - try - { - var exe = loadFactory.Create(new ThrowImmediatelyDataLoadEventListener()); - - var exitCode = exe.Run( - new DataLoadJob(RepositoryLocator,"Go go go!", logManager, lmd, projectDirectory,new ThrowImmediatelyDataLoadEventListener(),dbConfig), - new GracefulCancellationToken()); + if (testCase == TestCase.WithCustomTableNamer) + new PreExecutionChecker(lmd, dbConfig).Check( + new AcceptAllCheckNotifier()); //handles staging database creation etc - Assert.AreEqual(ExitCodeType.Success,exitCode); + if (testCase == TestCase.WithDiffColumnIgnoreRegex) + dbConfig.UpdateButDoNotDiff = new Regex("^FavouriteColour"); //do not diff FavouriteColour - if (testCase == TestCase.AllPrimaryKeys) - { - Assert.AreEqual(4, tbl.GetRowCount()); //Bob, Frank, Frank (with also pk Neon) & MrMurder - Assert.Pass(); - } - if (testCase == TestCase.WithDiffColumnIgnoreRegex) - { - Assert.AreEqual(3, tbl.GetRowCount()); //Bob, Frank (original since the diff was skipped), & MrMurder - //frank should be updated to like Neon instead of Orange - Assert.AreEqual(3, tbl.GetRowCount()); - var frankOld = tbl.GetDataTable().Rows.Cast().Single(r => (string)r["Name"] == "Frank"); - Assert.AreEqual("Orange", frankOld["FavouriteColour"]); - Assert.Pass(); - } + var loadFactory = new HICDataLoadFactory( + lmd, + dbConfig, + new HICLoadConfigurationFlags(), + CatalogueRepository, + logManager + ); - //frank should be updated to like Neon instead of Orange - Assert.AreEqual(3,tbl.GetRowCount()); - var result = tbl.GetDataTable(); - var frank = result.Rows.Cast().Single(r => (string) r["Name"] == "Frank"); - Assert.AreEqual("Neon",frank["FavouriteColour"]); - - if(testCase != TestCase.NoTrigger) - AssertHasDataLoadRunId(frank); - - //MrMurder is a new person who likes Yella - var mrmurder = result.Rows.Cast().Single(r => (string)r["Name"] == "MrMurder"); - Assert.AreEqual("Yella", mrmurder["FavouriteColour"]); - Assert.AreEqual(new 
DateTime(2001,01,01), mrmurder["DateOfBirth"]); - - if(testCase != TestCase.NoTrigger) - AssertHasDataLoadRunId(mrmurder); - - //bob should be untouched (same values as before and no dataloadrunID) - var bob = result.Rows.Cast().Single(r => (string)r["Name"] == "Bob"); - Assert.AreEqual("Pink", bob["FavouriteColour"]); - Assert.AreEqual(new DateTime(2001, 01, 01), bob["DateOfBirth"]); - - if(testCase != TestCase.NoTrigger) - { - Assert.AreEqual(DBNull.Value,bob[SpecialFieldNames.DataLoadRunID]); + try + { + var exe = loadFactory.Create(ThrowImmediatelyDataLoadEventListener.Quiet); - //MySql add default of now() on a table will auto populate all the column values with the the now() date while Sql Server will leave them as nulls - if(databaseType == DatabaseType.MicrosoftSQLServer) - Assert.AreEqual(DBNull.Value, bob[SpecialFieldNames.ValidFrom]); - } - - Assert.AreEqual(testCase != TestCase.NoTrigger, tbl.DiscoverColumns().Select(c=>c.GetRuntimeName()).Contains(SpecialFieldNames.DataLoadRunID), $"When running with NoTrigger there shouldn't be any additional columns added to table. Test case was {testCase}"); - Assert.AreEqual(testCase != TestCase.NoTrigger, tbl.DiscoverColumns().Select(c=>c.GetRuntimeName()).Contains(SpecialFieldNames.ValidFrom), $"When running with NoTrigger there shouldn't be any additional columns added to table. Test case was {testCase}"); + var exitCode = exe.Run( + new DataLoadJob(RepositoryLocator, "Go go go!", logManager, lmd, projectDirectory, + ThrowImmediatelyDataLoadEventListener.Quiet, dbConfig), + new GracefulCancellationToken()); + + Assert.AreEqual(ExitCodeType.Success, exitCode); + + if (testCase == TestCase.AllPrimaryKeys) + { + Assert.AreEqual(4, tbl.GetRowCount()); //Bob, Frank, Frank (with also pk Neon) & MrMurder + Assert.Pass(); } - finally + + if (testCase == TestCase.WithDiffColumnIgnoreRegex) { - Directory.Delete(lmd.LocationOfFlatFiles, true); + Assert.AreEqual(3, tbl.GetRowCount()); //Bob, Frank (original since the diff was skipped), & MrMurder - foreach (Catalogue c in RepositoryLocator.CatalogueRepository.GetAllObjects()) - c.DeleteInDatabase(); + //frank should be updated to like Neon instead of Orange + Assert.AreEqual(3, tbl.GetRowCount()); + var frankOld = tbl.GetDataTable().Rows.Cast().Single(r => (string)r["Name"] == "Frank"); + Assert.AreEqual("Orange", frankOld["FavouriteColour"]); + Assert.Pass(); + } - foreach (TableInfo t in RepositoryLocator.CatalogueRepository.GetAllObjects()) - t.DeleteInDatabase(); + //frank should be updated to like Neon instead of Orange + Assert.AreEqual(3, tbl.GetRowCount()); + var result = tbl.GetDataTable(); + var frank = result.Rows.Cast().Single(r => (string)r["Name"] == "Frank"); + Assert.AreEqual("Neon", frank["FavouriteColour"]); - foreach (LoadMetadata l in RepositoryLocator.CatalogueRepository.GetAllObjects()) - l.DeleteInDatabase(); - } + if (testCase != TestCase.NoTrigger) + AssertHasDataLoadRunId(frank); + + //MrMurder is a new person who likes Yella + var mrmurder = result.Rows.Cast().Single(r => (string)r["Name"] == "MrMurder"); + Assert.AreEqual("Yella", mrmurder["FavouriteColour"]); + Assert.AreEqual(new DateTime(2001, 01, 01), mrmurder["DateOfBirth"]); + + if (testCase != TestCase.NoTrigger) + AssertHasDataLoadRunId(mrmurder); + + //bob should be untouched (same values as before and no dataloadrunID) + var bob = result.Rows.Cast().Single(r => (string)r["Name"] == "Bob"); + Assert.AreEqual("Pink", bob["FavouriteColour"]); + Assert.AreEqual(new DateTime(2001, 01, 01), bob["DateOfBirth"]); - 
if(testCase == TestCase.WithCustomTableNamer) + if (testCase != TestCase.NoTrigger) { - var db2 = db.Server.ExpectDatabase("BB_STAGING"); - if(db.Exists()) - db2.Drop(); + Assert.AreEqual(DBNull.Value, bob[SpecialFieldNames.DataLoadRunID]); + + //MySql add default of now() on a table will auto populate all the column values with the the now() date while Sql Server will leave them as nulls + if (databaseType == DatabaseType.MicrosoftSQLServer) + Assert.AreEqual(DBNull.Value, bob[SpecialFieldNames.ValidFrom]); } + + Assert.AreEqual(testCase != TestCase.NoTrigger, + tbl.DiscoverColumns().Select(c => c.GetRuntimeName()).Contains(SpecialFieldNames.DataLoadRunID), + $"When running with NoTrigger there shouldn't be any additional columns added to table. Test case was {testCase}"); + Assert.AreEqual(testCase != TestCase.NoTrigger, + tbl.DiscoverColumns().Select(c => c.GetRuntimeName()).Contains(SpecialFieldNames.ValidFrom), + $"When running with NoTrigger there shouldn't be any additional columns added to table. Test case was {testCase}"); + } + finally + { + Directory.Delete(lmd.LocationOfFlatFiles, true); + + foreach (var c in RepositoryLocator.CatalogueRepository.GetAllObjects()) + c.DeleteInDatabase(); + + foreach (var t in RepositoryLocator.CatalogueRepository.GetAllObjects()) + t.DeleteInDatabase(); + + foreach (var l in RepositoryLocator.CatalogueRepository.GetAllObjects()) + l.DeleteInDatabase(); } - [TestCase(DatabaseType.MicrosoftSQLServer)] - [TestCase(DatabaseType.MySql)] - public void DLELoadTwoTables(DatabaseType databaseType) + if (testCase == TestCase.WithCustomTableNamer) { - //setup the data tables - var logServer = CatalogueRepository.GetDefaultFor(PermissableDefaults.LiveLoggingServer_ID); - var logManager = new LogManager(logServer); - - var db = GetCleanedServer(databaseType); - - var dtParent = new DataTable(); - dtParent.Columns.Add("ID",typeof(int)); - dtParent.Columns.Add("Name"); - dtParent.Columns.Add("Height"); - dtParent.PrimaryKey = new[] {dtParent.Columns[0]}; - - dtParent.Rows.Add("1", "Dave", "3.5"); - - var dtChild = new DataTable(); - dtChild.Columns.Add("Parent_ID"); - dtChild.Columns.Add("ChildNumber"); - dtChild.Columns.Add("Name"); - dtChild.Columns.Add("DateOfBirth"); - dtChild.Columns.Add("Age"); - dtChild.Columns.Add("Height"); - - dtChild.Rows.Add("1","1","Child1","2001-01-01","20","3.5"); - dtChild.Rows.Add("1","2","Child2","2002-01-01","19","3.4"); - - dtChild.PrimaryKey = new[] {dtChild.Columns[0], dtChild.Columns[1]}; - - //create the parent table based on the DataTable - var parentTbl = db.CreateTable("Parent",dtParent); - - //go find the primary key column created - var pkParentID = parentTbl.DiscoverColumn("ID"); - - //forward declare this column as part of pk (will be used to specify foreign key - var fkParentID = new DatabaseColumnRequest("Parent_ID", "int"){IsPrimaryKey = true}; - - var args = new CreateTableArgs( - db, - "Child", - null, - dtChild, - false, - new Dictionary() - { - {fkParentID, pkParentID} - }, - true); + var db2 = db.Server.ExpectDatabase("BB_STAGING"); + if (db.Exists()) + db2.Drop(); + } + } - args.ExplicitColumnDefinitions = new[] - { - fkParentID - }; + [TestCase(DatabaseType.MicrosoftSQLServer)] + [TestCase(DatabaseType.MySql)] + public void DLELoadTwoTables(DatabaseType databaseType) + { + //setup the data tables + var logServer = CatalogueRepository.GetDefaultFor(PermissableDefaults.LiveLoggingServer_ID); + var logManager = new LogManager(logServer); - var childTbl = db.CreateTable(args); + var db = 
GetCleanedServer(databaseType); - Assert.AreEqual(1, parentTbl.GetRowCount()); - Assert.AreEqual(2, childTbl.GetRowCount()); + var dtParent = new DataTable(); + dtParent.Columns.Add("ID", typeof(int)); + dtParent.Columns.Add("Name"); + dtParent.Columns.Add("Height"); + dtParent.PrimaryKey = new[] { dtParent.Columns[0] }; - //create a new load - var lmd = new LoadMetadata(CatalogueRepository, "MyLoading2"); - - ITableInfo childTableInfo = Import(childTbl, lmd, logManager); - ITableInfo parentTableInfo = Import(parentTbl,lmd,logManager); + dtParent.Rows.Add("1", "Dave", "3.5"); - var projectDirectory = SetupLoadDirectory(lmd); + var dtChild = new DataTable(); + dtChild.Columns.Add("Parent_ID"); + dtChild.Columns.Add("ChildNumber"); + dtChild.Columns.Add("Name"); + dtChild.Columns.Add("DateOfBirth"); + dtChild.Columns.Add("Age"); + dtChild.Columns.Add("Height"); - CreateCSVProcessTask(lmd,parentTableInfo,"parent.csv"); - CreateCSVProcessTask(lmd, childTableInfo, "child.csv"); + dtChild.Rows.Add("1", "1", "Child1", "2001-01-01", "20", "3.5"); + dtChild.Rows.Add("1", "2", "Child2", "2002-01-01", "19", "3.4"); - //create a text file to load where we update Frank's favourite colour (it's a pk field) and we insert a new record (MrMurder) - File.WriteAllText( - Path.Combine(projectDirectory.ForLoading.FullName, "parent.csv"), -@"ID,Name,Height -2,Man2,3.1 -1,Dave,3.2"); + dtChild.PrimaryKey = new[] { dtChild.Columns[0], dtChild.Columns[1] }; - File.WriteAllText( - Path.Combine(projectDirectory.ForLoading.FullName, "child.csv"), -@"Parent_ID,ChildNumber,Name,DateOfBirth,Age,Height -1,1,UpdC1,2001-01-01,20,3.5 -2,1,NewC1,2000-01-01,19,null"); - - - //clean SetUp RAW / STAGING etc and generally accept proposed cleanup operations - var checker = new CheckEntireDataLoadProcess(lmd, new HICDatabaseConfiguration(lmd), new HICLoadConfigurationFlags(), CatalogueRepository.MEF); - checker.Check(new AcceptAllCheckNotifier()); - - var config = new HICDatabaseConfiguration(lmd); - - var loadFactory = new HICDataLoadFactory( - lmd, - config, - new HICLoadConfigurationFlags(), - CatalogueRepository, - logManager - ); - try + //create the parent table based on the DataTable + var parentTbl = db.CreateTable("Parent", dtParent); + + //go find the primary key column created + var pkParentID = parentTbl.DiscoverColumn("ID"); + + //forward declare this column as part of pk (will be used to specify foreign key + var fkParentID = new DatabaseColumnRequest("Parent_ID", "int") { IsPrimaryKey = true }; + + var args = new CreateTableArgs( + db, + "Child", + null, + dtChild, + false, + new Dictionary + { + { fkParentID, pkParentID } + }, + true) + { + ExplicitColumnDefinitions = new[] { - var exe = loadFactory.Create(new ThrowImmediatelyDataLoadEventListener()); + fkParentID + } + }; - var exitCode = exe.Run( - new DataLoadJob(RepositoryLocator,"Go go go!", logManager, lmd, projectDirectory, new ThrowImmediatelyDataLoadEventListener(),config), - new GracefulCancellationToken()); + var childTbl = db.CreateTable(args); - Assert.AreEqual(ExitCodeType.Success, exitCode); + Assert.AreEqual(1, parentTbl.GetRowCount()); + Assert.AreEqual(2, childTbl.GetRowCount()); - //should now be 2 parents (the original - who was updated) + 1 new one (Man2) - Assert.AreEqual(2, parentTbl.GetRowCount()); - var result = parentTbl.GetDataTable(); - var dave = result.Rows.Cast().Single(r => (string)r["Name"] == "Dave"); - Assert.AreEqual(3.2f, dave["Height"]); //should now be only 3.2 inches high - AssertHasDataLoadRunId(dave); + //create a new load 
+ var lmd = new LoadMetadata(CatalogueRepository, "MyLoading2"); - //should be 3 children (Child1 who gets updated to be called UpdC1) and NewC1 - Assert.AreEqual(3, childTbl.GetRowCount()); - result = childTbl.GetDataTable(); + var childTableInfo = Import(childTbl, lmd, logManager); + var parentTableInfo = Import(parentTbl, lmd, logManager); - var updC1 = result.Rows.Cast().Single(r => (string)r["Name"] == "UpdC1"); - Assert.AreEqual(1, updC1["Parent_ID"]); - Assert.AreEqual(1, updC1["ChildNumber"]); - AssertHasDataLoadRunId(updC1); + var projectDirectory = SetupLoadDirectory(lmd); - var newC1 = result.Rows.Cast().Single(r => (string)r["Name"] == "NewC1"); - Assert.AreEqual(2, newC1["Parent_ID"]); - Assert.AreEqual(1, newC1["ChildNumber"]); - Assert.AreEqual(DBNull.Value, newC1["Height"]); //the "null" in the input file should be DBNull.Value in the final database - AssertHasDataLoadRunId(newC1); + CreateCSVProcessTask(lmd, parentTableInfo, "parent.csv"); + CreateCSVProcessTask(lmd, childTableInfo, "child.csv"); - } - finally - { - Directory.Delete(lmd.LocationOfFlatFiles,true); + //create a text file to load where we update Frank's favourite colour (it's a pk field) and we insert a new record (MrMurder) + File.WriteAllText( + Path.Combine(projectDirectory.ForLoading.FullName, "parent.csv"), + @"ID,Name,Height +2,Man2,3.1 +1,Dave,3.2"); - foreach (Catalogue c in RepositoryLocator.CatalogueRepository.GetAllObjects()) - c.DeleteInDatabase(); + File.WriteAllText( + Path.Combine(projectDirectory.ForLoading.FullName, "child.csv"), + @"Parent_ID,ChildNumber,Name,DateOfBirth,Age,Height +1,1,UpdC1,2001-01-01,20,3.5 +2,1,NewC1,2000-01-01,19,null"); - foreach (TableInfo t in RepositoryLocator.CatalogueRepository.GetAllObjects()) - t.DeleteInDatabase(); - foreach (LoadMetadata l in RepositoryLocator.CatalogueRepository.GetAllObjects()) - l.DeleteInDatabase(); - } - } + //clean SetUp RAW / STAGING etc and generally accept proposed cleanup operations + var checker = + new CheckEntireDataLoadProcess(lmd, new HICDatabaseConfiguration(lmd), new HICLoadConfigurationFlags()); + checker.Check(new AcceptAllCheckNotifier()); - } + var config = new HICDatabaseConfiguration(lmd); - class CustomINameDatabasesAndTablesDuringLoads:INameDatabasesAndTablesDuringLoads - { - public string GetDatabaseName(string rootDatabaseName, LoadBubble convention) + var loadFactory = new HICDataLoadFactory( + lmd, + config, + new HICLoadConfigurationFlags(), + CatalogueRepository, + logManager + ); + try { - //RAW is AA, Staging is BB - switch (convention) - { - case LoadBubble.Raw: - return "AA_RAW"; - case LoadBubble.Staging: - return "BB_STAGING"; - case LoadBubble.Live: - case LoadBubble.Archive: - return rootDatabaseName; - default: - throw new ArgumentOutOfRangeException("convention"); - } + var exe = loadFactory.Create(ThrowImmediatelyDataLoadEventListener.Quiet); + + var exitCode = exe.Run( + new DataLoadJob(RepositoryLocator, "Go go go!", logManager, lmd, projectDirectory, + ThrowImmediatelyDataLoadEventListener.Quiet, config), + new GracefulCancellationToken()); + + Assert.AreEqual(ExitCodeType.Success, exitCode); + + //should now be 2 parents (the original - who was updated) + 1 new one (Man2) + Assert.AreEqual(2, parentTbl.GetRowCount()); + var result = parentTbl.GetDataTable(); + var dave = result.Rows.Cast().Single(r => (string)r["Name"] == "Dave"); + Assert.AreEqual(3.2f, dave["Height"]); //should now be only 3.2 inches high + AssertHasDataLoadRunId(dave); + + //should be 3 children (Child1 who gets updated to be 
called UpdC1) and NewC1 + Assert.AreEqual(3, childTbl.GetRowCount()); + result = childTbl.GetDataTable(); + + var updC1 = result.Rows.Cast().Single(r => (string)r["Name"] == "UpdC1"); + Assert.AreEqual(1, updC1["Parent_ID"]); + Assert.AreEqual(1, updC1["ChildNumber"]); + AssertHasDataLoadRunId(updC1); + + var newC1 = result.Rows.Cast().Single(r => (string)r["Name"] == "NewC1"); + Assert.AreEqual(2, newC1["Parent_ID"]); + Assert.AreEqual(1, newC1["ChildNumber"]); + Assert.AreEqual(DBNull.Value, + newC1["Height"]); //the "null" in the input file should be DBNull.Value in the final database + AssertHasDataLoadRunId(newC1); } - - public string GetName(string tableName, LoadBubble convention) + finally { - //all tables get called CC - if (convention < LoadBubble.Live) - return "CC"; + Directory.Delete(lmd.LocationOfFlatFiles, true); + + foreach (var c in RepositoryLocator.CatalogueRepository.GetAllObjects()) + c.DeleteInDatabase(); + + foreach (var t in RepositoryLocator.CatalogueRepository.GetAllObjects()) + t.DeleteInDatabase(); - return tableName; + foreach (var l in RepositoryLocator.CatalogueRepository.GetAllObjects()) + l.DeleteInDatabase(); } } } + +internal class CustomINameDatabasesAndTablesDuringLoads : INameDatabasesAndTablesDuringLoads +{ + public string GetDatabaseName(string rootDatabaseName, LoadBubble convention) + { + //RAW is AA, Staging is BB + return convention switch + { + LoadBubble.Raw => "AA_RAW", + LoadBubble.Staging => "BB_STAGING", + LoadBubble.Live => rootDatabaseName, + LoadBubble.Archive => rootDatabaseName, + _ => throw new ArgumentOutOfRangeException(nameof(convention)) + }; + } + + public string GetName(string tableName, LoadBubble convention) => + //all tables get called CC + convention < LoadBubble.Live ? "CC" : tableName; +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/CrossDatabaseTypeTests/CrossDatabaseMergeCommandTest.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/CrossDatabaseTypeTests/CrossDatabaseMergeCommandTest.cs index acfac8b2a3..8ab992b6f5 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/CrossDatabaseTypeTests/CrossDatabaseMergeCommandTest.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/CrossDatabaseTypeTests/CrossDatabaseMergeCommandTest.cs @@ -21,115 +21,115 @@ using Rdmp.Core.DataLoad.Engine.Migration; using Rdmp.Core.DataLoad.Triggers.Implementations; using Rdmp.Core.Logging; -using ReusableLibraryCode.Checks; -using Tests.Common; +using Rdmp.Core.ReusableLibraryCode.Checks; using Tests.Common.Scenarios; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration.CrossDatabaseTypeTests +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration.CrossDatabaseTypeTests; + +public class CrossDatabaseMergeCommandTest : FromToDatabaseTests { - public class CrossDatabaseMergeCommandTest:FromToDatabaseTests + [TestCase(DatabaseType.MicrosoftSQLServer)] + [TestCase(DatabaseType.MySql)] + public void TestMerge(DatabaseType databaseType) { - [TestCase(DatabaseType.MicrosoftSQLServer)] - [TestCase(DatabaseType.MySql)] - public void TestMerge(DatabaseType databaseType) - { - //microsoft one gets called for free in test setup (see base class) - if (databaseType != DatabaseType.MicrosoftSQLServer) - SetupFromTo(databaseType); - - var dt = new DataTable(); - var colName = new DataColumn("Name",typeof(string)); - var colAge = new DataColumn("Age",typeof(int)); - dt.Columns.Add(colName); - dt.Columns.Add(colAge); - dt.Columns.Add("Postcode",typeof(string)); - - //Data in live awaiting toTbl be updated - 
dt.Rows.Add(new object[]{"Dave",18,"DD3 1AB"}); - dt.Rows.Add(new object[] {"Dave", 25, "DD1 1XS" }); - dt.Rows.Add(new object[] {"Mango", 32, DBNull.Value}); - dt.Rows.Add(new object[] { "Filli", 32,"DD3 78L" }); - dt.Rows.Add(new object[] { "Mandrake", 32, DBNull.Value }); - - dt.PrimaryKey = new[]{colName,colAge}; - - var toTbl = To.CreateTable("ToTable", dt); - - Assert.IsTrue(toTbl.DiscoverColumn("Name").IsPrimaryKey); - Assert.IsTrue(toTbl.DiscoverColumn("Age").IsPrimaryKey); - Assert.IsFalse(toTbl.DiscoverColumn("Postcode").IsPrimaryKey); - - dt.Rows.Clear(); - - //new data being loaded - dt.Rows.Add(new object[] { "Dave", 25, "DD1 1PS" }); //update toTbl change postcode toTbl "DD1 1PS" - dt.Rows.Add(new object[] { "Chutney", 32, DBNull.Value }); //new insert Chutney - dt.Rows.Add(new object[] { "Mango", 32, DBNull.Value }); //ignored because already present in dataset - dt.Rows.Add(new object[] { "Filli", 32, DBNull.Value }); //update from "DD3 78L" null - dt.Rows.Add(new object[] { "Mandrake", 32, "DD1 1PS" }); //update from null toTbl "DD1 1PS" - dt.Rows.Add(new object[] { "Mandrake", 31, "DD1 1PS" }); // insert because Age is unique (and part of pk) - - var fromTbl = From.CreateTable(DatabaseName + "_ToTable_STAGING", dt); - - //import the toTbl table as a TableInfo - var cata = Import(toTbl,out var ti,out var cis); - - //put the backup trigger on the live table (this will also create the needed hic_ columns etc) - var triggerImplementer = new TriggerImplementerFactory(databaseType).Create(toTbl); - triggerImplementer.CreateTrigger(new ThrowImmediatelyCheckNotifier()); - - var configuration = new MigrationConfiguration(From, LoadBubble.Staging, LoadBubble.Live, - new FixedStagingDatabaseNamer(toTbl.Database.GetRuntimeName(), fromTbl.Database.GetRuntimeName())); - - var lmd = new LoadMetadata(CatalogueRepository); - cata.LoadMetadata_ID = lmd.ID; - cata.SaveToDatabase(); - - var migrationHost = new MigrationHost(From, To, configuration, new HICDatabaseConfiguration(lmd)); - - //set SetUp a logging task - var logServer = CatalogueRepository.GetDefaultFor(PermissableDefaults.LiveLoggingServer_ID); - var logManager = new LogManager(logServer); - logManager.CreateNewLoggingTaskIfNotExists("CrossDatabaseMergeCommandTest"); - var dli = logManager.CreateDataLoadInfo("CrossDatabaseMergeCommandTest", "tests", "running test", "", true); - - var job = new ThrowImmediatelyDataLoadJob() - { - LoadMetadata = lmd, - DataLoadInfo = dli, - RegularTablesToLoad = new List(new[]{ti}) - }; - - migrationHost.Migrate(job, new GracefulCancellationToken()); - - var resultantDt = toTbl.GetDataTable(); - Assert.AreEqual(7,resultantDt.Rows.Count); - - AssertRowEquals(resultantDt, "Dave", 25, "DD1 1PS"); - AssertRowEquals(resultantDt, "Chutney", 32, DBNull.Value); - AssertRowEquals(resultantDt, "Mango", 32, DBNull.Value); - - AssertRowEquals(resultantDt,"Filli",32,DBNull.Value); - AssertRowEquals(resultantDt, "Mandrake", 32, "DD1 1PS"); - AssertRowEquals(resultantDt, "Mandrake", 31, "DD1 1PS"); - - AssertRowEquals(resultantDt, "Dave", 18, "DD3 1AB"); - - - var archival = logManager.GetArchivalDataLoadInfos("CrossDatabaseMergeCommandTest", new CancellationToken()); - var log = archival.First(); - - - Assert.AreEqual(dli.ID,log.ID); - Assert.AreEqual(2,log.TableLoadInfos.Single().Inserts); - Assert.AreEqual(3, log.TableLoadInfos.Single().Updates); - } - - private void AssertRowEquals(DataTable resultantDt,string name,int age, object postcode) + //microsoft one gets called for free in test setup (see base 
class) + if (databaseType != DatabaseType.MicrosoftSQLServer) + SetupFromTo(databaseType); + + var dt = new DataTable(); + var colName = new DataColumn("Name", typeof(string)); + var colAge = new DataColumn("Age", typeof(int)); + dt.Columns.Add(colName); + dt.Columns.Add(colAge); + dt.Columns.Add("Postcode", typeof(string)); + + //Data in live awaiting toTbl be updated + dt.Rows.Add(new object[] { "Dave", 18, "DD3 1AB" }); + dt.Rows.Add(new object[] { "Dave", 25, "DD1 1XS" }); + dt.Rows.Add(new object[] { "Mango", 32, DBNull.Value }); + dt.Rows.Add(new object[] { "Filli", 32, "DD3 78L" }); + dt.Rows.Add(new object[] { "Mandrake", 32, DBNull.Value }); + + dt.PrimaryKey = new[] { colName, colAge }; + + var toTbl = To.CreateTable("ToTable", dt); + + Assert.IsTrue(toTbl.DiscoverColumn("Name").IsPrimaryKey); + Assert.IsTrue(toTbl.DiscoverColumn("Age").IsPrimaryKey); + Assert.IsFalse(toTbl.DiscoverColumn("Postcode").IsPrimaryKey); + + dt.Rows.Clear(); + + //new data being loaded + dt.Rows.Add(new object[] { "Dave", 25, "DD1 1PS" }); //update toTbl change postcode toTbl "DD1 1PS" + dt.Rows.Add(new object[] { "Chutney", 32, DBNull.Value }); //new insert Chutney + dt.Rows.Add(new object[] { "Mango", 32, DBNull.Value }); //ignored because already present in dataset + dt.Rows.Add(new object[] { "Filli", 32, DBNull.Value }); //update from "DD3 78L" null + dt.Rows.Add(new object[] { "Mandrake", 32, "DD1 1PS" }); //update from null toTbl "DD1 1PS" + dt.Rows.Add(new object[] { "Mandrake", 31, "DD1 1PS" }); // insert because Age is unique (and part of pk) + + var fromTbl = From.CreateTable($"{DatabaseName}_ToTable_STAGING", dt); + + //import the toTbl table as a TableInfo + var cata = Import(toTbl, out var ti, out var cis); + + //put the backup trigger on the live table (this will also create the needed hic_ columns etc) + var triggerImplementer = new TriggerImplementerFactory(databaseType).Create(toTbl); + triggerImplementer.CreateTrigger(ThrowImmediatelyCheckNotifier.Quiet); + + var configuration = new MigrationConfiguration(From, LoadBubble.Staging, LoadBubble.Live, + new FixedStagingDatabaseNamer(toTbl.Database.GetRuntimeName(), fromTbl.Database.GetRuntimeName())); + + var lmd = new LoadMetadata(CatalogueRepository); + cata.LoadMetadata_ID = lmd.ID; + cata.SaveToDatabase(); + + var migrationHost = new MigrationHost(From, To, configuration, new HICDatabaseConfiguration(lmd)); + + //set SetUp a logging task + var logServer = CatalogueRepository.GetDefaultFor(PermissableDefaults.LiveLoggingServer_ID); + var logManager = new LogManager(logServer); + logManager.CreateNewLoggingTaskIfNotExists("CrossDatabaseMergeCommandTest"); + var dli = logManager.CreateDataLoadInfo("CrossDatabaseMergeCommandTest", "tests", "running test", "", true); + + var job = new ThrowImmediatelyDataLoadJob { - Assert.AreEqual( - 1, resultantDt.Rows.Cast().Count(r => Equals(r["Name"], name) && Equals(r["Age"], age) && Equals(r["Postcode"], postcode)), - "Did not find expected record:" + string.Join(",",name,age,postcode)); - } + LoadMetadata = lmd, + DataLoadInfo = dli, + RegularTablesToLoad = new List(new[] { ti }) + }; + + migrationHost.Migrate(job, new GracefulCancellationToken()); + + var resultantDt = toTbl.GetDataTable(); + Assert.AreEqual(7, resultantDt.Rows.Count); + + AssertRowEquals(resultantDt, "Dave", 25, "DD1 1PS"); + AssertRowEquals(resultantDt, "Chutney", 32, DBNull.Value); + AssertRowEquals(resultantDt, "Mango", 32, DBNull.Value); + + AssertRowEquals(resultantDt, "Filli", 32, DBNull.Value); + 
AssertRowEquals(resultantDt, "Mandrake", 32, "DD1 1PS"); + AssertRowEquals(resultantDt, "Mandrake", 31, "DD1 1PS"); + + AssertRowEquals(resultantDt, "Dave", 18, "DD3 1AB"); + + + var archival = logManager.GetArchivalDataLoadInfos("CrossDatabaseMergeCommandTest", new CancellationToken()); + var log = archival.First(); + + + Assert.AreEqual(dli.ID, log.ID); + Assert.AreEqual(2, log.TableLoadInfos.Single().Inserts); + Assert.AreEqual(3, log.TableLoadInfos.Single().Updates); + } + + private static void AssertRowEquals(DataTable resultantDt, string name, int age, object postcode) + { + Assert.AreEqual( + 1, + resultantDt.Rows.Cast().Count(r => + Equals(r["Name"], name) && Equals(r["Age"], age) && Equals(r["Postcode"], postcode)), + "Did not find expected record:" + string.Join(",", name, age, postcode)); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/CrossDatabaseTypeTests/HowDoWeAchieveMd5Test.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/CrossDatabaseTypeTests/HowDoWeAchieveMd5Test.cs index b42440a9e2..d824f8b095 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/CrossDatabaseTypeTests/HowDoWeAchieveMd5Test.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/CrossDatabaseTypeTests/HowDoWeAchieveMd5Test.cs @@ -10,77 +10,71 @@ using NUnit.Framework; using Tests.Common; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration.CrossDatabaseTypeTests +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration.CrossDatabaseTypeTests; + +internal class HowDoWeAchieveMd5Test : DatabaseTests { - class HowDoWeAchieveMd5Test:DatabaseTests + [TestCase(DatabaseType.MicrosoftSQLServer)] + [TestCase(DatabaseType.MySql)] + public void TestMd5String(DatabaseType type) { + var dt = new DataTable(); + dt.Columns.Add("F"); + dt.Rows.Add(new[] { "Fish" }); - [TestCase(DatabaseType.MicrosoftSQLServer)] - [TestCase(DatabaseType.MySql)] - public void TestMd5String(DatabaseType type) - { - DataTable dt = new DataTable(); - dt.Columns.Add("F"); - dt.Rows.Add(new[] {"Fish"}); - - var db = GetCleanedServer(type); - var tbl = db.CreateTable("MD5Test", dt); - - var col = tbl.DiscoverColumn("F"); + var db = GetCleanedServer(type); + var tbl = db.CreateTable("MD5Test", dt); - var sql = "SELECT " + tbl.GetQuerySyntaxHelper().HowDoWeAchieveMd5(col.GetFullyQualifiedName()) + " FROM " + tbl.GetFullyQualifiedName(); + var col = tbl.DiscoverColumn("F"); + var sql = + $"SELECT {tbl.GetQuerySyntaxHelper().HowDoWeAchieveMd5(col.GetFullyQualifiedName())} FROM {tbl.GetFullyQualifiedName()}"; - using (var con = db.Server.GetConnection()) - { - con.Open(); - var cmd = db.Server.GetCommand(sql, con); - var value = cmd.ExecuteScalar(); + using var con = db.Server.GetConnection(); + con.Open(); + var cmd = db.Server.GetCommand(sql, con); + var value = cmd.ExecuteScalar(); - Console.WriteLine("Value was:" + value); - Assert.IsNotNull(value); - Assert.AreNotEqual("Fish",value); - Assert.GreaterOrEqual(value.ToString().Length,32); - - } - } + Console.WriteLine($"Value was:{value}"); - [TestCase(DatabaseType.MicrosoftSQLServer)] - [TestCase(DatabaseType.MySql)] - public void TestMd5Date(DatabaseType type) - { - DataTable dt = new DataTable(); - dt.Columns.Add("F"); - dt.Rows.Add(new[] { "2001-01-01" }); + Assert.IsNotNull(value); + Assert.AreNotEqual("Fish", value); + Assert.GreaterOrEqual(value.ToString().Length, 32); + } - var db = GetCleanedServer(type); - var tbl = db.CreateTable("MD5Test", dt); + [TestCase(DatabaseType.MicrosoftSQLServer)] + [TestCase(DatabaseType.MySql)] + public void 
TestMd5Date(DatabaseType type) + { + var dt = new DataTable(); + dt.Columns.Add("F"); + dt.Rows.Add(new[] { "2001-01-01" }); - var col = tbl.DiscoverColumn("F"); + var db = GetCleanedServer(type); + var tbl = db.CreateTable("MD5Test", dt); - - Assert.AreEqual(typeof(DateTime),tbl.GetQuerySyntaxHelper().TypeTranslater.GetCSharpTypeForSQLDBType(col.DataType.SQLType)); + var col = tbl.DiscoverColumn("F"); - var sql = "SELECT " + tbl.GetQuerySyntaxHelper().HowDoWeAchieveMd5(col.GetFullyQualifiedName()) + " FROM " + tbl.GetFullyQualifiedName(); + Assert.AreEqual(typeof(DateTime), + tbl.GetQuerySyntaxHelper().TypeTranslater.GetCSharpTypeForSQLDBType(col.DataType.SQLType)); - using (var con = db.Server.GetConnection()) - { - con.Open(); - var cmd = db.Server.GetCommand(sql, con); - var value = cmd.ExecuteScalar(); + var sql = + $"SELECT {tbl.GetQuerySyntaxHelper().HowDoWeAchieveMd5(col.GetFullyQualifiedName())} FROM {tbl.GetFullyQualifiedName()}"; - Console.WriteLine("Value was:" + value); + using var con = db.Server.GetConnection(); + con.Open(); + var cmd = db.Server.GetCommand(sql, con); + var value = cmd.ExecuteScalar(); - Assert.IsNotNull(value); - Assert.GreaterOrEqual(value.ToString().Length, 32); - } - } + Console.WriteLine($"Value was:{value}"); + Assert.IsNotNull(value); + Assert.GreaterOrEqual(value.ToString().Length, 32); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/DataLoadEngineTestsBase.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/DataLoadEngineTestsBase.cs index a638a394ab..073179f22d 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/DataLoadEngineTestsBase.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/DataLoadEngineTestsBase.cs @@ -15,77 +15,81 @@ using Rdmp.Core.DataLoad.Modules.Attachers; using Rdmp.Core.DataLoad.Triggers; using Rdmp.Core.Logging; -using ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Checks; using Tests.Common; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration; + +/// +/// Base class for tests that want to run data loads contains helper methods for setting up a valid DLE load configuration and running it +/// +internal class DataLoadEngineTestsBase : DatabaseTests { - /// - /// Base class for tests that want to run data loads contains helper methods for setting up a valid DLE load configuration and running it - /// - class DataLoadEngineTestsBase : DatabaseTests + protected static void AssertHasDataLoadRunId(DataRow row) { - protected void AssertHasDataLoadRunId(DataRow row) - { - var o = row[SpecialFieldNames.DataLoadRunID]; + var o = row[SpecialFieldNames.DataLoadRunID]; - Assert.IsNotNull(o, "A row which was expected to have a hic_dataLoadRunID had null instead"); - Assert.AreNotEqual(DBNull.Value, o, "A row which was expected to have a hic_dataLoadRunID had DBNull.Value instead"); - Assert.GreaterOrEqual((int)o, 0); + Assert.IsNotNull(o, "A row which was expected to have a hic_dataLoadRunID had null instead"); + Assert.AreNotEqual(DBNull.Value, o, + "A row which was expected to have a hic_dataLoadRunID had DBNull.Value instead"); + Assert.GreaterOrEqual((int)o, 0); - var d = row[SpecialFieldNames.ValidFrom]; - Assert.IsNotNull(d, "A row which was expected to have a hic_validFrom had null instead"); - Assert.AreNotEqual(DBNull.Value, d, "A row which was expected to have a hic_validFrom had DBNull.Value instead"); + var d = row[SpecialFieldNames.ValidFrom]; + Assert.IsNotNull(d, "A row which was expected to have a 
hic_validFrom had null instead"); + Assert.AreNotEqual(DBNull.Value, d, + "A row which was expected to have a hic_validFrom had DBNull.Value instead"); - //expect validFrom to be after 2 hours ago (to handle UTC / BST nonsense) - Assert.GreaterOrEqual((DateTime)d, DateTime.Now.Subtract(new TimeSpan(2, 0, 0))); - - } + //expect validFrom to be after 2 hours ago (to handle UTC / BST nonsense) + Assert.GreaterOrEqual((DateTime)d, DateTime.Now.Subtract(new TimeSpan(2, 0, 0))); + } - protected void CreateCSVProcessTask(LoadMetadata lmd, ITableInfo ti, string regex) + protected void CreateCSVProcessTask(LoadMetadata lmd, ITableInfo ti, string regex) + { + var pt = new ProcessTask(CatalogueRepository, lmd, LoadStage.Mounting) { - var pt = new ProcessTask(CatalogueRepository, lmd, LoadStage.Mounting); - pt.Path = typeof(AnySeparatorFileAttacher).FullName; - pt.ProcessTaskType = ProcessTaskType.Attacher; - pt.Name = "Load " + ti.GetRuntimeName(); - pt.SaveToDatabase(); - - pt.CreateArgumentsForClassIfNotExists(); - pt.SetArgumentValue("FilePattern", regex); - pt.SetArgumentValue("Separator", ","); - pt.SetArgumentValue("TableToLoad", ti); - - pt.Check(new ThrowImmediatelyCheckNotifier()); - } + Path = typeof(AnySeparatorFileAttacher).FullName, + ProcessTaskType = ProcessTaskType.Attacher, + Name = $"Load {ti.GetRuntimeName()}" + }; + pt.SaveToDatabase(); + + pt.CreateArgumentsForClassIfNotExists(); + pt.SetArgumentValue("FilePattern", regex); + pt.SetArgumentValue("Separator", ","); + pt.SetArgumentValue("TableToLoad", ti); + + pt.Check(ThrowImmediatelyCheckNotifier.Quiet); + } - protected LoadDirectory SetupLoadDirectory(LoadMetadata lmd) - { - var projectDirectory = LoadDirectory.CreateDirectoryStructure(new DirectoryInfo(TestContext.CurrentContext.TestDirectory), "MyLoadDir", true); - lmd.LocationOfFlatFiles = projectDirectory.RootPath.FullName; - lmd.SaveToDatabase(); + protected static LoadDirectory SetupLoadDirectory(LoadMetadata lmd) + { + var projectDirectory = + LoadDirectory.CreateDirectoryStructure(new DirectoryInfo(TestContext.CurrentContext.TestDirectory), + "MyLoadDir", true); + lmd.LocationOfFlatFiles = projectDirectory.RootPath.FullName; + lmd.SaveToDatabase(); - return projectDirectory; - } + return projectDirectory; + } - protected ITableInfo Import(DiscoveredTable tbl, LoadMetadata lmd, LogManager logManager) - { - logManager.CreateNewLoggingTaskIfNotExists(lmd.Name); + protected ITableInfo Import(DiscoveredTable tbl, LoadMetadata lmd, LogManager logManager) + { + logManager.CreateNewLoggingTaskIfNotExists(lmd.Name); - //import TableInfos - var importer = new TableInfoImporter(CatalogueRepository, tbl); - importer.DoImport(out var ti, out var cis); + //import TableInfos + var importer = new TableInfoImporter(CatalogueRepository, tbl); + importer.DoImport(out var ti, out var cis); - //create Catalogue - var forwardEngineer = new ForwardEngineerCatalogue(ti, cis); - forwardEngineer.ExecuteForwardEngineering(out var cata, out var cataItems, out var eis); + //create Catalogue + var forwardEngineer = new ForwardEngineerCatalogue(ti, cis); + forwardEngineer.ExecuteForwardEngineering(out var cata, out var cataItems, out var eis); - //make the catalogue use the load configuration - cata.LoadMetadata_ID = lmd.ID; - cata.LoggingDataTask = lmd.Name; - Assert.IsNotNull(cata.LiveLoggingServer_ID); //catalogue should have one of these because of system defaults - cata.SaveToDatabase(); + //make the catalogue use the load configuration + cata.LoadMetadata_ID = lmd.ID; + cata.LoggingDataTask = 
lmd.Name; + Assert.IsNotNull(cata.LiveLoggingServer_ID); //catalogue should have one of these because of system defaults + cata.SaveToDatabase(); - return ti; - } + return ti; } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/DataLoadProgressUpdateInfoTests.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/DataLoadProgressUpdateInfoTests.cs index 851536c58d..65034cc993 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/DataLoadProgressUpdateInfoTests.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/DataLoadProgressUpdateInfoTests.cs @@ -7,7 +7,7 @@ using System; using System.Collections.Generic; using Microsoft.Data.SqlClient; -using Moq; +using NSubstitute; using NUnit.Framework; using Rdmp.Core.Curation.Data; using Rdmp.Core.Curation.Data.DataLoad; @@ -17,125 +17,148 @@ using Rdmp.Core.Logging; using Tests.Common; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration; + +public class DataLoadProgressUpdateInfoTests : DatabaseTests { - public class DataLoadProgressUpdateInfoTests :DatabaseTests + private ScheduledDataLoadJob _job; + + #region Setup Methods + + public DataLoadProgressUpdateInfoTests() { - private ScheduledDataLoadJob _job; + var cata = Substitute.For(); + cata.LoggingDataTask.Returns("NothingTask"); + cata.GetTableInfoList(false).Returns(Array.Empty()); + cata.GetLookupTableInfoList().Returns(Array.Empty()); + + var lmd = Substitute.For(); + lmd.GetAllCatalogues().Returns(new[] { cata }); + + _job = new ScheduledDataLoadJob(null, "fish", Substitute.For(), lmd, null, + new ThrowImmediatelyDataLoadJob(), null); + } - #region Setup Methods - public DataLoadProgressUpdateInfoTests() + #endregion + + [Test] + public void AddBasicNormalStrategy_NoDates() + { + var updateInfo = new DataLoadProgressUpdateInfo(); + var ex = Assert.Throws(() => updateInfo.AddAppropriateDisposeStep(_job, null)); + Assert.IsTrue(ex.Message.StartsWith("Job does not have any DatesToRetrieve")); + } + + [Test] + public void AddBasicNormalStrategy_MaxDate() + { + var updateInfo = new DataLoadProgressUpdateInfo(); + Assert.AreEqual(DataLoadProgressUpdateStrategy.UseMaxRequestedDay, updateInfo.Strategy); + + _job.DatesToRetrieve = new List { - ICatalogue cata = Mock.Of( - c=> c.LoggingDataTask == "NothingTask" && - c.GetTableInfoList(false) == new TableInfo[0] && - c.GetLookupTableInfoList() == new TableInfo[0]); - - var lmd = Mock.Of(m => m.GetAllCatalogues() == new[] { cata }); - - _job = new ScheduledDataLoadJob(null,"fish", Mock.Of(), lmd, null, new ThrowImmediatelyDataLoadJob(),null); - } - #endregion + new(2001, 1, 1), + new(2001, 1, 2), + new(2001, 1, 3) + }; + try + { + var added = (UpdateProgressIfLoadsuccessful)updateInfo.AddAppropriateDisposeStep(_job, null); - [Test] - public void AddBasicNormalStrategy_NoDates() + + Assert.AreEqual(new DateTime(2001, 1, 3), added.DateToSetProgressTo); + } + finally { - var updateInfo = new DataLoadProgressUpdateInfo(); - var ex = Assert.Throws(()=>updateInfo.AddAppropriateDisposeStep(_job,null)); - Assert.IsTrue(ex.Message.StartsWith("Job does not have any DatesToRetrieve")); + _job.DatesToRetrieve.Clear(); } + } - [Test] - public void AddBasicNormalStrategy_MaxDate() + [Test] + public void AddRAWSQLStrategy_NoSQL() + { + var updateInfo = new DataLoadProgressUpdateInfo { - var updateInfo = new DataLoadProgressUpdateInfo(); - Assert.AreEqual(DataLoadProgressUpdateStrategy.UseMaxRequestedDay,updateInfo.Strategy); - - _job.DatesToRetrieve = new List(); - 
_job.DatesToRetrieve.Add(new DateTime(2001,1,1)); - _job.DatesToRetrieve.Add(new DateTime(2001, 1, 2)); - _job.DatesToRetrieve.Add(new DateTime(2001, 1, 3)); - try - { - var added = (UpdateProgressIfLoadsuccessful)updateInfo.AddAppropriateDisposeStep(_job, null); - - - Assert.AreEqual(new DateTime(2001, 1, 3), added.DateToSetProgressTo); - } - finally - { - _job.DatesToRetrieve.Clear(); - } - - } + Strategy = DataLoadProgressUpdateStrategy.ExecuteScalarSQLInRAW + }; + + var ex = Assert.Throws(() => + updateInfo.AddAppropriateDisposeStep(_job, GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer))); - [Test] - public void AddRAWSQLStrategy_NoSQL() + Assert.IsTrue(ex.Message.StartsWith("Strategy is ExecuteScalarSQLInRAW but there is no ExecuteScalarSQL")); + } + + [Test] + public void AddRAWSQLStrategy_SQLDodgy_SqlCrashes() + { + var updateInfo = new DataLoadProgressUpdateInfo { - var updateInfo = new DataLoadProgressUpdateInfo(); - updateInfo.Strategy = DataLoadProgressUpdateStrategy.ExecuteScalarSQLInRAW; - - var ex = Assert.Throws(()=>updateInfo.AddAppropriateDisposeStep(_job, GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer))); + Strategy = DataLoadProgressUpdateStrategy.ExecuteScalarSQLInRAW, + ExecuteScalarSQL = "SELECT Top 1 BarrelORum from CaptainMorgansSpicedRumBarrel" + }; - Assert.IsTrue(ex.Message.StartsWith("Strategy is ExecuteScalarSQLInRAW but there is no ExecuteScalarSQL")); - } + var ex = Assert.Throws(() => + updateInfo.AddAppropriateDisposeStep(_job, GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer))); + + Assert.IsTrue(ex.Message.StartsWith("Failed to execute the following SQL in the RAW database")); + Assert.IsInstanceOf(ex.InnerException); + } - [Test] - public void AddRAWSQLStrategy_SQLDodgy_SqlCrashes() + [Test] + public void AddRAWSQLStrategy_SQLDodgy_SqlReturnsNull() + { + var updateInfo = new DataLoadProgressUpdateInfo { - var updateInfo = new DataLoadProgressUpdateInfo(); - updateInfo.Strategy = DataLoadProgressUpdateStrategy.ExecuteScalarSQLInRAW; - - updateInfo.ExecuteScalarSQL = "SELECT Top 1 BarrelORum from CaptainMorgansSpicedRumBarrel"; - var ex = Assert.Throws(() => updateInfo.AddAppropriateDisposeStep(_job, GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer))); - - Assert.IsTrue(ex.Message.StartsWith("Failed to execute the following SQL in the RAW database")); - Assert.IsInstanceOf(ex.InnerException); - } + Strategy = DataLoadProgressUpdateStrategy.ExecuteScalarSQLInRAW, + ExecuteScalarSQL = "SELECT null" + }; + + var ex = Assert.Throws(() => + updateInfo.AddAppropriateDisposeStep(_job, GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer))); + + Assert.IsTrue(ex.Message.Contains("ExecuteScalarSQL")); + Assert.IsTrue(ex.Message.Contains("returned null")); + } - [Test] - public void AddRAWSQLStrategy_SQLDodgy_SqlReturnsNull() + [Test] + public void AddRAWSQLStrategy_SQLDodgy_SqlReturnsNonDate() + { + var updateInfo = new DataLoadProgressUpdateInfo { - var updateInfo = new DataLoadProgressUpdateInfo(); - updateInfo.Strategy = DataLoadProgressUpdateStrategy.ExecuteScalarSQLInRAW; - - updateInfo.ExecuteScalarSQL = "SELECT null"; - var ex = Assert.Throws(() => updateInfo.AddAppropriateDisposeStep(_job, GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer))); - - Assert.IsTrue(ex.Message.Contains("ExecuteScalarSQL")); - Assert.IsTrue(ex.Message.Contains("returned null")); - } + Strategy = DataLoadProgressUpdateStrategy.ExecuteScalarSQLInRAW, + ExecuteScalarSQL = "SELECT 'fishfish'" + }; + + var ex = Assert.Throws(() => + 
updateInfo.AddAppropriateDisposeStep(_job, GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer))); + + Assert.AreEqual( + "ExecuteScalarSQL specified for determining the maximum date of data loaded returned a value that was not a Date:fishfish", + ex.Message); + Assert.IsInstanceOf(ex.InnerException); + } - [Test] - public void AddRAWSQLStrategy_SQLDodgy_SqlReturnsNonDate() + [Test] + public void AddRAWSQLStrategy_SQLCorrect() + { + _job.DatesToRetrieve = new List { - var updateInfo = new DataLoadProgressUpdateInfo(); - updateInfo.Strategy = DataLoadProgressUpdateStrategy.ExecuteScalarSQLInRAW; - - updateInfo.ExecuteScalarSQL = "SELECT 'fishfish'"; - var ex = Assert.Throws(() => updateInfo.AddAppropriateDisposeStep(_job, GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer))); - - Assert.AreEqual("ExecuteScalarSQL specified for determining the maximum date of data loaded returned a value that was not a Date:fishfish",ex.Message); - Assert.IsInstanceOf(ex.InnerException); - } + new(2001, 1, 6), + new(2001, 1, 7), + new(2001, 1, 8) + }; - [Test] - public void AddRAWSQLStrategy_SQLCorrect() + var updateInfo = new DataLoadProgressUpdateInfo { - _job.DatesToRetrieve = new List(); - _job.DatesToRetrieve.Add(new DateTime(2001,1,6)); - _job.DatesToRetrieve.Add(new DateTime(2001,1,7)); - _job.DatesToRetrieve.Add(new DateTime(2001,1,8)); + Strategy = DataLoadProgressUpdateStrategy.ExecuteScalarSQLInRAW, + ExecuteScalarSQL = "SELECT '2001-01-07'" + }; - var updateInfo = new DataLoadProgressUpdateInfo(); - updateInfo.Strategy = DataLoadProgressUpdateStrategy.ExecuteScalarSQLInRAW; - updateInfo.ExecuteScalarSQL = "SELECT '2001-01-07'"; + var added = (UpdateProgressIfLoadsuccessful)updateInfo.AddAppropriateDisposeStep(_job, + GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer)); - var added = (UpdateProgressIfLoadsuccessful)updateInfo.AddAppropriateDisposeStep(_job, GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer)); - - Assert.AreEqual(new DateTime(2001, 1, 7), added.DateToSetProgressTo); + Assert.AreEqual(new DateTime(2001, 1, 7), added.DateToSetProgressTo); - _job.DatesToRetrieve.Clear(); - } + _job.DatesToRetrieve.Clear(); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/DataTableUploadDestinationTests.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/DataTableUploadDestinationTests.cs index fb96fca993..ec66d9e732 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/DataTableUploadDestinationTests.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/DataTableUploadDestinationTests.cs @@ -11,172 +11,115 @@ using FAnsi; using FAnsi.Discovery; using FAnsi.Discovery.TableCreation; -using Moq; using NUnit.Framework; using Rdmp.Core.DataFlowPipeline; using Rdmp.Core.DataLoad.Engine.Pipeline.Destinations; -using ReusableLibraryCode; -using ReusableLibraryCode.Progress; +using Rdmp.Core.ReusableLibraryCode; +using Rdmp.Core.ReusableLibraryCode.Progress; using Tests.Common; using TypeGuesser; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration; + +public class DataTableUploadDestinationTests : DatabaseTests { - public class DataTableUploadDestinationTests:DatabaseTests + [Test] + public void DataTableChangesLengths_NoReAlter() { - [Test] - public void DataTableChangesLengths_NoReAlter() - { - var token = new GracefulCancellationToken(); - DiscoveredDatabase db = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer); - var toConsole = new ThrowImmediatelyDataLoadEventListener(); + var 
token = new GracefulCancellationToken(); + var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); + var toConsole = ThrowImmediatelyDataLoadEventListener.Quiet; - DataTableUploadDestination destination = new DataTableUploadDestination(); - destination.PreInitialize(db, toConsole); - - DataTable dt1 = new DataTable(); - dt1.Columns.Add("name", typeof (string)); - dt1.Rows.Add(new []{"Fish"}); - dt1.TableName = "DataTableUploadDestinationTests"; - - DataTable dt2 = new DataTable(); - dt2.Columns.Add("name", typeof (string)); - dt2.Rows.Add(new []{"BigFish"}); - dt2.TableName = "DataTableUploadDestinationTests"; - - destination.ProcessPipelineData( dt1, toConsole,token); - var ex = Assert.Throws(()=>destination.ProcessPipelineData( dt2, toConsole,token)); - - string expectedText = - "BulkInsert failed on data row 1 the complaint was about source column <> which had value <> destination data type was <>"; - - Assert.IsNotNull(ex.InnerException); - StringAssert.Contains(expectedText,ex.InnerException.Message); - - destination.Dispose(new ThrowImmediatelyDataLoadEventListener(), ex); - } + var destination = new DataTableUploadDestination(); + destination.PreInitialize(db, toConsole); - //RDMPDEV-653 - [Test] - [TestCase(true,10)] - [TestCase(false,10)] - public void DataTableChangesLengths_RandomColumnOrder(bool createIdentity,int numberOfRandomisations) - { - var token = new GracefulCancellationToken(); - DiscoveredDatabase db = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer); + var dt1 = new DataTable(); + dt1.Columns.Add("name", typeof(string)); + dt1.Rows.Add(new[] { "Fish" }); + dt1.TableName = "DataTableUploadDestinationTests"; - var tbl = db.ExpectTable("RandomOrderTable"); - var random = new Random(); + var dt2 = new DataTable(); + dt2.Columns.Add("name", typeof(string)); + dt2.Rows.Add(new[] { "BigFish" }); + dt2.TableName = "DataTableUploadDestinationTests"; - for (int i =0;i(() => destination.ProcessPipelineData(dt2, toConsole, token)); - var toConsole = new ThrowImmediatelyDataLoadEventListener(); + var expectedText = + "BulkInsert failed on data row 1 the complaint was about source column <> which had value <> destination data type was <>"; - int errorIsInColumnOrder = random.Next(3); - string errorColumn = ""; + Assert.IsNotNull(ex.InnerException); + StringAssert.Contains(expectedText, ex.InnerException.Message); - string sql = "CREATE TABLE RandomOrderTable ("; + destination.Dispose(ThrowImmediatelyDataLoadEventListener.Quiet, ex); + } - List leftToCreate = new List(); + //RDMPDEV-653 + [Test] + [TestCase(true, 10)] + [TestCase(false, 10)] + public void DataTableChangesLengths_RandomColumnOrder(bool createIdentity, int numberOfRandomisations) + { + var token = new GracefulCancellationToken(); + var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); - leftToCreate.Add("name varchar(50),"); - leftToCreate.Add("color varchar(50),"); - leftToCreate.Add("age varchar(50),"); + var tbl = db.ExpectTable("RandomOrderTable"); + var random = new Random(); - if(createIdentity) - leftToCreate.Add("id int IDENTITY(1,1),"); + for (var i = 0; i < numberOfRandomisations; i++) + { + if (tbl.Exists()) + tbl.Drop(); - bool invalid = false; + var toConsole = ThrowImmediatelyDataLoadEventListener.Quiet; - for (int j = 0; j < (createIdentity ? 
4 : 3); j++) - { - var toAddNext = random.Next(leftToCreate.Count); + var errorIsInColumnOrder = random.Next(3); + var errorColumn = ""; - string colSql = leftToCreate[toAddNext]; + var sql = "CREATE TABLE RandomOrderTable ("; - leftToCreate.Remove(colSql); + var leftToCreate = new List + { + "name varchar(50),", + "color varchar(50),", + "age varchar(50)," + }; - if (errorIsInColumnOrder == j) - { - sql += colSql.Replace("(50)", "(1)"); - errorColumn = colSql.Substring(0, colSql.IndexOf(" ")); + if(createIdentity) + leftToCreate.Add("id int IDENTITY(1,1),"); - if (errorColumn == "id") - invalid = true; - } + var invalid = false; - else - sql += colSql; - } + for (var j = 0; j < (createIdentity ? 4 : 3); j++) + { + var toAddNext = random.Next(leftToCreate.Count); - if(invalid) - continue; + var colSql = leftToCreate[toAddNext]; - sql = sql.TrimEnd(',') + ")"; + leftToCreate.Remove(colSql); - Console.Write("About to execute:" + sql); - - //problem is with the column name which appears at order 0 in the destination dataset (name with width 1) - using (var con = db.Server.GetConnection()) + if (errorIsInColumnOrder == j) { - con.Open(); - db.Server.GetCommand(sql, con).ExecuteNonQuery(); - } - + sql += colSql.Replace("(50)", "(1)"); + errorColumn = colSql[..colSql.IndexOf(" ", StringComparison.Ordinal)]; - //the bulk insert is - DataTableUploadDestination destination = new DataTableUploadDestination(); - destination.PreInitialize(db, toConsole); - - //order is inverted where name comes out at the end column (index 2) - DataTable dt1 = new DataTable(); - dt1.Columns.Add("age", typeof (string)); - dt1.Columns.Add("color", typeof (string)); - dt1.Columns.Add("name", typeof (string)); - - dt1.Rows.Add("30", "blue", "Fish"); - dt1.TableName = "RandomOrderTable"; - - var ex = Assert.Throws(() => destination.ProcessPipelineData(dt1, toConsole, token)); - - string exceptionMessage = ex.InnerException.Message; - var interestingBit = exceptionMessage.Substring(exceptionMessage.IndexOf(": <<") + ": ".Length); - - string expectedErrorMessage = "<<" + errorColumn + ">> which had value <<"+dt1.Rows[0][errorColumn]+">> destination data type was <>"; - StringAssert.Contains(expectedErrorMessage,interestingBit); + if (errorColumn == "id") + invalid = true; + } - destination.Dispose(new ThrowImmediatelyDataLoadEventListener(), ex); - tbl.Drop(); + else + { + sql += colSql; + } } - } + if (invalid) + continue; + sql = $"{sql.TrimEnd(',')})"; - //RDMPDEV-653 - [Test] - public void DataTableChangesLengths_DropColumns() - { - var token = new GracefulCancellationToken(); - DiscoveredDatabase db = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer); - - var tbl = db.ExpectTable("DroppedColumnsTable"); - if(tbl.Exists()) - tbl.Drop(); - - string sql = @"CREATE TABLE DroppedColumnsTable ( -name varchar(50), -color varchar(50), -age varchar(50) -) - -ALTER TABLE DroppedColumnsTable Drop column color -ALTER TABLE DroppedColumnsTable add color varchar(1) -"; - - Console.Write("About to execute:" + sql); + Console.Write($"About to execute:{sql}"); //problem is with the column name which appears at order 0 in the destination dataset (name with width 1) using (var con = db.Server.GetConnection()) @@ -184,1127 +127,1229 @@ ALTER TABLE DroppedColumnsTable add color varchar(1) con.Open(); db.Server.GetCommand(sql, con).ExecuteNonQuery(); } - + + //the bulk insert is - DataTableUploadDestination destination = new DataTableUploadDestination(); - destination.PreInitialize(db, new ThrowImmediatelyDataLoadEventListener()); + 
var destination = new DataTableUploadDestination(); + destination.PreInitialize(db, toConsole); //order is inverted where name comes out at the end column (index 2) - DataTable dt1 = new DataTable(); + using var dt1 = new DataTable(); dt1.Columns.Add("age", typeof(string)); dt1.Columns.Add("color", typeof(string)); dt1.Columns.Add("name", typeof(string)); dt1.Rows.Add("30", "blue", "Fish"); - dt1.TableName = "DroppedColumnsTable"; + dt1.TableName = "RandomOrderTable"; - var ex = Assert.Throws(() => destination.ProcessPipelineData(dt1, new ThrowImmediatelyDataLoadEventListener(), token)); + var ex = Assert.Throws(() => destination.ProcessPipelineData(dt1, toConsole, token)); - string exceptionMessage = ex.InnerException.Message; - var interestingBit = exceptionMessage.Substring(exceptionMessage.IndexOf(": <<") + ": ".Length); + var exceptionMessage = ex.InnerException.Message; + var interestingBit = + exceptionMessage[(exceptionMessage.IndexOf(": <<", StringComparison.Ordinal) + ": ".Length)..]; - string expectedErrorMessage = "<> which had value <> destination data type was <>"; + var expectedErrorMessage = + $"<<{errorColumn}>> which had value <<{dt1.Rows[0][errorColumn]}>> destination data type was <>"; StringAssert.Contains(expectedErrorMessage, interestingBit); - - destination.Dispose(new ThrowImmediatelyDataLoadEventListener(), ex); - if(tbl.Exists()) - tbl.Drop(); + destination.Dispose(ThrowImmediatelyDataLoadEventListener.Quiet, ex); + tbl.Drop(); } + } - [Test] - public void DataTableEmpty_ThrowHelpfulException() - { - var token = new GracefulCancellationToken(); - DiscoveredDatabase db = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer); - var toConsole = new ThrowImmediatelyDataLoadEventListener(); - DataTableUploadDestination destination = new DataTableUploadDestination(); - destination.PreInitialize(db, toConsole); + //RDMPDEV-653 + [Test] + public void DataTableChangesLengths_DropColumns() + { + var token = new GracefulCancellationToken(); + var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); - DataTable dt1 = new DataTable(); - dt1.TableName = "MyEmptyTable"; - var ex = Assert.Throws(() => destination.ProcessPipelineData(dt1, toConsole, token)); + var tbl = db.ExpectTable("DroppedColumnsTable"); + if (tbl.Exists()) + tbl.Drop(); - destination.Dispose(new ThrowImmediatelyDataLoadEventListener(), ex); - - Assert.AreEqual("DataTable 'MyEmptyTable' had no Columns!", ex.Message); - } - [Test] - public void DataTableNoRows_ThrowHelpfulException() - { - var token = new GracefulCancellationToken(); - DiscoveredDatabase db = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer); - var toConsole = new ThrowImmediatelyDataLoadEventListener(); + var sql = @"CREATE TABLE DroppedColumnsTable ( +name varchar(50), +color varchar(50), +age varchar(50) +) - DataTableUploadDestination destination = new DataTableUploadDestination(); - destination.PreInitialize(db, toConsole); +ALTER TABLE DroppedColumnsTable Drop column color +ALTER TABLE DroppedColumnsTable add color varchar(1) +"; - DataTable dt1 = new DataTable(); - dt1.Columns.Add("GoTeamGo"); - dt1.TableName = "MyEmptyTable"; - var ex = Assert.Throws(() => destination.ProcessPipelineData(dt1, toConsole, token)); - - destination.Dispose(new ThrowImmediatelyDataLoadEventListener(), ex); + Console.Write($"About to execute:{sql}"); - Assert.AreEqual("DataTable 'MyEmptyTable' had no Rows!", ex.Message); - } - [Test] - public void DataTableChangesLengths_AllowAlter() + //problem is with the column name which appears at order 0 in 
the destination dataset (name with width 1) + using (var con = db.Server.GetConnection()) { - var token = new GracefulCancellationToken(); - DiscoveredDatabase db = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer); - var toConsole = new ThrowImmediatelyDataLoadEventListener(); - var toMemory = new ToMemoryDataLoadEventListener(true); + con.Open(); + db.Server.GetCommand(sql, con).ExecuteNonQuery(); + } - DataTableUploadDestination destination = new DataTableUploadDestination(); - destination.PreInitialize(db, toConsole); - destination.AllowResizingColumnsAtUploadTime = true; + //the bulk insert is + var destination = new DataTableUploadDestination(); + destination.PreInitialize(db, ThrowImmediatelyDataLoadEventListener.Quiet); - DataTable dt1 = new DataTable(); - dt1.Columns.Add("name", typeof(string)); - dt1.Rows.Add(new[] { "Fish" }); - dt1.TableName = "DataTableUploadDestinationTests"; + //order is inverted where name comes out at the end column (index 2) + using var dt1 = new DataTable(); + dt1.Columns.Add("age", typeof(string)); + dt1.Columns.Add("color", typeof(string)); + dt1.Columns.Add("name", typeof(string)); - DataTable dt2 = new DataTable(); - dt2.Columns.Add("name", typeof(string)); - dt2.Rows.Add(new[] { "BigFish" }); - dt2.TableName = "DataTableUploadDestinationTests"; + dt1.Rows.Add("30", "blue", "Fish"); + dt1.TableName = "DroppedColumnsTable"; - destination.ProcessPipelineData( dt1, toConsole, token); - Assert.DoesNotThrow(() => destination.ProcessPipelineData( dt2, toMemory, token)); + var ex = Assert.Throws(() => + destination.ProcessPipelineData(dt1, ThrowImmediatelyDataLoadEventListener.Quiet, token)); - Assert.IsTrue(toMemory.EventsReceivedBySender[destination].Any(msg => msg.Message.Contains("Resizing column"))); + var exceptionMessage = ex.InnerException.Message; + var interestingBit = exceptionMessage[(exceptionMessage.IndexOf(": <<", StringComparison.Ordinal) + 2)..]; - destination.Dispose(toConsole, null); - Assert.IsTrue(db.ExpectTable("DataTableUploadDestinationTests").Exists()); - Assert.AreEqual(2,db.ExpectTable("DataTableUploadDestinationTests").GetRowCount()); - - - } + const string expectedErrorMessage = + "<> which had value <> destination data type was <>"; + StringAssert.Contains(expectedErrorMessage, interestingBit); - [Test] - public void DoubleResizingBetweenIntAndDouble() - { - var token = new GracefulCancellationToken(); - DiscoveredDatabase db = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer); - var toConsole = new ThrowImmediatelyDataLoadEventListener(); + destination.Dispose(ThrowImmediatelyDataLoadEventListener.Quiet, ex); - DataTableUploadDestination destination = new DataTableUploadDestination(); - destination.PreInitialize(db, toConsole); - destination.AllowResizingColumnsAtUploadTime = true; - - DataTable dt1 = new DataTable(); - dt1.Columns.Add("mynum", typeof(double)); - dt1.Rows.Add(new object[] {1}); - dt1.Rows.Add(new object[] { 5 }); - dt1.Rows.Add(new object[] { 15 }); - dt1.Rows.Add(new object[] { 2.5 }); - dt1.Rows.Add(new object[] { 5 }); - - dt1.TableName = "DataTableUploadDestinationTests"; - - destination.ProcessPipelineData(dt1, toConsole, token); - destination.Dispose(toConsole, null); + if (tbl.Exists()) + tbl.Drop(); + } - Assert.IsTrue(db.ExpectTable("DataTableUploadDestinationTests").Exists()); - Assert.AreEqual(5, db.ExpectTable("DataTableUploadDestinationTests").GetRowCount()); - Assert.AreEqual("decimal(3,1)", db.ExpectTable("DataTableUploadDestinationTests").DiscoverColumn("mynum").DataType.SQLType); - 
} + [Test] + public void DataTableEmpty_ThrowHelpfulException() + { + var token = new GracefulCancellationToken(); + var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); + var toConsole = ThrowImmediatelyDataLoadEventListener.Quiet; + var destination = new DataTableUploadDestination(); + destination.PreInitialize(db, toConsole); - [TestCase("varchar(3)", 1.5, "x")]//RDMPDEV-932 - [TestCase("varchar(27)", "2001-01-01", "x")] //see Guesser.MinimumLengthRequiredForDateStringRepresentation - public void BatchResizing(string expectedDatatypeInDatabase,object batch1Value,object batch2Value) + var dt1 = new DataTable { - var token = new GracefulCancellationToken(); - DiscoveredDatabase db = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer); - var toConsole = new ThrowImmediatelyDataLoadEventListener(); + TableName = "MyEmptyTable" + }; + var ex = Assert.Throws(() => destination.ProcessPipelineData(dt1, toConsole, token)); - DataTableUploadDestination destination = new DataTableUploadDestination(); - destination.PreInitialize(db, toConsole); - destination.AllowResizingColumnsAtUploadTime = true; - - DataTable dt1 = new DataTable(); - dt1.Columns.Add("mycol"); - dt1.Rows.Add(new[] {batch1Value}); - - dt1.TableName = "DataTableUploadDestinationTests"; - try - { - destination.ProcessPipelineData(dt1, toConsole, token); - - DataTable dt2 = new DataTable(); - dt2.Columns.Add("mycol"); - dt2.Rows.Add(new object[] { batch2Value }); - - destination.ProcessPipelineData(dt2, toConsole, token); - destination.Dispose(toConsole, null); - } - catch (Exception e) - { - destination.Dispose(toConsole, e); - throw; - } - - Assert.AreEqual(expectedDatatypeInDatabase, db.ExpectTable("DataTableUploadDestinationTests").DiscoverColumn("mycol").DataType.SQLType); - } + destination.Dispose(ThrowImmediatelyDataLoadEventListener.Quiet, ex); - [TestCase("varchar(24)", "2", "987styb4ih0r9h4322938476", "tinyint")] - public void BatchResizing_WithExplicitWriteTypes(string expectedDatatypeInDatabase, object batch1Value, object batch2Value, string batch1SqlType) - { - var token = new GracefulCancellationToken(); - DiscoveredDatabase db = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer); - var toConsole = new ThrowImmediatelyDataLoadEventListener(); + Assert.AreEqual("DataTable 'MyEmptyTable' had no Columns!", ex.Message); + } - DataTableUploadDestination destination = new DataTableUploadDestination(); - destination.PreInitialize(db, toConsole); - destination.AllowResizingColumnsAtUploadTime = true; + [Test] + public void DataTableNoRows_ThrowHelpfulException() + { + var token = new GracefulCancellationToken(); + var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); + var toConsole = ThrowImmediatelyDataLoadEventListener.Quiet; - DataTable dt1 = new DataTable(); - dt1.Columns.Add("mycol"); - dt1.Rows.Add(new[] { batch1Value }); + var destination = new DataTableUploadDestination(); + destination.PreInitialize(db, toConsole); - dt1.TableName = "DataTableUploadDestinationTests"; - try - { - destination.AddExplicitWriteType("mycol", batch1SqlType); - destination.ProcessPipelineData(dt1, toConsole, token); + var dt1 = new DataTable(); + dt1.Columns.Add("GoTeamGo"); + dt1.TableName = "MyEmptyTable"; + var ex = Assert.Throws(() => destination.ProcessPipelineData(dt1, toConsole, token)); - DataTable dt2 = new DataTable(); - dt2.Columns.Add("mycol"); - dt2.Rows.Add(new object[] { batch2Value }); + destination.Dispose(ThrowImmediatelyDataLoadEventListener.Quiet, ex); - destination.ProcessPipelineData(dt2, 
toConsole, token); - destination.Dispose(toConsole, null); - } - catch (Exception e) - { - destination.Dispose(toConsole, e); - throw; - } + Assert.AreEqual("DataTable 'MyEmptyTable' had no Rows!", ex.Message); + } - Assert.AreEqual(expectedDatatypeInDatabase, db.ExpectTable("DataTableUploadDestinationTests").DiscoverColumn("mycol").DataType.SQLType); - } + [Test] + public void DataTableChangesLengths_AllowAlter() + { + var token = new GracefulCancellationToken(); + var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); + var toConsole = ThrowImmediatelyDataLoadEventListener.Quiet; + var toMemory = new ToMemoryDataLoadEventListener(true); - [Test] - public void VeryLongStringIsVarcharMax() - { - var token = new GracefulCancellationToken(); - DiscoveredDatabase db = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer); - var toConsole = new ThrowImmediatelyDataLoadEventListener(); + var destination = new DataTableUploadDestination(); + destination.PreInitialize(db, toConsole); + destination.AllowResizingColumnsAtUploadTime = true; - DataTableUploadDestination destination = new DataTableUploadDestination(); - destination.PreInitialize(db, toConsole); - destination.AllowResizingColumnsAtUploadTime = true; + var dt1 = new DataTable(); + dt1.Columns.Add("name", typeof(string)); + dt1.Rows.Add(new[] { "Fish" }); + dt1.TableName = "DataTableUploadDestinationTests"; + + var dt2 = new DataTable(); + dt2.Columns.Add("name", typeof(string)); + dt2.Rows.Add(new[] { "BigFish" }); + dt2.TableName = "DataTableUploadDestinationTests"; + + destination.ProcessPipelineData(dt1, toConsole, token); + Assert.DoesNotThrow(() => destination.ProcessPipelineData(dt2, toMemory, token)); + + Assert.IsTrue(toMemory.EventsReceivedBySender[destination].Any(msg => msg.Message.Contains("Resizing column"))); + destination.Dispose(toConsole, null); + Assert.IsTrue(db.ExpectTable("DataTableUploadDestinationTests").Exists()); + Assert.AreEqual(2, db.ExpectTable("DataTableUploadDestinationTests").GetRowCount()); + } - string longBitOfText = ""; + [Test] + public void DoubleResizingBetweenIntAndDouble() + { + var token = new GracefulCancellationToken(); + var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); + var toConsole = ThrowImmediatelyDataLoadEventListener.Quiet; + + var destination = new DataTableUploadDestination(); + destination.PreInitialize(db, toConsole); + destination.AllowResizingColumnsAtUploadTime = true; + + var dt1 = new DataTable(); + dt1.Columns.Add("mynum", typeof(double)); + dt1.Rows.Add(new object[] { 1 }); + dt1.Rows.Add(new object[] { 5 }); + dt1.Rows.Add(new object[] { 15 }); + dt1.Rows.Add(new object[] { 2.5 }); + dt1.Rows.Add(new object[] { 5 }); + + dt1.TableName = "DataTableUploadDestinationTests"; + + destination.ProcessPipelineData(dt1, toConsole, token); + destination.Dispose(toConsole, null); + + Assert.IsTrue(db.ExpectTable("DataTableUploadDestinationTests").Exists()); + Assert.AreEqual(5, db.ExpectTable("DataTableUploadDestinationTests").GetRowCount()); + Assert.AreEqual("decimal(3,1)", + db.ExpectTable("DataTableUploadDestinationTests").DiscoverColumn("mynum").DataType.SQLType); + } - for (int i = 0; i < 9000; i++) - longBitOfText += 'A'; - DataTable dt1 = new DataTable(); - dt1.Columns.Add("myText"); - dt1.Rows.Add(new object[] { longBitOfText }); + [TestCase("varchar(3)", 1.5, "x")] //RDMPDEV-932 + [TestCase("varchar(27)", "2001-01-01", "x")] //see Guesser.MinimumLengthRequiredForDateStringRepresentation + public void BatchResizing(string expectedDatatypeInDatabase, object 
batch1Value, object batch2Value) + { + var token = new GracefulCancellationToken(); + var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); + var toConsole = ThrowImmediatelyDataLoadEventListener.Quiet; + var destination = new DataTableUploadDestination(); + destination.PreInitialize(db, toConsole); + destination.AllowResizingColumnsAtUploadTime = true; - dt1.TableName = "DataTableUploadDestinationTests"; + var dt1 = new DataTable(); + dt1.Columns.Add("mycol"); + dt1.Rows.Add(new[] { batch1Value }); + dt1.TableName = "DataTableUploadDestinationTests"; + try + { destination.ProcessPipelineData(dt1, toConsole, token); - destination.Dispose(toConsole, null); - Assert.IsTrue(db.ExpectTable("DataTableUploadDestinationTests").Exists()); - Assert.AreEqual(1, db.ExpectTable("DataTableUploadDestinationTests").GetRowCount()); - Assert.AreEqual("varchar(max)", db.ExpectTable("DataTableUploadDestinationTests").DiscoverColumn("myText").DataType.SQLType); - } + var dt2 = new DataTable(); + dt2.Columns.Add("mycol"); + dt2.Rows.Add(new object[] { batch2Value }); - [Test] - [TestCase(true)] - [TestCase(false)] - public void DecimalResizing(bool negative) + destination.ProcessPipelineData(dt2, toConsole, token); + destination.Dispose(toConsole, null); + } + catch (Exception e) { - var token = new GracefulCancellationToken(); - DiscoveredDatabase db = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer); - var toConsole = new ThrowImmediatelyDataLoadEventListener(); - var toMemory = new ToMemoryDataLoadEventListener(true); + destination.Dispose(toConsole, e); + throw; + } - DataTableUploadDestination destination = new DataTableUploadDestination(); - destination.PreInitialize(db, toConsole); - destination.AllowResizingColumnsAtUploadTime = true; + Assert.AreEqual(expectedDatatypeInDatabase, + db.ExpectTable("DataTableUploadDestinationTests").DiscoverColumn("mycol").DataType.SQLType); + } + + [TestCase("varchar(24)", "2", "987styb4ih0r9h4322938476", "tinyint")] + public void BatchResizing_WithExplicitWriteTypes(string expectedDatatypeInDatabase, object batch1Value, + object batch2Value, string batch1SqlType) + { + var token = new GracefulCancellationToken(); + var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); + var toConsole = ThrowImmediatelyDataLoadEventListener.Quiet; - DataTable dt1 = new DataTable(); - dt1.Columns.Add("mynum", typeof(string)); - dt1.Rows.Add(new[] { "1.51" }); - dt1.TableName = "DataTableUploadDestinationTests"; + var destination = new DataTableUploadDestination(); + destination.PreInitialize(db, toConsole); + destination.AllowResizingColumnsAtUploadTime = true; - DataTable dt2 = new DataTable(); - dt2.Columns.Add("mynum", typeof(string)); - dt2.Rows.Add(new[] { negative ? 
"-999.99" : "999.99" }); - dt2.Rows.Add(new[] { "00000.00000" }); - dt2.Rows.Add(new[] { "0" }); - dt2.Rows.Add(new string[] { null }); - dt2.Rows.Add(new [] { "" }); - dt2.Rows.Add(new[] { DBNull.Value }); - dt2.TableName = "DataTableUploadDestinationTests"; + var dt1 = new DataTable(); + dt1.Columns.Add("mycol"); + dt1.Rows.Add(new[] { batch1Value }); - destination.ProcessPipelineData( dt1, toConsole, token); - destination.ProcessPipelineData( dt2, toMemory, token); + dt1.TableName = "DataTableUploadDestinationTests"; + try + { + destination.AddExplicitWriteType("mycol", batch1SqlType); + destination.ProcessPipelineData(dt1, toConsole, token); - Assert.IsTrue(toMemory.EventsReceivedBySender[destination].Any(msg => msg.Message.Contains("Resizing column "))); + var dt2 = new DataTable(); + dt2.Columns.Add("mycol"); + dt2.Rows.Add(new object[] { batch2Value }); + destination.ProcessPipelineData(dt2, toConsole, token); destination.Dispose(toConsole, null); - Assert.IsTrue(db.ExpectTable("DataTableUploadDestinationTests").Exists()); - Assert.AreEqual(7, db.ExpectTable("DataTableUploadDestinationTests").GetRowCount()); - Assert.AreEqual("decimal(5,2)", db.ExpectTable("DataTableUploadDestinationTests").DiscoverColumn("mynum").DataType.SQLType); + } + catch (Exception e) + { + destination.Dispose(toConsole, e); + throw; } - private static object[] _sourceLists = { - new object[] {"decimal(3,3)",new object[]{"0.001"}, new object[]{0.001}}, //case 1 - new object[] {"decimal(6,3)",new object[]{"19","0.001","123.001",32.0f}, new object[]{19,0.001,123.001,32.0f}}, //case 2 + Assert.AreEqual(expectedDatatypeInDatabase, + db.ExpectTable("DataTableUploadDestinationTests").DiscoverColumn("mycol").DataType.SQLType); + } - //Time tests - new object[] {"time",new object[]{"12:01"}, new object[]{new TimeSpan(12,1,0)}}, - new object[] {"time",new object[]{"13:00:00"}, new object[]{new TimeSpan(13,0,0)}}, + [Test] + public void VeryLongStringIsVarcharMax() + { + var token = new GracefulCancellationToken(); + var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); + var toConsole = ThrowImmediatelyDataLoadEventListener.Quiet; - //Send two dates expect datetime in database and resultant data to be legit dates - new object[] {"datetime2",new object[]{"2001-01-01 12:01","2010-01-01"}, new object[]{new DateTime(2001,01,01,12,1,0),new DateTime(2010,01,01,0,0,0)}}, + var destination = new DataTableUploadDestination(); + destination.PreInitialize(db, toConsole); + destination.AllowResizingColumnsAtUploadTime = true; - //Mixed data types going from time to date results in us falling back to string - new object[] {"varchar(10)",new object[]{"12:01","2001-01-01"}, new object[]{"12:01","2001-01-01"}} - }; - - [Test, TestCaseSource(nameof(_sourceLists))] - public void DataTypeEstimation(string expectedDatatypeInDatabase, object[] rowValues, object[] expectedValuesReadFromDatabase) - { - var token = new GracefulCancellationToken(); - DiscoveredDatabase db = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer); - var toConsole = new ThrowImmediatelyDataLoadEventListener(); - - DataTableUploadDestination destination = new DataTableUploadDestination(); - destination.PreInitialize(db, toConsole); + var longBitOfText = ""; - DataTable dt1 = new DataTable(); - dt1.Columns.Add("myCol", typeof(string)); + for (var i = 0; i < 9000; i++) + longBitOfText += 'A'; - foreach (object rowValue in rowValues) - dt1.Rows.Add(new[] {rowValue}); - - dt1.TableName = "DataTableUploadDestinationTests"; + var dt1 = new DataTable(); + 
dt1.Columns.Add("myText"); + dt1.Rows.Add(new object[] { longBitOfText }); - destination.ProcessPipelineData(dt1, toConsole, token); - destination.Dispose(toConsole, null); - Assert.IsTrue(db.ExpectTable("DataTableUploadDestinationTests").Exists()); - Assert.AreEqual(expectedDatatypeInDatabase, db.ExpectTable("DataTableUploadDestinationTests").DiscoverColumn("myCol").DataType.SQLType); + dt1.TableName = "DataTableUploadDestinationTests"; - using (var con = db.Server.GetConnection()) - { - con.Open(); - using(var cmd = DatabaseCommandHelper.GetCommand("Select * from DataTableUploadDestinationTests", con)) - using(var r = cmd.ExecuteReader()) - foreach (object e in expectedValuesReadFromDatabase) - { - Assert.IsTrue(r.Read()); - Assert.AreEqual(e, r["myCol"]); - } - } - } + destination.ProcessPipelineData(dt1, toConsole, token); + destination.Dispose(toConsole, null); + Assert.IsTrue(db.ExpectTable("DataTableUploadDestinationTests").Exists()); + Assert.AreEqual(1, db.ExpectTable("DataTableUploadDestinationTests").GetRowCount()); + Assert.AreEqual("varchar(max)", + db.ExpectTable("DataTableUploadDestinationTests").DiscoverColumn("myText").DataType.SQLType); + } + [Test] + [TestCase(true)] + [TestCase(false)] + public void DecimalResizing(bool negative) + { + var token = new GracefulCancellationToken(); + var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); + var toConsole = ThrowImmediatelyDataLoadEventListener.Quiet; + var toMemory = new ToMemoryDataLoadEventListener(true); + + var destination = new DataTableUploadDestination(); + destination.PreInitialize(db, toConsole); + destination.AllowResizingColumnsAtUploadTime = true; + + var dt1 = new DataTable(); + dt1.Columns.Add("mynum", typeof(string)); + dt1.Rows.Add(new[] { "1.51" }); + dt1.TableName = "DataTableUploadDestinationTests"; + + var dt2 = new DataTable(); + dt2.Columns.Add("mynum", typeof(string)); + dt2.Rows.Add(new[] { negative ? 
"-999.99" : "999.99" }); + dt2.Rows.Add(new[] { "00000.00000" }); + dt2.Rows.Add(new[] { "0" }); + dt2.Rows.Add(new string[] { null }); + dt2.Rows.Add(new[] { "" }); + dt2.Rows.Add(new[] { DBNull.Value }); + dt2.TableName = "DataTableUploadDestinationTests"; + + destination.ProcessPipelineData(dt1, toConsole, token); + destination.ProcessPipelineData(dt2, toMemory, token); + + Assert.IsTrue(toMemory.EventsReceivedBySender[destination] + .Any(msg => msg.Message.Contains("Resizing column "))); + + destination.Dispose(toConsole, null); + Assert.IsTrue(db.ExpectTable("DataTableUploadDestinationTests").Exists()); + Assert.AreEqual(7, db.ExpectTable("DataTableUploadDestinationTests").GetRowCount()); + Assert.AreEqual("decimal(5,2)", + db.ExpectTable("DataTableUploadDestinationTests").DiscoverColumn("mynum").DataType.SQLType); + } - [Test] - [TestCase(true)] - [TestCase(false)] - public void DecimalZeros(bool sendTheZero) + private static object[] _sourceLists = + { + new object[] { "decimal(3,3)", new object[] { "0.001" }, new object[] { 0.001 } }, //case 1 + new object[] { - var token = new GracefulCancellationToken(); - DiscoveredDatabase db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); - var toConsole = new ThrowImmediatelyDataLoadEventListener(); - - DataTableUploadDestination destination = new DataTableUploadDestination(); - destination.PreInitialize(db, toConsole); - destination.AllowResizingColumnsAtUploadTime = true; + "decimal(6,3)", new object[] { "19", "0.001", "123.001", 32.0f }, new object[] { 19, 0.001, 123.001, 32.0f } + }, //case 2 - DataTable dt1 = new DataTable(); - dt1.Columns.Add("mynum", typeof(string)); - dt1.Rows.Add(new[] { "0.000742548000424313" }); + //Time tests + new object[] { "time", new object[] { "12:01" }, new object[] { new TimeSpan(12, 1, 0) } }, + new object[] { "time", new object[] { "13:00:00" }, new object[] { new TimeSpan(13, 0, 0) } }, - if (sendTheZero) - dt1.Rows.Add(new[] { "0" }); + //Send two dates expect datetime in database and resultant data to be legit dates + new object[] + { + "datetime2", new object[] { "2001-01-01 12:01", "2010-01-01" }, + new object[] { new DateTime(2001, 01, 01, 12, 1, 0), new DateTime(2010, 01, 01, 0, 0, 0) } + }, - dt1.TableName = "DataTableUploadDestinationTests"; + //Mixed data types going from time to date results in us falling back to string + new object[] { "varchar(10)", new object[] { "12:01", "2001-01-01" }, new object[] { "12:01", "2001-01-01" } } + }; - destination.ProcessPipelineData( dt1, toConsole, token); - destination.Dispose(toConsole, null); - //table should exist - Assert.IsTrue(db.ExpectTable("DataTableUploadDestinationTests").Exists()); + [Test] + [TestCaseSource(nameof(_sourceLists))] + public void DataTypeEstimation(string expectedDatatypeInDatabase, object[] rowValues, + object[] expectedValuesReadFromDatabase) + { + var token = new GracefulCancellationToken(); + var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); + var toConsole = ThrowImmediatelyDataLoadEventListener.Quiet; - //should have 2 rows - Assert.AreEqual(sendTheZero?2:1, db.ExpectTable("DataTableUploadDestinationTests").GetRowCount()); - - //should be decimal + var destination = new DataTableUploadDestination(); + destination.PreInitialize(db, toConsole); - Assert.AreEqual(sendTheZero ?"decimal(19,18)":"decimal(18,18)", db.ExpectTable("DataTableUploadDestinationTests").DiscoverColumn("mynum").DataType.SQLType); - } + var dt1 = new DataTable(); + dt1.Columns.Add("myCol", typeof(string)); - 
[TestCaseSource(typeof(All),nameof(All.DatabaseTypes))] - public void TestResizing(DatabaseType dbType) - { - var db = GetCleanedServer(dbType); - var server = db.Server; + foreach (var rowValue in rowValues) + dt1.Rows.Add(new[] { rowValue }); - var table = db.CreateTable("TestResizing", - new DatabaseColumnRequest[] - { - new DatabaseColumnRequest("MyInteger",new DatabaseTypeRequest(typeof(int))), - new DatabaseColumnRequest("MyMaxString",new DatabaseTypeRequest(typeof(string),int.MaxValue)), - new DatabaseColumnRequest("Description",new DatabaseTypeRequest(typeof(string),int.MaxValue)), - new DatabaseColumnRequest("StringNotNull",new DatabaseTypeRequest(typeof(string),100),false), - new DatabaseColumnRequest("StringAllowNull",new DatabaseTypeRequest(typeof(string),100),true), - new DatabaseColumnRequest("StringPk",new DatabaseTypeRequest(typeof(string),50),true){IsPrimaryKey = true } - }); - - using (var con = server.GetConnection()) - { - con.Open(); - - //should not allow nulls before - Assert.AreEqual(false, table.DiscoverColumn("StringNotNull").AllowNulls); - //do resize - table.DiscoverColumn("StringNotNull").DataType.Resize(500); - - //rediscover it to get the new state in database (it should now be 500 and still shouldn't allow nulls) - AssertIsStringWithLength(table.DiscoverColumn("StringNotNull"), 500); - - - Assert.AreEqual(false, table.DiscoverColumn("StringNotNull").AllowNulls); - - //do the same with the one that allows nulls - Assert.AreEqual(true, table.DiscoverColumn("StringAllowNull").AllowNulls); - table.DiscoverColumn("StringAllowNull").DataType.Resize(101); - table.DiscoverColumn("StringAllowNull").DataType.Resize(103); - table.DiscoverColumn("StringAllowNull").DataType.Resize(105); - - AssertIsStringWithLength(table.DiscoverColumn("StringAllowNull"), 105); - Assert.AreEqual(true, table.DiscoverColumn("StringAllowNull").AllowNulls); - - //we should have correct understanding prior to resize - AssertIsStringWithLength(table.DiscoverColumn("StringPk"),50); - Assert.AreEqual(true, table.DiscoverColumn("StringPk").IsPrimaryKey); - Assert.AreEqual(false, table.DiscoverColumn("StringPk").AllowNulls); - - //now we execute the resize - table.DiscoverColumn("StringPk").DataType.Resize(500); - - AssertIsStringWithLength(table.DiscoverColumn("StringPk"), 500); - - Assert.AreEqual(true, table.DiscoverColumn("StringPk").IsPrimaryKey); - Assert.AreEqual(false, table.DiscoverColumn("StringPk").AllowNulls); - - con.Close(); - } - } + dt1.TableName = "DataTableUploadDestinationTests"; - private void AssertIsStringWithLength(DiscoveredColumn col, int expectedLength) - { - switch (col.Table.Database.Server.DatabaseType) - { - case DatabaseType.MicrosoftSQLServer: - case DatabaseType.MySql: - Assert.AreEqual($"varchar({expectedLength})",col.DataType.SQLType); - break; - case DatabaseType.Oracle: - Assert.AreEqual($"varchar2({expectedLength})",col.DataType.SQLType); - break; - case DatabaseType.PostgreSql: - Assert.AreEqual($"character varying({expectedLength})",col.DataType.SQLType); - break; - default: - throw new ArgumentOutOfRangeException(nameof(col.Table.Database.Server.DatabaseType), col.Table.Database.Server.DatabaseType, null); - } + destination.ProcessPipelineData(dt1, toConsole, token); - } + destination.Dispose(toConsole, null); + Assert.IsTrue(db.ExpectTable("DataTableUploadDestinationTests").Exists()); + Assert.AreEqual(expectedDatatypeInDatabase, + db.ExpectTable("DataTableUploadDestinationTests").DiscoverColumn("myCol").DataType.SQLType); - 
[TestCaseSource(typeof(All),nameof(All.DatabaseTypes))] - public void TestResizing_WithDetection(DatabaseType dbType) + using var con = db.Server.GetConnection(); + con.Open(); + using var cmd = DatabaseCommandHelper.GetCommand("Select * from DataTableUploadDestinationTests", con); + using var r = cmd.ExecuteReader(); + foreach (var e in expectedValuesReadFromDatabase) { - var db = GetCleanedServer(dbType); + Assert.IsTrue(r.Read()); + Assert.AreEqual(e, r["myCol"]); + } + } - var table = db.CreateTable("TestResizing", - new DatabaseColumnRequest[] - { - new DatabaseColumnRequest("MyInteger",new DatabaseTypeRequest(typeof(int))), - new DatabaseColumnRequest("MyMaxString",new DatabaseTypeRequest(typeof(string),int.MaxValue)), - new DatabaseColumnRequest("Description",new DatabaseTypeRequest(typeof(string),int.MaxValue)), - new DatabaseColumnRequest("StringNotNull",new DatabaseTypeRequest(typeof(string),10),false), - new DatabaseColumnRequest("StringAllowNull",new DatabaseTypeRequest(typeof(string),100),true), - new DatabaseColumnRequest("StringPk",new DatabaseTypeRequest(typeof(string),50),true){IsPrimaryKey = true } - }); - - Assert.AreEqual(10, table.DiscoverColumn("StringNotNull").DataType.GetLengthIfString()); - - var dt = new DataTable("TestResizing"); - dt.Columns.Add("MyInteger"); - dt.Columns.Add("MyMaxString"); - dt.Columns.Add("Description"); - dt.Columns.Add("StringNotNull"); - dt.Columns.Add("StringAllowNull"); - dt.Columns.Add("StringPk"); - - dt.Rows.Add("1", //MyInteger - "fff", //MyMaxString - "fff2", //Description - "1234567891011",//StringNotNull - too long for the column, so it should resize - DBNull.Value, //StringAllowNull - "f" //StringPk - ); - - var dt2 = dt.Clone(); - dt2.Rows.Clear(); - dt2.Rows.Add("1", //MyInteger - "fff", //MyMaxString - "fff2", //Description - "12345678910112",//StringNotNull - too long for the column, so it should resize - DBNull.Value, //StringAllowNull - "f2" //StringPk - ); - - var dest = new DataTableUploadDestination(); - dest.AllowResizingColumnsAtUploadTime = true; - dest.PreInitialize(db,new ThrowImmediatelyDataLoadEventListener()); - - dest.ProcessPipelineData(dt,new ThrowImmediatelyDataLoadEventListener(),new GracefulCancellationToken()); - dest.ProcessPipelineData(dt2, new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()); - dest.Dispose(new ThrowImmediatelyDataLoadEventListener(),null); - - //it should have resized us. 
- Assert.AreEqual(14, table.DiscoverColumn("StringNotNull").DataType.GetLengthIfString()); - } - [TestCase(DatabaseType.MicrosoftSQLServer, "didn’t")] - [TestCase(DatabaseType.MySql, "didn’t")] - [TestCase(DatabaseType.Oracle, "didn’t")] - [TestCase(DatabaseType.MicrosoftSQLServer, "didn't")] - [TestCase(DatabaseType.MySql, "didn't")] - [TestCase(DatabaseType.Oracle, "didn't")] - public void Test_SingleQuote_InText(DatabaseType dbType,string testValue) - { - var db = GetCleanedServer(dbType); - - var dt = new DataTable("TestFreeText"); - dt.Columns.Add("MyFreeText"); - dt.Rows.Add(testValue); - - var dest = new DataTableUploadDestination(); - dest.AllowResizingColumnsAtUploadTime = true; - dest.PreInitialize(db, new ThrowImmediatelyDataLoadEventListener()); - dest.ProcessPipelineData(dt, new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()); - dest.Dispose(new ThrowImmediatelyDataLoadEventListener(), null); - - var tbl = db.ExpectTable("TestFreeText"); - Assert.IsTrue(tbl.Exists()); - Assert.AreEqual(1,tbl.GetRowCount()); - } + [Test] + [TestCase(true)] + [TestCase(false)] + public void DecimalZeros(bool sendTheZero) + { + var token = new GracefulCancellationToken(); + var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); + var toConsole = ThrowImmediatelyDataLoadEventListener.Quiet; - [Test] - public void DodgyTypes() - { - var token = new GracefulCancellationToken(); - DiscoveredDatabase db = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer); - var toConsole = new ThrowImmediatelyDataLoadEventListener(); + var destination = new DataTableUploadDestination(); + destination.PreInitialize(db, toConsole); + destination.AllowResizingColumnsAtUploadTime = true; - DataTableUploadDestination destination = new DataTableUploadDestination(); - destination.PreInitialize(db, toConsole); - destination.AllowResizingColumnsAtUploadTime = true; + var dt1 = new DataTable(); + dt1.Columns.Add("mynum", typeof(string)); + dt1.Rows.Add(new[] { "0.000742548000424313" }); - DataTable dt1 = new DataTable(); - dt1.Columns.Add("col1", typeof(double)); - dt1.Columns.Add("col2", typeof(double)); - dt1.Columns.Add("col3", typeof(bool)); - dt1.Columns.Add("col4", typeof(byte)); - dt1.Columns.Add("col5", typeof(byte[])); + if (sendTheZero) + dt1.Rows.Add(new[] { "0" }); - dt1.Rows.Add(new object[] { 0.425,0.451,true,(byte)2,new byte[]{0x5,0xA}}); + dt1.TableName = "DataTableUploadDestinationTests"; + destination.ProcessPipelineData(dt1, toConsole, token); + destination.Dispose(toConsole, null); - dt1.TableName = "DataTableUploadDestinationTests"; + //table should exist + Assert.IsTrue(db.ExpectTable("DataTableUploadDestinationTests").Exists()); - destination.ProcessPipelineData(dt1, toConsole, token); - destination.Dispose(toConsole, null); + //should have 2 rows + Assert.AreEqual(sendTheZero ? 2 : 1, db.ExpectTable("DataTableUploadDestinationTests").GetRowCount()); - //table should exist - Assert.IsTrue(db.ExpectTable("DataTableUploadDestinationTests").Exists()); + //should be decimal - //should have 2 rows - Assert.AreEqual(1, db.ExpectTable("DataTableUploadDestinationTests").GetRowCount()); + Assert.AreEqual(sendTheZero ? 
"decimal(19,18)" : "decimal(18,18)", + db.ExpectTable("DataTableUploadDestinationTests").DiscoverColumn("mynum").DataType.SQLType); + } - //should be decimal - Assert.AreEqual("decimal(3,3)", db.ExpectTable("DataTableUploadDestinationTests").DiscoverColumn("col1").DataType.SQLType); - Assert.AreEqual("decimal(3,3)", db.ExpectTable("DataTableUploadDestinationTests").DiscoverColumn("col2").DataType.SQLType); - Assert.AreEqual("bit", db.ExpectTable("DataTableUploadDestinationTests").DiscoverColumn("col3").DataType.SQLType); - Assert.AreEqual("tinyint", db.ExpectTable("DataTableUploadDestinationTests").DiscoverColumn("col4").DataType.SQLType); - - Assert.AreEqual("varbinary(max)", db.ExpectTable("DataTableUploadDestinationTests").DiscoverColumn("col5").DataType.SQLType); - } + [TestCaseSource(typeof(All), nameof(All.DatabaseTypes))] + public void TestResizing(DatabaseType dbType) + { + var db = GetCleanedServer(dbType); + var server = db.Server; + var table = db.CreateTable("TestResizing", + new DatabaseColumnRequest[] + { + new("MyInteger", new DatabaseTypeRequest(typeof(int))), + new("MyMaxString", new DatabaseTypeRequest(typeof(string), int.MaxValue)), + new("Description", new DatabaseTypeRequest(typeof(string), int.MaxValue)), + new("StringNotNull", new DatabaseTypeRequest(typeof(string), 100), false), + new("StringAllowNull", new DatabaseTypeRequest(typeof(string), 100), true), + new("StringPk", new DatabaseTypeRequest(typeof(string), 50), true) { IsPrimaryKey = true } + }); - [Test] - public void TypeAlteringlResizing() - { - var token = new GracefulCancellationToken(); - DiscoveredDatabase db = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer); - var toConsole = new ThrowImmediatelyDataLoadEventListener(); - var toMemory = new ToMemoryDataLoadEventListener(true); + using var con = server.GetConnection(); + con.Open(); - DataTableUploadDestination destination = new DataTableUploadDestination(); - destination.PreInitialize(db, toConsole); - destination.AllowResizingColumnsAtUploadTime = true; + //should not allow nulls before + Assert.AreEqual(false, table.DiscoverColumn("StringNotNull").AllowNulls); + //do resize + table.DiscoverColumn("StringNotNull").DataType.Resize(500); - DataTable dt1 = new DataTable(); - dt1.Columns.Add("mynum", typeof(string)); - dt1.Rows.Add(new[] { "true" }); - dt1.TableName = "DataTableUploadDestinationTests"; + //rediscover it to get the new state in database (it should now be 500 and still shouldn't allow nulls) + AssertIsStringWithLength(table.DiscoverColumn("StringNotNull"), 500); - DataTable dt2 = new DataTable(); - dt2.Columns.Add("mynum", typeof(string)); - dt2.Rows.Add(new[] { "999" }); - dt2.TableName = "DataTableUploadDestinationTests"; - destination.ProcessPipelineData(dt1, toConsole, token); - destination.ProcessPipelineData(dt2, toMemory, token); + Assert.AreEqual(false, table.DiscoverColumn("StringNotNull").AllowNulls); - Assert.IsTrue(toMemory.EventsReceivedBySender[destination].Any(msg => msg.Message.Contains("Resizing column 'mynum' from 'bit' to 'int'"))); + //do the same with the one that allows nulls + Assert.AreEqual(true, table.DiscoverColumn("StringAllowNull").AllowNulls); + table.DiscoverColumn("StringAllowNull").DataType.Resize(101); + table.DiscoverColumn("StringAllowNull").DataType.Resize(103); + table.DiscoverColumn("StringAllowNull").DataType.Resize(105); - destination.Dispose(toConsole, null); - Assert.IsTrue(db.ExpectTable("DataTableUploadDestinationTests").Exists()); - Assert.AreEqual(2, 
db.ExpectTable("DataTableUploadDestinationTests").GetRowCount()); - Assert.AreEqual("int", db.ExpectTable("DataTableUploadDestinationTests").DiscoverColumn("mynum").DataType.SQLType); - } + AssertIsStringWithLength(table.DiscoverColumn("StringAllowNull"), 105); + Assert.AreEqual(true, table.DiscoverColumn("StringAllowNull").AllowNulls); - [Test] - public void MySqlTest_Simple() - { - var token = new GracefulCancellationToken(); - - var db = GetCleanedServer(DatabaseType.MySql); + //we should have correct understanding prior to resize + AssertIsStringWithLength(table.DiscoverColumn("StringPk"), 50); + Assert.AreEqual(true, table.DiscoverColumn("StringPk").IsPrimaryKey); + Assert.AreEqual(false, table.DiscoverColumn("StringPk").AllowNulls); - var toConsole = new ThrowImmediatelyDataLoadEventListener(); + //now we execute the resize + table.DiscoverColumn("StringPk").DataType.Resize(500); - DataTableUploadDestination destination = new DataTableUploadDestination(); - destination.PreInitialize(db, toConsole); - destination.AllowResizingColumnsAtUploadTime = true; - - DataTable dt = new DataTable(); - dt.Columns.Add("mystringcol", typeof(string)); - dt.Columns.Add("mynum", typeof(string)); - dt.Columns.Add("mydate", typeof (string)); - dt.Columns.Add("myLegitDateTime", typeof(DateTime)); - dt.Columns.Add("mynullcol", typeof(string)); + AssertIsStringWithLength(table.DiscoverColumn("StringPk"), 500); + Assert.AreEqual(true, table.DiscoverColumn("StringPk").IsPrimaryKey); + Assert.AreEqual(false, table.DiscoverColumn("StringPk").AllowNulls); - //drop the millisecond part - var now = DateTime.Now; - now = new DateTime(now.Year,now.Month,now.Day,now.Hour,now.Minute,now.Second); - - dt.Rows.Add(new object[] { "Anhoy there \"mates\"", "999", "2001-01-01", now,null}); - dt.TableName = "DataTableUploadDestinationTests"; + con.Close(); + } - destination.ProcessPipelineData(dt, toConsole, token); + private static void AssertIsStringWithLength(DiscoveredColumn col, int expectedLength) + { + switch (col.Table.Database.Server.DatabaseType) + { + case DatabaseType.MicrosoftSQLServer: + case DatabaseType.MySql: + Assert.AreEqual($"varchar({expectedLength})", col.DataType.SQLType); + break; + case DatabaseType.Oracle: + Assert.AreEqual($"varchar2({expectedLength})", col.DataType.SQLType); + break; + case DatabaseType.PostgreSql: + Assert.AreEqual($"character varying({expectedLength})", col.DataType.SQLType); + break; + default: + throw new ArgumentOutOfRangeException(nameof(col.Table.Database.Server.DatabaseType), + col.Table.Database.Server.DatabaseType, null); + } + } - destination.Dispose(toConsole, null); - var tbl = db.ExpectTable("DataTableUploadDestinationTests"); - Assert.IsTrue(tbl.Exists()); - Assert.AreEqual(1, tbl.GetRowCount()); - Assert.AreEqual("int", tbl.DiscoverColumn("mynum").DataType.SQLType); + [TestCaseSource(typeof(All), nameof(All.DatabaseTypes))] + public void TestResizing_WithDetection(DatabaseType dbType) + { + var db = GetCleanedServer(dbType); - using (var con = db.Server.GetConnection()) + var table = db.CreateTable("TestResizing", + new DatabaseColumnRequest[] { - con.Open(); - var r = db.Server.GetCommand(tbl.GetTopXSql(10), con).ExecuteReader(); - - Assert.IsTrue(r.Read()); - Assert.AreEqual("Anhoy there \"mates\"", (string)r["mystringcol"]); - Assert.AreEqual(999,(int)r["mynum"]); - Assert.AreEqual(new DateTime(2001,1,1),(DateTime)r["mydate"]); - Assert.AreEqual(now, (DateTime)r["myLegitDateTime"]); - Assert.AreEqual(DBNull.Value, r["mynullcol"]); - } + new("MyInteger", new 
DatabaseTypeRequest(typeof(int))), + new("MyMaxString", new DatabaseTypeRequest(typeof(string), int.MaxValue)), + new("Description", new DatabaseTypeRequest(typeof(string), int.MaxValue)), + new("StringNotNull", new DatabaseTypeRequest(typeof(string), 10), false), + new("StringAllowNull", new DatabaseTypeRequest(typeof(string), 100), true), + new("StringPk", new DatabaseTypeRequest(typeof(string), 50), true) { IsPrimaryKey = true } + }); - db.Drop(); - } + Assert.AreEqual(10, table.DiscoverColumn("StringNotNull").DataType.GetLengthIfString()); + + var dt = new DataTable("TestResizing"); + dt.Columns.Add("MyInteger"); + dt.Columns.Add("MyMaxString"); + dt.Columns.Add("Description"); + dt.Columns.Add("StringNotNull"); + dt.Columns.Add("StringAllowNull"); + dt.Columns.Add("StringPk"); + + dt.Rows.Add("1", //MyInteger + "fff", //MyMaxString + "fff2", //Description + "1234567891011", //StringNotNull - too long for the column, so it should resize + DBNull.Value, //StringAllowNull + "f" //StringPk + ); + + var dt2 = dt.Clone(); + dt2.Rows.Clear(); + dt2.Rows.Add("1", //MyInteger + "fff", //MyMaxString + "fff2", //Description + "12345678910112", //StringNotNull - too long for the column, so it should resize + DBNull.Value, //StringAllowNull + "f2" //StringPk + ); + + var dest = new DataTableUploadDestination + { + AllowResizingColumnsAtUploadTime = true + }; + dest.PreInitialize(db, ThrowImmediatelyDataLoadEventListener.Quiet); - [Test] - public void MySqlTest_Resize() + dest.ProcessPipelineData(dt, ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); + dest.ProcessPipelineData(dt2, ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); + dest.Dispose(ThrowImmediatelyDataLoadEventListener.Quiet, null); + + //it should have resized us. 
+ Assert.AreEqual(14, table.DiscoverColumn("StringNotNull").DataType.GetLengthIfString()); + } + + [TestCase(DatabaseType.MicrosoftSQLServer, "didn’t")] + [TestCase(DatabaseType.MySql, "didn’t")] + [TestCase(DatabaseType.Oracle, "didn’t")] + [TestCase(DatabaseType.MicrosoftSQLServer, "didn't")] + [TestCase(DatabaseType.MySql, "didn't")] + [TestCase(DatabaseType.Oracle, "didn't")] + public void Test_SingleQuote_InText(DatabaseType dbType, string testValue) + { + var db = GetCleanedServer(dbType); + + var dt = new DataTable("TestFreeText"); + dt.Columns.Add("MyFreeText"); + dt.Rows.Add(testValue); + + var dest = new DataTableUploadDestination { - var token = new GracefulCancellationToken(); + AllowResizingColumnsAtUploadTime = true + }; + dest.PreInitialize(db, ThrowImmediatelyDataLoadEventListener.Quiet); + dest.ProcessPipelineData(dt, ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); + dest.Dispose(ThrowImmediatelyDataLoadEventListener.Quiet, null); + + var tbl = db.ExpectTable("TestFreeText"); + Assert.IsTrue(tbl.Exists()); + Assert.AreEqual(1, tbl.GetRowCount()); + } - var db = GetCleanedServer(DatabaseType.MySql); + [Test] + public void DodgyTypes() + { + var token = new GracefulCancellationToken(); + var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); + var toConsole = ThrowImmediatelyDataLoadEventListener.Quiet; - var toConsole = new ThrowImmediatelyDataLoadEventListener(); - var toMemory = new ToMemoryDataLoadEventListener(true); + var destination = new DataTableUploadDestination(); + destination.PreInitialize(db, toConsole); + destination.AllowResizingColumnsAtUploadTime = true; - DataTableUploadDestination destination = new DataTableUploadDestination(); - destination.PreInitialize(db, toConsole); - destination.AllowResizingColumnsAtUploadTime = true; + var dt1 = new DataTable(); + dt1.Columns.Add("col1", typeof(double)); + dt1.Columns.Add("col2", typeof(double)); + dt1.Columns.Add("col3", typeof(bool)); + dt1.Columns.Add("col4", typeof(byte)); + dt1.Columns.Add("col5", typeof(byte[])); - DataTable dt1 = new DataTable(); - dt1.Columns.Add("mynum", typeof(string)); - dt1.Rows.Add(new[] { "true" }); - dt1.TableName = "DataTableUploadDestinationTests"; + dt1.Rows.Add(new object[] { 0.425, 0.451, true, (byte)2, new byte[] { 0x5, 0xA } }); - DataTable dt2 = new DataTable(); - dt2.Columns.Add("mynum", typeof(string)); - dt2.Rows.Add(new[] { "999" }); - dt2.TableName = "DataTableUploadDestinationTests"; - destination.ProcessPipelineData(dt1, toConsole, token); - destination.ProcessPipelineData(dt2, toMemory, token); + dt1.TableName = "DataTableUploadDestinationTests"; - destination.Dispose(toConsole, null); - var tbl = db.ExpectTable("DataTableUploadDestinationTests"); - Assert.IsTrue(tbl.Exists()); - Assert.AreEqual(2, tbl.GetRowCount()); - Assert.AreEqual("int", tbl.DiscoverColumn("mynum").DataType.SQLType); + destination.ProcessPipelineData(dt1, toConsole, token); + destination.Dispose(toConsole, null); - using (var con = db.Server.GetConnection()) - { - con.Open(); - var r = db.Server.GetCommand(tbl.GetTopXSql(10), con).ExecuteReader(); - - //technically these can come out in a random order - List numbersRead = new List(); - Assert.IsTrue(r.Read()); - numbersRead.Add((int) r["mynum"]); - Assert.IsTrue(r.Read()); - numbersRead.Add((int)r["mynum"]); - - Assert.IsFalse(r.Read()); - Assert.IsTrue(numbersRead.Contains(1)); - Assert.IsTrue(numbersRead.Contains(999)); - } - } + //table should exist + 
Assert.IsTrue(db.ExpectTable("DataTableUploadDestinationTests").Exists()); - [TestCase(false)] - [TestCase(true)] - public void TestDestinationAlreadyExistingIsOk(bool targetTableIsEmpty) - { - //create a table in the scratch database with a single column Name - var db = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer); - var tbl = db.CreateTable("TestDestinationAlreadyExistingIsOk",new[]{new DatabaseColumnRequest("Name","varchar(10)",false)}); - try - { - if(!targetTableIsEmpty) - { - //upload a single row - var dtAlreadyThereData = new DataTable(); - dtAlreadyThereData.Columns.Add("Name"); - dtAlreadyThereData.Rows.Add(new[] {"Bob"}); + //should have 2 rows + Assert.AreEqual(1, db.ExpectTable("DataTableUploadDestinationTests").GetRowCount()); - using(var bulk = tbl.BeginBulkInsert()) - bulk.Upload(dtAlreadyThereData); - } - - //create the destination component (what we want to test) - var destinationComponent = new DataTableUploadDestination(); - destinationComponent.AllowResizingColumnsAtUploadTime = true; - destinationComponent.AllowLoadingPopulatedTables = true; - - //create the simulated chunk that will be dispatched - var dt = new DataTable("TestDestinationAlreadyExistingIsOk"); - dt.Columns.Add("Name"); - dt.Rows.Add(new[] {"Bob"}); - dt.Rows.Add(new[] { "Frank" }); - dt.Rows.Add(new[] { "I've got a lovely bunch of coconuts" }); - - var listener = new ThrowImmediatelyDataLoadEventListener(); - - //pre initialzie with the database (which must be part of any pipeline use case involving a DataTableUploadDestination) - destinationComponent.PreInitialize(db ,listener); - - //tell the destination component to process the data - destinationComponent.ProcessPipelineData(dt, listener,new GracefulCancellationToken()); - - destinationComponent.Dispose(listener,null); - Assert.AreEqual(targetTableIsEmpty?3:4, tbl.GetRowCount()); - } - finally - { - tbl.Drop(); - } - } + //should be decimal + Assert.AreEqual("decimal(3,3)", + db.ExpectTable("DataTableUploadDestinationTests").DiscoverColumn("col1").DataType.SQLType); + Assert.AreEqual("decimal(3,3)", + db.ExpectTable("DataTableUploadDestinationTests").DiscoverColumn("col2").DataType.SQLType); + Assert.AreEqual("bit", + db.ExpectTable("DataTableUploadDestinationTests").DiscoverColumn("col3").DataType.SQLType); + Assert.AreEqual("tinyint", + db.ExpectTable("DataTableUploadDestinationTests").DiscoverColumn("col4").DataType.SQLType); - [TestCase(DatabaseType.MySql)] - [TestCase(DatabaseType.MicrosoftSQLServer)] - public void DataTableUploadDestinationTests_PrimaryKeyDataTableWithAlterSizeLater(DatabaseType dbtype) - { - var db = GetCleanedServer(dbtype); + Assert.AreEqual("varbinary(max)", + db.ExpectTable("DataTableUploadDestinationTests").DiscoverColumn("col5").DataType.SQLType); + } - var destination = new DataTableUploadDestination(); - - destination.AllowResizingColumnsAtUploadTime = true; - destination.PreInitialize(db,new ThrowImmediatelyDataLoadEventListener()); - DataTable dt1 = new DataTable(); - dt1.TableName = "MyTable"; - dt1.Columns.Add("Name"); - dt1.Rows.Add("Fish"); + [Test] + public void TypeAlteringlResizing() + { + var token = new GracefulCancellationToken(); + var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); + var toConsole = ThrowImmediatelyDataLoadEventListener.Quiet; + var toMemory = new ToMemoryDataLoadEventListener(true); + + var destination = new DataTableUploadDestination(); + destination.PreInitialize(db, toConsole); + destination.AllowResizingColumnsAtUploadTime = true; + + var dt1 = new DataTable(); + 
dt1.Columns.Add("mynum", typeof(string)); + dt1.Rows.Add(new[] { "true" }); + dt1.TableName = "DataTableUploadDestinationTests"; + + var dt2 = new DataTable(); + dt2.Columns.Add("mynum", typeof(string)); + dt2.Rows.Add(new[] { "999" }); + dt2.TableName = "DataTableUploadDestinationTests"; + + destination.ProcessPipelineData(dt1, toConsole, token); + destination.ProcessPipelineData(dt2, toMemory, token); + + Assert.IsTrue(toMemory.EventsReceivedBySender[destination] + .Any(msg => msg.Message.Contains("Resizing column 'mynum' from 'bit' to 'int'"))); + + destination.Dispose(toConsole, null); + Assert.IsTrue(db.ExpectTable("DataTableUploadDestinationTests").Exists()); + Assert.AreEqual(2, db.ExpectTable("DataTableUploadDestinationTests").GetRowCount()); + Assert.AreEqual("int", + db.ExpectTable("DataTableUploadDestinationTests").DiscoverColumn("mynum").DataType.SQLType); + } - dt1.PrimaryKey = dt1.Columns.Cast().ToArray(); + [Test] + public void MySqlTest_Simple() + { + var token = new GracefulCancellationToken(); - destination.ProcessPipelineData(dt1, new ThrowImmediatelyDataLoadEventListener(),new GracefulCancellationToken()); - - DataTable dt2 = new DataTable(); - dt2.TableName = "MyTable"; - dt2.Columns.Add("Name"); - dt2.Rows.Add("Fish Monkey Fish Fish"); //notice that this is longer so the column must be resized + var db = GetCleanedServer(DatabaseType.MySql); - dt2.PrimaryKey = dt2.Columns.Cast().ToArray(); + var toConsole = ThrowImmediatelyDataLoadEventListener.Quiet; - destination.ProcessPipelineData(dt2, new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()); + var destination = new DataTableUploadDestination(); + destination.PreInitialize(db, toConsole); + destination.AllowResizingColumnsAtUploadTime = true; + var dt = new DataTable(); + dt.Columns.Add("mystringcol", typeof(string)); + dt.Columns.Add("mynum", typeof(string)); + dt.Columns.Add("mydate", typeof(string)); + dt.Columns.Add("myLegitDateTime", typeof(DateTime)); + dt.Columns.Add("mynullcol", typeof(string)); - destination.Dispose(new ThrowImmediatelyDataLoadEventListener(),null); - var tbl = db.ExpectTable("MyTable"); - - Assert.AreEqual(2,tbl.GetRowCount()); - Assert.IsTrue(tbl.DiscoverColumns().Single().IsPrimaryKey); + //drop the millisecond part + var now = DateTime.Now; + now = new DateTime(now.Year, now.Month, now.Day, now.Hour, now.Minute, now.Second); + dt.Rows.Add(new object[] { "Anhoy there \"mates\"", "999", "2001-01-01", now, null }); + dt.TableName = "DataTableUploadDestinationTests"; - } + destination.ProcessPipelineData(dt, toConsole, token); + + destination.Dispose(toConsole, null); + var tbl = db.ExpectTable("DataTableUploadDestinationTests"); + Assert.IsTrue(tbl.Exists()); + Assert.AreEqual(1, tbl.GetRowCount()); + Assert.AreEqual("int", tbl.DiscoverColumn("mynum").DataType.SQLType); - [Test] - public void TestDestinationAlreadyExisting_ColumnSubset() + using (var con = db.Server.GetConnection()) { - //create a table in the scratch database with a single column Name - var db = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer); - var tbl = db.CreateTable("TestDestinationAlreadyExisting_ColumnSubset", new[] - { - new DatabaseColumnRequest("Name", "varchar(10)", false), - new DatabaseColumnRequest("Age","int"), - new DatabaseColumnRequest("Address","varchar(1000)") + con.Open(); + var r = db.Server.GetCommand(tbl.GetTopXSql(10), con).ExecuteReader(); + + Assert.IsTrue(r.Read()); + Assert.AreEqual("Anhoy there \"mates\"", (string)r["mystringcol"]); + Assert.AreEqual(999, 
(int)r["mynum"]); + Assert.AreEqual(new DateTime(2001, 1, 1), (DateTime)r["mydate"]); + Assert.AreEqual(now, (DateTime)r["myLegitDateTime"]); + Assert.AreEqual(DBNull.Value, r["mynullcol"]); + } - }); + db.Drop(); + } - try + [Test] + public void MySqlTest_Resize() + { + var token = new GracefulCancellationToken(); + + var db = GetCleanedServer(DatabaseType.MySql); + + var toConsole = ThrowImmediatelyDataLoadEventListener.Quiet; + var toMemory = new ToMemoryDataLoadEventListener(true); + + var destination = new DataTableUploadDestination(); + destination.PreInitialize(db, toConsole); + destination.AllowResizingColumnsAtUploadTime = true; + + var dt1 = new DataTable(); + dt1.Columns.Add("mynum", typeof(string)); + dt1.Rows.Add(new[] { "true" }); + dt1.TableName = "DataTableUploadDestinationTests"; + + var dt2 = new DataTable(); + dt2.Columns.Add("mynum", typeof(string)); + dt2.Rows.Add(new[] { "999" }); + dt2.TableName = "DataTableUploadDestinationTests"; + + destination.ProcessPipelineData(dt1, toConsole, token); + destination.ProcessPipelineData(dt2, toMemory, token); + + destination.Dispose(toConsole, null); + var tbl = db.ExpectTable("DataTableUploadDestinationTests"); + Assert.IsTrue(tbl.Exists()); + Assert.AreEqual(2, tbl.GetRowCount()); + Assert.AreEqual("int", tbl.DiscoverColumn("mynum").DataType.SQLType); + + using var con = db.Server.GetConnection(); + con.Open(); + var r = db.Server.GetCommand(tbl.GetTopXSql(10), con).ExecuteReader(); + + //technically these can come out in a random order + var numbersRead = new List(); + Assert.IsTrue(r.Read()); + numbersRead.Add((int)r["mynum"]); + Assert.IsTrue(r.Read()); + numbersRead.Add((int)r["mynum"]); + + Assert.IsFalse(r.Read()); + Assert.IsTrue(numbersRead.Contains(1)); + Assert.IsTrue(numbersRead.Contains(999)); + } + + [TestCase(false)] + [TestCase(true)] + public void TestDestinationAlreadyExistingIsOk(bool targetTableIsEmpty) + { + //create a table in the scratch database with a single column Name + var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); + var tbl = db.CreateTable("TestDestinationAlreadyExistingIsOk", + new[] { new DatabaseColumnRequest("Name", "varchar(10)", false) }); + try + { + if (!targetTableIsEmpty) { - - //upload a single row of already existing data + //upload a single row var dtAlreadyThereData = new DataTable(); dtAlreadyThereData.Columns.Add("Name"); - dtAlreadyThereData.Columns.Add("Age"); - dtAlreadyThereData.Rows.Add(new object[] { "Bob",5}); - - using (var bulk = tbl.BeginBulkInsert()) - bulk.Upload(dtAlreadyThereData); - - //create the destination component (what we want to test) - var destinationComponent = new DataTableUploadDestination(); - destinationComponent.AllowResizingColumnsAtUploadTime = true; - destinationComponent.AllowLoadingPopulatedTables = true; - - //create the simulated chunk that will be dispatched - var dt = new DataTable("TestDestinationAlreadyExisting_ColumnSubset"); - dt.Columns.Add("Name"); - dt.Rows.Add(new[] { "Bob" }); - dt.Rows.Add(new[] { "Frank" }); - dt.Rows.Add(new[] { "I've got a lovely bunch of coconuts" }); - - var listener = new ThrowImmediatelyDataLoadEventListener(); - - //pre initialzie with the database (which must be part of any pipeline use case involving a DataTableUploadDestination) - destinationComponent.PreInitialize(db, listener); - - //tell the destination component to process the data - destinationComponent.ProcessPipelineData(dt, listener, new GracefulCancellationToken()); - - destinationComponent.Dispose(listener, null); - Assert.AreEqual(4, 
tbl.GetRowCount()); - } - finally - { - tbl.Drop(); + dtAlreadyThereData.Rows.Add(new[] { "Bob" }); + + using var bulk = tbl.BeginBulkInsert(); + bulk.Upload(dtAlreadyThereData); } - } - [TestCaseSource(typeof(All),nameof(All.DatabaseTypes))] - public void Test_DataTableUploadDestination_ScientificNotation(DatabaseType dbType) - { - var db = GetCleanedServer(dbType); + //create the destination component (what we want to test) + var destinationComponent = new DataTableUploadDestination + { + AllowResizingColumnsAtUploadTime = true, + AllowLoadingPopulatedTables = true + }; - DataTable dt = new DataTable("ff"); - dt.Columns.Add("mycol"); - dt.Rows.Add("-4.10235746055587E-05"); //this string is untyped + //create the simulated chunk that will be dispatched + var dt = new DataTable("TestDestinationAlreadyExistingIsOk"); + dt.Columns.Add("Name"); + dt.Rows.Add(new[] { "Bob" }); + dt.Rows.Add(new[] { "Frank" }); + dt.Rows.Add(new[] { "I've got a lovely bunch of coconuts" }); - var dest = new DataTableUploadDestination(); - dest.PreInitialize(db,new ThrowImmediatelyDataLoadEventListener()); + var listener = ThrowImmediatelyDataLoadEventListener.Quiet; - try - { - dest.ProcessPipelineData(dt,new ThrowImmediatelyDataLoadEventListener(),new GracefulCancellationToken()); - dest.Dispose(new ThrowImmediatelyDataLoadEventListener(), null); - } - catch(Exception ex) - { - dest.Dispose(new ThrowImmediatelyDataLoadEventListener(),ex); - throw; - } - - //in the database it should be typed - Assert.AreEqual(typeof(Decimal),db.ExpectTable("ff").DiscoverColumn("mycol").DataType.GetCSharpDataType()); + //pre initialzie with the database (which must be part of any pipeline use case involving a DataTableUploadDestination) + destinationComponent.PreInitialize(db, listener); - var dt2 = db.ExpectTable("ff").GetDataTable(); - - Assert.IsTrue((decimal)dt2.Rows[0][0] == (decimal)-0.0000410235746055587); + //tell the destination component to process the data + destinationComponent.ProcessPipelineData(dt, listener, new GracefulCancellationToken()); + destinationComponent.Dispose(listener, null); + Assert.AreEqual(targetTableIsEmpty ? 
3 : 4, tbl.GetRowCount()); + } + finally + { + tbl.Drop(); } + } + + [TestCase(DatabaseType.MySql)] + [TestCase(DatabaseType.MicrosoftSQLServer)] + public void DataTableUploadDestinationTests_PrimaryKeyDataTableWithAlterSizeLater(DatabaseType dbtype) + { + var db = GetCleanedServer(dbtype); - private class AdjustColumnDelegater : IDatabaseColumnRequestAdjuster + var destination = new DataTableUploadDestination { - public static Action> AdjusterDelegate; + AllowResizingColumnsAtUploadTime = true + }; - public void AdjustColumns(List columns) - { - AdjusterDelegate(columns); - } - } + destination.PreInitialize(db, ThrowImmediatelyDataLoadEventListener.Quiet); - /// - /// T and F are NOT NORMALLY True and False, this test confirms that we can force T and F to go in - /// as boolean instead - /// - /// - [TestCaseSource(typeof(All),nameof(All.DatabaseTypes))] - public void Test_DataTableUploadDestination_ForceBool(DatabaseType dbType) + var dt1 = new DataTable { - var db = GetCleanedServer(dbType); + TableName = "MyTable" + }; + dt1.Columns.Add("Name"); + dt1.Rows.Add("Fish"); - DataTable dt = new DataTable("ForceStringTable"); - dt.Columns.Add("hb_extract"); - dt.Columns.Add("Name"); + dt1.PrimaryKey = dt1.Columns.Cast().ToArray(); - dt.Rows.Add("T", "Abc"); - dt.Rows.Add("F", "Def"); + destination.ProcessPipelineData(dt1, ThrowImmediatelyDataLoadEventListener.Quiet, + new GracefulCancellationToken()); - var dest = new DataTableUploadDestination(); - dest.PreInitialize(db, new ThrowImmediatelyDataLoadEventListener()); - dest.Adjuster = typeof(AdjustColumnDelegater); + var dt2 = new DataTable + { + TableName = "MyTable" + }; + dt2.Columns.Add("Name"); + dt2.Rows.Add("Fish Monkey Fish Fish"); //notice that this is longer so the column must be resized - AdjustColumnDelegater.AdjusterDelegate = (s) => - { - var col = s.Single(c => c.ColumnName.Equals("hb_extract")); + dt2.PrimaryKey = dt2.Columns.Cast().ToArray(); - //Guesser would normally make it a string - Assert.AreEqual(typeof(string), col.TypeRequested.CSharpType); + destination.ProcessPipelineData(dt2, ThrowImmediatelyDataLoadEventListener.Quiet, + new GracefulCancellationToken()); - //we demand a boolean interpretation instead! 
- col.TypeRequested.CSharpType = typeof(bool); - }; - try + destination.Dispose(ThrowImmediatelyDataLoadEventListener.Quiet, null); + + var tbl = db.ExpectTable("MyTable"); + + Assert.AreEqual(2, tbl.GetRowCount()); + Assert.IsTrue(tbl.DiscoverColumns().Single().IsPrimaryKey); + } + + [Test] + public void TestDestinationAlreadyExisting_ColumnSubset() + { + //create a table in the scratch database with a single column Name + var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); + var tbl = db.CreateTable("TestDestinationAlreadyExisting_ColumnSubset", new[] + { + new DatabaseColumnRequest("Name", "varchar(10)", false), + new DatabaseColumnRequest("Age", "int"), + new DatabaseColumnRequest("Address", "varchar(1000)") + }); + + try + { + //upload a single row of already existing data + var dtAlreadyThereData = new DataTable(); + dtAlreadyThereData.Columns.Add("Name"); + dtAlreadyThereData.Columns.Add("Age"); + dtAlreadyThereData.Rows.Add(new object[] { "Bob", 5 }); + + using (var bulk = tbl.BeginBulkInsert()) { - dest.ProcessPipelineData(dt, new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()); - dest.Dispose(new ThrowImmediatelyDataLoadEventListener(), null); + bulk.Upload(dtAlreadyThereData); } - catch (Exception ex) + + //create the destination component (what we want to test) + var destinationComponent = new DataTableUploadDestination { - dest.Dispose(new ThrowImmediatelyDataLoadEventListener(), ex); - throw; - } + AllowResizingColumnsAtUploadTime = true, + AllowLoadingPopulatedTables = true + }; - var tbl = db.ExpectTable("ForceStringTable"); + //create the simulated chunk that will be dispatched + var dt = new DataTable("TestDestinationAlreadyExisting_ColumnSubset"); + dt.Columns.Add("Name"); + dt.Rows.Add(new[] { "Bob" }); + dt.Rows.Add(new[] { "Frank" }); + dt.Rows.Add(new[] { "I've got a lovely bunch of coconuts" }); - if (dbType == DatabaseType.Oracle) - { - //in the database it should be typed as string - Assert.AreEqual(typeof(string), tbl.DiscoverColumn("hb_extract").DataType.GetCSharpDataType()); + var listener = ThrowImmediatelyDataLoadEventListener.Quiet; - var dt2 = tbl.GetDataTable(); - Assert.Contains("T",dt2.Rows.Cast().Select(r=>r[0]).ToArray()); - Assert.Contains("F",dt2.Rows.Cast().Select(r =>r[0]).ToArray()); - } - else - { - //in the database it should be typed as bool - Assert.AreEqual(typeof(bool), tbl.DiscoverColumn("hb_extract").DataType.GetCSharpDataType()); + //pre initialzie with the database (which must be part of any pipeline use case involving a DataTableUploadDestination) + destinationComponent.PreInitialize(db, listener); - var dt2 = tbl.GetDataTable(); - Assert.Contains(true,dt2.Rows.Cast().Select(r=>r[0]).ToArray()); - Assert.Contains(false,dt2.Rows.Cast().Select(r =>r[0]).ToArray()); - } - } + //tell the destination component to process the data + destinationComponent.ProcessPipelineData(dt, listener, new GracefulCancellationToken()); - #region Two Batch Tests - [TestCase(DatabaseType.MySql, true)] - [TestCase(DatabaseType.MySql, false)] - [TestCase(DatabaseType.MicrosoftSQLServer, true)] - [TestCase(DatabaseType.MicrosoftSQLServer, false)] - public void TwoBatch_BooleanResizingTest(DatabaseType dbType, bool giveNullValuesOnly) + destinationComponent.Dispose(listener, null); + Assert.AreEqual(4, tbl.GetRowCount()); + } + finally { - var token = new GracefulCancellationToken(); - DiscoveredDatabase db = GetCleanedServer(dbType); - var toConsole = new ThrowImmediatelyDataLoadEventListener(); - var toMemory = new 
ToMemoryDataLoadEventListener(true); + tbl.Drop(); + } + } - DataTableUploadDestination destination = new DataTableUploadDestination(); - destination.PreInitialize(db, toConsole); - destination.AllowResizingColumnsAtUploadTime = true; + [TestCaseSource(typeof(All), nameof(All.DatabaseTypes))] + public void Test_DataTableUploadDestination_ScientificNotation(DatabaseType dbType) + { + var db = GetCleanedServer(dbType); - DataTable dt1 = new DataTable(); - dt1.Columns.Add("TestedCol", typeof(string)); - dt1.Columns.Add("OtherCol", typeof(string)); - dt1.Rows.Add(new[] { giveNullValuesOnly ? null : "true", "1.51" }); + var dt = new DataTable("ff"); + dt.Columns.Add("mycol"); + dt.Rows.Add("-4.10235746055587E-05"); //this string is untyped - dt1.TableName = "DataTableUploadDestinationTests"; + var dest = new DataTableUploadDestination(); + dest.PreInitialize(db, ThrowImmediatelyDataLoadEventListener.Quiet); - DataTable dt2 = new DataTable(); - dt2.Columns.Add("TestedCol", typeof(string)); - dt2.Columns.Add("OtherCol", typeof(string)); + try + { + dest.ProcessPipelineData(dt, ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); + dest.Dispose(ThrowImmediatelyDataLoadEventListener.Quiet, null); + } + catch (Exception ex) + { + dest.Dispose(ThrowImmediatelyDataLoadEventListener.Quiet, ex); + throw; + } - dt2.Rows.Add(new[] { "2001-01-01", "999.99" }); + //in the database it should be typed + Assert.AreEqual(typeof(decimal), db.ExpectTable("ff").DiscoverColumn("mycol").DataType.GetCSharpDataType()); - dt2.TableName = "DataTableUploadDestinationTests"; + var dt2 = db.ExpectTable("ff").GetDataTable(); - destination.ProcessPipelineData(dt1, toConsole, token); + Assert.IsTrue((decimal)dt2.Rows[0][0] == (decimal)-0.0000410235746055587); + } - Assert.AreEqual("bit", db.ExpectTable("DataTableUploadDestinationTests").DiscoverColumn("TestedCol").DataType.SQLType); + private class AdjustColumnDelegater : IDatabaseColumnRequestAdjuster + { + public static Action<List<DatabaseColumnRequest>> AdjusterDelegate; - destination.ProcessPipelineData(dt2, toMemory, token); + public void AdjustColumns(List<DatabaseColumnRequest> columns) + { + AdjusterDelegate(columns); + } + } - Assert.IsTrue(toMemory.EventsReceivedBySender[destination].Any(msg => msg.Message.Contains("Resizing column "))); + /// <summary> + /// T and F are NOT NORMALLY True and False, this test confirms that we can force T and F to go in + /// as boolean instead + /// </summary> + /// <param name="dbType"></param> + [TestCaseSource(typeof(All), nameof(All.DatabaseTypes))] + public void Test_DataTableUploadDestination_ForceBool(DatabaseType dbType) + { + var db = GetCleanedServer(dbType); - destination.Dispose(toConsole, null); - Assert.IsTrue(db.ExpectTable("DataTableUploadDestinationTests").Exists()); - Assert.AreEqual(2, db.ExpectTable("DataTableUploadDestinationTests").GetRowCount()); + using var dt = new DataTable("ForceStringTable"); + dt.Columns.Add("hb_extract"); + dt.Columns.Add("Name"); - var tt = db.Server.GetQuerySyntaxHelper().TypeTranslater; + dt.Rows.Add("T", "Abc"); + dt.Rows.Add("F", "Def"); - Assert.AreEqual( + var dest = new DataTableUploadDestination(); + dest.PreInitialize(db, ThrowImmediatelyDataLoadEventListener.Quiet); + dest.Adjuster = typeof(AdjustColumnDelegater); + + AdjustColumnDelegater.AdjusterDelegate = s => + { + var col = s.Single(c => c.ColumnName.Equals("hb_extract")); - //if all we got are nulls we should have a DateTime otherwise we had 1/true so the only usable data type is string - giveNullValuesOnly ? 
typeof(DateTime) : typeof(string), + //Guesser would normally make it a string + Assert.AreEqual(typeof(string), col.TypeRequested.CSharpType); - tt.GetCSharpTypeForSQLDBType(db.ExpectTable("DataTableUploadDestinationTests").DiscoverColumn("TestedCol").DataType.SQLType)); + //we demand a boolean interpretation instead! + col.TypeRequested.CSharpType = typeof(bool); + }; + + try + { + dest.ProcessPipelineData(dt, ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); + dest.Dispose(ThrowImmediatelyDataLoadEventListener.Quiet, null); } + catch (Exception ex) + { + dest.Dispose(ThrowImmediatelyDataLoadEventListener.Quiet, ex); + throw; + } + + var tbl = db.ExpectTable("ForceStringTable"); - /// - /// Tests the systems ability to change live table datatypes during bulk insert to accomodate novel data - /// - /// This test set passes v1 in the first batch which determines the initial Type of the database table. Then v2 is passed in the next batch - /// which will (in most cases) require an ALTER of the live table to accomodate the wider datatype. - /// - /// The DBMS to test - /// The row value to send in batch 1 - /// The row value to send in batch 2 (after table creation) - /// The Type you expect to be used to store the v1 - /// The Type you expect after ALTER to support all values seen SetUp till now (i.e. v1) AND v2 - [TestCase(DatabaseType.MySql,null,"235", typeof(bool),typeof(int))] - [TestCase(DatabaseType.MySql, "123", "2001-01-01 12:00:00" ,typeof(int), typeof(string))] //123 cannot be converted to date so it becomes string - [TestCase(DatabaseType.MySql, "2001-01-01", "2001-01-01 12:00:00" , typeof(DateTime), typeof(DateTime) )] - [TestCase(DatabaseType.MySql, "2001-01-01", "omg", typeof(DateTime), typeof(string))] - - [TestCase(DatabaseType.MicrosoftSQLServer, null, "235", typeof(bool), typeof(int))] - [TestCase(DatabaseType.MicrosoftSQLServer, "123", "2001-01-01 12:00:00", typeof(int), typeof(string))] //123 cannot be converted to date so it becomes string - [TestCase(DatabaseType.MicrosoftSQLServer, "2001-01-01", "2001-01-01 12:00:00", typeof(DateTime), typeof(DateTime))] - [TestCase(DatabaseType.MicrosoftSQLServer, "2001-01-01", "omg", typeof(DateTime), typeof(string))] - public void TwoBatch_MiscellaneousTest(DatabaseType dbType, string v1,string v2,Type expectedTypeForBatch1,Type expectedTypeForBatch2) + if (dbType == DatabaseType.Oracle) { - var token = new GracefulCancellationToken(); - DiscoveredDatabase db = GetCleanedServer(dbType); - var toConsole = new ThrowImmediatelyDataLoadEventListener(); - var toMemory = new ToMemoryDataLoadEventListener(true); + //in the database it should be typed as string + Assert.AreEqual(typeof(string), tbl.DiscoverColumn("hb_extract").DataType.GetCSharpDataType()); - DataTableUploadDestination destination = new DataTableUploadDestination(); - destination.PreInitialize(db, toConsole); - destination.AllowResizingColumnsAtUploadTime = true; + var dt2 = tbl.GetDataTable(); + Assert.Contains("T", dt2.Rows.Cast().Select(r => r[0]).ToArray()); + Assert.Contains("F", dt2.Rows.Cast().Select(r => r[0]).ToArray()); + } + else + { + //in the database it should be typed as bool + Assert.AreEqual(typeof(bool), tbl.DiscoverColumn("hb_extract").DataType.GetCSharpDataType()); - DataTable dt1 = new DataTable(); - dt1.Columns.Add("TestedCol", typeof(string)); - dt1.Rows.Add(new[] { v1 }); + var dt2 = tbl.GetDataTable(); + Assert.Contains(true, dt2.Rows.Cast().Select(r => r[0]).ToArray()); + Assert.Contains(false, dt2.Rows.Cast().Select(r 
=> r[0]).ToArray()); + } + } - if (v1 != null && v2 != null) - dt1.PrimaryKey = dt1.Columns.Cast().ToArray(); + #region Two Batch Tests - dt1.TableName = "DataTableUploadDestinationTests"; + [TestCase(DatabaseType.MySql, true)] + [TestCase(DatabaseType.MySql, false)] + [TestCase(DatabaseType.MicrosoftSQLServer, true)] + [TestCase(DatabaseType.MicrosoftSQLServer, false)] + public void TwoBatch_BooleanResizingTest(DatabaseType dbType, bool giveNullValuesOnly) + { + var token = new GracefulCancellationToken(); + var db = GetCleanedServer(dbType); + var toConsole = ThrowImmediatelyDataLoadEventListener.Quiet; + var toMemory = new ToMemoryDataLoadEventListener(true); - DataTable dt2 = new DataTable(); - dt2.Columns.Add("TestedCol", typeof(string)); + var destination = new DataTableUploadDestination(); + destination.PreInitialize(db, toConsole); + destination.AllowResizingColumnsAtUploadTime = true; - dt2.Rows.Add(new[] { v2 }); - dt2.TableName = "DataTableUploadDestinationTests"; + var dt1 = new DataTable(); + dt1.Columns.Add("TestedCol", typeof(string)); + dt1.Columns.Add("OtherCol", typeof(string)); + dt1.Rows.Add(new[] { giveNullValuesOnly ? null : "true", "1.51" }); - var tt = db.Server.GetQuerySyntaxHelper().TypeTranslater; - var tbl = db.ExpectTable("DataTableUploadDestinationTests"); + dt1.TableName = "DataTableUploadDestinationTests"; - try - { - destination.ProcessPipelineData(dt1, toConsole, token); - Assert.AreEqual(expectedTypeForBatch1, - tt.GetCSharpTypeForSQLDBType(tbl.DiscoverColumn("TestedCol").DataType.SQLType)); + var dt2 = new DataTable(); + dt2.Columns.Add("TestedCol", typeof(string)); + dt2.Columns.Add("OtherCol", typeof(string)); - destination.ProcessPipelineData(dt2, toMemory, token); - destination.Dispose(toConsole, null); - } - catch(Exception ex) - { - destination.Dispose(toConsole, ex); - throw; - } - - Assert.IsTrue(db.ExpectTable("DataTableUploadDestinationTests").Exists()); - Assert.AreEqual(2, db.ExpectTable("DataTableUploadDestinationTests").GetRowCount()); + dt2.Rows.Add(new[] { "2001-01-01", "999.99" }); - var colAfter = tbl.DiscoverColumn("TestedCol"); + dt2.TableName = "DataTableUploadDestinationTests"; - Assert.AreEqual(v1 != null && v2 != null,colAfter.IsPrimaryKey); + destination.ProcessPipelineData(dt1, toConsole, token); - Assert.AreEqual(expectedTypeForBatch2, tt.GetCSharpTypeForSQLDBType(colAfter.DataType.SQLType)); - } + Assert.AreEqual("bit", + db.ExpectTable("DataTableUploadDestinationTests").DiscoverColumn("TestedCol").DataType.SQLType); - - [Test] - public void TwoBatch_ExplicitRealDataType() - { - var token = new GracefulCancellationToken(); - DiscoveredDatabase db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); - var toConsole = new ThrowImmediatelyDataLoadEventListener(); - var toMemory = new ToMemoryDataLoadEventListener(true); + destination.ProcessPipelineData(dt2, toMemory, token); - DataTableUploadDestination destination = new DataTableUploadDestination(); - destination.PreInitialize(db, toConsole); - destination.AllowResizingColumnsAtUploadTime = true; - destination.AddExplicitWriteType("FloatCol","real"); + Assert.IsTrue(toMemory.EventsReceivedBySender[destination] + .Any(msg => msg.Message.Contains("Resizing column "))); - DataTable dt1 = new DataTable(); - dt1.Columns.Add("FloatCol", typeof(string)); - dt1.Rows.Add(new[] { "1.51" }); + destination.Dispose(toConsole, null); + Assert.IsTrue(db.ExpectTable("DataTableUploadDestinationTests").Exists()); + Assert.AreEqual(2, 
db.ExpectTable("DataTableUploadDestinationTests").GetRowCount()); - dt1.TableName = "DataTableUploadDestinationTests"; + var tt = db.Server.GetQuerySyntaxHelper().TypeTranslater; - DataTable dt2 = new DataTable(); - dt2.Columns.Add("FloatCol", typeof(string)); - dt2.Rows.Add(new[] { "99.9999" }); + Assert.AreEqual( + //if all we got are nulls we should have a DateTime otherwise we had 1/true so the only usable data type is string + giveNullValuesOnly ? typeof(DateTime) : typeof(string), + tt.GetCSharpTypeForSQLDBType(db.ExpectTable("DataTableUploadDestinationTests").DiscoverColumn("TestedCol") + .DataType.SQLType)); + } - dt2.TableName = "DataTableUploadDestinationTests"; + /// + /// Tests the systems ability to change live table datatypes during bulk insert to accomodate novel data + /// + /// This test set passes v1 in the first batch which determines the initial Type of the database table. Then v2 is passed in the next batch + /// which will (in most cases) require an ALTER of the live table to accomodate the wider datatype. + /// + /// The DBMS to test + /// The row value to send in batch 1 + /// The row value to send in batch 2 (after table creation) + /// The Type you expect to be used to store the v1 + /// The Type you expect after ALTER to support all values seen SetUp till now (i.e. v1) AND v2 + [TestCase(DatabaseType.MySql, null, "235", typeof(bool), typeof(int))] + [TestCase(DatabaseType.MySql, "123", "2001-01-01 12:00:00", typeof(int), + typeof(string))] //123 cannot be converted to date so it becomes string + [TestCase(DatabaseType.MySql, "2001-01-01", "2001-01-01 12:00:00", typeof(DateTime), typeof(DateTime))] + [TestCase(DatabaseType.MySql, "2001-01-01", "omg", typeof(DateTime), typeof(string))] + [TestCase(DatabaseType.MicrosoftSQLServer, null, "235", typeof(bool), typeof(int))] + [TestCase(DatabaseType.MicrosoftSQLServer, "123", "2001-01-01 12:00:00", typeof(int), + typeof(string))] //123 cannot be converted to date so it becomes string + [TestCase(DatabaseType.MicrosoftSQLServer, "2001-01-01", "2001-01-01 12:00:00", typeof(DateTime), typeof(DateTime))] + [TestCase(DatabaseType.MicrosoftSQLServer, "2001-01-01", "omg", typeof(DateTime), typeof(string))] + public void TwoBatch_MiscellaneousTest(DatabaseType dbType, string v1, string v2, Type expectedTypeForBatch1, + Type expectedTypeForBatch2) + { + var token = new GracefulCancellationToken(); + var db = GetCleanedServer(dbType); + var toConsole = ThrowImmediatelyDataLoadEventListener.Quiet; + var toMemory = new ToMemoryDataLoadEventListener(true); - destination.ProcessPipelineData(dt1, toConsole, token); + var destination = new DataTableUploadDestination(); + destination.PreInitialize(db, toConsole); + destination.AllowResizingColumnsAtUploadTime = true; - Assert.AreEqual("real", db.ExpectTable("DataTableUploadDestinationTests").DiscoverColumn("FloatCol").DataType.SQLType); + var dt1 = new DataTable(); + dt1.Columns.Add("TestedCol", typeof(string)); + dt1.Rows.Add(new[] { v1 }); - destination.ProcessPipelineData(dt2, toMemory, token); + if (v1 != null && v2 != null) + dt1.PrimaryKey = dt1.Columns.Cast().ToArray(); - destination.Dispose(toConsole, null); + dt1.TableName = "DataTableUploadDestinationTests"; + + var dt2 = new DataTable(); + dt2.Columns.Add("TestedCol", typeof(string)); + + dt2.Rows.Add(new[] { v2 }); + dt2.TableName = "DataTableUploadDestinationTests"; - Assert.IsTrue(db.ExpectTable("DataTableUploadDestinationTests").Exists()); - Assert.AreEqual(2, 
db.ExpectTable("DataTableUploadDestinationTests").GetRowCount()); + var tt = db.Server.GetQuerySyntaxHelper().TypeTranslater; + var tbl = db.ExpectTable("DataTableUploadDestinationTests"); - // should still be real - Assert.AreEqual("real", db.ExpectTable("DataTableUploadDestinationTests").DiscoverColumn("FloatCol").DataType.SQLType); + try + { + destination.ProcessPipelineData(dt1, toConsole, token); + Assert.AreEqual(expectedTypeForBatch1, + tt.GetCSharpTypeForSQLDBType(tbl.DiscoverColumn("TestedCol").DataType.SQLType)); + destination.ProcessPipelineData(dt2, toMemory, token); + destination.Dispose(toConsole, null); + } + catch (Exception ex) + { + destination.Dispose(toConsole, ex); + throw; } - #endregion + + Assert.IsTrue(db.ExpectTable("DataTableUploadDestinationTests").Exists()); + Assert.AreEqual(2, db.ExpectTable("DataTableUploadDestinationTests").GetRowCount()); + + var colAfter = tbl.DiscoverColumn("TestedCol"); + + Assert.AreEqual(v1 != null && v2 != null, colAfter.IsPrimaryKey); + + Assert.AreEqual(expectedTypeForBatch2, tt.GetCSharpTypeForSQLDBType(colAfter.DataType.SQLType)); } -} + + + [Test] + public void TwoBatch_ExplicitRealDataType() + { + var token = new GracefulCancellationToken(); + var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); + var toConsole = ThrowImmediatelyDataLoadEventListener.Quiet; + var toMemory = new ToMemoryDataLoadEventListener(true); + + var destination = new DataTableUploadDestination(); + destination.PreInitialize(db, toConsole); + destination.AllowResizingColumnsAtUploadTime = true; + destination.AddExplicitWriteType("FloatCol", "real"); + + var dt1 = new DataTable(); + dt1.Columns.Add("FloatCol", typeof(string)); + dt1.Rows.Add(new[] { "1.51" }); + + dt1.TableName = "DataTableUploadDestinationTests"; + + var dt2 = new DataTable(); + dt2.Columns.Add("FloatCol", typeof(string)); + dt2.Rows.Add(new[] { "99.9999" }); + + dt2.TableName = "DataTableUploadDestinationTests"; + + destination.ProcessPipelineData(dt1, toConsole, token); + + Assert.AreEqual("real", + db.ExpectTable("DataTableUploadDestinationTests").DiscoverColumn("FloatCol").DataType.SQLType); + + destination.ProcessPipelineData(dt2, toMemory, token); + + destination.Dispose(toConsole, null); + + Assert.IsTrue(db.ExpectTable("DataTableUploadDestinationTests").Exists()); + Assert.AreEqual(2, db.ExpectTable("DataTableUploadDestinationTests").GetRowCount()); + + // should still be real + Assert.AreEqual("real", + db.ExpectTable("DataTableUploadDestinationTests").DiscoverColumn("FloatCol").DataType.SQLType); + } + + #endregion +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/DatabaseOperationTests.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/DatabaseOperationTests.cs index ed1eeb6c49..96ad2c84d3 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/DatabaseOperationTests.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/DatabaseOperationTests.cs @@ -9,123 +9,126 @@ using Microsoft.Data.SqlClient; using FAnsi; using FAnsi.Discovery; -using MapsDirectlyToDatabaseTable; using NUnit.Framework; using Rdmp.Core.Curation; using Rdmp.Core.Curation.Data; -using Rdmp.Core.Curation.Data.Defaults; using Rdmp.Core.DataLoad; using Rdmp.Core.DataLoad.Engine.DatabaseManagement.EntityNaming; using Rdmp.Core.DataLoad.Engine.DatabaseManagement.Operations; -using ReusableLibraryCode.DataAccess; -using ReusableLibraryCode.Progress; +using Rdmp.Core.MapsDirectlyToDatabaseTable; +using Rdmp.Core.ReusableLibraryCode.DataAccess; +using 
Rdmp.Core.ReusableLibraryCode.Progress; using Tests.Common; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration; + +internal class DatabaseOperationTests : DatabaseTests { - class DatabaseOperationTests : DatabaseTests + private Stack toCleanUp = new(); + + [Test] + // This no longer copies between servers, but the original test didn't guarantee that would happen anyway + public void CloneDatabaseAndTable() { - Stack toCleanUp = new Stack(); + var testLiveDatabaseName = TestDatabaseNames.GetConsistentName("TEST"); - [Test] - // This no longer copies between servers, but the original test didn't guarantee that would happen anyway - public void CloneDatabaseAndTable() - { - string testLiveDatabaseName = TestDatabaseNames.GetConsistentName("TEST"); - - var testDb = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(testLiveDatabaseName); - var raw = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(testLiveDatabaseName + "_RAW"); + var testDb = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(testLiveDatabaseName); + var raw = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase($"{testLiveDatabaseName}_RAW"); - foreach (DiscoveredDatabase db in new[] { raw ,testDb}) - if (db.Exists()) - { - foreach (DiscoveredTable table in db.DiscoverTables(true)) - table.Drop(); + foreach (var db in new[] { raw, testDb }) + if (db.Exists()) + { + foreach (var table in db.DiscoverTables(true)) + table.Drop(); - db.Drop(); - } - - DiscoveredServerICanCreateRandomDatabasesAndTablesOn.CreateDatabase(testLiveDatabaseName); - Assert.IsTrue(testDb.Exists()); + db.Drop(); + } + + DiscoveredServerICanCreateRandomDatabasesAndTablesOn.CreateDatabase(testLiveDatabaseName); + Assert.IsTrue(testDb.Exists()); + + testDb.CreateTable("Table_1", new[] { new DatabaseColumnRequest("Id", "int") }); - testDb.CreateTable("Table_1", new[] {new DatabaseColumnRequest("Id", "int")}); - - //clone the builder - var builder = new SqlConnectionStringBuilder(DiscoveredServerICanCreateRandomDatabasesAndTablesOn.Builder.ConnectionString) + //clone the builder + var builder = + new SqlConnectionStringBuilder( + DiscoveredServerICanCreateRandomDatabasesAndTablesOn.Builder.ConnectionString) { InitialCatalog = testLiveDatabaseName }; - - var dbConfiguration = new HICDatabaseConfiguration(new DiscoveredServer(builder),null,CatalogueRepository); - - var cloner = new DatabaseCloner(dbConfiguration); - try - { - var cloneDb = cloner.CreateDatabaseForStage(LoadBubble.Raw); - //confirm database appeared - Assert.IsTrue(DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(testLiveDatabaseName + "_RAW").Exists()); + var dbConfiguration = new HICDatabaseConfiguration(new DiscoveredServer(builder), null, CatalogueRepository); - //now create a catalogue and wire it SetUp to the table TEST on the test database server - Catalogue cata = SetupATestCatalogue(builder, testLiveDatabaseName, "Table_1"); + var cloner = new DatabaseCloner(dbConfiguration); + try + { + var cloneDb = cloner.CreateDatabaseForStage(LoadBubble.Raw); - //now clone the catalogue data structures to MachineName - foreach (TableInfo tableInfo in cata.GetTableInfoList(false)) - cloner.CreateTablesInDatabaseFromCatalogueInfo(new ThrowImmediatelyDataLoadEventListener(), tableInfo, LoadBubble.Raw); - - Assert.IsTrue(raw.Exists()); - Assert.IsTrue(raw.ExpectTable("Table_1").Exists()); + //confirm database appeared + 
Assert.IsTrue(DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase( + $"{testLiveDatabaseName}_RAW").Exists()); - } - finally - { - cloner.LoadCompletedSoDispose(ExitCodeType.Success, new ThrowImmediatelyDataLoadEventListener()); - - while (toCleanUp.Count > 0) - try - { - toCleanUp.Pop().DeleteInDatabase(); - } - catch (Exception e) - { - //always clean SetUp everything - Console.WriteLine(e); - } - } - } + //now create a catalogue and wire it SetUp to the table TEST on the test database server + var cata = SetupATestCatalogue(builder, testLiveDatabaseName, "Table_1"); - private Catalogue SetupATestCatalogue(SqlConnectionStringBuilder builder, string database, string table) + //now clone the catalogue data structures to MachineName + foreach (TableInfo tableInfo in cata.GetTableInfoList(false)) + cloner.CreateTablesInDatabaseFromCatalogueInfo(ThrowImmediatelyDataLoadEventListener.Quiet, tableInfo, + LoadBubble.Raw); + + Assert.IsTrue(raw.Exists()); + Assert.IsTrue(raw.ExpectTable("Table_1").Exists()); + } + finally { - //create a new catalogue for test data (in the test data catalogue) - var cat = new Catalogue(CatalogueRepository, "DeleteMe"); - TableInfoImporter importer = new TableInfoImporter(CatalogueRepository, builder.DataSource, database, table, DatabaseType.MicrosoftSQLServer, builder.UserID, builder.Password); - importer.DoImport(out var tableInfo, out var columnInfos); - - toCleanUp.Push(cat); - - //push the credentials if there are any - var creds = (DataAccessCredentials)tableInfo.GetCredentialsIfExists(DataAccessContext.InternalDataProcessing); - if (creds != null) - toCleanUp.Push(creds); - - //and the TableInfo - toCleanUp.Push(tableInfo); - - //for each column we will add a new one to the - foreach (ColumnInfo col in columnInfos) - { - //create it with the same name - var cataItem = new CatalogueItem(CatalogueRepository, cat, col.Name.Substring(col.Name.LastIndexOf(".") + 1).Trim('[', ']', '`')); - toCleanUp.Push(cataItem); + cloner.LoadCompletedSoDispose(ExitCodeType.Success, ThrowImmediatelyDataLoadEventListener.Quiet); - cataItem.SetColumnInfo(col); + while (toCleanUp.Count > 0) + try + { + toCleanUp.Pop().DeleteInDatabase(); + } + catch (Exception e) + { + //always clean SetUp everything + Console.WriteLine(e); + } + } + } - toCleanUp.Push(col); - } + private Catalogue SetupATestCatalogue(SqlConnectionStringBuilder builder, string database, string table) + { + //create a new catalogue for test data (in the test data catalogue) + var cat = new Catalogue(CatalogueRepository, "DeleteMe"); + var importer = new TableInfoImporter(CatalogueRepository, builder.DataSource, database, table, + DatabaseType.MicrosoftSQLServer, builder.UserID, builder.Password); + importer.DoImport(out var tableInfo, out var columnInfos); + + toCleanUp.Push(cat); + + //push the credentials if there are any + var creds = (DataAccessCredentials)tableInfo.GetCredentialsIfExists(DataAccessContext.InternalDataProcessing); + if (creds != null) + toCleanUp.Push(creds); + + //and the TableInfo + toCleanUp.Push(tableInfo); + //for each column we will add a new one to the + foreach (var col in columnInfos) + { + //create it with the same name + var cataItem = new CatalogueItem(CatalogueRepository, cat, + col.Name[(col.Name.LastIndexOf(".", StringComparison.Ordinal) + 1)..].Trim('[', ']', '`')); + toCleanUp.Push(cataItem); + + cataItem.SetColumnInfo(col); - return cat; + toCleanUp.Push(col); } + + + return cat; } -} +} \ No newline at end of file diff --git 
a/Rdmp.Core.Tests/DataLoad/Engine/Integration/DilutionTests/DilutionCheckTests.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/DilutionTests/DilutionCheckTests.cs index b584c8512c..58c451e6aa 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/DilutionTests/DilutionCheckTests.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/DilutionTests/DilutionCheckTests.cs @@ -5,48 +5,53 @@ // You should have received a copy of the GNU General Public License along with RDMP. If not, see . using System; -using Moq; +using NSubstitute; using NUnit.Framework; using Rdmp.Core.Curation.Data.DataLoad; using Rdmp.Core.DataLoad.Modules.Mutilators.Dilution.Exceptions; using Rdmp.Core.DataLoad.Modules.Mutilators.Dilution.Operations; -using ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Checks; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration.DilutionTests +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration.DilutionTests; + +[Category("Unit")] +public class DilutionCheckTests { - [Category("Unit")] - public class DilutionCheckTests + [Test] + public void TestChecking_RoundDateToMiddleOfQuarter_NoColumnSet() { - [Test] - public void TestChecking_RoundDateToMiddleOfQuarter_NoColumnSet() - { - var dil = new RoundDateToMiddleOfQuarter(); - Assert.Throws(() => dil.Check(new ThrowImmediatelyCheckNotifier())); - } - - [TestCase("varchar(10)")] - [TestCase("bit")] - [TestCase("binary(50)")] - public void TestChecking_RoundDateToMiddleOfQuarter_WrongDataType(string incompatibleType) + var dil = new RoundDateToMiddleOfQuarter(); + Assert.Throws(() => dil.Check(ThrowImmediatelyCheckNotifier.Quiet)); + } + + [TestCase("varchar(10)")] + [TestCase("bit")] + [TestCase("binary(50)")] + public void TestChecking_RoundDateToMiddleOfQuarter_WrongDataType(string incompatibleType) + { + var col = Substitute.For(); + col.SqlDataType.Returns(incompatibleType); + + var dil = new RoundDateToMiddleOfQuarter { - var col = Mock.Of(p => p.SqlDataType == incompatibleType); + ColumnToDilute = col + }; - var dil = new RoundDateToMiddleOfQuarter(); - dil.ColumnToDilute = col; + Assert.Throws(() => dil.Check(ThrowImmediatelyCheckNotifier.Quiet)); + } - Assert.Throws(() => dil.Check(new ThrowImmediatelyCheckNotifier())); - } + [TestCase("date")] + [TestCase("datetime")] + public void TestChecking_RoundDateToMiddleOfQuarter_CompatibleDataType(string incompatibleType) + { + var col = Substitute.For(); + col.SqlDataType.Returns(incompatibleType); - [TestCase("date")] - [TestCase("datetime")] - public void TestChecking_RoundDateToMiddleOfQuarter_CompatibleDataType(string incompatibleType) + var dil = new RoundDateToMiddleOfQuarter { - var col = Mock.Of(p => p.SqlDataType==incompatibleType); - - var dil = new RoundDateToMiddleOfQuarter(); - dil.ColumnToDilute = col; + ColumnToDilute = col + }; - dil.Check(new ThrowImmediatelyCheckNotifier()); - } + dil.Check(ThrowImmediatelyCheckNotifier.Quiet); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/DilutionTests/DilutionOperationFactoryTests.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/DilutionTests/DilutionOperationFactoryTests.cs index cf74857da2..3b624e6120 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/DilutionTests/DilutionOperationFactoryTests.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/DilutionTests/DilutionOperationFactoryTests.cs @@ -5,7 +5,7 @@ // You should have received a copy of the GNU General Public License along with RDMP. If not, see . 
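// [Editorial aside, not part of the original patch] The hunks that follow replace Moq with NSubstitute
// throughout the dilution tests. As a minimal sketch of the translation pattern being applied — using a
// hypothetical IColumnToDilute interface and member names chosen for illustration, not the exact RDMP
// types — a Moq stub of the form
//     var col = Mock.Of<IColumnToDilute>(p => p.SqlDataType == "varchar(10)");
// becomes the two-step NSubstitute equivalent below: create the substitute, then configure each member's
// return value.

using NSubstitute;

public interface IColumnToDilute
{
    string SqlDataType { get; }
}

public static class MoqToNSubstituteSketch
{
    public static IColumnToDilute StubColumn(string sqlType)
    {
        // Create a dynamic proxy implementing the interface
        var col = Substitute.For<IColumnToDilute>();

        // Configure the property getter to return the canned value (NSubstitute's Returns extension)
        col.SqlDataType.Returns(sqlType);

        return col;
    }
}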
using System; -using Moq; +using NSubstitute; using NUnit.Framework; using Rdmp.Core.Curation.ANOEngineering; using Rdmp.Core.Curation.Data; @@ -14,43 +14,44 @@ using Rdmp.Core.DataLoad.Modules.Mutilators.Dilution.Operations; using Tests.Common; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration.DilutionTests +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration.DilutionTests; + +public class DilutionOperationFactoryTests : DatabaseTests { - public class DilutionOperationFactoryTests : DatabaseTests - { [Test] public void NullColumn_Throws() { - Assert.Throws(() => new DilutionOperationFactory(null)); + Assert.Throws(() => new DilutionOperationFactory(null)); } [Test] public void NullOperation_Throws() { - var col = Mock.Of(p => p.Repository==CatalogueRepository); + var col = Substitute.For(); + col.Repository.Returns(CatalogueRepository); - var factory = new DilutionOperationFactory(col); - Assert.Throws(()=>factory.Create(null)); + var factory = new DilutionOperationFactory(col); + Assert.Throws(() => factory.Create(null)); } [Test] public void UnexpectedType_Throws() { - var col = Mock.Of(p => p.Repository==CatalogueRepository); + var col = Substitute.For(); + col.Repository.Returns(CatalogueRepository); - var factory = new DilutionOperationFactory(col); - Assert.Throws(()=>factory.Create(typeof(Catalogue))); + var factory = new DilutionOperationFactory(col); + Assert.Throws(() => factory.Create(typeof(Catalogue))); } [Test] public void ExpectedType_Created() { - var col = Mock.Of(p => p.Repository==CatalogueRepository); - - var factory = new DilutionOperationFactory(col); - var i = factory.Create(typeof(ExcludeRight3OfUKPostcodes)); - Assert.IsNotNull(i); - Assert.IsInstanceOf(i); + var col = Substitute.For(); + col.Repository.Returns(CatalogueRepository); + var factory = new DilutionOperationFactory(col); + var i = factory.Create(typeof(ExcludeRight3OfUKPostcodes)); + Assert.IsNotNull(i); + Assert.IsInstanceOf(i); } - } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/DilutionTests/DilutionOperationTests.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/DilutionTests/DilutionOperationTests.cs index e2b8c6207a..d5b6297e4d 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/DilutionTests/DilutionOperationTests.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/DilutionTests/DilutionOperationTests.cs @@ -7,7 +7,7 @@ using System; using System.Data; using FAnsi; -using Moq; +using NSubstitute; using NUnit.Framework; using Rdmp.Core.Curation.Data; using Rdmp.Core.Curation.Data.DataLoad; @@ -15,186 +15,195 @@ using Rdmp.Core.DataLoad.Engine.Job; using Rdmp.Core.DataLoad.Modules.Mutilators.Dilution; using Rdmp.Core.DataLoad.Modules.Mutilators.Dilution.Operations; -using ReusableLibraryCode; -using ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode; +using Rdmp.Core.ReusableLibraryCode.Checks; using Tests.Common; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration.DilutionTests +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration.DilutionTests; + +public class DilutionOperationTests : DatabaseTests { - public class DilutionOperationTests:DatabaseTests + [TestCase("2001-01-03", "2001-02-15")] + [TestCase("2001-03-31", "2001-02-15")] + [TestCase("2001-04-01", "2001-05-15")] + [TestCase("2001-03-31 23:59:59", "2001-02-15")] + [TestCase("2001-04-01 01:15:00", "2001-05-15")] + [TestCase(null, null)] + public void TestRoundDateToMiddleOfQuarter(string input, string expectedDilute) { - [TestCase("2001-01-03", "2001-02-15")] - 
[TestCase("2001-03-31", "2001-02-15")] - [TestCase("2001-04-01", "2001-05-15")] - [TestCase("2001-03-31 23:59:59", "2001-02-15")] - [TestCase("2001-04-01 01:15:00", "2001-05-15")] - [TestCase(null, null)] - public void TestRoundDateToMiddleOfQuarter(string input, string expectedDilute) + var tbl = Substitute.For(); + tbl.GetRuntimeName(LoadStage.AdjustStaging, null).Returns("DateRoundingTests"); + var col = Substitute.For(); + col.TableInfo.Returns(tbl); + col.GetRuntimeName().Returns("TestField"); + + var o = new RoundDateToMiddleOfQuarter + { + ColumnToDilute = col + }; + var sql = o.GetMutilationSql(null); + + var server = GetCleanedServer(DatabaseType.MicrosoftSQLServer).Server; + using var con = server.BeginNewTransactedConnection(); + try + { + var insert = input != null ? $"'{input}'" : "NULL"; + + server.GetCommand($@"CREATE TABLE DateRoundingTests(TestField datetime) +INSERT INTO DateRoundingTests VALUES ({insert})", con).ExecuteNonQuery(); + + UsefulStuff.ExecuteBatchNonQuery(sql, con.Connection, con.Transaction); + + var result = server.GetCommand("SELECT * from DateRoundingTests", con).ExecuteScalar(); + + if (expectedDilute == null) + Assert.AreEqual(DBNull.Value, result); + else + Assert.AreEqual(DateTime.Parse(expectedDilute), result); + } + finally { - - - var tbl = Mock.Of(m => m.GetRuntimeName(LoadStage.AdjustStaging,null) == "DateRoundingTests"); - var col = Mock.Of(c=> - c.TableInfo == tbl && - c.GetRuntimeName() == "TestField"); - - var o = new RoundDateToMiddleOfQuarter(); - o.ColumnToDilute = col; - var sql = o.GetMutilationSql(null); - - var server = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer).Server; - using (var con = server.BeginNewTransactedConnection()) - { - try - { - string insert = input != null ? "'" + input + "'" : "NULL"; - - server.GetCommand(@"CREATE TABLE DateRoundingTests(TestField datetime) -INSERT INTO DateRoundingTests VALUES (" + insert + ")", con).ExecuteNonQuery(); - - UsefulStuff.ExecuteBatchNonQuery(sql, con.Connection, con.Transaction); - - var result = server.GetCommand("SELECT * from DateRoundingTests", con).ExecuteScalar(); - - if (expectedDilute == null) - Assert.AreEqual(DBNull.Value, result); - else - Assert.AreEqual(DateTime.Parse(expectedDilute), result); - } - finally - { - con.ManagedTransaction.AbandonAndCloseConnection(); - } - } + con.ManagedTransaction.AbandonAndCloseConnection(); } + } + + + [TestCase("DD3 9TA", "DD3")] + [TestCase("DD03 9TA", "DD03")] + [TestCase("EC4V 2AU", "EC4V")] //London postcodes have extra digits + [TestCase("EC4V", "EC4V")] //Already is a prefix + [TestCase("DD3", "DD3")] //Already is a prefix + [TestCase("DD3_5L1", "DD3")] //Makey upey suffix + [TestCase("DD3_XXX", "DD3")] //Makey upey suffix + [TestCase("!D!D!3!9TA!", "DD3")] //Random garbage + [TestCase("EC4V_2AU", "EC4V")] //underscore instead of space + [TestCase("EC4V2AU ", "EC4V")] //Trailing whitespace + [TestCase("??", + "??")] //It's short and it's complete garbage but this is the kind of thing research datasets have :) + [TestCase("???????", + "????")] //Return type is varchar(4) so while we reject the original value we still end SetUp truncating it + [TestCase("I<3Coffee Yay", "I3Co")] //What can you do?!, got to return varchar(4) + [TestCase("D3 9T", + "D39T")] //39T isn't a valid suffix and the remainder (D) wouldn't be enough for a postcode prefix anyway so just return the original input minus dodgy characters + [TestCase("G 9TA", + "G")] //9TA is the correct suffix pattern (Numeric Alpha Alpha) so can be chopped off and the 
remainder returned (G) + [TestCase("DD3 9T", + "DD")] //Expected to get it wrong because the suffix check sees 39T but the remainder is long enough to make a legit postcode (2). We are currently deciding not to evaluate spaces/other dodgy characters when attempting to resolve postcodes + [TestCase(null, null)] + public void TestExcludeRight3OfUKPostcodes(string input, string expectedDilute) + { + var tbl = Substitute.For(); + tbl.GetRuntimeName(LoadStage.AdjustStaging, null).Returns("ExcludeRight3OfPostcodes"); + var col = Substitute.For(); + col.TableInfo.Returns(tbl); + col.GetRuntimeName().Returns("TestField"); + + var o = new ExcludeRight3OfUKPostcodes + { + ColumnToDilute = col + }; + var sql = o.GetMutilationSql(null); + + var server = GetCleanedServer(DatabaseType.MicrosoftSQLServer).Server; + using var con = server.BeginNewTransactedConnection(); + try + { + var insert = input != null ? $"'{input}'" : "NULL"; + + server.GetCommand($@"CREATE TABLE ExcludeRight3OfPostcodes(TestField varchar(15)) + INSERT INTO ExcludeRight3OfPostcodes VALUES ({insert})", con).ExecuteNonQuery(); + UsefulStuff.ExecuteBatchNonQuery(sql, con.Connection, con.Transaction); - [TestCase("DD3 9TA", "DD3")] - [TestCase("DD03 9TA", "DD03")] - [TestCase("EC4V 2AU", "EC4V")] //London postcodes have extra digits - [TestCase("EC4V", "EC4V")] //Already is a prefix - [TestCase("DD3", "DD3")] //Already is a prefix - [TestCase("DD3_5L1", "DD3")] //Makey upey suffix - [TestCase("DD3_XXX", "DD3")] //Makey upey suffix - [TestCase("!D!D!3!9TA!", "DD3")] //Random garbage - [TestCase("EC4V_2AU", "EC4V")] //underscore instead of space - [TestCase("EC4V2AU ", "EC4V")] //Trailing whitespace - [TestCase("??", "??")] //It's short and it's complete garbage but this is the kind of thing research datasets have :) - [TestCase("???????", "????")] //Return type is varchar(4) so while we reject the original value we still end SetUp truncating it - [TestCase("I<3Coffee Yay", "I3Co")] //What can you do?!, got to return varchar(4) - [TestCase("D3 9T", "D39T")]//39T isn't a valid suffix and the remainder (D) wouldn't be enough for a postcode prefix anyway so just return the original input minus dodgy characters - [TestCase("G 9TA", "G")]//9TA is the correct suffix pattern (Numeric Alpha Alpha) so can be chopped off and the remainder returned (G) - [TestCase("DD3 9T", "DD")] //Expected to get it wrong because the suffix check sees 39T but the remainder is long enough to make a legit postcode (2). We are currently deciding not to evaluate spaces/other dodgy characters when attempting to resolve postcodes - [TestCase(null,null)] - public void TestExcludeRight3OfUKPostcodes(string input, string expectedDilute) + var result = server.GetCommand("SELECT * from ExcludeRight3OfPostcodes", con).ExecuteScalar(); + + if (expectedDilute == null) + Assert.AreEqual(DBNull.Value, result); + else + Assert.AreEqual(expectedDilute, result); + } + finally { - var tbl = Mock.Of(t=>t.GetRuntimeName(LoadStage.AdjustStaging,null) == "ExcludeRight3OfPostcodes"); - var col = Mock.Of(c=>c.TableInfo == tbl && c.GetRuntimeName() == "TestField"); - - var o = new ExcludeRight3OfUKPostcodes(); - o.ColumnToDilute = col; - var sql = o.GetMutilationSql(null); - - var server = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer).Server; - using (var con = server.BeginNewTransactedConnection()) - { - try - { - string insert = input != null ? 
"'" + input + "'" : "NULL"; - - server.GetCommand(@"CREATE TABLE ExcludeRight3OfPostcodes(TestField varchar(15)) - INSERT INTO ExcludeRight3OfPostcodes VALUES (" + insert + ")", con).ExecuteNonQuery(); - - UsefulStuff.ExecuteBatchNonQuery(sql, con.Connection, con.Transaction); - - var result = server.GetCommand("SELECT * from ExcludeRight3OfPostcodes", con).ExecuteScalar(); - - if(expectedDilute == null) - Assert.AreEqual(DBNull.Value, result); - else - Assert.AreEqual(expectedDilute, result); - } - finally - { - con.ManagedTransaction.AbandonAndCloseConnection(); - } - - } + con.ManagedTransaction.AbandonAndCloseConnection(); } + } + + [TestCase("2001-01-03", "datetime", true)] + [TestCase("2001-01-03", "varchar(50)", true)] + [TestCase(null, "varchar(50)", false)] + [TestCase(null, "bit", false)] + [TestCase("1", "bit", true)] + [TestCase("0", "bit", true)] + [TestCase("", "varchar(1)", true)] //This data exists regardless of if it is blank so it still gets the 1 + public void DiluteToBitFlag(string input, string inputDataType, bool expectedDilute) + { + var tbl = Substitute.For(); + tbl.GetRuntimeName(LoadStage.AdjustStaging, null).Returns("DiluteToBitFlagTests"); + var col = Substitute.For(); + col.TableInfo.Returns(tbl); + col.GetRuntimeName().Returns("TestField"); - [TestCase("2001-01-03","datetime", true)] - [TestCase("2001-01-03", "varchar(50)", true)] - [TestCase(null,"varchar(50)", false)] - [TestCase(null, "bit", false)] - [TestCase("1", "bit", true)] - [TestCase("0", "bit", true)] - [TestCase("","varchar(1)", true)]//This data exists regardless of if it is blank so it still gets the 1 - public void DiluteToBitFlag(string input,string inputDataType, bool expectedDilute) + var o = new CrushToBitFlag { - var tbl = Mock.Of(m => m.GetRuntimeName(LoadStage.AdjustStaging,null) == "DiluteToBitFlagTests"); - var col = Mock.Of(c=> - c.TableInfo == tbl && - c.GetRuntimeName() =="TestField"); - - var o = new CrushToBitFlag(); - o.ColumnToDilute = col; - var sql = o.GetMutilationSql(null); - - var server = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer).Server; - using (var con = server.BeginNewTransactedConnection()) - { - try - { - string insert = input != null ? "'" + input + "'" : "NULL"; - - server.GetCommand(@"CREATE TABLE DiluteToBitFlagTests(TestField "+inputDataType+@") -INSERT INTO DiluteToBitFlagTests VALUES (" + insert + ")", con).ExecuteNonQuery(); - - UsefulStuff.ExecuteBatchNonQuery(sql, con.Connection, con.Transaction); - - var result = server.GetCommand("SELECT * from DiluteToBitFlagTests", con).ExecuteScalar(); - - Assert.AreEqual(expectedDilute, Convert.ToBoolean(result)); - } - finally - { - con.ManagedTransaction.AbandonAndCloseConnection(); - } - } + ColumnToDilute = col + }; + var sql = o.GetMutilationSql(null); + + var server = GetCleanedServer(DatabaseType.MicrosoftSQLServer).Server; + using var con = server.BeginNewTransactedConnection(); + try + { + var insert = input != null ? 
$"'{input}'" : "NULL"; + + server.GetCommand($@"CREATE TABLE DiluteToBitFlagTests(TestField {inputDataType}) +INSERT INTO DiluteToBitFlagTests VALUES ({insert})", con).ExecuteNonQuery(); + + UsefulStuff.ExecuteBatchNonQuery(sql, con.Connection, con.Transaction); + + var result = server.GetCommand("SELECT * from DiluteToBitFlagTests", con).ExecuteScalar(); + + Assert.AreEqual(expectedDilute, Convert.ToBoolean(result)); } - - [Test] - public void Dilution_WithNamer_Test() + finally { - var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); - - DataTable dt = new DataTable(); - dt.Columns.Add("Bob"); - dt.Rows.Add(new[] {"Fish"}); + con.ManagedTransaction.AbandonAndCloseConnection(); + } + } - var tbl = db.CreateTable("DilutionNamerTest", dt); - Import(tbl,out var ti,out var cols); + [Test] + public void Dilution_WithNamer_Test() + { + var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); - tbl.Rename("AAAA"); - var namer = RdmpMockFactory.Mock_INameDatabasesAndTablesDuringLoads(db, "AAAA"); + var dt = new DataTable(); + dt.Columns.Add("Bob"); + dt.Rows.Add(new[] { "Fish" }); - var discarded = new PreLoadDiscardedColumn(CatalogueRepository, ti, "Bob"); - discarded.SqlDataType = "varchar(10)"; - discarded.Destination = DiscardedColumnDestination.Dilute; - discarded.SaveToDatabase(); + var tbl = db.CreateTable("DilutionNamerTest", dt); + Import(tbl, out var ti, out var cols); + tbl.Rename("AAAA"); + var namer = RdmpMockFactory.Mock_INameDatabasesAndTablesDuringLoads(db, "AAAA"); - var dilution = new Dilution(); + var discarded = new PreLoadDiscardedColumn(CatalogueRepository, ti, "Bob") + { + SqlDataType = "varchar(10)", + Destination = DiscardedColumnDestination.Dilute + }; + discarded.SaveToDatabase(); - dilution.ColumnToDilute = discarded; - dilution.Operation = typeof (CrushToBitFlag); - dilution.Initialize(db,LoadStage.AdjustStaging); - dilution.Check(new ThrowImmediatelyCheckNotifier()); + var dilution = new Dilution + { + ColumnToDilute = discarded, + Operation = typeof(CrushToBitFlag) + }; - var job = new ThrowImmediatelyDataLoadJob(new HICDatabaseConfiguration(db.Server,namer),ti); - - dilution.Mutilate(job); - } + dilution.Initialize(db, LoadStage.AdjustStaging); + dilution.Check(ThrowImmediatelyCheckNotifier.Quiet); + + var job = new ThrowImmediatelyDataLoadJob(new HICDatabaseConfiguration(db.Server, namer), ti); + + dilution.Mutilate(job); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/DistincterTests.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/DistincterTests.cs index 775b865f77..995a8023bd 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/DistincterTests.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/DistincterTests.cs @@ -10,7 +10,7 @@ using System.Linq; using System.Text.RegularExpressions; using FAnsi; -using Moq; +using NSubstitute; using NUnit.Framework; using Rdmp.Core.Curation; using Rdmp.Core.Curation.Data; @@ -20,114 +20,125 @@ using Rdmp.Core.DataLoad.Modules.Mutilators; using Tests.Common; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration; + +public class DistincterTests : DatabaseTests { - public class DistincterTests : DatabaseTests + [Test] + [TestCase(DatabaseType.MicrosoftSQLServer)] + [TestCase(DatabaseType.MySql)] + public void TestDistincter_Duplicates(DatabaseType type) { - [Test] - [TestCase(DatabaseType.MicrosoftSQLServer)] - [TestCase(DatabaseType.MySql)] - public void TestDistincter_Duplicates(DatabaseType type) - { - var db 
= GetCleanedServer(type, "TestCoalescer"); + var db = GetCleanedServer(type, "TestCoalescer"); - int batchCount = 1000; + const int batchCount = 1000; - DataTable dt = new DataTable("TestCoalescer_RampantNullness"); - dt.Columns.Add("pk"); - dt.Columns.Add("f1"); - dt.Columns.Add("f2"); - dt.Columns.Add("f3"); - dt.Columns.Add("f4"); + using var dt = new DataTable("TestCoalescer_RampantNullness"); + dt.BeginLoadData(); + dt.Columns.Add("pk"); + dt.Columns.Add("f1"); + dt.Columns.Add("f2"); + dt.Columns.Add("f3"); + dt.Columns.Add("f4"); - Random r = new Random(123); + var r = new Random(123); - for (int i = 0; i < batchCount; i++) - { - int randInt = r.Next(int.MaxValue); - - dt.Rows.Add(new object[] { randInt, randInt, randInt, randInt, randInt }); - dt.Rows.Add(new object[] { randInt, randInt, randInt, randInt, randInt }); - } + for (var i = 0; i < batchCount; i++) + { + var randInt = r.Next(int.MaxValue); - var tbl = db.CreateTable(dt.TableName, dt); + dt.Rows.Add(new object[] { randInt, randInt, randInt, randInt, randInt }); + dt.Rows.Add(new object[] { randInt, randInt, randInt, randInt, randInt }); + } - var importer = new TableInfoImporter(CatalogueRepository, tbl); - importer.DoImport(out var tableInfo, out var colInfos); + dt.EndLoadData(); + var tbl = db.CreateTable(dt.TableName, dt); - //lie about what hte primary key is because this component is designed to run in the RAW environment and we are simulating a LIVE TableInfo (correctly) - var pk = colInfos.Single(c => c.GetRuntimeName().Equals("pk")); - pk.IsPrimaryKey = true; - pk.SaveToDatabase(); + var importer = new TableInfoImporter(CatalogueRepository, tbl); + importer.DoImport(out var tableInfo, out var colInfos); - var rowsBefore = tbl.GetRowCount(); + //lie about what hte primary key is because this component is designed to run in the RAW environment and we are simulating a LIVE TableInfo (correctly) + var pk = colInfos.Single(c => c.GetRuntimeName().Equals("pk")); + pk.IsPrimaryKey = true; + pk.SaveToDatabase(); - var distincter = new Distincter(); - distincter.TableRegexPattern = new Regex(".*"); - distincter.Initialize(db, LoadStage.AdjustRaw); + var rowsBefore = tbl.GetRowCount(); - var job = Mock.Of(p => p.RegularTablesToLoad==new List(new[] { tableInfo })&& p.Configuration==new HICDatabaseConfiguration(db.Server,null,null,null)); + var distincter = new Distincter + { + TableRegexPattern = new Regex(".*") + }; + distincter.Initialize(db, LoadStage.AdjustRaw); - distincter.Mutilate(job); + var job = Substitute.For(); + job.RegularTablesToLoad.Returns(new List(new[] { tableInfo })); + job.Configuration.Returns(new HICDatabaseConfiguration(db.Server, null, null, null)); - var rowsAfter = tbl.GetRowCount(); + distincter.Mutilate(job); - Assert.AreEqual(rowsBefore/2,rowsAfter); + var rowsAfter = tbl.GetRowCount(); - db.Drop(); - } + Assert.AreEqual(rowsBefore / 2, rowsAfter); - [Test] - [TestCase(DatabaseType.MicrosoftSQLServer)] - [TestCase(DatabaseType.MySql)] - public void TestDistincter_NoDuplicates(DatabaseType type) - { - var db = GetCleanedServer(type, "TestCoalescer"); + db.Drop(); + } - int batchCount = 1000; + [Test] + [TestCase(DatabaseType.MicrosoftSQLServer)] + [TestCase(DatabaseType.MySql)] + public void TestDistincter_NoDuplicates(DatabaseType type) + { + var db = GetCleanedServer(type, "TestCoalescer"); - DataTable dt = new DataTable("TestCoalescer_RampantNullness"); - dt.Columns.Add("pk"); - dt.Columns.Add("f1"); - dt.Columns.Add("f2"); - dt.Columns.Add("f3"); - dt.Columns.Add("f4"); + const int 
batchCount = 1000; - Random r = new Random(123); + using var dt = new DataTable("TestCoalescer_RampantNullness"); + dt.BeginLoadData(); + dt.Columns.Add("pk"); + dt.Columns.Add("f1"); + dt.Columns.Add("f2"); + dt.Columns.Add("f3"); + dt.Columns.Add("f4"); - for (int i = 0; i < batchCount; i++) - { - int randInt = r.Next(int.MaxValue); + var r = new Random(123); - dt.Rows.Add(new object[] { randInt, randInt, randInt, randInt, randInt }); - dt.Rows.Add(new object[] { randInt, randInt, randInt, randInt, randInt+1 }); - } + for (var i = 0; i < batchCount; i++) + { + var randInt = r.Next(int.MaxValue); - var tbl = db.CreateTable(dt.TableName, dt); + dt.Rows.Add(new object[] { randInt, randInt, randInt, randInt, randInt }); + dt.Rows.Add(new object[] { randInt, randInt, randInt, randInt, randInt + 1 }); + } - var importer = new TableInfoImporter(CatalogueRepository, tbl); - importer.DoImport(out var tableInfo, out var colInfos); + dt.EndLoadData(); + var tbl = db.CreateTable(dt.TableName, dt); - //lie about what hte primary key is because this component is designed to run in the RAW environment and we are simulating a LIVE TableInfo (correctly) - var pk = colInfos.Single(c => c.GetRuntimeName().Equals("pk")); - pk.IsPrimaryKey = true; - pk.SaveToDatabase(); + var importer = new TableInfoImporter(CatalogueRepository, tbl); + importer.DoImport(out var tableInfo, out var colInfos); - var rowsBefore = tbl.GetRowCount(); + //lie about what hte primary key is because this component is designed to run in the RAW environment and we are simulating a LIVE TableInfo (correctly) + var pk = colInfos.Single(c => c.GetRuntimeName().Equals("pk")); + pk.IsPrimaryKey = true; + pk.SaveToDatabase(); - var distincter = new Distincter(); - distincter.TableRegexPattern = new Regex(".*"); - distincter.Initialize(db, LoadStage.AdjustRaw); + var rowsBefore = tbl.GetRowCount(); - var job = Mock.Of(p => p.RegularTablesToLoad==new List(new[] { tableInfo }) && p.Configuration==new HICDatabaseConfiguration(db.Server,null,null,null)); + var distincter = new Distincter + { + TableRegexPattern = new Regex(".*") + }; + distincter.Initialize(db, LoadStage.AdjustRaw); - distincter.Mutilate(job); + var job = Substitute.For(); + job.RegularTablesToLoad.Returns(new List(new[] { tableInfo })); + job.Configuration.Returns(new HICDatabaseConfiguration(db.Server, null, null, null)); - var rowsAfter = tbl.GetRowCount(); + distincter.Mutilate(job); - Assert.AreEqual(rowsBefore, rowsAfter); + var rowsAfter = tbl.GetRowCount(); - db.Drop(); - } + Assert.AreEqual(rowsBefore, rowsAfter); + + db.Drop(); } } \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/ExcelConversionTest.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/ExcelConversionTest.cs index 2c1a0491c9..30e90a939b 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/ExcelConversionTest.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/ExcelConversionTest.cs @@ -7,113 +7,125 @@ using System; using System.Collections.Generic; using System.IO; -using System.Linq; using System.Text.RegularExpressions; using NUnit.Framework; using Rdmp.Core.Curation; using Rdmp.Core.DataFlowPipeline; using Rdmp.Core.DataLoad.Engine.Job; using Rdmp.Core.DataLoad.Modules.DataProvider.FlatFileManipulation; -using ReusableLibraryCode.Progress; +using Rdmp.Core.ReusableLibraryCode.Progress; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration; + +[Category("Unit")] +public class ExcelConversionTest { - 
[Category("Unit")] - public class ExcelConversionTest + private readonly Stack _dirsToCleanUp = new(); + private DirectoryInfo _parentDir; + + [OneTimeSetUp] + protected virtual void OneTimeSetUp() { - private readonly Stack _dirsToCleanUp = new Stack(); - private DirectoryInfo _parentDir; - - [OneTimeSetUp] - protected virtual void OneTimeSetUp() - { - var testDir = new DirectoryInfo(TestContext.CurrentContext.TestDirectory); - _parentDir = testDir.CreateSubdirectory("ExcelConversionTest"); - _dirsToCleanUp.Push(_parentDir); - } - - private LoadDirectory CreateLoadDirectoryForTest(string directoryName) + var testDir = new DirectoryInfo(TestContext.CurrentContext.TestDirectory); + _parentDir = testDir.CreateSubdirectory("ExcelConversionTest"); + _dirsToCleanUp.Push(_parentDir); + } + + [OneTimeTearDown] + protected virtual void OneTimeTearDown() + { + while (_dirsToCleanUp.Count > 0) { - var loadDirectory = LoadDirectory.CreateDirectoryStructure(_parentDir, directoryName,true); - _dirsToCleanUp.Push(loadDirectory.RootPath); - return loadDirectory; + var dir = _dirsToCleanUp.Pop(); + if (dir.Exists) + dir.Delete(true); } + } - [Test] - public void TestExcelFunctionality_OnSimpleXlsx() - { - var LoadDirectory = CreateLoadDirectoryForTest("TestExcelFunctionality_OnSimpleXlsx"); + private LoadDirectory CreateLoadDirectoryForTest(string directoryName) + { + var loadDirectory = LoadDirectory.CreateDirectoryStructure(_parentDir, directoryName, true); + _dirsToCleanUp.Push(loadDirectory.RootPath); + return loadDirectory; + } - //clean SetUp anything in the test project folders forloading directory - foreach (FileInfo fileInfo in LoadDirectory.ForLoading.GetFiles()) - fileInfo.Delete(); + [Test] + public void TestExcelFunctionality_OnSimpleXlsx() + { + var LoadDirectory = CreateLoadDirectoryForTest("TestExcelFunctionality_OnSimpleXlsx"); - string targetFile = Path.Combine(LoadDirectory.ForLoading.FullName, "Test.xlsx"); + //clean SetUp anything in the test project folders forloading directory + foreach (var fileInfo in LoadDirectory.ForLoading.GetFiles()) + fileInfo.Delete(); - FileInfo fi = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, "DataLoad", "Engine", - "Resources", "Test.xlsx")); + var targetFile = Path.Combine(LoadDirectory.ForLoading.FullName, "Test.xlsx"); - FileAssert.Exists(fi); + var fi = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, "DataLoad", "Engine", + "Resources", "Test.xlsx")); - fi.CopyTo(targetFile, true); + FileAssert.Exists(fi); - TestConversionFor(targetFile, "*.xlsx", 5, LoadDirectory); - } + fi.CopyTo(targetFile, true); - [Test] - public void TestExcelFunctionality_DodgyFileExtension() - { - var LoadDirectory = CreateLoadDirectoryForTest("TestExcelFunctionality_DodgyFileExtension"); + TestConversionFor(targetFile, "*.xlsx", 5, LoadDirectory); + } - //clean SetUp anything in the test project folders forloading directory - foreach (FileInfo fileInfo in LoadDirectory.ForLoading.GetFiles()) - fileInfo.Delete(); + [Test] + public void TestExcelFunctionality_DodgyFileExtension() + { + var LoadDirectory = CreateLoadDirectoryForTest("TestExcelFunctionality_DodgyFileExtension"); - string targetFile = Path.Combine(LoadDirectory.ForLoading.FullName, "Test.xml"); - FileInfo fi = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, "DataLoad", "Engine", - "Resources", "XmlTestForExcel.xml")); + //clean SetUp anything in the test project folders forloading directory + foreach (var fileInfo in 
LoadDirectory.ForLoading.GetFiles()) + fileInfo.Delete(); - FileAssert.Exists(fi); + var targetFile = Path.Combine(LoadDirectory.ForLoading.FullName, "Test.xml"); + var fi = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, "DataLoad", "Engine", + "Resources", "XmlTestForExcel.xml")); - fi.CopyTo(targetFile, true); + FileAssert.Exists(fi); - var ex = Assert.Throws(()=>TestConversionFor(targetFile, "*.fish", 1, LoadDirectory)); + fi.CopyTo(targetFile, true); - Assert.IsTrue(ex.Message.StartsWith("Did not find any files matching Pattern '*.fish' in directory")); - } - - private void TestConversionFor(string targetFile, string fileExtensionToConvert, int expectedNumberOfSheets, LoadDirectory directory) - { - FileInfo f = new FileInfo(targetFile); + var ex = Assert.Throws(() => TestConversionFor(targetFile, "*.fish", 1, LoadDirectory)); - try - { - Assert.IsTrue(f.Exists); - Assert.IsTrue(f.Length > 100); + Assert.IsTrue(ex.Message.StartsWith("Did not find any files matching Pattern '*.fish' in directory")); + } + + private static void TestConversionFor(string targetFile, string fileExtensionToConvert, int expectedNumberOfSheets, + LoadDirectory directory) + { + var f = new FileInfo(targetFile); - ExcelToCSVFilesConverter converter = new ExcelToCSVFilesConverter(); + try + { + Assert.IsTrue(f.Exists); + Assert.IsTrue(f.Length > 100); - var job = new ThrowImmediatelyDataLoadJob(new ThrowImmediatelyDataLoadEventListener(){ThrowOnWarning = true, WriteToConsole = true}); - job.LoadDirectory = directory; + var converter = new ExcelToCSVFilesConverter(); - converter.ExcelFilePattern = fileExtensionToConvert; - converter.Fetch(job, new GracefulCancellationToken()); + var job = new ThrowImmediatelyDataLoadJob(ThrowImmediatelyDataLoadEventListener.QuietPicky) + { + LoadDirectory = directory + }; - FileInfo[] filesCreated = directory.ForLoading.GetFiles("*.csv"); + converter.ExcelFilePattern = fileExtensionToConvert; + converter.Fetch(job, new GracefulCancellationToken()); - Assert.AreEqual(expectedNumberOfSheets,filesCreated.Length); + var filesCreated = directory.ForLoading.GetFiles("*.csv"); - foreach (FileInfo fileCreated in filesCreated) - { - Assert.IsTrue(Regex.IsMatch(fileCreated.Name, "Sheet[0-9].csv")); - Assert.GreaterOrEqual(fileCreated.Length, 100); - fileCreated.Delete(); - } - } - finally + Assert.AreEqual(expectedNumberOfSheets, filesCreated.Length); + + foreach (var fileCreated in filesCreated) { - f.Delete(); + Assert.IsTrue(Regex.IsMatch(fileCreated.Name, "Sheet[0-9].csv")); + Assert.GreaterOrEqual(fileCreated.Length, 100); + fileCreated.Delete(); } } + finally + { + f.Delete(); + } } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/ExcelDatabaseTests.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/ExcelDatabaseTests.cs index 7cac836e72..1374b12b07 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/ExcelDatabaseTests.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/ExcelDatabaseTests.cs @@ -13,43 +13,43 @@ using System.Linq; using Tests.Common; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration; + +public class ExcelDatabaseTests : DatabaseTests { - public class ExcelDatabaseTests : DatabaseTests + [Test] + public void TestLoadingFileWithTrailingDotsInHeader() { - [Test] - public void TestLoadingFileWithTrailingDotsInHeader() - { - var trailingDotsFile = Path.Combine(TestContext.CurrentContext.TestDirectory, "TrailingDots....xlsx"); - 
FileAssert.Exists(trailingDotsFile); - - var db = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer); - - // Create the 'out of the box' RDMP pipelines (which includes an excel bulk importer pipeline) - var creator = new CataloguePipelinesAndReferencesCreation( - RepositoryLocator, UnitTestLoggingConnectionString, DataQualityEngineConnectionString); - creator.CreatePipelines(); - - // find the excel loading pipeline - var pipe = CatalogueRepository.GetAllObjects().OrderByDescending(p => p.ID).FirstOrDefault(p => p.Name.Contains("BULK INSERT: Excel File")); - - // run an import of the file using the pipeline - var cmd = new ExecuteCommandCreateNewCatalogueByImportingFile( - new ThrowImmediatelyActivator(RepositoryLocator), - new FileInfo(trailingDotsFile), - null, db, pipe, null); - - cmd.Execute(); - - var tbl = db.ExpectTable("TrailingDots"); - Assert.IsTrue(tbl.Exists()); - - var cols = tbl.DiscoverColumns(); - Assert.AreEqual(2, cols.Length); - Assert.AreEqual("Field1",cols[0].GetRuntimeName()); - Assert.AreEqual("Field2", cols[1].GetRuntimeName()); - - Assert.AreEqual(2, tbl.GetRowCount()); - } + var trailingDotsFile = Path.Combine(TestContext.CurrentContext.TestDirectory, "TrailingDots....xlsx"); + FileAssert.Exists(trailingDotsFile); + + var db = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer); + + // Create the 'out of the box' RDMP pipelines (which includes an excel bulk importer pipeline) + var creator = new CataloguePipelinesAndReferencesCreation( + RepositoryLocator, UnitTestLoggingConnectionString, DataQualityEngineConnectionString); + creator.CreatePipelines(new PlatformDatabaseCreationOptions {}); + + // find the excel loading pipeline + var pipe = CatalogueRepository.GetAllObjects().OrderByDescending(p => p.ID) + .FirstOrDefault(p => p.Name.Contains("BULK INSERT: Excel File")); + + // run an import of the file using the pipeline + var cmd = new ExecuteCommandCreateNewCatalogueByImportingFile( + new ThrowImmediatelyActivator(RepositoryLocator), + new FileInfo(trailingDotsFile), + null, db, pipe, null); + + cmd.Execute(); + + var tbl = db.ExpectTable("TrailingDots"); + Assert.IsTrue(tbl.Exists()); + + var cols = tbl.DiscoverColumns(); + Assert.AreEqual(2, cols.Length); + Assert.AreEqual("Field1", cols[0].GetRuntimeName()); + Assert.AreEqual("Field2", cols[1].GetRuntimeName()); + + Assert.AreEqual(2, tbl.GetRowCount()); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/ExecutableProcessTaskTests.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/ExecutableProcessTaskTests.cs index 73cbf2fd69..e402a338fb 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/ExecutableProcessTaskTests.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/ExecutableProcessTaskTests.cs @@ -11,48 +11,48 @@ using Rdmp.Core.DataLoad.Engine.LoadExecution.Components.Runtime; using Tests.Common; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration; + +internal class ExecutableProcessTaskTests : DatabaseTests { - class ExecutableProcessTaskTests : DatabaseTests + [Test] + public void TestConstructionFromProcessTaskUsingDatabase() { - [Test] - public void TestConstructionFromProcessTaskUsingDatabase() + const string expectedPath = @"\\a\fake\path.exe"; + + var loadMetadata = new LoadMetadata(CatalogueRepository); + var processTask = new ProcessTask(CatalogueRepository, loadMetadata, LoadStage.Mounting) + { + Name = "Test process task", + Path = expectedPath + }; + processTask.SaveToDatabase(); + 
+ var argument = new ProcessTaskArgument(CatalogueRepository, processTask) + { + Name = "DatabaseName", + Value = @"Foo_STAGING" + }; + argument.SaveToDatabase(); + + try + { + var args = + new RuntimeArgumentCollection(processTask.ProcessTaskArguments.Cast().ToArray(), null); + + var runtimeTask = new ExecutableRuntimeTask(processTask, args); + Assert.AreEqual(expectedPath, runtimeTask.ExeFilepath); + + Assert.AreEqual(1, runtimeTask.RuntimeArguments.GetAllArgumentsOfType().Count()); + + var dictionaryOfStringArguments = runtimeTask.RuntimeArguments.GetAllArgumentsOfType() + .ToDictionary(kvp => kvp.Key, kvp => kvp.Value); + Assert.IsNotNull(dictionaryOfStringArguments["DatabaseName"]); + Assert.AreEqual("Foo_STAGING", dictionaryOfStringArguments["DatabaseName"]); + } + finally { - const string expectedPath = @"\\a\fake\path.exe"; - - var loadMetadata = new LoadMetadata(CatalogueRepository); - var processTask = new ProcessTask(CatalogueRepository, loadMetadata, LoadStage.Mounting) - { - Name = "Test process task", - Path = expectedPath - }; - processTask.SaveToDatabase(); - - var argument = new ProcessTaskArgument(CatalogueRepository, processTask) - { - Name = "DatabaseName", - Value = @"Foo_STAGING" - }; - argument.SaveToDatabase(); - - try - { - var args = - new RuntimeArgumentCollection(processTask.ProcessTaskArguments.Cast().ToArray(), null); - - var runtimeTask = new ExecutableRuntimeTask(processTask, args); - Assert.AreEqual(expectedPath, runtimeTask.ExeFilepath); - - Assert.AreEqual(1, runtimeTask.RuntimeArguments.GetAllArgumentsOfType().Count()); - - var dictionaryOfStringArguments = runtimeTask.RuntimeArguments.GetAllArgumentsOfType().ToDictionary(kvp => kvp.Key, kvp => kvp.Value); - Assert.IsNotNull(dictionaryOfStringArguments["DatabaseName"]); - Assert.AreEqual("Foo_STAGING", dictionaryOfStringArguments["DatabaseName"]); - } - finally - { - loadMetadata.DeleteInDatabase(); - } + loadMetadata.DeleteInDatabase(); } } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/ExecuteSqlFileRuntimeTaskTests.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/ExecuteSqlFileRuntimeTaskTests.cs index 37d40aea1e..d0e76eaa40 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/ExecuteSqlFileRuntimeTaskTests.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/ExecuteSqlFileRuntimeTaskTests.cs @@ -9,7 +9,7 @@ using System.Data; using System.IO; using FAnsi; -using Moq; +using NSubstitute; using NUnit.Framework; using Rdmp.Core.Curation; using Rdmp.Core.Curation.Data; @@ -20,190 +20,199 @@ using Rdmp.Core.DataLoad.Engine.LoadExecution.Components.Arguments; using Rdmp.Core.DataLoad.Engine.LoadExecution.Components.Runtime; using Rdmp.Core.DataLoad.Modules.Mutilators; -using ReusableLibraryCode.Checks; -using ReusableLibraryCode.DataAccess; -using ReusableLibraryCode.Progress; +using Rdmp.Core.ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.DataAccess; +using Rdmp.Core.ReusableLibraryCode.Progress; using Tests.Common; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration; + +internal class ExecuteSqlFileRuntimeTaskTests : DatabaseTests { - class ExecuteSqlFileRuntimeTaskTests:DatabaseTests + [TestCase(DatabaseType.MySql)] + [TestCase(DatabaseType.MicrosoftSQLServer)] + public void ExecuteSqlFileRuntimeTask_BasicScript(DatabaseType dbType) { - [TestCase(DatabaseType.MySql)] - [TestCase(DatabaseType.MicrosoftSQLServer)] - public void ExecuteSqlFileRuntimeTask_BasicScript(DatabaseType 
dbType) - { - var dt = new DataTable(); - dt.Columns.Add("Lawl"); - dt.Rows.Add(new object []{2}); + var dt = new DataTable(); + dt.Columns.Add("Lawl"); + dt.Rows.Add(new object[] { 2 }); - var db = GetCleanedServer(dbType); + var db = GetCleanedServer(dbType); - var tbl = db.CreateTable("Fish",dt); - - FileInfo f = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory,"Bob.sql")); + var tbl = db.CreateTable("Fish", dt); - File.WriteAllText(f.FullName,@"UPDATE Fish Set Lawl = 1"); + var f = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, "Bob.sql")); - var pt = Mock.Of(x => x.Path==f.FullName); + File.WriteAllText(f.FullName, @"UPDATE Fish Set Lawl = 1"); - var dir = LoadDirectory.CreateDirectoryStructure(new DirectoryInfo(TestContext.CurrentContext.TestDirectory),"ExecuteSqlFileRuntimeTaskTests", true); + var pt = Substitute.For(); + pt.Path.Returns(f.FullName); - var task = new ExecuteSqlFileRuntimeTask(pt, new RuntimeArgumentCollection(new IArgument[0], new StageArgs(LoadStage.AdjustRaw, db, dir))); + var dir = LoadDirectory.CreateDirectoryStructure(new DirectoryInfo(TestContext.CurrentContext.TestDirectory), + "ExecuteSqlFileRuntimeTaskTests", true); - task.Check(new ThrowImmediatelyCheckNotifier()); + var task = new ExecuteSqlFileRuntimeTask(pt, + new RuntimeArgumentCollection(Array.Empty(), new StageArgs(LoadStage.AdjustRaw, db, dir))); - IDataLoadJob job = Mock.Of(); + task.Check(ThrowImmediatelyCheckNotifier.Quiet); - task.Run(job, new GracefulCancellationToken()); + var job = Substitute.For(); - Assert.AreEqual(1,tbl.GetDataTable().Rows[0][0]); + task.Run(job, new GracefulCancellationToken()); - tbl.Drop(); - } + Assert.AreEqual(1, tbl.GetDataTable().Rows[0][0]); - [TestCase(DatabaseType.MySql)] - [TestCase(DatabaseType.MicrosoftSQLServer)] - public void ExecuteSqlFileRuntimeTask_InvalidID(DatabaseType dbType) - { - var dt = new DataTable(); - dt.Columns.Add("Lawl"); - dt.Rows.Add(new object[] { 2 }); + tbl.Drop(); + } + + [TestCase(DatabaseType.MySql)] + [TestCase(DatabaseType.MicrosoftSQLServer)] + public void ExecuteSqlFileRuntimeTask_InvalidID(DatabaseType dbType) + { + var dt = new DataTable(); + dt.Columns.Add("Lawl"); + dt.Rows.Add(new object[] { 2 }); - var db = GetCleanedServer(dbType); + var db = GetCleanedServer(dbType); - var tbl = db.CreateTable("Fish", dt); + var tbl = db.CreateTable("Fish", dt); - Import(tbl,out var ti,out var cols); + Import(tbl, out var ti, out var cols); - FileInfo f = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, "Bob.sql")); - - File.WriteAllText(f.FullName, @"UPDATE {T:0} Set {C:0} = 1"); + var f = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, "Bob.sql")); - var pt = Mock.Of(x => x.Path==f.FullName); + File.WriteAllText(f.FullName, @"UPDATE {T:0} Set {C:0} = 1"); - var dir = LoadDirectory.CreateDirectoryStructure(new DirectoryInfo(TestContext.CurrentContext.TestDirectory),"ExecuteSqlFileRuntimeTaskTests", true); + var pt = Substitute.For(); + pt.Path.Returns(f.FullName); + var dir = LoadDirectory.CreateDirectoryStructure(new DirectoryInfo(TestContext.CurrentContext.TestDirectory), + "ExecuteSqlFileRuntimeTaskTests", true); - var task = new ExecuteSqlFileRuntimeTask(pt, new RuntimeArgumentCollection(new IArgument[0], new StageArgs(LoadStage.AdjustRaw, db, dir))); + var task = new ExecuteSqlFileRuntimeTask(pt, + new RuntimeArgumentCollection(Array.Empty(), new StageArgs(LoadStage.AdjustRaw, db, dir))); - task.Check(new ThrowImmediatelyCheckNotifier()); - 
HICDatabaseConfiguration configuration = new HICDatabaseConfiguration(db.Server);
+ task.Check(ThrowImmediatelyCheckNotifier.Quiet);
+ var configuration = new HICDatabaseConfiguration(db.Server);
- IDataLoadJob job = Mock.Of(x =>
- x.RegularTablesToLoad == new List {ti} &&
- x.LookupTablesToLoad == new List() &&
- x.Configuration == configuration);
-
- var ex = Assert.Throws(()=>task.Run(job, new GracefulCancellationToken()));
- StringAssert.Contains("Failed to find a TableInfo in the load with ID 0",ex.Message);
+ var job = Substitute.For();
+ job.RegularTablesToLoad.Returns(new List { ti });
+ job.LookupTablesToLoad.Returns(new List());
+ job.Configuration.Returns(configuration);
- task.LoadCompletedSoDispose(Core.DataLoad.ExitCodeType.Success,new ThrowImmediatelyDataLoadEventListener());
- }
+ var ex = Assert.Throws(() =>
+ task.Run(job, new GracefulCancellationToken()));
+ StringAssert.Contains("Failed to find a TableInfo in the load with ID 0", ex.Message);
- [TestCase(DatabaseType.MySql)]
- [TestCase(DatabaseType.MicrosoftSQLServer)]
- public void ExecuteSqlRuntimeTask_InvalidID(DatabaseType dbType)
- {
- var dt = new DataTable();
- dt.Columns.Add("Lawl");
- dt.Rows.Add(new object[] { 2 });
+ task.LoadCompletedSoDispose(Core.DataLoad.ExitCodeType.Success, ThrowImmediatelyDataLoadEventListener.Quiet);
+ }
+
+ [TestCase(DatabaseType.MySql)]
+ [TestCase(DatabaseType.MicrosoftSQLServer)]
+ public void ExecuteSqlRuntimeTask_InvalidID(DatabaseType dbType)
+ {
+ var dt = new DataTable();
+ dt.Columns.Add("Lawl");
+ dt.Rows.Add(new object[] { 2 });
- var db = GetCleanedServer(dbType);
+ var db = GetCleanedServer(dbType);
- var tbl = db.CreateTable("Fish", dt);
+ var tbl = db.CreateTable("Fish", dt);
- Import(tbl,out var ti,out var cols);
+ Import(tbl, out var ti, out var cols);
- string sql = @"UPDATE {T:0} Set {C:0} = 1";
-
- IRuntimeTask task;
- IProcessTask pt;
-
- var dir = LoadDirectory.CreateDirectoryStructure(new DirectoryInfo(TestContext.CurrentContext.TestDirectory),"ExecuteSqlFileRuntimeTaskTests", true);
+ var sql = @"UPDATE {T:0} Set {C:0} = 1";
+
+ var dir = LoadDirectory.CreateDirectoryStructure(new DirectoryInfo(TestContext.CurrentContext.TestDirectory),
+ "ExecuteSqlFileRuntimeTaskTests", true);
 #pragma warning disable CS0252, CS0253 // Spurious warning 'Possible unintended reference comparison; left hand side needs cast' since VS doesn't grok Moq fully
- var sqlArg = new IArgument[]{Mock.Of(x =>
- x.Name == "Sql" &&
- x.Value == sql &&
- x.GetValueAsSystemType() == sql) };
+ var _arg = Substitute.For();
+ _arg.Name.Returns("Sql");
+ _arg.Value.Returns(sql);
+ _arg.GetValueAsSystemType().Returns(sql);
+ var sqlArg = new IArgument[]
+ {
+ _arg
+ };
 #pragma warning restore CS0252, CS0253
- var args = new RuntimeArgumentCollection(sqlArg, new StageArgs(LoadStage.AdjustRaw, db, dir));
+ var args = new RuntimeArgumentCollection(sqlArg, new StageArgs(LoadStage.AdjustRaw, db, dir));
- pt = Mock.Of(x =>
- x.Path == typeof(ExecuteSqlMutilation).FullName &&
- x.GetAllArguments() == sqlArg
- );
+ var pt = Substitute.For();
+ pt.Path.Returns(typeof(ExecuteSqlMutilation).FullName);
+ pt.GetAllArguments().Returns(sqlArg);
- task = new MutilateDataTablesRuntimeTask(pt,args,CatalogueRepository.MEF);
-
- task.Check(new ThrowImmediatelyCheckNotifier());
- HICDatabaseConfiguration configuration = new HICDatabaseConfiguration(db.Server);
+ IRuntimeTask task = new MutilateDataTablesRuntimeTask(pt, args);
- var job = new ThrowImmediatelyDataLoadJob();
-
- job.RegularTablesToLoad = new List {ti};
- job.LookupTablesToLoad = new List();
- job.Configuration = configuration;
-
- var ex = Assert.Throws(()=>task.Run(job, new GracefulCancellationToken()));
+ task.Check(ThrowImmediatelyCheckNotifier.Quiet);
+ var configuration = new HICDatabaseConfiguration(db.Server);
- StringAssert.Contains("Mutilate failed",ex.Message);
- StringAssert.Contains("Failed to find a TableInfo in the load with ID 0",ex.InnerException.Message);
+ var job = new ThrowImmediatelyDataLoadJob
+ {
+ RegularTablesToLoad = new List { ti },
+ LookupTablesToLoad = new List(),
+ Configuration = configuration
+ };
- task.LoadCompletedSoDispose(Core.DataLoad.ExitCodeType.Success,new ThrowImmediatelyDataLoadEventListener());
- }
+ var ex = Assert.Throws(() => task.Run(job, new GracefulCancellationToken()));
- [TestCase(DatabaseType.MySql)]
- [TestCase(DatabaseType.MicrosoftSQLServer)]
- public void ExecuteSqlFileRuntimeTask_ValidID_CustomNamer(DatabaseType dbType)
- {
- var dt = new DataTable();
- dt.Columns.Add("Lawl");
- dt.Rows.Add(new object[] { 2 });
+ StringAssert.Contains("Mutilate failed", ex.Message);
+ StringAssert.Contains("Failed to find a TableInfo in the load with ID 0", ex.InnerException.Message);
+
+ task.LoadCompletedSoDispose(Core.DataLoad.ExitCodeType.Success, ThrowImmediatelyDataLoadEventListener.Quiet);
+ }
+
+ [TestCase(DatabaseType.MySql)]
+ [TestCase(DatabaseType.MicrosoftSQLServer)]
+ public void ExecuteSqlFileRuntimeTask_ValidID_CustomNamer(DatabaseType dbType)
+ {
+ var dt = new DataTable();
+ dt.Columns.Add("Lawl");
+ dt.Rows.Add(new object[] { 2 });
+
+ var db = GetCleanedServer(dbType);
- var db = GetCleanedServer(dbType);
+ var tbl = db.CreateTable("Fish", dt);
- var tbl = db.CreateTable("Fish", dt);
+ var tableName = "AAAAAAA";
- var tableName = "AAAAAAA";
+ Import(tbl, out var ti, out var cols);
- Import(tbl, out var ti, out var cols);
-
- FileInfo f = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, "Bob.sql"));
+ var f = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, "Bob.sql"));
- File.WriteAllText(f.FullName, @"UPDATE {T:"+ti.ID+ "} Set {C:"+cols[0].ID+ "} = 1");
+ File.WriteAllText(f.FullName, $@"UPDATE {{T:{ti.ID}}} Set {{C:{cols[0].ID}}} = 1");
- tbl.Rename(tableName);
+ tbl.Rename(tableName);
- //we renamed the table to simulate RAW, confirm TableInfo doesn't think it exists
- Assert.IsFalse(ti.Discover(DataAccessContext.InternalDataProcessing).Exists());
+ //we renamed the table to simulate RAW, confirm TableInfo doesn't think it exists
+ Assert.IsFalse(ti.Discover(DataAccessContext.InternalDataProcessing).Exists());
- var pt = Mock.Of(x => x.Path==f.FullName);
+ var pt = Substitute.For();
+ pt.Path.Returns(f.FullName);
+ var dir = LoadDirectory.CreateDirectoryStructure(new DirectoryInfo(TestContext.CurrentContext.TestDirectory),
+ "ExecuteSqlFileRuntimeTaskTests", true);
- var dir = LoadDirectory.CreateDirectoryStructure(new DirectoryInfo(TestContext.CurrentContext.TestDirectory),"ExecuteSqlFileRuntimeTaskTests", true);
+ var task = new ExecuteSqlFileRuntimeTask(pt,
+ new RuntimeArgumentCollection(Array.Empty(), new StageArgs(LoadStage.AdjustRaw, db, dir)));
- var task = new ExecuteSqlFileRuntimeTask(pt, new RuntimeArgumentCollection(new IArgument[0], new StageArgs(LoadStage.AdjustRaw, db, dir)));
+ task.Check(ThrowImmediatelyCheckNotifier.Quiet);
- task.Check(new ThrowImmediatelyCheckNotifier());
-
- //create a namer that tells the user
- var namer = RdmpMockFactory.Mock_INameDatabasesAndTablesDuringLoads(db, tableName);
-
HICDatabaseConfiguration configuration = new HICDatabaseConfiguration(db.Server,namer); + //create a namer that tells the user + var namer = RdmpMockFactory.Mock_INameDatabasesAndTablesDuringLoads(db, tableName); + var configuration = new HICDatabaseConfiguration(db.Server, namer); - IDataLoadJob job = Mock.Of(x => - x.RegularTablesToLoad == new List { ti }&& - x.LookupTablesToLoad == new List() && - x.Configuration == configuration); + var job = Substitute.For(); + job.RegularTablesToLoad.Returns(new List { ti }); + job.LookupTablesToLoad.Returns(new List()); + job.Configuration.Returns(configuration); - task.Run(job, new GracefulCancellationToken()); + task.Run(job, new GracefulCancellationToken()); - Assert.AreEqual(1, tbl.GetDataTable().Rows[0][0]); + Assert.AreEqual(1, tbl.GetDataTable().Rows[0][0]); - tbl.Drop(); - } + tbl.Drop(); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/FixedWidthTests.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/FixedWidthTests.cs index d30028c97e..da8d39e156 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/FixedWidthTests.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/FixedWidthTests.cs @@ -5,7 +5,6 @@ // You should have received a copy of the GNU General Public License along with RDMP. If not, see . using System; -using System.Data; using System.IO; using System.Linq; using System.Text.RegularExpressions; @@ -17,212 +16,216 @@ using Rdmp.Core.DataLoad.Modules.Exceptions; using Tests.Common; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration; + +public class FixedWidthTests : DatabaseTests { - public class FixedWidthTests :DatabaseTests + private static FixedWidthFormatFile CreateFormatFile() { - private FixedWidthFormatFile CreateFormatFile() - { - FileInfo fileInfo = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory,@"FixedWidthFormat.csv")); + var fileInfo = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, @"FixedWidthFormat.csv")); - File.WriteAllText(fileInfo.FullName, LoadDirectory.ExampleFixedWidthFormatFileContents); - - Assert.IsTrue(fileInfo.Exists); + File.WriteAllText(fileInfo.FullName, LoadDirectory.ExampleFixedWidthFormatFileContents); - return new FixedWidthFormatFile(fileInfo); - } - - [Test] - public void TestLoadingFormat() - { - FixedWidthFormatFile formatFile = CreateFormatFile(); - - Assert.AreEqual(8,formatFile.FormatColumns.Length); - - Assert.AreEqual("gmc",formatFile.FormatColumns[0].Field); - Assert.AreEqual(1, formatFile.FormatColumns[0].From); - Assert.AreEqual(7, formatFile.FormatColumns[0].To); - Assert.AreEqual(1+ formatFile.FormatColumns[0].To - formatFile.FormatColumns[0].From, formatFile.FormatColumns[0].Size); - Assert.AreEqual(7, formatFile.FormatColumns[0].Size); - - Assert.AreEqual("gp_code", formatFile.FormatColumns[1].Field); - Assert.AreEqual(8, formatFile.FormatColumns[1].From); - Assert.AreEqual(12, formatFile.FormatColumns[1].To); - Assert.AreEqual(1 + formatFile.FormatColumns[1].To - formatFile.FormatColumns[1].From, formatFile.FormatColumns[1].Size); - Assert.AreEqual(5, formatFile.FormatColumns[1].Size); - - Assert.AreEqual("surname", formatFile.FormatColumns[2].Field); - Assert.AreEqual(13, formatFile.FormatColumns[2].From); - Assert.AreEqual(32, formatFile.FormatColumns[2].To); - Assert.AreEqual(1 + formatFile.FormatColumns[2].To - formatFile.FormatColumns[2].From, formatFile.FormatColumns[2].Size); - Assert.AreEqual(20, 
formatFile.FormatColumns[2].Size); - - Assert.AreEqual("forename", formatFile.FormatColumns[3].Field); - Assert.AreEqual(33, formatFile.FormatColumns[3].From); - Assert.AreEqual(52, formatFile.FormatColumns[3].To); - Assert.AreEqual(1 + formatFile.FormatColumns[3].To - formatFile.FormatColumns[3].From, formatFile.FormatColumns[3].Size); - Assert.AreEqual(20, formatFile.FormatColumns[3].Size); - - Assert.AreEqual("initials", formatFile.FormatColumns[4].Field); - Assert.AreEqual(53, formatFile.FormatColumns[4].From); - Assert.AreEqual(55, formatFile.FormatColumns[4].To); - Assert.AreEqual(1 + formatFile.FormatColumns[4].To - formatFile.FormatColumns[4].From, formatFile.FormatColumns[4].Size); - Assert.AreEqual(3, formatFile.FormatColumns[4].Size); - - Assert.AreEqual("practice_code", formatFile.FormatColumns[5].Field); - Assert.AreEqual(56, formatFile.FormatColumns[5].From); - Assert.AreEqual(60, formatFile.FormatColumns[5].To); - Assert.AreEqual(1 + formatFile.FormatColumns[5].To - formatFile.FormatColumns[5].From, formatFile.FormatColumns[5].Size); - Assert.AreEqual(5, formatFile.FormatColumns[5].Size); - - Assert.AreEqual("date_into_practice", formatFile.FormatColumns[6].Field); - Assert.AreEqual(61, formatFile.FormatColumns[6].From); - Assert.AreEqual(68, formatFile.FormatColumns[6].To); - Assert.AreEqual(1 + formatFile.FormatColumns[6].To - formatFile.FormatColumns[6].From, formatFile.FormatColumns[6].Size); - Assert.AreEqual(8, formatFile.FormatColumns[6].Size); - Assert.AreEqual("yyyyMMdd", formatFile.FormatColumns[6].DateFormat); - - Assert.AreEqual("date_out_of_practice", formatFile.FormatColumns[7].Field); - Assert.AreEqual(69, formatFile.FormatColumns[7].From); - Assert.AreEqual(76, formatFile.FormatColumns[7].To); - Assert.AreEqual(1 + formatFile.FormatColumns[7].To - formatFile.FormatColumns[7].From, formatFile.FormatColumns[7].Size); - Assert.AreEqual(8, formatFile.FormatColumns[7].Size); - Assert.AreEqual("yyyyMMdd", formatFile.FormatColumns[7].DateFormat); - } + Assert.IsTrue(fileInfo.Exists); - [Test] - public void TestLoadingFormatThenFile() - { - FixedWidthFormatFile formatFile = CreateFormatFile(); + return new FixedWidthFormatFile(fileInfo); + } - string tempFileToCreate = Path.Combine(TestContext.CurrentContext.TestDirectory,"unitTestFixedWidthFile.txt"); + [Test] + public void TestLoadingFormat() + { + var formatFile = CreateFormatFile(); + + Assert.AreEqual(8, formatFile.FormatColumns.Length); + + Assert.AreEqual("gmc", formatFile.FormatColumns[0].Field); + Assert.AreEqual(1, formatFile.FormatColumns[0].From); + Assert.AreEqual(7, formatFile.FormatColumns[0].To); + Assert.AreEqual(1 + formatFile.FormatColumns[0].To - formatFile.FormatColumns[0].From, + formatFile.FormatColumns[0].Size); + Assert.AreEqual(7, formatFile.FormatColumns[0].Size); + + Assert.AreEqual("gp_code", formatFile.FormatColumns[1].Field); + Assert.AreEqual(8, formatFile.FormatColumns[1].From); + Assert.AreEqual(12, formatFile.FormatColumns[1].To); + Assert.AreEqual(1 + formatFile.FormatColumns[1].To - formatFile.FormatColumns[1].From, + formatFile.FormatColumns[1].Size); + Assert.AreEqual(5, formatFile.FormatColumns[1].Size); + + Assert.AreEqual("surname", formatFile.FormatColumns[2].Field); + Assert.AreEqual(13, formatFile.FormatColumns[2].From); + Assert.AreEqual(32, formatFile.FormatColumns[2].To); + Assert.AreEqual(1 + formatFile.FormatColumns[2].To - formatFile.FormatColumns[2].From, + formatFile.FormatColumns[2].Size); + Assert.AreEqual(20, formatFile.FormatColumns[2].Size); + + 
Assert.AreEqual("forename", formatFile.FormatColumns[3].Field); + Assert.AreEqual(33, formatFile.FormatColumns[3].From); + Assert.AreEqual(52, formatFile.FormatColumns[3].To); + Assert.AreEqual(1 + formatFile.FormatColumns[3].To - formatFile.FormatColumns[3].From, + formatFile.FormatColumns[3].Size); + Assert.AreEqual(20, formatFile.FormatColumns[3].Size); + + Assert.AreEqual("initials", formatFile.FormatColumns[4].Field); + Assert.AreEqual(53, formatFile.FormatColumns[4].From); + Assert.AreEqual(55, formatFile.FormatColumns[4].To); + Assert.AreEqual(1 + formatFile.FormatColumns[4].To - formatFile.FormatColumns[4].From, + formatFile.FormatColumns[4].Size); + Assert.AreEqual(3, formatFile.FormatColumns[4].Size); + + Assert.AreEqual("practice_code", formatFile.FormatColumns[5].Field); + Assert.AreEqual(56, formatFile.FormatColumns[5].From); + Assert.AreEqual(60, formatFile.FormatColumns[5].To); + Assert.AreEqual(1 + formatFile.FormatColumns[5].To - formatFile.FormatColumns[5].From, + formatFile.FormatColumns[5].Size); + Assert.AreEqual(5, formatFile.FormatColumns[5].Size); + + Assert.AreEqual("date_into_practice", formatFile.FormatColumns[6].Field); + Assert.AreEqual(61, formatFile.FormatColumns[6].From); + Assert.AreEqual(68, formatFile.FormatColumns[6].To); + Assert.AreEqual(1 + formatFile.FormatColumns[6].To - formatFile.FormatColumns[6].From, + formatFile.FormatColumns[6].Size); + Assert.AreEqual(8, formatFile.FormatColumns[6].Size); + Assert.AreEqual("yyyyMMdd", formatFile.FormatColumns[6].DateFormat); + + Assert.AreEqual("date_out_of_practice", formatFile.FormatColumns[7].Field); + Assert.AreEqual(69, formatFile.FormatColumns[7].From); + Assert.AreEqual(76, formatFile.FormatColumns[7].To); + Assert.AreEqual(1 + formatFile.FormatColumns[7].To - formatFile.FormatColumns[7].From, + formatFile.FormatColumns[7].Size); + Assert.AreEqual(8, formatFile.FormatColumns[7].Size); + Assert.AreEqual("yyyyMMdd", formatFile.FormatColumns[7].DateFormat); + } - StreamWriter streamWriter = File.CreateText(tempFileToCreate); - try - { - streamWriter.WriteLine("002644099999Akerman Frank FM 380512004040120090501"); - streamWriter.WriteLine("002705600000SHAW LENA LC 852251978100119941031"); - streamWriter.Flush(); - streamWriter.Close(); - - DataTable dataTable = formatFile.GetDataTableFromFlatFile(new FileInfo(tempFileToCreate)); - Assert.AreEqual(dataTable.Rows.Count,2); - Assert.AreEqual("0026440", dataTable.Rows[0]["gmc"]); - Assert.AreEqual("99999", dataTable.Rows[0]["gp_code"]); - Assert.AreEqual("Akerman", dataTable.Rows[0]["surname"]); - Assert.AreEqual("Frank", dataTable.Rows[0]["forename"]); - Assert.AreEqual("FM", dataTable.Rows[0]["initials"]); - Assert.AreEqual("38051", dataTable.Rows[0]["practice_code"]); - Assert.AreEqual(new DateTime(2004, 4, 1), dataTable.Rows[0]["date_into_practice"]); - Assert.AreEqual(new DateTime(2009,5,1), dataTable.Rows[0]["date_out_of_practice"]); - + [Test] + public void TestLoadingFormatThenFile() + { + var formatFile = CreateFormatFile(); - } - finally - { - File.Delete(tempFileToCreate); - } - } + var tempFileToCreate = Path.Combine(TestContext.CurrentContext.TestDirectory, "unitTestFixedWidthFile.txt"); - public enum FixedWidthTestCase + var streamWriter = File.CreateText(tempFileToCreate); + try { - CompatibleHeaders, - MisnamedHeaders, - InsufficientLengthOfCharactersInFileToLoad + streamWriter.WriteLine("002644099999Akerman Frank FM 380512004040120090501"); + streamWriter.WriteLine("002705600000SHAW LENA LC 852251978100119941031"); + streamWriter.Flush(); + 
streamWriter.Close(); + + var dataTable = formatFile.GetDataTableFromFlatFile(new FileInfo(tempFileToCreate)); + Assert.AreEqual(dataTable.Rows.Count, 2); + Assert.AreEqual("0026440", dataTable.Rows[0]["gmc"]); + Assert.AreEqual("99999", dataTable.Rows[0]["gp_code"]); + Assert.AreEqual("Akerman", dataTable.Rows[0]["surname"]); + Assert.AreEqual("Frank", dataTable.Rows[0]["forename"]); + Assert.AreEqual("FM", dataTable.Rows[0]["initials"]); + Assert.AreEqual("38051", dataTable.Rows[0]["practice_code"]); + Assert.AreEqual(new DateTime(2004, 4, 1), dataTable.Rows[0]["date_into_practice"]); + Assert.AreEqual(new DateTime(2009, 5, 1), dataTable.Rows[0]["date_out_of_practice"]); } - - [Test] - [TestCase(FixedWidthTestCase.CompatibleHeaders)] - [TestCase(FixedWidthTestCase.MisnamedHeaders)] - [TestCase(FixedWidthTestCase.InsufficientLengthOfCharactersInFileToLoad)] - public void TestHeaderMatching(FixedWidthTestCase testCase) + finally { - //Create the format file - string flatFileColumn = "myNumber"; + File.Delete(tempFileToCreate); + } + } - if (testCase == FixedWidthTestCase.MisnamedHeaders) - flatFileColumn = "chickenDippers"; + public enum FixedWidthTestCase + { + CompatibleHeaders, + MisnamedHeaders, + InsufficientLengthOfCharactersInFileToLoad + } + + [Test] + [TestCase(FixedWidthTestCase.CompatibleHeaders)] + [TestCase(FixedWidthTestCase.MisnamedHeaders)] + [TestCase(FixedWidthTestCase.InsufficientLengthOfCharactersInFileToLoad)] + public void TestHeaderMatching(FixedWidthTestCase testCase) + { + //Create the format file + var flatFileColumn = "myNumber"; - FileInfo formatFile = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory,"Format.csv")); + if (testCase == FixedWidthTestCase.MisnamedHeaders) + flatFileColumn = "chickenDippers"; - File.WriteAllText(formatFile.FullName, @"From,To,Field,Size,DateFormat -1,5," + flatFileColumn + ",5"); + var formatFile = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, "Format.csv")); + File.WriteAllText(formatFile.FullName, $@"From,To,Field,Size,DateFormat +1,5,{flatFileColumn},5"); - //Create the working directory that will be processed - var workingDir = new DirectoryInfo(TestContext.CurrentContext.TestDirectory); - var parentDir = workingDir.CreateSubdirectory("FixedWidthTests"); - DirectoryInfo toCleanup = parentDir.GetDirectories().SingleOrDefault(d => d.Name.Equals("TestHeaderMatching")); - if (toCleanup != null) - toCleanup.Delete(true); + //Create the working directory that will be processed + var workingDir = new DirectoryInfo(TestContext.CurrentContext.TestDirectory); + var parentDir = workingDir.CreateSubdirectory("FixedWidthTests"); - var loadDirectory = LoadDirectory.CreateDirectoryStructure(parentDir, "TestHeaderMatching"); + var toCleanup = parentDir.GetDirectories().SingleOrDefault(d => d.Name.Equals("TestHeaderMatching")); + toCleanup?.Delete(true); + var loadDirectory = LoadDirectory.CreateDirectoryStructure(parentDir, "TestHeaderMatching"); - //create the file we will be trying to load - if(testCase == FixedWidthTestCase.InsufficientLengthOfCharactersInFileToLoad) - File.WriteAllText(Path.Combine(loadDirectory.ForLoading.FullName, "file.txt"), @"12345 -12"); - else - File.WriteAllText(Path.Combine(loadDirectory.ForLoading.FullName , "file.txt"),@"12345 + + //create the file we will be trying to load + File.WriteAllText(Path.Combine(loadDirectory.ForLoading.FullName, "file.txt"), + testCase == FixedWidthTestCase.InsufficientLengthOfCharactersInFileToLoad + ? 
@"12345 +12" + : @"12345 67890"); - var db = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer); + var db = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer); - var attacher = new FixedWidthAttacher(); - attacher.Initialize(loadDirectory, db); - attacher.PathToFormatFile = formatFile; - attacher.TableName = "TestHeaderMatching_Compatible"; - attacher.FilePattern = "*.txt"; + var attacher = new FixedWidthAttacher(); + attacher.Initialize(loadDirectory, db); + attacher.PathToFormatFile = formatFile; + attacher.TableName = "TestHeaderMatching_Compatible"; + attacher.FilePattern = "*.txt"; - using (var con = db.Server.GetConnection()) - { - con.Open(); - db.Server.GetCommand("CREATE TABLE TestHeaderMatching_Compatible( myNumber int)", con).ExecuteNonQuery(); - } + using (var con = db.Server.GetConnection()) + { + con.Open(); + db.Server.GetCommand("CREATE TABLE TestHeaderMatching_Compatible( myNumber int)", con).ExecuteNonQuery(); + } - var table = db.ExpectTable("TestHeaderMatching_Compatible"); - - Assert.IsTrue(table.Exists()); - Assert.AreEqual(0, table.GetRowCount()); - try - { - Regex errorRegex; - Exception ex; - - switch (testCase) - { - //Success Case - case FixedWidthTestCase.CompatibleHeaders: - attacher.Attach(new ThrowImmediatelyDataLoadJob(), new GracefulCancellationToken()); - Assert.AreEqual(2, table.GetRowCount()); - return;//Return - - - //Error cases, set the expected error result - case FixedWidthTestCase.MisnamedHeaders: - errorRegex = new Regex( - @"Format file \(.*Format.csv\) indicated there would be a header called 'chickenDippers' but the column did not appear in the RAW database table \(Columns in RAW were myNumber\)"); - ex = Assert.Throws(() => attacher.Attach(new ThrowImmediatelyDataLoadJob(), new GracefulCancellationToken())); - break; - case FixedWidthTestCase.InsufficientLengthOfCharactersInFileToLoad: - errorRegex = new Regex( - @"Error on line 2 of file file.txt, the format file \(.*Format.csv\) specified that a column myNumber would be found between character positions 1 and 5 but the current line is only 2 characters long"); - ex = Assert.Throws(() => attacher.Attach(new ThrowImmediatelyDataLoadJob(), new GracefulCancellationToken())); - break; - default: - throw new ArgumentOutOfRangeException("testCase"); - } - - //Assert the expected error result is the real one - Assert.IsTrue(errorRegex.IsMatch(ex.Message)); - + var table = db.ExpectTable("TestHeaderMatching_Compatible"); - } - finally + Assert.IsTrue(table.Exists()); + Assert.AreEqual(0, table.GetRowCount()); + try + { + Regex errorRegex; + Exception ex; + + switch (testCase) { - table.Drop(); + //Success Case + case FixedWidthTestCase.CompatibleHeaders: + attacher.Attach(new ThrowImmediatelyDataLoadJob(), new GracefulCancellationToken()); + Assert.AreEqual(2, table.GetRowCount()); + return; //Return + + + //Error cases, set the expected error result + case FixedWidthTestCase.MisnamedHeaders: + errorRegex = new Regex( + @"Format file \(.*Format.csv\) indicated there would be a header called 'chickenDippers' but the column did not appear in the RAW database table \(Columns in RAW were myNumber\)"); + ex = Assert.Throws(() => + attacher.Attach(new ThrowImmediatelyDataLoadJob(), new GracefulCancellationToken())); + break; + case FixedWidthTestCase.InsufficientLengthOfCharactersInFileToLoad: + errorRegex = new Regex( + @"Error on line 2 of file file.txt, the format file \(.*Format.csv\) specified that a column myNumber would be found between character positions 1 and 5 but the current line is 
only 2 characters long"); + ex = Assert.Throws(() => + attacher.Attach(new ThrowImmediatelyDataLoadJob(), new GracefulCancellationToken())); + break; + default: + throw new ArgumentOutOfRangeException(nameof(testCase)); } + + //Assert the expected error result is the real one + Assert.IsTrue(errorRegex.IsMatch(ex.Message)); + } + finally + { + table.Drop(); } } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/FlatFileAttacherTests.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/FlatFileAttacherTests.cs index db533b4461..df66271a0a 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/FlatFileAttacherTests.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/FlatFileAttacherTests.cs @@ -21,472 +21,464 @@ using Rdmp.Core.DataLoad.Engine.Job; using Rdmp.Core.DataLoad.Modules.Attachers; using Rdmp.Core.DataLoad.Modules.Exceptions; -using ReusableLibraryCode.Progress; +using Rdmp.Core.ReusableLibraryCode.Progress; using Tests.Common; using TypeGuesser; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration; + +public class FlatFileAttacherTests : DatabaseTests { - public class FlatFileAttacherTests : DatabaseTests + private LoadDirectory _loadDirectory; + private DirectoryInfo _parentDir; + private DiscoveredDatabase _database; + private DiscoveredTable _table; + + [SetUp] + protected override void SetUp() { - private LoadDirectory LoadDirectory; - DirectoryInfo parentDir; - private DiscoveredDatabase _database; - private DiscoveredTable _table; + base.SetUp(); + + var workingDir = new DirectoryInfo(TestContext.CurrentContext.TestDirectory); + _parentDir = workingDir.CreateSubdirectory("FlatFileAttacherTests"); + + var toCleanup = _parentDir.GetDirectories().SingleOrDefault(d => d.Name.Equals("Test_CSV_Attachment")); + toCleanup?.Delete(true); + + _loadDirectory = LoadDirectory.CreateDirectoryStructure(_parentDir, "Test_CSV_Attachment"); - [SetUp] - protected override void SetUp() + // create a separate builder for setting an initial catalogue on (need to figure out how best to stop child classes changing ServerICan... as this then causes TearDown to fail) + _database = GetCleanedServer(DatabaseType.MicrosoftSQLServer); + + using (var con = _database.Server.GetConnection()) { - base.SetUp(); - - var workingDir = new DirectoryInfo(TestContext.CurrentContext.TestDirectory); - parentDir = workingDir.CreateSubdirectory("FlatFileAttacherTests"); - - DirectoryInfo toCleanup = parentDir.GetDirectories().SingleOrDefault(d => d.Name.Equals("Test_CSV_Attachment")); - if(toCleanup != null) - toCleanup.Delete(true); - - LoadDirectory = LoadDirectory.CreateDirectoryStructure(parentDir, "Test_CSV_Attachment"); - - // create a separate builder for setting an initial catalog on (need to figure out how best to stop child classes changing ServerICan... 
as this then causes TearDown to fail) - _database = GetCleanedServer(DatabaseType.MicrosoftSQLServer); - - using (var con = _database.Server.GetConnection()) - { - con.Open(); - - var cmdCreateTable = _database.Server.GetCommand("CREATE Table "+_database.GetRuntimeName()+"..Bob([name] [varchar](500),[name2] [varchar](500))" ,con); - cmdCreateTable.ExecuteNonQuery(); - } - - _table = _database.ExpectTable("Bob"); + con.Open(); + var cmdCreateTable = _database.Server.GetCommand( + $"CREATE Table {_database.GetRuntimeName()}..Bob([name] [varchar](500),[name2] [varchar](500))", con); + cmdCreateTable.ExecuteNonQuery(); } - [Test] - [TestCase(",",false)] - [TestCase("|",false)]//wrong separator - [TestCase(",",true)] - public void Test_CSV_Attachment(string separator, bool overrideHeaders) + _table = _database.ExpectTable("Bob"); + } + + [Test] + [TestCase(",", false)] + [TestCase("|", false)] //wrong separator + [TestCase(",", true)] + public void Test_CSV_Attachment(string separator, bool overrideHeaders) + { + var filename = Path.Combine(_loadDirectory.ForLoading.FullName, "bob.csv"); + var sw = new StreamWriter(filename); + + sw.WriteLine("name,name2"); + sw.WriteLine("Bob,Munchousain"); + sw.WriteLine("Franky,Hollyw9ood"); + + sw.Flush(); + sw.Close(); + sw.Dispose(); + + + var filename2 = Path.Combine(_loadDirectory.ForLoading.FullName, "bob2.csv"); + var sw2 = new StreamWriter(filename2); + + sw2.WriteLine("name,name2"); + sw2.WriteLine("Manny2,Ok"); + + sw2.Flush(); + sw2.Close(); + sw2.Dispose(); + + var attacher = new AnySeparatorFileAttacher(); + attacher.Initialize(_loadDirectory, _database); + attacher.Separator = separator; + attacher.FilePattern = "bob*"; + attacher.TableName = "Bob"; + + if (overrideHeaders) { - - string filename = Path.Combine(LoadDirectory.ForLoading.FullName, "bob.csv"); - var sw = new StreamWriter(filename); - - sw.WriteLine("name,name2"); - sw.WriteLine("Bob,Munchousain"); - sw.WriteLine("Franky,Hollyw9ood"); - - sw.Flush(); - sw.Close(); - sw.Dispose(); - - - string filename2 = Path.Combine(LoadDirectory.ForLoading.FullName, "bob2.csv"); - var sw2 = new StreamWriter(filename2); - - sw2.WriteLine("name,name2"); - sw2.WriteLine("Manny2,Ok"); - - sw2.Flush(); - sw2.Close(); - sw2.Dispose(); - - var attacher = new AnySeparatorFileAttacher(); - attacher.Initialize(LoadDirectory, _database); - attacher.Separator = separator; - attacher.FilePattern = "bob*"; - attacher.TableName = "Bob"; - - if (overrideHeaders) - { - attacher.ForceHeaders = "name,name2"; - attacher.ForceHeadersReplacesFirstLineInFile = true; - } - - //Case when you are using the wrong separator - if(separator == "|") - { - - var ex = Assert.Throws(()=>attacher.Attach(new ThrowImmediatelyDataLoadJob(), new GracefulCancellationToken())); - - Assert.IsNotNull(ex.InnerException); - StringAssert.StartsWith("Your separator does not appear in the headers line of your file (bob.csv) but the separator ',' does", ex.InnerException.Message); - return; - } - - //other cases (i.e. 
correct separator) - attacher.Attach(new ThrowImmediatelyDataLoadJob(), new GracefulCancellationToken()); - - var table = _database.ExpectTable("Bob"); - Assert.IsTrue(table.Exists()); - - table.DiscoverColumn("name"); - table.DiscoverColumn("name2"); - - using (var con = _database.Server.GetConnection()) - { - - con.Open(); - var r = _database.Server.GetCommand("Select * from Bob", con).ExecuteReader(); - Assert.IsTrue(r.Read()); - Assert.AreEqual("Bob",r["name"]); - Assert.AreEqual("Munchousain", r["name2"]); - - Assert.IsTrue(r.Read()); - Assert.AreEqual("Franky", r["name"]); - Assert.AreEqual("Hollyw9ood", r["name2"]); - - Assert.IsTrue(r.Read()); - Assert.AreEqual("Manny2", r["name"]); - Assert.AreEqual("Ok", r["name2"]); - } - - attacher.LoadCompletedSoDispose(ExitCodeType.Success,new ThrowImmediatelyDataLoadEventListener()); - - File.Delete(filename); + attacher.ForceHeaders = "name,name2"; + attacher.ForceHeadersReplacesFirstLineInFile = true; } - - - [Test] - public void Test_ExplicitDateTimeFormat_Attachment() + //Case when you are using the wrong separator + if (separator == "|") { - string filename = Path.Combine(LoadDirectory.ForLoading.FullName, "bob.csv"); - var sw = new StreamWriter(filename); - - sw.WriteLine("name,name2"); - sw.WriteLine("Bob,20011301"); - sw.WriteLine("Franky,20021301"); - - sw.Flush(); - sw.Close(); - sw.Dispose(); - - var attacher = new AnySeparatorFileAttacher(); - attacher.Initialize(LoadDirectory, _database); - attacher.Separator = ","; - attacher.FilePattern = "bob*"; - attacher.TableName = "Bob"; - attacher.ExplicitDateTimeFormat = "yyyyddMM"; - - - var table = _database.ExpectTable("Bob"); - table.Truncate(); - - Assert.IsTrue(table.Exists()); - table.DiscoverColumn("name"); - var name2 = table.DiscoverColumn("name2"); - name2.DataType.AlterTypeTo("datetime2"); - - //other cases (i.e. correct separator) - attacher.Attach(new ThrowImmediatelyDataLoadJob(), new GracefulCancellationToken()); - - using (var con = _database.Server.GetConnection()) - { - - con.Open(); - var r = _database.Server.GetCommand("Select * from Bob", con).ExecuteReader(); - Assert.IsTrue(r.Read()); - Assert.AreEqual("Bob",r["name"]); - Assert.AreEqual(new DateTime(2001,01,13), r["name2"]); - - Assert.IsTrue(r.Read()); - Assert.AreEqual("Franky", r["name"]); - Assert.AreEqual(new DateTime(2002,01,13), r["name2"]); - } - - attacher.LoadCompletedSoDispose(ExitCodeType.Success,new ThrowImmediatelyDataLoadEventListener()); - - File.Delete(filename); + var ex = Assert.Throws(() => + attacher.Attach(new ThrowImmediatelyDataLoadJob(), new GracefulCancellationToken())); + + Assert.IsNotNull(ex.InnerException); + StringAssert.StartsWith( + "Your separator does not appear in the headers line of your file (bob.csv) but the separator ',' does", + ex.InnerException.Message); + return; } - [Test] - public void TabTestWithOverrideHeaders() + //other cases (i.e. 
correct separator) + attacher.Attach(new ThrowImmediatelyDataLoadJob(), new GracefulCancellationToken()); + + var table = _database.ExpectTable("Bob"); + Assert.IsTrue(table.Exists()); + + table.DiscoverColumn("name"); + table.DiscoverColumn("name2"); + + using (var con = _database.Server.GetConnection()) { - string filename = Path.Combine(LoadDirectory.ForLoading.FullName, "bob.csv"); - var sw = new StreamWriter(filename); + con.Open(); + var r = _database.Server.GetCommand("Select * from Bob", con).ExecuteReader(); + Assert.IsTrue(r.Read()); + Assert.AreEqual("Bob", r["name"]); + Assert.AreEqual("Munchousain", r["name2"]); + + Assert.IsTrue(r.Read()); + Assert.AreEqual("Franky", r["name"]); + Assert.AreEqual("Hollyw9ood", r["name2"]); + + Assert.IsTrue(r.Read()); + Assert.AreEqual("Manny2", r["name"]); + Assert.AreEqual("Ok", r["name2"]); + } - sw.WriteLine("Face\tBasher"); - sw.WriteLine("Candy\tCrusher"); + attacher.LoadCompletedSoDispose(ExitCodeType.Success, ThrowImmediatelyDataLoadEventListener.Quiet); - sw.Flush(); - sw.Close(); - sw.Dispose(); + File.Delete(filename); + } - var attacher = new AnySeparatorFileAttacher(); - attacher.Initialize(LoadDirectory, _database); - attacher.Separator = "\\t"; - attacher.FilePattern = "bob*"; - attacher.TableName = "Bob"; - attacher.ForceHeaders = "name\tname2"; - var exitCode = attacher.Attach(new ThrowImmediatelyDataLoadJob(), new GracefulCancellationToken()); - Assert.AreEqual(ExitCodeType.Success,exitCode); + [Test] + public void Test_ExplicitDateTimeFormat_Attachment() + { + var filename = Path.Combine(_loadDirectory.ForLoading.FullName, "bob.csv"); + var sw = new StreamWriter(filename); - using (var con = _database.Server.GetConnection()) - { + sw.WriteLine("name,name2"); + sw.WriteLine("Bob,20011301"); + sw.WriteLine("Franky,20021301"); - con.Open(); - var r = _database.Server.GetCommand("Select name,name2 from Bob", con).ExecuteReader(); - Assert.IsTrue(r.Read()); - Assert.AreEqual("Face", r["name"]); - Assert.AreEqual("Basher", r["name2"]); + sw.Flush(); + sw.Close(); + sw.Dispose(); - Assert.IsTrue(r.Read()); - Assert.AreEqual("Candy", r["name"]); - Assert.AreEqual("Crusher", r["name2"]); - } + var attacher = new AnySeparatorFileAttacher(); + attacher.Initialize(_loadDirectory, _database); + attacher.Separator = ","; + attacher.FilePattern = "bob*"; + attacher.TableName = "Bob"; + attacher.ExplicitDateTimeFormat = "yyyyddMM"; - attacher.LoadCompletedSoDispose(ExitCodeType.Success, new ThrowImmediatelyDataLoadEventListener()); - File.Delete(filename); + var table = _database.ExpectTable("Bob"); + table.Truncate(); + Assert.IsTrue(table.Exists()); + table.DiscoverColumn("name"); + var name2 = table.DiscoverColumn("name2"); + name2.DataType.AlterTypeTo("datetime2"); - } + //other cases (i.e. 
correct separator) + attacher.Attach(new ThrowImmediatelyDataLoadJob(), new GracefulCancellationToken()); - [TestCase(true)] - [TestCase(false)] - public void TabTestWithOverrideHeaders_IncludePath(bool columnExistsInRaw) + using (var con = _database.Server.GetConnection()) { - string filename = Path.Combine(LoadDirectory.ForLoading.FullName, "bob.csv"); - var sw = new StreamWriter(filename); + con.Open(); + var r = _database.Server.GetCommand("Select * from Bob", con).ExecuteReader(); + Assert.IsTrue(r.Read()); + Assert.AreEqual("Bob", r["name"]); + Assert.AreEqual(new DateTime(2001, 01, 13), r["name2"]); + + Assert.IsTrue(r.Read()); + Assert.AreEqual("Franky", r["name"]); + Assert.AreEqual(new DateTime(2002, 01, 13), r["name2"]); + } + + attacher.LoadCompletedSoDispose(ExitCodeType.Success, ThrowImmediatelyDataLoadEventListener.Quiet); - sw.WriteLine("Face\tBasher"); - sw.WriteLine("Candy\tCrusher"); + File.Delete(filename); + } - sw.Flush(); - sw.Close(); - sw.Dispose(); + [Test] + public void TabTestWithOverrideHeaders() + { + var filename = Path.Combine(_loadDirectory.ForLoading.FullName, "bob.csv"); + var sw = new StreamWriter(filename); - if (columnExistsInRaw) - _table.AddColumn("FilePath",new DatabaseTypeRequest(typeof(string),500),true,30); + sw.WriteLine("Face\tBasher"); + sw.WriteLine("Candy\tCrusher"); - var attacher = new AnySeparatorFileAttacher(); - attacher.Initialize(LoadDirectory, _database); - attacher.Separator = "\\t"; - attacher.FilePattern = "bob*"; - attacher.TableName = "Bob"; - attacher.ForceHeaders = "name\tname2"; - attacher.AddFilenameColumnNamed = "FilePath"; + sw.Flush(); + sw.Close(); + sw.Dispose(); - if (!columnExistsInRaw) - { - var ex = Assert.Throws(()=>attacher.Attach(new ThrowImmediatelyDataLoadJob(), new GracefulCancellationToken())); - Assert.AreEqual("AddFilenameColumnNamed is set to 'FilePath' but the column did not exist in RAW",ex.InnerException.Message); - return; - } + var attacher = new AnySeparatorFileAttacher(); + attacher.Initialize(_loadDirectory, _database); + attacher.Separator = "\\t"; + attacher.FilePattern = "bob*"; + attacher.TableName = "Bob"; + attacher.ForceHeaders = "name\tname2"; + var exitCode = attacher.Attach(new ThrowImmediatelyDataLoadJob(), new GracefulCancellationToken()); + Assert.AreEqual(ExitCodeType.Success, exitCode); - var exitCode = attacher.Attach(new ThrowImmediatelyDataLoadJob(), new GracefulCancellationToken()); - Assert.AreEqual(ExitCodeType.Success, exitCode); + using (var con = _database.Server.GetConnection()) + { + con.Open(); + var r = _database.Server.GetCommand("Select name,name2 from Bob", con).ExecuteReader(); + Assert.IsTrue(r.Read()); + Assert.AreEqual("Face", r["name"]); + Assert.AreEqual("Basher", r["name2"]); + + Assert.IsTrue(r.Read()); + Assert.AreEqual("Candy", r["name"]); + Assert.AreEqual("Crusher", r["name2"]); + } - using (var con = _database.Server.GetConnection()) - { + attacher.LoadCompletedSoDispose(ExitCodeType.Success, ThrowImmediatelyDataLoadEventListener.Quiet); - con.Open(); - var r = _database.Server.GetCommand("Select name,name2,FilePath from Bob", con).ExecuteReader(); - Assert.IsTrue(r.Read()); - Assert.AreEqual("Face", r["name"]); - Assert.AreEqual("Basher", r["name2"]); - Assert.AreEqual(filename, r["FilePath"]); + File.Delete(filename); + } - Assert.IsTrue(r.Read()); - Assert.AreEqual("Candy", r["name"]); - Assert.AreEqual("Crusher", r["name2"]); - } + [TestCase(true)] + [TestCase(false)] + public void TabTestWithOverrideHeaders_IncludePath(bool columnExistsInRaw) + { + 
var filename = Path.Combine(_loadDirectory.ForLoading.FullName, "bob.csv"); + var sw = new StreamWriter(filename); - attacher.LoadCompletedSoDispose(ExitCodeType.Success, new ThrowImmediatelyDataLoadEventListener()); + sw.WriteLine("Face\tBasher"); + sw.WriteLine("Candy\tCrusher"); - File.Delete(filename); + sw.Flush(); + sw.Close(); + sw.Dispose(); + if (columnExistsInRaw) + _table.AddColumn("FilePath", new DatabaseTypeRequest(typeof(string), 500), true, 30); + var attacher = new AnySeparatorFileAttacher(); + attacher.Initialize(_loadDirectory, _database); + attacher.Separator = "\\t"; + attacher.FilePattern = "bob*"; + attacher.TableName = "Bob"; + attacher.ForceHeaders = "name\tname2"; + attacher.AddFilenameColumnNamed = "FilePath"; + + if (!columnExistsInRaw) + { + var ex = Assert.Throws(() => + attacher.Attach(new ThrowImmediatelyDataLoadJob(), new GracefulCancellationToken())); + Assert.AreEqual("AddFilenameColumnNamed is set to 'FilePath' but the column did not exist in RAW", + ex.InnerException.Message); + return; } - [TestCase(true)] - [TestCase(false)] - public void TestTableInfo(bool usenamer) + var exitCode = attacher.Attach(new ThrowImmediatelyDataLoadJob(), new GracefulCancellationToken()); + Assert.AreEqual(ExitCodeType.Success, exitCode); + + using (var con = _database.Server.GetConnection()) { - string filename = Path.Combine(LoadDirectory.ForLoading.FullName, "bob.csv"); - var sw = new StreamWriter(filename); + con.Open(); + var r = _database.Server.GetCommand("Select name,name2,FilePath from Bob", con).ExecuteReader(); + Assert.IsTrue(r.Read()); + Assert.AreEqual("Face", r["name"]); + Assert.AreEqual("Basher", r["name2"]); + Assert.AreEqual(filename, r["FilePath"]); + + Assert.IsTrue(r.Read()); + Assert.AreEqual("Candy", r["name"]); + Assert.AreEqual("Crusher", r["name2"]); + } - sw.WriteLine("name,name2"); - sw.WriteLine("Bob,Munchousain"); - sw.WriteLine("Franky,Hollyw9ood"); + attacher.LoadCompletedSoDispose(ExitCodeType.Success, ThrowImmediatelyDataLoadEventListener.Quiet); - sw.Flush(); - sw.Close(); - sw.Dispose(); + File.Delete(filename); + } - Import(_table, out var ti, out var cols); - var attacher = new AnySeparatorFileAttacher(); - attacher.Initialize(LoadDirectory, _database); - attacher.Separator = ","; - attacher.FilePattern = "bob*"; - attacher.TableToLoad = ti; + [TestCase(true)] + [TestCase(false)] + public void TestTableInfo(bool usenamer) + { + var filename = Path.Combine(_loadDirectory.ForLoading.FullName, "bob.csv"); + var sw = new StreamWriter(filename); - INameDatabasesAndTablesDuringLoads namer = null; + sw.WriteLine("name,name2"); + sw.WriteLine("Bob,Munchousain"); + sw.WriteLine("Franky,Hollyw9ood"); - if (usenamer) - { - _table.Rename("AAA"); - namer = RdmpMockFactory.Mock_INameDatabasesAndTablesDuringLoads(_database, "AAA"); - } + sw.Flush(); + sw.Close(); + sw.Dispose(); - var job = new ThrowImmediatelyDataLoadJob(new HICDatabaseConfiguration(_database.Server, namer), ti); + Import(_table, out var ti, out _); - var exitCode = attacher.Attach(job, new GracefulCancellationToken()); - Assert.AreEqual(ExitCodeType.Success, exitCode); - - using (var con = _database.Server.GetConnection()) - { + var attacher = new AnySeparatorFileAttacher(); + attacher.Initialize(_loadDirectory, _database); + attacher.Separator = ","; + attacher.FilePattern = "bob*"; + attacher.TableToLoad = ti; - con.Open(); - var r = _database.Server.GetCommand("Select name,name2 from " + _table.GetRuntimeName(), con).ExecuteReader(); - Assert.IsTrue(r.Read()); - 
Assert.AreEqual("Bob", r["name"]); - Assert.AreEqual("Munchousain", r["name2"]); + INameDatabasesAndTablesDuringLoads namer = null; - Assert.IsTrue(r.Read()); - Assert.AreEqual("Franky", r["name"]); - Assert.AreEqual("Hollyw9ood", r["name2"]); - } + if (usenamer) + { + _table.Rename("AAA"); + namer = RdmpMockFactory.Mock_INameDatabasesAndTablesDuringLoads(_database, "AAA"); + } - attacher.LoadCompletedSoDispose(ExitCodeType.Success, new ThrowImmediatelyDataLoadEventListener()); + var job = new ThrowImmediatelyDataLoadJob(new HICDatabaseConfiguration(_database.Server, namer), ti); - File.Delete(filename); + var exitCode = attacher.Attach(job, new GracefulCancellationToken()); + Assert.AreEqual(ExitCodeType.Success, exitCode); + using (var con = _database.Server.GetConnection()) + { + con.Open(); + var r = _database.Server.GetCommand($"Select name,name2 from {_table.GetRuntimeName()}", con) + .ExecuteReader(); + Assert.IsTrue(r.Read()); + Assert.AreEqual("Bob", r["name"]); + Assert.AreEqual("Munchousain", r["name2"]); + + Assert.IsTrue(r.Read()); + Assert.AreEqual("Franky", r["name"]); + Assert.AreEqual("Hollyw9ood", r["name2"]); } + attacher.LoadCompletedSoDispose(ExitCodeType.Success, ThrowImmediatelyDataLoadEventListener.Quiet); + + File.Delete(filename); + } + + + [Test] + public void Test_FlatFileAttacher_IgnoreColumns() + { + var filename = Path.Combine(_loadDirectory.ForLoading.FullName, "bob.csv"); + var sw = new StreamWriter(filename); + + sw.WriteLine("name,name2,address"); + sw.WriteLine("Bob,Munchousain,\"67, franklin\""); + sw.WriteLine("Franky,Hollyw9ood,32 dodgery"); + + sw.Flush(); + sw.Close(); + sw.Dispose(); + Import(_table, out var ti, out _); - [Test] - public void Test_FlatFileAttcher_IgnoreColumns() + var attacher = new AnySeparatorFileAttacher { - string filename = Path.Combine(LoadDirectory.ForLoading.FullName, "bob.csv"); - var sw = new StreamWriter(filename); + Separator = ",", + FilePattern = "bob*", + TableToLoad = ti, + IgnoreColumns = "address" + }; + attacher.Initialize(_loadDirectory, _database); - sw.WriteLine("name,name2,address"); - sw.WriteLine("Bob,Munchousain,\"67, franklin\""); - sw.WriteLine("Franky,Hollyw9ood,32 dodgery"); + var job = new ThrowImmediatelyDataLoadJob(new HICDatabaseConfiguration(_database.Server, null), ti); - sw.Flush(); - sw.Close(); - sw.Dispose(); + var exitCode = attacher.Attach(job, new GracefulCancellationToken()); + Assert.AreEqual(ExitCodeType.Success, exitCode); - Import(_table, out var ti, out var cols); + using (var con = _database.Server.GetConnection()) + { + con.Open(); + var r = _database.Server.GetCommand($"Select name,name2 from {_table.GetRuntimeName()}", con) + .ExecuteReader(); + Assert.IsTrue(r.Read()); + Assert.AreEqual("Bob", r["name"]); + Assert.AreEqual("Munchousain", r["name2"]); + + Assert.IsTrue(r.Read()); + Assert.AreEqual("Franky", r["name"]); + Assert.AreEqual("Hollyw9ood", r["name2"]); + } - var attacher = new AnySeparatorFileAttacher(); - attacher.Initialize(LoadDirectory, _database); - attacher.Separator = ","; - attacher.FilePattern = "bob*"; - attacher.TableToLoad = ti; - attacher.IgnoreColumns = "address"; - - var job = new ThrowImmediatelyDataLoadJob(new HICDatabaseConfiguration(_database.Server, null), ti); + attacher.LoadCompletedSoDispose(ExitCodeType.Success, ThrowImmediatelyDataLoadEventListener.Quiet); + + File.Delete(filename); + } - var exitCode = attacher.Attach(job, new GracefulCancellationToken()); - Assert.AreEqual(ExitCodeType.Success, exitCode); + [TestCase(DatabaseType.MySql, 
"27/01/2001", "en-GB", "en-GB")] + [TestCase(DatabaseType.MySql, "27/01/2001", "en-GB", "en-us")] + [TestCase(DatabaseType.MySql, "01/27/2001", "en-us", "en-us")] + [TestCase(DatabaseType.MySql, "01/27/2001", "en-us", "en-GB")] + [TestCase(DatabaseType.MicrosoftSQLServer, "27/01/2001", "en-GB", "en-GB")] + [TestCase(DatabaseType.MicrosoftSQLServer, "27/01/2001", "en-GB", "en-us")] + [TestCase(DatabaseType.MicrosoftSQLServer, "01/27/2001", "en-us", "en-us")] + [TestCase(DatabaseType.MicrosoftSQLServer, "01/27/2001", "en-us", "en-GB")] + [TestCase(DatabaseType.Oracle, "27/01/2001", "en-GB", "en-GB")] + [TestCase(DatabaseType.Oracle, "27/01/2001", "en-GB", "en-us")] + [TestCase(DatabaseType.Oracle, "01/27/2001", "en-us", "en-us")] + [TestCase(DatabaseType.Oracle, "01/27/2001", "en-us", "en-GB")] + public void Test_FlatFileAttacher_AmbiguousDates(DatabaseType type, string val, string attacherCulture, + string threadCulture) + { + Thread.CurrentThread.CurrentCulture = new CultureInfo(threadCulture); - using (var con = _database.Server.GetConnection()) - { + var filename = Path.Combine(_loadDirectory.ForLoading.FullName, "bob.csv"); + var sw = new StreamWriter(filename); - con.Open(); - var r = _database.Server.GetCommand("Select name,name2 from " + _table.GetRuntimeName(), con).ExecuteReader(); - Assert.IsTrue(r.Read()); - Assert.AreEqual("Bob", r["name"]); - Assert.AreEqual("Munchousain", r["name2"]); + sw.WriteLine("dob"); + sw.WriteLine(val); - Assert.IsTrue(r.Read()); - Assert.AreEqual("Franky", r["name"]); - Assert.AreEqual("Hollyw9ood", r["name2"]); - } + sw.Flush(); + sw.Close(); + sw.Dispose(); - attacher.LoadCompletedSoDispose(ExitCodeType.Success, new ThrowImmediatelyDataLoadEventListener()); + var db = GetCleanedServer(type); - File.Delete(filename); + var tbl = + db.CreateTable("AmbiguousDatesTestTable", + new[] { new DatabaseColumnRequest("dob", new DatabaseTypeRequest(typeof(DateTime))) } + ); - } - - [TestCase(DatabaseType.MySql,"27/01/2001","en-GB","en-GB")] - [TestCase(DatabaseType.MySql,"27/01/2001","en-GB","en-us")] - [TestCase(DatabaseType.MySql,"01/27/2001","en-us", "en-us")] - [TestCase(DatabaseType.MySql,"01/27/2001","en-us", "en-GB")] - - [TestCase(DatabaseType.MicrosoftSQLServer,"27/01/2001","en-GB","en-GB")] - [TestCase(DatabaseType.MicrosoftSQLServer,"27/01/2001","en-GB","en-us")] - [TestCase(DatabaseType.MicrosoftSQLServer,"01/27/2001","en-us","en-us")] - [TestCase(DatabaseType.MicrosoftSQLServer,"01/27/2001","en-us","en-GB")] - - [TestCase(DatabaseType.Oracle,"27/01/2001","en-GB","en-GB")] - [TestCase(DatabaseType.Oracle,"27/01/2001","en-GB","en-us")] - [TestCase(DatabaseType.Oracle,"01/27/2001","en-us","en-us")] - [TestCase(DatabaseType.Oracle,"01/27/2001","en-us","en-GB")] - - public void Test_FlatFileAttcher_AmbiguousDates(DatabaseType type,string val,string attacherCulture, string threadCulture) - { - Thread.CurrentThread.CurrentCulture = new CultureInfo(threadCulture); - - string filename = Path.Combine(LoadDirectory.ForLoading.FullName, "bob.csv"); - var sw = new StreamWriter(filename); - - sw.WriteLine("dob"); - sw.WriteLine(val); - - sw.Flush(); - sw.Close(); - sw.Dispose(); - - var db = GetCleanedServer(type); - - var tbl = - db.CreateTable("AmbiguousDatesTestTable", - new []{new DatabaseColumnRequest("dob",new DatabaseTypeRequest(typeof(DateTime)))} - ); - - - Import(tbl,out var ti,out _); - var attacher = new AnySeparatorFileAttacher(); - attacher.Separator = ","; - attacher.FilePattern = "bob*"; - attacher.TableName = tbl.GetRuntimeName(); - 
attacher.Culture = new CultureInfo(attacherCulture); - attacher.Initialize(LoadDirectory, db); - - var job = new ThrowImmediatelyDataLoadJob(new HICDatabaseConfiguration(_database.Server, null),ti); - - var exitCode = attacher.Attach(job, new GracefulCancellationToken()); - Assert.AreEqual(ExitCodeType.Success, exitCode); - - attacher.LoadCompletedSoDispose(ExitCodeType.Success, new ThrowImmediatelyDataLoadEventListener()); - - Assert.AreEqual(new DateTime(2001,1,27),tbl.GetDataTable().Rows[0][0]); - - File.Delete(filename); - tbl.Drop(); - } - [Test] - public void Test_TableToLoad_IDNotInLoadMetadata() + Import(tbl, out var ti, out _); + var attacher = new AnySeparatorFileAttacher { - var source = new AnySeparatorFileAttacher(); - - var tiInLoad = new TableInfo(CatalogueRepository,"TableInLoad"); - var tiNotInLoad = new TableInfo(CatalogueRepository, "TableNotInLoad"); + Separator = ",", + FilePattern = "bob*", + TableName = tbl.GetRuntimeName(), + Culture = new CultureInfo(attacherCulture) + }; + attacher.Initialize(_loadDirectory, db); - source.TableToLoad = tiNotInLoad; + var job = new ThrowImmediatelyDataLoadJob(new HICDatabaseConfiguration(_database.Server, null), ti); - var job = new ThrowImmediatelyDataLoadJob(new ThrowImmediatelyDataLoadEventListener(){ ThrowOnWarning = true}); - job.RegularTablesToLoad = new System.Collections.Generic.List(new []{tiInLoad }); + var exitCode = attacher.Attach(job, new GracefulCancellationToken()); + Assert.AreEqual(ExitCodeType.Success, exitCode); + attacher.LoadCompletedSoDispose(ExitCodeType.Success, ThrowImmediatelyDataLoadEventListener.Quiet); - var ex = Assert.Throws(()=>source.Attach(job,new GracefulCancellationToken())); + Assert.AreEqual(new DateTime(2001, 1, 27), tbl.GetDataTable().Rows[0][0]); - StringAssert.IsMatch("FlatFileAttacher TableToLoad was 'TableNotInLoad' \\(ID=\\d+\\) but that table was not one of the tables in the load:'TableInLoad'", ex.Message); - } + File.Delete(filename); + tbl.Drop(); } -} - + [Test] + public void Test_TableToLoad_IDNotInLoadMetadata() + { + var source = new AnySeparatorFileAttacher(); + + var tiInLoad = new TableInfo(CatalogueRepository, "TableInLoad"); + var tiNotInLoad = new TableInfo(CatalogueRepository, "TableNotInLoad"); + + source.TableToLoad = tiNotInLoad; + + var job = new ThrowImmediatelyDataLoadJob(ThrowImmediatelyDataLoadEventListener.QuietPicky) + { + RegularTablesToLoad = new System.Collections.Generic.List(new[] { tiInLoad }) + }; + + var ex = Assert.Throws(() => source.Attach(job, new GracefulCancellationToken())); + + StringAssert.IsMatch( + "FlatFileAttacher TableToLoad was 'TableNotInLoad' \\(ID=\\d+\\) but that table was not one of the tables in the load:'TableInLoad'", + ex.Message); + } +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/HICPipelineTests.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/HICPipelineTests.cs index 5fd54d6422..1693be3bd4 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/HICPipelineTests.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/HICPipelineTests.cs @@ -23,299 +23,306 @@ using Rdmp.Core.DataLoad.Engine.Job; using Rdmp.Core.DataLoad.Modules.DataFlowSources; using Rdmp.Core.Repositories; -using ReusableLibraryCode; -using ReusableLibraryCode.Checks; -using ReusableLibraryCode.DataAccess; -using ReusableLibraryCode.Progress; +using Rdmp.Core.ReusableLibraryCode; +using Rdmp.Core.ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.DataAccess; +using Rdmp.Core.ReusableLibraryCode.Progress; 
using Tests.Common; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration; + +public class HICPipelineTests : DatabaseTests { - public class HICPipelineTests : DatabaseTests + internal class CatalogueEntities : IDisposable { - internal class CatalogueEntities : IDisposable - { - public Catalogue Catalogue { get; private set; } - public LoadMetadata LoadMetadata { get; private set; } - public ColumnInfo ColumnInfo { get; private set; } - public TableInfo TableInfo { get; private set; } + public Catalogue Catalogue { get; private set; } + public LoadMetadata LoadMetadata { get; private set; } + public ColumnInfo ColumnInfo { get; private set; } + public TableInfo TableInfo { get; private set; } - public DataAccessCredentials Credentials { get; private set; } + public DataAccessCredentials Credentials { get; private set; } - public CatalogueEntities() - { - Catalogue = null; - LoadMetadata = null; - ColumnInfo = null; - TableInfo = null; - } + public CatalogueEntities() + { + Catalogue = null; + LoadMetadata = null; + ColumnInfo = null; + TableInfo = null; + } - public void Create(CatalogueRepository repository, DiscoveredDatabase database, - ILoadDirectory directory) + public void Create(CatalogueRepository repository, DiscoveredDatabase database, + ILoadDirectory directory) + { + TableInfo = new TableInfo(repository, "TestData") { - TableInfo = new TableInfo(repository, "TestData") - { - Server = database.Server.Name, - Database = database.GetRuntimeName() - }; - TableInfo.SaveToDatabase(); + Server = database.Server.Name, + Database = database.GetRuntimeName() + }; + TableInfo.SaveToDatabase(); - if (!string.IsNullOrWhiteSpace(database.Server.ExplicitUsernameIfAny)) - Credentials = new DataAccessCredentialsFactory(repository).Create(TableInfo, - database.Server.ExplicitUsernameIfAny, database.Server.ExplicitPasswordIfAny, - DataAccessContext.Any); + if (!string.IsNullOrWhiteSpace(database.Server.ExplicitUsernameIfAny)) + Credentials = new DataAccessCredentialsFactory(repository).Create(TableInfo, + database.Server.ExplicitUsernameIfAny, database.Server.ExplicitPasswordIfAny, + DataAccessContext.Any); - ColumnInfo = new ColumnInfo(repository, "Col1", "int", TableInfo) - { - IsPrimaryKey = true - }; - ColumnInfo.SaveToDatabase(); + ColumnInfo = new ColumnInfo(repository, "Col1", "int", TableInfo) + { + IsPrimaryKey = true + }; + ColumnInfo.SaveToDatabase(); - LoadMetadata = new LoadMetadata(repository, "HICLoadPipelineTests") - { - LocationOfFlatFiles = directory.RootPath.FullName - }; - LoadMetadata.SaveToDatabase(); + LoadMetadata = new LoadMetadata(repository, "HICLoadPipelineTests") + { + LocationOfFlatFiles = directory.RootPath.FullName + }; + LoadMetadata.SaveToDatabase(); - Catalogue = new Catalogue(repository, "HICLoadPipelineTests") - { - LoggingDataTask = "Test", - LoadMetadata_ID = LoadMetadata.ID - }; - Catalogue.SaveToDatabase(); + Catalogue = new Catalogue(repository, "HICLoadPipelineTests") + { + LoggingDataTask = "Test", + LoadMetadata_ID = LoadMetadata.ID + }; + Catalogue.SaveToDatabase(); - var catalogueItem = new CatalogueItem(repository, Catalogue, "Test"); - catalogueItem.SetColumnInfo(ColumnInfo); + var catalogueItem = new CatalogueItem(repository, Catalogue, "Test"); + catalogueItem.SetColumnInfo(ColumnInfo); - SetupLoadProcessTasks(repository); - } + SetupLoadProcessTasks(repository); + } - public void Dispose() - { - if (Catalogue != null) - Catalogue.DeleteInDatabase(); + public void Dispose() + { + 
Catalogue?.DeleteInDatabase(); - if (LoadMetadata != null) - LoadMetadata.DeleteInDatabase(); + LoadMetadata?.DeleteInDatabase(); - if (ColumnInfo != null) - ColumnInfo.DeleteInDatabase(); + ColumnInfo?.DeleteInDatabase(); - if (TableInfo != null) - TableInfo.DeleteInDatabase(); + TableInfo?.DeleteInDatabase(); - if (Credentials != null) - Credentials.DeleteInDatabase(); - } + Credentials?.DeleteInDatabase(); + } - private void SetupLoadProcessTasks(ICatalogueRepository catalogueRepository) + private void SetupLoadProcessTasks(ICatalogueRepository catalogueRepository) + { + var attacherTask = new ProcessTask(catalogueRepository, LoadMetadata, LoadStage.Mounting) + { + Name = "Attach CSV file", + Order = 1, + Path = "Rdmp.Core.DataLoad.Modules.Attachers.AnySeparatorFileAttacher", + ProcessTaskType = ProcessTaskType.Attacher + }; + attacherTask.SaveToDatabase(); + + // Not assigned to a variable as they will be magically available through the repository + var processTaskArgs = new List> + { + new("FilePattern", "1.csv", typeof(string)), + new("TableName", "TestData", typeof(string)), + new("ForceHeaders", null, typeof(string)), + new("IgnoreQuotes", null, typeof(bool)), + new("IgnoreBlankLines", null, typeof(bool)), + new("ForceHeadersReplacesFirstLineInFile", null, typeof(bool)), + new("SendLoadNotRequiredIfFileNotFound", "false", typeof(bool)), + new("Separator", ",", typeof(string)), + new("TableToLoad", null, typeof(TableInfo)), + new("BadDataHandlingStrategy", BadDataHandlingStrategy.ThrowException.ToString(), + typeof(BadDataHandlingStrategy)), + new("ThrowOnEmptyFiles", "true", typeof(bool)), + new("AttemptToResolveNewLinesInRecords", "true", typeof(bool)), + new("MaximumErrorsToReport", "0", typeof(int)), + new("IgnoreColumns", null, typeof(string)), + new("IgnoreBadReads", "false", typeof(bool)), + new("AddFilenameColumnNamed", null, typeof(string)) + }; + + + foreach (var tuple in processTaskArgs) { - var attacherTask = new ProcessTask(catalogueRepository, LoadMetadata, LoadStage.Mounting) + var pta = new ProcessTaskArgument(catalogueRepository, attacherTask) { - Name = "Attach CSV file", - Order = 1, - Path = "Rdmp.Core.DataLoad.Modules.Attachers.AnySeparatorFileAttacher", - ProcessTaskType = ProcessTaskType.Attacher + Name = tuple.Item1, + Value = tuple.Item2 }; - attacherTask.SaveToDatabase(); + pta.SetType(tuple.Item3); + pta.SaveToDatabase(); + } + } + } - // Not assigned to a variable as they will be magically available through the repository - var processTaskArgs = new List> - { - new Tuple("FilePattern", "1.csv", typeof (string)), - new Tuple("TableName", "TestData", typeof (string)), - new Tuple("ForceHeaders", null, typeof (string)), - new Tuple("IgnoreQuotes", null, typeof (bool)), - new Tuple("IgnoreBlankLines", null, typeof (bool)), - new Tuple("ForceHeadersReplacesFirstLineInFile", null, typeof (bool)), - new Tuple("SendLoadNotRequiredIfFileNotFound", "false", typeof (bool)), - new Tuple("Separator", ",", typeof (string)), - new Tuple("TableToLoad", null, typeof (TableInfo)), - new Tuple("BadDataHandlingStrategy", BadDataHandlingStrategy.ThrowException.ToString(), typeof (BadDataHandlingStrategy)), - new Tuple("ThrowOnEmptyFiles", "true", typeof (bool)), - new Tuple("AttemptToResolveNewLinesInRecords", "true", typeof (bool)), - new Tuple("MaximumErrorsToReport", "0", typeof (int)), - new Tuple("IgnoreColumns", null, typeof (string)), - new Tuple("IgnoreBadReads", "false", typeof (bool)), - new Tuple("AddFilenameColumnNamed", null, typeof (string)), + internal class 
DatabaseHelper : IDisposable + { + private DiscoveredServer _server; - }; - - foreach (var tuple in processTaskArgs) - { - var pta = new ProcessTaskArgument(catalogueRepository, attacherTask) - { - Name = tuple.Item1, - Value = tuple.Item2 - }; - pta.SetType(tuple.Item3); - pta.SaveToDatabase(); - } - } - } + public DiscoveredDatabase DatabaseToLoad { get; private set; } - internal class DatabaseHelper : IDisposable + public void SetUp(DiscoveredServer server) { - private DiscoveredServer _server; - + _server = server; - public DiscoveredDatabase DatabaseToLoad { get; private set; } - public void SetUp(DiscoveredServer server) - { - _server = server; + var databaseToLoadName = "HICPipelineTests"; - var databaseToLoadName = "HICPipelineTests"; - - // Create the databases - server.ExpectDatabase(databaseToLoadName).Create(true); - server.ChangeDatabase(databaseToLoadName); + // Create the databases + server.ExpectDatabase(databaseToLoadName).Create(true); + server.ChangeDatabase(databaseToLoadName); - // Create the dataset table - DatabaseToLoad = server.ExpectDatabase(databaseToLoadName); - using (var con = DatabaseToLoad.Server.GetConnection()) + // Create the dataset table + DatabaseToLoad = server.ExpectDatabase(databaseToLoadName); + using (var con = DatabaseToLoad.Server.GetConnection()) + { + con.Open(); + const string createDatasetTableQuery = + "CREATE TABLE TestData ([Col1] [int], [hic_dataLoadRunID] [int] NULL, [hic_validFrom] [datetime] NULL, CONSTRAINT [PK_TestData] PRIMARY KEY CLUSTERED ([Col1] ASC))"; + const string addValidFromDefault = + "ALTER TABLE TestData ADD CONSTRAINT [DF_TestData__hic_validFrom] DEFAULT (getdate()) FOR [hic_validFrom]"; + using (var cmd = DatabaseCommandHelper.GetCommand(createDatasetTableQuery, con)) { - con.Open(); - const string createDatasetTableQuery = - "CREATE TABLE TestData ([Col1] [int], [hic_dataLoadRunID] [int] NULL, [hic_validFrom] [datetime] NULL, CONSTRAINT [PK_TestData] PRIMARY KEY CLUSTERED ([Col1] ASC))"; - const string addValidFromDefault = - "ALTER TABLE TestData ADD CONSTRAINT [DF_TestData__hic_validFrom] DEFAULT (getdate()) FOR [hic_validFrom]"; - using(var cmd = DatabaseCommandHelper.GetCommand(createDatasetTableQuery, con)) - cmd.ExecuteNonQuery(); - - using(var cmd = DatabaseCommandHelper.GetCommand(addValidFromDefault, con)) - cmd.ExecuteNonQuery(); + cmd.ExecuteNonQuery(); } - // Ensure the dataset table has been created - var datasetTable = DatabaseToLoad.ExpectTable("TestData"); - Assert.IsTrue(datasetTable.Exists()); + using (var cmd = DatabaseCommandHelper.GetCommand(addValidFromDefault, con)) + { + cmd.ExecuteNonQuery(); + } } - public void Dispose() - { - if (DatabaseToLoad == null) - return; - - if (DatabaseToLoad.Exists()) - DatabaseToLoad.Drop(); - - // check if RAW has been created and remove it - var raw = _server.ExpectDatabase(DatabaseToLoad.GetRuntimeName() + "_RAW"); - if (raw.Exists()) - raw.Drop(); - } + // Ensure the dataset table has been created + var datasetTable = DatabaseToLoad.ExpectTable("TestData"); + Assert.IsTrue(datasetTable.Exists()); } - [Test] - [TestCase(false, false)] - [TestCase(true, false)] - [TestCase(true, true)] - public void TestSingleJob(bool overrideRAW, bool sendDodgyCredentials) + public void Dispose() { - if (sendDodgyCredentials && !overrideRAW) - throw new NotSupportedException("Cannot send dodgy credentials if you aren't overriding RAW"); + if (DatabaseToLoad == null) + return; - IServerDefaults defaults = CatalogueRepository; - var oldDefault = 
defaults.GetDefaultFor(PermissableDefaults.RAWDataLoadServer); + if (DatabaseToLoad.Exists()) + DatabaseToLoad.Drop(); - var testDirPath = Path.Combine(Path.GetTempPath(), Path.GetRandomFileName()); - var testDir = Directory.CreateDirectory(testDirPath); - var server = DiscoveredServerICanCreateRandomDatabasesAndTablesOn; - - var catalogueEntities = new CatalogueEntities(); - var databaseHelper = new DatabaseHelper(); - ExternalDatabaseServer external = null; + // check if RAW has been created and remove it + var raw = _server.ExpectDatabase($"{DatabaseToLoad.GetRuntimeName()}_RAW"); + if (raw.Exists()) + raw.Drop(); + } + } - try - { - // Set SetUp the dataset's project directory and add the CSV file to ForLoading - var loadDirectory = LoadDirectory.CreateDirectoryStructure(testDir, "TestDataset"); - File.WriteAllText(Path.Combine(loadDirectory.ForLoading.FullName, "1.csv"), - "Col1\r\n1\r\n2\r\n3\r\n4"); + [Test] + [TestCase(false, false)] + [TestCase(true, false)] + [TestCase(true, true)] + public void TestSingleJob(bool overrideRAW, bool sendDodgyCredentials) + { + if (sendDodgyCredentials && !overrideRAW) + throw new NotSupportedException("Cannot send dodgy credentials if you aren't overriding RAW"); - databaseHelper.SetUp(server); + IServerDefaults defaults = CatalogueRepository; + var oldDefault = defaults.GetDefaultFor(PermissableDefaults.RAWDataLoadServer); - // Create the Catalogue entities for the dataset - catalogueEntities.Create(CatalogueTableRepository, databaseHelper.DatabaseToLoad, loadDirectory); - - if (overrideRAW) - { - external = new ExternalDatabaseServer(CatalogueRepository, "RAW Server",null); - external.SetProperties(DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase("master")); + var testDirPath = Path.Combine(Path.GetTempPath(), Path.GetRandomFileName()); + var testDir = Directory.CreateDirectory(testDirPath); + var server = DiscoveredServerICanCreateRandomDatabasesAndTablesOn; - if (sendDodgyCredentials) - { - external.Username = "IveGotaLovely"; - external.Password = "BunchOfCoconuts"; - } - external.SaveToDatabase(); + var catalogueEntities = new CatalogueEntities(); + var databaseHelper = new DatabaseHelper(); + ExternalDatabaseServer external = null; - defaults.SetDefault(PermissableDefaults.RAWDataLoadServer, external); - } + try + { + // Set SetUp the dataset's project directory and add the CSV file to ForLoading + var loadDirectory = LoadDirectory.CreateDirectoryStructure(testDir, "TestDataset"); + File.WriteAllText(Path.Combine(loadDirectory.ForLoading.FullName, "1.csv"), + "Col1\r\n1\r\n2\r\n3\r\n4"); - var options = new DleOptions(); - options.LoadMetadata = catalogueEntities.LoadMetadata.ID.ToString(); - options.Command = CommandLineActivity.check; + databaseHelper.SetUp(server); - //run checks (with ignore errors if we are sending dodgy credentials) - new RunnerFactory().CreateRunner(new ThrowImmediatelyActivator(RepositoryLocator),options).Run(RepositoryLocator, new ThrowImmediatelyDataLoadEventListener(), - sendDodgyCredentials? 
- (ICheckNotifier) new IgnoreAllErrorsCheckNotifier(): new AcceptAllCheckNotifier(), new GracefulCancellationToken()); + // Create the Catalogue entities for the dataset + catalogueEntities.Create(CatalogueTableRepository, databaseHelper.DatabaseToLoad, loadDirectory); - //run load - options.Command = CommandLineActivity.run; - var runner = new RunnerFactory().CreateRunner(new ThrowImmediatelyActivator(RepositoryLocator),options); + if (overrideRAW) + { + external = new ExternalDatabaseServer(CatalogueRepository, "RAW Server", null); + external.SetProperties(DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase("master")); - if (sendDodgyCredentials) { - var ex = Assert.Throws(()=>runner.Run(RepositoryLocator, new ThrowImmediatelyDataLoadEventListener(), new AcceptAllCheckNotifier(), new GracefulCancellationToken())); - Assert.IsTrue(ex.InnerException.Message.Contains("Login failed for user 'IveGotaLovely'"),"Error message did not contain expected text"); - return; + external.Username = "IveGotaLovely"; + external.Password = "BunchOfCoconuts"; } - else - runner.Run(RepositoryLocator, new ThrowImmediatelyDataLoadEventListener(), new AcceptAllCheckNotifier(), new GracefulCancellationToken()); + external.SaveToDatabase(); - var archiveFile = loadDirectory.ForArchiving.EnumerateFiles("*.zip").OrderByDescending(f=>f.FullName).FirstOrDefault(); - Assert.NotNull(archiveFile,"Archive file has not been created by the load."); - Assert.IsFalse(loadDirectory.ForLoading.EnumerateFileSystemInfos().Any()); - + defaults.SetDefault(PermissableDefaults.RAWDataLoadServer, external); } - finally + + var options = new DleOptions { - //reset the original RAW server - defaults.SetDefault(PermissableDefaults.RAWDataLoadServer, oldDefault); + LoadMetadata = catalogueEntities.LoadMetadata.ID.ToString(), + Command = CommandLineActivity.check + }; - if (external != null) - external.DeleteInDatabase(); + //run checks (with ignore errors if we are sending dodgy credentials) + RunnerFactory.CreateRunner(new ThrowImmediatelyActivator(RepositoryLocator), options).Run(RepositoryLocator, + ThrowImmediatelyDataLoadEventListener.Quiet, + sendDodgyCredentials + ? 
(ICheckNotifier)IgnoreAllErrorsCheckNotifier.Instance + : new AcceptAllCheckNotifier(), new GracefulCancellationToken()); - testDir.Delete(true); + //run load + options.Command = CommandLineActivity.run; + var runner = RunnerFactory.CreateRunner(new ThrowImmediatelyActivator(RepositoryLocator), options); - databaseHelper.Dispose(); - catalogueEntities.Dispose(); + + if (sendDodgyCredentials) + { + var ex = Assert.Throws(() => runner.Run(RepositoryLocator, + ThrowImmediatelyDataLoadEventListener.Quiet, new AcceptAllCheckNotifier(), + new GracefulCancellationToken())); + Assert.IsTrue(ex.InnerException.Message.Contains("Login failed for user 'IveGotaLovely'"), + "Error message did not contain expected text"); + return; } + else + { + runner.Run(RepositoryLocator, ThrowImmediatelyDataLoadEventListener.Quiet, new AcceptAllCheckNotifier(), + new GracefulCancellationToken()); + } + + + var archiveFile = loadDirectory.ForArchiving.EnumerateFiles("*.zip").MaxBy(f => f.FullName); + Assert.NotNull(archiveFile, "Archive file has not been created by the load."); + Assert.IsFalse(loadDirectory.ForLoading.EnumerateFileSystemInfos().Any()); + } + finally + { + //reset the original RAW server + defaults.SetDefault(PermissableDefaults.RAWDataLoadServer, oldDefault); + + external?.DeleteInDatabase(); + + testDir.Delete(true); + + databaseHelper.Dispose(); + catalogueEntities.Dispose(); } } +} - public class TestCacheFileRetriever : CachedFileRetriever +public class TestCacheFileRetriever : CachedFileRetriever +{ + public override void Initialize(ILoadDirectory directory, DiscoveredDatabase dbInfo) { - public override void Initialize(ILoadDirectory directory, DiscoveredDatabase dbInfo) - { - - } + } - public override ExitCodeType Fetch(IDataLoadJob dataLoadJob, GracefulCancellationToken cancellationToken) - { - var LoadDirectory = dataLoadJob.LoadDirectory; - var fileToMove = LoadDirectory.Cache.EnumerateFiles("*.csv").FirstOrDefault(); - if (fileToMove == null) - return ExitCodeType.OperationNotRequired; + public override ExitCodeType Fetch(IDataLoadJob dataLoadJob, GracefulCancellationToken cancellationToken) + { + var LoadDirectory = dataLoadJob.LoadDirectory; + var fileToMove = LoadDirectory.Cache.EnumerateFiles("*.csv").FirstOrDefault(); + if (fileToMove == null) + return ExitCodeType.OperationNotRequired; - File.Move(fileToMove.FullName, Path.Combine(LoadDirectory.ForLoading.FullName, "1.csv")); - return ExitCodeType.Success; - } + File.Move(fileToMove.FullName, Path.Combine(LoadDirectory.ForLoading.FullName, "1.csv")); + return ExitCodeType.Success; } } \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/HousekeepingTests.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/HousekeepingTests.cs index 9fc53760d4..17fc0322df 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/HousekeepingTests.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/HousekeepingTests.cs @@ -11,55 +11,52 @@ using Rdmp.Core.DataLoad.Triggers.Implementations; using Tests.Common; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration; + +internal class HousekeepingTests : DatabaseTests { - internal class HousekeepingTests : DatabaseTests + [Test] + public void TestCheckUpdateTrigger() { - [Test] - public void TestCheckUpdateTrigger() - { - - // set SetUp a test database - const string tableName = "TestTable"; - var db = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer); + // set SetUp a test database + const string tableName = 
"TestTable"; + var db = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer); - var databaseName = db.GetRuntimeName(); - var table = db.CreateTable(tableName,new[] {new DatabaseColumnRequest("Id", "int"),}); + var databaseName = db.GetRuntimeName(); + var table = db.CreateTable(tableName, new[] { new DatabaseColumnRequest("Id", "int") }); + + var server = db.Server; + using (var con = server.GetConnection()) + { + con.Open(); + var cmd = server.GetCommand( + $"CREATE TRIGGER dbo.[TestTable_OnUpdate] ON [dbo].[{tableName}] AFTER DELETE AS RAISERROR('MESSAGE',16,10)", + con); - var server = db.Server; - using (var con = server.GetConnection()) - { - con.Open(); - var cmd = server.GetCommand( - "CREATE TRIGGER dbo.[TestTable_OnUpdate] ON [dbo].[" + tableName + - "] AFTER DELETE AS RAISERROR('MESSAGE',16,10)", con); + cmd.ExecuteNonQuery(); + } - cmd.ExecuteNonQuery(); - } + var dbInfo = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(databaseName); - var dbInfo = DiscoveredServerICanCreateRandomDatabasesAndTablesOn.ExpectDatabase(databaseName); + var factory = new TriggerImplementerFactory(dbInfo.Server.DatabaseType); - var factory = new TriggerImplementerFactory(dbInfo.Server.DatabaseType); - - var triggerImplementer = factory.Create(table); - var isEnabled = triggerImplementer.GetTriggerStatus(); - Assert.AreEqual(TriggerStatus.Enabled, isEnabled); + var triggerImplementer = factory.Create(table); + var isEnabled = triggerImplementer.GetTriggerStatus(); + Assert.AreEqual(TriggerStatus.Enabled, isEnabled); - - // disable the trigger and test correct reporting - using (var con = new SqlConnection(dbInfo.Server.Builder.ConnectionString)) - { - con.Open(); - var cmd = - new SqlCommand( - "USE [" + databaseName + "]; DISABLE TRIGGER TestTable_OnUpdate ON [" + databaseName + "]..[" + - tableName + "]", con); - cmd.ExecuteNonQuery(); - } - isEnabled = triggerImplementer.GetTriggerStatus(); - Assert.AreEqual(TriggerStatus.Disabled, isEnabled); + // disable the trigger and test correct reporting + using (var con = new SqlConnection(dbInfo.Server.Builder.ConnectionString)) + { + con.Open(); + var cmd = + new SqlCommand( + $"USE [{databaseName}]; DISABLE TRIGGER TestTable_OnUpdate ON [{databaseName}]..[{tableName}]", + con); + cmd.ExecuteNonQuery(); } - } -} + isEnabled = triggerImplementer.GetTriggerStatus(); + Assert.AreEqual(TriggerStatus.Disabled, isEnabled); + } +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/ImportFilesDataProviderTests.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/ImportFilesDataProviderTests.cs index ea2e8c17ed..405a1902bb 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/ImportFilesDataProviderTests.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/ImportFilesDataProviderTests.cs @@ -8,105 +8,103 @@ using System; using System.IO; using System.Linq; -using Moq; +using NSubstitute; using NUnit.Framework; using Rdmp.Core.Curation; using Rdmp.Core.DataFlowPipeline; using Rdmp.Core.DataLoad; using Rdmp.Core.DataLoad.Engine.Job; using Rdmp.Core.DataLoad.Modules.DataProvider; -using ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Checks; using Tests.Common; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration; + +public class ImportFilesDataProviderTests : DatabaseTests { - public class ImportFilesDataProviderTests:DatabaseTests + [Test] + public void CopyFiles() { - [Test] - public void CopyFiles() - { - var sourceDir = new 
DirectoryInfo(TestContext.CurrentContext.TestDirectory).CreateSubdirectory("subdir"); - var targetDir = new DirectoryInfo(TestContext.CurrentContext.TestDirectory).CreateSubdirectory("loaddir"); - - //make sure target is empty - foreach (var f in targetDir.GetFiles()) - f.Delete(); - - var originpath = Path.Combine(sourceDir.FullName, "myFile.txt"); + var sourceDir = new DirectoryInfo(TestContext.CurrentContext.TestDirectory).CreateSubdirectory("subdir"); + var targetDir = new DirectoryInfo(TestContext.CurrentContext.TestDirectory).CreateSubdirectory("loaddir"); + + //make sure target is empty + foreach (var f in targetDir.GetFiles()) + f.Delete(); - File.WriteAllText(originpath,"fish"); + var originpath = Path.Combine(sourceDir.FullName, "myFile.txt"); - var job = new ThrowImmediatelyDataLoadJob(); - var mockProjectDirectory = Mock.Of<ILoadDirectory>(p => p.ForLoading == targetDir); - job.LoadDirectory = mockProjectDirectory; + File.WriteAllText(originpath, "fish"); + var job = new ThrowImmediatelyDataLoadJob(); + var mockProjectDirectory = Substitute.For<ILoadDirectory>(); + mockProjectDirectory.ForLoading.Returns(targetDir); + job.LoadDirectory = mockProjectDirectory; - //Create the provider - var provider = new ImportFilesDataProvider(); - //it doesn't know what to load yet - Assert.Throws(() => provider.Check(new ThrowImmediatelyCheckNotifier())); - - //now it does - provider.DirectoryPath = sourceDir.FullName; + //Create the provider + var provider = new ImportFilesDataProvider(); - //but it doesn't have a file pattern - Assert.Throws(() => provider.Check(new ThrowImmediatelyCheckNotifier())); + //it doesn't know what to load yet + Assert.Throws(() => provider.Check(ThrowImmediatelyCheckNotifier.Quiet)); - //now it does but its not a matching one - provider.FilePattern = "cannonballs.bat"; + //now it does + provider.DirectoryPath = sourceDir.FullName; - //either way it passes checking - Assert.DoesNotThrow(() => provider.Check(new ThrowImmediatelyCheckNotifier())); + //but it doesn't have a file pattern + Assert.Throws(() => provider.Check(ThrowImmediatelyCheckNotifier.Quiet)); - //execute the provider - provider.Fetch(job, new GracefulCancellationToken()); + //now it does but its not a matching one + provider.FilePattern = "cannonballs.bat"; - //destination is empty because nothing matched - Assert.IsEmpty(targetDir.GetFiles()); + //either way it passes checking + Assert.DoesNotThrow(() => provider.Check(ThrowImmediatelyCheckNotifier.Quiet)); - //give it correct pattern - provider.FilePattern = "*.txt"; + //execute the provider + provider.Fetch(job, new GracefulCancellationToken()); - //execute the provider - provider.Fetch(job, new GracefulCancellationToken()); + //destination is empty because nothing matched + Assert.IsEmpty(targetDir.GetFiles()); - //both files should exist - Assert.AreEqual(1,targetDir.GetFiles().Count()); - Assert.AreEqual(1, sourceDir.GetFiles().Count()); + //give it correct pattern + provider.FilePattern = "*.txt"; - //simulate load failure - provider.LoadCompletedSoDispose(ExitCodeType.Abort, new ThrowImmediatelyDataLoadJob()); + //execute the provider + provider.Fetch(job, new GracefulCancellationToken()); - //both files should exist - Assert.AreEqual(1, targetDir.GetFiles().Count()); - Assert.AreEqual(1, sourceDir.GetFiles().Count()); + //both files should exist + Assert.AreEqual(1, targetDir.GetFiles().Length); + Assert.AreEqual(1, sourceDir.GetFiles().Length); - //simulate load success - provider.LoadCompletedSoDispose(ExitCodeType.Success, new ThrowImmediatelyDataLoadJob()); +
//simulate load failure + provider.LoadCompletedSoDispose(ExitCodeType.Abort, new ThrowImmediatelyDataLoadJob()); - //both files should exist because Delete on success is false - Assert.AreEqual(1, targetDir.GetFiles().Count()); - Assert.AreEqual(1, sourceDir.GetFiles().Count()); + //both files should exist + Assert.AreEqual(1, targetDir.GetFiles().Length); + Assert.AreEqual(1, sourceDir.GetFiles().Length); - //change behaviour to delete on successful data loads - provider.DeleteFilesOnsuccessfulLoad = true; + //simulate load success + provider.LoadCompletedSoDispose(ExitCodeType.Success, new ThrowImmediatelyDataLoadJob()); - //simulate load failure - provider.LoadCompletedSoDispose(ExitCodeType.Error, new ThrowImmediatelyDataLoadJob()); + //both files should exist because Delete on success is false + Assert.AreEqual(1, targetDir.GetFiles().Length); + Assert.AreEqual(1, sourceDir.GetFiles().Length); - //both files should exist - Assert.AreEqual(1, targetDir.GetFiles().Count()); - Assert.AreEqual(1, sourceDir.GetFiles().Count()); + //change behaviour to delete on successful data loads + provider.DeleteFilesOnsuccessfulLoad = true; - //simulate load success - provider.LoadCompletedSoDispose(ExitCodeType.Success, new ThrowImmediatelyDataLoadJob()); + //simulate load failure + provider.LoadCompletedSoDispose(ExitCodeType.Error, new ThrowImmediatelyDataLoadJob()); - //only forLoading file should exist (in real life that one would be handled by archivng already) - Assert.AreEqual(1, targetDir.GetFiles().Count()); - Assert.AreEqual(0, sourceDir.GetFiles().Count()); + //both files should exist + Assert.AreEqual(1, targetDir.GetFiles().Length); + Assert.AreEqual(1, sourceDir.GetFiles().Length); - } + //simulate load success + provider.LoadCompletedSoDispose(ExitCodeType.Success, new ThrowImmediatelyDataLoadJob()); + //only forLoading file should exist (in real life that one would be handled by archivng already) + Assert.AreEqual(1, targetDir.GetFiles().Length); + Assert.AreEqual(0, sourceDir.GetFiles().Length); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/JobDateGenerationStrategyFactoryTestsIntegration.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/JobDateGenerationStrategyFactoryTestsIntegration.cs index d4141047d3..fcc1afcfd8 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/JobDateGenerationStrategyFactoryTestsIntegration.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/JobDateGenerationStrategyFactoryTestsIntegration.cs @@ -18,184 +18,215 @@ using Rdmp.Core.DataLoad.Engine.Job.Scheduling.Exceptions; using Rdmp.Core.DataLoad.Engine.LoadProcess.Scheduling.Strategy; using Rdmp.Core.DataLoad.Modules.DataProvider; -using ReusableLibraryCode.Progress; +using Rdmp.Core.Repositories; +using Rdmp.Core.ReusableLibraryCode.Progress; using Tests.Common; using Tests.Common.Helpers; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration; + +public class JobDateGenerationStrategyFactoryTestsIntegration : DatabaseTests { - public class JobDateGenerationStrategyFactoryTestsIntegration:DatabaseTests + private CacheProgress _cp; + private LoadProgress _lp; + private LoadMetadata _lmd; + private DiscoveredServer _server; + private JobDateGenerationStrategyFactory _factory; + + [SetUp] + protected override void SetUp() { - private CacheProgress _cp; - private LoadProgress _lp; - private LoadMetadata _lmd; - private DiscoveredServer _server; - private JobDateGenerationStrategyFactory _factory; - - 
[SetUp] - protected override void SetUp() + base.SetUp(); + + MEF.AddTypeToCatalogForTesting(typeof(TestDataWriter)); + MEF.AddTypeToCatalogForTesting(typeof(TestDataInventor)); + + _lmd = new LoadMetadata(CatalogueRepository, "JobDateGenerationStrategyFactoryTestsIntegration"); + _lp = new LoadProgress(CatalogueRepository, _lmd) { - base.SetUp(); + DataLoadProgress = new DateTime(2001, 1, 1) + }; - RepositoryLocator.CatalogueRepository.MEF.AddTypeToCatalogForTesting(typeof(TestDataWriter)); - RepositoryLocator.CatalogueRepository.MEF.AddTypeToCatalogForTesting(typeof(TestDataInventor)); + _lp.SaveToDatabase(); - _lmd = new LoadMetadata(CatalogueRepository, "JobDateGenerationStrategyFactoryTestsIntegration"); - _lp = new LoadProgress(CatalogueRepository, _lmd); + _cp = new CacheProgress(CatalogueRepository, _lp); - _lp.DataLoadProgress = new DateTime(2001, 1, 1); - _lp.SaveToDatabase(); - _cp = new CacheProgress(CatalogueRepository, _lp); + _server = new DiscoveredServer(new SqlConnectionStringBuilder("server=localhost;initial catalog=fish")); + _factory = new JobDateGenerationStrategyFactory(new SingleLoadProgressSelectionStrategy(_lp)); + } + [Test] + public void CacheProvider_None() + { + var ex = Assert.Throws(() => + _factory.Create(_lp, ThrowImmediatelyDataLoadEventListener.Quiet)); + Assert.IsTrue(ex.Message.StartsWith( + "LoadMetadata JobDateGenerationStrategyFactoryTestsIntegration does not have ANY process tasks of type ProcessTaskType.DataProvider")); + } - _server = new DiscoveredServer(new SqlConnectionStringBuilder("server=localhost;initial catalog=fish")); - _factory = new JobDateGenerationStrategyFactory(new SingleLoadProgressSelectionStrategy(_lp)); - } - [Test] - public void CacheProvider_None() + [Test] + public void CacheProvider_NonCachingOne() + { + var pt = new ProcessTask(CatalogueRepository, _lmd, LoadStage.GetFiles) { - var ex = Assert.Throws(() => _factory.Create(_lp,new ThrowImmediatelyDataLoadEventListener())); - Assert.IsTrue(ex.Message.StartsWith("LoadMetadata JobDateGenerationStrategyFactoryTestsIntegration does not have ANY process tasks of type ProcessTaskType.DataProvider")); - } + Path = typeof(DoNothingDataProvider).FullName, + ProcessTaskType = ProcessTaskType.DataProvider, + Name = "DoNothing" + }; + pt.SaveToDatabase(); + + var ex = Assert.Throws(() => + _factory.Create(_lp, ThrowImmediatelyDataLoadEventListener.Quiet)); + Assert.IsTrue(ex.Message.StartsWith( + "LoadMetadata JobDateGenerationStrategyFactoryTestsIntegration has some DataProviders tasks but none of them wrap classes that implement ICachedDataProvider")); + } - [Test] - public void CacheProvider_NonCachingOne() + [Test] + public void CacheProvider_TwoCachingOnes() + { + var pt1 = new ProcessTask(CatalogueRepository, _lmd, LoadStage.GetFiles) { - var pt = new ProcessTask(CatalogueRepository, _lmd, LoadStage.GetFiles); - pt.Path = typeof (DoNothingDataProvider).FullName; - pt.ProcessTaskType = ProcessTaskType.DataProvider; - pt.Name = "DoNothing"; - pt.SaveToDatabase(); - - var ex = Assert.Throws(() => _factory.Create(_lp,new ThrowImmediatelyDataLoadEventListener())); - Assert.IsTrue(ex.Message.StartsWith("LoadMetadata JobDateGenerationStrategyFactoryTestsIntegration has some DataProviders tasks but none of them wrap classes that implement ICachedDataProvider")); - } + Path = typeof(TestCachedFileRetriever).FullName, + ProcessTaskType = ProcessTaskType.DataProvider, + Name = "Cache1" + }; + pt1.SaveToDatabase(); + var pt2 = new ProcessTask(CatalogueRepository, _lmd, LoadStage.GetFiles) + 
{ + Path = typeof(TestCachedFileRetriever).FullName, + ProcessTaskType = ProcessTaskType.DataProvider, + Name = "Cache2" + }; + pt2.SaveToDatabase(); + + var ex = Assert.Throws(() => + _factory.Create(_lp, ThrowImmediatelyDataLoadEventListener.Quiet)); + Assert.AreEqual( + "LoadMetadata JobDateGenerationStrategyFactoryTestsIntegration has multiple cache DataProviders tasks (Cache1,Cache2), you are only allowed 1", + ex.Message); + } - [Test] - public void CacheProvider_TwoCachingOnes() + [Test] + public void CacheProvider_NoPipeline() + { + var pt1 = new ProcessTask(CatalogueRepository, _lmd, LoadStage.GetFiles) + { + Path = typeof(TestCachedFileRetriever).FullName, + ProcessTaskType = ProcessTaskType.DataProvider, + Name = "Cache1" + }; + pt1.SaveToDatabase(); + + _cp.CacheFillProgress = new DateTime(1999, 1, 1); + _cp.Name = "MyTestCp"; + _cp.SaveToDatabase(); + + pt1.CreateArgumentsForClassIfNotExists(); + + var projDir = + LoadDirectory.CreateDirectoryStructure(new DirectoryInfo(TestContext.CurrentContext.TestDirectory), "delme", + true); + _lmd.LocationOfFlatFiles = projDir.RootPath.FullName; + _lmd.SaveToDatabase(); + try { - var pt1 = new ProcessTask(CatalogueRepository, _lmd, LoadStage.GetFiles); - pt1.Path = typeof(TestCachedFileRetriever).FullName; - pt1.ProcessTaskType = ProcessTaskType.DataProvider; - pt1.Name = "Cache1"; - pt1.SaveToDatabase(); - - var pt2 = new ProcessTask(CatalogueRepository, _lmd, LoadStage.GetFiles); - pt2.Path = typeof(TestCachedFileRetriever).FullName; - pt2.ProcessTaskType = ProcessTaskType.DataProvider; - pt2.Name = "Cache2"; - pt2.SaveToDatabase(); - - var ex = Assert.Throws(() => _factory.Create(_lp,new ThrowImmediatelyDataLoadEventListener())); - Assert.AreEqual("LoadMetadata JobDateGenerationStrategyFactoryTestsIntegration has multiple cache DataProviders tasks (Cache1,Cache2), you are only allowed 1",ex.Message); + var ex = Assert.Throws(() => _factory.Create(_lp, ThrowImmediatelyDataLoadEventListener.Quiet)); + Assert.AreEqual("CacheProgress MyTestCp does not have a Pipeline configured on it", ex.Message); } - - [Test] - public void CacheProvider_NoPipeline() + finally { - var pt1 = new ProcessTask(CatalogueRepository, _lmd, LoadStage.GetFiles); - pt1.Path = typeof(TestCachedFileRetriever).FullName; - pt1.ProcessTaskType = ProcessTaskType.DataProvider; - pt1.Name = "Cache1"; - pt1.SaveToDatabase(); - - _cp.CacheFillProgress = new DateTime(1999, 1, 1); - _cp.Name = "MyTestCp"; - _cp.SaveToDatabase(); - - pt1.CreateArgumentsForClassIfNotExists(); - - var projDir = LoadDirectory.CreateDirectoryStructure(new DirectoryInfo(TestContext.CurrentContext.TestDirectory),"delme", true); - _lmd.LocationOfFlatFiles = projDir.RootPath.FullName; - _lmd.SaveToDatabase(); - try - { - var ex = Assert.Throws(() => _factory.Create(_lp,new ThrowImmediatelyDataLoadEventListener())); - Assert.AreEqual("CacheProgress MyTestCp does not have a Pipeline configured on it", ex.Message); - } - finally - { - projDir.RootPath.Delete(true); - } + projDir.RootPath.Delete(true); } + } - [Test] - public void CacheProvider_NoCacheProgress() + [Test] + public void CacheProvider_NoCacheProgress() + { + var pt1 = new ProcessTask(CatalogueRepository, _lmd, LoadStage.GetFiles) + { + Path = typeof(BasicCacheDataProvider).FullName, + ProcessTaskType = ProcessTaskType.DataProvider, + Name = "Cache1" + }; + pt1.SaveToDatabase(); + + var projDir = + LoadDirectory.CreateDirectoryStructure(new DirectoryInfo(TestContext.CurrentContext.TestDirectory), "delme", + true); + _lmd.LocationOfFlatFiles = 
projDir.RootPath.FullName; + _lmd.SaveToDatabase(); + + var pipeAssembler = new TestDataPipelineAssembler("CacheProvider_Normal", CatalogueRepository); + pipeAssembler.ConfigureCacheProgressToUseThePipeline(_cp); + + try { - var pt1 = new ProcessTask(CatalogueRepository, _lmd, LoadStage.GetFiles); - pt1.Path = typeof(BasicCacheDataProvider).FullName; - pt1.ProcessTaskType = ProcessTaskType.DataProvider; - pt1.Name = "Cache1"; - pt1.SaveToDatabase(); - - var projDir = LoadDirectory.CreateDirectoryStructure(new DirectoryInfo(TestContext.CurrentContext.TestDirectory), "delme", true); - _lmd.LocationOfFlatFiles = projDir.RootPath.FullName; - _lmd.SaveToDatabase(); - - var pipeAssembler = new TestDataPipelineAssembler("CacheProvider_Normal", CatalogueRepository); - pipeAssembler.ConfigureCacheProgressToUseThePipeline(_cp); - - try - { - var ex = Assert.Throws(()=>_factory.Create(_lp,new ThrowImmediatelyDataLoadEventListener())); - Assert.AreEqual("Caching has not begun for this CacheProgress ("+_cp.ID+"), so there is nothing to load and this strategy should not be used.",ex.Message); - } - finally - { - _cp.Pipeline_ID = null; - pipeAssembler.Destroy(); - projDir.RootPath.Delete(true); - } + var ex = Assert.Throws(() => + _factory.Create(_lp, ThrowImmediatelyDataLoadEventListener.Quiet)); + Assert.AreEqual( + $"Caching has not begun for this CacheProgress ({_cp.ID}), so there is nothing to load and this strategy should not be used.", + ex.Message); } - [Test] - public void CacheProvider_Normal() + finally { - var pt1 = new ProcessTask(CatalogueRepository, _lmd, LoadStage.GetFiles); - pt1.Path = typeof(BasicCacheDataProvider).FullName; - pt1.ProcessTaskType = ProcessTaskType.DataProvider; - pt1.Name = "Cache1"; - pt1.SaveToDatabase(); - - _cp.CacheFillProgress = new DateTime(2010, 1, 1); - _cp.SaveToDatabase(); - - var projDir = LoadDirectory.CreateDirectoryStructure(new DirectoryInfo(TestContext.CurrentContext.TestDirectory), "delme", true); - _lmd.LocationOfFlatFiles = projDir.RootPath.FullName; - _lmd.SaveToDatabase(); - - var pipeAssembler = new TestDataPipelineAssembler("CacheProvider_Normal", CatalogueRepository); - pipeAssembler.ConfigureCacheProgressToUseThePipeline(_cp); - - try - { - var strategy = _factory.Create(_lp,new ThrowImmediatelyDataLoadEventListener()); - Assert.AreEqual(typeof(SingleScheduleCacheDateTrackingStrategy), strategy.GetType()); - - var dates = strategy.GetDates(10, false); - Assert.AreEqual(0,dates.Count); //zero dates to load because no files in cache - - File.WriteAllText(Path.Combine(projDir.Cache.FullName, "2001-01-02.zip"),"bobbobbobyobyobyobbzzztproprietarybitztreamzippy"); - File.WriteAllText(Path.Combine(projDir.Cache.FullName, "2001-01-03.zip"), "bobbobbobyobyobyobbzzztproprietarybitztreamzippy"); - File.WriteAllText(Path.Combine(projDir.Cache.FullName, "2001-01-05.zip"), "bobbobbobyobyobyobbzzztproprietarybitztreamzippy"); - - strategy = _factory.Create(_lp,new ThrowImmediatelyDataLoadEventListener()); - Assert.AreEqual(typeof(SingleScheduleCacheDateTrackingStrategy), strategy.GetType()); - dates = strategy.GetDates(10, false); - Assert.AreEqual(3, dates.Count); //zero dates to load because no files in cache - - - } - finally - { - _cp.Pipeline_ID = null; - pipeAssembler.Destroy(); - projDir.RootPath.Delete(true); - } + _cp.Pipeline_ID = null; + pipeAssembler.Destroy(); + projDir.RootPath.Delete(true); } } -} + [Test] + public void CacheProvider_Normal() + { + var pt1 = new ProcessTask(CatalogueRepository, _lmd, LoadStage.GetFiles) + { + Path = 
typeof(BasicCacheDataProvider).FullName, + ProcessTaskType = ProcessTaskType.DataProvider, + Name = "Cache1" + }; + pt1.SaveToDatabase(); + + _cp.CacheFillProgress = new DateTime(2010, 1, 1); + _cp.SaveToDatabase(); + + var projDir = + LoadDirectory.CreateDirectoryStructure(new DirectoryInfo(TestContext.CurrentContext.TestDirectory), "delme", + true); + _lmd.LocationOfFlatFiles = projDir.RootPath.FullName; + _lmd.SaveToDatabase(); + + var pipeAssembler = new TestDataPipelineAssembler("CacheProvider_Normal", CatalogueRepository); + pipeAssembler.ConfigureCacheProgressToUseThePipeline(_cp); + + try + { + var strategy = _factory.Create(_lp, ThrowImmediatelyDataLoadEventListener.Quiet); + Assert.AreEqual(typeof(SingleScheduleCacheDateTrackingStrategy), strategy.GetType()); + + var dates = strategy.GetDates(10, false); + Assert.AreEqual(0, dates.Count); //zero dates to load because no files in cache + + File.WriteAllText(Path.Combine(projDir.Cache.FullName, "2001-01-02.zip"), + "bobbobbobyobyobyobbzzztproprietarybitztreamzippy"); + File.WriteAllText(Path.Combine(projDir.Cache.FullName, "2001-01-03.zip"), + "bobbobbobyobyobyobbzzztproprietarybitztreamzippy"); + File.WriteAllText(Path.Combine(projDir.Cache.FullName, "2001-01-05.zip"), + "bobbobbobyobyobyobbzzztproprietarybitztreamzippy"); + + strategy = _factory.Create(_lp, ThrowImmediatelyDataLoadEventListener.Quiet); + Assert.AreEqual(typeof(SingleScheduleCacheDateTrackingStrategy), strategy.GetType()); + dates = strategy.GetDates(10, false); + Assert.AreEqual(3, dates.Count); //zero dates to load because no files in cache + } + finally + { + _cp.Pipeline_ID = null; + pipeAssembler.Destroy(); + projDir.RootPath.Delete(true); + } + } +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/KVPAttacherTest.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/KVPAttacherTest.cs index e35a3ea63e..8dbed8328a 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/KVPAttacherTest.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/KVPAttacherTest.cs @@ -16,223 +16,226 @@ using Rdmp.Core.DataLoad.Modules.Attachers; using Rdmp.Core.DataLoad.Modules.DataFlowOperations; using Rdmp.Core.DataLoad.Modules.DataFlowSources; -using ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Checks; using Tests.Common; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration; + +public class KVPAttacherTest : DatabaseTests { - public class KVPAttacherTest:DatabaseTests + public enum KVPAttacherTestCase { + OneFileWithPrimaryKey, + OneFileWithoutPrimaryKey, + TwoFilesWithPrimaryKey + } - public enum KVPAttacherTestCase - { - OneFileWithPrimaryKey, - OneFileWithoutPrimaryKey, - TwoFilesWithPrimaryKey - } + [Test] + [TestCase(KVPAttacherTestCase.OneFileWithPrimaryKey)] + [TestCase(KVPAttacherTestCase.OneFileWithoutPrimaryKey)] + [TestCase(KVPAttacherTestCase.TwoFilesWithPrimaryKey)] + public void KVPAttacherTest_Attach(KVPAttacherTestCase testCase) + { + var hasPk = testCase != KVPAttacherTestCase.OneFileWithoutPrimaryKey; + var db = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer); + var attacher = new KVPAttacher(); + var tbl = db.ExpectTable("KVPTestTable"); - [Test] - [TestCase(KVPAttacherTestCase.OneFileWithPrimaryKey)] - [TestCase(KVPAttacherTestCase.OneFileWithoutPrimaryKey)] - [TestCase(KVPAttacherTestCase.TwoFilesWithPrimaryKey)] - public void KVPAttacherTest_Attach(KVPAttacherTestCase testCase) - { - bool hasPk = testCase != 
KVPAttacherTestCase.OneFileWithoutPrimaryKey; - var db = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer); - var attacher = new KVPAttacher(); - var tbl = db.ExpectTable("KVPTestTable"); + var workingDir = new DirectoryInfo(TestContext.CurrentContext.TestDirectory); + var parentDir = workingDir.CreateSubdirectory("KVPAttacherTestProjectDirectory"); + var projectDir = LoadDirectory.CreateDirectoryStructure(parentDir, "KVPAttacherTest", true); - var workingDir = new DirectoryInfo(TestContext.CurrentContext.TestDirectory); - var parentDir = workingDir.CreateSubdirectory("KVPAttacherTestProjectDirectory"); - var projectDir = LoadDirectory.CreateDirectoryStructure(parentDir, "KVPAttacherTest", true); + var filepk = "kvpTestFilePK.csv"; + var filepk2 = "kvpTestFilePK2.csv"; + var fileNoPk = "kvpTestFile_NoPK.csv"; - string filepk = "kvpTestFilePK.csv"; - string filepk2 = "kvpTestFilePK2.csv"; - string fileNoPk = "kvpTestFile_NoPK.csv"; + if (testCase is KVPAttacherTestCase.OneFileWithPrimaryKey or KVPAttacherTestCase.TwoFilesWithPrimaryKey) + CopyToBin(projectDir, filepk); - if (testCase == KVPAttacherTestCase.OneFileWithPrimaryKey || testCase == KVPAttacherTestCase.TwoFilesWithPrimaryKey) - CopyToBin(projectDir, filepk); + if (testCase == KVPAttacherTestCase.TwoFilesWithPrimaryKey) + CopyToBin(projectDir, filepk2); - if (testCase == KVPAttacherTestCase.TwoFilesWithPrimaryKey) - CopyToBin(projectDir, filepk2); - - if (testCase == KVPAttacherTestCase.OneFileWithoutPrimaryKey) - CopyToBin(projectDir, fileNoPk); - - if (tbl.Exists()) - tbl.Drop(); - - //Create destination data table on server (where the data will ultimately end SetUp) - using (var con = (SqlConnection) tbl.Database.Server.GetConnection()) - { - con.Open(); - string sql = hasPk - ? "CREATE TABLE KVPTestTable (Person varchar(100), Test varchar(50), Result int)" - : "CREATE TABLE KVPTestTable (Test varchar(50), Result int)"; - - new SqlCommand(sql, con).ExecuteNonQuery(); - } - - var remnantPipeline = - CatalogueRepository.GetAllObjects().SingleOrDefault(p=>p.Name.Equals("KVPAttacherTestPipeline")); - - if(remnantPipeline != null) - remnantPipeline.DeleteInDatabase(); - - //Setup the Pipeline - var p = new Pipeline(CatalogueRepository, "KVPAttacherTestPipeline"); - - //With a CSV source - var flatFileLoad = new PipelineComponent(CatalogueRepository, p, typeof (DelimitedFlatFileDataFlowSource), 0,"Data Flow Source"); - - //followed by a Transpose that turns columns to rows (see how the test file grows right with new records instead of down, this is common in KVP input files but not always) - var transpose = new PipelineComponent(CatalogueRepository, p, typeof (Transposer), 1, "Transposer"); - - var saneHeaders = transpose.CreateArgumentsForClassIfNotExists(typeof (Transposer)).Single(a => a.Name.Equals("MakeHeaderNamesSane")); - saneHeaders.SetValue(false); - saneHeaders.SaveToDatabase(); - - //set the source separator to comma - flatFileLoad.CreateArgumentsForClassIfNotExists(typeof(DelimitedFlatFileDataFlowSource)); - var arg = flatFileLoad.PipelineComponentArguments.Single(a => a.Name.Equals("Separator")); - arg.SetValue(","); - arg.SaveToDatabase(); - - arg = flatFileLoad.PipelineComponentArguments.Single(a => a.Name.Equals("MakeHeaderNamesSane")); - arg.SetValue(false); - arg.SaveToDatabase(); - - p.SourcePipelineComponent_ID = flatFileLoad.ID; - p.SaveToDatabase(); - - try - { - attacher.PipelineForReadingFromFlatFile = p; - attacher.TableName = "KVPTestTable"; - - switch (testCase) - { - case 
KVPAttacherTestCase.OneFileWithPrimaryKey: - attacher.FilePattern = filepk; - break; - case KVPAttacherTestCase.OneFileWithoutPrimaryKey: - attacher.FilePattern = fileNoPk; - break; - case KVPAttacherTestCase.TwoFilesWithPrimaryKey: - attacher.FilePattern = "kvpTestFilePK*.*"; - break; - default: - throw new ArgumentOutOfRangeException("testCase"); - } - - - if (hasPk) - attacher.PrimaryKeyColumns = "Person"; - - attacher.TargetDataTableKeyColumnName = "Test"; - attacher.TargetDataTableValueColumnName = "Result"; - - attacher.Initialize(projectDir,db); - - attacher.Attach(new ThrowImmediatelyDataLoadJob(), new GracefulCancellationToken()); - - //test file contains 291 values belonging to 3 different people - int expectedRows = 291; - - //if we loaded two files (or should have done) then add the number of values in that file (54) - if (testCase == KVPAttacherTestCase.TwoFilesWithPrimaryKey) - expectedRows += 54; - - Assert.AreEqual(expectedRows, tbl.GetRowCount()); - - } - finally - { - p.DeleteInDatabase(); - tbl.Drop(); - } - } + if (testCase == KVPAttacherTestCase.OneFileWithoutPrimaryKey) + CopyToBin(projectDir, fileNoPk); + if (tbl.Exists()) + tbl.Drop(); - [Test] - public void KVPAttacherCheckTest_TableNameMissing() + //Create destination data table on server (where the data will ultimately end SetUp) + using (var con = (SqlConnection)tbl.Database.Server.GetConnection()) { - var ex = Assert.Throws(() => new KVPAttacher().Check(new ThrowImmediatelyCheckNotifier())); - Assert.AreEqual("Either argument TableName or TableToLoad must be set Rdmp.Core.DataLoad.Modules.Attachers.KVPAttacher, you should specify this value.",ex.Message); + con.Open(); + var sql = hasPk + ? "CREATE TABLE KVPTestTable (Person varchar(100), Test varchar(50), Result int)" + : "CREATE TABLE KVPTestTable (Test varchar(50), Result int)"; + + new SqlCommand(sql, con).ExecuteNonQuery(); } - [Test] - public void KVPAttacherCheckTest_FilePathMissing() - { - var kvp = new KVPAttacher(); - kvp.TableName = "MyTable"; + var remnantPipeline = + CatalogueRepository.GetAllObjects() + .SingleOrDefault(p => p.Name.Equals("KVPAttacherTestPipeline")); - var ex = Assert.Throws(()=>kvp.Check(new ThrowImmediatelyCheckNotifier())); - Assert.IsTrue(ex.Message.StartsWith("Argument FilePattern has not been set")); - } + remnantPipeline?.DeleteInDatabase(); + //Setup the Pipeline + var p = new Pipeline(CatalogueRepository, "KVPAttacherTestPipeline"); + //With a CSV source + var flatFileLoad = new PipelineComponent(CatalogueRepository, p, typeof(DelimitedFlatFileDataFlowSource), 0, + "Data Flow Source"); - [Test] - [TestCase("PrimaryKeyColumns")] - [TestCase("TargetDataTableKeyColumnName")] - [TestCase("TargetDataTableValueColumnName")] - public void KVPAttacherCheckTest_BasicArgumentMissing(string missingField) + //followed by a Transpose that turns columns to rows (see how the test file grows right with new records instead of down, this is common in KVP input files but not always) + var transpose = new PipelineComponent(CatalogueRepository, p, typeof(Transposer), 1, "Transposer"); + + var saneHeaders = transpose.CreateArgumentsForClassIfNotExists(typeof(Transposer)) + .Single(a => a.Name.Equals("MakeHeaderNamesSane")); + saneHeaders.SetValue(false); + saneHeaders.SaveToDatabase(); + + //set the source separator to comma + flatFileLoad.CreateArgumentsForClassIfNotExists(typeof(DelimitedFlatFileDataFlowSource)); + var arg = flatFileLoad.PipelineComponentArguments.Single(a => a.Name.Equals("Separator")); + arg.SetValue(","); + 
arg.SaveToDatabase(); + + arg = flatFileLoad.PipelineComponentArguments.Single(a => a.Name.Equals("MakeHeaderNamesSane")); + arg.SetValue(false); + arg.SaveToDatabase(); + + p.SourcePipelineComponent_ID = flatFileLoad.ID; + p.SaveToDatabase(); + + try { - var kvp = new KVPAttacher(); - kvp.TableName = "MyTable"; - kvp.FilePattern = "*.csv"; + attacher.PipelineForReadingFromFlatFile = p; + attacher.TableName = "KVPTestTable"; + + attacher.FilePattern = testCase switch + { + KVPAttacherTestCase.OneFileWithPrimaryKey => filepk, + KVPAttacherTestCase.OneFileWithoutPrimaryKey => fileNoPk, + KVPAttacherTestCase.TwoFilesWithPrimaryKey => "kvpTestFilePK*.*", + _ => throw new ArgumentOutOfRangeException(nameof(testCase)) + }; - if (missingField != "PrimaryKeyColumns") - kvp.PrimaryKeyColumns = "dave,bob"; + if (hasPk) + attacher.PrimaryKeyColumns = "Person"; - if (missingField != "TargetDataTableKeyColumnName") - kvp.TargetDataTableKeyColumnName = "frank"; + attacher.TargetDataTableKeyColumnName = "Test"; + attacher.TargetDataTableValueColumnName = "Result"; - if (missingField != "TargetDataTableValueColumnName") - kvp.TargetDataTableValueColumnName = "smith"; - - var ex = Assert.Throws(() => kvp.Check(new ThrowImmediatelyCheckNotifier())); - Assert.IsTrue(ex.Message.StartsWith("Argument " + missingField + " has not been set")); + attacher.Initialize(projectDir, db); + + attacher.Attach(new ThrowImmediatelyDataLoadJob(), new GracefulCancellationToken()); + + //test file contains 291 values belonging to 3 different people + var expectedRows = 291; + + //if we loaded two files (or should have done) then add the number of values in that file (54) + if (testCase == KVPAttacherTestCase.TwoFilesWithPrimaryKey) + expectedRows += 54; + + Assert.AreEqual(expectedRows, tbl.GetRowCount()); } + finally + { + p.DeleteInDatabase(); + tbl.Drop(); + } + } - [Test] - [TestCase(true)] - [TestCase(false)] - public void KVPAttacherCheckTest_Crossover(bool isKeyColumnDuplicate) + + [Test] + public void KVPAttacherCheckTest_TableNameMissing() + { + var ex = Assert.Throws(() => new KVPAttacher().Check(ThrowImmediatelyCheckNotifier.Quiet)); + Assert.AreEqual( + "Either argument TableName or TableToLoad must be set Rdmp.Core.DataLoad.Modules.Attachers.KVPAttacher, you should specify this value.", + ex.Message); + } + + [Test] + public void KVPAttacherCheckTest_FilePathMissing() + { + var kvp = new KVPAttacher + { + TableName = "MyTable" + }; + + var ex = Assert.Throws(() => kvp.Check(ThrowImmediatelyCheckNotifier.Quiet)); + Assert.IsTrue(ex.Message.StartsWith("Argument FilePattern has not been set")); + } + + + [Test] + [TestCase("PrimaryKeyColumns")] + [TestCase("TargetDataTableKeyColumnName")] + [TestCase("TargetDataTableValueColumnName")] + public void KVPAttacherCheckTest_BasicArgumentMissing(string missingField) + { + var kvp = new KVPAttacher { - var kvp = new KVPAttacher(); - kvp.TableName = "MyTable"; - kvp.FilePattern = "*.csv"; + TableName = "MyTable", + FilePattern = "*.csv" + }; + + if (missingField != "PrimaryKeyColumns") kvp.PrimaryKeyColumns = "dave,bob"; - kvp.TargetDataTableKeyColumnName = isKeyColumnDuplicate ?"dave":"Fish"; - kvp.TargetDataTableValueColumnName = isKeyColumnDuplicate ? "tron" : "dave"; - var ex = Assert.Throws(() => kvp.Check(new ThrowImmediatelyCheckNotifier())); - Assert.AreEqual("Field 'dave' is both a PrimaryKeyColumn and a TargetDataTable column, this is not allowed. 
Your fields Pk1,Pk2,Pketc,Key,Value must all be mutually exclusive", ex.Message); - } + if (missingField != "TargetDataTableKeyColumnName") + kvp.TargetDataTableKeyColumnName = "frank"; - [Test] - public void KVPAttacherCheckTest_CrossoverKeyAndValue() + if (missingField != "TargetDataTableValueColumnName") + kvp.TargetDataTableValueColumnName = "smith"; + + var ex = Assert.Throws(() => kvp.Check(ThrowImmediatelyCheckNotifier.Quiet)); + Assert.IsTrue(ex.Message.StartsWith($"Argument {missingField} has not been set")); + } + + [Test] + [TestCase(true)] + [TestCase(false)] + public void KVPAttacherCheckTest_Crossover(bool isKeyColumnDuplicate) + { + var kvp = new KVPAttacher { - var kvp = new KVPAttacher(); - kvp.TableName = "MyTable"; - kvp.FilePattern = "*.csv"; - kvp.PrimaryKeyColumns = "dave"; - kvp.TargetDataTableKeyColumnName = "Key"; - kvp.TargetDataTableValueColumnName = "Key"; - - var ex = Assert.Throws(() => kvp.Check(new ThrowImmediatelyCheckNotifier())); - Assert.AreEqual("TargetDataTableKeyColumnName cannot be the same as TargetDataTableValueColumnName", ex.Message); - } + TableName = "MyTable", + FilePattern = "*.csv", + PrimaryKeyColumns = "dave,bob", + TargetDataTableKeyColumnName = isKeyColumnDuplicate ? "dave" : "Fish", + TargetDataTableValueColumnName = isKeyColumnDuplicate ? "tron" : "dave" + }; + + var ex = Assert.Throws(() => kvp.Check(ThrowImmediatelyCheckNotifier.Quiet)); + Assert.AreEqual( + "Field 'dave' is both a PrimaryKeyColumn and a TargetDataTable column, this is not allowed. Your fields Pk1,Pk2,Pketc,Key,Value must all be mutually exclusive", + ex.Message); + } - private void CopyToBin(LoadDirectory projDir, string file) + [Test] + public void KVPAttacherCheckTest_CrossoverKeyAndValue() + { + var kvp = new KVPAttacher { - - string testFileLocation = Path.Combine(TestContext.CurrentContext.TestDirectory,"DataLoad","Engine","Resources" , file); - Assert.IsTrue(File.Exists(testFileLocation)); + TableName = "MyTable", + FilePattern = "*.csv", + PrimaryKeyColumns = "dave", + TargetDataTableKeyColumnName = "Key", + TargetDataTableValueColumnName = "Key" + }; + + var ex = Assert.Throws(() => kvp.Check(ThrowImmediatelyCheckNotifier.Quiet)); + Assert.AreEqual("TargetDataTableKeyColumnName cannot be the same as TargetDataTableValueColumnName", + ex.Message); + } - File.Copy(testFileLocation, projDir.ForLoading.FullName + Path.DirectorySeparatorChar + file, true); - } + private static void CopyToBin(LoadDirectory projDir, string file) + { + var testFileLocation = Path.Combine(TestContext.CurrentContext.TestDirectory, "DataLoad", "Engine", "Resources", + file); + Assert.IsTrue(File.Exists(testFileLocation)); + + File.Copy(testFileLocation, projDir.ForLoading.FullName + Path.DirectorySeparatorChar + file, true); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/MigrationStrategyTests.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/MigrationStrategyTests.cs index 03cd8b31e1..e10701c77d 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/MigrationStrategyTests.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/MigrationStrategyTests.cs @@ -7,34 +7,31 @@ using System; using FAnsi.Connections; using FAnsi.Discovery; -using Moq; +using NSubstitute; using NUnit.Framework; using Rdmp.Core.DataLoad.Engine.Job; using Rdmp.Core.DataLoad.Engine.Migration; using Rdmp.Core.DataLoad.Engine.Migration.QueryBuilding; using Tests.Common; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration +namespace 
Rdmp.Core.Tests.DataLoad.Engine.Integration; + +internal class MigrationStrategyTests : DatabaseTests { - class MigrationStrategyTests : DatabaseTests + [Test] + public void OverwriteMigrationStrategy_NoPrimaryKey() { - [Test] - public void OverwriteMigrationStrategy_NoPrimaryKey() - { - var db = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer); + var db = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer); - var from = db.CreateTable("Bob",new[] {new DatabaseColumnRequest("Field", "int")}); - var to = db.CreateTable("Frank", new[] { new DatabaseColumnRequest("Field", "int") }); + var from = db.CreateTable("Bob", new[] { new DatabaseColumnRequest("Field", "int") }); + var to = db.CreateTable("Frank", new[] { new DatabaseColumnRequest("Field", "int") }); - var connection = Mock.Of<IManagedConnection>(); - var job = Mock.Of<IDataLoadJob>(); - var strategy = new OverwriteMigrationStrategy(connection); + var connection = Substitute.For<IManagedConnection>(); + var strategy = new OverwriteMigrationStrategy(connection); - var migrationFieldProcessor = Mock.Of<IMigrationFieldProcessor>(); + var migrationFieldProcessor = Substitute.For<IMigrationFieldProcessor>(); - var ex = Assert.Throws(() => new MigrationColumnSet(from, to, migrationFieldProcessor)); - Assert.AreEqual("There are no primary keys declared in table Bob", ex.Message); - } + var ex = Assert.Throws(() => new MigrationColumnSet(from, to, migrationFieldProcessor)); + Assert.AreEqual("There are no primary keys declared in table Bob", ex.Message); } - -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/PayloadTest.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/PayloadTest.cs index 5a5debabe7..e59b4cce06 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/PayloadTest.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/PayloadTest.cs @@ -17,78 +17,83 @@ using Rdmp.Core.DataLoad.Engine.LoadExecution; using Rdmp.Core.DataLoad.Engine.LoadProcess; using Rdmp.Core.Logging; -using ReusableLibraryCode.Checks; -using ReusableLibraryCode.Progress; +using Rdmp.Core.Repositories; +using Rdmp.Core.ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Progress; using Tests.Common; using Tests.Common.Scenarios; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration; + +public class PayloadTest : DatabaseTests { - public class PayloadTest:DatabaseTests + public static object payload = new(); + public static bool Success; + + [Test] + public void TestPayloadInjection() { - public static object payload = new object(); - public static bool Success = false; + var b = new BulkTestsData(CatalogueRepository, GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer), 10); + b.SetupTestData(); + b.ImportAsCatalogue(); - [Test] - public void TestPayloadInjection() + var lmd = new LoadMetadata(CatalogueRepository, "Loading") { - BulkTestsData b = new BulkTestsData(CatalogueRepository,GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer),10); - b.SetupTestData(); - b.ImportAsCatalogue(); + LocationOfFlatFiles = LoadDirectory + .CreateDirectoryStructure(new DirectoryInfo(TestContext.CurrentContext.TestDirectory), "delme", true) + .RootPath.FullName + }; + lmd.SaveToDatabase(); - var lmd = new LoadMetadata(CatalogueRepository, "Loading"); - lmd.LocationOfFlatFiles = LoadDirectory.CreateDirectoryStructure(new DirectoryInfo(TestContext.CurrentContext.TestDirectory),"delme", true).RootPath.FullName; - lmd.SaveToDatabase(); + MEF.AddTypeToCatalogForTesting(typeof(TestPayloadAttacher)); -
CatalogueRepository.MEF.AddTypeToCatalogForTesting(typeof(TestPayloadAttacher)); + b.catalogue.LoadMetadata_ID = lmd.ID; + b.catalogue.LoggingDataTask = "TestPayloadInjection"; + b.catalogue.SaveToDatabase(); - b.catalogue.LoadMetadata_ID = lmd.ID; - b.catalogue.LoggingDataTask = "TestPayloadInjection"; - b.catalogue.SaveToDatabase(); + var lm = new LogManager(CatalogueRepository.GetDefaultFor(PermissableDefaults.LiveLoggingServer_ID)); + lm.CreateNewLoggingTaskIfNotExists("TestPayloadInjection"); - var lm = new LogManager(CatalogueRepository.GetDefaultFor(PermissableDefaults.LiveLoggingServer_ID)); - lm.CreateNewLoggingTaskIfNotExists("TestPayloadInjection"); + var pt = new ProcessTask(CatalogueRepository, lmd, LoadStage.Mounting) + { + Path = typeof(TestPayloadAttacher).FullName, + ProcessTaskType = ProcessTaskType.Attacher + }; + pt.SaveToDatabase(); - var pt = new ProcessTask(CatalogueRepository, lmd, LoadStage.Mounting); - pt.Path = typeof (TestPayloadAttacher).FullName; - pt.ProcessTaskType = ProcessTaskType.Attacher; - pt.SaveToDatabase(); + var config = new HICDatabaseConfiguration(GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer).Server); + var factory = new HICDataLoadFactory(lmd, config, new HICLoadConfigurationFlags(), CatalogueRepository, lm); + var execution = factory.Create(ThrowImmediatelyDataLoadEventListener.Quiet); - var config = new HICDatabaseConfiguration(GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer).Server); - var factory = new HICDataLoadFactory(lmd, config, new HICLoadConfigurationFlags(), CatalogueRepository, lm); - IDataLoadExecution execution = factory.Create(new ThrowImmediatelyDataLoadEventListener()); + var procedure = new DataLoadProcess(RepositoryLocator, lmd, null, lm, + ThrowImmediatelyDataLoadEventListener.Quiet, execution, config); - var proceedure = new DataLoadProcess(RepositoryLocator, lmd, null, lm, new ThrowImmediatelyDataLoadEventListener(), execution, config); + procedure.Run(new GracefulCancellationToken(), payload); - proceedure.Run(new GracefulCancellationToken(), payload); + Assert.IsTrue(Success, "Expected IAttacher to detect Payload and set this property to true"); + } - Assert.IsTrue(PayloadTest.Success, "Expected IAttacher to detect Payload and set this property to true"); + + public class TestPayloadAttacher : Attacher, IPluginAttacher + { + public TestPayloadAttacher() : base(false) + { } + public override ExitCodeType Attach(IDataLoadJob job, GracefulCancellationToken cancellationToken) + { + job.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, $"Found Payload:{job.Payload}")); + Success = ReferenceEquals(payload, job.Payload); + + return ExitCodeType.OperationNotRequired; + } + + public override void Check(ICheckNotifier notifier) + { + } - public class TestPayloadAttacher : Attacher,IPluginAttacher + public override void LoadCompletedSoDispose(ExitCodeType exitCode, IDataLoadEventListener postLoadEventListener) { - public TestPayloadAttacher() : base(false) - { - } - - public override ExitCodeType Attach(IDataLoadJob job, GracefulCancellationToken cancellationToken) - { - job.OnNotify(this,new NotifyEventArgs(ProgressEventType.Information, "Found Payload:" + job.Payload)); - PayloadTest.Success = ReferenceEquals(payload, job.Payload); - - return ExitCodeType.OperationNotRequired; - } - - public override void Check(ICheckNotifier notifier) - { - - } - - public override void LoadCompletedSoDispose(ExitCodeType exitCode, IDataLoadEventListener postLoadEventListener) - { - - } } } -} +} \ No newline at 
end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/ArchiveFilesTests.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/ArchiveFilesTests.cs index 5c0ff9d640..31bada0452 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/ArchiveFilesTests.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/ArchiveFilesTests.cs @@ -8,7 +8,7 @@ using System.IO; using System.IO.Compression; using System.Linq; -using Moq; +using NSubstitute; using NUnit.Framework; using Rdmp.Core.Curation; using Rdmp.Core.DataFlowPipeline; @@ -18,86 +18,88 @@ using Rdmp.Core.Logging; using Tests.Common; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration.PipelineTests +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration.PipelineTests; + +public class ArchiveFilesTests : DatabaseTests { - public class ArchiveFilesTests : DatabaseTests + [Test] + public void TestAllFilesAreArchived() { - [Test] - public void TestAllFilesAreArchived() + var directoryHelper = new TestDirectoryHelper(GetType()); + directoryHelper.SetUp(); + + var forArchiving = directoryHelper.Directory.CreateSubdirectory("forArchiving"); + var forLoading = directoryHelper.Directory.CreateSubdirectory("forLoading"); + File.WriteAllText(Path.Combine(forLoading.FullName, "test.txt"), "test data"); + var subDir = forLoading.CreateSubdirectory("subdir"); + File.WriteAllText(Path.Combine(subDir.FullName, "subdir.txt"), "test data in subdir"); + + // test the hidden dir which the archiver should ignore + var hiddenDir = forLoading.CreateSubdirectory(ArchiveFiles.HiddenFromArchiver); + File.WriteAllText(Path.Combine(hiddenDir.FullName, "hidden.txt"), "I should not appear in the archive"); + + var archiveComponent = new ArchiveFiles(new HICLoadConfigurationFlags()); + + var dataLoadInfo = Substitute.For(); + dataLoadInfo.ID.Returns(1); + + var LoadDirectory = Substitute.For(); + LoadDirectory.ForArchiving.Returns(forArchiving); + LoadDirectory.ForLoading.Returns(forLoading); + + var job = Substitute.For(); + job.DataLoadInfo.Returns(dataLoadInfo); + job.LoadDirectory.Returns(LoadDirectory); + + try { - var directoryHelper = new TestDirectoryHelper(GetType()); - directoryHelper.SetUp(); - - var forArchiving = directoryHelper.Directory.CreateSubdirectory("forArchiving"); - var forLoading = directoryHelper.Directory.CreateSubdirectory("forLoading"); - File.WriteAllText(Path.Combine(forLoading.FullName, "test.txt"), "test data"); - var subDir = forLoading.CreateSubdirectory("subdir"); - File.WriteAllText(Path.Combine(subDir.FullName, "subdir.txt"), "test data in subdir"); - - // test the hidden dir which the archiver should ignore - var hiddenDir = forLoading.CreateSubdirectory(ArchiveFiles.HiddenFromArchiver); - File.WriteAllText(Path.Combine(hiddenDir.FullName, "hidden.txt"), "I should not appear in the archive"); - - var archiveComponent = new ArchiveFiles(new HICLoadConfigurationFlags()); - - var dataLoadInfo = Mock.Of(info => info.ID==1); - - var LoadDirectory = Mock.Of(d => d.ForArchiving==forArchiving && d.ForLoading==forLoading); - - var job = Mock.Of(j => j.DataLoadInfo==dataLoadInfo); - job.LoadDirectory = LoadDirectory; - - try - { - archiveComponent.Run(job, new GracefulCancellationToken()); - - // first we expect a file in forArchiving called 1.zip - var zipFilename = Path.Combine(forArchiving.FullName, "1.zip"); - Assert.True(File.Exists(zipFilename)); - - // there should be two entries - using (var archive = ZipFile.Open(zipFilename, ZipArchiveMode.Read)) - { - Assert.AreEqual(2, 
archive.Entries.Count, "There should be two entries in this archive: one from the root and one from the subdirectory"); - Assert.IsTrue(archive.Entries.Any(entry => entry.FullName.Equals(@"subdir/subdir.txt"))); - Assert.IsTrue(archive.Entries.Any(entry => entry.FullName.Equals(@"test.txt"))); - } - } - finally - { - directoryHelper.TearDown(); - } + archiveComponent.Run(job, new GracefulCancellationToken()); + + // first we expect a file in forArchiving called 1.zip + var zipFilename = Path.Combine(forArchiving.FullName, "1.zip"); + Assert.True(File.Exists(zipFilename)); + + // there should be two entries + using var archive = ZipFile.Open(zipFilename, ZipArchiveMode.Read); + Assert.AreEqual(2, archive.Entries.Count, + "There should be two entries in this archive: one from the root and one from the subdirectory"); + Assert.IsTrue(archive.Entries.Any(entry => entry.FullName.Equals(@"subdir/subdir.txt"))); + Assert.IsTrue(archive.Entries.Any(entry => entry.FullName.Equals(@"test.txt"))); } - - [Test] - public void CreateArchiveWithNoFiles_ShouldThrow() + finally { - var directoryHelper = new TestDirectoryHelper(GetType()); - directoryHelper.SetUp(); - - var testDir = directoryHelper.Directory.CreateSubdirectory("CreateArchiveWithNoFiles_ShouldThrow"); - - var archiveFiles = new ArchiveFiles(new HICLoadConfigurationFlags()); - var loadDirectory = LoadDirectory.CreateDirectoryStructure(testDir, "dataset"); - - var job = Mock.Of(j => j.DataLoadInfo==Mock.Of()); - job.LoadDirectory = loadDirectory; - - try - { - archiveFiles.Run(job, new GracefulCancellationToken()); - - foreach (FileInfo fileInfo in loadDirectory.ForArchiving.GetFiles("*.zip")) - Console.WriteLine("About to throw SetUp because of zip file:" + fileInfo.FullName); - - Assert.IsFalse(loadDirectory.ForArchiving.GetFiles("*.zip").Any(),"There should not be any zip files in the archive directory!"); - } - finally - { - directoryHelper.TearDown(); - } + directoryHelper.TearDown(); } - } -} + [Test] + public void CreateArchiveWithNoFiles_ShouldThrow() + { + var directoryHelper = new TestDirectoryHelper(GetType()); + directoryHelper.SetUp(); + + var testDir = directoryHelper.Directory.CreateSubdirectory("CreateArchiveWithNoFiles_ShouldThrow"); + + var archiveFiles = new ArchiveFiles(new HICLoadConfigurationFlags()); + var loadDirectory = LoadDirectory.CreateDirectoryStructure(testDir, "dataset"); + + var job = Substitute.For(); + job.DataLoadInfo.Returns(Substitute.For()); + job.LoadDirectory.Returns(loadDirectory); + + try + { + archiveFiles.Run(job, new GracefulCancellationToken()); + + foreach (var fileInfo in loadDirectory.ForArchiving.GetFiles("*.zip")) + Console.WriteLine($"About to throw SetUp because of zip file:{fileInfo.FullName}"); + + Assert.IsFalse(loadDirectory.ForArchiving.GetFiles("*.zip").Any(), + "There should not be any zip files in the archive directory!"); + } + finally + { + directoryHelper.TearDown(); + } + } +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/ComponentCompatibilityTests.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/ComponentCompatibilityTests.cs index 63c92cb8e2..fbf27cfa90 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/ComponentCompatibilityTests.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/ComponentCompatibilityTests.cs @@ -4,7 +4,6 @@ // RDMP is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS 
FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. // You should have received a copy of the GNU General Public License along with RDMP. If not, see . -using System; using System.Data; using System.Linq; using NUnit.Framework; @@ -12,33 +11,13 @@ using Rdmp.Core.Repositories; using Tests.Common; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration.PipelineTests +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration.PipelineTests; + +public class ComponentCompatibilityTests : UnitTests { - public class ComponentCompatibilityTests :UnitTests + [Test] + public void GetComponentsCompatibleWithBulkInsertContext() { - [OneTimeSetUp] - protected override void OneTimeSetUp() - { - base.OneTimeSetUp(); - - SetupMEF(); - } - - [Test] - public void GetComponentsCompatibleWithBulkInsertContext() - { - Type[] array = MEF.GetTypes>().ToArray(); - - Assert.Greater(array.Count(),0); - } - - [Test] - public void HowDoesMEFHandleTypeNames() - { - string expected = "Rdmp.Core.DataFlowPipeline.IDataFlowSource(System.Data.DataTable)"; - - Assert.AreEqual(expected, MEF.GetMEFNameForType(typeof(IDataFlowSource))); - } + Assert.True(MEF.GetTypes>().Any()); } -} - +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/Components/AliasHandlerTests.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/Components/AliasHandlerTests.cs index 806263cad2..a7deaf37ce 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/Components/AliasHandlerTests.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/Components/AliasHandlerTests.cs @@ -12,201 +12,213 @@ using Rdmp.Core.DataFlowPipeline; using Rdmp.Core.DataLoad.Modules.DataFlowOperations.Aliases; using Rdmp.Core.DataLoad.Modules.DataFlowOperations.Aliases.Exceptions; -using ReusableLibraryCode.DataAccess; -using ReusableLibraryCode.Progress; +using Rdmp.Core.ReusableLibraryCode.DataAccess; +using Rdmp.Core.ReusableLibraryCode.Progress; using Tests.Common; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration.PipelineTests.Components +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration.PipelineTests.Components; + +public class AliasHandlerTests : DatabaseTests { - public class AliasHandlerTests : DatabaseTests + private ExternalDatabaseServer _server; + private AliasHandler _handler; + + private DiscoveredDatabase _database; + + [SetUp] + protected override void SetUp() { - private ExternalDatabaseServer _server; - private AliasHandler _handler; + base.SetUp(); - private DiscoveredDatabase _database; + _server = new ExternalDatabaseServer(CatalogueRepository, "AliasHandlerTestsServer", null); + _server.SetProperties(GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer)); - [SetUp] - protected override void SetUp() + _database = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer); + var s = _database.Server; + using (var con = s.GetConnection()) { - base.SetUp(); - - _server = new ExternalDatabaseServer(CatalogueRepository, "AliasHandlerTestsServer",null); - _server.SetProperties(GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer)); - - _database = GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer); - var s = _database.Server; - using (var con = s.GetConnection()) - { - con.Open(); - - s.GetCommand("CREATE TABLE AliasHandlerTests (input varchar(50), alias varchar(50))", con).ExecuteNonQuery(); - - //Two names which are aliases of the same person - s.GetCommand("INSERT INTO AliasHandlerTests VALUES ('freddie','craig')", 
con).ExecuteNonQuery(); - s.GetCommand("INSERT INTO AliasHandlerTests VALUES ('craig','freddie')", con).ExecuteNonQuery(); - - //Three names which are all aliases of the same person - s.GetCommand("INSERT INTO AliasHandlerTests VALUES ('peter','paul')", con).ExecuteNonQuery(); - s.GetCommand("INSERT INTO AliasHandlerTests VALUES ('peter','pepey')", con).ExecuteNonQuery(); - s.GetCommand("INSERT INTO AliasHandlerTests VALUES ('pepey','paul')", con).ExecuteNonQuery(); - s.GetCommand("INSERT INTO AliasHandlerTests VALUES ('pepey','peter')", con).ExecuteNonQuery(); - s.GetCommand("INSERT INTO AliasHandlerTests VALUES ('paul','pepey')", con).ExecuteNonQuery(); - s.GetCommand("INSERT INTO AliasHandlerTests VALUES ('paul','peter')", con).ExecuteNonQuery(); - } - - _handler = new AliasHandler(); - - _handler.AliasColumnInInputDataTables = "input"; - _handler.AliasTableSQL = "select * from AliasHandlerTests"; - _handler.DataAccessContext = DataAccessContext.DataLoad; - _handler.ResolutionStrategy = AliasResolutionStrategy.CrashIfAliasesFound; - _handler.TimeoutForAssemblingAliasTable = 10; - _handler.ServerToExecuteQueryOn = _server; - + con.Open(); + + s.GetCommand("CREATE TABLE AliasHandlerTests (input varchar(50), alias varchar(50))", con) + .ExecuteNonQuery(); + + //Two names which are aliases of the same person + s.GetCommand("INSERT INTO AliasHandlerTests VALUES ('freddie','craig')", con).ExecuteNonQuery(); + s.GetCommand("INSERT INTO AliasHandlerTests VALUES ('craig','freddie')", con).ExecuteNonQuery(); + + //Three names which are all aliases of the same person + s.GetCommand("INSERT INTO AliasHandlerTests VALUES ('peter','paul')", con).ExecuteNonQuery(); + s.GetCommand("INSERT INTO AliasHandlerTests VALUES ('peter','pepey')", con).ExecuteNonQuery(); + s.GetCommand("INSERT INTO AliasHandlerTests VALUES ('pepey','paul')", con).ExecuteNonQuery(); + s.GetCommand("INSERT INTO AliasHandlerTests VALUES ('pepey','peter')", con).ExecuteNonQuery(); + s.GetCommand("INSERT INTO AliasHandlerTests VALUES ('paul','pepey')", con).ExecuteNonQuery(); + s.GetCommand("INSERT INTO AliasHandlerTests VALUES ('paul','peter')", con).ExecuteNonQuery(); } - - - [Test] - public void ThrowBecause_ColumnNotInInputDataTable() + _handler = new AliasHandler { - var dt = new DataTable(); - dt.Columns.Add("cannonballer");//not the same as the expected input column name - dt.Rows.Add(new object[] { "yes"}); - - var ex = Assert.Throws(()=>_handler.ProcessPipelineData(dt, new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken())); + AliasColumnInInputDataTables = "input", + AliasTableSQL = "select * from AliasHandlerTests", + DataAccessContext = DataAccessContext.DataLoad, + ResolutionStrategy = AliasResolutionStrategy.CrashIfAliasesFound, + TimeoutForAssemblingAliasTable = 10, + ServerToExecuteQueryOn = _server + }; + } - Assert.AreEqual("You asked to resolve aliases on a column called 'input' but no column by that name appeared in the DataTable being processed. 
Columns in that table were:cannonballer", - ex.Message); - } - [Test] - public void ThrowBecause_NameAndAliasSameValue() - { - var s = _database.Server; - using (var con = s.GetConnection()) - { - con.Open(); - s.GetCommand("INSERT INTO AliasHandlerTests VALUES ('dave','dave')", con).ExecuteNonQuery(); - } + [Test] + public void ThrowBecause_ColumnNotInInputDataTable() + { + var dt = new DataTable(); + dt.Columns.Add("cannonballer"); //not the same as the expected input column name + dt.Rows.Add(new object[] { "yes" }); - var dt = new DataTable(); - dt.Columns.Add("input"); - dt.Rows.Add(new object[] { "candle" }); + var ex = Assert.Throws(() => + _handler.ProcessPipelineData(dt, ThrowImmediatelyDataLoadEventListener.Quiet, + new GracefulCancellationToken())); - var ex = Assert.Throws(()=>_handler.ProcessPipelineData(dt, new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken())); - Assert.IsTrue(ex.Message.StartsWith("Alias table SQL should only return aliases not exact matches")); + Assert.AreEqual( + "You asked to resolve aliases on a column called 'input' but no column by that name appeared in the DataTable being processed. Columns in that table were:cannonballer", + ex.Message); + } + [Test] + public void ThrowBecause_NameAndAliasSameValue() + { + var s = _database.Server; + using (var con = s.GetConnection()) + { + con.Open(); + s.GetCommand("INSERT INTO AliasHandlerTests VALUES ('dave','dave')", con).ExecuteNonQuery(); } - [Test] - public void ThrowBecause_ThreeColumnAliasTable() + var dt = new DataTable(); + dt.Columns.Add("input"); + dt.Rows.Add(new object[] { "candle" }); + + var ex = Assert.Throws(() => + _handler.ProcessPipelineData(dt, ThrowImmediatelyDataLoadEventListener.Quiet, + new GracefulCancellationToken())); + Assert.IsTrue(ex.Message.StartsWith("Alias table SQL should only return aliases not exact matches")); + } + + [Test] + public void ThrowBecause_ThreeColumnAliasTable() + { + var s = _database.Server; + using (var con = s.GetConnection()) { - var s = _database.Server; - using (var con = s.GetConnection()) - { - con.Open(); - s.GetCommand("ALTER TABLE AliasHandlerTests ADD anotherAliascol varchar(50)", con).ExecuteNonQuery(); - } + con.Open(); + s.GetCommand("ALTER TABLE AliasHandlerTests ADD anotherAliascol varchar(50)", con).ExecuteNonQuery(); + } - var dt = new DataTable(); - dt.Columns.Add("input"); - dt.Columns.Add("value"); + var dt = new DataTable(); + dt.Columns.Add("input"); + dt.Columns.Add("value"); - dt.Rows.Add(new object[] { "dave", 100 }); - dt.Rows.Add(new object[] { "frank", 100 }); + dt.Rows.Add(new object[] { "dave", 100 }); + dt.Rows.Add(new object[] { "frank", 100 }); - var ex = Assert.Throws(() => _handler.ProcessPipelineData(dt, new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken())); + var ex = Assert.Throws(() => + _handler.ProcessPipelineData(dt, ThrowImmediatelyDataLoadEventListener.Quiet, + new GracefulCancellationToken())); - Assert.IsTrue(ex.Message.Contains("Alias table SQL resulted in 3 fields being returned")); - } + Assert.IsTrue(ex.Message.Contains("Alias table SQL resulted in 3 fields being returned")); + } - [Test] - public void NoAliases() - { - var dt = new DataTable(); - dt.Columns.Add("input"); - dt.Columns.Add("value"); + [Test] + public void NoAliases() + { + var dt = new DataTable(); + dt.Columns.Add("input"); + dt.Columns.Add("value"); - dt.Rows.Add(new object[] {"dave", 100}); - dt.Rows.Add(new object[] {"frank", 100}); + dt.Rows.Add(new object[] { "dave", 100 }); + 
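For context on the component these tests exercise: the converted SetUp above now builds the AliasHandler through an object initializer. The following is a minimal standalone sketch of that same configuration feeding one batch through ProcessPipelineData; it is illustrative only, the `server` variable is assumed to be an ExternalDatabaseServer that can reach the AliasHandlerTests mapping table, and all property and method names are taken from the test code above.

using System.Data;
using Rdmp.Core.DataFlowPipeline;
using Rdmp.Core.DataLoad.Modules.DataFlowOperations.Aliases;
using Rdmp.Core.ReusableLibraryCode.DataAccess;
using Rdmp.Core.ReusableLibraryCode.Progress;

// Illustrative sketch: configure the handler exactly as SetUp does, then resolve one batch.
var handler = new AliasHandler
{
    AliasColumnInInputDataTables = "input",
    AliasTableSQL = "select * from AliasHandlerTests",
    DataAccessContext = DataAccessContext.DataLoad,
    ResolutionStrategy = AliasResolutionStrategy.MultiplyInputDataRowsByAliases,
    TimeoutForAssemblingAliasTable = 10,
    ServerToExecuteQueryOn = server // assumed: an ExternalDatabaseServer pointing at the alias table
};

using var batch = new DataTable();
batch.Columns.Add("input");
batch.Rows.Add("freddie"); // has the alias 'craig' in the mapping table populated by SetUp

// With MultiplyInputDataRowsByAliases the output gains one extra row per alias found.
var resolved = handler.ProcessPipelineData(batch, ThrowImmediatelyDataLoadEventListener.Quiet,
    new GracefulCancellationToken());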
dt.Rows.Add(new object[] { "frank", 100 }); - var result = _handler.ProcessPipelineData(dt, new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()); + var result = _handler.ProcessPipelineData(dt, ThrowImmediatelyDataLoadEventListener.Quiet, + new GracefulCancellationToken()); - Assert.AreEqual(2,result.Rows.Count); - } + Assert.AreEqual(2, result.Rows.Count); + } - [Test] - public void CrashStrategy() - { - var dt = new DataTable(); - dt.Columns.Add("input"); + [Test] + public void CrashStrategy() + { + var dt = new DataTable(); + dt.Columns.Add("input"); - dt.Rows.Add(new object[] { "paul"}); - Assert.Throws(()=> _handler.ProcessPipelineData(dt, new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken())); - } + dt.Rows.Add(new object[] { "paul" }); + Assert.Throws(() => + _handler.ProcessPipelineData(dt, ThrowImmediatelyDataLoadEventListener.Quiet, + new GracefulCancellationToken())); + } - [Test] - public void ResolveTwoNameAlias() - { - _handler.ResolutionStrategy = AliasResolutionStrategy.MultiplyInputDataRowsByAliases; + [Test] + public void ResolveTwoNameAlias() + { + _handler.ResolutionStrategy = AliasResolutionStrategy.MultiplyInputDataRowsByAliases; - var dt = new DataTable(); - dt.Columns.Add("value1",typeof(int)); - dt.Columns.Add("input"); - dt.Columns.Add("value2", typeof(int)); + var dt = new DataTable(); + dt.Columns.Add("value1", typeof(int)); + dt.Columns.Add("input"); + dt.Columns.Add("value2", typeof(int)); - dt.Rows.Add(new object[] { 99,"dave", 100 }); - dt.Rows.Add(new object[] { 199,"frank", 200 }); - dt.Rows.Add(new object[] { 299,"freddie", 300 }); //has a two name alias + dt.Rows.Add(new object[] { 99, "dave", 100 }); + dt.Rows.Add(new object[] { 199, "frank", 200 }); + dt.Rows.Add(new object[] { 299, "freddie", 300 }); //has a two name alias - var result = _handler.ProcessPipelineData(dt, new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()); + var result = _handler.ProcessPipelineData(dt, ThrowImmediatelyDataLoadEventListener.Quiet, + new GracefulCancellationToken()); - Assert.AreEqual(4, result.Rows.Count); - - Assert.AreEqual(299, result.Rows[2][0]); - Assert.AreEqual("freddie", result.Rows[2][1]);//the original input row which had an alias on it - Assert.AreEqual(300, result.Rows[2][2]); + Assert.AreEqual(4, result.Rows.Count); - Assert.AreEqual(299, result.Rows[3][0]); - Assert.AreEqual("craig", result.Rows[3][1]);//The new row that should have appeared to resolve the freddie=craig alias - Assert.AreEqual(300, result.Rows[3][2]);//value should match the input array - } + Assert.AreEqual(299, result.Rows[2][0]); + Assert.AreEqual("freddie", result.Rows[2][1]); //the original input row which had an alias on it + Assert.AreEqual(300, result.Rows[2][2]); - [Test] - public void ResolveThreeNameAlias() - { - _handler.ResolutionStrategy = AliasResolutionStrategy.MultiplyInputDataRowsByAliases; + Assert.AreEqual(299, result.Rows[3][0]); + Assert.AreEqual("craig", + result.Rows[3][1]); //The new row that should have appeared to resolve the freddie=craig alias + Assert.AreEqual(300, result.Rows[3][2]); //value should match the input array + } - var dt = new DataTable(); - dt.Columns.Add("value1", typeof(int)); - dt.Columns.Add("input"); - dt.Columns.Add("value2", typeof(int)); + [Test] + public void ResolveThreeNameAlias() + { + _handler.ResolutionStrategy = AliasResolutionStrategy.MultiplyInputDataRowsByAliases; - dt.Rows.Add(new object[] { 99, "pepey", 100 });//has a three name alias - dt.Rows.Add(new 
object[] { 199, "frank", 200 }); - dt.Rows.Add(new object[] { 299, "anderson", 300 }); + var dt = new DataTable(); + dt.Columns.Add("value1", typeof(int)); + dt.Columns.Add("input"); + dt.Columns.Add("value2", typeof(int)); - var result = _handler.ProcessPipelineData(dt, new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()); + dt.Rows.Add(new object[] { 99, "pepey", 100 }); //has a three name alias + dt.Rows.Add(new object[] { 199, "frank", 200 }); + dt.Rows.Add(new object[] { 299, "anderson", 300 }); - Assert.AreEqual(5, result.Rows.Count); + var result = _handler.ProcessPipelineData(dt, ThrowImmediatelyDataLoadEventListener.Quiet, + new GracefulCancellationToken()); - Assert.AreEqual(99, result.Rows[0][0]); - Assert.AreEqual("pepey", result.Rows[0][1]);//the original input row which had an alias on it - Assert.AreEqual(100, result.Rows[0][2]); + Assert.AreEqual(5, result.Rows.Count); + Assert.AreEqual(99, result.Rows[0][0]); + Assert.AreEqual("pepey", result.Rows[0][1]); //the original input row which had an alias on it + Assert.AreEqual(100, result.Rows[0][2]); - //new rows are added at the end of the DataTable - Assert.AreEqual(99, result.Rows[3][0]); - Assert.AreEqual("paul", result.Rows[3][1]);//The new row that should have appeared to resolve the pepey=paul=peter alias - Assert.AreEqual(100, result.Rows[3][2]);//value should match the input array - Assert.AreEqual(99, result.Rows[4][0]); - Assert.AreEqual("peter", result.Rows[4][1]);//The new row that should have appeared to resolve the pepey=paul=peter alias - Assert.AreEqual(100, result.Rows[4][2]);//value should match the input array - } + //new rows are added at the end of the DataTable + Assert.AreEqual(99, result.Rows[3][0]); + Assert.AreEqual("paul", + result.Rows[3][1]); //The new row that should have appeared to resolve the pepey=paul=peter alias + Assert.AreEqual(100, result.Rows[3][2]); //value should match the input array + Assert.AreEqual(99, result.Rows[4][0]); + Assert.AreEqual("peter", + result.Rows[4][1]); //The new row that should have appeared to resolve the pepey=paul=peter alias + Assert.AreEqual(100, result.Rows[4][2]); //value should match the input array } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/Components/ColumnSwapperTests.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/Components/ColumnSwapperTests.cs index b024b96186..bfa3b10f9d 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/Components/ColumnSwapperTests.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/Components/ColumnSwapperTests.cs @@ -11,552 +11,589 @@ using FAnsi; using FAnsi.Discovery; using FAnsi.Extensions; -using Moq; +using NSubstitute; using NUnit.Framework; -using Rdmp.Core.Curation.Data; using Rdmp.Core.DataExport.Data; using Rdmp.Core.DataExport.DataExtraction.Commands; using Rdmp.Core.DataFlowPipeline; using Rdmp.Core.DataLoad.Modules.DataFlowOperations.Aliases; using Rdmp.Core.DataLoad.Modules.DataFlowOperations.Aliases.Exceptions; using Rdmp.Core.DataLoad.Modules.DataFlowOperations.Swapping; -using ReusableLibraryCode.Checks; -using ReusableLibraryCode.Progress; +using Rdmp.Core.ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Progress; using Tests.Common; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration.PipelineTests.Components +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration.PipelineTests.Components; + +internal class ColumnSwapperTests : DatabaseTests { - class 
ColumnSwapperTests:DatabaseTests + [TestCase(true)] + [TestCase(false)] + public void TestColumnSwapper_NormalUseCase(bool keepInputColumnToo) { - [TestCase(true)] - [TestCase(false)] - public void TestColumnSwapper_NormalUseCase(bool keepInputColumnToo) - { - using var dt = new DataTable(); - dt.Columns.Add("In"); - dt.Columns.Add("Out"); + using var dt = new DataTable(); + dt.Columns.Add("In"); + dt.Columns.Add("Out"); - dt.Rows.Add("A", 1); - dt.Rows.Add("B", 2); - dt.Rows.Add("C", 3); - dt.Rows.Add("D", 4); - dt.Rows.Add("D", 5); //oh dear D maps to 2 out values that's a violation! but if we don't see a D it doesn't matter + dt.Rows.Add("A", 1); + dt.Rows.Add("B", 2); + dt.Rows.Add("C", 3); + dt.Rows.Add("D", 4); + dt.Rows.Add("D", + 5); //oh dear D maps to 2 out values that's a violation! but if we don't see a D it doesn't matter - var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); - - Import(db.CreateTable("Map", dt),out var map,out var mapCols); + var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); - var swapper = new ColumnSwapper(); - swapper.MappingFromColumn = mapCols.Single(c => c.GetRuntimeName().Equals("In")); - swapper.MappingToColumn = mapCols.Single(c => c.GetRuntimeName().Equals("Out")); - swapper.KeepInputColumnToo = keepInputColumnToo; + Import(db.CreateTable("Map", dt), out var map, out var mapCols); - swapper.Check(new ThrowImmediatelyCheckNotifier()); + var swapper = new ColumnSwapper + { + MappingFromColumn = mapCols.Single(c => c.GetRuntimeName().Equals("In")), + MappingToColumn = mapCols.Single(c => c.GetRuntimeName().Equals("Out")), + KeepInputColumnToo = keepInputColumnToo + }; - using var dtToSwap = new DataTable(); + swapper.Check(ThrowImmediatelyCheckNotifier.Quiet); - dtToSwap.Columns.Add("In"); - dtToSwap.Columns.Add("Name"); - dtToSwap.Columns.Add("Age"); + using var dtToSwap = new DataTable(); - dtToSwap.Rows.Add("A", "Dave", 30); - dtToSwap.Rows.Add("A", "Dave", 30); - dtToSwap.Rows.Add("B", "Frank", 50); + dtToSwap.Columns.Add("In"); + dtToSwap.Columns.Add("Name"); + dtToSwap.Columns.Add("Age"); - var resultDt = swapper.ProcessPipelineData(dtToSwap, new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()); + dtToSwap.Rows.Add("A", "Dave", 30); + dtToSwap.Rows.Add("A", "Dave", 30); + dtToSwap.Rows.Add("B", "Frank", 50); - //in should be there or not depending on the setting KeepInputColumnToo - Assert.AreEqual(keepInputColumnToo, resultDt.Columns.Contains("In")); + var resultDt = swapper.ProcessPipelineData(dtToSwap, ThrowImmediatelyDataLoadEventListener.Quiet, + new GracefulCancellationToken()); - AreBasicallyEquals(1, resultDt.Rows[0]["Out"]); - Assert.AreEqual("Dave", resultDt.Rows[0]["Name"]); + //in should be there or not depending on the setting KeepInputColumnToo + Assert.AreEqual(keepInputColumnToo, resultDt.Columns.Contains("In")); - AreBasicallyEquals(1, resultDt.Rows[1]["Out"]); - Assert.AreEqual("Dave", resultDt.Rows[1]["Name"]); + AreBasicallyEquals(1, resultDt.Rows[0]["Out"]); + Assert.AreEqual("Dave", resultDt.Rows[0]["Name"]); - AreBasicallyEquals(2, resultDt.Rows[2]["Out"]); - Assert.AreEqual("Frank", resultDt.Rows[2]["Name"]); + AreBasicallyEquals(1, resultDt.Rows[1]["Out"]); + Assert.AreEqual("Dave", resultDt.Rows[1]["Name"]); - if (keepInputColumnToo) - { - Assert.AreEqual("A", resultDt.Rows[0]["In"]); - Assert.AreEqual("A", resultDt.Rows[1]["In"]); - Assert.AreEqual("B", resultDt.Rows[2]["In"]); - } - } + AreBasicallyEquals(2, resultDt.Rows[2]["Out"]); + Assert.AreEqual("Frank", 
resultDt.Rows[2]["Name"]); - [TestCase(true)] - [TestCase(false)] - public void TestColumnSwapper_AlternateColumnNames(bool keepInputColumnToo) + if (keepInputColumnToo) { - using var dtMap = new DataTable(); - dtMap.Columns.Add("In"); - dtMap.Columns.Add("Out"); + Assert.AreEqual("A", resultDt.Rows[0]["In"]); + Assert.AreEqual("A", resultDt.Rows[1]["In"]); + Assert.AreEqual("B", resultDt.Rows[2]["In"]); + } + } - dtMap.Rows.Add("A", 1); - dtMap.Rows.Add("B", 2); - dtMap.Rows.Add("C", 3); - dtMap.Rows.Add("D", 4); - dtMap.Rows.Add("D", 5); //oh dear D maps to 2 out values that's a violation! but if we don't see a D it doesn't matter + [TestCase(true)] + [TestCase(false)] + public void TestColumnSwapper_AlternateColumnNames(bool keepInputColumnToo) + { + using var dtMap = new DataTable(); + dtMap.Columns.Add("In"); + dtMap.Columns.Add("Out"); - var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); + dtMap.Rows.Add("A", 1); + dtMap.Rows.Add("B", 2); + dtMap.Rows.Add("C", 3); + dtMap.Rows.Add("D", 4); + dtMap.Rows.Add("D", + 5); //oh dear D maps to 2 out values that's a violation! but if we don't see a D it doesn't matter - Import(db.CreateTable("Map", dtMap), out var map, out var mapCols); + var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); - var swapper = new ColumnSwapper(); - swapper.MappingFromColumn = mapCols.Single(c => c.GetRuntimeName().Equals("In")); - swapper.MappingToColumn = mapCols.Single(c => c.GetRuntimeName().Equals("Out")); - swapper.KeepInputColumnToo = keepInputColumnToo; + Import(db.CreateTable("Map", dtMap), out var map, out var mapCols); - swapper.Check(new ThrowImmediatelyCheckNotifier()); + var swapper = new ColumnSwapper + { + MappingFromColumn = mapCols.Single(c => c.GetRuntimeName().Equals("In")), + MappingToColumn = mapCols.Single(c => c.GetRuntimeName().Equals("Out")), + KeepInputColumnToo = keepInputColumnToo + }; - using var dtToSwap = new DataTable(); + swapper.Check(ThrowImmediatelyCheckNotifier.Quiet); - dtToSwap.Columns.Add("In2"); - dtToSwap.Columns.Add("Name"); - dtToSwap.Columns.Add("Age"); + using var dtToSwap = new DataTable(); - dtToSwap.Rows.Add("A", "Dave", 30); - dtToSwap.Rows.Add("A", "Dave", 30); - dtToSwap.Rows.Add("B", "Frank", 50); + dtToSwap.Columns.Add("In2"); + dtToSwap.Columns.Add("Name"); + dtToSwap.Columns.Add("Age"); - // Our pipeline data does not have a column called In but instead it is called In2 - var ex = Assert.Throws(() => swapper.ProcessPipelineData(dtToSwap, new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken())); - Assert.AreEqual("DataTable did not contain a field called 'In'", ex.Message); - - // Tell the swapper about the new name - swapper.InputFromColumn = "In2"; - swapper.OutputToColumn = "Out2"; + dtToSwap.Rows.Add("A", "Dave", 30); + dtToSwap.Rows.Add("A", "Dave", 30); + dtToSwap.Rows.Add("B", "Frank", 50); - var resultDt = swapper.ProcessPipelineData(dtToSwap, new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()); + // Our pipeline data does not have a column called In but instead it is called In2 + var ex = Assert.Throws(() => swapper.ProcessPipelineData(dtToSwap, + ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken())); + Assert.AreEqual("DataTable did not contain a field called 'In'", ex.Message); - //in should be there or not depending on the setting KeepInputColumnToo - Assert.AreEqual(keepInputColumnToo, resultDt.Columns.Contains("In2")); + // Tell the swapper about the new name + swapper.InputFromColumn = "In2"; + 
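The recurring mechanical change across these test files is the move from Moq to NSubstitute. As a reference for the pattern, here is a short sketch using the IDataLoadInfo and IDataLoadJob stubs from ArchiveFilesTests earlier in this patch; only members exercised by those tests are shown, and the RDMP using directives are omitted.

using NSubstitute;

// Before (Moq), members were pinned when the stub was created:
//   var dataLoadInfo = Mock.Of<IDataLoadInfo>(info => info.ID == 1);
//   var job          = Mock.Of<IDataLoadJob>(j => j.DataLoadInfo == dataLoadInfo);
//
// After (NSubstitute), the substitute is created first and each member is stubbed with Returns:
var dataLoadInfo = Substitute.For<IDataLoadInfo>();
dataLoadInfo.ID.Returns(1);

var job = Substitute.For<IDataLoadJob>();
job.DataLoadInfo.Returns(dataLoadInfo);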
swapper.OutputToColumn = "Out2"; - AreBasicallyEquals(1, resultDt.Rows[0]["Out2"]); - Assert.AreEqual("Dave", resultDt.Rows[0]["Name"]); + var resultDt = swapper.ProcessPipelineData(dtToSwap, ThrowImmediatelyDataLoadEventListener.Quiet, + new GracefulCancellationToken()); - AreBasicallyEquals(1, resultDt.Rows[1]["Out2"]); - Assert.AreEqual("Dave", resultDt.Rows[1]["Name"]); + //in should be there or not depending on the setting KeepInputColumnToo + Assert.AreEqual(keepInputColumnToo, resultDt.Columns.Contains("In2")); - AreBasicallyEquals(2, resultDt.Rows[2]["Out2"]); - Assert.AreEqual("Frank", resultDt.Rows[2]["Name"]); + AreBasicallyEquals(1, resultDt.Rows[0]["Out2"]); + Assert.AreEqual("Dave", resultDt.Rows[0]["Name"]); - if (keepInputColumnToo) - { - Assert.AreEqual("A", resultDt.Rows[0]["In2"]); - Assert.AreEqual("A", resultDt.Rows[1]["In2"]); - Assert.AreEqual("B", resultDt.Rows[2]["In2"]); - } - } + AreBasicallyEquals(1, resultDt.Rows[1]["Out2"]); + Assert.AreEqual("Dave", resultDt.Rows[1]["Name"]); + AreBasicallyEquals(2, resultDt.Rows[2]["Out2"]); + Assert.AreEqual("Frank", resultDt.Rows[2]["Name"]); - [TestCase(true)] - [TestCase(false)] - public void TestColumnSwapper_InPlaceSwapNoNewCols(bool keepInputColumnToo) + if (keepInputColumnToo) { - using var dtMap = new DataTable(); - dtMap.Columns.Add("In"); - dtMap.Columns.Add("Out"); + Assert.AreEqual("A", resultDt.Rows[0]["In2"]); + Assert.AreEqual("A", resultDt.Rows[1]["In2"]); + Assert.AreEqual("B", resultDt.Rows[2]["In2"]); + } + } - dtMap.Rows.Add("A", 1); - dtMap.Rows.Add("B", 2); - dtMap.Rows.Add("C", 3); - dtMap.Rows.Add("D", 4); - dtMap.Rows.Add("D", 5); //oh dear D maps to 2 out values that's a violation! but if we don't see a D it doesn't matter - var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); + [TestCase(true)] + [TestCase(false)] + public void TestColumnSwapper_InPlaceSwapNoNewCols(bool keepInputColumnToo) + { + using var dtMap = new DataTable(); + dtMap.Columns.Add("In"); + dtMap.Columns.Add("Out"); - Import(db.CreateTable("Map", dtMap), out var map, out var mapCols); + dtMap.Rows.Add("A", 1); + dtMap.Rows.Add("B", 2); + dtMap.Rows.Add("C", 3); + dtMap.Rows.Add("D", 4); + dtMap.Rows.Add("D", + 5); //oh dear D maps to 2 out values that's a violation! 
but if we don't see a D it doesn't matter - var swapper = new ColumnSwapper(); - swapper.MappingFromColumn = mapCols.Single(c => c.GetRuntimeName().Equals("In")); - swapper.MappingToColumn = mapCols.Single(c => c.GetRuntimeName().Equals("Out")); - swapper.KeepInputColumnToo = keepInputColumnToo; + var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); - swapper.Check(new ThrowImmediatelyCheckNotifier()); + Import(db.CreateTable("Map", dtMap), out var map, out var mapCols); - using var dtToSwap = new DataTable(); + var swapper = new ColumnSwapper + { + MappingFromColumn = mapCols.Single(c => c.GetRuntimeName().Equals("In")), + MappingToColumn = mapCols.Single(c => c.GetRuntimeName().Equals("Out")), + KeepInputColumnToo = keepInputColumnToo + }; - dtToSwap.Columns.Add("In2"); - dtToSwap.Columns.Add("Name"); - dtToSwap.Columns.Add("Age"); + swapper.Check(ThrowImmediatelyCheckNotifier.Quiet); - dtToSwap.Rows.Add("A", "Dave", 30); - dtToSwap.Rows.Add("A", "Dave", 30); - dtToSwap.Rows.Add("B", "Frank", 50); + using var dtToSwap = new DataTable(); - // Tell the swapper about the new name - swapper.InputFromColumn = "In2"; - swapper.OutputToColumn = "In2"; + dtToSwap.Columns.Add("In2"); + dtToSwap.Columns.Add("Name"); + dtToSwap.Columns.Add("Age"); - var resultDt = swapper.ProcessPipelineData(dtToSwap, new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()); + dtToSwap.Rows.Add("A", "Dave", 30); + dtToSwap.Rows.Add("A", "Dave", 30); + dtToSwap.Rows.Add("B", "Frank", 50); - // in ALWAYS be there, because it is an in place update - ignore KeepInputColumnToo - Assert.True(resultDt.Columns.Contains("In2")); + // Tell the swapper about the new name + swapper.InputFromColumn = "In2"; + swapper.OutputToColumn = "In2"; - AreBasicallyEquals(1, resultDt.Rows[0]["In2"]); - Assert.AreEqual("Dave", resultDt.Rows[0]["Name"]); + var resultDt = swapper.ProcessPipelineData(dtToSwap, ThrowImmediatelyDataLoadEventListener.Quiet, + new GracefulCancellationToken()); - AreBasicallyEquals(1, resultDt.Rows[1]["In2"]); - Assert.AreEqual("Dave", resultDt.Rows[1]["Name"]); + // in ALWAYS be there, because it is an in place update - ignore KeepInputColumnToo + Assert.True(resultDt.Columns.Contains("In2")); - AreBasicallyEquals(2, resultDt.Rows[2]["In2"]); - Assert.AreEqual("Frank", resultDt.Rows[2]["Name"]); - } + AreBasicallyEquals(1, resultDt.Rows[0]["In2"]); + Assert.AreEqual("Dave", resultDt.Rows[0]["Name"]); - [TestCase(AliasResolutionStrategy.CrashIfAliasesFound)] - [TestCase(AliasResolutionStrategy.MultiplyInputDataRowsByAliases)] - public void TestColumnSwapper_Aliases(AliasResolutionStrategy strategy) - { - using var dt = new DataTable(); - dt.Columns.Add("In"); - dt.Columns.Add("Out"); - - dt.Rows.Add("A", 1); - dt.Rows.Add("B", 2); - dt.Rows.Add("C", 3); - dt.Rows.Add("D", 4); - dt.Rows.Add("D", 5); //oh dear D maps to 2 out values that's a violation! 
but if we don't see a D it doesn't matter - - var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); - - Import(db.CreateTable("Map", dt), out var map, out var mapCols); - - var swapper = new ColumnSwapper(); - swapper.MappingFromColumn = mapCols.Single(c => c.GetRuntimeName().Equals("In")); - swapper.MappingToColumn = mapCols.Single(c => c.GetRuntimeName().Equals("Out")); - swapper.AliasResolutionStrategy = strategy; - - swapper.Check(new ThrowImmediatelyCheckNotifier()); - - using var dtToSwap = new DataTable(); - - dtToSwap.Columns.Add("In"); - dtToSwap.Columns.Add("Name"); - dtToSwap.Columns.Add("Age"); - - dtToSwap.Rows.Add("A", "Dave", 30); - dtToSwap.Rows.Add("D", "Dandy", 60); - - switch (strategy) - { - case AliasResolutionStrategy.CrashIfAliasesFound: - Assert.Throws(()=>swapper.ProcessPipelineData(dtToSwap, new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken())); - break; - case AliasResolutionStrategy.MultiplyInputDataRowsByAliases: - - var resultDt = swapper.ProcessPipelineData(dtToSwap, new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()); - - AreBasicallyEquals(1, resultDt.Rows[0]["Out"]); - Assert.AreEqual("Dave", resultDt.Rows[0]["Name"]); - - //we get the first alias (4) - AreBasicallyEquals(4, resultDt.Rows[1]["Out"]); - Assert.AreEqual("Dandy", resultDt.Rows[1]["Name"]); - AreBasicallyEquals(60, resultDt.Rows[1]["Age"]); - - //and the second alias (5) - AreBasicallyEquals(5, resultDt.Rows[2]["Out"]); - Assert.AreEqual("Dandy", resultDt.Rows[2]["Name"]); - AreBasicallyEquals(60, resultDt.Rows[1]["Age"]); - break; - default: - throw new ArgumentOutOfRangeException("strategy"); - } - - } + AreBasicallyEquals(1, resultDt.Rows[1]["In2"]); + Assert.AreEqual("Dave", resultDt.Rows[1]["Name"]); - [TestCase(true)] - [TestCase(false)] - public void TestColumnSwapper_MissingMappings(bool crashIfNoMappingsFound) - { - using var dt = new DataTable(); - dt.Columns.Add("In"); - dt.Columns.Add("Out"); + AreBasicallyEquals(2, resultDt.Rows[2]["In2"]); + Assert.AreEqual("Frank", resultDt.Rows[2]["Name"]); + } + + [TestCase(AliasResolutionStrategy.CrashIfAliasesFound)] + [TestCase(AliasResolutionStrategy.MultiplyInputDataRowsByAliases)] + public void TestColumnSwapper_Aliases(AliasResolutionStrategy strategy) + { + using var dt = new DataTable(); + dt.Columns.Add("In"); + dt.Columns.Add("Out"); - dt.Rows.Add("A", 1); - dt.Rows.Add("B", 2); - dt.Rows.Add("C", 3); - dt.Rows.Add("D", 4); - dt.Rows.Add("D", 5); //oh dear D maps to 2 out values that's a violation! but if we don't see a D it doesn't matter + dt.Rows.Add("A", 1); + dt.Rows.Add("B", 2); + dt.Rows.Add("C", 3); + dt.Rows.Add("D", 4); + dt.Rows.Add("D", + 5); //oh dear D maps to 2 out values that's a violation! 
but if we don't see a D it doesn't matter - var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); + var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); - Import(db.CreateTable("Map", dt), out var map, out var mapCols); + Import(db.CreateTable("Map", dt), out var map, out var mapCols); - var swapper = new ColumnSwapper(); - swapper.MappingFromColumn = mapCols.Single(c => c.GetRuntimeName().Equals("In")); - swapper.MappingToColumn = mapCols.Single(c => c.GetRuntimeName().Equals("Out")); - swapper.CrashIfNoMappingsFound = crashIfNoMappingsFound; - swapper.WHERELogic = swapper.MappingToColumn.GetFullyQualifiedName() + " < 2"; //throws out all rows but A + var swapper = new ColumnSwapper + { + MappingFromColumn = mapCols.Single(c => c.GetRuntimeName().Equals("In")), + MappingToColumn = mapCols.Single(c => c.GetRuntimeName().Equals("Out")), + AliasResolutionStrategy = strategy + }; + + swapper.Check(ThrowImmediatelyCheckNotifier.Quiet); - swapper.Check(new ThrowImmediatelyCheckNotifier()); + using var dtToSwap = new DataTable(); - using var dtToSwap = new DataTable(); + dtToSwap.Columns.Add("In"); + dtToSwap.Columns.Add("Name"); + dtToSwap.Columns.Add("Age"); - dtToSwap.Columns.Add("In"); - dtToSwap.Columns.Add("Name"); - dtToSwap.Columns.Add("Age"); + dtToSwap.Rows.Add("A", "Dave", 30); + dtToSwap.Rows.Add("D", "Dandy", 60); - dtToSwap.Rows.Add("A", "Dave", 30); - dtToSwap.Rows.Add("B", "Frank", 50); + switch (strategy) + { + case AliasResolutionStrategy.CrashIfAliasesFound: + Assert.Throws(() => swapper.ProcessPipelineData(dtToSwap, + ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken())); + break; + case AliasResolutionStrategy.MultiplyInputDataRowsByAliases: - if(crashIfNoMappingsFound) - Assert.Throws(() => swapper.ProcessPipelineData(dtToSwap, new ThrowImmediatelyDataLoadEventListener(), null)); - else - { - var resultDt = swapper.ProcessPipelineData(dtToSwap, new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()); + var resultDt = swapper.ProcessPipelineData(dtToSwap, ThrowImmediatelyDataLoadEventListener.Quiet, + new GracefulCancellationToken()); - Assert.AreEqual(1, resultDt.Rows.Count); AreBasicallyEquals(1, resultDt.Rows[0]["Out"]); Assert.AreEqual("Dave", resultDt.Rows[0]["Name"]); - } - } - [Test] - public void TestColumnSwapper_ProjectSpecificMappings() - { - using var dt = new DataTable(); - dt.Columns.Add("In"); - dt.Columns.Add("Out"); - dt.Columns.Add("Proj"); + //we get the first alias (4) + AreBasicallyEquals(4, resultDt.Rows[1]["Out"]); + Assert.AreEqual("Dandy", resultDt.Rows[1]["Name"]); + AreBasicallyEquals(60, resultDt.Rows[1]["Age"]); + + //and the second alias (5) + AreBasicallyEquals(5, resultDt.Rows[2]["Out"]); + Assert.AreEqual("Dandy", resultDt.Rows[2]["Name"]); + AreBasicallyEquals(60, resultDt.Rows[1]["Age"]); + break; + default: + throw new ArgumentOutOfRangeException(nameof(strategy)); + } + } - //Anonymise A and B differently depending on ProjectNumber (valid project numbers are 1 and 2) - dt.Rows.Add("A", 1,1); - dt.Rows.Add("A", 2,2); - dt.Rows.Add("B", 3,1); - dt.Rows.Add("B", 4,2); + [TestCase(true)] + [TestCase(false)] + public void TestColumnSwapper_MissingMappings(bool crashIfNoMappingsFound) + { + using var dt = new DataTable(); + dt.Columns.Add("In"); + dt.Columns.Add("Out"); - var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); + dt.Rows.Add("A", 1); + dt.Rows.Add("B", 2); + dt.Rows.Add("C", 3); + dt.Rows.Add("D", 4); + dt.Rows.Add("D", + 5); //oh dear D maps to 2 out values that's 
a violation! but if we don't see a D it doesn't matter - Import(db.CreateTable("Map", dt), out var map, out var mapCols); + var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); - var swapper = new ColumnSwapper(); - swapper.MappingFromColumn = mapCols.Single(c => c.GetRuntimeName().Equals("In")); - swapper.MappingToColumn = mapCols.Single(c => c.GetRuntimeName().Equals("Out")); - swapper.WHERELogic = "Proj = $n"; - - // initialize with a mock that returns ProjectNumber 1 - swapper.PreInitialize(GetMockExtractDatasetCommand(), new ThrowImmediatelyDataLoadEventListener()); + Import(db.CreateTable("Map", dt), out var map, out var mapCols); - swapper.Check(new ThrowImmediatelyCheckNotifier()); + var swapper = new ColumnSwapper + { + MappingFromColumn = mapCols.Single(c => c.GetRuntimeName().Equals("In")), + MappingToColumn = mapCols.Single(c => c.GetRuntimeName().Equals("Out")), + CrashIfNoMappingsFound = crashIfNoMappingsFound + }; + swapper.WHERELogic = $"{swapper.MappingToColumn.GetFullyQualifiedName()} < 2"; //throws out all rows but A - using var dtToSwap = new DataTable(); + swapper.Check(ThrowImmediatelyCheckNotifier.Quiet); - dtToSwap.Columns.Add("In"); - dtToSwap.Columns.Add("Name"); - dtToSwap.Columns.Add("Age"); + using var dtToSwap = new DataTable(); - dtToSwap.Rows.Add("A", "Dave", 30); - dtToSwap.Rows.Add("B", "Frank", 50); + dtToSwap.Columns.Add("In"); + dtToSwap.Columns.Add("Name"); + dtToSwap.Columns.Add("Age"); - using var resultDt = swapper.ProcessPipelineData(dtToSwap, new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()); + dtToSwap.Rows.Add("A", "Dave", 30); + dtToSwap.Rows.Add("B", "Frank", 50); - Assert.AreEqual(2, resultDt.Rows.Count); + if (crashIfNoMappingsFound) + { + Assert.Throws(() => + swapper.ProcessPipelineData(dtToSwap, ThrowImmediatelyDataLoadEventListener.Quiet, null)); + } + else + { + var resultDt = swapper.ProcessPipelineData(dtToSwap, ThrowImmediatelyDataLoadEventListener.Quiet, + new GracefulCancellationToken()); - // Should have project specific results for A of 1 and for B of 3 because the ProjectNumber is 1 + Assert.AreEqual(1, resultDt.Rows.Count); AreBasicallyEquals(1, resultDt.Rows[0]["Out"]); Assert.AreEqual("Dave", resultDt.Rows[0]["Name"]); - AreBasicallyEquals(3, resultDt.Rows[1]["Out"]); - Assert.AreEqual("Frank", resultDt.Rows[1]["Name"]); } + } + + [Test] + public void TestColumnSwapper_ProjectSpecificMappings() + { + using var dt = new DataTable(); + dt.Columns.Add("In"); + dt.Columns.Add("Out"); + dt.Columns.Add("Proj"); + + //Anonymise A and B differently depending on ProjectNumber (valid project numbers are 1 and 2) + dt.Rows.Add("A", 1, 1); + dt.Rows.Add("A", 2, 2); + dt.Rows.Add("B", 3, 1); + dt.Rows.Add("B", 4, 2); + + var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); - /// - /// Tests ColumnSwapper when there are null values in the input being processed - /// - [Test] - public void TestColumnSwapper_InputTableNulls() + Import(db.CreateTable("Map", dt), out var map, out var mapCols); + + var swapper = new ColumnSwapper { - using var dt = new DataTable(); - dt.Columns.Add("In"); - dt.Columns.Add("Out"); + MappingFromColumn = mapCols.Single(c => c.GetRuntimeName().Equals("In")), + MappingToColumn = mapCols.Single(c => c.GetRuntimeName().Equals("Out")), + WHERELogic = "Proj = $n" + }; - dt.Rows.Add(1, 1); - dt.Rows.Add(2, 2); + // initialize with a mock that returns ProjectNumber 1 + swapper.PreInitialize(GetMockExtractDatasetCommand(), ThrowImmediatelyDataLoadEventListener.Quiet); - var db = 
GetCleanedServer(DatabaseType.MicrosoftSQLServer); - - Import(db.CreateTable("Map", dt), out var map, out var mapCols); + swapper.Check(ThrowImmediatelyCheckNotifier.Quiet); - var swapper = new ColumnSwapper(); - swapper.MappingFromColumn = mapCols.Single(c => c.GetRuntimeName().Equals("In")); - swapper.MappingToColumn = mapCols.Single(c => c.GetRuntimeName().Equals("Out")); + using var dtToSwap = new DataTable(); - swapper.Check(new ThrowImmediatelyCheckNotifier()); + dtToSwap.Columns.Add("In"); + dtToSwap.Columns.Add("Name"); + dtToSwap.Columns.Add("Age"); - using var dtToSwap = new DataTable(); + dtToSwap.Rows.Add("A", "Dave", 30); + dtToSwap.Rows.Add("B", "Frank", 50); - dtToSwap.Columns.Add("In",typeof(int)); - dtToSwap.Columns.Add("Name"); - dtToSwap.Columns.Add("Age"); + using var resultDt = swapper.ProcessPipelineData(dtToSwap, ThrowImmediatelyDataLoadEventListener.Quiet, + new GracefulCancellationToken()); - dtToSwap.Rows.Add(1, "Dave", 30); - dtToSwap.Rows.Add(null, "Bob", 30); + Assert.AreEqual(2, resultDt.Rows.Count); - var resultDt = swapper.ProcessPipelineData(dtToSwap, new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()); + // Should have project specific results for A of 1 and for B of 3 because the ProjectNumber is 1 + AreBasicallyEquals(1, resultDt.Rows[0]["Out"]); + Assert.AreEqual("Dave", resultDt.Rows[0]["Name"]); + AreBasicallyEquals(3, resultDt.Rows[1]["Out"]); + Assert.AreEqual("Frank", resultDt.Rows[1]["Name"]); + } - Assert.AreEqual(2, resultDt.Rows.Count); - AreBasicallyEquals(1, resultDt.Rows[0]["Out"]); - Assert.AreEqual("Dave", resultDt.Rows[0]["Name"]); - - AreBasicallyEquals(DBNull.Value, resultDt.Rows[1]["Out"]); - Assert.AreEqual("Bob", resultDt.Rows[1]["Name"]); - + /// + /// Tests ColumnSwapper when there are null values in the input being processed + /// + [Test] + public void TestColumnSwapper_InputTableNulls() + { + using var dt = new DataTable(); + dt.Columns.Add("In"); + dt.Columns.Add("Out"); - } - /// - /// Tests ColumnSwapper when there are null values in the database mapping table - /// - [Test] - public void TestColumnSwapper_MappingTableNulls() + dt.Rows.Add(1, 1); + dt.Rows.Add(2, 2); + + var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); + + Import(db.CreateTable("Map", dt), out var map, out var mapCols); + + var swapper = new ColumnSwapper { - using var dt = new DataTable(); - dt.Columns.Add("In"); - dt.Columns.Add("Out"); + MappingFromColumn = mapCols.Single(c => c.GetRuntimeName().Equals("In")), + MappingToColumn = mapCols.Single(c => c.GetRuntimeName().Equals("Out")) + }; - dt.Rows.Add(1, 1); - dt.Rows.Add(DBNull.Value, 3); // this value should be ignored - dt.Rows.Add(2, 2); + swapper.Check(ThrowImmediatelyCheckNotifier.Quiet); - var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); + using var dtToSwap = new DataTable(); - Import(db.CreateTable("Map", dt), out var map, out var mapCols); + dtToSwap.Columns.Add("In", typeof(int)); + dtToSwap.Columns.Add("Name"); + dtToSwap.Columns.Add("Age"); - var swapper = new ColumnSwapper(); - swapper.MappingFromColumn = mapCols.Single(c => c.GetRuntimeName().Equals("In")); - swapper.MappingToColumn = mapCols.Single(c => c.GetRuntimeName().Equals("Out")); + dtToSwap.Rows.Add(1, "Dave", 30); + dtToSwap.Rows.Add(null, "Bob", 30); - swapper.Check(new ThrowImmediatelyCheckNotifier()); + var resultDt = swapper.ProcessPipelineData(dtToSwap, ThrowImmediatelyDataLoadEventListener.Quiet, + new GracefulCancellationToken()); - using var dtToSwap = new DataTable(); + 
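The project-specific mapping test configured just above relies on two pieces of ColumnSwapper wiring that are easy to miss in the diff: WHERELogic containing the $n token, and PreInitialize supplying an extraction command whose ProjectNumber fills that token. A condensed, illustrative sketch of just that wiring follows; `mapCols` is assumed to be the mapping table columns produced by the Import(...) call in the tests, IProject is an assumption on my part (the tests build the equivalent stub in GetMockExtractDatasetCommand below), and using directives are omitted.

using NSubstitute;

// Map "In" to "Out" as in the tests, then restrict mapping rows by project number.
var swapper = new ColumnSwapper
{
    MappingFromColumn = mapCols.Single(c => c.GetRuntimeName().Equals("In")),
    MappingToColumn = mapCols.Single(c => c.GetRuntimeName().Equals("Out")),
    WHERELogic = "Proj = $n" // $n is replaced with the ProjectNumber at runtime
};

// A stub extraction command whose Project reports ProjectNumber 1, mirroring
// GetMockExtractDatasetCommand later in this file.
var project = Substitute.For<IProject>(); // IProject assumed here
project.ProjectNumber.Returns(1);
var cmd = Substitute.For<IExtractDatasetCommand>();
cmd.Project.Returns(project);

swapper.PreInitialize(cmd, ThrowImmediatelyDataLoadEventListener.Quiet);
swapper.Check(ThrowImmediatelyCheckNotifier.Quiet);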
Assert.AreEqual(2, resultDt.Rows.Count); + AreBasicallyEquals(1, resultDt.Rows[0]["Out"]); + Assert.AreEqual("Dave", resultDt.Rows[0]["Name"]); - dtToSwap.Columns.Add("In",typeof(int)); - dtToSwap.Columns.Add("Name"); - dtToSwap.Columns.Add("Age"); + AreBasicallyEquals(DBNull.Value, resultDt.Rows[1]["Out"]); + Assert.AreEqual("Bob", resultDt.Rows[1]["Name"]); + } - dtToSwap.Rows.Add(1, "Dave", 30); - dtToSwap.Rows.Add(null, "Bob", 30); + /// + /// Tests ColumnSwapper when there are null values in the database mapping table + /// + [Test] + public void TestColumnSwapper_MappingTableNulls() + { + using var dt = new DataTable(); + dt.Columns.Add("In"); + dt.Columns.Add("Out"); - var toMem = new ToMemoryDataLoadEventListener(true); + dt.Rows.Add(1, 1); + dt.Rows.Add(DBNull.Value, 3); // this value should be ignored + dt.Rows.Add(2, 2); - var resultDt = swapper.ProcessPipelineData(dtToSwap,toMem , new GracefulCancellationToken()); + var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); - //this is the primary thing we are testing here - Assert.Contains("Discarded 1 Null key values read from mapping table",toMem.GetAllMessagesByProgressEventType()[ProgressEventType.Warning].Select(m=>m.Message).ToArray()); + Import(db.CreateTable("Map", dt), out var map, out var mapCols); - Assert.AreEqual(2, resultDt.Rows.Count); - AreBasicallyEquals(1, resultDt.Rows[0]["Out"]); - Assert.AreEqual("Dave", resultDt.Rows[0]["Name"]); - - AreBasicallyEquals(DBNull.Value, resultDt.Rows[1]["Out"]); - Assert.AreEqual("Bob", resultDt.Rows[1]["Name"]); - } - /// - /// Tests the systems ability to compare an integer in the input data table with a string in the database - /// - [Test] - public void TestColumnSwapper_MixedDatatypes_StringInDatabase() + var swapper = new ColumnSwapper { - using var dt = new DataTable(); - dt.Columns.Add("In"); - dt.Columns.Add("Out"); + MappingFromColumn = mapCols.Single(c => c.GetRuntimeName().Equals("In")), + MappingToColumn = mapCols.Single(c => c.GetRuntimeName().Equals("Out")) + }; - dt.Rows.Add("1" /*string*/, 2); - dt.Rows.Add("2", 3); - dt.Rows.Add("3", 4); - dt.SetDoNotReType(true); + swapper.Check(ThrowImmediatelyCheckNotifier.Quiet); - var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); + using var dtToSwap = new DataTable(); - DiscoveredTable mapTbl; + dtToSwap.Columns.Add("In", typeof(int)); + dtToSwap.Columns.Add("Name"); + dtToSwap.Columns.Add("Age"); - Import(mapTbl = db.CreateTable("Map", dt), out var map, out var mapCols); + dtToSwap.Rows.Add(1, "Dave", 30); + dtToSwap.Rows.Add(null, "Bob", 30); - Assert.AreEqual(typeof(string),mapTbl.DiscoverColumn("In").DataType.GetCSharpDataType(), "Expected map to be of string datatype"); + var toMem = new ToMemoryDataLoadEventListener(true); - var swapper = new ColumnSwapper(); - swapper.MappingFromColumn = mapCols.Single(c => c.GetRuntimeName().Equals("In")); - swapper.MappingToColumn = mapCols.Single(c => c.GetRuntimeName().Equals("Out")); + var resultDt = swapper.ProcessPipelineData(dtToSwap, toMem, new GracefulCancellationToken()); - swapper.Check(new ThrowImmediatelyCheckNotifier()); + //this is the primary thing we are testing here + Assert.Contains("Discarded 1 Null key values read from mapping table", + toMem.GetAllMessagesByProgressEventType()[ProgressEventType.Warning].Select(m => m.Message).ToArray()); - using var dtToSwap = new DataTable(); + Assert.AreEqual(2, resultDt.Rows.Count); + AreBasicallyEquals(1, resultDt.Rows[0]["Out"]); + Assert.AreEqual("Dave", resultDt.Rows[0]["Name"]); - 
dtToSwap.Columns.Add("In"); - dtToSwap.Columns.Add("Name"); - dtToSwap.Rows.Add(1 /*int*/, "Dave"); - - var resultDt = swapper.ProcessPipelineData(dtToSwap, new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()); + AreBasicallyEquals(DBNull.Value, resultDt.Rows[1]["Out"]); + Assert.AreEqual("Bob", resultDt.Rows[1]["Name"]); + } - Assert.AreEqual(1, resultDt.Rows.Count); - AreBasicallyEquals(2, resultDt.Rows[0]["Out"]); - Assert.AreEqual("Dave", resultDt.Rows[0]["Name"]); - } + /// + /// Tests the systems ability to compare an integer in the input data table with a string in the database + /// + [Test] + public void TestColumnSwapper_MixedDatatypes_StringInDatabase() + { + using var dt = new DataTable(); + dt.Columns.Add("In"); + dt.Columns.Add("Out"); + + dt.Rows.Add("1" /*string*/, 2); + dt.Rows.Add("2", 3); + dt.Rows.Add("3", 4); + dt.SetDoNotReType(true); + + var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); - - /// - /// Tests the systems ability to compare a string input data table with an integer in the database - /// - [Test] - public void TestColumnSwapper_MixedDatatypes_IntegerInDatabase() + DiscoveredTable mapTbl; + + Import(mapTbl = db.CreateTable("Map", dt), out var map, out var mapCols); + + Assert.AreEqual(typeof(string), mapTbl.DiscoverColumn("In").DataType.GetCSharpDataType(), + "Expected map to be of string datatype"); + + var swapper = new ColumnSwapper { - using var dt = new DataTable(); - dt.Columns.Add("In"); - dt.Columns.Add("Out"); + MappingFromColumn = mapCols.Single(c => c.GetRuntimeName().Equals("In")), + MappingToColumn = mapCols.Single(c => c.GetRuntimeName().Equals("Out")) + }; - dt.Rows.Add(1 /*int*/, 2); - dt.Rows.Add(2, 3); - dt.Rows.Add(3, 4); + swapper.Check(ThrowImmediatelyCheckNotifier.Quiet); - var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); - - DiscoveredTable mapTbl; + using var dtToSwap = new DataTable(); - Import(mapTbl = db.CreateTable("Map", dt), out var map, out var mapCols); + dtToSwap.Columns.Add("In"); + dtToSwap.Columns.Add("Name"); + dtToSwap.Rows.Add(1 /*int*/, "Dave"); - Assert.AreEqual(typeof(int),mapTbl.DiscoverColumn("In").DataType.GetCSharpDataType(), "Expected map to be of int datatype"); + var resultDt = swapper.ProcessPipelineData(dtToSwap, ThrowImmediatelyDataLoadEventListener.Quiet, + new GracefulCancellationToken()); - var swapper = new ColumnSwapper(); - swapper.MappingFromColumn = mapCols.Single(c => c.GetRuntimeName().Equals("In")); - swapper.MappingToColumn = mapCols.Single(c => c.GetRuntimeName().Equals("Out")); + Assert.AreEqual(1, resultDt.Rows.Count); + AreBasicallyEquals(2, resultDt.Rows[0]["Out"]); + Assert.AreEqual("Dave", resultDt.Rows[0]["Name"]); + } - swapper.Check(new ThrowImmediatelyCheckNotifier()); - using var dtToSwap = new DataTable(); + /// + /// Tests the systems ability to compare a string input data table with an integer in the database + /// + [Test] + public void TestColumnSwapper_MixedDatatypes_IntegerInDatabase() + { + using var dt = new DataTable(); + dt.Columns.Add("In"); + dt.Columns.Add("Out"); - dtToSwap.Columns.Add("In"); - dtToSwap.Columns.Add("Name"); - dtToSwap.Rows.Add("1" /*string*/, "Dave"); - - var resultDt = swapper.ProcessPipelineData(dtToSwap, new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()); + dt.Rows.Add(1 /*int*/, 2); + dt.Rows.Add(2, 3); + dt.Rows.Add(3, 4); - Assert.AreEqual(1, resultDt.Rows.Count); - AreBasicallyEquals(2, resultDt.Rows[0]["Out"]); - Assert.AreEqual("Dave", resultDt.Rows[0]["Name"]); - } + 
var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); + + DiscoveredTable mapTbl; + + Import(mapTbl = db.CreateTable("Map", dt), out var map, out var mapCols); + + Assert.AreEqual(typeof(int), mapTbl.DiscoverColumn("In").DataType.GetCSharpDataType(), + "Expected map to be of int datatype"); - private IExtractDatasetCommand GetMockExtractDatasetCommand() + var swapper = new ColumnSwapper { - var mockPj = Mock.Of(p => - p.Name == "My Project" && - p.ProjectNumber == 1 - ); + MappingFromColumn = mapCols.Single(c => c.GetRuntimeName().Equals("In")), + MappingToColumn = mapCols.Single(c => c.GetRuntimeName().Equals("Out")) + }; - var mockConfig = Mock.Of(c => - c.Project == mockPj); + swapper.Check(ThrowImmediatelyCheckNotifier.Quiet); - var mockSelectedDatasets = Mock.Of(sds => - sds.ExtractionConfiguration == mockConfig - ); + using var dtToSwap = new DataTable(); - var mockExtractDsCmd = Mock.Of(d => - d.Project == mockPj && - d.Configuration == mockConfig && - d.SelectedDataSets == mockSelectedDatasets - ); + dtToSwap.Columns.Add("In"); + dtToSwap.Columns.Add("Name"); + dtToSwap.Rows.Add("1" /*string*/, "Dave"); - return mockExtractDsCmd; - } + var resultDt = swapper.ProcessPipelineData(dtToSwap, ThrowImmediatelyDataLoadEventListener.Quiet, + new GracefulCancellationToken()); + + Assert.AreEqual(1, resultDt.Rows.Count); + AreBasicallyEquals(2, resultDt.Rows[0]["Out"]); + Assert.AreEqual("Dave", resultDt.Rows[0]["Name"]); + } + + private static IExtractDatasetCommand GetMockExtractDatasetCommand() + { + var mockPj = Substitute.For(); + mockPj.Name.Returns("My Project"); + mockPj.ProjectNumber.Returns(1); + + var mockConfig = Substitute.For(); + mockConfig.Project.Returns(mockPj); + + var mockSelectedDatasets = Substitute.For(); + mockSelectedDatasets.ExtractionConfiguration.Returns(mockConfig); + + + var mockExtractDsCmd = Substitute.For(); + mockExtractDsCmd.Project.Returns(mockPj); + mockExtractDsCmd.Configuration.Returns(mockConfig); + mockExtractDsCmd.SelectedDataSets.Returns(mockSelectedDatasets); + + return mockExtractDsCmd; } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/Components/RemoveDuplicatesTests.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/Components/RemoveDuplicatesTests.cs index b6bff51d5c..6ebbc6e522 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/Components/RemoveDuplicatesTests.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/Components/RemoveDuplicatesTests.cs @@ -8,100 +8,105 @@ using NUnit.Framework; using Rdmp.Core.DataFlowPipeline; using Rdmp.Core.DataLoad.Engine.Pipeline.Components; -using ReusableLibraryCode.Progress; +using Rdmp.Core.ReusableLibraryCode.Progress; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration.PipelineTests.Components +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration.PipelineTests.Components; + +[Category("Unit")] +public class RemoveDuplicatesTests { - [Category("Unit")] - public class RemoveDuplicatesTests + [Test] + public void TestRemovingDuplicatesFromDataTable() { - [Test] - public void TestRemovingDuplicatesFromDataTable() - { - DataTable dt = new DataTable(); - dt.Columns.Add("Col1"); - dt.Columns.Add("Col2",typeof(int)); + var dt = new DataTable(); + dt.Columns.Add("Col1"); + dt.Columns.Add("Col2", typeof(int)); - dt.Rows.Add("Fish", 123); - dt.Rows.Add("Fish", 123); - dt.Rows.Add("Fish", 123); + dt.Rows.Add("Fish", 123); + dt.Rows.Add("Fish", 123); + dt.Rows.Add("Fish", 123); - 
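As context for the mocking rewrite in GetMockExtractDatasetCommand above: the change is a mechanical translation from Moq's expression-based Mock.Of to NSubstitute's Substitute.For plus Returns. Below is a minimal before/after sketch, assuming the RDMP IProject interface that the (not visible here) generic type arguments refer to; the RDMP using directives are omitted and only the first stub is shown.

// Sketch only - contrasts the two styles used before and after this patch.
using Moq;
using NSubstitute;

// Old style: Moq builds the stub from a property-matching expression.
var moqProject = Mock.Of<IProject>(p =>
    p.Name == "My Project" &&
    p.ProjectNumber == 1);

// New style: NSubstitute creates the proxy first, then each member is configured.
var nsubProject = Substitute.For<IProject>();
nsubProject.Name.Returns("My Project");
nsubProject.ProjectNumber.Returns(1);

The same shape repeats for the configuration, selected-datasets and command stubs in the hunk above.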
Assert.AreEqual(3,dt.Rows.Count); - - - Assert.AreEqual(123, dt.Rows[0]["Col2"]); + Assert.AreEqual(3, dt.Rows.Count); - var receiver = new ToMemoryDataLoadEventListener(true); - var result = new RemoveDuplicates().ProcessPipelineData(dt, receiver, new GracefulCancellationToken()); + Assert.AreEqual(123, dt.Rows[0]["Col2"]); - //should have told us that it processed 3 rows - Assert.AreEqual(3,receiver.LastProgressRecieivedByTaskName["Evaluating For Duplicates"].Progress.Value); + var receiver = new ToMemoryDataLoadEventListener(true); - //and discarded 2 of them as duplicates - Assert.AreEqual(2, receiver.LastProgressRecieivedByTaskName["Discarding Duplicates"].Progress.Value); + var result = new RemoveDuplicates().ProcessPipelineData(dt, receiver, new GracefulCancellationToken()); - Assert.AreEqual(1, result.Rows.Count); - Assert.AreEqual("Fish", result.Rows[0]["Col1"]); - Assert.AreEqual(123, result.Rows[0]["Col2"]); - } + //should have told us that it processed 3 rows + Assert.AreEqual(3, receiver.LastProgressRecieivedByTaskName["Evaluating For Duplicates"].Progress.Value); - [Test] - public void TestEmptyDataTable() - { - Assert.AreEqual(0,new RemoveDuplicates().ProcessPipelineData(new DataTable(),new ThrowImmediatelyDataLoadEventListener(),new GracefulCancellationToken()).Rows.Count); - } + //and discarded 2 of them as duplicates + Assert.AreEqual(2, receiver.LastProgressRecieivedByTaskName["Discarding Duplicates"].Progress.Value); - [Test] - public void TestMultipleBatches() - { - DataTable dt = new DataTable(); - dt.Columns.Add("Col1"); - dt.Columns.Add("Col2", typeof(int)); + Assert.AreEqual(1, result.Rows.Count); + Assert.AreEqual("Fish", result.Rows[0]["Col1"]); + Assert.AreEqual(123, result.Rows[0]["Col2"]); + } - dt.Rows.Add("Fish", 123); - dt.Rows.Add("Fish", 123); + [Test] + public void TestEmptyDataTable() + { + Assert.AreEqual(0, + new RemoveDuplicates().ProcessPipelineData(new DataTable(), ThrowImmediatelyDataLoadEventListener.Quiet, + new GracefulCancellationToken()).Rows.Count); + } + [Test] + public void TestMultipleBatches() + { + var dt = new DataTable(); + dt.Columns.Add("Col1"); + dt.Columns.Add("Col2", typeof(int)); - DataTable dt2 = new DataTable(); - dt2.Columns.Add("Col1"); - dt2.Columns.Add("Col2", typeof(int)); + dt.Rows.Add("Fish", 123); + dt.Rows.Add("Fish", 123); - dt2.Rows.Add("Fish", 123); - dt2.Rows.Add("Haddock", 123); + var dt2 = new DataTable(); + dt2.Columns.Add("Col1"); + dt2.Columns.Add("Col2", typeof(int)); - var remover = new RemoveDuplicates(); + dt2.Rows.Add("Fish", 123); + dt2.Rows.Add("Haddock", 123); - //send it the batch with the duplication it will return 1 row - Assert.AreEqual(1,remover.ProcessPipelineData(dt, new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()).Rows.Count); - //now send it the second batch which contains 2 records, one duplication against first batch and one new one, expect only 1 row to come back - Assert.AreEqual(1, remover.ProcessPipelineData(dt2, new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()).Rows.Count); - } + var remover = new RemoveDuplicates(); - [Test] - public void TestNulls() - { - DataTable dt = new DataTable(); - dt.Columns.Add("Col1"); - dt.Columns.Add("Col2", typeof(int)); + //send it the batch with the duplication it will return 1 row + Assert.AreEqual(1, + remover.ProcessPipelineData(dt, ThrowImmediatelyDataLoadEventListener.Quiet, + new GracefulCancellationToken()).Rows.Count); - dt.Rows.Add("Fish", 123); - dt.Rows.Add("Fish", null); - 
dt.Rows.Add(null, 123); - dt.Rows.Add("Pizza", null); - dt.Rows.Add(null, null); - dt.Rows.Add(null, null); + //now send it the second batch which contains 2 records, one duplication against first batch and one new one, expect only 1 row to come back + Assert.AreEqual(1, + remover.ProcessPipelineData(dt2, ThrowImmediatelyDataLoadEventListener.Quiet, + new GracefulCancellationToken()).Rows.Count); + } - var remover = new RemoveDuplicates(); + [Test] + public void TestNulls() + { + var dt = new DataTable(); + dt.Columns.Add("Col1"); + dt.Columns.Add("Col2", typeof(int)); - Assert.AreEqual(6,dt.Rows.Count); + dt.Rows.Add("Fish", 123); + dt.Rows.Add("Fish", null); + dt.Rows.Add(null, 123); + dt.Rows.Add("Pizza", null); + dt.Rows.Add(null, null); + dt.Rows.Add(null, null); - //send it the batch with the duplication it will return 5 rows (the only duplicate is the double null) - Assert.AreEqual(5, remover.ProcessPipelineData(dt, new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()).Rows.Count); + var remover = new RemoveDuplicates(); + Assert.AreEqual(6, dt.Rows.Count); - } + //send it the batch with the duplication it will return 5 rows (the only duplicate is the double null) + Assert.AreEqual(5, + remover.ProcessPipelineData(dt, ThrowImmediatelyDataLoadEventListener.Quiet, + new GracefulCancellationToken()).Rows.Count); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/Components/TransposerTests.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/Components/TransposerTests.cs index b65a6d1bc0..c310515340 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/Components/TransposerTests.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/Components/TransposerTests.cs @@ -11,86 +11,90 @@ using Rdmp.Core.DataLoad.Engine.Job; using Rdmp.Core.DataLoad.Modules.DataFlowOperations; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration.PipelineTests.Components +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration.PipelineTests.Components; + +[TestFixture] +[Category("Unit")] +public class TransposerTests { - [TestFixture] - [Category("Unit")] - public class TransposerTests - { - DataTable dt = new DataTable(); + private DataTable dt = new(); - [OneTimeSetUp] - public virtual void OneTimeSetUp() - { - dt.Columns.Add("recipe"); - dt.Columns.Add("Fishcakes"); - dt.Columns.Add("Chips"); - dt.Columns.Add("Gateau"); - dt.Rows.Add("protein", "20", "30", "40"); - dt.Rows.Add("fat", "11", "2", "33"); - dt.Rows.Add("carbohydrate", "55", "0", "5"); - } - - [Test] - public void TransposerTest_ThrowOnDualBatches() - { - var transposer = new Transposer(); - transposer.ProcessPipelineData(dt, new ThrowImmediatelyDataLoadJob(), new GracefulCancellationToken()); - var ex = Assert.Throws(()=>transposer.ProcessPipelineData(dt, new ThrowImmediatelyDataLoadJob(), new GracefulCancellationToken())); - Assert.AreEqual("Error, we received multiple batches, Transposer only works when all the data arrives in a single DataTable",ex.Message); - } + [OneTimeSetUp] + public virtual void OneTimeSetUp() + { + dt.Columns.Add("recipe"); + dt.Columns.Add("Fishcakes"); + dt.Columns.Add("Chips"); + dt.Columns.Add("Gateau"); + dt.Rows.Add("protein", "20", "30", "40"); + dt.Rows.Add("fat", "11", "2", "33"); + dt.Rows.Add("carbohydrate", "55", "0", "5"); + } - [Test] - public void TransposerTest_ThrowOnEmptyDataTable() - { - var transposer = new Transposer(); - var ex = 
Assert.Throws(()=>transposer.ProcessPipelineData(new DataTable(), new ThrowImmediatelyDataLoadJob(), new GracefulCancellationToken())); - Assert.AreEqual("DataTable toProcess had 0 rows and 0 columns, thus it cannot be transposed", ex.Message); - } + [Test] + public void TransposerTest_ThrowOnDualBatches() + { + var transposer = new Transposer(); + transposer.ProcessPipelineData(dt, new ThrowImmediatelyDataLoadJob(), new GracefulCancellationToken()); + var ex = Assert.Throws(() => + transposer.ProcessPipelineData(dt, new ThrowImmediatelyDataLoadJob(), new GracefulCancellationToken())); + Assert.AreEqual( + "Error, we received multiple batches, Transposer only works when all the data arrives in a single DataTable", + ex.Message); + } + [Test] + public void TransposerTest_ThrowOnEmptyDataTable() + { + var transposer = new Transposer(); + var ex = Assert.Throws(() => + transposer.ProcessPipelineData(new DataTable(), new ThrowImmediatelyDataLoadJob(), + new GracefulCancellationToken())); + Assert.AreEqual("DataTable toProcess had 0 rows and 0 columns, thus it cannot be transposed", ex.Message); + } - [Test] - public void TransposerTest_TableTransposed() - { - var transposer = new Transposer(); - var actual = transposer.ProcessPipelineData(dt, new ThrowImmediatelyDataLoadJob(), new GracefulCancellationToken()); - DataTable expectedResult = new DataTable(); + [Test] + public void TransposerTest_TableTransposed() + { + var transposer = new Transposer(); + var actual = + transposer.ProcessPipelineData(dt, new ThrowImmediatelyDataLoadJob(), new GracefulCancellationToken()); + + var expectedResult = new DataTable(); - expectedResult.Columns.Add("recipe"); - expectedResult.Columns.Add("protein"); - expectedResult.Columns.Add("fat"); - expectedResult.Columns.Add("carbohydrate"); + expectedResult.Columns.Add("recipe"); + expectedResult.Columns.Add("protein"); + expectedResult.Columns.Add("fat"); + expectedResult.Columns.Add("carbohydrate"); - expectedResult.Rows.Add("Fishcakes", "20", "11", "55"); - expectedResult.Rows.Add("Chips", "30", "2", "0"); - expectedResult.Rows.Add("Gateau", "40", "33", "5"); + expectedResult.Rows.Add("Fishcakes", "20", "11", "55"); + expectedResult.Rows.Add("Chips", "30", "2", "0"); + expectedResult.Rows.Add("Gateau", "40", "33", "5"); - for (int i = 0; i < actual.Columns.Count; i++) - Assert.AreEqual(expectedResult.Columns[i].ColumnName, actual.Columns[i].ColumnName); + for (var i = 0; i < actual.Columns.Count; i++) + Assert.AreEqual(expectedResult.Columns[i].ColumnName, actual.Columns[i].ColumnName); - for (int i = 0; i < expectedResult.Rows.Count; i++) - for (int j = 0; j < actual.Columns.Count; j++) - Assert.AreEqual(expectedResult.Rows[i][j], actual.Rows[i][j]); + for (var i = 0; i < expectedResult.Rows.Count; i++) + for (var j = 0; j < actual.Columns.Count; j++) + Assert.AreEqual(expectedResult.Rows[i][j], actual.Rows[i][j]); + } - } + [Test] + public void TestTransposerDodgyHeaders() + { + var dr = dt.Rows.Add("32 GramMax", "55", "0", "5"); - [Test] - public void TestTransposerDodgyHeaders() + var transposer = new Transposer { + MakeHeaderNamesSane = true + }; + var actual = + transposer.ProcessPipelineData(dt, new ThrowImmediatelyDataLoadJob(), new GracefulCancellationToken()); - var dr = dt.Rows.Add("32 GramMax", "55", "0", "5"); - - var transposer = new Transposer(); - transposer.MakeHeaderNamesSane = true; - var actual = transposer.ProcessPipelineData(dt, new ThrowImmediatelyDataLoadJob(), new GracefulCancellationToken()); - - 
Assert.IsTrue(actual.Columns.Contains("_32GramMax")); + Assert.IsTrue(actual.Columns.Contains("_32GramMax")); - dt.Rows.Remove(dr); - - } + dt.Rows.Remove(dr); } - -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/PipelineArgumentTests.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/PipelineArgumentTests.cs index 6539a48cfc..d55c7d4eb0 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/PipelineArgumentTests.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/PipelineArgumentTests.cs @@ -9,48 +9,44 @@ using Rdmp.Core.Curation.Data.Pipelines; using Tests.Common; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration.PipelineTests +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration.PipelineTests; + +public class PipelineArgumentTests : DatabaseTests { - public class PipelineArgumentTests:DatabaseTests + [Test] + [TestCase(typeof(int?), null)] + [TestCase(typeof(float?), null)] + [TestCase(typeof(double?), null)] + [TestCase(typeof(char?), null)] + [TestCase(typeof(DateTime?), null)] + [TestCase(typeof(int?), 3)] + [TestCase(typeof(float?), 10.01f)] + [TestCase(typeof(double?), 10.999)] + [TestCase(typeof(char?), 'K')] + [TestCase(typeof(DateTime?), "now")] //sadly can't pass DateTime.Now + public void TestIArgumentsForNullableTypes(Type nullableType, object value) { - [Test] - [TestCase(typeof(int?),null)] - [TestCase(typeof(float?), null)] - [TestCase(typeof(double?), null)] - [TestCase(typeof(char?), null)] - [TestCase(typeof(DateTime?), null)] - - - [TestCase(typeof(int?), 3)] - [TestCase(typeof(float?), 10.01f)] - [TestCase(typeof(double?), 10.999)] - [TestCase(typeof(char?), 'K')] - [TestCase(typeof(DateTime?), "now")] //sadly can't pass DateTime.Now + if (string.Equals(value as string, "now")) //sadly can't pass DateTime.Now + value = new DateTime(2001, 01, 01, 3, 20, + 11); //hey btw when you put in milliseconds into DateTime IArgument it drops them... due to DateTime.Parse? or DateTime.ToString()? - public void TestIArgumentsForNullableTypes(Type nullableType,object value) + var p = new Pipeline(CatalogueRepository); + var pc = new PipelineComponent(CatalogueRepository, p, + GetType() //Normally this would be the PipelineComponent hosted class which would be a proper class declared as a MEF export with DemandsInitialization etc but we don't need all that + , 0 + , "My imaginary Pipe Component"); + var arg = new PipelineComponentArgument(CatalogueRepository, pc); + try { - if (String.Equals(value as String, "now")) //sadly can't pass DateTime.Now - value = new DateTime(2001, 01, 01, 3, 20, 11); //hey btw when you put in milliseconds into DateTime IArgument it drops them... due to DateTime.Parse? or DateTime.ToString()? 
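The comment above wonders whether DateTime.Parse or DateTime.ToString is to blame for the dropped milliseconds. The self-contained illustration below shows the likely mechanism, under the assumption that the argument value is round-tripped through a general-format string (an assumption about IArgument's storage, not something this diff states):

using System;
using System.Globalization;

var original = new DateTime(2001, 1, 1, 3, 20, 11, 837);    // note the 837ms

// The general date/time format carries no millisecond component, so a
// string round trip silently discards it.
var text  = original.ToString(CultureInfo.InvariantCulture); // "01/01/2001 03:20:11"
var lossy = DateTime.Parse(text, CultureInfo.InvariantCulture);
Console.WriteLine(lossy.Millisecond);                        // 0

// The round-trip ("o") format would keep the full precision.
var exact = DateTime.ParseExact(original.ToString("o"), "o", CultureInfo.InvariantCulture);
Console.WriteLine(exact.Millisecond);                        // 837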
- - var p = new Pipeline(CatalogueRepository); - var pc = new PipelineComponent(CatalogueRepository,p, - GetType() //Normally this would be the PipelineComponent hosted class which would be a proper class declared as a MEF export with DemandsInitialization etc but we don't need all that - ,0 - ,"My imaginary Pipe Component"); - var arg = new PipelineComponentArgument(CatalogueRepository,pc); - try - { - arg.SetType(nullableType); - arg.SetValue(value); + arg.SetType(nullableType); + arg.SetValue(value); - Assert.AreEqual(nullableType,arg.GetSystemType()); - Assert.AreEqual(value,arg.GetValueAsSystemType()); - - } - finally - { - p.DeleteInDatabase(); - } + Assert.AreEqual(nullableType, arg.GetSystemType()); + Assert.AreEqual(value, arg.GetValueAsSystemType()); + } + finally + { + p.DeleteInDatabase(); } } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/PipelineReadPerformanceTest.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/PipelineReadPerformanceTest.cs index 25a6949e0e..5a1e3c75a1 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/PipelineReadPerformanceTest.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/PipelineReadPerformanceTest.cs @@ -5,49 +5,44 @@ // You should have received a copy of the GNU General Public License along with RDMP. If not, see . using System; -using System.Data.Common; using NUnit.Framework; using Tests.Common; using Tests.Common.Scenarios; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration.PipelineTests +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration.PipelineTests; + +public class PipelineReadPerformanceTest : DatabaseTests { - public class PipelineReadPerformanceTest:DatabaseTests + private BulkTestsData _bulkTestData; + + [OneTimeSetUp] + protected override void OneTimeSetUp() + { + base.OneTimeSetUp(); + + _bulkTestData = new BulkTestsData(CatalogueRepository, GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer)); + _bulkTestData.SetupTestData(); + } + + [Test] + public void BulkTestDataContainsExpectedNumberOfRows() { - private BulkTestsData _bulkTestData; - - [OneTimeSetUp] - protected override void OneTimeSetUp() - { - base.OneTimeSetUp(); - - _bulkTestData = new BulkTestsData(CatalogueRepository, GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer)); - _bulkTestData.SetupTestData(); - - } - - [Test] - public void BulkTestDataContainsExpectedNumberOfRows() - { - var server = _bulkTestData.BulkDataDatabase.Server; - - using (DbConnection con = server.GetConnection()) - { - con.Open(); - DbCommand cmd = server.GetCommand("Select count(*) from " + BulkTestsData.BulkDataTable, con); - int manualCount = Convert.ToInt32(cmd.ExecuteScalar()); - - //manual count matches expected - Assert.AreEqual(_bulkTestData.ExpectedNumberOfRowsInTestData,manualCount); - - //now get the fast approximate rowcount - int fastRowcount = _bulkTestData.BulkDataDatabase - .ExpectTable(BulkTestsData.BulkDataTable) - .GetRowCount(); - - //it should also match - Assert.AreEqual(_bulkTestData.ExpectedNumberOfRowsInTestData,fastRowcount); - } - } + var server = _bulkTestData.BulkDataDatabase.Server; + + using var con = server.GetConnection(); + con.Open(); + var cmd = server.GetCommand($"Select count(*) from {BulkTestsData.BulkDataTable}", con); + var manualCount = Convert.ToInt32(cmd.ExecuteScalar()); + + //manual count matches expected + Assert.AreEqual(_bulkTestData.ExpectedNumberOfRowsInTestData, manualCount); + + //now get the fast approximate rowcount + var 
fastRowcount = _bulkTestData.BulkDataDatabase + .ExpectTable(BulkTestsData.BulkDataTable) + .GetRowCount(); + + //it should also match + Assert.AreEqual(_bulkTestData.ExpectedNumberOfRowsInTestData, fastRowcount); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/Sources/DelimitedFileSourceTests.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/Sources/DelimitedFileSourceTests.cs index 6eec5f50ec..28a10e8485 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/Sources/DelimitedFileSourceTests.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/Sources/DelimitedFileSourceTests.cs @@ -9,7 +9,6 @@ using System.IO; using System.Linq; using System.Text; -using CsvHelper; using FAnsi.Discovery; using NUnit.Framework; using Rdmp.Core.DataFlowPipeline; @@ -17,698 +16,722 @@ using Rdmp.Core.DataLoad.Engine.Job; using Rdmp.Core.DataLoad.Modules.DataFlowSources; using Rdmp.Core.DataLoad.Modules.Exceptions; -using ReusableLibraryCode.Progress; +using Rdmp.Core.ReusableLibraryCode.Progress; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration.PipelineTests.Sources -{ - [Category("Unit")] - public class DelimitedFileSourceTests - { - private string filename; +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration.PipelineTests.Sources; - [SetUp] - public void SetUp() - { - filename = Path.Combine(TestContext.CurrentContext.TestDirectory, "DelimitedFileSourceTests.txt"); +[Category("Unit")] +public class DelimitedFileSourceTests +{ + private string filename; - if(File.Exists(filename)) - File.Delete(filename); + [SetUp] + public void SetUp() + { + filename = Path.Combine(TestContext.CurrentContext.TestDirectory, "DelimitedFileSourceTests.txt"); - StringBuilder sb = new StringBuilder(); + if (File.Exists(filename)) + File.Delete(filename); - sb.AppendLine("CHI,StudyID,Date"); - sb.AppendLine("0101010101,5,2001-01-05"); + var sb = new StringBuilder(); - File.WriteAllText(filename, sb.ToString()); - } + sb.AppendLine("CHI,StudyID,Date"); + sb.AppendLine("0101010101,5,2001-01-05"); - [Test] - public void FileToLoadNotSet_Throws() - { - DelimitedFlatFileDataFlowSource source = new DelimitedFlatFileDataFlowSource(); - var ex = Assert.Throws(()=>source.GetChunk(new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken())); - StringAssert.Contains("_fileToLoad was not set",ex.Message); - } - [Test] - public void SeparatorNotSet_Throws() - { - var testFile = new FileInfo(filename); - DelimitedFlatFileDataFlowSource source = new DelimitedFlatFileDataFlowSource(); - source.PreInitialize(new FlatFileToLoad(testFile),new ThrowImmediatelyDataLoadEventListener() ); - var ex = Assert.Throws(()=>source.GetChunk(new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken())); - StringAssert.Contains("Separator has not been set", ex.Message); - } - [Test] - public void LoadCSVWithCorrectDatatypes_ForceHeadersWhitespace() - { - var testFile = new FileInfo(filename); + File.WriteAllText(filename, sb.ToString()); + } - DelimitedFlatFileDataFlowSource source = new DelimitedFlatFileDataFlowSource(); - source.PreInitialize(new FlatFileToLoad(testFile), new ThrowImmediatelyDataLoadEventListener()); - source.Separator = ","; - source.ForceHeaders = "chi ,Study ID\t ,Date"; - source.ForceHeadersReplacesFirstLineInFile = true; - source.StronglyTypeInput = true;//makes the source interpret the file types properly + [Test] + public void FileToLoadNotSet_Throws() + { + var source = new 
DelimitedFlatFileDataFlowSource(); + var ex = Assert.Throws(() => + source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken())); + StringAssert.Contains("_fileToLoad was not set", ex.Message); + } - var chunk = source.GetChunk(new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()); + [Test] + public void SeparatorNotSet_Throws() + { + var testFile = new FileInfo(filename); + var source = new DelimitedFlatFileDataFlowSource(); + source.PreInitialize(new FlatFileToLoad(testFile), ThrowImmediatelyDataLoadEventListener.Quiet); + var ex = Assert.Throws(() => + source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken())); + StringAssert.Contains("Separator has not been set", ex.Message); + } - Console.WriteLine("Resulting columns were:" + string.Join("," , chunk.Columns.Cast().Select(c=>c.ColumnName))); + [Test] + public void LoadCSVWithCorrectDatatypes_ForceHeadersWhitespace() + { + var testFile = new FileInfo(filename); - Assert.IsTrue(chunk.Columns.Contains("chi")); //notice the lack of whitespace! - Assert.IsTrue(chunk.Columns.Contains("study ID")); //whitespace is allowed in the middle though... because we like a challenge! + var source = new DelimitedFlatFileDataFlowSource(); + source.PreInitialize(new FlatFileToLoad(testFile), ThrowImmediatelyDataLoadEventListener.Quiet); + source.Separator = ","; + source.ForceHeaders = "chi ,Study ID\t ,Date"; + source.ForceHeadersReplacesFirstLineInFile = true; + source.StronglyTypeInput = true; //makes the source interpret the file types properly - Assert.AreEqual(3,chunk.Columns.Count); - Assert.AreEqual(1, chunk.Rows.Count); - Assert.AreEqual("0101010101", chunk.Rows[0][0]); - Assert.AreEqual(5, chunk.Rows[0][1]); - Assert.AreEqual(new DateTime(2001 , 1 , 5), chunk.Rows[0][2]);//notice the strong typing (we are not looking for strings here) - - source.Dispose(new ThrowImmediatelyDataLoadEventListener(), null); - } + var chunk = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); - [Test] - public void LoadCSVWithCorrectDatatypes_DatatypesAreCorrect() - { + Console.WriteLine( + $"Resulting columns were:{string.Join(",", chunk.Columns.Cast().Select(c => c.ColumnName))}"); - var testFile = new FileInfo(filename); - DelimitedFlatFileDataFlowSource source = new DelimitedFlatFileDataFlowSource(); - source.PreInitialize(new FlatFileToLoad(testFile), new ThrowImmediatelyDataLoadEventListener()); - source.Separator = ","; - source.StronglyTypeInput = true;//makes the source interpret the file types properly + Assert.IsTrue(chunk.Columns.Contains("chi")); //notice the lack of whitespace! + Assert.IsTrue( + chunk.Columns + .Contains("study ID")); //whitespace is allowed in the middle though... because we like a challenge! 
- var chunk = source.GetChunk(new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()); + Assert.AreEqual(3, chunk.Columns.Count); + Assert.AreEqual(1, chunk.Rows.Count); + Assert.AreEqual("0101010101", chunk.Rows[0][0]); + Assert.AreEqual(5, chunk.Rows[0][1]); + Assert.AreEqual(new DateTime(2001, 1, 5), + chunk.Rows[0][2]); //notice the strong typing (we are not looking for strings here) - Assert.AreEqual(3, chunk.Columns.Count); - Assert.AreEqual(1, chunk.Rows.Count); - Assert.AreEqual("0101010101", chunk.Rows[0][0]); - Assert.AreEqual(5, chunk.Rows[0][1]); - Assert.AreEqual(new DateTime(2001, 1, 5), chunk.Rows[0][2]);//notice the strong typing (we are not looking for strings here) + source.Dispose(ThrowImmediatelyDataLoadEventListener.Quiet, null); + } - source.Dispose(new ThrowImmediatelyDataLoadEventListener(), null); - } - [Test] - public void OverrideDatatypes_ForcedFreakyTypesCorrect() - { + [Test] + public void LoadCSVWithCorrectDatatypes_DatatypesAreCorrect() + { + var testFile = new FileInfo(filename); + var source = new DelimitedFlatFileDataFlowSource(); + source.PreInitialize(new FlatFileToLoad(testFile), ThrowImmediatelyDataLoadEventListener.Quiet); + source.Separator = ","; + source.StronglyTypeInput = true; //makes the source interpret the file types properly + + var chunk = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); + + Assert.AreEqual(3, chunk.Columns.Count); + Assert.AreEqual(1, chunk.Rows.Count); + Assert.AreEqual("0101010101", chunk.Rows[0][0]); + Assert.AreEqual(5, chunk.Rows[0][1]); + Assert.AreEqual(new DateTime(2001, 1, 5), + chunk.Rows[0][2]); //notice the strong typing (we are not looking for strings here) + + source.Dispose(ThrowImmediatelyDataLoadEventListener.Quiet, null); + } - var testFile = new FileInfo(filename); - DelimitedFlatFileDataFlowSource source = new DelimitedFlatFileDataFlowSource(); - source.PreInitialize(new FlatFileToLoad(testFile), new ThrowImmediatelyDataLoadEventListener()); - source.Separator = ","; - source.StronglyTypeInput = true;//makes the source interpret the file types properly - - source.ExplicitlyTypedColumns = new ExplicitTypingCollection(); - source.ExplicitlyTypedColumns.ExplicitTypesCSharp.Add("StudyID",typeof(string)); - - //preview should be correct - DataTable preview = source.TryGetPreview(); - Assert.AreEqual(typeof(string), preview.Columns["StudyID"].DataType); - Assert.AreEqual("5", preview.Rows[0]["StudyID"]); - - //as should live run - var chunk = source.GetChunk(new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()); - Assert.AreEqual(typeof(string), chunk.Columns["StudyID"].DataType); - Assert.AreEqual("5", chunk.Rows[0]["StudyID"]); - - source.Dispose(new ThrowImmediatelyDataLoadEventListener(), null); - } + [Test] + public void OverrideDatatypes_ForcedFreakyTypesCorrect() + { + var testFile = new FileInfo(filename); + var source = new DelimitedFlatFileDataFlowSource(); + source.PreInitialize(new FlatFileToLoad(testFile), ThrowImmediatelyDataLoadEventListener.Quiet); + source.Separator = ","; + source.StronglyTypeInput = true; //makes the source interpret the file types properly + + source.ExplicitlyTypedColumns = new ExplicitTypingCollection(); + source.ExplicitlyTypedColumns.ExplicitTypesCSharp.Add("StudyID", typeof(string)); + + //preview should be correct + var preview = source.TryGetPreview(); + Assert.AreEqual(typeof(string), preview.Columns["StudyID"].DataType); + Assert.AreEqual("5", preview.Rows[0]["StudyID"]); 
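Most of the churn in these hunks is the swap from new ThrowImmediatelyDataLoadEventListener() (and new ThrowImmediatelyCheckNotifier()) to the shared Quiet members. The sketch below shows the shape of that pattern only; the property it uses is an assumption for illustration, not RDMP's actual implementation.

// Illustrative sketch, not the RDMP class: one shared, silent listener instance
// replaces the many per-call allocations the old test code created.
public sealed class ThrowImmediatelyListenerSketch
{
    // Assumed knob for the sketch; the real class's configuration may differ.
    public bool WriteToConsole { get; init; }

    public static readonly ThrowImmediatelyListenerSketch Quiet = new() { WriteToConsole = false };
}

Call sites then read source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, token), exactly as in the hunks above.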
+ + //as should live run + var chunk = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); + Assert.AreEqual(typeof(string), chunk.Columns["StudyID"].DataType); + Assert.AreEqual("5", chunk.Rows[0]["StudyID"]); + + source.Dispose(ThrowImmediatelyDataLoadEventListener.Quiet, null); + } - [Test] - public void TestIgnoreQuotes() - { - if (File.Exists(filename)) - File.Delete(filename); + [Test] + public void TestIgnoreQuotes() + { + if (File.Exists(filename)) + File.Delete(filename); - StringBuilder sb = new StringBuilder(); + var sb = new StringBuilder(); - sb.AppendLine("Number,Field"); - sb.AppendLine("1,\"Sick\" headaches"); - sb.AppendLine("2,2\" length of wood"); - sb.AppendLine("3,\"\"The bends\"\""); + sb.AppendLine("Number,Field"); + sb.AppendLine("1,\"Sick\" headaches"); + sb.AppendLine("2,2\" length of wood"); + sb.AppendLine("3,\"\"The bends\"\""); - File.WriteAllText(filename, sb.ToString()); + File.WriteAllText(filename, sb.ToString()); - var testFile = new FileInfo(filename); + var testFile = new FileInfo(filename); - DelimitedFlatFileDataFlowSource source = new DelimitedFlatFileDataFlowSource(); - source.PreInitialize(new FlatFileToLoad(testFile), new ThrowImmediatelyDataLoadEventListener()); - source.Separator = ","; - source.IgnoreQuotes = true; - source.MaxBatchSize = 10000; - source.StronglyTypeInput = true;//makes the source interpret the file types properly - var dt = source.GetChunk(new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()); - Assert.AreEqual(3, dt.Rows.Count); - Assert.AreEqual("\"Sick\" headaches", dt.Rows[0][1]); - Assert.AreEqual("2\" length of wood", dt.Rows[1][1]); - Assert.AreEqual("\"\"The bends\"\"", dt.Rows[2][1]); + var source = new DelimitedFlatFileDataFlowSource(); + source.PreInitialize(new FlatFileToLoad(testFile), ThrowImmediatelyDataLoadEventListener.Quiet); + source.Separator = ","; + source.IgnoreQuotes = true; + source.MaxBatchSize = 10000; + source.StronglyTypeInput = true; //makes the source interpret the file types properly + var dt = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); + Assert.AreEqual(3, dt.Rows.Count); + Assert.AreEqual("\"Sick\" headaches", dt.Rows[0][1]); + Assert.AreEqual("2\" length of wood", dt.Rows[1][1]); + Assert.AreEqual("\"\"The bends\"\"", dt.Rows[2][1]); - source.Dispose(new ThrowImmediatelyDataLoadEventListener(),null); - } + source.Dispose(ThrowImmediatelyDataLoadEventListener.Quiet, null); + } - [TestCase(BadDataHandlingStrategy.DivertRows)] - [TestCase(BadDataHandlingStrategy.IgnoreRows)] - [TestCase(BadDataHandlingStrategy.ThrowException)] - public void BadDataTestExtraColumns(BadDataHandlingStrategy strategy) - { - if (File.Exists(filename)) - File.Delete(filename); + [TestCase(BadDataHandlingStrategy.DivertRows)] + [TestCase(BadDataHandlingStrategy.IgnoreRows)] + [TestCase(BadDataHandlingStrategy.ThrowException)] + public void BadDataTestExtraColumns(BadDataHandlingStrategy strategy) + { + if (File.Exists(filename)) + File.Delete(filename); - StringBuilder sb = new StringBuilder(); - sb.AppendLine("CHI,StudyID,Date"); - sb.AppendLine("0101010101,5,2001-01-05"); - sb.AppendLine("0101010101,5,2001-01-05"); - sb.AppendLine("0101010101,5,2001-01-05,fish,watafak"); - sb.AppendLine("0101010101,5,2001-01-05"); - sb.AppendLine("0101010101,5,2001-01-05"); + var sb = new StringBuilder(); + sb.AppendLine("CHI,StudyID,Date"); + sb.AppendLine("0101010101,5,2001-01-05"); + 
sb.AppendLine("0101010101,5,2001-01-05"); + sb.AppendLine("0101010101,5,2001-01-05,fish,watafak"); + sb.AppendLine("0101010101,5,2001-01-05"); + sb.AppendLine("0101010101,5,2001-01-05"); - File.WriteAllText(filename, sb.ToString()); + File.WriteAllText(filename, sb.ToString()); - var testFile = new FileInfo(filename); + var testFile = new FileInfo(filename); - DelimitedFlatFileDataFlowSource source = new DelimitedFlatFileDataFlowSource(); - source.PreInitialize(new FlatFileToLoad(testFile), new ThrowImmediatelyDataLoadEventListener()); - source.Separator = ","; + var source = new DelimitedFlatFileDataFlowSource(); + source.PreInitialize(new FlatFileToLoad(testFile), ThrowImmediatelyDataLoadEventListener.Quiet); + source.Separator = ","; - source.MaxBatchSize = 10000; + source.MaxBatchSize = 10000; - source.StronglyTypeInput = true;//makes the source interpret the file types properly - source.BadDataHandlingStrategy = strategy; - try - { - switch (strategy) - { - case BadDataHandlingStrategy.ThrowException: - var ex = Assert.Throws(() => source.GetChunk(new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken())); - StringAssert.Contains("line 4",ex.Message); - break; - case BadDataHandlingStrategy.IgnoreRows: - var dt = source.GetChunk(new ThrowImmediatelyDataLoadEventListener(),new GracefulCancellationToken()); - Assert.IsNotNull(dt); - - Assert.AreEqual(4,dt.Rows.Count); - break; - case BadDataHandlingStrategy.DivertRows: - var dt2 = source.GetChunk(new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()); - Assert.AreEqual(4, dt2.Rows.Count); - - Assert.IsNotNull(source.EventHandlers.DivertErrorsFile); - - Assert.AreEqual("0101010101,5,2001-01-05,fish,watafak"+Environment.NewLine, File.ReadAllText(source.EventHandlers.DivertErrorsFile.FullName)); - - break; - default: - throw new ArgumentOutOfRangeException("strategy"); - } - } - finally + source.StronglyTypeInput = true; //makes the source interpret the file types properly + source.BadDataHandlingStrategy = strategy; + try + { + switch (strategy) { - source.Dispose(new ThrowImmediatelyDataLoadEventListener(), null); + case BadDataHandlingStrategy.ThrowException: + var ex = Assert.Throws(() => + source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken())); + StringAssert.Contains("line 4", ex.Message); + break; + case BadDataHandlingStrategy.IgnoreRows: + var dt = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, + new GracefulCancellationToken()); + Assert.IsNotNull(dt); + + Assert.AreEqual(4, dt.Rows.Count); + break; + case BadDataHandlingStrategy.DivertRows: + var dt2 = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, + new GracefulCancellationToken()); + Assert.AreEqual(4, dt2.Rows.Count); + + Assert.IsNotNull(source.EventHandlers.DivertErrorsFile); + + Assert.AreEqual($"0101010101,5,2001-01-05,fish,watafak{Environment.NewLine}", + File.ReadAllText(source.EventHandlers.DivertErrorsFile.FullName)); + + break; + default: + throw new ArgumentOutOfRangeException(nameof(strategy)); } - } - - [Test] - public void DelimitedFlatFileDataFlowSource_ProperQuoteEscaping() + finally { - if (File.Exists(filename)) - File.Delete(filename); + source.Dispose(ThrowImmediatelyDataLoadEventListener.Quiet, null); + } + } - StringBuilder sb = new StringBuilder(); - sb.AppendLine("CHI,Name,SomeInterestingFacts,Date"); - sb.AppendLine("0101010101,Dave,Dave is over 1000 years old,2001-01-05"); - sb.AppendLine("0101010101,Dave,\"Dave is \"\"over\"\" 1000 years 
old\",2001-01-05"); //https://tools.ietf.org/html/rfc4180 (to properly include quotes in escaped text you need to use "") + [Test] + public void DelimitedFlatFileDataFlowSource_ProperQuoteEscaping() + { + if (File.Exists(filename)) + File.Delete(filename); - File.WriteAllText(filename, sb.ToString()); + var sb = new StringBuilder(); + sb.AppendLine("CHI,Name,SomeInterestingFacts,Date"); + sb.AppendLine("0101010101,Dave,Dave is over 1000 years old,2001-01-05"); + sb.AppendLine( + "0101010101,Dave,\"Dave is \"\"over\"\" 1000 years old\",2001-01-05"); //https://tools.ietf.org/html/rfc4180 (to properly include quotes in escaped text you need to use "") - var testFile = new FileInfo(filename); + File.WriteAllText(filename, sb.ToString()); - DelimitedFlatFileDataFlowSource source = new DelimitedFlatFileDataFlowSource(); - source.PreInitialize(new FlatFileToLoad(testFile), new ThrowImmediatelyDataLoadEventListener()); - source.Separator = ","; - source.MaxBatchSize = 10000; + var testFile = new FileInfo(filename); - source.StronglyTypeInput = true; //makes the source interpret the file types properly - source.BadDataHandlingStrategy = BadDataHandlingStrategy.ThrowException; - source.IgnoreBadReads = false; + var source = new DelimitedFlatFileDataFlowSource(); + source.PreInitialize(new FlatFileToLoad(testFile), ThrowImmediatelyDataLoadEventListener.Quiet); + source.Separator = ","; + source.MaxBatchSize = 10000; - try - { - var chunk = source.GetChunk(new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()); - Assert.AreEqual(2, chunk.Rows.Count); - Assert.AreEqual("Dave is \"over\" 1000 years old", chunk.Rows[1][2]); - } - finally - { - source.Dispose(new ThrowImmediatelyDataLoadEventListener(), null); - } - } + source.StronglyTypeInput = true; //makes the source interpret the file types properly + source.BadDataHandlingStrategy = BadDataHandlingStrategy.ThrowException; + source.IgnoreBadReads = false; - /// - /// Test checks that IgnoreBadReads lets you load quotes in the middle of free text without having to set IgnoreQuotes to true: - /// 1. There is a row (2) with quotes in the middle which should get loaded correctly - /// 2. there's a row (4) with quotes in the middle of the text and the cell itself is quoted. This loads but drops some quotes. - /// - /// The proper way to express row 4 is by escaping the quote with another quote i.e. 
"" (See test DelimitedFlatFileDataFlowSource_ProperQuoteEscaping) - /// - [Test] - public void DelimitedFlatFileDataFlowSource_LoadDataWithQuotesInMiddle_IgnoreBadReads() + try { - if (File.Exists(filename)) - File.Delete(filename); - - StringBuilder sb = new StringBuilder(); - sb.AppendLine("CHI,Name,SomeInterestingFacts,Date"); - sb.AppendLine("0101010101,Dave,Dave is over 1000 years old,2001-01-05"); - sb.AppendLine("0101010101,Dave,Dave is \"over\" 1000 years old,2001-01-05"); - sb.AppendLine($"0101010101,Dave,\"Dave is {Environment.NewLine}over 1000 years old\",2001-01-05"); - sb.AppendLine("0101010101,Dave,\"Dave is \"over\" 1000 years old\",2001-01-05"); - sb.AppendLine("0101010101,Dave,Dave is over 1000 years old,2001-01-05"); + var chunk = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); + Assert.AreEqual(2, chunk.Rows.Count); + Assert.AreEqual("Dave is \"over\" 1000 years old", chunk.Rows[1][2]); + } + finally + { + source.Dispose(ThrowImmediatelyDataLoadEventListener.Quiet, null); + } + } - File.WriteAllText(filename, sb.ToString()); + /// + /// Test checks that IgnoreBadReads lets you load quotes in the middle of free text without having to set IgnoreQuotes to true: + /// 1. There is a row (2) with quotes in the middle which should get loaded correctly + /// 2. there's a row (4) with quotes in the middle of the text and the cell itself is quoted. This loads but drops some quotes. + /// + /// The proper way to express row 4 is by escaping the quote with another quote i.e. "" (See test DelimitedFlatFileDataFlowSource_ProperQuoteEscaping) + /// + [Test] + public void DelimitedFlatFileDataFlowSource_LoadDataWithQuotesInMiddle_IgnoreBadReads() + { + if (File.Exists(filename)) + File.Delete(filename); - var testFile = new FileInfo(filename); + var sb = new StringBuilder(); + sb.AppendLine("CHI,Name,SomeInterestingFacts,Date"); + sb.AppendLine("0101010101,Dave,Dave is over 1000 years old,2001-01-05"); + sb.AppendLine("0101010101,Dave,Dave is \"over\" 1000 years old,2001-01-05"); + sb.AppendLine($"0101010101,Dave,\"Dave is {Environment.NewLine}over 1000 years old\",2001-01-05"); + sb.AppendLine("0101010101,Dave,\"Dave is \"over\" 1000 years old\",2001-01-05"); + sb.AppendLine("0101010101,Dave,Dave is over 1000 years old,2001-01-05"); - DelimitedFlatFileDataFlowSource source = new DelimitedFlatFileDataFlowSource(); - source.PreInitialize(new FlatFileToLoad(testFile), new ThrowImmediatelyDataLoadEventListener()); - source.Separator = ","; - source.MaxBatchSize = 10000; + File.WriteAllText(filename, sb.ToString()); - source.StronglyTypeInput = true; //makes the source interpret the file types properly - source.BadDataHandlingStrategy = BadDataHandlingStrategy.ThrowException; - source.IgnoreBadReads = true; + var testFile = new FileInfo(filename); - try - { - var chunk = source.GetChunk(new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()); - Assert.AreEqual(5,chunk.Rows.Count); - Assert.AreEqual("Dave is \"over\" 1000 years old", chunk.Rows[1][2]); - Assert.AreEqual($"Dave is {Environment.NewLine}over 1000 years old", chunk.Rows[2][2]); - Assert.AreEqual("Dave is over\" 1000 years old\"", chunk.Rows[3][2]); //notice this line drops some of the quotes, we just have to live with that - } - finally - { - source.Dispose(new ThrowImmediatelyDataLoadEventListener(), null); - } - } + var source = new DelimitedFlatFileDataFlowSource(); + source.PreInitialize(new FlatFileToLoad(testFile), 
ThrowImmediatelyDataLoadEventListener.Quiet); + source.Separator = ","; + source.MaxBatchSize = 10000; + source.StronglyTypeInput = true; //makes the source interpret the file types properly + source.BadDataHandlingStrategy = BadDataHandlingStrategy.ThrowException; + source.IgnoreBadReads = true; - /// - /// Test checks that IgnoreBadReads doesn't cause serious errors (too many cells in row) to be ignored/swallowed - /// - [Test] - public void DelimitedFlatFileDataFlowSource_TrashFile_IgnoreBadReads() + try + { + var chunk = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); + Assert.AreEqual(5, chunk.Rows.Count); + Assert.AreEqual("Dave is \"over\" 1000 years old", chunk.Rows[1][2]); + Assert.AreEqual($"Dave is {Environment.NewLine}over 1000 years old", chunk.Rows[2][2]); + Assert.AreEqual("Dave is over\" 1000 years old\"", + chunk.Rows[3][2]); //notice this line drops some of the quotes, we just have to live with that + } + finally { - if (File.Exists(filename)) - File.Delete(filename); + source.Dispose(ThrowImmediatelyDataLoadEventListener.Quiet, null); + } + } - StringBuilder sb = new StringBuilder(); - sb.AppendLine("CHI,Name,SomeInterestingFacts,Date"); - sb.AppendLine("0101010101,Dave,Dave is over 1000 years old,2001-01-05"); - sb.AppendLine("0101010101,Dave,Da,,ve is \"over\" 1000 years old,2001-01-05"); - sb.AppendLine("0101010101\"Dave is \"over\" 1000 years old\",2001-01-05"); - sb.AppendLine("0101010101,Dave,Dave is over 1000 years old,2001-01-05"); - File.WriteAllText(filename, sb.ToString()); + /// + /// Test checks that IgnoreBadReads doesn't cause serious errors (too many cells in row) to be ignored/swallowed + /// + [Test] + public void DelimitedFlatFileDataFlowSource_TrashFile_IgnoreBadReads() + { + if (File.Exists(filename)) + File.Delete(filename); - var testFile = new FileInfo(filename); + var sb = new StringBuilder(); + sb.AppendLine("CHI,Name,SomeInterestingFacts,Date"); + sb.AppendLine("0101010101,Dave,Dave is over 1000 years old,2001-01-05"); + sb.AppendLine("0101010101,Dave,Da,,ve is \"over\" 1000 years old,2001-01-05"); + sb.AppendLine("0101010101\"Dave is \"over\" 1000 years old\",2001-01-05"); + sb.AppendLine("0101010101,Dave,Dave is over 1000 years old,2001-01-05"); - DelimitedFlatFileDataFlowSource source = new DelimitedFlatFileDataFlowSource(); - source.PreInitialize(new FlatFileToLoad(testFile), new ThrowImmediatelyDataLoadEventListener()); - source.Separator = ","; - source.MaxBatchSize = 10000; + File.WriteAllText(filename, sb.ToString()); - source.StronglyTypeInput = true; //makes the source interpret the file types properly - source.BadDataHandlingStrategy = BadDataHandlingStrategy.ThrowException; - source.IgnoreBadReads = true; + var testFile = new FileInfo(filename); - try - { - var ex = Assert.Throws(()=>source.GetChunk(new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken())); - Assert.AreEqual("Bad data found on line 3", ex.Message); + var source = new DelimitedFlatFileDataFlowSource(); + source.PreInitialize(new FlatFileToLoad(testFile), ThrowImmediatelyDataLoadEventListener.Quiet); + source.Separator = ","; + source.MaxBatchSize = 10000; - } - finally - { - source.Dispose(new ThrowImmediatelyDataLoadEventListener(), null); - } - } + source.StronglyTypeInput = true; //makes the source interpret the file types properly + source.BadDataHandlingStrategy = BadDataHandlingStrategy.ThrowException; + source.IgnoreBadReads = true; - [Test] - public void 
DelimitedFlatFileDataFlowSource_LoadDataWithQuotesInMiddle_WithMultiLineRecords() + try { - if (File.Exists(filename)) - File.Delete(filename); - - StringBuilder sb = new StringBuilder(); - sb.AppendLine("CHI,Name,SomeInterestingFacts,Date"); - sb.AppendLine("0101010101,Dave,Dave is over 1000 years old,2001-01-05"); - sb.AppendLine("0101010101,Dave,Dave is \"over\" 1000 years old,2001-01-05"); - sb.AppendLine(@"0101010101,Dave,""Dave is + var ex = Assert.Throws(() => + source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken())); + Assert.AreEqual("Bad data found on line 3", ex.Message); + } + finally + { + source.Dispose(ThrowImmediatelyDataLoadEventListener.Quiet, null); + } + } + + [Test] + public void DelimitedFlatFileDataFlowSource_LoadDataWithQuotesInMiddle_WithMultiLineRecords() + { + if (File.Exists(filename)) + File.Delete(filename); + + var sb = new StringBuilder(); + sb.AppendLine("CHI,Name,SomeInterestingFacts,Date"); + sb.AppendLine("0101010101,Dave,Dave is over 1000 years old,2001-01-05"); + sb.AppendLine("0101010101,Dave,Dave is \"over\" 1000 years old,2001-01-05"); + sb.AppendLine(@"0101010101,Dave,""Dave is ""over"" 1000 years old"",2001-01-05"); - File.WriteAllText(filename, sb.ToString()); + File.WriteAllText(filename, sb.ToString()); - var testFile = new FileInfo(filename); + var testFile = new FileInfo(filename); - DelimitedFlatFileDataFlowSource source = new DelimitedFlatFileDataFlowSource(); - source.PreInitialize(new FlatFileToLoad(testFile), new ThrowImmediatelyDataLoadEventListener()); - source.Separator = ","; - source.MaxBatchSize = 10000; - source.AttemptToResolveNewLinesInRecords = true; + var source = new DelimitedFlatFileDataFlowSource(); + source.PreInitialize(new FlatFileToLoad(testFile), ThrowImmediatelyDataLoadEventListener.Quiet); + source.Separator = ","; + source.MaxBatchSize = 10000; + source.AttemptToResolveNewLinesInRecords = true; - source.StronglyTypeInput = true; //makes the source interpret the file types properly - source.BadDataHandlingStrategy = BadDataHandlingStrategy.ThrowException; - try - { - var ex = Assert.Throws(() => source.GetChunk(new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken())); - Assert.AreEqual("Bad data found on line 3", ex.Message); - } - finally - { - source.Dispose(new ThrowImmediatelyDataLoadEventListener(), null); - } + source.StronglyTypeInput = true; //makes the source interpret the file types properly + source.BadDataHandlingStrategy = BadDataHandlingStrategy.ThrowException; + try + { + var ex = Assert.Throws(() => + source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken())); + Assert.AreEqual("Bad data found on line 3", ex.Message); } + finally + { + source.Dispose(ThrowImmediatelyDataLoadEventListener.Quiet, null); + } + } - [TestCase(BadDataHandlingStrategy.DivertRows)] - [TestCase(BadDataHandlingStrategy.IgnoreRows)] - [TestCase(BadDataHandlingStrategy.ThrowException)] - public void BadDataTestExtraColumns_ErrorIsOnLastLine(BadDataHandlingStrategy strategy) - { - if (File.Exists(filename)) - File.Delete(filename); + [TestCase(BadDataHandlingStrategy.DivertRows)] + [TestCase(BadDataHandlingStrategy.IgnoreRows)] + [TestCase(BadDataHandlingStrategy.ThrowException)] + public void BadDataTestExtraColumns_ErrorIsOnLastLine(BadDataHandlingStrategy strategy) + { + if (File.Exists(filename)) + File.Delete(filename); - StringBuilder sb = new StringBuilder(); + var sb = new StringBuilder(); - sb.AppendLine("CHI,StudyID,Date"); - 
sb.AppendLine("0101010101,5,2001-01-05"); - sb.AppendLine("0101010101,5,2001-01-05"); - sb.AppendLine("0101010101,5,2001-01-05"); - sb.AppendLine("0101010101,5,2001-01-05"); - sb.AppendLine("0101010101,5,2001-01-05,fish,watafak"); + sb.AppendLine("CHI,StudyID,Date"); + sb.AppendLine("0101010101,5,2001-01-05"); + sb.AppendLine("0101010101,5,2001-01-05"); + sb.AppendLine("0101010101,5,2001-01-05"); + sb.AppendLine("0101010101,5,2001-01-05"); + sb.AppendLine("0101010101,5,2001-01-05,fish,watafak"); - File.WriteAllText(filename, sb.ToString()); + File.WriteAllText(filename, sb.ToString()); - var testFile = new FileInfo(filename); + var testFile = new FileInfo(filename); - DelimitedFlatFileDataFlowSource source = new DelimitedFlatFileDataFlowSource(); - source.PreInitialize(new FlatFileToLoad(testFile), new ThrowImmediatelyDataLoadEventListener()); - source.Separator = ","; + var source = new DelimitedFlatFileDataFlowSource(); + source.PreInitialize(new FlatFileToLoad(testFile), ThrowImmediatelyDataLoadEventListener.Quiet); + source.Separator = ","; - source.MaxBatchSize = 10000; + source.MaxBatchSize = 10000; - source.StronglyTypeInput = true;//makes the source interpret the file types properly - source.BadDataHandlingStrategy = strategy; - try + source.StronglyTypeInput = true; //makes the source interpret the file types properly + source.BadDataHandlingStrategy = strategy; + try + { + switch (strategy) { - switch (strategy) - { - case BadDataHandlingStrategy.ThrowException: - var ex = Assert.Throws(() => source.GetChunk(new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken())); - StringAssert.Contains("line 6", ex.Message); - break; - case BadDataHandlingStrategy.IgnoreRows: - var dt = source.GetChunk(new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()); - Assert.IsNotNull(dt); - - Assert.AreEqual(4, dt.Rows.Count); - break; - case BadDataHandlingStrategy.DivertRows: - var dt2 = source.GetChunk(new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()); - Assert.AreEqual(4, dt2.Rows.Count); - - Assert.IsNotNull(source.EventHandlers.DivertErrorsFile); - - Assert.AreEqual("0101010101,5,2001-01-05,fish,watafak" + Environment.NewLine, File.ReadAllText(source.EventHandlers.DivertErrorsFile.FullName)); - - break; - default: - throw new ArgumentOutOfRangeException("strategy"); - } + case BadDataHandlingStrategy.ThrowException: + var ex = Assert.Throws(() => + source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken())); + StringAssert.Contains("line 6", ex.Message); + break; + case BadDataHandlingStrategy.IgnoreRows: + var dt = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, + new GracefulCancellationToken()); + Assert.IsNotNull(dt); + + Assert.AreEqual(4, dt.Rows.Count); + break; + case BadDataHandlingStrategy.DivertRows: + var dt2 = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, + new GracefulCancellationToken()); + Assert.AreEqual(4, dt2.Rows.Count); + + Assert.IsNotNull(source.EventHandlers.DivertErrorsFile); + + Assert.AreEqual($"0101010101,5,2001-01-05,fish,watafak{Environment.NewLine}", + File.ReadAllText(source.EventHandlers.DivertErrorsFile.FullName)); + + break; + default: + throw new ArgumentOutOfRangeException(nameof(strategy)); } - finally - { - source.Dispose(new ThrowImmediatelyDataLoadEventListener(), null); - } } - - [Test] - public void NewLinesInConstantString_EscapedCorrectly() + finally { - if (File.Exists(filename)) - File.Delete(filename); + 
source.Dispose(ThrowImmediatelyDataLoadEventListener.Quiet, null); + } + } - StringBuilder sb = new StringBuilder(); - sb.AppendLine("CHI,StudyID,Date"); - sb.AppendLine(@"0101010101,""5 + [Test] + public void NewLinesInConstantString_EscapedCorrectly() + { + if (File.Exists(filename)) + File.Delete(filename); + + var sb = new StringBuilder(); + sb.AppendLine("CHI,StudyID,Date"); + sb.AppendLine(@"0101010101,""5 The first"",2001-01-05"); - sb.AppendLine("0101010101,5,2001-01-05"); - sb.AppendLine("0101010101,5,2001-01-05"); - sb.AppendLine("0101010101,5,2001-01-05"); - sb.AppendLine("0101010101,5,2001-01-05"); + sb.AppendLine("0101010101,5,2001-01-05"); + sb.AppendLine("0101010101,5,2001-01-05"); + sb.AppendLine("0101010101,5,2001-01-05"); + sb.AppendLine("0101010101,5,2001-01-05"); - File.WriteAllText(filename, sb.ToString()); + File.WriteAllText(filename, sb.ToString()); - var testFile = new FileInfo(filename); + var testFile = new FileInfo(filename); - DelimitedFlatFileDataFlowSource source = new DelimitedFlatFileDataFlowSource(); - source.PreInitialize(new FlatFileToLoad(testFile), new ThrowImmediatelyDataLoadEventListener()); - source.Separator = ","; + var source = new DelimitedFlatFileDataFlowSource(); + source.PreInitialize(new FlatFileToLoad(testFile), ThrowImmediatelyDataLoadEventListener.Quiet); + source.Separator = ","; - source.MaxBatchSize = 10000; - source.StronglyTypeInput = true;//makes the source interpret the file types properly - - try - { - var dt = source.GetChunk(new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()); - Assert.IsNotNull(dt); - Assert.AreEqual(5, dt.Rows.Count); - Assert.AreEqual(@"5 - The first",dt.Rows[0][1]); - - } - finally - { - source.Dispose(new ThrowImmediatelyDataLoadEventListener(), null); - } - } + source.MaxBatchSize = 10000; + source.StronglyTypeInput = true; //makes the source interpret the file types properly - [TestCase(BadDataHandlingStrategy.ThrowException)] - [TestCase(BadDataHandlingStrategy.DivertRows)] - [TestCase(BadDataHandlingStrategy.IgnoreRows)] - public void NewLinesInConstantString_NotEscaped(BadDataHandlingStrategy strategy) + try { - if (File.Exists(filename)) - File.Delete(filename); + var dt = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); + Assert.IsNotNull(dt); + Assert.AreEqual(5, dt.Rows.Count); + Assert.AreEqual(@"5 + The first", dt.Rows[0][1]); + } + finally + { + source.Dispose(ThrowImmediatelyDataLoadEventListener.Quiet, null); + } + } - StringBuilder sb = new StringBuilder(); - sb.AppendLine("CHI,StudyID,Date"); - sb.AppendLine(@"0101010101,5 + [TestCase(BadDataHandlingStrategy.ThrowException)] + [TestCase(BadDataHandlingStrategy.DivertRows)] + [TestCase(BadDataHandlingStrategy.IgnoreRows)] + public void NewLinesInConstantString_NotEscaped(BadDataHandlingStrategy strategy) + { + if (File.Exists(filename)) + File.Delete(filename); + + var sb = new StringBuilder(); + sb.AppendLine("CHI,StudyID,Date"); + sb.AppendLine(@"0101010101,5 The first,2001-01-05"); - sb.AppendLine("0101010101,5,2001-01-05"); - sb.AppendLine("0101010101,5,2001-01-05"); - sb.AppendLine("0101010101,5,2001-01-05"); - sb.AppendLine("0101010101,5,2001-01-05"); + sb.AppendLine("0101010101,5,2001-01-05"); + sb.AppendLine("0101010101,5,2001-01-05"); + sb.AppendLine("0101010101,5,2001-01-05"); + sb.AppendLine("0101010101,5,2001-01-05"); - File.WriteAllText(filename, sb.ToString()); + File.WriteAllText(filename, sb.ToString()); - var testFile = new FileInfo(filename); + var 
testFile = new FileInfo(filename); - DelimitedFlatFileDataFlowSource source = new DelimitedFlatFileDataFlowSource(); - source.PreInitialize(new FlatFileToLoad(testFile), new ThrowImmediatelyDataLoadEventListener()); - source.Separator = ","; + var source = new DelimitedFlatFileDataFlowSource(); + source.PreInitialize(new FlatFileToLoad(testFile), ThrowImmediatelyDataLoadEventListener.Quiet); + source.Separator = ","; - source.MaxBatchSize = 10000; - source.StronglyTypeInput = true;//makes the source interpret the file types properly - source.BadDataHandlingStrategy = strategy; - try + source.MaxBatchSize = 10000; + source.StronglyTypeInput = true; //makes the source interpret the file types properly + source.BadDataHandlingStrategy = strategy; + try + { + switch (strategy) { - switch (strategy) - { - case BadDataHandlingStrategy.ThrowException: - var ex = Assert.Throws(() => source.GetChunk(new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken())); - StringAssert.Contains("line 2", ex.Message); - break; - case BadDataHandlingStrategy.IgnoreRows: - var dt = source.GetChunk(new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()); - Assert.IsNotNull(dt); - - Assert.AreEqual(4, dt.Rows.Count); - break; - case BadDataHandlingStrategy.DivertRows: - var dt2 = source.GetChunk(new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()); - Assert.AreEqual(4, dt2.Rows.Count); - - Assert.IsNotNull(source.EventHandlers.DivertErrorsFile); - - Assert.AreEqual(@"0101010101,5 + case BadDataHandlingStrategy.ThrowException: + var ex = Assert.Throws(() => + source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken())); + StringAssert.Contains("line 2", ex.Message); + break; + case BadDataHandlingStrategy.IgnoreRows: + var dt = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, + new GracefulCancellationToken()); + Assert.IsNotNull(dt); + + Assert.AreEqual(4, dt.Rows.Count); + break; + case BadDataHandlingStrategy.DivertRows: + var dt2 = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, + new GracefulCancellationToken()); + Assert.AreEqual(4, dt2.Rows.Count); + + Assert.IsNotNull(source.EventHandlers.DivertErrorsFile); + + Assert.AreEqual(@"0101010101,5 The first,2001-01-05 ", File.ReadAllText(source.EventHandlers.DivertErrorsFile.FullName)); - break; - default: - throw new ArgumentOutOfRangeException("strategy"); - } + break; + default: + throw new ArgumentOutOfRangeException(nameof(strategy)); } - finally - { - source.Dispose(new ThrowImmediatelyDataLoadEventListener(), null); - } } - - - [Test] - public void OverrideHeadersAndTab() + finally { - if (File.Exists(filename)) - File.Delete(filename); + source.Dispose(ThrowImmediatelyDataLoadEventListener.Quiet, null); + } + } + + + [Test] + public void OverrideHeadersAndTab() + { + if (File.Exists(filename)) + File.Delete(filename); + + var sb = new StringBuilder(); + sb.AppendLine("0101010101\t5\t2001-01-05"); + sb.AppendLine("0101010101\t5\t2001-01-05"); + File.WriteAllText(filename, sb.ToString()); - StringBuilder sb = new StringBuilder(); - sb.AppendLine("0101010101\t5\t2001-01-05"); - sb.AppendLine("0101010101\t5\t2001-01-05"); - File.WriteAllText(filename,sb.ToString()); + var testFile = new FileInfo(filename); - var testFile = new FileInfo(filename); + var source = new DelimitedFlatFileDataFlowSource(); + source.PreInitialize(new FlatFileToLoad(testFile), ThrowImmediatelyDataLoadEventListener.Quiet); + source.Separator = + "\\t"; //<-- 
Important this is the string value SLASH T not an actual escaped tab as C# understands it. This reflects the user pressing slash and t on his keyboard for the Separator argument in the UI + source.ForceHeaders = "CHI\tStudyID\tDate"; + source.MaxBatchSize = 10000; - DelimitedFlatFileDataFlowSource source = new DelimitedFlatFileDataFlowSource(); - source.PreInitialize(new FlatFileToLoad(testFile), new ThrowImmediatelyDataLoadEventListener()); - source.Separator = "\\t"; //<-- Important this is the string value SLASH T not an actual escaped tab as C# understands it. This reflects the user pressing slash and t on his keyboard for the Separator argument in the UI - source.ForceHeaders = "CHI\tStudyID\tDate"; - source.MaxBatchSize = 10000; + var dt = source.GetChunk(new ThrowImmediatelyDataLoadJob(), new GracefulCancellationToken()); - var dt = source.GetChunk(new ThrowImmediatelyDataLoadJob(), new GracefulCancellationToken()); + Assert.NotNull(dt); - Assert.NotNull(dt); + Assert.AreEqual(3, dt.Columns.Count); - Assert.AreEqual(3,dt.Columns.Count); + Assert.AreEqual("CHI", dt.Columns[0].ColumnName); + Assert.AreEqual("StudyID", dt.Columns[1].ColumnName); + Assert.AreEqual("Date", dt.Columns[2].ColumnName); - Assert.AreEqual("CHI", dt.Columns[0].ColumnName); - Assert.AreEqual("StudyID", dt.Columns[1].ColumnName); - Assert.AreEqual("Date", dt.Columns[2].ColumnName); + Assert.AreEqual(2, dt.Rows.Count); - Assert.AreEqual(2,dt.Rows.Count); + source.Dispose(new ThrowImmediatelyDataLoadJob(), null); - source.Dispose(new ThrowImmediatelyDataLoadJob(), null); + File.Delete(filename); + } + [Test] + public void Test_IgnoreColumns() + { + if (File.Exists(filename)) File.Delete(filename); - } - [Test] - public void Test_IgnoreColumns() - { - if (File.Exists(filename)) - File.Delete(filename); + var sb = new StringBuilder(); + sb.AppendLine("0101010101\t5\t2001-01-05\tomg\t"); + sb.AppendLine("0101010101\t5\t2001-01-05\tomg2\t"); + File.WriteAllText(filename, sb.ToString()); - StringBuilder sb = new StringBuilder(); - sb.AppendLine("0101010101\t5\t2001-01-05\tomg\t"); - sb.AppendLine("0101010101\t5\t2001-01-05\tomg2\t"); - File.WriteAllText(filename, sb.ToString()); - - var testFile = new FileInfo(filename); + var testFile = new FileInfo(filename); - DelimitedFlatFileDataFlowSource source = new DelimitedFlatFileDataFlowSource(); - source.PreInitialize(new FlatFileToLoad(testFile), new ThrowImmediatelyDataLoadEventListener()); - source.Separator = "\\t"; //<-- Important this is the string value SLASH T not an actual escaped tab as C# understands it. This reflects the user pressing slash and t on his keyboard for the Separator argument in the UI - source.ForceHeaders = "CHI\tStudyID\tDate\tSomeText"; - source.MaxBatchSize = 10000; - source.IgnoreColumns = "StudyID\tDate\t"; + var source = new DelimitedFlatFileDataFlowSource(); + source.PreInitialize(new FlatFileToLoad(testFile), ThrowImmediatelyDataLoadEventListener.Quiet); + source.Separator = + "\\t"; //<-- Important this is the string value SLASH T not an actual escaped tab as C# understands it. 
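As the comment here stresses, Separator is handed the two-character string backslash-t exactly as a user would type it in the UI, yet the assertions below expect three tab-separated columns, so the source must translate that typed sequence into a real tab before parsing. That translation is not shown in this patch; the helper below is a hypothetical sketch of the normalisation the tests imply, not the actual DelimitedFlatFileDataFlowSource code.

// Hypothetical: turn the user-typed two characters '\' + 't' into an actual tab.
private static string NormaliseSeparator(string typedByUser) =>
    typedByUser == "\\t" ? "\t" : typedByUser;

// NormaliseSeparator("\\t") returns "\t" (a single tab character), so a line such as
// "0101010101\t5\t2001-01-05" splits into the three columns the assertions expect.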
This reflects the user pressing slash and t on his keyboard for the Separator argument in the UI + source.ForceHeaders = "CHI\tStudyID\tDate\tSomeText"; + source.MaxBatchSize = 10000; + source.IgnoreColumns = "StudyID\tDate\t"; - var dt = source.GetChunk(new ThrowImmediatelyDataLoadJob(), new GracefulCancellationToken()); + var dt = source.GetChunk(new ThrowImmediatelyDataLoadJob(), new GracefulCancellationToken()); - Assert.NotNull(dt); + Assert.NotNull(dt); - //should only be one column (chi since we ignore study and date) - Assert.AreEqual(2, dt.Columns.Count); - Assert.AreEqual("CHI", dt.Columns[0].ColumnName); - Assert.AreEqual("SomeText", dt.Columns[1].ColumnName); + //should only be one column (chi since we ignore study and date) + Assert.AreEqual(2, dt.Columns.Count); + Assert.AreEqual("CHI", dt.Columns[0].ColumnName); + Assert.AreEqual("SomeText", dt.Columns[1].ColumnName); - Assert.AreEqual(2, dt.Rows.Count); + Assert.AreEqual(2, dt.Rows.Count); - source.Dispose(new ThrowImmediatelyDataLoadJob(), null); + source.Dispose(new ThrowImmediatelyDataLoadJob(), null); - File.Delete(filename); - - } + File.Delete(filename); + } - [TestCase("Fish In Barrel", "FishInBarrel")] - [TestCase("32 Fish In Barrel","_32FishInBarrel")]//Column names can't start with numbers so underscore prefix applies - [TestCase("once upon a time","onceUponATime")]//where spaces are removed cammel case the next symbol if it's a character - [TestCase("once _ upon a time", "once_UponATime")]//where spaces are removed cammel case the next symbol if it's a character - [TestCase("once#upon a", "onceuponA")] - [TestCase("once #upon", "onceUpon")] //Dodgy characters are stripped before cammel casing after spaces so 'u' gets cammeled even though it has a symbol before it. - public void TestMakingHeaderNamesSane(string bad, string expectedGood) - { - Assert.AreEqual(expectedGood,QuerySyntaxHelper.MakeHeaderNameSensible(bad)); - } + [TestCase("Fish In Barrel", "FishInBarrel")] + [TestCase("32 Fish In Barrel", + "_32FishInBarrel")] //Column names can't start with numbers so underscore prefix applies + [TestCase("once upon a time", + "onceUponATime")] //where spaces are removed cammel case the next symbol if it's a character + [TestCase("once _ upon a time", + "once_UponATime")] //where spaces are removed cammel case the next symbol if it's a character + [TestCase("once#upon a", "onceuponA")] + [TestCase("once #upon", + "onceUpon")] //Dodgy characters are stripped before cammel casing after spaces so 'u' gets cammeled even though it has a symbol before it. 
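The TestCase attributes above spell out the renaming rules: spaces are removed and the character that followed each space is upper-cased, characters such as '#' are stripped before that camel-casing happens, and a leading digit gets an underscore prefix because column names cannot start with a number. The sketch below is only an approximation that satisfies these six cases; the real logic lives in QuerySyntaxHelper.MakeHeaderNameSensible and may differ in detail.

using System.Linq;
using System.Text;

public static class HeaderNameSketch
{
    public static string MakeSensible(string header)
    {
        // 1. Strip 'dodgy' characters first (keep letters, digits, underscores and spaces).
        var cleaned = new string(header.Where(c => char.IsLetterOrDigit(c) || c == '_' || c == ' ').ToArray());

        // 2. Remove spaces, upper-casing whatever character follows a run of spaces.
        var sb = new StringBuilder();
        var upperNext = false;
        foreach (var c in cleaned)
        {
            if (c == ' ')
            {
                upperNext = true;
                continue;
            }

            sb.Append(upperNext ? char.ToUpper(c) : c);
            upperNext = false;
        }

        var result = sb.ToString();

        // 3. Column names cannot start with a number, so prefix an underscore.
        return result.Length > 0 && char.IsDigit(result[0]) ? $"_{result}" : result;
    }
}

Running the six inputs from the attributes through MakeSensible reproduces the expected outputs, including "_32FishInBarrel" and "once_UponATime".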
+ public void TestMakingHeaderNamesSane(string bad, string expectedGood) + { + Assert.AreEqual(expectedGood, QuerySyntaxHelper.MakeHeaderNameSensible(bad)); + } - [Test] - public void Test_ScientificNotation_StronglyTyped() - { - var f = Path.Combine(TestContext.CurrentContext.WorkDirectory,"meee.csv"); - - StringBuilder sb = new StringBuilder(); - - sb.AppendLine("test"); - - //1 scientific notation on first row (test is the header) - sb.AppendLine("-4.10235746055587E-05"); - - //500 lines of random stuff to force 2 batches - for (int i=0;i< DelimitedFlatFileDataFlowSource.MinimumStronglyTypeInputBatchSize; i++) - sb.AppendLine("5"); - - //a scientific notation in batch 2 - sb.AppendLine("-4.10235746055587E-05"); - - File.WriteAllText(f,sb.ToString()); - - DelimitedFlatFileDataFlowSource source = new DelimitedFlatFileDataFlowSource(); - source.PreInitialize(new FlatFileToLoad(new FileInfo(f)), new ThrowImmediatelyDataLoadEventListener()); - source.Separator = ","; - source.MaxBatchSize = DelimitedFlatFileDataFlowSource.MinimumStronglyTypeInputBatchSize; - source.StronglyTypeInputBatchSize = DelimitedFlatFileDataFlowSource.MinimumStronglyTypeInputBatchSize; - source.StronglyTypeInput = true; - - var dt = source.GetChunk(new ThrowImmediatelyDataLoadEventListener(),new GracefulCancellationToken()); - Assert.AreEqual(typeof(Decimal), dt.Columns.Cast().Single().DataType); - Assert.AreEqual(DelimitedFlatFileDataFlowSource.MinimumStronglyTypeInputBatchSize, dt.Rows.Count); - - dt = source.GetChunk(new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()); - Assert.AreEqual(typeof(Decimal), dt.Columns.Cast().Single().DataType); - Assert.AreEqual(2, dt.Rows.Count); - - - dt = source.GetChunk(new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()); - Assert.IsNull(dt); - } + [Test] + public void Test_ScientificNotation_StronglyTyped() + { + var f = Path.Combine(TestContext.CurrentContext.WorkDirectory, "meee.csv"); - /// - /// Depicts a case where quotes appear at the start of a string field - /// - [Test] - public void Test_IgnoreQuotes() - { - var f = Path.Combine(TestContext.CurrentContext.WorkDirectory,"talk.csv"); - - File.WriteAllText(f,@"Field1,Field2 + var sb = new StringBuilder(); + + sb.AppendLine("test"); + + //1 scientific notation on first row (test is the header) + sb.AppendLine("-4.10235746055587E-05"); + + //500 lines of random stuff to force 2 batches + for (var i = 0; i < DelimitedFlatFileDataFlowSource.MinimumStronglyTypeInputBatchSize; i++) + sb.AppendLine("5"); + + //a scientific notation in batch 2 + sb.AppendLine("-4.10235746055587E-05"); + + File.WriteAllText(f, sb.ToString()); + + var source = new DelimitedFlatFileDataFlowSource(); + source.PreInitialize(new FlatFileToLoad(new FileInfo(f)), ThrowImmediatelyDataLoadEventListener.Quiet); + source.Separator = ","; + source.MaxBatchSize = DelimitedFlatFileDataFlowSource.MinimumStronglyTypeInputBatchSize; + source.StronglyTypeInputBatchSize = DelimitedFlatFileDataFlowSource.MinimumStronglyTypeInputBatchSize; + source.StronglyTypeInput = true; + + var dt = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); + Assert.AreEqual(typeof(decimal), dt.Columns.Cast().Single().DataType); + Assert.AreEqual(DelimitedFlatFileDataFlowSource.MinimumStronglyTypeInputBatchSize, dt.Rows.Count); + + dt = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); + Assert.AreEqual(typeof(decimal), 
dt.Columns.Cast().Single().DataType); + Assert.AreEqual(2, dt.Rows.Count); + + + dt = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); + Assert.IsNull(dt); + } + + /// + /// Depicts a case where quotes appear at the start of a string field + /// + [Test] + public void Test_IgnoreQuotes() + { + var f = Path.Combine(TestContext.CurrentContext.WorkDirectory, "talk.csv"); + + File.WriteAllText(f, @"Field1,Field2 1,Watch out guys its Billie ""The Killer"" Cole 2,""The Killer""? I've heard of him hes a bad un"); - DelimitedFlatFileDataFlowSource source = new DelimitedFlatFileDataFlowSource(); - source.PreInitialize(new FlatFileToLoad(new FileInfo(f)), new ThrowImmediatelyDataLoadEventListener()); - source.Separator = ","; - source.MaxBatchSize = DelimitedFlatFileDataFlowSource.MinimumStronglyTypeInputBatchSize; - source.StronglyTypeInputBatchSize = DelimitedFlatFileDataFlowSource.MinimumStronglyTypeInputBatchSize; - source.StronglyTypeInput = true; - - var toMem = new ToMemoryDataLoadEventListener(true); - var ex = Assert.Throws(()=>source.GetChunk(toMem,new GracefulCancellationToken())); - Assert.AreEqual("Bad data found on line 2", ex.Message); - source.Dispose(new ThrowImmediatelyDataLoadEventListener(),null); - } + var source = new DelimitedFlatFileDataFlowSource(); + source.PreInitialize(new FlatFileToLoad(new FileInfo(f)), ThrowImmediatelyDataLoadEventListener.Quiet); + source.Separator = ","; + source.MaxBatchSize = DelimitedFlatFileDataFlowSource.MinimumStronglyTypeInputBatchSize; + source.StronglyTypeInputBatchSize = DelimitedFlatFileDataFlowSource.MinimumStronglyTypeInputBatchSize; + source.StronglyTypeInput = true; + + var toMem = new ToMemoryDataLoadEventListener(true); + var ex = Assert.Throws(() => source.GetChunk(toMem, new GracefulCancellationToken())); + Assert.AreEqual("Bad data found on line 2", ex.Message); + source.Dispose(ThrowImmediatelyDataLoadEventListener.Quiet, null); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/Sources/DelimitedFileSourceTestsBase.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/Sources/DelimitedFileSourceTestsBase.cs index b15aa19d05..aef541170b 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/Sources/DelimitedFileSourceTestsBase.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/Sources/DelimitedFileSourceTestsBase.cs @@ -11,66 +11,63 @@ using Rdmp.Core.DataFlowPipeline; using Rdmp.Core.DataFlowPipeline.Requirements; using Rdmp.Core.DataLoad.Modules.DataFlowSources; -using ReusableLibraryCode.Progress; +using Rdmp.Core.ReusableLibraryCode.Progress; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration.PipelineTests.Sources +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration.PipelineTests.Sources; + +[Category("Unit")] +public abstract class DelimitedFileSourceTestsBase { - [Category("Unit")] - public abstract class DelimitedFileSourceTestsBase + protected static FlatFileToLoad CreateTestFile(params string[] contents) { - protected FlatFileToLoad CreateTestFile(params string[] contents) - { - var filename = Path.Combine(TestContext.CurrentContext.TestDirectory, "DelimitedFileSourceTests.txt"); + var filename = Path.Combine(TestContext.CurrentContext.TestDirectory, "DelimitedFileSourceTests.txt"); - if (File.Exists(filename)) - File.Delete(filename); + if (File.Exists(filename)) + File.Delete(filename); - File.WriteAllLines(filename, contents); + File.WriteAllLines(filename, contents); - 
return new FlatFileToLoad(new FileInfo(filename)); - } + return new FlatFileToLoad(new FileInfo(filename)); + } - protected void AssertDivertFileIsExactly(string expectedContents) - { - var filename = Path.Combine(TestContext.CurrentContext.TestDirectory, "DelimitedFileSourceTests_Errors.txt"); + protected static void AssertDivertFileIsExactly(string expectedContents) + { + var filename = Path.Combine(TestContext.CurrentContext.TestDirectory, "DelimitedFileSourceTests_Errors.txt"); - if(!File.Exists(filename)) - Assert.Fail("No Divert file was generated at expected path " + filename); + if (!File.Exists(filename)) + Assert.Fail($"No Divert file was generated at expected path {filename}"); - var contents = File.ReadAllText(filename); - Assert.AreEqual(expectedContents, contents); - } + var contents = File.ReadAllText(filename); + Assert.AreEqual(expectedContents, contents); + } - protected DataTable RunGetChunk(FlatFileToLoad file,BadDataHandlingStrategy strategy, bool throwOnEmpty) - { - return RunGetChunk(file, s => - { - s.BadDataHandlingStrategy = strategy; - s.ThrowOnEmptyFiles = throwOnEmpty; - }); - } - - protected DataTable RunGetChunk(FlatFileToLoad file, Action adjust = null) + protected static DataTable RunGetChunk(FlatFileToLoad file, BadDataHandlingStrategy strategy, bool throwOnEmpty) + { + return RunGetChunk(file, s => { - DelimitedFlatFileDataFlowSource source = new DelimitedFlatFileDataFlowSource(); - source.PreInitialize(file, new ThrowImmediatelyDataLoadEventListener()); - source.Separator = ","; - source.StronglyTypeInput = true;//makes the source interpret the file types properly - source.StronglyTypeInputBatchSize = 100; - source.AttemptToResolveNewLinesInRecords = true; //maximise potential for conflicts - if (adjust != null) - adjust(source); + s.BadDataHandlingStrategy = strategy; + s.ThrowOnEmptyFiles = throwOnEmpty; + }); + } - try - { - return source.GetChunk(new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()); - } - finally - { - source.Dispose(new ThrowImmediatelyDataLoadEventListener(), null); - } + protected static DataTable RunGetChunk(FlatFileToLoad file, Action adjust = null) + { + var source = new DelimitedFlatFileDataFlowSource(); + source.PreInitialize(file, ThrowImmediatelyDataLoadEventListener.Quiet); + source.Separator = ","; + source.StronglyTypeInput = true; //makes the source interpret the file types properly + source.StronglyTypeInputBatchSize = 100; + source.AttemptToResolveNewLinesInRecords = true; //maximise potential for conflicts + adjust?.Invoke(source); + try + { + return source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()); + } + finally + { + source.Dispose(ThrowImmediatelyDataLoadEventListener.Quiet, null); } } } \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/Sources/DelimitedFileSourceTests_AutomaticallyResolved.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/Sources/DelimitedFileSourceTests_AutomaticallyResolved.cs index f4a672b9ee..9a997c7db9 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/Sources/DelimitedFileSourceTests_AutomaticallyResolved.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/Sources/DelimitedFileSourceTests_AutomaticallyResolved.cs @@ -8,165 +8,164 @@ using System.Data; using NUnit.Framework; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration.PipelineTests.Sources +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration.PipelineTests.Sources; 
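Several of the files touched here are converted from block-scoped namespaces to C# 10 file-scoped namespace declarations, which is why whole classes appear re-indented in the diff. The two forms are equivalent; only the indentation changes:

// Block-scoped form being removed (every member sits one level deeper):
//
//   namespace Rdmp.Core.Tests.DataLoad.Engine.Integration.PipelineTests.Sources
//   {
//       public class Example { }
//   }
//
// File-scoped form being introduced - same namespace, one less indent level:
namespace Rdmp.Core.Tests.DataLoad.Engine.Integration.PipelineTests.Sources;

public class Example
{
}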
+ +public class DelimitedFileSourceTests_AutomaticallyResolved : DelimitedFileSourceTestsBase { - public class DelimitedFileSourceTests_AutomaticallyResolved : DelimitedFileSourceTestsBase + [Test] + public void Test_ValuesSpradAcrossMultipleRows() + { + var file = CreateTestFile( + "A,B,C,D", + "1,17,Beginning of a long,37.00", + ",,description of something,", + ",,really boring," + ); + + var dt = RunGetChunk(file); + Assert.AreEqual(3, dt.Rows.Count); + Assert.AreEqual(true, dt.Rows[0]["A"]); + Assert.AreEqual(17, dt.Rows[0]["B"]); + Assert.AreEqual("Beginning of a long", dt.Rows[0]["C"]); + Assert.AreEqual(37, dt.Rows[0]["D"]); + + + Assert.AreEqual(DBNull.Value, dt.Rows[1]["A"]); + Assert.AreEqual(DBNull.Value, dt.Rows[1]["B"]); + Assert.AreEqual("description of something", dt.Rows[1]["C"]); + Assert.AreEqual(DBNull.Value, dt.Rows[1]["D"]); + + + Assert.AreEqual(DBNull.Value, dt.Rows[2]["A"]); + Assert.AreEqual(DBNull.Value, dt.Rows[2]["B"]); + Assert.AreEqual("really boring", dt.Rows[2]["C"]); + Assert.AreEqual(DBNull.Value, dt.Rows[2]["D"]); + } + + [Test] + public void NewLineInFile_Ignored() + { + var file = CreateTestFile( + "Name,Dob", + "Frank,2001-01-01", + "", + "Herbert,2002-01-01" + ); + + var dt = RunGetChunk(file); + Assert.AreEqual(2, dt.Rows.Count); + Assert.AreEqual("Frank", dt.Rows[0]["Name"]); + Assert.AreEqual("Herbert", dt.Rows[1]["Name"]); + } + + [Test] + public void NewLineInFile_RespectedWhenQuoted() { - [Test] - public void Test_ValuesSpradAcrossMultipleRows() - { - - var file = CreateTestFile( - "A,B,C,D", - "1,17,Beginning of a long,37.00", - ",,description of something,", - ",,really boring," - ); - - var dt = RunGetChunk(file); - Assert.AreEqual(3,dt.Rows.Count); - Assert.AreEqual(true, dt.Rows[0]["A"]); - Assert.AreEqual(17, dt.Rows[0]["B"]); - Assert.AreEqual("Beginning of a long", dt.Rows[0]["C"]); - Assert.AreEqual(37, dt.Rows[0]["D"]); - - - Assert.AreEqual(DBNull.Value, dt.Rows[1]["A"]); - Assert.AreEqual(DBNull.Value, dt.Rows[1]["B"]); - Assert.AreEqual("description of something", dt.Rows[1]["C"]); - Assert.AreEqual(DBNull.Value, dt.Rows[1]["D"]); - - - Assert.AreEqual(DBNull.Value, dt.Rows[2]["A"]); - Assert.AreEqual(DBNull.Value, dt.Rows[2]["B"]); - Assert.AreEqual("really boring", dt.Rows[2]["C"]); - Assert.AreEqual(DBNull.Value, dt.Rows[2]["D"]); - } - - [Test] - public void NewLineInFile_Ignored() - { - var file = CreateTestFile( - "Name,Dob", - "Frank,2001-01-01", - "", - "Herbert,2002-01-01" - ); - - var dt = RunGetChunk(file); - Assert.AreEqual(2,dt.Rows.Count); - Assert.AreEqual("Frank", dt.Rows[0]["Name"]); - Assert.AreEqual("Herbert", dt.Rows[1]["Name"]); - } - - [Test] - public void NewLineInFile_RespectedWhenQuoted() - { - var file = CreateTestFile( - @"Name,Dob,Description + var file = CreateTestFile( + @"Name,Dob,Description Frank,2001-01-01,""Frank is the best ever"" Herbert,2002-01-01,Hey" - ); + ); - var dt = RunGetChunk(file); - Assert.AreEqual(2,dt.Rows.Count); - Assert.AreEqual("Frank", dt.Rows[0]["Name"]); - Assert.AreEqual(@"Frank is + var dt = RunGetChunk(file); + Assert.AreEqual(2, dt.Rows.Count); + Assert.AreEqual("Frank", dt.Rows[0]["Name"]); + Assert.AreEqual(@"Frank is the best ever", dt.Rows[0]["Description"]); - Assert.AreEqual("Herbert", dt.Rows[1]["Name"]); - - } - - [TestCase("")] - [TestCase(" ")] - [TestCase("\" \"")] - [TestCase("null")] - [TestCase("NULL ")] - public void NullCellValues_ToDbNull(string nullstring) - { - var file = CreateTestFile( - "Name,Dob", - string.Format("{0},2001-01-01",nullstring), - "", 
- string.Format("Herbert ,{0}",nullstring) - ); - - var dt = RunGetChunk(file); - Assert.AreEqual(2, dt.Rows.Count); - Assert.AreEqual(DBNull.Value, dt.Rows[0]["Name"]); - Assert.AreEqual("Herbert", dt.Rows[1]["Name"]); - Assert.AreEqual(DBNull.Value, dt.Rows[1]["Dob"]); - } - - - [TestCase(",,")] - [TestCase("NULL,NULL,NULL")] - [TestCase("NULL,,null")] - [TestCase("NULL , null ,")] - public void TrailingNulls_InRows(string nullSuffix) - { - var file = CreateTestFile( + Assert.AreEqual("Herbert", dt.Rows[1]["Name"]); + } + + [TestCase("")] + [TestCase(" ")] + [TestCase("\" \"")] + [TestCase("null")] + [TestCase("NULL ")] + public void NullCellValues_ToDbNull(string nullstring) + { + var file = CreateTestFile( + "Name,Dob", + $"{nullstring},2001-01-01", + "", + $"Herbert ,{nullstring}" + ); + + var dt = RunGetChunk(file); + Assert.AreEqual(2, dt.Rows.Count); + Assert.AreEqual(DBNull.Value, dt.Rows[0]["Name"]); + Assert.AreEqual("Herbert", dt.Rows[1]["Name"]); + Assert.AreEqual(DBNull.Value, dt.Rows[1]["Dob"]); + } + + + [TestCase(",,")] + [TestCase("NULL,NULL,NULL")] + [TestCase("NULL,,null")] + [TestCase("NULL , null ,")] + public void TrailingNulls_InRows(string nullSuffix) + { + var file = CreateTestFile( "CHI,StudyID,Date", "0101010101,5,2001-01-05", "0101010101,5,2001-01-05", - "0101010101,5,2001-01-05" + nullSuffix, //Row has trailing nulls in it which get ignored + $"0101010101,5,2001-01-05{nullSuffix}", //Row has trailing nulls in it which get ignored + "0101010101,5,2001-01-05"); + + var dt = RunGetChunk(file); + Assert.AreEqual(4, dt.Rows.Count); + Assert.AreEqual(3, dt.Columns.Count); + } + + [Test] + public void TrailingNulls_InHeader() + { + var file = CreateTestFile( + "CHI ,StudyID,Date,,", + //Row has trailing null headers, these get ignored + "0101010101,5,2001-01-05", + "0101010101,5,2001-01-05", + "0101010101,5,2001-01-05", "0101010101,5,2001-01-05"); - var dt = RunGetChunk(file); - Assert.AreEqual(4, dt.Rows.Count); - Assert.AreEqual(3, dt.Columns.Count); - } - [Test] - public void TrailingNulls_InHeader() - { - var file = CreateTestFile( - "CHI ,StudyID,Date,,", - //Row has trailing null headers, these get ignored - "0101010101,5,2001-01-05", - "0101010101,5,2001-01-05", - "0101010101,5,2001-01-05", - "0101010101,5,2001-01-05"); - - var dt = RunGetChunk(file); - Assert.IsNotNull(dt); - Assert.AreEqual(4, dt.Rows.Count); - Assert.AreEqual(3, dt.Columns.Count); //and therefore do not appear in the output table - Assert.AreEqual("CHI", dt.Columns[0].ColumnName); - Assert.AreEqual("StudyID", dt.Columns[1].ColumnName); - Assert.AreEqual("Date", dt.Columns[2].ColumnName); - } - - [TestCase(true)] - [TestCase(false)] - public void NullHeader_InMiddleOfColumns(bool forceHeaders) - { - var file = CreateTestFile( - "CHI ,,StudyID,Date,,", - //Row has trailing null headers, these get ignored but the one in the middle must be maintained to prevent cell read errors/mismatch - - "0101010101,,5,2001-01-05", - "0101010101,,5,2001-01-05", - "0101010101,,5,2001-01-05", - "0101010101,,5,2001-01-05"); //note that if you put any values in these empty column it is BadData - - - DataTable dt; - if (forceHeaders) - dt = RunGetChunk(file,s=> { s.ForceHeaders = "CHI ,,StudyID,Date,,"; - s.ForceHeadersReplacesFirstLineInFile = true; - }); - else - dt = RunGetChunk(file); - - Assert.IsNotNull(dt); - Assert.AreEqual(4, dt.Rows.Count); - Assert.AreEqual(3, dt.Columns.Count); - Assert.AreEqual("CHI", dt.Columns[0].ColumnName); - Assert.AreEqual("StudyID", dt.Columns[1].ColumnName); - 
Assert.AreEqual("Date", dt.Columns[2].ColumnName); - } + var dt = RunGetChunk(file); + Assert.IsNotNull(dt); + Assert.AreEqual(4, dt.Rows.Count); + Assert.AreEqual(3, dt.Columns.Count); //and therefore do not appear in the output table + Assert.AreEqual("CHI", dt.Columns[0].ColumnName); + Assert.AreEqual("StudyID", dt.Columns[1].ColumnName); + Assert.AreEqual("Date", dt.Columns[2].ColumnName); + } + + [TestCase(true)] + [TestCase(false)] + public void NullHeader_InMiddleOfColumns(bool forceHeaders) + { + var file = CreateTestFile( + "CHI ,,StudyID,Date,,", + //Row has trailing null headers, these get ignored but the one in the middle must be maintained to prevent cell read errors/mismatch + "0101010101,,5,2001-01-05", + "0101010101,,5,2001-01-05", + "0101010101,,5,2001-01-05", + "0101010101,,5,2001-01-05"); //note that if you put any values in these empty column it is BadData + + + DataTable dt; + if (forceHeaders) + dt = RunGetChunk(file, s => + { + s.ForceHeaders = "CHI ,,StudyID,Date,,"; + s.ForceHeadersReplacesFirstLineInFile = true; + }); + else + dt = RunGetChunk(file); + + Assert.IsNotNull(dt); + Assert.AreEqual(4, dt.Rows.Count); + Assert.AreEqual(3, dt.Columns.Count); + Assert.AreEqual("CHI", dt.Columns[0].ColumnName); + Assert.AreEqual("StudyID", dt.Columns[1].ColumnName); + Assert.AreEqual("Date", dt.Columns[2].ColumnName); } } \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/Sources/DelimitedFileSourceTests_ResolvedAccordingToStrategy.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/Sources/DelimitedFileSourceTests_ResolvedAccordingToStrategy.cs index 2e65d8929f..a047af9c02 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/Sources/DelimitedFileSourceTests_ResolvedAccordingToStrategy.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/Sources/DelimitedFileSourceTests_ResolvedAccordingToStrategy.cs @@ -9,347 +9,366 @@ using Rdmp.Core.DataLoad.Modules.DataFlowSources; using Rdmp.Core.DataLoad.Modules.Exceptions; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration.PipelineTests.Sources +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration.PipelineTests.Sources; + +public class DelimitedFileSourceTests_ResolvedAccordingToStrategy : DelimitedFileSourceTestsBase { - public class DelimitedFileSourceTests_ResolvedAccordingToStrategy : DelimitedFileSourceTestsBase + [TestCase(true)] + [TestCase(false)] + public void EmptyFile_TotallyEmpty(bool throwOnEmpty) { - [TestCase(true)] - [TestCase(false)] - public void EmptyFile_TotallyEmpty(bool throwOnEmpty) + var file = CreateTestFile(); //create completely empty file + + if (throwOnEmpty) { - var file = CreateTestFile(); //create completely empty file - - if (throwOnEmpty) - { - var ex = Assert.Throws(() => RunGetChunk(file, BadDataHandlingStrategy.ThrowException, throwOnEmpty)); - Assert.AreEqual("File DelimitedFileSourceTests.txt is empty", ex.Message); - } - else - { - Assert.IsNull(RunGetChunk(file, BadDataHandlingStrategy.ThrowException, throwOnEmpty)); - } + var ex = Assert.Throws(() => + RunGetChunk(file, BadDataHandlingStrategy.ThrowException, true)); + Assert.AreEqual("File DelimitedFileSourceTests.txt is empty", ex?.Message); } - - [TestCase(true)] - [TestCase(false)] - public void EmptyFile_AllWhitespace(bool throwOnEmpty) + else { - var file = CreateTestFile(@" + Assert.IsNull(RunGetChunk(file, BadDataHandlingStrategy.ThrowException, false)); + } + } + + [TestCase(true)] + [TestCase(false)] + public void 
EmptyFile_AllWhitespace(bool throwOnEmpty) + { + var file = CreateTestFile(@" "); - if(throwOnEmpty) - { - var ex = Assert.Throws(() => RunGetChunk(file, BadDataHandlingStrategy.ThrowException, throwOnEmpty)); - StringAssert.StartsWith("File DelimitedFileSourceTests.txt is empty", ex.Message); - } - else - { - Assert.IsNull(RunGetChunk(file, BadDataHandlingStrategy.ThrowException,throwOnEmpty)); - } + if (throwOnEmpty) + { + var ex = Assert.Throws(() => + RunGetChunk(file, BadDataHandlingStrategy.ThrowException, true)); + StringAssert.StartsWith("File DelimitedFileSourceTests.txt is empty", ex?.Message); } + else + { + Assert.IsNull(RunGetChunk(file, BadDataHandlingStrategy.ThrowException, false)); + } + } - [TestCase(true)] - [TestCase(false)] - public void EmptyFile_HeaderOnly(bool throwOnEmpty) - { - var file = CreateTestFile(@"Name,Address + [TestCase(true)] + [TestCase(false)] + public void EmptyFile_HeaderOnly(bool throwOnEmpty) + { + var file = CreateTestFile(@"Name,Address "); - if (throwOnEmpty) - { - var ex = Assert.Throws(() => RunGetChunk(file, s=>s.ThrowOnEmptyFiles = true)); - Assert.AreEqual("File DelimitedFileSourceTests.txt is empty", ex.Message); - } - else - { - Assert.IsNull(RunGetChunk(file,s => s.ThrowOnEmptyFiles = false)); - } + if (throwOnEmpty) + { + var ex = Assert.Throws(() => RunGetChunk(file, s => s.ThrowOnEmptyFiles = true)); + Assert.AreEqual("File DelimitedFileSourceTests.txt is empty", ex.Message); } - - [TestCase(true)] - [TestCase(false)] - public void EmptyFile_ForceHeader(bool throwOnEmpty) + else { - var file = CreateTestFile(@"Name,Address + Assert.IsNull(RunGetChunk(file, s => s.ThrowOnEmptyFiles = false)); + } + } + + [TestCase(true)] + [TestCase(false)] + public void EmptyFile_ForceHeader(bool throwOnEmpty) + { + var file = CreateTestFile(@"Name,Address "); - - if (throwOnEmpty) - { - var ex = Assert.Throws(() => RunGetChunk(file, - s =>{ s.ThrowOnEmptyFiles = true; s.ForceHeaders="Name,Address"; s.ForceHeadersReplacesFirstLineInFile = true;})); - Assert.AreEqual("File DelimitedFileSourceTests.txt is empty", ex.Message); - } - else - { - Assert.IsNull(RunGetChunk(file, - s =>{ s.ThrowOnEmptyFiles = false; s.ForceHeaders="Name,Address"; s.ForceHeadersReplacesFirstLineInFile = true;})); - } - } - [TestCase(BadDataHandlingStrategy.DivertRows)] - [TestCase(BadDataHandlingStrategy.ThrowException)] - [TestCase(BadDataHandlingStrategy.IgnoreRows)] - public void BadCSV_TooManyCellsInRow(BadDataHandlingStrategy strategy) + if (throwOnEmpty) { - var file = CreateTestFile( - "Name,Description,Age", - "Frank,Is the greatest,100", - "Bob,He's also dynamite, seen him do a lot of good work,30", - "Dennis,Hes ok,35"); - - switch (strategy) - { - case BadDataHandlingStrategy.ThrowException: - var ex = Assert.Throws(() => RunGetChunk(file, strategy, true)); - StringAssert.StartsWith("Bad data found on line 3", ex.Message); - break; - case BadDataHandlingStrategy.IgnoreRows: - var dt = RunGetChunk(file, strategy, true); - Assert.AreEqual(2,dt.Rows.Count); - break; - case BadDataHandlingStrategy.DivertRows: - var dt2 = RunGetChunk(file, strategy, true); - Assert.AreEqual(2,dt2.Rows.Count); - - AssertDivertFileIsExactly("Bob,He's also dynamite, seen him do a lot of good work,30" +Environment.NewLine); - - break; - default: - throw new ArgumentOutOfRangeException("strategy"); - } + var ex = Assert.Throws(() => RunGetChunk(file, + s => + { + s.ThrowOnEmptyFiles = true; + s.ForceHeaders = "Name,Address"; + s.ForceHeadersReplacesFirstLineInFile = true; + })); + 
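Note the switch from ex.Message to ex?.Message on some Assert.Throws results in the surrounding hunks: the return value is treated as potentially null, so a missing exception shows up as a value mismatch in the following assertion rather than as a NullReferenceException. A minimal NUnit illustration of the pattern (hypothetical test, not taken from the patch):

using System;
using NUnit.Framework;

[TestFixture]
internal class NullConditionalAssertSketch
{
    [Test]
    public void MessageIsChecked()
    {
        var ex = Assert.Throws<InvalidOperationException>(() => throw new InvalidOperationException("boom"));

        // If ex were ever null, this still fails as a value mismatch
        // instead of crashing with a NullReferenceException.
        Assert.AreEqual("boom", ex?.Message);
    }
}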
Assert.AreEqual("File DelimitedFileSourceTests.txt is empty", ex.Message); } - - [TestCase(BadDataHandlingStrategy.DivertRows)] - [TestCase(BadDataHandlingStrategy.ThrowException)] - [TestCase(BadDataHandlingStrategy.IgnoreRows)] - public void BadCSV_TooManyCellsInRow_TwoBadRows(BadDataHandlingStrategy strategy) + else { - var file = CreateTestFile( - "Name,Description,Age", - "Frank,Is the greatest,100", - "Frank,Is the greatest,100,Frank,Is the greatest,100", //input file has 2 lines stuck together, these should appear in divert file exactly as the input file - "Bob,He's also dynamite, seen him do a lot of good work,30", // has too many cells, should appear - "Bob2,He's also dynamite2, seen him do a lot of good work2,30", // aso has too many cells, should appear - "Dennis,Hes ok,35"); - - switch (strategy) - { - case BadDataHandlingStrategy.ThrowException: - var ex = Assert.Throws(() => RunGetChunk(file, strategy, true)); - StringAssert.StartsWith("Bad data found on line 3", ex.Message); - break; - case BadDataHandlingStrategy.IgnoreRows: - var dt = RunGetChunk(file, strategy, true); - Assert.AreEqual(2,dt.Rows.Count); - break; - case BadDataHandlingStrategy.DivertRows: - var dt2 = RunGetChunk(file, strategy, true); - Assert.AreEqual(2,dt2.Rows.Count); - - AssertDivertFileIsExactly("Frank,Is the greatest,100,Frank,Is the greatest,100" + Environment.NewLine + "Bob,He's also dynamite, seen him do a lot of good work,30" +Environment.NewLine + "Bob2,He's also dynamite2, seen him do a lot of good work2,30" +Environment.NewLine); - - break; - default: - throw new ArgumentOutOfRangeException("strategy"); - } + Assert.IsNull(RunGetChunk(file, + s => + { + s.ThrowOnEmptyFiles = false; + s.ForceHeaders = "Name,Address"; + s.ForceHeadersReplacesFirstLineInFile = true; + })); } - - [TestCase(BadDataHandlingStrategy.DivertRows,true)] - [TestCase(BadDataHandlingStrategy.ThrowException,false)] - [TestCase(BadDataHandlingStrategy.ThrowException,true)] - [TestCase(BadDataHandlingStrategy.IgnoreRows,false)] - public void BadCSV_TooFewCellsInRow(BadDataHandlingStrategy strategy,bool tryToResolve) + } + + [TestCase(BadDataHandlingStrategy.DivertRows)] + [TestCase(BadDataHandlingStrategy.ThrowException)] + [TestCase(BadDataHandlingStrategy.IgnoreRows)] + public void BadCSV_TooManyCellsInRow(BadDataHandlingStrategy strategy) + { + var file = CreateTestFile( + "Name,Description,Age", + "Frank,Is the greatest,100", + "Bob,He's also dynamite, seen him do a lot of good work,30", + "Dennis,Hes ok,35"); + + switch (strategy) { - var file = CreateTestFile( - "Name,Description,Age", - "Frank,Is the greatest,100", - "", - "Other People To Investigate", - "Dennis,Hes ok,35"); - - Action adjust = (a) => - { - a.BadDataHandlingStrategy = strategy; - a.AttemptToResolveNewLinesInRecords = tryToResolve; - a.ThrowOnEmptyFiles = true; - }; - - switch (strategy) - { - case BadDataHandlingStrategy.ThrowException: - var ex = Assert.Throws(() => RunGetChunk(file, adjust)); - StringAssert.StartsWith("Bad data found on line 4", ex.Message); - break; - case BadDataHandlingStrategy.IgnoreRows: - var dt = RunGetChunk(file, adjust); - Assert.AreEqual(2, dt.Rows.Count); - break; - case BadDataHandlingStrategy.DivertRows: - var dt2 = RunGetChunk(file, adjust); - Assert.AreEqual(2, dt2.Rows.Count); - - AssertDivertFileIsExactly("Other People To Investigate" + Environment.NewLine); - - break; - default: - throw new ArgumentOutOfRangeException("strategy"); - } + case BadDataHandlingStrategy.ThrowException: + var ex = Assert.Throws(() => 
RunGetChunk(file, strategy, true)); + StringAssert.StartsWith("Bad data found on line 3", ex.Message); + break; + case BadDataHandlingStrategy.IgnoreRows: + var dt = RunGetChunk(file, strategy, true); + Assert.AreEqual(2, dt.Rows.Count); + break; + case BadDataHandlingStrategy.DivertRows: + var dt2 = RunGetChunk(file, strategy, true); + Assert.AreEqual(2, dt2.Rows.Count); + + AssertDivertFileIsExactly( + $"Bob,He's also dynamite, seen him do a lot of good work,30{Environment.NewLine}"); + + break; + default: + throw new ArgumentOutOfRangeException(nameof(strategy)); } + } - [TestCase(BadDataHandlingStrategy.DivertRows, true)] - [TestCase(BadDataHandlingStrategy.ThrowException, false)] - [TestCase(BadDataHandlingStrategy.ThrowException, true)] - [TestCase(BadDataHandlingStrategy.IgnoreRows, false)] - public void BadCSV_TooFewColumnsOnLastLine(BadDataHandlingStrategy strategy, bool tryToResolve) + [TestCase(BadDataHandlingStrategy.DivertRows)] + [TestCase(BadDataHandlingStrategy.ThrowException)] + [TestCase(BadDataHandlingStrategy.IgnoreRows)] + public void BadCSV_TooManyCellsInRow_TwoBadRows(BadDataHandlingStrategy strategy) + { + var file = CreateTestFile( + "Name,Description,Age", + "Frank,Is the greatest,100", + "Frank,Is the greatest,100,Frank,Is the greatest,100", //input file has 2 lines stuck together, these should appear in divert file exactly as the input file + "Bob,He's also dynamite, seen him do a lot of good work,30", // has too many cells, should appear + "Bob2,He's also dynamite2, seen him do a lot of good work2,30", // aso has too many cells, should appear + "Dennis,Hes ok,35"); + + switch (strategy) { - var file = CreateTestFile( - "Name,Description,Age", - "Frank,Is the greatest,100", - "Bob"); - - Action adjust = (a) => - { - a.BadDataHandlingStrategy = strategy; - a.AttemptToResolveNewLinesInRecords = tryToResolve; - a.ThrowOnEmptyFiles = true; - }; - - switch (strategy) - { - case BadDataHandlingStrategy.ThrowException: - var ex = Assert.Throws(() => RunGetChunk(file, adjust)); - StringAssert.StartsWith("Bad data found on line 3", ex.Message); - break; - case BadDataHandlingStrategy.IgnoreRows: - var dt = RunGetChunk(file, adjust); - Assert.AreEqual(1, dt.Rows.Count); - break; - case BadDataHandlingStrategy.DivertRows: - var dt2 = RunGetChunk(file, adjust); - Assert.AreEqual(1, dt2.Rows.Count); - - AssertDivertFileIsExactly("Bob"+Environment.NewLine); - - break; - default: - throw new ArgumentOutOfRangeException("strategy"); - } + case BadDataHandlingStrategy.ThrowException: + var ex = Assert.Throws(() => RunGetChunk(file, strategy, true)); + StringAssert.StartsWith("Bad data found on line 3", ex.Message); + break; + case BadDataHandlingStrategy.IgnoreRows: + var dt = RunGetChunk(file, strategy, true); + Assert.AreEqual(2, dt.Rows.Count); + break; + case BadDataHandlingStrategy.DivertRows: + var dt2 = RunGetChunk(file, strategy, true); + Assert.AreEqual(2, dt2.Rows.Count); + + AssertDivertFileIsExactly( + $"Frank,Is the greatest,100,Frank,Is the greatest,100{Environment.NewLine}Bob,He's also dynamite, seen him do a lot of good work,30{Environment.NewLine}Bob2,He's also dynamite2, seen him do a lot of good work2,30{Environment.NewLine}"); + + break; + default: + throw new ArgumentOutOfRangeException(nameof(strategy)); } + } - [Test] - public void BadCSV_FreeTextMiddleColumn() + [TestCase(BadDataHandlingStrategy.DivertRows, true)] + [TestCase(BadDataHandlingStrategy.ThrowException, false)] + [TestCase(BadDataHandlingStrategy.ThrowException, true)] + 
[TestCase(BadDataHandlingStrategy.IgnoreRows, false)] + public void BadCSV_TooFewCellsInRow(BadDataHandlingStrategy strategy, bool tryToResolve) + { + var file = CreateTestFile( + "Name,Description,Age", + "Frank,Is the greatest,100", + "", + "Other People To Investigate", + "Dennis,Hes ok,35"); + + void Adjust(DelimitedFlatFileDataFlowSource a) { - //This is recoverable - var file = CreateTestFile( - "Name,Description,Age", - "Frank,Is the greatest,100", - @"Bob,He's -not too bad -to be honest,20", - "Dennis,Hes ok,35"); - - var dt = RunGetChunk(file,s=> { s.AttemptToResolveNewLinesInRecords = true; }); - Assert.AreEqual(3, dt.Rows.Count); - Assert.AreEqual($"He's{Environment.NewLine}not too bad{Environment.NewLine}to be honest", dt.Rows[1][1]); + a.BadDataHandlingStrategy = strategy; + a.AttemptToResolveNewLinesInRecords = tryToResolve; + a.ThrowOnEmptyFiles = true; } - [Test] - public void BadCSV_FreeTextFirstColumn() + switch (strategy) { - var file = CreateTestFile( - "Description,Name,Age", - "Is the greatest,Frank,100", - @"He's -not too bad -to be honest,Bob,20", - "Hes ok,Dennis,35"); + case BadDataHandlingStrategy.ThrowException: + var ex = Assert.Throws(() => RunGetChunk(file, Adjust)); + StringAssert.StartsWith("Bad data found on line 4", ex.Message); + break; + case BadDataHandlingStrategy.IgnoreRows: + var dt = RunGetChunk(file, Adjust); + Assert.AreEqual(2, dt.Rows.Count); + break; + case BadDataHandlingStrategy.DivertRows: + var dt2 = RunGetChunk(file, Adjust); + Assert.AreEqual(2, dt2.Rows.Count); + + AssertDivertFileIsExactly($"Other People To Investigate{Environment.NewLine}"); + + break; + default: + throw new ArgumentOutOfRangeException(nameof(strategy)); + } + } - var ex = Assert.Throws(()=>RunGetChunk(file, s => { s.AttemptToResolveNewLinesInRecords = true; })); - Assert.AreEqual("Bad data found on line 3",ex.Message); + [TestCase(BadDataHandlingStrategy.DivertRows, true)] + [TestCase(BadDataHandlingStrategy.ThrowException, false)] + [TestCase(BadDataHandlingStrategy.ThrowException, true)] + [TestCase(BadDataHandlingStrategy.IgnoreRows, false)] + public void BadCSV_TooFewColumnsOnLastLine(BadDataHandlingStrategy strategy, bool tryToResolve) + { + var file = CreateTestFile( + "Name,Description,Age", + "Frank,Is the greatest,100", + "Bob"); - //looks like a good record followed by 2 bad records - var dt = RunGetChunk(file, s => { s.AttemptToResolveNewLinesInRecords = true; s.BadDataHandlingStrategy = BadDataHandlingStrategy.IgnoreRows; }); - Assert.AreEqual(3, dt.Rows.Count); - Assert.AreEqual("to be honest", dt.Rows[1][0]); - Assert.AreEqual("Bob", dt.Rows[1][1]); - Assert.AreEqual(20, dt.Rows[1][2]); + void Adjust(DelimitedFlatFileDataFlowSource a) + { + a.BadDataHandlingStrategy = strategy; + a.AttemptToResolveNewLinesInRecords = tryToResolve; + a.ThrowOnEmptyFiles = true; } - [Test] - public void BadCSV_FreeTextLastColumn() + switch (strategy) { - var file = CreateTestFile( - "Name,Age,Description", - "Frank,100,Is the greatest", - @"Bob,20,He's -not too bad -to be honest", - "Dennis,35,Hes ok"); + case BadDataHandlingStrategy.ThrowException: + var ex = Assert.Throws(() => RunGetChunk(file, Adjust)); + StringAssert.StartsWith("Bad data found on line 3", ex.Message); + break; + case BadDataHandlingStrategy.IgnoreRows: + var dt = RunGetChunk(file, Adjust); + Assert.AreEqual(1, dt.Rows.Count); + break; + case BadDataHandlingStrategy.DivertRows: + var dt2 = RunGetChunk(file, Adjust); + Assert.AreEqual(1, dt2.Rows.Count); + + 
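Throughout this file the per-test Action<DelimitedFlatFileDataFlowSource> delegates named adjust are rewritten as local functions named Adjust, which read like ordinary methods and can be marked static when they capture nothing. A standalone illustration of the same refactoring, using a stand-in type since the real component is not needed to show the shape:

using System;

internal class FakeSource // stand-in for DelimitedFlatFileDataFlowSource
{
    public bool ThrowOnEmptyFiles { get; set; }
}

internal static class Program
{
    private static void Main()
    {
        // Local function replacing 'Action<FakeSource> adjust = a => ...':
        // named, callable directly, and convertible to Action<FakeSource>
        // wherever a delegate is still required.
        static void Adjust(FakeSource a) => a.ThrowOnEmptyFiles = true;

        var source = new FakeSource();
        Adjust(source);
        Console.WriteLine(source.ThrowOnEmptyFiles); // True

        Action<FakeSource> asDelegate = Adjust; // method-group conversion still works
        asDelegate(new FakeSource());
    }
}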
AssertDivertFileIsExactly($"Bob{Environment.NewLine}"); + + break; + default: + throw new ArgumentOutOfRangeException(nameof(strategy)); + } + } - var ex = Assert.Throws(()=>RunGetChunk(file, s => { s.AttemptToResolveNewLinesInRecords = true; })); - Assert.AreEqual("Bad data found on line 4",ex.Message); + [Test] + public void BadCSV_FreeTextMiddleColumn() + { + //This is recoverable + var file = CreateTestFile( + "Name,Description,Age", + "Frank,Is the greatest,100", + @"Bob,He's +not too bad +to be honest,20", + "Dennis,Hes ok,35"); - //looks like a good record followed by 2 bad records - var dt = RunGetChunk(file, s => { s.AttemptToResolveNewLinesInRecords = true;s.BadDataHandlingStrategy = BadDataHandlingStrategy.IgnoreRows; }); - Assert.AreEqual(3,dt.Rows.Count); - Assert.AreEqual("He's", dt.Rows[1][2]); + var dt = RunGetChunk(file, s => { s.AttemptToResolveNewLinesInRecords = true; }); + Assert.AreEqual(3, dt.Rows.Count); + Assert.AreEqual($"He's{Environment.NewLine}not too bad{Environment.NewLine}to be honest", dt.Rows[1][1]); + } + [Test] + public void BadCSV_FreeTextFirstColumn() + { + var file = CreateTestFile( + "Description,Name,Age", + "Is the greatest,Frank,100", + @"He's +not too bad +to be honest,Bob,20", + "Hes ok,Dennis,35"); - } + var ex = Assert.Throws(() => + RunGetChunk(file, s => { s.AttemptToResolveNewLinesInRecords = true; })); + Assert.AreEqual("Bad data found on line 3", ex.Message); - [Test] - public void BadCSV_ForceHeaders() + //looks like a good record followed by 2 bad records + var dt = RunGetChunk(file, s => { - var file = CreateTestFile( - "Patient's first name, Patients blood glucose, measured in mg", - "Thomas,100", - "Frank,300"); + s.AttemptToResolveNewLinesInRecords = true; + s.BadDataHandlingStrategy = BadDataHandlingStrategy.IgnoreRows; + }); + Assert.AreEqual(3, dt.Rows.Count); + Assert.AreEqual("to be honest", dt.Rows[1][0]); + Assert.AreEqual("Bob", dt.Rows[1][1]); + Assert.AreEqual(20, dt.Rows[1][2]); + } + + [Test] + public void BadCSV_FreeTextLastColumn() + { + var file = CreateTestFile( + "Name,Age,Description", + "Frank,100,Is the greatest", + @"Bob,20,He's +not too bad +to be honest", + "Dennis,35,Hes ok"); - var ex = Assert.Throws(() => RunGetChunk(file, s => { s.AttemptToResolveNewLinesInRecords = false; })); - Assert.AreEqual("Bad data found on line 2", ex.Message); + var ex = Assert.Throws(() => + RunGetChunk(file, s => { s.AttemptToResolveNewLinesInRecords = true; })); + Assert.AreEqual("Bad data found on line 4", ex.Message); + //looks like a good record followed by 2 bad records + var dt = RunGetChunk(file, s => + { + s.AttemptToResolveNewLinesInRecords = true; + s.BadDataHandlingStrategy = BadDataHandlingStrategy.IgnoreRows; + }); + Assert.AreEqual(3, dt.Rows.Count); + Assert.AreEqual("He's", dt.Rows[1][2]); + } - var dt = RunGetChunk(file, s => - { - s.AttemptToResolveNewLinesInRecords = false; - s.ForceHeaders = "Name,BloodGlucose"; - s.ForceHeadersReplacesFirstLineInFile = true; - }); + [Test] + public void BadCSV_ForceHeaders() + { + var file = CreateTestFile( + "Patient's first name, Patients blood glucose, measured in mg", + "Thomas,100", + "Frank,300"); - Assert.AreEqual(2,dt.Rows.Count); - Assert.AreEqual(2, dt.Columns.Count); - Assert.AreEqual("Thomas", dt.Rows[0]["Name"]); - Assert.AreEqual(100, dt.Rows[0]["BloodGlucose"]); + var ex = Assert.Throws(() => + RunGetChunk(file, s => { s.AttemptToResolveNewLinesInRecords = false; })); + Assert.AreEqual("Bad data found on line 2", ex.Message); - } - [Test] - public void 
BadCSV_ForceHeaders_NoReplace() + var dt = RunGetChunk(file, s => { - var file = CreateTestFile( - "Thomas,100", - "Frank,300"); - - var dt = RunGetChunk(file, s => - { - s.AttemptToResolveNewLinesInRecords = false; - s.ForceHeaders = "Name,BloodGlucose"; - }); + s.AttemptToResolveNewLinesInRecords = false; + s.ForceHeaders = "Name,BloodGlucose"; + s.ForceHeadersReplacesFirstLineInFile = true; + }); + + Assert.AreEqual(2, dt.Rows.Count); + Assert.AreEqual(2, dt.Columns.Count); + Assert.AreEqual("Thomas", dt.Rows[0]["Name"]); + Assert.AreEqual(100, dt.Rows[0]["BloodGlucose"]); + } - Assert.AreEqual(2,dt.Rows.Count); - Assert.AreEqual(2, dt.Columns.Count); - Assert.AreEqual("Thomas", dt.Rows[0]["Name"]); - Assert.AreEqual(100, dt.Rows[0]["BloodGlucose"]); + [Test] + public void BadCSV_ForceHeaders_NoReplace() + { + var file = CreateTestFile( + "Thomas,100", + "Frank,300"); - } - + var dt = RunGetChunk(file, s => + { + s.AttemptToResolveNewLinesInRecords = false; + s.ForceHeaders = "Name,BloodGlucose"; + }); + + Assert.AreEqual(2, dt.Rows.Count); + Assert.AreEqual(2, dt.Columns.Count); + Assert.AreEqual("Thomas", dt.Rows[0]["Name"]); + Assert.AreEqual(100, dt.Rows[0]["BloodGlucose"]); } } \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/Sources/DelimitedFileSourceTests_Unresolveable.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/Sources/DelimitedFileSourceTests_Unresolveable.cs index d3eaa4dc55..547f0e985c 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/Sources/DelimitedFileSourceTests_Unresolveable.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/Sources/DelimitedFileSourceTests_Unresolveable.cs @@ -9,77 +9,76 @@ using Rdmp.Core.DataLoad.Modules.DataFlowSources; using Rdmp.Core.DataLoad.Modules.Exceptions; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration.PipelineTests.Sources +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration.PipelineTests.Sources; + +internal class DelimitedFileSourceTests_Unresolveable : DelimitedFileSourceTestsBase { - class DelimitedFileSourceTests_Unresolveable: DelimitedFileSourceTestsBase + [TestCase(BadDataHandlingStrategy.DivertRows)] + [TestCase(BadDataHandlingStrategy.ThrowException)] + [TestCase(BadDataHandlingStrategy.IgnoreRows)] + public void BadCSV_UnclosedQuote(BadDataHandlingStrategy strategy) { + var file = CreateTestFile( + "Name,Description,Age", + "Frank,\"Is, the greatest\",100", //<---- how you should be doing it + "Frank,Is the greatest,100", + "Frank,\"Is the greatest,100", //<----- no closing quote! i.e. read the rest of the file! + "Frank,Is the greatest,100", + "Frank,Is the greatest,100", + "Frank,Is the greatest,100", + "Frank,Is the greatest,100"); - [TestCase(BadDataHandlingStrategy.DivertRows)] - [TestCase(BadDataHandlingStrategy.ThrowException)] - [TestCase(BadDataHandlingStrategy.IgnoreRows)] - public void BadCSV_UnclosedQuote(BadDataHandlingStrategy strategy) + void Adjust(DelimitedFlatFileDataFlowSource a) { - var file = CreateTestFile( - "Name,Description,Age", - "Frank,\"Is, the greatest\",100", //<---- how you should be doing it - "Frank,Is the greatest,100", - "Frank,\"Is the greatest,100", //<----- no closing quote! i.e. read the rest of the file! 
- "Frank,Is the greatest,100", - "Frank,Is the greatest,100", - "Frank,Is the greatest,100", - "Frank,Is the greatest,100"); - - Action adjust = (a) => - { - a.BadDataHandlingStrategy = strategy; - a.ThrowOnEmptyFiles = true; - a.IgnoreQuotes = false; - }; + a.BadDataHandlingStrategy = strategy; + a.ThrowOnEmptyFiles = true; + a.IgnoreQuotes = false; + } - switch (strategy) - { - case BadDataHandlingStrategy.ThrowException: - var ex = Assert.Throws(() => RunGetChunk(file, adjust)); - Assert.AreEqual("Bad data found on line 9", ex.Message); - break; - case BadDataHandlingStrategy.IgnoreRows: - var dt = RunGetChunk(file, adjust); - Assert.AreEqual(2, dt.Rows.Count); //reads first 2 rows and chucks the rest! - break; - case BadDataHandlingStrategy.DivertRows: + switch (strategy) + { + case BadDataHandlingStrategy.ThrowException: + var ex = Assert.Throws(() => RunGetChunk(file, Adjust)); + Assert.AreEqual("Bad data found on line 9", ex?.Message); + break; + case BadDataHandlingStrategy.IgnoreRows: + var dt = RunGetChunk(file, Adjust); + Assert.AreEqual(2, dt.Rows.Count); //reads first 2 rows and chucks the rest! + break; + case BadDataHandlingStrategy.DivertRows: - //read 2 rows and rejected the rest - var dt2 = RunGetChunk(file, adjust); - Assert.AreEqual(2, dt2.Rows.Count); - AssertDivertFileIsExactly($"Frank,\"Is the greatest,100{Environment.NewLine}Frank,Is the greatest,100{Environment.NewLine}Frank,Is the greatest,100{Environment.NewLine}Frank,Is the greatest,100{Environment.NewLine}Frank,Is the greatest,100{Environment.NewLine}"); + //read 2 rows and rejected the rest + var dt2 = RunGetChunk(file, Adjust); + Assert.AreEqual(2, dt2.Rows.Count); + AssertDivertFileIsExactly( + $"Frank,\"Is the greatest,100{Environment.NewLine}Frank,Is the greatest,100{Environment.NewLine}Frank,Is the greatest,100{Environment.NewLine}Frank,Is the greatest,100{Environment.NewLine}Frank,Is the greatest,100{Environment.NewLine}"); - break; - default: - throw new ArgumentOutOfRangeException("strategy"); - } + break; + default: + throw new ArgumentOutOfRangeException(nameof(strategy)); } - - [Test] - public void BadCSV_UnclosedQuote_IgnoreQuotes() - { - var file = CreateTestFile( - "Name,Description,Age", - "Frank,Is the greatest,100", - "Frank,\"Is the greatest,100", - "Frank,Is the greatest,100", - "Frank,Is the greatest,100", - "Frank,Is the greatest,100"); + } - Action adjust = (a) => - { - a.BadDataHandlingStrategy = BadDataHandlingStrategy.ThrowException; - a.ThrowOnEmptyFiles = true; - a.IgnoreQuotes = true; - }; + [Test] + public void BadCSV_UnclosedQuote_IgnoreQuotes() + { + var file = CreateTestFile( + "Name,Description,Age", + "Frank,Is the greatest,100", + "Frank,\"Is the greatest,100", + "Frank,Is the greatest,100", + "Frank,Is the greatest,100", + "Frank,Is the greatest,100"); - var dt2 = RunGetChunk(file, adjust); - Assert.AreEqual(5, dt2.Rows.Count); - Assert.AreEqual("\"Is the greatest", dt2.Rows[1]["Description"]); + static void Adjust(DelimitedFlatFileDataFlowSource a) + { + a.BadDataHandlingStrategy = BadDataHandlingStrategy.ThrowException; + a.ThrowOnEmptyFiles = true; + a.IgnoreQuotes = true; } + + var dt2 = RunGetChunk(file, Adjust); + Assert.AreEqual(5, dt2.Rows.Count); + Assert.AreEqual("\"Is the greatest", dt2.Rows[1]["Description"]); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/Sources/SourceTests.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/Sources/SourceTests.cs index 055dea92d3..39dc637579 100644 --- 
a/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/Sources/SourceTests.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/PipelineTests/Sources/SourceTests.cs @@ -15,232 +15,177 @@ using Rdmp.Core.DataLoad.Engine.Job; using Rdmp.Core.DataLoad.Engine.Pipeline.Sources; using Rdmp.Core.Repositories; -using ReusableLibraryCode.Progress; +using Rdmp.Core.ReusableLibraryCode.Progress; using Tests.Common; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration.PipelineTests.Sources +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration.PipelineTests.Sources; + +public class SourceTests : DatabaseTests { - public class SourceTests:DatabaseTests - { - private ICatalogueRepository mockRepo = new MemoryCatalogueRepository(); + private ICatalogueRepository mockRepo = new MemoryCatalogueRepository(); - [Test] - public void RetrieveChunks() - { - var source = new DbDataCommandDataFlowSource("Select top 3 * from master.sys.tables", "Query Sys tables", DiscoveredServerICanCreateRandomDatabasesAndTablesOn.Builder, 30); - Assert.AreEqual(3, source.GetChunk(new ThrowImmediatelyDataLoadEventListener(), new GracefulCancellationToken()).Rows.Count); - } + [Test] + public void RetrieveChunks() + { + var source = new DbDataCommandDataFlowSource("Select top 3 * from master.sys.tables", "Query Sys tables", + DiscoveredServerICanCreateRandomDatabasesAndTablesOn.Builder, 30); + Assert.AreEqual(3, + source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new GracefulCancellationToken()).Rows.Count); + } - [Test] - public void TestPipelineContextInitialization() - { - var contextFactory = new DataFlowPipelineContextFactory(); - var context = contextFactory.Create(PipelineUsage.FixedDestination |PipelineUsage.LoadsSingleTableInfo); - - var component = new TestObject_RequiresTableInfo(); - var ti = new TableInfo(CatalogueRepository, "TestTableInfo"); - context.PreInitialize(new ThrowImmediatelyDataLoadEventListener(), component, ti); - - Assert.AreEqual(component.PreInitToThis, ti); - ti.DeleteInDatabase(); - } + [Test] + public void TestPipelineContextInitialization() + { + var contextFactory = new DataFlowPipelineContextFactory(); + var context = contextFactory.Create(PipelineUsage.FixedDestination | PipelineUsage.LoadsSingleTableInfo); - [Test] - public void TestPipelineContextInitializationNoInterfaces() - { - var contextFactory = new DataFlowPipelineContextFactory(); - var context = contextFactory.Create(PipelineUsage.FixedDestination | PipelineUsage.LoadsSingleTableInfo); - var ti = new TableInfo(mockRepo, "Foo"); - var component = new TestObjectNoRequirements(); - Assert.DoesNotThrow(() => context.PreInitialize(new ThrowImmediatelyDataLoadEventListener(), component, ti)); - } + var component = new TestObject_RequiresTableInfo(); + var ti = new TableInfo(CatalogueRepository, "TestTableInfo"); + context.PreInitialize(ThrowImmediatelyDataLoadEventListener.Quiet, component, ti); - [Test] - public void TestPipelineContextInitialization_UnexpectedType() - { - var contextFactory = new DataFlowPipelineContextFactory(); - var context = contextFactory.Create(PipelineUsage.FixedDestination | PipelineUsage.LoadsSingleTableInfo); + Assert.AreEqual(component.PreInitToThis, ti); + ti.DeleteInDatabase(); + } - var component = new TestObject_RequiresTableInfo(); - var ti = new TableInfo(mockRepo, "Foo"); - var ci = new ColumnInfo(mockRepo, "ColumnInfo", "Type", ti); - ci.Name = "ColumnInfo"; // because we passed a stubbed repository, the name won't be set + [Test] + public void 
TestPipelineContextInitializationNoInterfaces() + { + var contextFactory = new DataFlowPipelineContextFactory(); + var context = contextFactory.Create(PipelineUsage.FixedDestination | PipelineUsage.LoadsSingleTableInfo); + var ti = new TableInfo(mockRepo, "Foo"); + var component = new TestObjectNoRequirements(); + Assert.DoesNotThrow(() => context.PreInitialize(ThrowImmediatelyDataLoadEventListener.Quiet, component, ti)); + } - var ex = Assert.Throws(()=>context.PreInitialize(new ThrowImmediatelyDataLoadEventListener(), component, ci)); - StringAssert.Contains("The following expected types were not passed to PreInitialize:TableInfo",ex.Message); - } + [Test] + public void TestPipelineContextInitialization_UnexpectedType() + { + var contextFactory = new DataFlowPipelineContextFactory(); + var context = contextFactory.Create(PipelineUsage.FixedDestination | PipelineUsage.LoadsSingleTableInfo); - [Test] - public void TestPipelineContextInitialization_ForbiddenType() + var component = new TestObject_RequiresTableInfo(); + var ti = new TableInfo(mockRepo, "Foo"); + var ci = new ColumnInfo(mockRepo, "ColumnInfo", "Type", ti) { - var contextFactory = new DataFlowPipelineContextFactory(); - var context = contextFactory.Create(PipelineUsage.None); + Name = "ColumnInfo" // because we passed a stubbed repository, the name won't be set + }; - var component = new TestObject_RequiresTableInfo(); - var ti = new TableInfo(new MemoryCatalogueRepository(), "Foo"); - var ex = Assert.Throws(()=>context.PreInitialize(new ThrowImmediatelyDataLoadEventListener(), component, ti)); - StringAssert.Contains("Type TableInfo is not an allowable PreInitialize parameters type under the current DataFlowPipelineContext (check which flags you passed to the DataFlowPipelineContextFactory and the interfaces IPipelineRequirement<> that your components implement) ",ex.Message); - } - - [Test] - public void TestPipelineContextInitialization_UninitializedInterface() - { - var contextFactory = new DataFlowPipelineContextFactory(); - var context = contextFactory.Create(PipelineUsage.FixedDestination | PipelineUsage.LoadsSingleTableInfo); + var ex = Assert.Throws(() => + context.PreInitialize(ThrowImmediatelyDataLoadEventListener.Quiet, component, ci)); + StringAssert.Contains("The following expected types were not passed to PreInitialize:TableInfo", ex.Message); + } - //component is both IPipelineRequirement AND IPipelineRequirement but only TableInfo is passed in params - var component = new TestObject_RequiresTableInfoAndFreakyObject(); + [Test] + public void TestPipelineContextInitialization_ForbiddenType() + { + var contextFactory = new DataFlowPipelineContextFactory(); + var context = contextFactory.Create(PipelineUsage.None); + + var component = new TestObject_RequiresTableInfo(); + var ti = new TableInfo(new MemoryCatalogueRepository(), "Foo"); + var ex = Assert.Throws(() => + context.PreInitialize(ThrowImmediatelyDataLoadEventListener.Quiet, component, ti)); + StringAssert.Contains( + "Type TableInfo is not an allowable PreInitialize parameters type under the current DataFlowPipelineContext (check which flags you passed to the DataFlowPipelineContextFactory and the interfaces IPipelineRequirement<> that your components implement) ", + ex.Message); + } - var testTableInfo = new TableInfo(mockRepo, ""); - testTableInfo.Name = "Test Table Info"; + [Test] + public void TestPipelineContextInitialization_UninitializedInterface() + { + var contextFactory = new DataFlowPipelineContextFactory(); + var context = 
contextFactory.Create(PipelineUsage.FixedDestination | PipelineUsage.LoadsSingleTableInfo); - var ex = Assert.Throws(()=>context.PreInitialize(new ThrowImmediatelyDataLoadEventListener(), component, testTableInfo)); - StringAssert.Contains($"The following expected types were not passed to PreInitialize:LoadModuleAssembly{Environment.NewLine}The object types passed were:{Environment.NewLine}Rdmp.Core.Curation.Data.TableInfo:Test Table Info",ex.Message); - } + //component is both IPipelineRequirement AND IPipelineRequirement but only TableInfo is passed in params + var component = new TestObject_RequiresTableInfoAndFreakyObject(); - [Test] - public void TestPipelineContextIsAllowable() + var testTableInfo = new TableInfo(mockRepo, "") { - var contextFactory = new DataFlowPipelineContextFactory(); - var context = contextFactory.Create(PipelineUsage.FixedSource | PipelineUsage.FixedDestination | PipelineUsage.LoadsSingleTableInfo); - - var pipeline = new Pipeline(CatalogueRepository, "DeleteMePipeline"); - var component = new PipelineComponent(CatalogueRepository, pipeline, typeof(TestObject_RequiresTableInfo), 0); - - Assert.IsTrue(context.IsAllowable(pipeline)); + Name = "Test Table Info" + }; + + var ex = Assert.Throws(() => + context.PreInitialize(ThrowImmediatelyDataLoadEventListener.Quiet, component, testTableInfo)); + StringAssert.Contains( + $"The following expected types were not passed to PreInitialize:LoadModuleAssembly{Environment.NewLine}The object types passed were:{Environment.NewLine}Rdmp.Core.Curation.Data.TableInfo:Test Table Info", + ex.Message); + } - pipeline.DeleteInDatabase(); - } + [Test] + public void TestPipelineContextIsAllowable() + { + var contextFactory = new DataFlowPipelineContextFactory(); + var context = contextFactory.Create(PipelineUsage.FixedSource | PipelineUsage.FixedDestination | + PipelineUsage.LoadsSingleTableInfo); + var pipeline = new Pipeline(CatalogueRepository, "DeleteMePipeline"); + var component = new PipelineComponent(CatalogueRepository, pipeline, typeof(TestObject_RequiresTableInfo), 0); - [Test] - public void TestPipelineContextIsNOTAllowable() - { - var contextFactory = new DataFlowPipelineContextFactory(); - var context = contextFactory.Create(PipelineUsage.FixedDestination); + Assert.IsTrue(context.IsAllowable(pipeline)); - var pipeline = new Pipeline(CatalogueRepository, "DeleteMePipeline"); - var component = new PipelineComponent(CatalogueRepository, pipeline, typeof(TestObject_RequiresTableInfo), 0); - component.Name = "TestPipeComponent"; - component.SaveToDatabase(); + pipeline.DeleteInDatabase(); + } - string reason; - bool rejection = context.IsAllowable(pipeline, out reason); - Console.WriteLine(reason); + [Test] + public void TestPipelineContextIsNOTAllowable() + { + var contextFactory = new DataFlowPipelineContextFactory(); + var context = contextFactory.Create(PipelineUsage.FixedDestination); - Assert.IsFalse(rejection,reason); + var pipeline = new Pipeline(CatalogueRepository, "DeleteMePipeline"); + var component = new PipelineComponent(CatalogueRepository, pipeline, typeof(TestObject_RequiresTableInfo), 0) + { + Name = "TestPipeComponent" + }; + component.SaveToDatabase(); - Assert.AreEqual("Component TestPipeComponent implements a forbidden type (IPipelineRequirement) under the pipeline usage context",reason); + var rejection = context.IsAllowable(pipeline, out var reason); - pipeline.DeleteInDatabase(); - } + Console.WriteLine(reason); - [Test] - public void TestSuspiciousPipelineRequirements() - { - var contextFactory = 
new DataFlowPipelineContextFactory(); - var context = contextFactory.Create(PipelineUsage.FixedDestination); - - var suspiciousComponent = new TestObject_Suspicious(); - var ex = Assert.Throws(() => context.PreInitialize(new ThrowImmediatelyDataLoadJob(), suspiciousComponent, 5, "fish")); + Assert.IsFalse(rejection, reason); - Console.WriteLine("Exception was:" + ex.Message); - } - [Test] - public void TestExtraSuspiciousPipelineRequirements() - { - var contextFactory = new DataFlowPipelineContextFactory(); - var context = contextFactory.Create(PipelineUsage.FixedDestination); + Assert.AreEqual( + "Component TestPipeComponent implements a forbidden type (IPipelineRequirement) under the pipeline usage context", + reason); - var suspiciousComponent = new TestObject_ExtraSuspicious(); - Assert.Throws(() => context.PreInitialize(new ThrowImmediatelyDataLoadJob(), suspiciousComponent, "5")); - } + pipeline.DeleteInDatabase(); + } - #region Test objects that have an assortment of IPipelineRequirements + [Test] + public void TestSuspiciousPipelineRequirements() + { + var contextFactory = new DataFlowPipelineContextFactory(); + var context = contextFactory.Create(PipelineUsage.FixedDestination); - public class TestObject_RequiresTableInfo : IDataFlowComponent, IPipelineRequirement - { - public TableInfo PreInitToThis { get; private set; } - public DataTable ProcessPipelineData( DataTable toProcess, IDataLoadEventListener listener, GracefulCancellationToken cancellationToken) - { - throw new NotImplementedException(); - } - - public void Dispose(IDataLoadEventListener listener, Exception pipelineFailureExceptionIfAny) - { - throw new NotImplementedException(); - } - - public void Abort(IDataLoadEventListener listener) - { - throw new NotImplementedException(); - } - - public void PreInitialize(TableInfo value, IDataLoadEventListener listener) - { - PreInitToThis = value; - } + var suspiciousComponent = new TestObject_Suspicious(); + var ex = Assert.Throws(() => + context.PreInitialize(new ThrowImmediatelyDataLoadJob(), suspiciousComponent, 5, "fish")); - } - public class TestObject_RequiresTableInfoAndFreakyObject : IDataFlowComponent, IPipelineRequirement, IPipelineRequirement - { - public TableInfo PreInitToThis { get; private set; } - public DataTable ProcessPipelineData( DataTable toProcess, IDataLoadEventListener listener, GracefulCancellationToken cancellationToken) - { - throw new NotImplementedException(); - } - - public void Dispose(IDataLoadEventListener listener, Exception pipelineFailureExceptionIfAny) - { - throw new NotImplementedException(); - } - - public void Abort(IDataLoadEventListener listener) - { - throw new NotImplementedException(); - } - - public void PreInitialize(TableInfo value, IDataLoadEventListener listener) - { - PreInitToThis = value; - } - - - public void PreInitialize(LoadModuleAssembly value, IDataLoadEventListener listener) - { - throw new NotImplementedException(); - } - } + Console.WriteLine($"Exception was:{ex.Message}"); } - public class TestObjectNoRequirements : IDataFlowComponent + [Test] + public void TestExtraSuspiciousPipelineRequirements() { - public DataTable ProcessPipelineData( DataTable toProcess, IDataLoadEventListener listener, GracefulCancellationToken cancellationToken) - { - throw new NotImplementedException(); - } + var contextFactory = new DataFlowPipelineContextFactory(); + var context = contextFactory.Create(PipelineUsage.FixedDestination); - public void Dispose(IDataLoadEventListener listener, Exception pipelineFailureExceptionIfAny) - { 
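// Several of the stub pipeline components in this file collapse one-line method bodies
// into expression-bodied members ("=> throw new NotImplementedException()"). A hedged,
// stand-alone illustration against a made-up interface:
using System;

public interface IFlowComponent
{
    string Process(string input);
    void Abort();
}

public sealed class StubFlowComponent : IFlowComponent
{
    // A throw expression is allowed on the right of "=>", so a not-implemented stub is one line.
    public string Process(string input) => throw new NotImplementedException();

    // Members that need more than a single expression keep the block form.
    public void Abort()
    {
        throw new NotImplementedException();
    }
}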
- throw new NotImplementedException(); - } - - public void Abort(IDataLoadEventListener listener) - { - throw new NotImplementedException(); - } + var suspiciousComponent = new TestObject_ExtraSuspicious(); + Assert.Throws(() => + context.PreInitialize(new ThrowImmediatelyDataLoadJob(), suspiciousComponent, "5")); } - public class TestObject_Suspicious : IDataFlowComponent, IPipelineRequirement + #region Test objects that have an assortment of IPipelineRequirements + + public class TestObject_RequiresTableInfo : IDataFlowComponent, IPipelineRequirement { - public Object Object { get; set; } + public TableInfo PreInitToThis { get; private set; } + public DataTable ProcessPipelineData(DataTable toProcess, IDataLoadEventListener listener, - GracefulCancellationToken cancellationToken) - { - throw new NotImplementedException(); - } + GracefulCancellationToken cancellationToken) => throw new NotImplementedException(); public void Dispose(IDataLoadEventListener listener, Exception pipelineFailureExceptionIfAny) { @@ -252,20 +197,19 @@ public void Abort(IDataLoadEventListener listener) throw new NotImplementedException(); } - public void PreInitialize(object value, IDataLoadEventListener listener) + public void PreInitialize(TableInfo value, IDataLoadEventListener listener) { - Object = value; + PreInitToThis = value; } } - public class TestObject_ExtraSuspicious : IDataFlowComponent, IPipelineRequirement, IPipelineRequirement + public class TestObject_RequiresTableInfoAndFreakyObject : IDataFlowComponent, + IPipelineRequirement, IPipelineRequirement { - public Object Object { get; set; } + public TableInfo PreInitToThis { get; private set; } + public DataTable ProcessPipelineData(DataTable toProcess, IDataLoadEventListener listener, - GracefulCancellationToken cancellationToken) - { - throw new NotImplementedException(); - } + GracefulCancellationToken cancellationToken) => throw new NotImplementedException(); public void Dispose(IDataLoadEventListener listener, Exception pipelineFailureExceptionIfAny) { @@ -277,15 +221,87 @@ public void Abort(IDataLoadEventListener listener) throw new NotImplementedException(); } - public void PreInitialize(object value, IDataLoadEventListener listener) + public void PreInitialize(TableInfo value, IDataLoadEventListener listener) { - Object = value; + PreInitToThis = value; } - public void PreInitialize(string value, IDataLoadEventListener listener) + + public void PreInitialize(LoadModuleAssembly value, IDataLoadEventListener listener) { - Object = value; + throw new NotImplementedException(); } } - #endregion } + +public class TestObjectNoRequirements : IDataFlowComponent +{ + public DataTable ProcessPipelineData(DataTable toProcess, IDataLoadEventListener listener, + GracefulCancellationToken cancellationToken) => throw new NotImplementedException(); + + public void Dispose(IDataLoadEventListener listener, Exception pipelineFailureExceptionIfAny) + { + throw new NotImplementedException(); + } + + public void Abort(IDataLoadEventListener listener) + { + throw new NotImplementedException(); + } +} + +public class TestObject_Suspicious : IDataFlowComponent, IPipelineRequirement +{ + public object Object { get; set; } + + public DataTable ProcessPipelineData(DataTable toProcess, IDataLoadEventListener listener, + GracefulCancellationToken cancellationToken) => + throw new NotImplementedException(); + + public void Dispose(IDataLoadEventListener listener, Exception pipelineFailureExceptionIfAny) + { + throw new NotImplementedException(); + } + + public void 
Abort(IDataLoadEventListener listener) + { + throw new NotImplementedException(); + } + + public void PreInitialize(object value, IDataLoadEventListener listener) + { + Object = value; + } +} + +public class TestObject_ExtraSuspicious : IDataFlowComponent, IPipelineRequirement, + IPipelineRequirement +{ + public object Object { get; set; } + + public DataTable ProcessPipelineData(DataTable toProcess, IDataLoadEventListener listener, + GracefulCancellationToken cancellationToken) => + throw new NotImplementedException(); + + public void Dispose(IDataLoadEventListener listener, Exception pipelineFailureExceptionIfAny) + { + throw new NotImplementedException(); + } + + public void Abort(IDataLoadEventListener listener) + { + throw new NotImplementedException(); + } + + public void PreInitialize(object value, IDataLoadEventListener listener) + { + Object = value; + } + + public void PreInitialize(string value, IDataLoadEventListener listener) + { + Object = value; + } +} + +#endregion \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/PrematureLoadEnderTests.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/PrematureLoadEnderTests.cs index a496e4ad75..949a189e31 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/PrematureLoadEnderTests.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/PrematureLoadEnderTests.cs @@ -13,64 +13,69 @@ using Rdmp.Core.DataLoad.Modules.Mutilators; using Tests.Common; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration; + +internal class PrematureLoadEnderTests : DatabaseTests { - class PrematureLoadEnderTests:DatabaseTests + [TestCase(DatabaseType.MySql)] + [TestCase(DatabaseType.MicrosoftSQLServer)] + public void TestEndLoadBecause_NoTables(DatabaseType type) { - [TestCase(DatabaseType.MySql)] - [TestCase(DatabaseType.MicrosoftSQLServer)] - public void TestEndLoadBecause_NoTables(DatabaseType type) - { - var database = GetCleanedServer(type); - - Assert.AreEqual(0,database.DiscoverTables(false).Length); - - var ender = new PrematureLoadEnder(); - ender.ConditionsToTerminateUnder = PrematureLoadEndCondition.NoRecordsInAnyTablesInDatabase; - ender.ExitCodeToReturnIfConditionMet = ExitCodeType.OperationNotRequired; - - ender.Initialize(database,LoadStage.AdjustRaw); - - Assert.AreEqual(ExitCodeType.OperationNotRequired ,ender.Mutilate(new ThrowImmediatelyDataLoadJob())); - } - - [TestCase(DatabaseType.MySql)] - [TestCase(DatabaseType.MicrosoftSQLServer)] - public void TestEndLoadBecause_NoRows(DatabaseType type) + var database = GetCleanedServer(type); + + Assert.AreEqual(0, database.DiscoverTables(false).Length); + + var ender = new PrematureLoadEnder { - var database = GetCleanedServer(type); + ConditionsToTerminateUnder = PrematureLoadEndCondition.NoRecordsInAnyTablesInDatabase, + ExitCodeToReturnIfConditionMet = ExitCodeType.OperationNotRequired + }; - DataTable dt = new DataTable(); - dt.Columns.Add("Fish"); + ender.Initialize(database, LoadStage.AdjustRaw); - database.CreateTable("MyTable", dt); - var ender = new PrematureLoadEnder(); - ender.ConditionsToTerminateUnder = PrematureLoadEndCondition.NoRecordsInAnyTablesInDatabase; - ender.ExitCodeToReturnIfConditionMet = ExitCodeType.OperationNotRequired; + Assert.AreEqual(ExitCodeType.OperationNotRequired, ender.Mutilate(new ThrowImmediatelyDataLoadJob())); + } - ender.Initialize(database, LoadStage.AdjustRaw); + [TestCase(DatabaseType.MySql)] + [TestCase(DatabaseType.MicrosoftSQLServer)] + public void 
TestEndLoadBecause_NoRows(DatabaseType type) + { + var database = GetCleanedServer(type); - Assert.AreEqual(ExitCodeType.OperationNotRequired, ender.Mutilate(new ThrowImmediatelyDataLoadJob())); - } + var dt = new DataTable(); + dt.Columns.Add("Fish"); - [TestCase(DatabaseType.MySql)] - [TestCase(DatabaseType.MicrosoftSQLServer)] - public void TestNoEnd_BecauseRows(DatabaseType type) + database.CreateTable("MyTable", dt); + var ender = new PrematureLoadEnder { - var database = GetCleanedServer(type); + ConditionsToTerminateUnder = PrematureLoadEndCondition.NoRecordsInAnyTablesInDatabase, + ExitCodeToReturnIfConditionMet = ExitCodeType.OperationNotRequired + }; - DataTable dt = new DataTable(); - dt.Columns.Add("Fish"); - dt.Rows.Add("myval"); + ender.Initialize(database, LoadStage.AdjustRaw); - database.CreateTable("MyTable", dt); - var ender = new PrematureLoadEnder(); - ender.ConditionsToTerminateUnder = PrematureLoadEndCondition.NoRecordsInAnyTablesInDatabase; - ender.ExitCodeToReturnIfConditionMet = ExitCodeType.OperationNotRequired; + Assert.AreEqual(ExitCodeType.OperationNotRequired, ender.Mutilate(new ThrowImmediatelyDataLoadJob())); + } + + [TestCase(DatabaseType.MySql)] + [TestCase(DatabaseType.MicrosoftSQLServer)] + public void TestNoEnd_BecauseRows(DatabaseType type) + { + var database = GetCleanedServer(type); + + var dt = new DataTable(); + dt.Columns.Add("Fish"); + dt.Rows.Add("myval"); + + database.CreateTable("MyTable", dt); + var ender = new PrematureLoadEnder + { + ConditionsToTerminateUnder = PrematureLoadEndCondition.NoRecordsInAnyTablesInDatabase, + ExitCodeToReturnIfConditionMet = ExitCodeType.OperationNotRequired + }; - ender.Initialize(database, LoadStage.AdjustRaw); + ender.Initialize(database, LoadStage.AdjustRaw); - Assert.AreEqual(ExitCodeType.Success, ender.Mutilate(new ThrowImmediatelyDataLoadJob())); - } + Assert.AreEqual(ExitCodeType.Success, ender.Mutilate(new ThrowImmediatelyDataLoadJob())); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/PrimaryKeyCollisionResolverTests.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/PrimaryKeyCollisionResolverTests.cs index dfb865248b..e2cefd89ee 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/PrimaryKeyCollisionResolverTests.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/PrimaryKeyCollisionResolverTests.cs @@ -9,179 +9,167 @@ using Rdmp.Core.Curation.Data; using Rdmp.Core.QueryBuilding; using Rdmp.Core.DataLoad.Modules.Mutilators; -using ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Checks; using Tests.Common; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration; + +public class PrimaryKeyCollisionResolverTests : DatabaseTests { - public class PrimaryKeyCollisionResolverTests : DatabaseTests + [Test] + public void PrimaryKeyCollisionResolverMultilation_Check_Passes() { - - [Test] - public void PrimaryKeyCollisionResolverMultilation_Check_Passes() + SetupTableInfos(out var t, out var c1, out var c2, out var c3); + try { - TableInfo t; - ColumnInfo c1; - ColumnInfo c2; - ColumnInfo c3; - SetupTableInfos(out t, out c1, out c2, out c3); - try + var mutilation = new PrimaryKeyCollisionResolverMutilation { - var mutilation = new PrimaryKeyCollisionResolverMutilation(); - mutilation.TargetTable = t; - - c1.IsPrimaryKey = true; - c1.SaveToDatabase(); - - c2.DuplicateRecordResolutionOrder = 1; - c2.DuplicateRecordResolutionIsAscending = true; - c2.SaveToDatabase(); - - 
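// Throughout the patch, "new ThrowImmediatelyDataLoadEventListener()" and
// "new ThrowImmediatelyCheckNotifier()" give way to shared static ".Quiet" instances.
// A sketch of that reusable-instance pattern for a stateless test listener; the type
// below is a stand-in, not the RDMP class:
using System;

public interface IEventListener
{
    void OnError(string message);
}

public sealed class ThrowingEventListener : IEventListener
{
    // The listener keeps no per-test state, so one shared instance suffices and
    // avoids allocating a fresh object in every test method.
    public static readonly ThrowingEventListener Quiet = new();

    public void OnError(string message) => throw new InvalidOperationException(message);
}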
c3.DuplicateRecordResolutionOrder = 2; - c3.DuplicateRecordResolutionIsAscending = false; - c3.SaveToDatabase(); - - Assert.DoesNotThrow(() => mutilation.Check(new ThrowImmediatelyCheckNotifier())); - - } - finally - { - t.DeleteInDatabase(); - } + TargetTable = t + }; + + c1.IsPrimaryKey = true; + c1.SaveToDatabase(); + + c2.DuplicateRecordResolutionOrder = 1; + c2.DuplicateRecordResolutionIsAscending = true; + c2.SaveToDatabase(); + + c3.DuplicateRecordResolutionOrder = 2; + c3.DuplicateRecordResolutionIsAscending = false; + c3.SaveToDatabase(); + + Assert.DoesNotThrow(() => mutilation.Check(ThrowImmediatelyCheckNotifier.Quiet)); } + finally + { + t.DeleteInDatabase(); + } + } - [Test] - public void PrimaryKeyCollisionResolverMultilation_Check_ThrowsBecauseNoColumnOrderConfigured() + [Test] + public void PrimaryKeyCollisionResolverMultilation_Check_ThrowsBecauseNoColumnOrderConfigured() + { + SetupTableInfos(out var t, out _, out _, out _); + try { - TableInfo t; - ColumnInfo c1; - ColumnInfo c2; - ColumnInfo c3; - SetupTableInfos(out t, out c1, out c2,out c3); + var mutilation = new PrimaryKeyCollisionResolverMutilation + { + TargetTable = t + }; try { - var mutilation = new PrimaryKeyCollisionResolverMutilation(); - mutilation.TargetTable = t; - try - { - - mutilation.Check(new ThrowImmediatelyCheckNotifier()); - Assert.Fail("Should have crashed before here"); - } - catch (Exception e) - { - Assert.AreEqual("Failed to check PrimaryKeyCollisionResolver on PrimaryKeyCollisionResolverTests", e.Message); - Assert.AreEqual("TableInfo PrimaryKeyCollisionResolverTests does not have any primary keys defined so cannot resolve primary key collisions",e.InnerException.Message); - } + mutilation.Check(ThrowImmediatelyCheckNotifier.Quiet); + Assert.Fail("Should have crashed before here"); } - finally + catch (Exception e) { - t.DeleteInDatabase(); + Assert.AreEqual("Failed to check PrimaryKeyCollisionResolver on PrimaryKeyCollisionResolverTests", + e.Message); + Assert.AreEqual( + "TableInfo PrimaryKeyCollisionResolverTests does not have any primary keys defined so cannot resolve primary key collisions", + e.InnerException.Message); } } - - [Test] - public void PrimaryKeyCollisionResolverMultilation_Check_ThrowsBecauseNotInitialized() + finally { - var mutilation = new PrimaryKeyCollisionResolverMutilation(); - - var ex = Assert.Throws(()=>mutilation.Check(new ThrowImmediatelyCheckNotifier())); - StringAssert.Contains("Target table is null, a table must be specified upon which to resolve primary key duplication (that TableInfo must have a primary key collision resolution order)",ex.Message); + t.DeleteInDatabase(); } + } + + [Test] + public void PrimaryKeyCollisionResolverMultilation_Check_ThrowsBecauseNotInitialized() + { + var mutilation = new PrimaryKeyCollisionResolverMutilation(); + + var ex = Assert.Throws(() => mutilation.Check(ThrowImmediatelyCheckNotifier.Quiet)); + StringAssert.Contains( + "Target table is null, a table must be specified upon which to resolve primary key duplication (that TableInfo must have a primary key collision resolution order)", + ex.Message); + } - [Test] - public void GenerateSQL_OrderCorrect() + [Test] + public void GenerateSQL_OrderCorrect() + { + SetupTableInfos(out var t, out var c1, out var c2,out var c3); + try { - TableInfo t; - ColumnInfo c1; - ColumnInfo c2; - ColumnInfo c3; - SetupTableInfos(out t, out c1, out c2,out c3); - try - { - c1.IsPrimaryKey = true; - c1.SaveToDatabase(); + c1.IsPrimaryKey = true; + c1.SaveToDatabase(); - 
c2.DuplicateRecordResolutionOrder = 1; - c2.DuplicateRecordResolutionIsAscending = true; - c2.SaveToDatabase(); + c2.DuplicateRecordResolutionOrder = 1; + c2.DuplicateRecordResolutionIsAscending = true; + c2.SaveToDatabase(); - c3.DuplicateRecordResolutionOrder = 2; - c3.DuplicateRecordResolutionIsAscending = false; - c3.SaveToDatabase(); + c3.DuplicateRecordResolutionOrder = 2; + c3.DuplicateRecordResolutionIsAscending = false; + c3.SaveToDatabase(); - PrimaryKeyCollisionResolver resolver = new PrimaryKeyCollisionResolver(t); - string sql = resolver.GenerateSQL(); + var resolver = new PrimaryKeyCollisionResolver(t); + var sql = resolver.GenerateSQL(); - Console.WriteLine(sql); + Console.WriteLine(sql); - Assert.IsTrue(sql.Contains(c2.Name)); - Assert.IsTrue(sql.Contains(c3.Name)); + Assert.IsTrue(sql.Contains(c2.Name)); + Assert.IsTrue(sql.Contains(c3.Name)); - //column 2 has the following null substitute, is Ascending order and is the first of two - Assert.IsTrue(sql.Contains("ISNULL([col2],-9223372036854775808) ASC,")); + //column 2 has the following null substitute, is Ascending order and is the first of two + Assert.IsTrue(sql.Contains("ISNULL([col2],-9223372036854775808) ASC,")); - //column 3 has the following null substitute and is descending and is not followed by another column - Assert.IsTrue(sql.Contains("ISNULL([col3],-2147483648) DESC")); - } - finally - { - t.DeleteInDatabase(); - } + //column 3 has the following null substitute and is descending and is not followed by another column + Assert.IsTrue(sql.Contains("ISNULL([col3],-2147483648) DESC")); } - - [Test] - public void NoColumnOrdersConfigured_ThrowsException() + finally { - TableInfo t; - ColumnInfo c1; - ColumnInfo c2; - ColumnInfo c3; - SetupTableInfos(out t, out c1, out c2, out c3); - try - { - c1.IsPrimaryKey = true; - c1.SaveToDatabase(); - - PrimaryKeyCollisionResolver resolver = new PrimaryKeyCollisionResolver(t); - var ex = Assert.Throws(()=>Console.WriteLine(resolver.GenerateSQL())); - StringAssert.Contains("The ColumnInfos of TableInfo PrimaryKeyCollisionResolverTests do not have primary key resolution orders configured (do not know which order to use non primary key column values in to resolve collisions). Fix this by right clicking a TableInfo in CatalogueManager and selecting 'Configure Primary Key Collision Resolution'.",ex.Message); - } - finally - { - t.DeleteInDatabase(); - } + t.DeleteInDatabase(); } + } - [Test] - public void NoPrimaryKeys_ThrowsException() + [Test] + public void NoColumnOrdersConfigured_ThrowsException() + { + SetupTableInfos(out var t, out var c1, out _, out _); + try { - TableInfo t; - ColumnInfo c1; - ColumnInfo c2; - ColumnInfo c3; - SetupTableInfos(out t, out c1, out c2,out c3); - - try - { - PrimaryKeyCollisionResolver resolver = new PrimaryKeyCollisionResolver(t); - var ex = Assert.Throws(()=>Console.WriteLine(resolver.GenerateSQL())); - StringAssert.Contains("does not have any primary keys defined so cannot resolve primary key collisions",ex.Message); - } - finally - { - t.DeleteInDatabase(); - } + c1.IsPrimaryKey = true; + c1.SaveToDatabase(); + + var resolver = new PrimaryKeyCollisionResolver(t); + var ex = Assert.Throws(() => Console.WriteLine(resolver.GenerateSQL())); + StringAssert.Contains( + "The ColumnInfos of TableInfo PrimaryKeyCollisionResolverTests do not have primary key resolution orders configured (do not know which order to use non primary key column values in to resolve collisions). 
Fix this by right clicking a TableInfo in CatalogueManager and selecting 'Configure Primary Key Collision Resolution'.", + ex.Message); } - - private void SetupTableInfos(out TableInfo tableInfo, out ColumnInfo c1, out ColumnInfo c2, out ColumnInfo c3) + finally { - tableInfo = new TableInfo(CatalogueRepository, "PrimaryKeyCollisionResolverTests"); + t.DeleteInDatabase(); + } + } - c1 = new ColumnInfo(CatalogueRepository, "col1", "varchar(100)", tableInfo); - c2 = new ColumnInfo(CatalogueRepository, "col2", "float", tableInfo); - c3 = new ColumnInfo(CatalogueRepository, "col3", "int", tableInfo); + [Test] + public void NoPrimaryKeys_ThrowsException() + { + SetupTableInfos(out var t, out _, out _, out _); + + try + { + var resolver = new PrimaryKeyCollisionResolver(t); + var ex = Assert.Throws(() => Console.WriteLine(resolver.GenerateSQL())); + StringAssert.Contains("does not have any primary keys defined so cannot resolve primary key collisions", + ex.Message); } + finally + { + t.DeleteInDatabase(); + } + } + + private void SetupTableInfos(out TableInfo tableInfo, out ColumnInfo c1, out ColumnInfo c2, out ColumnInfo c3) + { + tableInfo = new TableInfo(CatalogueRepository, "PrimaryKeyCollisionResolverTests"); + + c1 = new ColumnInfo(CatalogueRepository, "col1", "varchar(100)", tableInfo); + c2 = new ColumnInfo(CatalogueRepository, "col2", "float", tableInfo); + c3 = new ColumnInfo(CatalogueRepository, "col3", "int", tableInfo); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/RemoteDatabaseAttacherTests.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/RemoteDatabaseAttacherTests.cs index 9ffe317a5c..6e79de5059 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/RemoteDatabaseAttacherTests.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/RemoteDatabaseAttacherTests.cs @@ -8,7 +8,7 @@ using System.Collections.Generic; using System.Data; using FAnsi; -using Moq; +using NSubstitute; using NUnit.Framework; using Rdmp.Core.Curation.Data; using Rdmp.Core.Curation.Data.DataLoad; @@ -18,112 +18,115 @@ using Rdmp.Core.DataLoad.Engine.Job; using Rdmp.Core.DataLoad.Modules.Attachers; using Rdmp.Core.Logging; -using ReusableLibraryCode.Progress; +using Rdmp.Core.ReusableLibraryCode.Progress; using Tests.Common; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration; + +public class RemoteDatabaseAttacherTests : DatabaseTests { - public class RemoteDatabaseAttacherTests:DatabaseTests + [TestCase(DatabaseType.MicrosoftSQLServer, Scenario.AllRawColumns)] + [TestCase(DatabaseType.MySql, Scenario.AllRawColumns)] + [TestCase(DatabaseType.MicrosoftSQLServer, Scenario.AllColumns)] + [TestCase(DatabaseType.MicrosoftSQLServer, Scenario.MissingPreLoadDiscardedColumn)] + [TestCase(DatabaseType.MicrosoftSQLServer, Scenario.MissingPreLoadDiscardedColumnButSelectStar)] + public void TestRemoteDatabaseAttach(DatabaseType dbType, Scenario scenario) { - [TestCase(DatabaseType.MicrosoftSQLServer, Scenario.AllRawColumns)] - [TestCase(DatabaseType.MySql, Scenario.AllRawColumns)] - [TestCase(DatabaseType.MicrosoftSQLServer, Scenario.AllColumns)] - [TestCase(DatabaseType.MicrosoftSQLServer, Scenario.MissingPreLoadDiscardedColumn)] - [TestCase(DatabaseType.MicrosoftSQLServer, Scenario.MissingPreLoadDiscardedColumnButSelectStar)] - public void TestRemoteDatabaseAttach(DatabaseType dbType, Scenario scenario) - { - var db = GetCleanedServer(dbType); + var db = GetCleanedServer(dbType); - DataTable dt = new DataTable(); + 
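// The collision-resolver tests above swap pre-declared out variables for inline
// "out var" declarations, discard unused outputs with "out _", and configure the
// mutilation through an object initializer. A compact sketch of all three
// refactorings using hypothetical types:
public sealed class Resolver
{
    public string TargetTable { get; set; }
    public bool PreferNulls { get; set; }
}

public static class OutVarSketch
{
    public static Resolver Build(string table)
    {
        // Object initializer: construct and configure in a single expression.
        var resolver = new Resolver { TargetTable = table, PreferNulls = false };

        // "out var" declares at the call site; "out _" discards outputs the caller never reads.
        Split(table, out var head, out _);

        resolver.TargetTable = head;
        return resolver;
    }

    private static void Split(string input, out string head, out int length)
    {
        head = input;
        length = input.Length;
    }
}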
var dt = new DataTable(); - dt.Columns.Add("Fish"); - dt.Columns.Add("hic_Heroism"); + dt.Columns.Add("Fish"); + dt.Columns.Add("hic_Heroism"); - dt.Rows.Add("123", 11); + dt.Rows.Add("123", 11); - var tbl = db.CreateTable("MyTable",dt); + var tbl = db.CreateTable("MyTable", dt); - Assert.AreEqual(1, tbl.GetRowCount()); - Import(tbl, out var ti, out var cols); + Assert.AreEqual(1, tbl.GetRowCount()); + Import(tbl, out var ti, out _); - //Create a virtual RAW column - if (scenario == Scenario.MissingPreLoadDiscardedColumn || scenario == Scenario.MissingPreLoadDiscardedColumnButSelectStar) - new PreLoadDiscardedColumn(CatalogueRepository, ti, "MyMissingCol"); + //Create a virtual RAW column + if (scenario is Scenario.MissingPreLoadDiscardedColumn or Scenario.MissingPreLoadDiscardedColumnButSelectStar) + new PreLoadDiscardedColumn(CatalogueRepository, ti, "MyMissingCol"); - var externalServer = new ExternalDatabaseServer(CatalogueRepository, "MyFictionalRemote",null); - externalServer.SetProperties(db); - - var attacher = new RemoteDatabaseAttacher(); - attacher.Initialize(null,db); + var externalServer = new ExternalDatabaseServer(CatalogueRepository, "MyFictionalRemote", null); + externalServer.SetProperties(db); - attacher.LoadRawColumnsOnly = scenario == Scenario.AllRawColumns || scenario == Scenario.MissingPreLoadDiscardedColumn; - attacher.RemoteSource = externalServer; - - var lm = new LogManager(CatalogueRepository.GetDefaultFor(PermissableDefaults.LiveLoggingServer_ID)); - lm.CreateNewLoggingTaskIfNotExists("amagad"); - var dli = lm.CreateDataLoadInfo("amagad", "p", "a", "", true); + var attacher = new RemoteDatabaseAttacher(); + attacher.Initialize(null, db); - var job = Mock.Of(p => - p.RegularTablesToLoad==new List {ti} && - p.LookupTablesToLoad==new List() && p.DataLoadInfo==dli); - - switch (scenario) - { - case Scenario.AllRawColumns: - break; - case Scenario.AllColumns: - break; - case Scenario.MissingPreLoadDiscardedColumn: - var ex = Assert.Throws(() => attacher.Attach(job, new GracefulCancellationToken())); + attacher.LoadRawColumnsOnly = scenario is Scenario.AllRawColumns or Scenario.MissingPreLoadDiscardedColumn; + attacher.RemoteSource = externalServer; - Assert.AreEqual("Invalid column name 'MyMissingCol'.", (ex.InnerException.InnerException).InnerException.Message); - return; - case Scenario.MissingPreLoadDiscardedColumnButSelectStar: - break; - default: - throw new ArgumentOutOfRangeException("scenario"); - } - attacher.Attach(job, new GracefulCancellationToken()); + var lm = new LogManager(CatalogueRepository.GetDefaultFor(PermissableDefaults.LiveLoggingServer_ID)); + lm.CreateNewLoggingTaskIfNotExists("amagad"); + var dli = lm.CreateDataLoadInfo("amagad", "p", "a", "", true); - Assert.AreEqual(2,tbl.GetRowCount()); + var job = Substitute.For(); + job.RegularTablesToLoad.Returns(new List { ti }); + job.LookupTablesToLoad.Returns(new List()); + job.DataLoadInfo.Returns(dli); - dt = tbl.GetDataTable(); + switch (scenario) + { + case Scenario.AllRawColumns: + break; + case Scenario.AllColumns: + break; + case Scenario.MissingPreLoadDiscardedColumn: + var ex = Assert.Throws(() => + attacher.Attach(job, new GracefulCancellationToken())); + + Assert.AreEqual("Invalid column name 'MyMissingCol'.", + ex.InnerException.InnerException.InnerException.Message); + return; + case Scenario.MissingPreLoadDiscardedColumnButSelectStar: + break; + default: + throw new ArgumentOutOfRangeException(nameof(scenario)); + } - VerifyRowExist(dt,123,11); + attacher.Attach(job, new 
GracefulCancellationToken()); - if (scenario == Scenario.AllRawColumns) - VerifyRowExist(dt, 123, DBNull.Value); + Assert.AreEqual(2, tbl.GetRowCount()); - attacher.LoadCompletedSoDispose(ExitCodeType.Success, new ThrowImmediatelyDataLoadEventListener()); + dt = tbl.GetDataTable(); - externalServer.DeleteInDatabase(); - } - - public enum Scenario - { - /// - /// Tests the ability of the DLE to load RAW columns from a remote database by identifying tables matching - /// by name and fetching all columns which are expected to be in RAW. - /// - AllRawColumns, - - /// - /// Tests the ability of the DLE to load RAW columns from a remote database by identifying tables matching - /// by name and fetching all columns using SELECT *. - /// - AllColumns, - - /// - /// Tests the behaviour of the system when there is a RAW only column which does not appear in the remote - /// database when using the option. - /// - MissingPreLoadDiscardedColumn, - - /// - /// Tests the behaviour of the system when there is a RAW only column which does not appear in the remote - /// database but the mode fetch mode is SELECT * - /// - MissingPreLoadDiscardedColumnButSelectStar - } + VerifyRowExist(dt, 123, 11); + + if (scenario == Scenario.AllRawColumns) + VerifyRowExist(dt, 123, DBNull.Value); + + attacher.LoadCompletedSoDispose(ExitCodeType.Success, ThrowImmediatelyDataLoadEventListener.Quiet); + + externalServer.DeleteInDatabase(); + } + + public enum Scenario + { + /// + /// Tests the ability of the DLE to load RAW columns from a remote database by identifying tables matching + /// by name and fetching all columns which are expected to be in RAW. + /// + AllRawColumns, + + /// + /// Tests the ability of the DLE to load RAW columns from a remote database by identifying tables matching + /// by name and fetching all columns using SELECT *. + /// + AllColumns, + + /// + /// Tests the behaviour of the system when there is a RAW only column which does not appear in the remote + /// database when using the option. + /// + MissingPreLoadDiscardedColumn, + + /// + /// Tests the behaviour of the system when there is a RAW only column which does not appear in the remote + /// database but the mode fetch mode is SELECT * + /// + MissingPreLoadDiscardedColumnButSelectStar } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/RuntimeTaskFactoryTests.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/RuntimeTaskFactoryTests.cs index 8432848d0f..898c08d12a 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/RuntimeTaskFactoryTests.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/RuntimeTaskFactoryTests.cs @@ -5,7 +5,7 @@ // You should have received a copy of the GNU General Public License along with RDMP. If not, see . 
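// Both this file and the RemoteDatabaseAttacher test above replace Moq's
// "Mock.Of<T>(expr)" with NSubstitute's "Substitute.For<T>()" plus ".Returns(...)".
// A hedged sketch of the equivalent stubbing style against an invented interface:
using System.Collections.Generic;
using NSubstitute;

public interface IFakeLoadJob
{
    IList<string> RegularTablesToLoad { get; }
    int JobID { get; }
}

public static class SubstituteSketch
{
    public static IFakeLoadJob BuildStub()
    {
        var job = Substitute.For<IFakeLoadJob>();                         // proxy with default returns
        job.RegularTablesToLoad.Returns(new List<string> { "MyTable" }); // stub a property getter
        job.JobID.Returns(42);
        return job;
    }
}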
using System; -using Moq; +using NSubstitute; using NUnit.Framework; using Rdmp.Core.Curation; using Rdmp.Core.Curation.Data.DataLoad; @@ -13,37 +13,36 @@ using Rdmp.Core.DataLoad.Engine.LoadExecution.Components.Runtime; using Tests.Common; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration; + +public class RuntimeTaskFactoryTests : DatabaseTests { - public class RuntimeTaskFactoryTests : DatabaseTests + [Test] + [TestCase("Rdmp.Core.DataLoad.Modules.Web.WebFileDownloader")] + [TestCase("Rdmp.Core.DataLoad.Modules.DataProvider.FlatFileManipulation.ExcelToCSVFilesConverter")] + public void RuntimeTaskFactoryTest(string className) { - [Test] - [TestCase("Rdmp.Core.DataLoad.Modules.Web.WebFileDownloader")] - [TestCase("Rdmp.Core.DataLoad.Modules.DataProvider.FlatFileManipulation.ExcelToCSVFilesConverter")] - public void RuntimeTaskFactoryTest(string className) - { - - var lmd = new LoadMetadata(CatalogueRepository); - var task = new ProcessTask(CatalogueRepository, lmd,LoadStage.GetFiles); - - var f = new RuntimeTaskFactory(CatalogueRepository); + var lmd = new LoadMetadata(CatalogueRepository); + var task = new ProcessTask(CatalogueRepository, lmd, LoadStage.GetFiles); - task.Path = className; - task.ProcessTaskType = ProcessTaskType.DataProvider; - task.SaveToDatabase(); - - try - { - var ex = Assert.Throws(() => f.Create(task, new StageArgs(LoadStage.AdjustRaw, GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer), Mock.Of()))); - Assert.IsTrue(ex.InnerException.Message.Contains("marked with DemandsInitialization but no corresponding argument was provided in ArgumentCollection")); - } - finally - { - task.DeleteInDatabase(); - lmd.DeleteInDatabase(); - } + var f = new RuntimeTaskFactory(CatalogueRepository); + task.Path = className; + task.ProcessTaskType = ProcessTaskType.DataProvider; + task.SaveToDatabase(); + try + { + var ex = Assert.Throws(() => RuntimeTaskFactory.Create(task, + new StageArgs(LoadStage.AdjustRaw, GetCleanedServer(FAnsi.DatabaseType.MicrosoftSQLServer), + Substitute.For()))); + Assert.IsTrue(ex.InnerException.Message.Contains( + "marked with DemandsInitialization but no corresponding argument was provided in ArgumentCollection")); + } + finally + { + task.DeleteInDatabase(); + lmd.DeleteInDatabase(); } } } \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/SafePrimaryKeyCollisionResolverMutilationTests.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/SafePrimaryKeyCollisionResolverMutilationTests.cs index d7652a6997..2d642558ef 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/SafePrimaryKeyCollisionResolverMutilationTests.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/SafePrimaryKeyCollisionResolverMutilationTests.cs @@ -9,309 +9,350 @@ using System.Linq; using FAnsi; using NUnit.Framework; -using Rdmp.Core.Curation.Data; using Rdmp.Core.Curation.Data.DataLoad; using Rdmp.Core.DataLoad.Engine.DatabaseManagement.EntityNaming; using Rdmp.Core.DataLoad.Engine.Job; using Rdmp.Core.DataLoad.Modules.Mutilators; using Tests.Common; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration; + +public class SafePrimaryKeyCollisionResolverMutilationTests : DatabaseTests { - public class SafePrimaryKeyCollisionResolverMutilationTests:DatabaseTests + [TestCase(DatabaseType.MicrosoftSQLServer, true)] + [TestCase(DatabaseType.MySql, true)] + [TestCase(DatabaseType.MicrosoftSQLServer, false)] + [TestCase(DatabaseType.MySql, 
false)] + public void SafePrimaryKeyCollisionResolverMutilationTests_NoDifference_NoRecordsDeleted(DatabaseType dbType, + bool bothNull) { - [TestCase(DatabaseType.MicrosoftSQLServer,true)] - [TestCase(DatabaseType.MySql,true)] - [TestCase(DatabaseType.MicrosoftSQLServer, false)] - [TestCase(DatabaseType.MySql, false)] - public void SafePrimaryKeyCollisionResolverMutilationTests_NoDifference_NoRecordsDeleted(DatabaseType dbType,bool bothNull) - { - var db = GetCleanedServer(dbType); - - DataTable dt = new DataTable(); - dt.Columns.Add("PK"); - dt.Columns.Add("ResolveOn"); - dt.Columns.Add("AnotherCol"); - - dt.Rows.Add(1, bothNull?null:"fish", "cat"); - dt.Rows.Add(1, bothNull ? null : "fish", "flop"); - dt.Rows.Add(2, "fish", "flop"); - dt.Rows.Add(3, "dave", "franl"); - - var tbl = db.CreateTable("MyTable", dt); - - Import(tbl,out var ti,out var cis); - - var pk = cis.Single(c => c.GetRuntimeName().Equals("PK")); - pk.IsPrimaryKey = true; - pk.SaveToDatabase(); - - var resolveOn = cis.Single(c => c.GetRuntimeName().Equals("ResolveOn")); - - var mutilation = new SafePrimaryKeyCollisionResolverMutilation(); - mutilation.ColumnToResolveOn = resolveOn; - - mutilation.PreferLargerValues = true; - mutilation.PreferNulls = false; - - mutilation.Initialize(db, LoadStage.AdjustRaw); - mutilation.Mutilate(new ThrowImmediatelyDataLoadJob(new HICDatabaseConfiguration(db.Server))); - - Assert.AreEqual(4,tbl.GetRowCount()); - } - [TestCase(DatabaseType.MicrosoftSQLServer,false)] - [TestCase(DatabaseType.MySql,false)] - [TestCase(DatabaseType.MicrosoftSQLServer, true)] - [TestCase(DatabaseType.MySql, true)] - public void SafePrimaryKeyCollisionResolverMutilationTests_PreferNull_RecordsDeleted(DatabaseType dbType,bool preferNulls) - { - var db = GetCleanedServer(dbType); - - DataTable dt = new DataTable(); - dt.Columns.Add("PK"); - dt.Columns.Add("ResolveOn"); - dt.Columns.Add("AnotherCol"); + var db = GetCleanedServer(dbType); - dt.Rows.Add(1, null, "cat"); - dt.Rows.Add(1, "fish", "flop"); - dt.Rows.Add(2, "fish", "flop"); - dt.Rows.Add(3, "dave", "franl"); + var dt = new DataTable(); + dt.Columns.Add("PK"); + dt.Columns.Add("ResolveOn"); + dt.Columns.Add("AnotherCol"); - var tbl = db.CreateTable("MyTable", dt); + dt.Rows.Add(1, bothNull ? null : "fish", "cat"); + dt.Rows.Add(1, bothNull ? null : "fish", "flop"); + dt.Rows.Add(2, "fish", "flop"); + dt.Rows.Add(3, "dave", "franl"); - Import(tbl, out var ti, out var cis); + var tbl = db.CreateTable("MyTable", dt); - var pk = cis.Single(c => c.GetRuntimeName().Equals("PK")); - pk.IsPrimaryKey = true; - pk.SaveToDatabase(); + Import(tbl, out var ti, out var cis); - var resolveOn = cis.Single(c => c.GetRuntimeName().Equals("ResolveOn")); + var pk = cis.Single(c => c.GetRuntimeName().Equals("PK")); + pk.IsPrimaryKey = true; + pk.SaveToDatabase(); - var mutilation = new SafePrimaryKeyCollisionResolverMutilation(); - mutilation.ColumnToResolveOn = resolveOn; + var resolveOn = cis.Single(c => c.GetRuntimeName().Equals("ResolveOn")); - mutilation.PreferLargerValues = true; - mutilation.PreferNulls = preferNulls; - - mutilation.Initialize(db, LoadStage.AdjustRaw); - mutilation.Mutilate(new ThrowImmediatelyDataLoadJob(new HICDatabaseConfiguration(db.Server))); - - Assert.AreEqual(3, tbl.GetRowCount()); - var result = tbl.GetDataTable(); - - //if you prefer nulls you shouldn't want this one - Assert.AreEqual(preferNulls? 
0:1 ,result.Rows.Cast().Count(r=>(int)r["PK"] == 1 && r["ResolveOn"] as string == "fish" && r["AnotherCol"] as string == "flop" )); - - //if you prefer nulls you should have this one - Assert.AreEqual(preferNulls ? 1 : 0, result.Rows.Cast().Count(r => (int)r["PK"] == 1 && r["ResolveOn"] == DBNull.Value && r["AnotherCol"] as string == "cat")); - } - [TestCase(DatabaseType.MicrosoftSQLServer)] - [TestCase(DatabaseType.MySql)] - public void SafePrimaryKeyCollisionResolverMutilationTests_WithDatabaseNamer_RecordsDeleted(DatabaseType dbType) + var mutilation = new SafePrimaryKeyCollisionResolverMutilation { - var db = GetCleanedServer(dbType); - - DataTable dt = new DataTable(); - dt.Columns.Add("PK"); - dt.Columns.Add("ResolveOn"); - dt.Columns.Add("AnotherCol"); + ColumnToResolveOn = resolveOn, + PreferLargerValues = true, + PreferNulls = false + }; - dt.Rows.Add(1, null, "cat"); - dt.Rows.Add(1, "fish", "flop"); - dt.Rows.Add(2, "fish", "flop"); - dt.Rows.Add(3, "dave", "franl"); + mutilation.Initialize(db, LoadStage.AdjustRaw); + mutilation.Mutilate(new ThrowImmediatelyDataLoadJob(new HICDatabaseConfiguration(db.Server))); - var tbl = db.CreateTable("MyTable", dt); - - Import(tbl, out var ti, out var cis); - - tbl.Rename("AAAA"); - var namer = RdmpMockFactory.Mock_INameDatabasesAndTablesDuringLoads(db,"AAAA"); - - var pk = cis.Single(c => c.GetRuntimeName().Equals("PK")); - pk.IsPrimaryKey = true; - pk.SaveToDatabase(); - - var resolveOn = cis.Single(c => c.GetRuntimeName().Equals("ResolveOn")); + Assert.AreEqual(4, tbl.GetRowCount()); + } - var mutilation = new SafePrimaryKeyCollisionResolverMutilation(); - mutilation.ColumnToResolveOn = resolveOn; + [TestCase(DatabaseType.MicrosoftSQLServer, false)] + [TestCase(DatabaseType.MySql, false)] + [TestCase(DatabaseType.MicrosoftSQLServer, true)] + [TestCase(DatabaseType.MySql, true)] + public void SafePrimaryKeyCollisionResolverMutilationTests_PreferNull_RecordsDeleted(DatabaseType dbType, + bool preferNulls) + { + var db = GetCleanedServer(dbType); - mutilation.PreferLargerValues = true; - mutilation.PreferNulls = true; + var dt = new DataTable(); + dt.Columns.Add("PK"); + dt.Columns.Add("ResolveOn"); + dt.Columns.Add("AnotherCol"); - mutilation.Initialize(db, LoadStage.AdjustRaw); - mutilation.Mutilate(new ThrowImmediatelyDataLoadJob(new HICDatabaseConfiguration(db.Server,namer), ti)); + dt.Rows.Add(1, null, "cat"); + dt.Rows.Add(1, "fish", "flop"); + dt.Rows.Add(2, "fish", "flop"); + dt.Rows.Add(3, "dave", "franl"); - Assert.AreEqual(3, tbl.GetRowCount()); - var result = tbl.GetDataTable(); + var tbl = db.CreateTable("MyTable", dt); - //if you prefer nulls you shouldn't want this one - Assert.AreEqual( 0 , result.Rows.Cast().Count(r => (int)r["PK"] == 1 && r["ResolveOn"] as string == "fish" && r["AnotherCol"] as string == "flop")); + Import(tbl, out var ti, out var cis); - //if you prefer nulls you should have this one - Assert.AreEqual(1, result.Rows.Cast().Count(r => (int)r["PK"] == 1 && r["ResolveOn"] == DBNull.Value && r["AnotherCol"] as string == "cat")); - } + var pk = cis.Single(c => c.GetRuntimeName().Equals("PK")); + pk.IsPrimaryKey = true; + pk.SaveToDatabase(); + var resolveOn = cis.Single(c => c.GetRuntimeName().Equals("ResolveOn")); - [TestCase(DatabaseType.MicrosoftSQLServer, false)] - [TestCase(DatabaseType.MySql, false)] - [TestCase(DatabaseType.MicrosoftSQLServer, true)] - [TestCase(DatabaseType.MySql, true)] - public void SafePrimaryKeyCollisionResolverMutilationTests_PreferLarger_RecordsDeleted(DatabaseType dbType, bool 
preferLarger) + var mutilation = new SafePrimaryKeyCollisionResolverMutilation { - var db = GetCleanedServer(dbType); - - DataTable dt = new DataTable(); - dt.Columns.Add("PK"); - dt.Columns.Add("ResolveOn"); - dt.Columns.Add("AnotherCol"); + ColumnToResolveOn = resolveOn, + PreferLargerValues = true, + PreferNulls = preferNulls + }; + + mutilation.Initialize(db, LoadStage.AdjustRaw); + mutilation.Mutilate(new ThrowImmediatelyDataLoadJob(new HICDatabaseConfiguration(db.Server))); + + Assert.AreEqual(3, tbl.GetRowCount()); + var result = tbl.GetDataTable(); + + //if you prefer nulls you shouldn't want this one + Assert.AreEqual(preferNulls ? 0 : 1, + result.Rows.Cast().Count(r => + (int)r["PK"] == 1 && r["ResolveOn"] as string == "fish" && r["AnotherCol"] as string == "flop")); + + //if you prefer nulls you should have this one + Assert.AreEqual(preferNulls ? 1 : 0, + result.Rows.Cast().Count(r => + (int)r["PK"] == 1 && r["ResolveOn"] == DBNull.Value && r["AnotherCol"] as string == "cat")); + } - dt.Rows.Add(1, null, "cat"); - dt.Rows.Add(1, "a", "flop"); - dt.Rows.Add(1, "b", "flop"); - dt.Rows.Add(2, "fish", "flop"); - dt.Rows.Add(3, "dave", "franl"); + [TestCase(DatabaseType.MicrosoftSQLServer)] + [TestCase(DatabaseType.MySql)] + public void SafePrimaryKeyCollisionResolverMutilationTests_WithDatabaseNamer_RecordsDeleted(DatabaseType dbType) + { + var db = GetCleanedServer(dbType); - var tbl = db.CreateTable("MyTable", dt); + var dt = new DataTable(); + dt.Columns.Add("PK"); + dt.Columns.Add("ResolveOn"); + dt.Columns.Add("AnotherCol"); - Import(tbl, out var ti, out var cis); + dt.Rows.Add(1, null, "cat"); + dt.Rows.Add(1, "fish", "flop"); + dt.Rows.Add(2, "fish", "flop"); + dt.Rows.Add(3, "dave", "franl"); - var pk = cis.Single(c => c.GetRuntimeName().Equals("PK")); - pk.IsPrimaryKey = true; - pk.SaveToDatabase(); + var tbl = db.CreateTable("MyTable", dt); - var resolveOn = cis.Single(c => c.GetRuntimeName().Equals("ResolveOn")); + Import(tbl, out var ti, out var cis); - var mutilation = new SafePrimaryKeyCollisionResolverMutilation(); - mutilation.ColumnToResolveOn = resolveOn; + tbl.Rename("AAAA"); + var namer = RdmpMockFactory.Mock_INameDatabasesAndTablesDuringLoads(db, "AAAA"); - mutilation.PreferLargerValues = preferLarger; - mutilation.PreferNulls = false; + var pk = cis.Single(c => c.GetRuntimeName().Equals("PK")); + pk.IsPrimaryKey = true; + pk.SaveToDatabase(); - mutilation.Initialize(db, LoadStage.AdjustRaw); - mutilation.Mutilate(new ThrowImmediatelyDataLoadJob(new HICDatabaseConfiguration(db.Server))); + var resolveOn = cis.Single(c => c.GetRuntimeName().Equals("ResolveOn")); - Assert.AreEqual(3, tbl.GetRowCount()); - var result = tbl.GetDataTable(); + var mutilation = new SafePrimaryKeyCollisionResolverMutilation + { + ColumnToResolveOn = resolveOn, + PreferLargerValues = true, + PreferNulls = true + }; + + mutilation.Initialize(db, LoadStage.AdjustRaw); + mutilation.Mutilate(new ThrowImmediatelyDataLoadJob(new HICDatabaseConfiguration(db.Server, namer), ti)); + + Assert.AreEqual(3, tbl.GetRowCount()); + var result = tbl.GetDataTable(); + + //if you prefer nulls you shouldn't want this one + Assert.AreEqual(0, + result.Rows.Cast().Count(r => + (int)r["PK"] == 1 && r["ResolveOn"] as string == "fish" && r["AnotherCol"] as string == "flop")); + + //if you prefer nulls you should have this one + Assert.AreEqual(1, + result.Rows.Cast().Count(r => + (int)r["PK"] == 1 && r["ResolveOn"] == DBNull.Value && r["AnotherCol"] as string == "cat")); + } - //if you like larger values 
(alphabetically) then you want the 'b' - Assert.AreEqual(preferLarger ? 1 : 0, result.Rows.Cast().Count(r => (int)r["PK"] == 1 && r["ResolveOn"] as string == "b" && r["AnotherCol"] as string == "flop")); - Assert.AreEqual(preferLarger ? 0 : 1, result.Rows.Cast().Count(r => (int)r["PK"] == 1 && r["ResolveOn"] as string == "a" && r["AnotherCol"] as string == "flop")); - //either way you shouldn't have the null one - Assert.AreEqual(0, result.Rows.Cast().Count(r => (int)r["PK"] == 1 && r["ResolveOn"] == DBNull.Value && r["AnotherCol"] as string == "cat")); - } + [TestCase(DatabaseType.MicrosoftSQLServer, false)] + [TestCase(DatabaseType.MySql, false)] + [TestCase(DatabaseType.MicrosoftSQLServer, true)] + [TestCase(DatabaseType.MySql, true)] + public void SafePrimaryKeyCollisionResolverMutilationTests_PreferLarger_RecordsDeleted(DatabaseType dbType, + bool preferLarger) + { + var db = GetCleanedServer(dbType); + var dt = new DataTable(); + dt.Columns.Add("PK"); + dt.Columns.Add("ResolveOn"); + dt.Columns.Add("AnotherCol"); + dt.Rows.Add(1, null, "cat"); + dt.Rows.Add(1, "a", "flop"); + dt.Rows.Add(1, "b", "flop"); + dt.Rows.Add(2, "fish", "flop"); + dt.Rows.Add(3, "dave", "franl"); - [TestCase(DatabaseType.MicrosoftSQLServer, false)] - [TestCase(DatabaseType.MySql, false)] - [TestCase(DatabaseType.MicrosoftSQLServer, true)] - [TestCase(DatabaseType.MySql, true)] - public void SafePrimaryKeyCollisionResolverMutilationTests_PreferLarger_Dates_RecordsDeleted(DatabaseType dbType, bool preferLarger) - { - var db = GetCleanedServer(dbType); + var tbl = db.CreateTable("MyTable", dt); - DataTable dt = new DataTable(); - dt.Columns.Add("PK"); - dt.Columns.Add("ResolveOn"); - dt.Columns.Add("AnotherCol"); + Import(tbl, out var ti, out var cis); - dt.Rows.Add(1, null, "cat"); - dt.Rows.Add(1, new DateTime(2001,01,01), "flop"); - dt.Rows.Add(1, new DateTime(2002, 01, 01), "flop"); - dt.Rows.Add(2, null, "flop"); - dt.Rows.Add(3, null, "franl"); + var pk = cis.Single(c => c.GetRuntimeName().Equals("PK")); + pk.IsPrimaryKey = true; + pk.SaveToDatabase(); - var tbl = db.CreateTable("MyTable", dt); + var resolveOn = cis.Single(c => c.GetRuntimeName().Equals("ResolveOn")); - Import(tbl, out var ti, out var cis); + var mutilation = new SafePrimaryKeyCollisionResolverMutilation + { + ColumnToResolveOn = resolveOn, + PreferLargerValues = preferLarger, + PreferNulls = false + }; + + mutilation.Initialize(db, LoadStage.AdjustRaw); + mutilation.Mutilate(new ThrowImmediatelyDataLoadJob(new HICDatabaseConfiguration(db.Server))); + + Assert.AreEqual(3, tbl.GetRowCount()); + var result = tbl.GetDataTable(); + + //if you like larger values (alphabetically) then you want the 'b' + Assert.AreEqual(preferLarger ? 1 : 0, + result.Rows.Cast().Count(r => + (int)r["PK"] == 1 && r["ResolveOn"] as string == "b" && r["AnotherCol"] as string == "flop")); + Assert.AreEqual(preferLarger ? 
0 : 1, + result.Rows.Cast().Count(r => + (int)r["PK"] == 1 && r["ResolveOn"] as string == "a" && r["AnotherCol"] as string == "flop")); + + //either way you shouldn't have the null one + Assert.AreEqual(0, + result.Rows.Cast().Count(r => + (int)r["PK"] == 1 && r["ResolveOn"] == DBNull.Value && r["AnotherCol"] as string == "cat")); + } - var pk = cis.Single(c => c.GetRuntimeName().Equals("PK")); - pk.IsPrimaryKey = true; - pk.SaveToDatabase(); - var resolveOn = cis.Single(c => c.GetRuntimeName().Equals("ResolveOn")); + [TestCase(DatabaseType.MicrosoftSQLServer, false)] + [TestCase(DatabaseType.MySql, false)] + [TestCase(DatabaseType.MicrosoftSQLServer, true)] + [TestCase(DatabaseType.MySql, true)] + public void SafePrimaryKeyCollisionResolverMutilationTests_PreferLarger_Dates_RecordsDeleted(DatabaseType dbType, + bool preferLarger) + { + var db = GetCleanedServer(dbType); - var mutilation = new SafePrimaryKeyCollisionResolverMutilation(); - mutilation.ColumnToResolveOn = resolveOn; + var dt = new DataTable(); + dt.Columns.Add("PK"); + dt.Columns.Add("ResolveOn"); + dt.Columns.Add("AnotherCol"); - mutilation.PreferLargerValues = preferLarger; - mutilation.PreferNulls = false; + dt.Rows.Add(1, null, "cat"); + dt.Rows.Add(1, new DateTime(2001, 01, 01), "flop"); + dt.Rows.Add(1, new DateTime(2002, 01, 01), "flop"); + dt.Rows.Add(2, null, "flop"); + dt.Rows.Add(3, null, "franl"); - mutilation.Initialize(db, LoadStage.AdjustRaw); - mutilation.Mutilate(new ThrowImmediatelyDataLoadJob(new HICDatabaseConfiguration(db.Server))); + var tbl = db.CreateTable("MyTable", dt); - Assert.AreEqual(3, tbl.GetRowCount()); - var result = tbl.GetDataTable(); + Import(tbl, out var ti, out var cis); - //if you like larger values then you want 2002 thats larger than 2001 - Assert.AreEqual(preferLarger ? 1 : 0, result.Rows.Cast().Count(r => (int)r["PK"] == 1 && Equals(r["ResolveOn"], new DateTime(2002,01,01)) && r["AnotherCol"] as string == "flop")); - Assert.AreEqual(preferLarger ? 
0 : 1, result.Rows.Cast().Count(r => (int)r["PK"] == 1 && Equals(r["ResolveOn"], new DateTime(2001,01,01)) && r["AnotherCol"] as string == "flop")); + var pk = cis.Single(c => c.GetRuntimeName().Equals("PK")); + pk.IsPrimaryKey = true; + pk.SaveToDatabase(); - //either way you shouldn't have the null one - Assert.AreEqual(0, result.Rows.Cast().Count(r => (int)r["PK"] == 1 && r["ResolveOn"] == DBNull.Value && r["AnotherCol"] as string == "cat")); - } + var resolveOn = cis.Single(c => c.GetRuntimeName().Equals("ResolveOn")); - [TestCase(DatabaseType.MicrosoftSQLServer, false)] - [TestCase(DatabaseType.MySql, false)] - [TestCase(DatabaseType.MicrosoftSQLServer, true)] - [TestCase(DatabaseType.MySql, true)] - public void SafePrimaryKeyCollisionResolverMutilationTests_PreferLarger_ComboKey_RecordsDeleted(DatabaseType dbType, bool preferLarger) + var mutilation = new SafePrimaryKeyCollisionResolverMutilation { - var db = GetCleanedServer(dbType); - - DataTable dt = new DataTable(); - dt.Columns.Add("PK1"); - dt.Columns.Add("PK2"); - dt.Columns.Add("ResolveOn"); - dt.Columns.Add("AnotherCol"); - - dt.Rows.Add(1,1, null, "cat"); - dt.Rows.Add(1,1, new DateTime(2001, 01, 01), "flop"); - dt.Rows.Add(1,1, new DateTime(2002, 01, 01), "flop"); - - dt.Rows.Add(1, 2, null, "cat"); - dt.Rows.Add(1, 2, null, "cat"); - dt.Rows.Add(1, 3, new DateTime(2001, 01, 01), "flop"); - dt.Rows.Add(1, 4, new DateTime(2002, 01, 01), "flop"); - - dt.Rows.Add(2,1, null, "flop"); - dt.Rows.Add(3,1, null, "franl"); + ColumnToResolveOn = resolveOn, + PreferLargerValues = preferLarger, + PreferNulls = false + }; + + mutilation.Initialize(db, LoadStage.AdjustRaw); + mutilation.Mutilate(new ThrowImmediatelyDataLoadJob(new HICDatabaseConfiguration(db.Server))); + + Assert.AreEqual(3, tbl.GetRowCount()); + var result = tbl.GetDataTable(); + + //if you like larger values then you want 2002 thats larger than 2001 + Assert.AreEqual(preferLarger ? 1 : 0, + result.Rows.Cast().Count(r => + (int)r["PK"] == 1 && Equals(r["ResolveOn"], new DateTime(2002, 01, 01)) && + r["AnotherCol"] as string == "flop")); + Assert.AreEqual(preferLarger ? 
0 : 1, + result.Rows.Cast().Count(r => + (int)r["PK"] == 1 && Equals(r["ResolveOn"], new DateTime(2001, 01, 01)) && + r["AnotherCol"] as string == "flop")); + + //either way you shouldn't have the null one + Assert.AreEqual(0, + result.Rows.Cast().Count(r => + (int)r["PK"] == 1 && r["ResolveOn"] == DBNull.Value && r["AnotherCol"] as string == "cat")); + } - var tbl = db.CreateTable("MyTable", dt); + [TestCase(DatabaseType.MicrosoftSQLServer, false)] + [TestCase(DatabaseType.MySql, false)] + [TestCase(DatabaseType.MicrosoftSQLServer, true)] + [TestCase(DatabaseType.MySql, true)] + public void SafePrimaryKeyCollisionResolverMutilationTests_PreferLarger_ComboKey_RecordsDeleted(DatabaseType dbType, + bool preferLarger) + { + var db = GetCleanedServer(dbType); - Import(tbl, out var ti, out var cis); + var dt = new DataTable(); + dt.Columns.Add("PK1"); + dt.Columns.Add("PK2"); + dt.Columns.Add("ResolveOn"); + dt.Columns.Add("AnotherCol"); - var pk = cis.Single(c => c.GetRuntimeName().Equals("PK1")); - pk.IsPrimaryKey = true; - pk.SaveToDatabase(); + dt.Rows.Add(1, 1, null, "cat"); + dt.Rows.Add(1, 1, new DateTime(2001, 01, 01), "flop"); + dt.Rows.Add(1, 1, new DateTime(2002, 01, 01), "flop"); - var pk2 = cis.Single(c => c.GetRuntimeName().Equals("PK2")); - pk2.IsPrimaryKey = true; - pk2.SaveToDatabase(); + dt.Rows.Add(1, 2, null, "cat"); + dt.Rows.Add(1, 2, null, "cat"); + dt.Rows.Add(1, 3, new DateTime(2001, 01, 01), "flop"); + dt.Rows.Add(1, 4, new DateTime(2002, 01, 01), "flop"); - var resolveOn = cis.Single(c => c.GetRuntimeName().Equals("ResolveOn")); + dt.Rows.Add(2, 1, null, "flop"); + dt.Rows.Add(3, 1, null, "franl"); - var mutilation = new SafePrimaryKeyCollisionResolverMutilation(); - mutilation.ColumnToResolveOn = resolveOn; + var tbl = db.CreateTable("MyTable", dt); - mutilation.PreferLargerValues = preferLarger; - mutilation.PreferNulls = false; + Import(tbl, out var ti, out var cis); - mutilation.Initialize(db, LoadStage.AdjustRaw); - mutilation.Mutilate(new ThrowImmediatelyDataLoadJob(new HICDatabaseConfiguration(db.Server))); + var pk = cis.Single(c => c.GetRuntimeName().Equals("PK1")); + pk.IsPrimaryKey = true; + pk.SaveToDatabase(); - Assert.AreEqual(7, tbl.GetRowCount()); - var result = tbl.GetDataTable(); + var pk2 = cis.Single(c => c.GetRuntimeName().Equals("PK2")); + pk2.IsPrimaryKey = true; + pk2.SaveToDatabase(); - //if you like larger values then you want 2002 thats larger than 2001 - Assert.AreEqual(preferLarger ? 1 : 0, result.Rows.Cast().Count(r => (int)r["PK1"] == 1 && (int)r["PK2"] == 1 && Equals(r["ResolveOn"], new DateTime(2002, 01, 01)) && r["AnotherCol"] as string == "flop")); - Assert.AreEqual(preferLarger ? 
0 : 1, result.Rows.Cast().Count(r => (int)r["PK1"] == 1 && (int)r["PK2"] == 1 && Equals(r["ResolveOn"], new DateTime(2001, 01, 01)) && r["AnotherCol"] as string == "flop")); + var resolveOn = cis.Single(c => c.GetRuntimeName().Equals("ResolveOn")); - //either way you shouldn't have the null one - Assert.AreEqual(0, result.Rows.Cast().Count(r => (int)r["PK1"] == 1 && (int)r["PK2"] == 1 && r["ResolveOn"] == DBNull.Value && r["AnotherCol"] as string == "cat")); - } + var mutilation = new SafePrimaryKeyCollisionResolverMutilation + { + ColumnToResolveOn = resolveOn, + PreferLargerValues = preferLarger, + PreferNulls = false + }; + + mutilation.Initialize(db, LoadStage.AdjustRaw); + mutilation.Mutilate(new ThrowImmediatelyDataLoadJob(new HICDatabaseConfiguration(db.Server))); + + Assert.AreEqual(7, tbl.GetRowCount()); + var result = tbl.GetDataTable(); + + //if you like larger values then you want 2002 thats larger than 2001 + Assert.AreEqual(preferLarger ? 1 : 0, + result.Rows.Cast().Count(r => + (int)r["PK1"] == 1 && (int)r["PK2"] == 1 && Equals(r["ResolveOn"], new DateTime(2002, 01, 01)) && + r["AnotherCol"] as string == "flop")); + Assert.AreEqual(preferLarger ? 0 : 1, + result.Rows.Cast().Count(r => + (int)r["PK1"] == 1 && (int)r["PK2"] == 1 && Equals(r["ResolveOn"], new DateTime(2001, 01, 01)) && + r["AnotherCol"] as string == "flop")); + + //either way you shouldn't have the null one + Assert.AreEqual(0, + result.Rows.Cast().Count(r => + (int)r["PK1"] == 1 && (int)r["PK2"] == 1 && r["ResolveOn"] == DBNull.Value && + r["AnotherCol"] as string == "cat")); } } \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/SingleJobPipelineTests.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/SingleJobPipelineTests.cs index 82d5d1d2f6..50852631ec 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/SingleJobPipelineTests.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/SingleJobPipelineTests.cs @@ -5,41 +5,37 @@ // You should have received a copy of the GNU General Public License along with RDMP. If not, see . 
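Taken together, the reformatted collision-resolver tests above all exercise the same small API surface. The following condensed sketch uses the names from the diff (db, resolveOn, preferLarger); the test-fixture helpers and exact generic arguments are assumed from the RDMP test base classes, so treat it as an illustration rather than compilable test code:

    // Sketch only: resolve duplicate primary keys in a RAW table before migration.
    var mutilation = new SafePrimaryKeyCollisionResolverMutilation
    {
        ColumnToResolveOn = resolveOn,      // non-PK column whose value decides which duplicate row survives
        PreferLargerValues = preferLarger,  // true keeps the larger (alphabetically/chronologically later) value
        PreferNulls = false                 // with false, a null in ResolveOn always loses to a non-null value
    };

    mutilation.Initialize(db, LoadStage.AdjustRaw);
    mutilation.Mutilate(new ThrowImmediatelyDataLoadJob(new HICDatabaseConfiguration(db.Server)));

    // After Mutilate the collisions on ResolveOn are resolved; the assertions above check that the
    // surviving row is the larger or smaller variant as requested, and never the null ("cat") row.
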
using System.Collections.Generic; -using Moq; +using NSubstitute; using Rdmp.Core.DataFlowPipeline; using Rdmp.Core.DataLoad; using Rdmp.Core.DataLoad.Engine.Job; using Rdmp.Core.DataLoad.Engine.LoadExecution; using Rdmp.Core.DataLoad.Engine.LoadExecution.Components; -using ReusableLibraryCode.Progress; +using Rdmp.Core.ReusableLibraryCode.Progress; using Tests.Common; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration; + +public class SingleJobPipelineTests : DatabaseTests { - public class SingleJobPipelineTests : DatabaseTests + public static void LoadNotRequiredStopsPipelineGracefully() { - public void LoadNotRequiredStopsPipelineGracefully() - { - var component = new NotRequiredComponent(); + var component = new NotRequiredComponent(); - var pipeline = new SingleJobExecution(new List {component}); + var pipeline = new SingleJobExecution(new List { component }); - var job = Mock.Of(); - var jobTokenSource = new GracefulCancellationTokenSource(); - pipeline.Run(job, jobTokenSource.Token); - } + var job = Substitute.For(); + var jobTokenSource = new GracefulCancellationTokenSource(); + pipeline.Run(job, jobTokenSource.Token); } +} - internal class NotRequiredComponent : DataLoadComponent - { - public override ExitCodeType Run(IDataLoadJob job, GracefulCancellationToken cancellationToken) - { - return ExitCodeType.OperationNotRequired; - } +internal class NotRequiredComponent : DataLoadComponent +{ + public override ExitCodeType Run(IDataLoadJob job, GracefulCancellationToken cancellationToken) => + ExitCodeType.OperationNotRequired; - public override void LoadCompletedSoDispose(ExitCodeType exitCode, IDataLoadEventListener postDataLoadEventListener) - { - } + public override void LoadCompletedSoDispose(ExitCodeType exitCode, IDataLoadEventListener postDataLoadEventListener) + { } - -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/TableInfoJoiningQueryBuilderTests.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/TableInfoJoiningQueryBuilderTests.cs index 9475ea7857..04c0a2d295 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/TableInfoJoiningQueryBuilderTests.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/TableInfoJoiningQueryBuilderTests.cs @@ -5,98 +5,99 @@ // You should have received a copy of the GNU General Public License along with RDMP. If not, see . 
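The SingleJobPipelineTests change above is typical of this release's Moq-to-NSubstitute migration: where a bare mock with default behaviour is all the test needs, Mock.Of becomes Substitute.For. A minimal sketch of the mapping (the IDataLoadJob type argument is assumed, since generic arguments are not visible in the flattened diff):

    using NSubstitute;

    // Moq (removed):        var job = Mock.Of<IDataLoadJob>();
    // NSubstitute (added):  a fresh substitute; every member returns a default value until configured.
    var job = Substitute.For<IDataLoadJob>();

    var jobTokenSource = new GracefulCancellationTokenSource();
    pipeline.Run(job, jobTokenSource.Token);
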
using System; -using MapsDirectlyToDatabaseTable; using NUnit.Framework; using Rdmp.Core.Curation.Data; using Rdmp.Core.Curation.Data.Spontaneous; +using Rdmp.Core.MapsDirectlyToDatabaseTable; using Rdmp.Core.QueryBuilding; using Rdmp.Core.Repositories; using Tests.Common; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration; + +public class TableInfoJoiningQueryBuilderTests : DatabaseTests { - public class TableInfoJoiningQueryBuilderTests:DatabaseTests + [Test] + public void OpportunisticJoinRequired() { - [Test] - public void OpportunisticJoinRequired() + var memory = new MemoryRepository(); + + //tables and columns + var head = new TableInfo(CatalogueRepository, "Head"); + var col1 = new ColumnInfo(CatalogueRepository, "TestResultSetNumber", "int", head); + var col2 = new ColumnInfo(CatalogueRepository, "PK", "int", head); + + var result = new TableInfo(CatalogueRepository, "[biochemistry]..[Result]"); + var col3 = new ColumnInfo(CatalogueRepository, "FK", "int", result); + var col4 = new ColumnInfo(CatalogueRepository, "Code", "varchar(10)", result); + var col5 = new ColumnInfo(CatalogueRepository, "[biochemistry]..[Result].[OmgBob]", "varchar(10)", result); + + //we can join on col2 = col3 + new JoinInfo(CatalogueRepository, col3, col2, ExtractionJoinType.Right, ""); + + //CASE 1 : Only 1 column used so no join needed + var queryBuilder = new QueryBuilder(null, null); + var icol1 = new ColumnInfoToIColumn(memory, col1) { - var memory = new MemoryRepository(); - - //tables and columns - TableInfo head = new TableInfo(CatalogueRepository,"Head"); - ColumnInfo col1 = new ColumnInfo(CatalogueRepository,"TestResultSetNumber","int",head); - ColumnInfo col2 = new ColumnInfo(CatalogueRepository, "PK", "int", head); - - TableInfo result = new TableInfo(CatalogueRepository, "[biochemistry]..[Result]"); - ColumnInfo col3 = new ColumnInfo(CatalogueRepository, "FK", "int", result); - ColumnInfo col4 = new ColumnInfo(CatalogueRepository, "Code", "varchar(10)", result); - ColumnInfo col5 = new ColumnInfo(CatalogueRepository, "[biochemistry]..[Result].[OmgBob]", "varchar(10)", result); - - //we can join on col2 = col3 - new JoinInfo(CatalogueRepository,col3, col2, ExtractionJoinType.Right, ""); - - //CASE 1 : Only 1 column used so no join needed - var queryBuilder = new QueryBuilder(null, null); - var icol1 = new ColumnInfoToIColumn(memory,col1); - icol1.Order = 1; - queryBuilder.AddColumn(icol1); - - var tablesUsed = SqlQueryBuilderHelper.GetTablesUsedInQuery(queryBuilder, out var primary,null); - - Assert.AreEqual(1,tablesUsed.Count); - Assert.AreEqual(head,tablesUsed[0]); - - //CASE 2 : 2 columns used one from each table so join is needed - queryBuilder = new QueryBuilder(null, null); - queryBuilder.AddColumn(new ColumnInfoToIColumn(memory,col1)); - - var icol4 = new ColumnInfoToIColumn(memory,col4); - icol4.Order = 2; - queryBuilder.AddColumn(icol4); - - tablesUsed = SqlQueryBuilderHelper.GetTablesUsedInQuery(queryBuilder, out primary, null); - - Assert.AreEqual(2, tablesUsed.Count); - Assert.AreEqual(head, tablesUsed[0]); - Assert.AreEqual(result, tablesUsed[1]); - - Assert.AreEqual(CollapseWhitespace(@"SELECT + Order = 1 + }; + queryBuilder.AddColumn(icol1); + + var tablesUsed = SqlQueryBuilderHelper.GetTablesUsedInQuery(queryBuilder, out _, null); + + Assert.AreEqual(1, tablesUsed.Count); + Assert.AreEqual(head, tablesUsed[0]); + + //CASE 2 : 2 columns used one from each table so join is needed + queryBuilder = new QueryBuilder(null, 
null); + queryBuilder.AddColumn(new ColumnInfoToIColumn(memory, col1)); + + var icol4 = new ColumnInfoToIColumn(memory, col4) + { + Order = 2 + }; + queryBuilder.AddColumn(icol4); + + tablesUsed = SqlQueryBuilderHelper.GetTablesUsedInQuery(queryBuilder, out _, null); + + Assert.AreEqual(2, tablesUsed.Count); + Assert.AreEqual(head, tablesUsed[0]); + Assert.AreEqual(result, tablesUsed[1]); + + Assert.AreEqual(CollapseWhitespace(@"SELECT TestResultSetNumber, Code FROM -[biochemistry]..[Result] Right JOIN Head ON FK = PK"),CollapseWhitespace(queryBuilder.SQL)); - - var memoryRepository = new MemoryCatalogueRepository(); - - var spontContainer = new SpontaneouslyInventedFilterContainer(memoryRepository,null, null, FilterContainerOperation.AND); +[biochemistry]..[Result] Right JOIN Head ON FK = PK"), CollapseWhitespace(queryBuilder.SQL)); - var spontFilter = new SpontaneouslyInventedFilter(memoryRepository,spontContainer, "[biochemistry]..[Result].[OmgBob] = 'T'", - "My Filter", "Causes spontaneous requirement for joining compeltely", null); - spontContainer.AddChild(spontFilter); + var memoryRepository = new MemoryCatalogueRepository(); + var spontContainer = + new SpontaneouslyInventedFilterContainer(memoryRepository, null, null, FilterContainerOperation.AND); - //CASE 3 : Only 1 column from Head but filter contains a reference to Result column - queryBuilder = new QueryBuilder(null, null); - queryBuilder.AddColumn(new ColumnInfoToIColumn(memory,col1)); + var spontFilter = new SpontaneouslyInventedFilter(memoryRepository, spontContainer, + "[biochemistry]..[Result].[OmgBob] = 'T'", + "My Filter", "Causes spontaneous requirement for joining compeltely", null); + spontContainer.AddChild(spontFilter); - //without the filter - tablesUsed = SqlQueryBuilderHelper.GetTablesUsedInQuery(queryBuilder, out primary, null); - Assert.AreEqual(1, tablesUsed.Count); - - //set the filter - queryBuilder.RootFilterContainer = spontContainer; - //this is super sneaky but makes the queryBuilder populate its Filters property... basically your not supposed to use SqlQueryBuilderHelper for this kind of thing - Console.WriteLine(queryBuilder.SQL); - queryBuilder.ParameterManager.ClearNonGlobals(); + //CASE 3 : Only 1 column from Head but filter contains a reference to Result column + queryBuilder = new QueryBuilder(null, null); + queryBuilder.AddColumn(new ColumnInfoToIColumn(memory, col1)); - //with the filter - tablesUsed = SqlQueryBuilderHelper.GetTablesUsedInQuery(queryBuilder, out primary,null); - Assert.AreEqual(2, tablesUsed.Count); + //without the filter + tablesUsed = SqlQueryBuilderHelper.GetTablesUsedInQuery(queryBuilder, out _, null); + Assert.AreEqual(1, tablesUsed.Count); - + //set the filter + queryBuilder.RootFilterContainer = spontContainer; - } + //this is super sneaky but makes the queryBuilder populate its Filters property... 
basically your not supposed to use SqlQueryBuilderHelper for this kind of thing + Console.WriteLine(queryBuilder.SQL); + queryBuilder.ParameterManager.ClearNonGlobals(); + //with the filter + tablesUsed = SqlQueryBuilderHelper.GetTablesUsedInQuery(queryBuilder, out _, null); + Assert.AreEqual(2, tablesUsed.Count); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/TableVarcharMaxerTests.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/TableVarcharMaxerTests.cs index db528e6d65..f19750fb3f 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/TableVarcharMaxerTests.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/TableVarcharMaxerTests.cs @@ -9,111 +9,118 @@ using System.Text.RegularExpressions; using FAnsi; using FAnsi.Discovery; -using Moq; +using NSubstitute; using NUnit.Framework; using Rdmp.Core.Curation.Data; using Rdmp.Core.Curation.Data.DataLoad; using Rdmp.Core.DataLoad.Engine.DatabaseManagement.EntityNaming; using Rdmp.Core.DataLoad.Engine.Job; using Rdmp.Core.DataLoad.Modules.Mutilators; -using ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Checks; using Tests.Common; using TypeGuesser; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration; + +public class TableVarcharMaxerTests : DatabaseTests { - public class TableVarcharMaxerTests : DatabaseTests + [TestCase(DatabaseType.MySql, true)] + [TestCase(DatabaseType.MySql, false)] + [TestCase(DatabaseType.MicrosoftSQLServer, true)] + [TestCase(DatabaseType.MicrosoftSQLServer, false)] + public void TestTableVarcharMaxer(DatabaseType dbType, bool allDataTypes) { - [TestCase(DatabaseType.MySql,true)] - [TestCase(DatabaseType.MySql, false)] - [TestCase(DatabaseType.MicrosoftSQLServer,true)] - [TestCase(DatabaseType.MicrosoftSQLServer,false)] - public void TestTableVarcharMaxer(DatabaseType dbType,bool allDataTypes) + var db = GetCleanedServer(dbType); + + var tbl = db.CreateTable("Fish", new[] + { + new DatabaseColumnRequest("Dave", new DatabaseTypeRequest(typeof(string), 100)), + new DatabaseColumnRequest("Frank", new DatabaseTypeRequest(typeof(int))) + }); + + Import(tbl, out var ti, out var cols); + + var maxer = new TableVarcharMaxer + { + AllDataTypes = allDataTypes, + TableRegexPattern = new Regex(".*"), + DestinationType = db.Server.GetQuerySyntaxHelper().TypeTranslater + .GetSQLDBTypeForCSharpType(new DatabaseTypeRequest(typeof(string), int.MaxValue)) + }; + + maxer.Initialize(db, LoadStage.AdjustRaw); + maxer.Check(ThrowImmediatelyCheckNotifier.QuietPicky); + + var job = Substitute.For(); + job.RegularTablesToLoad.Returns(new List { ti }); + job.Configuration.Returns(new HICDatabaseConfiguration(db.Server, null, null, null)); + + maxer.Mutilate(job); + + switch (dbType) { - var db = GetCleanedServer(dbType); - - var tbl = db.CreateTable("Fish",new[] - { - new DatabaseColumnRequest("Dave",new DatabaseTypeRequest(typeof(string),100)), - new DatabaseColumnRequest("Frank",new DatabaseTypeRequest(typeof(int))) - }); - - Import(tbl, out var ti, out var cols); - - var maxer = new TableVarcharMaxer(); - maxer.AllDataTypes = allDataTypes; - maxer.TableRegexPattern = new Regex(".*"); - maxer.DestinationType = db.Server.GetQuerySyntaxHelper().TypeTranslater.GetSQLDBTypeForCSharpType(new DatabaseTypeRequest(typeof(string),int.MaxValue)); - - maxer.Initialize(db,LoadStage.AdjustRaw); - maxer.Check(new ThrowImmediatelyCheckNotifier(){ThrowOnWarning = true}); - - var job = Mock.Of(x => - x.RegularTablesToLoad==new 
List(){ti} && - x.Configuration==new HICDatabaseConfiguration(db.Server,null,null,null)); - - maxer.Mutilate(job); - - switch (dbType) - { - case DatabaseType.MicrosoftSQLServer: - Assert.AreEqual("varchar(max)",tbl.DiscoverColumn("Dave").DataType.SQLType); - Assert.AreEqual(allDataTypes ? "varchar(max)" : "int", tbl.DiscoverColumn("Frank").DataType.SQLType); - break; - case DatabaseType.MySql: - Assert.AreEqual("text",tbl.DiscoverColumn("Dave").DataType.SQLType); - Assert.AreEqual(allDataTypes ? "text" : "int", tbl.DiscoverColumn("Frank").DataType.SQLType); - break; - case DatabaseType.Oracle: - Assert.AreEqual("varchar(max)",tbl.DiscoverColumn("Dave").DataType.SQLType); + case DatabaseType.MicrosoftSQLServer: + Assert.AreEqual("varchar(max)", tbl.DiscoverColumn("Dave").DataType.SQLType); + Assert.AreEqual(allDataTypes ? "varchar(max)" : "int", tbl.DiscoverColumn("Frank").DataType.SQLType); + break; + case DatabaseType.MySql: + Assert.AreEqual("longtext", tbl.DiscoverColumn("Dave").DataType.SQLType); + Assert.AreEqual(allDataTypes ? "longtext" : "int", tbl.DiscoverColumn("Frank").DataType.SQLType); + break; + case DatabaseType.Oracle: + Assert.AreEqual("varchar(max)", tbl.DiscoverColumn("Dave").DataType.SQLType); Assert.AreEqual(allDataTypes ? "varchar(max)" : "int", tbl.DiscoverColumn("Frank").DataType.SQLType); - break; - default: - throw new ArgumentOutOfRangeException("dbType"); - } + break; + default: + throw new ArgumentOutOfRangeException(nameof(dbType)); } + } + + [TestCase(DatabaseType.MySql)] + [TestCase(DatabaseType.MicrosoftSQLServer)] + public void VarcharMaxer_BadTableNames(DatabaseType dbType) + { + var db = GetCleanedServer(dbType); + + var tbl = db.CreateTable("Fi ; '`sh", new[] + { + new DatabaseColumnRequest("Da' ,,;ve", new DatabaseTypeRequest(typeof(string), 100)), + new DatabaseColumnRequest("Frrrrr ##' ank", new DatabaseTypeRequest(typeof(int))) + }); + + Import(tbl, out var ti, out var cols); + + var maxer = new TableVarcharMaxer + { + TableRegexPattern = new Regex(".*"), + DestinationType = db.Server.GetQuerySyntaxHelper().TypeTranslater + .GetSQLDBTypeForCSharpType(new DatabaseTypeRequest(typeof(string), int.MaxValue)) + }; + + maxer.Initialize(db, LoadStage.AdjustRaw); + maxer.Check(ThrowImmediatelyCheckNotifier.QuietPicky); + + var job = new ThrowImmediatelyDataLoadJob + { + RegularTablesToLoad = new List { ti }, + Configuration = new HICDatabaseConfiguration(db.Server, null, null, null) + }; + + maxer.Mutilate(job); - [TestCase(DatabaseType.MySql)] - [TestCase(DatabaseType.MicrosoftSQLServer)] - public void VarcharMaxer_BadTableNames(DatabaseType dbType) + switch (dbType) { - var db = GetCleanedServer(dbType); - - var tbl = db.CreateTable("Fi ; '`sh",new[] - { - new DatabaseColumnRequest("Da' ,,;ve",new DatabaseTypeRequest(typeof(string),100)), - new DatabaseColumnRequest("Frrrrr ##' ank",new DatabaseTypeRequest(typeof(int))) - }); - - Import(tbl, out var ti, out var cols); - - var maxer = new TableVarcharMaxer(); - maxer.TableRegexPattern = new Regex(".*"); - maxer.DestinationType = db.Server.GetQuerySyntaxHelper().TypeTranslater.GetSQLDBTypeForCSharpType(new DatabaseTypeRequest(typeof(string),int.MaxValue)); - - maxer.Initialize(db,LoadStage.AdjustRaw); - maxer.Check(new ThrowImmediatelyCheckNotifier(){ThrowOnWarning = true}); - - var job = new ThrowImmediatelyDataLoadJob(); - job.RegularTablesToLoad = new List(){ti}; - job.Configuration = new HICDatabaseConfiguration(db.Server,null,null,null); - - maxer.Mutilate(job); - - switch (dbType) - { - case 
DatabaseType.MicrosoftSQLServer: - Assert.AreEqual("varchar(max)",tbl.DiscoverColumn("Da' ,,;ve").DataType.SQLType); - break; - case DatabaseType.MySql: - Assert.AreEqual("text",tbl.DiscoverColumn("Da' ,,;ve").DataType.SQLType); - break; - case DatabaseType.Oracle: - Assert.AreEqual("varchar(max)",tbl.DiscoverColumn("Da' ,,;ve").DataType.SQLType); - break; - default: - throw new ArgumentOutOfRangeException("dbType"); - } + case DatabaseType.MicrosoftSQLServer: + Assert.AreEqual("varchar(max)", tbl.DiscoverColumn("Da' ,,;ve").DataType.SQLType); + break; + case DatabaseType.MySql: + Assert.AreEqual("longtext", tbl.DiscoverColumn("Da' ,,;ve").DataType.SQLType); + break; + case DatabaseType.Oracle: + Assert.AreEqual("varchar(max)", tbl.DiscoverColumn("Da' ,,;ve").DataType.SQLType); + break; + default: + throw new ArgumentOutOfRangeException(nameof(dbType)); } } } \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/TestTemporalTables.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/TestTemporalTables.cs index 3479b6bdc0..b67dacb672 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/TestTemporalTables.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/TestTemporalTables.cs @@ -7,7 +7,6 @@ using System.Data; using System.IO; using System.Linq; -using System.Text.RegularExpressions; using NUnit.Framework; using Rdmp.Core.Curation.Data; using Rdmp.Core.Curation.Data.DataLoad; @@ -20,15 +19,14 @@ using Rdmp.Core.DataLoad.Engine.LoadExecution; using Rdmp.Core.DataLoad.Engine.LoadProcess; using Rdmp.Core.Logging; -using ReusableLibraryCode.Checks; -using ReusableLibraryCode.Progress; +using Rdmp.Core.ReusableLibraryCode.Checks; +using Rdmp.Core.ReusableLibraryCode.Progress; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration -{ - class TestTemporalTables : DataLoadEngineTestsBase - { +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration; - string sql = @"CREATE TABLE dbo.Employee +internal class TestTemporalTables : DataLoadEngineTestsBase +{ + private string sql = @"CREATE TABLE dbo.Employee ( [EmployeeID] int NOT NULL PRIMARY KEY CLUSTERED , [Name] nvarchar(100) NOT NULL @@ -45,103 +43,106 @@ [EmployeeID] int NOT NULL PRIMARY KEY CLUSTERED INSERT INTO Employee(EmployeeID,Name,Position,Department,Address,AnnualSalary) VALUES(1,'Frank','Security Guard','Arkham', '22 Innsmouth Way', 55000.5) "; - [TestCase(true)] - [TestCase(false)] - public void TestTemporalTable(bool ignoreWithGlobalPattern) + [TestCase(true)] + [TestCase(false)] + public void TestTemporalTable(bool ignoreWithGlobalPattern) + { + var dbtype = FAnsi.DatabaseType.MicrosoftSQLServer; + var db = GetCleanedServer(dbtype); + + using (var con = db.Server.GetConnection()) { - var dbtype = FAnsi.DatabaseType.MicrosoftSQLServer; - var db = GetCleanedServer(dbtype); + con.Open(); + db.Server.GetCommand(sql, con).ExecuteNonQuery(); + } - using(var con = db.Server.GetConnection()) - { - con.Open(); - db.Server.GetCommand(sql,con).ExecuteNonQuery(); - } - - var tbl = db.ExpectTable("Employee"); - - var defaults = CatalogueRepository; - var logServer = defaults.GetDefaultFor(PermissableDefaults.LiveLoggingServer_ID); - var logManager = new LogManager(logServer); - - var raw = db.Server.ExpectDatabase(db.GetRuntimeName() + "_RAW"); - if(raw.Exists()) - raw.Drop(); - - //define a new load configuration - var lmd = new LoadMetadata(CatalogueRepository, "MyLoad"); - lmd.IgnoreTrigger = true; - lmd.SaveToDatabase(); - - ITableInfo ti = Import(tbl, lmd,logManager); - - var projectDirectory = 
SetupLoadDirectory(lmd); - - CreateCSVProcessTask(lmd,ti,"*.csv"); - - //create a text file to load where we update Frank's favourite colour (it's a pk field) and we insert a new record (MrMurder) - File.WriteAllText( - Path.Combine(projectDirectory.ForLoading.FullName, "LoadMe.csv"), -@"EmployeeID,Name,Position,Department,Address,AnnualSalary + var tbl = db.ExpectTable("Employee"); + + var defaults = CatalogueRepository; + var logServer = defaults.GetDefaultFor(PermissableDefaults.LiveLoggingServer_ID); + var logManager = new LogManager(logServer); + + var raw = db.Server.ExpectDatabase($"{db.GetRuntimeName()}_RAW"); + if (raw.Exists()) + raw.Drop(); + + //define a new load configuration + var lmd = new LoadMetadata(CatalogueRepository, "MyLoad") + { + IgnoreTrigger = true + }; + lmd.SaveToDatabase(); + + var ti = Import(tbl, lmd, logManager); + + var projectDirectory = SetupLoadDirectory(lmd); + + CreateCSVProcessTask(lmd, ti, "*.csv"); + + //create a text file to load where we update Frank's favourite colour (it's a pk field) and we insert a new record (MrMurder) + File.WriteAllText( + Path.Combine(projectDirectory.ForLoading.FullName, "LoadMe.csv"), + @"EmployeeID,Name,Position,Department,Address,AnnualSalary 1,Frank,Boss,Department of F'Tang, 22 Innsmouth Way, 55000.5 2,Herbert,Super Boss,Department of F'Tang, 22 Innsmouth Way, 155000.5"); - - //the checks will probably need to be run as ddl admin because it involves creating _Archive table and trigger the first time - //clean SetUp RAW / STAGING etc and generally accept proposed cleanup operations - var checker = new CheckEntireDataLoadProcess(lmd, new HICDatabaseConfiguration(lmd), new HICLoadConfigurationFlags(),CatalogueRepository.MEF); - checker.Check(new AcceptAllCheckNotifier()); + //the checks will probably need to be run as ddl admin because it involves creating _Archive table and trigger the first time - if(ignoreWithGlobalPattern) - { - var regex = new StandardRegex(RepositoryLocator.CatalogueRepository) - { - ConceptName = StandardRegex.DataLoadEngineGlobalIgnorePattern, - Regex = "^Valid((From)|(To))$" - }; - - regex.SaveToDatabase(); - } - else + //clean SetUp RAW / STAGING etc and generally accept proposed cleanup operations + var checker = + new CheckEntireDataLoadProcess(lmd, new HICDatabaseConfiguration(lmd), new HICLoadConfigurationFlags()); + checker.Check(new AcceptAllCheckNotifier()); + + if (ignoreWithGlobalPattern) + { + var regex = new StandardRegex(RepositoryLocator.CatalogueRepository) { - var col = ti.ColumnInfos.Single(c=>c.GetRuntimeName().Equals("ValidFrom")); - col.IgnoreInLoads = true; - col.SaveToDatabase(); - - col = ti.ColumnInfos.Single(c=>c.GetRuntimeName().Equals("ValidTo")); - col.IgnoreInLoads = true; - col.SaveToDatabase(); - } - - var dbConfig = new HICDatabaseConfiguration(lmd,null); - - var loadFactory = new HICDataLoadFactory( - lmd, - dbConfig, - new HICLoadConfigurationFlags(), - CatalogueRepository, - logManager - ); - - var exe = loadFactory.Create(new ThrowImmediatelyDataLoadEventListener()); - - var exitCode = exe.Run( - new DataLoadJob(RepositoryLocator,"Go go go!", logManager, lmd, projectDirectory,new ThrowImmediatelyDataLoadEventListener(),dbConfig), - new GracefulCancellationToken()); - - Assert.AreEqual(ExitCodeType.Success,exitCode); - - //frank should be updated to his new departement and role - Assert.AreEqual(2,tbl.GetRowCount()); - var result = tbl.GetDataTable(); - var frank = result.Rows.Cast().Single(r => (string) r["Name"] == "Frank"); - Assert.AreEqual("Department of 
F'Tang",frank["Department"]); - Assert.AreEqual("Boss",frank["Position"]); - - //post test cleanup - foreach (var regex in RepositoryLocator.CatalogueRepository.GetAllObjects()) - regex.DeleteInDatabase(); + ConceptName = StandardRegex.DataLoadEngineGlobalIgnorePattern, + Regex = "^Valid((From)|(To))$" + }; + + regex.SaveToDatabase(); + } + else + { + var col = ti.ColumnInfos.Single(c => c.GetRuntimeName().Equals("ValidFrom")); + col.IgnoreInLoads = true; + col.SaveToDatabase(); + + col = ti.ColumnInfos.Single(c => c.GetRuntimeName().Equals("ValidTo")); + col.IgnoreInLoads = true; + col.SaveToDatabase(); } + + var dbConfig = new HICDatabaseConfiguration(lmd, null); + + var loadFactory = new HICDataLoadFactory( + lmd, + dbConfig, + new HICLoadConfigurationFlags(), + CatalogueRepository, + logManager + ); + + var exe = loadFactory.Create(ThrowImmediatelyDataLoadEventListener.Quiet); + + var exitCode = exe.Run( + new DataLoadJob(RepositoryLocator, "Go go go!", logManager, lmd, projectDirectory, + ThrowImmediatelyDataLoadEventListener.Quiet, dbConfig), + new GracefulCancellationToken()); + + Assert.AreEqual(ExitCodeType.Success, exitCode); + + //frank should be updated to his new departement and role + Assert.AreEqual(2, tbl.GetRowCount()); + var result = tbl.GetDataTable(); + var frank = result.Rows.Cast().Single(r => (string)r["Name"] == "Frank"); + Assert.AreEqual("Department of F'Tang", frank["Department"]); + Assert.AreEqual("Boss", frank["Position"]); + + //post test cleanup + foreach (var regex in RepositoryLocator.CatalogueRepository.GetAllObjects()) + regex.DeleteInDatabase(); } -} +} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/ToMemoryDataLoadJob.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/ToMemoryDataLoadJob.cs index 464366a422..b47269a44c 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/ToMemoryDataLoadJob.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/ToMemoryDataLoadJob.cs @@ -16,63 +16,66 @@ using Rdmp.Core.DataLoad.Engine.Job; using Rdmp.Core.Logging; using Rdmp.Core.Repositories; -using ReusableLibraryCode.Progress; +using Rdmp.Core.ReusableLibraryCode.Progress; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration; + +public class ToMemoryDataLoadJob : ToMemoryDataLoadEventListener, IDataLoadJob { - public class ToMemoryDataLoadJob : ToMemoryDataLoadEventListener, IDataLoadJob + private List _crashAtEnd = new(); + + /// + public IReadOnlyCollection CrashAtEndMessages => _crashAtEnd.AsReadOnly(); + + public ToMemoryDataLoadJob(bool throwOnErrorEvents = true) : base(throwOnErrorEvents) { - private List _crashAtEnd = new (); - /// - public IReadOnlyCollection CrashAtEndMessages => _crashAtEnd.AsReadOnly(); + } - public ToMemoryDataLoadJob(bool throwOnErrorEvents = true): base(throwOnErrorEvents) - { - } + public string Description { get; private set; } + public IDataLoadInfo DataLoadInfo { get; private set; } + public ILoadDirectory LoadDirectory { get; set; } + public int JobID { get; set; } + public ILoadMetadata LoadMetadata { get; private set; } + public bool DisposeImmediately { get; private set; } + public string ArchiveFilepath { get; private set; } + public List RegularTablesToLoad { get; private set; } = new(); + public List LookupTablesToLoad { get; private set; } = new(); + public IRDMPPlatformRepositoryServiceLocator RepositoryLocator => null; - public string Description { get; private set; } - public IDataLoadInfo DataLoadInfo { get; private set; } - 
public ILoadDirectory LoadDirectory { get; set; } - public int JobID { get; set; } - public ILoadMetadata LoadMetadata { get; private set; } - public bool DisposeImmediately { get; private set; } - public string ArchiveFilepath { get; private set; } - public List RegularTablesToLoad { get; private set; } = new List(); - public List LookupTablesToLoad { get; private set; } = new List(); - public IRDMPPlatformRepositoryServiceLocator RepositoryLocator { get { return null; }} + public void StartLogging() + { + } + + public void CloseLogging() + { + } - public void StartLogging() - { - } + public HICDatabaseConfiguration Configuration { get; private set; } - public void CloseLogging() - { - } + public object Payload { get; set; } + public bool PersistentRaw { get; set; } - public HICDatabaseConfiguration Configuration { get; private set; } + public void CreateTablesInStage(DatabaseCloner cloner, LoadBubble stage) + { + } - public object Payload { get; set; } - public bool PersistentRaw { get; set; } + public void PushForDisposal(IDisposeAfterDataLoad disposeable) + { + } - public void CreateTablesInStage(DatabaseCloner cloner, LoadBubble stage) - { - } + public void LoadCompletedSoDispose(ExitCodeType exitCode, IDataLoadEventListener postLoadEventsListener) + { + } - public void PushForDisposal(IDisposeAfterDataLoad disposeable) - { - } + public ColumnInfo[] GetAllColumns() + { + return RegularTablesToLoad.SelectMany(t => t.ColumnInfos) + .Union(LookupTablesToLoad.SelectMany(t => t.ColumnInfos)).Distinct().ToArray(); + } - public void LoadCompletedSoDispose(ExitCodeType exitCode, IDataLoadEventListener postLoadEventsListener) - { - } - public ColumnInfo[] GetAllColumns() - { - return RegularTablesToLoad.SelectMany(t=>t.ColumnInfos).Union(LookupTablesToLoad.SelectMany(t=>t.ColumnInfos)).Distinct().ToArray(); - } - /// - public void CrashAtEnd(NotifyEventArgs because) - { - _crashAtEnd.Add(because); - } + /// + public void CrashAtEnd(NotifyEventArgs because) + { + _crashAtEnd.Add(because); } } \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/WebFileDownloaderTests.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/WebFileDownloaderTests.cs deleted file mode 100644 index 3d6063c504..0000000000 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/WebFileDownloaderTests.cs +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright (c) The University of Dundee 2018-2019 -// This file is part of the Research Data Management Platform (RDMP). -// RDMP is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. -// RDMP is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. -// You should have received a copy of the GNU General Public License along with RDMP. If not, see . 
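Where the old Moq code stubbed read-only properties through an expression, as in the TableVarcharMaxerTests hunk above, the NSubstitute replacement creates the substitute first and then configures each member with Returns. A hedged sketch of that mapping, with the generic arguments assumed because they are not preserved in the flattened diff:

    using System.Collections.Generic;
    using NSubstitute;

    // Moq (removed):
    //   var job = Mock.Of<IDataLoadJob>(x =>
    //       x.RegularTablesToLoad == new List<ITableInfo> { ti } &&
    //       x.Configuration == new HICDatabaseConfiguration(db.Server, null, null, null));

    // NSubstitute (added): stub each property explicitly on a substitute.
    var job = Substitute.For<IDataLoadJob>();
    job.RegularTablesToLoad.Returns(new List<ITableInfo> { ti });
    job.Configuration.Returns(new HICDatabaseConfiguration(db.Server, null, null, null));

    maxer.Mutilate(job);
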
- -using System; -using System.IO; -using System.Net; -using System.Net.Http; -using System.Threading; -using NUnit.Framework; - -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration; - -[Category("Unit")] -public class WebFileDownloaderTests -{ - [Test, Ignore("Let's not put usernames and password in here eh.")] - public void ProxyTest() - { - var url = "http://www.bbc.co.uk/news"; - - Stream response; - try - { - response = CreateRequest(url); - } - catch (Exception e) - { - // First retry with proxy credentials - Console.WriteLine($"First HTTP request failed with '{e.Message}', retrying with credentials"); - var credentials = new NetworkCredential("proxyUsername", "proxyPassword"); - response = CreateRequest(url, credentials); - } - - var bytesRead=0; - while (response.ReadByte() != -1) - bytesRead++; - - response.Close(); - Assert.Greater(bytesRead, 0); - } - - - private static readonly HttpClientHandler HttpClientHandler = new (); - private static readonly HttpClient HttpClient=new(HttpClientHandler,true); - - private static Stream CreateRequest(string url, ICredentials credentials=null) - { - lock (HttpClient) - { - if (credentials is not null) - { - HttpClientHandler.Credentials = credentials; - HttpClientHandler.PreAuthenticate = true; - } - using var cts=new CancellationTokenSource(5000); - return HttpClient.GetStreamAsync(url,cts.Token).Result; - } - } -} \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Integration/WebServiceConfigurationTests.cs b/Rdmp.Core.Tests/DataLoad/Engine/Integration/WebServiceConfigurationTests.cs index 03f5ad3a30..cb817aa26d 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Integration/WebServiceConfigurationTests.cs +++ b/Rdmp.Core.Tests/DataLoad/Engine/Integration/WebServiceConfigurationTests.cs @@ -8,16 +8,15 @@ using Rdmp.Core.DataLoad.Modules.DataProvider; using Tests.Common; -namespace Rdmp.Core.Tests.DataLoad.Engine.Integration +namespace Rdmp.Core.Tests.DataLoad.Engine.Integration; + +public class WebServiceConfigurationTests : DatabaseTests { - public class WebServiceConfigurationTests : DatabaseTests + [Test] + public void TestXmlSerialization() { - [Test] - public void TestXmlSerialization() - { - var config = new WebServiceConfiguration(CatalogueRepository) {Username = "foo", Password = "bar"}; - var state = config.SaveStateToString(); - config.RestoreStateFrom(state); - } + var config = new WebServiceConfiguration(CatalogueRepository) { Username = "foo", Password = "bar" }; + var state = config.SaveStateToString(); + config.RestoreStateFrom(state); } } \ No newline at end of file diff --git a/Rdmp.Core.Tests/DataLoad/Engine/Resources/XmlTestForExcel.xml b/Rdmp.Core.Tests/DataLoad/Engine/Resources/XmlTestForExcel.xml index ed0027ebbb..b9e416f6c7 100644 --- a/Rdmp.Core.Tests/DataLoad/Engine/Resources/XmlTestForExcel.xml +++ b/Rdmp.Core.Tests/DataLoad/Engine/Resources/XmlTestForExcel.xml @@ -1,145 +1,210 @@  + - - Marie Pitkethly - Thomas Nind - 2014-10-15T13:59:26Z - 2014-10-15T13:56:39Z - 14.00 - - - - - - 10005 - 10005 - 120 - 135 - False - False - - - - - - - - - - -
[XmlTestForExcel.xml resource (reformatted in this patch): an Excel-XML worksheet of GP practice contact details — column headers Node, HealthBoard Area, Organisation Name, PracticeCode, Address1-4, PostCode, Practice Managers, Practice Manager Emails — with sample rows for Airlie Medical Practice (code 11111, The Health Centre, Victoria Road, Leven, Fife, KY8 4ET; Captain Morgan, fishsticks@nhs.net) and Anstruther Medical Practice (code 22222, Skeith Health Centre, Crail Road, Cellardyke, Anstruther, KY10 3FF; Thomas Nind, t.z.nind@nhs.net).]
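One further recurring pattern in the hunks above: throwaway notifier and listener objects are replaced by the shared static instances referenced in the new code (ThrowImmediatelyCheckNotifier.QuietPicky and ThrowImmediatelyDataLoadEventListener.Quiet). A hedged before/after sketch, using the calls that appear in the TableVarcharMaxerTests and TestTemporalTables hunks:

    // Removed: each test newed up its own notifier/listener.
    //   maxer.Check(new ThrowImmediatelyCheckNotifier { ThrowOnWarning = true });
    //   var exe = loadFactory.Create(new ThrowImmediatelyDataLoadEventListener());

    // Added: reuse the shared static instances instead.
    maxer.Check(ThrowImmediatelyCheckNotifier.QuietPicky);
    var exe = loadFactory.Create(ThrowImmediatelyDataLoadEventListener.Quiet);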