From f7ffbc28c5524a9b9319f9c2bd2e7bc09ccbe4bd Mon Sep 17 00:00:00 2001 From: jas88 Date: Mon, 27 Nov 2023 17:41:13 -0600 Subject: [PATCH] NUnit API fixup --- Packages.md | 1 + RDMP | 2 +- .../Integration/ExecuteCommandAddTagTests.cs | 53 +-- ...ommandCreateNewImagingDatasetSuiteTests.cs | 63 ++-- .../ExecuteCommandPacsFetchTest.cs | 2 +- .../FoDicomAnonymiserStandaloneTests.cs | 8 +- .../Integration/FoDicomAnonymiserTests.cs | 184 +++++----- .../Integration/ImagingTableCreationTests.cs | 24 +- .../LiveVsTemplateComparerTests.cs | 22 +- .../Integration/SMICacheLayoutTests.cs | 4 +- .../Integration/SemEHRApiCallerTests.cs | 13 +- Rdmp.Dicom.Tests/IsolationReviewTests.cs | 37 +- Rdmp.Dicom.Tests/PackageListIsCorrectTests.cs | 29 +- Rdmp.Dicom.Tests/PressureGaugeTests.cs | 26 +- Rdmp.Dicom.Tests/Rdmp.Dicom.Tests.csproj | 6 +- .../TestProcessBasedCacheSource.cs | 6 +- .../Unit/AmbiguousFilePathTests.cs | 74 ++-- Rdmp.Dicom.Tests/Unit/CFindDirSourceTests.cs | 28 +- Rdmp.Dicom.Tests/Unit/CFindSourceTests.cs | 4 +- Rdmp.Dicom.Tests/Unit/ConnectToOrthancTest.cs | 11 +- .../Unit/DICOMFileCollectionSourceTests.cs | 120 ++++--- Rdmp.Dicom.Tests/Unit/DataTableTestHelper.cs | 2 +- Rdmp.Dicom.Tests/Unit/DicomSourceUnitTests.cs | 147 ++++---- Rdmp.Dicom.Tests/Unit/PacsFetch.cs | 26 +- ...aryKeyCollisionIsolationMutilationTests.cs | 315 +++++++++--------- .../Unit/TestMakeUniquePipelineName.cs | 18 +- .../ExecuteCommandCreateNewImagingDataset.cs | 2 +- .../ExecuteCommandReviewIsolations.cs | 2 +- Rdmp.Dicom.UI/CreateNewImagingDatasetUI.cs | 2 +- Rdmp.Dicom.UI/IsolationTableUI.Designer.cs | 142 ++++---- Rdmp.Dicom.UI/IsolationTableUI.cs | 7 +- Rdmp.Dicom.UI/IsolationTableUI.resx | 50 +-- Rdmp.Dicom.UI/Rdmp.Dicom.UI.csproj | 4 + Rdmp.Dicom.UI/RdmpDicomUserInterface.cs | 2 +- Rdmp.Dicom.UI/SemEHRUI.cs | 2 +- Rdmp.Dicom.UI/TagColumnAdderUI.cs | 2 +- Rdmp.Dicom.UI/TagElevationXmlUI.cs | 2 +- .../Attachers/Routing/AutoRoutingAttacher.cs | 28 +- .../AutoRoutingAttacherWithPersistentRaw.cs | 2 +- .../Routing/PersistentRawTableCreator.cs | 6 +- Rdmp.Dicom/Cache/Pipeline/CFindSource.cs | 4 +- Rdmp.Dicom/Cache/Pipeline/CachingSCP.cs | 4 +- .../Pipeline/Dicom/DicomRequestSender.cs | 8 +- .../Cache/Pipeline/ProcessBasedCacheSource.cs | 20 +- Rdmp.Dicom/Cache/SMICacheDestination.cs | 4 +- Rdmp.Dicom/Cache/SMICacheLayout.cs | 6 +- Rdmp.Dicom/Cache/SMICacheTextFileGenerator.cs | 2 +- .../CommandExecution/ExecuteCommandAddTag.cs | 10 +- .../ExecuteCommandBuildExtractionView.cs | 4 +- .../ExecuteCommandCreateNewImagingDataset.cs | 6 +- ...cuteCommandCreateNewImagingDatasetSuite.cs | 26 +- .../ExecuteCommandPacsFetch.cs | 6 +- .../DataProviders/SMICachedFileRetriever.cs | 2 +- Rdmp.Dicom/ExternalApis/SemEHRApiCaller.cs | 4 +- .../ExternalApis/SemEHRConfiguration.cs | 12 +- .../FoDicomBased/AmbiguousFilePath.cs | 6 +- .../AmbiguousFilePathResolutionException.cs | 4 +- .../PutDicomFilesInExtractionDirectories.cs | 6 +- .../FoDicomBased/FoDicomAnonymiser.cs | 16 +- Rdmp.Dicom/Extraction/MappingRepository.cs | 2 +- Rdmp.Dicom/Extraction/UIDMapping.cs | 2 +- Rdmp.Dicom/LiveVsTemplateComparer.cs | 8 +- .../CFind/CFindDirSource.cs | 12 +- .../DicomDatasetCollectionSource.cs | 4 +- .../DicomSources/DicomFileCollectionSource.cs | 32 +- .../DicomSources/DicomSource.cs | 16 +- .../FlatFileToLoadDicomProcessListProvider.cs | 10 +- .../Worklists/IDicomDatasetWorklist.cs | 2 +- .../DicomSources/Worklists/IDicomWorklist.cs | 2 +- .../PipelineComponents/IsolationReview.cs | 28 +- .../PrimaryKeyCollisionIsolationMutilation.cs | 32 +- 
Rdmp.Dicom/Rdmp.Dicom.csproj | 4 + Rdmp.Dicom/SemEHRConsoleUI.cs | 2 +- .../TagPromotionSchema/TagColumnAdder.cs | 2 +- .../TagPromotionSchema/TagLoadedColumnPair.cs | 2 +- 75 files changed, 926 insertions(+), 862 deletions(-) diff --git a/Packages.md b/Packages.md index 6541b280..6505abd5 100644 --- a/Packages.md +++ b/Packages.md @@ -11,3 +11,4 @@ | ------- | ------------| ------- | ------- | -------------------------- | | HIC.DicomTypeTranslation | [GitHub](https://github.com/SMI/DicomTypeTranslation) | [GPL 3.0](https://www.gnu.org/licenses/gpl-3.0.html) | Translate dicom types into C# / database types | | | LibArchive.Net | [GitHub](https://github.com/jas88/libarchive.net) | [BSD] | Access archive formats without the LZMA bugs of SharpCompress | | +| [NUnit.Analyzers](https://nunit.org/) |[GitHub](https://github.com/nunit/nunit.analyzers) | [MIT](https://opensource.org/licenses/MIT) | Unit testing support code | diff --git a/RDMP b/RDMP index e94eed1c..ed138c42 160000 --- a/RDMP +++ b/RDMP @@ -1 +1 @@ -Subproject commit e94eed1c3e9daecdbdee3c1ba151b02a943ef50f +Subproject commit ed138c4242689825c28a4293135f1b38935335fd diff --git a/Rdmp.Dicom.Tests/Integration/ExecuteCommandAddTagTests.cs b/Rdmp.Dicom.Tests/Integration/ExecuteCommandAddTagTests.cs index 5c4ed572..3ca10ba0 100644 --- a/Rdmp.Dicom.Tests/Integration/ExecuteCommandAddTagTests.cs +++ b/Rdmp.Dicom.Tests/Integration/ExecuteCommandAddTagTests.cs @@ -40,18 +40,18 @@ public void TestAddTag_WithArchive(DatabaseType type) // use it to create a table var tbl = db.ExpectTable(template.TableName); - IAtomicCommand cmd = new ExecuteCommandCreateNewImagingDataset(RepositoryLocator,tbl , template); - Assert.IsFalse(cmd.IsImpossible); + IAtomicCommand cmd = new ExecuteCommandCreateNewImagingDataset(RepositoryLocator, tbl, template); + Assert.That(cmd.IsImpossible, Is.False); cmd.Execute(); - Assert.IsTrue(tbl.Exists()); + Assert.That(tbl.Exists()); // import RDMP reference to the table - var importer = new TableInfoImporter(CatalogueRepository,tbl); - importer.DoImport(out var ti,out var cols); - - var forward = new ForwardEngineerCatalogue(ti,cols); - forward.ExecuteForwardEngineering(out var catalogue,out _,out _); + var importer = new TableInfoImporter(CatalogueRepository, tbl); + importer.DoImport(out var ti, out var cols); + + var forward = new ForwardEngineerCatalogue(ti, cols); + forward.ExecuteForwardEngineering(out var catalogue, out _, out _); // Create an archive table and backup trigger like we would have if this were the target of a data load var triggerImplementerFactory = new TriggerImplementerFactory(type); @@ -60,36 +60,39 @@ public void TestAddTag_WithArchive(DatabaseType type) var archive = tbl.Database.ExpectTable($"{tbl.GetRuntimeName()}_Archive"); - Assert.IsTrue(archive.Exists()); + Assert.That(archive.Exists()); - var activator = new ConsoleInputManager(RepositoryLocator,ThrowImmediatelyCheckNotifier.Quiet) + var activator = new ConsoleInputManager(RepositoryLocator, ThrowImmediatelyCheckNotifier.Quiet) { DisallowInput = true }; // Test the actual commands - cmd = new ExecuteCommandAddTag(activator,catalogue,"ffffff","int"); - Assert.IsFalse(cmd.IsImpossible,cmd.ReasonCommandImpossible); + cmd = new ExecuteCommandAddTag(activator, catalogue, "ffffff", "int"); + Assert.That(cmd.IsImpossible, Is.False, cmd.ReasonCommandImpossible); cmd.Execute(); - cmd = new ExecuteCommandAddTag(activator,catalogue,"EchoTime",null); - Assert.IsFalse(cmd.IsImpossible,cmd.ReasonCommandImpossible); + cmd = new 
ExecuteCommandAddTag(activator, catalogue, "EchoTime", null); + Assert.That(cmd.IsImpossible, Is.False, cmd.ReasonCommandImpossible); cmd.Execute(); // attempting to add something that is already there is not a problem and just gets skipped - Assert.DoesNotThrow(()=>new ExecuteCommandAddTag(activator,catalogue,"StudyDate",null).Execute()); - - cmd = new ExecuteCommandAddTag(activator,catalogue,"SeriesDate",null); - Assert.IsFalse(cmd.IsImpossible,cmd.ReasonCommandImpossible); - cmd.Execute(); + Assert.DoesNotThrow(() => new ExecuteCommandAddTag(activator, catalogue, "StudyDate", null).Execute()); - Assert.AreEqual("int",tbl.DiscoverColumn("ffffff").DataType.SQLType); - Assert.AreEqual("decimal(38,19)",tbl.DiscoverColumn("EchoTime").DataType.SQLType); - Assert.AreEqual(typeof(DateTime),tbl.DiscoverColumn("SeriesDate").DataType.GetCSharpDataType()); + cmd = new ExecuteCommandAddTag(activator, catalogue, "SeriesDate", null); + Assert.That(cmd.IsImpossible, Is.False, cmd.ReasonCommandImpossible); + cmd.Execute(); - Assert.AreEqual("int",archive.DiscoverColumn("ffffff").DataType.SQLType); - Assert.AreEqual("decimal(38,19)",archive.DiscoverColumn("EchoTime").DataType.SQLType); - Assert.AreEqual(typeof(DateTime),archive.DiscoverColumn("SeriesDate").DataType.GetCSharpDataType()); + Assert.Multiple(() => + { + Assert.That(tbl.DiscoverColumn("ffffff").DataType.SQLType, Is.EqualTo("int")); + Assert.That(tbl.DiscoverColumn("EchoTime").DataType.SQLType, Is.EqualTo("decimal(38,19)")); + Assert.That(tbl.DiscoverColumn("SeriesDate").DataType.GetCSharpDataType(), Is.EqualTo(typeof(DateTime))); + + Assert.That(archive.DiscoverColumn("ffffff").DataType.SQLType, Is.EqualTo("int")); + Assert.That(archive.DiscoverColumn("EchoTime").DataType.SQLType, Is.EqualTo("decimal(38,19)")); + Assert.That(archive.DiscoverColumn("SeriesDate").DataType.GetCSharpDataType(), Is.EqualTo(typeof(DateTime))); + }); } } \ No newline at end of file diff --git a/Rdmp.Dicom.Tests/Integration/ExecuteCommandCreateNewImagingDatasetSuiteTests.cs b/Rdmp.Dicom.Tests/Integration/ExecuteCommandCreateNewImagingDatasetSuiteTests.cs index 78bdb7b4..03db12c4 100644 --- a/Rdmp.Dicom.Tests/Integration/ExecuteCommandCreateNewImagingDatasetSuiteTests.cs +++ b/Rdmp.Dicom.Tests/Integration/ExecuteCommandCreateNewImagingDatasetSuiteTests.cs @@ -12,9 +12,9 @@ namespace Rdmp.Dicom.Tests.Integration; class ExecuteCommandCreateNewImagingDatasetSuiteTests : DatabaseTests { - #region Template + #region Template - const string TemplateYaml = @" + const string TemplateYaml = @" #Last Modified: 2020-04-07 Tables: - TableName: StudyTable @@ -140,38 +140,41 @@ class ExecuteCommandCreateNewImagingDatasetSuiteTests : DatabaseTests - ColumnName: ScanOptions AllowNulls: true "; - #endregion + #endregion - [TestCase(DatabaseType.MicrosoftSQLServer)] - [TestCase(DatabaseType.MySql)] - public void TestSuiteCreation(DatabaseType dbType) - { - var db = GetCleanedServer(dbType); + [TestCase(DatabaseType.MicrosoftSQLServer)] + [TestCase(DatabaseType.MySql)] + public void TestSuiteCreation(DatabaseType dbType) + { + var db = GetCleanedServer(dbType); - var template = Path.Combine(TestContext.CurrentContext.WorkDirectory,"CT.it"); - File.WriteAllText(template,TemplateYaml); + var template = Path.Combine(TestContext.CurrentContext.WorkDirectory, "CT.it"); + File.WriteAllText(template, TemplateYaml); - var cmd = new ExecuteCommandCreateNewImagingDatasetSuite(RepositoryLocator,db, - new(TestContext.CurrentContext.WorkDirectory), - typeof(DicomFileCollectionSource), - "CT_", - 
new(template), - persistentRaw: false, - createLoad: true); + var cmd = new ExecuteCommandCreateNewImagingDatasetSuite(RepositoryLocator, db, + new(TestContext.CurrentContext.WorkDirectory), + typeof(DicomFileCollectionSource), + "CT_", + new(template), + persistentRaw: false, + createLoad: true); - Assert.IsFalse(cmd.IsImpossible); + Assert.That(cmd.IsImpossible, Is.False); - cmd.Execute(); + cmd.Execute(); - Assert.IsNotNull(cmd.NewLoadMetadata); - - var pipelineCreated = RepositoryLocator.CatalogueRepository.GetAllObjects().OrderByDescending(p=>p.ID).First(); - - Assert.AreEqual(typeof(DicomFileCollectionSource),pipelineCreated.Source.GetClassAsSystemType()); - - var argFieldMap = pipelineCreated.Source.GetAllArguments().Single(a=>a.Name.Equals(nameof(DicomSource.UseAllTableInfoInLoadAsFieldMap))); - - Assert.IsNotNull(argFieldMap); - Assert.AreEqual(argFieldMap.GetValueAsSystemType(),cmd.NewLoadMetadata); - } + Assert.That(cmd.NewLoadMetadata, Is.Not.Null); + + var pipelineCreated = RepositoryLocator.CatalogueRepository.GetAllObjects().OrderByDescending(p => p.ID).First(); + + Assert.That(pipelineCreated.Source.GetClassAsSystemType(), Is.EqualTo(typeof(DicomFileCollectionSource))); + + var argFieldMap = pipelineCreated.Source.GetAllArguments().Single(a => a.Name.Equals(nameof(DicomSource.UseAllTableInfoInLoadAsFieldMap))); + + Assert.Multiple(() => + { + Assert.That(argFieldMap, Is.Not.Null); + Assert.That(cmd.NewLoadMetadata, Is.EqualTo(argFieldMap.GetValueAsSystemType())); + }); + } } \ No newline at end of file diff --git a/Rdmp.Dicom.Tests/Integration/ExecuteCommandPacsFetchTest.cs b/Rdmp.Dicom.Tests/Integration/ExecuteCommandPacsFetchTest.cs index 38eaa34d..c6bd84b2 100644 --- a/Rdmp.Dicom.Tests/Integration/ExecuteCommandPacsFetchTest.cs +++ b/Rdmp.Dicom.Tests/Integration/ExecuteCommandPacsFetchTest.cs @@ -12,7 +12,7 @@ class ExecuteCommandPacsFetchTest : DatabaseTests [Test] public void TestLocal() { - var cmd = new ExecuteCommandPacsFetch(new ConsoleInputManager(RepositoryLocator, ThrowImmediatelyCheckNotifier.Quiet){DisallowInput= true},"2013-01-01","2014-01-01","www.dicomserver.co.uk",11112,"you","localhost",11112,"me",TestContext.CurrentContext.WorkDirectory,0); + var cmd = new ExecuteCommandPacsFetch(new ConsoleInputManager(RepositoryLocator, ThrowImmediatelyCheckNotifier.Quiet) { DisallowInput = true }, "2013-01-01", "2014-01-01", "www.dicomserver.co.uk", 11112, "you", "localhost", 11112, "me", TestContext.CurrentContext.WorkDirectory, 0); cmd.Execute(); } } \ No newline at end of file diff --git a/Rdmp.Dicom.Tests/Integration/FoDicomAnonymiserStandaloneTests.cs b/Rdmp.Dicom.Tests/Integration/FoDicomAnonymiserStandaloneTests.cs index 2867e9ae..363ca3f0 100644 --- a/Rdmp.Dicom.Tests/Integration/FoDicomAnonymiserStandaloneTests.cs +++ b/Rdmp.Dicom.Tests/Integration/FoDicomAnonymiserStandaloneTests.cs @@ -16,7 +16,7 @@ public void TestAnonymiseAFile() var inPath = new DirectoryInfo(Path.Combine(TestContext.CurrentContext.WorkDirectory, "in")); var outPath = new DirectoryInfo(Path.Combine(TestContext.CurrentContext.WorkDirectory, "out")); - + if (inPath.Exists) inPath.Delete(true); inPath.Create(); @@ -27,9 +27,9 @@ public void TestAnonymiseAFile() // put a dicom file in the in dir var testFile = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, @"TestData/IM-0001-0013.dcm")); - testFile.CopyTo(Path.Combine(inPath.FullName, "blah.dcm"),true); + testFile.CopyTo(Path.Combine(inPath.FullName, "blah.dcm"), true); - anon.Initialize(1, outPath,null /*no UID 
mapping*/); + anon.Initialize(1, outPath, null /*no UID mapping*/); var putter = new PutInRoot(); @@ -39,5 +39,5 @@ public void TestAnonymiseAFile() "fffff", putter, null); } - + } \ No newline at end of file diff --git a/Rdmp.Dicom.Tests/Integration/FoDicomAnonymiserTests.cs b/Rdmp.Dicom.Tests/Integration/FoDicomAnonymiserTests.cs index e245f514..80d00594 100644 --- a/Rdmp.Dicom.Tests/Integration/FoDicomAnonymiserTests.cs +++ b/Rdmp.Dicom.Tests/Integration/FoDicomAnonymiserTests.cs @@ -35,7 +35,7 @@ namespace Rdmp.Dicom.Tests.Integration; -public class FoDicomAnonymiserTests:DatabaseTests +public class FoDicomAnonymiserTests : DatabaseTests { [OneTimeSetUp] public void Init() @@ -49,7 +49,7 @@ public void Dispose() TidyUpImages(); } - private void TidyUpImages() + private static void TidyUpImages() { var imagesDir = new DirectoryInfo(Path.Combine(TestContext.CurrentContext.WorkDirectory, "Images")); if (imagesDir.Exists) @@ -59,24 +59,24 @@ private void TidyUpImages() // The following commented tests will fail due to underlying system limits on paths // there is no reliable method to get maximum path length (apparently?) // [TestCase(typeof(PutInUidStudySeriesFolders))] - [TestCase(typeof(PutInUidSeriesFolders),true)] - [TestCase(typeof(PutInUidSeriesFolders),false)] - [TestCase(typeof(PutInReleaseIdentifierSubfolders),true)] - [TestCase(typeof(PutInReleaseIdentifierSubfolders),false)] - [TestCase(typeof(PutInRoot),true)] - [TestCase(typeof(PutInRoot),true)] - public void TestAnonymisingDataset(Type putterType,bool keepDates) + [TestCase(typeof(PutInUidSeriesFolders), true)] + [TestCase(typeof(PutInUidSeriesFolders), false)] + [TestCase(typeof(PutInReleaseIdentifierSubfolders), true)] + [TestCase(typeof(PutInReleaseIdentifierSubfolders), false)] + [TestCase(typeof(PutInRoot), true)] + [TestCase(typeof(PutInRoot), true)] + public void TestAnonymisingDataset(Type putterType, bool keepDates) { var uidMapDb = GetCleanedServer(DatabaseType.MicrosoftSQLServer, "TESTUIDMapp"); MasterDatabaseScriptExecutor e = new(uidMapDb); var patcher = new SMIDatabasePatcher(); - e.CreateAndPatchDatabase(patcher,new AcceptAllCheckNotifier()); + e.CreateAndPatchDatabase(patcher, new AcceptAllCheckNotifier()); var eds = new ExternalDatabaseServer(CatalogueRepository, "eds", patcher); eds.SetProperties(uidMapDb); - - Dictionary thingThatShouldDisappear = new() + + Dictionary thingThatShouldDisappear = new() { //Things we would want to disappear {DicomTag.PatientName,"Moscow"}, @@ -87,12 +87,12 @@ public void TestAnonymisingDataset(Type putterType,bool keepDates) {DicomTag.StudyDate,"20020101"} }; - Dictionary thingsThatShouldRemain = new() + Dictionary thingsThatShouldRemain = new() { //Things we would want to remain //{DicomTag.SmokingStatus,"YES"}, }; - + var dicom = new DicomDataset { {DicomTag.SOPInstanceUID, "123.4.4"}, @@ -103,11 +103,11 @@ public void TestAnonymisingDataset(Type putterType,bool keepDates) foreach (var (key, value) in thingThatShouldDisappear) dicom.AddOrUpdate(key, value); - + foreach (var (key, value) in thingsThatShouldRemain) dicom.AddOrUpdate(key, value); - dicom.AddOrUpdate(DicomTag.StudyDate, new DateTime(2002 , 01 , 01)); + dicom.AddOrUpdate(DicomTag.StudyDate, new DateTime(2002, 01, 01)); var fi = new FileInfo(Path.Combine(TestContext.CurrentContext.WorkDirectory, "madness.dcm")); @@ -129,7 +129,7 @@ public void TestAnonymisingDataset(Type putterType,bool keepDates) IExtractCommand cmd = MockExtractionCommand(); //give the mock to anonymiser - 
anonymiser.PreInitialize(cmd,ThrowImmediatelyDataLoadEventListener.Quiet); + anonymiser.PreInitialize(cmd, ThrowImmediatelyDataLoadEventListener.Quiet); anonymiser.PutterType = putterType; anonymiser.ArchiveRootIfAny = TestContext.CurrentContext.WorkDirectory; @@ -138,24 +138,27 @@ public void TestAnonymisingDataset(Type putterType,bool keepDates) anonymiser.RetainDates = keepDates; anonymiser.DeleteTags = "AlgorithmName"; - using var anoDt = anonymiser.ProcessPipelineData(dt,ThrowImmediatelyDataLoadEventListener.Quiet,new()); + using var anoDt = anonymiser.ProcessPipelineData(dt, ThrowImmediatelyDataLoadEventListener.Quiet, new()); + + Assert.That(anoDt.Rows, Has.Count.EqualTo(1)); - Assert.AreEqual(1,anoDt.Rows.Count); - //Data table should contain new UIDs - Assert.AreNotEqual("123.4.4", anoDt.Rows[0]["SOPInstanceUID"]); - Assert.AreEqual(56, anoDt.Rows[0]["SOPInstanceUID"].ToString().Length); + Assert.That(anoDt.Rows[0]["SOPInstanceUID"], Is.Not.EqualTo("123.4.4")); + Assert.Multiple(() => + { + Assert.That(anoDt.Rows[0]["SOPInstanceUID"].ToString(), Has.Length.EqualTo(56)); - Assert.AreNotEqual("123.4.6", anoDt.Rows[0]["StudyInstanceUID"]); - Assert.AreEqual(56, anoDt.Rows[0]["StudyInstanceUID"].ToString().Length); + Assert.That(anoDt.Rows[0]["StudyInstanceUID"], Is.Not.EqualTo("123.4.6")); + }); + Assert.That(anoDt.Rows[0]["StudyInstanceUID"].ToString(), Has.Length.EqualTo(56)); FileInfo expectedFile = null; - if(putterType == typeof(PutInRoot)) - expectedFile = new(Path.Combine(TestContext.CurrentContext.WorkDirectory,"Images", + if (putterType == typeof(PutInRoot)) + expectedFile = new(Path.Combine(TestContext.CurrentContext.WorkDirectory, "Images", $"{anoDt.Rows[0]["SOPInstanceUID"]}.dcm")); if (putterType == typeof(PutInReleaseIdentifierSubfolders)) - expectedFile = new(Path.Combine(TestContext.CurrentContext.WorkDirectory, "Images","Hank", + expectedFile = new(Path.Combine(TestContext.CurrentContext.WorkDirectory, "Images", "Hank", $"{anoDt.Rows[0]["SOPInstanceUID"]}.dcm")); if (putterType == typeof(PutInUidSeriesFolders)) @@ -166,46 +169,52 @@ public void TestAnonymisingDataset(Type putterType,bool keepDates) expectedFile = new(Path.Combine(TestContext.CurrentContext.WorkDirectory, "Images", "Hank", anoDt.Rows[0]["StudyInstanceUID"].ToString(), anoDt.Rows[0]["SeriesInstanceUID"].ToString(), $"{anoDt.Rows[0]["SOPInstanceUID"]}.dcm")); - Assert.IsTrue(expectedFile?.Exists); + Assert.That(expectedFile?.Exists, Is.EqualTo(true)); var anoDicom = DicomFile.Open(expectedFile.FullName); - - Assert.AreEqual("Hank",anoDicom.Dataset.GetValue(DicomTag.PatientID,0)); - Assert.AreEqual(anoDt.Rows[0]["SOPInstanceUID"], anoDicom.Dataset.GetValue(DicomTag.SOPInstanceUID, 0)); - Assert.AreEqual(56, anoDicom.Dataset.GetValue(DicomTag.SeriesInstanceUID, 0).Length); + Assert.Multiple(() => + { + Assert.That(anoDicom.Dataset.GetValue(DicomTag.PatientID, 0), Is.EqualTo("Hank")); + + Assert.That(anoDicom.Dataset.GetValue(DicomTag.SOPInstanceUID, 0), Is.EqualTo(anoDt.Rows[0]["SOPInstanceUID"])); + Assert.That(anoDicom.Dataset.GetValue(DicomTag.SeriesInstanceUID, 0), Has.Length.EqualTo(56)); - Assert.AreEqual(anoDt.Rows[0]["StudyInstanceUID"], anoDicom.Dataset.GetValue(DicomTag.StudyInstanceUID, 0)); + Assert.That(anoDicom.Dataset.GetValue(DicomTag.StudyInstanceUID, 0), Is.EqualTo(anoDt.Rows[0]["StudyInstanceUID"])); + }); foreach (var (key, _) in thingThatShouldDisappear) { //if it chopped out the entire tag - if(!anoDicom.Dataset.Contains(key)) + if (!anoDicom.Dataset.Contains(key)) continue; - + if 
(anoDicom.Dataset.GetValueCount(key) == 0) continue; - + var value = anoDicom.Dataset.GetSingleValue(key); switch (value) { //allowed values - case "ANONYMOUS":continue; + case "ANONYMOUS": continue; //anonymous date - case "00010101": Assert.IsFalse(keepDates); + case "00010101": + Assert.That(keepDates, Is.False); continue; - case "20020101": Assert.IsTrue(keepDates); + case "20020101": + Assert.That(keepDates); continue; - default: Assert.Fail($"Unexpected value for {key}:{value}"); + default: + Assert.Fail($"Unexpected value for {key}:{value}"); break; } } foreach (var (key, value) in thingsThatShouldRemain) - Assert.AreEqual(value, anoDicom.Dataset.GetValue(key, 0)); + Assert.That(anoDicom.Dataset.GetValue(key, 0), Is.EqualTo(value)); } [TestCase()] @@ -284,27 +293,33 @@ public void TestSkipAnonymisationOnStructuredReports() using var anoDt = anonymiser.ProcessPipelineData(dt, ThrowImmediatelyDataLoadEventListener.Quiet, new()); - Assert.AreEqual(1, anoDt.Rows.Count); + Assert.That(anoDt.Rows, Has.Count.EqualTo(1)); //Data table should contain new UIDs - Assert.AreNotEqual("123.4.4", anoDt.Rows[0]["SOPInstanceUID"]); - Assert.AreEqual(56, anoDt.Rows[0]["SOPInstanceUID"].ToString()?.Length); + Assert.That(anoDt.Rows[0]["SOPInstanceUID"], Is.Not.EqualTo("123.4.4")); + Assert.Multiple(() => + { + Assert.That(anoDt.Rows[0]["SOPInstanceUID"].ToString()?.Length, Is.EqualTo(56)); + + Assert.That(anoDt.Rows[0]["StudyInstanceUID"], Is.Not.EqualTo("123.4.6")); + }); + Assert.That(anoDt.Rows[0]["StudyInstanceUID"].ToString()?.Length, Is.EqualTo(56)); - Assert.AreNotEqual("123.4.6", anoDt.Rows[0]["StudyInstanceUID"]); - Assert.AreEqual(56, anoDt.Rows[0]["StudyInstanceUID"].ToString()?.Length); - var expectedFile = new FileInfo(Path.Combine(TestContext.CurrentContext.WorkDirectory, "Images", $"{anoDt.Rows[0]["SOPInstanceUID"]}.dcm")); - Assert.IsTrue(expectedFile.Exists); + Assert.That(expectedFile.Exists); var anoDicom = DicomFile.Open(expectedFile.FullName); - Assert.AreEqual("Hank", anoDicom.Dataset.GetValue(DicomTag.PatientID, 0)); + Assert.Multiple(() => + { + Assert.That(anoDicom.Dataset.GetValue(DicomTag.PatientID, 0), Is.EqualTo("Hank")); - Assert.AreEqual(anoDt.Rows[0]["SOPInstanceUID"], anoDicom.Dataset.GetValue(DicomTag.SOPInstanceUID, 0)); - Assert.AreEqual(56, anoDicom.Dataset.GetValue(DicomTag.SeriesInstanceUID, 0).Length); + Assert.That(anoDicom.Dataset.GetValue(DicomTag.SOPInstanceUID, 0), Is.EqualTo(anoDt.Rows[0]["SOPInstanceUID"])); + Assert.That(anoDicom.Dataset.GetValue(DicomTag.SeriesInstanceUID, 0), Has.Length.EqualTo(56)); - Assert.AreEqual(anoDt.Rows[0]["StudyInstanceUID"], anoDicom.Dataset.GetValue(DicomTag.StudyInstanceUID, 0)); + Assert.That(anoDicom.Dataset.GetValue(DicomTag.StudyInstanceUID, 0), Is.EqualTo(anoDt.Rows[0]["StudyInstanceUID"])); + }); foreach (var (key, _) in thingThatShouldDisappear) @@ -329,7 +344,7 @@ public void TestSkipAnonymisationOnStructuredReports() } foreach (var (key, value) in thingsThatShouldRemain) - Assert.AreEqual(value, anoDicom.Dataset.GetValue(key, 0),$"Expected tag {key} to remain"); + Assert.That(anoDicom.Dataset.GetValue(key, 0), Is.EqualTo(value), $"Expected tag {key} to remain"); } // The following commented tests will fail due to underlying system limits on paths @@ -343,7 +358,7 @@ public void TestPutDicomFilesInExtractionDirectories(Type putterType) var outputDirectory = new DirectoryInfo(Path.Combine(TestContext.CurrentContext.WorkDirectory, "Images")); const string releaseIdentifier = "Hank"; var putter = 
(IPutDicomFilesInExtractionDirectories)ObjectConstructor.Construct(putterType); - + var dicomDataset = new DicomDataset { {DicomTag.SOPInstanceUID, "123.4.4"}, @@ -371,23 +386,23 @@ public void TestPutDicomFilesInExtractionDirectories(Type putterType) if (putterType == typeof(PutInUidStudySeriesFolders)) expectedFile = new(Path.Combine(outputDirectory.FullName, releaseIdentifier, - dicomDataset.GetValue(DicomTag.StudyInstanceUID, 0), + dicomDataset.GetValue(DicomTag.StudyInstanceUID, 0), dicomDataset.GetValue(DicomTag.SeriesInstanceUID, 0), $"{dicomDataset.GetValue(DicomTag.SOPInstanceUID, 0)}.dcm")); - - Assert.IsTrue(expectedFile?.Exists); + + Assert.That(expectedFile?.Exists, Is.EqualTo(true)); } - + [Test] public void TestUIDTableExists() { var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); // set it to an empty database - var eds = new ExternalDatabaseServer(CatalogueRepository,"UID server",null); + var eds = new ExternalDatabaseServer(CatalogueRepository, "UID server", null); eds.SetProperties(db); var anon = new FoDicomAnonymiser @@ -395,9 +410,9 @@ public void TestUIDTableExists() UIDMappingServer = eds }; - var ex = Assert.Throws(()=>anon.Check(ThrowImmediatelyCheckNotifier.QuietPicky)); + var ex = Assert.Throws(() => anon.Check(ThrowImmediatelyCheckNotifier.QuietPicky)); - StringAssert.AreEqualIgnoringCase("UIDMappingServer is not set up yet", ex?.Message); + Assert.That(ex?.Message, Is.EqualTo("UIDMappingServer is not set up yet").IgnoreCase); anon.Check(new AcceptAllCheckNotifier()); @@ -490,20 +505,23 @@ public void TestAnonymisingDataset_MetadataOnlyVsReal(Type putterType) using var anoDt = anonymiser.ProcessPipelineData(dt, ThrowImmediatelyDataLoadEventListener.Quiet, new()); - Assert.AreEqual(1, anoDt.Rows.Count); + Assert.That(anoDt.Rows, Has.Count.EqualTo(1)); //Data table should contain new UIDs - Assert.AreNotEqual("123.4.4", anoDt.Rows[0]["SOPInstanceUID"]); - Assert.AreEqual(56, anoDt.Rows[0]["SOPInstanceUID"].ToString()?.Length); + Assert.That(anoDt.Rows[0]["SOPInstanceUID"], Is.Not.EqualTo("123.4.4")); + Assert.Multiple(() => + { + Assert.That(anoDt.Rows[0]["SOPInstanceUID"].ToString()?.Length, Is.EqualTo(56)); - Assert.AreNotEqual("123.4.6", anoDt.Rows[0]["StudyInstanceUID"]); - Assert.AreEqual(56, anoDt.Rows[0]["StudyInstanceUID"].ToString()?.Length); + Assert.That(anoDt.Rows[0]["StudyInstanceUID"], Is.Not.EqualTo("123.4.6")); + }); + Assert.That(anoDt.Rows[0]["StudyInstanceUID"].ToString()?.Length, Is.EqualTo(56)); // second time - if(dtFirstTime != null) + if (dtFirstTime != null) { // rows should be the same whether or not we are doing Metadata only extraction - foreach(DataRow row in dtFirstTime.Rows) + foreach (DataRow row in dtFirstTime.Rows) { AssertContains(dt, row.ItemArray); } @@ -513,17 +531,17 @@ public void TestAnonymisingDataset_MetadataOnlyVsReal(Type putterType) } } - private void AssertContains(DataTable dt, params object[] rowValues) + private static void AssertContains(DataTable dt, params object[] rowValues) { - Assert.IsTrue(dt.Rows.Cast().Any(r => + Assert.That(dt.Rows.Cast().Any(r => rowValues.All(v => r.ItemArray.Contains(v))), "Did not find expected row " + string.Join(",", rowValues) + Environment.NewLine + "Rows seen were:" + string.Join(Environment.NewLine, dt.Rows.Cast().Select(r => string.Join(",", r.ItemArray)))); } - private IExtractDatasetCommand MockExtractionCommand() + private static IExtractDatasetCommand MockExtractionCommand() { - return new DummyExtractDatasetCommand(TestContext.CurrentContext.WorkDirectory,100); + 
return new DummyExtractDatasetCommand(TestContext.CurrentContext.WorkDirectory, 100); } } @@ -639,13 +657,13 @@ public void RevertToDatabaseState() /// public IHasDependencies[] GetObjectsThisDependsOn() { - return new IHasDependencies[] { }; + return Array.Empty(); } /// public IHasDependencies[] GetObjectsDependingOnThis() { - return new IHasDependencies[] { }; + return Array.Empty(); } /// @@ -743,7 +761,7 @@ public IEnumerable FilterRuns(IEnumerable public ExtractableColumn[] GetAllExtractableColumnsFor(IExtractableDataSet dataset) { - return new ExtractableColumn[] { }; + return Array.Empty(); } /// @@ -752,7 +770,7 @@ public ExtractableColumn[] GetAllExtractableColumnsFor(IExtractableDataSet datas /// public IExtractableDataSet[] GetAllExtractableDataSets() { - return new IExtractableDataSet[] { }; + return Array.Empty(); } /// @@ -771,7 +789,7 @@ public void Unfreeze() /// public IMapsDirectlyToDatabaseTable[] GetGlobals() { - return new IMapsDirectlyToDatabaseTable[] { }; + return Array.Empty(); } /// @@ -786,13 +804,13 @@ internal class DummyProject : IProject /// public IHasDependencies[] GetObjectsThisDependsOn() { - return new IHasDependencies[] { }; + return Array.Empty(); } /// public IHasDependencies[] GetObjectsDependingOnThis() { - return new IHasDependencies[] { }; + return Array.Empty(); } /// @@ -860,19 +878,19 @@ public IProjectCohortIdentificationConfigurationAssociation[] ProjectCohortIdent /// public ICatalogue[] GetAllProjectCatalogues() { - return new ICatalogue[] { }; + return Array.Empty(); } /// public ExtractionInformation[] GetAllProjectCatalogueColumns(ExtractionCategory any) { - return new ExtractionInformation[] { }; + return Array.Empty(); } /// public ExtractionInformation[] GetAllProjectCatalogueColumns(ICoreChildProvider childProvider, ExtractionCategory any) { - return new ExtractionInformation[] { }; + return Array.Empty(); } } internal class DummyExtractDatasetCommand : IExtractDatasetCommand @@ -884,7 +902,7 @@ public DummyExtractDatasetCommand(string dir, int i) _dir = new DirectoryInfo(dir); Configuration = new DummyExtractionConfiguration() { - Project = new DummyProject {ProjectNumber = i} + Project = new DummyProject { ProjectNumber = i } }; QueryBuilder = new DummySqlQueryBuilder() { diff --git a/Rdmp.Dicom.Tests/Integration/ImagingTableCreationTests.cs b/Rdmp.Dicom.Tests/Integration/ImagingTableCreationTests.cs index 13a5ac04..cc3295a9 100644 --- a/Rdmp.Dicom.Tests/Integration/ImagingTableCreationTests.cs +++ b/Rdmp.Dicom.Tests/Integration/ImagingTableCreationTests.cs @@ -7,7 +7,7 @@ namespace Rdmp.Dicom.Tests.Integration; -public class ImagingTableCreationTests:DatabaseTests +public class ImagingTableCreationTests : DatabaseTests { [TestCase(DatabaseType.MySql)] @@ -32,25 +32,31 @@ public void TestImageTemplates(DatabaseType type) } }; var tbl = db.ExpectTable(template.TableName); - var cmd = new ExecuteCommandCreateNewImagingDataset(RepositoryLocator,tbl , template); - Assert.IsFalse(cmd.IsImpossible); + var cmd = new ExecuteCommandCreateNewImagingDataset(RepositoryLocator, tbl, template); + Assert.That(cmd.IsImpossible, Is.False); cmd.Execute(); - Assert.IsTrue(tbl.Exists()); + Assert.That(tbl.Exists()); var cols = tbl.DiscoverColumns(); - Assert.AreEqual(2,cols.Length); + Assert.That(cols, Has.Length.EqualTo(2)); var rfa = cols.Single(c => c.GetRuntimeName().Equals("RelativeFileArchiveURI")); - Assert.IsTrue(rfa.IsPrimaryKey); - Assert.IsFalse(rfa.AllowNulls); //because PK! 
+ Assert.Multiple(() => + { + Assert.That(rfa.IsPrimaryKey); + Assert.That(rfa.AllowNulls, Is.False); //because PK! + }); var sid = cols.Single(c => c.GetRuntimeName().Equals("SeriesInstanceUID")); - Assert.IsFalse(sid.IsPrimaryKey); - Assert.IsTrue(sid.AllowNulls); + Assert.Multiple(() => + { + Assert.That(sid.IsPrimaryKey, Is.False); + Assert.That(sid.AllowNulls); + }); diff --git a/Rdmp.Dicom.Tests/Integration/LiveVsTemplateComparerTests.cs b/Rdmp.Dicom.Tests/Integration/LiveVsTemplateComparerTests.cs index 1c2c751d..4099e42d 100644 --- a/Rdmp.Dicom.Tests/Integration/LiveVsTemplateComparerTests.cs +++ b/Rdmp.Dicom.Tests/Integration/LiveVsTemplateComparerTests.cs @@ -7,7 +7,7 @@ namespace Rdmp.Dicom.Tests.Integration; -class LiveVsTemplateComparerTests:DatabaseTests +class LiveVsTemplateComparerTests : DatabaseTests { [TestCase(DatabaseType.MySql)] @@ -53,28 +53,28 @@ public void TestImageTemplates(DatabaseType type) // use it to create a table var tbl = db.ExpectTable(template.TableName); - var cmd = new ExecuteCommandCreateNewImagingDataset(RepositoryLocator,tbl , template); - Assert.IsFalse(cmd.IsImpossible); + var cmd = new ExecuteCommandCreateNewImagingDataset(RepositoryLocator, tbl, template); + Assert.That(cmd.IsImpossible, Is.False); cmd.Execute(); - Assert.IsTrue(tbl.Exists()); + Assert.That(tbl.Exists()); // import RDMP reference to the table - var importer = new TableInfoImporter(CatalogueRepository,tbl); - importer.DoImport(out var ti,out _); + var importer = new TableInfoImporter(CatalogueRepository, tbl); + importer.DoImport(out var ti, out _); // compare the live with the template - var comparer = new LiveVsTemplateComparer(ti,new() { DatabaseType = type,Tables = new() { template } }); + var comparer = new LiveVsTemplateComparer(ti, new() { DatabaseType = type, Tables = new() { template } }); // should be no differences - Assert.AreEqual(comparer.TemplateSql,comparer.LiveSql); + Assert.That(comparer.LiveSql, Is.EqualTo(comparer.TemplateSql)); // make a difference tbl.DropColumn(tbl.DiscoverColumn("EchoTime")); - + //now comparer should see a difference - comparer = new(ti,new() { DatabaseType = type,Tables = new() { template } }); - Assert.AreNotEqual(comparer.TemplateSql,comparer.LiveSql); + comparer = new(ti, new() { DatabaseType = type, Tables = new() { template } }); + Assert.That(comparer.LiveSql, Is.Not.EqualTo(comparer.TemplateSql)); tbl.Drop(); } diff --git a/Rdmp.Dicom.Tests/Integration/SMICacheLayoutTests.cs b/Rdmp.Dicom.Tests/Integration/SMICacheLayoutTests.cs index bb6c2e3f..906a7159 100644 --- a/Rdmp.Dicom.Tests/Integration/SMICacheLayoutTests.cs +++ b/Rdmp.Dicom.Tests/Integration/SMICacheLayoutTests.cs @@ -11,10 +11,10 @@ class SMICacheLayoutTests public void TestFactoryConstruction() { var rootDirectory = new DirectoryInfo(TestContext.CurrentContext.WorkDirectory); - var layout = new SMICacheLayout(rootDirectory,new("CT")); + var layout = new SMICacheLayout(rootDirectory, new("CT")); var downloadDirectory = layout.GetLoadCacheDirectory(ThrowImmediatelyDataLoadEventListener.Quiet); - Assert.AreEqual(downloadDirectory.FullName, Path.Combine(rootDirectory.FullName, "CT")); + Assert.That(Path.Combine(rootDirectory.FullName, "CT"), Is.EqualTo(downloadDirectory.FullName)); } } \ No newline at end of file diff --git a/Rdmp.Dicom.Tests/Integration/SemEHRApiCallerTests.cs b/Rdmp.Dicom.Tests/Integration/SemEHRApiCallerTests.cs index a0acafd1..831f3790 100644 --- a/Rdmp.Dicom.Tests/Integration/SemEHRApiCallerTests.cs +++ 
b/Rdmp.Dicom.Tests/Integration/SemEHRApiCallerTests.cs @@ -17,8 +17,7 @@ namespace Rdmp.Dicom.Tests.Integration; public class SemEHRApiCallerTests : DatabaseTests { - - public CachedAggregateConfigurationResultsManager SetupCache(DatabaseType dbType, out DiscoveredDatabase cacheDb) + private CachedAggregateConfigurationResultsManager SetupCache(DatabaseType dbType, out DiscoveredDatabase cacheDb) { cacheDb = GetCleanedServer(dbType); var creator = new MasterDatabaseScriptExecutor(cacheDb); @@ -29,7 +28,7 @@ public CachedAggregateConfigurationResultsManager SetupCache(DatabaseType dbType var eds = new ExternalDatabaseServer(CatalogueRepository, "cache", patcher); eds.SetProperties(cacheDb); - return new(eds); + return new CachedAggregateConfigurationResultsManager(eds); } @@ -43,9 +42,9 @@ public void TalkToApi(DatabaseType dbType) var cata = new Catalogue(CatalogueRepository, $"{PluginCohortCompiler.ApiPrefix}cata"); var cic = new CohortIdentificationConfiguration(CatalogueRepository, "my cic"); cic.CreateRootContainerIfNotExists(); - + var ac = new AggregateConfiguration(CatalogueRepository, cata, "blah"); - cic.RootCohortAggregateContainer.AddChild(ac,0); + cic.RootCohortAggregateContainer.AddChild(ac, 0); var semEHRConfiguration = new SemEHRConfiguration() { @@ -58,9 +57,9 @@ public void TalkToApi(DatabaseType dbType) var resultTable = cacheMgr.GetLatestResultsTableUnsafe(ac, AggregateOperation.IndexedExtractionIdentifierList); - Assert.IsNotNull(resultTable); + Assert.That(resultTable, Is.Not.Null); var tbl = cacheDb.ExpectTable(resultTable.GetRuntimeName()); - Assert.AreEqual(75, tbl.GetDataTable().Rows.Count); + Assert.That(tbl.GetDataTable().Rows, Has.Count.EqualTo(75)); } } \ No newline at end of file diff --git a/Rdmp.Dicom.Tests/IsolationReviewTests.cs b/Rdmp.Dicom.Tests/IsolationReviewTests.cs index 940e19e1..b37ea855 100644 --- a/Rdmp.Dicom.Tests/IsolationReviewTests.cs +++ b/Rdmp.Dicom.Tests/IsolationReviewTests.cs @@ -45,9 +45,9 @@ public void TestFindTables(DatabaseType dbType) // 'pk' 3 differs on col B dt.Rows.Add(3, 1, 1); dt.Rows.Add(3, 2, 1); - - - db.CreateTable("mytbl_Isolation",dt); + + + db.CreateTable("mytbl_Isolation", dt); var lmd = new LoadMetadata(CatalogueRepository, "ExampleLoad"); var pt = new ProcessTask(CatalogueRepository, lmd, LoadStage.AdjustRaw) @@ -57,38 +57,41 @@ public void TestFindTables(DatabaseType dbType) }; pt.SaveToDatabase(); - //make an isolation db that is the - var eds = new ExternalDatabaseServer(CatalogueRepository,"Isolation db",null); + //make an isolation db that is the + var eds = new ExternalDatabaseServer(CatalogueRepository, "Isolation db", null); eds.SetProperties(db); var args = pt.CreateArgumentsForClassIfNotExists(typeof(PrimaryKeyCollisionIsolationMutilation)); var ti = new TableInfo(CatalogueRepository, "mytbl"); - var ci = new ColumnInfo(CatalogueRepository, "A", "varchar(1)", ti) {IsPrimaryKey = true}; + var ci = new ColumnInfo(CatalogueRepository, "A", "varchar(1)", ti) { IsPrimaryKey = true }; ci.SaveToDatabase(); SetArg(args, "IsolationDatabase", eds); - SetArg(args, "TablesToIsolate", new []{ti}); - + SetArg(args, "TablesToIsolate", new[] { ti }); + var reviewer = new IsolationReview(pt); //no error since it is configured correctly - Assert.IsNull(reviewer.Error); + Assert.That(reviewer.Error, Is.Null); //tables should exist var isolationTables = reviewer.GetIsolationTables(); - Assert.IsTrue(isolationTables.Single().Value.Exists()); + Assert.That(isolationTables.Single().Value.Exists()); - - var diffDataTable = 
reviewer.GetDifferences(isolationTables.Single(),out var diffs); - - Assert.AreEqual(6,diffDataTable.Rows.Count); - Assert.AreEqual(6,diffs.Count); + + var diffDataTable = reviewer.GetDifferences(isolationTables.Single(), out var diffs); + + Assert.Multiple(() => + { + Assert.That(diffDataTable.Rows, Has.Count.EqualTo(6)); + Assert.That(diffs, Has.Count.EqualTo(6)); + }); } - private void SetArg(IArgument[] args, string argName, object value) + private static void SetArg(IArgument[] args, string argName, object value) { - var arg = args.Single(a=>a.Name.Equals(argName)); + var arg = args.Single(a => a.Name.Equals(argName)); arg.SetValue(value); arg.SaveToDatabase(); } diff --git a/Rdmp.Dicom.Tests/PackageListIsCorrectTests.cs b/Rdmp.Dicom.Tests/PackageListIsCorrectTests.cs index b1fa7a40..61de3540 100644 --- a/Rdmp.Dicom.Tests/PackageListIsCorrectTests.cs +++ b/Rdmp.Dicom.Tests/PackageListIsCorrectTests.cs @@ -9,19 +9,19 @@ namespace Rdmp.Dicom.Tests; /// -/// Tests to confirm that the dependencies in csproj files (NuGet packages) match those in the .nuspec files and that packages.md +/// Tests to confirm that the dependencies in csproj files (NuGet packages) match those in the .nuspec files and that packages.md /// lists the correct versions (in documentation) /// public class PackageListIsCorrectTests { - private static readonly EnumerationOptions EnumerationOptions = new() { RecurseSubdirectories = true,MatchCasing = MatchCasing.CaseInsensitive,IgnoreInaccessible = true}; + private static readonly EnumerationOptions EnumerationOptions = new() { RecurseSubdirectories = true, MatchCasing = MatchCasing.CaseInsensitive, IgnoreInaccessible = true }; // - private static readonly Regex RPackageRef = new(@" @@ -29,17 +29,17 @@ public class PackageListIsCorrectTests /// /// [TestCase] - public void TestPackagesDocumentCorrect(string rootPath=null) + public void TestPackagesDocumentCorrect(string rootPath = null) { - var root= FindRoot(rootPath); + var root = FindRoot(rootPath); var undocumented = new StringBuilder(); // Extract the named packages from PACKAGES.md var packagesMarkdown = GetPackagesMarkdown(root).SelectMany(File.ReadAllLines) .Select(line => RMarkdownEntry.Match(line)) - .Where(m=>m.Success) + .Where(m => m.Success) .Select(m => m.Groups[1].Value) - .Except(new[]{"Package", "-------" }) + .Except(new[] { "Package", "-------" }) .ToHashSet(StringComparer.InvariantCultureIgnoreCase); // Extract the named packages from csproj files @@ -51,9 +51,12 @@ public void TestPackagesDocumentCorrect(string rootPath=null) undocumented.AppendJoin(Environment.NewLine, undocumentedPackages); var unusedPackages = packagesMarkdown.Except(usedPackages).ToArray(); - Assert.IsEmpty(unusedPackages, - $"The following packages are listed in PACKAGES.md but are not used in any csproj file: {string.Join(", ", unusedPackages)}"); - Assert.IsEmpty(undocumented.ToString()); + Assert.Multiple(() => + { + Assert.That(unusedPackages, Is.Empty, + $"The following packages are listed in PACKAGES.md but are not used in any csproj file: {string.Join(", ", unusedPackages)}"); + Assert.That(undocumented.ToString(), Is.Empty); + }); } /// @@ -79,7 +82,7 @@ private static DirectoryInfo FindRoot(string path = null) var root = new DirectoryInfo(TestContext.CurrentContext.TestDirectory); while (!root.EnumerateFiles("*.sln", SearchOption.TopDirectoryOnly).Any() && root.Parent != null) root = root.Parent; - Assert.IsNotNull(root.Parent, "Could not find root of repository"); + Assert.That(root.Parent, Is.Not.Null, "Could not 
find root of repository"); return root; } @@ -101,7 +104,7 @@ private static IEnumerable GetCsprojFiles(DirectoryInfo root) private static string[] GetPackagesMarkdown(DirectoryInfo root) { var path = root.EnumerateFiles("packages.md", EnumerationOptions).Select(f => f.FullName).ToArray(); - Assert.False(path.Length==0, "Could not find packages.md"); + Assert.That(path, Is.Not.Empty, "Could not find packages.md"); return path; } diff --git a/Rdmp.Dicom.Tests/PressureGaugeTests.cs b/Rdmp.Dicom.Tests/PressureGaugeTests.cs index 4736ccf3..c9a17e7f 100644 --- a/Rdmp.Dicom.Tests/PressureGaugeTests.cs +++ b/Rdmp.Dicom.Tests/PressureGaugeTests.cs @@ -17,7 +17,7 @@ public void TestGauge_NotReached() ThresholdBeatsPerMinute = 4 }; g.Tick(new DateTime(2001, 01, 01, 01, 01, 01), ThrowImmediatelyDataLoadEventListener.Quiet, () => someFact = true); - Assert.IsFalse(someFact); + Assert.That(someFact, Is.False); } [Test] public void TestGauge_NotReached_OverTime() @@ -31,11 +31,11 @@ public void TestGauge_NotReached_OverTime() // events are 1 minute appart so does not trigger g.Tick(new(2001, 01, 01, 01, 01, 01), ThrowImmediatelyDataLoadEventListener.Quiet, () => someFact = true); - Assert.IsFalse(someFact); + Assert.That(someFact, Is.False); g.Tick(new(2001, 01, 01, 01, 02, 01), ThrowImmediatelyDataLoadEventListener.Quiet, () => someFact = true); - Assert.IsFalse(someFact); + Assert.That(someFact, Is.False); g.Tick(new(2001, 01, 01, 01, 03, 01), ThrowImmediatelyDataLoadEventListener.Quiet, () => someFact = true); - Assert.IsFalse(someFact); + Assert.That(someFact, Is.False); } [Test] public void TestGauge_Reached() @@ -47,15 +47,15 @@ public void TestGauge_Reached() ThresholdBeatsPerMinute = 4 }; g.Tick(new DateTime(2001, 01, 01, 01, 01, 01), ThrowImmediatelyDataLoadEventListener.Quiet, () => someFact = true); - Assert.IsFalse(someFact); + Assert.That(someFact, Is.False); g.Tick(new(2001, 01, 01, 01, 01, 01), ThrowImmediatelyDataLoadEventListener.Quiet, () => someFact = true); - Assert.IsFalse(someFact); + Assert.That(someFact, Is.False); g.Tick(new(2001, 01, 01, 01, 01, 01), ThrowImmediatelyDataLoadEventListener.Quiet, () => someFact = true); - Assert.IsFalse(someFact); + Assert.That(someFact, Is.False); g.Tick(new(2001, 01, 01, 01, 01, 01), ThrowImmediatelyDataLoadEventListener.Quiet, () => someFact = true); - Assert.IsFalse(someFact); + Assert.That(someFact, Is.False); g.Tick(new(2001, 01, 01, 01, 01, 01), ThrowImmediatelyDataLoadEventListener.Quiet, () => someFact = true); - Assert.IsTrue(someFact); + Assert.That(someFact); } [Test] @@ -68,9 +68,9 @@ public void TestGauge_Reached_OverTime() ThresholdBeatsPerMinute = 1 }; g.Tick(new DateTime(2001, 01, 01, 01, 01, 01), ThrowImmediatelyDataLoadEventListener.Quiet, () => someFact = true); - Assert.IsFalse(someFact); + Assert.That(someFact, Is.False); g.Tick(new(2001, 01, 01, 01, 01, 30), ThrowImmediatelyDataLoadEventListener.Quiet, () => someFact = true); - Assert.IsTrue(someFact); + Assert.That(someFact); } [Test] public void TestGauge_Reached_OverTime_Boundary() @@ -82,8 +82,8 @@ public void TestGauge_Reached_OverTime_Boundary() ThresholdBeatsPerMinute = 1 }; g.Tick(new DateTime(2001, 01, 01, 01, 01, 30), ThrowImmediatelyDataLoadEventListener.Quiet, () => someFact = true); - Assert.IsFalse(someFact); + Assert.That(someFact, Is.False); g.Tick(new(2001, 01, 01, 01, 02, 29), ThrowImmediatelyDataLoadEventListener.Quiet, () => someFact = true); - Assert.IsTrue(someFact); + Assert.That(someFact); } } \ No newline at end of file diff --git 
a/Rdmp.Dicom.Tests/Rdmp.Dicom.Tests.csproj b/Rdmp.Dicom.Tests/Rdmp.Dicom.Tests.csproj index e5516d83..17aacc74 100644 --- a/Rdmp.Dicom.Tests/Rdmp.Dicom.Tests.csproj +++ b/Rdmp.Dicom.Tests/Rdmp.Dicom.Tests.csproj @@ -44,7 +44,11 @@ - + + + all + runtime; build; native; contentfiles; analyzers; buildtransitive + diff --git a/Rdmp.Dicom.Tests/TestProcessBasedCacheSource.cs b/Rdmp.Dicom.Tests/TestProcessBasedCacheSource.cs index aa0d6fba..1e047ffa 100644 --- a/Rdmp.Dicom.Tests/TestProcessBasedCacheSource.cs +++ b/Rdmp.Dicom.Tests/TestProcessBasedCacheSource.cs @@ -55,8 +55,10 @@ public void TestWithEcho() var fork = new ForkDataLoadEventListener(toMem, ThrowImmediatelyDataLoadEventListener.Quiet); source.GetChunk(fork, new()); - Assert.Contains($"Hey Thomas go get 24/12/01 and store in {Path.Combine(loadDir.Cache.FullName, "ALL")}", - toMem.GetAllMessagesByProgressEventType()[ProgressEventType.Information].Select(v => v.Message).ToArray()); + Assert.That( + toMem.GetAllMessagesByProgressEventType()[ProgressEventType.Information].Select(static v => v.Message) + .ToArray(), + Does.Contain($"Hey Thomas go get 24/12/01 and store in {Path.Combine(loadDir.Cache.FullName, "ALL")}")); } private static bool IsLinux => Environment.OSVersion.Platform != PlatformID.Win32NT; diff --git a/Rdmp.Dicom.Tests/Unit/AmbiguousFilePathTests.cs b/Rdmp.Dicom.Tests/Unit/AmbiguousFilePathTests.cs index 29466eef..5ec87c80 100644 --- a/Rdmp.Dicom.Tests/Unit/AmbiguousFilePathTests.cs +++ b/Rdmp.Dicom.Tests/Unit/AmbiguousFilePathTests.cs @@ -18,29 +18,28 @@ public void BasicPathsTest() if (!EnvironmentInfo.IsLinux) return; //in linux this looks like a relative path - var ex = Assert.Throws(()=>_=new AmbiguousFilePath(@"c:\temp\my.dcm")); - StringAssert.StartsWith("Relative path was encountered without specifying a root",ex?.Message); + var ex = Assert.Throws(() => _ = new AmbiguousFilePath(@"c:\temp\my.dcm")); + Assert.That(ex?.Message, Does.StartWith("Relative path was encountered without specifying a root")); - ex = Assert.Throws(()=>_=new AmbiguousFilePath(@"c:\temp",@"c:\temp\my.dcm")); - StringAssert.IsMatch("Specified root path '.*' was not IsAbsolute",ex?.Message); + ex = Assert.Throws(() => _ = new AmbiguousFilePath(@"c:\temp", @"c:\temp\my.dcm")); + Assert.That(ex?.Message, Does.Match("Specified root path '.*' was not IsAbsolute")); } - + [Test] public void GetDatasetFromFileTest() { - FileInfo f = new(Path.Combine(TestContext.CurrentContext.WorkDirectory,"test.dcm")); - + FileInfo f = new(Path.Combine(TestContext.CurrentContext.WorkDirectory, "test.dcm")); File.Copy( - Path.Combine(TestContext.CurrentContext.TestDirectory,"TestData","IM-0001-0013.dcm"), - f.FullName,true); + Path.Combine(TestContext.CurrentContext.TestDirectory, "TestData", "IM-0001-0013.dcm"), + f.FullName, true); var a = new AmbiguousFilePath(f.FullName); var ds = a.GetDataset().Single().Item2; - Assert.NotNull(ds.Dataset.GetValue(DicomTag.SOPInstanceUID,0)); + Assert.That(ds.Dataset.GetValue(DicomTag.SOPInstanceUID, 0), Is.Not.Null); f.Delete(); } @@ -61,12 +60,12 @@ public void GetDatasetFromZipFileTest() s.Write(bytes, 0, bytes.Length); } - Assert.Throws(() => _=new AmbiguousFilePath(Path.Combine(TestContext.CurrentContext.WorkDirectory, "omgzip.zip!lol")).GetDataset().ToList()); + Assert.Throws(() => _ = new AmbiguousFilePath(Path.Combine(TestContext.CurrentContext.WorkDirectory, "omgzip.zip!lol")).GetDataset().ToList()); var a = new AmbiguousFilePath(Path.Combine(TestContext.CurrentContext.WorkDirectory, "omgzip.zip!test.dcm")); var ds = 
a.GetDataset().Single().Item2; - Assert.NotNull(ds.Dataset.GetValue(DicomTag.SOPInstanceUID, 0)); + Assert.That(ds.Dataset.GetValue(DicomTag.SOPInstanceUID, 0), Is.Not.Null); fzip.Delete(); } @@ -103,50 +102,47 @@ public void GetDatasetFromZipFile_WithPooling_Test() public void TestZipEntry_Exists() { var zipFile = new FileInfo(Path.Combine(TestContext.CurrentContext.WorkDirectory, "my.zip")); - var rootDir = Directory.CreateDirectory(Path.Combine(TestContext.CurrentContext.WorkDirectory,nameof(TestZipEntry_Exists))); + var rootDir = Directory.CreateDirectory(Path.Combine(TestContext.CurrentContext.WorkDirectory, nameof(TestZipEntry_Exists))); var subDirectory = rootDir.CreateSubdirectory("subdir"); - - var sourceFile = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory,@"TestData/IM-0001-0013.dcm")); + var sourceFile = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, @"TestData/IM-0001-0013.dcm")); - sourceFile.CopyTo(Path.Combine(rootDir.FullName, "file1.dcm"),true); - sourceFile.CopyTo(Path.Combine(subDirectory.FullName,"file2.dcm"),true); + sourceFile.CopyTo(Path.Combine(rootDir.FullName, "file1.dcm"), true); + sourceFile.CopyTo(Path.Combine(subDirectory.FullName, "file2.dcm"), true); - if(zipFile.Exists) + if (zipFile.Exists) zipFile.Delete(); - - ZipFile.CreateFromDirectory(rootDir.FullName,zipFile.FullName); - + ZipFile.CreateFromDirectory(rootDir.FullName, zipFile.FullName); FileAssert.Exists(zipFile.FullName); var exists = new AmbiguousFilePath($"{zipFile.FullName}!file1.dcm"); - Assert.IsNotNull(exists.GetDataset()); + Assert.That(exists.GetDataset(), Is.Not.Null); var notexists = new AmbiguousFilePath($"{zipFile.FullName}!file2.dcm"); - var ex = Assert.Throws(()=>notexists.GetDataset().ToList()); + var ex = Assert.Throws(() => notexists.GetDataset().ToList()); - StringAssert.Contains("Could not find path 'file2.dcm' within zip archive",ex.Message); + Assert.That(ex.Message, Does.Contain("Could not find path 'file2.dcm' within zip archive")); - var existsRelative = new AmbiguousFilePath(zipFile.DirectoryName,"my.zip!file1.dcm"); - Assert.IsNotNull(existsRelative.GetDataset()); + var existsRelative = new AmbiguousFilePath(zipFile.DirectoryName, "my.zip!file1.dcm"); + Assert.That(existsRelative.GetDataset(), Is.Not.Null); - var existsRelativeWithLeadingSlash = new AmbiguousFilePath(zipFile.DirectoryName,"my.zip!/file1.dcm"); - Assert.IsNotNull(existsRelativeWithLeadingSlash.GetDataset()); + var existsRelativeWithLeadingSlash = new AmbiguousFilePath(zipFile.DirectoryName, "my.zip!/file1.dcm"); + Assert.That(existsRelativeWithLeadingSlash.GetDataset(), Is.Not.Null); - var existsRelativeWithLeadingSlashInSubdir = new AmbiguousFilePath(zipFile.DirectoryName,"my.zip!/subdir/file2.dcm"); - Assert.IsNotNull(existsRelativeWithLeadingSlashInSubdir.GetDataset()); + var existsRelativeWithLeadingSlashInSubdir = new AmbiguousFilePath(zipFile.DirectoryName, "my.zip!/subdir/file2.dcm"); + Assert.That(existsRelativeWithLeadingSlashInSubdir.GetDataset(), Is.Not.Null); - var existsRelativeWithLeadingBackSlashInSubdir = new AmbiguousFilePath(zipFile.DirectoryName,"my.zip!\\subdir\\file2.dcm"); - Assert.IsNotNull(existsRelativeWithLeadingBackSlashInSubdir.GetDataset()); + var existsRelativeWithLeadingBackSlashInSubdir = new AmbiguousFilePath(zipFile.DirectoryName, "my.zip!\\subdir\\file2.dcm"); + Assert.That(existsRelativeWithLeadingBackSlashInSubdir.GetDataset(), Is.Not.Null); } - [TestCase(@"c:\temp\fff.dcm",true)] - [TestCase(@"c:\temp\fff",true)] - 
[TestCase(@"c:\temp\12.123.213.4214.15.dcm",true)] - [TestCase(@"c:\temp\12.123.213.4214.15",true)] - [TestCase(@"c:\temp\ff.zip",false)] - [TestCase(@"c:\temp\ff.tar",false)] + [TestCase(@"c:\temp\fff.dcm", true)] + [TestCase(@"c:\temp\fff", true)] + [TestCase(@"c:\temp\12.123.213.4214.15.dcm", true)] + [TestCase(@"c:\temp\12.123.213.4214.15", true)] + [TestCase(@"c:\temp\ff.zip", false)] + [TestCase(@"c:\temp\ff.tar", false)] public void TestIsDicomReference(string input, bool expected) { - Assert.AreEqual(expected,AmbiguousFilePath.IsDicomReference(input)); + Assert.That(AmbiguousFilePath.IsDicomReference(input), Is.EqualTo(expected)); } } \ No newline at end of file diff --git a/Rdmp.Dicom.Tests/Unit/CFindDirSourceTests.cs b/Rdmp.Dicom.Tests/Unit/CFindDirSourceTests.cs index 74dc154a..3189a746 100644 --- a/Rdmp.Dicom.Tests/Unit/CFindDirSourceTests.cs +++ b/Rdmp.Dicom.Tests/Unit/CFindDirSourceTests.cs @@ -34,22 +34,24 @@ someAE DX\SR XR Elbow Lt 0102030405 TEXT 1.2.3.4.60 20200416 someAE XA\SR Fluoroscopy upper limb Lt 0102030405 TEXT 1.2.3.4.70 20200416 */ - Assert.AreEqual(3, dt.Rows.Count); + Assert.That(dt.Rows, Has.Count.EqualTo(3)); + Assert.Multiple(() => + { + Assert.That(dt.Rows[0]["StudyDescription"], Is.EqualTo("XR Facial bones")); + Assert.That(dt.Rows[1]["StudyDescription"], Is.EqualTo("XR Elbow Lt")); + Assert.That(dt.Rows[2]["StudyDescription"], Is.EqualTo("Fluoroscopy upper limb Lt")); - Assert.AreEqual("XR Facial bones", dt.Rows[0]["StudyDescription"]); - Assert.AreEqual("XR Elbow Lt", dt.Rows[1]["StudyDescription"]); - Assert.AreEqual("Fluoroscopy upper limb Lt", dt.Rows[2]["StudyDescription"]); + Assert.That(dt.Rows[0]["StudyInstanceUID"], Is.EqualTo("1.2.3.4.50")); + Assert.That(dt.Rows[1]["StudyInstanceUID"], Is.EqualTo("1.2.3.4.60")); + Assert.That(dt.Rows[2]["StudyInstanceUID"], Is.EqualTo("1.2.3.4.70")); - Assert.AreEqual("1.2.3.4.50", dt.Rows[0]["StudyInstanceUID"]); - Assert.AreEqual("1.2.3.4.60", dt.Rows[1]["StudyInstanceUID"]); - Assert.AreEqual("1.2.3.4.70", dt.Rows[2]["StudyInstanceUID"]); + Assert.That(dt.Rows[0]["RetrieveAETitle"], Is.EqualTo("someAE")); + Assert.That(dt.Rows[1]["RetrieveAETitle"], Is.EqualTo("someAE")); + Assert.That(dt.Rows[2]["RetrieveAETitle"], Is.EqualTo("someAE")); + }); - Assert.AreEqual("someAE", dt.Rows[0]["RetrieveAETitle"]); - Assert.AreEqual("someAE", dt.Rows[1]["RetrieveAETitle"]); - Assert.AreEqual("someAE", dt.Rows[2]["RetrieveAETitle"]); - - Assert.IsNotNull(dt.TableName); + Assert.That(dt.TableName, Is.Not.Null); } @@ -75,6 +77,6 @@ public void Test_ReadExampleXml_Directory() var dt = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new()); - Assert.AreEqual(3, dt.Rows.Count); + Assert.That(dt.Rows, Has.Count.EqualTo(3)); } } diff --git a/Rdmp.Dicom.Tests/Unit/CFindSourceTests.cs b/Rdmp.Dicom.Tests/Unit/CFindSourceTests.cs index 1123b1b2..9c9b7e1a 100644 --- a/Rdmp.Dicom.Tests/Unit/CFindSourceTests.cs +++ b/Rdmp.Dicom.Tests/Unit/CFindSourceTests.cs @@ -28,10 +28,10 @@ public void TestRunFindOn_PublicServer() // file name is miday on 2001 1st January var f = Path.Combine(dir.FullName, @"out/Data/Cache/ALL/20010101120000.csv"); FileAssert.Exists(f); - + var result = File.ReadAllLines(f); // should be at least 1 image in the public test server - Assert.GreaterOrEqual(result.Length,1); + Assert.That(result, Is.Not.Empty); } } \ No newline at end of file diff --git a/Rdmp.Dicom.Tests/Unit/ConnectToOrthancTest.cs b/Rdmp.Dicom.Tests/Unit/ConnectToOrthancTest.cs index cb56cc36..ae02d6cc 100644 --- 
a/Rdmp.Dicom.Tests/Unit/ConnectToOrthancTest.cs +++ b/Rdmp.Dicom.Tests/Unit/ConnectToOrthancTest.cs @@ -16,15 +16,16 @@ public void EchoTest(string host, int port) var success = false; var client = DicomClientFactory.Create(host, port, false, LocalAetTitle, RemoteAetTitle); client.AddRequestAsync(new DicomCEchoRequest + { + OnResponseReceived = (req, res) => { - OnResponseReceived = (req,res) => { - success = true; - } + success = true; } + } ).Wait(); client.SendAsync().Wait(); - Assert.True(success,$"No echo response from PACS on {host}:{port}"); + Assert.That(success, $"No echo response from PACS on {host}:{port}"); } - + } \ No newline at end of file diff --git a/Rdmp.Dicom.Tests/Unit/DICOMFileCollectionSourceTests.cs b/Rdmp.Dicom.Tests/Unit/DICOMFileCollectionSourceTests.cs index 39696257..2d79e378 100644 --- a/Rdmp.Dicom.Tests/Unit/DICOMFileCollectionSourceTests.cs +++ b/Rdmp.Dicom.Tests/Unit/DICOMFileCollectionSourceTests.cs @@ -27,19 +27,19 @@ public void Test_SingleFile(bool expressRelative) { var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); - var source = new DicomFileCollectionSource {FilenameField = "RelativeFileArchiveURI"}; + var source = new DicomFileCollectionSource { FilenameField = "RelativeFileArchiveURI" }; if (expressRelative) source.ArchiveRoot = TestContext.CurrentContext.TestDirectory; - var f = new FlatFileToLoad(new(Path.Combine(TestContext.CurrentContext.TestDirectory,@"TestData/IM-0001-0013.dcm"))); - + var f = new FlatFileToLoad(new(Path.Combine(TestContext.CurrentContext.TestDirectory, @"TestData/IM-0001-0013.dcm"))); + source.PreInitialize(new FlatFileToLoadDicomFileWorklist(f), ThrowImmediatelyDataLoadEventListener.Quiet); var tbl = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new()); var destination = new DataTableUploadDestination(); - - destination.PreInitialize(db,ThrowImmediatelyDataLoadEventListener.Quiet); + + destination.PreInitialize(db, ThrowImmediatelyDataLoadEventListener.Quiet); destination.AllowResizingColumnsAtUploadTime = true; destination.ProcessPipelineData(tbl, ThrowImmediatelyDataLoadEventListener.Quiet, new()); destination.Dispose(ThrowImmediatelyDataLoadEventListener.Quiet, null); @@ -49,15 +49,15 @@ public void Test_SingleFile(bool expressRelative) using (var dt = finalTable.GetDataTable()) { //should be 1 row in the final table - Assert.AreEqual(1,dt.Rows.Count); - + Assert.That(dt.Rows, Has.Count.EqualTo(1)); + //the path referenced should be the file read in relative/absolute format - Assert.AreEqual(expressRelative ? "./TestData/IM-0001-0013.dcm": - f.File.FullName.Replace('\\','/') - ,dt.Rows[0]["RelativeFileArchiveURI"]); + Assert.That(dt.Rows[0]["RelativeFileArchiveURI"], Is.EqualTo(expressRelative ? 
"./TestData/IM-0001-0013.dcm" : + f.File.FullName.Replace('\\', '/') +)); } - Assert.IsTrue(finalTable.Exists()); + Assert.That(finalTable.Exists()); finalTable.Drop(); } @@ -71,23 +71,23 @@ public void Test_ZipFile(bool expressRelative) //create a folder in which to generate some dicoms var dirToLoad = new DirectoryInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, nameof(Test_ZipFile))); - if(dirToLoad.Exists) + if (dirToLoad.Exists) dirToLoad.Delete(true); - + dirToLoad.Create(); //generate some random dicoms var r = new Random(999); - using DicomDataGenerator generator = new(r, dirToLoad.FullName, "CT") {MaximumImages = 5}; + using DicomDataGenerator generator = new(r, dirToLoad.FullName, "CT") { MaximumImages = 5 }; var people = new PersonCollection(); - people.GeneratePeople(1,r); - generator.GenerateTestDataFile(people,new("./inventory.csv"),1); + people.GeneratePeople(1, r); + generator.GenerateTestDataFile(people, new("./inventory.csv"), 1); //This generates // Test_ZipFile // 2015 // 3 - // 18 + // 18 // 751140 2.25.166922918107154891877498685128076062226.dcm // 751140 2.25.179610809676265137473873365625829826423.dcm // 751140 2.25.201969634959506849065133495434871450465.dcm @@ -95,21 +95,21 @@ public void Test_ZipFile(bool expressRelative) // 751140 2.25.316241631782653383510844072713132248731.dcm var yearDir = dirToLoad.GetDirectories().Single(); - StringAssert.IsMatch("\\d{4}",yearDir.Name); + Assert.That(yearDir.Name, Does.Match("\\d{4}")); //zip them up FileInfo zip = new(Path.Combine(TestContext.CurrentContext.TestDirectory, $"{nameof(Test_ZipFile)}.zip")); - if(zip.Exists) + if (zip.Exists) zip.Delete(); - ZipFile.CreateFromDirectory(dirToLoad.FullName,zip.FullName); + ZipFile.CreateFromDirectory(dirToLoad.FullName, zip.FullName); //tell the source to load the zip var f = new FlatFileToLoad(zip); - var source = new DicomFileCollectionSource {FilenameField = "RelativeFileArchiveURI"}; + var source = new DicomFileCollectionSource { FilenameField = "RelativeFileArchiveURI" }; if (expressRelative) source.ArchiveRoot = TestContext.CurrentContext.TestDirectory; @@ -118,39 +118,38 @@ public void Test_ZipFile(bool expressRelative) var tbl = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new()); var destination = new DataTableUploadDestination(); - - destination.PreInitialize(db,ThrowImmediatelyDataLoadEventListener.Quiet); + + destination.PreInitialize(db, ThrowImmediatelyDataLoadEventListener.Quiet); destination.AllowResizingColumnsAtUploadTime = true; destination.ProcessPipelineData(tbl, ThrowImmediatelyDataLoadEventListener.Quiet, new()); destination.Dispose(ThrowImmediatelyDataLoadEventListener.Quiet, null); var finalTable = db.ExpectTable(destination.TargetTableName); - + using (var dt = finalTable.GetDataTable()) { //should be 5 rows in the final table (5 images) - Assert.AreEqual(5,dt.Rows.Count); + Assert.That(dt.Rows, Has.Count.EqualTo(5)); - var pathInDbToDicomFile = (string) dt.Rows[0]["RelativeFileArchiveURI"]; + var pathInDbToDicomFile = (string)dt.Rows[0]["RelativeFileArchiveURI"]; //We expect either something like: // E:/RdmpDicom/Rdmp.Dicom.Tests/bin/Debug/netcoreapp2.2/Test_ZipFile.zip!2015/3/18/2.25.160787663560951826149226183314694084702.dcm // ./Test_ZipFile.zip!2015/3/18/2.25.105592977437473375573190160334447272386.dcm //the path referenced should be the file read in relative/absolute format - StringAssert.IsMatch( - expressRelative ? 
$@"./{zip.Name}![\d./]*.dcm": - $@"{Regex.Escape(zip.FullName.Replace('\\','/'))}![\d./]*.dcm", - pathInDbToDicomFile); + Assert.That( + pathInDbToDicomFile, Does.Match(expressRelative ? $@"./{zip.Name}![\d./]*.dcm" : + $@"{Regex.Escape(zip.FullName.Replace('\\', '/'))}![\d./]*.dcm")); - StringAssert.Contains(yearDir.Name,pathInDbToDicomFile,"Expected zip file to have subdirectories and for them to be loaded correctly"); + Assert.That(pathInDbToDicomFile, Does.Contain(yearDir.Name), "Expected zip file to have subdirectories and for them to be loaded correctly"); //confirm we can read that out again var path = new AmbiguousFilePath(TestContext.CurrentContext.TestDirectory, pathInDbToDicomFile); - Assert.IsNotNull(path.GetDataset(0,0)); + Assert.That(path.GetDataset(0, 0), Is.Not.Null); } - Assert.IsTrue(finalTable.Exists()); + Assert.That(finalTable.Exists()); finalTable.Drop(); } @@ -164,23 +163,23 @@ public void Test_ZipFileNotation(bool expressRelative) //create a folder in which to generate some dicoms var dirToLoad = new DirectoryInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, nameof(Test_ZipFileNotation))); - if(dirToLoad.Exists) + if (dirToLoad.Exists) dirToLoad.Delete(true); - + dirToLoad.Create(); //generate some random dicoms var r = new Random(999); - using DicomDataGenerator generator = new(r, dirToLoad.FullName, "CT") {MaximumImages = 5}; + using DicomDataGenerator generator = new(r, dirToLoad.FullName, "CT") { MaximumImages = 5 }; var people = new PersonCollection(); - people.GeneratePeople(1,r); - generator.GenerateTestDataFile(people,new("./inventory.csv"),1); + people.GeneratePeople(1, r); + generator.GenerateTestDataFile(people, new("./inventory.csv"), 1); //This generates // Test_ZipFile // 2015 // 3 - // 18 + // 18 // 751140 2.25.166922918107154891877498685128076062226.dcm // 751140 2.25.179610809676265137473873365625829826423.dcm // 751140 2.25.201969634959506849065133495434871450465.dcm @@ -188,26 +187,26 @@ public void Test_ZipFileNotation(bool expressRelative) // 751140 2.25.316241631782653383510844072713132248731.dcm var yearDir = dirToLoad.GetDirectories().Single(); - StringAssert.IsMatch("\\d{4}",yearDir.Name); + Assert.That(yearDir.Name, Does.Match("\\d{4}")); //should be 5 images in the zip file var dicomFiles = yearDir.GetFiles("*.dcm", SearchOption.AllDirectories); - Assert.AreEqual(5,dicomFiles.Length); + Assert.That(dicomFiles, Has.Length.EqualTo(5)); //e.g. \2015\3\18\2.25.223398837779449245317520567111874824918.dcm //e.g. \2015\3\18\2.25.179610809676265137473873365625829826423.dcm var relativePathWithinZip1 = dicomFiles[0].FullName[dirToLoad.FullName.Length..]; var relativePathWithinZip2 = dicomFiles[1].FullName[dirToLoad.FullName.Length..]; - + //zip them up FileInfo zip = new(Path.Combine(TestContext.CurrentContext.TestDirectory, - $"{nameof(Test_ZipFile)}.zip"));Path.Combine(TestContext.CurrentContext.TestDirectory, + $"{nameof(Test_ZipFile)}.zip")); Path.Combine(TestContext.CurrentContext.TestDirectory, $"{nameof(Test_ZipFile)}.zip"); - if(zip.Exists) + if (zip.Exists) zip.Delete(); - ZipFile.CreateFromDirectory(dirToLoad.FullName,zip.FullName); + ZipFile.CreateFromDirectory(dirToLoad.FullName, zip.FullName); //e.g. 
E:\RdmpDicom\Rdmp.Dicom.Tests\bin\Debug\netcoreapp2.2\Test_ZipFile.zip!\2015\3\18\2.25.223398837779449245317520567111874824918.dcm var pathToLoad1 = $"{zip.FullName}!{relativePathWithinZip1}"; @@ -216,12 +215,12 @@ public void Test_ZipFileNotation(bool expressRelative) var loadMeTextFile = new FileInfo(Path.Combine(dirToLoad.FullName, "LoadMe.txt")); //tell the source to load the zip - File.WriteAllText(loadMeTextFile.FullName,string.Join(Environment.NewLine, pathToLoad1, pathToLoad2)); - + File.WriteAllText(loadMeTextFile.FullName, string.Join(Environment.NewLine, pathToLoad1, pathToLoad2)); + var f = new FlatFileToLoad(loadMeTextFile); //Setup source - var source = new DicomFileCollectionSource {FilenameField = "RelativeFileArchiveURI"}; + var source = new DicomFileCollectionSource { FilenameField = "RelativeFileArchiveURI" }; if (expressRelative) source.ArchiveRoot = TestContext.CurrentContext.TestDirectory; @@ -229,44 +228,43 @@ public void Test_ZipFileNotation(bool expressRelative) var worklist = new FlatFileToLoadDicomFileWorklist(f); //Setup destination - var destination = new DataTableUploadDestination {AllowResizingColumnsAtUploadTime = true}; + var destination = new DataTableUploadDestination { AllowResizingColumnsAtUploadTime = true }; //setup pipeline var contextFactory = new DataFlowPipelineContextFactory(); var context = contextFactory.Create(PipelineUsage.FixedDestination | PipelineUsage.FixedDestination); //run pipeline - var pipe = new DataFlowPipelineEngine(context,source,destination,ThrowImmediatelyDataLoadEventListener.Quiet); - pipe.Initialize(db,worklist); + var pipe = new DataFlowPipelineEngine(context, source, destination, ThrowImmediatelyDataLoadEventListener.Quiet); + pipe.Initialize(db, worklist); pipe.ExecutePipeline(new()); var finalTable = db.ExpectTable(destination.TargetTableName); - + using (var dt = finalTable.GetDataTable()) { //should be 2 rows (since we told it to only load 2 files out of the zip) - Assert.AreEqual(2,dt.Rows.Count); + Assert.That(dt.Rows, Has.Count.EqualTo(2)); - var pathInDbToDicomFile = (string) dt.Rows[0]["RelativeFileArchiveURI"]; + var pathInDbToDicomFile = (string)dt.Rows[0]["RelativeFileArchiveURI"]; //We expect either something like: // E:/RdmpDicom/Rdmp.Dicom.Tests/bin/Debug/netcoreapp2.2/Test_ZipFile.zip!2015/3/18/2.25.160787663560951826149226183314694084702.dcm // ./Test_ZipFile.zip!2015/3/18/2.25.105592977437473375573190160334447272386.dcm //the path referenced should be the file read in relative/absolute format - StringAssert.IsMatch( - expressRelative ? $@"./{zip.Name}![\d./]*.dcm": - $@"{Regex.Escape(zip.FullName.Replace('\\','/'))}![\d./]*.dcm", - pathInDbToDicomFile); + Assert.That( + pathInDbToDicomFile, Does.Match(expressRelative ? 
$@"./{zip.Name}![\d./]*.dcm" : + $@"{Regex.Escape(zip.FullName.Replace('\\', '/'))}![\d./]*.dcm")); - StringAssert.Contains(yearDir.Name,pathInDbToDicomFile,"Expected zip file to have subdirectories and for them to be loaded correctly"); + Assert.That(pathInDbToDicomFile, Does.Contain(yearDir.Name), "Expected zip file to have subdirectories and for them to be loaded correctly"); //confirm we can read that out again var path = new AmbiguousFilePath(TestContext.CurrentContext.TestDirectory, pathInDbToDicomFile); - Assert.IsNotNull(path.GetDataset(0, 0)); + Assert.That(path.GetDataset(0, 0), Is.Not.Null); } - Assert.IsTrue(finalTable.Exists()); + Assert.That(finalTable.Exists()); finalTable.Drop(); } } \ No newline at end of file diff --git a/Rdmp.Dicom.Tests/Unit/DataTableTestHelper.cs b/Rdmp.Dicom.Tests/Unit/DataTableTestHelper.cs index c96e64f7..60c4d30b 100644 --- a/Rdmp.Dicom.Tests/Unit/DataTableTestHelper.cs +++ b/Rdmp.Dicom.Tests/Unit/DataTableTestHelper.cs @@ -25,7 +25,7 @@ public static DataTable CreateDataTable( for (var i = 0; i < columnNames.Length; i++) { - DataColumn column = new() {DataType = data[0, i].GetType(), ColumnName = columnNames[i]}; + DataColumn column = new() { DataType = data[0, i].GetType(), ColumnName = columnNames[i] }; result.Columns.Add(column); } diff --git a/Rdmp.Dicom.Tests/Unit/DicomSourceUnitTests.cs b/Rdmp.Dicom.Tests/Unit/DicomSourceUnitTests.cs index 55f9e0be..a1aec114 100644 --- a/Rdmp.Dicom.Tests/Unit/DicomSourceUnitTests.cs +++ b/Rdmp.Dicom.Tests/Unit/DicomSourceUnitTests.cs @@ -14,104 +14,113 @@ namespace Rdmp.Dicom.Tests.Unit; public class DicomSourceUnitTests { - + [Test] public void Test_Linux_Root() { - var source = new DicomFileCollectionSource {ArchiveRoot = "/"}; + var source = new DicomFileCollectionSource { ArchiveRoot = "/" }; - Assert.AreEqual("/",source.ArchiveRoot); + Assert.That(source.ArchiveRoot, Is.EqualTo("/")); } - [TestCase(@"C:\bob\",@"\fish\1.dcm",@"/fish/1.dcm")] - [TestCase(@"C:\bob",@"/fish/1.dcm",@"/fish/1.dcm")] - [TestCase(@"C:\bob",@"./fish/1.dcm",@"./fish/1.dcm")] - [TestCase(@"C:\bob",@"C:\bob\fish\1.dcm",@"./fish/1.dcm")] - [TestCase(@"C:\bob\","C:/bob/fish/1.dcm","./fish/1.dcm")] - [TestCase(@"C:\bob","C:/bob/fish/1.dcm","./fish/1.dcm")] - [TestCase(@"C:\BOb","C:/bob/fish/1.dcm","./fish/1.dcm")] //capitalisation - [TestCase(@"C:/bob/",@"C:\bob\fish/1.dcm","./fish/1.dcm")] //mixed slash directions! 
- [TestCase("/bob/","/bob/fish/1.dcm","./fish/1.dcm")] //linux style paths - [TestCase("/bob/",@"\bob\fish\1.dcm","./fish/1.dcm")] - [TestCase(@"\\myserver\bob",@"\\myserver\bob\fish\1.dcm","./fish/1.dcm")] // UNC server paths - [TestCase(@"\\myserver\bob",@"\\myOtherServer\bob\fish\1.dcm",@"\\myOtherServer/bob/fish/1.dcm")] - [TestCase("/","/bob/fish/1.dcm","./bob/fish/1.dcm")] - [TestCase(@"C:\bob\",@"D:\fish\1.dcm",@"D:/fish/1.dcm")] //not relative so just return verbatim string (with slash fixes) - [TestCase(@"C:\bob\",@"D:/fish/1.dcm",@"D:/fish/1.dcm")] - [TestCase(@"C:\bob\",@"C:\fish\bob\fish\1.dcm",@"C:/fish/bob/fish/1.dcm")] //not relative so just return verbatim string (with slash fixes) - [TestCase(@"C:\bob\",@"./fish.dcm",@"./fish.dcm")] - [TestCase(@"./bob/",@"./bob/fish.dcm",@"./fish.dcm")] //if the "root" is relative then we can still express this relative to it + [TestCase(@"C:\bob\", @"\fish\1.dcm", @"/fish/1.dcm")] + [TestCase(@"C:\bob", @"/fish/1.dcm", @"/fish/1.dcm")] + [TestCase(@"C:\bob", @"./fish/1.dcm", @"./fish/1.dcm")] + [TestCase(@"C:\bob", @"C:\bob\fish\1.dcm", @"./fish/1.dcm")] + [TestCase(@"C:\bob\", "C:/bob/fish/1.dcm", "./fish/1.dcm")] + [TestCase(@"C:\bob", "C:/bob/fish/1.dcm", "./fish/1.dcm")] + [TestCase(@"C:\BOb", "C:/bob/fish/1.dcm", "./fish/1.dcm")] //capitalisation + [TestCase(@"C:/bob/", @"C:\bob\fish/1.dcm", "./fish/1.dcm")] //mixed slash directions! + [TestCase("/bob/", "/bob/fish/1.dcm", "./fish/1.dcm")] //linux style paths + [TestCase("/bob/", @"\bob\fish\1.dcm", "./fish/1.dcm")] + [TestCase(@"\\myserver\bob", @"\\myserver\bob\fish\1.dcm", "./fish/1.dcm")] // UNC server paths + [TestCase(@"\\myserver\bob", @"\\myOtherServer\bob\fish\1.dcm", @"\\myOtherServer/bob/fish/1.dcm")] + [TestCase("/", "/bob/fish/1.dcm", "./bob/fish/1.dcm")] + [TestCase(@"C:\bob\", @"D:\fish\1.dcm", @"D:/fish/1.dcm")] //not relative so just return verbatim string (with slash fixes) + [TestCase(@"C:\bob\", @"D:/fish/1.dcm", @"D:/fish/1.dcm")] + [TestCase(@"C:\bob\", @"C:\fish\bob\fish\1.dcm", @"C:/fish/bob/fish/1.dcm")] //not relative so just return verbatim string (with slash fixes) + [TestCase(@"C:\bob\", @"./fish.dcm", @"./fish.dcm")] + [TestCase(@"./bob/", @"./bob/fish.dcm", @"./fish.dcm")] //if the "root" is relative then we can still express this relative to it public void Test_ApplyArchiveRootToMakeRelativePath(string root, string inputPath, string expectedRelativePath) { - var source = new DicomFileCollectionSource {ArchiveRoot = root}; + var source = new DicomFileCollectionSource { ArchiveRoot = root }; var result = source.ApplyArchiveRootToMakeRelativePath(inputPath); - Assert.AreEqual(expectedRelativePath, result); - } + Assert.That(result, Is.EqualTo(expectedRelativePath)); + } [Test] public void AssembleDataTableFromFile() { - var source = new DicomFileCollectionSource {FilenameField = "RelativeFileArchiveURI"}; + var source = new DicomFileCollectionSource { FilenameField = "RelativeFileArchiveURI" }; var f = Path.Combine(TestContext.CurrentContext.TestDirectory, @"TestData/IM-0001-0013.dcm"); source.PreInitialize(new FlatFileToLoadDicomFileWorklist(new(new(f))), ThrowImmediatelyDataLoadEventListener.Quiet); var result = source.GetChunk(ThrowImmediatelyDataLoadEventListener.Quiet, new()); - Assert.AreEqual("IM00010013",result.TableName); - Assert.Greater(result.Columns.Count,0); - - Assert.IsNull(source.GetChunk(new ThrowImmediatelyDataLoadJob(), new())); + Assert.Multiple(() => + { + Assert.That(result.TableName, Is.EqualTo("IM00010013")); + 
Assert.That(result.Columns, Is.Not.Empty); + + Assert.That(source.GetChunk(new ThrowImmediatelyDataLoadJob(), new()), Is.Null); + }); } [Test] public void AssembleDataTableFromFileArchive() { var zip = Path.Combine(TestContext.CurrentContext.TestDirectory, "TestData.zip"); - var dir = Path.Combine(TestContext.CurrentContext.TestDirectory,"TestData"); - - if(File.Exists(zip)) + var dir = Path.Combine(TestContext.CurrentContext.TestDirectory, "TestData"); + + if (File.Exists(zip)) File.Delete(zip); - - ZipFile.CreateFromDirectory(dir,zip); - var fileCount = Directory.GetFiles(dir,"*.dcm").Length; + ZipFile.CreateFromDirectory(dir, zip); - var source = new DicomFileCollectionSource {FilenameField = "RelativeFileArchiveURI"}; + var fileCount = Directory.GetFiles(dir, "*.dcm").Length; + + var source = new DicomFileCollectionSource { FilenameField = "RelativeFileArchiveURI" }; source.PreInitialize(new FlatFileToLoadDicomFileWorklist(new(new(zip))), ThrowImmediatelyDataLoadEventListener.Quiet); var toMemory = new ToMemoryDataLoadEventListener(true); var result = source.GetChunk(toMemory, new()); - //processed every file once - Assert.AreEqual(fileCount, toMemory.LastProgressRecieivedByTaskName.Single().Value.Progress.Value); + Assert.Multiple(() => + { + //processed every file once + Assert.That(toMemory.LastProgressRecieivedByTaskName.Single().Value.Progress.Value, Is.EqualTo(fileCount)); - Assert.Greater(result.Columns.Count, 0); + Assert.That(result.Columns, Is.Not.Empty); + }); } [Test] public void AssembleDataTableFromFolder() { - var file1 = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory,"TestData/FileWithLotsOfTags.dcm")); - var file2 = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory,"TestData/IM-0001-0013.dcm")); + var file1 = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, "TestData/FileWithLotsOfTags.dcm")); + var file2 = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, "TestData/IM-0001-0013.dcm")); - var controlFile = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory,"list.txt")); - File.WriteAllText(controlFile.FullName,file1.FullName + Environment.NewLine + file2.FullName); + var controlFile = new FileInfo(Path.Combine(TestContext.CurrentContext.TestDirectory, "list.txt")); + File.WriteAllText(controlFile.FullName, file1.FullName + Environment.NewLine + file2.FullName); - var source = new DicomFileCollectionSource {FilenameField = "RelativeFileArchiveURI"}; + var source = new DicomFileCollectionSource { FilenameField = "RelativeFileArchiveURI" }; source.PreInitialize(new FlatFileToLoadDicomFileWorklist(new(controlFile)), ThrowImmediatelyDataLoadEventListener.Quiet); - + var toMemory = new ToMemoryDataLoadEventListener(true); var result = source.GetChunk(toMemory, new()); - Assert.AreEqual(1,result.Rows.Count); + Assert.That(result.Rows, Has.Count.EqualTo(1)); result = source.GetChunk(toMemory, new()); - Assert.AreEqual(1, result.Rows.Count); + Assert.Multiple(() => + { + Assert.That(result.Rows, Has.Count.EqualTo(1)); - Assert.AreEqual(null, source.GetChunk(toMemory, new())); + Assert.That(source.GetChunk(toMemory, new()), Is.EqualTo(null)); + }); } - - + + [Test] @@ -143,47 +152,47 @@ public void Test_ElevationXmlLoading() "; #endregion - + var source = new DicomFileCollectionSource(); - var file = Path.Combine(TestContext.CurrentContext.WorkDirectory,"me.xml"); + var file = Path.Combine(TestContext.CurrentContext.WorkDirectory, "me.xml"); //no elevation to start with - 
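Where a test makes several related checks in a row, the reworked code above and below also groups them in Assert.Multiple, so every constraint in the block is evaluated and all failures are reported together instead of stopping at the first one. A short sketch of that pattern, using placeholder values rather than data from these tests:

    using NUnit.Framework;

    [TestFixture]
    internal class AssertMultipleSketch
    {
        [Test]
        public void RelatedChecksAreGrouped()
        {
            var tableName = "IM00010013";   // hypothetical values
            var columnCount = 10;

            // Inside the block both constraints run even if the first one fails.
            Assert.Multiple(() =>
            {
                Assert.That(tableName, Is.EqualTo("IM00010013"));
                Assert.That(columnCount, Is.GreaterThan(0));
            });
        }
    }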
Assert.IsNull(source.LoadElevationRequestsFile()); + Assert.That(source.LoadElevationRequestsFile(), Is.Null); //illegal file File.WriteAllText(file, ""); source.TagElevationConfigurationFile = new(file); - - var ex = Assert.Throws(()=>source.LoadElevationRequestsFile()); - StringAssert.Contains("Unexpected end of file",ex.Message); - + + var ex = Assert.Throws(() => source.LoadElevationRequestsFile()); + Assert.That(ex.Message, Does.Contain("Unexpected end of file")); + File.WriteAllText(file, invalidXml); var ex2 = Assert.Throws(() => source.LoadElevationRequestsFile()); - StringAssert.Contains("Navigation Token CodeValue was not the final token in the pathway", ex2.Message); + Assert.That(ex2.Message, Does.Contain("Navigation Token CodeValue was not the final token in the pathway")); File.WriteAllText(file, validXml1); - Assert.AreEqual("CodeValueCol",source.LoadElevationRequestsFile().Requests.Single().ColumnName); - + Assert.That(source.LoadElevationRequestsFile().Requests.Single().ColumnName, Is.EqualTo("CodeValueCol")); + //Setting the xml property will override the file xml - source.TagElevationConfigurationXml = new() {xml= "" }; + source.TagElevationConfigurationXml = new() { xml = "" }; var ex3 = Assert.Throws(() => source.LoadElevationRequestsFile()); - StringAssert.Contains("Unexpected end of file", ex3.Message); + Assert.That(ex3.Message, Does.Contain("Unexpected end of file")); source.TagElevationConfigurationXml = new() { xml = invalidXml }; var ex4 = Assert.Throws(() => source.LoadElevationRequestsFile()); - StringAssert.Contains("Navigation Token CodeValue was not the final token in the pathway", ex4.Message); + Assert.That(ex4.Message, Does.Contain("Navigation Token CodeValue was not the final token in the pathway")); source.TagElevationConfigurationXml = new() { xml = validXml2 }; - Assert.AreEqual("CodeValueCol2", source.LoadElevationRequestsFile().Requests.Single().ColumnName); - + Assert.That(source.LoadElevationRequestsFile().Requests.Single().ColumnName, Is.EqualTo("CodeValueCol2")); + //now we go back to the file one (by setting the xml one to null) source.TagElevationConfigurationXml = null; - Assert.AreEqual("CodeValueCol", source.LoadElevationRequestsFile().Requests.Single().ColumnName); - source.TagElevationConfigurationXml = new() {xml = "" }; - Assert.AreEqual("CodeValueCol", source.LoadElevationRequestsFile().Requests.Single().ColumnName); + Assert.That(source.LoadElevationRequestsFile().Requests.Single().ColumnName, Is.EqualTo("CodeValueCol")); + source.TagElevationConfigurationXml = new() { xml = "" }; + Assert.That(source.LoadElevationRequestsFile().Requests.Single().ColumnName, Is.EqualTo("CodeValueCol")); source.TagElevationConfigurationXml = new() { xml = " \r\n " }; - Assert.AreEqual("CodeValueCol", source.LoadElevationRequestsFile().Requests.Single().ColumnName); + Assert.That(source.LoadElevationRequestsFile().Requests.Single().ColumnName, Is.EqualTo("CodeValueCol")); } } \ No newline at end of file diff --git a/Rdmp.Dicom.Tests/Unit/PacsFetch.cs b/Rdmp.Dicom.Tests/Unit/PacsFetch.cs index dc60acab..a44ad668 100644 --- a/Rdmp.Dicom.Tests/Unit/PacsFetch.cs +++ b/Rdmp.Dicom.Tests/Unit/PacsFetch.cs @@ -18,9 +18,9 @@ internal class PacsFetch class QRService : DicomService, IDicomServiceProvider, IDicomCFindProvider, IDicomCEchoProvider, IDicomCMoveProvider { - private static readonly DicomServiceDependencies Dependencies = new(LoggerFactory.Create(builder=>builder.AddConsole()), + private static readonly DicomServiceDependencies Dependencies = 
new(LoggerFactory.Create(builder => builder.AddConsole()), new DesktopNetworkManager(), new DefaultTranscoderManager(), new ArrayPoolMemoryProvider()); - public QRService(INetworkStream stream, Encoding fallbackEncoding,Microsoft.Extensions.Logging.ILogger log) : base(stream, fallbackEncoding, log,Dependencies) + public QRService(INetworkStream stream, Encoding fallbackEncoding, Microsoft.Extensions.Logging.ILogger log) : base(stream, fallbackEncoding, log, Dependencies) { } @@ -56,13 +56,14 @@ public Task OnReceiveAssociationRequestAsync(DicomAssociation association) foreach (var pc in association.PresentationContexts) { if (pc.AbstractSyntax == DicomUID.Verification - || pc.AbstractSyntax==DicomUID.PatientRootQueryRetrieveInformationModelFind - || pc.AbstractSyntax==DicomUID.PatientRootQueryRetrieveInformationModelMove - || pc.AbstractSyntax==DicomUID.StudyRootQueryRetrieveInformationModelFind - || pc.AbstractSyntax==DicomUID.StudyRootQueryRetrieveInformationModelMove) + || pc.AbstractSyntax == DicomUID.PatientRootQueryRetrieveInformationModelFind + || pc.AbstractSyntax == DicomUID.PatientRootQueryRetrieveInformationModelMove + || pc.AbstractSyntax == DicomUID.StudyRootQueryRetrieveInformationModelFind + || pc.AbstractSyntax == DicomUID.StudyRootQueryRetrieveInformationModelMove) { pc.AcceptTransferSyntaxes(DicomTransferSyntax.ExplicitVRLittleEndian); - } else if (pc.AbstractSyntax.StorageCategory != DicomStorageCategory.None) + } + else if (pc.AbstractSyntax.StorageCategory != DicomStorageCategory.None) { pc.AcceptTransferSyntaxes(); } @@ -93,18 +94,19 @@ public void StopOwnPacs() [Test] public void EchoTest() { - var success=false; + var success = false; var client = DicomClientFactory.Create("127.0.0.1", 11112, false, "me", "otherme"); client.NegotiateAsyncOps(); client.AddRequestAsync(new DicomCEchoRequest + { + OnResponseReceived = (req, res) => { - OnResponseReceived = (req, res) => { - success = true; - } + success = true; } + } ).Wait(); client.SendAsync().Wait(); - Assert.True(success, "No echo response from own PACS"); + Assert.That(success, "No echo response from own PACS"); } /* [Test] diff --git a/Rdmp.Dicom.Tests/Unit/PrimaryKeyCollisionIsolationMutilationTests.cs b/Rdmp.Dicom.Tests/Unit/PrimaryKeyCollisionIsolationMutilationTests.cs index 1f4da32b..e5822942 100644 --- a/Rdmp.Dicom.Tests/Unit/PrimaryKeyCollisionIsolationMutilationTests.cs +++ b/Rdmp.Dicom.Tests/Unit/PrimaryKeyCollisionIsolationMutilationTests.cs @@ -17,7 +17,7 @@ namespace Rdmp.Dicom.Tests.Unit; -class PrimaryKeyCollisionIsolationMutilationTests:DatabaseTests +class PrimaryKeyCollisionIsolationMutilationTests : DatabaseTests { [TestCase(DatabaseType.MicrosoftSQLServer)] [TestCase(DatabaseType.MySql)] @@ -42,15 +42,18 @@ public void Test_IsolateSingleTable_Check(DatabaseType dbType) a.IsPrimaryKey = true; a.SaveToDatabase(); - var mutilator = GetMutilator(db,tableInfoCreated); + var mutilator = GetMutilator(db, tableInfoCreated); //first time no tables exist so they must be created mutilator.Check(new AcceptAllCheckNotifier()); - + var isolationTable = db.ExpectTable("CoolTable_Isolation"); - Assert.IsTrue(isolationTable.Exists()); - Assert.IsTrue(isolationTable.DiscoverColumns().Any(c=>c.GetRuntimeName().Equals("A"))); - Assert.IsTrue(isolationTable.DiscoverColumns().Any(c => c.GetRuntimeName().Equals("hic_dataLoadRunID"))); + Assert.Multiple(() => + { + Assert.That(isolationTable.Exists()); + Assert.That(isolationTable.DiscoverColumns().Any(c => c.GetRuntimeName().Equals("A"))); + 
Assert.That(isolationTable.DiscoverColumns().Any(c => c.GetRuntimeName().Equals("hic_dataLoadRunID"))); + }); //the check should pass second time without needing to accept any fixes mutilator.Check(ThrowImmediatelyCheckNotifier.Quiet); @@ -58,11 +61,11 @@ public void Test_IsolateSingleTable_Check(DatabaseType dbType) private PrimaryKeyCollisionIsolationMutilation GetMutilator(DiscoveredDatabase db, params ITableInfo[] tableInfoCreated) { - //tell the mutilator to resolve the primary key collision on column A by isolating the rows - var mutilation = new PrimaryKeyCollisionIsolationMutilation {TablesToIsolate = tableInfoCreated.Cast().ToArray()}; + //tell the mutilator to resolve the primary key collision on column A by isolating the rows + var mutilation = new PrimaryKeyCollisionIsolationMutilation { TablesToIsolate = tableInfoCreated.Cast().ToArray() }; //tell the mutilator to set up isolation into the provided database - var serverPointer = new ExternalDatabaseServer(CatalogueRepository, "Isolation Db",null); + var serverPointer = new ExternalDatabaseServer(CatalogueRepository, "Isolation Db", null); serverPointer.SetProperties(db); mutilation.IsolationDatabase = serverPointer; @@ -92,7 +95,7 @@ public void Test_IsolateSingleTable_Duplication(DatabaseType dbType) //import the table and make A look like a primary key to the metadata layer (and A would be pk in LIVE but not in RAW ofc) Import(tbl, out var tableInfoCreated, out var columnInfosCreated); - + //lie about the primary key status var a = columnInfosCreated.Single(c => c.GetRuntimeName().Equals("A")); a.IsPrimaryKey = true; @@ -101,27 +104,27 @@ public void Test_IsolateSingleTable_Duplication(DatabaseType dbType) var mutilator = GetMutilator(db, tableInfoCreated); mutilator.Check(new AcceptAllCheckNotifier()); - var config = new HICDatabaseConfiguration(db.Server,RdmpMockFactory.Mock_INameDatabasesAndTablesDuringLoads(db, "MyCoolTable2")); - var job = new ThrowImmediatelyDataLoadJob(config,tableInfoCreated); - - mutilator.Initialize(db,LoadStage.AdjustRaw); + var config = new HICDatabaseConfiguration(db.Server, RdmpMockFactory.Mock_INameDatabasesAndTablesDuringLoads(db, "MyCoolTable2")); + var job = new ThrowImmediatelyDataLoadJob(config, tableInfoCreated); + + mutilator.Initialize(db, LoadStage.AdjustRaw); mutilator.Mutilate(job); using var dt2 = tbl.GetDataTable(); - Assert.AreEqual(2,dt2.Rows.Count); + Assert.That(dt2.Rows, Has.Count.EqualTo(2)); using var dtIsolation = tbl.Database.ExpectTable("MyCoolTable2_Isolation").GetDataTable(); - Assert.AreEqual(3, dtIsolation.Rows.Count); + Assert.That(dtIsolation.Rows, Has.Count.EqualTo(3)); } - - [TestCase(".[dbo].",true)] - [TestCase(".[dbo].",false)] - [TestCase(".dbo.",true)] - [TestCase(".dbo.",false)] - [TestCase("..",true)] - [TestCase("..",false)] - public void Test_IsolateSingleTableWithSchema_Duplication(string schemaExpression,bool includeQualifiers) + + [TestCase(".[dbo].", true)] + [TestCase(".[dbo].", false)] + [TestCase(".dbo.", true)] + [TestCase(".dbo.", false)] + [TestCase("..", true)] + [TestCase("..", false)] + public void Test_IsolateSingleTableWithSchema_Duplication(string schemaExpression, bool includeQualifiers) { var db = GetCleanedServer(DatabaseType.MicrosoftSQLServer); @@ -140,13 +143,13 @@ public void Test_IsolateSingleTableWithSchema_Duplication(string schemaExpressio //import the table and make A look like a primary key to the metadata layer (and A would be pk in LIVE but not in RAW ofc) Import(tbl, out var tableInfoCreated, out var columnInfosCreated); - 
+ var syntax = db.Server.GetQuerySyntaxHelper(); - tableInfoCreated.Name = - (includeQualifiers ? syntax.EnsureWrapped(db.GetRuntimeName()) : db.GetRuntimeName()) - + schemaExpression + - (includeQualifiers ? syntax.EnsureWrapped(tbl.GetRuntimeName()) : tbl.GetRuntimeName()); + tableInfoCreated.Name = + (includeQualifiers ? syntax.EnsureWrapped(db.GetRuntimeName()) : db.GetRuntimeName()) + + schemaExpression + + (includeQualifiers ? syntax.EnsureWrapped(tbl.GetRuntimeName()) : tbl.GetRuntimeName()); tableInfoCreated.SaveToDatabase(); @@ -165,17 +168,17 @@ public void Test_IsolateSingleTableWithSchema_Duplication(string schemaExpressio var mutilator = GetMutilator(db, tableInfoCreated); mutilator.Check(new AcceptAllCheckNotifier()); - var config = new HICDatabaseConfiguration(db.Server,RdmpMockFactory.Mock_INameDatabasesAndTablesDuringLoads(db, "MyCoolTable2")); - var job = new ThrowImmediatelyDataLoadJob(config,tableInfoCreated); - - mutilator.Initialize(db,LoadStage.AdjustRaw); + var config = new HICDatabaseConfiguration(db.Server, RdmpMockFactory.Mock_INameDatabasesAndTablesDuringLoads(db, "MyCoolTable2")); + var job = new ThrowImmediatelyDataLoadJob(config, tableInfoCreated); + + mutilator.Initialize(db, LoadStage.AdjustRaw); mutilator.Mutilate(job); using var dt2 = tbl.GetDataTable(); - Assert.AreEqual(2,dt2.Rows.Count); + Assert.That(dt2.Rows, Has.Count.EqualTo(2)); using var dtIsolation = tbl.Database.ExpectTable("MyCoolTable2_Isolation").GetDataTable(); - Assert.AreEqual(3, dtIsolation.Rows.Count); + Assert.That(dtIsolation.Rows, Has.Count.EqualTo(3)); } [TestCase(DatabaseType.MicrosoftSQLServer)] [TestCase(DatabaseType.MySql)] @@ -217,8 +220,8 @@ public void Test_IsolateTwoTables_Duplication(DatabaseType dbType) { new DatabaseColumnRequest("SeriesInstanceUID",new DatabaseTypeRequest(typeof(string))) }); - - var tblChild = db.CreateTable("Child", dt2,new [] + + var tblChild = db.CreateTable("Child", dt2, new[] { new DatabaseColumnRequest("SeriesInstanceUID",new DatabaseTypeRequest(typeof(string))), new DatabaseColumnRequest("SOPInstanceUID",new DatabaseTypeRequest(typeof(string))) @@ -247,10 +250,10 @@ public void Test_IsolateTwoTables_Duplication(DatabaseType dbType) //checking should fail because it doesn't know how to join tables var ex = Assert.Throws(() => mutilator.Check(new AcceptAllCheckNotifier())); - StringAssert.Contains("join", ex.Message); //should be complaining about missing join infos + Assert.That(ex.Message, Does.Contain("join")); //should be complaining about missing join infos //tell RDMP about how to join tables - _=new JoinInfo(CatalogueRepository,childColumnInfosCreated.Single( + _ = new JoinInfo(CatalogueRepository, childColumnInfosCreated.Single( c => c.GetRuntimeName().Equals("SeriesInstanceUID")), parentColumnInfosCreated.Single(c => c.GetRuntimeName().Equals("SeriesInstanceUID")), ExtractionJoinType.Right, null); @@ -258,23 +261,23 @@ public void Test_IsolateTwoTables_Duplication(DatabaseType dbType) //now that we have a join it should pass checks mutilator.Check(new AcceptAllCheckNotifier()); - var config = new HICDatabaseConfiguration(db.Server,new ReturnSameString()); - var job = new ThrowImmediatelyDataLoadJob(config,parentTableInfo,childTableInfo); + var config = new HICDatabaseConfiguration(db.Server, new ReturnSameString()); + var job = new ThrowImmediatelyDataLoadJob(config, parentTableInfo, childTableInfo); mutilator.Initialize(db, LoadStage.AdjustRaw); mutilator.Mutilate(job); //parent should now only have "5.2.1" using var dtParent = 
parentTableInfo.Discover(DataAccessContext.InternalDataProcessing).GetDataTable(); - Assert.AreEqual(1, dtParent.Rows.Count); + Assert.That(dtParent.Rows, Has.Count.EqualTo(1)); //isolation should have 5 ("1.2.3", "2.3.4" and "9.9.9") using var dtParentIsolation = db.ExpectTable("Parent_Isolation").GetDataTable(); - Assert.AreEqual(5, dtParentIsolation.Rows.Count); + Assert.That(dtParentIsolation.Rows, Has.Count.EqualTo(5)); //child table should now only have 3 ("1.1.1", "1.1.2" and "1.1.3") using var dtChild = childTableInfo.Discover(DataAccessContext.InternalDataProcessing).GetDataTable(); - Assert.AreEqual(3, dtChild.Rows.Count); + Assert.That(dtChild.Rows, Has.Count.EqualTo(3)); //child isolation table should have 4: /* @@ -285,18 +288,18 @@ public void Test_IsolateTwoTables_Duplication(DatabaseType dbType) */ using var dtChildIsolation = db.ExpectTable("Child_Isolation").GetDataTable(); - Assert.AreEqual(4, dtChildIsolation.Rows.Count); + Assert.That(dtChildIsolation.Rows, Has.Count.EqualTo(4)); } - [TestCase(DatabaseType.MicrosoftSQLServer,false)] - [TestCase(DatabaseType.MySql,false)] - [TestCase(DatabaseType.MicrosoftSQLServer,true)] - [TestCase(DatabaseType.MySql,true)] - [TestCase(DatabaseType.PostgreSql,false)] - [TestCase(DatabaseType.PostgreSql,true)] - public void Test_IsolateTwoTables_MultipleConflictingColumns(DatabaseType dbType,bool whitespace) + [TestCase(DatabaseType.MicrosoftSQLServer, false)] + [TestCase(DatabaseType.MySql, false)] + [TestCase(DatabaseType.MicrosoftSQLServer, true)] + [TestCase(DatabaseType.MySql, true)] + [TestCase(DatabaseType.PostgreSql, false)] + [TestCase(DatabaseType.PostgreSql, true)] + public void Test_IsolateTwoTables_MultipleConflictingColumns(DatabaseType dbType, bool whitespace) { var db = GetCleanedServer(dbType); @@ -305,8 +308,8 @@ public void Test_IsolateTwoTables_MultipleConflictingColumns(DatabaseType dbType dt.Columns.Add("Pk"); dt.Columns.Add("OtherCol"); - dt.Rows.Add("A",1); //these are colliding on pk "A" with different values of "OtherCol" - dt.Rows.Add(whitespace? "A " :"A",2); + dt.Rows.Add("A", 1); //these are colliding on pk "A" with different values of "OtherCol" + dt.Rows.Add(whitespace ? "A " : "A", 2); //Create a table in 'RAW' (has no constraints) using var dt2 = new DataTable(); @@ -315,11 +318,11 @@ public void Test_IsolateTwoTables_MultipleConflictingColumns(DatabaseType dbType dt2.Columns.Add("OtherCol2"); dt2.Columns.Add("OtherCol3"); - dt2.Rows.Add(whitespace ? "X ": "X", "A", "FF",DBNull.Value); //these are colliding on pk "X" with different values of "OtherCol2" - dt2.Rows.Add("X", whitespace ? "A " :"A", "GG",DBNull.Value); - dt2.Rows.Add(whitespace ? "X ": "X", "A", "FF","HH"); //these are colliding on pk "X" with different values of "OtherCol2" - dt2.Rows.Add("X", whitespace ? "A " :"A", "GG","HH"); - + dt2.Rows.Add(whitespace ? "X " : "X", "A", "FF", DBNull.Value); //these are colliding on pk "X" with different values of "OtherCol2" + dt2.Rows.Add("X", whitespace ? "A " : "A", "GG", DBNull.Value); + dt2.Rows.Add(whitespace ? "X " : "X", "A", "FF", "HH"); //these are colliding on pk "X" with different values of "OtherCol2" + dt2.Rows.Add("X", whitespace ? 
"A " : "A", "GG", "HH"); + var tblParent = db.CreateTable("Parent", dt); var tblChild = db.CreateTable("Child", dt2); @@ -345,7 +348,7 @@ public void Test_IsolateTwoTables_MultipleConflictingColumns(DatabaseType dbType var mutilator = GetMutilator(db, parentTableInfo, childTableInfo); //tell RDMP about how to join tables - new JoinInfo(CatalogueRepository,childColumnInfosCreated.Single( + new JoinInfo(CatalogueRepository, childColumnInfosCreated.Single( c => c.GetRuntimeName().Equals("Fk")), parentColumnInfosCreated.Single(c => c.GetRuntimeName().Equals("Pk")), ExtractionJoinType.Right, null); @@ -353,34 +356,34 @@ public void Test_IsolateTwoTables_MultipleConflictingColumns(DatabaseType dbType //now that we have a join it should pass checks mutilator.Check(new AcceptAllCheckNotifier()); - var config = new HICDatabaseConfiguration(db.Server,new ReturnSameString()); - var job = new ThrowImmediatelyDataLoadJob(config,parentTableInfo,childTableInfo); + var config = new HICDatabaseConfiguration(db.Server, new ReturnSameString()); + var job = new ThrowImmediatelyDataLoadJob(config, parentTableInfo, childTableInfo); mutilator.Initialize(db, LoadStage.AdjustRaw); mutilator.Mutilate(job); //parent should now be empty using var dtParent = parentTableInfo.Discover(DataAccessContext.InternalDataProcessing).GetDataTable(); - Assert.AreEqual(0, dtParent.Rows.Count); + Assert.That(dtParent.Rows, Is.Empty); //isolation should have 2 using var dtParentIsolation = db.ExpectTable("Parent_Isolation").GetDataTable(); - Assert.AreEqual(2, dtParentIsolation.Rows.Count); + Assert.That(dtParentIsolation.Rows, Has.Count.EqualTo(2)); //child table should also be empty using var dtChild = childTableInfo.Discover(DataAccessContext.InternalDataProcessing).GetDataTable(); - Assert.AreEqual(0, dtChild.Rows.Count); + Assert.That(dtChild.Rows, Is.Empty); //child isolation table should have 4: using var dtChildIsolation = db.ExpectTable("Child_Isolation").GetDataTable(); - Assert.AreEqual(4, dtChildIsolation.Rows.Count); + Assert.That(dtChildIsolation.Rows, Has.Count.EqualTo(4)); } - + [TestCase(DatabaseType.MicrosoftSQLServer)] [TestCase(DatabaseType.MySql)] [TestCase(DatabaseType.PostgreSql)] public void Test_IsolateTwoTables_IntKeys(DatabaseType dbType) - { + { /*************************************** * Parent(Pk) Child (Pk2,Fk,OtherCol) * 4 -> 8,4,1 @@ -388,7 +391,7 @@ public void Test_IsolateTwoTables_IntKeys(DatabaseType dbType) * 8,4,2 * 5 -> 9,5,1 (good record with no collisions anywhere) **********************************/ - + var db = GetCleanedServer(dbType); //Create a table in 'RAW' (has no constraints) @@ -396,8 +399,8 @@ public void Test_IsolateTwoTables_IntKeys(DatabaseType dbType) dt.Columns.Add("Pk"); dt.Columns.Add("OtherCol"); - dt.Rows.Add(4,1); - dt.Rows.Add(5,2); + dt.Rows.Add(4, 1); + dt.Rows.Add(5, 2); //Create a table in 'RAW' (has no constraints) using var dt2 = new DataTable(); @@ -405,15 +408,15 @@ public void Test_IsolateTwoTables_IntKeys(DatabaseType dbType) dt2.Columns.Add("Fk"); dt2.Columns.Add("OtherCol"); - dt2.Rows.Add(8, 4,1); //these are colliding on pk 8 which will ship full hierarchy of parent pk 4 to the isolation table - dt2.Rows.Add(8, 4,2); - dt2.Rows.Add(9, 5,1); //good record with no collisions, should not be deleted! - + dt2.Rows.Add(8, 4, 1); //these are colliding on pk 8 which will ship full hierarchy of parent pk 4 to the isolation table + dt2.Rows.Add(8, 4, 2); + dt2.Rows.Add(9, 5, 1); //good record with no collisions, should not be deleted! 
+ var tblParent = db.CreateTable("Parent", dt); var tblChild = db.CreateTable("Child", dt2); //make sure FAnsi made an int column - Assert.AreEqual(typeof(int),tblParent.DiscoverColumn("Pk").GetGuesser().Guess.CSharpType); + Assert.That(tblParent.DiscoverColumn("Pk").GetGuesser().Guess.CSharpType, Is.EqualTo(typeof(int))); //import the table and make A look like a primary key to the metadata layer (and A would be pk in LIVE but not in RAW ofc) Import(tblParent, out var parentTableInfo, out var parentColumnInfosCreated); @@ -437,7 +440,7 @@ public void Test_IsolateTwoTables_IntKeys(DatabaseType dbType) var mutilator = GetMutilator(db, parentTableInfo, childTableInfo); //tell RDMP about how to join tables - new JoinInfo(CatalogueRepository,childColumnInfosCreated.Single( + new JoinInfo(CatalogueRepository, childColumnInfosCreated.Single( c => c.GetRuntimeName().Equals("Fk")), parentColumnInfosCreated.Single(c => c.GetRuntimeName().Equals("Pk")), ExtractionJoinType.Right, null); @@ -445,34 +448,34 @@ public void Test_IsolateTwoTables_IntKeys(DatabaseType dbType) //now that we have a join it should pass checks mutilator.Check(new AcceptAllCheckNotifier()); - var config = new HICDatabaseConfiguration(db.Server,new ReturnSameString()); - var job = new ThrowImmediatelyDataLoadJob(config,parentTableInfo,childTableInfo); + var config = new HICDatabaseConfiguration(db.Server, new ReturnSameString()); + var job = new ThrowImmediatelyDataLoadJob(config, parentTableInfo, childTableInfo); mutilator.Initialize(db, LoadStage.AdjustRaw); mutilator.Mutilate(job); //parent should now have 1 using var dtParent = parentTableInfo.Discover(DataAccessContext.InternalDataProcessing).GetDataTable(); - Assert.AreEqual(1, dtParent.Rows.Count); + Assert.That(dtParent.Rows, Has.Count.EqualTo(1)); //isolation should have 1 using var dtParentIsolation = db.ExpectTable("Parent_Isolation").GetDataTable(); - Assert.AreEqual(1, dtParentIsolation.Rows.Count); + Assert.That(dtParentIsolation.Rows, Has.Count.EqualTo(1)); //child table should have the good 1 using var dtChild = childTableInfo.Discover(DataAccessContext.InternalDataProcessing).GetDataTable(); - Assert.AreEqual(1, dtChild.Rows.Count); + Assert.That(dtChild.Rows, Has.Count.EqualTo(1)); //child isolation table should have 2: using var dtChildIsolation = db.ExpectTable("Child_Isolation").GetDataTable(); - Assert.AreEqual(2, dtChildIsolation.Rows.Count); + Assert.That(dtChildIsolation.Rows, Has.Count.EqualTo(2)); } [TestCase(DatabaseType.MicrosoftSQLServer)] [TestCase(DatabaseType.MySql)] [TestCase(DatabaseType.PostgreSql)] public void Test_IsolateTwoTables_MultipleCollidingChildren(DatabaseType dbType) - { + { /*************************************** * Parent(Pk) Child (Pk2,Fk,OtherCol) * A -> X,A,1 @@ -483,7 +486,7 @@ public void Test_IsolateTwoTables_MultipleCollidingChildren(DatabaseType dbType) * Y,A,2 * **********************************/ - + var db = GetCleanedServer(dbType); //Create a table in 'RAW' (has no constraints) @@ -491,7 +494,7 @@ public void Test_IsolateTwoTables_MultipleCollidingChildren(DatabaseType dbType) dt.Columns.Add("Pk"); dt.Columns.Add("OtherCol"); - dt.Rows.Add("A",1); + dt.Rows.Add("A", 1); //Create a table in 'RAW' (has no constraints) using var dt2 = new DataTable(); @@ -499,11 +502,11 @@ public void Test_IsolateTwoTables_MultipleCollidingChildren(DatabaseType dbType) dt2.Columns.Add("Fk"); dt2.Columns.Add("OtherCol"); - dt2.Rows.Add("X", "A",1); //these are colliding on pk "X" which will ship A to the isolation table - 
dt2.Rows.Add("X", "A",2); - dt2.Rows.Add("Y", "A",2); //these are colliding on pk "Y" but also reference A (which has already been shipped to isolation) - dt2.Rows.Add("Y", "A",1); - + dt2.Rows.Add("X", "A", 1); //these are colliding on pk "X" which will ship A to the isolation table + dt2.Rows.Add("X", "A", 2); + dt2.Rows.Add("Y", "A", 2); //these are colliding on pk "Y" but also reference A (which has already been shipped to isolation) + dt2.Rows.Add("Y", "A", 1); + var tblParent = db.CreateTable("Parent", dt); var tblChild = db.CreateTable("Child", dt2); @@ -529,7 +532,7 @@ public void Test_IsolateTwoTables_MultipleCollidingChildren(DatabaseType dbType) var mutilator = GetMutilator(db, parentTableInfo, childTableInfo); //tell RDMP about how to join tables - new JoinInfo(CatalogueRepository,childColumnInfosCreated.Single( + new JoinInfo(CatalogueRepository, childColumnInfosCreated.Single( c => c.GetRuntimeName().Equals("Fk")), parentColumnInfosCreated.Single(c => c.GetRuntimeName().Equals("Pk")), ExtractionJoinType.Right, null); @@ -537,27 +540,27 @@ public void Test_IsolateTwoTables_MultipleCollidingChildren(DatabaseType dbType) //now that we have a join it should pass checks mutilator.Check(new AcceptAllCheckNotifier()); - var config = new HICDatabaseConfiguration(db.Server,new ReturnSameString()); - var job = new ThrowImmediatelyDataLoadJob(config,parentTableInfo,childTableInfo); + var config = new HICDatabaseConfiguration(db.Server, new ReturnSameString()); + var job = new ThrowImmediatelyDataLoadJob(config, parentTableInfo, childTableInfo); mutilator.Initialize(db, LoadStage.AdjustRaw); mutilator.Mutilate(job); //parent should now have 0... using var dtParent = parentTableInfo.Discover(DataAccessContext.InternalDataProcessing).GetDataTable(); - Assert.AreEqual(0, dtParent.Rows.Count); + Assert.That(dtParent.Rows, Is.Empty); //isolation should have 1 using var dtParentIsolation = db.ExpectTable("Parent_Isolation").GetDataTable(); - Assert.AreEqual(1, dtParentIsolation.Rows.Count); + Assert.That(dtParentIsolation.Rows, Has.Count.EqualTo(1)); //child table should also be empty using var dtChild = childTableInfo.Discover(DataAccessContext.InternalDataProcessing).GetDataTable(); - Assert.AreEqual(0, dtChild.Rows.Count); + Assert.That(dtChild.Rows, Is.Empty); //child isolation table should have 4: using var dtChildIsolation = db.ExpectTable("Child_Isolation").GetDataTable(); - Assert.AreEqual(4, dtChildIsolation.Rows.Count); + Assert.That(dtChildIsolation.Rows, Has.Count.EqualTo(4)); } [TestCase(DatabaseType.MicrosoftSQLServer)] [TestCase(DatabaseType.MySql)] @@ -579,8 +582,8 @@ public void Test_IsolateTables_Orphans(DatabaseType dbType) dt.Columns.Add("Pk"); dt.Columns.Add("OtherCol"); - dt.Rows.Add("A",1); //these are colliding on pk "A" with different values of "OtherCol" - dt.Rows.Add("A",2); + dt.Rows.Add("A", 1); //these are colliding on pk "A" with different values of "OtherCol" + dt.Rows.Add("A", 2); //Create a table in 'RAW' (has no constraints) using var dt2 = new DataTable(); @@ -589,9 +592,9 @@ public void Test_IsolateTables_Orphans(DatabaseType dbType) dt2.Columns.Add("OtherCol2"); dt2.Columns.Add("OtherCol3"); - dt2.Rows.Add("X", "B", "FF",DBNull.Value); //these are colliding (on pk 'X') and also orphans (B does not appear in parent table dt) - dt2.Rows.Add("X", "B", "GG",DBNull.Value); - + dt2.Rows.Add("X", "B", "FF", DBNull.Value); //these are colliding (on pk 'X') and also orphans (B does not appear in parent table dt) + dt2.Rows.Add("X", "B", "GG", DBNull.Value); + var 
tblParent = db.CreateTable("Parent", dt); var tblChild = db.CreateTable("Child", dt2); @@ -617,7 +620,7 @@ public void Test_IsolateTables_Orphans(DatabaseType dbType) var mutilator = GetMutilator(db, parentTableInfo, childTableInfo); //tell RDMP about how to join tables - new JoinInfo(CatalogueRepository,childColumnInfosCreated.Single( + new JoinInfo(CatalogueRepository, childColumnInfosCreated.Single( c => c.GetRuntimeName().Equals("Fk")), parentColumnInfosCreated.Single(c => c.GetRuntimeName().Equals("Pk")), ExtractionJoinType.Right, null); @@ -625,13 +628,13 @@ public void Test_IsolateTables_Orphans(DatabaseType dbType) //now that we have a join it should pass checks mutilator.Check(new AcceptAllCheckNotifier()); - var config = new HICDatabaseConfiguration(db.Server,new ReturnSameString()); - var job = new ThrowImmediatelyDataLoadJob(config,parentTableInfo,childTableInfo); + var config = new HICDatabaseConfiguration(db.Server, new ReturnSameString()); + var job = new ThrowImmediatelyDataLoadJob(config, parentTableInfo, childTableInfo); mutilator.Initialize(db, LoadStage.AdjustRaw); - var ex = Assert.Throws(()=>mutilator.Mutilate(job)); + var ex = Assert.Throws(() => mutilator.Mutilate(job)); - Assert.AreEqual("Primary key value not found for X", ex.Message); + Assert.That(ex.Message, Is.EqualTo("Primary key value not found for X")); } [TestCase(DatabaseType.MicrosoftSQLServer)] @@ -654,8 +657,8 @@ public void Test_IsolateTables_NullForeignKey(DatabaseType dbType) dt.Columns.Add("Pk"); dt.Columns.Add("OtherCol"); - dt.Rows.Add("A",1); //these are colliding on pk "A" with different values of "OtherCol" - + dt.Rows.Add("A", 1); //these are colliding on pk "A" with different values of "OtherCol" + //Create a table in 'RAW' (has no constraints) using var dt2 = new DataTable(); dt2.Columns.Add("Pk2"); @@ -663,9 +666,9 @@ public void Test_IsolateTables_NullForeignKey(DatabaseType dbType) dt2.Columns.Add("OtherCol2"); dt2.Columns.Add("OtherCol3"); - dt2.Rows.Add("X", "A", "FF",DBNull.Value); //these are colliding (on pk 'X'). "A" exists but the null value in the other record is a problem - dt2.Rows.Add("X", DBNull.Value, "GG",DBNull.Value); - + dt2.Rows.Add("X", "A", "FF", DBNull.Value); //these are colliding (on pk 'X'). 
"A" exists but the null value in the other record is a problem + dt2.Rows.Add("X", DBNull.Value, "GG", DBNull.Value); + var tblParent = db.CreateTable("Parent", dt); var tblChild = db.CreateTable("Child", dt2); @@ -691,7 +694,7 @@ public void Test_IsolateTables_NullForeignKey(DatabaseType dbType) var mutilator = GetMutilator(db, parentTableInfo, childTableInfo); //tell RDMP about how to join tables - new JoinInfo(CatalogueRepository,childColumnInfosCreated.Single( + new JoinInfo(CatalogueRepository, childColumnInfosCreated.Single( c => c.GetRuntimeName().Equals("Fk")), parentColumnInfosCreated.Single(c => c.GetRuntimeName().Equals("Pk")), ExtractionJoinType.Right, null); @@ -699,38 +702,38 @@ public void Test_IsolateTables_NullForeignKey(DatabaseType dbType) //now that we have a join it should pass checks mutilator.Check(new AcceptAllCheckNotifier()); - var config = new HICDatabaseConfiguration(db.Server,new ReturnSameString()); - var job = new ThrowImmediatelyDataLoadJob(config,parentTableInfo,childTableInfo); - + var config = new HICDatabaseConfiguration(db.Server, new ReturnSameString()); + var job = new ThrowImmediatelyDataLoadJob(config, parentTableInfo, childTableInfo); + mutilator.Initialize(db, LoadStage.AdjustRaw); mutilator.Mutilate(job); - + //parent should now have 0... using var dtParent = parentTableInfo.Discover(DataAccessContext.InternalDataProcessing).GetDataTable(); - Assert.AreEqual(0, dtParent.Rows.Count); + Assert.That(dtParent.Rows, Is.Empty); //isolation should have 1 (A) using var dtParentIsolation = db.ExpectTable("Parent_Isolation").GetDataTable(); - Assert.AreEqual(1, dtParentIsolation.Rows.Count); - AssertContains(dtParentIsolation,"A",true,0); + Assert.That(dtParentIsolation.Rows, Has.Count.EqualTo(1)); + AssertContains(dtParentIsolation, "A", true, 0); //child table should have the null record only using var dtChild = childTableInfo.Discover(DataAccessContext.InternalDataProcessing).GetDataTable(); - Assert.AreEqual(1, dtChild.Rows.Count); - AssertContains(dtChild,"X",DBNull.Value,"GG",DBNull.Value); + Assert.That(dtChild.Rows, Has.Count.EqualTo(1)); + AssertContains(dtChild, "X", DBNull.Value, "GG", DBNull.Value); //child isolation table should have 1 record (the X,A,FF) using var dtChildIsolation = db.ExpectTable("Child_Isolation").GetDataTable(); - Assert.AreEqual(1, dtChildIsolation.Rows.Count); - AssertContains(dtChildIsolation,"X","A","FF",DBNull.Value,0); + Assert.That(dtChildIsolation.Rows, Has.Count.EqualTo(1)); + AssertContains(dtChildIsolation, "X", "A", "FF", DBNull.Value, 0); } - private void AssertContains(DataTable dt, params object[] rowValues) + private static void AssertContains(DataTable dt, params object[] rowValues) { - Assert.IsTrue(dt.Rows.Cast().Any(r=> - rowValues.All(v=>r.ItemArray.Contains(v))),"Did not find expected row {0}{1}Rows seen were:{2}", string.Join("," , rowValues), Environment.NewLine, string.Join(Environment.NewLine, - dt.Rows.Cast().Select(r=>string.Join(",",r.ItemArray)))); + Assert.That(dt.Rows.Cast().Any(r => + rowValues.All(v => r.ItemArray.Contains(v))), $"Did not find expected row {string.Join(",", rowValues)}{Environment.NewLine}Rows seen were:{string.Join(Environment.NewLine, + dt.Rows.Cast().Select(r => string.Join(",", r.ItemArray)))}"); } [TestCase(DatabaseType.MicrosoftSQLServer)] @@ -753,8 +756,8 @@ public void Test_IsolateTables_AmbiguousFk(DatabaseType dbType) dt.Columns.Add("Pk"); dt.Columns.Add("OtherCol"); - dt.Rows.Add("A",1); - dt.Rows.Add("B",2); + dt.Rows.Add("A", 1); + dt.Rows.Add("B", 2); 
//Create a table in 'RAW' (has no constraints) using var dt2 = new DataTable(); @@ -763,10 +766,10 @@ public void Test_IsolateTables_AmbiguousFk(DatabaseType dbType) dt2.Columns.Add("OtherCol2"); dt2.Columns.Add("OtherCol3"); - dt2.Rows.Add("X", "A", "FF",DBNull.Value); //these are colliding (on pk 'X') but list two different (but existing) pks! - dt2.Rows.Add("X", "B", "GG",DBNull.Value); - dt2.Rows.Add("Y", "B", "AA",DBNull.Value); //good record but has to be isolated because it is child of B which is involved in the above collision - + dt2.Rows.Add("X", "A", "FF", DBNull.Value); //these are colliding (on pk 'X') but list two different (but existing) pks! + dt2.Rows.Add("X", "B", "GG", DBNull.Value); + dt2.Rows.Add("Y", "B", "AA", DBNull.Value); //good record but has to be isolated because it is child of B which is involved in the above collision + var tblParent = db.CreateTable("Parent", dt); var tblChild = db.CreateTable("Child", dt2); @@ -792,7 +795,7 @@ public void Test_IsolateTables_AmbiguousFk(DatabaseType dbType) var mutilator = GetMutilator(db, parentTableInfo, childTableInfo); //tell RDMP about how to join tables - new JoinInfo(CatalogueRepository,childColumnInfosCreated.Single( + new JoinInfo(CatalogueRepository, childColumnInfosCreated.Single( c => c.GetRuntimeName().Equals("Fk")), parentColumnInfosCreated.Single(c => c.GetRuntimeName().Equals("Pk")), ExtractionJoinType.Right, null); @@ -800,28 +803,28 @@ public void Test_IsolateTables_AmbiguousFk(DatabaseType dbType) //now that we have a join it should pass checks mutilator.Check(new AcceptAllCheckNotifier()); - var config = new HICDatabaseConfiguration(db.Server,new ReturnSameString()); - var job = new ThrowImmediatelyDataLoadJob(config,parentTableInfo,childTableInfo); + var config = new HICDatabaseConfiguration(db.Server, new ReturnSameString()); + var job = new ThrowImmediatelyDataLoadJob(config, parentTableInfo, childTableInfo); mutilator.Initialize(db, LoadStage.AdjustRaw); mutilator.Mutilate(job); - + //parent should now have 0... 
using var dtParent = parentTableInfo.Discover(DataAccessContext.InternalDataProcessing).GetDataTable(); - Assert.AreEqual(0, dtParent.Rows.Count); + Assert.That(dtParent.Rows, Is.Empty); //isolation should have 2 (A and B) using var dtParentIsolation = db.ExpectTable("Parent_Isolation").GetDataTable(); - Assert.AreEqual(2, dtParentIsolation.Rows.Count); + Assert.That(dtParentIsolation.Rows, Has.Count.EqualTo(2)); //child table should also be empty using var dtChild = childTableInfo.Discover(DataAccessContext.InternalDataProcessing).GetDataTable(); - Assert.AreEqual(0, dtChild.Rows.Count); + Assert.That(dtChild.Rows, Is.Empty); //child isolation table should have 3 (both bad records and the good record that would otherwise be an orphan in live) using var dtChildIsolation = db.ExpectTable("Child_Isolation").GetDataTable(); - Assert.AreEqual(3, dtChildIsolation.Rows.Count); + Assert.That(dtChildIsolation.Rows, Has.Count.EqualTo(3)); } @@ -846,7 +849,7 @@ public void Test_IsolateTables_NoRecordsLeftBehind(DatabaseType dbType) dt.Columns.Add("Pk"); dt.Columns.Add("OtherCol"); - dt.Rows.Add("A",1); + dt.Rows.Add("A", 1); //Create a table in 'RAW' (has no constraints) using var dt2 = new DataTable(); @@ -855,10 +858,10 @@ public void Test_IsolateTables_NoRecordsLeftBehind(DatabaseType dbType) dt2.Columns.Add("OtherCol2"); dt2.Columns.Add("OtherCol3"); - dt2.Rows.Add("X", "A", "FF",DBNull.Value); //these are colliding (on pk 'X') - dt2.Rows.Add("X", "A", "GG",DBNull.Value); - dt2.Rows.Add("Y", "A", "HH",DBNull.Value); //must not be left behind - + dt2.Rows.Add("X", "A", "FF", DBNull.Value); //these are colliding (on pk 'X') + dt2.Rows.Add("X", "A", "GG", DBNull.Value); + dt2.Rows.Add("Y", "A", "HH", DBNull.Value); //must not be left behind + var tblParent = db.CreateTable("Parent", dt); var tblChild = db.CreateTable("Child", dt2); @@ -884,7 +887,7 @@ public void Test_IsolateTables_NoRecordsLeftBehind(DatabaseType dbType) var mutilator = GetMutilator(db, parentTableInfo, childTableInfo); //tell RDMP about how to join tables - new JoinInfo(CatalogueRepository,childColumnInfosCreated.Single( + new JoinInfo(CatalogueRepository, childColumnInfosCreated.Single( c => c.GetRuntimeName().Equals("Fk")), parentColumnInfosCreated.Single(c => c.GetRuntimeName().Equals("Pk")), ExtractionJoinType.Right, null); @@ -892,27 +895,27 @@ public void Test_IsolateTables_NoRecordsLeftBehind(DatabaseType dbType) //now that we have a join it should pass checks mutilator.Check(new AcceptAllCheckNotifier()); - var config = new HICDatabaseConfiguration(db.Server,new ReturnSameString()); - var job = new ThrowImmediatelyDataLoadJob(config,parentTableInfo,childTableInfo); + var config = new HICDatabaseConfiguration(db.Server, new ReturnSameString()); + var job = new ThrowImmediatelyDataLoadJob(config, parentTableInfo, childTableInfo); mutilator.Initialize(db, LoadStage.AdjustRaw); - Assert.DoesNotThrow(()=>mutilator.Mutilate(job)); + Assert.DoesNotThrow(() => mutilator.Mutilate(job)); //parent should now have 0... 
using var dtParent = parentTableInfo.Discover(DataAccessContext.InternalDataProcessing).GetDataTable(); - Assert.AreEqual(0, dtParent.Rows.Count); + Assert.That(dtParent.Rows, Is.Empty); //isolation should have 1 using var dtParentIsolation = db.ExpectTable("Parent_Isolation").GetDataTable(); - Assert.AreEqual(1, dtParentIsolation.Rows.Count); + Assert.That(dtParentIsolation.Rows, Has.Count.EqualTo(1)); //child table should also be empty using var dtChild = childTableInfo.Discover(DataAccessContext.InternalDataProcessing).GetDataTable(); - Assert.AreEqual(0, dtChild.Rows.Count); + Assert.That(dtChild.Rows, Is.Empty); //child isolation table should have 3 (both bad records and the good record that would otherwise be an orphan in live) using var dtChildIsolation = db.ExpectTable("Child_Isolation").GetDataTable(); - Assert.AreEqual(3, dtChildIsolation.Rows.Count); + Assert.That(dtChildIsolation.Rows, Has.Count.EqualTo(3)); } class ReturnSameString : INameDatabasesAndTablesDuringLoads diff --git a/Rdmp.Dicom.Tests/Unit/TestMakeUniquePipelineName.cs b/Rdmp.Dicom.Tests/Unit/TestMakeUniquePipelineName.cs index 644e1665..db4415d6 100644 --- a/Rdmp.Dicom.Tests/Unit/TestMakeUniquePipelineName.cs +++ b/Rdmp.Dicom.Tests/Unit/TestMakeUniquePipelineName.cs @@ -9,16 +9,16 @@ internal class ExecuteCommandCreateNewImagingDatasetSuiteUnitTests [Test] public void TestMakeUniqueName() { - Assert.AreEqual("ff", - ExecuteCommandCreateNewImagingDatasetSuite.MakeUniqueName(Array.Empty(), "ff") - ); + Assert.Multiple(() => + { + Assert.That(ExecuteCommandCreateNewImagingDatasetSuite.MakeUniqueName(Array.Empty(), "ff") + , Is.EqualTo("ff")); - Assert.AreEqual("ff2", - ExecuteCommandCreateNewImagingDatasetSuite.MakeUniqueName(new[] {"ff" }, "ff") - ); - Assert.AreEqual("ff4", - ExecuteCommandCreateNewImagingDatasetSuite.MakeUniqueName(new[] { "ff","ff2","ff3" }, "ff") - ); + Assert.That(ExecuteCommandCreateNewImagingDatasetSuite.MakeUniqueName(new[] { "ff" }, "ff") + , Is.EqualTo("ff2")); + Assert.That(ExecuteCommandCreateNewImagingDatasetSuite.MakeUniqueName(new[] { "ff", "ff2", "ff3" }, "ff") + , Is.EqualTo("ff4")); + }); } } \ No newline at end of file diff --git a/Rdmp.Dicom.UI/CommandExecution/AtomicCommands/ExecuteCommandCreateNewImagingDataset.cs b/Rdmp.Dicom.UI/CommandExecution/AtomicCommands/ExecuteCommandCreateNewImagingDataset.cs index 0faad390..f928b37c 100644 --- a/Rdmp.Dicom.UI/CommandExecution/AtomicCommands/ExecuteCommandCreateNewImagingDataset.cs +++ b/Rdmp.Dicom.UI/CommandExecution/AtomicCommands/ExecuteCommandCreateNewImagingDataset.cs @@ -11,7 +11,7 @@ public class ExecuteCommandCreateNewImagingDataset : BasicUICommandExecution { public ExecuteCommandCreateNewImagingDataset(IActivateItems activator) : base(activator) { - + } public override string GetCommandName() diff --git a/Rdmp.Dicom.UI/CommandExecution/AtomicCommands/ExecuteCommandReviewIsolations.cs b/Rdmp.Dicom.UI/CommandExecution/AtomicCommands/ExecuteCommandReviewIsolations.cs index 2f25da1d..98b82871 100644 --- a/Rdmp.Dicom.UI/CommandExecution/AtomicCommands/ExecuteCommandReviewIsolations.cs +++ b/Rdmp.Dicom.UI/CommandExecution/AtomicCommands/ExecuteCommandReviewIsolations.cs @@ -13,7 +13,7 @@ public ExecuteCommandReviewIsolations(IActivateItems activator, ProcessTask proc { _reviewer = new(processTask); - if (_reviewer.Error != null) + if (_reviewer.Error != null) SetImpossible(_reviewer.Error); } diff --git a/Rdmp.Dicom.UI/CreateNewImagingDatasetUI.cs b/Rdmp.Dicom.UI/CreateNewImagingDatasetUI.cs index 2425e375..57c54d78 100644 --- 
a/Rdmp.Dicom.UI/CreateNewImagingDatasetUI.cs +++ b/Rdmp.Dicom.UI/CreateNewImagingDatasetUI.cs @@ -43,7 +43,7 @@ private bool CreateDatabaseIfNotExists(DiscoveredDatabase db) return true; } - + private void btnCreateSuiteWithTemplate_Click(object sender, EventArgs e) { string filename; diff --git a/Rdmp.Dicom.UI/IsolationTableUI.Designer.cs b/Rdmp.Dicom.UI/IsolationTableUI.Designer.cs index f5616d65..3fbe6c0a 100644 --- a/Rdmp.Dicom.UI/IsolationTableUI.Designer.cs +++ b/Rdmp.Dicom.UI/IsolationTableUI.Designer.cs @@ -30,103 +30,105 @@ protected override void Dispose(bool disposing) /// private void InitializeComponent() { - this.flpTables = new System.Windows.Forms.FlowLayoutPanel(); - this.label1 = new System.Windows.Forms.Label(); - this.toolStrip1 = new System.Windows.Forms.ToolStrip(); - this.toolStripLabel1 = new System.Windows.Forms.ToolStripLabel(); - this.tbTop = new System.Windows.Forms.ToolStripTextBox(); - this.toolStripLabel2 = new System.Windows.Forms.ToolStripLabel(); - this.tbTimeout = new System.Windows.Forms.ToolStripTextBox(); - this.dataGridView1 = new System.Windows.Forms.DataGridView(); - this.toolStrip1.SuspendLayout(); - ((System.ComponentModel.ISupportInitialize)(this.dataGridView1)).BeginInit(); - this.SuspendLayout(); + flpTables = new System.Windows.Forms.FlowLayoutPanel(); + label1 = new System.Windows.Forms.Label(); + toolStrip1 = new System.Windows.Forms.ToolStrip(); + toolStripLabel1 = new System.Windows.Forms.ToolStripLabel(); + tbTop = new System.Windows.Forms.ToolStripTextBox(); + toolStripLabel2 = new System.Windows.Forms.ToolStripLabel(); + tbTimeout = new System.Windows.Forms.ToolStripTextBox(); + dataGridView1 = new System.Windows.Forms.DataGridView(); + toolStrip1.SuspendLayout(); + ((System.ComponentModel.ISupportInitialize)dataGridView1).BeginInit(); + SuspendLayout(); // // flpTables // - this.flpTables.Dock = System.Windows.Forms.DockStyle.Top; - this.flpTables.Location = new System.Drawing.Point(0, 38); - this.flpTables.Name = "flpTables"; - this.flpTables.Size = new System.Drawing.Size(974, 30); - this.flpTables.TabIndex = 0; + flpTables.Dock = System.Windows.Forms.DockStyle.Top; + flpTables.Location = new System.Drawing.Point(0, 71); + flpTables.Margin = new System.Windows.Forms.Padding(6, 7, 6, 7); + flpTables.Name = "flpTables"; + flpTables.Size = new System.Drawing.Size(2110, 74); + flpTables.TabIndex = 0; // // label1 // - this.label1.AutoSize = true; - this.label1.Dock = System.Windows.Forms.DockStyle.Top; - this.label1.Location = new System.Drawing.Point(0, 0); - this.label1.Name = "label1"; - this.label1.Size = new System.Drawing.Size(84, 13); - this.label1.TabIndex = 1; - this.label1.Text = "Isolation Tables:"; + label1.AutoSize = true; + label1.Dock = System.Windows.Forms.DockStyle.Top; + label1.Location = new System.Drawing.Point(0, 0); + label1.Margin = new System.Windows.Forms.Padding(6, 0, 6, 0); + label1.Name = "label1"; + label1.Size = new System.Drawing.Size(181, 32); + label1.TabIndex = 1; + label1.Text = "Isolation Tables:"; // // toolStrip1 // - this.toolStrip1.Items.AddRange(new System.Windows.Forms.ToolStripItem[] { - this.toolStripLabel1, - this.tbTop, - this.toolStripLabel2, - this.tbTimeout}); - this.toolStrip1.Location = new System.Drawing.Point(0, 13); - this.toolStrip1.Name = "toolStrip1"; - this.toolStrip1.Size = new System.Drawing.Size(974, 25); - this.toolStrip1.TabIndex = 4; - this.toolStrip1.Text = "toolStrip1"; + toolStrip1.ImageScalingSize = new System.Drawing.Size(32, 32); + toolStrip1.Items.AddRange(new 
System.Windows.Forms.ToolStripItem[] { toolStripLabel1, tbTop, toolStripLabel2, tbTimeout }); + toolStrip1.Location = new System.Drawing.Point(0, 32); + toolStrip1.Name = "toolStrip1"; + toolStrip1.Padding = new System.Windows.Forms.Padding(0, 0, 4, 0); + toolStrip1.Size = new System.Drawing.Size(2110, 39); + toolStrip1.TabIndex = 4; + toolStrip1.Text = "toolStrip1"; // // toolStripLabel1 // - this.toolStripLabel1.Name = "toolStripLabel1"; - this.toolStripLabel1.Size = new System.Drawing.Size(31, 22); - this.toolStripLabel1.Text = "Top:"; + toolStripLabel1.Name = "toolStripLabel1"; + toolStripLabel1.Size = new System.Drawing.Size(58, 33); + toolStripLabel1.Text = "Top:"; // // tbTop // - this.tbTop.Name = "tbTop"; - this.tbTop.Size = new System.Drawing.Size(100, 25); - this.tbTop.Click += new System.EventHandler(this.tbTop_Click); + tbTop.Name = "tbTop"; + tbTop.Size = new System.Drawing.Size(212, 39); + tbTop.Click += tbTop_Click; // // toolStripLabel2 // - this.toolStripLabel2.Name = "toolStripLabel2"; - this.toolStripLabel2.Size = new System.Drawing.Size(55, 22); - this.toolStripLabel2.Text = "Timeout:"; + toolStripLabel2.Name = "toolStripLabel2"; + toolStripLabel2.Size = new System.Drawing.Size(108, 33); + toolStripLabel2.Text = "Timeout:"; // // tbTimeout // - this.tbTimeout.Name = "tbTimeout"; - this.tbTimeout.Size = new System.Drawing.Size(100, 25); - this.tbTimeout.Click += new System.EventHandler(this.tbTimeout_Click); + tbTimeout.Name = "tbTimeout"; + tbTimeout.Size = new System.Drawing.Size(212, 39); + tbTimeout.Click += tbTimeout_Click; // // dataGridView1 // - this.dataGridView1.AllowUserToAddRows = false; - this.dataGridView1.AllowUserToDeleteRows = false; - this.dataGridView1.ColumnHeadersHeightSizeMode = System.Windows.Forms.DataGridViewColumnHeadersHeightSizeMode.AutoSize; - this.dataGridView1.Dock = System.Windows.Forms.DockStyle.Fill; - this.dataGridView1.Location = new System.Drawing.Point(0, 68); - this.dataGridView1.Name = "dataGridView1"; - this.dataGridView1.ReadOnly = true; - this.dataGridView1.Size = new System.Drawing.Size(974, 604); - this.dataGridView1.TabIndex = 5; + dataGridView1.AllowUserToAddRows = false; + dataGridView1.AllowUserToDeleteRows = false; + dataGridView1.ColumnHeadersHeightSizeMode = System.Windows.Forms.DataGridViewColumnHeadersHeightSizeMode.AutoSize; + dataGridView1.Dock = System.Windows.Forms.DockStyle.Fill; + dataGridView1.Location = new System.Drawing.Point(0, 145); + dataGridView1.Margin = new System.Windows.Forms.Padding(6, 7, 6, 7); + dataGridView1.Name = "dataGridView1"; + dataGridView1.ReadOnly = true; + dataGridView1.RowHeadersWidth = 82; + dataGridView1.Size = new System.Drawing.Size(2110, 1509); + dataGridView1.TabIndex = 5; // // IsolationTableUI // - this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 13F); - this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font; - this.ClientSize = new System.Drawing.Size(974, 672); - this.Controls.Add(this.dataGridView1); - this.Controls.Add(this.flpTables); - this.Controls.Add(this.toolStrip1); - this.Controls.Add(this.label1); - this.Name = "IsolationTableUI"; - this.Text = "Isolation Table Reviewer"; - this.Load += new System.EventHandler(this.IsolationTableUI_Load); - this.toolStrip1.ResumeLayout(false); - this.toolStrip1.PerformLayout(); - ((System.ComponentModel.ISupportInitialize)(this.dataGridView1)).EndInit(); - this.ResumeLayout(false); - this.PerformLayout(); - + AutoScaleDimensions = new System.Drawing.SizeF(13F, 32F); + AutoScaleMode = 
System.Windows.Forms.AutoScaleMode.Font; + ClientSize = new System.Drawing.Size(2110, 1654); + Controls.Add(dataGridView1); + Controls.Add(flpTables); + Controls.Add(toolStrip1); + Controls.Add(label1); + Margin = new System.Windows.Forms.Padding(6, 7, 6, 7); + Name = "IsolationTableUI"; + Text = "Isolation Table Reviewer"; + Load += IsolationTableUI_Load; + toolStrip1.ResumeLayout(false); + toolStrip1.PerformLayout(); + ((System.ComponentModel.ISupportInitialize)dataGridView1).EndInit(); + ResumeLayout(false); + PerformLayout(); } #endregion diff --git a/Rdmp.Dicom.UI/IsolationTableUI.cs b/Rdmp.Dicom.UI/IsolationTableUI.cs index 02163f02..4f6db2f6 100644 --- a/Rdmp.Dicom.UI/IsolationTableUI.cs +++ b/Rdmp.Dicom.UI/IsolationTableUI.cs @@ -38,14 +38,15 @@ private void HandleClick(KeyValuePair kvp) { dataGridView1.DataSource = _currentDataTable = _reviewer.GetDifferences(kvp, out _currentDiffs); } - + private void DataGridView1OnCellFormatting(object sender, DataGridViewCellFormattingEventArgs e) { var diff = _currentDiffs?.FirstOrDefault(d => d.RowIndex == e.RowIndex); if (diff == null || diff.IsMaster || _currentDataTable == null) return; + var colName = _currentDataTable.Columns[e.ColumnIndex].ColumnName; - + if(diff.ConflictingColumns.Contains(colName)) e.CellStyle.BackColor = Color.LightCyan; } @@ -66,7 +67,7 @@ private void tbTimeout_Click(object sender, EventArgs e) private void tbTop_Click(object sender, EventArgs e) { - + tbTop.ForeColor = Color.Black; try diff --git a/Rdmp.Dicom.UI/IsolationTableUI.resx b/Rdmp.Dicom.UI/IsolationTableUI.resx index 5da7a24d..c3545833 100644 --- a/Rdmp.Dicom.UI/IsolationTableUI.resx +++ b/Rdmp.Dicom.UI/IsolationTableUI.resx @@ -1,17 +1,17 @@  - diff --git a/Rdmp.Dicom.UI/Rdmp.Dicom.UI.csproj b/Rdmp.Dicom.UI/Rdmp.Dicom.UI.csproj index 641c4ab1..996b879a 100644 --- a/Rdmp.Dicom.UI/Rdmp.Dicom.UI.csproj +++ b/Rdmp.Dicom.UI/Rdmp.Dicom.UI.csproj @@ -37,5 +37,9 @@ all runtime; build; native; contentfiles; analyzers; buildtransitive + + all + runtime; build; native; contentfiles; analyzers; buildtransitive + diff --git a/Rdmp.Dicom.UI/RdmpDicomUserInterface.cs b/Rdmp.Dicom.UI/RdmpDicomUserInterface.cs index 226d933f..acb751bf 100644 --- a/Rdmp.Dicom.UI/RdmpDicomUserInterface.cs +++ b/Rdmp.Dicom.UI/RdmpDicomUserInterface.cs @@ -75,7 +75,7 @@ public override Image GetImage(object concept, OverlayKind kind = Overla public void RefreshBus_RefreshObject(object sender, RefreshObjectEventArgs e) { - + } public override bool CustomActivate(IMapsDirectlyToDatabaseTable o) diff --git a/Rdmp.Dicom.UI/SemEHRUI.cs b/Rdmp.Dicom.UI/SemEHRUI.cs index fa22443b..e2e4ab16 100644 --- a/Rdmp.Dicom.UI/SemEHRUI.cs +++ b/Rdmp.Dicom.UI/SemEHRUI.cs @@ -91,7 +91,7 @@ private void btnSave_Click(object sender, EventArgs e) _configuration.Temporality = new(); foreach (KeyValuePair item in cblTemporality.CheckedItems) { - _configuration.Temporality.Add(item.Value.ToString()); + _configuration.Temporality.Add(item.Value.ToString()); } _configuration.Negation = ""; diff --git a/Rdmp.Dicom.UI/TagColumnAdderUI.cs b/Rdmp.Dicom.UI/TagColumnAdderUI.cs index 27311278..ca7755f9 100644 --- a/Rdmp.Dicom.UI/TagColumnAdderUI.cs +++ b/Rdmp.Dicom.UI/TagColumnAdderUI.cs @@ -13,7 +13,7 @@ public TagColumnAdderUI(TableInfo tableInfo) { _tableInfo = tableInfo; InitializeComponent(); - + cbxTag.AutoCompleteSource = AutoCompleteSource.ListItems; cbxTag.DataSource = TagColumnAdder.GetAvailableTags(); } diff --git a/Rdmp.Dicom.UI/TagElevationXmlUI.cs b/Rdmp.Dicom.UI/TagElevationXmlUI.cs index 1701f232..8c323c53 
100644 --- a/Rdmp.Dicom.UI/TagElevationXmlUI.cs +++ b/Rdmp.Dicom.UI/TagElevationXmlUI.cs @@ -91,7 +91,7 @@ public void SetUnderlyingObjectTo(DicomSource.TagElevationXml value) RunChecks(); } else - queryEditor.Text = ExampleElevationFile; + queryEditor.Text = ExampleElevationFile; } private void Btn_Click(object sender, EventArgs e) diff --git a/Rdmp.Dicom/Attachers/Routing/AutoRoutingAttacher.cs b/Rdmp.Dicom/Attachers/Routing/AutoRoutingAttacher.cs index 5473535e..72ba6be3 100644 --- a/Rdmp.Dicom/Attachers/Routing/AutoRoutingAttacher.cs +++ b/Rdmp.Dicom/Attachers/Routing/AutoRoutingAttacher.cs @@ -22,7 +22,7 @@ namespace Rdmp.Dicom.Attachers.Routing; /// -/// Routes tags from DICOM files to relational database tables in RAW as part of a DLE configuration. +/// Routes tags from DICOM files to relational database tables in RAW as part of a DLE configuration. /// /// public class AutoRoutingAttacher:Attacher,IPluginAttacher, IDemandToUseAPipeline, IDataFlowDestination @@ -30,7 +30,7 @@ public class AutoRoutingAttacher:Attacher,IPluginAttacher, IDemandToUseAPipeline public IDataLoadJob Job; private Dictionary> _columnNameToTargetTablesDictionary; private Dictionary> _uploaders; - + [DemandsInitialization(@"Optional, when specified this regex must match ALL table names in the load. The pattern must contain a single Group e.g. '^(.*)_.*$' would match CT_Image and CT_Study with the group matching 'CT'. This Grouping will be used to extract the Modality code when deciding which table(s) to put a given record into")] @@ -41,7 +41,7 @@ This Grouping will be used to extract the Modality code when deciding which tabl [DemandsInitialization("This attacher expects multiple flat files that will be loaded this pattern should match them (file pattern not regex e.g. *.csv)")] public string ListPattern { get; set; } - + readonly Dictionary _columnNamesRoutedSuccesfully = new(StringComparer.CurrentCultureIgnoreCase); readonly Stopwatch _sw = new(); @@ -49,7 +49,7 @@ This Grouping will be used to extract the Modality code when deciding which tabl protected AutoRoutingAttacher(bool requestsExternalDatabaseCreation) : base(requestsExternalDatabaseCreation) //Derived classes can change mind about RAW creation { - + } public AutoRoutingAttacher() : base(true)//Create RAW for us @@ -59,7 +59,7 @@ protected AutoRoutingAttacher(bool requestsExternalDatabaseCreation) : base(requ public override ExitCodeType Attach(IDataLoadJob job,GracefulCancellationToken token) { Job = job; - + //if we have an explicit payload to run instead (this is how you inject explicit files/archives/directories to be loaded without touching the disk if (job.Payload != null) { @@ -92,7 +92,7 @@ public override ExitCodeType Attach(IDataLoadJob job,GracefulCancellationToken t job.OnNotify(this,new(ProgressEventType.Warning, $"Ignored input columns:{unmatchedColumns}")); } - + return ExitCodeType.Success; } @@ -144,9 +144,9 @@ public override void Check(ICheckNotifier notifier) if(LoadPipeline != null) { new PipelineChecker(LoadPipeline).Check(notifier); - + //don't check this since we are our own Fixed source for the engine so we just end up in a loop! 
but do instantiate it incase there are construction/context errors - + PipelineChecker c = new(LoadPipeline); c.Check(notifier); } @@ -176,7 +176,7 @@ public DataTable ProcessPipelineData(DataTable toProcess, IDataLoadEventListener RefreshUploadDictionary(); CreateTableUploaders(); - + CreateModalityMap(); AddRows(toProcess); @@ -221,7 +221,7 @@ private void CreateModalityMap() throw new($"ModalityMatchingRegex failed to match against DataTable {dt.TableName}"); var modality = m.Groups[1].Value; - + _modalityMap.TryAdd(dt, modality); } } @@ -306,7 +306,7 @@ private void AddCellValue(DataRow inputRow, DataColumn column, DataTable destina public void Dispose(IDataLoadEventListener listener, Exception pipelineFailureExceptionIfAny) { - + } private void DisposeUploaders(Exception exception) @@ -316,17 +316,17 @@ private void DisposeUploaders(Exception exception) item1.Dispose(ThrowImmediatelyDataLoadEventListener.Quiet, exception); item2.CloseAndArchive(); } - + foreach (var dt in _columnNameToTargetTablesDictionary.SelectMany(v => v.Value).Distinct()) dt.Dispose(); - + _columnNameToTargetTablesDictionary = null; _uploaders = null; } public void Abort(IDataLoadEventListener listener) { - + } #endregion } \ No newline at end of file diff --git a/Rdmp.Dicom/Attachers/Routing/AutoRoutingAttacherWithPersistentRaw.cs b/Rdmp.Dicom/Attachers/Routing/AutoRoutingAttacherWithPersistentRaw.cs index 33256827..bc23b003 100644 --- a/Rdmp.Dicom/Attachers/Routing/AutoRoutingAttacherWithPersistentRaw.cs +++ b/Rdmp.Dicom/Attachers/Routing/AutoRoutingAttacherWithPersistentRaw.cs @@ -8,7 +8,7 @@ class AutoRoutingAttacherWithPersistentRaw : AutoRoutingAttacher { public AutoRoutingAttacherWithPersistentRaw():base(false) { - + } public override ExitCodeType Attach(IDataLoadJob job, GracefulCancellationToken token) { diff --git a/Rdmp.Dicom/Attachers/Routing/PersistentRawTableCreator.cs b/Rdmp.Dicom/Attachers/Routing/PersistentRawTableCreator.cs index 5d51e811..a316c53f 100644 --- a/Rdmp.Dicom/Attachers/Routing/PersistentRawTableCreator.cs +++ b/Rdmp.Dicom/Attachers/Routing/PersistentRawTableCreator.cs @@ -36,9 +36,9 @@ public void CreateRAWTablesInDatabase(DiscoveredDatabase rawDb, IDataLoadJob job var discardedColumns = tableInfo.PreLoadDiscardedColumns.Where(c => c.Destination == DiscardedColumnDestination.Dilute).ToArray(); var clone = new TableInfoCloneOperation(job.Configuration,(TableInfo)tableInfo,LoadBubble.Raw,job); - + clone.CloneTable(liveTable.Database, rawDb, tableInfo.Discover(DataAccessContext.DataLoad), rawTableName, true,true, true, discardedColumns); - + var existingColumns = tableInfo.ColumnInfos.Select(c => c.GetRuntimeName(LoadStage.AdjustRaw)).ToArray(); foreach (var preLoadDiscardedColumn in tableInfo.PreLoadDiscardedColumns) @@ -54,7 +54,7 @@ public void CreateRAWTablesInDatabase(DiscoveredDatabase rawDb, IDataLoadJob job //add all the preload discarded columns because they could be routed to ANO store or sent to oblivion AddColumnToTable(rawTable, preLoadDiscardedColumn.RuntimeColumnName, preLoadDiscardedColumn.SqlDataType, job); } - + _rawTables.Add(rawTable); } diff --git a/Rdmp.Dicom/Cache/Pipeline/CFindSource.cs b/Rdmp.Dicom/Cache/Pipeline/CFindSource.cs index 34e88531..9e81afd7 100644 --- a/Rdmp.Dicom/Cache/Pipeline/CFindSource.cs +++ b/Rdmp.Dicom/Cache/Pipeline/CFindSource.cs @@ -48,7 +48,7 @@ public override SMIDataChunk DoGetChunk(ICacheFetchRequest cacheRequest, IDataLo //temp dir var cacheDir = new 
LoadDirectory(Request.CacheProgress.LoadProgress.LoadMetadata.LocationOfFlatFiles).Cache; var cacheLayout = new SMICacheLayout(cacheDir, new(Modality)); - + Chunk = new(Request) { FetchDate = dateFrom, @@ -129,7 +129,7 @@ private void WriteValue(CsvWriter writer, DicomCFindResponse response, DicomTag } else { - writer.WriteField(DicomTypeTranslater.Flatten(val)); + writer.WriteField(DicomTypeTranslater.Flatten(val)); } } } \ No newline at end of file diff --git a/Rdmp.Dicom/Cache/Pipeline/CachingSCP.cs b/Rdmp.Dicom/Cache/Pipeline/CachingSCP.cs index e5daebc6..b4d01d8d 100644 --- a/Rdmp.Dicom/Cache/Pipeline/CachingSCP.cs +++ b/Rdmp.Dicom/Cache/Pipeline/CachingSCP.cs @@ -45,7 +45,7 @@ public class CachingSCP : DicomService, IDicomServiceProvider, IDicomCStoreProvi DicomTransferSyntax.MPEG4AVCH264HighProfileLevel42For2DVideo, DicomTransferSyntax.MPEG4AVCH264HighProfileLevel42For3DVideo, DicomTransferSyntax.MPEG4AVCH264StereoHighProfileLevel42, - + // Uncompressed DicomTransferSyntax.ExplicitVRLittleEndian, DicomTransferSyntax.ExplicitVRBigEndian, @@ -100,7 +100,7 @@ public void OnConnectionClosed(Exception e) if (e != null) msg += e.Message + e.StackTrace; Logger.Info(msg, e); Listener.OnNotify(this,new(Verbose ? ProgressEventType.Information : ProgressEventType.Trace, - $"ConnectionClosed: {msg}")); + $"ConnectionClosed: {msg}")); } #endregion diff --git a/Rdmp.Dicom/Cache/Pipeline/Dicom/DicomRequestSender.cs b/Rdmp.Dicom/Cache/Pipeline/Dicom/DicomRequestSender.cs index 15e5a5d0..e1f60a75 100644 --- a/Rdmp.Dicom/Cache/Pipeline/Dicom/DicomRequestSender.cs +++ b/Rdmp.Dicom/Cache/Pipeline/Dicom/DicomRequestSender.cs @@ -45,7 +45,7 @@ public void Check() /// - /// Throttle requests using W(O) = mO(t) + c where W is the wait period, O is the opertaion duration, m and c are positive constants + /// Throttle requests using W(O) = mO(t) + c where W is the wait period, O is the opertaion duration, m and c are positive constants /// The request is added to the client which is unreleased at the end of this request send. /// /// @@ -58,7 +58,7 @@ public void ThrottleRequest(DicomRequest dicomRequest, IDicomClient client, Canc #endregion /// - /// Throttle requests using W(O) = mO(t) + c where W is the wait period, O is the opertaion duration, m and c are positive constants + /// Throttle requests using W(O) = mO(t) + c where W is the wait period, O is the opertaion duration, m and c are positive constants /// Sends requests added to the client is unreleased at the end of this request send. /// /// @@ -100,7 +100,7 @@ private void SendRequest(DicomRequest dicomRequest, CancellationToken token) /// /// Blocks until the request is received so calling code doesn't have to deal with asynchrony (see the EventWaitHandle in TrySend). - /// Only the timeout is applied no Throtelling, the client is unreleased on return + /// Only the timeout is applied no Throtelling, the client is unreleased on return /// /// /// @@ -116,7 +116,7 @@ public void SendRequest(DicomRequest dicomRequest, IDicomClient client,Cancellat /// /// Blocks until the request is received so calling code doesn't have to deal with asynchrony (see the EventWaitHandle in TrySend). 
- /// Only the timeout is applied no Throtelling, the client is unreleased on return + /// Only the timeout is applied no Throtelling, the client is unreleased on return /// /// /// diff --git a/Rdmp.Dicom/Cache/Pipeline/ProcessBasedCacheSource.cs b/Rdmp.Dicom/Cache/Pipeline/ProcessBasedCacheSource.cs index 4b515636..b64bd47b 100644 --- a/Rdmp.Dicom/Cache/Pipeline/ProcessBasedCacheSource.cs +++ b/Rdmp.Dicom/Cache/Pipeline/ProcessBasedCacheSource.cs @@ -30,34 +30,34 @@ public class ProcessBasedCacheSource : CacheSource public override void Abort(IDataLoadEventListener listener) { - + } public override void Check(ICheckNotifier notifier) { - + } public override void Dispose(IDataLoadEventListener listener, Exception pipelineFailureExceptionIfAny) { - + } public override SMIDataChunk DoGetChunk(ICacheFetchRequest request, IDataLoadEventListener listener, GracefulCancellationToken cancellationToken) - { + { listener.OnNotify(this,new(ProgressEventType.Information,$"ProcessBasedCacheSource version is {typeof(ProcessBasedCacheSource).Assembly.GetName().Version}. Assembly is {typeof(ProcessBasedCacheSource).Assembly} " )); - + // Where we are putting the files var cacheDir = new LoadDirectory(Request.CacheProgress.LoadProgress.LoadMetadata.LocationOfFlatFiles).Cache; var cacheLayout = new SMICacheLayout(cacheDir, new("ALL")); - + Chunk = new(Request) { FetchDate = Request.Start, Modality = "ALL", Layout = cacheLayout }; - + var workingDirectory = cacheLayout.GetLoadCacheDirectory(listener); listener.OnNotify(this,new(ProgressEventType.Information, @@ -69,7 +69,7 @@ public override SMIDataChunk DoGetChunk(ICacheFetchRequest request, IDataLoadEve listener.OnNotify(this,new(ProgressEventType.Information, $"Args template is:{Args}")); listener.OnNotify(this,new(ProgressEventType.Information, $"Datetime format is:{TimeFormat}")); - + var args = Args .Replace("%s",request.Start.ToString(TimeFormat)) @@ -85,7 +85,7 @@ public override SMIDataChunk DoGetChunk(ICacheFetchRequest request, IDataLoadEve p.StartInfo.UseShellExecute = false; p.StartInfo.RedirectStandardOutput = true; p.OutputDataReceived += (sender, a) => listener.OnNotify(this,new(ProgressEventType.Information,a.Data)); - + p.Start(); p.BeginOutputReadLine(); @@ -97,7 +97,7 @@ public override SMIDataChunk DoGetChunk(ICacheFetchRequest request, IDataLoadEve if(p.ExitCode != 0 && ThrowOnNonZeroExitCode) throw new($"Process exited with code {p.ExitCode}"); } - + return Chunk; } diff --git a/Rdmp.Dicom/Cache/SMICacheDestination.cs b/Rdmp.Dicom/Cache/SMICacheDestination.cs index bf6323e4..ac80ebb1 100644 --- a/Rdmp.Dicom/Cache/SMICacheDestination.cs +++ b/Rdmp.Dicom/Cache/SMICacheDestination.cs @@ -15,7 +15,7 @@ public class SMICacheDestination : CacheFilesystemDestination public string Modality { get; set; } [DemandsInitialization("The file extension to look for in fetched data", Mandatory = true, DefaultValue = "*.dcm")] - public string Extension { get; set; } + public string Extension { get; set; } public SMIDataChunk ProcessPipelineData(SMIDataChunk toProcess, IDataLoadEventListener listener, GracefulCancellationToken cancellationToken) { @@ -41,7 +41,7 @@ public SMIDataChunk ProcessPipelineData(SMIDataChunk toProcess, IDataLoadEventLi //save cache fill progress to the database if(!DEBUG_DoNotUpdateCacheProgress) toProcess.Request.SaveCacheFillProgress(toProcess.Request.End); - + return toProcess; } diff --git a/Rdmp.Dicom/Cache/SMICacheLayout.cs b/Rdmp.Dicom/Cache/SMICacheLayout.cs index a3e40cb5..917be255 100644 --- 
a/Rdmp.Dicom/Cache/SMICacheLayout.cs +++ b/Rdmp.Dicom/Cache/SMICacheLayout.cs @@ -14,14 +14,14 @@ public class SMICacheLayout : CacheLayout { public SMICacheLayout(DirectoryInfo cacheDirectory, SMICachePathResolver resolver): base(cacheDirectory, "yyyyMMddHH", CacheArchiveType.Zip, CacheFileGranularity.Hour, resolver) { - + } - + public void CreateArchive(DateTime archiveDate,IDataLoadEventListener listener, string extension) { var downloadDirectory = GetLoadCacheDirectory(listener); var dataFiles = downloadDirectory.EnumerateFiles(extension).ToArray(); - + if (!dataFiles.Any()) return; diff --git a/Rdmp.Dicom/Cache/SMICacheTextFileGenerator.cs b/Rdmp.Dicom/Cache/SMICacheTextFileGenerator.cs index 673ec5b3..b04a8c41 100644 --- a/Rdmp.Dicom/Cache/SMICacheTextFileGenerator.cs +++ b/Rdmp.Dicom/Cache/SMICacheTextFileGenerator.cs @@ -34,7 +34,7 @@ public override ExitCodeType Fetch(IDataLoadJob dataLoadJob, GracefulCancellatio if (!jobs.Any()) return ExitCodeType.OperationNotRequired; - + StringBuilder sb = new(); foreach (var file in jobs.Values) diff --git a/Rdmp.Dicom/CommandExecution/ExecuteCommandAddTag.cs b/Rdmp.Dicom/CommandExecution/ExecuteCommandAddTag.cs index 4c71c70b..5f0525a3 100644 --- a/Rdmp.Dicom/CommandExecution/ExecuteCommandAddTag.cs +++ b/Rdmp.Dicom/CommandExecution/ExecuteCommandAddTag.cs @@ -23,7 +23,7 @@ public class ExecuteCommandAddTag : BasicCommandExecution /// The Catalogue you want to add the tag to. Must have a single table under it. /// The name of a dicom tag /// Optional. Pass null to lookup the dicom tags datatype automatically (recommended). Pass a value to use an explicit SQL DBMS datatype instead. - public ExecuteCommandAddTag(BasicActivateItems activator, ICatalogue catalogue,string column,string dataType) + public ExecuteCommandAddTag(BasicActivateItems activator, ICatalogue catalogue,string column,string dataType) : this(activator,new[] { catalogue },column,dataType) { @@ -37,16 +37,16 @@ public ExecuteCommandAddTag(BasicActivateItems activator, ICatalogue catalogue,s /// The name of a dicom tag /// Optional. Pass null to lookup the dicom tags datatype automatically (recommended). Pass a value to use an explicit SQL DBMS datatype instead. [UseWithObjectConstructor] - public ExecuteCommandAddTag(BasicActivateItems activator,ICatalogue[] catalogues, + public ExecuteCommandAddTag(BasicActivateItems activator,ICatalogue[] catalogues, [DemandsInitialization("Name of the new column you want created.")] - string column, + string column, [DemandsInitialization("Optional when column is the name of a Dicom Tag e.g. 
StudyInstanceUID")] string dataType):base(activator) { foreach(var c in catalogues) { _adders.Add(BuildTagAdder(c,column, dataType)); - + // once we can't process any Catalogue we should stop investigating if (IsImpossible) break; @@ -71,7 +71,7 @@ private TagColumnAdder BuildTagAdder(ICatalogue catalogue, string column, string var syntax = tables[0].GetQuerySyntaxHelper(); - //if user hasn't listed a specific datatype, guess it from the column + //if user hasn't listed a specific datatype, guess it from the column if (string.IsNullOrWhiteSpace(dataType)) { var available = TagColumnAdder.GetAvailableTags(); diff --git a/Rdmp.Dicom/CommandExecution/ExecuteCommandBuildExtractionView.cs b/Rdmp.Dicom/CommandExecution/ExecuteCommandBuildExtractionView.cs index 794e2e39..854a61ab 100644 --- a/Rdmp.Dicom/CommandExecution/ExecuteCommandBuildExtractionView.cs +++ b/Rdmp.Dicom/CommandExecution/ExecuteCommandBuildExtractionView.cs @@ -138,7 +138,7 @@ private bool HasPrimaryKey(TableInfo t, string name) } /// - /// Identify study table based on its name + /// Identify study table based on its name /// /// /// @@ -157,7 +157,7 @@ public override void Execute() primaryTable.IsPrimaryExtractionTable = true; primaryTable.SaveToDatabase(); - + foreach(var series in SeriesLevelTables) { SetupSubTableWithJoinsOf("StudyInstanceUID", primaryTable, series,joinManager); diff --git a/Rdmp.Dicom/CommandExecution/ExecuteCommandCreateNewImagingDataset.cs b/Rdmp.Dicom/CommandExecution/ExecuteCommandCreateNewImagingDataset.cs index ebed623a..7506e06d 100644 --- a/Rdmp.Dicom/CommandExecution/ExecuteCommandCreateNewImagingDataset.cs +++ b/Rdmp.Dicom/CommandExecution/ExecuteCommandCreateNewImagingDataset.cs @@ -14,9 +14,9 @@ public class ExecuteCommandCreateNewImagingDataset:BasicCommandExecution private readonly ImageTableTemplate _tableTemplate; private readonly IRDMPPlatformRepositoryServiceLocator _repositoryLocator; private readonly DiscoveredTable _expectedTable; - + public ICatalogue NewCatalogueCreated { get; private set; } - + public ExecuteCommandCreateNewImagingDataset(IRDMPPlatformRepositoryServiceLocator repositoryLocator, DiscoveredTable expectedTable, ImageTableTemplate tableTemplate) { _repositoryLocator = repositoryLocator; @@ -50,7 +50,7 @@ public override void Execute() seriesEi.IsExtractionIdentifier = true; seriesEi.SaveToDatabase(); } - + //make it extractable new ExtractableDataSet(_repositoryLocator.DataExportRepository, cata); diff --git a/Rdmp.Dicom/CommandExecution/ExecuteCommandCreateNewImagingDatasetSuite.cs b/Rdmp.Dicom/CommandExecution/ExecuteCommandCreateNewImagingDatasetSuite.cs index 47f38487..64457eb4 100644 --- a/Rdmp.Dicom/CommandExecution/ExecuteCommandCreateNewImagingDatasetSuite.cs +++ b/Rdmp.Dicom/CommandExecution/ExecuteCommandCreateNewImagingDatasetSuite.cs @@ -27,7 +27,7 @@ namespace Rdmp.Dicom.CommandExecution; public class ExecuteCommandCreateNewImagingDatasetSuite : BasicCommandExecution { - + private readonly DiscoveredDatabase _databaseToCreateInto; private readonly DirectoryInfo _projectDirectory; private readonly IExternalDatabaseServer _loggingServer; @@ -36,22 +36,22 @@ public class ExecuteCommandCreateNewImagingDatasetSuite : BasicCommandExecution public List NewCataloguesCreated { get; } public LoadMetadata NewLoadMetadata { get; private set; } - + /// - /// The component of the data load that will handle reading the Dicom files / json and converting it into DataTables (only populated after Execute has been called). 
+ /// The component of the data load that will handle reading the Dicom files / json and converting it into DataTables (only populated after Execute has been called). /// Note that this is a PipelineComponent meaning it is the template which gets stamped out into a hydrated instance at runtime. The DicomSourcePipelineComponent.Path Should /// contain the DicomSourceType.Name and when the DLE is run the DicomSourceType is the Type that will be created from the template /// public PipelineComponent DicomSourcePipelineComponent { get; private set; } /// - /// The DicomSource component Type to use for the Loadmetadata pipeline responsible for loading the dicom metadata into the Catalogues (e.g. DicomDatasetCollectionSource + /// The DicomSource component Type to use for the Loadmetadata pipeline responsible for loading the dicom metadata into the Catalogues (e.g. DicomDatasetCollectionSource /// for Json or DicomFileCollectionSource for files) /// public Type DicomSourceType { get; set; } public bool CreateCoalescer { get; set; } - + /// /// Optional text to put at the beginning of the Catalogues / Pipeline etc /// @@ -84,7 +84,7 @@ public ExecuteCommandCreateNewImagingDatasetSuite(IRDMPPlatformRepositoryService if(_loggingServer == null) SetImpossible("No default logging server has been configured in your Catalogue database"); - + CreateLoad = true; } @@ -146,7 +146,7 @@ public override void Execute() foreach (var table in Template.Tables) { var tblName = GetNameWithPrefix(table.TableName); - + var tbl = _databaseToCreateInto.ExpectTable(tblName); var cmd = new ExecuteCommandCreateNewImagingDataset(_repositoryLocator, tbl, table); @@ -202,10 +202,10 @@ public override void Execute() arg.SetValue(NewLoadMetadata); arg.SaveToDatabase(); } - + pipe.SourcePipelineComponent_ID = DicomSourcePipelineComponent.ID; pipe.SaveToDatabase(); - + //Create the load process task that uses the pipe to load RAW tables with data from the dicom files var pt = new ProcessTask(_catalogueRepository, NewLoadMetadata, LoadStage.Mounting) @@ -223,10 +223,10 @@ public override void Execute() var args = PersistentRaw? 
pt.CreateArgumentsForClassIfNotExists() : pt.CreateArgumentsForClassIfNotExists(); SetArgument(args, "LoadPipeline", pipe); - + /////////////////////////////////////// Distinct tables on load ///////////////////////// - + var distincter = new ProcessTask(_catalogueRepository,NewLoadMetadata,LoadStage.AdjustRaw); var distincterArgs = distincter.CreateArgumentsForClassIfNotExists(); @@ -254,7 +254,7 @@ public override void Execute() foreach (var tbl in tablesCreated.Where(tbl => !tbl.DiscoverColumns().Any(c=>c.GetRuntimeName().Equals("SOPInstanceUID",StringComparison.CurrentCultureIgnoreCase)))) regexPattern.Append($"({tbl.GetRuntimeName()})|"); - + var coalArgs = coalescer.CreateArgumentsForClassIfNotExists(); SetArgument(coalArgs, "TableRegexPattern", regexPattern.ToString().TrimEnd('|')); @@ -277,7 +277,7 @@ public override void Execute() SetArgument(args, "ConditionsToTerminateUnder", PrematureLoadEndCondition.NoRecordsInAnyTablesInDatabase); //////////////////////////////////////////////////////////////////////////////////////////////// - + var checker = new CheckEntireDataLoadProcess(NewLoadMetadata, new(NewLoadMetadata), new()); checker.Check(new AcceptAllCheckNotifier()); } diff --git a/Rdmp.Dicom/CommandExecution/ExecuteCommandPacsFetch.cs b/Rdmp.Dicom/CommandExecution/ExecuteCommandPacsFetch.cs index 13832a99..b8551d17 100644 --- a/Rdmp.Dicom/CommandExecution/ExecuteCommandPacsFetch.cs +++ b/Rdmp.Dicom/CommandExecution/ExecuteCommandPacsFetch.cs @@ -20,9 +20,9 @@ class ExecuteCommandPacsFetch : BasicCommandExecution, ICacheFetchRequestProvide public ExecuteCommandPacsFetch(IBasicActivateItems activator,string start, string end, string remoteAeHost, ushort remotePort,string remoteAeTitle, string localAeHost, ushort localPort, string localAeTitle, string outDir, int maxRetries):base(activator) { - var startDate = DateTime.Parse(start); - var endDate = DateTime.Parse(end); - + var startDate = DateTime.Parse(start); + var endDate = DateTime.Parse(end); + // Make something that kinda looks like a valid DLE load var memory = new MemoryCatalogueRepository(); var lmd = new LoadMetadata(memory); diff --git a/Rdmp.Dicom/DataProviders/SMICachedFileRetriever.cs b/Rdmp.Dicom/DataProviders/SMICachedFileRetriever.cs index 8ad7faf9..8a7a44ec 100644 --- a/Rdmp.Dicom/DataProviders/SMICachedFileRetriever.cs +++ b/Rdmp.Dicom/DataProviders/SMICachedFileRetriever.cs @@ -15,7 +15,7 @@ public class SMICachedFileRetriever : CachedFileRetriever { public override void Initialize(ILoadDirectory hicProjectDirectory, DiscoveredDatabase dbInfo) { - + } public override ExitCodeType Fetch(IDataLoadJob dataLoadJob, GracefulCancellationToken cancellationToken) { diff --git a/Rdmp.Dicom/ExternalApis/SemEHRApiCaller.cs b/Rdmp.Dicom/ExternalApis/SemEHRApiCaller.cs index 283b766a..d72a0f55 100644 --- a/Rdmp.Dicom/ExternalApis/SemEHRApiCaller.cs +++ b/Rdmp.Dicom/ExternalApis/SemEHRApiCaller.cs @@ -71,7 +71,7 @@ internal void Run(AggregateConfiguration ac, CachedAggregateConfigurationResults if (semEHRResponse.success) { SubmitIdentifierList(config.ReturnField, - semEHRResponse.results.Count == 0 ? new string[] { } : semEHRResponse.results.ToArray(), ac, + semEHRResponse.results.Count == 0 ? 
Array.Empty() : semEHRResponse.results.ToArray(), ac, cache); /*If we can cope with the return field with multiple types this will handle that @@ -100,7 +100,7 @@ internal void Run(AggregateConfiguration ac, CachedAggregateConfigurationResults //If we failed, get the failing error message throw new($"The SemEHR API has failed: {semEHRResponse.message}"); } - + } else { diff --git a/Rdmp.Dicom/ExternalApis/SemEHRConfiguration.cs b/Rdmp.Dicom/ExternalApis/SemEHRConfiguration.cs index a4fb72fc..bf8b4376 100644 --- a/Rdmp.Dicom/ExternalApis/SemEHRConfiguration.cs +++ b/Rdmp.Dicom/ExternalApis/SemEHRConfiguration.cs @@ -69,7 +69,7 @@ public class SemEHRConfiguration /// The passphrase required to connect to the API /// public string Passphrase { get; set; } = ""; - + /// /// The HTTP Basic Authentication Username/Password to use when connecting to the SemEHR Api /// @@ -144,14 +144,14 @@ public class SemEHRConfiguration //API Return Fields /// - /// The list of fields that should be returned - "SOPInstanceUID", "SeriesInstanceUID", "StudyInstanceUID" + /// The list of fields that should be returned - "SOPInstanceUID", "SeriesInstanceUID", "StudyInstanceUID" /// //Currently only supporting one return feild which is all we need from an RDMP point of view //public List ReturnFields { get; set; } = new List(); //API Return Field /// - /// The field that should be returned - "SOPInstanceUID", "SeriesInstanceUID", "StudyInstanceUID" + /// The field that should be returned - "SOPInstanceUID", "SeriesInstanceUID", "StudyInstanceUID" /// public string ReturnField { get; set; } = ""; @@ -263,7 +263,7 @@ private SemEHRConfiguration OverrideWith(SemEHRConfiguration over) } public JsonObject GetQueryJson() - { + { //Set the terms dynamic termsObj = new JsonObject(); if (!string.IsNullOrWhiteSpace(Query)) @@ -271,7 +271,7 @@ public JsonObject GetQueryJson() if(QDepth > -1) termsObj.qdepth = QDepth; if(!string.IsNullOrWhiteSpace(QStop)) - termsObj.qstop = QStop; + termsObj.qstop = QStop; if (!string.IsNullOrWhiteSpace(Negation)) termsObj.negation = Negation; if (!string.IsNullOrWhiteSpace(Experiencer)) @@ -309,7 +309,7 @@ public JsonObject GetQueryJson() public string GetQueryJsonAsString() { return Regex.Replace(GetQueryJson().ToString(), @"\s+", ""); - + } public string GetUrlWithQuerystring() diff --git a/Rdmp.Dicom/Extraction/FoDicomBased/AmbiguousFilePath.cs b/Rdmp.Dicom/Extraction/FoDicomBased/AmbiguousFilePath.cs index cdc023b0..a590b597 100644 --- a/Rdmp.Dicom/Extraction/FoDicomBased/AmbiguousFilePath.cs +++ b/Rdmp.Dicom/Extraction/FoDicomBased/AmbiguousFilePath.cs @@ -83,7 +83,7 @@ private string Combine(string root, string path) if (!IsZipReference(path)) return Path.Combine(root,path); - + var bits = path.Split('!'); return $"{Path.Combine(root, bits[0])}!{bits[1]}"; } @@ -185,8 +185,8 @@ public static bool IsDicomReference(string fullPath) var extension = Path.GetExtension(fullPath); - return - string.IsNullOrWhiteSpace(extension) || + return + string.IsNullOrWhiteSpace(extension) || // The following is a valid dicom file name but looks like it has an extension .5323 // 123.3221.23123.5325 diff --git a/Rdmp.Dicom/Extraction/FoDicomBased/AmbiguousFilePathResolutionException.cs b/Rdmp.Dicom/Extraction/FoDicomBased/AmbiguousFilePathResolutionException.cs index e1bd412d..86d3021a 100644 --- a/Rdmp.Dicom/Extraction/FoDicomBased/AmbiguousFilePathResolutionException.cs +++ b/Rdmp.Dicom/Extraction/FoDicomBased/AmbiguousFilePathResolutionException.cs @@ -6,11 +6,11 @@ public class 
AmbiguousFilePathResolutionException : Exception { public AmbiguousFilePathResolutionException(string msg):base(msg) { - + } public AmbiguousFilePathResolutionException(string msg, Exception inner):base(msg,inner) { - + } } \ No newline at end of file diff --git a/Rdmp.Dicom/Extraction/FoDicomBased/DirectoryDecisions/PutDicomFilesInExtractionDirectories.cs b/Rdmp.Dicom/Extraction/FoDicomBased/DirectoryDecisions/PutDicomFilesInExtractionDirectories.cs index 5484713e..fae64c35 100644 --- a/Rdmp.Dicom/Extraction/FoDicomBased/DirectoryDecisions/PutDicomFilesInExtractionDirectories.cs +++ b/Rdmp.Dicom/Extraction/FoDicomBased/DirectoryDecisions/PutDicomFilesInExtractionDirectories.cs @@ -30,11 +30,11 @@ protected DirectoryInfo SubDirectoryCreate(DirectoryInfo parent, string child) protected string SaveDicomData(DirectoryInfo outputDirectory,DicomDataset dicomDataset) { var path = Path.Combine(outputDirectory.FullName, dicomDataset.GetValue(DicomTag.SOPInstanceUID, 0)); - + if(!path.EndsWith(".dcm")) { path += ".dcm"; - } + } var outPath = new FileInfo(path); new DicomFile(dicomDataset).Save(outPath.FullName); @@ -47,7 +47,7 @@ public virtual string PredictOutputPath(DirectoryInfo outputDirectory, string re return null; var path = Path.Combine(outputDirectory.FullName, sopUid); - + if (!path.EndsWith(".dcm")) { path += ".dcm"; diff --git a/Rdmp.Dicom/Extraction/FoDicomBased/FoDicomAnonymiser.cs b/Rdmp.Dicom/Extraction/FoDicomBased/FoDicomAnonymiser.cs index 9084296a..0fddf871 100644 --- a/Rdmp.Dicom/Extraction/FoDicomBased/FoDicomAnonymiser.cs +++ b/Rdmp.Dicom/Extraction/FoDicomBased/FoDicomAnonymiser.cs @@ -103,7 +103,7 @@ public DataTable ProcessPipelineData(DataTable toProcess, IDataLoadEventListener if (MetadataOnly) { var matching = GetMetadataOnlyColumnsToProcess(toProcess); - + if (!matching.Any()) { // this should have already returned above via IgnoreDataset, bad times if you end up here. @@ -112,7 +112,7 @@ public DataTable ProcessPipelineData(DataTable toProcess, IDataLoadEventListener var dictionary = matching.ToDictionary(k => k, c => UIDMapping.SupportedTags.First(k => k.Key.DictionaryEntry.Keyword.Equals(c.ColumnName))); - + var releaseIdentifierColumn = GetReleaseIdentifierColumn().GetRuntimeName(); foreach (DataRow row in toProcess.Rows) @@ -236,7 +236,7 @@ private void SubstituteMetadataOnly(DataRow row, Dictionary - /// Setup class ready to start anonymising. Pass in + /// Setup class ready to start anonymising. 
Pass in /// /// /// Destination directory to pass to @@ -333,7 +333,7 @@ public void ProcessFile(DicomFile dicomFile, IDataLoadEventListener listener, s //change value in dataset ds.AddOrUpdate(key, releaseValue); - + //and change value in DataTable if (rowIfAny != null && rowIfAny.Table.Columns.Contains(key.DictionaryEntry.Keyword)) rowIfAny[key.DictionaryEntry.Keyword] = releaseValue; @@ -348,7 +348,7 @@ public void ProcessFile(DicomFile dicomFile, IDataLoadEventListener listener, s } var newPath = putter.WriteOutDataset(_destinationDirectory, releaseColumnValue, ds); - + if(rowIfAny != null) rowIfAny[RelativeArchiveColumnName] = newPath; @@ -387,12 +387,12 @@ private IEnumerable GetDeleteTags() public void Dispose(IDataLoadEventListener listener, Exception pipelineFailureExceptionIfAny) { - + } public void Abort(IDataLoadEventListener listener) { - + } public void PreInitialize(IExtractCommand value, IDataLoadEventListener listener) @@ -412,7 +412,7 @@ public void Check(ICheckNotifier notifier) { notifier.OnCheckPerformed(new($"Error processing {nameof(DeleteTags)}",CheckResult.Fail, ex)); } - + lock(CreateServersOneAtATime) { diff --git a/Rdmp.Dicom/Extraction/MappingRepository.cs b/Rdmp.Dicom/Extraction/MappingRepository.cs index 2cda9c58..080db490 100644 --- a/Rdmp.Dicom/Extraction/MappingRepository.cs +++ b/Rdmp.Dicom/Extraction/MappingRepository.cs @@ -177,7 +177,7 @@ public string GetOrAllocateMapping(string value, int projectNumber, UIDType uidT } static readonly Random r = new(); - + private string GetKindaUid() { StringBuilder sb = new(); diff --git a/Rdmp.Dicom/Extraction/UIDMapping.cs b/Rdmp.Dicom/Extraction/UIDMapping.cs index 8a38811e..ef116938 100644 --- a/Rdmp.Dicom/Extraction/UIDMapping.cs +++ b/Rdmp.Dicom/Extraction/UIDMapping.cs @@ -25,7 +25,7 @@ public void SetUIDType(DicomTag tag) { if (SupportedTags.TryGetValue(tag, out var supportedTag)) UIDType = supportedTag; - else + else throw new InvalidOperationException( $"UIDMapping does not handle this tag type: {tag.DictionaryEntry.Keyword}"); } diff --git a/Rdmp.Dicom/LiveVsTemplateComparer.cs b/Rdmp.Dicom/LiveVsTemplateComparer.cs index 99233d36..48f0d7fc 100644 --- a/Rdmp.Dicom/LiveVsTemplateComparer.cs +++ b/Rdmp.Dicom/LiveVsTemplateComparer.cs @@ -34,7 +34,7 @@ public LiveVsTemplateComparer(ITableInfo table,ImageTableTemplateCollection temp c.TableName.Equals(liveTableNameWithoutPrefix,StringComparison.CurrentCultureIgnoreCase)) ?? throw new($"Could not find a Template called '{liveTableName}' or '{liveTableNameWithoutPrefix}'. 
Templates in file were {string.Join(",",templateCollection.Tables.Select(t=>t.TableName))}"); //script the template - var creator = new ImagingTableCreation(discoveredTable.Database.Server.GetQuerySyntaxHelper()); + var creator = new ImagingTableCreation(discoveredTable.Database.Server.GetQuerySyntaxHelper()); TemplateSql = creator.GetCreateTableSql(discoveredTable.Database,liveTableName,template, discoveredTable.Schema); TemplateSql = TailorTemplateSql(TemplateSql ); @@ -43,7 +43,7 @@ private string TailorTemplateSql(string templateSql) { //condense all multiple spaces to single spaces templateSql = Regex.Replace(templateSql," +"," "); - + return templateSql; } @@ -51,10 +51,10 @@ private string TailorLiveSql(string liveSql, DatabaseType databaseType) { // get rid of collation liveSql = Regex.Replace(liveSql,"\\bCOLLATE \\w+",""); - + // condense all multiple spaces to single spaces liveSql = Regex.Replace(liveSql," +"," "); - + return liveSql; } } \ No newline at end of file diff --git a/Rdmp.Dicom/PipelineComponents/CFind/CFindDirSource.cs b/Rdmp.Dicom/PipelineComponents/CFind/CFindDirSource.cs index 686ed0a3..d5ac36fc 100644 --- a/Rdmp.Dicom/PipelineComponents/CFind/CFindDirSource.cs +++ b/Rdmp.Dicom/PipelineComponents/CFind/CFindDirSource.cs @@ -29,14 +29,14 @@ public class CFindDirSource : IPluginDataFlowSource, IPipelineRequire [DemandsInitialization("Comma seperated list of dicom tags to read from the CFind results", Mandatory = true, DefaultValue = DefaultHeaders)] public string HeadersToRead { get; set; } = DefaultHeaders; - + int filesRead = 0; Stopwatch timer; public void Abort(IDataLoadEventListener listener) { - + } public void Check(ICheckNotifier notifier) @@ -50,9 +50,9 @@ public void Check(ICheckNotifier notifier) public void Dispose(IDataLoadEventListener listener, Exception pipelineFailureExceptionIfAny) { - + } - + bool firstTime = true; public DataTable GetChunk(IDataLoadEventListener listener, GracefulCancellationToken cancellationToken) @@ -93,7 +93,7 @@ private DataTable GenerateTable() { dt.TableName = QuerySyntaxHelper.MakeHeaderNameSensible(_file.File.Name); } - + foreach (var h in HeadersToRead.Split(new[] { ',' }, StringSplitOptions.RemoveEmptyEntries)) { @@ -138,7 +138,7 @@ private void ProcessDir(string dir, DataTable dt, IDataLoadEventListener listene private void XmlToRows(string file, DataTable dt) { using var fileStream = File.Open(file, FileMode.Open); - //Load the file and create a navigator object. + //Load the file and create a navigator object. 
var xDoc = new XmlDocument(); xDoc.Load(fileStream); diff --git a/Rdmp.Dicom/PipelineComponents/DicomSources/DicomDatasetCollectionSource.cs b/Rdmp.Dicom/PipelineComponents/DicomSources/DicomDatasetCollectionSource.cs index 40abe4d7..65ddadfb 100644 --- a/Rdmp.Dicom/PipelineComponents/DicomSources/DicomDatasetCollectionSource.cs +++ b/Rdmp.Dicom/PipelineComponents/DicomSources/DicomDatasetCollectionSource.cs @@ -28,7 +28,7 @@ protected override void MarkCorrupt(DicomDataset ds) base.MarkCorrupt(ds); _datasetListWorklist.MarkCorrupt(ds); } - + public override DataTable GetChunk(IDataLoadEventListener listener, GracefulCancellationToken cancellationToken) { if(_datasetListWorklist == null) @@ -49,7 +49,7 @@ public override DataTable GetChunk(IDataLoadEventListener listener, GracefulCanc ProcessDataset(filename, ds, dt, listener, otherValuesToStoreInRow); currentBatch--; } - + sw.Stop(); listener.OnNotify(this, new NotifyEventArgs(ProgressEventType.Information, $"GetChunk cumulative total time is {sw.ElapsedMilliseconds}ms")); diff --git a/Rdmp.Dicom/PipelineComponents/DicomSources/DicomFileCollectionSource.cs b/Rdmp.Dicom/PipelineComponents/DicomSources/DicomFileCollectionSource.cs index 26104972..858dfb0c 100644 --- a/Rdmp.Dicom/PipelineComponents/DicomSources/DicomFileCollectionSource.cs +++ b/Rdmp.Dicom/PipelineComponents/DicomSources/DicomFileCollectionSource.cs @@ -26,7 +26,7 @@ public class DicomFileCollectionSource : DicomSource, IPipelineRequirement0) listener.OnNotify(this,new(ProgressEventType.Warning, $"Skipped '{skippedEntries}' in zip archive '{zipFileName}' because they did not have .dcm extensions")); @@ -156,7 +156,7 @@ private void ProcessZipArchive(DataTable dt, string zipFileName, IDataLoadEventL } private void RecordError(string filenameOrZipEntry, Exception exception) - { + { _totalErrors ++; _listener.OnNotify(this, new(ProgressEventType.Warning, $"{filenameOrZipEntry} could not be processed", exception)); @@ -168,27 +168,27 @@ private void RecordError(string filenameOrZipEntry, Exception exception) List tasks = new(); readonly object oTasksLock = new(); - - + + private void ProcessDirectoryAsync(DataTable dt,DirectoryInfo directoryInfo, IDataLoadEventListener listener) { bool tooManyRunningTasks; - + lock (oTasksLock) tooManyRunningTasks = tasks.Count(t => !t.IsCompleted) >= ThreadCount; //if the maximum number of tasks are already executing if(tooManyRunningTasks) Task.WaitAll(tasks.ToArray()); - + lock (oTasksLock) tasks = tasks.Where(t=>!t.IsCompleted).ToList(); - + //start asynchronous processing of this directory var newT = Task.Run(() => ProcessDirectory(dt, directoryInfo, listener)); tasks.Add(newT); - - //but then continue to process the subdirectories + + //but then continue to process the subdirectories DirectoryInfo[] directories; try { @@ -257,7 +257,7 @@ private void ProcessFile(Stream stream, DataTable dt, string filename, IDataLoad var ds = file.Dataset; ProcessDataset(filename,ds,dt,listener); - + } } @@ -271,12 +271,12 @@ public void PreInitialize(IDicomWorklist value, IDataLoadEventListener listener) } _fileWorklist = value as IDicomFileWorklist; - + if(_fileWorklist == null) listener.OnNotify(this,new(ProgressEventType.Warning, $"Expected IDicomWorklist to be of Type IDicomProcessListProvider (but it was {value.GetType().Name}). 
This component will be skipped"));
     }
-
+
     public override DataTable TryGetPreview()
     {
         try
diff --git a/Rdmp.Dicom/PipelineComponents/DicomSources/DicomSource.cs b/Rdmp.Dicom/PipelineComponents/DicomSources/DicomSource.cs
index 1cab1bb7..16b5dd16 100644
--- a/Rdmp.Dicom/PipelineComponents/DicomSources/DicomSource.cs
+++ b/Rdmp.Dicom/PipelineComponents/DicomSources/DicomSource.cs
@@ -55,7 +55,7 @@ private static string StandardisePath(string value)
         //if it has a trailing slash (but isn't just '/') then trim the end
         if(value != null)
             return value.Length != 1 ? value.TrimEnd('\\', '/') : value;
-
+
         return null;
     }
@@ -93,7 +93,7 @@ public void Check(ICheckNotifier notifier)
     {
         if (FieldMapTableIfAny != null && TagWhitelist != null)
             notifier.OnCheckPerformed(new("Cannot specify both a FieldMapTableIfAny and a TagWhitelist", CheckResult.Fail));
-
+
         try
         {
             LoadElevationRequestsFile();
@@ -147,7 +147,7 @@ protected void ProcessDataset(string filename, DicomDataset ds, DataTable dt, ID
                         value = DicomTypeTranslater.Flatten(DicomTypeTranslaterReader.GetCSharpValue(ds, item));
                         break;
                     case InvalidDataHandling.MarkCorrupt:
-
+
                         try
                         {
                             //try to enforce types
@@ -156,10 +156,10 @@ protected void ProcessDataset(string filename, DicomDataset ds, DataTable dt, ID
                         catch (Exception ex)
                         {
                             //something went wrong pulling out the value
-
+
                             //mark it as corrupt
                             MarkCorrupt(ds);
-
+
                             //but make sure to warn people listening
                             listener.OnNotify(this, new(ProgressEventType.Warning,
                                 $"Could not GetCSharpValue for DicomItem {item.Tag}({entry.Keyword}) for {GetProblemFileDescription(filename, otherValuesToStoreInRow)}", ex));
@@ -302,14 +302,14 @@ protected void ProcessDataset(string filename, DicomDataset ds, DataTable dt, ID
     public bool ShouldSkip(DataTable dt, DicomTag tag)
     {
-        //if there is a whitelist
+        //if there is a whitelist
         if (TagWhitelist != null)
            if (!TagWhitelist.IsMatch(tag.DictionaryEntry.Keyword)) //and the current header isn't matched by it
                return true;

        //if there is a blacklist
        if (TagBlacklist != null)
-            if (TagBlacklist.IsMatch(tag.DictionaryEntry.Keyword)) //and the current header matches the blacklist
+            if (TagBlacklist.IsMatch(tag.DictionaryEntry.Keyword)) //and the current header matches the blacklist
                return true; //skip it

        //if there is an explict mapping to follow
@@ -398,7 +398,7 @@ public virtual TagElevationRequestCollection LoadElevationRequestsFile()
         //if tag elevation is specified in a file
         return TagElevationConfigurationFile != null ?
             new TagElevationRequestCollection(File.ReadAllText(TagElevationConfigurationFile.FullName)) : null;
-
+
         //there is no tag elevation
     }
diff --git a/Rdmp.Dicom/PipelineComponents/DicomSources/Worklists/FlatFileToLoadDicomProcessListProvider.cs b/Rdmp.Dicom/PipelineComponents/DicomSources/Worklists/FlatFileToLoadDicomProcessListProvider.cs
index 74cadea9..19468677 100644
--- a/Rdmp.Dicom/PipelineComponents/DicomSources/Worklists/FlatFileToLoadDicomProcessListProvider.cs
+++ b/Rdmp.Dicom/PipelineComponents/DicomSources/Worklists/FlatFileToLoadDicomProcessListProvider.cs
@@ -16,16 +16,16 @@ public class FlatFileToLoadDicomFileWorklist : IDicomFileWorklist
     public FlatFileToLoadDicomFileWorklist(FlatFileToLoad file)
     {
         _file = file;
-
+
         if(file.File is not { Extension: ".txt" })
             return;
-
+
         //input is a textual list of files/zips
         _lines = File.ReadAllLines(file.File.FullName).Where(l => !string.IsNullOrWhiteSpace(l)).ToArray();
         _linesCurrent = 0;
     }
-
+
     public bool GetNextFileOrDirectoryToProcess(out DirectoryInfo directory, out AmbiguousFilePath file)
     {
         file = null;
@@ -33,7 +33,7 @@ public bool GetNextFileOrDirectoryToProcess(out DirectoryInfo directory, out Amb
         if (_dataExhausted)
             return false;
-
+
         //input is a single dicom file/zip
         if(_lines == null)
         {
@@ -54,7 +54,7 @@ public bool GetNextFileOrDirectoryToProcess(out DirectoryInfo directory, out Amb
             file = new(new FileInfo(line.Trim()).FullName);
             return true;
         }
-
+
         if (Directory.Exists(line.Trim()))
         {
             _linesCurrent++;
diff --git a/Rdmp.Dicom/PipelineComponents/DicomSources/Worklists/IDicomDatasetWorklist.cs b/Rdmp.Dicom/PipelineComponents/DicomSources/Worklists/IDicomDatasetWorklist.cs
index 737da8c9..a10987c7 100644
--- a/Rdmp.Dicom/PipelineComponents/DicomSources/Worklists/IDicomDatasetWorklist.cs
+++ b/Rdmp.Dicom/PipelineComponents/DicomSources/Worklists/IDicomDatasetWorklist.cs
@@ -9,7 +9,7 @@ public interface IDicomDatasetWorklist : IDicomWorklist
    /// Returns the next DicomDataset that should be processed. Returns null if there are no more datasets to process.
    ///
    /// The absolute or relative path to the file that is represented by the DicomDataset
-    /// Key value collection of any other columns that should be populated with values
+    /// Key value collection of any other columns that should be populated with values
    /// (there should not include the names of any dicom tags in the key collection). E.g. 'MessageGuid' would be acceptable but 'StudyDate' would not
    ///
    DicomDataset GetNextDatasetToProcess(out string filename, out Dictionary otherValuesToStoreInRow);
diff --git a/Rdmp.Dicom/PipelineComponents/DicomSources/Worklists/IDicomWorklist.cs b/Rdmp.Dicom/PipelineComponents/DicomSources/Worklists/IDicomWorklist.cs
index 8308058d..ea042892 100644
--- a/Rdmp.Dicom/PipelineComponents/DicomSources/Worklists/IDicomWorklist.cs
+++ b/Rdmp.Dicom/PipelineComponents/DicomSources/Worklists/IDicomWorklist.cs
@@ -4,7 +4,7 @@ namespace Rdmp.Dicom.PipelineComponents.DicomSources.Worklists;
 ///
 /// Shared worklist interface for anything that can be turned into dicom datasets. This is used to define the context and compatibility of sources
-/// for the dicom load pipeline
+/// for the dicom load pipeline
 ///
 public interface IDicomWorklist
 {
diff --git a/Rdmp.Dicom/PipelineComponents/IsolationReview.cs b/Rdmp.Dicom/PipelineComponents/IsolationReview.cs
index eae8c425..cc4836d6 100644
--- a/Rdmp.Dicom/PipelineComponents/IsolationReview.cs
+++ b/Rdmp.Dicom/PipelineComponents/IsolationReview.cs
@@ -15,7 +15,7 @@ public class IsolationReview
     public string Error { get; }
     public TableInfo[] TablesToIsolate { get; set; }
     public ExternalDatabaseServer IsolationDatabase { get; set; }
-
+
     public int Top { get; set; }
     public int Timeout { get; set; } = 300;
@@ -23,7 +23,7 @@ public IsolationReview(ProcessTask processTask)
     {
         if(processTask == null)
             throw new ArgumentNullException(nameof(processTask));
-
+
         if (!processTask.IsPluginType() || processTask.ProcessTaskType != ProcessTaskType.MutilateDataTable ||
             processTask.Path != typeof(PrimaryKeyCollisionIsolationMutilation).FullName)
         {
@@ -46,7 +46,7 @@ public IsolationReview(ProcessTask processTask)
                 return;
             }
         }
-
+
         if (TablesToIsolate == null || TablesToIsolate.Length == 0)
         {
             Error = "No tables configured on Isolation task";
@@ -62,7 +62,7 @@ public Dictionary GetIsolationTables()
         var db = IsolationDatabase.Discover(DataAccessContext.InternalDataProcessing);

         return TablesToIsolate.ToDictionary(
-            tableInfo => tableInfo,
+            tableInfo => tableInfo,
            tableInfo => db.ExpectTable(PrimaryKeyCollisionIsolationMutilation.GetIsolationTableName(tableInfo))
            );
     }
@@ -75,19 +75,19 @@ public DataTable GetDifferences(KeyValuePair isolatio
            throw new($"Table '{tbl.GetFullyQualifiedName()}' did not exist");

         var pks = ti.ColumnInfos.Where(c => c.IsPrimaryKey).ToArray();
-
+
         if(pks.Length != 1)
            throw new($"TableInfo {ti} for which isolation table exists has {pks.Length} IsPrimaryKey columns");
-
+
         var isolationCols = tbl.DiscoverColumns();
         var isolationPks = isolationCols.Where(c => c.GetRuntimeName().Equals(pks[0].GetRuntimeName())).ToArray();
-
+
         if(isolationPks.Length != 1)
            throw new($"Found {isolationPks.Length != 1} columns called {pks[0].GetRuntimeName()} in isolation table {tbl.GetFullyQualifiedName()}");

         var isolationPk = isolationPks[0];
-
+
         var sortOn = isolationPk.GetRuntimeName();

         using var con = tbl.Database.Server.GetConnection();
@@ -99,7 +99,7 @@ public DataTable GetDifferences(KeyValuePair isolatio
         if (Top > 0)
         {
             var syntaxHelper = tbl.Database.Server.GetQuerySyntaxHelper();
-            var topxSql =
+            var topxSql =
                 syntaxHelper.HowDoWeAchieveTopX(Top);

             sql = topxSql.Location switch
@@ -118,7 +118,7 @@ public DataTable GetDifferences(KeyValuePair isolatio
         }

         DataTable dt = new();
-
+
         using (var cmd = tbl.Database.Server.GetCommand(sql, con))
         {
             cmd.CommandTimeout = Timeout;
@@ -127,7 +127,7 @@ public DataTable GetDifferences(KeyValuePair isolatio
         }

         differences = new();
-
+
         //if there's only 1 row in the table then there are no differences!
         if (dt.Rows.Count < 2)
         {
@@ -137,7 +137,7 @@ public DataTable GetDifferences(KeyValuePair isolatio
         //clone the schema and import only rows where there are 2+ entries for the same 'pk' value
         var differencesDt = dt.Clone();
-
+
         //for each PK value the first time we encounter it it is the 'master' row version from which all other rows are compared
         var masterRow = dt.Rows[0];
         var haveImportedMasterRow = false;
@@ -199,9 +199,9 @@ private bool AreDifferent(object a, object b)
 public class IsolationDifference
 {
     public string Pk { get; set; }
-
+
     public int RowIndex { get; set; }
-
+
     public bool IsMaster { get; set; }

     public List ConflictingColumns { get; set; } = new();
diff --git a/Rdmp.Dicom/PipelineComponents/PrimaryKeyCollisionIsolationMutilation.cs b/Rdmp.Dicom/PipelineComponents/PrimaryKeyCollisionIsolationMutilation.cs
index 12a98b45..bf58f093 100644
--- a/Rdmp.Dicom/PipelineComponents/PrimaryKeyCollisionIsolationMutilation.cs
+++ b/Rdmp.Dicom/PipelineComponents/PrimaryKeyCollisionIsolationMutilation.cs
@@ -28,13 +28,13 @@ public class PrimaryKeyCollisionIsolationMutilation:IPluginMutilateDataTables
 {
     [DemandsInitialization("All tables which participate in record isolation e.g. Study,Series, Image. These tables must have valid JoinInfos configured and one must be marked TableInfo.IsPrimaryExtractionTable",Mandatory=true)]
     public TableInfo[] TablesToIsolate { get; set; }
-
+
     [DemandsInitialization("Database in which to put _Isolation tables.",Mandatory=true)]
     public ExternalDatabaseServer IsolationDatabase { get; set; }

     [DemandsInitialization("Timeout for each individual sql command, measured in seconds",Mandatory=true, DefaultValue = 30)]
     public int TimeoutInSeconds {get;set;}
-
+
     private List _joins;
     private DiscoveredDatabase _raw;
     private IQuerySyntaxHelper _syntaxHelper;
@@ -52,7 +52,7 @@ public void Check(ICheckNotifier notifier)
         //if there is only one or no tables that's fine (mandatory will check for null itself)
         if (TablesToIsolate == null)
             throw new("No tables have been selected");
-
+
         //make sure there is only one primary key per table and that it's a string
         foreach (var t in TablesToIsolate)
         {
@@ -64,7 +64,7 @@ public void Check(ICheckNotifier notifier)
         if (TablesToIsolate.Length >1 && TablesToIsolate.Count(t => t.IsPrimaryExtractionTable) != 1)
         {
             var primaryTables = TablesToIsolate.Where(t => t.IsPrimaryExtractionTable).ToArray();
-
+
             notifier.OnCheckPerformed(
                 new(
                     $"There are {TablesToIsolate.Length} tables to operate on but {primaryTables.Length} are marked IsPrimaryExtractionTable ({string.Join(",",primaryTables.Select(t=>t.Name))}). This should be set on a single top level table only e.g. Study",
@@ -137,7 +137,7 @@ private void CreateIsolationTable(DiscoveredTable toCreate, TableInfo tableInfo)
         //create a RAW table schema called TableName_Isolation
         var cloner = new TableInfoCloneOperation(new(toCreate.Database.Server),tableInfo,LoadBubble.Live,_job ?? (IDataLoadEventListener)ThrowImmediatelyDataLoadEventListener.Quiet);
         cloner.CloneTable(from.Database, toCreate.Database, from, toCreate.GetRuntimeName(), true, true, true, tableInfo.PreLoadDiscardedColumns);
-
+
         if(!toCreate.Exists())
             throw new($"Table '{toCreate}' did not exist after issuing create command");
@@ -160,7 +160,7 @@ private void BuildJoinOrder(bool isChecks)
         _qb.PrimaryExtractionTable = _primaryTable;
         _qb.RegenerateSQL();
-
+
         _joins = _qb.JoinsUsedInQuery ?? new List();
         _fromSql = SqlQueryBuilderHelper.GetFROMSQL(_qb);
@@ -186,7 +186,7 @@ private void BuildJoinOrder(bool isChecks)
                 break;

             next = jnext.ForeignKey.TableInfo;
-
+
             if(overflow-- ==0)
                 throw new("Joins resulted in a loop overflow");
         }
@@ -221,7 +221,7 @@ public ExitCodeType Mutilate(IDataLoadJob job)
         _job = job;

         BuildJoinOrder(false);
-
+
         foreach (var tableInfo in TablesToIsolate)
         {
             var pkCol = tableInfo.ColumnInfos.Single(c => c.IsPrimaryKey);
@@ -323,7 +323,7 @@ private object[] GetPrimaryKeyValuesFor(ColumnInfo deleteOn, object[] deleteValu
             }
         }
-
+
         return toReturn.ToArray();
     }
@@ -335,7 +335,7 @@ private DataTable PullTable(TableInfo tableInfo, DbConnection con, string delete
         var pkColumnName = GetRAWColumnNameFullyQualified(pk);
         var deleteFromTableName = GetRAWTableNameFullyQualified(tableInfo);
-
+
         //fetch all the data (LEFT/RIGHT joins can introduce null records so add not null to WHERE for the table being migrated to avoid full null rows)
         var sqlSelect = $"Select distinct {deleteFromTableName}.* {_fromSql} WHERE {deleteOnColumnName} = @val AND {pkColumnName} is not null";
@@ -352,9 +352,9 @@ private DataTable PullTable(TableInfo tableInfo, DbConnection con, string delete
             using var da = _raw.Server.GetDataAdapter(cmdSelect);
             da.Fill(dt);
         }
-
+
         dt.Columns.Add(SpecialFieldNames.DataLoadRunID, typeof(int));
-
+
         foreach (DataRow row in dt.Rows)
             row[SpecialFieldNames.DataLoadRunID] = _dataLoadInfoId;
@@ -386,7 +386,7 @@ private void DeleteRows(TableInfo toDelete, string deleteOnColumnName, object[]
         foreach (var d in deleteValues)
         {
             p2.Value = d;
-
+
             //then delete it
             var affectedRows = cmdDelete.ExecuteNonQuery();
@@ -407,7 +407,7 @@ private string GetPostgreSqlDeleteCommand(TableInfo toDelete,string deleteOnColu
         var sb = new StringBuilder();

         // 1 join per pair of tables
-
+
         if(_joins.Count != TablesToIsolate.Length -1)
             throw new($"Unexpected join count, expected {TablesToIsolate.Length -1} but found {_joins.Count}");
@@ -429,9 +429,9 @@ private string GetPostgreSqlDeleteCommand(TableInfo toDelete,string deleteOnColu
             sb.Append(syntax.EnsureWrapped(j.PrimaryKey.TableInfo.GetRuntimeName(LoadBubble.Raw,_namer)));
             sb.Append('.');
             sb.Append(syntax.EnsureWrapped(j.PrimaryKey.GetRuntimeName(LoadStage.AdjustRaw)));
-
+
             sb.Append('=');
-
+
             sb.Append(syntax.EnsureWrapped(j.ForeignKey.TableInfo.GetRuntimeName(LoadBubble.Raw,_namer)));
             sb.Append('.');
             sb.Append(syntax.EnsureWrapped(j.ForeignKey.GetRuntimeName(LoadStage.AdjustRaw)));
diff --git a/Rdmp.Dicom/Rdmp.Dicom.csproj b/Rdmp.Dicom/Rdmp.Dicom.csproj
index 13b14e65..3a7bfac2 100644
--- a/Rdmp.Dicom/Rdmp.Dicom.csproj
+++ b/Rdmp.Dicom/Rdmp.Dicom.csproj
@@ -31,6 +31,10 @@
       all
       runtime; build; native; contentfiles; analyzers; buildtransitive
+
+      all
+      runtime; build; native; contentfiles; analyzers; buildtransitive
+
diff --git a/Rdmp.Dicom/SemEHRConsoleUI.cs b/Rdmp.Dicom/SemEHRConsoleUI.cs
index 606d9504..590a3c96 100644
--- a/Rdmp.Dicom/SemEHRConsoleUI.cs
+++ b/Rdmp.Dicom/SemEHRConsoleUI.cs
@@ -47,7 +47,7 @@ public SemEHRConsoleUI(IBasicActivateItems activator, SemEHRApiCaller api, Aggre
         tbDateFormat.Text = _configuration.StartEndDateFormat;

         tbQuery.Text = _configuration.Query;
-
+
         cbUseStartDate.Checked = _configuration.UseStartDate;
         dptStartDate.Date = _configuration.StartDate;
         cbUseEndDate.Checked = _configuration.UseEndDate;
diff --git a/Rdmp.Dicom/TagPromotionSchema/TagColumnAdder.cs b/Rdmp.Dicom/TagPromotionSchema/TagColumnAdder.cs
index 3cad8a5b..98912d8c 100644
--- a/Rdmp.Dicom/TagPromotionSchema/TagColumnAdder.cs
+++ b/Rdmp.Dicom/TagPromotionSchema/TagColumnAdder.cs
@@ -50,7 +50,7 @@ public void Execute()
         Check(_notifierForExecute);

         var db = GetDatabase();
-
+
         using (var con = db.Server.GetConnection())
         {
             con.Open();
diff --git a/Rdmp.Dicom/TagPromotionSchema/TagLoadedColumnPair.cs b/Rdmp.Dicom/TagPromotionSchema/TagLoadedColumnPair.cs
index 53770df2..ed19a477 100644
--- a/Rdmp.Dicom/TagPromotionSchema/TagLoadedColumnPair.cs
+++ b/Rdmp.Dicom/TagPromotionSchema/TagLoadedColumnPair.cs
@@ -7,7 +7,7 @@ namespace Rdmp.Dicom.TagPromotionSchema;
 public class TagLoadedColumnPair
 {
     public static DatabaseTypeRequest LoadedColumnDataType = new(typeof (string), 50);
-
+
     public enum States
     {
         None,