From c917a0db9e9c3d63b2b65ba0512de0fe4dcceda4 Mon Sep 17 00:00:00 2001 From: Oliver Stolpe Date: Tue, 31 Oct 2023 17:45:47 +0100 Subject: [PATCH] fixing tests. --- .../commands/generate_result_set.py | 176 ++++++------ utils/kickoff_orphaned_annotation_query.py | 4 + variants/tests/test_utils.py | 250 +++++++++--------- variants/utils.py | 123 +++++---- 4 files changed, 298 insertions(+), 255 deletions(-) diff --git a/maintenance/management/commands/generate_result_set.py b/maintenance/management/commands/generate_result_set.py index 95764259d..87273dee9 100644 --- a/maintenance/management/commands/generate_result_set.py +++ b/maintenance/management/commands/generate_result_set.py @@ -7,8 +7,7 @@ from projectroles.models import Project from variants.models import Case - -# from variants.tasks import create_queryresultset as task_create_queryresultset +from variants.tasks import create_queryresultset as task_create_queryresultset from variants.utils import create_queryresultset @@ -69,18 +68,15 @@ def handle(self, *args, **options): msg_warning = "" if options["async"]: - count, salvable, duplicates, tsv_lines = create_queryresultset( + count, salvable, duplicates, orphans = create_queryresultset( options["case_uuid"], options["project_uuid"], options["all"] ) msg = "Done creating result sets:" if ( count["smallvariantqueryresultset"] or count["svqueryresultset"] - or count["sms"]["added"]["flags"] - or count["sms"]["added"]["comments"] - or count["sms"]["added"]["acmg_ratings"] - or count["svs"]["added"]["flags"] - or count["svs"]["added"]["comments"] + or count["sms"]["added"] + or count["svs"]["added"] or count["sms"]["removed"] or count["svs"]["removed"] ): @@ -88,15 +84,10 @@ def handle(self, *args, **options): - SmallVariantQueryResultSets created: {count['smallvariantqueryresultset']} - SvQueryResultSets created: {count['svqueryresultset']} - SmallVariantQueryResultSets: - - Added: - - Flags: {count['sms']['added']['flags']} - - Comments: {count['sms']['added']['comments']} - - ACMG ratings: {count['sms']['added']['acmg_ratings']} + - Added: {count['sms']['added']} - Removed: {count['sms']['removed']} - SvQueryResultSets: - - Added: - - Flags: {count['svs']['added']['flags']} - - Comments: {count['svs']['added']['comments']} + - Added: {count['svs']['added']} - Removed: {count['svs']['removed']}""" else: msg += "\n- Nothing to do." @@ -130,77 +121,108 @@ def handle(self, *args, **options): - Lost: - Flags: {count['svs']['lost']['flags']} - Comments: {count['svs']['lost']['comments']}""".lstrip() + if ( + count["sms"]["salvable"]["flags"] + or count["sms"]["salvable"]["comments"] + or count["sms"]["salvable"]["acmg_ratings"] + or count["svs"]["salvable"]["flags"] + or count["svs"]["salvable"]["comments"] + ): with open("salvable.json", "w") as f: json.dump(salvable, f, indent=1) - # if duplicates["sms"] or duplicates["svs"]: - # msg_warning += f""" - # WARNING! 
There are duplicate variants: - # - Duplicate small variants: {len(duplicates['sms'])} - # - Duplicate structural variants: {len(duplicates['svs'])}""".lstrip() - # with open("duplicates.json", "w") as f: - # from variants.views import UUIDEncoder - # json.dump(duplicates, f, indent=1, cls=UUIDEncoder) - with open("orphaned_sv_flags.tsv", "w") as f: - f.write( - "case_uuid\tproject\tcase_name\tregion\tlost\tflag_molecular\tflag_visual\tflag_validation\tflag_phenotype_match\tflag_summary\tjson\n" - ) - for line in tsv_lines["svs"]["flags"]: - f.write( - "{case_uuid}\t{project}\t{case_name}\t{chromosome}:{start}-{end}\t{lost}\t{flag_molecular}\t{flag_visual}\t{flag_validation}\t{flag_phenotype_match}\t{flag_summary}\t{json}\n".format( - **line, - flag_molecular=line["json"]["flag_molecular"], - flag_visual=line["json"]["flag_visual"], - flag_validation=line["json"]["flag_validation"], - flag_phenotype_match=line["json"]["flag_phenotype_match"], - flag_summary=line["json"]["flag_summary"], + if duplicates["sms"] or duplicates["svs"]: + msg_warning += f""" +WARNING! There are duplicate variants: +- Duplicate small variants: {len(duplicates['sms'])} +- Duplicate structural variants: {len(duplicates['svs'])}""".lstrip() + if duplicates["sms"]: + with open("duplicates_sms.json", "w") as f: + f.write("case_uuid\tproject\tcase_name\tregion\tjson\n") + for line in duplicates["sms"]: + f.write( + "{case_uuid}\t{project}\t{case_name}\t{chromosome}:{start}-{end}\t{json}\n".format( + **line, + ) ) - ) - with open("orphaned_sv_comments.tsv", "w") as f: - f.write("case_uuid\tproject\tcase_name\tregion\tlost\tcomment\tjson\n") - for line in tsv_lines["svs"]["comments"]: - f.write( - "{case_uuid}\t{project}\t{case_name}\t{chromosome}:{start}-{end}\t{lost}\t{comment}\t{json}\n".format( - **line, - comment=line["json"]["comment"], + if duplicates["svs"]: + with open("duplicates_svs.json", "w") as f: + f.write("case_uuid\tproject\tcase_name\tregion\tjson\n") + for line in duplicates["svs"]: + f.write( + "{case_uuid}\t{project}\t{case_name}\t{chromosome}:{start}-{end}\t{json}\n".format( + **line, + ) ) - ) - with open("orphaned_sm_flags.tsv", "w") as f: - f.write("case_uuid\tproject\tcase_name\tregion\tlost\tjson\n") - for line in tsv_lines["sms"]["flags"]: - print(line) + if orphans["svs"]["flags"]: + with open("orphans_sv_flags.tsv", "w") as f: f.write( - "{case_uuid}\t{project}\t{case_name}\t{chromosome}:{start}-{end}\t{lost}\t{flag_molecular}\t{flag_visual}\t{flag_validation}\t{flag_phenotype_match}\t{flag_summary}\t{json}\n".format( - **line, - flag_molecular=line["json"]["flag_molecular"], - flag_visual=line["json"]["flag_visual"], - flag_validation=line["json"]["flag_validation"], - flag_phenotype_match=line["json"]["flag_phenotype_match"], - flag_summary=line["json"]["flag_summary"], - ) + "case_uuid\tproject\tcase_name\tregion\tlost\tflag_molecular\tflag_visual\tflag_validation\tflag_phenotype_match\tflag_summary\tjson\n" ) - with open("orphaned_sm_comments.tsv", "w") as f: - f.write("case_uuid\tproject\tcase_name\tregion\tlost\tcomment\tjson\n") - for line in tsv_lines["sms"]["comments"]: - f.write( - "{case_uuid}\t{project}\t{case_name}\t{chromosome}:{start}-{end}\t{lost}\t{comment}\t{json}\n".format( - **line, - comment=line["json"]["comment"], + for line in orphans["svs"]["flags"]: + j = json.loads(line["json"]) + f.write( + 
"{case_uuid}\t{project}\t{case_name}\t{chromosome}:{start}-{end}\t{lost}\t{flag_molecular}\t{flag_visual}\t{flag_validation}\t{flag_phenotype_match}\t{flag_summary}\t{json}\n".format( + **line, + flag_molecular=j["flag_molecular"], + flag_visual=j["flag_visual"], + flag_validation=j["flag_validation"], + flag_phenotype_match=j["flag_phenotype_match"], + flag_summary=j["flag_summary"], + ) ) - ) - with open("orphaned_sm_acmg_ratings.tsv", "w") as f: - f.write("case_uuid\tproject\tcase_name\tregion\tlost\tjson\n") - for line in tsv_lines["sms"]["acmg_ratings"]: - f.write( - "{case_uuid}\t{project}\t{case_name}\t{chromosome}:{start}-{end}\t{lost}\t{json}\n".format( - **line + if orphans["svs"]["comments"]: + with open("orphans_sv_comments.tsv", "w") as f: + f.write("case_uuid\tproject\tcase_name\tregion\tlost\tcomment\tjson\n") + for line in orphans["svs"]["comments"]: + j = json.loads(line["json"]) + f.write( + "{case_uuid}\t{project}\t{case_name}\t{chromosome}:{start}-{end}\t{lost}\t{comment}\t{json}\n".format( + **line, + comment=j["text"], + ) ) + if orphans["sms"]["flags"]: + with open("orphans_sm_flags.tsv", "w") as f: + f.write( + "case_uuid\tproject\tcase_name\tregion\tlost\tflag_molecular\tflag_visual\tflag_validation\tflag_phenotype_match\tflag_summary\tjson\n" ) - - # else: - # task_create_queryresultset.delay( - # options["case_uuid"], options["project_uuid"], options["all"] - # ) - # msg = "Pushed creating the query set to background." + for line in orphans["sms"]["flags"]: + j = json.loads(line["json"]) + f.write( + "{case_uuid}\t{project}\t{case_name}\t{chromosome}:{start}-{end}\t{lost}\t{flag_molecular}\t{flag_visual}\t{flag_validation}\t{flag_phenotype_match}\t{flag_summary}\t{json}\n".format( + **line, + flag_molecular=j["flag_molecular"], + flag_visual=j["flag_visual"], + flag_validation=j["flag_validation"], + flag_phenotype_match=j["flag_phenotype_match"], + flag_summary=j["flag_summary"], + ) + ) + if orphans["sms"]["comments"]: + with open("orphans_sm_comments.tsv", "w") as f: + f.write("case_uuid\tproject\tcase_name\tregion\tlost\tcomment\tjson\n") + for line in orphans["sms"]["comments"]: + j = json.loads(line["json"]) + f.write( + "{case_uuid}\t{project}\t{case_name}\t{chromosome}:{start}-{end}\t{lost}\t{comment}\t{json}\n".format( + **line, + comment=j["text"], + ) + ) + if orphans["sms"]["acmg_ratings"]: + with open("orphans_sm_acmg_ratings.tsv", "w") as f: + f.write("case_uuid\tproject\tcase_name\tregion\tlost\tjson\n") + for line in orphans["sms"]["acmg_ratings"]: + f.write( + "{case_uuid}\t{project}\t{case_name}\t{chromosome}:{start}-{end}\t{lost}\t{json}\n".format( + **line + ) + ) + else: + task_create_queryresultset.delay( + options["case_uuid"], options["project_uuid"], options["all"] + ) + msg = "Pushed creating the query set to background." 
self.stdout.write(self.style.SUCCESS(msg)) if msg_warning: diff --git a/utils/kickoff_orphaned_annotation_query.py b/utils/kickoff_orphaned_annotation_query.py index 84fd0ccce..453a84af0 100644 --- a/utils/kickoff_orphaned_annotation_query.py +++ b/utils/kickoff_orphaned_annotation_query.py @@ -80,6 +80,10 @@ def run_query(config, settings_endpoint, query_endpoint, case_uuid, case_orphans return "" response_json = response.json() response_json["query_settings"]["genomic_region"] = case_orphans + if "genotype_criteria" in response_json["query_settings"]: + for criteria in response_json["query_settings"]["genotype_criteria"]: + criteria["gt_one_of"].append(".") + criteria["gt_one_of"].append("./.") url = query_endpoint.format(case_uuid=case_uuid) response = connect_endpoint(config, url, data=response_json) if not response: diff --git a/variants/tests/test_utils.py b/variants/tests/test_utils.py index f5163d5cb..0fae36c3f 100644 --- a/variants/tests/test_utils.py +++ b/variants/tests/test_utils.py @@ -53,11 +53,7 @@ def create_expected_skeleton_sm(): # Set expectations expected_count = { - "added": { - "comments": 0, - "flags": 0, - "acmg_ratings": 0, - }, + "added": 0, "removed": 0, "salvable": { "comments": 0, @@ -72,21 +68,18 @@ def create_expected_skeleton_sm(): } expected_salvable = [] expected_duplicates = [] - expected_tsv_lines = { + expected_orphans = { "flags": [], "comments": [], "acmg_ratings": [], } - return expected_count, expected_salvable, expected_duplicates, expected_tsv_lines + return expected_count, expected_salvable, expected_duplicates, expected_orphans def create_expected_skeleton_sv(): # Set expectations expected_count = { - "added": { - "comments": 0, - "flags": 0, - }, + "added": 0, "removed": 0, "salvable": { "comments": 0, @@ -99,44 +92,42 @@ def create_expected_skeleton_sv(): } expected_salvable = [] expected_duplicates = [] - expected_tsv_lines = { + expected_orphans = { "flags": [], "comments": [], } - return expected_count, expected_salvable, expected_duplicates, expected_tsv_lines + return expected_count, expected_salvable, expected_duplicates, expected_orphans def create_expected_skeleton(uuids=None): # Set expectations if uuids is None: uuids = [] - expected_count_sm, _, _, expected_tsv_lines_sm = create_expected_skeleton_sm() - expected_count_sv, _, _, expected_tsv_lines_sv = create_expected_skeleton_sv() + ( + expected_count_sm, + _, + expected_duplicates_sm, + expected_orphans_sm, + ) = create_expected_skeleton_sm() + ( + expected_count_sv, + _, + expected_duplicates_sv, + expected_orphans_sv, + ) = create_expected_skeleton_sv() expected_count = { "svqueryresultset": 0, "smallvariantqueryresultset": 0, "svs": expected_count_sv, "sms": expected_count_sm, } - expected_salvable = { - str(uuid): { - "sms": create_expected_skeleton_sm()[1], - "svs": create_expected_skeleton_sv()[1], - } - for uuid in uuids - } - expected_duplicates = { - str(uuid): { - "sms": create_expected_skeleton_sm()[2], - "svs": create_expected_skeleton_sv()[2], - } - for uuid in uuids + expected_salvable = {} + expected_duplicates = {"sms": expected_duplicates_sm, "svs": expected_duplicates_sv} + expected_orphans = { + "sms": expected_orphans_sm, + "svs": expected_orphans_sv, } - expected_tsv_lines = { - "sms": expected_tsv_lines_sm, - "svs": expected_tsv_lines_sv, - } - return expected_count, expected_salvable, expected_duplicates, expected_tsv_lines + return expected_count, expected_salvable, expected_duplicates, expected_orphans class TestCreateQueryResultSet(TestCase): @@ -172,13 
+163,13 @@ def test_no_resultset_to_create(self): ) # Run - count, salvable, duplicates, tsv_lines = create_queryresultset(all=True) + count, salvable, duplicates, orphans = create_queryresultset(all=True) ( expected_count, expected_salvable, expected_duplicates, - expected_tsv_lines, + expected_orphans, ) = create_expected_skeleton( [ self.project1_case1.sodar_uuid, @@ -195,7 +186,7 @@ def test_no_resultset_to_create(self): self.assertEqual(count, expected_count) self.assertEqual(salvable, expected_salvable) self.assertEqual(duplicates, expected_duplicates) - self.assertEqual(tsv_lines, expected_tsv_lines) + self.assertEqual(orphans, expected_orphans) self.assertEqual( self.project1_case1.smallvariantqueryresultset_set.filter( smallvariantquery=None @@ -229,7 +220,7 @@ def test_no_resultset_to_create(self): def test_create_result_sets_for_case(self): # Run - count, salvable, duplicates, tsv_lines = create_queryresultset( + count, salvable, duplicates, orphans = create_queryresultset( case_uuid=self.project1_case1.sodar_uuid ) @@ -238,7 +229,7 @@ def test_create_result_sets_for_case(self): expected_count, expected_salvable, expected_duplicates, - expected_tsv_lines, + expected_orphans, ) = create_expected_skeleton( [ self.project1_case1.sodar_uuid, @@ -256,7 +247,7 @@ def test_create_result_sets_for_case(self): self.assertEqual(count, expected_count) self.assertEqual(salvable, expected_salvable) self.assertEqual(duplicates, expected_duplicates) - self.assertEqual(tsv_lines, expected_tsv_lines) + self.assertEqual(orphans, expected_orphans) self.assertEqual( self.project1_case1.smallvariantqueryresultset_set.filter( smallvariantquery=None @@ -285,7 +276,7 @@ def test_create_small_variant_result_sets_for_case(self): svqueryresultset11 = SvQueryResultSetFactory(case=self.project1_case1, svquery=None) # Run - count, salvable, duplicates, tsv_lines = create_queryresultset( + count, salvable, duplicates, orphans = create_queryresultset( case_uuid=self.project1_case1.sodar_uuid ) @@ -294,7 +285,7 @@ def test_create_small_variant_result_sets_for_case(self): expected_count, expected_salvable, expected_duplicates, - expected_tsv_lines, + expected_orphans, ) = create_expected_skeleton( [ self.project1_case1.sodar_uuid, @@ -311,7 +302,7 @@ def test_create_small_variant_result_sets_for_case(self): self.assertEqual(count, expected_count) self.assertEqual(salvable, expected_salvable) self.assertEqual(duplicates, expected_duplicates) - self.assertEqual(tsv_lines, expected_tsv_lines) + self.assertEqual(orphans, expected_orphans) self.assertEqual( self.project1_case1.smallvariantqueryresultset_set.filter( smallvariantquery=None @@ -342,7 +333,7 @@ def test_create_structural_variant_result_sets_for_case(self): ) # Run - count, salvable, duplicates, tsv_lines = create_queryresultset( + count, salvable, duplicates, orphans = create_queryresultset( case_uuid=self.project1_case1.sodar_uuid ) @@ -351,7 +342,7 @@ def test_create_structural_variant_result_sets_for_case(self): expected_count, expected_salvable, expected_duplicates, - expected_tsv_lines, + expected_orphans, ) = create_expected_skeleton( [ self.project1_case1.sodar_uuid, @@ -368,7 +359,7 @@ def test_create_structural_variant_result_sets_for_case(self): self.assertEqual(count, expected_count) self.assertEqual(salvable, expected_salvable) self.assertEqual(duplicates, expected_duplicates) - self.assertEqual(tsv_lines, expected_tsv_lines) + self.assertEqual(orphans, expected_orphans) self.assertEqual( 
self.project1_case1.smallvariantqueryresultset_set.filter( smallvariantquery=None @@ -394,7 +385,7 @@ def test_create_structural_variant_result_sets_for_case(self): def test_create_result_sets_for_project(self): # Run - count, salvable, duplicates, tsv_lines = create_queryresultset( + count, salvable, duplicates, orphans = create_queryresultset( project_uuid=self.project1.sodar_uuid ) @@ -403,7 +394,7 @@ def test_create_result_sets_for_project(self): expected_count, expected_salvable, expected_duplicates, - expected_tsv_lines, + expected_orphans, ) = create_expected_skeleton( [ self.project1_case1.sodar_uuid, @@ -422,7 +413,7 @@ def test_create_result_sets_for_project(self): self.assertEqual(count, expected_count) self.assertEqual(salvable, expected_salvable) self.assertEqual(duplicates, expected_duplicates) - self.assertEqual(tsv_lines, expected_tsv_lines) + self.assertEqual(orphans, expected_orphans) self.assertEqual( self.project1_case1.smallvariantqueryresultset_set.filter( smallvariantquery=None @@ -458,7 +449,7 @@ def test_create_result_sets_for_project_partly(self): ) # Run - count, salvable, duplicates, tsv_lines = create_queryresultset( + count, salvable, duplicates, orphans = create_queryresultset( project_uuid=self.project1.sodar_uuid ) @@ -467,7 +458,7 @@ def test_create_result_sets_for_project_partly(self): expected_count, expected_salvable, expected_duplicates, - expected_tsv_lines, + expected_orphans, ) = create_expected_skeleton( [ self.project1_case1.sodar_uuid, @@ -486,7 +477,7 @@ def test_create_result_sets_for_project_partly(self): self.assertEqual(count, expected_count) self.assertEqual(salvable, expected_salvable) self.assertEqual(duplicates, expected_duplicates) - self.assertEqual(tsv_lines, expected_tsv_lines) + self.assertEqual(orphans, expected_orphans) self.assertEqual( self.project1_case1.smallvariantqueryresultset_set.filter( smallvariantquery=None @@ -516,14 +507,14 @@ def test_create_result_sets_for_project_partly(self): def test_create_result_sets_for_all(self): # Run - count, salvable, duplicates, tsv_lines = create_queryresultset(all=True) + count, salvable, duplicates, orphans = create_queryresultset(all=True) # Set expectations ( expected_count, expected_salvable, expected_duplicates, - expected_tsv_lines, + expected_orphans, ) = create_expected_skeleton( [ self.project1_case1.sodar_uuid, @@ -543,7 +534,7 @@ def test_create_result_sets_for_all(self): self.assertEqual(count, expected_count) self.assertEqual(salvable, expected_salvable) self.assertEqual(duplicates, expected_duplicates) - self.assertEqual(tsv_lines, expected_tsv_lines) + self.assertEqual(orphans, expected_orphans) self.assertEqual( self.project1_case1.smallvariantqueryresultset_set.filter( smallvariantquery=None @@ -633,14 +624,14 @@ def test_create_sets_fill_annotations_simple(self): ) # Run - count, salvable, duplicates, tsv_lines = create_queryresultset(all=True) + count, salvable, duplicates, orphans = create_queryresultset(all=True) # Set expectations ( expected_count, expected_salvable, expected_duplicates, - expected_tsv_lines, + expected_orphans, ) = create_expected_skeleton( [ self.project1_case1.sodar_uuid, @@ -650,8 +641,8 @@ def test_create_sets_fill_annotations_simple(self): ) expected_count["smallvariantqueryresultset"] = 3 expected_count["svqueryresultset"] = 3 - expected_count["sms"]["added"]["flags"] = 1 - expected_count["svs"]["added"]["flags"] = 1 + expected_count["sms"]["added"] = 1 + expected_count["svs"]["added"] = 1 # Refresh self.project1_case1.refresh_from_db() @@ 
-662,7 +653,7 @@ def test_create_sets_fill_annotations_simple(self): self.assertEqual(count, expected_count) self.assertEqual(salvable, expected_salvable) self.assertEqual(duplicates, expected_duplicates) - self.assertEqual(tsv_lines, expected_tsv_lines) + self.assertEqual(orphans, expected_orphans) self.assertNotEqual( self.project1_case1.smallvariantqueryresultset_set.filter(smallvariantquery=None) .first() @@ -746,14 +737,14 @@ def test_create_sets_sm_annotations_complete(self): ) # Run - count, salvable, duplicates, tsv_lines = create_queryresultset(all=True) + count, salvable, duplicates, orphans = create_queryresultset(all=True) # Set expectations ( expected_count, expected_salvable, expected_duplicates, - expected_tsv_lines, + expected_orphans, ) = create_expected_skeleton( [ self.project1_case1.sodar_uuid, @@ -763,14 +754,15 @@ def test_create_sets_sm_annotations_complete(self): ) expected_count["smallvariantqueryresultset"] = 2 expected_count["svqueryresultset"] = 3 - expected_count["sms"]["added"]["flags"] = 1 + expected_count["sms"]["added"] = 1 expected_count["sms"]["removed"] = 1 expected_count["sms"]["salvable"]["flags"] = 1 expected_count["sms"]["lost"]["flags"] = 1 - expected_salvable[str(self.project1_case1.sodar_uuid)]["sms"] = [ - "{chromosome}:{start}-{end}".format(**sm_flag_salvable.__dict__) - ] - expected_tsv_lines["sms"]["flags"] = [ + expected_salvable[str(self.project1_case1.sodar_uuid)] = { + "sms": ["{chromosome}:{start}-{end}".format(**sm_flag_salvable.__dict__)], + "svs": [], + } + expected_orphans["sms"]["flags"] = [ { "case_uuid": str(self.project1_case1.sodar_uuid), "case_name": self.project1_case1.name, @@ -804,7 +796,7 @@ def test_create_sets_sm_annotations_complete(self): self.assertEqual(count, expected_count) self.assertEqual(salvable, expected_salvable) self.assertEqual(duplicates, expected_duplicates) - self.assertEqual(tsv_lines, expected_tsv_lines) + self.assertEqual(orphans, expected_orphans) self.assertEqual( self.project1_case1.smallvariantqueryresultset_set.get( smallvariantquery=None @@ -879,19 +871,19 @@ def test_sms_nothing_to_fill(self): ) # Run - count, salvable, duplicates, tsv_lines = fill_sm_queryresultset(case_result_set) + count, salvable, duplicates, orphans = fill_sm_queryresultset(case_result_set) # Set expectations ( expected_count, expected_salvable, expected_duplicates, - expected_tsv_lines, + expected_orphans, ) = create_expected_skeleton_sm() self.assertEqual(count, expected_count) self.assertEqual(salvable, expected_salvable) self.assertEqual(duplicates, expected_duplicates) - self.assertEqual(tsv_lines, expected_tsv_lines) + self.assertEqual(orphans, expected_orphans) def test_sms_fill_comment_flags_acmg(self): # Prepare @@ -957,7 +949,7 @@ def test_sms_fill_comment_flags_acmg(self): ) self.assertEqual(case_result_set.smallvariantqueryresultrow_set.count(), 0) - count, salvable, duplicates, tsv_lines = fill_sm_queryresultset(case_result_set) + count, salvable, duplicates, orphans = fill_sm_queryresultset(case_result_set) # Refresh case_result_set.refresh_from_db() @@ -967,13 +959,13 @@ def test_sms_fill_comment_flags_acmg(self): expected_count, expected_salvable, expected_duplicates, - expected_tsv_lines, + expected_orphans, ) = create_expected_skeleton_sm() - expected_count["added"]["flags"] = 2 + expected_count["added"] = 2 self.assertEqual(count, expected_count) self.assertEqual(salvable, expected_salvable) self.assertEqual(duplicates, expected_duplicates) - self.assertEqual(tsv_lines, expected_tsv_lines) + 
self.assertEqual(orphans, expected_orphans) self.assertEqual(case_result_set.smallvariantqueryresultrow_set.count(), 2) self.assertNotEqual( case_result_set.smallvariantqueryresultrow_set.all().order_by("id")[0], row1 @@ -1005,18 +997,18 @@ def test_sms_salvable_no_query(self): ) # Run - count, salvable, duplicates, tsv_lines = fill_sm_queryresultset(case_result_set) + count, salvable, duplicates, orphans = fill_sm_queryresultset(case_result_set) # Set expectations ( expected_count, expected_salvable, expected_duplicates, - expected_tsv_lines, + expected_orphans, ) = create_expected_skeleton_sm() expected_count["salvable"]["flags"] = 1 expected_salvable = ["{chromosome}:{start}-{end}".format(**flag.__dict__)] - expected_tsv_lines["flags"] = [ + expected_orphans["flags"] = [ { "case_uuid": str(self.case1.sodar_uuid), "case_name": self.case1.name, @@ -1031,7 +1023,7 @@ def test_sms_salvable_no_query(self): self.assertEqual(count, expected_count) self.assertEqual(salvable, expected_salvable) self.assertEqual(duplicates, expected_duplicates) - self.assertEqual(tsv_lines, expected_tsv_lines) + self.assertEqual(orphans, expected_orphans) self.assertEqual(case_result_set.smallvariantqueryresultrow_set.count(), 0) def test_sms_salvable_no_row(self): @@ -1053,18 +1045,18 @@ def test_sms_salvable_no_row(self): alternative=small_vars[0].alternative, ) - count, salvable, duplicates, tsv_lines = fill_sm_queryresultset(case_result_set) + count, salvable, duplicates, orphans = fill_sm_queryresultset(case_result_set) # Set expectations ( expected_count, expected_salvable, expected_duplicates, - expected_tsv_lines, + expected_orphans, ) = create_expected_skeleton_sm() expected_count["salvable"]["flags"] = 1 expected_salvable = ["{chromosome}:{start}-{end}".format(**flag.__dict__)] - expected_tsv_lines["flags"] = [ + expected_orphans["flags"] = [ { "case_uuid": str(self.case1.sodar_uuid), "case_name": self.case1.name, @@ -1079,7 +1071,7 @@ def test_sms_salvable_no_row(self): self.assertEqual(count, expected_count) self.assertEqual(salvable, expected_salvable) self.assertEqual(duplicates, expected_duplicates) - self.assertEqual(tsv_lines, expected_tsv_lines) + self.assertEqual(orphans, expected_orphans) self.assertEqual(case_result_set.smallvariantqueryresultrow_set.count(), 0) def test_sms_lost_no_var_with_query(self): @@ -1101,7 +1093,7 @@ def test_sms_lost_no_var_with_query(self): ) # Run - count, salvable, duplicates, tsv_lines = fill_sm_queryresultset(case_result_set) + count, salvable, duplicates, orphans = fill_sm_queryresultset(case_result_set) # Refresh case_result_set.refresh_from_db() @@ -1111,10 +1103,10 @@ def test_sms_lost_no_var_with_query(self): expected_count, expected_salvable, expected_duplicates, - expected_tsv_lines, + expected_orphans, ) = create_expected_skeleton_sm() expected_count["lost"]["flags"] = 1 - expected_tsv_lines["flags"] = [ + expected_orphans["flags"] = [ { "case_uuid": str(self.case1.sodar_uuid), "case_name": self.case1.name, @@ -1129,7 +1121,7 @@ def test_sms_lost_no_var_with_query(self): self.assertEqual(count, expected_count) self.assertEqual(salvable, expected_salvable) self.assertEqual(duplicates, expected_duplicates) - self.assertEqual(tsv_lines, expected_tsv_lines) + self.assertEqual(orphans, expected_orphans) self.assertEqual(case_result_set.smallvariantqueryresultrow_set.count(), 0) def test_sms_lost_no_var_no_query_no_caseresultrow(self): @@ -1139,7 +1131,7 @@ def test_sms_lost_no_var_no_query_no_caseresultrow(self): flag = 
SmallVariantFlagsFactory(case=self.case1) # Run - count, salvable, duplicates, tsv_lines = fill_sm_queryresultset(case_result_set) + count, salvable, duplicates, orphans = fill_sm_queryresultset(case_result_set) # Refresh case_result_set.refresh_from_db() @@ -1149,10 +1141,10 @@ def test_sms_lost_no_var_no_query_no_caseresultrow(self): expected_count, expected_salvable, expected_duplicates, - expected_tsv_lines, + expected_orphans, ) = create_expected_skeleton_sm() expected_count["lost"]["flags"] = 1 - expected_tsv_lines["flags"] = [ + expected_orphans["flags"] = [ { "case_uuid": str(self.case1.sodar_uuid), "case_name": self.case1.name, @@ -1167,7 +1159,7 @@ def test_sms_lost_no_var_no_query_no_caseresultrow(self): self.assertEqual(count, expected_count) self.assertEqual(salvable, expected_salvable) self.assertEqual(duplicates, expected_duplicates) - self.assertEqual(tsv_lines, expected_tsv_lines) + self.assertEqual(orphans, expected_orphans) self.assertEqual(case_result_set.smallvariantqueryresultrow_set.count(), 0) def test_sms_lost_no_var_no_query_with_caseresultrow(self): @@ -1186,18 +1178,18 @@ def test_sms_lost_no_var_no_query_with_caseresultrow(self): ) # Run - count, salvable, duplicates, tsv_lines = fill_sm_queryresultset(case_result_set) + count, salvable, duplicates, orphans = fill_sm_queryresultset(case_result_set) # Set expectations ( expected_count, expected_salvable, expected_duplicates, - expected_tsv_lines, + expected_orphans, ) = create_expected_skeleton_sm() expected_count["lost"]["flags"] = 1 expected_count["removed"] = 1 - expected_tsv_lines["flags"] = [ + expected_orphans["flags"] = [ { "case_uuid": str(self.case1.sodar_uuid), "case_name": self.case1.name, @@ -1212,7 +1204,7 @@ def test_sms_lost_no_var_no_query_with_caseresultrow(self): self.assertEqual(count, expected_count) self.assertEqual(salvable, expected_salvable) self.assertEqual(duplicates, expected_duplicates) - self.assertEqual(tsv_lines, expected_tsv_lines) + self.assertEqual(orphans, expected_orphans) self.assertEqual(case_result_set.smallvariantqueryresultrow_set.count(), 0) @@ -1258,19 +1250,19 @@ def test_svs_nothing_to_fill(self): ) # Run - count, salvable, duplicates, tsv_lines = fill_sv_queryresultset(case_result_set) + count, salvable, duplicates, orphans = fill_sv_queryresultset(case_result_set) # Set expectations ( expected_count, expected_salvable, expected_duplicates, - expected_tsv_lines, + expected_orphans, ) = create_expected_skeleton_sv() self.assertEqual(count, expected_count) self.assertEqual(salvable, expected_salvable) self.assertEqual(duplicates, expected_duplicates) - self.assertEqual(tsv_lines, expected_tsv_lines) + self.assertEqual(orphans, expected_orphans) def test_svs_fill_comment_flags(self): # Prepare @@ -1326,20 +1318,20 @@ def test_svs_fill_comment_flags(self): ) self.assertEqual(case_result_set.svqueryresultrow_set.count(), 0) - count, salvable, duplicates, tsv_lines = fill_sv_queryresultset(case_result_set) + count, salvable, duplicates, orphans = fill_sv_queryresultset(case_result_set) # Set expectations ( expected_count, expected_salvable, expected_duplicates, - expected_tsv_lines, + expected_orphans, ) = create_expected_skeleton_sv() - expected_count["added"]["flags"] = 2 + expected_count["added"] = 2 self.assertEqual(count, expected_count) self.assertEqual(salvable, expected_salvable) self.assertEqual(duplicates, expected_duplicates) - self.assertEqual(tsv_lines, expected_tsv_lines) + self.assertEqual(orphans, expected_orphans) 
self.assertEqual(case_result_set.svqueryresultrow_set.count(), 2) self.assertNotEqual(case_result_set.svqueryresultrow_set.all().order_by("id")[0], row1) self.assertEqual( @@ -1365,18 +1357,18 @@ def test_svs_salvable_no_query(self): sv_sub_type=svs[0].sv_sub_type, ) - count, salvable, duplicates, tsv_lines = fill_sv_queryresultset(case_result_set) + count, salvable, duplicates, orphans = fill_sv_queryresultset(case_result_set) # Set expectations ( expected_count, expected_salvable, expected_duplicates, - expected_tsv_lines, + expected_orphans, ) = create_expected_skeleton_sv() expected_count["salvable"]["flags"] = 1 expected_salvable = ["{chromosome}:{start}-{end}".format(**flag.__dict__)] - expected_tsv_lines["flags"] = [ + expected_orphans["flags"] = [ { "case_uuid": str(self.case1.sodar_uuid), "case_name": self.case1.name, @@ -1391,7 +1383,7 @@ def test_svs_salvable_no_query(self): self.assertEqual(count, expected_count) self.assertEqual(salvable, expected_salvable) self.assertEqual(duplicates, expected_duplicates) - self.assertEqual(tsv_lines, expected_tsv_lines) + self.assertEqual(orphans, expected_orphans) self.assertEqual(case_result_set.svqueryresultrow_set.count(), 0) def test_svs_salvable_no_row(self): @@ -1411,18 +1403,18 @@ def test_svs_salvable_no_row(self): sv_sub_type=svs[0].sv_sub_type, ) - count, salvable, duplicates, tsv_lines = fill_sv_queryresultset(case_result_set) + count, salvable, duplicates, orphans = fill_sv_queryresultset(case_result_set) # Set expectations ( expected_count, expected_salvable, expected_duplicates, - expected_tsv_lines, + expected_orphans, ) = create_expected_skeleton_sv() expected_count["salvable"]["flags"] = 1 expected_salvable = ["{chromosome}:{start}-{end}".format(**flag.__dict__)] - expected_tsv_lines["flags"] = [ + expected_orphans["flags"] = [ { "case_uuid": str(self.case1.sodar_uuid), "case_name": self.case1.name, @@ -1437,7 +1429,7 @@ def test_svs_salvable_no_row(self): self.assertEqual(count, expected_count) self.assertEqual(salvable, expected_salvable) self.assertEqual(duplicates, expected_duplicates) - self.assertEqual(tsv_lines, expected_tsv_lines) + self.assertEqual(orphans, expected_orphans) self.assertEqual(case_result_set.svqueryresultrow_set.count(), 0) def test_svs_lost_no_var_with_query(self): @@ -1458,17 +1450,17 @@ def test_svs_lost_no_var_with_query(self): ) # Run - count, salvable, duplicates, tsv_lines = fill_sv_queryresultset(case_result_set) + count, salvable, duplicates, orphans = fill_sv_queryresultset(case_result_set) # Set expectations ( expected_count, expected_salvable, expected_duplicates, - expected_tsv_lines, + expected_orphans, ) = create_expected_skeleton_sv() expected_count["lost"]["flags"] = 1 - expected_tsv_lines["flags"] = [ + expected_orphans["flags"] = [ { "case_uuid": str(self.case1.sodar_uuid), "case_name": self.case1.name, @@ -1483,7 +1475,7 @@ def test_svs_lost_no_var_with_query(self): self.assertEqual(count, expected_count) self.assertEqual(salvable, expected_salvable) self.assertEqual(duplicates, expected_duplicates) - self.assertEqual(tsv_lines, expected_tsv_lines) + self.assertEqual(orphans, expected_orphans) self.assertEqual(case_result_set.svqueryresultrow_set.count(), 0) def test_svs_lost_no_var_no_query_no_caseresultrow(self): @@ -1493,17 +1485,17 @@ def test_svs_lost_no_var_no_query_no_caseresultrow(self): flag = StructuralVariantFlagsFactory(case=self.case1) # Run - count, salvable, duplicates, tsv_lines = fill_sv_queryresultset(case_result_set) + count, salvable, duplicates, orphans = 
fill_sv_queryresultset(case_result_set) # Set expectations ( expected_count, expected_salvable, expected_duplicates, - expected_tsv_lines, + expected_orphans, ) = create_expected_skeleton_sv() expected_count["lost"]["flags"] = 1 - expected_tsv_lines["flags"] = [ + expected_orphans["flags"] = [ { "case_uuid": str(self.case1.sodar_uuid), "case_name": self.case1.name, @@ -1518,7 +1510,7 @@ def test_svs_lost_no_var_no_query_no_caseresultrow(self): self.assertEqual(count, expected_count) self.assertEqual(salvable, expected_salvable) self.assertEqual(duplicates, expected_duplicates) - self.assertEqual(tsv_lines, expected_tsv_lines) + self.assertEqual(orphans, expected_orphans) self.assertEqual(case_result_set.svqueryresultrow_set.count(), 0) def test_svs_lost_no_var_no_query_with_caseresultrow(self): @@ -1537,18 +1529,18 @@ def test_svs_lost_no_var_no_query_with_caseresultrow(self): ) # Run - count, salvable, duplicates, tsv_lines = fill_sv_queryresultset(case_result_set) + count, salvable, duplicates, orphans = fill_sv_queryresultset(case_result_set) # Set expectations ( expected_count, expected_salvable, expected_duplicates, - expected_tsv_lines, + expected_orphans, ) = create_expected_skeleton_sv() expected_count["lost"]["flags"] = 1 expected_count["removed"] = 1 - expected_tsv_lines["flags"] = [ + expected_orphans["flags"] = [ { "case_uuid": str(self.case1.sodar_uuid), "case_name": self.case1.name, @@ -1563,7 +1555,7 @@ def test_svs_lost_no_var_no_query_with_caseresultrow(self): self.assertEqual(count, expected_count) self.assertEqual(salvable, expected_salvable) self.assertEqual(duplicates, expected_duplicates) - self.assertEqual(tsv_lines, expected_tsv_lines) + self.assertEqual(orphans, expected_orphans) self.assertEqual(SvQueryResultRow.objects.count(), 0) def test_svs_var_overlap_above_80(self): @@ -1594,20 +1586,20 @@ def test_svs_var_overlap_above_80(self): ) self.assertEqual(case_result_set.svqueryresultrow_set.count(), 0) - count, salvable, duplicates, tsv_lines = fill_sv_queryresultset(case_result_set) + count, salvable, duplicates, orphans = fill_sv_queryresultset(case_result_set) # Set expectations ( expected_count, expected_salvable, expected_duplicates, - expected_tsv_lines, + expected_orphans, ) = create_expected_skeleton_sv() - expected_count["added"]["flags"] = 1 + expected_count["added"] = 1 self.assertEqual(count, expected_count) self.assertEqual(salvable, expected_salvable) self.assertEqual(duplicates, expected_duplicates) - self.assertEqual(tsv_lines, expected_tsv_lines) + self.assertEqual(orphans, expected_orphans) self.assertEqual(case_result_set.svqueryresultrow_set.count(), 1) self.assertNotEqual(case_result_set.svqueryresultrow_set.all().order_by("id")[0], row) self.assertEqual( @@ -1642,17 +1634,17 @@ def test_svs_var_overlap_below_80(self): ) self.assertEqual(case_result_set.svqueryresultrow_set.count(), 0) - count, salvable, duplicates, tsv_lines = fill_sv_queryresultset(case_result_set) + count, salvable, duplicates, orphans = fill_sv_queryresultset(case_result_set) # Set expectations ( expected_count, expected_salvable, expected_duplicates, - expected_tsv_lines, + expected_orphans, ) = create_expected_skeleton_sv() expected_count["lost"]["flags"] = 1 - expected_tsv_lines["flags"] = [ + expected_orphans["flags"] = [ { "case_uuid": str(self.case1.sodar_uuid), "case_name": self.case1.name, @@ -1667,7 +1659,7 @@ def test_svs_var_overlap_below_80(self): self.assertEqual(count, expected_count) self.assertEqual(salvable, expected_salvable) self.assertEqual(duplicates, 
expected_duplicates) - self.assertEqual(tsv_lines, expected_tsv_lines) + self.assertEqual(orphans, expected_orphans) self.assertEqual(case_result_set.svqueryresultrow_set.count(), 0) diff --git a/variants/utils.py b/variants/utils.py index c07b64a11..4d8707334 100644 --- a/variants/utils.py +++ b/variants/utils.py @@ -39,10 +39,7 @@ def create_queryresultset(case_uuid=None, project_uuid=None, all=False): "svqueryresultset": 0, # SvQueryResultSet's created "smallvariantqueryresultset": 0, # SmallVariantQueryResultSet's created "svs": { - "added": { - "comments": 0, - "flags": 0, - }, # user annotations added to SvQueryResultSet's + "added": 0, # user annotations added to SvQueryResultSet's "removed": 0, # user annotations without corresponding query result row "salvable": { "comments": 0, @@ -54,11 +51,7 @@ def create_queryresultset(case_uuid=None, project_uuid=None, all=False): }, # user annotations without corresponding query result row }, "sms": { - "added": { - "comments": 0, - "flags": 0, - "acmg_ratings": 0, - }, + "added": 0, "removed": 0, "salvable": { "comments": 0, @@ -73,8 +66,11 @@ def create_queryresultset(case_uuid=None, project_uuid=None, all=False): }, } salvable = {} - duplicates = {} - tsv_lines = { + duplicates = { + "svs": [], + "sms": [], + } + orphans = { "svs": { "flags": [], "comments": [], @@ -125,29 +121,33 @@ def _perform_create(_case): return _sm_result_set, _sv_result_set def _perform_fill(sm_result_set, sv_result_set): - sm_count, sm_salvable, sm_duplicates, sm_tsv_lines = fill_sm_queryresultset(sm_result_set) - sv_count, sv_salvable, sv_duplicates, sv_tsv_lines = fill_sv_queryresultset(sv_result_set) - for i in ("added", "salvable", "lost"): + sm_count, sm_salvable, sm_duplicates, sm_orphans = fill_sm_queryresultset(sm_result_set) + sv_count, sv_salvable, sv_duplicates, sv_orphans = fill_sv_queryresultset(sv_result_set) + for i in ( + "salvable", + "lost", + ): count["svs"][i]["flags"] += sv_count[i]["flags"] count["svs"][i]["comments"] += sv_count[i]["comments"] count["sms"][i]["flags"] += sm_count[i]["flags"] count["sms"][i]["comments"] += sm_count[i]["comments"] count["sms"][i]["acmg_ratings"] += sm_count[i]["acmg_ratings"] + count["svs"]["added"] += sv_count["added"] + count["sms"]["added"] += sm_count["added"] count["svs"]["removed"] += sv_count["removed"] count["sms"]["removed"] += sm_count["removed"] - salvable[str(sm_result_set.case.sodar_uuid)] = { - "sms": list(set(sm_salvable)), - "svs": list(set(sv_salvable)), - } - duplicates[str(sm_result_set.case.sodar_uuid)] = { - "sms": sm_duplicates, - "svs": sv_duplicates, - } - tsv_lines["sms"]["flags"].extend(sm_tsv_lines["flags"]) - tsv_lines["sms"]["comments"].extend(sm_tsv_lines["comments"]) - tsv_lines["sms"]["acmg_ratings"].extend(sm_tsv_lines["acmg_ratings"]) - tsv_lines["svs"]["flags"].extend(sv_tsv_lines["flags"]) - tsv_lines["svs"]["comments"].extend(sv_tsv_lines["comments"]) + if sm_salvable or sv_salvable: + salvable[str(sm_result_set.case.sodar_uuid)] = { + "sms": list(set(sm_salvable)), + "svs": list(set(sv_salvable)), + } + duplicates["sms"].extend(sm_duplicates) + duplicates["svs"].extend(sv_duplicates) + orphans["sms"]["flags"].extend(sm_orphans["flags"]) + orphans["sms"]["comments"].extend(sm_orphans["comments"]) + orphans["sms"]["acmg_ratings"].extend(sm_orphans["acmg_ratings"]) + orphans["svs"]["flags"].extend(sv_orphans["flags"]) + orphans["svs"]["comments"].extend(sv_orphans["comments"]) def _perform_clear(sm_result_set, sv_result_set): count["sms"]["removed"] += 
clear_sm_queryresultset(sm_result_set)
@@ -171,18 +171,14 @@ def _handle_case(_case):
         for _case in Case.objects.all():
             _handle_case(_case)
 
-    return count, salvable, duplicates, tsv_lines
+    return count, salvable, duplicates, orphans
 
 
 def fill_sm_queryresultset(result_set):
     """Fill a SmallVariantQueryResultSet for the given case or project."""
     case = result_set.case
     count = {
-        "added": {
-            "flags": 0,
-            "comments": 0,
-            "acmg_ratings": 0,
-        },
+        "added": 0,
         "removed": 0,
         "salvable": {
             "comments": 0,
@@ -197,7 +193,7 @@ def fill_sm_queryresultset(result_set):
     }
     salvable = []
     duplicates = []
-    tsv_lines = {
+    orphans = {
         "flags": [],
         "comments": [],
         "acmg_ratings": [],
@@ -231,7 +227,7 @@ def _perform_create(obj):
                 count["lost"][obj_type] += 1
                 from variants.views import UUIDEncoder
 
-                tsv_lines[obj_type].append(
+                orphans[obj_type].append(
                     {
                         "case_uuid": str(case.sodar_uuid),
                         "case_name": case.name,
@@ -261,7 +257,7 @@ def _perform_create(obj):
                     result_row.sodar_uuid = uuid.uuid4()
                     result_row.smallvariantqueryresultset = result_set
                     result_row.save()
-                    count["added"][obj_type] += 1
+                    count["added"] += 1
                     break
             else:
                 # should exist as it was annotated.
@@ -269,7 +265,7 @@ def _perform_create(obj):
                 salvable.append("{chromosome}:{start}-{end}".format(**coords))
                 from variants.views import UUIDEncoder
 
-                tsv_lines[obj_type].append(
+                orphans[obj_type].append(
                     {
                         "case_uuid": str(case.sodar_uuid),
                         "case_name": case.name,
@@ -284,7 +280,22 @@ def _perform_create(obj):
                 }
             )
         elif result_rows.count() > 1:
-            duplicates.append(result_rows)
+            from variants.views import UUIDEncoder
+
+            for result_row in result_rows:
+                duplicates.append(
+                    {
+                        "case_uuid": str(case.sodar_uuid),
+                        "case_name": case.name,
+                        "project": case.project.full_title,
+                        "chromosome": result_row.chromosome,
+                        "start": result_row.start,
+                        "end": result_row.end,
+                        "json": json.dumps(
+                            model_to_dict(result_row, exclude=("id",)), cls=UUIDEncoder
+                        ),
+                    }
+                )
 
     result_set.result_row_count = result_set.smallvariantqueryresultrow_set.count()
     result_set.save()
@@ -296,7 +309,7 @@ def _perform_create(obj):
     for obj in chain(sm_flags, sm_comments, acmg_rating):
         _perform_create(obj)
 
-    return count, salvable, duplicates, tsv_lines
+    return count, salvable, duplicates, orphans
 
 
 def fill_sv_queryresultset(result_set):
@@ -304,10 +317,7 @@ def fill_sv_queryresultset(result_set):
     case = result_set.case
 
     count = {
-        "added": {
-            "flags": 0,
-            "comments": 0,
-        },
+        "added": 0,
         "removed": 0,
         "salvable": {
             "flags": 0,
@@ -320,7 +330,7 @@ def fill_sv_queryresultset(result_set):
     }
     salvable = []
     duplicates = []
-    tsv_lines = {
+    orphans = {
         "flags": [],
         "comments": [],
     }
@@ -362,7 +372,7 @@ def _perform_create(obj):
                 count["lost"][obj_type] += 1
                 from variants.views import UUIDEncoder
 
-                tsv_lines[obj_type].append(
+                orphans[obj_type].append(
                     {
                         "case_uuid": str(case.sodar_uuid),
                         "case_name": case.name,
@@ -403,7 +413,7 @@ def _perform_create(obj):
                    overlapping_result_row.sodar_uuid = uuid.uuid4()
                    overlapping_result_row.svqueryresultset = result_set
                    overlapping_result_row.save()
-                    count["added"][obj_type] += 1
+                    count["added"] += 1
                     break
             else:
                 # should exist as it was annotated.
@@ -417,7 +427,7 @@ def _perform_create(obj): ) from variants.views import UUIDEncoder - tsv_lines[obj_type].append( + orphans[obj_type].append( { "case_uuid": str(case.sodar_uuid), "case_name": case.name, @@ -432,7 +442,22 @@ def _perform_create(obj): } ) elif len(result_rows) > 1: - duplicates.append(result_rows) + from variants.views import UUIDEncoder + + for result_row in result_rows: + duplicates.append( + { + "case_uuid": str(case.sodar_uuid), + "case_name": case.name, + "project": case.project.full_title, + "chromosome": result_row.chromosome, + "start": result_row.start, + "end": result_row.end, + "json": json.dumps( + model_to_dict(result_row, exclude=("id",)), cls=UUIDEncoder + ), + } + ) result_set.result_row_count = result_set.svqueryresultrow_set.count() result_set.save() @@ -443,7 +468,7 @@ def _perform_create(obj): for obj in chain(sv_flags, sv_comments): _perform_create(obj) - return count, salvable, duplicates, tsv_lines + return count, salvable, duplicates, orphans def clear_sm_queryresultset(result_set):