From e1c5c814da8681ee1402de94f0ca6127e5ceaeb3 Mon Sep 17 00:00:00 2001
From: sauravsapkota
Date: Wed, 24 Jul 2024 14:36:39 +0545
Subject: [PATCH] Add GraphQL mutation for Analysis CRUD

Rename analysis field to analysis_id
Add test cases for Analysis mutation
Change filters field from GenericScalar to FilterDataType
Remove required=False in analysis pillar analysis field
Add analysis pillar delete mutation
Refactor: add return when pillar is not None
---
 apps/analysis/models.py               |   3 +
 apps/analysis/mutation.py             |  52 +++-
 apps/analysis/schema.py               |   9 +-
 apps/analysis/serializers.py          |  48 +++-
 apps/analysis/tests/test_mutations.py | 358 ++++++++++++++++++++++++++
 schema.graphql                        |  60 ++++-
 6 files changed, 520 insertions(+), 10 deletions(-)

diff --git a/apps/analysis/models.py b/apps/analysis/models.py
index 4292313f39..52c0920d2a 100644
--- a/apps/analysis/models.py
+++ b/apps/analysis/models.py
@@ -349,6 +349,9 @@ def annotate_for_analysis_pillar_summary(cls, qs):
             analyzed_entries=models.F('dragged_entries') + models.F('discarded_entries'),
         )
 
+    def can_delete(self, user):
+        return self.can_modify(user)
+
 
 class DiscardedEntry(models.Model):
     """
diff --git a/apps/analysis/mutation.py b/apps/analysis/mutation.py
index de5bd03c19..c3bdcc0b61 100644
--- a/apps/analysis/mutation.py
+++ b/apps/analysis/mutation.py
@@ -17,6 +17,7 @@
     AnalysisReport,
     AnalysisReportUpload,
     AnalysisReportSnapshot,
+    Analysis,
 )
 from .schema import (
     get_analysis_pillar_qs,
@@ -31,6 +32,7 @@
     AnalysisReportType,
     AnalysisReportUploadType,
     AnalysisReportSnapshotType,
+    AnalysisType,
 )
 from .serializers import (
     AnalysisPillarGqlSerializer,
@@ -42,6 +44,7 @@
     AnalysisReportSerializer,
     AnalysisReportSnapshotSerializer,
     AnalysisReportUploadSerializer,
+    AnalysisGqlSerializer,
 )
 
 
@@ -84,7 +87,7 @@
 )
 
 
-# Analysi Report
+# Analysis Report
 AnalysisReportInputType = generate_input_type_for_serializer(
     'AnalysisReportInputType',
     serializer_class=AnalysisReportSerializer,
@@ -105,6 +108,11 @@
     serializer_class=AnalysisReportUploadSerializer,
 )
 
+AnalysisInputType = generate_input_type_for_serializer(
+    'AnalysisInputType',
+    serializer_class=AnalysisGqlSerializer,
+)
+
 
 class RequiredPermissionMixin():
     permissions = [
@@ -269,9 +277,47 @@ class Arguments:
         result = graphene.Field(AnalysisReportUploadType)
 
 
+class AnalysisMutationMixin(RequiredPermissionMixin):
+    @classmethod
+    def filter_queryset(cls, qs, info):
+        return qs.filter(project=info.context.active_project)
+
+
+class CreateAnalysis(AnalysisMutationMixin, PsGrapheneMutation):
+    class Arguments:
+        data = AnalysisInputType(required=True)
+    model = Analysis
+    serializer_class = AnalysisGqlSerializer
+    result = graphene.Field(AnalysisType)
+
+
+class UpdateAnalysis(AnalysisMutationMixin, PsGrapheneMutation):
+    class Arguments:
+        data = AnalysisInputType(required=True)
+        id = graphene.ID(required=True)
+    model = Analysis
+    serializer_class = AnalysisGqlSerializer
+    result = graphene.Field(AnalysisType)
+
+
+class DeleteAnalysis(AnalysisMutationMixin, PsDeleteMutation):
+    class Arguments:
+        id = graphene.ID(required=True)
+    model = Analysis
+    result = graphene.Field(AnalysisType)
+
+
+class DeleteAnalysisPillar(AnalysisPillarMutationMixin, PsDeleteMutation):
+    class Arguments:
+        id = graphene.ID(required=True)
+    model = AnalysisPillar
+    result = graphene.Field(AnalysisPillarType)
+
+
 class Mutation():
     # Analysis Pillar
     analysis_pillar_update = UpdateAnalysisPillar.Field()
+    analysis_pillar_delete = DeleteAnalysisPillar.Field()
     # Discarded Entry
     discarded_entry_create = CreateAnalysisPillarDiscardedEntry.Field()
     discarded_entry_update = UpdateAnalysisPillarDiscardedEntry.Field()
@@ -289,3 +335,7 @@ class Mutation():
     # -- Uploads
     analysis_report_upload_create = CreateAnalysisReportUpload.Field()
     analysis_report_upload_delete = DeleteAnalysisReportUpload.Field()
+    # Analysis
+    analysis_create = CreateAnalysis.Field()
+    analysis_update = UpdateAnalysis.Field()
+    analysis_delete = DeleteAnalysis.Field()
diff --git a/apps/analysis/schema.py b/apps/analysis/schema.py
index 20e6c36e78..88803b9146 100644
--- a/apps/analysis/schema.py
+++ b/apps/analysis/schema.py
@@ -196,13 +196,18 @@ class Meta:
             'title',
             'main_statement',
             'information_gap',
-            'filters',
         )
 
     assignee = graphene.Field(UserType, required=True)
-    analysis = graphene.ID(source='analysis_id', required=True)
+    analysis_id = graphene.ID(source='analysis_id', required=True)
     cloned_from = graphene.ID(source='cloned_from_id')
     analyzed_entries_count = graphene.Int(required=True)
+    filters = graphene.List(graphene.NonNull(
+        type('FilterDataType', (graphene.ObjectType,), {
+            'id': graphene.String(),
+            'key': graphene.String(),
+            'uniqueId': graphene.String()
+        })))
 
     # XXX: N+1 and No pagination
     statements = graphene.List(graphene.NonNull(AnalyticalStatementType))
diff --git a/apps/analysis/serializers.py b/apps/analysis/serializers.py
index fbc4a41654..520f260601 100644
--- a/apps/analysis/serializers.py
+++ b/apps/analysis/serializers.py
@@ -331,11 +331,14 @@ def validate(self, data):
 
 
 class AnalysisPillarGqlSerializer(TempClientIdMixin, UserResourceSerializer):
+    id = IntegerIDField(required=False)
     statements = AnalyticalStatementGqlSerializer(many=True, source='analyticalstatement_set', required=False)
+    analysis = serializers.PrimaryKeyRelatedField(queryset=Analysis.objects.all(), required=False)
 
     class Meta:
         model = AnalysisPillar
         fields = (
+            'id',
             'title',
             'main_statement',
             'information_gap',
@@ -409,21 +412,18 @@ def validate(self, data):
         return data
 
 
-class AnalysisGqlSerializer(UserResourceSerializer):
-    id = IntegerIDField(required=False)
+class AnalysisGqlSerializer(UserResourceSerializer, ProjectPropertySerializerMixin):
     analysis_pillar = AnalysisPillarGqlSerializer(many=True, source='analysispillar_set', required=False)
     start_date = serializers.DateField(required=False, allow_null=True)
 
     class Meta:
         model = Analysis
         fields = (
-            'id',
             'title',
             'team_lead',
-            'project',
             'start_date',
             'end_date',
-            'cloned_from',
+            'analysis_pillar',
         )
 
     def validate_project(self, project):
@@ -432,6 +432,7 @@ def validate(self, data):
         return project
 
     def validate(self, data):
+        data['project'] = self.project
         start_date = data.get('start_date')
         end_date = data.get('end_date')
         if start_date and start_date > end_date:
@@ -440,6 +441,43 @@ def validate(self, data):
             )
         return data
+
+    def create_or_update_pillar(self, pillar_data, instance):
+        data = {
+            "title": pillar_data.get('title'),
+            "assignee": pillar_data.get('assignee').id,
+            "analysis": instance.id,
+            "filters": pillar_data.get('filters'),
+        }
+        pillar_id = pillar_data.get('id', None)
+        if pillar_id:
+            data["id"] = pillar_id
+            analysis_pillar = get_object_or_404(AnalysisPillar, pk=pillar_id)
+            analysis_pillar_serializer = AnalysisPillarGqlSerializer(
+                analysis_pillar,
+                data=data,
+                context=self.context
+            )
+            return analysis_pillar_serializer
+        analysis_pillar_serializer = AnalysisPillarGqlSerializer(data=data, context=self.context)
+        return analysis_pillar_serializer
+
+    def update(self, instance, validated_data):
+        with transaction.atomic():
+            if 'analysispillar_set' in validated_data:
+                pillars = validated_data.pop('analysispillar_set')
+                errors = {}
+                for pillar in pillars:
+                    analysis_pillar_serializer = self.create_or_update_pillar(pillar, instance)
+                    if analysis_pillar_serializer.is_valid():
+                        analysis_pillar_serializer.save()
+                    else:
+                        errors[pillar.get('id', 'new')] = analysis_pillar_serializer.errors
+
+                if errors:
+                    raise serializers.ValidationError(errors)
+
+            return super().update(instance, validated_data)
 
 
 AnalysisCloneGqlSerializer = AnalysisCloneInputSerializer
 
diff --git a/apps/analysis/tests/test_mutations.py b/apps/analysis/tests/test_mutations.py
index 5e00373901..154620a1df 100644
--- a/apps/analysis/tests/test_mutations.py
+++ b/apps/analysis/tests/test_mutations.py
@@ -9,6 +9,7 @@
 from commons.schema_snapshots import SnapshotQuery
 
 from user.factories import UserFactory
+from project.models import Project
 from project.factories import ProjectFactory
 from lead.factories import LeadFactory
 from entry.factories import EntryFactory
@@ -27,6 +28,7 @@
     AnalyticalStatementNGram,
     AnalyticalStatementGeoTask,
     AnalysisReportSnapshot,
+    AnalysisPillar,
 )
 
 
@@ -1326,3 +1328,359 @@ def _query_check(_id, **kwargs):
         else:
             self.force_login(user)
             assert _query_public_snapshot_check(snapshot_slug)['data']['publicAnalysisReportSnapshot'] is not None
+
+
+class TestAnalysisMutationSchema(GraphQLTestCase):
+    CREATE_MUTATION = '''
+        mutation MyMutation($analysisData: AnalysisInputType!, $projectId: ID!) {
+          project(id: $projectId) {
+            analysisCreate(
+                data: $analysisData
+            ) {
+              errors
+              ok
+              result {
+                id
+                endDate
+                title
+                teamLead {
+                  id
+                }
+                pillars {
+                  analysisId
+                  id
+                  title
+                }
+              }
+            }
+          }
+        }
+    '''
+
+    UPDATE_MUTATION = '''
+        mutation MyMutation($analysisUpdate: AnalysisInputType!, $analysisID: ID!, $projectId: ID!) {
+          project(id: $projectId) {
+            analysisUpdate(data: $analysisUpdate, id: $analysisID) {
+              errors
+              ok
+              result {
+                id
+                endDate
+                title
+                teamLead {
+                  id
+                }
+                pillars {
+                  analysisId
+                  id
+                  title
+                }
+              }
+            }
+          }
+        }
+    '''
+
+    DELETE_MUTATION = '''
+        mutation MyMutation($projectId: ID!, $deleteId: ID!) {
+          project(id: $projectId) {
+            analysisDelete(id: $deleteId) {
+              errors
+              result {
+                id
+                title
+                pillars {
+                  analysisId
+                  title
+                }
+              }
+            }
+          }
+        }
+    '''
+
+    def setUp(self):
+        super().setUp()
+        self.af = AnalysisFrameworkFactory.create()
+        self.project_with_af = ProjectFactory.create(analysis_framework=self.af, status=Project.Status.ACTIVE)
+        self.project_without_af = ProjectFactory.create()
+        # Users with different roles
+        self.non_member_user = UserFactory.create()
+        self.readonly_member_user = UserFactory.create()
+        self.member_user = UserFactory.create()
+        self.project_with_af.add_member(self.readonly_member_user, role=self.project_role_reader_non_confidential)
+        self.project_with_af.add_member(self.member_user, role=self.project_role_member)
+        self.analysis, self.analysis1 = AnalysisFactory.create_batch(
+            2,
+            project=self.project_with_af,
+            team_lead=self.member_user,
+            end_date=datetime.date(2022, 4, 1),
+        )
+        self.analysis_pillar1, self.analysis_pillar2, self.analysis_pillar3 = AnalysisPillarFactory.create_batch(
+            3,
+            analysis=self.analysis,
+            assignee=self.member_user,
+        )
+
+    def test_create_analysis_without_pillar(self):
+        def _query_check(**kwargs):
+            return self.query_check(
+                self.CREATE_MUTATION,
+                variables=self.minput,
+                **kwargs
+            )
+
+        self.minput = dict(
+            analysisData=dict(
+                title='Test Analysis', teamLead=self.member_user.id, endDate='2020-01-01'
+            ),
+            projectId=self.project_with_af.id,
+        )
+
+        # -- Without login
+        _query_check(assert_for_error=True)
+
+        # -- With login (non-member)
+        self.force_login(self.non_member_user)
+        _query_check(assert_for_error=True)
+
+        # --- member user (read-only)
+        self.force_login(self.readonly_member_user)
+        _query_check(assert_for_error=True)
+
+        # --- member user
+        self.force_login(self.member_user)
+        analysis_resp_data = _query_check()['data']['project']['analysisCreate']['result']
+        self.assertEqual(analysis_resp_data['title'], self.minput['analysisData']['title'])
+        self.assertEqual(analysis_resp_data['teamLead']['id'], str(self.member_user.id))
+        self.assertEqual(analysis_resp_data['endDate'], str(self.minput['analysisData']['endDate']))
+
+    def test_create_analysis_with_pillar(self):
+        def _query_check(**kwargs):
+            return self.query_check(
+                self.CREATE_MUTATION,
+                variables=self.minput,
+                **kwargs
+            )
+
+        self.minput = dict(
+            analysisData=dict(
+                title='Updated Analysis',
+                teamLead=self.member_user.id,
+                endDate='2022-01-01',
+                analysisPillar=[
+                    dict(
+                        title=str("Analysis pillar 1"),
+                        assignee=int(self.member_user.id),
+                        analysis=int(self.analysis.id)
+                    ),
+                    dict(
+                        title=str("Analysis Pillar 2"),
+                        assignee=int(self.member_user.id),
+                        analysis=int(self.analysis1.id)
+                    ),
+                    dict(
+                        title=str("Analysis Pillar 3"),
+                        assignee=int(self.member_user.id),
+                        analysis=int(self.analysis.id)
+                    ),
+                ]
+            ),
+            projectId=self.project_with_af.id,
+        )
+
+        # -- Without login
+        _query_check(assert_for_error=True)
+
+        # -- With login (non-member)
+        self.force_login(self.non_member_user)
+        _query_check(assert_for_error=True)
+
+        # --- member user (read-only)
+        self.force_login(self.readonly_member_user)
+        _query_check(assert_for_error=True)
+
+        # --- member user
+        self.force_login(self.member_user)
+        analysis_resp_data = _query_check()['data']['project']['analysisCreate']['result']
+        self.assertEqual(analysis_resp_data['title'], self.minput['analysisData']['title'])
+        self.assertEqual(analysis_resp_data['teamLead']['id'], str(self.member_user.id))
+        self.assertEqual(analysis_resp_data['endDate'], str(self.minput['analysisData']['endDate']))
+        for each in analysis_resp_data['pillars']:
+            self.assertEqual(each['analysisId'], str(analysis_resp_data['id']))
+
+    def test_create_analysis_without_analysis_framework(self):
+        minput = dict(
+            analysisData=dict(
+                title='Test Analysis', teamLead=self.member_user.id, endDate='2020-01-01'
+            ),
+            projectId=self.project_without_af.id,
+        )
+
+        self.force_login(self.member_user)
+        self.query_check(self.CREATE_MUTATION, variables=minput, assert_for_error=True)
+
+    def test_update_analysis(self):
+        def _query_check(**kwargs):
+            return self.query_check(
+                self.UPDATE_MUTATION,
+                variables=self.update_minput,
+                **kwargs
+            )
+
+        self.update_minput = dict(
+            analysisUpdate=dict(
+                title='Updated Analysis',
+                teamLead=self.member_user.id,
+                endDate='2022-01-01',
+                analysisPillar=[
+                    dict(
+                        id=int(self.analysis_pillar1.id),
+                        title=str("Updated Analysis pillar1"),
+                        assignee=int(self.member_user.id),
+                        analysis=int(self.analysis.id)
+                    ),
+                    dict(
+                        id=int(self.analysis_pillar3.id),
+                        title=str("Updated Analysis pillar3"),
+                        assignee=int(self.member_user.id),
+                        analysis=int(self.analysis1.id)
+                    ),
+                    dict(
+                        title=str("Analysis pillar5"),
+                        assignee=int(self.member_user.id),
+                        analysis=int(self.analysis.id)
+                    ),
+                ]
+            ),
+            analysisID=self.analysis.id,
+            projectId=self.project_with_af.id,
+        )
+
+        # --- member user
+        self.force_login(self.member_user)
+        analysis_resp_data = _query_check()['data']['project']['analysisUpdate']['result']
+        analysis_resp_data_pillars = [each["id"] for each in analysis_resp_data["pillars"]]
+        self.assertTrue(
+            all(
+                AnalysisPillar.objects.get(id=int(each["id"])).title == each["title"]
+                for each in analysis_resp_data["pillars"]
+            )
+        )
+        expected_analysis_pillar_ids_dict = {
+            str(self.analysis_pillar1.id),
+            str(self.analysis_pillar2.id),
+            str(self.analysis_pillar3.id)
+        }
+        self.assertGreaterEqual(
+            len(analysis_resp_data['pillars']),
+            len(self.update_minput['analysisUpdate']['analysisPillar'])
+        )
+        self.assertEqual(len(analysis_resp_data['pillars']), 4)
+        for item in expected_analysis_pillar_ids_dict:
+            self.assertIn(item, analysis_resp_data_pillars)
+        self.assertEqual(analysis_resp_data['title'], self.update_minput['analysisUpdate']['title'])
+        self.assertEqual(analysis_resp_data['teamLead']['id'], str(self.member_user.id))
+        self.assertEqual(analysis_resp_data['endDate'], str(self.update_minput['analysisUpdate']['endDate']))
+
+    def test_delete_analysis(self):
+        def _query_check(**kwargs):
+            return self.query_check(
+                self.DELETE_MUTATION,
+                variables=self.delete_minput,
+                **kwargs
+            )
+
+        self.delete_minput = dict(
+            projectId=self.project_with_af.id,
+            deleteId=self.analysis.id,
+        )
+        # -- Without login
+        self.logout()
+        _query_check(assert_for_error=True)
+
+        # -- With login (non-member)
+        self.force_login(self.non_member_user)
+        _query_check(assert_for_error=True)
+
+        # --- member user (read-only)
+        self.force_login(self.readonly_member_user)
+        _query_check(assert_for_error=True)
+
+        # --- member user
+        self.force_login(self.member_user)
+        analysis_resp_data = _query_check()['data']['project']['analysisDelete']['result']
+        self.assertEqual(analysis_resp_data['id'], str(self.delete_minput['deleteId']))
+        self.assertEqual(len(analysis_resp_data['pillars']), 0)
+
+
+class TestAnalysisPillarMutationSchema(GraphQLTestCase):
+    UPDATE_MUTATION = '''
+        mutation MyMutation($analysisPillarUpdate: AnalysisPillarUpdateInputType!, $analysisPillarID: ID!, $projectId: ID!) {
+          project(id: $projectId) {
+            analysisPillarUpdate(data: $analysisPillarUpdate, id: $analysisPillarID) {
+              errors
+              ok
+              result {
+                analysisId
+                title
+                id
+              }
+            }
+          }
+        }
+    '''
+
+    def setUp(self):
+        super().setUp()
+        self.af = AnalysisFrameworkFactory.create()
+        self.project_with_af = ProjectFactory.create(analysis_framework=self.af, status=Project.Status.ACTIVE)
+        # Users with different roles
+        self.non_member_user = UserFactory.create()
+        self.readonly_member_user = UserFactory.create()
+        self.member_user = UserFactory.create()
+        self.project_with_af.add_member(self.readonly_member_user, role=self.project_role_reader_non_confidential)
+        self.project_with_af.add_member(self.member_user, role=self.project_role_member)
+        self.analysis = AnalysisFactory.create(
+            project=self.project_with_af,
+            team_lead=self.member_user,
+            end_date=datetime.date(2022, 4, 1),
+        )
+        self.analysis_pillar = AnalysisPillarFactory.create(
+            analysis=self.analysis,
+            assignee=self.member_user,
+        )
+
+    def test_update_analysis_pillar(self):
+        def _query_check(**kwargs):
+            return self.query_check(
+                self.UPDATE_MUTATION,
+                variables=self.update_minput,
+                **kwargs
+            )
+
+        self.update_minput = dict(
+            analysisPillarUpdate=dict(
+                title="Updated Analysis Pillar",
+            ),
+            analysisPillarID=self.analysis_pillar.id,
+            projectId=self.project_with_af.id,
+        )
+
+        # -- Without login
+        _query_check(assert_for_error=True)
+
+        # -- With login (non-member)
+        self.force_login(self.non_member_user)
+        _query_check(assert_for_error=True)
+
+        # --- member user (read-only)
+        self.force_login(self.readonly_member_user)
+        _query_check(assert_for_error=True)
+
+        # --- member user
+        self.force_login(self.member_user)
+        analysis_pillar_resp_data = _query_check()['data']['project']['analysisPillarUpdate']['result']
+        self.assertEqual(analysis_pillar_resp_data['title'], self.update_minput['analysisPillarUpdate']['title'])
+        self.assertEqual(analysis_pillar_resp_data['id'], str(self.update_minput['analysisPillarID']))
+        self.assertEqual(analysis_pillar_resp_data['analysisId'], str(self.analysis.id))
diff --git a/schema.graphql b/schema.graphql
index e6aebfb0ce..faee0e9414 100644
--- a/schema.graphql
+++ b/schema.graphql
@@ -281,6 +281,14 @@ type AnalysisFrameworkVisibleProjectType {
   isPrivate: Boolean!
 }
 
+input AnalysisInputType {
+  title: String!
+  teamLead: ID!
+  startDate: Date
+  endDate: Date!
+  analysisPillar: [AnalysisPillarGqlInputType!]
+}
+
 type AnalysisListType {
   results: [AnalysisType!]
   totalCount: Int
@@ -325,6 +333,19 @@ type AnalysisPillarEntryListType {
   pageSize: Int
 }
 
+input AnalysisPillarGqlInputType {
+  id: ID
+  title: String!
+  mainStatement: String
+  informationGap: String
+  filters: GenericScalar
+  assignee: ID!
+  analysis: ID
+  clonedFrom: ID
+  statements: [AnalyticalStatementGqlInputType!]
+  clientId: String
+}
+
 type AnalysisPillarListType {
   results: [AnalysisPillarType!]
   totalCount: Int
@@ -337,22 +358,23 @@ type AnalysisPillarType {
   title: String!
   mainStatement: String!
   informationGap: String!
-  filters: GenericScalar
   createdAt: DateTime!
   modifiedAt: DateTime!
   createdBy: UserType
   modifiedBy: UserType
   clientId: ID!
   assignee: UserType!
-  analysis: ID!
+  analysisId: ID!
   clonedFrom: ID
   analyzedEntriesCount: Int!
+  filters: [FilterDataType!]
   statements: [AnalyticalStatementType!]
   discardedEntries(tags: [DiscardedEntryTagTypeEnum!], page: Int = 1, ordering: String, pageSize: Int): AnalysisPillarDiscardedEntryListType
   entries(id: ID, excerpt: String, controlled: Boolean, createdAt: DateTime, createdAtGte: DateTime, createdAtLte: DateTime, modifiedAt: DateTime, modifiedAtGte: DateTime, modifiedAtLte: DateTime, createdBy: [ID!], modifiedBy: [ID!], leads: [ID!], leadCreatedBy: [ID!], leadPublishedOn: Date, leadPublishedOnGte: Date, leadPublishedOnLte: Date, leadTitle: String, leadAssignees: [ID!], leadStatuses: [LeadStatusEnum!], leadPriorities: [LeadPriorityEnum!], leadConfidentialities: [LeadConfidentialityEnum!], leadAuthoringOrganizationTypes: [ID!], leadAuthorOrganizations: [ID!], leadSourceOrganizations: [ID!], leadHasAssessment: Boolean, leadIsAssessment: Boolean, search: String, entryTypes: [EntryTagTypeEnum!], projectEntryLabels: [ID!], entriesId: [ID!], geoCustomShape: String, leadGroupLabel: String, filterableData: [EntryFilterDataInputType!], hasComment: Boolean, isVerified: Boolean, discarded: Boolean, excludeEntries: [ID!], page: Int = 1, ordering: String, pageSize: Int): AnalysisPillarEntryListType
 }
 
 input AnalysisPillarUpdateInputType {
+  id: ID
   title: String
   mainStatement: String
   informationGap: String
@@ -3568,6 +3590,12 @@ type CreateAdminLevel {
   result: AdminLevelType
 }
 
+type CreateAnalysis {
+  errors: [GenericScalar!]
+  ok: Boolean
+  result: AnalysisType
+}
+
 type CreateAnalysisFramework {
   errors: [GenericScalar!]
   ok: Boolean
@@ -3705,6 +3733,18 @@ type DeleteAdminLevel {
   result: AdminLevelType
 }
 
+type DeleteAnalysis {
+  errors: [GenericScalar!]
+  ok: Boolean
+  result: AnalysisType
+}
+
+type DeleteAnalysisPillar {
+  errors: [GenericScalar!]
+  ok: Boolean
+  result: AnalysisPillarType
+}
+
 type DeleteAnalysisPillarDiscardedEntry {
   errors: [GenericScalar!]
   ok: Boolean
@@ -4511,6 +4551,12 @@ input FileUploadInputType {
   projects: [ID!]
 }
 
+type FilterDataType {
+  id: String
+  key: String
+  uniqueId: String
+}
+
 type GalleryFileType {
   id: ID!
   title: String!
@@ -5460,6 +5506,7 @@ type ProjectMutationType {
   updateAssessmentRegistry(data: AssessmentRegistryCreateInputType!, id: ID!): UpdateAssessmentRegistry
   deleteAssessmentRegistry(id: ID!): DeleteAssessmentRegistry
   analysisPillarUpdate(data: AnalysisPillarUpdateInputType!, id: ID!): UpdateAnalysisPillar
+  analysisPillarDelete(id: ID!): DeleteAnalysisPillar
   discardedEntryCreate(data: DiscardedEntryCreateInputType!): CreateAnalysisPillarDiscardedEntry
   discardedEntryUpdate(data: DiscardedEntryUpdateInputType!, id: ID!): UpdateAnalysisPillarDiscardedEntry
   discardedEntryDelete(id: ID!): DeleteAnalysisPillarDiscardedEntry
@@ -5473,6 +5520,9 @@ type ProjectMutationType {
   analysisReportSnapshotCreate(data: AnalysisReportSnapshotInputType!): CreateAnalysisReportSnapshot
   analysisReportUploadCreate(data: AnalysisReportUploadInputType!): CreateAnalysisReportUpload
   analysisReportUploadDelete(id: ID!): DeleteAnalysisReportUpload
+  analysisCreate(data: AnalysisInputType!): CreateAnalysis
+  analysisUpdate(data: AnalysisInputType!, id: ID!): UpdateAnalysis
+  analysisDelete(id: ID!): DeleteAnalysis
   exportCreate(data: ExportCreateInputType!): CreateUserExport
   exportUpdate(data: ExportUpdateInputType!, id: ID!): UpdateUserExport
   exportCancel(id: ID!): CancelUserExport
@@ -6374,6 +6424,12 @@ type UpdateAdminLevel {
   result: AdminLevelType
 }
 
+type UpdateAnalysis {
+  errors: [GenericScalar!]
+  ok: Boolean
+  result: AnalysisType
+}
+
 type UpdateAnalysisFramework {
   errors: [GenericScalar!]
   ok: Boolean