Skip to content

Commit

Permalink
Add Graphql mutation for Analysis CRUD
Browse files Browse the repository at this point in the history
Rename analysis field into analysis_id

Add test cases for Analysis Mutation

Change filter genericscalartype into filtertype

Change filter genericscalartype into filtertype

Remove required=False in analysis pillar analysis

Add analysis pillar delete mutation

Refactor
  • Loading branch information
sauravsapkota authored and sudan45 committed Nov 19, 2024
1 parent 410f737 commit f1fa1aa
Show file tree
Hide file tree
Showing 6 changed files with 520 additions and 10 deletions.
3 changes: 3 additions & 0 deletions apps/analysis/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -349,6 +349,9 @@ def annotate_for_analysis_pillar_summary(cls, qs):
analyzed_entries=models.F('dragged_entries') + models.F('discarded_entries'),
)

def can_delete(self, user):
    # Deletion is allowed for exactly the users who may modify this
    # object; delegate to the existing modify-permission check so the
    # two rules can never drift apart.
    return self.can_modify(user)


class DiscardedEntry(models.Model):
"""
Expand Down
52 changes: 51 additions & 1 deletion apps/analysis/mutation.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
AnalysisReport,
AnalysisReportUpload,
AnalysisReportSnapshot,
Analysis,
)
from .schema import (
get_analysis_pillar_qs,
Expand All @@ -31,6 +32,7 @@
AnalysisReportType,
AnalysisReportUploadType,
AnalysisReportSnapshotType,
AnalysisType,
)
from .serializers import (
AnalysisPillarGqlSerializer,
Expand All @@ -42,6 +44,7 @@
AnalysisReportSerializer,
AnalysisReportSnapshotSerializer,
AnalysisReportUploadSerializer,
AnalysisGqlSerializer,
)


Expand Down Expand Up @@ -84,7 +87,7 @@
)


# Analysi Report
# Analysis Report
AnalysisReportInputType = generate_input_type_for_serializer(
'AnalysisReportInputType',
serializer_class=AnalysisReportSerializer,
Expand All @@ -105,6 +108,11 @@
serializer_class=AnalysisReportUploadSerializer,
)

AnalysisInputType = generate_input_type_for_serializer(
'AnalysisInputType',
serializer_class=AnalysisGqlSerializer,
)


class RequiredPermissionMixin():
permissions = [
Expand Down Expand Up @@ -269,9 +277,47 @@ class Arguments:
result = graphene.Field(AnalysisReportUploadType)


class AnalysisMutationMixin(RequiredPermissionMixin):
    """Scope Analysis mutations to the project active in the request.

    Restricts the queryset so a mutation can only touch Analysis rows
    belonging to ``info.context.active_project``.
    """

    @classmethod
    def filter_queryset(cls, qs, info):
        active_project = info.context.active_project
        return qs.filter(project=active_project)


class CreateAnalysis(AnalysisMutationMixin, PsGrapheneMutation):
    """GraphQL mutation that creates a new Analysis in the active project."""

    class Arguments:
        data = AnalysisInputType(required=True)

    result = graphene.Field(AnalysisType)
    serializer_class = AnalysisGqlSerializer
    model = Analysis


class UpdateAnalysis(AnalysisMutationMixin, PsGrapheneMutation):
    """GraphQL mutation that updates an existing Analysis by id."""

    class Arguments:
        data = AnalysisInputType(required=True)
        id = graphene.ID(required=True)

    result = graphene.Field(AnalysisType)
    serializer_class = AnalysisGqlSerializer
    model = Analysis


class DeleteAnalysis(AnalysisMutationMixin, PsDeleteMutation):
    """GraphQL mutation that deletes an Analysis by id (permission-checked)."""

    class Arguments:
        id = graphene.ID(required=True)

    result = graphene.Field(AnalysisType)
    model = Analysis


class DeleteAnalysisPillar(AnalysisPillarMutationMixin, PsDeleteMutation):
    """GraphQL mutation that deletes an AnalysisPillar by id (permission-checked)."""

    class Arguments:
        id = graphene.ID(required=True)

    result = graphene.Field(AnalysisPillarType)
    model = AnalysisPillar


class Mutation():
# Analysis Pillar
analysis_pillar_update = UpdateAnalysisPillar.Field()
analysis_pillar_delete = DeleteAnalysisPillar.Field()
# Discarded Entry
discarded_entry_create = CreateAnalysisPillarDiscardedEntry.Field()
discarded_entry_update = UpdateAnalysisPillarDiscardedEntry.Field()
Expand All @@ -289,3 +335,7 @@ class Mutation():
# -- Uploads
analysis_report_upload_create = CreateAnalysisReportUpload.Field()
analysis_report_upload_delete = DeleteAnalysisReportUpload.Field()
# Analysis
analysis_create = CreateAnalysis.Field()
analysis_update = UpdateAnalysis.Field()
analysis_delete = DeleteAnalysis.Field()
9 changes: 7 additions & 2 deletions apps/analysis/schema.py
Original file line number Diff line number Diff line change
Expand Up @@ -196,13 +196,18 @@ class Meta:
'title',
'main_statement',
'information_gap',
'filters',
)

assignee = graphene.Field(UserType, required=True)
analysis = graphene.ID(source='analysis_id', required=True)
analysis_id = graphene.ID(source='analysis_id', required=True)
cloned_from = graphene.ID(source='cloned_from_id')
analyzed_entries_count = graphene.Int(required=True)
filters = graphene.List(graphene.NonNull(
type('FilterDataType', (graphene.ObjectType,), {
'id': graphene.String(),
'key': graphene.String(),
'uniqueId': graphene.String()
})))

# XXX: N+1 and No pagination
statements = graphene.List(graphene.NonNull(AnalyticalStatementType))
Expand Down
48 changes: 43 additions & 5 deletions apps/analysis/serializers.py
Original file line number Diff line number Diff line change
Expand Up @@ -331,11 +331,14 @@ def validate(self, data):


class AnalysisPillarGqlSerializer(TempClientIdMixin, UserResourceSerializer):
id = IntegerIDField(required=False)
statements = AnalyticalStatementGqlSerializer(many=True, source='analyticalstatement_set', required=False)
analysis = serializers.PrimaryKeyRelatedField(queryset=Analysis.objects.all(), required=False)

class Meta:
model = AnalysisPillar
fields = (
'id',
'title',
'main_statement',
'information_gap',
Expand Down Expand Up @@ -409,21 +412,18 @@ def validate(self, data):
return data


class AnalysisGqlSerializer(UserResourceSerializer):
id = IntegerIDField(required=False)
class AnalysisGqlSerializer(UserResourceSerializer, ProjectPropertySerializerMixin):
analysis_pillar = AnalysisPillarGqlSerializer(many=True, source='analysispillar_set', required=False)
start_date = serializers.DateField(required=False, allow_null=True)

class Meta:
model = Analysis
fields = (
'id',
'title',
'team_lead',
'project',
'start_date',
'end_date',
'cloned_from',
'analysis_pillar',
)

def validate_project(self, project):
Expand All @@ -432,6 +432,7 @@ def validate_project(self, project):
return project

def validate(self, data):
data['project'] = self.project
start_date = data.get('start_date')
end_date = data.get('end_date')
if start_date and start_date > end_date:
Expand All @@ -440,6 +441,43 @@ def validate(self, data):
)
return data

def create_or_update_pillar(self, pillar_data, instance):
    """Build an AnalysisPillarGqlSerializer for one nested pillar payload.

    If ``pillar_data`` carries an ``id``, the serializer is bound to the
    existing AnalysisPillar (update); otherwise an unbound serializer is
    returned (create). The serializer is NOT validated or saved here —
    the caller (``update``) drives ``is_valid()``/``save()``.

    :param pillar_data: validated nested dict for one pillar.
    :param instance: the parent Analysis instance the pillar belongs to.
    :returns: an ``AnalysisPillarGqlSerializer`` ready for validation.
    """
    # Guard: assignee is optional in the input; ``.get('assignee').id``
    # would raise AttributeError on a missing/None assignee.
    assignee = pillar_data.get('assignee')
    data = {
        "title": pillar_data.get('title'),
        "assignee": assignee.id if assignee is not None else None,
        "analysis": instance.id,
        "filters": pillar_data.get('filters'),
    }
    pillar_id = pillar_data.get('id')
    if pillar_id:
        # Update path: bind the serializer to the existing pillar.
        # BUG FIX: previously the unbound (create) serializer below was
        # assigned unconditionally, clobbering this update serializer —
        # every "update" silently created a duplicate pillar instead.
        data["id"] = pillar_id
        analysis_pillar = get_object_or_404(AnalysisPillar, pk=pillar_id)
        return AnalysisPillarGqlSerializer(
            analysis_pillar,
            data=data,
            context=self.context,
        )
    # Create path: no id supplied, build an unbound serializer.
    return AnalysisPillarGqlSerializer(data=data, context=self.context)

def update(self, instance, validated_data):
    """Update the Analysis and create/update its nested pillars atomically.

    Nested pillar payloads (``analysispillar_set``) are each routed through
    ``create_or_update_pillar``; if any pillar serializer fails validation,
    all errors are collected and raised together, rolling back the
    transaction.
    """
    with transaction.atomic():
        pillar_payloads = validated_data.pop('analysispillar_set', None)
        if pillar_payloads is not None:
            pillar_errors = {}
            for payload in pillar_payloads:
                pillar_serializer = self.create_or_update_pillar(payload, instance)
                if not pillar_serializer.is_valid():
                    pillar_errors[payload.get('id', 'new')] = pillar_serializer.errors
                else:
                    pillar_serializer.save()

            if pillar_errors:
                raise serializers.ValidationError(pillar_errors)

        return super().update(instance, validated_data)


AnalysisCloneGqlSerializer = AnalysisCloneInputSerializer

Expand Down
Loading

0 comments on commit f1fa1aa

Please sign in to comment.