Skip to content

Commit

Permalink
Merge branch 'master' into enhancement/increase-test-coverage
Browse files Browse the repository at this point in the history
  • Loading branch information
metroid-samus authored Nov 19, 2024
2 parents a924e37 + 291279d commit 08d30db
Show file tree
Hide file tree
Showing 23 changed files with 632 additions and 228 deletions.
2 changes: 1 addition & 1 deletion requirements-base.in
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ requests
schedule
schemathesis
sentry-asgi
sentry-sdk
sentry-sdk==1.45.0
sh
slack_sdk
slack-bolt
Expand Down
15 changes: 8 additions & 7 deletions requirements-base.txt
Original file line number Diff line number Diff line change
Expand Up @@ -4,14 +4,13 @@
#
# pip-compile requirements-base.in
#

aiocache==0.12.3
# via -r requirements-base.in
aiofiles==24.1.0
# via -r requirements-base.in
aiohappyeyeballs==2.4.3
# via aiohttp
aiohttp==3.10.10
aiohttp==3.11.0
# via -r requirements-base.in
aiosignal==1.3.1
# via aiohttp
Expand Down Expand Up @@ -119,15 +118,15 @@ email-validator==2.2.0
# via -r requirements-base.in
emails==0.6
# via -r requirements-base.in
fastapi==0.115.4
fastapi==0.115.5
# via -r requirements-base.in
frozenlist==1.5.0
# via
# aiohttp
# aiosignal
google-api-core==2.22.0
# via google-api-python-client
google-api-python-client==2.151.0
google-api-python-client==2.153.0
# via -r requirements-base.in
google-auth==2.36.0
# via
Expand Down Expand Up @@ -257,7 +256,7 @@ oauthlib[signedtoken]==3.2.2
# atlassian-python-api
# jira
# requests-oauthlib
openai==1.54.3
openai==1.54.4
# via -r requirements-base.in
packaging==24.2
# via
Expand Down Expand Up @@ -288,7 +287,9 @@ preshed==3.0.9
# spacy
# thinc
propcache==0.2.0
# via yarl
# via
# aiohttp
# yarl
proto-plus==1.25.0
# via google-api-core
protobuf==4.23.4
Expand Down Expand Up @@ -402,7 +403,7 @@ scipy==1.14.1
# via statsmodels
sentry-asgi==0.2.0
# via -r requirements-base.in
sentry-sdk==2.18.0
sentry-sdk==1.45.0
# via
# -r requirements-base.in
# sentry-asgi
Expand Down
2 changes: 1 addition & 1 deletion requirements-dev.txt
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ executing==2.1.0
# stack-data
factory-boy==3.3.1
# via -r requirements-dev.in
faker==30.8.2
faker==32.1.0
# via
# -r requirements-dev.in
# factory-boy
Expand Down
17 changes: 9 additions & 8 deletions src/dispatch/case/flows.py
Original file line number Diff line number Diff line change
Expand Up @@ -744,6 +744,15 @@ def common_escalate_flow(
db_session.add(incident)
db_session.commit()

# we run the incident create flow in a background task
incident = incident_flows.incident_create_flow(
incident_id=incident.id,
organization_slug=organization_slug,
db_session=db_session,
case_id=case.id,
)

# we link the case to the incident
case.incidents.append(incident)
db_session.add(case)
db_session.commit()
Expand All @@ -755,14 +764,6 @@ def common_escalate_flow(
case_id=case.id,
)

# we run the incident create flow in a background task
incident = incident_flows.incident_create_flow(
incident_id=incident.id,
organization_slug=organization_slug,
db_session=db_session,
case_id=case.id,
)

# we add the case participants to the incident
for participant in case.participants:
# check to see if already a participant in the incident
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,16 +30,28 @@ def upgrade():
"entity", "source", existing_type=sa.BOOLEAN(), type_=sa.String(), existing_nullable=True
)

op.drop_index("ix_entity_search_vector", table_name="entity", postgresql_using="gin")
op.create_index(
"entity_search_vector_idx",
"entity",
["search_vector"],
unique=False,
postgresql_using="gin",
)
indexes = inspector.get_indexes("entity")
index_exists = any(index["name"] == "ix_entity_search_vector" for index in indexes)

if index_exists:
op.drop_index("ix_entity_search_vector", table_name="entity", postgresql_using="gin")

index_exists = any(index["name"] == "entity_search_vector_idx" for index in indexes)
if not index_exists:
op.create_index(
"entity_search_vector_idx",
"entity",
["search_vector"],
unique=False,
postgresql_using="gin",
)
op.alter_column("entity_type", "jpath", existing_type=sa.VARCHAR(), nullable=True)
op.drop_column("plugin_instance", "configuration")

columns = inspector.get_columns("plugin_instance")
column_exists = any(column["name"] == "configuration" for column in columns)
if column_exists:
op.drop_column("plugin_instance", "configuration")

op.drop_constraint("project_stable_priority_id_fkey", "project", type_="foreignkey")
# ### end Alembic commands ###

Expand Down
7 changes: 5 additions & 2 deletions src/dispatch/database/service.py
Original file line number Diff line number Diff line change
Expand Up @@ -536,7 +536,7 @@ def search_filter_sort_paginate(
db_session,
model,
query_str: str = None,
filter_spec: str = None,
filter_spec: str | dict | None = None,
page: int = 1,
items_per_page: int = 5,
sort_by: List[str] = None,
Expand All @@ -558,7 +558,10 @@ def search_filter_sort_paginate(

tag_all_filters = []
if filter_spec:
filter_spec = json.loads(filter_spec)
# some functions pass filter_spec as dictionary such as auth/views.py/get_users
# but most come from the API as serialized JSON
if isinstance(filter_spec, str):
filter_spec = json.loads(filter_spec)
query = apply_filter_specific_joins(model_cls, filter_spec, query)
# if the filter_spec has the TagAll filter, we need to split the query up
# and intersect all of the results
Expand Down
45 changes: 45 additions & 0 deletions src/dispatch/incident/scheduled.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,12 +23,16 @@
)
from dispatch.nlp import build_phrase_matcher, build_term_vocab, extract_terms_from_text
from dispatch.notification import service as notification_service
from dispatch.incident import service as incident_service
from dispatch.plugin import service as plugin_service
from dispatch.project.models import Project
from dispatch.scheduler import scheduler
from dispatch.search_filter import service as search_filter_service
from dispatch.tag import service as tag_service
from dispatch.tag.models import Tag
from dispatch.participant import flows as participant_flows
from dispatch.participant_role.models import ParticipantRoleType


from .enums import IncidentStatus
from .messaging import send_incident_close_reminder
Expand Down Expand Up @@ -344,3 +348,44 @@ def incident_report_weekly(db_session: Session, project: Project):
notification=notification,
notification_params=notification_params,
)


@scheduler.add(every(1).hour, name="incident-sync-members")
@timer
@scheduled_project_task
def incident_sync_members(db_session: Session, project: Project):
    """Checks the members of all conversations associated with active
    and stable incidents and ensures they are in the incident.

    For each active or stable incident that has a conversation, fetches the
    conversation member emails via the active conversation plugin and adds any
    member who is not already a participant to the incident as an observer.

    Args:
        db_session: Database session provided by the scheduler task wrapper.
        project: The project whose incidents are synced.
    """
    plugin = plugin_service.get_active_instance(
        db_session=db_session,
        project_id=project.id,
        plugin_type="conversation",
    )
    if not plugin:
        log.warning("No conversation plugin is active.")
        return

    active_incidents = incident_service.get_all_by_status(
        db_session=db_session, project_id=project.id, status=IncidentStatus.active
    )
    stable_incidents = incident_service.get_all_by_status(
        db_session=db_session, project_id=project.id, status=IncidentStatus.stable
    )
    incidents = active_incidents + stable_incidents

    for incident in incidents:
        if not incident.conversation:
            continue

        conversation_members = plugin.instance.get_all_member_emails(
            incident.conversation.channel_id
        )
        # use a set for O(1) membership checks instead of a linear scan
        # per conversation member
        incident_member_emails = {m.individual.email for m in incident.participants}

        for member in conversation_members:
            if member not in incident_member_emails:
                participant_flows.add_participant(
                    member,
                    incident,
                    db_session,
                    roles=[ParticipantRoleType.observer],
                )
                log.debug(f"Added missing {member} to incident {incident.name}")
27 changes: 17 additions & 10 deletions src/dispatch/plugins/dispatch_slack/case/interactive.py
Original file line number Diff line number Diff line change
Expand Up @@ -1892,16 +1892,6 @@ def handle_resolve_submission_event(
current_user=user,
)

# we run the case update flow
case_flows.case_update_flow(
case_id=updated_case.id,
previous_case=previous_case,
db_session=db_session,
reporter_email=updated_case.reporter.individual.email if updated_case.reporter else None,
assignee_email=updated_case.assignee.individual.email if updated_case.assignee else None,
organization_slug=context["subject"].organization_slug,
)

# we update the case notification with the resolution, resolution reason and status
blocks = create_case_message(case=updated_case, channel_id=context["subject"].channel_id)
client.chat_update(
Expand All @@ -1910,6 +1900,23 @@ def handle_resolve_submission_event(
channel=updated_case.conversation.channel_id,
)

try:
# we run the case update flow
case_flows.case_update_flow(
case_id=updated_case.id,
previous_case=previous_case,
db_session=db_session,
reporter_email=updated_case.reporter.individual.email
if updated_case.reporter
else None,
assignee_email=updated_case.assignee.individual.email
if updated_case.assignee
else None,
organization_slug=context["subject"].organization_slug,
)
except Exception as e:
log.error(f"Error running case update flow from Slack plugin: {e}")


@app.shortcut(CaseShortcutCallbacks.report, middleware=[db_middleware, shortcut_context_middleware])
def report_issue(
Expand Down
31 changes: 20 additions & 11 deletions src/dispatch/plugins/dispatch_slack/case/messages.py
Original file line number Diff line number Diff line change
Expand Up @@ -257,6 +257,13 @@ def create_action_buttons_message(
project_id=project_id,
channel_id=channel_id,
).json()
mfa_button_metadata = SubjectMetadata(
type=CaseSubjects.case,
organization_slug=organization_slug,
id=case.id,
project_id=project_id,
channel_id=channel_id,
).json()

# we create the response plan and the snooze buttons
elements = []
Expand All @@ -270,17 +277,19 @@ def create_action_buttons_message(
)
)

elements.append(
Button(
text="💤 Snooze Alert",
action_id=SignalNotificationActions.snooze,
value=button_metadata,
),
Button(
text="👤 User MFA Challenge",
action_id=CaseNotificationActions.user_mfa,
value=button_metadata,
),
elements.extend(
[
Button(
text="💤 Snooze Alert",
action_id=SignalNotificationActions.snooze,
value=button_metadata,
),
Button(
text="👤 User MFA Challenge",
action_id=CaseNotificationActions.user_mfa,
value=mfa_button_metadata,
),
]
)

# we create the signal metadata blocks
Expand Down
1 change: 1 addition & 0 deletions src/dispatch/plugins/dispatch_slack/enums.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ class SlackAPIGetEndpoints(DispatchEnum):
users_info = "users.info"
users_lookup_by_email = "users.lookupByEmail"
users_profile_get = "users.profile.get"
conversations_members = "conversations.members"


class SlackAPIPostEndpoints(DispatchEnum):
Expand Down
23 changes: 23 additions & 0 deletions src/dispatch/plugins/dispatch_slack/plugin.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,7 @@
does_user_exist,
emails_to_user_ids,
get_user_avatar_url,
get_user_info_by_id,
get_user_profile_by_email,
is_user,
rename_conversation,
Expand Down Expand Up @@ -451,6 +452,28 @@ def get_conversation_replies(self, conversation_id: str, thread_ts: str) -> list
replies.append(f"{reply['text']}")
return replies

def get_all_member_emails(self, conversation_id: str) -> list[str]:
    """
    Fetches the emails of all members of a Slack conversation.

    The Slack ``conversations.members`` endpoint is cursor-paginated, so the
    member list is accumulated across pages by following ``next_cursor`` —
    a single call would silently drop members of large channels.

    Args:
        conversation_id (str): The ID of the Slack conversation.

    Returns:
        list[str]: A list of the emails for all members in the conversation.
    """
    client = create_slack_client(self.configuration)

    member_ids: list[str] = []
    cursor = None
    while True:
        response = client.conversations_members(channel=conversation_id, cursor=cursor)
        member_ids.extend(response.get("members", []))
        cursor = (response.get("response_metadata") or {}).get("next_cursor")
        if not cursor:
            break

    member_emails = []
    for member_id in member_ids:
        # skip bots/apps; only real users carry a profile email
        if is_user(config=self.configuration, user_id=member_id):
            user = get_user_info_by_id(client, member_id)
            if user and (profile := user.get("profile")) and (email := profile.get("email")):
                member_emails.append(email)

    return member_emails


@apply(counter, exclude=["__init__"])
@apply(timer, exclude=["__init__"])
Expand Down
Loading

0 comments on commit 08d30db

Please sign in to comment.