Skip to content

Commit

Permalink
Merge branch 'master' into chore/docker-compose-version
Browse files Browse the repository at this point in the history
  • Loading branch information
mvilanova authored Oct 31, 2024
2 parents 9a7f0fd + c87f545 commit f0e0e0a
Show file tree
Hide file tree
Showing 43 changed files with 847 additions and 320 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/python.yml
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ jobs:
# Minimum code coverage per file
COVERAGE_SINGLE: 50
# Minimum total code coverage
COVERAGE_TOTAL: 56
COVERAGE_TOTAL: 55
runs-on: ubuntu-latest
services:
postgres:
Expand Down
2 changes: 1 addition & 1 deletion .nvmrc
Original file line number Diff line number Diff line change
@@ -1 +1 @@
16.13.0
20.18.0
4 changes: 2 additions & 2 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ default_language_version:
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
# ruff version.
rev: v0.6.4
rev: v0.7.0
hooks:
# Run the linter.
#
Expand All @@ -28,7 +28,7 @@ repos:

# Typos
- repo: https://github.com/crate-ci/typos
rev: v1.24.5
rev: v1.26.1
hooks:
- id: typos
exclude: ^(data/dispatch-sample-data.dump|src/dispatch/static/dispatch/src/|src/dispatch/database/revisions/)
Expand Down
20 changes: 17 additions & 3 deletions docker/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
wget \
&& rm -rf /var/lib/apt/lists/*

RUN wget --quiet -O - https://deb.nodesource.com/setup_16.x | bash - \
RUN wget --quiet -O - https://deb.nodesource.com/setup_20.x | bash - \
&& apt-get install -y nodejs --no-install-recommends

ARG SOURCE_COMMIT
Expand Down Expand Up @@ -87,7 +87,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
RUN echo "deb http://apt.postgresql.org/pub/repos/apt bullseye-pgdg main" > /etc/apt/sources.list.d/pgdg.list \
&& wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add -

RUN wget --quiet -O - https://deb.nodesource.com/setup_12.x | bash -
RUN wget --quiet -O - https://deb.nodesource.com/setup_20.x | bash -

COPY --from=sdist /dist/*.whl /tmp/dist/
RUN buildDeps="" \
Expand All @@ -104,7 +104,21 @@ RUN buildDeps="" \
pkg-config postgresql-client-14 nodejs \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/* \
&& npm install mjml --no-cache-dir
# mjml has to be installed differently here because
# after node 14, docker will install npm files at the
# root directory and fail, so we have to create a new
# directory and use it for the install then copy the
# files to the root directory to maintain backwards
# compatibility for email generation
&& mkdir -p /mjml_install \
# if our workdir is /, then pushd/popd doesn't work
# for the npm install. It still tries to install in /,
# which npm can't do
&& cd /mjml_install \
&& npm install --no-cache-dir mjml \
&& mv node_modules / \
&& cd / \
&& rm -rf /mjml_install

EXPOSE 8000
VOLUME /var/lib/dispatch/files
Expand Down
6 changes: 3 additions & 3 deletions docs/package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

8 changes: 4 additions & 4 deletions requirements-base.txt
Original file line number Diff line number Diff line change
Expand Up @@ -330,7 +330,7 @@ python-dateutil==2.9.0.post0
# pandas
python-jose==3.3.0
# via -r requirements-base.in
python-multipart==0.0.12
python-multipart==0.0.16
# via -r requirements-base.in
python-slugify==8.0.4
# via -r requirements-base.in
Expand Down Expand Up @@ -396,9 +396,9 @@ six==1.16.0
# python-dateutil
# sqlalchemy-filters
# validators
slack-bolt==1.20.1
slack-bolt==1.21.2
# via -r requirements-base.in
slack-sdk==3.33.1
slack-sdk==3.33.3
# via
# -r requirements-base.in
# slack-bolt
Expand Down Expand Up @@ -496,7 +496,7 @@ wasabi==1.1.2
# weasel
weasel==0.3.4
# via spacy
werkzeug==3.0.3
werkzeug==3.0.6
# via schemathesis
wrapt==1.16.0
# via deprecated
Expand Down
8 changes: 4 additions & 4 deletions requirements-dev.txt
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ click==8.1.7
# via
# -r requirements-dev.in
# black
coverage==7.6.3
coverage==7.6.4
# via -r requirements-dev.in
decorator==5.1.1
# via ipython
Expand All @@ -32,7 +32,7 @@ executing==2.0.1
# stack-data
factory-boy==3.3.1
# via -r requirements-dev.in
faker==30.6.0
faker==30.8.1
# via
# -r requirements-dev.in
# factory-boy
Expand All @@ -42,7 +42,7 @@ identify==2.5.33
# via pre-commit
iniconfig==2.0.0
# via pytest
ipython==8.28.0
ipython==8.29.0
# via -r requirements-dev.in
jedi==0.19.1
# via ipython
Expand Down Expand Up @@ -86,7 +86,7 @@ python-dateutil==2.9.0.post0
# via faker
pyyaml==6.0.1
# via pre-commit
ruff==0.6.9
ruff==0.7.1
# via -r requirements-dev.in
six==1.16.0
# via
Expand Down
66 changes: 50 additions & 16 deletions src/dispatch/case/flows.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,45 +8,44 @@
from dispatch.case.messaging import send_case_welcome_participant_message
from dispatch.case.models import CaseRead
from dispatch.conversation import flows as conversation_flows
from dispatch.database.core import SessionLocal
from dispatch.decorators import background_task
from dispatch.document import flows as document_flows
from dispatch.enums import DocumentResourceTypes, Visibility, EventType
from dispatch.enums import DocumentResourceTypes, EventType, Visibility
from dispatch.event import service as event_service
from dispatch.group import flows as group_flows
from dispatch.group.enums import GroupAction, GroupType
from dispatch.incident import flows as incident_flows
from dispatch.incident import service as incident_service
from dispatch.incident.enums import IncidentStatus
from dispatch.incident.messaging import send_participant_announcement_message
from dispatch.incident.models import IncidentCreate, Incident
from dispatch.incident.type.models import IncidentType
from dispatch.incident.models import Incident, IncidentCreate
from dispatch.incident.priority.models import IncidentPriority
from dispatch.incident.type.models import IncidentType
from dispatch.individual.models import IndividualContactRead
from dispatch.models import OrganizationSlug, PrimaryKey
from dispatch.participant import flows as participant_flows
from dispatch.participant import service as participant_service
from dispatch.participant.models import ParticipantUpdate
from dispatch.participant_role import flows as role_flow
from dispatch.participant_role.models import ParticipantRoleType, ParticipantRole
from dispatch.participant_role.models import ParticipantRole, ParticipantRoleType
from dispatch.plugin import service as plugin_service
from dispatch.storage import flows as storage_flows
from dispatch.storage.enums import StorageAction
from dispatch.ticket import flows as ticket_flows

from .enums import CaseResolutionReason, CaseStatus
from .messaging import (
send_case_created_notifications,
send_case_update_notifications,
send_case_rating_feedback_message,
send_case_update_notifications,
)

from .models import Case, CaseStatus
from .models import Case
from .service import get

log = logging.getLogger(__name__)


def get_case_participants_flow(case: Case, db_session: SessionLocal):
def get_case_participants_flow(case: Case, db_session: Session):
"""Get additional case participants based on priority, type and description."""
individual_contacts = []
team_contacts = []
Expand Down Expand Up @@ -193,6 +192,30 @@ def update_conversation(case: Case, db_session: Session) -> None:
)


def case_auto_close_flow(case: Case, db_session: Session):
    """Runs the case auto close flow.

    Immediately resolves and closes the given case: sets a canned resolution,
    marks the resolution reason as user-acknowledged, persists the closed
    status, then walks the case through the triage and closed status flows.
    Finally updates the case's conversation thread, if one exists.

    Args:
        case: The case to auto-close.
        db_session: Active database session used to persist the changes.
    """
    # we mark the case as closed
    case.resolution = "Auto closed via case type auto close configuration."
    case.resolution_reason = CaseResolutionReason.user_acknowledge
    case.status = CaseStatus.closed
    # commit before running the status flows so they observe the closed state
    db_session.add(case)
    db_session.commit()

    # we transition the case from the new to the closed state
    case_triage_status_flow(
        case=case,
        db_session=db_session,
    )
    case_closed_status_flow(
        case=case,
        db_session=db_session,
    )

    # only thread-based conversations are updated here
    # NOTE(review): assumes `has_thread` distinguishes thread vs. channel cases — confirm
    if case.conversation and case.has_thread:
        # we update the case conversation
        update_conversation(case=case, db_session=db_session)


def case_new_create_flow(
*,
case_id: int,
Expand Down Expand Up @@ -255,6 +278,10 @@ def case_new_create_flow(
log.warning("Case assignee not paged. No plugin of type oncall enabled.")
return case

if case and case.case_type.auto_close:
# we transition the case to the closed state if its case type has auto close enabled
case_auto_close_flow(case=case, db_session=db_session)

return case


Expand Down Expand Up @@ -337,17 +364,21 @@ def case_update_flow(
# we get the case
case = get(db_session=db_session, case_id=case_id)

if reporter_email:
# we run the case assign role flow for the reporter
if not case:
log.warning(f"Case with id {case_id} not found.")
return

if reporter_email and case.reporter and reporter_email != case.reporter.individual.email:
# we run the case assign role flow for the reporter if it changed
case_assign_role_flow(
case_id=case.id,
participant_email=reporter_email,
participant_role=ParticipantRoleType.reporter,
db_session=db_session,
)

if assignee_email:
# we run the case assign role flow for the assignee
if assignee_email and case.assignee and assignee_email != case.assignee.individual.email:
# we run the case assign role flow for the assignee if it changed
case_assign_role_flow(
case_id=case.id,
participant_email=assignee_email,
Expand Down Expand Up @@ -375,15 +406,15 @@ def case_update_flow(

if case.tactical_group:
# we update the tactical group
if reporter_email:
if reporter_email and case.reporter and reporter_email != case.reporter.individual.email:
group_flows.update_group(
subject=case,
group=case.tactical_group,
group_action=GroupAction.add_member,
group_member=reporter_email,
db_session=db_session,
)
if assignee_email:
if assignee_email and case.assignee and assignee_email != case.assignee.individual.email:
group_flows.update_group(
subject=case,
group=case.tactical_group,
Expand All @@ -406,7 +437,7 @@ def case_update_flow(
send_case_update_notifications(case, previous_case, db_session)


def case_delete_flow(case: Case, db_session: SessionLocal):
def case_delete_flow(case: Case, db_session: Session):
"""Runs the case delete flow."""
# we delete the external ticket
if case.ticket:
Expand Down Expand Up @@ -489,6 +520,9 @@ def case_closed_status_flow(case: Case, db_session=None):
if not storage_plugin:
return

# we update the ticket
ticket_flows.update_case_ticket(case=case, db_session=db_session)

# Open document access if configured
if storage_plugin.configuration.open_on_close:
for document in case.documents:
Expand Down
5 changes: 4 additions & 1 deletion src/dispatch/case/type/models.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,11 @@
from typing import List, Optional

from pydantic import Field, validator, AnyHttpUrl
from pydantic import AnyHttpUrl, Field, validator
from sqlalchemy import JSON, Boolean, Column, ForeignKey, Integer, String
from sqlalchemy.event import listen
from sqlalchemy.ext.hybrid import hybrid_method
from sqlalchemy.orm import relationship
from sqlalchemy.sql import false
from sqlalchemy.sql.schema import UniqueConstraint
from sqlalchemy_utils import TSVectorType

Expand All @@ -27,6 +28,7 @@ class CaseType(ProjectMixin, Base):
exclude_from_metrics = Column(Boolean, default=False)
plugin_metadata = Column(JSON, default=[])
conversation_target = Column(String)
auto_close = Column(Boolean, default=False, server_default=false())

# the catalog here is simple to help matching "named entities"
search_vector = Column(TSVectorType("name", regconfig="pg_catalog.simple"))
Expand Down Expand Up @@ -100,6 +102,7 @@ class CaseTypeBase(DispatchBase):
project: Optional[ProjectRead]
visibility: Optional[str] = Field(None, nullable=True)
cost_model: Optional[CostModelRead] = None
auto_close: Optional[bool] = False

@validator("plugin_metadata", pre=True)
def replace_none_with_empty_list(cls, value):
Expand Down
Loading

0 comments on commit f0e0e0a

Please sign in to comment.