diff --git a/src/dispatch/case/models.py b/src/dispatch/case/models.py
index 550dc84923a5..d746808d0933 100644
--- a/src/dispatch/case/models.py
+++ b/src/dispatch/case/models.py
@@ -169,7 +169,7 @@ class SignalInstanceRead(DispatchBase):
entities: Optional[List[EntityRead]] = []
tags: Optional[List[TagRead]] = []
raw: Any
- fingerprint: str
+ fingerprint: Optional[str]
created_at: datetime
diff --git a/src/dispatch/database/revisions/tenant/versions/2023-02-13_93b517de08e2.py b/src/dispatch/database/revisions/tenant/versions/2023-02-13_93b517de08e2.py
new file mode 100644
index 000000000000..5affe963cd87
--- /dev/null
+++ b/src/dispatch/database/revisions/tenant/versions/2023-02-13_93b517de08e2.py
@@ -0,0 +1,42 @@
+"""Allows many to many signal filters
+
+Revision ID: 93b517de08e2
+Revises: b168b50764c7
+Create Date: 2023-02-13 15:19:36.921571
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+# revision identifiers, used by Alembic.
+revision = "93b517de08e2"
+down_revision = "b168b50764c7"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.create_table(
+ "assoc_signal_filters",
+ sa.Column("signal_id", sa.Integer(), nullable=False),
+ sa.Column("signal_filter_id", sa.Integer(), nullable=False),
+ sa.ForeignKeyConstraint(["signal_filter_id"], ["signal_filter.id"], ondelete="CASCADE"),
+ sa.ForeignKeyConstraint(["signal_id"], ["signal.id"], ondelete="CASCADE"),
+ sa.PrimaryKeyConstraint("signal_id", "signal_filter_id"),
+ )
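+    # the association table above replaces the direct signal_filter.signal_id foreign key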
+ op.drop_constraint("signal_filter_signal_id_fkey", "signal_filter", type_="foreignkey")
+ op.drop_column("signal_filter", "signal_id")
+ # ### end Alembic commands ###
+
+
+def downgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.add_column(
+ "signal_filter", sa.Column("signal_id", sa.INTEGER(), autoincrement=False, nullable=True)
+ )
+ op.create_foreign_key(
+ "signal_filter_signal_id_fkey", "signal_filter", "signal", ["signal_id"], ["id"]
+ )
+ op.drop_table("assoc_signal_filters")
+ # ### end Alembic commands ###
diff --git a/src/dispatch/database/revisions/tenant/versions/2023-02-13_b168b50764c7.py b/src/dispatch/database/revisions/tenant/versions/2023-02-13_b168b50764c7.py
new file mode 100644
index 000000000000..3a8821872b22
--- /dev/null
+++ b/src/dispatch/database/revisions/tenant/versions/2023-02-13_b168b50764c7.py
@@ -0,0 +1,254 @@
+"""Moves signal processing to filter approach.
+
+Revision ID: b168b50764c7
+Revises: 8746b4e292d2
+Create Date: 2023-02-13 13:56:48.032074
+
+"""
+from alembic import op
+import sqlalchemy as sa
+import sqlalchemy_utils
+from sqlalchemy.dialects import postgresql
+
+# revision identifiers, used by Alembic.
+revision = "b168b50764c7"
+down_revision = "8746b4e292d2"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
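+    # signal_filter replaces the suppression_rule and duplication_rule tables dropped below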
+ op.create_table(
+ "signal_filter",
+ sa.Column("evergreen", sa.Boolean(), nullable=True),
+ sa.Column("evergreen_owner", sa.String(), nullable=True),
+ sa.Column("evergreen_reminder_interval", sa.Integer(), nullable=True),
+ sa.Column("evergreen_last_reminder_at", sa.DateTime(), nullable=True),
+ sa.Column("id", sa.Integer(), nullable=False),
+ sa.Column("name", sa.String(), nullable=True),
+ sa.Column("description", sa.String(), nullable=True),
+ sa.Column("expression", sa.JSON(), nullable=False),
+ sa.Column("mode", sa.String(), nullable=False),
+ sa.Column("action", sa.String(), nullable=False),
+ sa.Column("expiration", sa.DateTime(), nullable=True),
+ sa.Column("window", sa.Integer(), nullable=True),
+ sa.Column("signal_id", sa.Integer(), nullable=True),
+ sa.Column("creator_id", sa.Integer(), nullable=True),
+ sa.Column("search_vector", sqlalchemy_utils.types.ts_vector.TSVectorType(), nullable=True),
+ sa.Column("project_id", sa.Integer(), nullable=True),
+ sa.Column("created_at", sa.DateTime(), nullable=True),
+ sa.Column("updated_at", sa.DateTime(), nullable=True),
+ sa.ForeignKeyConstraint(
+ ["creator_id"],
+ ["dispatch_core.dispatch_user.id"],
+ ),
+ sa.ForeignKeyConstraint(["project_id"], ["project.id"], ondelete="CASCADE"),
+ sa.ForeignKeyConstraint(
+ ["signal_id"],
+ ["signal.id"],
+ ),
+ sa.PrimaryKeyConstraint("id"),
+ sa.UniqueConstraint("name", "project_id"),
+ )
+ op.create_index(
+ "signal_filter_search_vector_idx",
+ "signal_filter",
+ ["search_vector"],
+ unique=False,
+ postgresql_using="gin",
+ )
+ op.drop_constraint("signal_suppression_rule_id_fkey", "signal", type_="foreignkey")
+ op.drop_constraint("signal_duplication_rule_id_fkey", "signal", type_="foreignkey")
+ op.drop_constraint(
+ "signal_instance_duplication_rule_id_fkey", "signal_instance", type_="foreignkey"
+ )
+ op.drop_constraint(
+ "signal_instance_suppression_rule_id_fkey", "signal_instance", type_="foreignkey"
+ )
+ op.drop_table("assoc_duplication_rule_tag_types")
+ op.drop_table("assoc_suppression_rule_tags")
+ op.drop_table("assoc_signal_instance_tags")
+ op.drop_table("duplication_rule")
+ op.drop_table("suppression_rule")
+ op.drop_column("signal", "suppression_rule_id")
+ op.drop_column("signal", "duplication_rule_id")
+ op.add_column("signal_instance", sa.Column("filter_action", sa.String(), nullable=True))
+ op.drop_column("signal_instance", "suppression_rule_id")
+ op.drop_column("signal_instance", "duplication_rule_id")
+ # ### end Alembic commands ###
+
+
+def downgrade():
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.add_column(
+ "signal_instance",
+ sa.Column("duplication_rule_id", sa.INTEGER(), autoincrement=False, nullable=True),
+ )
+ op.add_column(
+ "signal_instance",
+ sa.Column("suppression_rule_id", sa.INTEGER(), autoincrement=False, nullable=True),
+ )
+ op.create_foreign_key(
+ "signal_instance_suppression_rule_id_fkey",
+ "signal_instance",
+ "suppression_rule",
+ ["suppression_rule_id"],
+ ["id"],
+ )
+ op.create_foreign_key(
+ "signal_instance_duplication_rule_id_fkey",
+ "signal_instance",
+ "duplication_rule",
+ ["duplication_rule_id"],
+ ["id"],
+ )
+ op.drop_column("signal_instance", "filter_action")
+ op.add_column(
+ "signal", sa.Column("duplication_rule_id", sa.INTEGER(), autoincrement=False, nullable=True)
+ )
+ op.add_column(
+ "signal", sa.Column("suppression_rule_id", sa.INTEGER(), autoincrement=False, nullable=True)
+ )
+ op.create_foreign_key(
+ "signal_duplication_rule_id_fkey",
+ "signal",
+ "duplication_rule",
+ ["duplication_rule_id"],
+ ["id"],
+ )
+ op.create_foreign_key(
+ "signal_suppression_rule_id_fkey",
+ "signal",
+ "suppression_rule",
+ ["suppression_rule_id"],
+ ["id"],
+ )
+ op.add_column(
+ "plugin_instance",
+ sa.Column(
+ "configuration",
+ postgresql.JSON(astext_type=sa.Text()),
+ autoincrement=False,
+ nullable=True,
+ ),
+ )
+ op.drop_index("entity_search_vector_idx", table_name="entity", postgresql_using="gin")
+ op.create_index("ix_entity_search_vector", "entity", ["search_vector"], unique=False)
+ op.create_table(
+ "service_incident",
+ sa.Column("incident_id", sa.INTEGER(), autoincrement=False, nullable=False),
+ sa.Column("service_id", sa.INTEGER(), autoincrement=False, nullable=False),
+ sa.ForeignKeyConstraint(
+ ["incident_id"], ["incident.id"], name="service_incident_incident_id_fkey"
+ ),
+ sa.ForeignKeyConstraint(
+ ["service_id"], ["service.id"], name="service_incident_service_id_fkey"
+ ),
+ sa.PrimaryKeyConstraint("incident_id", "service_id", name="service_incident_pkey"),
+ )
+ op.create_table(
+ "assoc_suppression_rule_tags",
+ sa.Column("suppression_rule_id", sa.INTEGER(), autoincrement=False, nullable=False),
+ sa.Column("tag_id", sa.INTEGER(), autoincrement=False, nullable=False),
+ sa.ForeignKeyConstraint(
+ ["suppression_rule_id"],
+ ["suppression_rule.id"],
+ name="assoc_suppression_rule_tags_suppression_rule_id_fkey",
+ ondelete="CASCADE",
+ ),
+ sa.ForeignKeyConstraint(
+ ["tag_id"],
+ ["tag.id"],
+ name="assoc_suppression_rule_tags_tag_id_fkey",
+ ondelete="CASCADE",
+ ),
+ sa.PrimaryKeyConstraint(
+ "suppression_rule_id", "tag_id", name="assoc_suppression_rule_tags_pkey"
+ ),
+ )
+ op.create_table(
+ "assoc_duplication_rule_tag_types",
+ sa.Column("duplication_rule_id", sa.INTEGER(), autoincrement=False, nullable=False),
+ sa.Column("tag_type_id", sa.INTEGER(), autoincrement=False, nullable=False),
+ sa.ForeignKeyConstraint(
+ ["duplication_rule_id"],
+ ["duplication_rule.id"],
+ name="assoc_duplication_rule_tag_types_duplication_rule_id_fkey",
+ ondelete="CASCADE",
+ ),
+ sa.ForeignKeyConstraint(
+ ["tag_type_id"],
+ ["tag_type.id"],
+ name="assoc_duplication_rule_tag_types_tag_type_id_fkey",
+ ondelete="CASCADE",
+ ),
+ sa.PrimaryKeyConstraint(
+ "duplication_rule_id", "tag_type_id", name="assoc_duplication_rule_tag_types_pkey"
+ ),
+ )
+ op.create_table(
+ "assoc_signal_instance_tags",
+ sa.Column("signal_instance_id", postgresql.UUID(), autoincrement=False, nullable=False),
+ sa.Column("tag_id", sa.INTEGER(), autoincrement=False, nullable=False),
+ sa.ForeignKeyConstraint(
+ ["signal_instance_id"],
+ ["signal_instance.id"],
+ name="assoc_signal_instance_tags_signal_instance_id_fkey",
+ ondelete="CASCADE",
+ ),
+ sa.ForeignKeyConstraint(
+ ["tag_id"],
+ ["tag.id"],
+ name="assoc_signal_instance_tags_tag_id_fkey",
+ ondelete="CASCADE",
+ ),
+ sa.PrimaryKeyConstraint(
+ "signal_instance_id", "tag_id", name="assoc_signal_instance_tags_pkey"
+ ),
+ )
+ op.create_table(
+ "suppression_rule",
+ sa.Column("evergreen", sa.BOOLEAN(), autoincrement=False, nullable=True),
+ sa.Column("evergreen_owner", sa.VARCHAR(), autoincrement=False, nullable=True),
+ sa.Column("evergreen_reminder_interval", sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column(
+ "evergreen_last_reminder_at", postgresql.TIMESTAMP(), autoincrement=False, nullable=True
+ ),
+ sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False),
+ sa.Column("mode", sa.VARCHAR(), autoincrement=False, nullable=False),
+ sa.Column("expiration", postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
+ sa.Column("project_id", sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.ForeignKeyConstraint(
+ ["project_id"],
+ ["project.id"],
+ name="suppression_rule_project_id_fkey",
+ ondelete="CASCADE",
+ ),
+ sa.PrimaryKeyConstraint("id", name="suppression_rule_pkey"),
+ )
+ op.create_table(
+ "duplication_rule",
+ sa.Column("evergreen", sa.BOOLEAN(), autoincrement=False, nullable=True),
+ sa.Column("evergreen_owner", sa.VARCHAR(), autoincrement=False, nullable=True),
+ sa.Column("evergreen_reminder_interval", sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column(
+ "evergreen_last_reminder_at", postgresql.TIMESTAMP(), autoincrement=False, nullable=True
+ ),
+ sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False),
+ sa.Column("mode", sa.VARCHAR(), autoincrement=False, nullable=False),
+ sa.Column("project_id", sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column("window", sa.INTEGER(), autoincrement=False, nullable=True),
+ sa.ForeignKeyConstraint(
+ ["project_id"],
+ ["project.id"],
+ name="duplication_rule_project_id_fkey",
+ ondelete="CASCADE",
+ ),
+ sa.PrimaryKeyConstraint("id", name="duplication_rule_pkey"),
+ )
+ op.drop_index(
+ "signal_filter_search_vector_idx", table_name="signal_filter", postgresql_using="gin"
+ )
+ op.drop_table("signal_filter")
+ # ### end Alembic commands ###
diff --git a/src/dispatch/database/service.py b/src/dispatch/database/service.py
index ee2204ab4a4e..6af9fb7156e6 100644
--- a/src/dispatch/database/service.py
+++ b/src/dispatch/database/service.py
@@ -33,6 +33,7 @@
from dispatch.participant.models import Participant
from dispatch.plugin.models import Plugin, PluginInstance
from dispatch.search.fulltext.composite_search import CompositeSearch
+from dispatch.signal.models import SignalInstance
from dispatch.task.models import Task
from .core import Base, get_class_by_tablename, get_db, get_model_name_by_tablename
@@ -347,7 +348,8 @@ def apply_filter_specific_joins(model: Base, filter_spec: dict, query: orm.query
(Incident, "Tag"): (Incident.tags, True),
(Incident, "TagType"): (Incident.tags, True),
(Incident, "Term"): (Incident.terms, True),
- (Case, "Tag"): (Case.tags, True),
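+    # join through entities so SignalInstance queries can filter on Entity and EntityType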
+ (SignalInstance, "Entity"): (SignalInstance.entities, True),
+ (SignalInstance, "EntityType"): (SignalInstance.entities, True),
}
filters = build_filters(filter_spec)
filter_models = get_named_models(filters)[0]
@@ -485,6 +487,8 @@ def search_filter_sort_paginate(
raise ValidationError(
[ErrorWrapper(InvalidFilterError(msg=str(e)), loc="filter")], model=BaseModel
) from None
+ except Exception as e:
+ log.exception(e)
if items_per_page == -1:
items_per_page = None
diff --git a/src/dispatch/entity/service.py b/src/dispatch/entity/service.py
index 37865a9c24b6..6072b872a363 100644
--- a/src/dispatch/entity/service.py
+++ b/src/dispatch/entity/service.py
@@ -124,7 +124,7 @@ def update(*, db_session, entity: Entity, entity_in: EntityUpdate) -> Entity:
def delete(*, db_session, entity_id: int):
"""Deletes an existing entity."""
- entity = db_session.query(Entity).filter(Entity.id == entity_id).one_or_none()
+ entity = db_session.query(Entity).filter(Entity.id == entity_id).one()
db_session.delete(entity)
db_session.commit()
diff --git a/src/dispatch/entity_type/service.py b/src/dispatch/entity_type/service.py
index c245c21af361..ab7b5018ddd0 100644
--- a/src/dispatch/entity_type/service.py
+++ b/src/dispatch/entity_type/service.py
@@ -93,6 +93,6 @@ def update(
def delete(*, db_session: Session, entity_type_id: int) -> None:
"""Deletes an entity type."""
- entity_type = db_session.query(EntityType).filter(EntityType.id == entity_type_id)
- db_session.delete(entity_type.one_or_none)
+ entity_type = db_session.query(EntityType).filter(EntityType.id == entity_type_id).one()
+ db_session.delete(entity_type)
db_session.commit()
diff --git a/src/dispatch/enums.py b/src/dispatch/enums.py
index efd7f9c1c0ee..c6c3e623aaa0 100644
--- a/src/dispatch/enums.py
+++ b/src/dispatch/enums.py
@@ -6,13 +6,6 @@ def __str__(self) -> str:
return str.__str__(self)
-class RuleMode(DispatchEnum):
- active = "Active"
- monitor = "Monitor"
- inactive = "Inactive"
- expired = "Expired"
-
-
class Visibility(DispatchEnum):
open = "Open"
restricted = "Restricted"
diff --git a/src/dispatch/main.py b/src/dispatch/main.py
index d020c0c9d080..f9fbfde0dfd5 100644
--- a/src/dispatch/main.py
+++ b/src/dispatch/main.py
@@ -194,6 +194,12 @@ async def dispatch(
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
content={"detail": [{"msg": "Unknown", "loc": ["Unknown"], "type": "Unknown"}]},
)
+ except Exception as e:
+ log.exception(e)
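+        # return a generic 500 so unexpected errors don't leak internal details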
+ response = JSONResponse(
+ status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ content={"detail": [{"msg": "Unknown", "loc": ["Unknown"], "type": "Unknown"}]},
+ )
return response
diff --git a/src/dispatch/models.py b/src/dispatch/models.py
index ea170adb7305..c9a02b00cfc0 100644
--- a/src/dispatch/models.py
+++ b/src/dispatch/models.py
@@ -1,9 +1,9 @@
from typing import Optional
from datetime import datetime, timedelta
-from pydantic import BaseModel
from pydantic.fields import Field
from pydantic.networks import EmailStr
+from pydantic import BaseModel
from pydantic.types import conint, constr, SecretStr
from sqlalchemy import Boolean, Column, DateTime, Integer, String, event, ForeignKey
diff --git a/src/dispatch/plugins/dispatch_slack/service.py b/src/dispatch/plugins/dispatch_slack/service.py
index 10bccf69aae6..90490121ebe3 100644
--- a/src/dispatch/plugins/dispatch_slack/service.py
+++ b/src/dispatch/plugins/dispatch_slack/service.py
@@ -341,10 +341,13 @@ def add_users_to_conversation_thread(
):
"""Adds user to a threaded conversation."""
users = [f"<@{user_id}>" for user_id in user_ids]
- blocks = Message(
- blocks=[Section(text="Looping in individuals to help resolve this case...", fields=users)]
- ).build()["blocks"]
- send_message(client=client, conversation_id=conversation_id, blocks=blocks, ts=thread_id)
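+    # only post the thread message when there is at least one user to mention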
+ if users:
+ blocks = Message(
+ blocks=[
+ Section(text="Looping in individuals to help resolve this case...", fields=users)
+ ]
+ ).build()["blocks"]
+ send_message(client=client, conversation_id=conversation_id, blocks=blocks, ts=thread_id)
def add_users_to_conversation(client: Any, conversation_id: str, user_ids: List[str]):
diff --git a/src/dispatch/signal/flows.py b/src/dispatch/signal/flows.py
index 961f56b6951f..5c4c1be4114b 100644
--- a/src/dispatch/signal/flows.py
+++ b/src/dispatch/signal/flows.py
@@ -5,7 +5,6 @@
from dispatch.case import flows as case_flows
from dispatch.entity import service as entity_service
from dispatch.signal import service as signal_service
-from dispatch.tag import service as tag_service
from dispatch.signal.models import SignalInstanceCreate, RawSignal
@@ -29,12 +28,6 @@ def create_signal_instance(
db_session=db_session, signal_instance_in=signal_instance_in
)
- # associate any known tags with the signal
- tag_service.get_by_name(db_session=db_session, project_id=project.id, name="foo")
-
- signal_instance.signal = signal
- db_session.commit()
-
entities = entity_service.find_entities(
db_session=db_session,
signal_instance=signal_instance,
@@ -42,33 +35,21 @@ def create_signal_instance(
)
signal_instance.entities = entities
- suppressed = signal_service.supress(
- db_session=db_session,
- signal_instance=signal_instance,
- suppression_rule=signal.suppression_rule,
- )
- if suppressed:
- return
-
- duplicate = signal_service.deduplicate(
- db_session=db_session,
- signal_instance=signal_instance,
- duplication_rule=signal.duplication_rule,
- )
- if duplicate:
- return
-
- # create a case if not duplicate or suppressed
- case_in = CaseCreate(
- title=signal.name,
- description=signal.description,
- case_priority=signal.case_priority,
- case_type=signal.case_type,
- )
- case = case_service.create(db_session=db_session, case_in=case_in)
-
- signal_instance.case = case
+ signal_instance.signal = signal
db_session.commit()
- return case_flows.case_new_create_flow(
- db_session=db_session, organization_slug=None, case_id=case.id
- )
+
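+    # apply_filter_actions returns True only when no active filter matched the instance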
+ if signal_service.apply_filter_actions(db_session=db_session, signal_instance=signal_instance):
+ # create a case if not duplicate or snoozed
+ case_in = CaseCreate(
+ title=signal.name,
+ description=signal.description,
+ case_priority=signal.case_priority,
+ case_type=signal.case_type,
+ )
+ case = case_service.create(db_session=db_session, case_in=case_in)
+
+ signal_instance.case = case
+ db_session.commit()
+ return case_flows.case_new_create_flow(
+ db_session=db_session, organization_slug=None, case_id=case.id
+ )
diff --git a/src/dispatch/signal/models.py b/src/dispatch/signal/models.py
index 4862e249691e..7edf764d3852 100644
--- a/src/dispatch/signal/models.py
+++ b/src/dispatch/signal/models.py
@@ -1,36 +1,43 @@
import uuid
from datetime import datetime
-from typing import List, Optional, Dict
-from pydantic import Field
+from typing import Dict, List, Optional
-from sqlalchemy.orm import relationship
+from pydantic import Field
from sqlalchemy import (
+ JSON,
+ Boolean,
Column,
+ DateTime,
+ ForeignKey,
Integer,
+ PrimaryKeyConstraint,
String,
- ForeignKey,
Table,
- PrimaryKeyConstraint,
- DateTime,
- Boolean,
+ UniqueConstraint,
)
-from sqlalchemy.dialects.postgresql import UUID, JSONB
+from sqlalchemy.dialects.postgresql import JSONB, UUID
+from sqlalchemy.orm import relationship
from sqlalchemy_utils import TSVectorType
-from dispatch.database.core import Base
-from dispatch.enums import DispatchEnum
-
-from dispatch.models import DispatchBase, EvergreenMixin, PrimaryKey, TimeStampMixin, ProjectMixin
-
+from dispatch.auth.models import DispatchUser
from dispatch.case.models import CaseRead
-from dispatch.case.type.models import CaseTypeRead, CaseType
from dispatch.case.priority.models import CasePriority, CasePriorityRead
-from dispatch.entity.models import EntityRead
-from dispatch.entity_type.models import EntityTypeRead, EntityTypeCreate
-from dispatch.tag.models import TagRead
-from dispatch.project.models import ProjectRead
+from dispatch.case.type.models import CaseType, CaseTypeRead
from dispatch.data.source.models import SourceBase
-from dispatch.tag_type.models import TagTypeRead
+from dispatch.project.models import ProjectRead
+
+from dispatch.database.core import Base
+from dispatch.entity.models import EntityRead
+from dispatch.entity_type.models import EntityTypeCreate, EntityTypeRead
+from dispatch.enums import DispatchEnum
+from dispatch.models import (
+ DispatchBase,
+ EvergreenMixin,
+ NameStr,
+ PrimaryKey,
+ ProjectMixin,
+ TimeStampMixin,
+)
class RuleMode(DispatchEnum):
@@ -51,6 +58,7 @@ class RuleMode(DispatchEnum):
PrimaryKeyConstraint("signal_instance_id", "tag_id"),
)
+
assoc_signal_tags = Table(
"assoc_signal_tags",
Base.metadata,
@@ -59,6 +67,14 @@ class RuleMode(DispatchEnum):
PrimaryKeyConstraint("signal_id", "tag_id"),
)
+assoc_signal_filters = Table(
+ "assoc_signal_filters",
+ Base.metadata,
+ Column("signal_id", Integer, ForeignKey("signal.id", ondelete="CASCADE")),
+ Column("signal_filter_id", Integer, ForeignKey("signal_filter.id", ondelete="CASCADE")),
+ PrimaryKeyConstraint("signal_id", "signal_filter_id"),
+)
+
assoc_signal_instance_entities = Table(
"assoc_signal_instance_entities",
Base.metadata,
@@ -79,43 +95,17 @@ class RuleMode(DispatchEnum):
PrimaryKeyConstraint("signal_id", "entity_type_id"),
)
-assoc_duplication_tag_types = Table(
- "assoc_duplication_rule_tag_types",
- Base.metadata,
- Column("duplication_rule_id", Integer, ForeignKey("duplication_rule.id", ondelete="CASCADE")),
- Column("tag_type_id", Integer, ForeignKey("tag_type.id", ondelete="CASCADE")),
- PrimaryKeyConstraint("duplication_rule_id", "tag_type_id"),
-)
-
-assoc_suppression_tags = Table(
- "assoc_suppression_rule_tags",
- Base.metadata,
- Column("suppression_rule_id", Integer, ForeignKey("suppression_rule.id", ondelete="CASCADE")),
- Column("tag_id", Integer, ForeignKey("tag.id", ondelete="CASCADE")),
- PrimaryKeyConstraint("suppression_rule_id", "tag_id"),
-)
-
-class SuppressionRule(Base, ProjectMixin, EvergreenMixin):
- id = Column(Integer, primary_key=True)
- mode = Column(String, default=RuleMode.active, nullable=False)
- expiration = Column(DateTime, nullable=True)
+class SignalFilterMode(DispatchEnum):
+ active = "active"
+ monitor = "monitor"
+ inactive = "inactive"
+ expired = "expired"
- # the tags to use for suppression
- tags = relationship("Tag", secondary=assoc_suppression_tags, backref="suppression_rules")
-
-class DuplicationRule(Base, ProjectMixin, EvergreenMixin):
- id = Column(Integer, primary_key=True)
- mode = Column(String, default=RuleMode.active, nullable=False)
-
- # number of seconds for duplication lookback default to 1 hour
- window = Column(Integer, default=(60 * 60))
-
- # the tag types to use for deduplication
- tag_types = relationship(
- "TagType", secondary=assoc_duplication_tag_types, backref="duplication_rules"
- )
+class SignalFilterAction(DispatchEnum):
+ deduplicate = "deduplicate"
+ snooze = "snooze"
class Signal(Base, TimeStampMixin, ProjectMixin):
@@ -133,15 +123,12 @@ class Signal(Base, TimeStampMixin, ProjectMixin):
case_type = relationship("CaseType", backref="signals")
case_priority_id = Column(Integer, ForeignKey(CasePriority.id))
case_priority = relationship("CasePriority", backref="signals")
- duplication_rule_id = Column(Integer, ForeignKey(DuplicationRule.id))
- duplication_rule = relationship("DuplicationRule", backref="signal")
+ filters = relationship("SignalFilter", secondary=assoc_signal_filters, backref="signals")
entity_types = relationship(
"EntityType",
secondary=assoc_signal_entity_types,
backref="signals",
)
- suppression_rule_id = Column(Integer, ForeignKey(SuppressionRule.id))
- suppression_rule = relationship("SuppressionRule", backref="signal")
tags = relationship(
"Tag",
secondary=assoc_signal_tags,
@@ -150,67 +137,69 @@ class Signal(Base, TimeStampMixin, ProjectMixin):
search_vector = Column(TSVectorType("name", regconfig="pg_catalog.simple"))
+class SignalFilter(Base, ProjectMixin, EvergreenMixin, TimeStampMixin):
+ __table_args__ = (UniqueConstraint("name", "project_id"),)
+ id = Column(Integer, primary_key=True)
+ name = Column(String)
+ description = Column(String)
+ expression = Column(JSON, nullable=False, default=[])
+ mode = Column(String, default=SignalFilterMode.active, nullable=False)
+ action = Column(String, nullable=False)
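+    # action controls what happens to matching instances: snoozed instances never open a
+    # case, deduplicated instances are attached to the earliest matching instance's case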
+ expiration = Column(DateTime, nullable=True)
+ window = Column(
+ Integer, default=(60 * 60)
+ ) # number of seconds for duplication lookback default to 1 hour
+
+ creator_id = Column(Integer, ForeignKey(DispatchUser.id))
+ creator = relationship("DispatchUser", backref="signal_filters")
+
+ search_vector = Column(
+ TSVectorType("name", "description", weights={"name": "A", "description": "B"})
+ )
+
+
class SignalInstance(Base, TimeStampMixin, ProjectMixin):
- id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
+ id = Column(UUID(as_uuid=True), primary_key=True, default=lambda: str(uuid.uuid4()))
case = relationship("Case", backref="signal_instances")
case_id = Column(Integer, ForeignKey("case.id", ondelete="CASCADE"))
- duplication_rule = relationship("DuplicationRule", backref="signal_instances")
- duplication_rule_id = Column(Integer, ForeignKey(DuplicationRule.id))
entities = relationship(
"Entity",
secondary=assoc_signal_instance_entities,
backref="signal_instances",
)
fingerprint = Column(String)
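+    # records which SignalFilterAction (if any) was applied to this instance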
+ filter_action = Column(String)
raw = Column(JSONB)
signal = relationship("Signal", backref="instances")
signal_id = Column(Integer, ForeignKey("signal.id"))
- suppression_rule = relationship("SuppressionRule", backref="signal_instances")
- suppression_rule_id = Column(Integer, ForeignKey(SuppressionRule.id))
- tags = relationship(
- "Tag",
- secondary=assoc_signal_instance_tags,
- backref="signal_instances",
- )
# Pydantic models...
-class SignalRuleBase(DispatchBase):
- mode: Optional[RuleMode] = RuleMode.active
-
-
-class DuplicationRuleBase(SignalRuleBase):
+class SignalFilterBase(DispatchBase):
+ mode: Optional[SignalFilterMode] = SignalFilterMode.active
+ expression: List[dict]
+ name: NameStr
+ action: SignalFilterAction = SignalFilterAction.snooze
+ description: Optional[str] = Field(None, nullable=True)
window: Optional[int] = 600
- tag_types: List[TagTypeRead]
-
-
-class DuplicationRuleCreate(DuplicationRuleBase):
- pass
-
-
-class DuplicationRuleUpdate(DuplicationRuleBase):
- id: Optional[PrimaryKey]
+ expiration: Optional[datetime] = Field(None, nullable=True)
-class DuplicationRuleRead(DuplicationRuleBase):
+class SignalFilterUpdate(SignalFilterBase):
id: PrimaryKey
-class SuppressionRuleBase(SignalRuleBase):
- expiration: Optional[datetime]
- tags: List[TagRead]
-
-
-class SuppressionRuleCreate(SuppressionRuleBase):
- pass
+class SignalFilterCreate(SignalFilterBase):
+ project: ProjectRead
-class SuppressionRuleUpdate(SuppressionRuleBase):
- id: Optional[PrimaryKey]
+class SignalFilterRead(SignalFilterBase):
+ id: PrimaryKey
-class SuppressionRuleRead(SuppressionRuleBase):
- id: PrimaryKey
+class SignalFilterPagination(DispatchBase):
+ items: List[SignalFilterRead]
+ total: int
class SignalBase(DispatchBase):
@@ -224,30 +213,23 @@ class SignalBase(DispatchBase):
external_url: Optional[str]
source: Optional[SourceBase]
created_at: Optional[datetime] = None
+ filters: Optional[List[SignalFilterRead]] = []
entity_types: Optional[List[EntityTypeRead]]
- suppression_rule: Optional[SuppressionRuleRead]
- duplication_rule: Optional[DuplicationRuleBase]
project: ProjectRead
class SignalCreate(SignalBase):
- entity_types: Optional[EntityTypeCreate]
- suppression_rule: Optional[SuppressionRuleCreate]
- duplication_rule: Optional[DuplicationRuleCreate]
+    entity_types: Optional[List[EntityTypeCreate]] = []
class SignalUpdate(SignalBase):
id: PrimaryKey
entity_types: Optional[List[EntityTypeRead]] = []
- suppression_rule: Optional[SuppressionRuleUpdate]
- duplication_rule: Optional[DuplicationRuleUpdate]
class SignalRead(SignalBase):
id: PrimaryKey
- entity_types: Optional[List[EntityTypeRead]]
- suppression_rule: Optional[SuppressionRuleRead]
- duplication_rule: Optional[DuplicationRuleRead]
+ entity_types: Optional[List[EntityTypeRead]] = []
class SignalPagination(DispatchBase):
@@ -277,10 +259,8 @@ class SignalInstanceBase(DispatchBase):
project: ProjectRead
case: Optional[CaseRead]
entities: Optional[List[EntityRead]] = []
- tags: Optional[List[TagRead]] = []
raw: RawSignal
- suppression_rule: Optional[SuppressionRuleBase]
- duplication_rule: Optional[DuplicationRuleBase]
+    filter_action: Optional[SignalFilterAction] = None
created_at: Optional[datetime] = None
@@ -290,7 +270,7 @@ class SignalInstanceCreate(SignalInstanceBase):
class SignalInstanceRead(SignalInstanceBase):
id: uuid.UUID
- fingerprint: str = None
+ fingerprint: Optional[str]
signal: SignalRead
diff --git a/src/dispatch/signal/service.py b/src/dispatch/signal/service.py
index 935d84a67bba..6295340e4816 100644
--- a/src/dispatch/signal/service.py
+++ b/src/dispatch/signal/service.py
@@ -1,103 +1,98 @@
import json
+from datetime import datetime, timedelta, timezone
import hashlib
from typing import Optional
-from datetime import datetime, timedelta, timezone
-from dispatch.enums import RuleMode
-from dispatch.project import service as project_service
-from dispatch.tag import service as tag_service
-from dispatch.tag_type import service as tag_type_service
-from dispatch.case.type import service as case_type_service
+from sqlalchemy import asc
+
+from dispatch.auth.models import DispatchUser
from dispatch.case.priority import service as case_priority_service
-from dispatch.entity_type import service as entity_type_service
+from dispatch.case.type import service as case_type_service
+from dispatch.database.service import apply_filters, apply_filter_specific_joins
+from dispatch.project import service as project_service
from .models import (
Signal,
SignalCreate,
- SignalUpdate,
+ SignalFilter,
+ SignalFilterAction,
+ SignalFilterCreate,
+ SignalFilterMode,
+ SignalFilterUpdate,
SignalInstance,
- SuppressionRule,
- DuplicationRule,
SignalInstanceCreate,
- DuplicationRuleCreate,
- DuplicationRuleUpdate,
- SuppressionRuleCreate,
- SuppressionRuleUpdate,
+ SignalUpdate,
)
-def create_duplication_rule(
- *, db_session, duplication_rule_in: DuplicationRuleCreate
-) -> DuplicationRule:
- """Creates a new duplication rule."""
- rule = DuplicationRule(**duplication_rule_in.dict(exclude={"tag_types"}))
-
- tag_types = []
- for t in duplication_rule_in.tag_types:
- tag_types.append(tag_type_service.get(db_session=db_session, tag_type_id=t.id))
+def create_signal_filter(
+ *, db_session, creator: DispatchUser, signal_filter_in: SignalFilterCreate
+) -> SignalFilter:
+ """Creates a new signal filter."""
+ project = project_service.get_by_name_or_raise(
+ db_session=db_session, project_in=signal_filter_in.project
+ )
- rule.tag_types = tag_types
- db_session.add(rule)
+ signal_filter = SignalFilter(
+ **signal_filter_in.dict(
+ exclude={
+ "project",
+ }
+ ),
+ creator=creator,
+ project=project,
+ )
+ db_session.add(signal_filter)
db_session.commit()
- return rule
+ return signal_filter
+
+def update_signal_filter(
+ *, db_session, signal_filter: SignalFilter, signal_filter_in: SignalFilterUpdate
+) -> SignalFilter:
+ """Updates an existing signal filter."""
-def update_duplication_rule(
- *, db_session, duplication_rule_in: DuplicationRuleUpdate
-) -> DuplicationRule:
- """Updates an existing duplication rule."""
- rule = (
- db_session.query(DuplicationRule).filter(DuplicationRule.id == duplication_rule_in.id).one()
+ signal_filter_data = signal_filter.dict()
+    update_data = signal_filter_in.dict(skip_defaults=True)
- tag_types = []
- for t in duplication_rule_in.tag_types:
- tag_types.append(tag_type_service.get(db_session=db_session, tag_type_id=t.id))
+ for field in signal_filter_data:
+ if field in update_data:
+ setattr(signal_filter, field, update_data[field])
- rule.tag_types = tag_types
- rule.window = duplication_rule_in.window
- db_session.add(rule)
+ db_session.add(signal_filter)
db_session.commit()
- return rule
+ return signal_filter
-def create_suppression_rule(
- *, db_session, suppression_rule_in: SuppressionRuleCreate
-) -> SuppressionRule:
- """Creates a new supression rule."""
- rule = SuppressionRule(**suppression_rule_in.dict(exclude={"tags"}))
-
- tags = []
- for t in suppression_rule_in.tags:
- tags.append(tag_service.get_or_create(db_session=db_session, tag_in=t))
-
- rule.tags = tags
- db_session.add(rule)
+def delete_signal_filter(*, db_session, signal_filter_id: int) -> int:
+ """Deletes an existing signal filter."""
+ signal_filter = db_session.query(SignalFilter).filter(SignalFilter.id == signal_filter_id).one()
+ db_session.delete(signal_filter)
db_session.commit()
- return rule
+ return signal_filter_id
-def update_suppression_rule(
- *, db_session, suppression_rule_in: SuppressionRuleUpdate
-) -> SuppressionRule:
- """Updates an existing supression rule."""
- rule = (
- db_session.query(SuppressionRule).filter(SuppressionRule.id == suppression_rule_in.id).one()
+def get_signal_filter_by_name(*, db_session, project_id: int, name: str) -> Optional[SignalFilter]:
+ """Gets a signal filter by it's name."""
+ return (
+ db_session.query(SignalFilter)
+ .filter(SignalFilter.project_id == project_id)
+ .filter(SignalFilter.name == name)
+ .first()
)
- tags = []
- for t in suppression_rule_in.tags:
- tags.append(tag_service.get_or_create(db_session=db_session, tag_in=t))
- rule.tags = tags
- db_session.add(rule)
- db_session.commit()
- return rule
+def get_signal_filter(*, db_session, signal_filter_id: int) -> SignalFilter:
+ """Gets a single signal filter."""
+ return db_session.query(SignalFilter).filter(SignalFilter.id == signal_filter_id).one_or_none()
def get(*, db_session, signal_id: int) -> Optional[Signal]:
"""Gets a signal by id."""
- return db_session.query(Signal).filter(Signal.id == signal_id).one()
+ return db_session.query(Signal).filter(Signal.id == signal_id).one_or_none()
def get_by_variant_or_external_id(
@@ -130,24 +125,15 @@ def create(*, db_session, signal_in: SignalCreate) -> Signal:
"case_type",
"case_priority",
"source",
- "suppression_rule",
- "duplication_rule",
+ "filters",
}
),
project=project,
)
- if signal_in.duplication_rule:
- duplication_rule = create_duplication_rule(
- db_session=db_session, duplication_rule_in=signal_in.duplication_rule
- )
- signal.duplication_rule = duplication_rule
-
- if signal_in.suppression_rule:
- suppression_rule = create_suppression_rule(
- db_session=db_session, suppression_rule_in=signal.suppression_rule
- )
- signal.suppression_rule = suppression_rule
+ for f in signal_in.filters:
+        signal_filter = get_signal_filter_by_name(
+            db_session=db_session, project_id=project.id, name=f.name
+        )
+ signal.filters.append(signal_filter)
if signal_in.case_priority:
case_priority = case_priority_service.get_by_name_or_default(
@@ -169,40 +155,26 @@ def create(*, db_session, signal_in: SignalCreate) -> Signal:
def update(*, db_session, signal: Signal, signal_in: SignalUpdate) -> Signal:
"""Creates a new signal."""
signal_data = signal.dict()
- update_data = signal_in.dict(skip_defaults=True)
+ update_data = signal_in.dict(
+ skip_defaults=True,
+ exclude={
+ "project",
+ "case_type",
+ "case_priority",
+ "source",
+ "filters",
+ },
+ )
for field in signal_data:
if field in update_data:
setattr(signal, field, update_data[field])
- entity_types = []
- for entity_type in signal_in.entity_types:
- entity_types.append(
- entity_type_service.get_or_create(db_session=db_session, entity_type_in=entity_type)
+ for f in signal_in.filters:
+ signal_filter = get_signal_filter_by_name(
+ db_session=db_session, project_id=signal.project.id, name=f.name
)
- signal.entity_types = entity_types
-
- if signal_in.duplication_rule:
- if signal_in.duplication_rule.id:
- update_duplication_rule(
- db_session=db_session, duplication_rule_in=signal_in.duplication_rule
- )
- else:
- duplication_rule = create_duplication_rule(
- db_session=db_session, duplication_rule_in=signal_in.duplication_rule
- )
- signal.duplication_rule = duplication_rule
-
- if signal_in.suppression_rule:
- if signal_in.suppression_rule.id:
- update_suppression_rule(
- db_session=db_session, suppression_rule_in=signal_in.suppression_rule
- )
- else:
- suppression_rule = create_suppression_rule(
- db_session=db_session, suppression_rule_in=signal_in.suppression_rule
- )
- signal.suppression_rule = suppression_rule
+ signal.filters.append(signal_filter)
if signal_in.case_priority:
case_priority = case_priority_service.get_by_name_or_default(
@@ -238,100 +210,52 @@ def create_instance(*, db_session, signal_instance_in: SignalInstanceCreate) ->
# we round trip the raw data to json-ify date strings
signal_instance = SignalInstance(
- **signal_instance_in.dict(exclude={"project", "tags", "raw"}),
+ **signal_instance_in.dict(exclude={"case", "signal", "project", "entities", "raw"}),
raw=json.loads(signal_instance_in.raw.json()),
project=project,
)
- tags = []
- for t in signal_instance_in.tags:
- tags.append(tag_service.get_or_create(db_session=db_session, tag_in=t))
-
- signal_instance.tags = tags
-
db_session.add(signal_instance)
db_session.commit()
return signal_instance
-def create_instance_fingerprint(duplication_rule, signal_instance: SignalInstance) -> str:
- """Given a list of tag_types and tags creates a hash of their values."""
- fingerprint = hashlib.sha1(str(signal_instance.raw).encode("utf-8")).hexdigest()
-
- # use tags if we have them
- if duplication_rule:
- if signal_instance.tags:
- tag_type_names = [t.name for t in duplication_rule.tag_types]
- hash_values = []
- for tag in signal_instance.tags:
- if tag.tag_type.name in tag_type_names:
- hash_values.append(tag.tag_type.name)
- fingerprint = hashlib.sha1("-".join(sorted(hash_values)).encode("utf-8")).hexdigest()
-
- return fingerprint
-
-
-def deduplicate(
- *, db_session, signal_instance: SignalInstance, duplication_rule: DuplicationRule
-) -> bool:
- """Find any matching duplication rules and match signals."""
- duplicate = False
-
- # always fingerprint
- fingerprint = create_instance_fingerprint(duplication_rule, signal_instance)
- signal_instance.fingerprint = fingerprint
- db_session.commit()
-
- if not duplication_rule:
- return duplicate
-
- if duplication_rule.mode != RuleMode.active:
- return duplicate
-
- window = datetime.now(timezone.utc) - timedelta(seconds=duplication_rule.window)
- fingerprint = create_instance_fingerprint(duplication_rule.tag_types, signal_instance)
+def apply_filter_actions(*, db_session, signal_instance: SignalInstance):
+ """Applies any matching filter actions associated with this instance."""
- instances = (
- db_session.query(SignalInstance)
- .filter(Signal.id == signal_instance.signal.id)
- .filter(SignalInstance.id != signal_instance.id)
- .filter(SignalInstance.created_at >= window)
- .filter(SignalInstance.fingerprint == fingerprint)
- .all()
- )
-
- if instances:
- duplicate = True
- # TODO find the earliest created instance
- signal_instance.case_id = instances[0].case_id
- signal_instance.duplication_rule_id = duplication_rule.id
-
- db_session.commit()
- return duplicate
-
-
-def supress(
- *, db_session, signal_instance: SignalInstance, suppression_rule: SuppressionRule
-) -> bool:
- """Find any matching suppression rules and match instances."""
- supressed = False
-
- if not suppression_rule:
- return supressed
+ for f in signal_instance.signal.filters:
+ if f.mode != SignalFilterMode.active:
+ continue
- if suppression_rule.mode != RuleMode.active:
- return supressed
-
- if suppression_rule.expiration:
- if suppression_rule.expiration <= datetime.now():
- return supressed
-
- rule_tag_ids = sorted([t.id for t in suppression_rule.tags])
- signal_tag_ids = sorted([t.id for t in signal_instance.tags])
-
- if rule_tag_ids == signal_tag_ids:
- supressed = True
- signal_instance.suppression_rule_id = suppression_rule.id
-
- db_session.commit()
- return supressed
+ query = db_session.query(SignalInstance).filter(
+ SignalInstance.signal_id == signal_instance.signal_id
+ )
+ query = apply_filter_specific_joins(SignalInstance, f.expression, query)
+ query = apply_filters(query, f.expression)
+
+ # order matters, check for snooze before deduplication
+        # we check to see if the current instance matches its signal's snooze filter
+ if f.action == SignalFilterAction.snooze:
+            expiration = f.expiration.replace(tzinfo=timezone.utc) if f.expiration else None
+            if expiration and expiration <= datetime.now(timezone.utc):
+ continue
+
+ instances = query.filter(SignalInstance.id == signal_instance.id).all()
+
+ if instances:
+ signal_instance.filter_action = SignalFilterAction.snooze
+ return
+
+ elif f.action == SignalFilterAction.deduplicate:
+            window = datetime.now(timezone.utc) - timedelta(seconds=f.window)
+            query = query.filter(SignalInstance.created_at >= window)
+            # don't let the instance deduplicate against itself
+            query = query.filter(SignalInstance.id != signal_instance.id)
+
+ # get the earliest instance
+ query = query.order_by(asc(SignalInstance.created_at))
+ instances = query.all()
+
+ if instances:
+ # associate with existing case
+ signal_instance.case_id = instances[0].case_id
+ signal_instance.filter_action = SignalFilterAction.deduplicate
+ return
+ return True
diff --git a/src/dispatch/signal/views.py b/src/dispatch/signal/views.py
index 74f7b05752d9..a6c19b3b97fd 100644
--- a/src/dispatch/signal/views.py
+++ b/src/dispatch/signal/views.py
@@ -1,24 +1,39 @@
from fastapi import APIRouter, Depends, HTTPException, status
from pydantic.error_wrappers import ErrorWrapper, ValidationError
-
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import Session
+from dispatch.auth.models import DispatchUser
+from dispatch.auth.service import get_current_user
from dispatch.database.core import get_db
-from dispatch.exceptions import ExistsError
from dispatch.database.service import common_parameters, search_filter_sort_paginate
+from dispatch.exceptions import ExistsError
from dispatch.models import PrimaryKey
from .models import (
SignalCreate,
- SignalUpdate,
- SignalPagination,
- SignalRead,
- SignalInstanceRead,
+ SignalFilterCreate,
+ SignalFilterPagination,
+ SignalFilterRead,
+ SignalFilterUpdate,
SignalInstanceCreate,
SignalInstancePagination,
+ SignalInstanceRead,
+ SignalPagination,
+ SignalRead,
+ SignalUpdate,
+)
+from .service import (
+ create,
+ create_instance,
+ create_signal_filter,
+ delete,
+ delete_signal_filter,
+ get,
+ get_signal_filter,
+ update,
+ update_signal_filter,
)
-from .service import create, update, get, create_instance, delete
router = APIRouter()
@@ -29,6 +44,87 @@ def get_signal_instances(*, common: dict = Depends(common_parameters)):
return search_filter_sort_paginate(model="SignalInstance", **common)
+@router.post("/{signal_id}/instances", response_model=SignalInstanceRead)
+def create_signal_instance(
+ *, db_session: Session = Depends(get_db), signal_instance_in: SignalInstanceCreate
+):
+ """Create a new signal instance."""
+ return create_instance(db_session=db_session, signal_instance_in=signal_instance_in)
+
+
+@router.get("/filters", response_model=SignalFilterPagination)
+def get_signal_filters(*, common: dict = Depends(common_parameters)):
+ """Get all signal filters."""
+ return search_filter_sort_paginate(model="SignalFilter", **common)
+
+
+@router.post("/filters", response_model=SignalFilterRead)
+def create_filter(
+ *,
+ db_session: Session = Depends(get_db),
+ signal_filter_in: SignalFilterCreate,
+ current_user: DispatchUser = Depends(get_current_user),
+):
+ """Create a new signal."""
+ try:
+ return create_signal_filter(
+ db_session=db_session, creator=current_user, signal_filter_in=signal_filter_in
+ )
+ except IntegrityError:
+ raise ValidationError(
+ [
+ ErrorWrapper(
+ ExistsError(msg="A signal filter with this name already exists."), loc="name"
+ )
+ ],
+ model=SignalFilterRead,
+ ) from None
+
+
+@router.put("/filters/{signal_filter_id}", response_model=SignalRead)
+def update_filter(
+ *,
+ db_session: Session = Depends(get_db),
+ signal_filter_id: PrimaryKey,
+ signal_filter_in: SignalFilterUpdate,
+):
+ """Updates an existing signal filter."""
+    signal_filter = get_signal_filter(db_session=db_session, signal_filter_id=signal_filter_id)
+ if not signal_filter:
+ raise HTTPException(
+ status_code=status.HTTP_404_NOT_FOUND,
+ detail=[{"msg": "A signal filter with this id does not exist."}],
+ )
+
+ try:
+ signal_filter = update_signal_filter(
+ db_session=db_session, signal_filter=signal_filter, signal_filter_in=signal_filter_in
+ )
+ except IntegrityError:
+ raise ValidationError(
+ [
+ ErrorWrapper(
+ ExistsError(msg="A signal filter with this name already exists."), loc="name"
+ )
+ ],
+ model=SignalFilterUpdate,
+ ) from None
+
+ return signal_filter
+
+
+@router.delete("/filters/{signal_filter_id}", response_model=None)
+def delete_filter(*, db_session: Session = Depends(get_db), signal_filter_id: PrimaryKey):
+ """Deletes a signal filter."""
+    signal_filter = get_signal_filter(db_session=db_session, signal_filter_id=signal_filter_id)
+ if not signal_filter:
+ raise HTTPException(
+ status_code=status.HTTP_404_NOT_FOUND,
+ detail=[{"msg": "A signal filter with this id does not exist."}],
+ )
+ delete_signal_filter(db_session=db_session, signal_filter_id=signal_filter_id)
+
+
@router.get("", response_model=SignalPagination)
def get_signals(*, common: dict = Depends(common_parameters)):
"""Get all signal definitions."""
@@ -37,7 +133,7 @@ def get_signals(*, common: dict = Depends(common_parameters)):
@router.get("/{signal_id}", response_model=SignalRead)
def get_signal(*, db_session: Session = Depends(get_db), signal_id: PrimaryKey):
- """Update a signal."""
+ """Get a signal by it's ID."""
signal = get(db_session=db_session, signal_id=signal_id)
if not signal:
raise HTTPException(
@@ -86,11 +182,3 @@ def delete_signal(*, db_session: Session = Depends(get_db), signal_id: PrimaryKe
detail=[{"msg": "A signal with this id does not exist."}],
)
delete(db_session=db_session, signal_id=signal_id)
-
-
-@router.post("/{signal_id}/instances", response_model=SignalInstanceRead)
-def create_signal_instance(
- *, db_session: Session = Depends(get_db), signal_instance_in: SignalInstanceCreate
-):
- """Create a new signal instance."""
- return create_instance(db_session=db_session, signal_instance_in=signal_instance_in)
diff --git a/src/dispatch/static/dispatch/src/components/DateTimePicker.vue b/src/dispatch/static/dispatch/src/components/DateTimePicker.vue
new file mode 100644
index 000000000000..50853ce6b496
--- /dev/null
+++ b/src/dispatch/static/dispatch/src/components/DateTimePicker.vue
@@ -0,0 +1,141 @@
+
+
+
+
+
+
+ event
+
+
+
+
+ access_time
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/src/dispatch/static/dispatch/src/entity/EntityFilterCombobox.vue b/src/dispatch/static/dispatch/src/entity/EntityFilterCombobox.vue
new file mode 100644
index 000000000000..5bbb7c7be2ad
--- /dev/null
+++ b/src/dispatch/static/dispatch/src/entity/EntityFilterCombobox.vue
@@ -0,0 +1,156 @@
+
+
+
+
+
+
+ No entities matching "
+ {{ search }}"
+
+
+
+
+
+
+ {{ item.entity_type.name }} / {{ item.value }}
+
+
+
+
+ {{ data.item.name }}
+
+ {{ data.item.entity_type.name }}
+
+
+
+
+
+
+ Load More
+
+
+
+
+
+
+
diff --git a/src/dispatch/static/dispatch/src/signal/DuplicationRule.vue b/src/dispatch/static/dispatch/src/signal/DuplicationRule.vue
deleted file mode 100644
index 6f73d7c2da96..000000000000
--- a/src/dispatch/static/dispatch/src/signal/DuplicationRule.vue
+++ /dev/null
@@ -1,95 +0,0 @@
-
-
-
- Duplication Configuration
-
-
-
- help_outline
-
- Dispatch will attempt to deduplicate signals that match the given criteria.
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/dispatch/static/dispatch/src/signal/NewEditSheet.vue b/src/dispatch/static/dispatch/src/signal/NewEditDialog.vue
similarity index 88%
rename from src/dispatch/static/dispatch/src/signal/NewEditSheet.vue
rename to src/dispatch/static/dispatch/src/signal/NewEditDialog.vue
index 973d0fc0f4f8..a0f635ce54e7 100644
--- a/src/dispatch/static/dispatch/src/signal/NewEditSheet.vue
+++ b/src/dispatch/static/dispatch/src/signal/NewEditDialog.vue
@@ -143,13 +143,22 @@
-
-
-
-
-
-
-
+
+
+ Filter(s)
+
+
+
+ help_outline
+
+              Defines a signal filter allowing you to take either a "Snooze" or "Deduplication"
+              action for any signal instance matching the filter.
+
+
+
+
+
+
@@ -165,37 +174,21 @@ import { required } from "vee-validate/dist/rules"
import CaseTypeSelect from "@/case/type/CaseTypeSelect.vue"
import CasePrioritySelect from "@/case/priority/CasePrioritySelect.vue"
-import DuplicationRuleCard from "@/signal/DuplicationRule.vue"
-import EntityRuleCard from "@/signal/EntityRule.vue"
-import SuppressionRule from "./SuppressionRule.vue"
+import SignalFilterCombobox from "@/signal/filter/SignalFilterCombobox.vue"
extend("required", {
...required,
})
export default {
- name: "SignalNewEditSheet",
+ name: "SignalNewEditDialog",
components: {
ValidationObserver,
ValidationProvider,
CaseTypeSelect,
CasePrioritySelect,
- DuplicationRuleCard,
- EntityRuleCard,
- SuppressionRule,
- },
-
- data() {
- return {
- windows: [
- { label: "10min", value: 600 },
- { label: "30min", value: 1800 },
- { label: "1hr", value: 3600 },
- { label: "8hr", value: 28800 },
- { label: "24hr", value: 86400 },
- ],
- }
+ SignalFilterCombobox,
},
computed: {
@@ -210,9 +203,8 @@ export default {
"selected.external_url",
"selected.case_type",
"selected.case_priority",
+ "selected.filters",
"selected.entity_types",
- "selected.duplication_rule",
- "selected.suppression_rule",
"selected.source",
"selected.project",
"selected.loading",
diff --git a/src/dispatch/static/dispatch/src/signal/SignalInstanceTab.vue b/src/dispatch/static/dispatch/src/signal/SignalInstanceTab.vue
index fe3e7c74225b..91215e63da22 100644
--- a/src/dispatch/static/dispatch/src/signal/SignalInstanceTab.vue
+++ b/src/dispatch/static/dispatch/src/signal/SignalInstanceTab.vue
@@ -9,11 +9,12 @@
-
-
-
-
-
+
+
+
+ {{ entity.entity_type.name }}
+
+
@@ -58,9 +59,7 @@ export default {
menu: false,
headers: [
{ text: "Signal", value: "signal", sortable: false },
- { text: "Tags", value: "tags", sortable: false },
- { text: "Duplicate", value: "duplication_rule", sortable: false },
- { text: "Supressed", value: "suppression_rule", sortable: false },
+ { text: "Entities", value: "entities", sortable: false },
{ text: "Created At", value: "created_at" },
{ text: "", value: "data-table-actions", sortable: false, align: "end" },
],
@@ -72,7 +71,7 @@ export default {
if (this.inputSignalInstances.length) {
return this.inputSignalInstances
}
- return this.signal_instances;
+ return this.signal_instances
},
},
}
diff --git a/src/dispatch/static/dispatch/src/signal/SuppressionRule.vue b/src/dispatch/static/dispatch/src/signal/SuppressionRule.vue
deleted file mode 100644
index 85267413ad35..000000000000
--- a/src/dispatch/static/dispatch/src/signal/SuppressionRule.vue
+++ /dev/null
@@ -1,80 +0,0 @@
-
-
-
- Suppression Configuration
-
-
-
- help_outline
-
- Dispatch will attempt to suppress signals that match the given criteria.
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/dispatch/static/dispatch/src/signal/Table.vue b/src/dispatch/static/dispatch/src/signal/Table.vue
index ceac756e2c7b..61447661717f 100644
--- a/src/dispatch/static/dispatch/src/signal/Table.vue
+++ b/src/dispatch/static/dispatch/src/signal/Table.vue
@@ -6,7 +6,7 @@
Signal definitions determine how a signal is processed. Allowing you to map case types,
- supression and duplication rules for each signal.
+      snooze, and deduplication filters for each signal.
@@ -97,8 +97,8 @@
diff --git a/src/dispatch/static/dispatch/src/signal/filter/SignalFilterCombobox.vue b/src/dispatch/static/dispatch/src/signal/filter/SignalFilterCombobox.vue
new file mode 100644
index 000000000000..3401e9983d63
--- /dev/null
+++ b/src/dispatch/static/dispatch/src/signal/filter/SignalFilterCombobox.vue
@@ -0,0 +1,197 @@
+
+
+
+
+
+
+
+
+ {{ item.name }}
+
+
+
+
+
+
+ {{ item.name | initials }}
+
+
+ {{ item.name }}
+ {{ item.type }}
+
+
+
+ mdi-close-circle
+
+
+
+
+
+
+
+ mdi-text-box
+
+ {{ item.description }}
+
+
+
+ mdi-code-json
+
+
+ {{ item.expression }}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ No filters matching "
+ {{ search }}
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/src/dispatch/static/dispatch/src/signal/filter/SignalFilterCreateDialog.vue b/src/dispatch/static/dispatch/src/signal/filter/SignalFilterCreateDialog.vue
new file mode 100644
index 000000000000..dd4ac13caecb
--- /dev/null
+++ b/src/dispatch/static/dispatch/src/signal/filter/SignalFilterCreateDialog.vue
@@ -0,0 +1,296 @@
+
+
+
+
+ add
+
+
+
+
+ Create Signal Filter
+
+
+
+
+ Filter
+
+
+ Preview
+
+
+ Save
+
+
+
+
+
+
+ Define the entity and entity types that will be used to match with existing signal
+ instances.
+
+ Basic
+ Advanced
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Cancel
+ Continue
+
+
+
+
+
+
+
+ Examples matching your filter:
+
+
+
+
+
+ mdi-fingerprint
+
+ {{ item.fingerprint }}
+
+
+
+
+
+
+ Cancel
+ Continue
+
+
+
+
+
+
+
+ Provide a name and description for your filter.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Cancel
+
+ Save
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/src/dispatch/static/dispatch/src/signal/filter/api.js b/src/dispatch/static/dispatch/src/signal/filter/api.js
new file mode 100644
index 000000000000..71e229697cf4
--- /dev/null
+++ b/src/dispatch/static/dispatch/src/signal/filter/api.js
@@ -0,0 +1,27 @@
+import API from "@/api"
+
+const resource = "/signals/filters"
+
+export default {
+ getAll(options) {
+ return API.get(`${resource}`, {
+ params: { ...options },
+ })
+ },
+
+ get(signalFilterId) {
+ return API.get(`${resource}/${signalFilterId}`)
+ },
+
+ create(payload) {
+ return API.post(`${resource}`, payload)
+ },
+
+ update(signalFilterId, payload) {
+ return API.put(`${resource}/${signalFilterId}`, payload)
+ },
+
+ delete(signalFilterId) {
+ return API.delete(`${resource}/${signalFilterId}`)
+ },
+}
diff --git a/src/dispatch/static/dispatch/src/signal/filter/store.js b/src/dispatch/static/dispatch/src/signal/filter/store.js
new file mode 100644
index 000000000000..4aa6066deebe
--- /dev/null
+++ b/src/dispatch/static/dispatch/src/signal/filter/store.js
@@ -0,0 +1,130 @@
+import { getField, updateField } from "vuex-map-fields"
+import SignalFilterApi from "@/signal/filter/api"
+
+const getDefaultSelectedState = () => {
+ return {
+ expression: null,
+ description: null,
+ name: null,
+ action: "snooze",
+ expiration: null,
+ window: null,
+ }
+}
+
+const state = {
+ selected: {
+ ...getDefaultSelectedState(),
+ },
+ dialogs: {
+ showCreateEdit: false,
+ },
+ table: {
+ rows: {
+ items: [],
+ total: null,
+ },
+ options: {
+ filters: {
+ created_at: {
+ start: null,
+ end: null,
+ },
+ },
+ q: "",
+ page: 1,
+ itemsPerPage: 10,
+ sortBy: ["created_at"],
+ descending: [true],
+ },
+ loading: false,
+ },
+}
+
+const getters = {
+ getField,
+ tableOptions({ state }) {
+ // format our filters
+ return state.table.options
+ },
+}
+
+const actions = {
+ save({ commit, state }) {
+ commit("SET_SELECTED_LOADING", true)
+ if (!state.selected.id) {
+ return SignalFilterApi.create(state.selected)
+ .then((resp) => {
+ commit(
+ "notification_backend/addBeNotification",
+ { text: "Signal filter created successfully.", type: "success" },
+ { root: true }
+ )
+ commit("SET_SELECTED_LOADING", false)
+ commit("RESET_SELECTED")
+ commit("SET_DIALOG_CREATE_EDIT", false)
+ return resp.data
+ })
+ .catch(() => {
+ commit("SET_SELECTED_LOADING", false)
+ })
+ } else {
+ return SignalFilterApi.update(state.selected.id, state.selected)
+ .then(() => {
+ commit(
+ "notification_backend/addBeNotification",
+ { text: "Signal filter updated successfully.", type: "success" },
+ { root: true }
+ )
+ commit("SET_SELECTED_LOADING", false)
+ })
+ .catch(() => {
+ commit("SET_SELECTED_LOADING", false)
+ })
+ }
+ },
+ createEditShow({ commit }, signal) {
+ if (signal) {
+ commit("SET_SELECTED", signal)
+ }
+ commit("SET_DIALOG_CREATE_EDIT", true)
+ },
+ closeCreateEditDialog({ commit }) {
+ commit("SET_DIALOG_CREATE_EDIT", false)
+ commit("RESET_SELECTED")
+ },
+}
+
+const mutations = {
+ updateField,
+ SET_SELECTED(state, value) {
+ state.selected = Object.assign(state.selected, value)
+ },
+ SET_SELECTED_LOADING(state, value) {
+ state.selected.loading = value
+ },
+ SET_TABLE_LOADING(state, value) {
+ state.table.loading = value
+ },
+ SET_TABLE_ROWS(state, value) {
+ state.table.rows = value
+ },
+ SET_DIALOG_CREATE_EDIT(state, value) {
+ state.dialogs.showCreateEdit = value
+ },
+
+ RESET_SELECTED(state) {
+ // do not reset project
+ let project = state.selected.project
+ state.selected = { ...getDefaultSelectedState() }
+ state.selected.project = project
+ },
+}
+
+export default {
+ namespaced: true,
+ state,
+ getters,
+ actions,
+ mutations,
+}
diff --git a/src/dispatch/static/dispatch/src/signal/store.js b/src/dispatch/static/dispatch/src/signal/store.js
index ed5988f5ef62..31b5b3a30235 100644
--- a/src/dispatch/static/dispatch/src/signal/store.js
+++ b/src/dispatch/static/dispatch/src/signal/store.js
@@ -15,9 +15,8 @@ const getDefaultSelectedState = () => {
external_url: null,
case_type: null,
case_priority: null,
+ filters: null,
entity_types: null,
- duplication_rule: null,
- suppression_rule: null,
source: null,
project: null,
created_at: null,
@@ -34,26 +33,6 @@ const state = {
showRawSignalDialog: false,
showRemove: false,
},
- instanceTable: {
- rows: {
- items: [],
- total: null,
- },
- options: {
- filters: {
- created_at: {
- start: null,
- end: null,
- },
- },
- q: "",
- page: 1,
- itemsPerPage: 10,
- sortBy: ["created_at"],
- descending: [true],
- },
- loading: false,
- },
table: {
rows: {
items: [],
diff --git a/src/dispatch/static/dispatch/src/store.js b/src/dispatch/static/dispatch/src/store.js
index 762528f2d943..42cc2210a560 100644
--- a/src/dispatch/static/dispatch/src/store.js
+++ b/src/dispatch/static/dispatch/src/store.js
@@ -29,6 +29,7 @@ import runbook from "@/document/runbook/store"
import search from "@/search/store"
import service from "@/service/store"
import signal from "@/signal/store"
+import signalFilter from "@/signal/filter/store"
import source from "@/data/source/store"
import sourceDataFormat from "@/data/source/dataFormat/store"
import sourceEnvironment from "@/data/source/environment/store"
@@ -86,6 +87,7 @@ export default new Vuex.Store({
sourceTransport,
sourceType,
signal,
+ signalFilter,
tag,
tag_type,
task,
diff --git a/tests/conftest.py b/tests/conftest.py
index 5a728a56e893..fdf7c131025a 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,22 +1,19 @@
import pytest
-
from sqlalchemy_utils import drop_database
-from starlette.testclient import TestClient
from starlette.config import environ
+from starlette.testclient import TestClient
# set test config
environ["DATABASE_CREDENTIALS"] = "postgres:dispatch"
environ["DATABASE_HOSTNAME"] = "localhost"
environ["DATABASE_NAME"] = "dispatch-test"
environ["DISPATCH_ENCRYPTION_KEY"] = "test123"
-environ["DISPATCH_HELP_SLACK_CHANNEL"] = "help-me"
environ["DISPATCH_JWT_SECRET"] = "test123"
environ["DISPATCH_UI_URL"] = "https://example.com"
environ["ENV"] = "pytest"
environ["JWKS_URL"] = "example.com"
environ["METRIC_PROVIDERS"] = "" # TODO move this to the default
environ["SECRET_PROVIDER"] = ""
-environ["SLACK_APP_USER_SLUG"] = "XXX"
environ["STATIC_DIR"] = "" # we don't need static files for tests
from dispatch import config
@@ -25,7 +22,6 @@
from .database import Session
from .factories import (
- DispatchUserFactory,
CaseFactory,
CasePriorityFactory,
CaseSeverityFactory,
@@ -33,9 +29,11 @@
ConferenceFactory,
ConversationFactory,
DefinitionFactory,
+ DispatchUserFactory,
DocumentFactory,
- EventFactory,
+ EntityFactory,
EntityTypeFactory,
+ EventFactory,
FeedbackFactory,
GroupFactory,
IncidentCostFactory,
@@ -57,6 +55,8 @@
ReportFactory,
SearchFilterFactory,
ServiceFactory,
+ SignalFactory,
+ SignalFilterFactory,
SignalInstanceFactory,
StorageFactory,
TagFactory,
@@ -90,8 +90,8 @@ def pytest_runtest_makereport(item, call):
@pytest.fixture(scope="session")
def testapp():
# we only want to use test plugins so unregister everybody else
- from dispatch.plugins.base import unregister, plugins
from dispatch.main import app
+ from dispatch.plugins.base import plugins, unregister
for p in plugins.all():
unregister(p)
@@ -416,6 +416,11 @@ def reports(session):
return [ReportFactory(), ReportFactory()]
+@pytest.fixture()
+def entity(session):
+ return EntityFactory()
+
+
@pytest.fixture()
def entity_type(session):
return EntityTypeFactory()
@@ -426,6 +431,16 @@ def entity_types(session):
return [EntityTypeFactory(), EntityTypeFactory()]
+@pytest.fixture()
+def signal(session):
+ return SignalFactory()
+
+
+@pytest.fixture()
+def signal_filter(session):
+ return SignalFilterFactory()
+
+
@pytest.fixture()
def signal_instance(session):
return SignalInstanceFactory()
diff --git a/tests/entity/test_entity_service.py b/tests/entity/test_entity_service.py
index a28f977c2386..1955f71738ce 100644
--- a/tests/entity/test_entity_service.py
+++ b/tests/entity/test_entity_service.py
@@ -2,11 +2,59 @@
from dispatch.entity import service as entity_service
-def test_find_entities_with_field_and_regex(session, signal_instance, project):
- signal_instance = _setup_project(
- session=session, signal_instance=signal_instance, project=project
+def test_get(session, entity):
+ from dispatch.entity.service import get
+
+ t_entity = get(db_session=session, entity_id=entity.id)
+ assert t_entity.id == entity.id
+
+
+def test_create(session, entity_type, project):
+ from dispatch.entity.models import EntityCreate
+ from dispatch.entity.service import create
+
+ name = "name"
+ description = "description"
+
+ entity_in = EntityCreate(
+ name=name,
+ owner="example@test.com",
+ external_id="foo",
+ description=description,
+ entity_type=entity_type,
+ project=project,
)
+ entity = create(db_session=session, entity_in=entity_in)
+ assert entity
+
+
+def test_update(session, project, entity):
+ from dispatch.entity.models import EntityUpdate
+ from dispatch.entity.service import update
+
+ name = "Updated name"
+
+ entity_in = EntityUpdate(
+ id=entity.id, name=name, project=project, owner="example.com", external_id="foo"
+ )
+ entity = update(
+ db_session=session,
+ entity=entity,
+ entity_in=entity_in,
+ )
+ assert entity.name == name
+
+
+def test_delete(session, entity):
+ from dispatch.entity.service import delete, get
+
+ entity_id = entity.id
+
+ delete(db_session=session, entity_id=entity_id)
+ assert not get(db_session=session, entity_id=entity_id)
+
+
+def test_find_entities_with_field_and_regex(session, signal_instance, project):
entity_types = [
EntityType(
name="AWS IAM Role ARN",
@@ -20,10 +68,6 @@ def test_find_entities_with_field_and_regex(session, signal_instance, project):
def test_find_entities_with_regex_only(session, signal_instance, project):
- signal_instance = _setup_project(
- session=session, signal_instance=signal_instance, project=project
- )
-
entity_types = [
EntityType(
name="AWS IAM Role ARN",
@@ -37,10 +81,6 @@ def test_find_entities_with_regex_only(session, signal_instance, project):
def test_find_entities_with_field_only(session, signal_instance, project):
- signal_instance = _setup_project(
- session=session, signal_instance=signal_instance, project=project
- )
-
entity_types = [
EntityType(
name="AWS IAM Role ARN",
@@ -54,9 +94,6 @@ def test_find_entities_with_field_only(session, signal_instance, project):
def test_find_entities_with_no_regex_or_field(session, signal_instance, project):
- signal_instance = _setup_project(
- session=session, signal_instance=signal_instance, project=project
- )
entity_types = [
EntityType(
name="AWS IAM Role ARN",
@@ -67,10 +104,3 @@ def test_find_entities_with_no_regex_or_field(session, signal_instance, project)
]
entities = entity_service.find_entities(session, signal_instance, entity_types)
assert len(entities) == 0
-
-
-def _setup_project(session, signal_instance, project):
- signal_instance.project = project
- session.add(signal_instance)
- session.commit()
- return signal_instance
diff --git a/tests/entity_type/test_entity_type_service.py b/tests/entity_type/test_entity_type_service.py
new file mode 100644
index 000000000000..bb119e3f07e8
--- /dev/null
+++ b/tests/entity_type/test_entity_type_service.py
@@ -0,0 +1,51 @@
+def test_get(session, entity_type):
+ from dispatch.entity_type.service import get
+
+ t_entity_type = get(db_session=session, entity_type_id=entity_type.id)
+ assert t_entity_type.id == entity_type.id
+
+
+def test_create(session, project):
+ from dispatch.entity_type.models import EntityTypeCreate
+ from dispatch.entity_type.service import create
+
+ name = "name"
+ description = "description"
+
+ entity_type_in = EntityTypeCreate(
+ name=name,
+ description=description,
+ field="foo",
+ regular_expression="*.",
+ global_find=True,
+ enabled=False,
+ project=project,
+ )
+ entity_type = create(db_session=session, entity_type_in=entity_type_in)
+ assert entity_type
+
+
+def test_update(session, project, entity_type):
+ from dispatch.entity_type.models import EntityTypeUpdate
+ from dispatch.entity_type.service import update
+
+ name = "Updated name"
+
+ entity_type_in = EntityTypeUpdate(
+ id=entity_type.id,
+ name=name,
+ project=project,
+ )
+ entity_type = update(
+ db_session=session,
+ entity_type=entity_type,
+ entity_type_in=entity_type_in,
+ )
+ assert entity_type.name == name
+
+
+def test_delete(session, entity_type):
+ from dispatch.entity_type.service import delete, get
+
+ delete(db_session=session, entity_type_id=entity_type.id)
+ assert not get(db_session=session, entity_type_id=entity_type.id)
diff --git a/tests/factories.py b/tests/factories.py
index 69ec45d9f52e..2d1385be5219 100644
--- a/tests/factories.py
+++ b/tests/factories.py
@@ -1,13 +1,12 @@
import uuid
-
-from pytz import UTC
from datetime import datetime
+from factory import LazyAttribute, LazyFunction, Sequence, SubFactory, post_generation
+from factory.alchemy import SQLAlchemyModelFactory
+from factory.fuzzy import FuzzyChoice, FuzzyDateTime, FuzzyInteger, FuzzyText
from faker import Faker
from faker.providers import misc
-from factory import Sequence, post_generation, SubFactory, LazyAttribute, LazyFunction
-from factory.alchemy import SQLAlchemyModelFactory
-from factory.fuzzy import FuzzyChoice, FuzzyText, FuzzyDateTime, FuzzyInteger
+from pytz import UTC
from dispatch.auth.models import DispatchUser, hash_password # noqa
from dispatch.case.models import Case, CaseRead
@@ -18,6 +17,7 @@
from dispatch.conversation.models import Conversation
from dispatch.definition.models import Definition
from dispatch.document.models import Document
+from dispatch.entity.models import Entity
from dispatch.entity_type.models import EntityType
from dispatch.event.models import Event
from dispatch.feedback.models import Feedback
@@ -40,7 +40,7 @@
from dispatch.route.models import Recommendation, RecommendationMatch
from dispatch.search_filter.models import SearchFilter
from dispatch.service.models import Service
-from dispatch.signal.models import Signal, SignalInstance
+from dispatch.signal.models import Signal, SignalFilter, SignalInstance
from dispatch.storage.models import Storage
from dispatch.tag.models import Tag
from dispatch.tag_type.models import TagType
@@ -52,7 +52,6 @@
from .database import Session
-
fake = Faker()
fake.add_provider(misc)
@@ -687,6 +686,32 @@ class Meta:
model = CaseType
+class CasePriorityFactory(BaseFactory):
+ """Case Priority Factory."""
+
+ name = FuzzyText()
+ description = FuzzyText()
+ project = SubFactory(ProjectFactory)
+
+ class Meta:
+ """Factory Configuration."""
+
+ model = CasePriority
+
+
+class CaseSeverityFactory(BaseFactory):
+ """Case Severity Factory."""
+
+ name = FuzzyText()
+ description = FuzzyText()
+ project = SubFactory(ProjectFactory)
+
+ class Meta:
+ """Factory Configuration."""
+
+ model = CaseSeverity
+
+
class CaseFactory(BaseFactory):
"""Case Factory."""
@@ -696,6 +721,8 @@ class CaseFactory(BaseFactory):
description = FuzzyText()
status = FuzzyChoice(["New", "Triage", "Escalated", "Closed"])
project = SubFactory(ProjectFactory)
+ case_priority = SubFactory(CasePriorityFactory)
+ case_severity = SubFactory(CaseSeverityFactory)
case_type = SubFactory(CaseTypeFactory)
class Meta:
@@ -763,6 +790,19 @@ class Meta:
model = EntityType
+class EntityFactory(BaseFactory):
+ id = Sequence(lambda n: f"1{n}")
+ name = FuzzyText()
+ description = FuzzyText()
+ value = FuzzyText()
+ source = FuzzyText()
+ entity_type = SubFactory(EntityTypeFactory)
+ project = SubFactory(ProjectFactory)
+
+ class Meta:
+ model = Entity
+
+
class SignalFactory(BaseFactory):
name = "Test Signal"
owner = "Test Owner"
@@ -800,6 +840,28 @@ class Meta:
model = SignalInstance
+class SignalFilterFactory(BaseFactory):
+ """Signal Filter Factory."""
+
+ name = FuzzyText()
+ description = FuzzyText()
+ expression = [{}]
+    action = FuzzyChoice(choices=["snooze", "deduplicate"])
+    project = SubFactory(ProjectFactory)
+
+ class Meta:
+ """Factory Configuration."""
+
+ model = SignalFilter
+
+ @post_generation
+ def creator(self, create, extracted, **kwargs):
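+        # when a DispatchUser is passed as creator=..., record them as the filter's creator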
+ if not create:
+ return
+
+ if extracted:
+ self.creator_id = extracted.id
+
+
class IncidentFactory(BaseFactory):
"""Incident Factory."""
diff --git a/tests/signal/test_signal_filter_service.py b/tests/signal/test_signal_filter_service.py
new file mode 100644
index 000000000000..c4016fcfafde
--- /dev/null
+++ b/tests/signal/test_signal_filter_service.py
@@ -0,0 +1,47 @@
+def test_get(session, signal_filter):
+ from dispatch.signal.service import get_signal_filter
+
+ t_signal_filter = get_signal_filter(db_session=session, signal_filter_id=signal_filter.id)
+ assert t_signal_filter.id == signal_filter.id
+
+
+def test_create(session, user, project):
+ from dispatch.signal.service import create_signal_filter
+ from dispatch.signal.models import SignalFilterCreate
+
+ name = "name"
+ description = "description"
+ expression = [{}]
+
+ signal_filter_in = SignalFilterCreate(
+ name=name,
+ description=description,
+ expression=expression,
+ project=project,
+ )
+ signal_filter = create_signal_filter(
+ db_session=session, creator=user, signal_filter_in=signal_filter_in
+ )
+ assert signal_filter
+
+
+def test_update(session, signal_filter):
+ from dispatch.signal.service import update_signal_filter
+ from dispatch.signal.models import SignalFilterUpdate
+
+ name = "Updated name"
+
+ signal_filter_in = SignalFilterUpdate(id=signal_filter.id, name=name, expression=[{}])
+ signal_filter = update_signal_filter(
+ db_session=session,
+ signal_filter=signal_filter,
+ signal_filter_in=signal_filter_in,
+ )
+ assert signal_filter.name == name
+
+
+def test_delete(session, signal_filter):
+ from dispatch.signal.service import delete_signal_filter, get_signal_filter
+
+ delete_signal_filter(db_session=session, signal_filter_id=signal_filter.id)
+ assert not get_signal_filter(db_session=session, signal_filter_id=signal_filter.id)
diff --git a/tests/signal/test_signal_service.py b/tests/signal/test_signal_service.py
new file mode 100644
index 000000000000..bbb4f9e0a319
--- /dev/null
+++ b/tests/signal/test_signal_service.py
@@ -0,0 +1,205 @@
+def test_get(session, signal):
+ from dispatch.signal.service import get
+
+ t_signal = get(db_session=session, signal_id=signal.id)
+ assert t_signal.id == signal.id
+
+
+def test_create(session, project):
+ from dispatch.signal.models import SignalCreate
+ from dispatch.signal.service import create
+
+ name = "name"
+ description = "description"
+
+ signal_in = SignalCreate(
+ name=name,
+ owner="example@test.com",
+ external_id="foo",
+ description=description,
+ project=project,
+ )
+ signal = create(db_session=session, signal_in=signal_in)
+ assert signal
+
+
+def test_update(session, project, signal):
+ from dispatch.signal.models import SignalUpdate
+ from dispatch.signal.service import update
+
+ name = "Updated name"
+
+ signal_in = SignalUpdate(
+ id=signal.id, name=name, project=project, owner="example.com", external_id="foo"
+ )
+ signal = update(
+ db_session=session,
+ signal=signal,
+ signal_in=signal_in,
+ )
+ assert signal.name == name
+
+
+def test_delete(session, signal):
+ from dispatch.signal.service import delete, get
+
+ delete(db_session=session, signal_id=signal.id)
+ assert not get(db_session=session, signal_id=signal.id)
+
+
+# instance tests
+def test_create_instance(session, case, signal, project):
+ from dispatch.signal.models import RawSignal, SignalInstanceCreate
+ from dispatch.signal.service import create_instance
+
+ signal_instance_in = SignalInstanceCreate(
+ raw=RawSignal(id="foo"),
+ project=project,
+ )
+ signal_instance = create_instance(db_session=session, signal_instance_in=signal_instance_in)
+ assert signal_instance
+
+
+def test_filter_actions_deduplicate(session, signal, project):
+ from dispatch.signal.models import (
+ RawSignal,
+ SignalFilter,
+ SignalInstance,
+ SignalFilterAction,
+ )
+ from dispatch.signal.service import apply_filter_actions
+ from dispatch.entity_type.models import EntityType
+ from dispatch.entity.models import Entity
+
+ entity_type = EntityType(
+ name="test",
+ field="id",
+ regular_expression=None,
+ project=project,
+ )
+ session.add(entity_type)
+
+ entity = Entity(name="test", description="test", value="foo", entity_type=entity_type)
+ session.add(entity)
+
+ # create instance
+ signal_instance_1 = SignalInstance(
+ raw=RawSignal(id="foo").json(), project=project, signal=signal, entities=[entity]
+ )
+ session.add(signal_instance_1)
+
+ signal_instance_2 = SignalInstance(
+ raw=RawSignal(id="foo").json(), project=project, signal=signal, entities=[entity]
+ )
+ session.add(signal_instance_2)
+ signal.entity_types.append(entity_type)
+
+ session.commit()
+
+ # create deduplicate signal filter
+ signal_filter = SignalFilter(
+ name="test",
+ description="test",
+ expression=[
+ {"or": [{"model": "EntityType", "field": "id", "op": "==", "value": entity_type.id}]}
+ ],
+ action=SignalFilterAction.deduplicate,
+ window=5,
+ project=project,
+ )
+ signal.filters.append(signal_filter)
+
+ session.commit()
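+    # the second instance matches the filter within the dedupe window, so it should be marked as a duplicate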
+ assert not apply_filter_actions(db_session=session, signal_instance=signal_instance_2)
+ assert signal_instance_2.filter_action == SignalFilterAction.deduplicate
+
+
+def test_filter_actions_snooze(session, signal, project):
+ from datetime import datetime, timedelta, timezone
+ from dispatch.signal.models import (
+ RawSignal,
+ SignalFilter,
+ SignalInstance,
+ SignalFilterAction,
+ )
+ from dispatch.signal.service import apply_filter_actions
+ from dispatch.entity_type.models import EntityType
+ from dispatch.entity.models import Entity
+
+ entity_type = EntityType(
+ name="test",
+ field="id",
+ regular_expression=None,
+ project=project,
+ )
+ session.add(entity_type)
+ signal.entity_types.append(entity_type)
+
+ entity = Entity(name="test", description="test", value="foo", entity_type=entity_type)
+ session.add(entity)
+
+ # create instance
+ signal_instance_1 = SignalInstance(
+ raw=RawSignal(id="foo").json(), project=project, signal=signal, entities=[entity]
+ )
+ session.add(signal_instance_1)
+ session.commit()
+
+ signal_filter = SignalFilter(
+ name="snooze0",
+ description="test",
+ expression=[{"or": [{"model": "Entity", "field": "id", "op": "==", "value": entity.id}]}],
+ action=SignalFilterAction.snooze,
+ expiration=datetime.now(tz=timezone.utc) + timedelta(minutes=5),
+ project=project,
+ )
+
+ signal.filters = [signal_filter]
+
+ session.commit()
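+    # the snooze filter has not expired yet, so the matching instance should be snoozed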
+ assert not apply_filter_actions(db_session=session, signal_instance=signal_instance_1)
+ assert signal_instance_1.filter_action == SignalFilterAction.snooze
+
+
+def test_filter_actions_snooze_expired(session, signal, project):
+ from datetime import datetime, timedelta, timezone
+ from dispatch.signal.models import (
+ RawSignal,
+ SignalFilter,
+ SignalInstance,
+ SignalFilterAction,
+ )
+ from dispatch.signal.service import apply_filter_actions
+ from dispatch.entity_type.models import EntityType
+ from dispatch.entity.models import Entity
+
+ entity_type = EntityType(
+ name="test",
+ field="id",
+ regular_expression=None,
+ project=project,
+ )
+ session.add(entity_type)
+
+ entity = Entity(name="test", description="test", value="foo", entity_type=entity_type)
+ session.add(entity)
+
+ # create instance
+ signal_instance_1 = SignalInstance(
+ raw=RawSignal(id="foo").json(), project=project, signal=signal, entities=[entity]
+ )
+ session.add(signal_instance_1)
+
+ # expired
+ signal_filter = SignalFilter(
+ name="snooze1",
+ description="test",
+ expression=[{"or": [{"model": "Entity", "field": "id", "op": "==", "value": 1}]}],
+ action=SignalFilterAction.snooze,
+ expiration=datetime.now(timezone.utc) - timedelta(minutes=5),
+ project=project,
+ )
+
+ signal.filters = [signal_filter]
+ session.commit()
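+    # the snooze filter has already expired, so the instance should not be filtered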
+ assert apply_filter_actions(db_session=session, signal_instance=signal_instance_1)