diff --git a/src/dispatch/case/models.py b/src/dispatch/case/models.py index 550dc84923a5..d746808d0933 100644 --- a/src/dispatch/case/models.py +++ b/src/dispatch/case/models.py @@ -169,7 +169,7 @@ class SignalInstanceRead(DispatchBase): entities: Optional[List[EntityRead]] = [] tags: Optional[List[TagRead]] = [] raw: Any - fingerprint: str + fingerprint: Optional[str] created_at: datetime diff --git a/src/dispatch/database/revisions/tenant/versions/2023-02-13_93b517de08e2.py b/src/dispatch/database/revisions/tenant/versions/2023-02-13_93b517de08e2.py new file mode 100644 index 000000000000..5affe963cd87 --- /dev/null +++ b/src/dispatch/database/revisions/tenant/versions/2023-02-13_93b517de08e2.py @@ -0,0 +1,42 @@ +"""Allows many to many signal filters + +Revision ID: 93b517de08e2 +Revises: b168b50764c7 +Create Date: 2023-02-13 15:19:36.921571 + +""" +from alembic import op +import sqlalchemy as sa + +# revision identifiers, used by Alembic. +revision = "93b517de08e2" +down_revision = "b168b50764c7" +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_table( + "assoc_signal_filters", + sa.Column("signal_id", sa.Integer(), nullable=False), + sa.Column("signal_filter_id", sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(["signal_filter_id"], ["signal_filter.id"], ondelete="CASCADE"), + sa.ForeignKeyConstraint(["signal_id"], ["signal.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("signal_id", "signal_filter_id"), + ) + op.drop_constraint("signal_filter_signal_id_fkey", "signal_filter", type_="foreignkey") + op.drop_column("signal_filter", "signal_id") + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.add_column( + "signal_filter", sa.Column("signal_id", sa.INTEGER(), autoincrement=False, nullable=True) + ) + op.create_foreign_key( + "signal_filter_signal_id_fkey", "signal_filter", "signal", ["signal_id"], ["id"] + ) + op.drop_table("assoc_signal_filters") + # ### end Alembic commands ### diff --git a/src/dispatch/database/revisions/tenant/versions/2023-02-13_b168b50764c7.py b/src/dispatch/database/revisions/tenant/versions/2023-02-13_b168b50764c7.py new file mode 100644 index 000000000000..3a8821872b22 --- /dev/null +++ b/src/dispatch/database/revisions/tenant/versions/2023-02-13_b168b50764c7.py @@ -0,0 +1,254 @@ +"""Moves signal processing to filter approach. + +Revision ID: b168b50764c7 +Revises: 8746b4e292d2 +Create Date: 2023-02-13 13:56:48.032074 + +""" +from alembic import op +import sqlalchemy as sa +import sqlalchemy_utils +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision = "b168b50764c7" +down_revision = "8746b4e292d2" +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table( + "signal_filter", + sa.Column("evergreen", sa.Boolean(), nullable=True), + sa.Column("evergreen_owner", sa.String(), nullable=True), + sa.Column("evergreen_reminder_interval", sa.Integer(), nullable=True), + sa.Column("evergreen_last_reminder_at", sa.DateTime(), nullable=True), + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("name", sa.String(), nullable=True), + sa.Column("description", sa.String(), nullable=True), + sa.Column("expression", sa.JSON(), nullable=False), + sa.Column("mode", sa.String(), nullable=False), + sa.Column("action", sa.String(), nullable=False), + sa.Column("expiration", sa.DateTime(), nullable=True), + sa.Column("window", sa.Integer(), nullable=True), + sa.Column("signal_id", sa.Integer(), nullable=True), + sa.Column("creator_id", sa.Integer(), nullable=True), + sa.Column("search_vector", sqlalchemy_utils.types.ts_vector.TSVectorType(), nullable=True), + sa.Column("project_id", sa.Integer(), nullable=True), + sa.Column("created_at", sa.DateTime(), nullable=True), + sa.Column("updated_at", sa.DateTime(), nullable=True), + sa.ForeignKeyConstraint( + ["creator_id"], + ["dispatch_core.dispatch_user.id"], + ), + sa.ForeignKeyConstraint(["project_id"], ["project.id"], ondelete="CASCADE"), + sa.ForeignKeyConstraint( + ["signal_id"], + ["signal.id"], + ), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("name", "project_id"), + ) + op.create_index( + "signal_filter_search_vector_idx", + "signal_filter", + ["search_vector"], + unique=False, + postgresql_using="gin", + ) + op.drop_constraint("signal_suppression_rule_id_fkey", "signal", type_="foreignkey") + op.drop_constraint("signal_duplication_rule_id_fkey", "signal", type_="foreignkey") + op.drop_constraint( + "signal_instance_duplication_rule_id_fkey", "signal_instance", type_="foreignkey" + ) + op.drop_constraint( + "signal_instance_suppression_rule_id_fkey", "signal_instance", type_="foreignkey" + ) + op.drop_table("assoc_duplication_rule_tag_types") + 
op.drop_table("assoc_suppression_rule_tags") + op.drop_table("assoc_signal_instance_tags") + op.drop_table("duplication_rule") + op.drop_table("suppression_rule") + op.drop_column("signal", "suppression_rule_id") + op.drop_column("signal", "duplication_rule_id") + op.add_column("signal_instance", sa.Column("filter_action", sa.String(), nullable=True)) + op.drop_column("signal_instance", "suppression_rule_id") + op.drop_column("signal_instance", "duplication_rule_id") + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column( + "signal_instance", + sa.Column("duplication_rule_id", sa.INTEGER(), autoincrement=False, nullable=True), + ) + op.add_column( + "signal_instance", + sa.Column("suppression_rule_id", sa.INTEGER(), autoincrement=False, nullable=True), + ) + op.create_foreign_key( + "signal_instance_suppression_rule_id_fkey", + "signal_instance", + "suppression_rule", + ["suppression_rule_id"], + ["id"], + ) + op.create_foreign_key( + "signal_instance_duplication_rule_id_fkey", + "signal_instance", + "duplication_rule", + ["duplication_rule_id"], + ["id"], + ) + op.drop_column("signal_instance", "filter_action") + op.add_column( + "signal", sa.Column("duplication_rule_id", sa.INTEGER(), autoincrement=False, nullable=True) + ) + op.add_column( + "signal", sa.Column("suppression_rule_id", sa.INTEGER(), autoincrement=False, nullable=True) + ) + op.create_foreign_key( + "signal_duplication_rule_id_fkey", + "signal", + "duplication_rule", + ["duplication_rule_id"], + ["id"], + ) + op.create_foreign_key( + "signal_suppression_rule_id_fkey", + "signal", + "suppression_rule", + ["suppression_rule_id"], + ["id"], + ) + op.add_column( + "plugin_instance", + sa.Column( + "configuration", + postgresql.JSON(astext_type=sa.Text()), + autoincrement=False, + nullable=True, + ), + ) + op.drop_index("entity_search_vector_idx", table_name="entity", postgresql_using="gin") + 
op.create_index("ix_entity_search_vector", "entity", ["search_vector"], unique=False) + op.create_table( + "service_incident", + sa.Column("incident_id", sa.INTEGER(), autoincrement=False, nullable=False), + sa.Column("service_id", sa.INTEGER(), autoincrement=False, nullable=False), + sa.ForeignKeyConstraint( + ["incident_id"], ["incident.id"], name="service_incident_incident_id_fkey" + ), + sa.ForeignKeyConstraint( + ["service_id"], ["service.id"], name="service_incident_service_id_fkey" + ), + sa.PrimaryKeyConstraint("incident_id", "service_id", name="service_incident_pkey"), + ) + op.create_table( + "assoc_suppression_rule_tags", + sa.Column("suppression_rule_id", sa.INTEGER(), autoincrement=False, nullable=False), + sa.Column("tag_id", sa.INTEGER(), autoincrement=False, nullable=False), + sa.ForeignKeyConstraint( + ["suppression_rule_id"], + ["suppression_rule.id"], + name="assoc_suppression_rule_tags_suppression_rule_id_fkey", + ondelete="CASCADE", + ), + sa.ForeignKeyConstraint( + ["tag_id"], + ["tag.id"], + name="assoc_suppression_rule_tags_tag_id_fkey", + ondelete="CASCADE", + ), + sa.PrimaryKeyConstraint( + "suppression_rule_id", "tag_id", name="assoc_suppression_rule_tags_pkey" + ), + ) + op.create_table( + "assoc_duplication_rule_tag_types", + sa.Column("duplication_rule_id", sa.INTEGER(), autoincrement=False, nullable=False), + sa.Column("tag_type_id", sa.INTEGER(), autoincrement=False, nullable=False), + sa.ForeignKeyConstraint( + ["duplication_rule_id"], + ["duplication_rule.id"], + name="assoc_duplication_rule_tag_types_duplication_rule_id_fkey", + ondelete="CASCADE", + ), + sa.ForeignKeyConstraint( + ["tag_type_id"], + ["tag_type.id"], + name="assoc_duplication_rule_tag_types_tag_type_id_fkey", + ondelete="CASCADE", + ), + sa.PrimaryKeyConstraint( + "duplication_rule_id", "tag_type_id", name="assoc_duplication_rule_tag_types_pkey" + ), + ) + op.create_table( + "assoc_signal_instance_tags", + sa.Column("signal_instance_id", postgresql.UUID(), 
autoincrement=False, nullable=False), + sa.Column("tag_id", sa.INTEGER(), autoincrement=False, nullable=False), + sa.ForeignKeyConstraint( + ["signal_instance_id"], + ["signal_instance.id"], + name="assoc_signal_instance_tags_signal_instance_id_fkey", + ondelete="CASCADE", + ), + sa.ForeignKeyConstraint( + ["tag_id"], + ["tag.id"], + name="assoc_signal_instance_tags_tag_id_fkey", + ondelete="CASCADE", + ), + sa.PrimaryKeyConstraint( + "signal_instance_id", "tag_id", name="assoc_signal_instance_tags_pkey" + ), + ) + op.create_table( + "suppression_rule", + sa.Column("evergreen", sa.BOOLEAN(), autoincrement=False, nullable=True), + sa.Column("evergreen_owner", sa.VARCHAR(), autoincrement=False, nullable=True), + sa.Column("evergreen_reminder_interval", sa.INTEGER(), autoincrement=False, nullable=True), + sa.Column( + "evergreen_last_reminder_at", postgresql.TIMESTAMP(), autoincrement=False, nullable=True + ), + sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False), + sa.Column("mode", sa.VARCHAR(), autoincrement=False, nullable=False), + sa.Column("expiration", postgresql.TIMESTAMP(), autoincrement=False, nullable=True), + sa.Column("project_id", sa.INTEGER(), autoincrement=False, nullable=True), + sa.ForeignKeyConstraint( + ["project_id"], + ["project.id"], + name="suppression_rule_project_id_fkey", + ondelete="CASCADE", + ), + sa.PrimaryKeyConstraint("id", name="suppression_rule_pkey"), + ) + op.create_table( + "duplication_rule", + sa.Column("evergreen", sa.BOOLEAN(), autoincrement=False, nullable=True), + sa.Column("evergreen_owner", sa.VARCHAR(), autoincrement=False, nullable=True), + sa.Column("evergreen_reminder_interval", sa.INTEGER(), autoincrement=False, nullable=True), + sa.Column( + "evergreen_last_reminder_at", postgresql.TIMESTAMP(), autoincrement=False, nullable=True + ), + sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False), + sa.Column("mode", sa.VARCHAR(), autoincrement=False, nullable=False), + sa.Column("project_id", 
sa.INTEGER(), autoincrement=False, nullable=True), + sa.Column("window", sa.INTEGER(), autoincrement=False, nullable=True), + sa.ForeignKeyConstraint( + ["project_id"], + ["project.id"], + name="duplication_rule_project_id_fkey", + ondelete="CASCADE", + ), + sa.PrimaryKeyConstraint("id", name="duplication_rule_pkey"), + ) + op.drop_index( + "signal_filter_search_vector_idx", table_name="signal_filter", postgresql_using="gin" + ) + op.drop_table("signal_filter") + # ### end Alembic commands ### diff --git a/src/dispatch/database/service.py b/src/dispatch/database/service.py index ee2204ab4a4e..6af9fb7156e6 100644 --- a/src/dispatch/database/service.py +++ b/src/dispatch/database/service.py @@ -33,6 +33,7 @@ from dispatch.participant.models import Participant from dispatch.plugin.models import Plugin, PluginInstance from dispatch.search.fulltext.composite_search import CompositeSearch +from dispatch.signal.models import SignalInstance from dispatch.task.models import Task from .core import Base, get_class_by_tablename, get_db, get_model_name_by_tablename @@ -347,7 +348,8 @@ def apply_filter_specific_joins(model: Base, filter_spec: dict, query: orm.query (Incident, "Tag"): (Incident.tags, True), (Incident, "TagType"): (Incident.tags, True), (Incident, "Term"): (Incident.terms, True), - (Case, "Tag"): (Case.tags, True), + (SignalInstance, "Entity"): (SignalInstance.entities, True), + (SignalInstance, "EntityType"): (SignalInstance.entities, True), } filters = build_filters(filter_spec) filter_models = get_named_models(filters)[0] @@ -485,6 +487,8 @@ def search_filter_sort_paginate( raise ValidationError( [ErrorWrapper(InvalidFilterError(msg=str(e)), loc="filter")], model=BaseModel ) from None + except Exception as e: + log.exception(e) if items_per_page == -1: items_per_page = None diff --git a/src/dispatch/entity/service.py b/src/dispatch/entity/service.py index 37865a9c24b6..6072b872a363 100644 --- a/src/dispatch/entity/service.py +++ b/src/dispatch/entity/service.py 
@@ -124,7 +124,7 @@ def update(*, db_session, entity: Entity, entity_in: EntityUpdate) -> Entity: def delete(*, db_session, entity_id: int): """Deletes an existing entity.""" - entity = db_session.query(Entity).filter(Entity.id == entity_id).one_or_none() + entity = db_session.query(Entity).filter(Entity.id == entity_id).one() db_session.delete(entity) db_session.commit() diff --git a/src/dispatch/entity_type/service.py b/src/dispatch/entity_type/service.py index c245c21af361..ab7b5018ddd0 100644 --- a/src/dispatch/entity_type/service.py +++ b/src/dispatch/entity_type/service.py @@ -93,6 +93,6 @@ def update( def delete(*, db_session: Session, entity_type_id: int) -> None: """Deletes an entity type.""" - entity_type = db_session.query(EntityType).filter(EntityType.id == entity_type_id) - db_session.delete(entity_type.one_or_none) + entity_type = db_session.query(EntityType).filter(EntityType.id == entity_type_id).one() + db_session.delete(entity_type) db_session.commit() diff --git a/src/dispatch/enums.py b/src/dispatch/enums.py index efd7f9c1c0ee..c6c3e623aaa0 100644 --- a/src/dispatch/enums.py +++ b/src/dispatch/enums.py @@ -6,13 +6,6 @@ def __str__(self) -> str: return str.__str__(self) -class RuleMode(DispatchEnum): - active = "Active" - monitor = "Monitor" - inactive = "Inactive" - expired = "Expired" - - class Visibility(DispatchEnum): open = "Open" restricted = "Restricted" diff --git a/src/dispatch/main.py b/src/dispatch/main.py index d020c0c9d080..f9fbfde0dfd5 100644 --- a/src/dispatch/main.py +++ b/src/dispatch/main.py @@ -194,6 +194,12 @@ async def dispatch( status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, content={"detail": [{"msg": "Unknown", "loc": ["Unknown"], "type": "Unknown"}]}, ) + except Exception as e: + log.exception(e) + response = JSONResponse( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + content={"detail": [{"msg": "Unknown", "loc": ["Unknown"], "type": "Unknown"}]}, + ) return response diff --git a/src/dispatch/models.py 
b/src/dispatch/models.py index ea170adb7305..c9a02b00cfc0 100644 --- a/src/dispatch/models.py +++ b/src/dispatch/models.py @@ -1,9 +1,9 @@ from typing import Optional from datetime import datetime, timedelta -from pydantic import BaseModel from pydantic.fields import Field from pydantic.networks import EmailStr +from pydantic import BaseModel from pydantic.types import conint, constr, SecretStr from sqlalchemy import Boolean, Column, DateTime, Integer, String, event, ForeignKey diff --git a/src/dispatch/plugins/dispatch_slack/service.py b/src/dispatch/plugins/dispatch_slack/service.py index 10bccf69aae6..90490121ebe3 100644 --- a/src/dispatch/plugins/dispatch_slack/service.py +++ b/src/dispatch/plugins/dispatch_slack/service.py @@ -341,10 +341,13 @@ def add_users_to_conversation_thread( ): """Adds user to a threaded conversation.""" users = [f"<@{user_id}>" for user_id in user_ids] - blocks = Message( - blocks=[Section(text="Looping in individuals to help resolve this case...", fields=users)] - ).build()["blocks"] - send_message(client=client, conversation_id=conversation_id, blocks=blocks, ts=thread_id) + if users: + blocks = Message( + blocks=[ + Section(text="Looping in individuals to help resolve this case...", fields=users) + ] + ).build()["blocks"] + send_message(client=client, conversation_id=conversation_id, blocks=blocks, ts=thread_id) def add_users_to_conversation(client: Any, conversation_id: str, user_ids: List[str]): diff --git a/src/dispatch/signal/flows.py b/src/dispatch/signal/flows.py index 961f56b6951f..5c4c1be4114b 100644 --- a/src/dispatch/signal/flows.py +++ b/src/dispatch/signal/flows.py @@ -5,7 +5,6 @@ from dispatch.case import flows as case_flows from dispatch.entity import service as entity_service from dispatch.signal import service as signal_service -from dispatch.tag import service as tag_service from dispatch.signal.models import SignalInstanceCreate, RawSignal @@ -29,12 +28,6 @@ def create_signal_instance( db_session=db_session, 
signal_instance_in=signal_instance_in ) - # associate any known tags with the signal - tag_service.get_by_name(db_session=db_session, project_id=project.id, name="foo") - - signal_instance.signal = signal - db_session.commit() - entities = entity_service.find_entities( db_session=db_session, signal_instance=signal_instance, @@ -42,33 +35,21 @@ def create_signal_instance( ) signal_instance.entities = entities - suppressed = signal_service.supress( - db_session=db_session, - signal_instance=signal_instance, - suppression_rule=signal.suppression_rule, - ) - if suppressed: - return - - duplicate = signal_service.deduplicate( - db_session=db_session, - signal_instance=signal_instance, - duplication_rule=signal.duplication_rule, - ) - if duplicate: - return - - # create a case if not duplicate or suppressed - case_in = CaseCreate( - title=signal.name, - description=signal.description, - case_priority=signal.case_priority, - case_type=signal.case_type, - ) - case = case_service.create(db_session=db_session, case_in=case_in) - - signal_instance.case = case + signal_instance.signal = signal db_session.commit() - return case_flows.case_new_create_flow( - db_session=db_session, organization_slug=None, case_id=case.id - ) + + if signal_service.apply_filter_actions(db_session=db_session, signal_instance=signal_instance): + # create a case if not duplicate or snoozed + case_in = CaseCreate( + title=signal.name, + description=signal.description, + case_priority=signal.case_priority, + case_type=signal.case_type, + ) + case = case_service.create(db_session=db_session, case_in=case_in) + + signal_instance.case = case + db_session.commit() + return case_flows.case_new_create_flow( + db_session=db_session, organization_slug=None, case_id=case.id + ) diff --git a/src/dispatch/signal/models.py b/src/dispatch/signal/models.py index 4862e249691e..7edf764d3852 100644 --- a/src/dispatch/signal/models.py +++ b/src/dispatch/signal/models.py @@ -1,36 +1,43 @@ import uuid from datetime import 
datetime -from typing import List, Optional, Dict -from pydantic import Field +from typing import Dict, List, Optional -from sqlalchemy.orm import relationship +from pydantic import Field from sqlalchemy import ( + JSON, + Boolean, Column, + DateTime, + ForeignKey, Integer, + PrimaryKeyConstraint, String, - ForeignKey, Table, - PrimaryKeyConstraint, - DateTime, - Boolean, + UniqueConstraint, ) -from sqlalchemy.dialects.postgresql import UUID, JSONB +from sqlalchemy.dialects.postgresql import JSONB, UUID +from sqlalchemy.orm import relationship from sqlalchemy_utils import TSVectorType -from dispatch.database.core import Base -from dispatch.enums import DispatchEnum - -from dispatch.models import DispatchBase, EvergreenMixin, PrimaryKey, TimeStampMixin, ProjectMixin - +from dispatch.auth.models import DispatchUser from dispatch.case.models import CaseRead -from dispatch.case.type.models import CaseTypeRead, CaseType from dispatch.case.priority.models import CasePriority, CasePriorityRead -from dispatch.entity.models import EntityRead -from dispatch.entity_type.models import EntityTypeRead, EntityTypeCreate -from dispatch.tag.models import TagRead -from dispatch.project.models import ProjectRead +from dispatch.case.type.models import CaseType, CaseTypeRead from dispatch.data.source.models import SourceBase -from dispatch.tag_type.models import TagTypeRead +from dispatch.project.models import ProjectRead + +from dispatch.database.core import Base +from dispatch.entity.models import EntityRead +from dispatch.entity_type.models import EntityTypeCreate, EntityTypeRead +from dispatch.enums import DispatchEnum +from dispatch.models import ( + DispatchBase, + EvergreenMixin, + NameStr, + PrimaryKey, + ProjectMixin, + TimeStampMixin, +) class RuleMode(DispatchEnum): @@ -51,6 +58,7 @@ class RuleMode(DispatchEnum): PrimaryKeyConstraint("signal_instance_id", "tag_id"), ) + assoc_signal_tags = Table( "assoc_signal_tags", Base.metadata, @@ -59,6 +67,14 @@ class 
RuleMode(DispatchEnum): PrimaryKeyConstraint("signal_id", "tag_id"), ) +assoc_signal_filters = Table( + "assoc_signal_filters", + Base.metadata, + Column("signal_id", Integer, ForeignKey("signal.id", ondelete="CASCADE")), + Column("signal_filter_id", Integer, ForeignKey("signal_filter.id", ondelete="CASCADE")), + PrimaryKeyConstraint("signal_id", "signal_filter_id"), +) + assoc_signal_instance_entities = Table( "assoc_signal_instance_entities", Base.metadata, @@ -79,43 +95,17 @@ class RuleMode(DispatchEnum): PrimaryKeyConstraint("signal_id", "entity_type_id"), ) -assoc_duplication_tag_types = Table( - "assoc_duplication_rule_tag_types", - Base.metadata, - Column("duplication_rule_id", Integer, ForeignKey("duplication_rule.id", ondelete="CASCADE")), - Column("tag_type_id", Integer, ForeignKey("tag_type.id", ondelete="CASCADE")), - PrimaryKeyConstraint("duplication_rule_id", "tag_type_id"), -) - -assoc_suppression_tags = Table( - "assoc_suppression_rule_tags", - Base.metadata, - Column("suppression_rule_id", Integer, ForeignKey("suppression_rule.id", ondelete="CASCADE")), - Column("tag_id", Integer, ForeignKey("tag.id", ondelete="CASCADE")), - PrimaryKeyConstraint("suppression_rule_id", "tag_id"), -) - -class SuppressionRule(Base, ProjectMixin, EvergreenMixin): - id = Column(Integer, primary_key=True) - mode = Column(String, default=RuleMode.active, nullable=False) - expiration = Column(DateTime, nullable=True) +class SignalFilterMode(DispatchEnum): + active = "active" + monitor = "monitor" + inactive = "inactive" + expired = "expired" - # the tags to use for suppression - tags = relationship("Tag", secondary=assoc_suppression_tags, backref="suppression_rules") - -class DuplicationRule(Base, ProjectMixin, EvergreenMixin): - id = Column(Integer, primary_key=True) - mode = Column(String, default=RuleMode.active, nullable=False) - - # number of seconds for duplication lookback default to 1 hour - window = Column(Integer, default=(60 * 60)) - - # the tag types to use for 
deduplication - tag_types = relationship( - "TagType", secondary=assoc_duplication_tag_types, backref="duplication_rules" - ) +class SignalFilterAction(DispatchEnum): + deduplicate = "deduplicate" + snooze = "snooze" class Signal(Base, TimeStampMixin, ProjectMixin): @@ -133,15 +123,12 @@ class Signal(Base, TimeStampMixin, ProjectMixin): case_type = relationship("CaseType", backref="signals") case_priority_id = Column(Integer, ForeignKey(CasePriority.id)) case_priority = relationship("CasePriority", backref="signals") - duplication_rule_id = Column(Integer, ForeignKey(DuplicationRule.id)) - duplication_rule = relationship("DuplicationRule", backref="signal") + filters = relationship("SignalFilter", secondary=assoc_signal_filters, backref="signals") entity_types = relationship( "EntityType", secondary=assoc_signal_entity_types, backref="signals", ) - suppression_rule_id = Column(Integer, ForeignKey(SuppressionRule.id)) - suppression_rule = relationship("SuppressionRule", backref="signal") tags = relationship( "Tag", secondary=assoc_signal_tags, @@ -150,67 +137,69 @@ class Signal(Base, TimeStampMixin, ProjectMixin): search_vector = Column(TSVectorType("name", regconfig="pg_catalog.simple")) +class SignalFilter(Base, ProjectMixin, EvergreenMixin, TimeStampMixin): + __table_args__ = (UniqueConstraint("name", "project_id"),) + id = Column(Integer, primary_key=True) + name = Column(String) + description = Column(String) + expression = Column(JSON, nullable=False, default=[]) + mode = Column(String, default=SignalFilterMode.active, nullable=False) + action = Column(String, nullable=False) + expiration = Column(DateTime, nullable=True) + window = Column( + Integer, default=(60 * 60) + ) # number of seconds for duplication lookback default to 1 hour + + creator_id = Column(Integer, ForeignKey(DispatchUser.id)) + creator = relationship("DispatchUser", backref="signal_filters") + + search_vector = Column( + TSVectorType("name", "description", weights={"name": "A", 
"description": "B"}) + ) + + class SignalInstance(Base, TimeStampMixin, ProjectMixin): - id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) + id = Column(UUID(as_uuid=True), primary_key=True, default=lambda: str(uuid.uuid4())) case = relationship("Case", backref="signal_instances") case_id = Column(Integer, ForeignKey("case.id", ondelete="CASCADE")) - duplication_rule = relationship("DuplicationRule", backref="signal_instances") - duplication_rule_id = Column(Integer, ForeignKey(DuplicationRule.id)) entities = relationship( "Entity", secondary=assoc_signal_instance_entities, backref="signal_instances", ) fingerprint = Column(String) + filter_action = Column(String) raw = Column(JSONB) signal = relationship("Signal", backref="instances") signal_id = Column(Integer, ForeignKey("signal.id")) - suppression_rule = relationship("SuppressionRule", backref="signal_instances") - suppression_rule_id = Column(Integer, ForeignKey(SuppressionRule.id)) - tags = relationship( - "Tag", - secondary=assoc_signal_instance_tags, - backref="signal_instances", - ) # Pydantic models... 
-class SignalRuleBase(DispatchBase): - mode: Optional[RuleMode] = RuleMode.active - - -class DuplicationRuleBase(SignalRuleBase): +class SignalFilterBase(DispatchBase): + mode: Optional[SignalFilterMode] = SignalFilterMode.active + expression: List[dict] + name: NameStr + action: SignalFilterAction = SignalFilterAction.snooze + description: Optional[str] = Field(None, nullable=True) window: Optional[int] = 600 - tag_types: List[TagTypeRead] - - -class DuplicationRuleCreate(DuplicationRuleBase): - pass - - -class DuplicationRuleUpdate(DuplicationRuleBase): - id: Optional[PrimaryKey] + expiration: Optional[datetime] = Field(None, nullable=True) -class DuplicationRuleRead(DuplicationRuleBase): +class SignalFilterUpdate(SignalFilterBase): id: PrimaryKey -class SuppressionRuleBase(SignalRuleBase): - expiration: Optional[datetime] - tags: List[TagRead] - - -class SuppressionRuleCreate(SuppressionRuleBase): - pass +class SignalFilterCreate(SignalFilterBase): + project: ProjectRead -class SuppressionRuleUpdate(SuppressionRuleBase): - id: Optional[PrimaryKey] +class SignalFilterRead(SignalFilterBase): + id: PrimaryKey -class SuppressionRuleRead(SuppressionRuleBase): - id: PrimaryKey +class SignalFilterPagination(DispatchBase): + items: List[SignalFilterRead] + total: int class SignalBase(DispatchBase): @@ -224,30 +213,23 @@ class SignalBase(DispatchBase): external_url: Optional[str] source: Optional[SourceBase] created_at: Optional[datetime] = None + filters: Optional[List[SignalFilterRead]] = [] entity_types: Optional[List[EntityTypeRead]] - suppression_rule: Optional[SuppressionRuleRead] - duplication_rule: Optional[DuplicationRuleBase] project: ProjectRead class SignalCreate(SignalBase): - entity_types: Optional[EntityTypeCreate] - suppression_rule: Optional[SuppressionRuleCreate] - duplication_rule: Optional[DuplicationRuleCreate] + entity_types: Optional[EntityTypeCreate] = [] class SignalUpdate(SignalBase): id: PrimaryKey entity_types: Optional[List[EntityTypeRead]] = 
[] - suppression_rule: Optional[SuppressionRuleUpdate] - duplication_rule: Optional[DuplicationRuleUpdate] class SignalRead(SignalBase): id: PrimaryKey - entity_types: Optional[List[EntityTypeRead]] - suppression_rule: Optional[SuppressionRuleRead] - duplication_rule: Optional[DuplicationRuleRead] + entity_types: Optional[List[EntityTypeRead]] = [] class SignalPagination(DispatchBase): @@ -277,10 +259,8 @@ class SignalInstanceBase(DispatchBase): project: ProjectRead case: Optional[CaseRead] entities: Optional[List[EntityRead]] = [] - tags: Optional[List[TagRead]] = [] raw: RawSignal - suppression_rule: Optional[SuppressionRuleBase] - duplication_rule: Optional[DuplicationRuleBase] + filter_action: SignalFilterAction = None created_at: Optional[datetime] = None @@ -290,7 +270,7 @@ class SignalInstanceCreate(SignalInstanceBase): class SignalInstanceRead(SignalInstanceBase): id: uuid.UUID - fingerprint: str = None + fingerprint: Optional[str] signal: SignalRead diff --git a/src/dispatch/signal/service.py b/src/dispatch/signal/service.py index 935d84a67bba..6295340e4816 100644 --- a/src/dispatch/signal/service.py +++ b/src/dispatch/signal/service.py @@ -1,103 +1,98 @@ import json +from datetime import datetime, timedelta, timezone import hashlib from typing import Optional -from datetime import datetime, timedelta, timezone -from dispatch.enums import RuleMode -from dispatch.project import service as project_service -from dispatch.tag import service as tag_service -from dispatch.tag_type import service as tag_type_service -from dispatch.case.type import service as case_type_service +from sqlalchemy import asc + +from dispatch.auth.models import DispatchUser from dispatch.case.priority import service as case_priority_service -from dispatch.entity_type import service as entity_type_service +from dispatch.case.type import service as case_type_service +from dispatch.database.service import apply_filters, apply_filter_specific_joins +from dispatch.project import service as 
project_service from .models import ( Signal, SignalCreate, - SignalUpdate, + SignalFilter, + SignalFilterAction, + SignalFilterCreate, + SignalFilterMode, + SignalFilterUpdate, SignalInstance, - SuppressionRule, - DuplicationRule, SignalInstanceCreate, - DuplicationRuleCreate, - DuplicationRuleUpdate, - SuppressionRuleCreate, - SuppressionRuleUpdate, + SignalUpdate, ) -def create_duplication_rule( - *, db_session, duplication_rule_in: DuplicationRuleCreate -) -> DuplicationRule: - """Creates a new duplication rule.""" - rule = DuplicationRule(**duplication_rule_in.dict(exclude={"tag_types"})) - - tag_types = [] - for t in duplication_rule_in.tag_types: - tag_types.append(tag_type_service.get(db_session=db_session, tag_type_id=t.id)) +def create_signal_filter( + *, db_session, creator: DispatchUser, signal_filter_in: SignalFilterCreate +) -> SignalFilter: + """Creates a new signal filter.""" + project = project_service.get_by_name_or_raise( + db_session=db_session, project_in=signal_filter_in.project + ) - rule.tag_types = tag_types - db_session.add(rule) + signal_filter = SignalFilter( + **signal_filter_in.dict( + exclude={ + "project", + } + ), + creator=creator, + project=project, + ) + db_session.add(signal_filter) db_session.commit() - return rule + return signal_filter + +def update_signal_filter( + *, db_session, signal_filter: SignalFilter, signal_filter_in: SignalFilterUpdate +) -> SignalFilter: + """Updates an existing signal filter.""" -def update_duplication_rule( - *, db_session, duplication_rule_in: DuplicationRuleUpdate -) -> DuplicationRule: - """Updates an existing duplication rule.""" - rule = ( - db_session.query(DuplicationRule).filter(DuplicationRule.id == duplication_rule_in.id).one() + signal_filter_data = signal_filter.dict() + update_data = signal_filter_in.dict( + skip_defaults=True, + exclude={}, ) - tag_types = [] - for t in duplication_rule_in.tag_types: - tag_types.append(tag_type_service.get(db_session=db_session, tag_type_id=t.id)) + 
for field in signal_filter_data: + if field in update_data: + setattr(signal_filter, field, update_data[field]) - rule.tag_types = tag_types - rule.window = duplication_rule_in.window - db_session.add(rule) + db_session.add(signal_filter) db_session.commit() - return rule + return signal_filter -def create_suppression_rule( - *, db_session, suppression_rule_in: SuppressionRuleCreate -) -> SuppressionRule: - """Creates a new supression rule.""" - rule = SuppressionRule(**suppression_rule_in.dict(exclude={"tags"})) - - tags = [] - for t in suppression_rule_in.tags: - tags.append(tag_service.get_or_create(db_session=db_session, tag_in=t)) - - rule.tags = tags - db_session.add(rule) +def delete_signal_filter(*, db_session, signal_filter_id: int) -> int: + """Deletes an existing signal filter.""" + signal_filter = db_session.query(SignalFilter).filter(SignalFilter.id == signal_filter_id).one() + db_session.delete(signal_filter) db_session.commit() - return rule + return signal_filter_id -def update_suppression_rule( - *, db_session, suppression_rule_in: SuppressionRuleUpdate -) -> SuppressionRule: - """Updates an existing supression rule.""" - rule = ( - db_session.query(SuppressionRule).filter(SuppressionRule.id == suppression_rule_in.id).one() +def get_signal_filter_by_name(*, db_session, project_id: int, name: str) -> Optional[SignalFilter]: + """Gets a signal filter by it's name.""" + return ( + db_session.query(SignalFilter) + .filter(SignalFilter.project_id == project_id) + .filter(SignalFilter.name == name) + .first() ) - tags = [] - for t in suppression_rule_in.tags: - tags.append(tag_service.get_or_create(db_session=db_session, tag_in=t)) - rule.tags = tags - db_session.add(rule) - db_session.commit() - return rule +def get_signal_filter(*, db_session, signal_filter_id: int) -> SignalFilter: + """Gets a single signal filter.""" + return db_session.query(SignalFilter).filter(SignalFilter.id == signal_filter_id).one_or_none() def get(*, db_session, signal_id: 
int) -> Optional[Signal]: """Gets a signal by id.""" - return db_session.query(Signal).filter(Signal.id == signal_id).one() + return db_session.query(Signal).filter(Signal.id == signal_id).one_or_none() def get_by_variant_or_external_id( @@ -130,24 +125,15 @@ def create(*, db_session, signal_in: SignalCreate) -> Signal: "case_type", "case_priority", "source", - "suppression_rule", - "duplication_rule", + "filters", } ), project=project, ) - if signal_in.duplication_rule: - duplication_rule = create_duplication_rule( - db_session=db_session, duplication_rule_in=signal_in.duplication_rule - ) - signal.duplication_rule = duplication_rule - - if signal_in.suppression_rule: - suppression_rule = create_suppression_rule( - db_session=db_session, suppression_rule_in=signal.suppression_rule - ) - signal.suppression_rule = suppression_rule + for f in signal_in.filters: + signal_filter = get_signal_filter_by_name(db_session=db_session, project_id=project.id, name=f.name) + signal.filters.append(signal_filter) if signal_in.case_priority: case_priority = case_priority_service.get_by_name_or_default( @@ -169,40 +155,26 @@ def create(*, db_session, signal_in: SignalCreate) -> Signal: def update(*, db_session, signal: Signal, signal_in: SignalUpdate) -> Signal: """Creates a new signal.""" signal_data = signal.dict() - update_data = signal_in.dict(skip_defaults=True) + update_data = signal_in.dict( + skip_defaults=True, + exclude={ + "project", + "case_type", + "case_priority", + "source", + "filters", + }, + ) for field in signal_data: if field in update_data: setattr(signal, field, update_data[field]) - entity_types = [] - for entity_type in signal_in.entity_types: - entity_types.append( - entity_type_service.get_or_create(db_session=db_session, entity_type_in=entity_type) + for f in signal_in.filters: + signal_filter = get_signal_filter_by_name( + db_session=db_session, project_id=signal.project.id, name=f.name ) - signal.entity_types = entity_types - - if signal_in.duplication_rule: - if 
signal_in.duplication_rule.id: - update_duplication_rule( - db_session=db_session, duplication_rule_in=signal_in.duplication_rule - ) - else: - duplication_rule = create_duplication_rule( - db_session=db_session, duplication_rule_in=signal_in.duplication_rule - ) - signal.duplication_rule = duplication_rule - - if signal_in.suppression_rule: - if signal_in.suppression_rule.id: - update_suppression_rule( - db_session=db_session, suppression_rule_in=signal_in.suppression_rule - ) - else: - suppression_rule = create_suppression_rule( - db_session=db_session, suppression_rule_in=signal_in.suppression_rule - ) - signal.suppression_rule = suppression_rule + signal.filters.append(signal_filter) if signal_in.case_priority: case_priority = case_priority_service.get_by_name_or_default( @@ -238,100 +210,52 @@ def create_instance(*, db_session, signal_instance_in: SignalInstanceCreate) -> # we round trip the raw data to json-ify date strings signal_instance = SignalInstance( - **signal_instance_in.dict(exclude={"project", "tags", "raw"}), + **signal_instance_in.dict(exclude={"case", "signal", "project", "entities", "raw"}), raw=json.loads(signal_instance_in.raw.json()), project=project, ) - tags = [] - for t in signal_instance_in.tags: - tags.append(tag_service.get_or_create(db_session=db_session, tag_in=t)) - - signal_instance.tags = tags - db_session.add(signal_instance) db_session.commit() return signal_instance -def create_instance_fingerprint(duplication_rule, signal_instance: SignalInstance) -> str: - """Given a list of tag_types and tags creates a hash of their values.""" - fingerprint = hashlib.sha1(str(signal_instance.raw).encode("utf-8")).hexdigest() - - # use tags if we have them - if duplication_rule: - if signal_instance.tags: - tag_type_names = [t.name for t in duplication_rule.tag_types] - hash_values = [] - for tag in signal_instance.tags: - if tag.tag_type.name in tag_type_names: - hash_values.append(tag.tag_type.name) - fingerprint = 
hashlib.sha1("-".join(sorted(hash_values)).encode("utf-8")).hexdigest() - - return fingerprint - - -def deduplicate( - *, db_session, signal_instance: SignalInstance, duplication_rule: DuplicationRule -) -> bool: - """Find any matching duplication rules and match signals.""" - duplicate = False - - # always fingerprint - fingerprint = create_instance_fingerprint(duplication_rule, signal_instance) - signal_instance.fingerprint = fingerprint - db_session.commit() - - if not duplication_rule: - return duplicate - - if duplication_rule.mode != RuleMode.active: - return duplicate - - window = datetime.now(timezone.utc) - timedelta(seconds=duplication_rule.window) - fingerprint = create_instance_fingerprint(duplication_rule.tag_types, signal_instance) +def apply_filter_actions(*, db_session, signal_instance: SignalInstance): + """Applies any matching filter actions associated with this instance.""" - instances = ( - db_session.query(SignalInstance) - .filter(Signal.id == signal_instance.signal.id) - .filter(SignalInstance.id != signal_instance.id) - .filter(SignalInstance.created_at >= window) - .filter(SignalInstance.fingerprint == fingerprint) - .all() - ) - - if instances: - duplicate = True - # TODO find the earliest created instance - signal_instance.case_id = instances[0].case_id - signal_instance.duplication_rule_id = duplication_rule.id - - db_session.commit() - return duplicate - - -def supress( - *, db_session, signal_instance: SignalInstance, suppression_rule: SuppressionRule -) -> bool: - """Find any matching suppression rules and match instances.""" - supressed = False - - if not suppression_rule: - return supressed + for f in signal_instance.signal.filters: + if f.mode != SignalFilterMode.active: + continue - if suppression_rule.mode != RuleMode.active: - return supressed - - if suppression_rule.expiration: - if suppression_rule.expiration <= datetime.now(): - return supressed - - rule_tag_ids = sorted([t.id for t in suppression_rule.tags]) - signal_tag_ids 
= sorted([t.id for t in signal_instance.tags]) - - if rule_tag_ids == signal_tag_ids: - supressed = True - signal_instance.suppression_rule_id = suppression_rule.id - - db_session.commit() - return supressed + query = db_session.query(SignalInstance).filter( + SignalInstance.signal_id == signal_instance.signal_id + ) + query = apply_filter_specific_joins(SignalInstance, f.expression, query) + query = apply_filters(query, f.expression) + + # order matters, check for snooze before deduplication + # we check to see if the current instances match's it's signals snooze filter + if f.action == SignalFilterAction.snooze: + if f.expiration.replace(tzinfo=timezone.utc) <= datetime.now(timezone.utc): + continue + + instances = query.filter(SignalInstance.id == signal_instance.id).all() + + if instances: + signal_instance.filter_action = SignalFilterAction.snooze + return + + elif f.action == SignalFilterAction.deduplicate: + window = datetime.now(timezone.utc) - timedelta(seconds=f.window) + query = query.filter(SignalInstance.created_at >= window) + + # get the earliest instance + query = query.order_by(asc(SignalInstance.created_at)) + instances = query.all() + + if instances: + # associate with existing case + signal_instance.case_id = instances[0].case_id + signal_instance.filter_action = SignalFilterAction.deduplicate + return + return True diff --git a/src/dispatch/signal/views.py b/src/dispatch/signal/views.py index 74f7b05752d9..a6c19b3b97fd 100644 --- a/src/dispatch/signal/views.py +++ b/src/dispatch/signal/views.py @@ -1,24 +1,39 @@ from fastapi import APIRouter, Depends, HTTPException, status from pydantic.error_wrappers import ErrorWrapper, ValidationError - from sqlalchemy.exc import IntegrityError from sqlalchemy.orm import Session +from dispatch.auth.models import DispatchUser +from dispatch.auth.service import get_current_user from dispatch.database.core import get_db -from dispatch.exceptions import ExistsError from dispatch.database.service import 
common_parameters, search_filter_sort_paginate +from dispatch.exceptions import ExistsError from dispatch.models import PrimaryKey from .models import ( SignalCreate, - SignalUpdate, - SignalPagination, - SignalRead, - SignalInstanceRead, + SignalFilterCreate, + SignalFilterPagination, + SignalFilterRead, + SignalFilterUpdate, SignalInstanceCreate, SignalInstancePagination, + SignalInstanceRead, + SignalPagination, + SignalRead, + SignalUpdate, +) +from .service import ( + create, + create_instance, + create_signal_filter, + delete, + delete_signal_filter, + get, + get_signal_filter, + update, + update_signal_filter, ) -from .service import create, update, get, create_instance, delete router = APIRouter() @@ -29,6 +44,87 @@ def get_signal_instances(*, common: dict = Depends(common_parameters)): return search_filter_sort_paginate(model="SignalInstance", **common) +@router.post("/{signal_id}/instances", response_model=SignalInstanceRead) +def create_signal_instance( + *, db_session: Session = Depends(get_db), signal_instance_in: SignalInstanceCreate +): + """Create a new signal instance.""" + return create_instance(db_session=db_session, signal_instance_in=signal_instance_in) + + +@router.get("/filters", response_model=SignalFilterPagination) +def get_signal_filters(*, common: dict = Depends(common_parameters)): + """Get all signal filters.""" + return search_filter_sort_paginate(model="SignalFilter", **common) + + +@router.post("/filters", response_model=SignalFilterRead) +def create_filter( + *, + db_session: Session = Depends(get_db), + signal_filter_in: SignalFilterCreate, + current_user: DispatchUser = Depends(get_current_user), +): + """Create a new signal filter.""" + try: + return create_signal_filter( + db_session=db_session, creator=current_user, signal_filter_in=signal_filter_in + ) + except IntegrityError: + raise ValidationError( + [ + ErrorWrapper( + ExistsError(msg="A signal filter with this name already exists."), loc="name" + ) + ], + 
model=SignalFilterRead, + ) from None + + +@router.put("/filters/{signal_filter_id}", response_model=SignalFilterRead) +def update_filter( + *, + db_session: Session = Depends(get_db), + signal_filter_id: PrimaryKey, + signal_filter_in: SignalFilterUpdate, +): + """Updates an existing signal filter.""" + signal_filter = get_signal_filter(db_session=db_session, signal_filter_id=signal_filter_id) + if not signal_filter: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail=[{"msg": "A signal filter with this id does not exist."}], + ) + + try: + signal_filter = update_signal_filter( + db_session=db_session, signal_filter=signal_filter, signal_filter_in=signal_filter_in + ) + except IntegrityError: + raise ValidationError( + [ + ErrorWrapper( + ExistsError(msg="A signal filter with this name already exists."), loc="name" + ) + ], + model=SignalFilterUpdate, + ) from None + + return signal_filter + + +@router.delete("/filters/{signal_filter_id}", response_model=None) +def delete_filter(*, db_session: Session = Depends(get_db), signal_filter_id: PrimaryKey): + """Deletes a signal filter.""" + signal_filter = get_signal_filter(db_session=db_session, signal_filter_id=signal_filter_id) + if not signal_filter: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail=[{"msg": "A signal filter with this id does not exist."}], + ) + delete_signal_filter(db_session=db_session, signal_filter_id=signal_filter_id) + + @router.get("", response_model=SignalPagination) def get_signals(*, common: dict = Depends(common_parameters)): """Get all signal definitions.""" @@ -37,7 +133,7 @@ def get_signals(*, common: dict = Depends(common_parameters)): @router.get("/{signal_id}", response_model=SignalRead) def get_signal(*, db_session: Session = Depends(get_db), signal_id: PrimaryKey): - """Update a signal.""" + """Get a signal by its ID.""" signal = get(db_session=db_session, signal_id=signal_id) if not signal: raise HTTPException( @@ -86,11 +182,3 @@ def delete_signal(*, 
db_session: Session = Depends(get_db), signal_id: PrimaryKe detail=[{"msg": "A signal with this id does not exist."}], ) delete(db_session=db_session, signal_id=signal_id) - - -@router.post("/{signal_id}/instances", response_model=SignalInstanceRead) -def create_signal_instance( - *, db_session: Session = Depends(get_db), signal_instance_in: SignalInstanceCreate -): - """Create a new signal instance.""" - return create_instance(db_session=db_session, signal_instance_in=signal_instance_in) diff --git a/src/dispatch/static/dispatch/src/components/DateTimePicker.vue b/src/dispatch/static/dispatch/src/components/DateTimePicker.vue new file mode 100644 index 000000000000..50853ce6b496 --- /dev/null +++ b/src/dispatch/static/dispatch/src/components/DateTimePicker.vue @@ -0,0 +1,141 @@ + + diff --git a/src/dispatch/static/dispatch/src/entity/EntityFilterCombobox.vue b/src/dispatch/static/dispatch/src/entity/EntityFilterCombobox.vue new file mode 100644 index 000000000000..5bbb7c7be2ad --- /dev/null +++ b/src/dispatch/static/dispatch/src/entity/EntityFilterCombobox.vue @@ -0,0 +1,156 @@ + + + diff --git a/src/dispatch/static/dispatch/src/signal/DuplicationRule.vue b/src/dispatch/static/dispatch/src/signal/DuplicationRule.vue deleted file mode 100644 index 6f73d7c2da96..000000000000 --- a/src/dispatch/static/dispatch/src/signal/DuplicationRule.vue +++ /dev/null @@ -1,95 +0,0 @@ - - - diff --git a/src/dispatch/static/dispatch/src/signal/NewEditSheet.vue b/src/dispatch/static/dispatch/src/signal/NewEditDialog.vue similarity index 88% rename from src/dispatch/static/dispatch/src/signal/NewEditSheet.vue rename to src/dispatch/static/dispatch/src/signal/NewEditDialog.vue index 973d0fc0f4f8..a0f635ce54e7 100644 --- a/src/dispatch/static/dispatch/src/signal/NewEditSheet.vue +++ b/src/dispatch/static/dispatch/src/signal/NewEditDialog.vue @@ -143,13 +143,22 @@ - - - - - - - + + + Filter(s) + + + + Defines a signal filter allowing you to take either a "Snooze" or "Deduplication" + 
action for any match signal matching the filter. + + + + + + @@ -165,37 +174,21 @@ import { required } from "vee-validate/dist/rules" import CaseTypeSelect from "@/case/type/CaseTypeSelect.vue" import CasePrioritySelect from "@/case/priority/CasePrioritySelect.vue" -import DuplicationRuleCard from "@/signal/DuplicationRule.vue" -import EntityRuleCard from "@/signal/EntityRule.vue" -import SuppressionRule from "./SuppressionRule.vue" +import SignalFilterCombobox from "@/signal/filter/SignalFilterCombobox.vue" extend("required", { ...required, }) export default { - name: "SignalNewEditSheet", + name: "SignalNewEditDialog", components: { ValidationObserver, ValidationProvider, CaseTypeSelect, CasePrioritySelect, - DuplicationRuleCard, - EntityRuleCard, - SuppressionRule, - }, - - data() { - return { - windows: [ - { label: "10min", value: 600 }, - { label: "30min", value: 1800 }, - { label: "1hr", value: 3600 }, - { label: "8hr", value: 28800 }, - { label: "24hr", value: 86400 }, - ], - } + SignalFilterCombobox, }, computed: { @@ -210,9 +203,8 @@ export default { "selected.external_url", "selected.case_type", "selected.case_priority", + "selected.filters", "selected.entity_types", - "selected.duplication_rule", - "selected.suppression_rule", "selected.source", "selected.project", "selected.loading", diff --git a/src/dispatch/static/dispatch/src/signal/SignalInstanceTab.vue b/src/dispatch/static/dispatch/src/signal/SignalInstanceTab.vue index fe3e7c74225b..91215e63da22 100644 --- a/src/dispatch/static/dispatch/src/signal/SignalInstanceTab.vue +++ b/src/dispatch/static/dispatch/src/signal/SignalInstanceTab.vue @@ -9,11 +9,12 @@ - -