Enhancement/signal search filters (#2976)
* Migrating signal snooze and deduplicate filters to search filters
kevgliss authored Feb 16, 2023
1 parent 4182951 commit 7e0ffbe
Showing 36 changed files with 2,207 additions and 655 deletions.
2 changes: 1 addition & 1 deletion src/dispatch/case/models.py
@@ -169,7 +169,7 @@ class SignalInstanceRead(DispatchBase):
entities: Optional[List[EntityRead]] = []
tags: Optional[List[TagRead]] = []
raw: Any
-    fingerprint: str
+    fingerprint: Optional[str]
created_at: datetime


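Making `fingerprint` optional means `SignalInstanceRead` no longer fails Pydantic validation for instances that carry no fingerprint, which presumably covers instances created before, or outside of, the new filter pipeline.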
42 changes: 42 additions & 0 deletions (new Alembic revision 93b517de08e2; file path not captured)
@@ -0,0 +1,42 @@
"""Allows many to many signal filters
Revision ID: 93b517de08e2
Revises: b168b50764c7
Create Date: 2023-02-13 15:19:36.921571
"""
from alembic import op
import sqlalchemy as sa

# revision identifiers, used by Alembic.
revision = "93b517de08e2"
down_revision = "b168b50764c7"
branch_labels = None
depends_on = None


def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"assoc_signal_filters",
sa.Column("signal_id", sa.Integer(), nullable=False),
sa.Column("signal_filter_id", sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(["signal_filter_id"], ["signal_filter.id"], ondelete="CASCADE"),
sa.ForeignKeyConstraint(["signal_id"], ["signal.id"], ondelete="CASCADE"),
sa.PrimaryKeyConstraint("signal_id", "signal_filter_id"),
)
op.drop_constraint("signal_filter_signal_id_fkey", "signal_filter", type_="foreignkey")
op.drop_column("signal_filter", "signal_id")
# ### end Alembic commands ###


def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column(
"signal_filter", sa.Column("signal_id", sa.INTEGER(), autoincrement=False, nullable=True)
)
op.create_foreign_key(
"signal_filter_signal_id_fkey", "signal_filter", "signal", ["signal_id"], ["id"]
)
op.drop_table("assoc_signal_filters")
# ### end Alembic commands ###
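This association table turns signal filters into shared, reusable objects: a filter can apply to many signals and a signal can reference many filters. The ORM wiring that rides on it (done in the signal models elsewhere in this commit, not shown above) presumably looks along these lines; the model bodies here are simplified stand-ins, not Dispatch's actual classes:

```python
from sqlalchemy import Column, ForeignKey, Integer, Table
from sqlalchemy.orm import declarative_base, relationship

Base = declarative_base()

# Mirrors the assoc_signal_filters table created by the migration above.
assoc_signal_filters = Table(
    "assoc_signal_filters",
    Base.metadata,
    Column("signal_id", Integer, ForeignKey("signal.id", ondelete="CASCADE"), primary_key=True),
    Column(
        "signal_filter_id",
        Integer,
        ForeignKey("signal_filter.id", ondelete="CASCADE"),
        primary_key=True,
    ),
)


class SignalFilter(Base):  # simplified stand-in
    __tablename__ = "signal_filter"
    id = Column(Integer, primary_key=True)


class Signal(Base):  # simplified stand-in
    __tablename__ = "signal"
    id = Column(Integer, primary_key=True)
    # Many-to-many: filters are shared across signals instead of owned by one signal.
    filters = relationship(SignalFilter, secondary=assoc_signal_filters, backref="signals")
```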
254 changes: 254 additions & 0 deletions (new Alembic revision b168b50764c7; file path not captured)
@@ -0,0 +1,254 @@
"""Moves signal processing to filter approach.
Revision ID: b168b50764c7
Revises: 8746b4e292d2
Create Date: 2023-02-13 13:56:48.032074
"""
from alembic import op
import sqlalchemy as sa
import sqlalchemy_utils
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision = "b168b50764c7"
down_revision = "8746b4e292d2"
branch_labels = None
depends_on = None


def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"signal_filter",
sa.Column("evergreen", sa.Boolean(), nullable=True),
sa.Column("evergreen_owner", sa.String(), nullable=True),
sa.Column("evergreen_reminder_interval", sa.Integer(), nullable=True),
sa.Column("evergreen_last_reminder_at", sa.DateTime(), nullable=True),
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("name", sa.String(), nullable=True),
sa.Column("description", sa.String(), nullable=True),
sa.Column("expression", sa.JSON(), nullable=False),
sa.Column("mode", sa.String(), nullable=False),
sa.Column("action", sa.String(), nullable=False),
sa.Column("expiration", sa.DateTime(), nullable=True),
sa.Column("window", sa.Integer(), nullable=True),
sa.Column("signal_id", sa.Integer(), nullable=True),
sa.Column("creator_id", sa.Integer(), nullable=True),
sa.Column("search_vector", sqlalchemy_utils.types.ts_vector.TSVectorType(), nullable=True),
sa.Column("project_id", sa.Integer(), nullable=True),
sa.Column("created_at", sa.DateTime(), nullable=True),
sa.Column("updated_at", sa.DateTime(), nullable=True),
sa.ForeignKeyConstraint(
["creator_id"],
["dispatch_core.dispatch_user.id"],
),
sa.ForeignKeyConstraint(["project_id"], ["project.id"], ondelete="CASCADE"),
sa.ForeignKeyConstraint(
["signal_id"],
["signal.id"],
),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("name", "project_id"),
)
op.create_index(
"signal_filter_search_vector_idx",
"signal_filter",
["search_vector"],
unique=False,
postgresql_using="gin",
)
op.drop_constraint("signal_suppression_rule_id_fkey", "signal", type_="foreignkey")
op.drop_constraint("signal_duplication_rule_id_fkey", "signal", type_="foreignkey")
op.drop_constraint(
"signal_instance_duplication_rule_id_fkey", "signal_instance", type_="foreignkey"
)
op.drop_constraint(
"signal_instance_suppression_rule_id_fkey", "signal_instance", type_="foreignkey"
)
op.drop_table("assoc_duplication_rule_tag_types")
op.drop_table("assoc_suppression_rule_tags")
op.drop_table("assoc_signal_instance_tags")
op.drop_table("duplication_rule")
op.drop_table("suppression_rule")
op.drop_column("signal", "suppression_rule_id")
op.drop_column("signal", "duplication_rule_id")
op.add_column("signal_instance", sa.Column("filter_action", sa.String(), nullable=True))
op.drop_column("signal_instance", "suppression_rule_id")
op.drop_column("signal_instance", "duplication_rule_id")
# ### end Alembic commands ###


def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column(
"signal_instance",
sa.Column("duplication_rule_id", sa.INTEGER(), autoincrement=False, nullable=True),
)
op.add_column(
"signal_instance",
sa.Column("suppression_rule_id", sa.INTEGER(), autoincrement=False, nullable=True),
)
op.create_foreign_key(
"signal_instance_suppression_rule_id_fkey",
"signal_instance",
"suppression_rule",
["suppression_rule_id"],
["id"],
)
op.create_foreign_key(
"signal_instance_duplication_rule_id_fkey",
"signal_instance",
"duplication_rule",
["duplication_rule_id"],
["id"],
)
op.drop_column("signal_instance", "filter_action")
op.add_column(
"signal", sa.Column("duplication_rule_id", sa.INTEGER(), autoincrement=False, nullable=True)
)
op.add_column(
"signal", sa.Column("suppression_rule_id", sa.INTEGER(), autoincrement=False, nullable=True)
)
op.create_foreign_key(
"signal_duplication_rule_id_fkey",
"signal",
"duplication_rule",
["duplication_rule_id"],
["id"],
)
op.create_foreign_key(
"signal_suppression_rule_id_fkey",
"signal",
"suppression_rule",
["suppression_rule_id"],
["id"],
)
op.add_column(
"plugin_instance",
sa.Column(
"configuration",
postgresql.JSON(astext_type=sa.Text()),
autoincrement=False,
nullable=True,
),
)
op.drop_index("entity_search_vector_idx", table_name="entity", postgresql_using="gin")
op.create_index("ix_entity_search_vector", "entity", ["search_vector"], unique=False)
op.create_table(
"service_incident",
sa.Column("incident_id", sa.INTEGER(), autoincrement=False, nullable=False),
sa.Column("service_id", sa.INTEGER(), autoincrement=False, nullable=False),
sa.ForeignKeyConstraint(
["incident_id"], ["incident.id"], name="service_incident_incident_id_fkey"
),
sa.ForeignKeyConstraint(
["service_id"], ["service.id"], name="service_incident_service_id_fkey"
),
sa.PrimaryKeyConstraint("incident_id", "service_id", name="service_incident_pkey"),
)
op.create_table(
"assoc_suppression_rule_tags",
sa.Column("suppression_rule_id", sa.INTEGER(), autoincrement=False, nullable=False),
sa.Column("tag_id", sa.INTEGER(), autoincrement=False, nullable=False),
sa.ForeignKeyConstraint(
["suppression_rule_id"],
["suppression_rule.id"],
name="assoc_suppression_rule_tags_suppression_rule_id_fkey",
ondelete="CASCADE",
),
sa.ForeignKeyConstraint(
["tag_id"],
["tag.id"],
name="assoc_suppression_rule_tags_tag_id_fkey",
ondelete="CASCADE",
),
sa.PrimaryKeyConstraint(
"suppression_rule_id", "tag_id", name="assoc_suppression_rule_tags_pkey"
),
)
op.create_table(
"assoc_duplication_rule_tag_types",
sa.Column("duplication_rule_id", sa.INTEGER(), autoincrement=False, nullable=False),
sa.Column("tag_type_id", sa.INTEGER(), autoincrement=False, nullable=False),
sa.ForeignKeyConstraint(
["duplication_rule_id"],
["duplication_rule.id"],
name="assoc_duplication_rule_tag_types_duplication_rule_id_fkey",
ondelete="CASCADE",
),
sa.ForeignKeyConstraint(
["tag_type_id"],
["tag_type.id"],
name="assoc_duplication_rule_tag_types_tag_type_id_fkey",
ondelete="CASCADE",
),
sa.PrimaryKeyConstraint(
"duplication_rule_id", "tag_type_id", name="assoc_duplication_rule_tag_types_pkey"
),
)
op.create_table(
"assoc_signal_instance_tags",
sa.Column("signal_instance_id", postgresql.UUID(), autoincrement=False, nullable=False),
sa.Column("tag_id", sa.INTEGER(), autoincrement=False, nullable=False),
sa.ForeignKeyConstraint(
["signal_instance_id"],
["signal_instance.id"],
name="assoc_signal_instance_tags_signal_instance_id_fkey",
ondelete="CASCADE",
),
sa.ForeignKeyConstraint(
["tag_id"],
["tag.id"],
name="assoc_signal_instance_tags_tag_id_fkey",
ondelete="CASCADE",
),
sa.PrimaryKeyConstraint(
"signal_instance_id", "tag_id", name="assoc_signal_instance_tags_pkey"
),
)
op.create_table(
"suppression_rule",
sa.Column("evergreen", sa.BOOLEAN(), autoincrement=False, nullable=True),
sa.Column("evergreen_owner", sa.VARCHAR(), autoincrement=False, nullable=True),
sa.Column("evergreen_reminder_interval", sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column(
"evergreen_last_reminder_at", postgresql.TIMESTAMP(), autoincrement=False, nullable=True
),
sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False),
sa.Column("mode", sa.VARCHAR(), autoincrement=False, nullable=False),
sa.Column("expiration", postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
sa.Column("project_id", sa.INTEGER(), autoincrement=False, nullable=True),
sa.ForeignKeyConstraint(
["project_id"],
["project.id"],
name="suppression_rule_project_id_fkey",
ondelete="CASCADE",
),
sa.PrimaryKeyConstraint("id", name="suppression_rule_pkey"),
)
op.create_table(
"duplication_rule",
sa.Column("evergreen", sa.BOOLEAN(), autoincrement=False, nullable=True),
sa.Column("evergreen_owner", sa.VARCHAR(), autoincrement=False, nullable=True),
sa.Column("evergreen_reminder_interval", sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column(
"evergreen_last_reminder_at", postgresql.TIMESTAMP(), autoincrement=False, nullable=True
),
sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False),
sa.Column("mode", sa.VARCHAR(), autoincrement=False, nullable=False),
sa.Column("project_id", sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column("window", sa.INTEGER(), autoincrement=False, nullable=True),
sa.ForeignKeyConstraint(
["project_id"],
["project.id"],
name="duplication_rule_project_id_fkey",
ondelete="CASCADE",
),
sa.PrimaryKeyConstraint("id", name="duplication_rule_pkey"),
)
op.drop_index(
"signal_filter_search_vector_idx", table_name="signal_filter", postgresql_using="gin"
)
op.drop_table("signal_filter")
# ### end Alembic commands ###
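Functionally, one `signal_filter` row now covers both of the old rule types: `action` says whether a match snoozes or deduplicates an instance, `expression` holds a search-filter expression, `expiration` bounds a snooze, `window` bounds deduplication, and the outcome is stamped on the new `signal_instance.filter_action` column. A rough sketch of that evaluation under these assumptions (the enum values and the `matches_expression` / `has_recent_duplicate` helpers are illustrative, not Dispatch's actual service code):

```python
from datetime import datetime, timedelta
from enum import Enum


class FilterAction(str, Enum):
    # Assumed values; the real enum lives in the dispatch.signal package.
    snooze = "snooze"
    deduplicate = "deduplicate"


def evaluate_filters(instance, filters, matches_expression, has_recent_duplicate, now=None):
    """Return the action to stamp on signal_instance.filter_action, or None.

    matches_expression and has_recent_duplicate are hypothetical callables
    standing in for the search-filter matcher and a windowed DB lookup.
    """
    now = now or datetime.utcnow()
    for f in filters:
        if not matches_expression(instance, f.expression):
            continue
        if f.action == FilterAction.snooze:
            # A snooze suppresses matching instances until it expires.
            if f.expiration is None or f.expiration > now:
                return FilterAction.snooze
        elif f.action == FilterAction.deduplicate and f.window:
            # Deduplicate instances that already matched inside the trailing window (seconds).
            if has_recent_duplicate(instance, now - timedelta(seconds=f.window)):
                return FilterAction.deduplicate
    return None
```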
6 changes: 5 additions & 1 deletion src/dispatch/database/service.py
@@ -33,6 +33,7 @@
from dispatch.participant.models import Participant
from dispatch.plugin.models import Plugin, PluginInstance
from dispatch.search.fulltext.composite_search import CompositeSearch
+from dispatch.signal.models import SignalInstance
from dispatch.task.models import Task

from .core import Base, get_class_by_tablename, get_db, get_model_name_by_tablename
@@ -347,7 +348,8 @@ def apply_filter_specific_joins(model: Base, filter_spec: dict, query: orm.query):
(Incident, "Tag"): (Incident.tags, True),
(Incident, "TagType"): (Incident.tags, True),
(Incident, "Term"): (Incident.terms, True),
(Case, "Tag"): (Case.tags, True),
(SignalInstance, "Entity"): (SignalInstance.entities, True),
(SignalInstance, "EntityType"): (SignalInstance.entities, True),
}
filters = build_filters(filter_spec)
filter_models = get_named_models(filters)[0]
@@ -485,6 +487,8 @@ def search_filter_sort_paginate(
raise ValidationError(
[ErrorWrapper(InvalidFilterError(msg=str(e)), loc="filter")], model=BaseModel
) from None
+    except Exception as e:
+        log.exception(e)

if items_per_page == -1:
items_per_page = None
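With the two new join mappings, a search on signal instances can filter by entity attributes. A hedged example of the kind of `filter_spec` that `search_filter_sort_paginate` consumes (the grammar follows the sqlalchemy-filters style implied by `build_filters`; the entity-type name is illustrative):

```python
# Find signal instances tagged with an entity of a given type. Because the
# spec names the EntityType model, apply_filter_specific_joins() adds the
# SignalInstance.entities join before the filters are applied.
filter_spec = {
    "and": [
        {"model": "EntityType", "field": "name", "op": "==", "value": "aws-iam-role"},
    ]
}
```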
2 changes: 1 addition & 1 deletion src/dispatch/entity/service.py
@@ -124,7 +124,7 @@ def update(*, db_session, entity: Entity, entity_in: EntityUpdate) -> Entity:

def delete(*, db_session, entity_id: int):
"""Deletes an existing entity."""
-    entity = db_session.query(Entity).filter(Entity.id == entity_id).one_or_none()
+    entity = db_session.query(Entity).filter(Entity.id == entity_id).one()
db_session.delete(entity)
db_session.commit()

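With `.one_or_none()`, a missing entity yielded `None`, and `db_session.delete(None)` then fails with SQLAlchemy's `UnmappedInstanceError`; `.one()` raises `NoResultFound` up front, which is the clearer failure mode.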
4 changes: 2 additions & 2 deletions src/dispatch/entity_type/service.py
@@ -93,6 +93,6 @@ def update(

def delete(*, db_session: Session, entity_type_id: int) -> None:
"""Deletes an entity type."""
-    entity_type = db_session.query(EntityType).filter(EntityType.id == entity_type_id)
-    db_session.delete(entity_type.one_or_none)
+    entity_type = db_session.query(EntityType).filter(EntityType.id == entity_type_id).one()
+    db_session.delete(entity_type)
db_session.commit()
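The old code here never executed the query at all: it passed the bound method `entity_type.one_or_none` itself, rather than its result, to `db_session.delete()`. The rewrite executes the query with `.one()` and deletes the resulting instance.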
7 changes: 0 additions & 7 deletions src/dispatch/enums.py
@@ -6,13 +6,6 @@ def __str__(self) -> str:
return str.__str__(self)


-class RuleMode(DispatchEnum):
-    active = "Active"
-    monitor = "Monitor"
-    inactive = "Inactive"
-    expired = "Expired"
-
-
class Visibility(DispatchEnum):
open = "Open"
restricted = "Restricted"
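With the duplication and suppression rule tables dropped by the migration above, `RuleMode` has no remaining users; the new `signal_filter.mode` column presumably carries the equivalent state as a plain string.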
6 changes: 6 additions & 0 deletions src/dispatch/main.py
@@ -194,6 +194,12 @@ async def dispatch(
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
content={"detail": [{"msg": "Unknown", "loc": ["Unknown"], "type": "Unknown"}]},
)
+    except Exception as e:
+        log.exception(e)
+        response = JSONResponse(
+            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+            content={"detail": [{"msg": "Unknown", "loc": ["Unknown"], "type": "Unknown"}]},
+        )

return response

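This catch-all mirrors the 422 branch above it: any unexpected exception is logged and converted into a structured 500 response, instead of bubbling out of the middleware unhandled.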
2 changes: 1 addition & 1 deletion src/dispatch/models.py
@@ -1,9 +1,9 @@
from typing import Optional
from datetime import datetime, timedelta

-from pydantic import BaseModel
from pydantic.fields import Field
from pydantic.networks import EmailStr
+from pydantic import BaseModel
from pydantic.types import conint, constr, SecretStr

from sqlalchemy import Boolean, Column, DateTime, Integer, String, event, ForeignKey
(Diffs for the remaining 27 of the 36 changed files did not load and are not shown here.)