diff --git a/server/features/assignments_delete.feature b/server/features/assignments_delete.feature index 7cee7ff31..1b6e279e0 100644 --- a/server/features/assignments_delete.feature +++ b/server/features/assignments_delete.feature @@ -291,7 +291,9 @@ Feature: Assignments Delete }, "workflow_status": "active" }], - "event_item": "#events._id#" + "related_events": [ + {"_id": "#events._id#", "link_type": "primary"} + ] } """ Then we get OK response @@ -347,7 +349,11 @@ Feature: Assignments Delete }, "workflow_status": "active" }], - "event_item": "#EVENT2._id#", + "related_events": [{ + "_id": "#EVENT2._id#", + "link_type": "primary", + "recurrence_id": "#EVENT2.recurrence_id#" + }], "recurrence_id": "#EVENT2.recurrence_id#" } """ diff --git a/server/features/assignments_lock.feature b/server/features/assignments_lock.feature index f32ebf212..deb226019 100644 --- a/server/features/assignments_lock.feature +++ b/server/features/assignments_lock.feature @@ -6,6 +6,7 @@ Feature: Assignments Locking """ [{ "_id": "a123", + "planning_item": "plan1", "planning": { "ednote": "test coverage, I want 250 words", "headline": "test headline", @@ -34,6 +35,7 @@ Feature: Assignments Locking """ [{ "_id": "a123", + "planning_item": "plan1", "planning": { "ednote": "test coverage, I want 250 words", "headline": "test headline", @@ -148,6 +150,7 @@ Feature: Assignments Locking """ [{ "_id": "a123", + "planning_item": "plan1", "planning": { "ednote": "test coverage, I want 250 words", "headline": "test headline", diff --git a/server/features/assignments_revert.feature b/server/features/assignments_revert.feature index 292a603ed..f0f01572c 100644 --- a/server/features/assignments_revert.feature +++ b/server/features/assignments_revert.feature @@ -11,6 +11,7 @@ Feature: Assignment Revert """ [{ "_id": "aaaaaaaaaaaaaaaaaaaaaaaa", + "planning_item": "plan1", "planning": { "ednote": "test coverage, I want 250 words", "headline": "test headline", @@ -78,6 +79,7 @@ Feature: Assignment 
Revert """ [{ "_id": "aaaaaaaaaaaaaaaaaaaaaaaa", + "planning_item": "plan1", "planning": { "ednote": "test coverage, I want 250 words", "headline": "test headline", @@ -114,6 +116,7 @@ Feature: Assignment Revert """ [{ "_id": "aaaaaaaaaaaaaaaaaaaaaaaa", + "planning_item": "plan1", "planning": { "ednote": "test coverage, I want 250 words", "headline": "test headline", diff --git a/server/features/combined_export.feature b/server/features/combined_export.feature index bfdd3aa2a..58fa80efd 100644 --- a/server/features/combined_export.feature +++ b/server/features/combined_export.feature @@ -58,7 +58,9 @@ Feature: Export combined Planning and Event items with default template "headline": "Planning 1", "slugline": "planning-1", "description_text": "desc", - "event_item": "#events._id#", + "related_events": [ + {"_id": "#events._id#", "link_type": "primary"} + ], "ednote": "Ed. note 1", "coverages": [{ "coverage_id": "123", diff --git a/server/features/event_embedded_planning.feature b/server/features/event_embedded_planning.feature index d58feae44..68d5f14fc 100644 --- a/server/features/event_embedded_planning.feature +++ b/server/features/event_embedded_planning.feature @@ -43,7 +43,9 @@ Feature: Event Embedded Planning "original_creator": "#CONTEXT_USER_ID#", "firstcreated": "__now__", "versioncreated": "__now__", - "event_item": "event1", + "related_events": [ + {"_id": "event1", "link_type": "primary"} + ], "planning_date": "2029-11-21T12:00:00+0000", "coverages": [{ "coverage_id": "__any_value__", @@ -381,7 +383,9 @@ Feature: Event Embedded Planning Then we get list with 1 items """ {"_items": [{ - "event_item": "event1", + "related_events": [ + {"_id": "event1", "link_type": "primary"} + ], "slugline": "__no_value__", "coverages": [{ "planning": { diff --git a/server/features/events.feature b/server/features/events.feature index 874d3a9fd..b5036603a 100644 --- a/server/features/events.feature +++ b/server/features/events.feature @@ -677,7 +677,7 @@ Feature: Events 
When we get "/planning/plan1" Then we get existing resource """ - {"event_item": "#events._id#"} + {"related_events": [{"_id": "#events._id#", "link_type": "primary"}]} """ And we get notifications """ @@ -711,7 +711,9 @@ Feature: Events "operation": "create_event", "planning_id": "#planning._id#", "update": { - "event_item": "#events._id#" + "related_events": [ + {"_id": "#events._id#", "link_type": "primary"} + ] } }] } @@ -1135,7 +1137,7 @@ Feature: Events When we get "/planning/#planning._id#" Then we get existing resource """ - {"event_item": "#events._id#"} + {"related_events": [{"_id": "#events._id#", "link_type": "primary"}]} """ When we post to "/events/#events._id#/lock" """ @@ -1324,7 +1326,7 @@ Feature: Events When we get "/planning/#planning._id#" Then we get existing resource """ - {"event_item": "#events._id#"} + {"related_events": [{"_id": "#events._id#", "link_type": "primary"}]} """ When we post to "/events/#events._id#/lock" """ @@ -1465,7 +1467,9 @@ Feature: Events "assigned_to": {} } ], - "event_item": "tempId-1712220681040btveuuz" + "related_events": [ + {"_id": "tempId-1712220681040btveuuz", "link_type": "primary"} + ] } ], "related_items": [ @@ -1554,7 +1558,9 @@ Feature: Events "assigned_to": {} } ], - "event_item": "tempId-1712220681040btveuuz" + "related_events": [ + {"_id": "tempId-1712220681040btveuuz", "link_type": "primary"} + ] } ], "related_items": [ diff --git a/server/features/events_cancel.feature b/server/features/events_cancel.feature index 64274a377..bb2bcfe7f 100644 --- a/server/features/events_cancel.feature +++ b/server/features/events_cancel.feature @@ -164,7 +164,7 @@ Feature: Events Cancel "_id": "plan1", "guid": "plan1", "slugline": "TestPlan 1", - "event_item": "event1", + "related_events": [{"_id": "event1", "link_type": "primary"}], "state": "draft", "planning_date": "2016-01-02" }, @@ -172,7 +172,7 @@ Feature: Events Cancel "_id": "plan2", "guid": "plan2", "slugline": "TestPlan 2", - "event_item": "event1", + 
"related_events": [{"_id": "event1", "link_type": "primary"}], "state": "draft", "planning_date": "2016-01-02" }] @@ -311,7 +311,7 @@ Feature: Events Cancel [{ "slugline": "Weekly Meetings", "headline": "Friday Club", - "event_item": "#EVENT3._id#", + "related_events": [{"_id": "#EVENT3._id#", "link_type": "primary"}], "planning_date": "2016-01-02" }] """ @@ -349,6 +349,7 @@ Feature: Events Cancel """ [{ "_id": "aaaaaaaaaaaaaaaaaaaaaaaa", + "planning_item": "plan1", "planning": { "ednote": "test coverage, I want 250 words", "headline": "test headline", @@ -392,7 +393,9 @@ Feature: Events Cancel "_id": "plan1", "guid": "plan1", "slugline": "TestPlan 1", - "event_item": "event1", + "related_events": [ + {"_id": "event1", "link_type": "primary"} + ], "ednote": "We're covering this Event", "state": "draft", "coverages": [{ @@ -866,7 +869,9 @@ Feature: Events Cancel [{ "slugline": "Weekly Meetings", "headline": "Friday Club", - "event_item": "#EVENT3._id#", + "related_events": [ + {"_id": "#EVENT3._id#", "link_type": "primary"} + ], "planning_date": "2016-01-02" }] """ @@ -895,3 +900,48 @@ Feature: Events Cancel { "_id": "#EVENT4._id#", "state": "cancelled" } ]} """ + + @auth + @vocabulary + Scenario: Cancelling an Event does not cancel Planning item with secondary link + Given we have sessions "/sessions" + And "events" + """ + [{ + "guid": "event1", + "name": "Event1", + "dates": { + "start": "2029-05-29T12:00:00+0000", + "end": "2029-05-29T14:00:00+0000", + "tz": "Australia/Sydney" + }, + "lock_user": "#CONTEXT_USER_ID#", + "lock_session": "#SESSION_ID#", + "lock_action": "cancel", + "lock_time": "#DATE#" + }] + """ + And "planning" + """ + [{ + "guid": "plan1", + "slugline": "test-plan", + "planning_date": "2029-05-29T12:00:00+0000", + "related_events": [{"_id": "event1", "link_type": "primary"}] + }, { + "guid": "plan2", + "slugline": "test-plan", + "planning_date": "2029-05-29T12:00:00+0000", + "related_events": [{"_id": "event1", "link_type": "secondary"}] + }] + 
""" + When we perform cancel on events "event1" + Then we get OK response + When we get "/planning" + Then we get list with 2 items + """ + {"_items": [ + {"_id": "plan1", "state": "cancelled"}, + {"_id": "plan2", "state": "draft"} + ]} + """ diff --git a/server/features/events_lock.feature b/server/features/events_lock.feature index 82fa2d852..c1c514824 100644 --- a/server/features/events_lock.feature +++ b/server/features/events_lock.feature @@ -101,7 +101,7 @@ Feature: Events Locking """ [{ "slugline": "TestPlan", - "event_item": "123", + "related_events": [{"_id": "123", "link_type": "primary"}], "planning_date": "2016-01-02" }] """ @@ -113,7 +113,7 @@ Feature: Events Locking """ { "slugline": "TestPlan", - "event_item": "123", + "related_events": [{"_id": "123", "link_type": "primary"}], "lock_user": "#CONTEXT_USER_ID#" } @@ -236,7 +236,7 @@ Feature: Events Locking { "item_class": "item class value", "headline": "test headline", - "event_item": "#EVENT3._id#", + "related_events": [{"_id": "#EVENT3._id#", "link_type": "primary"}], "planning_date": "2016-01-02" } ] @@ -251,7 +251,7 @@ Feature: Events Locking { "item_class": "item class value", "headline": "test headline", - "event_item": "#EVENT3._id#", + "related_events": [{"_id": "#EVENT3._id#", "link_type": "primary"}], "lock_user": "#CONTEXT_USER_ID#" } """ diff --git a/server/features/events_post.feature b/server/features/events_post.feature index beb66fd01..74e2b260f 100644 --- a/server/features/events_post.feature +++ b/server/features/events_post.feature @@ -297,7 +297,7 @@ Feature: Events Post "headline": "test headline", "slugline": "test slugline", "planning_date": "2016-01-02", - "event_item": "#events._id#" + "related_events": [{"_id": "#events._id#", "link_type": "primary"}] } """ Then we get OK response @@ -439,7 +439,7 @@ Feature: Events Post "headline": "test headline", "slugline": "test slugline", "planning_date": "2016-01-02", - "event_item": "#events._id#" + "related_events": [{"_id": 
"#events._id#", "link_type": "primary"}] } """ Then we get OK response @@ -580,7 +580,7 @@ Feature: Events Post "headline": "test headline", "slugline": "test slugline", "planning_date": "2016-01-02", - "event_item": "#events._id#" + "related_events": [{"_id": "#events._id#", "link_type": "primary"}] } """ Then we get OK response @@ -740,7 +740,7 @@ Feature: Events Post When we get "/planning/plan1" Then we get existing resource """ - {"event_item": "#events._id#"} + {"related_events": [{"_id": "#events._id#", "link_type": "primary"}]} """ When we post to "/events/post" """ @@ -763,7 +763,9 @@ Feature: Events Post { "item_id": "#planning._id#", "published_item": { - "event_item": "#events._id#" + "related_events": [ + {"_id": "#events._id#", "link_type": "primary"} + ] } }, { @@ -799,7 +801,7 @@ Feature: Events Post "headline": "test headline1", "slugline": "test slugline", "planning_date": "2016-01-02", - "event_item": "#events._id#" + "related_events": [{"_id": "#events._id#", "link_type": "primary"}] } """ Then we get OK response @@ -866,7 +868,7 @@ Feature: Events Post "headline": "test headline1", "slugline": "test slugline", "planning_date": "2016-01-02", - "event_item": "#events._id#" + "related_events": [{"_id": "#events._id#", "link_type": "primary"}] } """ Then we get OK response @@ -947,7 +949,7 @@ Feature: Events Post "headline": "test headline1", "slugline": "test slugline", "planning_date": "2016-01-02", - "event_item": "#events._id#" + "related_events": [{"_id": "#events._id#", "link_type": "primary"}] } """ Then we get OK response @@ -1238,7 +1240,7 @@ Feature: Events Post "headline": "test headline", "guid": "123", "planning_date": "2029-11-22", - "event_item": "#EVENT1._id#" + "related_events": [{"_id": "#EVENT1._id#", "link_type": "primary"}] }] """ Then we get OK response @@ -1257,3 +1259,101 @@ Feature: Events Post """ {"failed_planning_ids": [{"_id": "123", "error": ["Related planning : SLUGLINE is a required field"]}]} """ + + @auth + 
Scenario: Posting an Event will not post Planning item with secondary link + # Configure auto-posting of primary linked Events + Given "planning_types" + """ + [{ + "_id": "event", + "name": "event", + "editor": {"related_plannings": {"enabled": true}}, + "schema": {"related_plannings": {"planning_auto_publish": true}} + }] + """ + + # Create the Events and linked Planning items + When we post to "/events" with success + """ + [{ + "guid": "event1", + "name": "Event1", + "dates": { + "start": "2029-05-29T12:00:00+0000", + "end": "2029-05-29T14:00:00+0000", + "tz": "Australia/Sydney" + } + }] + """ + When we post to "/planning" with success + """ + [{ + "guid": "plan1", + "slugline": "test-plan", + "planning_date": "2029-05-29T12:00:00+0000", + "related_events": [{"_id": "event1", "link_type": "primary"}] + }, { + "guid": "plan2", + "slugline": "test-plan", + "planning_date": "2029-05-29T12:00:00+0000", + "related_events": [{"_id": "event1", "link_type": "secondary"}] + }] + """ + + # Post the Event, and make sure only the primary linked Planning is posted + When we post to "/events/post" with success + """ + { + "event": "event1", + "etag": "#events._etag#", + "pubstatus": "usable" + } + """ + When we get "/events/event1" + Then we get existing resource + """ + {"state": "scheduled", "pubstatus": "usable"} + """ + When we get "/planning" + Then we get list with 2 items + """ + {"_items": [ + {"_id": "plan1", "state": "scheduled", "pubstatus": "usable"}, + {"_id": "plan2", "state": "draft", "pubstatus": "__no_value__"} + ]} + """ + Then we store "PLAN2" with 2 item + + # Now post Plan2 + When we post to "/planning/post" with success + """ + { + "planning": "plan2", + "etag": "#PLAN2._etag#", + "pubstatus": "usable" + } + """ + + # unpost the Event, and make sure only the primary linked Planning is modified + When we post to "/events/post" with success + """ + { + "event": "event1", + "etag": "#events._etag#", + "pubstatus": "cancelled" + } + """ + When we get 
"/events/event1" + Then we get existing resource + """ + {"state": "killed", "pubstatus": "cancelled"} + """ + When we get "/planning" + Then we get list with 2 items + """ + {"_items": [ + {"_id": "plan1", "state": "killed", "pubstatus": "cancelled"}, + {"_id": "plan2", "state": "scheduled", "pubstatus": "usable"} + ]} + """ diff --git a/server/features/events_postpone.feature b/server/features/events_postpone.feature index 897e365b7..5035128a8 100644 --- a/server/features/events_postpone.feature +++ b/server/features/events_postpone.feature @@ -85,7 +85,7 @@ Feature: Events Postpone "_id": "plan1", "guid": "plan1", "slugline": "TestPlan 1", - "event_item": "event1", + "related_events": [{"_id": "event1", "link_type": "primary"}], "state": "draft", "planning_date": "2016-01-02" }, @@ -93,7 +93,7 @@ Feature: Events Postpone "_id": "plan2", "guid": "plan2", "slugline": "TestPlan 2", - "event_item": "event1", + "related_events": [{"_id": "event1", "link_type": "primary"}], "state": "draft", "planning_date": "2016-01-02" }] @@ -219,7 +219,7 @@ Feature: Events Postpone [{ "slugline": "Weekly Meetings", "headline": "Friday Club", - "event_item": "#EVENT3._id#", + "related_events": [{"_id": "#EVENT3._id#", "link_type": "primary"}], "planning_date": "2016-01-02" }] """ @@ -277,7 +277,7 @@ Feature: Events Postpone [{ "slugline": "Weekly Meetings", "headline": "Friday Club", - "event_item": "#EVENT3._id#", + "related_events": [{"_id": "#EVENT3._id#", "link_type": "primary"}], "planning_date": "2016-01-02" }] """ @@ -316,6 +316,7 @@ Feature: Events Postpone """ [{ "_id": "aaaaaaaaaaaaaaaaaaaaaaaa", + "planning_item": "plan1", "planning": { "ednote": "test coverage, I want 250 words", "headline": "test headline", @@ -359,7 +360,7 @@ Feature: Events Postpone "_id": "plan1", "guid": "plan1", "slugline": "TestPlan 1", - "event_item": "event1", + "related_events": [{"_id": "event1", "link_type": "primary"}], "ednote": "We're covering this Event", "state": "draft", "coverages": [{ 
@@ -510,6 +511,7 @@ Feature: Events Postpone """ [{ "_id": "aaaaaaaaaaaaaaaaaaaaaaaa", + "planning_item": "plan1", "planning": { "ednote": "test coverage, I want 250 words", "headline": "test headline", @@ -554,7 +556,7 @@ Feature: Events Postpone "_id": "plan1", "guid": "plan1", "slugline": "TestPlan 1", - "event_item": "event1", + "related_events": [{"_id": "event1", "link_type": "primary"}], "ednote": "We're covering this Event", "state": "draft", "coverages": [{ @@ -763,7 +765,7 @@ Feature: Events Postpone [{ "slugline": "Weekly Meetings", "headline": "Friday Club", - "event_item": "#EVENT3._id#", + "related_events": [{"_id": "#EVENT3._id#", "link_type": "primary"}], "planning_date": "2016-01-02" }] """ @@ -792,3 +794,53 @@ Feature: Events Postpone { "_id": "#EVENT4._id#", "state": "postponed" } ]} """ + + @auth + @vocabulary + Scenario: Postponing an Event does not postpone Planning item with secondary link + Given we have sessions "/sessions" + And "events" + """ + [{ + "guid": "event1", + "name": "Event1", + "dates": { + "start": "2029-05-29T12:00:00+0000", + "end": "2029-05-29T14:00:00+0000", + "tz": "Australia/Sydney" + }, + "lock_user": "#CONTEXT_USER_ID#", + "lock_session": "#SESSION_ID#", + "lock_action": "postpone", + "lock_time": "#DATE#" + }] + """ + And "planning" + """ + [{ + "guid": "plan1", + "slugline": "test-plan", + "planning_date": "2029-05-29T12:00:00+0000", + "related_events": [{"_id": "event1", "link_type": "primary"}] + }, { + "guid": "plan2", + "slugline": "test-plan", + "planning_date": "2029-05-29T12:00:00+0000", + "related_events": [{"_id": "event1", "link_type": "secondary"}] + }] + """ + When we perform postpone on events "event1" + Then we get OK response + When we get "/events/event1" + Then we get existing resource + """ + {"state": "postponed"} + """ + When we get "/planning" + Then we get list with 2 items + """ + {"_items": [ + {"_id": "plan1", "state": "postponed"}, + {"_id": "plan2", "state": "draft"} + ]} + """ diff --git 
a/server/features/events_recurring.feature b/server/features/events_recurring.feature index 04163191e..377393a15 100644 --- a/server/features/events_recurring.feature +++ b/server/features/events_recurring.feature @@ -349,7 +349,7 @@ Feature: Events Recurring "_id": "plan1", "guid": "plan1", "slugline": "TestPlan", - "event_item": "event1", + "related_events": [{"_id": "event1", "link_type": "primary"}], "state": "scheduled", "pubstatus": "usable", "ednote": "We planned this.", @@ -458,7 +458,7 @@ Feature: Events Recurring { "slugline": "TestPlan", "state": "rescheduled", - "event_item": "event1", + "related_events": [{"_id": "event1", "recurrence_id": "event1", "link_type": "primary"}], "recurrence_id": "event1" } """ @@ -1085,7 +1085,7 @@ Feature: Events Recurring """ { "slugline": "TestPlanning", - "event_item": "#EVENT1._id#", + "related_events": [{"_id": "#EVENT1._id#", "link_type": "primary"}], "recurrence_id": "#EVENT1.recurrence_id#" } """ diff --git a/server/features/events_reschedule.feature b/server/features/events_reschedule.feature index d26c3b544..29e88cc2f 100644 --- a/server/features/events_reschedule.feature +++ b/server/features/events_reschedule.feature @@ -127,6 +127,7 @@ Feature: Events Reschedule """ [{ "_id": "aaaaaaaaaaaaaaaaaaaaaaaa", + "planning_item": "plan1", "planning": { "ednote": "test coverage, I want 250 words", "headline": "test headline", @@ -166,7 +167,7 @@ Feature: Events Reschedule "_id": "plan1", "guid": "plan1", "slugline": "TestEvent", - "event_item": "event1", + "related_events": [{"_id": "event1", "link_type": "primary"}], "state": "scheduled", "pubstatus": "usable", "ednote": "We planned this.", @@ -310,7 +311,7 @@ Feature: Events Reschedule [{ "slugline": "Weekly Meetings", "headline": "Friday Club", - "event_item": "#EVENT1._id#", + "related_events": [{"_id": "#EVENT1._id#", "link_type": "primary"}], "state": "draft", "planning_date": "2016-01-02" }] @@ -397,7 +398,7 @@ Feature: Events Reschedule {"_items": [{ 
"slugline": "Weekly Meetings", "headline": "Friday Club", - "event_item": "#EVENT1._id#", + "related_events": [{"_id": "#EVENT1._id#", "link_type": "primary"}], "state": "rescheduled" }]} """ @@ -912,7 +913,7 @@ Feature: Events Reschedule "guid": "plan1", "slugline": "Weekly Meetings", "headline": "Friday Club", - "event_item": "#EVENT2._id#", + "related_events": [{"_id": "#EVENT2._id#", "link_type": "primary"}], "coverages": [{ "planning": { "internal_note": "test coverage, 250 words", @@ -930,7 +931,7 @@ Feature: Events Reschedule "guid": "plan2", "slugline": "Weekly Meetings", "headline": "Friday Club", - "event_item": "#EVENT3._id#", + "related_events": [{"_id": "#EVENT3._id#", "link_type": "primary"}], "planning_date": "2025-01-02" } ] @@ -944,12 +945,12 @@ Feature: Events Reschedule {"_items": [ { "_id": "#PLAN1._id#", - "event_item": "#EVENT2._id#", + "related_events": [{"_id": "#EVENT2._id#", "link_type": "primary"}], "state": "draft" }, { "_id": "#PLAN2._id#", - "event_item": "#EVENT3._id#", + "related_events": [{"_id": "#EVENT3._id#", "link_type": "primary"}], "state": "draft" } ]} @@ -1014,7 +1015,7 @@ Feature: Events Reschedule {"_items": [ { "_id": "#PLAN1._id#", - "event_item": "#EVENT2._id#", + "related_events": [{"_id": "#EVENT2._id#", "link_type": "primary"}], "state": "postponed", "state_reason": "Postponed this event!", "coverages": [{ @@ -1031,7 +1032,7 @@ Feature: Events Reschedule }, { "_id": "#PLAN2._id#", - "event_item": "#EVENT3._id#", + "related_events": [{"_id": "#EVENT3._id#", "link_type": "primary"}], "state": "postponed", "state_reason": "Postponed this event!" 
} @@ -1141,7 +1142,7 @@ Feature: Events Reschedule {"_items": [ { "_id": "#PLAN1._id#", - "event_item": "#EVENT2._id#", + "related_events": [{"_id": "#EVENT2._id#", "link_type": "primary"}], "state": "draft", "state_reason": "Event back on at original date and time", "coverages": [{ @@ -1158,7 +1159,7 @@ Feature: Events Reschedule }, { "_id": "#PLAN2._id#", - "event_item": "#EVENT3._id#", + "related_events": [{"_id": "#EVENT3._id#", "link_type": "primary"}], "state": "scheduled", "state_reason": "Event back on at original date and time" } @@ -1703,3 +1704,83 @@ Feature: Events Reschedule {"operation": "post", "event_id": "event1"} ]} """ + + @auth + @vocabulary + Scenario: Rescheduling an Event does not modify Planning item with secondary link + Given we have sessions "/sessions" + And "events" + """ + [{ + "guid": "event1", + "name": "Event1", + "dates": { + "start": "2029-05-29T12:00:00+0000", + "end": "2029-05-29T14:00:00+0000", + "tz": "Australia/Sydney" + }, + "lock_user": "#CONTEXT_USER_ID#", + "lock_session": "#SESSION_ID#", + "lock_action": "reschedule", + "lock_time": "#DATE#" + }] + """ + And "planning" + """ + [{ + "guid": "plan1", + "slugline": "test-plan", + "planning_date": "2029-05-29T12:00:00+0000", + "related_events": [{"_id": "event1", "link_type": "primary"}], + "coverages": [{ + "coverage_id": "plan1_cov1", + "workflow_status": "draft", + "news_coverage_status": {"qcode" : "ncostat:int"} + }] + }, { + "guid": "plan2", + "slugline": "test-plan", + "planning_date": "2029-05-29T12:00:00+0000", + "related_events": [{"_id": "event1", "link_type": "secondary"}], + "coverages": [{ + "coverage_id": "plan2_cov1", + "workflow_status": "draft", + "news_coverage_status": {"qcode" : "ncostat:int"} + }] + }] + """ + When we perform reschedule on events "event1" + """ + { + "reason": "Changing to June 1", + "dates": { + "start": "2029-06-01T12:00:00+0000", + "end": "2029-06-01T14:00:00+0000" + } + } + """ + Then we get OK response + When we get "/planning" + 
Then we get list with 2 items + """ + {"_items": [ + { + "_id": "plan1", + "state": "rescheduled", + "state_reason": "Changing to June 1", + "coverages": [{ + "coverage_id": "plan1_cov1", + "planning": {"workflow_status_reason": "Changing to June 1"} + }] + }, + { + "_id": "plan2", + "state": "draft", + "state_reason": "__no_value__", + "coverages": [{ + "coverage_id": "plan2_cov1", + "planning": {"workflow_status_reason": "__no_value__"} + }] + } + ]} + """ diff --git a/server/features/events_spike.feature b/server/features/events_spike.feature index 1d17dc853..5b74df83d 100644 --- a/server/features/events_spike.feature +++ b/server/features/events_spike.feature @@ -144,13 +144,13 @@ Feature: Events Spike """ [{ "slugline": "TestPlan 1", - "event_item": "#events._id#", + "related_events": [{"_id": "#events._id#", "link_type": "primary"}], "lock_user": "#CONTEXT_USER_ID#", "lock_session": "123", "planning_date": "2016-01-02" }, { "slugline": "TestPlan 2", - "event_item": "#events._id#", + "related_events": [{"_id": "#events._id#", "link_type": "primary"}], "planning_date": "2016-01-02" }] """ @@ -159,13 +159,13 @@ Feature: Events Spike """ {"_items": [{ "slugline": "TestPlan 1", - "event_item": "#events._id#", + "related_events": [{"_id": "#events._id#", "link_type": "primary"}], "state": "draft", "lock_user": "#CONTEXT_USER_ID#", "lock_session": "123" }, { "slugline": "TestPlan 2", - "event_item": "#events._id#", + "related_events": [{"_id": "#events._id#", "link_type": "primary"}], "state": "draft" }]} """ @@ -256,7 +256,7 @@ Feature: Events Spike [{ "slugline": "Friday Club", "headline": "First Meeting", - "event_item": "#EVENT2._id#", + "related_events": [{"_id": "#EVENT2._id#", "link_type": "primary"}], "planning_date": "2016-01-02" }] """ @@ -339,7 +339,7 @@ Feature: Events Spike [{ "slugline": "Friday Club", "headline": "First Meeting", - "event_item": "#EVENT2._id#", + "related_events": [{"_id": "#EVENT2._id#", "link_type": "primary"}], "planning_date": 
"2016-01-02" }] """ @@ -470,7 +470,7 @@ Feature: Events Spike """ [{ "slugline": "Friday Club", - "event_item": "#EVENT3._id#", + "related_events": [{"_id": "#EVENT3._id#", "link_type": "primary"}], "planning_date": "2016-01-02" }] """ @@ -478,7 +478,7 @@ Feature: Events Spike """ [{ "slugline": "Friday Club", - "event_item": "#EVENT4._id#", + "related_events": [{"_id": "#EVENT4._id#", "link_type": "primary"}], "planning_date": "2016-01-02" }] """ @@ -505,7 +505,7 @@ Feature: Events Spike """ [{ "slugline": "Friday Club", - "event_item": "#EVENT6._id#", + "related_events": [{"_id": "#EVENT6._id#", "link_type": "primary"}], "planning_date": "2016-01-02" }] """ @@ -743,7 +743,7 @@ Feature: Events Spike "slugline": "TestEvent", "state": "draft", "planning_date": "2016-01-02", - "event_item": "#events._id#" + "related_events": [{"_id": "#events._id#", "link_type": "primary"}] }, { "_id": "plan2", @@ -751,7 +751,7 @@ Feature: Events Spike "slugline": "TestEvent", "state": "spiked", "planning_date": "2016-01-02", - "event_item": "#events._id#" + "related_events": [{"_id": "#events._id#", "link_type": "primary"}] }, { "_id": "plan3", @@ -759,7 +759,7 @@ Feature: Events Spike "slugline": "TestEvent", "state": "postponed", "planning_date": "2016-01-02", - "event_item": "#events._id#" + "related_events": [{"_id": "#events._id#", "link_type": "primary"}] }, { "_id": "plan4", @@ -767,7 +767,7 @@ Feature: Events Spike "slugline": "TestEvent", "state": "rescheduled", "planning_date": "2016-01-02", - "event_item": "#events._id#" + "related_events": [{"_id": "#events._id#", "link_type": "primary"}] }, { "_id": "plan5", @@ -775,7 +775,7 @@ Feature: Events Spike "slugline": "TestEvent", "state": "cancelled", "planning_date": "2016-01-02", - "event_item": "#events._id#" + "related_events": [{"_id": "#events._id#", "link_type": "primary"}] }, { "_id": "plan6", @@ -783,7 +783,7 @@ Feature: Events Spike "slugline": "TestEvent", "state": "scheduled", "planning_date": "2016-01-02", - 
"event_item": "#events._id#" + "related_events": [{"_id": "#events._id#", "link_type": "primary"}] }, { "_id": "plan7", @@ -791,7 +791,7 @@ Feature: Events Spike "slugline": "TestEvent", "state": "killed", "planning_date": "2016-01-02", - "event_item": "#events._id#" + "related_events": [{"_id": "#events._id#", "link_type": "primary"}] }] """ Then we get OK response @@ -892,7 +892,7 @@ Feature: Events Spike "headline": "test headline", "slugline": "test slugline", "planning_date": "2016-01-02", - "event_item": "#events._id#" + "related_events": [{"_id": "#events._id#", "link_type": "primary"}] } """ Then we get OK response @@ -1004,7 +1004,7 @@ Feature: Events Spike "headline": "test headline", "slugline": "test slugline", "planning_date": "2016-01-02", - "event_item": "#events._id#" + "related_events": [{"_id": "#events._id#", "link_type": "primary"}] } """ Then we get OK response @@ -1074,4 +1074,66 @@ Feature: Events Spike "user": "#CONTEXT_USER_ID#" } }] - """ \ No newline at end of file + """ + + @auth + @vocabulary + Scenario: Spiking an Event does not spike Planning item with secondary link + Given we have sessions "/sessions" + And "events" + """ + [{ + "guid": "event1", + "name": "Event1", + "dates": { + "start": "2029-05-29T12:00:00+0000", + "end": "2029-05-29T14:00:00+0000", + "tz": "Australia/Sydney" + } + }] + """ + And "planning" + """ + [{ + "guid": "plan1", + "slugline": "test-plan", + "planning_date": "2029-05-29T12:00:00+0000", + "related_events": [{"_id": "event1", "link_type": "primary"}] + }, { + "guid": "plan2", + "slugline": "test-plan", + "planning_date": "2029-05-29T12:00:00+0000", + "related_events": [{"_id": "event1", "link_type": "secondary"}], + "lock_user": "#CONTEXT_USER_ID#", + "lock_session": "#SESSION_ID#", + "lock_action": "edit", + "lock_time": "#DATE#" + }] + """ + When we spike events "event1" + Then we get OK response + When we get "/events/event1" + Then we get existing resource + """ + {"state": "spiked"} + """ + When we get 
"/planning" + Then we get list with 2 items + """ + {"_items": [ + {"_id": "plan1", "state": "spiked"}, + {"_id": "plan2", "state": "draft"} + ]} + """ + When we spike planning "plan2" + Then we get OK response + When we unspike events "event1" + Then we get OK response + When we get "/planning" + Then we get list with 2 items + """ + {"_items": [ + {"_id": "plan1", "state": "spiked"}, + {"_id": "plan2", "state": "spiked"} + ]} + """ diff --git a/server/features/events_update_repetitions.feature b/server/features/events_update_repetitions.feature index f8da67046..613b2e50e 100644 --- a/server/features/events_update_repetitions.feature +++ b/server/features/events_update_repetitions.feature @@ -964,7 +964,7 @@ Feature: Events Update Repetitions [{ "slugline": "Friday Club", "headline": "Fourth Meeting", - "event_item": "#EVENT4._id#", + "related_events": [{"_id": "#EVENT4._id#", "link_type": "primary"}], "planning_date": "2016-01-02" }] """ diff --git a/server/features/events_update_time.feature b/server/features/events_update_time.feature index eb6426739..681b2312a 100644 --- a/server/features/events_update_time.feature +++ b/server/features/events_update_time.feature @@ -554,7 +554,7 @@ Feature: Events Update Time When we post to "/planning" with success """ [{ - "event_item": "#EVENT3._id#", + "related_events": [{"_id": "#EVENT3._id#", "link_type": "primary"}], "slugline": "Friday Club", "planning_date": "2016-01-02" }] @@ -674,7 +674,7 @@ Feature: Events Update Time When we post to "/planning" with success """ [{ - "event_item": "#EVENT3._id#", + "related_events": [{"_id": "#EVENT3._id#", "link_type": "primary"}], "slugline": "Friday Club", "planning_date": "2016-01-02" }] @@ -775,7 +775,7 @@ Feature: Events Update Time When we post to "/planning" with success """ [{ - "event_item": "#EVENT3._id#", + "related_events": [{"_id": "#EVENT3._id#", "link_type": "primary"}], "slugline": "Friday Club", "planning_date": "2016-01-02" }] diff --git 
a/server/features/planning.feature b/server/features/planning.feature index d2df9c862..3d7c5df3c 100644 --- a/server/features/planning.feature +++ b/server/features/planning.feature @@ -962,7 +962,7 @@ Feature: Planning { "item_class": "item class value", "headline": "test headline", - "event_item": "#events._id#", + "related_events": [{"_id": "#events._id#", "link_type": "primary"}], "planning_date": "2016-01-02" } ] @@ -1535,6 +1535,7 @@ Feature: Planning """ [{ "_id": "aaaaaaaaaaaaaaaaaaaaaaaa", + "planning_item": "123", "planning": { "ednote": "test coverage, I want 250 words", "headline": "test headline", @@ -1642,7 +1643,7 @@ Feature: Planning "item_class": "item class value", "name": "test name", "slugline": "test slugline", - "event_item": "#events._id#", + "related_events": [{"_id": "#events._id#", "link_type": "primary"}], "planning_date": "2016-01-02" } """ @@ -1684,7 +1685,7 @@ Feature: Planning "item_class": "item class value", "name": "test name", "slugline": "test slugline", - "event_item": "#events._id#", + "related_events": [{"_id": "#events._id#", "link_type": "primary"}], "planning_date": "2016-01-02" } """ @@ -4288,3 +4289,37 @@ Feature: Planning """ When we get "/planning_files/#planning_files._id#" Then we have string photoshop:TransmissionReference="#firstassignment#" in media stream + + @auth + Scenario: Validate Planning related Event must exist + When we post to "/planning" + """ + { + "slugline": "test-plan", + "planning_date": "2029-05-29T12:00:00+0000", + "related_events": [{"_id": "event1", "link_type": "primary"}] + } + """ + Then we get error 400 + When we post to "/events" + """ + { + "guid": "event1", + "name": "Event1", + "dates": { + "start": "2029-05-29T12:00:00+0000", + "end": "2029-05-29T14:00:00+0000", + "tz": "Australia/Sydney" + } + } + """ + Then we get OK response + When we post to "/planning" + """ + { + "slugline": "test-plan", + "planning_date": "2029-05-29T12:00:00+0000", + "related_events": [{"_id": "event1", 
"link_type": "primary"}] + } + """ + Then we get OK response diff --git a/server/features/planning_cancel.feature b/server/features/planning_cancel.feature index fd12f8569..1cecbb01c 100644 --- a/server/features/planning_cancel.feature +++ b/server/features/planning_cancel.feature @@ -345,7 +345,7 @@ Feature: Cancel all coverage "guid": "123", "headline": "test headline", "slugline": "test slugline", - "event_item": "#events._id#", + "related_events": [{"_id": "#events._id#", "link_type": "primary"}], "state": "scheduled", "pubstatus": "usable", "coverages": [ diff --git a/server/features/planning_duplicate.feature b/server/features/planning_duplicate.feature index 66f36e792..574d530b2 100644 --- a/server/features/planning_duplicate.feature +++ b/server/features/planning_duplicate.feature @@ -400,7 +400,7 @@ Feature: Duplicate Planning "guid": "plan1", "slugline": "Test Event", "state": "draft", - "event_item": "event1", + "related_events": [{"_id": "event1", "link_type": "primary"}], "planning_date": "2029-11-21T14:00:00.000Z" }] """ @@ -418,7 +418,7 @@ Feature: Duplicate Planning "slugline": "Test Event", "state": "draft", "planning_date": "2029-11-21T14:00:00+0000", - "event_item": "event1", + "related_events": [{"_id": "event1", "link_type": "primary"}], "expired": "__no_value__" } """ @@ -447,7 +447,7 @@ Feature: Duplicate Planning "guid": "plan1", "slugline": "Test Event", "state": "draft", - "event_item": "event1", + "related_events": [{"_id": "event1", "link_type": "primary"}], "planning_date": "2029-11-21T14:00:00.000Z", "expired": true }] @@ -466,7 +466,7 @@ Feature: Duplicate Planning "slugline": "Test Event", "state": "draft", "planning_date": "2029-11-21T14:00:00+0000", - "event_item": "__no_value__", + "related_events": "__empty__", "expired": "__no_value__" } """ @@ -497,7 +497,7 @@ Feature: Duplicate Planning "slugline": "Test Event", "state" : "cancelled", "state_reason": "A reason why this is cancelled.", - "event_item": "event1", + 
"related_events": [{"_id": "event1", "link_type": "primary"}], "planning_date": "2029-11-21T14:00:00.000Z", "ednote" : "This is the ednote in planning", "coverages": [ @@ -534,7 +534,7 @@ Feature: Duplicate Planning "slugline": "Test Event", "state": "draft", "planning_date": "2029-11-21T14:00:00+0000", - "event_item": "__no_value__", + "related_events": "__empty__", "expired": "__no_value__", "state_reason": "__no_value__", "ednote": "This is the ednote in planning", @@ -567,7 +567,7 @@ Feature: Duplicate Planning "guid": "plan1", "slugline": "Test Event", "state" : "rescheduled", - "event_item": "event1", + "related_events": [{"_id": "event1", "link_type": "primary"}], "planning_date": "2029-11-21T14:00:00.000Z", "ednote" : "This is the ednote in planning", "state_reason": "A reason why this is rescheduled.", @@ -605,7 +605,7 @@ Feature: Duplicate Planning "slugline": "Test Event", "state": "draft", "planning_date": "2029-11-21T14:00:00+0000", - "event_item": "__no_value__", + "related_events": "__empty__", "expired": "__no_value__", "ednote": "This is the ednote in planning", "state_reason": "__no_value__", @@ -644,7 +644,7 @@ Feature: Duplicate Planning "slugline": "test slugline", "state": "scheduled", "pubstatus": "usable", - "event_item": "123", + "related_events": [{"_id": "123", "link_type": "primary"}], "planning_date": "2029-11-21T14:00:00.000Z" }] """ @@ -671,7 +671,7 @@ Feature: Duplicate Planning { "_id": "123", "state": "cancelled", - "event_item": "123" + "related_events": [{"_id": "123", "link_type": "primary"}] } """ When we post to "/planning/123/duplicate" @@ -685,7 +685,7 @@ Feature: Duplicate Planning { "_id": "#duplicate._id#", "state": "draft", - "event_item": "__no_value__" + "related_events": "__empty__" } """ @@ -719,7 +719,7 @@ Feature: Duplicate Planning "slugline": "test slugline", "state": "scheduled", "pubstatus": "usable", - "event_item": "123", + "related_events": [{"_id": "123", "link_type": "primary"}], "planning_date": 
"2029-11-21T14:00:00.000Z" }] """ @@ -753,7 +753,7 @@ Feature: Duplicate Planning { "_id": "123", "state": "rescheduled", - "event_item": "123" + "related_events": [{"_id": "123", "link_type": "primary"}] } """ When we post to "/planning/123/duplicate" @@ -767,6 +767,6 @@ Feature: Duplicate Planning { "_id": "#duplicate._id#", "state": "draft", - "event_item": "__no_value__" + "related_events": "__empty__" } """ diff --git a/server/features/planning_export.feature b/server/features/planning_export.feature index 242ea9714..022aeb9e7 100644 --- a/server/features/planning_export.feature +++ b/server/features/planning_export.feature @@ -42,7 +42,7 @@ Feature: Export planning items with default template "headline": "Planning 1", "slugline": "planning-1", "description_text": "desc", - "event_item": "#events._id#", + "related_events": [{"_id": "#events._id#", "link_type": "primary"}], "ednote": "Ed. note 1", "coverages": [{ "coverage_id": "123", diff --git a/server/features/planning_lock.feature b/server/features/planning_lock.feature index 64ec1d7eb..19f658304 100644 --- a/server/features/planning_lock.feature +++ b/server/features/planning_lock.feature @@ -163,7 +163,7 @@ Feature: Planning Item Locking [{ "_id": "plan1", "guid": "plan1", - "event_item": "#events._id#", + "related_events": [{"_id": "#events._id#", "link_type": "primary"}], "planning_date": "2016-01-02" }] """ @@ -216,13 +216,13 @@ Feature: Planning Item Locking { "_id": "plan1", "guid": "plan1", - "event_item": "#events._id#", + "related_events": [{"_id": "#events._id#", "link_type": "primary"}], "planning_date": "2016-01-02" }, { "_id": "plan2", "guid": "plan2", - "event_item": "#events._id#", + "related_events": [{"_id": "#events._id#", "link_type": "primary"}], "planning_date": "2016-01-02" } ] @@ -234,12 +234,12 @@ Feature: Planning Item Locking { "_id": "plan1", "guid": "plan1", - "event_item": "#events._id#" + "related_events": [{"_id": "#events._id#", "link_type": "primary"}] }, { "_id": "plan2", 
"guid": "plan2", - "event_item": "#events._id#" + "related_events": [{"_id": "#events._id#", "link_type": "primary"}] }]} """ When we post to "/planning/plan2/lock" @@ -332,13 +332,13 @@ Feature: Planning Item Locking { "_id": "plan1", "guid": "plan1", - "event_item": "#EVENT1._id#", + "related_events": [{"_id": "#EVENT1._id#", "link_type": "primary"}], "planning_date": "2016-01-02" }, { "_id": "plan2", "guid": "plan2", - "event_item": "#EVENT1._id#", + "related_events": [{"_id": "#EVENT1._id#", "link_type": "primary"}], "planning_date": "2016-01-02" } ] @@ -351,13 +351,13 @@ Feature: Planning Item Locking { "_id": "plan1", "guid": "plan1", - "event_item": "#EVENT1._id#", + "related_events": [{"_id": "#EVENT1._id#", "link_type": "primary"}], "recurrence_id": "#EVENT1.recurrence_id#" }, { "_id": "plan2", "guid": "plan2", - "event_item": "#EVENT1._id#", + "related_events": [{"_id": "#EVENT1._id#", "link_type": "primary"}], "recurrence_id": "#EVENT1.recurrence_id#" }]} """ @@ -451,7 +451,7 @@ Feature: Planning Item Locking { "_id": "plan1", "guid": "plan1", - "event_item": "#EVENT1._id#", + "related_events": [{"_id": "#EVENT1._id#", "link_type": "primary"}], "planning_date": "2016-01-02" } ] @@ -464,7 +464,7 @@ Feature: Planning Item Locking { "_id": "plan1", "guid": "plan1", - "event_item": "#EVENT1._id#", + "related_events": [{"_id": "#EVENT1._id#", "link_type": "primary"}], "recurrence_id": "#EVENT1.recurrence_id#" }] } @@ -490,5 +490,57 @@ Feature: Planning Item Locking {"_message": "An associated event in this recurring series is already locked."} """ - - + @auth + Scenario: Can lock Planning while related secondary Event is locked + Given "events" + """ + [{ + "guid": "event1", + "name": "Event1", + "dates": { + "start": "2029-05-29T12:00:00+0000", + "end": "2029-05-29T14:00:00+0000", + "tz": "Australia/Sydney" + } + }, { + "guid": "event2", + "name": "Event2", + "dates": { + "start": "2029-05-29T12:00:00+0000", + "end": "2029-05-29T14:00:00+0000", + "tz": 
"Australia/Sydney" + } + }] + """ + And "planning" + """ + [{ + "slugline": "test-plan", + "planning_date": "2029-05-29T12:00:00+0000", + "related_events": [ + {"_id": "event1", "link_type": "primary"}, + {"_id": "event2", "link_type": "secondary"} + ] + }] + """ + When we post to "/events/event1/lock" with success + """ + {"lock_action": "edit"} + """ + When we post to "/planning/#planning._id#/lock" + """ + {"lock_action": "edit"} + """ + Then we get error 403 + When we post to "/events/event1/unlock" with success + """ + {} + """ + When we post to "/events/event2/lock" with success + """ + {"lock_action": "edit"} + """ + When we post to "/planning/#planning._id#/lock" with success + """ + {"lock_action": "edit"} + """ diff --git a/server/features/planning_recurring.feature b/server/features/planning_recurring.feature index bf100b150..a588684d0 100644 --- a/server/features/planning_recurring.feature +++ b/server/features/planning_recurring.feature @@ -44,7 +44,7 @@ Feature: Recurring Events & Planning When we post to "/planning" """ [{ - "event_item": "#EVENT1._id#", + "related_events": [{"_id": "#EVENT1._id#", "link_type": "primary"}], "planning_date": "2024-11-21T12:00:00.000Z", "update_method": "all", "coverages": [{ @@ -86,7 +86,7 @@ Feature: Recurring Events & Planning "guid": "#PLAN1._id#", "type": "planning", "planning_date": "2024-11-21T12:00:00+0000", - "event_item": "#EVENT1._id#", + "related_events": [{"_id": "#EVENT1._id#", "link_type": "primary"}], "recurrence_id": "#EVENT1.recurrence_id#", "planning_recurrence_id": "#PLAN1.planning_recurrence_id#", "coverages": [ @@ -113,7 +113,7 @@ Feature: Recurring Events & Planning "guid": "#PLAN2._id#", "type": "planning", "planning_date": "2024-11-22T12:00:00+0000", - "event_item": "#EVENT2._id#", + "related_events": [{"_id": "#EVENT2._id#", "link_type": "primary"}], "recurrence_id": "#EVENT1.recurrence_id#", "planning_recurrence_id": "#PLAN1.planning_recurrence_id#", "coverages": [ @@ -140,7 +140,7 @@ 
Feature: Recurring Events & Planning "guid": "#PLAN3._id#", "type": "planning", "planning_date": "2024-11-23T12:00:00+0000", - "event_item": "#EVENT3._id#", + "related_events": [{"_id": "#EVENT3._id#", "link_type": "primary"}], "recurrence_id": "#EVENT1.recurrence_id#", "planning_recurrence_id": "#PLAN1.planning_recurrence_id#", "coverages": [ @@ -608,7 +608,7 @@ Feature: Recurring Events & Planning "guid": "#PLAN1._id#", "type": "planning", "planning_date": "2024-11-21T12:00:00+0000", - "event_item": "#EVENT1._id#", + "related_events": [{"_id": "#EVENT1._id#", "link_type": "primary"}], "recurrence_id": "#EVENT1.recurrence_id#", "planning_recurrence_id": "#PLAN1.planning_recurrence_id#", "coverages": [ @@ -637,7 +637,7 @@ Feature: Recurring Events & Planning "guid": "#PLAN2._id#", "type": "planning", "planning_date": "2024-11-22T12:00:00+0000", - "event_item": "#EVENT2._id#", + "related_events": [{"_id": "#EVENT2._id#", "link_type": "primary"}], "recurrence_id": "#EVENT1.recurrence_id#", "planning_recurrence_id": "#PLAN1.planning_recurrence_id#", "coverages": [ @@ -666,7 +666,7 @@ Feature: Recurring Events & Planning "guid": "#PLAN3._id#", "type": "planning", "planning_date": "2024-11-23T12:00:00+0000", - "event_item": "#EVENT3._id#", + "related_events": [{"_id": "#EVENT3._id#", "link_type": "primary"}], "recurrence_id": "#EVENT1.recurrence_id#", "planning_recurrence_id": "#PLAN1.planning_recurrence_id#", "coverages": [ diff --git a/server/features/planning_spike.feature b/server/features/planning_spike.feature index e1e4d17da..0c91470dc 100644 --- a/server/features/planning_spike.feature +++ b/server/features/planning_spike.feature @@ -202,7 +202,7 @@ Feature: Planning Spike "state": "spiked", "revert_state": "draft", "planning_date": "2016-01-02", - "event_item": "#events._id#" + "related_events": [{"_id": "#events._id#", "link_type": "primary"}] }] """ When we unspike planning "#planning._id#" diff --git a/server/features/recurring_event_and_planning.feature 
b/server/features/recurring_event_and_planning.feature index 012091b0a..dd9f0cab7 100644 --- a/server/features/recurring_event_and_planning.feature +++ b/server/features/recurring_event_and_planning.feature @@ -48,7 +48,7 @@ Feature: Recurring Events & Planning """ [{ "headline": "test headline", - "event_item": "#EVENT1._id#", + "related_events": [{"_id": "#EVENT1._id#", "link_type": "primary", "recurrence_id": "#EVENT1.recurrence_id#"}], "planning_date": "2024-11-21T12:00:00.000Z", "coverages": [{ "workflow_status": "draft", @@ -80,7 +80,7 @@ Feature: Recurring Events & Planning "type": "planning", "headline": "test headline", "planning_date": "2024-11-21T12:00:00+0000", - "event_item": "#EVENT1._id#", + "related_events": [{"_id": "#EVENT1._id#", "link_type": "primary", "recurrence_id": "#EVENT1.recurrence_id#"}], "recurrence_id": "#EVENT1.recurrence_id#", "coverages": [ {"planning": {"g2_content_type": "text", "scheduled": "2024-11-21T15:00:00+0000"}}, @@ -95,7 +95,7 @@ Feature: Recurring Events & Planning """ [{ "headline": "test headline", - "event_item": "#EVENT2._id#", + "related_events": [{"_id": "#EVENT2._id#", "link_type": "primary", "recurrence_id": "#EVENT2.recurrence_id#"}], "planning_date": "2024-11-22T12:00:00.000Z", "update_method": "future", "coverages": [{ @@ -128,7 +128,7 @@ Feature: Recurring Events & Planning "type": "planning", "headline": "test headline", "planning_date": "2024-11-22T12:00:00+0000", - "event_item": "#EVENT2._id#", + "related_events": [{"_id": "#EVENT2._id#", "link_type": "primary", "recurrence_id": "#EVENT1.recurrence_id#"}], "recurrence_id": "#EVENT1.recurrence_id#", "coverages": [ {"planning": {"g2_content_type": "text", "scheduled": "2024-11-22T15:00:00+0000"}}, @@ -139,7 +139,7 @@ Feature: Recurring Events & Planning "type": "planning", "headline": "test headline", "planning_date": "2024-11-23T12:00:00+0000", - "event_item": "#EVENT3._id#", + "related_events": [{"_id": "#EVENT3._id#", "link_type": "primary", 
"recurrence_id": "#EVENT1.recurrence_id#"}], "recurrence_id": "#EVENT1.recurrence_id#", "coverages": [ {"planning": {"g2_content_type": "text", "scheduled": "2024-11-23T15:00:00+0000"}}, @@ -154,7 +154,7 @@ Feature: Recurring Events & Planning """ [{ "headline": "test headline", - "event_item": "#EVENT1._id#", + "related_events": [{"_id": "#EVENT1._id#", "link_type": "primary", "recurrence_id": "#EVENT1.recurrence_id#"}], "planning_date": "2024-11-21T12:00:00.000Z", "update_method": "all", "coverages": [{ @@ -187,7 +187,7 @@ Feature: Recurring Events & Planning "type": "planning", "headline": "test headline", "planning_date": "2024-11-21T12:00:00+0000", - "event_item": "#EVENT1._id#", + "related_events": [{"_id": "#EVENT1._id#", "link_type": "primary", "recurrence_id": "#EVENT1.recurrence_id#"}], "recurrence_id": "#EVENT1.recurrence_id#", "coverages": [ {"planning": {"g2_content_type": "text", "scheduled": "2024-11-21T15:00:00+0000"}}, @@ -198,8 +198,7 @@ Feature: Recurring Events & Planning "type": "planning", "headline": "test headline", "planning_date": "2024-11-22T12:00:00+0000", - "event_item": "#EVENT2._id#", - "recurrence_id": "#EVENT1.recurrence_id#", + "related_events": [{"_id": "#EVENT2._id#", "link_type": "primary", "recurrence_id": "#EVENT1.recurrence_id#"}], "coverages": [ {"planning": {"g2_content_type": "text", "scheduled": "2024-11-22T15:00:00+0000"}}, {"planning": {"g2_content_type": "picture", "scheduled": "2024-11-22T16:00:00+0000"}} @@ -209,7 +208,7 @@ Feature: Recurring Events & Planning "type": "planning", "headline": "test headline", "planning_date": "2024-11-23T12:00:00+0000", - "event_item": "#EVENT3._id#", + "related_events": [{"_id": "#EVENT3._id#", "link_type": "primary", "recurrence_id": "#EVENT1.recurrence_id#"}], "recurrence_id": "#EVENT1.recurrence_id#", "coverages": [ {"planning": {"g2_content_type": "text", "scheduled": "2024-11-23T15:00:00+0000"}}, @@ -249,7 +248,7 @@ Feature: Recurring Events & Planning "guid": "__any_value__", 
"type": "planning", "planning_date": "2024-11-22T12:00:00+0000", - "event_item": "#EVENT2._id#", + "related_events": [{"_id": "#EVENT2._id#", "link_type": "primary", "recurrence_id": "#EVENT1.recurrence_id#"}], "recurrence_id": "#EVENT1.recurrence_id#", "coverages": [ {"planning": {"g2_content_type": "text", "scheduled": "2024-11-22T15:00:00+0000"}}, @@ -259,7 +258,7 @@ Feature: Recurring Events & Planning "guid": "__any_value__", "type": "planning", "planning_date": "2024-11-23T12:00:00+0000", - "event_item": "#EVENT3._id#", + "related_events": [{"_id": "#EVENT3._id#", "link_type": "primary", "recurrence_id": "#EVENT1.recurrence_id#"}], "recurrence_id": "#EVENT1.recurrence_id#", "coverages": [ {"planning": {"g2_content_type": "text", "scheduled": "2024-11-23T15:00:00+0000"}}, @@ -299,7 +298,7 @@ Feature: Recurring Events & Planning "guid": "__any_value__", "type": "planning", "planning_date": "2024-11-21T12:00:00+0000", - "event_item": "#EVENT1._id#", + "related_events": [{"_id": "#EVENT1._id#", "link_type": "primary", "recurrence_id": "#EVENT1.recurrence_id#"}], "recurrence_id": "#EVENT1.recurrence_id#", "coverages": [ {"planning": {"g2_content_type": "text", "scheduled": "2024-11-21T15:00:00+0000"}}, @@ -309,7 +308,7 @@ Feature: Recurring Events & Planning "guid": "__any_value__", "type": "planning", "planning_date": "2024-11-22T12:00:00+0000", - "event_item": "#EVENT2._id#", + "related_events": [{"_id": "#EVENT2._id#", "link_type": "primary", "recurrence_id": "#EVENT1.recurrence_id#"}], "recurrence_id": "#EVENT1.recurrence_id#", "coverages": [ {"planning": {"g2_content_type": "text", "scheduled": "2024-11-22T15:00:00+0000"}}, @@ -319,7 +318,7 @@ Feature: Recurring Events & Planning "guid": "__any_value__", "type": "planning", "planning_date": "2024-11-23T12:00:00+0000", - "event_item": "#EVENT3._id#", + "related_events": [{"_id": "#EVENT3._id#", "link_type": "primary", "recurrence_id": "#EVENT1.recurrence_id#"}], "recurrence_id": "#EVENT1.recurrence_id#", 
"coverages": [ {"planning": {"g2_content_type": "text", "scheduled": "2024-11-23T15:00:00+0000"}}, diff --git a/server/features/search.feature b/server/features/search.feature index fef391008..d2f21f973 100644 --- a/server/features/search.feature +++ b/server/features/search.feature @@ -142,7 +142,7 @@ Feature: Search Feature "item_class": "item class value", "headline": "test headline", "slugline": "slug123", - "event_item": "event_123", + "related_events": [{"_id": "event_123", "link_type": "primary"}], "planning_date": "2016-01-02T13:00:00+0000" }, { @@ -150,7 +150,7 @@ Feature: Search Feature "item_class": "item class value", "headline": "test headline", "slugline": "slug456", - "event_item": "event_123", + "related_events": [{"_id": "event_123", "link_type": "primary"}], "subject": [{"qcode": "111", "name": "test name"}], "planning_date": "2016-01-02T14:00:00+0000" } @@ -273,7 +273,7 @@ Feature: Search Feature "item_class": "item class value", "headline": "test headline", "slugline": "slug123", - "event_item": "event_123", + "related_events": [{"_id": "event_123", "link_type": "primary"}], "planning_date": "2016-01-02T13:00:00+0000", "agendas": ["sports"] }, @@ -282,7 +282,7 @@ Feature: Search Feature "item_class": "item class value", "headline": "test headline", "slugline": "slug456", - "event_item": "event_456", + "related_events": [{"_id": "event_456", "link_type": "primary"}], "planning_date": "2016-01-02T14:00:00+0000", "agendas": ["finance"] }, @@ -291,7 +291,7 @@ Feature: Search Feature "item_class": "item class value", "headline": "test headline", "slugline": "slug456", - "event_item": "event_456", + "related_events": [{"_id": "event_456", "link_type": "primary"}], "planning_date": "2016-01-02T14:00:00+0000", "agendas": ["entertainment"] }, @@ -300,7 +300,7 @@ Feature: Search Feature "item_class": "item class value", "headline": "test headline", "slugline": "slug456", - "event_item": "event_786", + "related_events": [{"_id": "event_786", "link_type": 
"primary"}], "planning_date": "2016-01-02T14:00:00+0000", "agendas": ["sports", "finance"] }, diff --git a/server/features/search_combined.feature b/server/features/search_combined.feature index 7392693b7..69ad0a0c0 100644 --- a/server/features/search_combined.feature +++ b/server/features/search_combined.feature @@ -151,7 +151,7 @@ Feature: Search Events and Planning "headline": "plan1 for user 1", "planning_date": "2016-01-01T12:00:00+0000", "original_creator": "user_1", - "event_item": "user_1_event_1" + "related_events": [{"_id": "user_1_event_1", "link_type": "primary"}] }, { "guid": "user_1_plan_2", "headline": "plan2 for user 1", @@ -162,7 +162,7 @@ Feature: Search Events and Planning "headline": "plan1 for user 2", "planning_date": "2016-01-01T12:00:00+0000", "original_creator": "#CONTEXT_USER_ID#", - "event_item": "user_2_event_2" + "related_events": [{"_id": "user_2_event_2", "link_type": "primary"}] }, { "guid": "user_2_plan_2", "headline": "plan2 for user 2", diff --git a/server/features/search_combined_locks.feature b/server/features/search_combined_locks.feature index 6057d1a08..fcd9da5c0 100644 --- a/server/features/search_combined_locks.feature +++ b/server/features/search_combined_locks.feature @@ -76,21 +76,21 @@ Feature: Search Events and Planning Locks "headline": "unlocked", "slugline": "ep-unlocked", "planning_date": "2016-01-02T12:00:00+0000", - "event_item": "event_3" + "related_events": [{"_id": "event_3", "link_type": "primary"}] }, { "guid": "planning_4", "headline": "event locked", "slugline": "ep-e-locked", "planning_date": "2016-01-02T12:00:00+0000", - "event_item": "event_4" + "related_events": [{"_id": "event_4", "link_type": "primary"}] }, { "guid": "planning_5", "headline": "planning locked", "slugline": "ep-p-locked", "planning_date": "2016-01-02T12:00:00+0000", - "event_item": "event_5" + "related_events": [{"_id": "event_5", "link_type": "primary"}] } ] """ diff --git a/server/features/search_events_locks.feature 
b/server/features/search_events_locks.feature index efa67dd43..2be1da96f 100644 --- a/server/features/search_events_locks.feature +++ b/server/features/search_events_locks.feature @@ -64,14 +64,14 @@ Feature: Event Search Locks "headline": "unlocked", "slugline": "ep-unlocked", "planning_date": "2016-01-02T12:00:00+0000", - "event_item": "event_3" + "related_events": [{"_id": "event_3", "link_type": "primary"}] }, { "guid": "planning_2", "headline": "planning locked", "slugline": "ep-p-locked", "planning_date": "2016-01-02T12:00:00+0000", - "event_item": "event_5" + "related_events": [{"_id": "event_5", "link_type": "primary"}] } ] """ @@ -170,7 +170,7 @@ Feature: Event Search Locks "headline": "planning", "slugline": "planning", "planning_date": "2016-01-02T12:00:00+0000", - "event_item": "#EVENT2._id#" + "related_events": [{"_id": "#EVENT2._id#", "link_type": "primary"}] }] """ When we get "/events_planning_search?repo=events&only_future=false&lock_state=locked" diff --git a/server/features/search_planning.feature b/server/features/search_planning.feature index b232d3a2b..bbb3996eb 100644 --- a/server/features/search_planning.feature +++ b/server/features/search_planning.feature @@ -97,7 +97,7 @@ Feature: Planning Search "headline": "test headline", "slugline": "slug123", "name": "name456", - "event_item": "event_123", + "related_events": [{"_id": "event_123", "link_type": "primary"}], "planning_date": "2016-01-02T13:00:00+0000", "anpa_category": [ {"name": "Overseas Sport", "qcode": "s"} @@ -122,7 +122,7 @@ Feature: Planning Search "headline": "test headline", "slugline": "slug456", "name": "name789", - "event_item": "event_456", + "related_events": [{"_id": "event_456", "link_type": "primary", "recurrence_id": "recur1"}], "recurrence_id": "recur1", "planning_date": "2016-01-03T14:00:00+0000", "agendas": ["finance"], @@ -147,7 +147,7 @@ Feature: Planning Search "headline": "test headline", "slugline": "slug456", "name": "name012", - "event_item": "event_456", + 
"related_events": [{"_id": "event_456", "link_type": "primary", "recurrence_id": "recur1"}], "recurrence_id": "recur1", "planning_date": "2016-01-04T14:00:00+0000", "agendas": ["entertainment"], @@ -164,7 +164,7 @@ Feature: Planning Search "headline": "test headline", "slugline": "slug789", "name": "name345", - "event_item": "event_786", + "related_events": [{"_id": "event_786", "link_type": "primary"}], "planning_date": "2016-01-05T14:00:00+0000", "agendas": ["sports", "finance"], "language": "fr-CA", diff --git a/server/features/search_planning_locks.feature b/server/features/search_planning_locks.feature index a3ff0634f..d1eb621ee 100644 --- a/server/features/search_planning_locks.feature +++ b/server/features/search_planning_locks.feature @@ -56,21 +56,21 @@ Feature: Planning Search Locks "headline": "event unlocked", "slugline": "ep-unlocked", "planning_date": "2016-01-02T12:00:00+0000", - "event_item": "event_1" + "related_events": [{"_id": "event_1", "link_type": "primary"}] }, { "guid": "planning_4", "headline": "event locked", "slugline": "ep-e-locked", "planning_date": "2016-01-02T12:00:00+0000", - "event_item": "event_2" + "related_events": [{"_id": "event_2", "link_type": "primary"}] }, { "guid": "planning_5", "headline": "planning locked", "slugline": "ep-p-locked", "planning_date": "2016-01-02T12:00:00+0000", - "event_item": "event_3" + "related_events": [{"_id": "event_3", "link_type": "primary"}] } ] """ @@ -169,7 +169,7 @@ Feature: Planning Search Locks "headline": "planning", "slugline": "planning", "planning_date": "2016-01-02T12:00:00+0000", - "event_item": "#EVENT2._id#" + "related_events": [{"_id": "#EVENT2._id#", "link_type": "primary"}] }] """ When we get "/events_planning_search?repo=planning&only_future=false&lock_state=locked" diff --git a/server/features/search_sort.feature b/server/features/search_sort.feature index d56982602..f30da10e5 100644 --- a/server/features/search_sort.feature +++ b/server/features/search_sort.feature @@ -42,7 
+42,7 @@ Feature: Event Search "firstcreated": "2016-01-02T01:00:00+0000", "versioncreated": "2016-01-03T01:00:00+0000", "guid": "plan_2", "name": "plan 2", "slugline": "test2 slugline", "planning_date": "2016-01-01T01:00:00+0000", - "event_item": "event_2" + "related_events": [{"_id": "event_2", "link_type": "primary"}] }, { "firstcreated": "2016-01-03T01:00:00+0000", "versioncreated": "2016-01-02T01:00:00+0000", "guid": "plan_3", "name": "plan 3", "slugline": "test3 slugline", @@ -51,7 +51,7 @@ Feature: Event Search "firstcreated": "2016-01-04T01:00:00+0000", "versioncreated": "2016-01-01T01:00:00+0000", "guid": "plan_4", "name": "plan 4", "slugline": "test4 slugline", "planning_date": "2016-01-02T01:00:00+0000", - "event_item": "event_4" + "related_events": [{"_id": "event_4", "link_type": "primary"}] }] """ diff --git a/server/planning/assignments/assignments.py b/server/planning/assignments/assignments.py index 71d4f0542..d514a9b76 100644 --- a/server/planning/assignments/assignments.py +++ b/server/planning/assignments/assignments.py @@ -11,12 +11,17 @@ """Superdesk Assignments""" from typing import Dict, Any -import superdesk -import logging from copy import deepcopy +import logging + from bson import ObjectId +from icalendar import Calendar, Event +from eve.utils import config, ParsedRequest +from flask import request, json, current_app as app + +import superdesk +from superdesk import get_resource_service from superdesk.errors import SuperdeskApiError -from planning.errors import AssignmentApiError from superdesk.metadata.utils import item_url from superdesk.metadata.item import ( metadata_schema, @@ -26,6 +31,9 @@ ) from superdesk.resource import not_analyzed from superdesk.notification import push_notification +from superdesk.utc import utcnow +from superdesk.users.services import current_user_has_privilege + from apps.archive.common import get_user, get_auth from apps.duplication.archive_move import ITEM_MOVE from apps.publish.enqueue import ( @@ -35,14 
+43,13 @@ ITEM_TAKEDOWN, ITEM_UNPUBLISH, ) -from eve.utils import config, ParsedRequest -from superdesk.utc import utcnow +from apps.common.components.utils import get_component +from apps.content import push_content_notification + +from planning.errors import AssignmentApiError from planning.planning import coverage_schema from planning.planning.planning import planning_schema -from superdesk import get_resource_service -from apps.common.components.utils import get_component from planning.item_lock import LockService, LOCK_USER, LOCK_ACTION, LOCK_SESSION -from superdesk.users.services import current_user_has_privilege from planning.common import ( ASSIGNMENT_WORKFLOW_STATE, assignment_workflow_state, @@ -61,16 +68,21 @@ get_notify_self_on_assignment, planning_auto_assign_to_workflow, ) -from icalendar import Calendar, Event -from flask import request, json, current_app as app + from planning.planning_notifications import PlanningNotifications from planning.common import format_address, get_assginment_name -from apps.content import push_content_notification from .assignments_history import ASSIGNMENT_HISTORY_ACTIONS -from planning.utils import get_event_formatted_dates, get_formatted_contacts +from planning.utils import ( + get_event_formatted_dates, + get_formatted_contacts, + get_related_planning_for_events, + get_related_event_ids_for_planning, + get_first_related_event_id_for_planning, + get_first_event_item_for_planning_id, +) logger = logging.getLogger(__name__) -planning_type = deepcopy(superdesk.Resource.rel("planning", type="string")) +planning_type = deepcopy(superdesk.Resource.rel("planning", type="string", required=True)) planning_type["mapping"] = not_analyzed @@ -385,10 +397,18 @@ def send_assignment_notification(self, updates, original=None, force=False): assignment = deepcopy(original) assignment.update(updates) - planning_id = assignment.get("planning_item", -1) - planning_item = get_resource_service("planning").find_one(req=None, _id=planning_id) 
- if planning_item and planning_item.get("event_item"): - event_item = get_resource_service("events").find_one(req=None, _id=planning_item.get("event_item")) + planning_id = assignment.get("planning_item") + + if not planning_id: + raise SuperdeskApiError.badRequestError( + message="Unable to send notifications, planning_id not found on assignment", + payload=dict( + assignment_id=assignment_id, + ), + ) + + event_item = get_first_event_item_for_planning_id(planning_id, "primary") + if event_item: contacts = [] for contact_id in event_item.get("event_contact_info", []): contact_details = get_resource_service("contacts").find_one(req=None, _id=contact_id) @@ -396,8 +416,6 @@ def send_assignment_notification(self, updates, original=None, force=False): contacts.append(contact_details) if len(contacts): event_item["event_contact_info"] = contacts - else: - event_item = None # Allow to create the ICS object only if there is scheduled time in the assignment. # This situation won't be applicable in the production but only for the test cases. 
@@ -982,7 +1000,7 @@ def on_events_updated(self, updates, original): event = deepcopy(original) event.update(updates) - plannings = list(get_resource_service("events").get_plannings_for_event(event)) + plannings = get_related_planning_for_events([event[config.ID_FIELD]], "primary") if not plannings: # If this Event has no associated Planning items @@ -1154,14 +1172,14 @@ def can_edit(self, item, user_id): return True, "" def is_associated_planning_or_event_locked(self, planning_item): - associated_event = (planning_item or {}).get("event_item") if is_locked_in_this_session(planning_item): return True - if not associated_event: + first_primary_event_id = get_first_related_event_id_for_planning(planning_item, "primary") + if not first_primary_event_id: return False - event = get_resource_service("events").find_one(req=None, _id=associated_event) + event = get_resource_service("events").find_one(req=None, _id=first_primary_event_id) if not planning_item.get("recurrence_id"): return is_locked_in_this_session(event) else: @@ -1273,7 +1291,7 @@ def on_deleted(self, doc): planning=doc.get("planning_item"), coverage=doc.get("coverage_item"), planning_etag=updated_planning.get(config.ETAG), - event_item=updated_planning.get("event_item"), + event_ids=get_related_event_ids_for_planning(updated_planning), session=get_auth().get("_id"), ) if not doc.get("_to_delete") or marked_for_delete: diff --git a/server/planning/assignments/assignments_complete.py b/server/planning/assignments/assignments_complete.py index 7022ac7eb..57a5c1470 100644 --- a/server/planning/assignments/assignments_complete.py +++ b/server/planning/assignments/assignments_complete.py @@ -8,13 +8,16 @@ # AUTHORS and LICENSE files distributed with this source code, or # at https://www.sourcefabric.org/superdesk/license +from copy import deepcopy + +from eve.utils import config + from superdesk import get_resource_service from superdesk.services import BaseService from superdesk.notification import 
push_notification from superdesk.errors import SuperdeskApiError from apps.archive.common import get_user, get_auth -from eve.utils import config -from copy import deepcopy + from .assignments import AssignmentsResource, assignments_schema, AssignmentsService from planning.common import ( ASSIGNMENT_WORKFLOW_STATE, @@ -31,6 +34,8 @@ # allow an external application to pass a user assignments_complete_schema["proxy_user"] = {"type": "objectid", "nullable": True} +assignments_complete_schema["planning_item"]["required"] = False + class AssignmentsCompleteResource(AssignmentsResource): url = "assignments/complete" diff --git a/server/planning/assignments/assignments_lock.py b/server/planning/assignments/assignments_lock.py index 7ebebb860..bed9c0853 100644 --- a/server/planning/assignments/assignments_lock.py +++ b/server/planning/assignments/assignments_lock.py @@ -8,23 +8,24 @@ # AUTHORS and LICENSE files distributed with this source code, or # at https://www.sourcefabric.org/superdesk/license -from flask import request import logging +from copy import deepcopy +from flask import request from eve.utils import config + +from superdesk import get_resource_service +from superdesk.errors import SuperdeskApiError +from superdesk.services import BaseService from superdesk.resource import Resource, build_custom_hateoas from superdesk.metadata.utils import item_url from apps.archive.common import get_user, get_auth -from superdesk.services import BaseService -from planning.item_lock import LockService -from superdesk import get_resource_service -from superdesk.errors import SuperdeskApiError from apps.common.components.utils import get_component + +from planning.item_lock import LockService from planning.common import ASSIGNMENT_WORKFLOW_STATE from planning.assignments.assignments import assignments_schema -from copy import deepcopy - CUSTOM_HATEOAS = {"self": {"title": "Assignments", "href": "/assignments/{_id}"}} logger = logging.getLogger(__name__) @@ -40,6 +41,8 @@ class 
AssignmentsLockResource(Resource): endpoint_name = "assignments_lock" url = "assignments/<{0}:item_id>/lock".format(item_url) schema = deepcopy(assignments_schema) + schema["planning_item"]["required"] = False + datasource = {"source": "assignments"} resource_methods = ["GET", "POST"] resource_title = endpoint_name @@ -87,6 +90,8 @@ class AssignmentsUnlockResource(Resource): endpoint_name = "assignments_unlock" url = "assignments/<{0}:item_id>/unlock".format(item_url) schema = deepcopy(assignments_schema) + schema["planning_item"]["required"] = False + datasource = {"source": "assignments"} resource_methods = ["GET", "POST"] resource_title = endpoint_name diff --git a/server/planning/assignments/assignments_revert.py b/server/planning/assignments/assignments_revert.py index a6cb6f312..a44e24743 100644 --- a/server/planning/assignments/assignments_revert.py +++ b/server/planning/assignments/assignments_revert.py @@ -7,18 +7,23 @@ # For the full copyright and license information, please see the # AUTHORS and LICENSE files distributed with this source code, or # at https://www.sourcefabric.org/superdesk/license + +from copy import deepcopy + +from eve.utils import config + +from superdesk import get_resource_service from superdesk.services import BaseService from superdesk.notification import push_notification from superdesk.errors import SuperdeskApiError from apps.archive.common import get_user, get_auth -from eve.utils import config -from copy import deepcopy -from superdesk import get_resource_service + from .assignments import AssignmentsResource, assignments_schema from planning.common import ASSIGNMENT_WORKFLOW_STATE, remove_lock_information assignments_revert_schema = deepcopy(assignments_schema) +assignments_revert_schema["planning_item"]["required"] = False class AssignmentsRevertResource(AssignmentsResource): diff --git a/server/planning/commands/__init__.py b/server/planning/commands/__init__.py index e7951c119..816bfe37b 100644 --- 
a/server/planning/commands/__init__.py +++ b/server/planning/commands/__init__.py @@ -4,3 +4,4 @@ from .export_to_newsroom import ExportToNewsroom # noqa from .export_scheduled_filters import ExportScheduledFilters # noqa from .purge_expired_locks import PurgeExpiredLocks # noqa +from .replace_deprecated_event_item_attribute import ReplaceDeprecatedEventItemAttributeCommand # noqa diff --git a/server/planning/commands/flag_expired_items.py b/server/planning/commands/flag_expired_items.py index 61136cf03..a3ee00762 100644 --- a/server/planning/commands/flag_expired_items.py +++ b/server/planning/commands/flag_expired_items.py @@ -8,16 +8,20 @@ # AUTHORS and LICENSE files distributed with this source code, or # at https://www.sourcefabric.org/superdesk/license +from datetime import timedelta, datetime + +from bson.objectid import ObjectId +from eve.utils import config from flask import current_app as app + from superdesk import Command, command, get_resource_service from superdesk.logging import logger from superdesk.utc import utcnow from superdesk.celery_task_utils import get_lock_id from superdesk.lock import lock, unlock, remove_locks from superdesk.notification import push_notification -from datetime import timedelta, datetime -from eve.utils import config -from bson.objectid import ObjectId + +from planning.utils import get_related_planning_for_events, get_related_event_ids_for_planning class FlagExpiredItems(Command): @@ -149,13 +153,12 @@ def _flag_expired_planning(self, expiry_datetime): @staticmethod def _set_event_plans(events): - planning_service = get_resource_service("planning") - - for plan in planning_service.get_from_mongo(req=None, lookup={"event_item": {"$in": list(events.keys())}}): - event = events[plan["event_item"]] - if "_plans" not in event: - event["_plans"] = [] - event["_plans"].append(plan) + for plan in get_related_planning_for_events(list(events.keys()), "primary"): + for related_event_id in get_related_event_ids_for_planning(plan, 
"primary"): + event = events[related_event_id] + if "_plans" not in event: + event["_plans"] = [] + event["_plans"].append(plan) @staticmethod def _get_event_schedule(event): diff --git a/server/planning/commands/flag_expired_items_test.py b/server/planning/commands/flag_expired_items_test.py index 8991100d8..19868228e 100644 --- a/server/planning/commands/flag_expired_items_test.py +++ b/server/planning/commands/flag_expired_items_test.py @@ -8,13 +8,17 @@ # AUTHORS and LICENSE files distributed with this source code, or # at https://www.sourcefabric.org/superdesk/license -from .flag_expired_items import FlagExpiredItems -from planning.tests import TestCase -from superdesk import get_resource_service -from superdesk.utc import utcnow from datetime import timedelta + from bson.objectid import ObjectId +from superdesk import get_resource_service +from superdesk.utc import utcnow + +from planning.tests import TestCase +from planning.types import PlanningRelatedEventLink +from .flag_expired_items import FlagExpiredItems + now = utcnow() yesterday = now - timedelta(hours=48) @@ -197,10 +201,26 @@ def test_event_with_single_planning_no_coverages(self): self.insert( "planning", [ - {"guid": "p1", "event_item": "e1", **active["plan"]}, - {"guid": "p2", "event_item": "e2", **active["plan"]}, - {"guid": "p3", "event_item": "e3", **expired["plan"]}, - {"guid": "p4", "event_item": "e4", **expired["plan"]}, + { + "guid": "p1", + "related_events": [PlanningRelatedEventLink(_id="e1", link_type="primary")], + **active["plan"], + }, + { + "guid": "p2", + "related_events": [PlanningRelatedEventLink(_id="e2", link_type="primary")], + **active["plan"], + }, + { + "guid": "p3", + "related_events": [PlanningRelatedEventLink(_id="e3", link_type="primary")], + **expired["plan"], + }, + { + "guid": "p4", + "related_events": [PlanningRelatedEventLink(_id="e4", link_type="primary")], + **expired["plan"], + }, ], ) FlagExpiredItems().run() @@ -230,49 +250,49 @@ def 
test_event_with_single_planning_single_coverage(self): [ { "guid": "p1", - "event_item": "e1", + "related_events": [PlanningRelatedEventLink(_id="e1", link_type="primary")], **active["plan"], "coverages": [active["coverage"]], }, { "guid": "p2", - "event_item": "e2", + "related_events": [PlanningRelatedEventLink(_id="e2", link_type="primary")], **expired["plan"], "coverages": [active["coverage"]], }, { "guid": "p3", - "event_item": "e3", + "related_events": [PlanningRelatedEventLink(_id="e3", link_type="primary")], **active["plan"], "coverages": [expired["coverage"]], }, { "guid": "p4", - "event_item": "e4", + "related_events": [PlanningRelatedEventLink(_id="e4", link_type="primary")], **expired["plan"], "coverages": [expired["coverage"]], }, { "guid": "p5", - "event_item": "e5", + "related_events": [PlanningRelatedEventLink(_id="e5", link_type="primary")], **active["plan"], "coverages": [active["coverage"]], }, { "guid": "p6", - "event_item": "e6", + "related_events": [PlanningRelatedEventLink(_id="e6", link_type="primary")], **expired["plan"], "coverages": [active["coverage"]], }, { "guid": "p7", - "event_item": "e7", + "related_events": [PlanningRelatedEventLink(_id="e7", link_type="primary")], **active["plan"], "coverages": [expired["coverage"]], }, { "guid": "p8", - "event_item": "e8", + "related_events": [PlanningRelatedEventLink(_id="e8", link_type="primary")], **expired["plan"], "coverages": [expired["coverage"]], }, @@ -335,85 +355,85 @@ def test_event_with_single_planning_multiple_coverages(self): [ { "guid": "p01", - "event_item": "e01", + "related_events": [PlanningRelatedEventLink(_id="e01", link_type="primary")], **active["plan"], "coverages": [active["coverage"], active["coverage"]], # AAA }, { "guid": "p02", - "event_item": "e02", + "related_events": [PlanningRelatedEventLink(_id="e02", link_type="primary")], **expired["plan"], "coverages": [active["coverage"], active["coverage"]], # EAA }, { "guid": "p03", - "event_item": "e03", + "related_events": 
[PlanningRelatedEventLink(_id="e03", link_type="primary")], **active["plan"], "coverages": [expired["coverage"], active["coverage"]], # AEA }, { "guid": "p04", - "event_item": "e04", + "related_events": [PlanningRelatedEventLink(_id="e04", link_type="primary")], **active["plan"], "coverages": [active["coverage"], expired["coverage"]], # AAE }, { "guid": "p05", - "event_item": "e05", + "related_events": [PlanningRelatedEventLink(_id="e05", link_type="primary")], **expired["plan"], "coverages": [expired["coverage"], active["coverage"]], # EEA }, { "guid": "p06", - "event_item": "e06", + "related_events": [PlanningRelatedEventLink(_id="e06", link_type="primary")], **expired["plan"], "coverages": [active["coverage"], expired["coverage"]], # EAE }, { "guid": "p07", - "event_item": "e07", + "related_events": [PlanningRelatedEventLink(_id="e07", link_type="primary")], **expired["plan"], "coverages": [expired["coverage"], expired["coverage"]], # EEE }, { "guid": "p08", - "event_item": "e08", + "related_events": [PlanningRelatedEventLink(_id="e08", link_type="primary")], **active["plan"], "coverages": [active["coverage"], active["coverage"]], # AAA }, { "guid": "p09", - "event_item": "e09", + "related_events": [PlanningRelatedEventLink(_id="e09", link_type="primary")], **expired["plan"], "coverages": [active["coverage"], active["coverage"]], # EAA }, { "guid": "p10", - "event_item": "e10", + "related_events": [PlanningRelatedEventLink(_id="e10", link_type="primary")], **active["plan"], "coverages": [expired["coverage"], active["coverage"]], # AEA }, { "guid": "p11", - "event_item": "e11", + "related_events": [PlanningRelatedEventLink(_id="e11", link_type="primary")], **active["plan"], "coverages": [active["coverage"], expired["coverage"]], # AAE }, { "guid": "p12", - "event_item": "e12", + "related_events": [PlanningRelatedEventLink(_id="e12", link_type="primary")], **expired["plan"], "coverages": [expired["coverage"], active["coverage"]], # EEA }, { "guid": "p13", - 
"event_item": "e13", + "related_events": [PlanningRelatedEventLink(_id="e13", link_type="primary")], **expired["plan"], "coverages": [active["coverage"], expired["coverage"]], # EAE }, { "guid": "p14", - "event_item": "e14", + "related_events": [PlanningRelatedEventLink(_id="e14", link_type="primary")], **expired["plan"], "coverages": [expired["coverage"], expired["coverage"]], # EEE }, @@ -482,97 +502,97 @@ def test_event_with_multiple_planning(self): [ { "guid": "p01", - "event_item": "e1", + "related_events": [PlanningRelatedEventLink(_id="e1", link_type="primary")], **active["plan"], "coverages": [active["coverage"]], }, { "guid": "p02", - "event_item": "e1", + "related_events": [PlanningRelatedEventLink(_id="e1", link_type="primary")], **active["plan"], "coverages": [active["coverage"]], }, { "guid": "p03", - "event_item": "e2", + "related_events": [PlanningRelatedEventLink(_id="e2", link_type="primary")], **expired["plan"], "coverages": [expired["coverage"]], }, { "guid": "p04", - "event_item": "e2", + "related_events": [PlanningRelatedEventLink(_id="e2", link_type="primary")], **active["plan"], "coverages": [active["coverage"]], }, { "guid": "p05", - "event_item": "e3", + "related_events": [PlanningRelatedEventLink(_id="e3", link_type="primary")], **active["plan"], "coverages": [active["coverage"]], }, { "guid": "p06", - "event_item": "e3", + "related_events": [PlanningRelatedEventLink(_id="e3", link_type="primary")], **expired["plan"], "coverages": [expired["coverage"]], }, { "guid": "p07", - "event_item": "e4", + "related_events": [PlanningRelatedEventLink(_id="e4", link_type="primary")], **expired["plan"], "coverages": [expired["coverage"]], }, { "guid": "p08", - "event_item": "e4", + "related_events": [PlanningRelatedEventLink(_id="e4", link_type="primary")], **expired["plan"], "coverages": [expired["coverage"]], }, { "guid": "p09", - "event_item": "e5", + "related_events": [PlanningRelatedEventLink(_id="e5", link_type="primary")], **active["plan"], 
"coverages": [active["coverage"]], }, { "guid": "p10", - "event_item": "e5", + "related_events": [PlanningRelatedEventLink(_id="e5", link_type="primary")], **active["plan"], "coverages": [active["coverage"]], }, { "guid": "p11", - "event_item": "e6", + "related_events": [PlanningRelatedEventLink(_id="e6", link_type="primary")], **expired["plan"], "coverages": [expired["coverage"]], }, { "guid": "p12", - "event_item": "e6", + "related_events": [PlanningRelatedEventLink(_id="e6", link_type="primary")], **active["plan"], "coverages": [active["coverage"]], }, { "guid": "p13", - "event_item": "e7", + "related_events": [PlanningRelatedEventLink(_id="e7", link_type="primary")], **active["plan"], "coverages": [active["coverage"]], }, { "guid": "p14", - "event_item": "e7", + "related_events": [PlanningRelatedEventLink(_id="e7", link_type="primary")], **expired["plan"], "coverages": [expired["coverage"]], }, { "guid": "p15", - "event_item": "e8", + "related_events": [PlanningRelatedEventLink(_id="e8", link_type="primary")], **expired["plan"], "coverages": [expired["coverage"]], }, { "guid": "p16", - "event_item": "e8", + "related_events": [PlanningRelatedEventLink(_id="e8", link_type="primary")], **expired["plan"], "coverages": [expired["coverage"]], }, diff --git a/server/planning/commands/replace_deprecated_event_item_attribute.py b/server/planning/commands/replace_deprecated_event_item_attribute.py new file mode 100644 index 000000000..5741429c0 --- /dev/null +++ b/server/planning/commands/replace_deprecated_event_item_attribute.py @@ -0,0 +1,118 @@ +# -*- coding: utf-8; -*- +# +# This file is part of Superdesk. +# +# Copyright 2013, 2014 Sourcefabric z.u. and contributors. 
+# +# For the full copyright and license information, please see the +# AUTHORS and LICENSE files distributed with this source code, or +# at https://www.sourcefabric.org/superdesk/license + +from typing import Dict, Any, Iterator +import logging + +from flask import current_app as app + +import superdesk +from superdesk.errors import SuperdeskApiError + +from planning.types import PlanningRelatedEventLink, Planning +from planning.utils import get_first_related_event_id_for_planning + + +logger = logging.getLogger(__name__) + + +class ReplaceDeprecatedEventItemAttributeCommand(superdesk.Command): + """Replace deprecated ``event_item`` attribute from Planning resource items + + The ``event_item`` attribute was replaced with a ``related_events`` attribute, + so that a Planning item can be linked to multiple Events. This command fixes older + items to use this newer attribute + + Usage:: + + # python manage.py planning:replace_deprecated_event_item_attribute + + Options: + + -d, --dry-run Don't update just print planning ids which would be updated + -r, --revert Replace ``related_events`` with deprecated ``event_item`` + """ + + option_list = [ + superdesk.Option("--dry-run", "-d", dest="dry_run", default=False, action="store_true"), + superdesk.Option("--revert", "-r", dest="revert", default=False, action="store_true"), + ] + + def run(self, dry_run: bool, revert: bool): + print("Replacing deprecated 'event_item' attribute in Planning resource items") + self.upgrade(dry_run) if not revert else self.downgrade(dry_run) + + def upgrade(self, dry_run: bool): + updated = 0 + for original in self.get_items(True): + related_event = PlanningRelatedEventLink(_id=original["event_item"], link_type="primary") + if original.get("recurrence_id"): + related_event["recurrence_id"] = original["recurrence_id"] + + updated += self.update_item(original, {"related_events": [related_event], "event_item": None}, dry_run) + + if not dry_run: + print("") + print(f"Done. 
Upgraded {updated} items") + + def downgrade(self, dry_run: bool): + updated = 0 + + for original in self.get_items(False): + updates: Dict[str, Any] = { + "event_item": get_first_related_event_id_for_planning(original), + "related_events": [], + } + updated += self.update_item(original, updates, dry_run) + + if not dry_run: + print("") + print(f"Done. Downgraded {updated} items") + + def update_item(self, original: Planning, updates: Planning, dry_run: bool) -> int: + if dry_run: + print("update", original["_id"], updates) + else: + try: + superdesk.get_resource_service("planning").system_update(original["_id"], updates, original) + print(".", end="") + except SuperdeskApiError as err: + print("x") # Add line break so the exception starts on its own line + logger.exception(err) + return 0 + + return 1 + + def get_items(self, for_upgrade: bool) -> Iterator[Planning]: + last_id = None + size = 500 + max_iterations = 10000 + + # Use pymongo directly, as ``event_item`` is not in the planning resource schema anymore + planning_db = app.data.mongo.pymongo("planning").db["planning"] + lookup: Dict[str, Any] = ( + {"event_item": {"$ne": None}} if for_upgrade else {"related_events": {"$exists": True, "$nin": [None, []]}} + ) + _lookup = lookup + + for i in range(max_iterations): + if last_id is not None: + _lookup = {"$and": [lookup.copy(), {"_id": {"$gt": last_id}}]} + items = list(planning_db.find(_lookup).sort("_id").limit(size)) + if not len(items): + break + for item in items: + yield item + last_id = item["_id"] + else: + logger.warning("Not enough iterations for planning resource") + + +superdesk.command("planning:replace_deprecated_event_item_attribute", ReplaceDeprecatedEventItemAttributeCommand()) diff --git a/server/planning/commands/replace_deprecated_event_item_attribute_test.py b/server/planning/commands/replace_deprecated_event_item_attribute_test.py new file mode 100644 index 000000000..bce2d6b2b --- /dev/null +++ 
b/server/planning/commands/replace_deprecated_event_item_attribute_test.py @@ -0,0 +1,96 @@ +# -*- coding: utf-8; -*- +# +# This file is part of Superdesk. +# +# Copyright 2024 Sourcefabric z.u. and contributors. +# +# For the full copyright and license information, please see the +# AUTHORS and LICENSE files distributed with this source code, or +# at https://www.sourcefabric.org/superdesk/license + +from datetime import timedelta + +from superdesk.utc import utcnow + +from planning.tests import TestCase +from planning.types import PlanningRelatedEventLink +from .replace_deprecated_event_item_attribute import ReplaceDeprecatedEventItemAttributeCommand + + +now = utcnow() + + +class ReplaceDeprecatedEventItemAttributeTest(TestCase): + def setUp(self): + super().setUp() + self.command = ReplaceDeprecatedEventItemAttributeCommand() + self.app.data.insert( + "events", + [ + { + "_id": "event1", + "name": "Event1", + "dates": {"start": now, "end": now + timedelta(days=1), "tz": "Australia/Sydney"}, + } + ], + ) + self.app.data.insert( + "planning", + [ + { + "_id": "plan1", + "slugline": "test-plan-1", + "planning_date": now, + "event_item": "event1", + }, + { + "_id": "plan2", + "slugline": "test-plan-2", + "planning_date": now, + }, + ], + ) + + def _get_planning_item(self, plan_id): + return self.app.data.mongo.pymongo("planning").db["planning"].find_one({"_id": plan_id}) + + def test_get_items(self): + # Test original data + self.assertEqual([item["_id"] for item in self.command.get_items(True)], ["plan1"]) + self.assertEqual([item["_id"] for item in self.command.get_items(False)], []) + + # Test after data upgrade + self.command.run(dry_run=False, revert=False) + self.assertEqual([item["_id"] for item in self.command.get_items(True)], []) + self.assertEqual([item["_id"] for item in self.command.get_items(False)], ["plan1"]) + + # Test after data downgrade + self.command.run(dry_run=False, revert=True) + self.assertEqual([item["_id"] for item in 
self.command.get_items(True)], ["plan1"]) + self.assertEqual([item["_id"] for item in self.command.get_items(False)], []) + + def test_dry_run(self): + # Upgrade data + self.command.run(dry_run=True, revert=False) + plan1 = self._get_planning_item("plan1") + self.assertEqual(plan1["event_item"], "event1") + self.assertIsNone(plan1.get("related_events")) + + # Downgrade data + self.command.run(dry_run=True, revert=True) + plan1 = self._get_planning_item("plan1") + self.assertEqual(plan1["event_item"], "event1") + self.assertIsNone(plan1.get("related_events")) + + def test_upgrade_and_downgrade_planning(self): + # Upgrade data + self.command.run(dry_run=False, revert=False) + plan1 = self._get_planning_item("plan1") + self.assertIsNone(plan1["event_item"]) + self.assertEqual(plan1["related_events"], [PlanningRelatedEventLink(_id="event1", link_type="primary")]) + + # Downgrade data + self.command.run(dry_run=False, revert=True) + plan1 = self._get_planning_item("plan1") + self.assertEqual(plan1["event_item"], "event1") + self.assertEqual(plan1["related_events"], []) diff --git a/server/planning/events/events.py b/server/planning/events/events.py index 18aff08f7..4a007240f 100644 --- a/server/planning/events/events.py +++ b/server/planning/events/events.py @@ -11,17 +11,16 @@ """Superdesk Events""" from typing import Dict, Any, Optional, List, Tuple -import superdesk import logging import itertools -import copy +from copy import deepcopy +from datetime import timedelta + import pytz import re -from datetime import timedelta +from flask import current_app as app from eve.methods.common import resolve_document_etag from eve.utils import config, date_to_str -from flask import current_app as app -from copy import deepcopy from dateutil.rrule import ( rrule, YEARLY, @@ -37,17 +36,24 @@ SU, ) +import superdesk from superdesk import get_resource_service from superdesk.errors import SuperdeskApiError from superdesk.metadata.utils import generate_guid from 
superdesk.metadata.item import GUID_NEWSML from superdesk.notification import push_notification from superdesk.utc import get_date, utcnow +from superdesk.users.services import current_user_has_privilege from apps.auth import get_user, get_user_id from apps.archive.common import get_auth, update_dates_for -from superdesk.users.services import current_user_has_privilege -from planning.types import Event, EmbeddedPlanning, EmbeddedCoverageItem +from planning.types import ( + Event, + EmbeddedPlanning, + EmbeddedCoverageItem, + PlanningRelatedEventLink, + PLANNING_RELATED_EVENT_LINK_TYPE, +) from planning.common import ( UPDATE_SINGLE, UPDATE_FUTURE, @@ -69,6 +75,7 @@ update_ingest_on_patch, TEMP_ID_PREFIX, ) +from planning.utils import get_related_planning_for_events from .events_base_service import EventsBaseService from .events_schema import events_schema from .events_sync import sync_event_metadata_with_planning_items @@ -144,14 +151,10 @@ def on_fetched(self, docs): def on_fetched_item(self, doc): self._enhance_event_item(doc) - @staticmethod - def get_plannings_for_event(event): - return get_resource_service("planning").find(where={"event_item": event.get(config.ID_FIELD)}) - def _enhance_event_item(self, doc): - plannings = self.get_plannings_for_event(doc) + plannings = get_related_planning_for_events([doc[config.ID_FIELD]], "primary") - if plannings.count() > 0: + if len(plannings): doc["planning_ids"] = [planning.get("_id") for planning in plannings] for location in doc.get("location") or []: @@ -161,11 +164,7 @@ def _enhance_event_item(self, doc): if not doc.get("original_creator"): doc.pop("original_creator", None) - @staticmethod - def has_planning_items(doc): - return EventsService.get_plannings_for_event(doc).count() > 0 - - def get_all_items_in_relationship(self, item): + def get_all_items_in_relationship(self, item: Event, event_link_type: PLANNING_RELATED_EVENT_LINK_TYPE = "primary"): # Get recurring items if item.get("recurrence_id"): all_items = 
self.find(where={"recurrence_id": item.get("recurrence_id")}) @@ -176,7 +175,7 @@ def get_all_items_in_relationship(self, item): ) else: # Get associated planning items - return self.get_plannings_for_event(item) + return get_related_planning_for_events([item[config.ID_FIELD]], event_link_type) def on_locked_event(self, doc, user_id): self._enhance_event_item(doc) @@ -527,6 +526,7 @@ def on_updated(self, updates, original, from_ingest: Optional[bool] = None): if "location" not in updates and original.get("location"): updates["location"] = original["location"] + updates[config.ID_FIELD] = original[config.ID_FIELD] self._enhance_event_item(updates) def on_deleted(self, doc): @@ -670,8 +670,7 @@ def mark_event_complete(self, original, updates, event, mark_complete_validated) if event["dates"]["start"] < updates["actioned_date"]: return - plans = list(get_resource_service("planning").find(where={"event_item": event[config.ID_FIELD]})) - for plan in plans: + for plan in get_related_planning_for_events([event[config.ID_FIELD]], "primary"): if plan.get("state") != WORKFLOW_STATE.CANCELLED and len(plan.get("coverages", [])) > 0: get_resource_service("planning_cancel").patch( plan[config.ID_FIELD], @@ -686,7 +685,7 @@ def _convert_to_recurring_event(self, updates, original): self._validate_convert_to_recurring(updates, original) updates["recurrence_id"] = original["_id"] - merged = copy.deepcopy(original) + merged = deepcopy(original) merged.update(updates) # Generated new events will be "draft" @@ -726,7 +725,7 @@ def get_recurring_timeline(self, selected, spiked=False): return events_base_service.get_recurring_timeline(selected, postponed=True, spiked=spiked) @staticmethod - def _link_to_planning(event): + def _link_to_planning(event: Event, link_type: PLANNING_RELATED_EVENT_LINK_TYPE = "primary"): """ Links an Event to an existing Planning Item @@ -738,13 +737,17 @@ def _link_to_planning(event): event_id = event[config.ID_FIELD] planning_item = 
planning_service.find_one(req=None, _id=plan_id) - updates = {"event_item": event_id} + updates = {"related_events": planning_item.get("related_events") or []} + related_planning = PlanningRelatedEventLink(_id=event_id, link_type=link_type) + updates["related_events"].append(related_planning) - if "recurrence_id" in event: - updates["recurrence_id"] = event["recurrence_id"] + # Add ``recurrence_id`` if the supplied Event is part of a series + if event.get("recurrence_id"): + related_planning["recurrence_id"] = event["recurrence_id"] + if not planning_item.get("recurrence_id") and link_type == "primary": + updates["recurrence_id"] = event["recurrence_id"] planning_service.validate_on_update(updates, planning_item, get_user()) - planning_service.system_update(plan_id, updates, planning_item) app.on_updated_planning(updates, planning_item) @@ -955,7 +958,7 @@ def generate_recurring_events(event, recurrence_id=None): get_max_recurrent_events(), ): # set a limit to prevent too many events to be created # create event with the new dates - new_event = copy.deepcopy(event) + new_event = deepcopy(event) # Remove fields not required by the new events for key in list(new_event.keys()): diff --git a/server/planning/events/events_base_service.py b/server/planning/events/events_base_service.py index c143e58ac..6573852b2 100644 --- a/server/planning/events/events_base_service.py +++ b/server/planning/events/events_base_service.py @@ -7,8 +7,10 @@ # For the full copyright and license information, please see the # AUTHORS and LICENSE files distributed with this source code, or # at https://www.sourcefabric.org/superdesk/license -import json + from datetime import datetime + +import json from flask import request from eve.utils import config, ParsedRequest @@ -272,37 +274,11 @@ def get_recurring_timeline( return historic, past, future - @staticmethod - def get_plannings_for_event(event): - return get_resource_service("planning").find(where={"event_item": event[config.ID_FIELD]}) - - 
@staticmethod - def has_planning_items(doc): - return EventsBaseService.get_plannings_for_event(doc).count() > 0 - - @staticmethod - def is_event_in_use(event): - return EventsBaseService.has_planning_items(event) or (event.get("pubstatus") or "") != "" - @staticmethod def is_original_event(original): # Check Flask's URL params if the ID matches the one provided here return original.get(config.ID_FIELD) == request.view_args.get(config.ID_FIELD) - @staticmethod - def _set_events_planning(events): - planning_service = get_resource_service("planning") - - planning_items = list( - planning_service.get_from_mongo(req=None, lookup={"event_item": {"$in": list(events.keys())}}) - ) - - for plan in planning_items: - event = events[plan["event_item"]] - if "_plans" not in event: - event["_plans"] = [] - event["_plans"].append(plan) - @staticmethod def remove_fields(new_event, extra_fields=None): """Remove fields not required by new event""" diff --git a/server/planning/events/events_cancel.py b/server/planning/events/events_cancel.py index 763a7f858..f125e7d0e 100644 --- a/server/planning/events/events_cancel.py +++ b/server/planning/events/events_cancel.py @@ -8,21 +8,25 @@ # AUTHORS and LICENSE files distributed with this source code, or # at https://www.sourcefabric.org/superdesk/license +from copy import deepcopy + +from eve.utils import config +from flask import request + from superdesk import get_resource_service from superdesk.notification import push_notification -from eve.utils import config +from superdesk.errors import SuperdeskApiError from apps.archive.common import get_user, get_auth + from planning.common import ( UPDATE_FUTURE, WORKFLOW_STATE, remove_lock_information, set_actioned_date_to_event, ) -from copy import deepcopy +from planning.utils import get_related_planning_for_events from .events import EventsResource, events_schema from .events_base_service import EventsBaseService -from flask import request -from superdesk.errors import SuperdeskApiError 
event_cancel_schema = deepcopy(events_schema) event_cancel_schema["reason"] = { @@ -58,8 +62,7 @@ def _get_cancel_state(): def update_single_event(self, updates, original): occur_cancel_state = self._get_cancel_state() self._set_event_cancelled(updates, original, occur_cancel_state) - if self.is_event_in_use(original): - self._cancel_event_plannings(updates, original) + self._cancel_event_plannings(updates, original) def update(self, id, updates, original): reason = updates.pop("reason", None) @@ -93,18 +96,17 @@ def push_notification(name, updates, original): @staticmethod def _cancel_event_plannings(updates, original): - planning_service = get_resource_service("planning") planning_cancel_service = get_resource_service("planning_cancel") + planning_history_service = get_resource_service("planning_history") reason = updates.get("reason", None) - plans = list(planning_service.find(where={"event_item": original[config.ID_FIELD]})) - for plan in plans: + for plan in get_related_planning_for_events([original[config.ID_FIELD]], "primary"): if plan.get("state") != WORKFLOW_STATE.CANCELLED: request.view_args["event_cancellation"] = True cancelled_plan = planning_cancel_service.patch(plan[config.ID_FIELD], {"reason": reason}) # Write history records - get_resource_service("planning_history").on_cancel(cancelled_plan, plan) + planning_history_service.on_cancel(cancelled_plan, plan) @staticmethod def _set_event_cancelled(updates, original, occur_cancel_state): @@ -141,15 +143,10 @@ def update_recurring_events(self, updates, original, update_method): for event in cancelled_events: new_updates = deepcopy(updates) - if not self.is_event_in_use(event): - self.patch_related_event_as_cancelled(new_updates, event, notifications) - else: - # Cancel the planning item also as it is in use - self._cancel_event_plannings(new_updates, event) - self.patch_related_event_as_cancelled(new_updates, event, notifications) - - if self.is_event_in_use(original): - 
self._cancel_event_plannings(updates, original) + self._cancel_event_plannings(new_updates, event) + self.patch_related_event_as_cancelled(new_updates, event, notifications) + + self._cancel_event_plannings(updates, original) updates["_cancelled_events"] = notifications def patch_related_event_as_cancelled(self, updates, original, notifications): diff --git a/server/planning/events/events_history.py b/server/planning/events/events_history.py index 365b6138b..24e6242f2 100644 --- a/server/planning/events/events_history.py +++ b/server/planning/events/events_history.py @@ -8,11 +8,14 @@ """Superdesk Files""" -from superdesk import Resource, get_resource_service -from planning.history import HistoryService +from copy import deepcopy import logging + from eve.utils import config -from copy import deepcopy + +from superdesk import Resource, get_resource_service +from planning.utils import get_related_planning_for_events +from planning.history import HistoryService from planning.item_lock import LOCK_ACTION logger = logging.getLogger(__name__) @@ -35,8 +38,8 @@ def on_item_created(self, items, operation=None): created_from_planning = [] regular_events = [] for item in items: - planning_items = get_resource_service("events").get_plannings_for_event(item) - if planning_items.count() > 0: + planning_items = get_related_planning_for_events([item[config.ID_FIELD]], "primary") + if len(planning_items) > 0: item["created_from_planning"] = planning_items[0].get("_id") created_from_planning.append(item) else: diff --git a/server/planning/events/events_post.py b/server/planning/events/events_post.py index e4628da28..126887e46 100644 --- a/server/planning/events/events_post.py +++ b/server/planning/events/events_post.py @@ -1,3 +1,13 @@ +# -*- coding: utf-8; -*- +# +# This file is part of Superdesk. +# +# Copyright 2014 Sourcefabric z.u. and contributors. 
+# +# For the full copyright and license information, please see the +# AUTHORS and LICENSE files distributed with this source code, or +# at https://www.sourcefabric.org/superdesk/license + from flask import abort from eve.utils import config @@ -18,7 +28,7 @@ enqueue_planning_item, get_version_item_for_post, ) -from planning.utils import try_cast_object_id +from planning.utils import try_cast_object_id, get_related_planning_for_events from planning.content_profiles.utils import is_post_planning_with_event_enabled @@ -185,7 +195,7 @@ def post_event(self, event, new_post_state, repost): updates["version"] = version get_resource_service("events_history")._save_history(event, updates, "post") - plannings = list(get_resource_service("events").get_plannings_for_event(event)) + plannings = get_related_planning_for_events([event[config.ID_FIELD]], "primary") event["plans"] = [p.get("_id") for p in plannings] self.publish_event(event, version) diff --git a/server/planning/events/events_postpone.py b/server/planning/events/events_postpone.py index 3edc1f1f7..9e7eb3f78 100644 --- a/server/planning/events/events_postpone.py +++ b/server/planning/events/events_postpone.py @@ -8,20 +8,24 @@ # AUTHORS and LICENSE files distributed with this source code, or # at https://www.sourcefabric.org/superdesk/license +from copy import deepcopy + +from eve.utils import config +from flask import current_app as app + from superdesk import get_resource_service from superdesk.notification import push_notification -from eve.utils import config from apps.archive.common import get_user, get_auth + from planning.common import ( UPDATE_FUTURE, WORKFLOW_STATE, remove_lock_information, set_actioned_date_to_event, ) -from copy import deepcopy from .events import EventsResource, events_schema from .events_base_service import EventsBaseService -from flask import current_app as app +from planning.utils import get_related_planning_for_events event_postpone_schema = deepcopy(events_schema) @@ -84,12 
+88,10 @@ def push_notification(name, updates, original): @staticmethod def _postpone_event_plannings(updates, original): - planning_service = get_resource_service("planning") planning_postpone_service = get_resource_service("planning_postpone") reason = updates.get("reason", None) - plans = list(planning_service.find(where={"event_item": original[config.ID_FIELD]})) - for plan in plans: + for plan in get_related_planning_for_events([original[config.ID_FIELD]], "primary"): if plan.get("state") != WORKFLOW_STATE.CANCELLED: updated_plan = planning_postpone_service.patch(plan[config.ID_FIELD], {"reason": reason}) app.on_updated_planning_postpone(updated_plan, plan) diff --git a/server/planning/events/events_reschedule.py b/server/planning/events/events_reschedule.py index 31dd5eae4..3c5f4078b 100644 --- a/server/planning/events/events_reschedule.py +++ b/server/planning/events/events_reschedule.py @@ -8,10 +8,18 @@ # AUTHORS and LICENSE files distributed with this source code, or # at https://www.sourcefabric.org/superdesk/license +from copy import deepcopy +from datetime import datetime +from itertools import islice + +import pytz +from eve.utils import config +from flask import current_app as app + from superdesk import get_resource_service from superdesk.metadata.utils import generate_guid from superdesk.metadata.item import GUID_NEWSML -from eve.utils import config + from planning.common import ( UPDATE_FUTURE, WORKFLOW_STATE, @@ -20,13 +28,9 @@ set_original_creator, set_actioned_date_to_event, ) -from copy import deepcopy from .events import EventsResource, events_schema, generate_recurring_dates -from flask import current_app as app -import pytz -from datetime import datetime -from itertools import islice from .events_base_service import EventsBaseService +from planning.utils import get_related_planning_for_events, event_has_planning_items event_reschedule_schema = deepcopy(events_schema) event_reschedule_schema["reason"] = { @@ -55,7 +59,7 @@ class 
EventsRescheduleService(EventsBaseService): def update_single_event(self, updates, original): events_service = get_resource_service("events") - has_plannings = events_service.has_planning_items(original) + has_plannings = event_has_planning_items(original[config.ID_FIELD], "primary") remove_lock_information(updates) reason = updates.pop("reason", None) @@ -93,15 +97,11 @@ def _mark_event_rescheduled(updates, reason, keep_dates=False): @staticmethod def _reschedule_event_plannings(original, reason, plans=None, state=None): - planning_service = get_resource_service("planning") planning_cancel_service = get_resource_service("planning_cancel") planning_reschedule_service = get_resource_service("planning_reschedule") - if plans is None: - plans = list(planning_service.find(where={"event_item": original[config.ID_FIELD]})) - plan_updates = {"reason": reason, "state": state} - for plan in plans: + for plan in plans or get_related_planning_for_events([original[config.ID_FIELD]], "primary"): if plan.get("state") != WORKFLOW_STATE.CANCELLED: updated_plan = planning_reschedule_service.patch(plan[config.ID_FIELD], plan_updates) get_resource_service("planning_history").on_reschedule(updated_plan, plan) @@ -303,11 +303,8 @@ def update_recurring_events(self, updates, original, update_method): events_service.create(new_events) app.on_inserted_events(new_events) - # Iterate over the events to delete/spike - self._set_events_planning(deleted_events) - for event in deleted_events.values(): - event_plans = event.get("_plans", []) + event_plans = get_related_planning_for_events([event[config.ID_FIELD]], "primary") is_original = event[config.ID_FIELD] == original[config.ID_FIELD] if len(event_plans) > 0 or event.get("pubstatus", None) is not None: if is_original: diff --git a/server/planning/events/events_schema.py b/server/planning/events/events_schema.py index 9b13c612a..60236b002 100644 --- a/server/planning/events/events_schema.py +++ b/server/planning/events/events_schema.py @@ 
-8,10 +8,11 @@ # AUTHORS and LICENSE files distributed with this source code, or # at https://www.sourcefabric.org/superdesk/license -from superdesk import Resource +from copy import deepcopy + +from superdesk import Resource, get_resource_service from superdesk.resource import not_analyzed, not_enabled from superdesk.metadata.item import metadata_schema, ITEM_TYPE -from copy import deepcopy from planning.common import ( WORKFLOW_STATE_SCHEMA, @@ -20,7 +21,6 @@ TO_BE_CONFIRMED_FIELD, TO_BE_CONFIRMED_FIELD_SCHEMA, ) -from planning.planning.planning import planning_schema as original_planning_schema event_type = deepcopy(Resource.rel("events", type="string")) event_type["mapping"] = not_analyzed @@ -30,9 +30,6 @@ original_creator_schema = metadata_schema["original_creator"] original_creator_schema.update({"nullable": True}) -planning_schema = deepcopy(original_planning_schema) -planning_schema["event_item"] = {"type": "string"} - events_schema = { # Identifiers "_id": metadata_schema["_id"], @@ -325,6 +322,37 @@ }, }, }, + # HACK: Add ``related_events`` to elastic mapping + # Otherwise searching related events in combined view fails on events type + "related_events": { + "type": "list", + "required": False, + "schema": { + "type": "dict", + "allow_unknown": True, + "schema": { + "_id": Resource.rel("events", type="string", required=True), + "recurrence_id": { + "type": "string", + "nullable": True, + }, + "link_type": { + "type": "string", + "required": True, + "default": "primary", + "allowed": ["primary", "secondary"], + }, + }, + }, + "mapping": { + "type": "nested", + "properties": { + "_id": not_analyzed, + "recurrence_id": not_analyzed, + "link_type": not_analyzed, + }, + }, + }, "extra": metadata_schema["extra"], "translations": { "type": "list", diff --git a/server/planning/events/events_spike.py b/server/planning/events/events_spike.py index 1f3c28090..0ecb5fe6f 100644 --- a/server/planning/events/events_spike.py +++ b/server/planning/events/events_spike.py 
@@ -8,9 +8,16 @@ # AUTHORS and LICENSE files distributed with this source code, or # at https://www.sourcefabric.org/superdesk/license +from flask import json, current_app as app +from eve.utils import ParsedRequest, config + +from superdesk import config, get_resource_service +from superdesk.errors import SuperdeskApiError +from superdesk.notification import push_notification +from apps.auth import get_user, get_user_id, get_auth + from .events import EventsResource from .events_base_service import EventsBaseService -from superdesk.errors import SuperdeskApiError from planning.common import ( ITEM_EXPIRY, ITEM_STATE, @@ -20,14 +27,12 @@ remove_lock_information, remove_autosave_on_spike, ) -from superdesk.notification import push_notification -from apps.auth import get_user, get_user_id, get_auth -from superdesk import config, get_resource_service from planning.item_lock import LOCK_USER, LOCK_SESSION -from eve.utils import ParsedRequest -from flask import json - -from flask import current_app as app +from planning.utils import ( + get_related_planning_for_events, + event_has_planning_items, + get_first_related_event_id_for_planning, +) class EventsSpikeResource(EventsResource): @@ -72,42 +77,38 @@ def on_updated(self, updates, original): # Spike associated planning planning_spike_service = get_resource_service("planning_spike") - query = {"query": {"bool": {"must": {"term": {"event_item": str(original[config.ID_FIELD])}}}}} - results = get_resource_service("planning").search(query) spiked_items = [] - if len(results.docs) > 0: - for planning in results.docs: - if planning["state"] == WORKFLOW_STATE.DRAFT: - planning_spike_service.patch(planning[config.ID_FIELD], {"state": "spiked"}) - spiked_items.append(str(planning[config.ID_FIELD])) - - # When a planning item associated with this event is spiked - # If there were any failures in removing assignments - # Send those notifications here - if len(spiked_items) > 0: - query = { - "query": {"filtered": {"filter": 
{"bool": {"must": {"terms": {"planning_item": spiked_items}}}}}} - } - req = ParsedRequest() - req.args = {"source": json.dumps(query)} - - assignments = get_resource_service("assignments").get(req=req, lookup=None) - if assignments.count() > 0: - session_id = get_auth().get("_id") - user_id = get_user().get(config.ID_FIELD) - push_notification( - "assignments:delete:fail", - items=[ - { - "slugline": a.get("planning").get("slugline"), - "type": a.get("planning").get("g2_content_type"), - } - for a in assignments - ], - session=session_id, - user=user_id, - ) + for planning in get_related_planning_for_events([original[config.ID_FIELD]], "primary"): + if planning["state"] == WORKFLOW_STATE.DRAFT: + planning_spike_service.patch(planning[config.ID_FIELD], {"state": "spiked"}) + spiked_items.append(str(planning[config.ID_FIELD])) + + # When a planning item associated with this event is spiked + # If there were any failures in removing assignments + # Send those notifications here + if len(spiked_items) > 0: + query = {"query": {"filtered": {"filter": {"bool": {"must": {"terms": {"planning_item": spiked_items}}}}}}} + + req = ParsedRequest() + req.args = {"source": json.dumps(query)} + + assignments = get_resource_service("assignments").get(req=req, lookup=None) + if assignments.count() > 0: + session_id = get_auth().get("_id") + user_id = get_user().get(config.ID_FIELD) + push_notification( + "assignments:delete:fail", + items=[ + { + "slugline": a.get("planning").get("slugline"), + "type": a.get("planning").get("g2_content_type"), + } + for a in assignments + ], + session=session_id, + user=user_id, + ) @staticmethod def push_notification(name, updates, original): @@ -182,7 +183,7 @@ def _validate(event): # If yes, return error # Check to see if we have any related planning items for that event which is locked planning_service = get_resource_service("planning") - for planning in list(planning_service.find(where={"event_item": event[config.ID_FIELD]})): + for planning 
in get_related_planning_for_events([event[config.ID_FIELD]], "primary"): if planning.get(LOCK_USER) or planning.get(LOCK_SESSION): raise SuperdeskApiError.forbiddenError( message="Spike failed. One or more related planning items are locked." @@ -210,21 +211,20 @@ def _validate_recurring(original, recurrence_id): if planning.get(LOCK_USER) or planning.get(LOCK_SESSION): raise SuperdeskApiError.forbiddenError(message="Spike failed. A related planning item is locked.") - if planning["event_item"] not in events_with_plans: - events_with_plans.append(planning["event_item"]) + first_event_id = get_first_related_event_id_for_planning(planning, "primary") + if first_event_id not in events_with_plans: + events_with_plans.append(first_event_id) return events_with_plans @staticmethod def _validate_states(event): - events_service = get_resource_service("events") - # Public Events (except unposted) cannot be spiked if event.get("pubstatus") and event.get("state") != WORKFLOW_STATE.KILLED: raise SuperdeskApiError.badRequestError(message="Spike failed. Posted Events cannot be spiked.") # Posted Events with Planning items cannot be spiked - elif event.get("pubstatus") and events_service.get_plannings_for_event(event).count() > 0: + elif event.get("pubstatus") and event_has_planning_items(event[config.ID_FIELD], "primary"): raise SuperdeskApiError.badRequestError(message="Spike failed. 
Event has an associated Planning item.") # Event was created from a 'Reschedule' action or is 'Rescheduled' diff --git a/server/planning/events/events_sync/__init__.py b/server/planning/events/events_sync/__init__.py index 5a220ffe9..50606aae1 100644 --- a/server/planning/events/events_sync/__init__.py +++ b/server/planning/events/events_sync/__init__.py @@ -10,14 +10,16 @@ from typing import Dict, Optional, List from copy import deepcopy -import pytz +import pytz from eve.utils import str_to_date + from superdesk import get_resource_service from planning.types import Event, EmbeddedPlanning, StringFieldTranslation from planning.common import get_config_event_fields_to_sync_with_planning from planning.content_profiles.utils import AllContentProfileData +from planning.utils import get_related_planning_for_events from .common import VocabsSyncData, SyncItemData, SyncData from .embedded_planning import ( @@ -140,8 +142,7 @@ def sync_event_metadata_with_planning_items( planning_service.patch(sync_data.planning.original["_id"], sync_data.planning.updates) # Sync all the Planning items that were NOT provided in the ``embedded_planning`` field - where = {"$and": [{"event_item": event_updated.get("_id")}, {"_id": {"$nin": processed_planning_ids}}]} - for item in planning_service.find(where=where): + for item in get_related_planning_for_events([event_updated["_id"]], "primary", processed_planning_ids): translated_fields = get_translated_fields(item.get("translations") or []) sync_data = SyncData( event=event_sync_data, diff --git a/server/planning/events/events_sync/embedded_planning.py b/server/planning/events/events_sync/embedded_planning.py index 4f69274d6..7f27373a9 100644 --- a/server/planning/events/events_sync/embedded_planning.py +++ b/server/planning/events/events_sync/embedded_planning.py @@ -14,7 +14,15 @@ from superdesk import get_resource_service -from planning.types import Event, EmbeddedPlanning, EmbeddedCoverageItem, Planning, Coverage, 
StringFieldTranslation +from planning.types import ( + Event, + EmbeddedPlanning, + EmbeddedCoverageItem, + Planning, + Coverage, + StringFieldTranslation, + PlanningRelatedEventLink, +) from planning.content_profiles.utils import AllContentProfileData from .common import VocabsSyncData @@ -73,6 +81,10 @@ def map_event_to_planning_translation(translation: StringFieldTranslation): ) ] + related_event = PlanningRelatedEventLink(_id=event["_id"], link_type="primary") + if event.get("recurrence_id"): + related_event["recurrence_id"] = event["recurrence_id"] + for plan in embedded_planning: if plan.get("planning_id"): # Skip this item, as it's an existing Planning item @@ -84,7 +96,7 @@ def map_event_to_planning_translation(translation: StringFieldTranslation): "state": "draft", "type": "planning", "planning_date": event["dates"]["start"], - "event_item": event["_id"], + "related_events": [related_event], "coverages": [], } diff --git a/server/planning/events/events_tests.py b/server/planning/events/events_tests.py index f8a649dbd..221d9a978 100644 --- a/server/planning/events/events_tests.py +++ b/server/planning/events/events_tests.py @@ -1,14 +1,27 @@ +# -*- coding: utf-8; -*- +# +# This file is part of Superdesk. +# +# Copyright 2014 Sourcefabric z.u. and contributors. 
+# +# For the full copyright and license information, please see the +# AUTHORS and LICENSE files distributed with this source code, or +# at https://www.sourcefabric.org/superdesk/license + from datetime import datetime, timedelta -import pytz from copy import deepcopy + +import pytz from mock import Mock, patch + from superdesk import get_resource_service from superdesk.utc import utcnow + from planning.tests import TestCase from planning.common import format_address, POST_STATE from planning.item_lock import LockService from planning.events.events import generate_recurring_dates -from werkzeug.exceptions import BadRequest +from planning.types import PlanningRelatedEventLink class EventTestCase(TestCase): @@ -468,7 +481,7 @@ def test_planning_item_is_published_with_events(self): "agendas": [], "languages": ["en"], "user": "12234553", - "event_item": event_id[0], + "related_events": [PlanningRelatedEventLink(_id=event_id[0], link_type="primary")], "coverages": [ { "coverage_id": "urn:newsml:localhost:5000:2023-09-08T17:40:56.290922:e264a179-5b1a-4b52-b73b-332660848cae", @@ -574,7 +587,7 @@ def test_new_planning_is_published_when_adding_to_published_event(self): "planning_date": datetime(2099, 11, 21, 12, 00, 00, tzinfo=pytz.UTC), "name": "Demo 1", "type": "planning", - "event_item": event_id, + "related_events": [PlanningRelatedEventLink(_id=event_id, link_type="primary")], } ] )[0] diff --git a/server/planning/events/events_update_repetitions.py b/server/planning/events/events_update_repetitions.py index 8080a0197..94a24ce1a 100644 --- a/server/planning/events/events_update_repetitions.py +++ b/server/planning/events/events_update_repetitions.py @@ -8,11 +8,18 @@ # AUTHORS and LICENSE files distributed with this source code, or # at https://www.sourcefabric.org/superdesk/license +from copy import deepcopy + +import pytz +from eve.utils import config +from flask import current_app as app + from superdesk import get_resource_service from superdesk.errors import 
SuperdeskApiError from superdesk.metadata.utils import generate_guid from superdesk.metadata.item import GUID_NEWSML from apps.auth import get_user_id + from planning.common import ( remove_lock_information, WORKFLOW_STATE, @@ -23,12 +30,7 @@ from .events import EventsResource, generate_recurring_dates from .events_base_service import EventsBaseService from planning.item_lock import LOCK_ACTION - -from eve.utils import config -from flask import current_app as app - -from copy import deepcopy -import pytz +from planning.utils import event_has_planning_items class EventsUpdateRepetitionsResource(EventsResource): @@ -120,9 +122,6 @@ def on_update(self, updates, original): event, event[config.ID_FIELD], "update_repetitions_create" ) - # Iterate over the events to delete/cancel - self._set_events_planning(deleted_events) - for event in deleted_events.values(): self._delete_event(event, events_service, updated_rule) @@ -182,9 +181,7 @@ def _create_event(self, date, updates, original, time_delta): return new_event def _delete_event(self, event, events_service, updated_rule): - event_plans = event.get("_plans", []) - - if len(event_plans) > 0 or event.get("pubstatus", None) is not None: + if event.get("pubstatus", None) is not None or event_has_planning_items(event[config.ID_FIELD], "primary"): self._cancel_event(event, updated_rule) else: events_service.delete_action(lookup={"_id": event[config.ID_FIELD]}) diff --git a/server/planning/item_lock.py b/server/planning/item_lock.py index d976ba322..5b3f0968d 100644 --- a/server/planning/item_lock.py +++ b/server/planning/item_lock.py @@ -9,18 +9,21 @@ # at https://www.sourcefabric.org/superdesk/license import logging -import superdesk +from eve.utils import config + +import superdesk from superdesk.errors import SuperdeskApiError from superdesk.notification import push_notification from superdesk.users.services import current_user_has_privilege from superdesk.utc import utcnow from superdesk.lock import lock, unlock -from 
eve.utils import config from superdesk import get_resource_service, get_resource_privileges from apps.common.components.base_component import BaseComponent from apps.item_lock.components.item_lock import LOCK_USER, LOCK_SESSION, LOCK_ACTION, LOCK_TIME +from planning.utils import get_related_event_ids_for_planning, get_first_related_event_id_for_planning + logger = logging.getLogger(__name__) @@ -51,16 +54,16 @@ def lock(self, item, user_id, session_id, action, resource): # lock_id will be: # 1 - Recurrence Id for items part of recurring series (event or planning) - # 2 - event_item for planning with associated event + # 2 - Event ID for planning with related primary event # 3 - item's _id for all other cases - lock_id_field = config.ID_FIELD + first_primary_event_id = get_first_related_event_id_for_planning(item, "primary") if item.get("recurrence_id"): - lock_id_field = "recurrence_id" - elif item.get("type") != "event" and item.get("event_item"): - lock_id_field = "event_item" - - # set the lock_id it per item - lock_id = "item_lock {}".format(item.get(lock_id_field)) + recurrence_id = item["recurrence_id"] + lock_id = f"item_lock {recurrence_id}" + elif item.get("type") != "event" and first_primary_event_id is not None: + lock_id = f"item_lock {first_primary_event_id}" + else: + lock_id = f"item_lock {item_id}" # get the lock it not raise forbidden exception if not lock(lock_id, expire=5): @@ -92,7 +95,7 @@ def lock(self, item, user_id, session_id, action, resource): lock_session=str(session_id), lock_action=updates.get(LOCK_ACTION), etag=updates["_etag"], - event_item=item.get("event_item"), + event_ids=get_related_event_ids_for_planning(item), recurrence_id=item.get("recurrence_id") or None, type=item.get("type"), ) @@ -148,7 +151,7 @@ def unlock(self, item, user_id, session_id, resource): user=str(user_id), lock_session=str(session_id), etag=updates.get("_etag") or item.get("_etag"), - event_item=item.get("event_item") or None, + 
event_ids=get_related_event_ids_for_planning(item), recurrence_id=item.get("recurrence_id") or None, type=item.get("type"), ) diff --git a/server/planning/output_formatters/json_planning.py b/server/planning/output_formatters/json_planning.py index c23f4b23e..c2473a8bd 100644 --- a/server/planning/output_formatters/json_planning.py +++ b/server/planning/output_formatters/json_planning.py @@ -8,17 +8,20 @@ # AUTHORS and LICENSE files distributed with this source code, or # at https://www.sourcefabric.org/superdesk/license +from copy import deepcopy +import json from flask import current_app as app -from superdesk.publish.formatters import Formatter + import superdesk -from apps.archive.common import ARCHIVE -import json -from superdesk.utils import json_serialize_datetime_objectId -from copy import deepcopy from superdesk import get_resource_service -from planning.common import ASSIGNMENT_WORKFLOW_STATE, WORKFLOW_STATE +from superdesk.publish.formatters import Formatter +from superdesk.utils import json_serialize_datetime_objectId from superdesk.metadata.item import CONTENT_STATE +from apps.archive.common import ARCHIVE + +from planning.common import ASSIGNMENT_WORKFLOW_STATE, WORKFLOW_STATE +from planning.utils import get_first_related_event_id_for_planning from .utils import expand_contact_info, get_matching_products from .json_utils import translate_names @@ -79,7 +82,7 @@ def format(self, item, subscriber, codes=None): ] def _format_item(self, item): - """Format the item to json event""" + """Format the item to json planning""" output_item = deepcopy(item) for f in self.remove_fields: output_item.pop(f, None) @@ -103,6 +106,10 @@ def _format_item(self, item): translate_names(output_item) + first_primary_event_id = get_first_related_event_id_for_planning(item, "primary") + if first_primary_event_id: + output_item["event_item"] = first_primary_event_id + return output_item def _get_coverage_workflow_state(self, assignment_state): diff --git 
a/server/planning/planning/__init__.py b/server/planning/planning/__init__.py index 9779eead7..4a135d881 100644 --- a/server/planning/planning/__init__.py +++ b/server/planning/planning/__init__.py @@ -8,9 +8,12 @@ # AUTHORS and LICENSE files distributed with this source code, or # at https://www.sourcefabric.org/superdesk/license -import superdesk from flask_babel import lazy_gettext -from .planning import PlanningResource, PlanningService, coverage_schema # noqa + +import superdesk + +from .planning import PlanningResource, PlanningService # noqa +from .planning_schema import coverage_schema # noqa from .planning_spike import ( PlanningSpikeResource, PlanningSpikeService, diff --git a/server/planning/planning/planning.py b/server/planning/planning/planning.py index 40f0f7d20..9ecbd2826 100644 --- a/server/planning/planning/planning.py +++ b/server/planning/planning/planning.py @@ -9,32 +9,40 @@ # at https://www.sourcefabric.org/superdesk/license """Superdesk Planning""" + from typing import Dict, Any, Optional, List -from bson import ObjectId from copy import deepcopy import logging from datetime import datetime +from itertools import chain +from io import BytesIO +from lxml import etree +from bson import ObjectId from flask import json, current_app as app from eve.methods.common import resolve_document_etag +from eve.utils import config, ParsedRequest, date_to_str -import superdesk +from superdesk import get_resource_service, Service, Resource from superdesk.errors import SuperdeskApiError -from planning.errors import AssignmentApiError - +from superdesk.utc import utcnow, utc_to_local from superdesk.metadata.utils import generate_guid, item_url from superdesk.metadata.item import GUID_NEWSML, metadata_schema, ITEM_TYPE, CONTENT_STATE -from superdesk import get_resource_service -from superdesk.resource import not_analyzed, string_with_analyzer from superdesk.users.services import current_user_has_privilege from superdesk.notification import push_notification - 
from apps.archive.common import get_user, get_auth, update_dates_for -from eve.utils import config, ParsedRequest, date_to_str -from planning.types import Planning, Coverage, Event, UPDATE_METHOD + +from planning.errors import AssignmentApiError +from planning.types import ( + Planning, + Coverage, + Event, + UPDATE_METHOD, + PlanningRelatedEventLink, + ContentProfile, + PLANNING_RELATED_EVENT_LINK_TYPE, +) from planning.common import ( - WORKFLOW_STATE_SCHEMA, - POST_STATE_SCHEMA, get_coverage_status_from_cv, WORKFLOW_STATE, ASSIGNMENT_WORKFLOW_STATE, @@ -46,7 +54,6 @@ DEFAULT_ASSIGNMENT_PRIORITY, get_planning_allow_scheduled_updates, TO_BE_CONFIRMED_FIELD, - TO_BE_CONFIRMED_FIELD_SCHEMA, get_planning_xmp_assignment_mapping, sanitize_input_data, get_planning_xmp_slugline_mapping, @@ -56,21 +63,23 @@ set_ingest_version_datetime, is_new_version, update_ingest_on_patch, - UPDATE_METHODS, UPDATE_SINGLE, UPDATE_FUTURE, UPDATE_ALL, POST_STATE, ) -from superdesk.utc import utcnow -from itertools import chain + from planning.planning_notifications import PlanningNotifications -from superdesk.utc import utc_to_local from planning.content_profiles.utils import is_field_enabled, is_post_planning_with_event_enabled -from superdesk import Resource -from lxml import etree -from io import BytesIO from planning.signals import planning_created, planning_ingested +from .planning_schema import planning_schema +from planning.utils import ( + get_related_planning_for_events, + get_related_event_links_for_planning, + get_related_event_ids_for_planning, + get_first_related_event_id_for_planning, + get_related_event_items_for_planning, +) logger = logging.getLogger(__name__) @@ -84,7 +93,7 @@ def get_coverage_by_id( ) -class PlanningService(superdesk.Service): +class PlanningService(Service): """Service class for the planning model.""" def post_in_mongo(self, docs, **kwargs): @@ -164,7 +173,8 @@ def on_create(self, docs): self.validate_planning(doc) set_original_creator(doc) - event: 
Event = self._set_planning_event_info(doc, planning_type) + + first_event = self._set_planning_event_info(doc, planning_type) self._set_coverage(doc) self.set_planning_schedule(doc) # set timestamps @@ -175,8 +185,8 @@ def on_create(self, docs): history_service.on_item_created([doc]) update_method: Optional[UPDATE_METHOD] = doc.pop("update_method", None) - if event and update_method is not None: - new_plans = self._add_planning_to_event_series(doc, event, update_method) + if first_event and update_method is not None: + new_plans = self._add_planning_to_event_series(doc, first_event, update_method) if len(new_plans): if is_ingested: history_service.on_item_created(new_plans) @@ -197,16 +207,22 @@ def on_created(self, docs): added_agendas=doc.get("agendas") or [], removed_agendas=[], session=session_id, - event_item=doc.get("event_item", None), + event_ids=get_related_event_ids_for_planning(doc), # Event IDs for both primary and secondary events ) self._update_event_history(doc) planning_created.send(self, item=doc) - event_id = doc.get("event_item") - if event_id and post_planning_with_event: - event = get_resource_service("events").find_one(req=None, _id=event_id) + first_primary_event_id = get_first_related_event_id_for_planning(doc, "primary") + if first_primary_event_id and post_planning_with_event: + event = get_resource_service("events").find_one(req=None, _id=first_primary_event_id) if not event: - logger.warning(f"Failed to find linked event {event_id} for planning {plan_id}") + logger.warning( + "Failed to find linked event for planning", + extra=dict( + event_id=first_primary_event_id, + plan_id=plan_id, + ), + ) elif event.get("pubstatus") == POST_STATE.USABLE: updates = doc.copy() updates["pubstatus"] = POST_STATE.USABLE @@ -214,32 +230,25 @@ def on_created(self, docs): self.generate_related_assignments(docs) - def _update_event_history(self, doc): - event_id = doc.get("event_item") - if not event_id: - return + def _update_event_history(self, doc: 
Planning): events_service = get_resource_service("events") - original_event = events_service.find_one(req=None, _id=event_id) - - if not original_event: - logger.warning(f"Failed to update event history, Event '{event_id}' not found") - return + events_history_service = get_resource_service("events_history") - events_service.system_update( - doc["event_item"], - { - "expiry": None, - # Event hasn't actually been updated - # So we leave these version dates alone - "_updated": original_event["_updated"], - "versioncreated": original_event["versioncreated"], - }, - original_event, - ) - - get_resource_service("events_history").on_item_updated( - {"planning_id": doc.get("_id")}, original_event, "planning_created" - ) + for original_event in get_related_event_items_for_planning(doc, "primary"): + events_service.system_update( + original_event[config.ID_FIELD], + { + "expiry": None, + # Event hasn't actually been updated + # So we leave these version dates alone + "_updated": original_event["_updated"], + "versioncreated": original_event["versioncreated"], + }, + original_event, + ) + events_history_service.on_item_updated( + {"planning_id": doc[config.ID_FIELD]}, original_event, "planning_created" + ) def on_duplicated(self, doc, parent_id): self._update_event_history(doc) @@ -281,6 +290,7 @@ def update(self, id, updates, original): def on_update(self, updates, original): update_method = updates.pop("update_method", UPDATE_SINGLE) user = get_user() + self.validate_on_update(updates, original, user) if user and user.get(config.ID_FIELD): @@ -349,13 +359,13 @@ def validate_planning(self, updates, original=None): if next_schedule and next_schedule["planning"]["scheduled"] > scheduled_update["planning"]["scheduled"]: raise SuperdeskApiError(message="Scheduled updates of a coverage must be after the previous update") - def _set_planning_event_info(self, doc, planning_type) -> Optional[Dict[str, Any]]: + def _set_planning_event_info(self, doc: Planning, planning_type: 
ContentProfile) -> Optional[Event]: """Set the planning event date :param dict doc: planning document :param dict planning_types: planning type """ - event_id = doc.get("event_item") + event_id = get_first_related_event_id_for_planning(doc, "primary") if not event_id: return None @@ -363,8 +373,13 @@ def _set_planning_event_info(self, doc, planning_type) -> Optional[Dict[str, Any event = get_resource_service("events").find_one(req=None, _id=event_id) if not event: - plan_id = doc.get("_id") - logger.warning(f"Failed to find linked event {event_id} for planning {plan_id}") + logger.warning( + "Failed to find linked event for planning", + extra=dict( + event_id=event_id, + plan_id=doc.get(config.ID_FIELD), + ), + ) return None if event.get("recurrence_id"): @@ -408,7 +423,9 @@ def _add_planning_to_event_series( # Set the Planning & Event IDs for the new item new_plan["guid"] = new_plan["_id"] = generate_guid(type=GUID_NEWSML) - new_plan["event_item"] = series_entry["_id"] + new_plan["related_events"] = [ + PlanningRelatedEventLink(_id=series_entry["_id"], recurrence_id=recurrence_id, link_type="primary") + ] new_plan["recurrence_id"] = recurrence_id # Set the Planning date/time relative to the Event start date/time @@ -463,7 +480,7 @@ def on_updated(self, updates, original, from_ingest=False): user=user_id, lock_session=session_id, etag=updates["_etag"], - event_item=original.get("event_item"), + event_ids=get_related_event_ids_for_planning(doc), # Event IDs for both primary and secondary events, recurrence_id=original.get("recurrence_id") or None, from_ingest=from_ingest, ) @@ -490,27 +507,24 @@ def get_planning_by_agenda_id(self, agenda_id): req.args = {"source": json.dumps(query)} return super().get(req=req, lookup=None) - def get_all_items_in_relationship(self, item): - all_items = [] - if item.get("event_item"): - if item.get("recurrence_id"): - event_param = { - "_id": item.get("event_item"), - "recurrence_id": item.get("recurrence_id"), - } - # One call wil 
get all items in the recurring series from event service - return get_resource_service("events").get_all_items_in_relationship(event_param) - else: - event_param = {"_id": item.get("event_item")} - # Get associated event - all_items = get_resource_service("events").find(where={"_id": item.get("event_item")}) - # Get all associated planning items - return chain( - all_items, - get_resource_service("events").get_plannings_for_event(event_param), - ) + def get_all_items_in_relationship( + self, item: Planning, event_link_type: PLANNING_RELATED_EVENT_LINK_TYPE = "primary" + ): + event_id = get_first_related_event_id_for_planning(item, event_link_type) + if not event_id: + return [] + + events_service = get_resource_service("events") + if item.get("recurrence_id"): + # One call wil get all items in the recurring series from event service + return events_service.get_all_items_in_relationship( + {"recurrence_id": item["recurrence_id"]}, event_link_type + ) else: - return all_items + # Get associated event + all_items = events_service.find(where={"_id": event_id}) + # Get all associated planning items + return chain(all_items, get_related_planning_for_events([event_id], event_link_type)) def remove_coverages(self, updates, original): if "coverages" not in updates: @@ -1199,8 +1213,7 @@ def remove_assignment(self, assignment_item): get_resource_service("planning_autosave").on_assignment_removed(planning_item[config.ID_FIELD], coverage_id) - if planning_item.get("event_item"): - updated_planning["event_item"] = planning_item["event_item"] + updated_planning["related_events"] = get_related_event_links_for_planning(planning_item) return updated_planning @@ -1308,7 +1321,12 @@ def get_expired_items(self, expiry_datetime, spiked_planning_only=False): "query": { "bool": { "must_not": [ - {"constant_score": {"filter": {"exists": {"field": "event_item"}}}}, + { + "nested": { + "path": "related_events", + "query": {"term": {"related_events.link_type": "primary"}}, + }, + }, {"term": 
{"expired": True}}, nested_filter, range_filter, @@ -1357,10 +1375,22 @@ def get_expired_items(self, expiry_datetime, spiked_planning_only=False): yield list(results.docs) def on_event_converted_to_recurring(self, updates, original): - items = self.find(where={"event_item": original[config.ID_FIELD]}) - - for item in items: - self.patch(item[config.ID_FIELD], {"recurrence_id": updates["recurrence_id"]}) + event_id = original[config.ID_FIELD] + for item in get_related_planning_for_events([original[config.ID_FIELD]]): + related_events = get_related_event_links_for_planning(item) + + # Set the ``recurrence_id`` in the ``planning.related_events`` field + for event in related_events: + if event["_id"] == event_id: + event["recurrence_id"] = updates["recurrence_id"] + break + self.patch( + item[config.ID_FIELD], + { + "recurrence_id": updates["recurrence_id"], + "related_events": related_events, + }, + ) def get_xmp_file_for_updates(self, updates_coverage, original_coverage, for_slugline=False): rv = False @@ -1536,7 +1566,7 @@ def _update_recurring_planning_items(self, updates, original, update_method): "ingest_provider_sequence", "ingest_firstcreated", "ingest_versioncreated", - "event_item", + "related_events", "state", "pubstatus", "expiry", @@ -1659,315 +1689,7 @@ def _iter_recurring_plannings_to_update(self, updates, original, update_method): yield plan -event_type = deepcopy(superdesk.Resource.rel("events", type="string")) -event_type["mapping"] = not_analyzed - -assigned_to_schema = { - "type": "dict", - "mapping": { - "type": "object", - "properties": { - "assignment_id": not_analyzed, - "state": not_analyzed, - "contact": not_analyzed, - }, - }, -} - -coverage_schema = { - # Identifiers - "coverage_id": {"type": "string", "mapping": not_analyzed}, - "original_coverage_id": {"type": "string", "mapping": not_analyzed}, - "guid": metadata_schema["guid"], - # Audit Information - "original_creator": metadata_schema["original_creator"], - "version_creator": 
metadata_schema["version_creator"], - "firstcreated": metadata_schema["firstcreated"], - "versioncreated": metadata_schema["versioncreated"], - # News Coverage Details - # See IPTC-G2-Implementation_Guide 16.4 - "planning": { - "type": "dict", - "schema": { - "ednote": metadata_schema["ednote"], - "g2_content_type": {"type": "string", "mapping": not_analyzed}, - "coverage_provider": {"type": "string", "mapping": not_analyzed}, - "contact_info": Resource.rel("contacts", type="string", nullable=True), - "item_class": {"type": "string", "mapping": not_analyzed}, - "item_count": {"type": "string", "mapping": not_analyzed}, - "scheduled": {"type": "datetime"}, - "files": { - "type": "list", - "nullable": True, - "schema": Resource.rel("planning_files"), - "mapping": not_analyzed, - }, - "xmp_file": Resource.rel("planning_files", nullable=True), - "service": { - "type": "list", - "mapping": {"properties": {"qcode": not_analyzed, "name": not_analyzed}}, - }, - "news_content_characteristics": { - "type": "list", - "mapping": {"properties": {"name": not_analyzed, "value": not_analyzed}}, - }, - "planning_ext_property": { - "type": "list", - "mapping": { - "properties": { - "qcode": not_analyzed, - "value": not_analyzed, - "name": not_analyzed, - } - }, - }, - # Metadata hints. 
See IPTC-G2-Implementation_Guide 16.5.1.1 - "by": {"type": "list", "mapping": {"type": "string"}}, - "credit_line": {"type": "list", "mapping": {"type": "string"}}, - "dateline": {"type": "list", "mapping": {"type": "string"}}, - "description_text": metadata_schema["description_text"], - "genre": metadata_schema["genre"], - "headline": metadata_schema["headline"], - "keyword": {"type": "list", "mapping": {"type": "string"}}, - "language": metadata_schema["language"], - "slugline": metadata_schema["slugline"], - "subject": metadata_schema["subject"], - "internal_note": {"type": "string", "nullable": True}, - "workflow_status_reason": {"type": "string", "nullable": True}, - "priority": metadata_schema["priority"], - }, # end planning dict schema - }, # end planning - "news_coverage_status": { - "type": "dict", - "allow_unknown": True, - "schema": { - "qcode": {"type": "string"}, - "name": {"type": "string"}, - "label": {"type": "string"}, - }, - }, - "workflow_status": {"type": "string"}, - "previous_status": {"type": "string"}, - "assigned_to": assigned_to_schema, - "flags": { - "type": "dict", - "allow_unknown": True, - "schema": {"no_content_linking": {"type": "boolean", "default": False}}, - }, - TO_BE_CONFIRMED_FIELD: TO_BE_CONFIRMED_FIELD_SCHEMA, - "scheduled_updates": { - "type": "list", - "schema": { - "type": "dict", - "schema": { - "scheduled_update_id": {"type": "string", "mapping": not_analyzed}, - "coverage_id": {"type": "string", "mapping": not_analyzed}, - "workflow_status": {"type": "string"}, - "assigned_to": assigned_to_schema, - "previous_status": {"type": "string"}, - "news_coverage_status": { - "type": "dict", - "allow_unknown": True, - "schema": { - "qcode": {"type": "string"}, - "name": {"type": "string"}, - "label": {"type": "string"}, - }, - }, - "planning": { - "type": "dict", - "schema": { - "internal_note": {"type": "string", "nullable": True}, - "contact_info": Resource.rel("contacts", type="string", nullable=True), - "scheduled": 
{"type": "datetime"}, - "genre": metadata_schema["genre"], - "workflow_status_reason": {"type": "string", "nullable": True}, - }, - }, - }, - }, - }, # end scheduled_updates -} # end coverage_schema - -planning_schema = { - # Identifiers - config.ID_FIELD: metadata_schema[config.ID_FIELD], - "guid": metadata_schema["guid"], - # Audit Information - "original_creator": metadata_schema["original_creator"], - "version_creator": metadata_schema["version_creator"], - "firstcreated": metadata_schema["firstcreated"], - "versioncreated": metadata_schema["versioncreated"], - # Ingest Details - "ingest_provider": metadata_schema["ingest_provider"], - "source": metadata_schema["source"], - "original_source": metadata_schema["original_source"], - "ingest_provider_sequence": metadata_schema["ingest_provider_sequence"], - "ingest_firstcreated": metadata_schema["versioncreated"], - "ingest_versioncreated": metadata_schema["versioncreated"], - # Agenda Item details - "agendas": { - "type": "list", - "schema": superdesk.Resource.rel("agenda"), - "mapping": not_analyzed, - }, - # Event Item - "event_item": event_type, - "recurrence_id": { - "type": "string", - "mapping": not_analyzed, - "nullable": True, - }, - "planning_recurrence_id": { - "type": "string", - "mapping": not_analyzed, - "nullable": True, - }, - # Planning Details - # NewsML-G2 Event properties See IPTC-G2-Implementation_Guide 16 - # Planning Item Metadata - See IPTC-G2-Implementation_Guide 16.1 - "item_class": {"type": "string", "default": "plinat:newscoverage"}, - "ednote": metadata_schema["ednote"], - "description_text": metadata_schema["description_text"], - "internal_note": {"type": "string", "nullable": True}, - "anpa_category": metadata_schema["anpa_category"], - "subject": metadata_schema["subject"], - "genre": metadata_schema["genre"], - "company_codes": metadata_schema["company_codes"], - # Content Metadata - See IPTC-G2-Implementation_Guide 16.2 - "language": metadata_schema["language"], - "languages": { - 
"type": "list", - "mapping": not_analyzed, - }, - "translations": { - "type": "list", - "mapping": { - "type": "nested", - "properties": { - "field": not_analyzed, - "language": not_analyzed, - "value": metadata_schema["slugline"]["mapping"], - }, - }, - }, - "abstract": metadata_schema["abstract"], - "headline": metadata_schema["headline"], - "slugline": metadata_schema["slugline"], - "keywords": metadata_schema["keywords"], - "word_count": metadata_schema["word_count"], - "priority": metadata_schema["priority"], - "urgency": metadata_schema["urgency"], - "profile": metadata_schema["profile"], - # These next two are for spiking/unspiking and purging of planning/agenda items - "state": WORKFLOW_STATE_SCHEMA, - "expiry": {"type": "datetime", "nullable": True}, - "expired": {"type": "boolean", "default": False}, - "featured": {"type": "boolean"}, - "lock_user": metadata_schema["lock_user"], - "lock_time": metadata_schema["lock_time"], - "lock_session": metadata_schema["lock_session"], - "lock_action": metadata_schema["lock_action"], - "coverages": { - "type": "list", - "default": [], - "schema": { - "type": "dict", - "schema": coverage_schema, - }, - "mapping": { - "type": "nested", - "properties": { - "coverage_id": not_analyzed, - "planning": { - "type": "object", - "properties": { - "slugline": metadata_schema["slugline"]["mapping"], - }, - }, - "assigned_to": assigned_to_schema["mapping"], - "original_creator": { - "type": "keyword", - }, - }, - }, - }, - # field to sync coverage scheduled information - # to be used for sorting/filtering on scheduled - "_planning_schedule": { - "type": "list", - "mapping": { - "type": "nested", - "properties": { - "coverage_id": not_analyzed, - "scheduled": {"type": "date"}, - }, - }, - }, - # field to sync scheduled_updates scheduled information - # to be used for sorting/filtering on scheduled - "_updates_schedule": { - "type": "list", - "mapping": { - "type": "nested", - "properties": { - "scheduled_update_id": not_analyzed, - 
"scheduled": {"type": "date"}, - }, - }, - }, - "planning_date": { - "type": "datetime", - "nullable": False, - }, - "flags": { - "type": "dict", - "schema": { - "marked_for_not_publication": metadata_schema["flags"]["schema"]["marked_for_not_publication"], - # If the config is set to create coverage items in workflow this flag will override that and allow coverages - # created for this planning item to be created in draft - "overide_auto_assign_to_workflow": {"type": "boolean", "default": False}, - }, - }, - # Public/Published status - "pubstatus": POST_STATE_SCHEMA, - # The previous state the item was in before for example being spiked, - # when un-spiked it will revert to this state - "revert_state": metadata_schema["revert_state"], - # Item type used by superdesk publishing - ITEM_TYPE: { - "type": "string", - "mapping": not_analyzed, - "default": "planning", - }, - # Identifier used to synchronise the posted planning item with an external system. - "unique_id": {"type": "string", "mapping": not_analyzed}, - "place": metadata_schema["place"], - # Name used to identify the planning item - "name": {"type": "string"}, - "files": { - "type": "list", - "nullable": True, - "schema": Resource.rel("planning_files"), - "mapping": not_analyzed, - }, - # Reason (if any) for the current state (cancelled, postponed, rescheduled) - "state_reason": {"type": "string", "nullable": True}, - TO_BE_CONFIRMED_FIELD: TO_BE_CONFIRMED_FIELD_SCHEMA, - "_type": {"type": "string", "mapping": None}, - "extra": metadata_schema["extra"], - "versionposted": {"type": "datetime", "nullable": False}, - # The update method used for recurring planning items - "update_method": { - "type": "string", - "allowed": UPDATE_METHODS, - "mapping": not_analyzed, - "nullable": True, - }, -} # end planning_schema - - -class PlanningResource(superdesk.Resource): +class PlanningResource(Resource): """Resource for planning data model See IPTC-G2-Implementation_Guide (version 2.21) Section 16.5 for schema 
details @@ -1990,7 +1712,6 @@ class PlanningResource(superdesk.Resource): etag_ignore_fields = ["_planning_schedule", "_updates_schedule"] mongo_indexes = { - "event_item": ([("event_item", 1)], {"background": True}), "planning_recurrence_id": ([("planning_recurrence_id", 1)], {"background": True}), } diff --git a/server/planning/planning/planning_duplicate.py b/server/planning/planning/planning_duplicate.py index 2de22c6ba..ee8dbd98e 100644 --- a/server/planning/planning/planning_duplicate.py +++ b/server/planning/planning/planning_duplicate.py @@ -9,16 +9,20 @@ # at https://www.sourcefabric.org/superdesk/license import logging +from copy import deepcopy + from eve.utils import config +from flask import request + from superdesk import get_resource_service from superdesk.resource import Resource from superdesk.services import BaseService from superdesk.metadata.utils import item_url, generate_guid from superdesk.metadata.item import GUID_NEWSML from superdesk.utc import utcnow, utc_to_local -from flask import request + from planning.common import ITEM_STATE, WORKFLOW_STATE, TEMP_ID_PREFIX -from copy import deepcopy +from planning.utils import get_related_event_links_for_planning, get_related_event_items_for_planning logger = logging.getLogger(__name__) @@ -56,20 +60,28 @@ def create(self, docs, **kwargs): def _duplicate_planning(self, original): new_plan = deepcopy(original) - if new_plan.get("event_item") and new_plan.get(ITEM_STATE) == WORKFLOW_STATE.CANCELLED: - # if the event is cancelled remove the link to the associated event - event = get_resource_service("events").find_one(req=None, _id=new_plan.get("event_item")) - if event and event.get(ITEM_STATE) == WORKFLOW_STATE.CANCELLED: - del new_plan["event_item"] - - if (new_plan.get("expired") and new_plan.get("event_item")) or new_plan.get( - ITEM_STATE - ) == WORKFLOW_STATE.RESCHEDULED: - # If the Planning item has expired and is associated with an Event - # then we remove the link to the associated Event as 
the Event would have - # been expired also. - # If associated event is rescheduled then remove the associated event - del new_plan["event_item"] + related_events = get_related_event_links_for_planning(original) + + if len(related_events): + if original.get("expired") or original.get(ITEM_STATE) == WORKFLOW_STATE.RESCHEDULED: + # If the Planning item has expired, or has been rescheduled, and is associated with an Event + # then we remove the link to the associated Events as the Event would have been expired also. + new_plan["related_events"] = [] + elif original.get(ITEM_STATE) == WORKFLOW_STATE.CANCELLED: + events_to_remove = [] + + for related_event in get_related_event_items_for_planning(original): + if related_event.get(ITEM_STATE) == WORKFLOW_STATE.CANCELLED: + # If both the Planning and Events are cancelled, then unlink this Event + events_to_remove.append(related_event[config.ID_FIELD]) + + # Remove any of the Event's flagged to be removed from above + if len(events_to_remove): + new_plan["related_events"] = [ + related_event + for related_event in related_events + if related_event["_id"] not in events_to_remove + ] for f in ( "_id", diff --git a/server/planning/planning/planning_history.py b/server/planning/planning/planning_history.py index 8279f31e3..c5ba19468 100644 --- a/server/planning/planning/planning_history.py +++ b/server/planning/planning/planning_history.py @@ -8,16 +8,21 @@ """Superdesk Files""" +import logging +from copy import deepcopy + from flask import request +from eve.utils import config + from superdesk import Resource, get_resource_service +from superdesk.default_settings import strtobool + from planning.history import HistoryService -import logging -from eve.utils import config -from copy import deepcopy from planning.common import WORKFLOW_STATE, ITEM_ACTIONS, ASSIGNMENT_WORKFLOW_STATE from planning.item_lock import LOCK_ACTION from planning.assignments.assignments_history import ASSIGNMENT_HISTORY_ACTIONS -from 
superdesk.default_settings import strtobool +from planning.utils import get_related_event_links_for_planning + logger = logging.getLogger(__name__) update_item_actions = ["assign_agenda", "add_featured", "remove_featured"] @@ -84,7 +89,7 @@ def on_item_updated(self, updates, original, operation=None): if original.get(LOCK_ACTION) == "assign_agenda": diff["agendas"] = [a for a in diff.get("agendas", []) if a not in original.get("agendas", [])] - if diff.get("event_item"): + if len(get_related_event_links_for_planning(diff, "primary")): operation = "create_event" self._save_history(item, diff, operation) diff --git a/server/planning/planning/planning_lock.py b/server/planning/planning/planning_lock.py index cab5e57e6..72e5ee271 100644 --- a/server/planning/planning/planning_lock.py +++ b/server/planning/planning/planning_lock.py @@ -8,18 +8,22 @@ # AUTHORS and LICENSE files distributed with this source code, or # at https://www.sourcefabric.org/superdesk/license +from copy import deepcopy + from flask import request +from eve.utils import config + from superdesk.resource import Resource, build_custom_hateoas from superdesk.metadata.utils import item_url -from apps.archive.common import get_user, get_auth from superdesk.services import BaseService -from planning.item_lock import LockService from superdesk import get_resource_service +from apps.archive.common import get_user, get_auth from apps.common.components.utils import get_component + +from planning.item_lock import LockService from planning.common import update_returned_document from planning.planning.planning import planning_schema -from copy import deepcopy -from eve.utils import config +from planning.utils import get_related_event_links_for_planning CUSTOM_HATEOAS_PLANNING = {"self": {"title": "Planning", "href": "/planning/{_id}"}} @@ -50,7 +54,7 @@ def lock_item(self, item_id, action, doc): lock_service = get_component(LockService) item = get_resource_service("planning").find_one(req=None, _id=item_id) - if 
item and item.get("event_item"): + if item and len(get_related_event_links_for_planning(item, "primary")): lock_service.validate_relationship_locks(item, "planning") updated_item = lock_service.lock(item, user_id, session_id, lock_action, "planning") diff --git a/server/planning/planning/planning_post.py b/server/planning/planning/planning_post.py index 6bc81ae47..6b9a6699f 100644 --- a/server/planning/planning/planning_post.py +++ b/server/planning/planning/planning_post.py @@ -8,18 +8,23 @@ # AUTHORS and LICENSE files distributed with this source code, or # at https://www.sourcefabric.org/superdesk/license +from typing import List +from copy import deepcopy +import logging + from flask import abort -from superdesk import get_resource_service, logger +from eve.utils import config + +from superdesk import get_resource_service from superdesk.errors import SuperdeskApiError from superdesk.resource import Resource from superdesk.services import BaseService from superdesk.notification import push_notification from superdesk.utc import utcnow -from copy import deepcopy -import logging -from eve.utils import config -from planning.planning import PlanningResource +from planning.types import Planning, Event +from .planning import PlanningResource +from planning.utils import get_related_event_items_for_planning from planning.common import ( WORKFLOW_STATE, POST_STATE, @@ -54,11 +59,8 @@ def create(self, docs, **kwargs): assignments_to_delete = [] for doc in docs: plan = get_resource_service("planning").find_one(req=None, _id=doc["planning"]) - event = None - if plan.get("event_item"): - event = get_resource_service("events").find_one(req=None, _id=plan.get("event_item")) - - self.validate_item(plan, event, doc["pubstatus"]) + related_events = get_related_event_items_for_planning(plan, "primary") + self.validate_item(plan, related_events, doc["pubstatus"]) if not plan: abort(412) @@ -67,8 +69,11 @@ def create(self, docs, **kwargs): self.validate_related_item(plan) 
self.validate_post_state(doc["pubstatus"]) - if event and doc["pubstatus"] == POST_STATE.USABLE: - self.post_associated_event(event) + + if doc["pubstatus"] == POST_STATE.USABLE: + for related_event in related_events: + self.post_associated_event(related_event) + self.post_planning(plan, doc["pubstatus"], assignments_to_delete, **kwargs) ids.append(doc["planning"]) @@ -91,8 +96,10 @@ def validate_post_state(self, new_post_state): abort(409) @staticmethod - def validate_item(doc, event, new_post_status): - if new_post_status == POST_STATE.USABLE and event and event.get("pubstatus") == POST_STATE.CANCELLED: + def validate_item(doc: Planning, related_events: List[Event], new_post_status: str): + if new_post_status == POST_STATE.USABLE and any( + 1 for e in related_events if e.get("pubstatus") == POST_STATE.CANCELLED + ): raise SuperdeskApiError(message="Can't post the planning item as event is already unposted/cancelled.") errors = get_resource_service("planning_validator").post( diff --git a/server/planning/planning/planning_schema.py b/server/planning/planning/planning_schema.py new file mode 100644 index 000000000..4d7442662 --- /dev/null +++ b/server/planning/planning/planning_schema.py @@ -0,0 +1,363 @@ +# -*- coding: utf-8; -*- +# +# This file is part of Superdesk. +# +# Copyright 2014 Sourcefabric z.u. and contributors. 
+# +# For the full copyright and license information, please see the +# AUTHORS and LICENSE files distributed with this source code, or +# at https://www.sourcefabric.org/superdesk/license + +import logging +from copy import deepcopy + +from eve.utils import config + +from superdesk.resource import Resource, not_analyzed, string_with_analyzer +from superdesk.metadata.item import metadata_schema, ITEM_TYPE + +from planning.common import ( + WORKFLOW_STATE_SCHEMA, + TO_BE_CONFIRMED_FIELD, + TO_BE_CONFIRMED_FIELD_SCHEMA, + POST_STATE_SCHEMA, + UPDATE_METHODS, +) + +logger = logging.getLogger(__name__) + + +assigned_to_schema = { + "type": "dict", + "mapping": { + "type": "object", + "properties": { + "assignment_id": not_analyzed, + "state": not_analyzed, + "contact": not_analyzed, + }, + }, +} + +coverage_schema = { + # Identifiers + "coverage_id": {"type": "string", "mapping": not_analyzed}, + "original_coverage_id": {"type": "string", "mapping": not_analyzed}, + "guid": metadata_schema["guid"], + # Audit Information + "original_creator": metadata_schema["original_creator"], + "version_creator": metadata_schema["version_creator"], + "firstcreated": metadata_schema["firstcreated"], + "versioncreated": metadata_schema["versioncreated"], + # News Coverage Details + # See IPTC-G2-Implementation_Guide 16.4 + "planning": { + "type": "dict", + "schema": { + "ednote": metadata_schema["ednote"], + "g2_content_type": {"type": "string", "mapping": not_analyzed}, + "coverage_provider": {"type": "string", "mapping": not_analyzed}, + "contact_info": Resource.rel("contacts", type="string", nullable=True), + "item_class": {"type": "string", "mapping": not_analyzed}, + "item_count": {"type": "string", "mapping": not_analyzed}, + "scheduled": {"type": "datetime"}, + "files": { + "type": "list", + "nullable": True, + "schema": Resource.rel("planning_files"), + "mapping": not_analyzed, + }, + "xmp_file": Resource.rel("planning_files", nullable=True), + "service": { + "type": "list", + 
"mapping": {"properties": {"qcode": not_analyzed, "name": not_analyzed}}, + }, + "news_content_characteristics": { + "type": "list", + "mapping": {"properties": {"name": not_analyzed, "value": not_analyzed}}, + }, + "planning_ext_property": { + "type": "list", + "mapping": { + "properties": { + "qcode": not_analyzed, + "value": not_analyzed, + "name": not_analyzed, + } + }, + }, + # Metadata hints. See IPTC-G2-Implementation_Guide 16.5.1.1 + "by": {"type": "list", "mapping": {"type": "string"}}, + "credit_line": {"type": "list", "mapping": {"type": "string"}}, + "dateline": {"type": "list", "mapping": {"type": "string"}}, + "description_text": metadata_schema["description_text"], + "genre": metadata_schema["genre"], + "headline": metadata_schema["headline"], + "keyword": {"type": "list", "mapping": {"type": "string"}}, + "language": metadata_schema["language"], + "slugline": metadata_schema["slugline"], + "subject": metadata_schema["subject"], + "internal_note": {"type": "string", "nullable": True}, + "workflow_status_reason": {"type": "string", "nullable": True}, + "priority": metadata_schema["priority"], + }, # end planning dict schema + }, # end planning + "news_coverage_status": { + "type": "dict", + "allow_unknown": True, + "schema": { + "qcode": {"type": "string"}, + "name": {"type": "string"}, + "label": {"type": "string"}, + }, + }, + "workflow_status": {"type": "string"}, + "previous_status": {"type": "string"}, + "assigned_to": assigned_to_schema, + "flags": { + "type": "dict", + "allow_unknown": True, + "schema": {"no_content_linking": {"type": "boolean", "default": False}}, + }, + TO_BE_CONFIRMED_FIELD: TO_BE_CONFIRMED_FIELD_SCHEMA, + "scheduled_updates": { + "type": "list", + "schema": { + "type": "dict", + "schema": { + "scheduled_update_id": {"type": "string", "mapping": not_analyzed}, + "coverage_id": {"type": "string", "mapping": not_analyzed}, + "workflow_status": {"type": "string"}, + "assigned_to": assigned_to_schema, + "previous_status": 
{"type": "string"}, + "news_coverage_status": { + "type": "dict", + "allow_unknown": True, + "schema": { + "qcode": {"type": "string"}, + "name": {"type": "string"}, + "label": {"type": "string"}, + }, + }, + "planning": { + "type": "dict", + "schema": { + "internal_note": {"type": "string", "nullable": True}, + "contact_info": Resource.rel("contacts", type="string", nullable=True), + "scheduled": {"type": "datetime"}, + "genre": metadata_schema["genre"], + "workflow_status_reason": {"type": "string", "nullable": True}, + }, + }, + }, + }, + }, # end scheduled_updates +} # end coverage_schema + +event_type = deepcopy(Resource.rel("events", type="string")) +event_type["mapping"] = not_analyzed + +planning_schema = { + # Identifiers + config.ID_FIELD: metadata_schema[config.ID_FIELD], + "guid": metadata_schema["guid"], + # Audit Information + "original_creator": metadata_schema["original_creator"], + "version_creator": metadata_schema["version_creator"], + "firstcreated": metadata_schema["firstcreated"], + "versioncreated": metadata_schema["versioncreated"], + # Ingest Details + "ingest_provider": metadata_schema["ingest_provider"], + "source": metadata_schema["source"], + "original_source": metadata_schema["original_source"], + "ingest_provider_sequence": metadata_schema["ingest_provider_sequence"], + "ingest_firstcreated": metadata_schema["versioncreated"], + "ingest_versioncreated": metadata_schema["versioncreated"], + # Agenda Item details + "agendas": { + "type": "list", + "schema": Resource.rel("agenda"), + "mapping": not_analyzed, + }, + # Related Events + "related_events": { + "type": "list", + "required": False, + "schema": { + "type": "dict", + "allow_unknown": True, + "schema": { + "_id": Resource.rel("events", type="string", required=True), + "recurrence_id": { + "type": "string", + "nullable": True, + }, + "link_type": { + "type": "string", + "required": True, + "default": "primary", + "allowed": ["primary", "secondary"], + }, + }, + }, + "mapping": { + 
"type": "nested", + "properties": { + "_id": not_analyzed, + "recurrence_id": not_analyzed, + "link_type": not_analyzed, + }, + }, + }, + "recurrence_id": { + "type": "string", + "mapping": not_analyzed, + "nullable": True, + }, + "planning_recurrence_id": { + "type": "string", + "mapping": not_analyzed, + "nullable": True, + }, + # Planning Details + # NewsML-G2 Event properties See IPTC-G2-Implementation_Guide 16 + # Planning Item Metadata - See IPTC-G2-Implementation_Guide 16.1 + "item_class": {"type": "string", "default": "plinat:newscoverage"}, + "ednote": metadata_schema["ednote"], + "description_text": metadata_schema["description_text"], + "internal_note": {"type": "string", "nullable": True}, + "anpa_category": metadata_schema["anpa_category"], + "subject": metadata_schema["subject"], + "genre": metadata_schema["genre"], + "company_codes": metadata_schema["company_codes"], + # Content Metadata - See IPTC-G2-Implementation_Guide 16.2 + "language": metadata_schema["language"], + "languages": { + "type": "list", + "mapping": not_analyzed, + }, + "translations": { + "type": "list", + "mapping": { + "type": "nested", + "properties": { + "field": not_analyzed, + "language": not_analyzed, + "value": metadata_schema["slugline"]["mapping"], + }, + }, + }, + "abstract": metadata_schema["abstract"], + "headline": metadata_schema["headline"], + "slugline": metadata_schema["slugline"], + "keywords": metadata_schema["keywords"], + "word_count": metadata_schema["word_count"], + "priority": metadata_schema["priority"], + "urgency": metadata_schema["urgency"], + "profile": metadata_schema["profile"], + # These next two are for spiking/unspiking and purging of planning/agenda items + "state": WORKFLOW_STATE_SCHEMA, + "expiry": {"type": "datetime", "nullable": True}, + "expired": {"type": "boolean", "default": False}, + "featured": {"type": "boolean"}, + "lock_user": metadata_schema["lock_user"], + "lock_time": metadata_schema["lock_time"], + "lock_session": 
metadata_schema["lock_session"], + "lock_action": metadata_schema["lock_action"], + "coverages": { + "type": "list", + "default": [], + "schema": { + "type": "dict", + "schema": coverage_schema, + }, + "mapping": { + "type": "nested", + "properties": { + "coverage_id": not_analyzed, + "planning": { + "type": "object", + "properties": { + "slugline": metadata_schema["slugline"]["mapping"], + }, + }, + "assigned_to": assigned_to_schema["mapping"], + "original_creator": { + "type": "keyword", + }, + }, + }, + }, + # field to sync coverage scheduled information + # to be used for sorting/filtering on scheduled + "_planning_schedule": { + "type": "list", + "mapping": { + "type": "nested", + "properties": { + "coverage_id": not_analyzed, + "scheduled": {"type": "date"}, + }, + }, + }, + # field to sync scheduled_updates scheduled information + # to be used for sorting/filtering on scheduled + "_updates_schedule": { + "type": "list", + "mapping": { + "type": "nested", + "properties": { + "scheduled_update_id": not_analyzed, + "scheduled": {"type": "date"}, + }, + }, + }, + "planning_date": { + "type": "datetime", + "nullable": False, + }, + "flags": { + "type": "dict", + "schema": { + "marked_for_not_publication": metadata_schema["flags"]["schema"]["marked_for_not_publication"], + # If the config is set to create coverage items in workflow this flag will override that and allow coverages + # created for this planning item to be created in draft + "overide_auto_assign_to_workflow": {"type": "boolean", "default": False}, + }, + }, + # Public/Published status + "pubstatus": POST_STATE_SCHEMA, + # The previous state the item was in before for example being spiked, + # when un-spiked it will revert to this state + "revert_state": metadata_schema["revert_state"], + # Item type used by superdesk publishing + ITEM_TYPE: { + "type": "string", + "mapping": not_analyzed, + "default": "planning", + }, + # Identifier used to synchronise the posted planning item with an external 
system. + "unique_id": {"type": "string", "mapping": not_analyzed}, + "place": metadata_schema["place"], + # Name used to identify the planning item + "name": {"type": "string"}, + "files": { + "type": "list", + "nullable": True, + "schema": Resource.rel("planning_files"), + "mapping": not_analyzed, + }, + # Reason (if any) for the current state (cancelled, postponed, rescheduled) + "state_reason": {"type": "string", "nullable": True}, + TO_BE_CONFIRMED_FIELD: TO_BE_CONFIRMED_FIELD_SCHEMA, + "_type": {"type": "string", "mapping": None}, + "extra": metadata_schema["extra"], + "versionposted": {"type": "datetime", "nullable": False}, + # The update method used for recurring planning items + "update_method": { + "type": "string", + "allowed": UPDATE_METHODS, + "mapping": not_analyzed, + "nullable": True, + }, +} # end planning_schema diff --git a/server/planning/planning/planning_spike.py b/server/planning/planning/planning_spike.py index 0aca53e29..4202e060c 100644 --- a/server/planning/planning/planning_spike.py +++ b/server/planning/planning/planning_spike.py @@ -8,7 +8,17 @@ # AUTHORS and LICENSE files distributed with this source code, or # at https://www.sourcefabric.org/superdesk/license +from copy import deepcopy + +from superdesk import config, get_resource_service +from superdesk.services import BaseService +from superdesk.notification import push_notification +from superdesk.errors import SuperdeskApiError +from apps.auth import get_user, get_user_id +from apps.archive.common import get_auth + from .planning import PlanningResource +from planning.utils import get_related_event_ids_for_planning, get_first_related_event_id_for_planning from planning.common import ( ITEM_EXPIRY, ITEM_STATE, @@ -18,16 +28,8 @@ remove_autosave_on_spike, remove_lock_information, ) -from superdesk.services import BaseService -from superdesk.notification import push_notification -from superdesk.errors import SuperdeskApiError -from apps.auth import get_user, get_user_id -from 
apps.archive.common import get_auth -from superdesk import config -from superdesk import get_resource_service from planning.planning_notifications import PlanningNotifications from planning.item_lock import LOCK_USER -from copy import deepcopy class PlanningSpikeResource(PlanningResource): @@ -49,7 +51,9 @@ def on_updated(self, updates, original): user=str(get_user_id()), lock_session=str(get_auth().get(config.ID_FIELD)), etag=updates.get("_etag"), - event_item=original.get("event_item") or None, + event_ids=get_related_event_ids_for_planning( + original + ), # Event IDs for both primary and secondary events, recurrence_id=original.get("recurrence_id") or None, type=original.get("type"), ) @@ -131,12 +135,16 @@ def on_updated(self, updates, original): if coverage.get("workflow_status") == WORKFLOW_STATE.ACTIVE: assignments_to_delete.append(coverage) - notify = True - if original.get("event_item"): - event = get_resource_service("events").find_one(req=None, _id=original.get("event_item")) - notify = not event or event.get("state") != WORKFLOW_STATE.SPIKED + notify_user_on_failed_assignment_deletes = True + first_event_id = get_first_related_event_id_for_planning(original, "primary") - get_resource_service("planning").delete_assignments_for_coverages(assignments_to_delete, notify) + if first_event_id: + event = get_resource_service("events").find_one(req=None, _id=first_event_id) + notify_user_on_failed_assignment_deletes = not event or event.get("state") != WORKFLOW_STATE.SPIKED + + get_resource_service("planning").delete_assignments_for_coverages( + assignments_to_delete, notify_user_on_failed_assignment_deletes + ) class PlanningUnspikeResource(PlanningResource): @@ -151,8 +159,9 @@ class PlanningUnspikeResource(PlanningResource): class PlanningUnspikeService(PlanningSpikeServiceBase): def update(self, id, updates, original): - if original.get("event_item"): - event = get_resource_service("events").find_one(req=None, _id=original["event_item"]) + first_event_id = 
get_first_related_event_id_for_planning(original, "primary") + if first_event_id: + event = get_resource_service("events").find_one(req=None, _id=first_event_id) if event.get("state") == WORKFLOW_STATE.SPIKED: raise SuperdeskApiError.badRequestError(message="Unspike failed. Associated event is spiked.") diff --git a/server/planning/planning_article_export.py b/server/planning/planning_article_export.py index 4d4eadbce..d79c1330f 100644 --- a/server/planning/planning_article_export.py +++ b/server/planning/planning_article_export.py @@ -27,6 +27,7 @@ ASSIGNMENT_WORKFLOW_STATE, get_first_paragraph_text, ) +from planning.utils import get_related_planning_for_events, get_first_related_event_id_for_planning from planning.archive import create_item_from_template @@ -67,10 +68,13 @@ def get_items(ids, resource_type): events_service = get_resource_service("events") for item in items: item_type = item.get("type") - if item_type == "planning" and item.get("event_item"): - item["event"] = events_service.find_one(req=None, _id=item["event_item"]) + + if item_type == "planning": + event_id = get_first_related_event_id_for_planning(item, "primary") + if event_id: + item["event"] = events_service.find_one(req=None, _id=event_id) elif item_type == "event": - item["plannings"] = events_service.get_plannings_for_event(item) + item["plannings"] = get_related_planning_for_events([item["_id"]], "primary") item["coverages"] = [] for plan in item["plannings"]: item["coverages"].extend(plan.get("coverages") or []) diff --git a/server/planning/planning_locks.py b/server/planning/planning_locks.py index 2a0652d17..f37611402 100644 --- a/server/planning/planning_locks.py +++ b/server/planning/planning_locks.py @@ -8,8 +8,8 @@ # AUTHORS and LICENSE files distributed with this source code, or # at https://www.sourcefabric.org/superdesk/license - from enum import Enum + from flask import request, json from eve.utils import ParsedRequest from eve.render import send_response @@ -17,6 +17,7 @@ 
from superdesk import Resource, get_resource_service, Blueprint, blueprint from superdesk.auth.decorator import blueprint_auth +from planning.utils import get_first_related_event_id_for_planning from planning.search.queries.elastic import ElasticQuery, field_exists @@ -43,7 +44,7 @@ class PlanningLockRepos(Enum): "_id", "type", "recurrence_id", - "event_item", + "related_events", "lock_time", "lock_action", "lock_user", @@ -98,10 +99,11 @@ def _get_planning_module_locks(): "action": item.get("lock_action"), "time": item.get("lock_time"), } + primary_event_id = get_first_related_event_id_for_planning(item, "primary") if item.get("recurrence_id"): locks["recurring"][lock["item_id"]] = lock - elif item.get("event_item"): - locks["event"][item["event_item"]] = lock + elif primary_event_id is not None: + locks["event"][primary_event_id] = lock else: locks[item["type"]][lock["item_id"]] = lock diff --git a/server/planning/prod_api/assignments/service.py b/server/planning/prod_api/assignments/service.py index 083ba37fc..d74250841 100644 --- a/server/planning/prod_api/assignments/service.py +++ b/server/planning/prod_api/assignments/service.py @@ -8,12 +8,14 @@ # AUTHORS and LICENSE files distributed with this source code, or # at https://www.sourcefabric.org/superdesk/license +from typing import Union from eve.utils import config from superdesk import get_resource_service from superdesk.es_utils import get_docs from prod_api.service import ProdApiService +from planning.types import ArchiveItem, Assignment from .utils import ( construct_content_link, get_news_item_for_assignment, @@ -21,22 +23,19 @@ ) from planning.prod_api.common import excluded_lock_fields from planning.prod_api.planning.utils import construct_planning_link -from planning.prod_api.events.utils import construct_event_link +from planning.prod_api.events.utils import add_related_event_links class AssignmentsService(ProdApiService): excluded_fields = ProdApiService.excluded_fields | excluded_lock_fields - 
def _process_fetched_object(self, doc): + def _process_fetched_object(self, doc: Assignment): super()._process_fetched_object(doc) content_items = get_news_item_for_assignment(doc[config.ID_FIELD]) if doc.get(config.LINKS): doc[config.LINKS]["planning"] = construct_planning_link(doc["planning_item"]) - - planning = get_resource_service("planning").find_one(req=None, _id=doc["planning_item"]) - if planning.get("event_item"): - doc[config.LINKS]["event"] = construct_event_link(planning["event_item"]) + _add_related_event_links(doc, doc["planning_item"]) if content_items.count(): doc[config.LINKS]["content"] = [construct_content_link(item) for item in get_docs(content_items.hits)] @@ -47,18 +46,24 @@ def on_fetched_resource_archive(docs): on_fetched_item_archive(doc) -def on_fetched_item_archive(doc): - if doc.get("assignment_id"): - assignment = get_resource_service("assignments").find_one(req=None, _id=doc["assignment_id"]) - if assignment: - if doc.get(config.LINKS): - doc[config.LINKS].update( - { - "assignment": construct_assignment_link(assignment), - "planning": construct_planning_link(assignment["planning_item"]), - } - ) - - planning = get_resource_service("planning").find_one(req=None, _id=assignment["planning_item"]) - if planning.get("event_item"): - doc[config.LINKS]["event"] = construct_event_link(planning["event_item"]) +def on_fetched_item_archive(doc: ArchiveItem): + if not doc.get("assignment_id") or not doc.get(config.LINKS): + return + + assignment = get_resource_service("assignments").find_one(req=None, _id=doc["assignment_id"]) + if not assignment: + return + + doc[config.LINKS].update( + { + "assignment": construct_assignment_link(assignment), + "planning": construct_planning_link(assignment["planning_item"]), + } + ) + _add_related_event_links(doc, assignment["planning_item"]) + + +def _add_related_event_links(doc: Union[ArchiveItem, Assignment], planning_id: str): + planning = get_resource_service("planning").find_one(req=None, 
_id=planning_id) + if planning: + add_related_event_links(doc, planning) diff --git a/server/planning/prod_api/events/service.py b/server/planning/prod_api/events/service.py index 4f8cce4e7..c0c5f6568 100644 --- a/server/planning/prod_api/events/service.py +++ b/server/planning/prod_api/events/service.py @@ -10,7 +10,6 @@ from eve.utils import config -from superdesk import get_resource_service from prod_api.service import ProdApiService from planning.prod_api.common import excluded_lock_fields @@ -19,6 +18,7 @@ construct_assignment_links, ) from planning.prod_api.planning.utils import construct_planning_link +from planning.utils import get_related_planning_for_events class EventsService(ProdApiService): @@ -27,19 +27,19 @@ class EventsService(ProdApiService): def _process_fetched_object(self, doc): super()._process_fetched_object(doc) - planning_service = get_resource_service("planning") - plannings = list(planning_service.find(where={"event_item": doc.get("guid")})) + if not doc.get(config.LINKS): + return + plannings = get_related_planning_for_events([doc[config.ID_FIELD]], "primary") if len(plannings): assignment_ids = [] for plan in plannings: assignment_ids.extend(get_assignment_ids_from_planning(plan)) - if doc.get(config.LINKS): - doc[config.LINKS]["plannings"] = [construct_planning_link(item[config.ID_FIELD]) for item in plannings] + doc[config.LINKS]["plannings"] = [construct_planning_link(item[config.ID_FIELD]) for item in plannings] - if len(assignment_ids): - doc[config.LINKS]["assignments"] = construct_assignment_links(assignment_ids) + if len(assignment_ids): + doc[config.LINKS]["assignments"] = construct_assignment_links(assignment_ids) class EventsHistoryService(ProdApiService): diff --git a/server/planning/prod_api/events/utils.py b/server/planning/prod_api/events/utils.py index 23bafb8b0..d788f43a0 100644 --- a/server/planning/prod_api/events/utils.py +++ b/server/planning/prod_api/events/utils.py @@ -8,6 +8,12 @@ # AUTHORS and LICENSE files 
distributed with this source code, or # at https://www.sourcefabric.org/superdesk/license +from typing import Union + +from eve.utils import config + +from planning.types import ArchiveItem, Planning, Assignment +from planning.utils import get_related_event_links_for_planning from .resource import EventsResource @@ -16,3 +22,12 @@ def construct_event_link(event_id: str): "title": EventsResource.resource_title, "href": f"{EventsResource.url}/{event_id}", } + + +def add_related_event_links(item: Union[ArchiveItem, Assignment, Planning], planning: Planning): + for related_event in get_related_event_links_for_planning(planning): + event_link = construct_event_link(related_event["_id"]) + if related_event["link_type"] == "primary" and not item[config.LINKS]["event"]: + item[config.LINKS]["event"] = event_link + else: + item[config.LINKS].setdefault("related_events", []).append(event_link) diff --git a/server/planning/prod_api/planning/service.py b/server/planning/prod_api/planning/service.py index 2bc8cc817..95a8bd2e4 100644 --- a/server/planning/prod_api/planning/service.py +++ b/server/planning/prod_api/planning/service.py @@ -12,13 +12,14 @@ from prod_api.service import ProdApiService +from planning.types import Planning from planning.common import sync_assignment_details_to_coverages from planning.prod_api.common import excluded_lock_fields from planning.prod_api.assignments.utils import ( get_assignment_ids_from_planning, construct_assignment_links, ) -from planning.prod_api.events.utils import construct_event_link +from planning.prod_api.events.utils import add_related_event_links class PlanningService(ProdApiService): @@ -28,14 +29,12 @@ class PlanningService(ProdApiService): | excluded_lock_fields ) - def _process_fetched_object(self, doc): + def _process_fetched_object(self, doc: Planning): super()._process_fetched_object(doc) sync_assignment_details_to_coverages(doc) if doc.get(config.LINKS): - if doc.get("event_item"): - doc[config.LINKS]["event"] = 
construct_event_link(doc["event_item"]) - + add_related_event_links(doc, doc) assignment_ids = get_assignment_ids_from_planning(doc) if len(assignment_ids): doc[config.LINKS]["assignments"] = construct_assignment_links(assignment_ids) diff --git a/server/planning/search/eventsplanning_search.py b/server/planning/search/eventsplanning_search.py index 6abdd0019..a719ad230 100644 --- a/server/planning/search/eventsplanning_search.py +++ b/server/planning/search/eventsplanning_search.py @@ -168,7 +168,7 @@ def _get_events_and_planning(self, request, query, search_filter): "size": int((5 * max_results) * math.ceil(page / 3)), } ) - req.args["projections"] = json.dumps(["_id", "type", "event_item"]) + req.args["projections"] = json.dumps(["_id", "type", "related_events"]) req.page = page req.max_results = max_results req.exec_on_fetched_resource = False # don't call on_fetched_resource diff --git a/server/planning/search/queries/combined.py b/server/planning/search/queries/combined.py index d565d19a1..53458040c 100644 --- a/server/planning/search/queries/combined.py +++ b/server/planning/search/queries/combined.py @@ -1,5 +1,16 @@ +# -*- coding: utf-8; -*- +# +# This file is part of Superdesk. +# +# Copyright 2014 Sourcefabric z.u. and contributors. 
+# +# For the full copyright and license information, please see the +# AUTHORS and LICENSE files distributed with this source code, or +# at https://www.sourcefabric.org/superdesk/license + from typing import Dict, Any, List, Callable +from planning.utils import get_related_event_ids_for_planning from planning.search.queries import elastic, events, planning, common from flask import current_app as app @@ -10,16 +21,20 @@ def construct_combined_view_data_query( ids = set() for item in items: item_id = item.get("_id") - event_id = item.get("event_item") + event_ids = get_related_event_ids_for_planning(item, "primary") if common.strtobool(params.get("include_associated_planning", False)): ids.add(item_id) - if event_id: + for event_id in event_ids: ids.add(event_id) else: # Combined search prioritises Events over Planning items # therefore if the Planning item is linked to an Event # then we want to return that Event instead - ids.add(event_id or item_id) + if len(event_ids): + for event_id in event_ids: + ids.add(event_id) + else: + ids.add(item_id) query = elastic.ElasticQuery() diff --git a/server/planning/search/queries/common.py b/server/planning/search/queries/common.py index 85d190e59..ee3401ffd 100644 --- a/server/planning/search/queries/common.py +++ b/server/planning/search/queries/common.py @@ -9,9 +9,9 @@ # at https://www.sourcefabric.org/superdesk/license from typing import Dict, Any, Optional, List, Callable, Union - import logging from datetime import datetime + from eve.utils import str_to_date as _str_to_date, date_to_str from superdesk import get_resource_service @@ -21,6 +21,7 @@ from apps.auth import get_user_id +from planning.utils import get_related_event_ids_for_planning from planning.search.queries import elastic from planning.common import POST_STATE, WORKFLOW_STATE from planning.content_profiles.utils import get_multilingual_fields @@ -223,7 +224,7 @@ def add_field_exist_query(): ids = set() event_items = set() recurrence_ids = set() - 
locked_items = search_service.get_locked_items(projections=["_id", "type", "recurrence_id", "event_item"]) + locked_items = search_service.get_locked_items(projections=["_id", "type", "recurrence_id", "related_events"]) if not locked_items.count(): # If there are no locked items there is no need to perform logic @@ -233,15 +234,17 @@ def add_field_exist_query(): return for item in locked_items: + related_primary_events = get_related_event_ids_for_planning(item, "primary") if item.get("recurrence_id"): # This item is associated with a recurring series of events # Add `recurrence_id` to the query (common field to both events & planning) recurrence_ids.add(item["recurrence_id"]) - elif item.get("event_item"): + elif len(related_primary_events): # This is a Planning item associated with an event - # Add queries for `event_item` and `_id` with the ID of the Event - event_items.add(item["event_item"]) - ids.add(item["event_item"]) + # Add queries for ``related_events`` and `_id` with the ID of the Event + for related_event_id in related_primary_events: + event_items.add(related_event_id) + ids.add(related_event_id) else: # This item is locked, add query for it's ID ids.add(item["_id"]) @@ -257,7 +260,15 @@ def add_field_exist_query(): if len(event_items): # Add query for associated Planning items of a locked Event - terms.append(elastic.terms(field="event_item", values=list(event_items))) + terms.append( + elastic.bool_and( + [ + elastic.terms(field="related_events._id", values=list(event_items)), + elastic.term(field="related_events.link_type", value="primary"), + ], + "related_events", + ) + ) if len(recurrence_ids): # Add query for any Event or Planning in a locked recurring series of events diff --git a/server/planning/search/queries/planning.py b/server/planning/search/queries/planning.py index a17b22e37..245ecc1e2 100644 --- a/server/planning/search/queries/planning.py +++ b/server/planning/search/queries/planning.py @@ -53,7 +53,9 @@ def 
search_no_agenda_assigned(params: Dict[str, Any], query: elastic.ElasticQuer def search_ad_hoc_planning(params: Dict[str, Any], query: elastic.ElasticQuery): if strtobool(params.get("ad_hoc_planning", False)): - query.must_not.append(elastic.field_exists("event_item")) + query.must_not.append( + elastic.nested("related_events", elastic.term(field="related_events.link_type", value="primary")) + ) def search_exclude_rescheduled_and_cancelled(params: Dict[str, Any], query: elastic.ElasticQuery): @@ -145,12 +147,17 @@ def search_featured(params: Dict[str, Any], query: elastic.ElasticQuery): def search_by_events(params: Dict[str, Any], query: elastic.ElasticQuery): event_ids = [str(event_id) for event_id in str_to_array(params.get("event_item"))] - num_ids = len(event_ids) - if num_ids == 1: - query.must.append(elastic.term(field="event_item", value=event_ids[0])) - elif num_ids > 1: - query.must.append(elastic.terms(field="event_item", values=event_ids)) + if len(event_ids): + query.must.append( + elastic.bool_and( + [ + elastic.terms(field="related_events._id", values=event_ids), + elastic.term(field="related_events.link_type", value="primary"), + ], + "related_events", + ) + ) def search_date(params: Dict[str, Any], query: elastic.ElasticQuery): diff --git a/server/planning/tests/output_formatters/json_planning_test.py b/server/planning/tests/output_formatters/json_planning_test.py index 91f1eba98..31634d384 100644 --- a/server/planning/tests/output_formatters/json_planning_test.py +++ b/server/planning/tests/output_formatters/json_planning_test.py @@ -1,10 +1,23 @@ -from planning.tests import TestCase +# -*- coding: utf-8; -*- +# +# This file is part of Superdesk. +# +# Copyright 2014 Sourcefabric z.u. and contributors. 
+# +# For the full copyright and license information, please see the +# AUTHORS and LICENSE files distributed with this source code, or +# at https://www.sourcefabric.org/superdesk/license + +from copy import deepcopy from unittest import mock -from planning.output_formatters.json_planning import JsonPlanningFormatter + import json -from copy import deepcopy from bson.objectid import ObjectId +from planning.tests import TestCase +from planning.output_formatters.json_planning import JsonPlanningFormatter +from planning.types import PlanningRelatedEventLink + @mock.patch( "superdesk.publish.subscribers.SubscribersService.generate_sequence_number", @@ -61,7 +74,12 @@ class JsonPlanningTestCase(TestCase): "planning_date": "2018-04-09T14:00:53.000Z", "headline": "Name of the event", "agendas": [1], - "event_item": "urn:newsml:localhost:2018-04-10T11:05:55.664317:e1301640-80a2-4df9-b4d9-91bbb4af7946", + "related_events": [ + PlanningRelatedEventLink( + _id="event_prim_1", + link_type="primary", + ) + ], "place": [ { "group": "Rest Of World", @@ -129,10 +147,10 @@ class JsonPlanningTestCase(TestCase): } ] - def format(self): + def format(self, item=None): with self.app.app_context(): formatter = JsonPlanningFormatter() - output = formatter.format(self.item, {"name": "Test Subscriber"})[0] + output = formatter.format(item or self.item, {"name": "Test Subscriber"})[0] output_item = json.loads(output[1]) return output_item @@ -352,42 +370,64 @@ def test_expand_delivery_uses_ingest_id(self): self.assertEqual(deliveries[0]["item_id"], ingest_id) def test_assigned_desk_user(self): - with self.app.app_context(): - item = deepcopy(self.item) - desk_id = ObjectId() - user_id = ObjectId() + item = deepcopy(self.item) + desk_id = ObjectId() + user_id = ObjectId() - item["coverages"][0]["assigned_to"].update( - desk=desk_id, - user=user_id, - ) + item["coverages"][0]["assigned_to"].update( + desk=desk_id, + user=user_id, + ) + with self.app.app_context(): self.app.data.insert( 
"desks", [{"_id": desk_id, "name": "sports", "email": "sports@example.com"}], ) - self.app.data.insert("users", [{"_id": user_id, "display_name": "John Doe", "email": "john@example.com"}]) - formatter = JsonPlanningFormatter() - with mock.patch.dict(self.app.config, {"PLANNING_JSON_ASSIGNED_INFO_EXTENDED": True}): - output = formatter.format(item, {"name": "Test Subscriber"})[0] - output_item = json.loads(output[1]) - coverage = output_item["coverages"][0] - assert coverage["assigned_user"] == { - "first_name": None, - "last_name": None, - "display_name": "John Doe", - "email": "john@example.com", - } - assert coverage["assigned_desk"] == { - "name": "sports", - "email": "sports@example.com", - } + with mock.patch.dict(self.app.config, {"PLANNING_JSON_ASSIGNED_INFO_EXTENDED": True}): + output_item = self.format(item) + coverage = output_item["coverages"][0] + assert coverage["assigned_user"] == { + "first_name": None, + "last_name": None, + "display_name": "John Doe", + "email": "john@example.com", + } + assert coverage["assigned_desk"] == { + "name": "sports", + "email": "sports@example.com", + } - # without config - output = formatter.format(item, {"name": "Test Subscriber"})[0] - output_item = json.loads(output[1]) - coverage = output_item["coverages"][0] - assert "email" not in coverage["assigned_user"] - assert "email" not in coverage["assigned_desk"] + # without config + output_item = self.format(item) + coverage = output_item["coverages"][0] + assert "email" not in coverage["assigned_user"] + assert "email" not in coverage["assigned_desk"] + + def test_related_primary_event_copies_to_event_item(self): + item = deepcopy(self.item) + self.assertEqual(self.format(item)["event_item"], "event_prim_1") + + item["related_events"] = [ + PlanningRelatedEventLink( + _id="event_sec_1", + link_type="secondary", + ), + PlanningRelatedEventLink( + _id="event_prim_1", + link_type="primary", + ), + ] + self.assertEqual(self.format(item)["event_item"], "event_prim_1") + + 
item["related_events"] = [ + PlanningRelatedEventLink( + _id="event_sec_1", + link_type="secondary", + ) + ] + self.assertIsNone(self.format(item).get("event_item")) + item.pop("related_events") + self.assertIsNone(self.format(item).get("event_item")) diff --git a/server/planning/types/__init__.py b/server/planning/types/__init__.py index 627d16e76..a00215154 100644 --- a/server/planning/types/__init__.py +++ b/server/planning/types/__init__.py @@ -15,6 +15,7 @@ UPDATE_METHOD = Literal["single", "future", "all"] +PLANNING_RELATED_EVENT_LINK_TYPE = Literal["primary", "secondary"] class StringFieldTranslation(TypedDict): @@ -45,7 +46,8 @@ class EmbeddedPlanning(TypedDict, total=False): coverages: Dict[str, EmbeddedCoverageItem] -# TODO: Implement proper types for these next 3 +# TODO: Implement proper types for the following +ArchiveItem = Dict[str, Any] Event = Dict[str, Any] Planning = Dict[str, Any] Coverage = Dict[str, Any] @@ -65,3 +67,12 @@ class EventRelatedItem(TypedDict, total=False): pubstatus: str language: str word_count: int + + +class PlanningRelatedEventLinkBase(TypedDict): + _id: str + link_type: PLANNING_RELATED_EVENT_LINK_TYPE + + +class PlanningRelatedEventLink(PlanningRelatedEventLinkBase, total=False): + recurrence_id: str diff --git a/server/planning/utils.py b/server/planning/utils.py index 07679f0f2..b0d3bd4c5 100644 --- a/server/planning/utils.py +++ b/server/planning/utils.py @@ -1,13 +1,33 @@ +# -*- coding: utf-8; -*- +# +# This file is part of Superdesk. +# +# Copyright 2014 Sourcefabric z.u. and contributors. 
+# +# For the full copyright and license information, please see the +# AUTHORS and LICENSE files distributed with this source code, or +# at https://www.sourcefabric.org/superdesk/license + from typing import Union, List, Dict, Any, TypedDict, Optional +import logging +from datetime import datetime + from bson.objectid import ObjectId from bson.errors import InvalidId -from datetime import datetime +from flask import current_app as app, json from flask_babel import lazy_gettext -from eve.utils import str_to_date +from eve.utils import str_to_date, ParsedRequest, config import arrow -from flask import current_app as app import pytz +from superdesk import get_resource_service +from superdesk.json_utils import cast_item + +from planning.types import Event, Planning, PLANNING_RELATED_EVENT_LINK_TYPE, PlanningRelatedEventLink + + +logger = logging.getLogger(__name__) + class FormattedContact(TypedDict): name: str @@ -100,3 +120,96 @@ def get_event_formatted_dates(event: Dict[str, Any]) -> str: return "{} {}".format(time_short(start, tz), date_short(start, tz)) return "{} - {}, {}".format(time_short(start, tz), time_short(end, tz), date_short(start, tz)) + + +def get_related_planning_for_events( + event_ids: List[str], + link_type: Optional[PLANNING_RELATED_EVENT_LINK_TYPE] = None, + exclude_planning_ids: Optional[List[str]] = None, +) -> List[Planning]: + related_events_filters: List[Dict[str, Any]] = [{"terms": {"related_events._id": event_ids}}] + if link_type is not None: + related_events_filters.append({"term": {"related_events.link_type": link_type}}) + + bool_query: Dict[str, Any] = { + "filter": { + "nested": { + "path": "related_events", + "query": {"bool": {"filter": related_events_filters}}, + }, + } + } + + if len(exclude_planning_ids or []) > 0: + bool_query["must_not"] = {"terms": {"_id": exclude_planning_ids}} + + req = ParsedRequest() + req.args = {"source": json.dumps({"query": {"bool": bool_query}})} + + return [cast_item(item) for item in 
get_resource_service("planning").get(req=req, lookup=None)] + + +def event_has_planning_items(event_id: str, link_type: Optional[PLANNING_RELATED_EVENT_LINK_TYPE] = None) -> bool: + return len(get_related_planning_for_events([event_id], link_type)) > 0 + + +def get_related_event_links_for_planning( + plan: Planning, link_type: Optional[PLANNING_RELATED_EVENT_LINK_TYPE] = None +) -> List[PlanningRelatedEventLink]: + related_events: List[PlanningRelatedEventLink] = plan.get("related_events") or [] + return ( + related_events + if link_type is None + else [related_event for related_event in related_events if related_event["link_type"] == link_type] + ) + + +def get_related_event_ids_for_planning( + plan: Planning, link_type: Optional[PLANNING_RELATED_EVENT_LINK_TYPE] = None +) -> List[str]: + return [related_event["_id"] for related_event in get_related_event_links_for_planning(plan, link_type)] + + +def get_first_related_event_id_for_planning( + plan: Planning, link_type: Optional[PLANNING_RELATED_EVENT_LINK_TYPE] = None +) -> Optional[str]: + try: + return get_related_event_links_for_planning(plan, link_type)[0]["_id"] + except (KeyError, IndexError, TypeError): + return None + + +def get_related_event_items_for_planning( + plan: Planning, link_type: Optional[PLANNING_RELATED_EVENT_LINK_TYPE] = None +) -> List[Event]: + event_ids = get_related_event_ids_for_planning(plan, link_type) + if not len(event_ids): + return [] + + events = list(get_resource_service("events").find(where={"_id": {"$in": event_ids}})) + + if len(event_ids) != len(events): + logger.warning( + "Not all Events were found for the Planning item", + extra=dict( + plan_id=plan[config.ID_FIELD], + event_ids_requested=event_ids, + events_ids_found=[event[config.ID_FIELD] for event in events], + ), + ) + + return events + + +def get_first_event_item_for_planning_id( + planning_id: str, link_type: Optional[PLANNING_RELATED_EVENT_LINK_TYPE] = None +) -> Optional[Event]: + planning_item = 
get_resource_service("planning").find_one(req=None, _id=planning_id) + if not planning_item: + return None + + first_event_id = get_first_related_event_id_for_planning(planning_item, link_type) + if not first_event_id: + return None + + return get_resource_service("events").find_one(req=None, _id=first_event_id) diff --git a/server/tests/prod_api/conftest.py b/server/tests/prod_api/conftest.py index 1ed8f1223..7a9c70e5c 100644 --- a/server/tests/prod_api/conftest.py +++ b/server/tests/prod_api/conftest.py @@ -1 +1,14 @@ +from copy import deepcopy + from prod_api.conftest import * # noqa + +from planning.events.events_schema import events_schema +from planning.planning.planning_schema import planning_schema +from planning.prod_api.events import EventsResource +from planning.prod_api.planning import PlanningResource + + +# Copy schemas onto ProdAPI resources so elastic mapping is correct, otherwise certain queries will fail +# This will not happen in a production environment, as the index/types should already be created +EventsResource.schema = deepcopy(events_schema) +PlanningResource.schema = deepcopy(planning_schema)