Merge branch 'develop' into authoring-react-post-broadcasting
petrjasek committed Feb 1, 2024
2 parents c4fa3cd + de8905c commit b75701f
Showing 10 changed files with 253 additions and 145 deletions.
2 changes: 1 addition & 1 deletion e2e/server/core-requirements.txt
@@ -1,3 +1,3 @@
gunicorn==19.7.1
honcho==1.0.1
git+https://github.com/superdesk/superdesk-core.git@develop#egg=superdesk-core
git+https://github.com/superdesk/superdesk-core.git@release/2.7#egg=superdesk-core
314 changes: 177 additions & 137 deletions package-lock.json

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion package.json
@@ -78,6 +78,6 @@
"moment": "^2.29.4",
"moment-timezone": "^0.5.41",
"react": "^16.9.0",
"superdesk-ui-framework": "^3.0.59"
"superdesk-ui-framework": "^3.0.66"
}
}
1 change: 1 addition & 0 deletions server/features/events_postpone.feature
@@ -499,6 +499,7 @@ Feature: Events Postpone

@auth
@notification
@planning_cvs
Scenario: Published event gets updated after postpone
Given we have sessions "/sessions"
Given "desks"
3 changes: 1 addition & 2 deletions server/features/planning_duplicate.feature
@@ -357,8 +357,7 @@ Feature: Duplicate Planning
},
"assigned_to": {
"desk": "#desks._id#",
"user": "#CONTEXT_USER_ID#",
"assignment_id": "aaaaaaaaaaaaaaaaaaaaaaaa"
"user": "#CONTEXT_USER_ID#"
}
}],
"expired": true
18 changes: 18 additions & 0 deletions server/planning/output_formatters/json_planning.py
@@ -12,6 +12,7 @@
from flask import current_app as app
from superdesk.publish.formatters import Formatter
import superdesk
from apps.archive.common import ARCHIVE
import json
from superdesk.utils import json_serialize_datetime_objectId
from copy import deepcopy
@@ -171,11 +172,28 @@ def _expand_delivery(self, coverage):
)
deliveries = list(delivery_service.get(req=None, lookup={"coverage_id": coverage.get("coverage_id")}))

# Get the associated article(s) linked to the coverage(s)
query = {"$and": [{"_id": {"$in": [item["item_id"] for item in deliveries]}}]}
articles = {item["_id"]: item for item in get_resource_service(ARCHIVE).get_from_mongo(req=None, lookup=query)}

# Check to see if in this delivery chain, whether the item has been published at least once
item_never_published = True
for delivery in deliveries:
for f in remove_fields:
delivery.pop(f, None)

# TODO: This is a hack, need to find a better way of doing this
# If the linked article was auto-published, then use the ``ingest_id`` for the article ID
# This is required when the article was published using the ``NewsroomNinjsFormatter``
# Otherwise this coverage in Newshub would point to a non-existing wire item
article = articles.get(delivery["item_id"])
if (
article is not None
and article.get("ingest_id")
and (article.get("auto_publish") or (article.get("extra") or {}).get("publish_ingest_id_as_guid"))
):
delivery["item_id"] = article["ingest_id"]

if delivery.get("item_state") == CONTENT_STATE.PUBLISHED:
item_never_published = False

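The hack noted in the comment above swaps the archive id for the article's ``ingest_id`` whenever the linked article was auto-published or carries the ``publish_ingest_id_as_guid`` flag. Below is a minimal standalone sketch of that remapping rule; the helper name ``resolve_delivery_item_id`` is hypothetical and not part of the formatter module.

# Illustrative sketch only: isolates the item_id remapping rule added to
# _expand_delivery above; the helper name is hypothetical.
from typing import Any, Dict, Optional


def resolve_delivery_item_id(delivery: Dict[str, Any], article: Optional[Dict[str, Any]]) -> str:
    """Return the wire id a Newshub coverage should point to.

    Fall back to the article's ``ingest_id`` when the linked article was
    auto-published (e.g. via ``NewsroomNinjsFormatter``) or explicitly
    flagged with ``publish_ingest_id_as_guid``; otherwise keep the archive
    id stored on the delivery record.
    """
    if (
        article is not None
        and article.get("ingest_id")
        and (article.get("auto_publish") or (article.get("extra") or {}).get("publish_ingest_id_as_guid"))
    ):
        return article["ingest_id"]
    return delivery["item_id"]


# Example: an auto-published article keeps its ingested guid on the wire.
delivery = {"item_id": "archive-id-1", "item_state": "published"}
article = {"_id": "archive-id-1", "ingest_id": "urn:newsml:ingest-1", "auto_publish": True}
assert resolve_delivery_item_id(delivery, article) == "urn:newsml:ingest-1"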
22 changes: 19 additions & 3 deletions server/planning/planning/planning.py
@@ -65,7 +65,7 @@
from superdesk import Resource
from lxml import etree
from io import BytesIO
from planning.signals import planning_created
from planning.signals import planning_created, planning_ingested

logger = logging.getLogger(__name__)

@@ -85,6 +85,8 @@ def post_in_mongo(self, docs, **kwargs):
resolve_document_etag(docs, self.datasource)
ids = self.backend.create_in_mongo(self.datasource, docs, **kwargs)
self.on_created(docs)
for doc in docs:
planning_ingested.send(self, item=doc)
return ids

def patch_in_mongo(self, id, document, original):
@@ -93,6 +95,7 @@ def patch_in_mongo(self, id, document, original):
update_ingest_on_patch(document, original)
response = self.backend.update_in_mongo(self.datasource, id, document, original)
self.on_updated(document, original, from_ingest=True)
planning_ingested.send(self, item=document, original=original)
return response

def is_new_version(self, new_item, old_item):
@@ -463,6 +466,9 @@ def get_all_items_in_relationship(self, item):
return all_items

def remove_coverages(self, updates, original):
if "coverages" not in updates:
return

for coverage in (original or {}).get("coverages") or []:
updated_coverage = next(
(
@@ -523,14 +529,21 @@ def remove_coverage_entity(self, coverage_entity, original_planning, entity_type
self._create_update_assignment(original_planning, {}, updated_coverage_entity, coverage_entity)

def add_coverages(self, updates, original):
if "coverages" not in updates:
return

planning_date = original.get("planning_date") or updates.get("planning_date")
original_coverage_ids = [
coverage["coverage_id"] for coverage in original.get("coverages") or [] if coverage.get("coverage_id")
]
for coverage in updates.get("coverages") or []:
coverage_id = coverage.get("coverage_id", "")
if not coverage_id or TEMP_ID_PREFIX in coverage_id:
if not coverage_id or TEMP_ID_PREFIX in coverage_id or coverage_id not in original_coverage_ids:
if "duplicate" in coverage_id:
self.duplicate_xmp_file(coverage)
# coverage to be created
coverage["coverage_id"] = generate_guid(type=GUID_NEWSML)
if not coverage_id or TEMP_ID_PREFIX in coverage_id:
coverage["coverage_id"] = generate_guid(type=GUID_NEWSML)
coverage["firstcreated"] = utcnow()

# Make sure the coverage has a ``scheduled`` date
@@ -600,6 +613,9 @@ def update_scheduled_updates(self, updates, original, coverage, original_coverag
self._create_update_assignment(original, updates, s, original_scheduled_update, coverage)

def update_coverages(self, updates, original):
if "coverages" not in updates:
return

for coverage in updates.get("coverages") or []:
coverage_id = coverage.get("coverage_id")
original_coverage = next(
2 changes: 2 additions & 0 deletions server/planning/signals.py
@@ -12,8 +12,10 @@

__all__ = [
"planning_created",
"planning_ingested",
]

signals = blinker.Namespace()

planning_created = signals.signal("planning:created")
planning_ingested = signals.signal("planning:ingested")
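For context on how the new signal might be consumed, here is a hypothetical subscriber sketch using blinker's ``connect``. The handler name and logging are illustrative only; the signal is sent from ``post_in_mongo`` / ``patch_in_mongo`` above with an ``item`` keyword argument (plus ``original`` on ingest updates).

# Hypothetical consumer sketch; not part of this commit.
import logging

from planning.signals import planning_ingested

logger = logging.getLogger(__name__)


def on_planning_ingested(sender, item, original=None, **kwargs):
    """React to a planning item created or updated via ingest."""
    if original is None:
        logger.info("planning item ingested: %s", item.get("guid") or item.get("_id"))
    else:
        logger.info("ingested planning item updated: %s", item.get("_id") or original.get("_id"))


# Register once at application start-up (e.g. in a module's init_app).
planning_ingested.connect(on_planning_ingested)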
32 changes: 32 additions & 0 deletions server/planning/tests/output_formatters/json_planning_test.py
@@ -319,6 +319,38 @@ def test_matching_product_ids(self):
output_item = json.loads(output[1])
self.assertEqual(output_item["products"], [{"code": "prod-type-planning", "name": "planning-only"}])

def test_expand_delivery_uses_ingest_id(self):
self.app.data.insert("assignments", self.assignment)
self.app.data.insert("delivery", self.delivery)
formatter = JsonPlanningFormatter()
item_id = self.delivery[0]["item_id"]
ingest_id = "urn:newsml:localhost:2024-01-24-ingest-1"
article = {
"_id": item_id,
"type": "text",
"headline": "test headline",
"slugline": "test slugline",
"ingest_id": ingest_id,
}

self.app.data.insert("archive", [article])
deliveries, _ = formatter._expand_delivery(deepcopy(self.item["coverages"][0]))
self.assertNotEqual(deliveries[0]["item_id"], ingest_id)

article = self.app.data.find_one("archive", req=None, _id=item_id)
self.app.data.update("archive", item_id, {"auto_publish": True}, article)
deliveries, _ = formatter._expand_delivery(deepcopy(self.item["coverages"][0]))
self.assertEqual(deliveries[0]["item_id"], ingest_id)

article = self.app.data.find_one("archive", req=None, _id=item_id)
updates = {
"auto_publish": None,
"extra": {"publish_ingest_id_as_guid": True},
}
self.app.data.update("archive", item_id, updates, article)
deliveries, _ = formatter._expand_delivery(deepcopy(self.item["coverages"][0]))
self.assertEqual(deliveries[0]["item_id"], ingest_id)

def test_assigned_desk_user(self):
with self.app.app_context():
item = deepcopy(self.item)
2 changes: 1 addition & 1 deletion server/requirements.txt
@@ -19,4 +19,4 @@ pytest-env

-e .
# Install in editable state so we get feature fixtures
-e git+https://github.com/superdesk/superdesk-core.git@develop#egg=superdesk-core
-e git+https://github.com/superdesk/superdesk-core.git@release/2.7#egg=superdesk-core
