Query refactoring (#102)

* Rename query modules.
* storage: save_publication_report: Create publications.
* Remove placeholder PublicationStatus.UNSAVED.
* Minor fixes.
parent: ed8f752fe6
commit: 1efa191771
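The rename, in import terms — an illustrative sketch assembled from the hunks below, not an exhaustive map:

    # before: read_query / save_query / model_creation
    from mobilizon_reshare.storage.query.read_query import events_with_status
    from mobilizon_reshare.storage.query.save_query import update_publishers

    # after: read-side queries live in `read`, write-side in `write`;
    # model_creation is folded into read.build_publications
    from mobilizon_reshare.storage.query.read import events_with_status, build_publications
    from mobilizon_reshare.storage.query.write import update_publishers, save_publication_report
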
@@ -6,7 +6,7 @@ from arrow import Arrow
 from mobilizon_reshare.event.event import EventPublicationStatus
 from mobilizon_reshare.event.event import MobilizonEvent
 from mobilizon_reshare.event.event_selection_strategies import select_unpublished_events
-from mobilizon_reshare.storage.query.read_query import (
+from mobilizon_reshare.storage.query.read import (
     get_published_events,
     events_with_status,
     get_all_events,

@@ -15,7 +15,7 @@ from mobilizon_reshare.publishers.platforms.platform_mapping import (
     get_publisher_class,
     get_formatter_class,
 )
-from mobilizon_reshare.storage.query.read_query import events_with_status
+from mobilizon_reshare.storage.query.read import events_with_status
 
 logger = logging.getLogger(__name__)
 
@@ -33,7 +33,7 @@ async def recap() -> Optional[BaseCoordinatorReport]:
     events_to_recap = await select_events_to_recap()
 
     if events_to_recap:
-        logger.debug(f"Found {len(events_to_recap)} events to recap.")
+        logger.info(f"Found {len(events_to_recap)} events to recap.")
         recap_publications = [
             RecapPublication(
                 get_publisher_class(publisher)(),
@@ -49,4 +49,4 @@ async def recap() -> Optional[BaseCoordinatorReport]:
             PublicationFailureNotifiersCoordinator(report).notify_failure()
         return reports
     else:
-        logger.debug("Found no events")
+        logger.info("Found no events")

@@ -2,16 +2,15 @@ import logging.config
 
 from mobilizon_reshare.event.event_selection_strategies import select_event_to_publish
 from mobilizon_reshare.mobilizon.events import get_unpublished_events
-from mobilizon_reshare.publishers.abstract import EventPublication
 from mobilizon_reshare.publishers.coordinator import (
     PublicationFailureNotifiersCoordinator,
 )
 from mobilizon_reshare.publishers.coordinator import PublisherCoordinator
-from mobilizon_reshare.storage.query.model_creation import (
-    create_event_publication_models,
+from mobilizon_reshare.storage.query.read import (
+    get_published_events,
+    build_publications,
 )
-from mobilizon_reshare.storage.query.read_query import get_published_events
-from mobilizon_reshare.storage.query.save_query import (
+from mobilizon_reshare.storage.query.write import (
     create_unpublished_events,
     save_publication_report,
 )
@@ -46,11 +45,10 @@ async def start():
     if event:
         logger.info(f"Event to publish found: {event.name}")
 
-        models = await create_event_publication_models(event)
-        publications = list(EventPublication.from_orm(m, event) for m in models)
+        publications = await build_publications(event)
        reports = PublisherCoordinator(publications).run()
 
-        await save_publication_report(reports, models)
+        await save_publication_report(reports)
         for report in reports.reports:
             if not report.succesful:
                 PublicationFailureNotifiersCoordinator(report,).notify_failure()

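The rewritten start() path in miniature — a sketch under the assumption that the database connection is initialized and an event has already been selected; the names are taken from the imports above:

    from mobilizon_reshare.event.event import MobilizonEvent
    from mobilizon_reshare.publishers.coordinator import PublisherCoordinator
    from mobilizon_reshare.storage.query.read import build_publications
    from mobilizon_reshare.storage.query.write import save_publication_report

    async def publish(event: MobilizonEvent) -> None:
        # One in-memory EventPublication per active publisher; nothing is saved yet.
        publications = await build_publications(event)
        # Attempt every publication and collect the per-publisher reports.
        reports = PublisherCoordinator(publications).run()
        # Persist one Publication row per report (replaces the old pattern of
        # pre-creating UNSAVED models and updating them afterwards).
        await save_publication_report(reports)
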
@@ -3,7 +3,6 @@ from tortoise.models import Model
 
 from mobilizon_reshare.models.publication import PublicationStatus, Publication
 from mobilizon_reshare.models.publisher import Publisher
-from mobilizon_reshare.publishers import get_active_publishers
 
 
 class Event(Model):
@@ -31,19 +30,8 @@
     class Meta:
         table = "event"
 
-    async def build_unsaved_publication_models(self):
-        result = []
-        publishers = get_active_publishers()
-        for publisher in publishers:
-            result.append(
-                await self.build_publication_by_publisher_name(
-                    publisher, PublicationStatus.UNSAVED
-                )
-            )
-        return result
-
     async def build_publication_by_publisher_name(
-        self, publisher_name: str, status: PublicationStatus
+        self, publisher_name: str, status: PublicationStatus = PublicationStatus.FAILED
     ) -> Publication:
         publisher = await Publisher.filter(name=publisher_name).first()
         return Publication(

@@ -5,9 +5,8 @@ from tortoise.models import Model
 
 
 class PublicationStatus(IntEnum):
-    UNSAVED = 0
-    FAILED = 1
-    COMPLETED = 2
+    FAILED = 0
+    COMPLETED = 1
 
 
 class Publication(Model):

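Dropping UNSAVED renumbers the remaining members, so the integers that reach logs and stored rows change meaning (FAILED 1 → 0, COMPLETED 2 → 1); the test update further down ("failed with status: 0.") reflects this. A self-contained restatement of the new enum:

    from enum import IntEnum

    class PublicationStatus(IntEnum):
        FAILED = 0
        COMPLETED = 1

    # Publications are now born FAILED (see the new default argument in
    # models/event.py above) and only marked COMPLETED by a successful report,
    # so a separate "not yet saved" placeholder value is unnecessary.
    assert int(PublicationStatus.FAILED) == 0
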
@@ -1,12 +1,10 @@
-import asyncio
-import atexit
 import logging
 from pathlib import Path
 
 from tortoise import Tortoise
 
 from mobilizon_reshare.config.publishers import publisher_names
-from mobilizon_reshare.storage.query.save_query import update_publishers
+from mobilizon_reshare.storage.query.write import update_publishers
 
 logger = logging.getLogger(__name__)
 

@@ -1,14 +0,0 @@
-from tortoise.transactions import atomic
-
-from mobilizon_reshare.event.event import MobilizonEvent
-from mobilizon_reshare.models.event import Event
-from mobilizon_reshare.models.publication import Publication
-from mobilizon_reshare.storage.query import CONNECTION_NAME
-from mobilizon_reshare.storage.query.read_query import prefetch_event_relations
-
-
-@atomic(CONNECTION_NAME)
-async def create_event_publication_models(event: MobilizonEvent) -> list[Publication]:
-    return await (
-        await prefetch_event_relations(Event.filter(mobilizon_id=event.mobilizon_id))
-    )[0].build_unsaved_publication_models()

@@ -1,4 +1,4 @@
-from typing import Iterable, Optional, Dict, List
+from typing import Iterable, Optional
 from uuid import UUID
 
 from arrow import Arrow
@@ -8,18 +8,11 @@ from tortoise.transactions import atomic
 from mobilizon_reshare.event.event import MobilizonEvent, EventPublicationStatus
 from mobilizon_reshare.models.event import Event
 from mobilizon_reshare.models.publication import Publication, PublicationStatus
+from mobilizon_reshare.publishers import get_active_publishers
+from mobilizon_reshare.publishers.abstract import EventPublication
 from mobilizon_reshare.storage.query import CONNECTION_NAME
 
 
-async def get_mobilizon_event_publications(
-    event: MobilizonEvent,
-) -> Iterable[Publication]:
-    models = await prefetch_event_relations(
-        Event.filter(mobilizon_id=event.mobilizon_id)
-    )
-    return models[0].publications
-
-
 async def get_published_events(
     from_date: Optional[Arrow] = None, to_date: Optional[Arrow] = None
 ) -> Iterable[MobilizonEvent]:
@@ -100,7 +93,7 @@ async def publications_with_status(
     event_mobilizon_id: Optional[UUID] = None,
     from_date: Optional[Arrow] = None,
     to_date: Optional[Arrow] = None,
-) -> Dict[UUID, Publication]:
+) -> dict[UUID, Publication]:
     query = Publication.filter(status=status)
 
     if event_mobilizon_id:
@@ -118,7 +111,7 @@ async def publications_with_status(
 
 async def events_without_publications(
     from_date: Optional[Arrow] = None, to_date: Optional[Arrow] = None,
-) -> List[MobilizonEvent]:
+) -> list[MobilizonEvent]:
     query = Event.filter(publications__id=None)
     events = await prefetch_event_relations(
         _add_date_window(query, "begin_datetime", from_date, to_date)
@@ -126,7 +119,7 @@ async def events_without_publications(
     return list(map(MobilizonEvent.from_model, events))
 
 
-def _remove_duplicated_events(events: List[MobilizonEvent]):
+def _remove_duplicated_events(events: list[MobilizonEvent]) -> list[MobilizonEvent]:
     """Remove duplicates based on mobilizon_id"""
     result = []
     seen_ids = set()
@@ -139,7 +132,7 @@ def _remove_duplicated_events(events: List[MobilizonEvent]):
 
 async def get_unpublished_events(
     unpublished_mobilizon_events: Iterable[MobilizonEvent],
-) -> List[MobilizonEvent]:
+) -> list[MobilizonEvent]:
     """
     Returns all the unpublished events, removing duplicates that are present both in the DB and in the mobilizon query
     """
@@ -148,3 +141,13 @@ async def get_unpublished_events(
         db_unpublished_events
     )
     return _remove_duplicated_events(all_unpublished_events)
+
+
+@atomic(CONNECTION_NAME)
+async def build_publications(event: MobilizonEvent) -> list[EventPublication]:
+    event_model = await Event.filter(mobilizon_id=event.mobilizon_id).first()
+    models = [
+        await event_model.build_publication_by_publisher_name(name)
+        for name in get_active_publishers()
+    ]
+    return list(EventPublication.from_orm(m, event) for m in models)

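How the new build_publications behaves, as a sketch assuming an initialized Tortoise connection, a stored Event, and configured active publishers: it returns in-memory EventPublication objects, while rows are only created later by write.save_publication_report.

    from mobilizon_reshare.event.event import MobilizonEvent
    from mobilizon_reshare.storage.query.read import build_publications

    async def inspect_publications(event: MobilizonEvent) -> None:
        publications = await build_publications(event)
        for publication in publications:
            # publication.publisher / publication.formatter come from the
            # active-publisher configuration; publication.id is already set,
            # which is what lets save_publication_report create the row later.
            print(publication.id, publication.publisher.name)
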
@@ -1,53 +0,0 @@
-import logging
-from typing import List, Iterable, Optional
-
-import arrow
-from tortoise.transactions import atomic
-
-from mobilizon_reshare.event.event import MobilizonEvent
-from mobilizon_reshare.models.publication import Publication
-from mobilizon_reshare.models.publisher import Publisher
-from mobilizon_reshare.publishers.coordinator import PublisherCoordinatorReport
-from mobilizon_reshare.storage.query import CONNECTION_NAME
-from mobilizon_reshare.storage.query.read_query import get_unpublished_events
-
-
-@atomic(CONNECTION_NAME)
-async def save_publication_report(
-    coordinator_report: PublisherCoordinatorReport,
-    publication_models: List[Publication],
-) -> None:
-    publication_models = {m.id: m for m in publication_models}
-    for publication_report in coordinator_report.reports:
-        publication_id = publication_report.publication.id
-        publication_models[publication_id].status = publication_report.status
-        publication_models[publication_id].reason = publication_report.reason
-        publication_models[publication_id].timestamp = arrow.now().datetime
-
-        await publication_models[publication_id].save()
-
-
-@atomic(CONNECTION_NAME)
-async def create_unpublished_events(
-    unpublished_mobilizon_events: Iterable[MobilizonEvent],
-) -> List[MobilizonEvent]:
-    # We store only new events, i.e. events whose mobilizon_id wasn't found in the DB.
-
-    unpublished_events = await get_unpublished_events(unpublished_mobilizon_events)
-    for event in unpublished_events:
-        await event.to_model().save()
-
-    return unpublished_events
-
-
-async def create_publisher(name: str, account_ref: Optional[str] = None) -> None:
-    await Publisher.create(name=name, account_ref=account_ref)
-
-
-@atomic(CONNECTION_NAME)
-async def update_publishers(names: Iterable[str],) -> None:
-    names = set(names)
-    known_publisher_names = set(p.name for p in await Publisher.all())
-    for name in names.difference(known_publisher_names):
-        logging.info(f"Creating {name} publisher")
-        await create_publisher(name)

@@ -0,0 +1,79 @@
+import logging
+from typing import Iterable, Optional
+
+import arrow
+from tortoise.transactions import atomic
+
+from mobilizon_reshare.event.event import MobilizonEvent
+from mobilizon_reshare.models.event import Event
+from mobilizon_reshare.models.publication import Publication
+from mobilizon_reshare.models.publisher import Publisher
+from mobilizon_reshare.publishers.coordinator import PublisherCoordinatorReport
+from mobilizon_reshare.storage.query import CONNECTION_NAME
+from mobilizon_reshare.storage.query.read import events_without_publications
+
+
+@atomic(CONNECTION_NAME)
+async def save_publication_report(
+    coordinator_report: PublisherCoordinatorReport,
+) -> None:
+    """
+    Store a publication process outcome
+    """
+    for publication_report in coordinator_report.reports:
+        event = await Event.filter(
+            mobilizon_id=publication_report.publication.event.mobilizon_id
+        ).first()
+        publisher = await Publisher.filter(
+            name=publication_report.publication.publisher.name
+        ).first()
+        await Publication.create(
+            id=publication_report.publication.id,
+            event_id=event.id,
+            publisher_id=publisher.id,
+            status=publication_report.status,
+            reason=publication_report.reason,
+            timestamp=arrow.now().datetime,
+        )
+
+
+@atomic(CONNECTION_NAME)
+async def create_unpublished_events(
+    events_from_mobilizon: Iterable[MobilizonEvent],
+) -> list[MobilizonEvent]:
+    """
+    Compute the difference between remote and local events and store it.
+
+    Returns the unpublished events merged state.
+    """
+    # We store only new events, i.e. events whose mobilizon_id wasn't found in the DB.
+    unpublished_events = await events_without_publications()
+    known_event_mobilizon_ids = set(
+        map(lambda event: event.mobilizon_id, unpublished_events)
+    )
+    new_unpublished_events = list(
+        filter(
+            lambda event: event.mobilizon_id not in known_event_mobilizon_ids,
+            events_from_mobilizon,
+        )
+    )
+
+    for event in new_unpublished_events:
+        await event.to_model().save()
+
+    return await events_without_publications()
+
+
+async def create_publisher(name: str, account_ref: Optional[str] = None) -> None:
+    await Publisher.create(name=name, account_ref=account_ref)
+
+
+@atomic(CONNECTION_NAME)
+async def update_publishers(
+    names: Iterable[str],
+) -> None:
+    names = set(names)
+    known_publisher_names = set(p.name for p in await Publisher.all())
+    for name in names.difference(known_publisher_names):
+        logging.info(f"Creating {name} publisher")
+        await create_publisher(name)

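The core of create_unpublished_events is a set difference on mobilizon_id; a runnable pure-data analogue (a hypothetical helper, for illustration only):

    from uuid import UUID

    def new_remote_ids(local_ids: set[UUID], remote_ids: list[UUID]) -> list[UUID]:
        # Keep only remote events whose mobilizon_id is not already stored
        # locally, mirroring the filter(...) over events_from_mobilizon above.
        return [i for i in remote_ids if i not in local_ids]

    assert new_remote_ids({UUID(int=1)}, [UUID(int=1), UUID(int=2)]) == [UUID(int=2)]
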
@@ -0,0 +1,10 @@
+from datetime import datetime, timezone, timedelta
+
+today = datetime(
+    year=2021,
+    month=6,
+    day=6,
+    hour=5,
+    minute=0,
+    tzinfo=timezone(timedelta(hours=2)),
+)

@@ -1,13 +1,15 @@
 import uuid
 
+import arrow
 import pytest
-from click.testing import CliRunner
 
 import mobilizon_reshare.publishers
 from mobilizon_reshare.models import event
+import mobilizon_reshare.storage.query.read
 from mobilizon_reshare.models.publisher import Publisher
 import mobilizon_reshare.main.recap
 from mobilizon_reshare.publishers import coordinator
+from tests import today
 
 
 def simple_event_element():
@@ -30,16 +32,24 @@ def mobilizon_answer(elements):
     return {"data": {"group": {"organizedEvents": {"elements": elements}}}}
 
 
+@pytest.fixture
+async def mock_now(monkeypatch):
+    def _mock_now():
+        return arrow.get(today)
+
+    monkeypatch.setattr(mobilizon_reshare.main.recap, "now", _mock_now)
+
+    return arrow.get(today)
+
+
 @pytest.fixture
 async def mock_publisher_config(monkeypatch, publisher_class, mock_formatter_class):
-    p = Publisher(name="test")
+    # FIXME: This is subtly bound to the name field of publisher_class
+    p = Publisher(name="mock")
     await p.save()
 
-    p2 = Publisher(name="test2")
-    await p2.save()
-
     def _mock_active_pub():
-        return ["test", "test2"]
+        return ["mock"]
 
     def _mock_pub_class(name):
         return publisher_class
@@ -47,7 +57,6 @@ async def mock_publisher_config(monkeypatch, publisher_class, mock_formatter_class):
     def _mock_format_class(name):
         return mock_formatter_class
 
-    monkeypatch.setattr(event, "get_active_publishers", _mock_active_pub)
     monkeypatch.setattr(
         mobilizon_reshare.publishers.platforms.platform_mapping,
         "get_publisher_class",
@@ -59,14 +68,22 @@ async def mock_publisher_config(monkeypatch, publisher_class, mock_formatter_class):
         _mock_format_class,
     )
 
+    monkeypatch.setattr(
+        mobilizon_reshare.storage.query.read, "get_active_publishers", _mock_active_pub
+    )
+
     monkeypatch.setattr(
         mobilizon_reshare.main.recap, "get_active_publishers", _mock_active_pub
     )
     monkeypatch.setattr(
-        mobilizon_reshare.main.recap, "get_publisher_class", _mock_pub_class,
+        mobilizon_reshare.main.recap,
+        "get_publisher_class",
+        _mock_pub_class,
     )
     monkeypatch.setattr(
-        mobilizon_reshare.main.recap, "get_formatter_class", _mock_format_class,
+        mobilizon_reshare.main.recap,
+        "get_formatter_class",
+        _mock_format_class,
     )
     return p
 
@@ -83,7 +100,9 @@ async def mock_notifier_config(monkeypatch, publisher_class, mock_formatter_class):
         return mock_formatter_class
 
     monkeypatch.setattr(
-        coordinator, "get_notifier_class", _mock_notifier_class,
+        coordinator,
+        "get_notifier_class",
+        _mock_notifier_class,
     )
     monkeypatch.setattr(
         mobilizon_reshare.publishers.platforms.platform_mapping,

@@ -1,14 +1,20 @@
 from logging import DEBUG
-from uuid import UUID
 
-import arrow
 import pytest
 
 from mobilizon_reshare.main.recap import recap
 from mobilizon_reshare.models.publication import PublicationStatus
-from mobilizon_reshare.storage.query.model_creation import (
-    create_event_publication_models,
-)
 
+spec = {
+    # We need three events since recap will print only
+    # future events, but the 0th event happens at today + 0.
+    "event": 3,
+    "publications": [
+        {"event_idx": 1, "publisher_idx": 0, "status": PublicationStatus.COMPLETED},
+        {"event_idx": 2, "publisher_idx": 0, "status": PublicationStatus.COMPLETED},
+    ],
+    "publisher": ["zulip"],
+}
+
 
 @pytest.mark.parametrize(
@@ -16,19 +22,9 @@ from mobilizon_reshare.storage.query.model_creation import (
 )
 @pytest.mark.asyncio
 async def test_start_event_from_db(
-    caplog, mock_publisher_config, message_collector, event_generator,
+    caplog, mock_publisher_config, mock_now, message_collector, generate_models
 ):
-    for i in range(2):
-        event = event_generator(
-            mobilizon_id=UUID(int=i), begin_date=arrow.now().shift(days=2)
-        )
-        event_model = event.to_model()
-        await event_model.save()
-
-        publications = await create_event_publication_models(event_model)
-        for p in publications:
-            p.status = PublicationStatus.COMPLETED
-            await p.save()
+    await generate_models(spec)
 
     with caplog.at_level(DEBUG):
         # calling the recap command
@@ -39,7 +35,7 @@
 
     recap_message = """Upcoming
 
-test event
+event_1
 
-test event"""
-    assert message_collector == [recap_message] * 2  # two publishers * 1 recap
+event_2"""
+    assert message_collector == [recap_message]

@@ -11,7 +11,8 @@ from mobilizon_reshare.models.publication import PublicationStatus
 
 @pytest.mark.asyncio
 @pytest.mark.parametrize(
-    "elements", [[]],
+    "elements",
+    [[]],
 )
 async def test_start_no_event(
     mock_mobilizon_success_answer, mobilizon_answer, caplog, elements
@@ -47,7 +48,6 @@ async def test_start_new_event(
     assert "Event to publish found" in caplog.text
     assert message_collector == [
         "test event|Some description",
-        "test event|Some description",
     ]
 
     all_events = (
@@ -63,7 +63,7 @@ async def test_start_new_event(
 
     # it should create a publication for each publisher
     publications = all_events[0].publications
-    assert len(publications) == 2, publications
+    assert len(publications) == 1, publications
 
     # all the other events should have no publication
     for e in all_events[1:]:
@@ -85,7 +85,8 @@ async def test_start_new_event(
     "publisher_class", [pytest.lazy_fixture("mock_publisher_class")]
 )
 @pytest.mark.parametrize(
-    "elements", [[]],
+    "elements",
+    [[]],
 )
 @pytest.mark.parametrize("publication_window", [(0, 24)])
 async def test_start_event_from_db(
@@ -109,16 +110,15 @@ async def test_start_event_from_db(
     assert "Event to publish found" in caplog.text
     assert message_collector == [
         "test event|description of the event",
-        "test event|description of the event",
     ]
 
     await event_model.fetch_related("publications", "publications__publisher")
     # it should create a publication for each publisher
     publications = event_model.publications
-    assert len(publications) == 2, publications
+    assert len(publications) == 1, publications
 
     # all the publications for the first event should be saved as COMPLETED
-    for p in publications[1:]:
+    for p in publications:
         assert p.status == PublicationStatus.COMPLETED
 
     # the derived status for the event should be COMPLETED
@@ -133,7 +133,8 @@ async def test_start_event_from_db(
     "publisher_class", [pytest.lazy_fixture("mock_publisher_invalid_class")]
 )
 @pytest.mark.parametrize(
-    "elements", [[]],
+    "elements",
+    [[]],
 )
 @pytest.mark.parametrize("publication_window", [(0, 24)])
 async def test_start_publisher_failure(
@@ -159,7 +160,7 @@ async def test_start_publisher_failure(
     await event_model.fetch_related("publications", "publications__publisher")
     # it should create a publication for each publisher
     publications = event_model.publications
-    assert len(publications) == 2, publications
+    assert len(publications) == 1, publications
 
     # all the publications for event should be saved as FAILED
     for p in publications:
@@ -168,7 +169,7 @@ async def test_start_publisher_failure(
 
     assert "Event to publish found" in caplog.text
     assert message_collector == [
-        f"Publication {p.id} failed with status: 1."
+        f"Publication {p.id} failed with status: 0."
         f"\nReason: credentials error\nPublisher: mock"
         for p in publications
         for _ in range(2)

@@ -2,6 +2,7 @@ import importlib.resources
 import os
 from collections import UserList
 from datetime import datetime, timedelta, timezone
+from typing import Union
 from uuid import UUID
 
 import arrow
@@ -21,6 +22,7 @@ from mobilizon_reshare.publishers.abstract import (
     AbstractEventFormatter,
 )
 from mobilizon_reshare.publishers.exceptions import PublisherError, InvalidResponse
+from tests import today
 
 
 def generate_publication_status(published):
@@ -192,6 +194,64 @@ def notification_model_generator():
     return _notification_model_generator
 
 
+async def _generate_publishers(specification):
+
+    publishers = []
+    for i, publisher_name in enumerate(specification["publisher"]):
+        publisher = Publisher(
+            id=UUID(int=i), name=publisher_name, account_ref=f"account_ref_{i}"
+        )
+        publishers.append(publisher)
+        await publisher.save()
+
+    return publishers
+
+
+async def _generate_events(specification):
+    events = []
+    if "event" in specification.keys():
+        for i in range(specification["event"]):
+            begin_date = today + timedelta(days=i)
+            event = Event(
+                id=UUID(int=i),
+                name=f"event_{i}",
+                description=f"desc_{i}",
+                mobilizon_id=UUID(int=i),
+                mobilizon_link=f"moblink_{i}",
+                thumbnail_link=f"thumblink_{i}",
+                location=f"loc_{i}",
+                begin_datetime=begin_date,
+                end_datetime=begin_date + timedelta(hours=2),
+            )
+            events.append(event)
+            await event.save()
+    return events
+
+
+async def _generate_publications(events, publishers, specification):
+    if "publications" in specification.keys():
+        for i, publication in enumerate(specification["publications"]):
+            status = publication.get("status", PublicationStatus.COMPLETED)
+            timestamp = publication.get("timestamp", today + timedelta(hours=i))
+            await Publication.create(
+                id=UUID(int=i),
+                status=status,
+                timestamp=timestamp,
+                event_id=events[publication["event_idx"]].id,
+                publisher_id=publishers[publication["publisher_idx"]].id,
+            )
+
+
+@pytest.fixture(scope="module")
+def generate_models():
+    async def _generate_models(specification: dict[str, Union[int, list]]):
+        publishers = await _generate_publishers(specification)
+        events = await _generate_events(specification)
+        await _generate_publications(events, publishers, specification)
+
+    return _generate_models
+
+
 @pytest.fixture()
 def message_collector():
     class MessageCollector(UserList):

@@ -5,7 +5,9 @@
 import arrow
 import pytest
 
+import mobilizon_reshare.storage.query.read
 from mobilizon_reshare.event.event import MobilizonEvent
+from mobilizon_reshare.models.publisher import Publisher
 from mobilizon_reshare.publishers.abstract import (
     AbstractPlatform,
     AbstractEventFormatter,
@@ -77,3 +79,20 @@ def mock_publisher_invalid_response(message_collector):
         pass
 
     return MockPublisher()
+
+
+@pytest.fixture()
+async def mock_active_publishers_config(monkeypatch):
+    p = Publisher(name="zulip")
+    await p.save()
+
+    def _mock_active_pub():
+        return ["zulip"]
+
+    monkeypatch.setattr(
+        mobilizon_reshare.storage.query.read,
+        "get_active_publishers",
+        _mock_active_pub
+    )
+
+    return p

@@ -89,13 +89,11 @@ async def mock_publications(
         await publisher.save()
         publication = PublicationModel(
             id=UUID(int=i + 1),
-            status=PublicationStatus.UNSAVED,
             event=event,
             publisher=publisher,
             timestamp=None,
             reason=None,
         )
-        await publication.save()
         publication = EventPublication.from_orm(publication, test_event)
         publication.publisher = mock_publisher_valid
         publication.formatter = mock_formatter_valid

@@ -1,5 +1,3 @@
-from functools import partial
-
 import pytest
 import requests
 import responses
@@ -7,8 +5,6 @@
 from mobilizon_reshare.config.config import get_settings
 from mobilizon_reshare.models.publication import PublicationStatus
 from mobilizon_reshare.models.publisher import Publisher
-from mobilizon_reshare.publishers import get_active_publishers
-from mobilizon_reshare.publishers.abstract import EventPublication
 from mobilizon_reshare.publishers.coordinator import PublisherCoordinator
 from mobilizon_reshare.publishers.exceptions import (
     InvalidEvent,
@@ -17,10 +13,7 @@ from mobilizon_reshare.publishers.exceptions import (
     InvalidMessage,
 )
 from mobilizon_reshare.publishers.platforms.zulip import ZulipFormatter, ZulipPublisher
-from mobilizon_reshare.storage.query.save_query import update_publishers
-from mobilizon_reshare.storage.query.model_creation import (
-    create_event_publication_models,
-)
+from mobilizon_reshare.storage.query.read import build_publications
 
 api_uri = "https://zulip.twc-italia.org/api/v1/"
 users_me = {
@@ -88,19 +81,15 @@ def mocked_client_error_response():
 
 @pytest.fixture
 @pytest.mark.asyncio
-async def setup_db(event_model_generator, publication_model_generator):
+async def setup_db(
+    mock_active_publishers_config, event_model_generator, publication_model_generator
+):
     settings = get_settings()
-    for publisher in get_active_publishers():
-        if publisher != "zulip":
-            settings["publisher"][publisher]["active"] = False
     settings["publisher"]["zulip"][
         "bot_email"
     ] = "giacomotest2-bot@zulip.twc-italia.org"
-    settings["publisher"]["zulip"][
-        "instance"
-    ] = "https://zulip.twc-italia.org"
+    settings["publisher"]["zulip"]["instance"] = "https://zulip.twc-italia.org"
 
-    await update_publishers(["zulip"])
     publisher = await Publisher.filter(name="zulip").first()
     event = event_model_generator()
     await event.save()
@@ -112,42 +101,33 @@
 
 @pytest.fixture
 @pytest.mark.asyncio
-async def publication_models(event):
+async def unsaved_publications(event):
     await event.to_model().save()
-    publication_models = await create_event_publication_models(event)
-    return publication_models
+    return await build_publications(event)
 
 
 @pytest.mark.asyncio
-async def test_zulip_publisher(
-    mocked_valid_response, setup_db, event, publication_models
-):
+async def test_zulip_publisher(mocked_valid_response, setup_db, unsaved_publications):
 
-    report = PublisherCoordinator(
-        list(map(partial(EventPublication.from_orm, event=event), publication_models,))
-    ).run()
+    report = PublisherCoordinator(unsaved_publications).run()
 
     assert report.reports[0].status == PublicationStatus.COMPLETED
 
 
 @pytest.mark.asyncio
 async def test_zulip_publishr_failure_invalid_credentials(
-    mocked_credential_error_response, setup_db, event, publication_models
+    mocked_credential_error_response, setup_db, unsaved_publications
 ):
-    report = PublisherCoordinator(
-        list(map(partial(EventPublication.from_orm, event=event), publication_models))
-    ).run()
+    report = PublisherCoordinator(unsaved_publications).run()
     assert report.reports[0].status == PublicationStatus.FAILED
     assert report.reports[0].reason == "403 Error - Your credentials are not valid!"
 
 
 @pytest.mark.asyncio
 async def test_zulip_publisher_failure_client_error(
-    mocked_client_error_response, setup_db, event, publication_models
+    mocked_client_error_response, setup_db, unsaved_publications
 ):
-    report = PublisherCoordinator(
-        list(map(partial(EventPublication.from_orm, event=event), publication_models))
-    ).run()
+    report = PublisherCoordinator(unsaved_publications).run()
     assert report.reports[0].status == PublicationStatus.FAILED
     assert report.reports[0].reason == "400 Error - Invalid request"
 

@@ -1,13 +1,9 @@
-from datetime import datetime, timezone, timedelta
+from datetime import timedelta
 from uuid import UUID
 
 from mobilizon_reshare.models.publication import Publication
 from mobilizon_reshare.models.publication import PublicationStatus
-
-today = datetime(
-    year=2021, month=6, day=6, hour=5, minute=0, tzinfo=timezone(timedelta(hours=2)),
-)
-
+from tests import today
 
 complete_specification = {
     "event": 4,

@@ -1,68 +1,19 @@
-from datetime import timedelta
-from typing import Union
-from uuid import UUID
-
 import pytest
 
-from mobilizon_reshare.models.event import Event
-from mobilizon_reshare.models.publication import PublicationStatus, Publication
+import mobilizon_reshare.storage.query.read
 from mobilizon_reshare.models.publisher import Publisher
-from tests.storage import today
 
 
-async def _generate_publishers(specification):
+@pytest.fixture(scope="function")
+async def mock_active_publishers(request, monkeypatch):
+    for name in request.param:
+        await Publisher.create(name=name)
 
-    publishers = []
-    for i, publisher_name in enumerate(specification["publisher"]):
-        publisher = Publisher(
-            id=UUID(int=i), name=publisher_name, account_ref=f"account_ref_{i}"
-        )
-        publishers.append(publisher)
-        await publisher.save()
+    def _mock_active_pub():
+        return request.param
 
-    return publishers
+    monkeypatch.setattr(
+        mobilizon_reshare.storage.query.read, "get_active_publishers", _mock_active_pub
+    )
 
-
-async def _generate_events(specification):
-    events = []
-    if "event" in specification.keys():
-        for i in range(specification["event"]):
-            begin_date = today + timedelta(days=i)
-            event = Event(
-                id=UUID(int=i),
-                name=f"event_{i}",
-                description=f"desc_{i}",
-                mobilizon_id=UUID(int=i),
-                mobilizon_link=f"moblink_{i}",
-                thumbnail_link=f"thumblink_{i}",
-                location=f"loc_{i}",
-                begin_datetime=begin_date,
-                end_datetime=begin_date + timedelta(hours=2),
-            )
-            events.append(event)
-            await event.save()
-    return events
-
-
-async def _generate_publications(events, publishers, specification):
-    if "publications" in specification.keys():
-        for i, publication in enumerate(specification["publications"]):
-            status = publication.get("status", PublicationStatus.COMPLETED)
-            timestamp = publication.get("timestamp", today + timedelta(hours=i))
-            await Publication.create(
-                id=UUID(int=i),
-                status=status,
-                timestamp=timestamp,
-                event_id=events[publication["event_idx"]].id,
-                publisher_id=publishers[publication["publisher_idx"]].id,
-            )
-
-
-@pytest.fixture(scope="module")
-def generate_models():
-    async def _generate_models(specification: dict[str, Union[int, list]]):
-        publishers = await _generate_publishers(specification)
-        events = await _generate_events(specification)
-        await _generate_publications(events, publishers, specification)
-
-    return _generate_models
+    return request.param

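mock_active_publishers consumes its argument list through pytest's indirect parametrization (request.param); a minimal self-contained illustration of that mechanism, separate from the code above:

    import pytest

    @pytest.fixture
    def active_publishers(request):
        # With indirect parametrization the parametrize value lands here,
        # not directly in the test's argument.
        return request.param

    @pytest.mark.parametrize("active_publishers", [["zulip"]], indirect=True)
    def test_example(active_publishers):
        assert active_publishers == ["zulip"]
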
@@ -7,17 +7,16 @@
 from mobilizon_reshare.event.event import MobilizonEvent, EventPublicationStatus
 from mobilizon_reshare.models.event import Event
 from mobilizon_reshare.models.publication import PublicationStatus
-from mobilizon_reshare.storage.query.read_query import (
-    get_mobilizon_event_publications,
+from mobilizon_reshare.storage.query.read import (
     get_published_events,
     events_with_status,
-    prefetch_event_relations,
     publications_with_status,
     events_without_publications,
+    build_publications,
 )
 from tests.storage import complete_specification
 from tests.storage import result_publication
-from tests.storage import today
+from tests import today
 
 event_0 = MobilizonEvent(
     name="event_0",
@@ -41,33 +40,6 @@ async def test_get_published_events(generate_models):
     assert len(published_events) == 3
 
 
-@pytest.mark.asyncio
-async def test_get_mobilizon_event_publications(generate_models):
-    await generate_models(complete_specification)
-
-    models = await prefetch_event_relations(Event.filter(name="event_0"))
-    mobilizon_event = MobilizonEvent.from_model(models[0])
-
-    publications = list(await get_mobilizon_event_publications(mobilizon_event))
-    for pub in publications:
-        await pub.fetch_related("event")
-        await pub.fetch_related("publisher")
-
-    assert len(publications) == 3
-
-    assert publications[0].event.name == "event_0"
-    assert publications[0].publisher.name == "telegram"
-    assert publications[0].status == PublicationStatus.COMPLETED
-
-    assert publications[1].event.name == "event_0"
-    assert publications[1].publisher.name == "twitter"
-    assert publications[1].status == PublicationStatus.COMPLETED
-
-    assert publications[2].event.name == "event_0"
-    assert publications[2].publisher.name == "mastodon"
-    assert publications[2].status == PublicationStatus.COMPLETED
-
-
 @pytest.mark.asyncio
 @pytest.mark.parametrize(
     "status,mobilizon_id,from_date,to_date,expected_result",
@@ -96,7 +68,12 @@ async def test_get_mobilizon_event_publications(generate_models):
     ],
 )
 async def test_publications_with_status(
-    status, mobilizon_id, from_date, to_date, expected_result, generate_models,
+    status,
+    mobilizon_id,
+    from_date,
+    to_date,
+    expected_result,
+    generate_models,
 ):
     await generate_models(complete_specification)
     publications = await publications_with_status(
@@ -186,6 +163,25 @@ async def test_event_with_status_window(
                ),
            ],
        ),
+        (
+            {
+                "event": 3,
+                "publications": [
+                    {
+                        "event_idx": 1,
+                        "publisher_idx": 0,
+                        "status": PublicationStatus.FAILED,
+                    },
+                    {
+                        "event_idx": 2,
+                        "publisher_idx": 0,
+                        "status": PublicationStatus.COMPLETED,
+                    },
+                ],
+                "publisher": ["zulip"],
+            },
+            [event_0],
+        ),
        (
            complete_specification,
            [
@@ -212,3 +208,46 @@ async def test_events_without_publications(spec, expected_events, generate_models):
     unpublished_events = list(await events_without_publications())
     assert len(unpublished_events) == len(expected_events)
     assert unpublished_events == expected_events
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize(
+    "mock_active_publishers, spec, event, n_publications",
+    [
+        (
+            [],
+            {"event": 2, "publications": [], "publisher": ["zulip"]},
+            event_0,
+            0,
+        ),
+        (
+            ["zulip"],
+            {"event": 2, "publications": [], "publisher": ["zulip"]},
+            event_0,
+            1,
+        ),
+        (
+            ["telegram", "zulip", "mastodon", "facebook"],
+            {
+                "event": 2,
+                "publications": [],
+                "publisher": ["telegram", "zulip", "mastodon", "facebook"],
+            },
+            event_0,
+            4,
+        ),
+    ],
+    indirect=["mock_active_publishers"],
+)
+async def test_build_publications(
+    mock_active_publishers, spec, event, n_publications, generate_models
+):
+    await generate_models(spec)
+
+    publications = list(await build_publications(event))
+
+    assert len(publications) == n_publications
+
+    for p in publications:
+        assert p.event == event
+        assert p.publisher.name in mock_active_publishers

@@ -2,7 +2,7 @@ from uuid import UUID
 
 import pytest
 
-from mobilizon_reshare.storage.query.read_query import (
+from mobilizon_reshare.storage.query.read import (
     get_unpublished_events,
     get_all_events,
 )

@@ -12,8 +12,12 @@ from mobilizon_reshare.publishers.coordinator import (
     PublisherCoordinatorReport,
     EventPublicationReport,
 )
-from mobilizon_reshare.storage.query.read_query import publications_with_status
-from mobilizon_reshare.storage.query.save_query import (
+from mobilizon_reshare.publishers.platforms.telegram import (
+    TelegramFormatter,
+    TelegramPublisher,
+)
+from mobilizon_reshare.storage.query.read import publications_with_status
+from mobilizon_reshare.storage.query.write import (
     save_publication_report,
     update_publishers,
 )
@@ -22,6 +26,18 @@ from tests.storage import today
 
 two_publishers_specification = {"publisher": ["telegram", "twitter"]}
 
+event_1 = MobilizonEvent(
+    name="event_1",
+    description="desc_1",
+    mobilizon_id=UUID(int=1),
+    mobilizon_link="moblink_1",
+    thumbnail_link="thumblink_1",
+    location="loc_1",
+    status=EventPublicationStatus.WAITING,
+    begin_datetime=arrow.get(today + timedelta(days=1)),
+    end_datetime=arrow.get(today + timedelta(days=1) + timedelta(hours=2)),
+)
+
 
 @pytest.mark.asyncio
 @pytest.mark.parametrize(
@@ -48,7 +64,10 @@ two_publishers_specification = {"publisher": ["telegram", "twitter"]}
     ],
 )
 async def test_update_publishers(
-    specification, names, expected_result, generate_models,
+    specification,
+    names,
+    expected_result,
+    generate_models,
 ):
     await generate_models(specification)
     await update_publishers(names)
@@ -74,46 +93,41 @@ async def test_update_publishers(
                        status=PublicationStatus.COMPLETED,
                        reason="",
                        publication=EventPublication(
-                            id=UUID(int=4), formatter=None, event=None, publisher=None
+                            id=UUID(int=6),
+                            formatter=TelegramFormatter(),
+                            event=event_1,
+                            publisher=TelegramPublisher(),
                        ),
                    ),
                ],
            ),
-            MobilizonEvent(
-                name="event_1",
-                description="desc_1",
-                mobilizon_id=UUID(int=1),
-                mobilizon_link="moblink_1",
-                thumbnail_link="thumblink_1",
-                location="loc_1",
-                status=EventPublicationStatus.WAITING,
-                begin_datetime=arrow.get(today + timedelta(days=1)),
-                end_datetime=arrow.get(today + timedelta(days=1) + timedelta(hours=2)),
-            ),
+            event_1,
            {
-                UUID(int=4): Publication(
-                    id=UUID(int=4), status=PublicationStatus.COMPLETED, reason=""
+                UUID(int=6): Publication(
+                    id=UUID(int=6), status=PublicationStatus.COMPLETED, reason=""
                ),
            },
        ],
    ],
 )
 async def test_save_publication_report(
-    specification, report, event, expected_result, generate_models,
+    specification,
+    report,
+    event,
+    expected_result,
+    generate_models,
 ):
     await generate_models(specification)
+    known_publication_ids = set(p.id for p in await Publication.all())
+
+    await save_publication_report(report)
 
-    publications = await publications_with_status(
-        status=PublicationStatus.COMPLETED, event_mobilizon_id=event.mobilizon_id,
-    )
-    await save_publication_report(report, list(publications.values()))
-    publication_ids = set(publications.keys())
     publications = {
-        p_id: await Publication.filter(id=p_id).first() for p_id in publication_ids
+        p.id: p for p in await Publication.filter(id__not_in=known_publication_ids)
     }
 
     assert len(publications) == len(expected_result)
-    for i in publication_ids:
+    for i in publications.keys():
         assert publications[i].status == expected_result[i].status
         assert publications[i].reason == expected_result[i].reason
         assert publications[i].timestamp