Mirror of https://github.com/Tech-Workers-Coalition-Italia/mobilizon-reshare.git (synced 2025-02-16 19:50:41 +01:00)
Publication report (#46)
* publishers: coordinator: Change `PublisherReport` to `PublicationReport`.
* publishers: AbstractNotifier: Add `AbstractNotifier.get_name`.
* models: Publication: Add `reason`.
* storage: query: Add `get_mobilizon_event_publications`.
* tests: query: Refactor models generation.
* storage: query: Add `save_publication_report`.
* Track publication ids during the publishing process. This patch changes the
  PublisherCoordinator to keep track of the different Publications it's
  performing. This also enables multiple publications for the same publisher.
* tests: storage: Add some tests.

Co-authored-by: Giacomo Leidi <goodoldpaul@autistici.org>
parent 880a34115f
commit 41317f062d
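The heart of the change is visible in `main()` and `PublisherCoordinator` in the diff below: WAITING publication rows are looked up per event, handed to the coordinator as (publication id, publisher name) pairs, and the resulting per-publication reports are written back to the database. The following is a condensed, illustrative sketch of that flow assembled from the diff; the helper name `publish` is hypothetical and setup/error handling are elided.

```python
from mobilizon_bots.models.publication import PublicationStatus
from mobilizon_bots.publishers.coordinator import PublisherCoordinator
from mobilizon_bots.storage.query import (
    publications_with_status,
    save_publication_report,
)


async def publish(event):
    # Every WAITING Publication row for the event becomes a
    # (publication_id, publisher_name) pair, so one publisher can now
    # back several publications of the same event.
    waiting = await publications_with_status(
        status=PublicationStatus.WAITING,
        event_mobilizon_id=event.mobilizon_id,
    )
    coordinator = PublisherCoordinator(
        event, [(pub.id, pub.publisher.name) for pub in waiting]
    )
    # The coordinator report maps publication UUIDs to PublicationReport
    # objects; save_publication_report copies status, reason and timestamp
    # back onto the corresponding Publication rows.
    report = coordinator.run()
    await save_publication_report(report, event)
    return 0 if report.successful else 1
```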
@@ -5,7 +5,7 @@ from arrow import Arrow
from click import pass_context, pass_obj

from mobilizon_bots.cli import safe_execution
from mobilizon_bots.cli.inspect import inspect_events
from mobilizon_bots.cli.inspect_event import inspect_events
from mobilizon_bots.cli.main import main
from mobilizon_bots.event.event import EventPublicationStatus

@@ -1,20 +1,22 @@
import logging.config


from mobilizon_bots.event.event_selection_strategies import select_event_to_publish
from mobilizon_bots.mobilizon.events import get_unpublished_events
from mobilizon_bots.models.publication import PublicationStatus
from mobilizon_bots.publishers import get_active_publishers
from mobilizon_bots.publishers.coordinator import PublisherCoordinator
from mobilizon_bots.storage.db import tear_down
from mobilizon_bots.storage.query import get_published_events, create_unpublished_events
from mobilizon_bots.storage.query import (
    get_published_events,
    get_unpublished_events as get_db_unpublished_events,
    create_unpublished_events,
    save_publication_report,
    publications_with_status,
)

logger = logging.getLogger(__name__)


async def graceful_exit(code):
    await tear_down()
    exit(code)


async def main():
    """
    STUB
@@ -30,14 +32,26 @@ async def main():
    unpublished_events = get_unpublished_events(published_events)
    # Store in the DB only the ones we didn't know about
    await create_unpublished_events(unpublished_events, active_publishers)
    event = select_event_to_publish(published_events, unpublished_events)
    event = select_event_to_publish(
        published_events,
        # We must load unpublished events from DB since it contains
        # merged state between Mobilizon and previous WAITING events.
        list(await get_db_unpublished_events()),
    )
    if event:
        logger.debug(f"Event to publish found: {event.name}")
        result = PublisherCoordinator(event).run()
        result = PublisherCoordinator(
            event,
            [
                (pub.id, pub.publisher.name)
                for pub in await publications_with_status(
                    status=PublicationStatus.WAITING,
                    event_mobilizon_id=event.mobilizon_id,
                )
            ],
        ).run()
        await save_publication_report(result, event)

        logger.debug("Closing")

        await graceful_exit(0 if result.successful else 1)
        return 0 if result.successful else 1
    else:
        logger.debug("Closing")
        await graceful_exit(0)
        return 0
@@ -85,10 +85,10 @@ class MobilizonEvent:
            description=event.description,
            begin_datetime=arrow.get(
                tortoise.timezone.localtime(value=event.begin_datetime, timezone=tz)
            ),
            ).to('local'),
            end_datetime=arrow.get(
                tortoise.timezone.localtime(value=event.end_datetime, timezone=tz)
            ),
            ).to('local'),
            mobilizon_link=event.mobilizon_link,
            mobilizon_id=event.mobilizon_id,
            thumbnail_link=event.thumbnail_link,
@@ -96,7 +96,7 @@ class MobilizonEvent:
            publication_time={
                pub.publisher.name: arrow.get(
                    tortoise.timezone.localtime(value=pub.timestamp, timezone=tz)
                )
                ).to('local')
                for pub in event.publications
            }
            if publication_status != PublicationStatus.WAITING
@@ -7,7 +7,7 @@ import arrow
import requests

from mobilizon_bots.config.config import get_settings
from mobilizon_bots.event.event import MobilizonEvent, PublicationStatus
from mobilizon_bots.event.event import MobilizonEvent, EventPublicationStatus

logger = logging.getLogger(__name__)

@@ -39,7 +39,7 @@ def parse_event(data):
        thumbnail_link=parse_picture(data),
        location=parse_location(data),
        publication_time=None,
        status=PublicationStatus.WAITING,
        status=EventPublicationStatus.WAITING,
    )

@@ -15,8 +15,9 @@ class Publication(Model):
    status = fields.IntEnumField(PublicationStatus)

    # When a Publication's status is WAITING
    # we don't need a timestamp
    # we don't need a timestamp nor a reason
    timestamp = fields.DatetimeField(null=True)
    reason = fields.TextField(null=True)

    event = fields.ForeignKeyField("models.Event", related_name="publications")
    publisher = fields.ForeignKeyField("models.Publisher", related_name="publications")
@@ -1,104 +1,83 @@
from dataclasses import dataclass, field
from enum import IntEnum
from typing import List
from uuid import UUID

from mobilizon_bots.event.event import MobilizonEvent
from mobilizon_bots.publishers import get_active_publishers
from mobilizon_bots.publishers.abstract import AbstractPublisher
from mobilizon_bots.models.publication import PublicationStatus
from mobilizon_bots.publishers.exceptions import PublisherError
from mobilizon_bots.publishers.telegram import TelegramPublisher

KEY2CLS = {"telegram": TelegramPublisher}


class PublisherStatus(IntEnum):
    WAITING = 1
    FAILED = 2
    COMPLETED = 3


@dataclass
class PublisherReport:
    status: PublisherStatus
class PublicationReport:
    status: PublicationStatus
    reason: str
    publisher: AbstractPublisher


@dataclass
class PublisherCoordinatorReport:
    reports: List[PublisherReport] = field(default_factory=[])
    reports: dict[UUID, PublicationReport] = field(default_factory={})

    @property
    def successful(self):
        return all(r.status == PublisherStatus.COMPLETED for r in self.reports)
        return all(
            r.status == PublicationStatus.COMPLETED for r in self.reports.values()
        )

    def __iter__(self):
        return self.reports.__iter__()
        return self.reports.items().__iter__()


class PublisherCoordinator:
    def __init__(self, event: MobilizonEvent):
        self.publishers = tuple(KEY2CLS[pn](event) for pn in get_active_publishers())
    def __init__(self, event: MobilizonEvent, publications: list[tuple[UUID, str]]):
        self.publications = tuple(
            (publication_id, KEY2CLS[publisher_name](event)) for publication_id, publisher_name in publications
        )

    def run(self) -> PublisherCoordinatorReport:
        invalid_credentials, invalid_event, invalid_msg = self._validate()
        errors = invalid_credentials + invalid_event + invalid_msg
        errors = self._validate()
        if errors:
            return PublisherCoordinatorReport(reports=errors)

        return self._post()

    def _make_successful_report(self):
        return [
            PublisherReport(
                status=PublisherStatus.COMPLETED,
        return {
            publication_id: PublicationReport(
                status=PublicationStatus.COMPLETED,
                reason="",
                publisher=p,
            )
            for p in self.publishers
        ]
            for publication_id, _ in self.publications
        }

    def _post(self):
        failed_publishers_reports = []
        for p in self.publishers:
        failed_publishers_reports = {}
        for publication_id, p in self.publications:
            try:
                p.post()
            except PublisherError as e:
                failed_publishers_reports.append(
                    PublisherReport(
                        status=PublisherStatus.FAILED,
                        reason=repr(e),
                        publisher=p,
                    )
                failed_publishers_reports[publication_id] = PublicationReport(
                    status=PublicationStatus.FAILED,
                    reason=repr(e),
                )
        reports = failed_publishers_reports or self._make_successful_report()
        return PublisherCoordinatorReport(reports)

    def _validate(self):
        invalid_credentials, invalid_event, invalid_msg = [], [], []
        for p in self.publishers:
        errors: dict[UUID, PublicationReport] = {}
        for publication_id, p in self.publications:
            reason = []
            if not p.are_credentials_valid():
                invalid_credentials.append(
                    PublisherReport(
                        status=PublisherStatus.FAILED,
                        reason="Invalid credentials",
                        publisher=p,
                    )
                )
                reason.append("Invalid credentials")
            if not p.is_event_valid():
                invalid_event.append(
                    PublisherReport(
                        status=PublisherStatus.FAILED,
                        reason="Invalid event",
                        publisher=p,
                    )
                )
                reason.append("Invalid event")
            if not p.is_message_valid():
                invalid_msg.append(
                    PublisherReport(
                        status=PublisherStatus.FAILED,
                        reason="Invalid message",
                        publisher=p,
                    )
                reason.append("Invalid message")

            if len(reason) > 0:
                errors[publication_id] = PublicationReport(
                    status=PublicationStatus.FAILED, reason=", ".join(reason)
                )
        return invalid_credentials, invalid_event, invalid_msg

        return errors
@@ -6,7 +6,7 @@ from pathlib import Path
from tortoise import Tortoise

from mobilizon_bots.config.publishers import publisher_names
from mobilizon_bots.storage.query import create_publisher
from mobilizon_bots.storage.query import update_publishers

logger = logging.getLogger(__name__)

@@ -35,13 +35,11 @@ class MobilizonBotsDB:
        )
        if not self.is_init:
            await Tortoise.generate_schemas()
            for name in publisher_names:
                logging.info(f"Creating {name} publisher")
                # TODO: Deal with account_ref
                await create_publisher(name)
            self.is_init = True
            logger.info(f"Succesfully initialized database at {self.path}")

        await update_publishers(publisher_names)


@atexit.register
def gracefully_tear_down():
@@ -1,6 +1,9 @@
from typing import Iterable, Optional, List

import logging
import sys
from typing import Iterable, Optional, Union
from uuid import UUID

import arrow
from arrow import Arrow
from tortoise.queryset import QuerySet
from tortoise.transactions import atomic
@@ -11,14 +14,17 @@ from mobilizon_bots.models.publication import Publication, PublicationStatus
from mobilizon_bots.models.publisher import Publisher
from mobilizon_bots.publishers.coordinator import PublisherCoordinatorReport

logger = logging.getLogger(__name__)

# This is due to Tortoise community fixtures to
# set up and tear down a DB instance for Pytest.
# See: https://github.com/tortoise/tortoise-orm/issues/419#issuecomment-696991745
# and: https://docs.pytest.org/en/stable/example/simple.html

CONNECTION_NAME = "models" if "pytest" in sys.modules else None


async def prefetch_event_relations(queryset: QuerySet[Event]) -> List[Event]:
async def prefetch_event_relations(queryset: QuerySet[Event]) -> list[Event]:
    return (
        await queryset.prefetch_related("publications__publisher")
        .order_by("begin_datetime")
@@ -27,18 +33,40 @@ async def prefetch_event_relations(queryset: QuerySet[Event]) -> List[Event]:


def _add_date_window(
    query, from_date: Optional[Arrow] = None, to_date: Optional[Arrow] = None,
    query,
    field_name: str,
    from_date: Optional[Arrow] = None,
    to_date: Optional[Arrow] = None,
):

    if from_date:
        query = query.filter(end_datetime__gt=from_date.datetime)
        query = query.filter(**{f"{field_name}__gt": from_date.to("utc").datetime})
    if to_date:
        query = query.filter(end_datetime__lt=to_date.datetime)
        query = query.filter(**{f"{field_name}__lt": to_date.to("utc").datetime})
    return query


@atomic(CONNECTION_NAME)
async def publications_with_status(
    status: PublicationStatus,
    event_mobilizon_id: Optional[UUID] = None,
    from_date: Optional[Arrow] = None,
    to_date: Optional[Arrow] = None,
) -> Iterable[Publication]:
    query = Publication.filter(status=status)

    if event_mobilizon_id:
        query = query.prefetch_related("event").filter(
            event__mobilizon_id=event_mobilizon_id
        )

    query = _add_date_window(query, "timestamp", from_date, to_date)

    return await query.prefetch_related("publisher").order_by("timestamp").distinct()


async def events_with_status(
    status: List[EventPublicationStatus],
    status: list[EventPublicationStatus],
    from_date: Optional[Arrow] = None,
    to_date: Optional[Arrow] = None,
) -> Iterable[MobilizonEvent]:
@@ -49,21 +77,27 @@ async def events_with_status(
        return event_status in status

    query = Event.all()
    _add_date_window(query, from_date, to_date)

    return map(
        MobilizonEvent.from_model,
        filter(_filter_event_with_status, await prefetch_event_relations(query),),
        filter(
            _filter_event_with_status,
            await prefetch_event_relations(
                _add_date_window(query, "begin_datetime", from_date, to_date)
            ),
        ),
    )


async def get_all_events(
    from_date: Optional[Arrow] = None, to_date: Optional[Arrow] = None,
    from_date: Optional[Arrow] = None,
    to_date: Optional[Arrow] = None,
) -> Iterable[MobilizonEvent]:

    return map(
        MobilizonEvent.from_model,
        await prefetch_event_relations(
            _add_date_window(Event.all(), from_date, to_date)
            _add_date_window(Event.all(), "begin_datetime", from_date, to_date)
        ),
    )

@@ -76,18 +110,56 @@ async def get_unpublished_events() -> Iterable[MobilizonEvent]:
    return await events_with_status([EventPublicationStatus.WAITING])


async def save_event(event):
async def get_mobilizon_event_publications(
    event: MobilizonEvent,
) -> Iterable[Publication]:
    models = await prefetch_event_relations(
        Event.filter(mobilizon_id=event.mobilizon_id)
    )
    return models[0].publications


async def get_publishers(
    name: Optional[str] = None,
) -> Union[Publisher, Iterable[Publisher]]:
    if name:
        return await Publisher.filter(name=name).first()
    else:
        return await Publisher.all()


async def save_event(event: MobilizonEvent) -> Event:

    event_model = event.to_model()
    await event_model.save()
    return event_model


async def save_publication(publisher_name, event_model, status: PublicationStatus):
async def create_publisher(name: str, account_ref: Optional[str] = None) -> None:
    await Publisher.create(name=name, account_ref=account_ref)

    publisher = await Publisher.filter(name=publisher_name).first()

@atomic(CONNECTION_NAME)
async def update_publishers(
    names: Iterable[str],
) -> None:
    names = set(names)
    known_publisher_names = set(p.name for p in await get_publishers())
    for name in names.difference(known_publisher_names):
        logging.info(f"Creating {name} publisher")
        await create_publisher(name)


@atomic(CONNECTION_NAME)
async def save_publication(
    publisher_name: str, event_model: Event, status: PublicationStatus
) -> None:

    publisher = await get_publishers(publisher_name)
    await Publication.create(
        status=status, event_id=event_model.id, publisher_id=publisher.id,
        status=status,
        event_id=event_model.id,
        publisher_id=publisher.id,
    )


@@ -115,10 +187,18 @@ async def create_unpublished_events(
    )


async def create_publisher(name: str, account_ref: Optional[str] = None) -> None:
    await Publisher.create(name=name, account_ref=account_ref)
@atomic(CONNECTION_NAME)
async def save_publication_report(
    coordinator_report: PublisherCoordinatorReport, event: MobilizonEvent
) -> None:
    publications: dict[UUID, Publication] = {
        p.id: p for p in await get_mobilizon_event_publications(event)
    }

    for publication_id, publication_report in coordinator_report:

async def save_publication_report(publication_report: PublisherCoordinatorReport):
    for publisher_report in publication_report:
        pass
        publications[publication_id].status = publication_report.status
        publications[publication_id].reason = publication_report.reason
        publications[publication_id].timestamp = arrow.now().datetime

        await publications[publication_id].save()
@@ -1,143 +1,565 @@
from datetime import datetime, timedelta, timezone
from typing import Union
from uuid import UUID

import arrow
import pytest

from mobilizon_bots.event.event import MobilizonEvent
from mobilizon_bots.models.publication import PublicationStatus
from mobilizon_bots.event.event import MobilizonEvent, EventPublicationStatus
from mobilizon_bots.models.event import Event
from mobilizon_bots.models.publication import PublicationStatus, Publication
from mobilizon_bots.models.publisher import Publisher
from mobilizon_bots.publishers.coordinator import (
    PublisherCoordinatorReport,
    PublicationReport,
)

from mobilizon_bots.storage.query import (
    get_published_events,
    get_unpublished_events,
    create_unpublished_events,
    get_mobilizon_event_publications,
    prefetch_event_relations,
    get_publishers,
    publications_with_status,
    update_publishers,
    save_publication_report,
)


today = datetime(
    year=2021,
    month=6,
    day=6,
    hour=5,
    minute=0,
    tzinfo=timezone(timedelta(hours=2)),
)

two_publishers_specification = {"publisher": 2}

complete_specification = {
    "event": 4,
    "publications": [
        {"event_idx": 0, "publisher_idx": 0},
        {
            "event_idx": 0,
            "publisher_idx": 1,
            "status": PublicationStatus.COMPLETED,
        },
        {
            "event_idx": 1,
            "publisher_idx": 0,
            "status": PublicationStatus.WAITING,
        },
        {
            "event_idx": 1,
            "publisher_idx": 1,
        },
        {
            "event_idx": 2,
            "publisher_idx": 2,
            "status": PublicationStatus.FAILED,
        },
        {
            "event_idx": 2,
            "publisher_idx": 1,
            "status": PublicationStatus.COMPLETED,
        },
        {
            "event_idx": 3,
            "publisher_idx": 2,
            "status": PublicationStatus.COMPLETED,
        },
    ],
}


@pytest.fixture(scope="module")
def setup():
    async def _setup(
        publisher_model_generator, publication_model_generator, event_model_generator
    ):
        today = datetime(
            year=2021,
            month=6,
            day=6,
            hour=5,
            minute=0,
            tzinfo=timezone(timedelta(hours=2)),
        )
        publisher_1 = publisher_model_generator()
        publisher_2 = publisher_model_generator(idx=2)
        await publisher_1.save()
        await publisher_2.save()
def generate_models():
    async def _generate_models(specification: dict[str, Union[int, list]]):
        publishers = []
        for i in range(
            specification["publisher"] if "publisher" in specification.keys() else 3
        ):
            publisher = Publisher(
                id=UUID(int=i), name=f"publisher_{i}", account_ref=f"account_ref_{i}"
            )
            publishers.append(publisher)
            await publisher.save()

        event_1 = event_model_generator(begin_date=today)
        event_2 = event_model_generator(idx=2, begin_date=today + timedelta(days=2))
        event_3 = event_model_generator(idx=3, begin_date=today + timedelta(days=-2))
        event_4 = event_model_generator(idx=4, begin_date=today + timedelta(days=-4))
        await event_1.save()
        await event_2.save()
        await event_3.save()
        await event_4.save()
        events = []
        if "event" in specification.keys():
            for i in range(specification["event"]):
                begin_date = today + timedelta(days=i)
                event = Event(
                    id=UUID(int=i),
                    name=f"event_{i}",
                    description=f"desc_{i}",
                    mobilizon_id=f"mobid_{i}",
                    mobilizon_link=f"moblink_{i}",
                    thumbnail_link=f"thumblink_{i}",
                    location=f"loc_{i}",
                    begin_datetime=begin_date,
                    end_datetime=begin_date + timedelta(hours=2),
                )
                events.append(event)
                await event.save()

        publication_1 = publication_model_generator(
            event_id=event_1.id, publisher_id=publisher_1.id
        )
        publication_2 = publication_model_generator(
            event_id=event_1.id,
            publisher_id=publisher_2.id,
            status=PublicationStatus.COMPLETED,
        )
        publication_3 = publication_model_generator(
            event_id=event_2.id,
            publisher_id=publisher_1.id,
            status=PublicationStatus.FAILED,
        )
        publication_4 = publication_model_generator(
            event_id=event_3.id,
            publisher_id=publisher_2.id,
            status=PublicationStatus.WAITING,
        )
        publication_5 = publication_model_generator(
            event_id=event_4.id,
            publisher_id=publisher_2.id,
            status=PublicationStatus.COMPLETED,
        )
        await publication_1.save()
        await publication_2.save()
        await publication_3.save()
        await publication_4.save()
        await publication_5.save()
        return (
            [event_1, event_2, event_3, event_4],
            [publication_1, publication_2, publication_3, publication_4, publication_5],
            [publisher_1, publisher_2],
            today,
        )
        if "publications" in specification.keys():
            for i in range(len(specification["publications"])):
                await Publication.create(
                    id=UUID(int=i),
                    status=specification["publications"][i].get(
                        "status", PublicationStatus.WAITING
                    ),
                    timestamp=specification["publications"][i].get(
                        "timestamp", today + timedelta(hours=i)
                    ),
                    event_id=events[specification["publications"][i]["event_idx"]].id,
                    publisher_id=publishers[
                        specification["publications"][i]["publisher_idx"]
                    ].id,
                )

    return _setup
    return _generate_models


@pytest.mark.asyncio
async def test_get_published_events(
    publisher_model_generator, publication_model_generator, event_model_generator, setup
):
    events, publications, publishers, today = await setup(
        publisher_model_generator, publication_model_generator, event_model_generator
    )

@pytest.mark.parametrize(
    "specification,expected_result",
    [
        [
            complete_specification,
            [
                MobilizonEvent(
                    name="event_3",
                    description="desc_3",
                    mobilizon_id="mobid_3",
                    mobilizon_link="moblink_3",
                    thumbnail_link="thumblink_3",
                    location="loc_3",
                    publication_time={
                        "publisher_2": arrow.get(today + timedelta(hours=6)),
                    },
                    status=EventPublicationStatus.COMPLETED,
                    begin_datetime=arrow.get(today + timedelta(days=3)),
                    end_datetime=arrow.get(
                        today + timedelta(days=3) + timedelta(hours=2)
                    ),
                )
            ],
        ]
    ],
)
async def test_get_published_events(specification, expected_result, generate_models):
    await generate_models(specification)
    published_events = list(await get_published_events())
    assert len(published_events) == 1

    assert published_events[0].mobilizon_id == events[3].mobilizon_id

    assert published_events[0].begin_datetime == arrow.get(today + timedelta(days=-4))
    assert len(published_events) == len(expected_result)
    assert published_events == expected_result


@pytest.mark.asyncio
async def test_get_unpublished_events(
    publisher_model_generator, publication_model_generator, event_model_generator, setup
):
    events, publications, publishers, today = await setup(
        publisher_model_generator, publication_model_generator, event_model_generator
    )

@pytest.mark.parametrize(
    "specification,expected_result",
    [
        [
            complete_specification,
            [
                MobilizonEvent(
                    name="event_1",
                    description="desc_1",
                    mobilizon_id="mobid_1",
                    mobilizon_link="moblink_1",
                    thumbnail_link="thumblink_1",
                    location="loc_1",
                    status=EventPublicationStatus.WAITING,
                    begin_datetime=arrow.get(today + timedelta(days=1)),
                    end_datetime=arrow.get(
                        today + timedelta(days=1) + timedelta(hours=2)
                    ),
                ),
            ],
        ]
    ],
)
async def test_get_unpublished_events(specification, expected_result, generate_models):
    await generate_models(specification)
    unpublished_events = list(await get_unpublished_events())
    assert len(unpublished_events) == 1

    assert unpublished_events[0].mobilizon_id == events[2].mobilizon_id
    assert unpublished_events[0].begin_datetime == events[2].begin_datetime
    assert len(unpublished_events) == len(expected_result)
    assert unpublished_events == expected_result


@pytest.mark.asyncio
@pytest.mark.parametrize(
    "specification,expected_result",
    [
        [
            complete_specification,
            [
                Event(
                    name="event_1",
                    description="desc_1",
                    mobilizon_id="mobid_1",
                    mobilizon_link="moblink_1",
                    thumbnail_link="thumblink_1",
                    location="loc_1",
                    begin_datetime=today + timedelta(days=1),
                    end_datetime=today + timedelta(days=1) + timedelta(hours=2),
                ),
                Event(
                    name="test event",
                    description="description of the event",
                    mobilizon_id="12345",
                    mobilizon_link="http://some_link.com/123",
                    thumbnail_link="http://some_link.com/123.jpg",
                    location="location",
                    begin_datetime=today + timedelta(days=6),
                    end_datetime=today + timedelta(days=6) + timedelta(hours=2),
                ),
                Event(
                    name="test event",
                    description="description of the event",
                    mobilizon_id="67890",
                    mobilizon_link="http://some_link.com/123",
                    thumbnail_link="http://some_link.com/123.jpg",
                    location="location",
                    begin_datetime=today + timedelta(days=12),
                    end_datetime=today + timedelta(days=12) + timedelta(hours=2),
                ),
            ],
        ]
    ],
)
async def test_create_unpublished_events(
    publisher_model_generator,
    publication_model_generator,
    event_model_generator,
    specification,
    expected_result,
    generate_models,
    event_generator,
    setup,
):
    events, publications, publishers, today = await setup(
        publisher_model_generator, publication_model_generator, event_model_generator
    )
    await generate_models(specification)

    event_4 = event_generator(begin_date=arrow.get(today + timedelta(days=6)))
    event_5 = event_generator(
    event_3 = event_generator(begin_date=arrow.get(today + timedelta(days=6)))
    event_4 = event_generator(
        begin_date=arrow.get(today + timedelta(days=12)), mobilizon_id="67890"
    )
    models = await prefetch_event_relations(Event.filter(name="event_1"))

    await events[0].fetch_related("publications")
    await events[0].fetch_related("publications__publisher")
    events_from_internet = [MobilizonEvent.from_model(events[0]), event_4, event_5]
    events_from_internet = [MobilizonEvent.from_model(models[0]), event_3, event_4]

    await create_unpublished_events(
        unpublished_mobilizon_events=events_from_internet,
        active_publishers=["publisher_1", "publisher_2"],
        active_publishers=["publisher_0", "publisher_1", "publisher_2"],
    )
    unpublished_events = list(await get_unpublished_events())

    assert len(unpublished_events) == 4
    assert unpublished_events[0].mobilizon_id == "mobid_3"
    assert unpublished_events[1].mobilizon_id == "mobid_1"
    assert unpublished_events[2].mobilizon_id == "12345"
    assert unpublished_events[3].mobilizon_id == "67890"
    assert len(unpublished_events) == 3
    assert unpublished_events[0].mobilizon_id == unpublished_events[0].mobilizon_id
    assert unpublished_events[1].mobilizon_id == unpublished_events[1].mobilizon_id
    assert unpublished_events[2].mobilizon_id == unpublished_events[2].mobilizon_id


@pytest.mark.asyncio
@pytest.mark.parametrize(
    "specification",
    [
        complete_specification,
    ],
)
async def test_get_mobilizon_event_publications(specification, generate_models):
    await generate_models(specification)

    models = await prefetch_event_relations(Event.filter(name="event_0"))
    mobilizon_event = MobilizonEvent.from_model(models[0])

    publications = list(await get_mobilizon_event_publications(mobilizon_event))
    for pub in publications:
        await pub.fetch_related("event")
        await pub.fetch_related("publisher")

    assert len(publications) == 2

    assert publications[0].event.name == "event_0"
    assert publications[0].publisher.name == "publisher_0"
    assert publications[0].status == PublicationStatus.WAITING

    assert publications[1].event.name == "event_0"
    assert publications[1].publisher.name == "publisher_1"
    assert publications[1].status == PublicationStatus.COMPLETED


@pytest.mark.asyncio
@pytest.mark.parametrize(
    "specification,name,expected_result",
    [
        [
            complete_specification,
            None,
            {
                "publisher_0",
                "publisher_1",
                "publisher_2",
            },
        ],
        [
            complete_specification,
            "publisher_0",
            {"publisher_0"},
        ],
    ],
)
async def test_get_publishers(
    specification,
    name,
    expected_result,
    generate_models,
):
    await generate_models(specification)
    result = await get_publishers(name)

    if type(result) == list:
        publishers = set(p.name for p in result)
    else:
        publishers = {result.name}

    assert len(publishers) == len(expected_result)
    assert publishers == expected_result


@pytest.mark.asyncio
@pytest.mark.parametrize(
    "specification,status,mobilizon_id,from_date,to_date,expected_result",
    [
        [
            complete_specification,
            PublicationStatus.WAITING,
            None,
            None,
            None,
            [
                Publication(
                    id=UUID(int=0),
                    status=PublicationStatus.WAITING,
                    timestamp=today + timedelta(hours=0),
                    event_id=UUID(int=0),
                    publisher_id=UUID(int=0),
                ),
                Publication(
                    id=UUID(int=2),
                    status=PublicationStatus.WAITING,
                    timestamp=today + timedelta(hours=2),
                    event_id=UUID(int=0),
                    publisher_id=UUID(int=1),
                ),
                Publication(
                    id=UUID(int=3),
                    status=PublicationStatus.WAITING,
                    timestamp=today + timedelta(hours=3),
                    event_id=UUID(int=1),
                    publisher_id=UUID(int=1),
                ),
            ],
        ],
        [
            complete_specification,
            PublicationStatus.WAITING,
            "mobid_1",
            None,
            None,
            [
                Publication(
                    id=UUID(int=2),
                    status=PublicationStatus.COMPLETED,
                    timestamp=today + timedelta(hours=2),
                    event_id=UUID(int=1),
                    publisher_id=UUID(int=1),
                ),
                Publication(
                    id=UUID(int=3),
                    status=PublicationStatus.WAITING,
                    timestamp=today + timedelta(hours=5),
                    event_id=UUID(int=1),
                    publisher_id=UUID(int=1),
                ),
            ],
        ],
        [
            complete_specification,
            PublicationStatus.WAITING,
            None,
            arrow.get(today + timedelta(hours=-1)),
            arrow.get(today + timedelta(hours=1)),
            [
                Publication(
                    id=UUID(int=0),
                    status=PublicationStatus.WAITING,
                    timestamp=today + timedelta(hours=0),
                    event_id=UUID(int=0),
                    publisher_id=UUID(int=0),
                ),
            ],
        ],
        [
            complete_specification,
            PublicationStatus.WAITING,
            None,
            arrow.get(today + timedelta(hours=1)),
            None,
            [
                Publication(
                    id=UUID(int=2),
                    status=PublicationStatus.WAITING,
                    timestamp=today + timedelta(hours=2),
                    event_id=UUID(int=0),
                    publisher_id=UUID(int=1),
                ),
                Publication(
                    id=UUID(int=3),
                    status=PublicationStatus.WAITING,
                    timestamp=today + timedelta(hours=5),
                    event_id=UUID(int=1),
                    publisher_id=UUID(int=1),
                ),
            ],
        ],
        [
            complete_specification,
            PublicationStatus.WAITING,
            None,
            None,
            arrow.get(today + timedelta(hours=1)),
            [
                Publication(
                    id=UUID(int=0),
                    status=PublicationStatus.WAITING,
                    timestamp=today + timedelta(hours=0),
                    event_id=UUID(int=0),
                    publisher_id=UUID(int=0),
                ),
            ],
        ],
    ],
)
async def test_publications_with_status(
    specification,
    status,
    mobilizon_id,
    from_date,
    to_date,
    expected_result,
    generate_models,
):
    await generate_models(specification)
    publications = await publications_with_status(
        status=status,
        event_mobilizon_id=mobilizon_id,
        from_date=from_date,
        to_date=to_date,
    )

    assert len(publications) == len(expected_result)
    assert publications == expected_result


@pytest.mark.asyncio
@pytest.mark.parametrize(
    "specification,names,expected_result",
    [
        [
            two_publishers_specification,
            ["publisher_0", "publisher_1"],
            {
                Publisher(id=UUID(int=0), name="publisher_0"),
                Publisher(id=UUID(int=1), name="publisher_1"),
            },
        ],
        [
            {"publisher": 0},
            ["publisher_0", "publisher_1"],
            {"publisher_0", "publisher_1"},
        ],
        [
            two_publishers_specification,
            ["publisher_0", "publisher_2", "publisher_3"],
            {"publisher_0", "publisher_1", "publisher_2", "publisher_3"},
        ],
    ],
)
async def test_update_publishers(
    specification,
    names,
    expected_result,
    generate_models,
):
    await generate_models(specification)
    await update_publishers(names)
    if type(list(expected_result)[0]) == Publisher:
        publishers = set(await get_publishers())
    else:
        publishers = set(p.name for p in await get_publishers())

    assert len(publishers) == len(expected_result)
    assert publishers == expected_result


@pytest.mark.asyncio
@pytest.mark.parametrize(
    "specification,report,event,expected_result",
    [
        [
            complete_specification,
            PublisherCoordinatorReport(
                reports={
                    UUID(int=2): PublicationReport(
                        status=PublicationStatus.FAILED, reason="Invalid credentials"
                    ),
                    UUID(int=3): PublicationReport(
                        status=PublicationStatus.COMPLETED, reason=""
                    ),
                }
            ),
            MobilizonEvent(
                name="event_1",
                description="desc_1",
                mobilizon_id="mobid_1",
                mobilizon_link="moblink_1",
                thumbnail_link="thumblink_1",
                location="loc_1",
                status=EventPublicationStatus.WAITING,
                begin_datetime=arrow.get(today + timedelta(days=1)),
                end_datetime=arrow.get(today + timedelta(days=1) + timedelta(hours=2)),
            ),
            {
                UUID(int=2): Publication(
                    id=UUID(int=2),
                    status=PublicationStatus.FAILED,
                    reason="Invalid credentials",
                ),
                UUID(int=3): Publication(
                    id=UUID(int=0), status=PublicationStatus.COMPLETED, reason=""
                ),
            },
        ],
    ],
)
async def test_save_publication_report(
    specification,
    report,
    event,
    expected_result,
    generate_models,
):
    await generate_models(specification)
    await save_publication_report(report, event)
    publication_ids = set(report.reports.keys())
    publications = {
        p_id: await Publication.filter(id=p_id).first() for p_id in publication_ids
    }

    assert len(publications) == len(expected_result)
    for i in publication_ids:
        assert publications[i].status == expected_result[i].status
        assert publications[i].reason == expected_result[i].reason
        assert publications[i].timestamp