Feature/optimizing download (#118)

* add column last_update_time

* save last_update_time in event db record

* use id + updatedAt for comparison instead of mobilizon_id alone, so that updated events are treated like new ones (see the sketch after this list)

* rework event selection/comparison so that unpublished events with updates are included among the ones that need to be saved

* add update handling for unpublished events

* tests: test_update: Add create_unpublished_events tests.

* Move `MobilizonEvent.to_model` to `storage.query`

* Move `MobilizonEvent.from_model` to `storage.query`

* Move `MobilizonEvent.compute_status` to `storage.query`

* Move `publishers.exception.EventNotFound` to `storage.query.exceptions`
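
The rule these bullets describe, spelled out as a hypothetical helper (needs_save is not part of the commit; the timestamps are the ones used by the test fixtures further down):

import arrow

def needs_save(remote_updated_at: arrow.Arrow, stored_updated_at: arrow.Arrow) -> bool:
    # An event downloaded from Mobilizon is (re)saved when its updatedAt
    # is strictly newer than the copy already stored locally.
    return remote_updated_at > stored_updated_at

# a remote edit made after the local snapshot triggers a save
assert needs_save(arrow.get("2021-05-25T15:15:00Z"), arrow.get("2021-05-23T12:15:00Z"))
# an identical timestamp means the stored row is left untouched
assert not needs_save(arrow.get("2021-05-23T12:15:00Z"), arrow.get("2021-05-23T12:15:00Z"))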
magowiz 2022-02-09 00:54:56 +01:00 committed by GitHub
parent eeb9b04e3e
commit 45802ecbdd
28 changed files with 392 additions and 351 deletions

View File

@ -1,4 +1,4 @@
[aerich]
tortoise_orm = mobilizon_reshare.storage.db.TORTOISE_ORM
tortoise_orm = storage.db.TORTOISE_ORM
location = ./migrations
src_folder = ./.

View File

@ -3,6 +3,7 @@ import click
from mobilizon_reshare.event.event import MobilizonEvent
from mobilizon_reshare.models.event import Event
from mobilizon_reshare.publishers.platforms.platform_mapping import get_formatter_class
from mobilizon_reshare.storage.query import from_model
async def format_event(event_id, publisher_name: str):
@ -12,6 +13,6 @@ async def format_event(event_id, publisher_name: str):
if not event:
click.echo(f"Event with mobilizon_id {event_id} not found.")
return
event = MobilizonEvent.from_model(event)
event = from_model(event)
message = get_formatter_class(publisher_name)().get_message_from_event(event)
click.echo(message)

View File

@ -1,15 +1,11 @@
from dataclasses import dataclass, asdict
from enum import IntEnum
from typing import Optional, Set
from typing import Optional
from uuid import UUID
import arrow
import tortoise.timezone
from jinja2 import Template
from mobilizon_reshare.models.event import Event
from mobilizon_reshare.models.publication import PublicationStatus, Publication
class EventPublicationStatus(IntEnum):
WAITING = 1
@ -28,6 +24,7 @@ class MobilizonEvent:
end_datetime: arrow.Arrow
mobilizon_link: str
mobilizon_id: UUID
last_update_time: arrow.Arrow
thumbnail_link: Optional[str] = None
location: Optional[str] = None
publication_time: Optional[dict[str, arrow.Arrow]] = None
@ -50,62 +47,3 @@ class MobilizonEvent:
def format(self, pattern: Template) -> str:
return self._fill_template(pattern)
def to_model(self) -> Event:
return Event(
name=self.name,
description=self.description,
mobilizon_id=self.mobilizon_id,
mobilizon_link=self.mobilizon_link,
thumbnail_link=self.thumbnail_link,
location=self.location,
begin_datetime=self.begin_datetime.astimezone(self.begin_datetime.tzinfo),
end_datetime=self.end_datetime.astimezone(self.end_datetime.tzinfo),
)
@staticmethod
def compute_status(publications: list[Publication]) -> EventPublicationStatus:
if not publications:
return EventPublicationStatus.WAITING
unique_statuses: Set[PublicationStatus] = set(
pub.status for pub in publications
)
if unique_statuses == {
PublicationStatus.COMPLETED,
PublicationStatus.FAILED,
}:
return EventPublicationStatus.PARTIAL
elif len(unique_statuses) == 1:
return EventPublicationStatus[unique_statuses.pop().name]
raise ValueError(f"Illegal combination of PublicationStatus: {unique_statuses}")
@staticmethod
def from_model(event: Event, tz: str = "UTC"):
publication_status = MobilizonEvent.compute_status(list(event.publications))
publication_time = {}
for pub in event.publications:
if publication_status != EventPublicationStatus.WAITING:
assert pub.timestamp is not None
publication_time[pub.publisher.name] = arrow.get(
tortoise.timezone.localtime(value=pub.timestamp, timezone=tz)
).to("local")
return MobilizonEvent(
name=event.name,
description=event.description,
begin_datetime=arrow.get(
tortoise.timezone.localtime(value=event.begin_datetime, timezone=tz)
).to("local"),
end_datetime=arrow.get(
tortoise.timezone.localtime(value=event.end_datetime, timezone=tz)
).to("local"),
mobilizon_link=event.mobilizon_link,
mobilizon_id=event.mobilizon_id,
thumbnail_link=event.thumbnail_link,
location=event.location,
publication_time=publication_time,
status=publication_status,
)
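
Since last_update_time is now a required field of the dataclass, building a MobilizonEvent by hand has to supply it; a minimal sketch with illustrative values (the link and id are made up):

import arrow
from uuid import UUID

from mobilizon_reshare.event.event import MobilizonEvent

event = MobilizonEvent(
    name="example event",
    description="an example description",
    begin_datetime=arrow.get("2021-06-01T10:00:00Z"),
    end_datetime=arrow.get("2021-06-01T12:00:00Z"),
    mobilizon_link="https://mobilizon.example/events/1234",
    mobilizon_id=UUID(int=1),
    last_update_time=arrow.get("2021-05-23T12:15:00Z"),  # now mandatory
)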

View File

@ -5,7 +5,7 @@ from mobilizon_reshare.publishers.coordinator import (
PublisherCoordinator,
PublicationFailureNotifiersCoordinator,
)
from mobilizon_reshare.publishers.exceptions import EventNotFound
from mobilizon_reshare.storage.query.exceptions import EventNotFound
from mobilizon_reshare.storage.query.read import get_failed_publications_for_event
from mobilizon_reshare.storage.query.write import save_publication_report

View File

@ -1,7 +1,7 @@
import logging.config
from mobilizon_reshare.event.event_selection_strategies import select_event_to_publish
from mobilizon_reshare.mobilizon.events import get_unpublished_events
from mobilizon_reshare.mobilizon.events import get_mobilizon_future_events
from mobilizon_reshare.publishers.coordinator import (
PublicationFailureNotifiersCoordinator,
)
@ -24,22 +24,15 @@ async def start():
:return:
"""
# TODO: the logic to get published and unpublished events is probably redundant.
# We need a simpler way to bring together events from mobilizon, unpublished events from the db
# and published events from the DB
# Load past events
published_events = list(await get_published_events())
# Pull unpublished events from Mobilizon
unpublished_events = get_unpublished_events(published_events)
# Pull future events from Mobilizon
future_events = get_mobilizon_future_events()
# Store in the DB only the ones we didn't know about
db_unpublished_events = await create_unpublished_events(unpublished_events)
events_without_publications = await create_unpublished_events(future_events)
event = select_event_to_publish(
published_events,
list(await get_published_events()),
# We must load unpublished events from DB since it contains
# merged state between Mobilizon and previous WAITING events.
db_unpublished_events,
events_without_publications,
)
if event:
@ -51,6 +44,8 @@ async def start():
await save_publication_report(reports)
for report in reports.reports:
if not report.succesful:
PublicationFailureNotifiersCoordinator(report,).notify_failure()
PublicationFailureNotifiersCoordinator(
report,
).notify_failure()
else:
logger.info("No event to publish found")

View File

@ -0,0 +1,4 @@
-- upgrade --
ALTER TABLE "event" ADD "last_update_time" TIMESTAMP NOT NULL;
-- downgrade --
ALTER TABLE "event" DROP COLUMN "last_update_time";

View File

@ -44,6 +44,7 @@ def parse_event(data):
location=parse_location(data),
publication_time=None,
status=EventPublicationStatus.WAITING,
last_update_time=arrow.get(data["updatedAt"]) if "updatedAt" in data else None,
)
@ -53,6 +54,7 @@ query_future_events = """{{
elements {{
title,
url,
updatedAt,
beginsOn,
endsOn,
options {{
@ -76,20 +78,6 @@ query_future_events = """{{
}}"""
def get_unpublished_events(published_events: List[MobilizonEvent]):
# I take all the future events
future_events = get_mobilizon_future_events()
# I get the ids of all the published events coming from the DB
published_events_id = set(map(lambda x: x.mobilizon_id, published_events))
# I keep the future events only the ones that haven't been published
# Note: some events might exist in the DB and be unpublished. Here they should be ignored because the information
# in the DB might be old and the event might have been updated.
# We assume the published_events list doesn't contain such events.
return list(
filter(lambda x: x.mobilizon_id not in published_events_id, future_events)
)
def get_mobilizon_future_events(
page: int = 1, from_date: Optional[arrow.Arrow] = None
) -> List[MobilizonEvent]:
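
The value feeding last_update_time is the raw updatedAt string from the GraphQL response; a small sketch of how parse_event treats it (the dict mirrors the fixtures used in the tests below):

import arrow

data = {"updatedAt": "2021-05-23T12:15:00Z"}
last_update_time = arrow.get(data["updatedAt"]) if "updatedAt" in data else None

# arrow normalizes the trailing Z into an explicit UTC offset
assert last_update_time == arrow.get("2021-05-23T12:15:00+00:00")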

View File

@ -18,6 +18,7 @@ class Event(Model):
begin_datetime = fields.DatetimeField()
end_datetime = fields.DatetimeField()
last_update_time = fields.DatetimeField()
publications: fields.ReverseRelation["Publication"]

View File

@ -1,44 +1,40 @@
class PublisherError(Exception):
""" Generic publisher error """
"""Generic publisher error"""
pass
class InvalidAttribute(PublisherError):
""" Publisher defined with invalid or missing attribute """
"""Publisher defined with invalid or missing attribute"""
class InvalidBot(PublisherError):
""" Publisher refers to the wrong service bot """
"""Publisher refers to the wrong service bot"""
class InvalidCredentials(PublisherError):
""" Publisher cannot validate credentials """
"""Publisher cannot validate credentials"""
class InvalidEvent(PublisherError):
""" Publisher cannot validate events """
"""Publisher cannot validate events"""
class InvalidMessage(PublisherError):
""" Publisher cannot validate message """
"""Publisher cannot validate message"""
class InvalidResponse(PublisherError):
""" Publisher receives an invalid response from its service """
"""Publisher receives an invalid response from its service"""
class InvalidSettings(PublisherError):
""" Publisher settings are either missing or badly configured """
"""Publisher settings are either missing or badly configured"""
class ZulipError(PublisherError):
""" Publisher receives an error response from Zulip"""
"""Publisher receives an error response from Zulip"""
class HTTPResponseError(PublisherError):
""" Publisher receives a HTTP error"""
class EventNotFound(Exception):
""" Event is not present in the database """
"""Publisher receives a HTTP error"""

View File

@ -56,7 +56,6 @@ class MoReDB:
self.path.parent.mkdir(parents=True, exist_ok=True)
async def _implement_db_changes(self):
logging.info("Updating database to latest version")
migration_queries_location = pkg_resources.resource_filename(
"mobilizon_reshare", "migrations"
)
@ -66,7 +65,9 @@ class MoReDB:
location=migration_queries_location,
)
await command.init()
await command.upgrade()
migrations = await command.upgrade()
if migrations:
logging.warning("Updated database to latest version")
async def setup(self):
await self._implement_db_changes()

View File

@ -1,3 +1,79 @@
import sys
from typing import Optional
from uuid import UUID
import arrow
import tortoise.timezone
from mobilizon_reshare.event.event import MobilizonEvent, EventPublicationStatus
from mobilizon_reshare.models.event import Event
from mobilizon_reshare.models.publication import PublicationStatus, Publication
CONNECTION_NAME = "models" if "pytest" in sys.modules else None
def to_model(event: MobilizonEvent, db_id: Optional[UUID] = None) -> Event:
kwargs = {
"name": event.name,
"description": event.description,
"mobilizon_id": event.mobilizon_id,
"mobilizon_link": event.mobilizon_link,
"thumbnail_link": event.thumbnail_link,
"location": event.location,
"begin_datetime": event.begin_datetime.astimezone(event.begin_datetime.tzinfo),
"end_datetime": event.end_datetime.astimezone(event.end_datetime.tzinfo),
"last_update_time": event.last_update_time.astimezone(
event.last_update_time.tzinfo
),
}
if db_id is not None:
kwargs.update({"id": db_id})
return Event(**kwargs)
def from_model(event: Event, tz: str = "UTC"):
publication_status = compute_status(list(event.publications))
publication_time = {}
for pub in event.publications:
if publication_status != EventPublicationStatus.WAITING:
assert pub.timestamp is not None
publication_time[pub.publisher.name] = arrow.get(
tortoise.timezone.localtime(value=pub.timestamp, timezone=tz)
).to("local")
return MobilizonEvent(
name=event.name,
description=event.description,
begin_datetime=arrow.get(
tortoise.timezone.localtime(value=event.begin_datetime, timezone=tz)
).to("local"),
end_datetime=arrow.get(
tortoise.timezone.localtime(value=event.end_datetime, timezone=tz)
).to("local"),
mobilizon_link=event.mobilizon_link,
mobilizon_id=event.mobilizon_id,
thumbnail_link=event.thumbnail_link,
location=event.location,
publication_time=publication_time,
status=publication_status,
last_update_time=arrow.get(
tortoise.timezone.localtime(value=event.last_update_time, timezone=tz)
).to("local"),
)
def compute_status(publications: list[Publication]) -> EventPublicationStatus:
if not publications:
return EventPublicationStatus.WAITING
unique_statuses: set[PublicationStatus] = set(pub.status for pub in publications)
if unique_statuses == {
PublicationStatus.COMPLETED,
PublicationStatus.FAILED,
}:
return EventPublicationStatus.PARTIAL
elif len(unique_statuses) == 1:
return EventPublicationStatus[unique_statuses.pop().name]
raise ValueError(f"Illegal combination of PublicationStatus: {unique_statuses}")
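
A quick usage sketch of the relocated compute_status helper (the publication objects are stand-ins that only carry the status attribute the function reads):

from types import SimpleNamespace

from mobilizon_reshare.event.event import EventPublicationStatus
from mobilizon_reshare.models.publication import PublicationStatus
from mobilizon_reshare.storage.query import compute_status

# no publications at all: the event is still WAITING
assert compute_status([]) == EventPublicationStatus.WAITING

# a mix of COMPLETED and FAILED publications: the event is PARTIAL
mixed = [
    SimpleNamespace(status=PublicationStatus.COMPLETED),
    SimpleNamespace(status=PublicationStatus.FAILED),
]
assert compute_status(mixed) == EventPublicationStatus.PARTIAL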

View File

@ -0,0 +1,6 @@
class EventNotFound(Exception):
"""Event is not present in the database"""
class DuplicateEvent(ValueError):
"""A duplicate mobilizon_id has been found in the database"""

View File

@ -10,10 +10,11 @@ from tortoise.transactions import atomic
from mobilizon_reshare.event.event import MobilizonEvent, EventPublicationStatus
from mobilizon_reshare.models.event import Event
from mobilizon_reshare.models.publication import Publication, PublicationStatus
from mobilizon_reshare.models.publisher import Publisher
from mobilizon_reshare.publishers import get_active_publishers
from mobilizon_reshare.publishers.abstract import EventPublication
from mobilizon_reshare.publishers.exceptions import EventNotFound
from mobilizon_reshare.storage.query import CONNECTION_NAME
from mobilizon_reshare.storage.query.exceptions import EventNotFound, DuplicateEvent
from mobilizon_reshare.storage.query import CONNECTION_NAME, from_model, compute_status
async def get_published_events(
@ -42,13 +43,13 @@ async def events_with_status(
def _filter_event_with_status(event: Event) -> bool:
# This computes the status client-side instead of running in the DB. It shouldn't pose a performance problem
# in the short term, but should be moved to the query if possible.
event_status = MobilizonEvent.compute_status(list(event.publications))
event_status = compute_status(list(event.publications))
return event_status in status
query = Event.all()
return map(
MobilizonEvent.from_model,
from_model,
filter(
_filter_event_with_status,
await prefetch_event_relations(
@ -70,7 +71,7 @@ async def get_all_events(
from_date: Optional[Arrow] = None, to_date: Optional[Arrow] = None,
) -> Iterable[MobilizonEvent]:
return map(
MobilizonEvent.from_model,
from_model,
await prefetch_event_relations(
_add_date_window(Event.all(), "begin_datetime", from_date, to_date)
),
@ -134,36 +135,34 @@ async def events_without_publications(
events = await prefetch_event_relations(
_add_date_window(query, "begin_datetime", from_date, to_date)
)
return list(map(MobilizonEvent.from_model, events))
return list(map(from_model, events))
def _remove_duplicated_events(events: list[MobilizonEvent]) -> list[MobilizonEvent]:
"""Remove duplicates based on mobilizon_id"""
result = []
seen_ids = set()
for event in events:
if event.mobilizon_id not in seen_ids:
result.append(event)
seen_ids.add(event.mobilizon_id)
return result
async def get_unpublished_events(
unpublished_mobilizon_events: Iterable[MobilizonEvent],
) -> list[MobilizonEvent]:
"""
Returns all the unpublished events, removing duplicates that are present both in the DB and in the mobilizon query
"""
db_unpublished_events = await events_without_publications()
all_unpublished_events = list(unpublished_mobilizon_events) + list(
db_unpublished_events
async def get_event(event_mobilizon_id: UUID) -> Event:
events = await prefetch_event_relations(
Event.filter(mobilizon_id=event_mobilizon_id)
)
return _remove_duplicated_events(all_unpublished_events)
if not events:
raise EventNotFound(f"No event with mobilizon_id {event_mobilizon_id} found.")
return events[0]
async def get_publisher_by_name(name) -> Publisher:
return await Publisher.filter(name=name).first()
async def is_known(event: MobilizonEvent) -> bool:
try:
await get_event(event.mobilizon_id)
return True
except EventNotFound:
return False
@atomic(CONNECTION_NAME)
async def build_publications(event: MobilizonEvent) -> list[EventPublication]:
event_model = await Event.filter(mobilizon_id=event.mobilizon_id).first()
event_model = await get_event(event.mobilizon_id)
models = [
await event_model.build_publication_by_publisher_name(name)
for name in get_active_publishers()
@ -174,16 +173,9 @@ async def build_publications(event: MobilizonEvent) -> list[EventPublication]:
@atomic(CONNECTION_NAME)
async def get_event(event_mobilizon_id) -> None:
event = await Event.filter(mobilizon_id=event_mobilizon_id).first()
if not event:
raise EventNotFound(f"No event with mobilizon_id {event_mobilizon_id} found.")
await event.fetch_related("publications")
return event
@atomic(CONNECTION_NAME)
async def get_failed_publications_for_event(event_mobilizon_id):
async def get_failed_publications_for_event(
event_mobilizon_id: UUID,
) -> list[MobilizonEvent]:
event = await get_event(event_mobilizon_id)
failed_publications = list(
filter(

View File

@ -9,10 +9,20 @@ from mobilizon_reshare.models.event import Event
from mobilizon_reshare.models.publication import Publication
from mobilizon_reshare.models.publisher import Publisher
from mobilizon_reshare.publishers.coordinator import PublisherCoordinatorReport
from mobilizon_reshare.storage.query import CONNECTION_NAME
from mobilizon_reshare.storage.query.read import events_without_publications
from mobilizon_reshare.storage.query import CONNECTION_NAME, to_model
from mobilizon_reshare.storage.query.read import (
events_without_publications,
is_known,
get_publisher_by_name,
get_event,
)
async def create_publisher(name: str, account_ref: Optional[str] = None) -> None:
await Publisher.create(name=name, account_ref=account_ref)
@atomic(CONNECTION_NAME)
async def upsert_publication(publication_report, event):
publisher = await get_publisher_by_name(
@ -59,40 +69,33 @@ async def create_unpublished_events(
events_from_mobilizon: Iterable[MobilizonEvent],
) -> list[MobilizonEvent]:
"""
Compute the difference between remote and local events and store it.
Computes the difference between remote and local events and stores it.
Returns the unpublished events merged state.
"""
# We store only new events, i.e. events whose mobilizon_id wasn't found in the DB.
unpublished_events = await events_without_publications()
known_event_mobilizon_ids = set(
map(lambda event: event.mobilizon_id, unpublished_events)
)
new_unpublished_events = list(
filter(
lambda event: event.mobilizon_id not in known_event_mobilizon_ids,
events_from_mobilizon,
)
)
for event in new_unpublished_events:
await event.to_model().save()
# There are three cases:
for event in events_from_mobilizon:
if not await is_known(event):
# Either an event is unknown
await to_model(event).save()
else:
# Or it's known and changed
event_model = await get_event(event.mobilizon_id)
if event.last_update_time > event_model.last_update_time:
await to_model(event=event, db_id=event_model.id).save(
force_update=True
)
# Or it's known and unchanged, in which case we do nothing.
return await events_without_publications()
async def create_publisher(name: str, account_ref: Optional[str] = None) -> None:
await Publisher.create(name=name, account_ref=account_ref)
@atomic(CONNECTION_NAME)
async def update_publishers(names: Iterable[str],) -> None:
async def update_publishers(
names: Iterable[str],
) -> None:
names = set(names)
known_publisher_names = set(p.name for p in await Publisher.all())
for name in names.difference(known_publisher_names):
logging.info(f"Creating {name} publisher")
await create_publisher(name)
async def get_publisher_by_name(name):
return await Publisher.filter(name=name).first()
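
Putting the write path together: the caller only hands the freshly downloaded events to create_unpublished_events and gets the merged unpublished state back; a sketch of the intended usage (mirroring what start() does above, with the database already initialized):

from mobilizon_reshare.mobilizon.events import get_mobilizon_future_events
from mobilizon_reshare.storage.query.write import create_unpublished_events

async def refresh_unpublished_events():
    # download every future event from Mobilizon...
    future_events = get_mobilizon_future_events()
    # ...and let the query decide, per event, whether to insert it,
    # force-update it (newer last_update_time) or leave it alone,
    # returning the merged list of unpublished events
    return await create_unpublished_events(future_events)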

View File

@ -24,6 +24,7 @@ def simple_event_element():
"title": "test event",
"url": "https://some_mobilizon/events/1e2e5943-4a5c-497a-b65d-90457b715d7b",
"uuid": str(uuid.uuid4()),
"updatedAt": "2021-05-23T12:15:00Z",
}

View File

@ -7,13 +7,14 @@ from mobilizon_reshare.publishers.platforms.platform_mapping import (
get_formatter_class,
name_to_formatter_class,
)
from mobilizon_reshare.storage.query import to_model
@pytest.mark.timezone_sensitive
@pytest.mark.parametrize("publisher_name", name_to_formatter_class.keys())
@pytest.mark.asyncio
async def test_format_event(runner, event, capsys, publisher_name):
event_model = event.to_model()
event_model = to_model(event)
await event_model.save()
await format_event(
event_id=str(event_model.mobilizon_id), publisher_name=publisher_name

View File

@ -4,9 +4,10 @@ from logging import DEBUG, INFO
import arrow
import pytest
from mobilizon_reshare.storage.query import to_model, from_model
from mobilizon_reshare.storage.query.read import get_all_events
from tests.commands.conftest import simple_event_element
from mobilizon_reshare.event.event import MobilizonEvent, EventPublicationStatus
from mobilizon_reshare.event.event import EventPublicationStatus
from mobilizon_reshare.main.start import start
from mobilizon_reshare.models.event import Event
from mobilizon_reshare.models.publication import PublicationStatus
@ -75,7 +76,7 @@ async def test_start_new_event(
# the derived status for the event should be COMPLETED
assert (
MobilizonEvent.from_model(all_events[0]).status
from_model(all_events[0]).status
== EventPublicationStatus.COMPLETED
)
@ -97,7 +98,7 @@ async def test_start_event_from_db(
event_generator,
):
event = event_generator()
event_model = event.to_model()
event_model = to_model(event)
await event_model.save()
with caplog.at_level(DEBUG):
@ -120,10 +121,7 @@ async def test_start_event_from_db(
assert p.status == PublicationStatus.COMPLETED
# the derived status for the event should be COMPLETED
assert (
MobilizonEvent.from_model(event_model).status
== EventPublicationStatus.COMPLETED
)
assert from_model(event_model).status == EventPublicationStatus.COMPLETED
@pytest.mark.asyncio
@ -144,7 +142,7 @@ async def test_start_publisher_failure(
mock_notifier_config,
):
event = event_generator()
event_model = event.to_model()
event_model = to_model(event)
await event_model.save()
with caplog.at_level(DEBUG):
@ -171,17 +169,14 @@ async def test_start_publisher_failure(
for _ in range(2)
] # 2 publications failed * 2 notifiers
# the derived status for the event should be FAILED
assert (
MobilizonEvent.from_model(event_model).status
== EventPublicationStatus.FAILED
)
assert from_model(event_model).status == EventPublicationStatus.FAILED
@pytest.fixture
async def published_event(event_generator):
event = event_generator()
event_model = event.to_model()
event_model = to_model(event)
await event_model.save()
assert await start() is None
await event_model.refresh_from_db()
@ -204,6 +199,7 @@ def second_event_element():
"title": "test event",
"url": "https://some_mobilizon/events/1e2e5943-4a5c-497a-b65d-90457b715d7b",
"uuid": str(uuid.uuid4()),
"updatedAt": "2021-05-23T12:15:00Z",
}

View File

@ -22,6 +22,7 @@ from mobilizon_reshare.publishers.abstract import (
AbstractEventFormatter,
)
from mobilizon_reshare.publishers.exceptions import PublisherError, InvalidResponse
from mobilizon_reshare.storage.query import to_model
from mobilizon_reshare.storage.query.write import get_publisher_by_name
from tests import today
@ -65,6 +66,7 @@ def event_generator():
published=False,
publication_time=None,
mobilizon_id=UUID(int=12345),
last_update_time=arrow.Arrow(year=2021, month=1, day=1, hour=11, minute=30),
):
return MobilizonEvent(
@ -79,6 +81,7 @@ def event_generator():
status=generate_event_status(published),
publication_time=publication_time
or (begin_date.shift(days=-1) if published else None),
last_update_time=last_update_time,
)
return _event_generator
@ -105,12 +108,13 @@ def event() -> MobilizonEvent:
mobilizon_id=UUID(int=12345),
thumbnail_link="http://some_link.com/123.jpg",
location="location",
last_update_time=begin_date,
)
@pytest.fixture
async def stored_event(event):
model = event.to_model()
model = to_model(event)
await model.save()
await model.fetch_related("publications")
return model
@ -155,6 +159,7 @@ def event_model_generator():
location=f"loc_{idx}",
begin_datetime=begin_date,
end_datetime=begin_date + timedelta(hours=2),
last_update_time=begin_date,
)
return _event_model_generator
@ -231,6 +236,7 @@ async def _generate_events(specification):
location=f"loc_{i}",
begin_datetime=begin_date,
end_datetime=begin_date + timedelta(hours=2),
last_update_time=begin_date,
)
events.append(event)
await event.save()

View File

@ -5,19 +5,22 @@ from jinja2 import Template
@pytest.fixture()
def simple_template():
return Template(
"{{name}}|{{description}}|{{location}}|{{begin_datetime.strftime('%Y-%m-%d, %H:%M')}}"
(
"{{name}}|{{description}}|{{location}}|{{begin_datetime.strftime('%Y-%m-%d, %H:%M')}}"
"|{{last_update_time.strftime('%Y-%m-%d, %H:%M')}}"
)
)
def test_fill_template(event, simple_template):
assert (
event._fill_template(simple_template)
== "test event|description of the event|location|2021-01-01, 11:30"
== "test event|description of the event|location|2021-01-01, 11:30|2021-01-01, 11:30"
)
def test_format(event, simple_template):
assert (
event.format(simple_template)
== "test event|description of the event|location|2021-01-01, 11:30"
== "test event|description of the event|location|2021-01-01, 11:30|2021-01-01, 11:30"
)

View File

@ -7,7 +7,6 @@ from mobilizon_reshare.event.event import MobilizonEvent
from mobilizon_reshare.mobilizon.events import (
get_mobilizon_future_events,
MobilizonRequestFailed,
get_unpublished_events,
)
simple_event_element = {
@ -21,6 +20,7 @@ simple_event_element = {
"title": "test event",
"url": "https://some_mobilizon/events/1e2e5943-4a5c-497a-b65d-90457b715d7b",
"uuid": "1e2e5943-4a5c-497a-b65d-90457b715d7b",
"updatedAt": "2021-05-23T12:15:00Z",
}
simple_event_response = {
"data": {"group": {"organizedEvents": {"elements": [simple_event_element]}}}
@ -37,6 +37,7 @@ full_event_element = {
"title": "full event",
"url": "https://some_mobilizon/events/56e7ca43-1b6b-4c50-8362-0439393197e6",
"uuid": "56e7ca43-1b6b-4c50-8362-0439393197e6",
"updatedAt": "2021-05-25T15:15:00Z",
}
full_event_response = {
"data": {"group": {"organizedEvents": {"elements": [full_event_element]}}}
@ -59,6 +60,7 @@ simple_event = MobilizonEvent(
mobilizon_id=UUID("1e2e5943-4a5c-497a-b65d-90457b715d7b"),
thumbnail_link=None,
location=None,
last_update_time=arrow.get("2021-05-23T12:15:00Z"),
)
full_event = MobilizonEvent(
@ -70,6 +72,7 @@ full_event = MobilizonEvent(
mobilizon_id=UUID("56e7ca43-1b6b-4c50-8362-0439393197e6"),
thumbnail_link=None,
location="http://some_location",
last_update_time=arrow.get("2021-05-25T15:15:00+00:00"),
)
@ -115,18 +118,3 @@ def test_failure_404(mock_mobilizon_failure_answer):
def test_failure_wrong_group(mock_mobilizon_success_answer):
with pytest.raises(MobilizonRequestFailed):
get_mobilizon_future_events()
@pytest.mark.parametrize(
"mobilizon_answer, published_events,expected_result",
[
[{"data": {"group": {"organizedEvents": {"elements": []}}}}, [], []],
[simple_event_response, [], [simple_event]],
[two_events_response, [], [simple_event, full_event]],
[two_events_response, [simple_event], [full_event]],
],
)
def test_get_unpublished_events(
mock_mobilizon_success_answer, published_events, expected_result
):
assert get_unpublished_events(published_events) == expected_result

View File

@ -9,6 +9,7 @@ from mobilizon_reshare.event.event import EventPublicationStatus
from mobilizon_reshare.event.event import MobilizonEvent
from mobilizon_reshare.models.event import Event
from mobilizon_reshare.models.publication import PublicationStatus
from mobilizon_reshare.storage.query import to_model, from_model, compute_status
@pytest.mark.asyncio
@ -89,7 +90,7 @@ async def test_event_sort_by_date(event_model_generator):
@pytest.mark.asyncio
async def test_mobilizon_event_to_model(event):
event_model = event.to_model()
event_model = to_model(event)
await event_model.save()
event_db = await Event.all().first()
@ -137,7 +138,7 @@ async def test_mobilizon_event_from_model(
.prefetch_related("publications__publisher")
.first()
)
event = MobilizonEvent.from_model(event=event_db, tz="CET")
event = from_model(event=event_db, tz="CET")
begin_date_utc = arrow.Arrow(year=2021, month=1, day=1, hour=11, minute=30)
@ -192,4 +193,4 @@ async def test_mobilizon_event_compute_status_partial(
)
await publication.save()
publications.append(publication)
assert MobilizonEvent.compute_status(publications) == expected_result
assert compute_status(publications) == expected_result

View File

@ -26,6 +26,7 @@ def test_event():
"end_datetime": now + timedelta(hours=1),
"mobilizon_link": "",
"mobilizon_id": UUID(int=0),
"last_update_time": now,
}
)

View File

@ -19,6 +19,7 @@ from mobilizon_reshare.publishers.coordinator import (
PublicationFailureNotifiersCoordinator,
RecapCoordinator,
)
from mobilizon_reshare.storage.query import to_model
from tests import today
@ -88,7 +89,7 @@ async def mock_publications(
):
result = []
for i in range(num_publications):
event = test_event.to_model()
event = to_model(test_event)
await event.save()
publisher = Publisher(name="telegram")
await publisher.save()

View File

@ -13,6 +13,7 @@ from mobilizon_reshare.publishers.exceptions import (
HTTPResponseError,
)
from mobilizon_reshare.publishers.platforms.zulip import ZulipFormatter, ZulipPublisher
from mobilizon_reshare.storage.query import to_model
from mobilizon_reshare.storage.query.read import build_publications
api_uri = "https://zulip.twc-italia.org/api/v1/"
@ -41,7 +42,10 @@ users_me = {
def mocked_valid_response():
with responses.RequestsMock() as rsps:
rsps.add(
responses.GET, api_uri + "users/me", json=users_me, status=200,
responses.GET,
api_uri + "users/me",
json=users_me,
status=200,
)
rsps.add(
responses.POST,
@ -68,7 +72,10 @@ def mocked_credential_error_response():
def mocked_client_error_response():
with responses.RequestsMock() as rsps:
rsps.add(
responses.GET, api_uri + "users/me", json=users_me, status=200,
responses.GET,
api_uri + "users/me",
json=users_me,
status=200,
)
rsps.add(
responses.POST,
@ -102,7 +109,7 @@ async def setup_db(
@pytest.fixture
@pytest.mark.asyncio
async def unsaved_publications(event):
await event.to_model().save()
await to_model(event).save()
return await build_publications(event)
@ -115,7 +122,7 @@ async def test_zulip_publisher(mocked_valid_response, setup_db, unsaved_publicat
@pytest.mark.asyncio
async def test_zulip_publishr_failure_invalid_credentials(
async def test_zulip_publisher_failure_invalid_credentials(
mocked_credential_error_response, setup_db, unsaved_publications
):
report = PublisherCoordinator(unsaved_publications).run()

View File

@ -1,10 +1,91 @@
from datetime import timedelta
from uuid import UUID
import arrow
from mobilizon_reshare.event.event import MobilizonEvent, EventPublicationStatus
from mobilizon_reshare.models.publication import Publication
from mobilizon_reshare.models.publication import PublicationStatus
from tests import today
event_0 = MobilizonEvent(
name="event_0",
description="desc_0",
mobilizon_id=UUID(int=0),
mobilizon_link="moblink_0",
thumbnail_link="thumblink_0",
location="loc_0",
status=EventPublicationStatus.WAITING,
begin_datetime=arrow.get(today),
end_datetime=arrow.get(today + timedelta(hours=2)),
last_update_time=arrow.get(today),
)
event_1 = MobilizonEvent(
name="event_1",
description="desc_1",
mobilizon_id=UUID(int=1),
mobilizon_link="moblink_1",
thumbnail_link="thumblink_1",
location="loc_1",
status=EventPublicationStatus.WAITING,
begin_datetime=arrow.get(today + timedelta(days=1)),
end_datetime=arrow.get(today + timedelta(days=1) + timedelta(hours=2)),
last_update_time=arrow.get(today + timedelta(days=1)),
)
event_2 = MobilizonEvent(
name="event_2",
description="desc_2",
mobilizon_id=UUID(int=2),
mobilizon_link="moblink_2",
thumbnail_link="thumblink_2",
location="loc_2",
status=EventPublicationStatus.WAITING,
begin_datetime=arrow.get(today + timedelta(days=2)),
end_datetime=arrow.get(today + timedelta(days=2) + timedelta(hours=2)),
last_update_time=arrow.get(today + timedelta(days=2)),
)
event_3 = MobilizonEvent(
name="event_3",
description="desc_3",
mobilizon_id=UUID(int=3),
mobilizon_link="moblink_3",
thumbnail_link="thumblink_3",
location="loc_3",
status=EventPublicationStatus.WAITING,
begin_datetime=arrow.get(today + timedelta(days=3)),
end_datetime=arrow.get(today + timedelta(days=3) + timedelta(hours=2)),
last_update_time=arrow.get(today + timedelta(days=3)),
)
event_3_updated = MobilizonEvent(
name="event_3",
description="desc_3",
mobilizon_id=UUID(int=3),
mobilizon_link="moblink_3",
thumbnail_link="thumblink_3",
location="loc_6",
status=EventPublicationStatus.WAITING,
begin_datetime=arrow.get(today + timedelta(days=3)),
end_datetime=arrow.get(today + timedelta(days=3) + timedelta(hours=2)),
last_update_time=arrow.get(today + timedelta(days=4)),
)
event_6 = MobilizonEvent(
name="event_6",
description="desc_6",
mobilizon_id=UUID(int=6),
mobilizon_link="moblink_6",
thumbnail_link="thumblink_6",
location="loc_6",
status=EventPublicationStatus.WAITING,
begin_datetime=arrow.get(today + timedelta(days=6)),
end_datetime=arrow.get(today + timedelta(days=6) + timedelta(hours=2)),
last_update_time=arrow.get(today + timedelta(days=6)),
)
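
event_3_updated deliberately shares event_3's mobilizon_id but carries a newer last_update_time, which is exactly what the update branch of create_unpublished_events keys on; a short sanity check over the fixtures above:

from tests.storage import event_3, event_3_updated

assert event_3.mobilizon_id == event_3_updated.mobilizon_id
assert event_3_updated.last_update_time > event_3.last_update_time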
complete_specification = {
"event": 4,
"publications": [

View File

@ -13,23 +13,10 @@ from mobilizon_reshare.storage.query.read import (
events_without_publications,
build_publications,
)
from tests.storage import complete_specification
from tests.storage import complete_specification, event_0, event_1, event_3
from tests.storage import result_publication
from tests import today
event_0 = MobilizonEvent(
name="event_0",
description="desc_0",
mobilizon_id=UUID(int=0),
mobilizon_link="moblink_0",
thumbnail_link="thumblink_0",
location="loc_0",
publication_time={},
status=EventPublicationStatus.WAITING,
begin_datetime=arrow.get(today + timedelta(days=0)),
end_datetime=arrow.get(today + timedelta(days=0) + timedelta(hours=2)),
)
@pytest.mark.asyncio
async def test_get_published_events(generate_models):
@ -146,20 +133,7 @@ async def test_event_with_status_window(
{"event": 2, "publications": [], "publisher": ["zulip"]},
[
event_0,
MobilizonEvent(
name="event_1",
description="desc_1",
mobilizon_id=UUID(int=1),
mobilizon_link="moblink_1",
thumbnail_link="thumblink_1",
location="loc_1",
status=EventPublicationStatus.WAITING,
publication_time={},
begin_datetime=arrow.get(today + timedelta(days=1)),
end_datetime=arrow.get(
today + timedelta(days=1) + timedelta(hours=2)
),
),
event_1,
],
),
(
@ -184,20 +158,7 @@ async def test_event_with_status_window(
(
complete_specification,
[
MobilizonEvent(
name="event_3",
description="desc_3",
mobilizon_id=UUID(int=3),
mobilizon_link="moblink_3",
thumbnail_link="thumblink_3",
location="loc_3",
status=EventPublicationStatus.WAITING,
publication_time={},
begin_datetime=arrow.get(today + timedelta(days=3)),
end_datetime=arrow.get(
today + timedelta(days=3) + timedelta(hours=2)
),
),
event_3,
],
),
],

View File

@ -2,91 +2,18 @@ from uuid import UUID
import pytest
from mobilizon_reshare.storage.query import to_model
from mobilizon_reshare.storage.query.read import (
get_unpublished_events,
get_all_events,
)
@pytest.mark.parametrize(
"spec, expected_output_len",
[
[{"event": 2, "publisher": [], "publications": []}, 2],
[{"event": 0, "publisher": [], "publications": []}, 0],
[
{
"event": 2,
"publisher": ["zulip"],
"publications": [{"event_idx": 0, "publisher_idx": 0}],
},
1,
],
],
)
@pytest.mark.asyncio
async def test_get_unpublished_events_db_only(
spec, generate_models, expected_output_len, event_generator
):
"""Testing that with no events on Mobilizon, I retrieve all the DB unpublished events """
await generate_models(spec)
unpublished_events = await get_unpublished_events([])
assert len(unpublished_events) == expected_output_len
@pytest.mark.parametrize("num_mobilizon_events", [0, 2])
@pytest.mark.asyncio
async def test_get_unpublished_events_mobilizon_only_no_publications(
event_generator, num_mobilizon_events
):
"""Testing that when there are no events present in the DB, all the mobilizon events are returned"""
mobilizon_events = [
event_generator(mobilizon_id=UUID(int=i), published=False)
for i in range(num_mobilizon_events)
]
unpublished_events = await get_unpublished_events(mobilizon_events)
assert unpublished_events == mobilizon_events
@pytest.mark.asyncio
async def test_get_unpublished_events_no_overlap(event_generator):
"Testing that all the events are returned when there's no overlap"
all_events = [
event_generator(mobilizon_id=UUID(int=i), published=False) for i in range(4)
]
db_events = all_events[:1]
mobilizon_events = all_events[1:]
for e in db_events:
await e.to_model().save()
unpublished_events = await get_unpublished_events(mobilizon_events)
assert sorted(all_events, key=lambda x: x.mobilizon_id) == sorted(
unpublished_events, key=lambda x: x.mobilizon_id
)
@pytest.mark.asyncio
async def test_get_unpublished_events_overlap(event_generator):
"""Testing that there are no duplicates when an event from mobilizon is already present in the db
and that no event is lost"""
all_events = [
event_generator(mobilizon_id=UUID(int=i), published=False) for i in range(4)
]
db_events = all_events[:2]
mobilizon_events = all_events[1:]
for e in db_events:
await e.to_model().save()
unpublished_events = await get_unpublished_events(mobilizon_events)
assert len(unpublished_events) == 4
@pytest.mark.asyncio
async def test_get_all_events(event_generator):
all_events = [
event_generator(mobilizon_id=UUID(int=i), published=False) for i in range(4)
]
for e in all_events:
await e.to_model().save()
await to_model(e).save()
assert list(await get_all_events()) == all_events

View File

@ -1,10 +1,7 @@
from datetime import timedelta
from uuid import UUID
import arrow
import pytest
from mobilizon_reshare.event.event import MobilizonEvent, EventPublicationStatus
from mobilizon_reshare.models.publication import PublicationStatus, Publication
from mobilizon_reshare.models.publisher import Publisher
from mobilizon_reshare.publishers.abstract import EventPublication
@ -16,27 +13,40 @@ from mobilizon_reshare.publishers.platforms.telegram import (
TelegramFormatter,
TelegramPublisher,
)
from mobilizon_reshare.storage.query.read import publications_with_status
from mobilizon_reshare.storage.query.write import (
save_publication_report,
update_publishers,
create_unpublished_events,
)
from tests.storage import complete_specification
from tests.storage import today
from tests.storage import (
complete_specification,
event_0,
event_1,
event_2,
event_3,
event_3_updated,
event_6,
)
two_publishers_specification = {"publisher": ["telegram", "twitter"]}
event_1 = MobilizonEvent(
name="event_1",
description="desc_1",
mobilizon_id=UUID(int=1),
mobilizon_link="moblink_1",
thumbnail_link="thumblink_1",
location="loc_1",
status=EventPublicationStatus.WAITING,
begin_datetime=arrow.get(today + timedelta(days=1)),
end_datetime=arrow.get(today + timedelta(days=1) + timedelta(hours=2)),
)
all_published_specification = {
"event": 2,
"publications": [
{"event_idx": 0, "publisher_idx": 1, "status": PublicationStatus.FAILED},
{"event_idx": 1, "publisher_idx": 0, "status": PublicationStatus.COMPLETED},
],
"publisher": ["telegram", "twitter"],
}
two_events_specification = {
"event": 2,
"publications": [
{"event_idx": 0, "publisher_idx": 1, "status": PublicationStatus.FAILED},
],
"publisher": ["telegram", "twitter"],
}
@pytest.mark.asyncio
@ -80,6 +90,62 @@ async def test_update_publishers(
assert publishers == expected_result
@pytest.mark.asyncio
@pytest.mark.parametrize(
"specification,events_from_mobilizon,expected_result",
[
[
# Empty DB
{"event": 0, "publications": [], "publisher": []},
[event_1],
[event_1],
],
[
# Test whether the query actually does nothing when all events are published
all_published_specification,
[event_1],
[],
],
[
# Test whether the query actually returns only unknown unpublished events
all_published_specification,
[event_2],
[event_2],
],
[
# Test whether the query actually merges remote and local state
{"event": 2, "publisher": ["telegram", "mastodon", "facebook"]},
[event_2],
[event_0, event_1, event_2],
],
[
# Test whether the query actually merges remote and local state
complete_specification,
[event_0, event_1, event_2, event_6],
[event_3, event_6],
],
[
# Test update
complete_specification,
[event_0, event_3_updated, event_6],
[event_3_updated, event_6],
],
],
)
async def test_create_unpublished_events(
specification,
events_from_mobilizon,
expected_result,
generate_models,
):
await generate_models(specification)
unpublished_events = await create_unpublished_events(events_from_mobilizon)
assert len(unpublished_events) == len(expected_result)
assert unpublished_events == expected_result
@pytest.mark.asyncio
@pytest.mark.parametrize(
"specification,report,event,expected_result",