diff --git a/manifest.scm b/manifest.scm
index 5b88753..d6fb027 100644
--- a/manifest.scm
+++ b/manifest.scm
@@ -9,6 +9,6 @@
   (map cadr (package-direct-inputs mobilizon-reshare.git))
   (map specification->package+output
        '("git-cal" "man-db" "texinfo"
-         "python-pre-commit"
+         "python-pre-commit" "cloc"
          "ripgrep" "python-semver"
          "fd" "docker-compose"))))
diff --git a/mobilizon_reshare/cli/cli.py b/mobilizon_reshare/cli/cli.py
index f659a97..d49c078 100644
--- a/mobilizon_reshare/cli/cli.py
+++ b/mobilizon_reshare/cli/cli.py
@@ -9,6 +9,8 @@ from mobilizon_reshare.cli.commands.list.list_event import list_events
 from mobilizon_reshare.cli.commands.list.list_publication import list_publications
 from mobilizon_reshare.cli.commands.recap.main import recap_command as recap_main
 from mobilizon_reshare.cli.commands.start.main import start_command as start_main
+from mobilizon_reshare.cli.commands.pull.main import pull_command as pull_main
+from mobilizon_reshare.cli.commands.publish.main import publish_command as publish_main
 from mobilizon_reshare.config.config import current_version
 from mobilizon_reshare.config.publishers import publisher_names
 from mobilizon_reshare.event.event import EventPublicationStatus
@@ -76,16 +78,40 @@ def mobilizon_reshare(obj):
     pass
 
 
-@mobilizon_reshare.command(help="Synchronize and publish events.")
+@mobilizon_reshare.command(
+    help="Synchronize and publish events. It is equivalent to running pull and then publish consecutively."
+)
 @pass_context
-def start(ctx,):
+def start(
+    ctx,
+):
     ctx.ensure_object(dict)
-    safe_execution(start_main,)
+    safe_execution(
+        start_main,
+    )
 
 
 @mobilizon_reshare.command(help="Publish a recap of already published events.")
 def recap():
-    safe_execution(recap_main,)
+    safe_execution(
+        recap_main,
+    )
+
+
+@mobilizon_reshare.command(
+    help="Fetch the latest events from Mobilizon and store them."
+)
+def pull():
+    safe_execution(
+        pull_main,
+    )
+
+
+@mobilizon_reshare.command(help="Select an event and publish it.")
+def publish():
+    safe_execution(
+        publish_main,
+    )
 
 
 @mobilizon_reshare.group(help="Operations that pertain to events")
@@ -106,7 +132,10 @@ def event_list(status, begin, end):
 
     safe_execution(
         functools.partial(
-            list_events, status_name_to_enum["event"][status], frm=begin, to=end,
+            list_events,
+            status_name_to_enum["event"][status],
+            frm=begin,
+            to=end,
         ),
     )
 
@@ -133,21 +162,28 @@ def publication_list(status, begin, end):
 @click.argument("event-id", type=click.UUID)
 @click.argument("publisher", type=click.Choice(publisher_names))
 def format(
-    event_id, publisher,
+    event_id,
+    publisher,
 ):
-    safe_execution(functools.partial(format_event, event_id, publisher),)
+    safe_execution(
+        functools.partial(format_event, event_id, publisher),
+    )
 
 
 @event.command(name="retry", help="Retries all the failed publications")
 @click.argument("event-id", type=click.UUID)
 def event_retry(event_id):
-    safe_execution(functools.partial(retry_event_command, event_id),)
+    safe_execution(
+        functools.partial(retry_event_command, event_id),
+    )
 
 
 @publication.command(name="retry", help="Retries a specific publication")
 @click.argument("publication-id", type=click.UUID)
 def publication_retry(publication_id):
-    safe_execution(functools.partial(retry_publication_command, publication_id),)
+    safe_execution(
+        functools.partial(retry_publication_command, publication_id),
+    )
 
 
 if __name__ == "__main__":
diff --git a/mobilizon_reshare/cli/commands/list/list_event.py b/mobilizon_reshare/cli/commands/list/list_event.py
index 27395f6..ab6fe90 100644
--- a/mobilizon_reshare/cli/commands/list/list_event.py
+++ b/mobilizon_reshare/cli/commands/list/list_event.py
@@ -43,7 +43,7 @@ async def list_unpublished_events(frm: Arrow = None, to: Arrow = None):
 
 
 async def list_events(
-    status: EventPublicationStatus = None,
+    status: Optional[EventPublicationStatus] = None,
     frm: Optional[datetime] = None,
     to: Optional[datetime] = None,
 ):
diff --git a/mobilizon_reshare/cli/commands/publish/__init__.py b/mobilizon_reshare/cli/commands/publish/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/mobilizon_reshare/cli/commands/publish/main.py b/mobilizon_reshare/cli/commands/publish/main.py
new file mode 100644
index 0000000..6e1be91
--- /dev/null
+++ b/mobilizon_reshare/cli/commands/publish/main.py
@@ -0,0 +1,10 @@
+from mobilizon_reshare.main.publish import publish
+
+
+async def publish_command():
+    """
+    Select an unpublished event and publish it.
+    :return: 0 if the event was published successfully, 1 otherwise.
+    """
+    reports = await publish()
+    return 0 if reports and reports.successful else 1
diff --git a/mobilizon_reshare/cli/commands/pull/__init__.py b/mobilizon_reshare/cli/commands/pull/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/mobilizon_reshare/cli/commands/pull/main.py b/mobilizon_reshare/cli/commands/pull/main.py
new file mode 100644
index 0000000..b7c5031
--- /dev/null
+++ b/mobilizon_reshare/cli/commands/pull/main.py
@@ -0,0 +1,10 @@
+from mobilizon_reshare.main.pull import pull
+
+
+async def pull_command():
+    """
+    Fetch the latest events from Mobilizon and store them.
+    :return: the exit status (always 0).
+    """
+    await pull()
+    return 0
diff --git a/mobilizon_reshare/main/publish.py b/mobilizon_reshare/main/publish.py
new file mode 100644
index 0000000..1461ab2
--- /dev/null
+++ b/mobilizon_reshare/main/publish.py
@@ -0,0 +1,50 @@
+import logging.config
+from typing import Optional
+
+from mobilizon_reshare.event.event import MobilizonEvent
+from mobilizon_reshare.event.event_selection_strategies import select_event_to_publish
+from mobilizon_reshare.publishers.coordinator import (
+    PublicationFailureNotifiersCoordinator,
+    PublisherCoordinatorReport,
+)
+from mobilizon_reshare.publishers.coordinator import PublisherCoordinator
+from mobilizon_reshare.storage.query.read import (
+    get_published_events,
+    build_publications,
+    events_without_publications,
+)
+from mobilizon_reshare.storage.query.write import save_publication_report
+
+logger = logging.getLogger(__name__)
+
+
+async def publish(
+    events: Optional[list[MobilizonEvent]] = None,
+) -> Optional[PublisherCoordinatorReport]:
+    """
+    Select one of the given events (or one of the unpublished events) and publish it.
+    :return: the coordinator report, or None if there is no event to publish.
+    """
+    if events is None:
+        events = await events_without_publications()
+
+    event = select_event_to_publish(
+        list(await get_published_events()),
+        events,
+    )
+
+    if event:
+        logger.info(f"Event to publish found: {event.name}")
+
+        publications = await build_publications(event)
+        reports = PublisherCoordinator(publications).run()
+
+        await save_publication_report(reports)
+        for report in reports.reports:
+            if not report.succesful:
+                PublicationFailureNotifiersCoordinator(
+                    report,
+                ).notify_failure()
+        return reports
+    else:
+        logger.info("No event to publish found")
diff --git a/mobilizon_reshare/main/pull.py b/mobilizon_reshare/main/pull.py
new file mode 100644
index 0000000..b8edd7a
--- /dev/null
+++ b/mobilizon_reshare/main/pull.py
@@ -0,0 +1,25 @@
+import logging.config
+
+from mobilizon_reshare.event.event import MobilizonEvent
+from mobilizon_reshare.mobilizon.events import get_mobilizon_future_events
+
+from mobilizon_reshare.storage.query.write import (
+    create_unpublished_events,
+)
+
+logger = logging.getLogger(__name__)
+
+
+async def pull() -> list[MobilizonEvent]:
+    """
+    Fetches the latest events from Mobilizon and stores them.
+    :return:
+    """
+
+    # Pull future events from Mobilizon
+    future_events = get_mobilizon_future_events()
+    logger.info(f"Pulled {len(future_events)} events from Mobilizon.")
+    # Store in the DB only the ones we didn't know about
+    events = await create_unpublished_events(future_events)
+    logger.debug(f"There are now {len(events)} unpublished events.")
+    return events
diff --git a/mobilizon_reshare/main/start.py b/mobilizon_reshare/main/start.py
index 95fe966..c2f3515 100644
--- a/mobilizon_reshare/main/start.py
+++ b/mobilizon_reshare/main/start.py
@@ -1,51 +1,17 @@
 import logging.config
+from typing import Optional
 
-from mobilizon_reshare.event.event_selection_strategies import select_event_to_publish
-from mobilizon_reshare.mobilizon.events import get_mobilizon_future_events
-from mobilizon_reshare.publishers.coordinator import (
-    PublicationFailureNotifiersCoordinator,
-)
-from mobilizon_reshare.publishers.coordinator import PublisherCoordinator
-from mobilizon_reshare.storage.query.read import (
-    get_published_events,
-    build_publications,
-)
-from mobilizon_reshare.storage.query.write import (
-    create_unpublished_events,
-    save_publication_report,
-)
+from mobilizon_reshare.main.publish import publish
+from mobilizon_reshare.main.pull import pull
+from mobilizon_reshare.publishers.coordinator import PublisherCoordinatorReport
 
 logger = logging.getLogger(__name__)
 
 
-async def start():
+async def start() -> Optional[PublisherCoordinatorReport]:
     """
     STUB
     :return:
     """
-
-    # Pull future events from Mobilizon
-    future_events = get_mobilizon_future_events()
-    # Store in the DB only the ones we didn't know about
-    events_without_publications = await create_unpublished_events(future_events)
-    event = select_event_to_publish(
-        list(await get_published_events()),
-        # We must load unpublished events from DB since it contains
-        # merged state between Mobilizon and previous WAITING events.
-        events_without_publications,
-    )
-
-    if event:
-        logger.info(f"Event to publish found: {event.name}")
-
-        publications = await build_publications(event)
-        reports = PublisherCoordinator(publications).run()
-
-        await save_publication_report(reports)
-        for report in reports.reports:
-            if not report.succesful:
-                PublicationFailureNotifiersCoordinator(
-                    report,
-                ).notify_failure()
-    else:
-        logger.info("No event to publish found")
+    events = await pull()
+    return await publish(events)
diff --git a/mobilizon_reshare/storage/query/converter.py b/mobilizon_reshare/storage/query/converter.py
index 87b5292..7fe98a3 100644
--- a/mobilizon_reshare/storage/query/converter.py
+++ b/mobilizon_reshare/storage/query/converter.py
@@ -10,7 +10,7 @@ from mobilizon_reshare.models.publication import Publication, PublicationStatus
 from mobilizon_reshare.publishers.abstract import EventPublication
 
 
-def event_from_model(event: Event, tz: str = "UTC"):
+def event_from_model(event: Event):
 
     publication_status = compute_event_status(list(event.publications))
     publication_time = {}
@@ -18,27 +18,19 @@ def event_from_model(event: Event, tz: str = "UTC"):
     for pub in event.publications:
         if publication_status != EventPublicationStatus.WAITING:
             assert pub.timestamp is not None
-            publication_time[pub.publisher.name] = arrow.get(
-                tortoise.timezone.localtime(value=pub.timestamp, timezone=tz)
-            ).to("local")
+            publication_time[pub.publisher.name] = arrow.get(pub.timestamp).to("local")
 
     return MobilizonEvent(
         name=event.name,
         description=event.description,
-        begin_datetime=arrow.get(
-            tortoise.timezone.localtime(value=event.begin_datetime, timezone=tz)
-        ).to("local"),
-        end_datetime=arrow.get(
-            tortoise.timezone.localtime(value=event.end_datetime, timezone=tz)
-        ).to("local"),
+        begin_datetime=arrow.get(event.begin_datetime).to("local"),
+        end_datetime=arrow.get(event.end_datetime).to("local"),
         mobilizon_link=event.mobilizon_link,
         mobilizon_id=event.mobilizon_id,
         thumbnail_link=event.thumbnail_link,
         location=event.location,
         publication_time=publication_time,
         status=publication_status,
-        last_update_time=arrow.get(
-            tortoise.timezone.localtime(value=event.last_update_time, timezone=tz)
-        ).to("local"),
+        last_update_time=arrow.get(event.last_update_time).to("local"),
     )
diff --git a/mobilizon_reshare/storage/query/read.py b/mobilizon_reshare/storage/query/read.py
index 13c9c00..723cac7 100644
--- a/mobilizon_reshare/storage/query/read.py
+++ b/mobilizon_reshare/storage/query/read.py
@@ -66,7 +66,8 @@ async def events_with_status(
 
 
 async def get_all_publications(
-    from_date: Optional[Arrow] = None, to_date: Optional[Arrow] = None,
+    from_date: Optional[Arrow] = None,
+    to_date: Optional[Arrow] = None,
 ) -> Iterable[EventPublication]:
     return await prefetch_publication_relations(
         _add_date_window(Publication.all(), "timestamp", from_date, to_date)
@@ -74,14 +75,15 @@ async def get_all_publications(
 
 
 async def get_all_events(
-    from_date: Optional[Arrow] = None, to_date: Optional[Arrow] = None,
-) -> Iterable[MobilizonEvent]:
-    return map(
-        event_from_model,
-        await prefetch_event_relations(
+    from_date: Optional[Arrow] = None,
+    to_date: Optional[Arrow] = None,
+) -> list[MobilizonEvent]:
+    return [
+        event_from_model(event)
+        for event in await prefetch_event_relations(
             _add_date_window(Event.all(), "begin_datetime", from_date, to_date)
-        ),
-    )
+        )
+    ]
 
 
 async def prefetch_event_relations(queryset: QuerySet[Event]) -> list[Event]:
@@ -130,13 +132,14 @@ async def publications_with_status(
 
 
 async def events_without_publications(
-    from_date: Optional[Arrow] = None, to_date: Optional[Arrow] = None,
+    from_date: Optional[Arrow] = None,
+    to_date: Optional[Arrow] = None,
 ) -> list[MobilizonEvent]:
     query = Event.filter(publications__id=None)
     events = await prefetch_event_relations(
         _add_date_window(query, "begin_datetime", from_date, to_date)
     )
-    return list(map(event_from_model, events))
+    return [event_from_model(event) for event in events]
 
 
 async def get_event(event_mobilizon_id: UUID) -> Event:
@@ -168,7 +171,7 @@ async def build_publications(event: MobilizonEvent) -> list[EventPublication]:
         await event_model.build_publication_by_publisher_name(name)
         for name in get_active_publishers()
     ]
-    return list(publication_from_orm(m, dataclasses.replace(event)) for m in models)
+    return [publication_from_orm(m, dataclasses.replace(event)) for m in models]
 
 
 @atomic(CONNECTION_NAME)
diff --git a/tests/commands/conftest.py b/tests/commands/conftest.py
index 6b13792..f592964 100644
--- a/tests/commands/conftest.py
+++ b/tests/commands/conftest.py
@@ -10,6 +10,7 @@ from mobilizon_reshare.models.publisher import Publisher
 import mobilizon_reshare.main.recap
 from mobilizon_reshare.publishers import coordinator
 from tests import today
+from tests.conftest import event_1, event_0
 
 
 def simple_event_element():
@@ -28,11 +29,51 @@ def simple_event_element():
     }
 
 
+def first_event_element():
+    return {
+        "beginsOn": event_0.begin_datetime.isoformat(),
+        "description": "desc_0",
+        "endsOn": event_0.end_datetime.isoformat(),
+        "onlineAddress": None,
+        "options": {"showEndTime": True, "showStartTime": True},
+        "physicalAddress": {"description": "", "locality": "loc_0", "region": ""},
+        "picture": {"url": "https://example.org/thumblink_0"},
+        "title": "event_0",
+        "url": "https://example.org/moblink_0",
+        "uuid": str(uuid.UUID(int=0)),
+        "updatedAt": event_0.last_update_time.isoformat(),
+    }
+
+
+def second_event_element():
+    return {
+        "beginsOn": event_1.begin_datetime.isoformat(),
+        "description": "desc_1",
+        "endsOn": event_1.end_datetime.isoformat(),
+        "onlineAddress": None,
+        "options": {"showEndTime": True, "showStartTime": True},
+        "physicalAddress": {"description": "", "locality": "loc_1", "region": ""},
+        "picture": {"url": "https://example.org/thumblink_1"},
+        "title": "event_1",
+        "url": "https://example.org/moblink_1",
+        "uuid": str(uuid.UUID(int=1)),
+        "updatedAt": event_1.last_update_time.isoformat(),
+    }
+
+
 @pytest.fixture
 def mobilizon_answer(elements):
     return {"data": {"group": {"organizedEvents": {"elements": elements}}}}
 
 
+@pytest.fixture
+def multiple_answers(multiple_elements: list[list[dict]]):
+    return [
+        {"data": {"group": {"organizedEvents": {"elements": elements}}}}
+        for elements in multiple_elements
+    ]
+
+
 @pytest.fixture
 async def mock_now(monkeypatch):
     def _mock_now():
diff --git a/tests/commands/test_list.py b/tests/commands/test_list.py
index 6abfedc..b7639fe 100644
--- a/tests/commands/test_list.py
+++ b/tests/commands/test_list.py
@@ -65,7 +65,7 @@ async def test_list_publications(capsys, generate_models):
 async def test_list_publications_from_window(capsys, generate_models):
     await generate_models(spec)
     await list_publications(
-        frm=arrow.Arrow(year=2021, month=6, day=6, hour=3, minute=30)
+        frm=arrow.Arrow(year=2021, month=6, day=6, hour=3, minute=30).datetime
     )
     output = capsys.readouterr()
     assert clean_output(output) == [
@@ -78,7 +78,7 @@ async def test_list_publications_to_window(capsys, generate_models):
     await generate_models(spec)
     await list_publications(
-        to=arrow.Arrow(year=2021, month=6, day=6, hour=3, minute=30)
+        to=arrow.Arrow(year=2021, month=6, day=6, hour=3, minute=30).datetime
     )
     output = capsys.readouterr()
     assert clean_output(output) == [
diff --git a/tests/commands/test_pull.py b/tests/commands/test_pull.py
new file mode 100644
index 0000000..b00e0b2
--- /dev/null
+++ b/tests/commands/test_pull.py
@@ -0,0 +1,206 @@
+from logging import DEBUG, INFO
+
+import pytest
+
+from mobilizon_reshare.storage.query.read import (
+    get_all_events,
+    events_without_publications,
+)
+from tests.commands.conftest import (
+    second_event_element,
+    first_event_element,
+)
+from mobilizon_reshare.main.pull import pull
+from mobilizon_reshare.main.start import start
+from tests.conftest import event_0, event_1
+
+empty_specification = {"event": 0, "publications": [], "publisher": []}
+one_unpublished_event_specification = {
+    "event": 1,
+    "publications": [],
+    "publisher": ["telegram"],
+}
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize(
+    "elements, expected_result",
+    [[[], []]],
+)
+async def test_pull_no_event(
+    generate_models,
+    mock_mobilizon_success_answer,
+    mobilizon_answer,
+    caplog,
+    elements,
+    expected_result,
+):
+    await generate_models(empty_specification)
+    with caplog.at_level(DEBUG):
+        assert await pull() == []
+        assert "Pulled 0 events from Mobilizon." in caplog.text
+        assert "There are now 0 unpublished events." in caplog.text
+
+        assert expected_result == await get_all_events()
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize(
+    "elements, specification, expected_result",
+    [
+        [[second_event_element()], empty_specification, [event_1]],
+        [
+            [second_event_element()],
+            one_unpublished_event_specification,
+            [event_0, event_1],
+        ],
+        [[first_event_element()], one_unpublished_event_specification, [event_0]],
+    ],
+)
+async def test_pull(
+    generate_models,
+    mock_mobilizon_success_answer,
+    mobilizon_answer,
+    caplog,
+    elements,
+    specification,
+    expected_result,
+):
+    await generate_models(specification)
+    with caplog.at_level(DEBUG):
+        assert await pull() == expected_result
+        assert f"Pulled {len(elements)} events from Mobilizon." in caplog.text
+        assert expected_result == await get_all_events()
+
+        assert (
+            f"There are now {len(expected_result)} unpublished events." in caplog.text
+        )
+        assert expected_result == await events_without_publications()
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize(
+    "publisher_class", [pytest.lazy_fixture("mock_publisher_class")]
+)
+@pytest.mark.parametrize(
+    "elements, specification, expected_pull, expected_publish",
+    [
+        [
+            [second_event_element(), first_event_element()],
+            empty_specification,
+            [event_0, event_1],
+            event_0,
+        ],
+    ],
+)
+async def test_pull_start(
+    generate_models,
+    mock_mobilizon_success_answer,
+    mock_publisher_config,
+    mobilizon_answer,
+    caplog,
+    message_collector,
+    elements,
+    specification,
+    expected_pull,
+    expected_publish,
+):
+    await generate_models(specification)
+
+    # I clean the message collector
+    message_collector.data = []
+
+    with caplog.at_level(INFO):
+        assert await pull() == expected_pull
+        assert expected_pull == await get_all_events()
+        assert expected_pull == await events_without_publications()
+
+        report = await start()
+        assert report.successful
+
+        assert f"Event to publish found: {expected_publish.name}" in caplog.text
+
+        pull_ids = set(event.mobilizon_id for event in expected_pull)
+        publish_ids = {expected_publish.mobilizon_id}
+
+        assert pull_ids == set(event.mobilizon_id for event in await get_all_events())
+        assert (pull_ids - publish_ids) == set(
+            event.mobilizon_id for event in await events_without_publications()
+        )
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize(
+    "publisher_class", [pytest.lazy_fixture("mock_publisher_class")]
+)
+@pytest.mark.parametrize(
+    "elements, specification, expected_result",
+    [
+        [[second_event_element()], one_unpublished_event_specification, event_0],
+    ],
+)
+async def test_start_pull(
+    generate_models,
+    mock_mobilizon_success_answer,
+    mock_publisher_config,
+    mobilizon_answer,
+    caplog,
+    message_collector,
+    elements,
+    specification,
+    expected_result,
+):
+    await generate_models(specification)
+
+    # I clean the message collector
+    message_collector.data = []
+
+    with caplog.at_level(DEBUG):
+        assert await start()
+        assert f"Event to publish found: {expected_result.name}" in caplog.text
+        assert await pull()
+        assert "There are now 1 unpublished events." in caplog.text
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize(
+    "multiple_elements, specification, expected_first, expected_last",
+    [
+        [
+            [[second_event_element()], [second_event_element(), first_event_element()]],
+            empty_specification,
+            [event_1],
+            [event_1, event_0],
+        ],
+    ],
+)
+async def test_multiple_pull(
+    generate_models,
+    mock_multiple_success_answer,
+    multiple_answers,
+    caplog,
+    message_collector,
+    multiple_elements,
+    specification,
+    expected_first,
+    expected_last,
+):
+    await generate_models(specification)
+    with caplog.at_level(DEBUG):
+        assert await pull()
+        assert f"There are now {len(expected_first)} unpublished events." in caplog.text
+        assert expected_first == await get_all_events()
+        assert await events_without_publications() == await get_all_events()
+
+        # I clean the message collector
+        message_collector.data = []
+
+        assert await pull()
+
+        assert f"Pulled {len(expected_last)} events from Mobilizon." in caplog.text
+        assert f"There are now {len(expected_last)} unpublished events." in caplog.text
+
+        assert set(event.mobilizon_id for event in expected_last) == set(
+            event.mobilizon_id for event in await get_all_events()
+        )
+        assert await events_without_publications() == await get_all_events()
diff --git a/tests/commands/test_start.py b/tests/commands/test_start.py
index 46585cc..f142aed 100644
--- a/tests/commands/test_start.py
+++ b/tests/commands/test_start.py
@@ -1,17 +1,17 @@
-import uuid
 from logging import DEBUG, INFO
 
-import arrow
 import pytest
 
 from mobilizon_reshare.storage.query.converter import event_from_model, event_to_model
 from mobilizon_reshare.storage.query.read import get_all_events
-from tests.commands.conftest import simple_event_element
+from tests.commands.conftest import simple_event_element, second_event_element
 from mobilizon_reshare.event.event import EventPublicationStatus
 from mobilizon_reshare.main.start import start
 from mobilizon_reshare.models.event import Event
 from mobilizon_reshare.models.publication import PublicationStatus
 
+one_published_event_specification = {"event": 1, "publications": [{"event_idx": 0, "publisher_idx": 0, "status": PublicationStatus.COMPLETED}], "publisher": ["telegram", "twitter", "mastodon", "zulip"]}
+
 
 @pytest.mark.asyncio
 @pytest.mark.parametrize(
@@ -43,7 +43,7 @@ async def test_start_new_event(
 ):
     with caplog.at_level(DEBUG):
         # calling the start command
-        assert await start() is None
+        assert await start() is not None
 
         # since the mobilizon_answer contains at least one result, one event to publish must be found and published
         # by the publisher coordinator
@@ -98,7 +98,7 @@ async def test_start_event_from_db(
 
     with caplog.at_level(DEBUG):
         # calling the start command
-        assert await start() is None
+        assert await start() is not None
 
         # since the db contains at least one event, this has to be picked and published
         assert "Event to publish found" in caplog.text
@@ -141,7 +141,7 @@ async def test_start_publisher_failure(
 
     with caplog.at_level(DEBUG):
         # calling the start command
-        assert await start() is None
+        assert await start() is not None
 
         # since the db contains at least one event, this has to be picked and published
 
@@ -166,37 +166,6 @@ async def test_start_publisher_failure(
     assert event_from_model(event_model).status == EventPublicationStatus.FAILED
 
 
-@pytest.fixture
-async def published_event(event_generator):
-
-    event = event_generator()
-    event_model = event_to_model(event)
-    await event_model.save()
-    assert await start() is None
-    await event_model.refresh_from_db()
-    await event_model.fetch_related("publications")
-    for pub in event_model.publications:
-        pub.timestamp = arrow.now().shift(days=-2).datetime
-        await pub.save()
-    return event_model
-
-
-def second_event_element():
-    return {
-        "beginsOn": "2021-05-23T12:15:00Z",
-        "description": "description of the second event",
-        "endsOn": "2021-05-23T15:15:00Z",
-        "onlineAddress": None,
-        "options": {"showEndTime": True, "showStartTime": True},
-        "physicalAddress": None,
-        "picture": None,
-        "title": "test event",
-        "url": "https://some_mobilizon/events/1e2e5943-4a5c-497a-b65d-90457b715d7b",
-        "uuid": str(uuid.uuid4()),
-        "updatedAt": "2021-05-23T12:15:00Z",
-    }
-
-
 @pytest.mark.asyncio
 @pytest.mark.parametrize(
     "publisher_class", [pytest.lazy_fixture("mock_publisher_class")]
@@ -210,22 +179,21 @@ async def test_start_second_execution(
     caplog,
     mock_publisher_config,
     message_collector,
-    event_generator,
-    published_event,
+    generate_models
 ):
-    # the fixture published_event provides an existing event in the db
+    await generate_models(one_published_event_specification)
 
     # I clean the message collector
     message_collector.data = []
 
     with caplog.at_level(INFO):
         # calling the start command
-        assert await start() is None
+        assert await start() is not None
 
     # verify that the second event gets published
     assert "Event to publish found" in caplog.text
     assert message_collector == [
-        "test event|description of the second event",
+        "event_1|desc_1",
     ]
     # I verify that the db event and the new event coming from mobilizon are both in the db
     assert len(list(await get_all_events())) == 2
diff --git a/tests/conftest.py b/tests/conftest.py
index efffaa5..15429e5 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -163,7 +163,7 @@ def event_model_generator():
             mobilizon_id=UUID(int=idx),
             mobilizon_link=f"moblink_{idx}",
             thumbnail_link=f"thumblink_{idx}",
-            location=f"loc_{idx}",
+            location=f", loc_{idx}, ",
             begin_datetime=begin_date,
             end_datetime=begin_date + timedelta(hours=2),
             last_update_time=begin_date,
@@ -236,9 +236,9 @@ async def _generate_events(specification):
             name=f"event_{i}",
             description=f"desc_{i}",
             mobilizon_id=UUID(int=i),
-            mobilizon_link=f"moblink_{i}",
-            thumbnail_link=f"thumblink_{i}",
-            location=f"loc_{i}",
+            mobilizon_link=f"https://example.org/moblink_{i}",
+            thumbnail_link=f"https://example.org/thumblink_{i}",
+            location=f", loc_{i}, ",
             begin_datetime=begin_date,
             end_datetime=begin_date + timedelta(hours=2),
             last_update_time=begin_date,
@@ -272,6 +272,85 @@ def generate_models():
     return _generate_models
 
 
+event_0 = MobilizonEvent(
+    name="event_0",
+    description="desc_0",
+    mobilizon_id=UUID(int=0),
+    mobilizon_link="https://example.org/moblink_0",
+    thumbnail_link="https://example.org/thumblink_0",
+    location=", loc_0, ",
+    status=EventPublicationStatus.WAITING,
+    begin_datetime=arrow.get(today),
+    end_datetime=arrow.get(today + timedelta(hours=2)),
+    last_update_time=arrow.get(today),
+)
+
+event_1 = MobilizonEvent(
+    name="event_1",
+    description="desc_1",
+    mobilizon_id=UUID(int=1),
+    mobilizon_link="https://example.org/moblink_1",
+    thumbnail_link="https://example.org/thumblink_1",
+    location=", loc_1, ",
+    status=EventPublicationStatus.WAITING,
+    begin_datetime=arrow.get(today + timedelta(days=1)),
+    end_datetime=arrow.get(today + timedelta(days=1) + timedelta(hours=2)),
+    last_update_time=arrow.get(today + timedelta(days=1)),
+)
+
+event_2 = MobilizonEvent(
+    name="event_2",
+    description="desc_2",
+    mobilizon_id=UUID(int=2),
+    mobilizon_link="https://example.org/moblink_2",
+    thumbnail_link="https://example.org/thumblink_2",
+    location=", loc_2, ",
+    status=EventPublicationStatus.WAITING,
+    begin_datetime=arrow.get(today + timedelta(days=2)),
+    end_datetime=arrow.get(today + timedelta(days=2) + timedelta(hours=2)),
+    last_update_time=arrow.get(today + timedelta(days=2)),
+)
+
+event_3 = MobilizonEvent(
+    name="event_3",
+    description="desc_3",
+    mobilizon_id=UUID(int=3),
+    mobilizon_link="https://example.org/moblink_3",
+    thumbnail_link="https://example.org/thumblink_3",
+    location=", loc_3, ",
+    status=EventPublicationStatus.WAITING,
+    begin_datetime=arrow.get(today + timedelta(days=3)),
+    end_datetime=arrow.get(today + timedelta(days=3) + timedelta(hours=2)),
+    last_update_time=arrow.get(today + timedelta(days=3)),
+)
+
+event_3_updated = MobilizonEvent(
+    name="event_3",
+    description="desc_3",
+    mobilizon_id=UUID(int=3),
+    mobilizon_link="https://example.org/moblink_3",
+    thumbnail_link="https://example.org/thumblink_3",
+    location=", loc_6, ",
+    status=EventPublicationStatus.WAITING,
+    begin_datetime=arrow.get(today + timedelta(days=3)),
+    end_datetime=arrow.get(today + timedelta(days=3) + timedelta(hours=2)),
+    last_update_time=arrow.get(today + timedelta(days=4)),
+)
+
+event_6 = MobilizonEvent(
+    name="event_6",
+    description="desc_6",
+    mobilizon_id=UUID(int=6),
+    mobilizon_link="https://example.org/moblink_6",
+    thumbnail_link="https://example.org/thumblink_6",
+    location=", loc_6, ",
+    status=EventPublicationStatus.WAITING,
+    begin_datetime=arrow.get(today + timedelta(days=6)),
+    end_datetime=arrow.get(today + timedelta(days=6) + timedelta(hours=2)),
+    last_update_time=arrow.get(today + timedelta(days=6)),
+)
+
+
 @pytest.fixture()
 def message_collector():
     class MessageCollector(UserList):
@@ -320,6 +399,19 @@ def mock_mobilizon_success_answer(mobilizon_answer, mobilizon_url):
         yield
 
 
+@responses.activate
+@pytest.fixture
+def mock_multiple_success_answer(multiple_answers, mobilizon_url):
+    with responses.RequestsMock() as rsps:
+
+        for answer in multiple_answers:
+            rsps.add(
+                responses.POST, mobilizon_url, json=answer, status=200,
+            )
+
+        yield
+
+
 @pytest.fixture
 def mock_formatter_class():
     class MockFormatter(AbstractEventFormatter):
diff --git a/tests/models/test_event.py b/tests/models/test_event.py
index 5ae3c83..03e234e 100644
--- a/tests/models/test_event.py
+++ b/tests/models/test_event.py
@@ -141,7 +141,7 @@ async def test_mobilizon_event_from_model(
         .prefetch_related("publications__publisher")
         .first()
     )
-    event = event_from_model(event=event_db, tz="CET")
+    event = event_from_model(event=event_db)
 
     begin_date_utc = arrow.Arrow(year=2021, month=1, day=1, hour=11, minute=30)
 
@@ -152,7 +152,7 @@ async def test_mobilizon_event_from_model(
     assert event.mobilizon_link == "moblink_1"
     assert event.mobilizon_id == UUID(int=1)
     assert event.thumbnail_link == "thumblink_1"
-    assert event.location == "loc_1"
+    assert event.location == ", loc_1, "
     assert event.publication_time[publisher_model.name] == publication.timestamp
     assert event.status == EventPublicationStatus.PARTIAL
diff --git a/tests/storage/__init__.py b/tests/storage/__init__.py
index 5a347e5..56f24b0 100644
--- a/tests/storage/__init__.py
+++ b/tests/storage/__init__.py
@@ -1,91 +1,10 @@
 from datetime import timedelta
 from uuid import UUID
 
-import arrow
-
-from mobilizon_reshare.event.event import MobilizonEvent, EventPublicationStatus
 from mobilizon_reshare.models.publication import Publication
 from mobilizon_reshare.models.publication import PublicationStatus
 from tests import today
 
-event_0 = MobilizonEvent(
-    name="event_0",
-    description="desc_0",
-    mobilizon_id=UUID(int=0),
-    mobilizon_link="moblink_0",
-    thumbnail_link="thumblink_0",
-    location="loc_0",
-    status=EventPublicationStatus.WAITING,
-    begin_datetime=arrow.get(today),
-    end_datetime=arrow.get(today + timedelta(hours=2)),
-    last_update_time=arrow.get(today),
-)
-
-event_1 = MobilizonEvent(
-    name="event_1",
-    description="desc_1",
-    mobilizon_id=UUID(int=1),
-    mobilizon_link="moblink_1",
-    thumbnail_link="thumblink_1",
-    location="loc_1",
-    status=EventPublicationStatus.WAITING,
-    begin_datetime=arrow.get(today + timedelta(days=1)),
-    end_datetime=arrow.get(today + timedelta(days=1) + timedelta(hours=2)),
-    last_update_time=arrow.get(today + timedelta(days=1)),
-)
-
-event_2 = MobilizonEvent(
-    name="event_2",
-    description="desc_2",
-    mobilizon_id=UUID(int=2),
-    mobilizon_link="moblink_2",
-    thumbnail_link="thumblink_2",
-    location="loc_2",
-    status=EventPublicationStatus.WAITING,
-    begin_datetime=arrow.get(today + timedelta(days=2)),
-    end_datetime=arrow.get(today + timedelta(days=2) + timedelta(hours=2)),
-    last_update_time=arrow.get(today + timedelta(days=2)),
-)
-
-event_3 = MobilizonEvent(
-    name="event_3",
-    description="desc_3",
-    mobilizon_id=UUID(int=3),
-    mobilizon_link="moblink_3",
-    thumbnail_link="thumblink_3",
-    location="loc_3",
-    status=EventPublicationStatus.WAITING,
-    begin_datetime=arrow.get(today + timedelta(days=3)),
-    end_datetime=arrow.get(today + timedelta(days=3) + timedelta(hours=2)),
-    last_update_time=arrow.get(today + timedelta(days=3)),
-)
-
-event_3_updated = MobilizonEvent(
-    name="event_3",
-    description="desc_3",
-    mobilizon_id=UUID(int=3),
-    mobilizon_link="moblink_3",
-    thumbnail_link="thumblink_3",
-    location="loc_6",
-    status=EventPublicationStatus.WAITING,
-    begin_datetime=arrow.get(today + timedelta(days=3)),
-    end_datetime=arrow.get(today + timedelta(days=3) + timedelta(hours=2)),
-    last_update_time=arrow.get(today + timedelta(days=4)),
-)
-
-event_6 = MobilizonEvent(
-    name="event_6",
-    description="desc_6",
-    mobilizon_id=UUID(int=6),
-    mobilizon_link="moblink_6",
-    thumbnail_link="thumblink_6",
-    location="loc_6",
-    status=EventPublicationStatus.WAITING,
-    begin_datetime=arrow.get(today + timedelta(days=6)),
-    end_datetime=arrow.get(today + timedelta(days=6) + timedelta(hours=2)),
-    last_update_time=arrow.get(today + timedelta(days=6)),
-)
-
 complete_specification = {
     "event": 4,
     "publications": [
diff --git a/tests/storage/test_query.py b/tests/storage/test_query.py
index 4551407..369dbc3 100644
--- a/tests/storage/test_query.py
+++ b/tests/storage/test_query.py
@@ -13,7 +13,8 @@ from mobilizon_reshare.storage.query.read import (
     build_publications,
 )
 from tests import today
-from tests.storage import complete_specification, event_0, event_1, event_3
+from tests.storage import complete_specification
+from tests.conftest import event_0, event_1, event_3
 from tests.storage import result_publication
diff --git a/tests/storage/test_update.py b/tests/storage/test_update.py
index 85a0f7f..914b04f 100644
--- a/tests/storage/test_update.py
+++ b/tests/storage/test_update.py
@@ -20,14 +20,8 @@ from mobilizon_reshare.storage.query.write import (
 )
 from tests.storage import (
     complete_specification,
-    event_0,
-    event_1,
-    event_2,
-    event_3,
-    event_3_updated,
-    event_6,
 )
-
+from tests.conftest import event_6, event_0, event_1, event_2, event_3, event_3_updated
 
 two_publishers_specification = {"publisher": ["telegram", "twitter"]}