add dry run (#169)

* introduced CommandConfig object

* added dry_run for start command

* added test for start_dry_run

* added dry_run recap

* fixed import

* improved printing

* fixed zulip debug info

* improved recap dry-run print
This commit is contained in:
Simone Robutti 2022-07-12 07:39:56 +02:00 committed by GitHub
parent cf2fabefb4
commit 9d71ef36b9
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
19 changed files with 399 additions and 126 deletions

View File

@ -1,10 +1,12 @@
import asyncio
import functools
import logging
import traceback
from logging.config import dictConfig
from pathlib import Path
import sys
from mobilizon_reshare.config.command import CommandConfig
from mobilizon_reshare.config.config import get_settings
from mobilizon_reshare.storage.db import tear_down, MoReDB
@ -23,12 +25,12 @@ async def init():
await db.setup()
async def _safe_execution(f):
async def _safe_execution(function):
await init()
return_code = 1
try:
return_code = await f()
return_code = await function()
except Exception:
traceback.print_exc()
finally:
@ -37,6 +39,9 @@ async def _safe_execution(f):
return return_code
def safe_execution(f):
code = asyncio.run(_safe_execution(f))
def safe_execution(function, command_config: CommandConfig = None):
    """Run *function* inside _safe_execution and exit with its return code.

    If a CommandConfig is provided, it is bound as the first positional
    argument of *function* before execution; otherwise the function is run
    unchanged.
    """
    if command_config:
        function = functools.partial(function, command_config)
    code = asyncio.run(_safe_execution(function))
    sys.exit(code)

View File

@ -3,6 +3,7 @@ import functools
import click
from click import pass_context
from mobilizon_reshare.config.command import CommandConfig
from mobilizon_reshare.cli import safe_execution
from mobilizon_reshare.cli.commands.format.format import format_event
from mobilizon_reshare.cli.commands.list.list_event import list_events
@ -144,21 +145,27 @@ def mobilizon_reshare(obj):
@mobilizon_reshare.command(
help="Synchronize and publish events. It is equivalent to running consecutively pull and then publish."
)
@pass_context
def start(
ctx,
):
ctx.ensure_object(dict)
safe_execution(
start_main,
)
@click.option(
"--dry-run",
is_flag=True,
help="Prevents data to be published to platforms. WARNING: it will download and write new events to the database",
default=False,
)
def start(dry_run):
safe_execution(start_main, CommandConfig(dry_run=dry_run))
@mobilizon_reshare.command(help="Publish a recap of already published events.")
def recap():
safe_execution(
recap_main,
)
@click.option(
"--dry-run",
"dry_run",
is_flag=True,
help="Prevents data to be published to platforms. WARNING: it will download and write new events to the database",
default=False,
)
def recap(dry_run):
safe_execution(recap_main, CommandConfig(dry_run=dry_run))
@mobilizon_reshare.command(
@ -166,9 +173,7 @@ def recap():
"update them if they are known and changed."
)
def pull():
safe_execution(
pull_main,
)
safe_execution(pull_main,)
@mobilizon_reshare.command(
@ -179,9 +184,7 @@ def pull():
@publication_uuid_option
@platform_name_option
def publish():
safe_execution(
publish_main,
)
safe_execution(publish_main,)
@mobilizon_reshare.group(help="Operations that pertain to events")
@ -202,10 +205,7 @@ def event_list(status, begin, end):
safe_execution(
functools.partial(
list_events,
status_name_to_enum["event"][status],
frm=begin,
to=end,
list_events, status_name_to_enum["event"][status], frm=begin, to=end,
),
)
@ -232,28 +232,21 @@ def publication_list(status, begin, end):
@click.argument("event-id", type=click.UUID)
@click.argument("publisher", type=click.Choice(publisher_names))
def format(
event_id,
publisher,
event_id, publisher,
):
safe_execution(
functools.partial(format_event, event_id, publisher),
)
safe_execution(functools.partial(format_event, event_id, publisher),)
@event.command(name="retry", help="Retries all the failed publications")
@click.argument("event-id", type=click.UUID)
def event_retry(event_id):
safe_execution(
functools.partial(retry_event_command, event_id),
)
safe_execution(functools.partial(retry_event_command, event_id),)
@publication.command(name="retry", help="Retries a specific publication")
@click.argument("publication-id", type=click.UUID)
def publication_retry(publication_id):
safe_execution(
functools.partial(retry_publication_command, publication_id),
)
safe_execution(functools.partial(retry_publication_command, publication_id),)
if __name__ == "__main__":

View File

@ -0,0 +1,5 @@
import click
def print_reports(reports) -> None:
    """Echo the given publication reports to stdout via click.

    Relies on the reports object's own __str__ for formatting.
    """
    click.echo(reports)

View File

@ -1,11 +1,15 @@
import logging.config
from mobilizon_reshare.cli.commands import print_reports
from mobilizon_reshare.config.command import CommandConfig
from mobilizon_reshare.main.recap import recap
logger = logging.getLogger(__name__)
async def recap_command():
async def recap_command(command_config: CommandConfig):
reports = await recap()
reports = await recap(command_config)
if command_config.dry_run and reports:
print_reports(reports)
return 0 if reports and reports.successful else 1

View File

@ -1,10 +1,14 @@
from mobilizon_reshare.cli.commands import print_reports
from mobilizon_reshare.config.command import CommandConfig
from mobilizon_reshare.main.start import start
async def start_command():
async def start_command(command_config: CommandConfig):
"""
STUB
:return:
"""
reports = await start()
reports = await start(command_config)
if command_config.dry_run and reports:
print_reports(reports)
return 0 if reports and reports.successful else 1

View File

@ -0,0 +1,6 @@
import dataclasses
@dataclasses.dataclass
class CommandConfig:
    """Runtime options that modulate how a CLI command executes.

    dry_run: when True, commands must avoid publishing anything to
    external platforms (they may still read/write the local database).
    """

    dry_run: bool = False

View File

@ -1,6 +1,7 @@
import logging.config
from typing import Optional
from typing import Optional, Iterator
from mobilizon_reshare.config.command import CommandConfig
from mobilizon_reshare.event.event import MobilizonEvent
from mobilizon_reshare.event.event_selection_strategies import select_event_to_publish
from mobilizon_reshare.publishers import get_active_publishers
@ -16,6 +17,7 @@ from mobilizon_reshare.storage.query.read import (
events_without_publications,
)
from mobilizon_reshare.storage.query.write import save_publication_report
from mobilizon_reshare.publishers.coordinator import DryRunPublisherCoordinator
logger = logging.getLogger(__name__)
@ -28,15 +30,19 @@ async def publish_publications(
await save_publication_report(report)
for publication_report in report.reports:
if not publication_report.succesful:
PublicationFailureNotifiersCoordinator(
publication_report,
).notify_failure()
PublicationFailureNotifiersCoordinator(publication_report,).notify_failure()
return report
def perform_dry_run(publications: list[EventPublication]):
    """Run the publications through the dry-run coordinator.

    Produces a report without sending anything to the platforms.
    """
    return DryRunPublisherCoordinator(publications).run()
async def publish_event(
event: MobilizonEvent, publishers: Optional[list[Optional[str]]] = None
event: MobilizonEvent,
command_config: CommandConfig,
publishers: Optional[Iterator[str]] = None,
) -> PublisherCoordinatorReport:
logger.info(f"Event to publish found: {event.name}")
@ -44,10 +50,15 @@ async def publish_event(
publishers = get_active_publishers()
publications = await build_publications(event, publishers)
return await publish_publications(publications)
if command_config.dry_run:
logger.info("Executing in dry run mode. No event is going to be published.")
return perform_dry_run(publications)
else:
return await publish_publications(publications)
async def select_and_publish(
command_config: CommandConfig,
unpublished_events: Optional[list[MobilizonEvent]] = None,
) -> Optional[PublisherCoordinatorReport]:
"""
@ -58,11 +69,10 @@ async def select_and_publish(
unpublished_events = await events_without_publications()
event = select_event_to_publish(
list(await get_published_events()),
unpublished_events,
list(await get_published_events()), unpublished_events,
)
if event:
return await publish_event(event)
return await publish_event(event, command_config)
else:
logger.info("No event to publish found")

View File

@ -2,10 +2,7 @@ import logging.config
from mobilizon_reshare.event.event import MobilizonEvent
from mobilizon_reshare.mobilizon.events import get_mobilizon_future_events
from mobilizon_reshare.storage.query.write import (
create_unpublished_events,
)
from mobilizon_reshare.storage.query.write import create_unpublished_events
logger = logging.getLogger(__name__)

View File

@ -3,6 +3,7 @@ from typing import Optional, List
from arrow import now
from mobilizon_reshare.config.command import CommandConfig
from mobilizon_reshare.event.event import EventPublicationStatus, MobilizonEvent
from mobilizon_reshare.publishers import get_active_publishers
from mobilizon_reshare.publishers.abstract import RecapPublication
@ -16,6 +17,7 @@ from mobilizon_reshare.publishers.platforms.platform_mapping import (
get_formatter_class,
)
from mobilizon_reshare.storage.query.read import events_with_status
from mobilizon_reshare.publishers.coordinator import DryRunRecapCoordinator
logger = logging.getLogger(__name__)
@ -28,7 +30,7 @@ async def select_events_to_recap() -> List[MobilizonEvent]:
)
async def recap() -> Optional[BaseCoordinatorReport]:
async def recap(command_config: CommandConfig) -> Optional[BaseCoordinatorReport]:
# I want to recap only the events that have been successfully published and that haven't happened yet
events_to_recap = await select_events_to_recap()
@ -42,7 +44,10 @@ async def recap() -> Optional[BaseCoordinatorReport]:
)
for publisher in get_active_publishers()
]
reports = RecapCoordinator(recap_publications).run()
if command_config.dry_run:
reports = DryRunRecapCoordinator(recap_publications).run()
else:
reports = RecapCoordinator(recap_publications).run()
for report in reports.reports:
if report.status == EventPublicationStatus.FAILED:

View File

@ -1,6 +1,6 @@
import logging.config
from typing import Optional
from mobilizon_reshare.config.command import CommandConfig
from mobilizon_reshare.main.publish import select_and_publish
from mobilizon_reshare.main.pull import pull
from mobilizon_reshare.publishers.coordinator import PublisherCoordinatorReport
@ -8,10 +8,10 @@ from mobilizon_reshare.publishers.coordinator import PublisherCoordinatorReport
logger = logging.getLogger(__name__)
async def start() -> Optional[PublisherCoordinatorReport]:
async def start(command_config: CommandConfig) -> PublisherCoordinatorReport:
"""
STUB
:return:
"""
events = await pull()
return await select_and_publish(events)
return await select_and_publish(command_config, events,)

View File

@ -1,11 +1,13 @@
from typing import Iterator
import mobilizon_reshare.config.notifiers
import mobilizon_reshare.config.publishers
from mobilizon_reshare.config.config import get_settings
def get_active_publishers():
def get_active_publishers() -> Iterator[str]:
    """Delegate to config.publishers, resolving against the current settings."""
    return mobilizon_reshare.config.publishers.get_active_publishers(get_settings())
def get_active_notifiers():
def get_active_notifiers() -> Iterator[str]:
    """Delegate to config.notifiers, resolving against the current settings."""
    return mobilizon_reshare.config.notifiers.get_active_notifiers(get_settings())

View File

@ -1,7 +1,8 @@
import dataclasses
import logging
from abc import abstractmethod, ABC
from dataclasses import dataclass
from typing import List, Optional
from typing import List, Optional, Sequence
from mobilizon_reshare.models.publication import PublicationStatus
from mobilizon_reshare.publishers import get_active_notifiers
@ -26,18 +27,23 @@ class BasePublicationReport:
return self.status == PublicationStatus.COMPLETED
def get_failure_message(self):
return (
f"Publication failed with status: {self.status}.\n" f"Reason: {self.reason}"
)
@dataclass
class RecapPublicationReport(BasePublicationReport):
publication: RecapPublication
published_content: Optional[str] = dataclasses.field(default=None)
@dataclass
class EventPublicationReport(BasePublicationReport):
publication: EventPublication
published_content: Optional[str] = dataclasses.field(default=None)
def get_failure_message(self):
if not self.reason:
logger.error("Report of failure without reason.", exc_info=True)
@ -51,7 +57,7 @@ class EventPublicationReport(BasePublicationReport):
@dataclass
class BaseCoordinatorReport:
reports: List[BasePublicationReport]
reports: Sequence[BasePublicationReport]
@property
def successful(self):
@ -59,10 +65,38 @@ class BaseCoordinatorReport:
@dataclass
class PublisherCoordinatorReport(BaseCoordinatorReport):
class RecapCoordinatorReport(BaseCoordinatorReport):
reports: Sequence[RecapPublicationReport]
reports: List[EventPublicationReport]
publications: List[EventPublication]
def __str__(self):
platform_messages = []
for report in self.reports:
intro = f"Message for: {report.publication.publisher.name}"
platform_messages.append(
f"""{intro}
{"*"*len(intro)}
{report.published_content}
{"-"*80}"""
)
return "\n".join(platform_messages)
@dataclass
class PublisherCoordinatorReport(BaseCoordinatorReport):
reports: Sequence[EventPublicationReport]
publications: Sequence[EventPublication] = dataclasses.field(default_factory=list)
def __str__(self):
platform_messages = []
for report in self.reports:
intro = f"Message for: {report.publication.publisher.name}"
platform_messages.append(
f"""{intro}
{"*"*len(intro)}
{report.published_content}
{"-"*80}"""
)
return "\n".join(platform_messages)
class PublisherCoordinator:
@ -94,6 +128,7 @@ class PublisherCoordinator:
status=PublicationStatus.COMPLETED,
publication=publication,
reason=None,
published_content=message,
)
)
except PublisherError as e:
@ -141,6 +176,35 @@ class PublisherCoordinator:
return errors
class DryRunPublisherCoordinator(PublisherCoordinator):
    """Coordinator that renders publications without sending anything.

    Validation still runs as in the parent class, but instead of
    publishing, each publication's message is formatted and recorded in
    the report as if it had been published successfully.
    """

    def __init__(self, publications: List[EventPublication]):
        # Unlike the parent constructor, no platform clients are set up.
        self.publications = publications

    def run(self) -> PublisherCoordinatorReport:
        """Validate the publications and build a simulated success report."""
        validation_errors = self._validate()
        if validation_errors:
            return PublisherCoordinatorReport(
                reports=validation_errors, publications=self.publications
            )

        simulated_reports = []
        for publication in self.publications:
            # Format the message exactly as a real publication would,
            # so the dry-run report shows the content that would be sent.
            rendered = publication.formatter.get_message_from_event(
                publication.event
            )
            simulated_reports.append(
                EventPublicationReport(
                    status=PublicationStatus.COMPLETED,
                    publication=publication,
                    reason=None,
                    published_content=rendered,
                )
            )
        return PublisherCoordinatorReport(
            publications=self.publications, reports=simulated_reports
        )
class Sender:
def __init__(self, message: str, platforms: List[AbstractPlatform] = None):
self.message = message
@ -156,7 +220,9 @@ class Sender:
class AbstractNotifiersCoordinator(ABC):
def __init__(self, report: EventPublicationReport, notifiers: List[AbstractPlatform] = None):
def __init__(
self, report: EventPublicationReport, notifiers: List[AbstractPlatform] = None
):
self.platforms = notifiers or [
get_notifier_class(notifier)() for notifier in get_active_notifiers()
]
@ -192,28 +258,42 @@ class RecapCoordinator:
def __init__(self, recap_publications: List[RecapPublication]):
self.recap_publications = recap_publications
def run(self) -> BaseCoordinatorReport:
def _build_recap_content(self, recap_publication: RecapPublication):
fragments = [recap_publication.formatter.get_recap_header()]
for event in recap_publication.events:
fragments.append(recap_publication.formatter.get_recap_fragment(event))
return "\n\n".join(fragments)
def _send(self, content, recap_publication):
recap_publication.publisher.send(content)
def run(self) -> RecapCoordinatorReport:
reports = []
for recap_publication in self.recap_publications:
try:
fragments = [recap_publication.formatter.get_recap_header()]
for event in recap_publication.events:
fragments.append(
recap_publication.formatter.get_recap_fragment(event)
)
message = "\n\n".join(fragments)
recap_publication.publisher.send(message)
message = self._build_recap_content(recap_publication)
self._send(message, recap_publication)
reports.append(
BasePublicationReport(
status=PublicationStatus.COMPLETED, reason=None,
RecapPublicationReport(
status=PublicationStatus.COMPLETED,
reason=None,
published_content=message,
publication=recap_publication,
)
)
except PublisherError as e:
reports.append(
BasePublicationReport(
status=PublicationStatus.FAILED, reason=str(e),
RecapPublicationReport(
status=PublicationStatus.FAILED,
reason=str(e),
publication=recap_publication,
)
)
return BaseCoordinatorReport(reports=reports)
return RecapCoordinatorReport(reports=reports)
class DryRunRecapCoordinator(RecapCoordinator):
    """Recap coordinator that skips the actual delivery step (dry-run mode)."""

    def _send(self, content, recap_publication):
        # Intentionally a no-op: the recap content is still built by the
        # parent class and recorded in the report, but never sent.
        pass

View File

@ -102,7 +102,7 @@ class ZulipPlatform(AbstractPlatform):
try:
response.raise_for_status()
except requests.exceptions.HTTPError as e:
self._log_debug(str(response))
self._log_debug(str(response.text))
self._log_error(
str(e), raise_error=HTTPResponseError,
)

View File

@ -1,5 +1,5 @@
from functools import partial
from typing import Iterable, Optional
from typing import Iterable, Optional, Iterator
from uuid import UUID
from arrow import Arrow
@ -64,8 +64,7 @@ async def events_with_status(
async def get_all_publications(
from_date: Optional[Arrow] = None,
to_date: Optional[Arrow] = None,
from_date: Optional[Arrow] = None, to_date: Optional[Arrow] = None,
) -> Iterable[Publication]:
return await prefetch_publication_relations(
_add_date_window(Publication.all(), "timestamp", from_date, to_date)
@ -73,8 +72,7 @@ async def get_all_publications(
async def get_all_events(
from_date: Optional[Arrow] = None,
to_date: Optional[Arrow] = None,
from_date: Optional[Arrow] = None, to_date: Optional[Arrow] = None,
) -> list[MobilizonEvent]:
return [
event_from_model(event)
@ -134,8 +132,7 @@ async def publications_with_status(
async def events_without_publications(
from_date: Optional[Arrow] = None,
to_date: Optional[Arrow] = None,
from_date: Optional[Arrow] = None, to_date: Optional[Arrow] = None,
) -> list[MobilizonEvent]:
query = Event.filter(publications__id=None)
events = await prefetch_event_relations(
@ -172,7 +169,7 @@ async def is_known(event: MobilizonEvent) -> bool:
@atomic(CONNECTION_NAME)
async def build_publications(
event: MobilizonEvent, publishers: list[str]
event: MobilizonEvent, publishers: Iterator[str]
) -> list[EventPublication]:
event_model = await get_event(event.mobilizon_id)
models = [

View File

@ -24,9 +24,9 @@ three_event_specification = {
@pytest.mark.asyncio
async def test_publish_no_event(caplog):
async def test_publish_no_event(caplog, command_config):
with caplog.at_level(DEBUG):
assert await select_and_publish() is None
assert await select_and_publish(command_config) is None
assert "No event to publish found" in caplog.text
@ -48,11 +48,12 @@ async def test_select_and_publish_new_event(
message_collector,
specification,
expected_event,
command_config,
):
await generate_models(specification)
with caplog.at_level(DEBUG):
# calling the publish command without arguments
assert await select_and_publish() is not None
assert await select_and_publish(command_config) is not None
assert "Event to publish found" in caplog.text
assert message_collector == [
@ -94,11 +95,12 @@ async def test_publish_event(
message_collector,
publishers,
expected,
command_config,
):
await generate_models(one_unpublished_event_specification)
with caplog.at_level(DEBUG):
# calling mobilizon-reshare publish -E <UUID> -p <platform>
report = await publish_event(event_0, publishers)
report = await publish_event(event_0, command_config, publishers)
assert report is not None
assert report.successful

View File

@ -24,8 +24,7 @@ one_unpublished_event_specification = {
@pytest.mark.asyncio
@pytest.mark.parametrize(
"elements, expected_result",
[[[], []]],
"elements, expected_result", [[[], []]],
)
async def test_pull_no_event(
generate_models,
@ -104,6 +103,7 @@ async def test_pull_start(
specification,
expected_pull,
expected_publish,
command_config,
):
await generate_models(specification)
@ -115,7 +115,7 @@ async def test_pull_start(
assert expected_pull == await get_all_events()
assert expected_pull == await events_without_publications()
report = await start()
report = await start(command_config)
assert report.successful
assert f"Event to publish found: {expected_publish.name}" in caplog.text
@ -135,9 +135,7 @@ async def test_pull_start(
)
@pytest.mark.parametrize(
"elements, specification, expected_result",
[
[[second_event_element()], one_unpublished_event_specification, event_0],
],
[[[second_event_element()], one_unpublished_event_specification, event_0]],
)
async def test_start_pull(
generate_models,
@ -149,6 +147,7 @@ async def test_start_pull(
elements,
specification,
expected_result,
command_config,
):
await generate_models(specification)
@ -156,7 +155,7 @@ async def test_start_pull(
message_collector.data = []
with caplog.at_level(DEBUG):
assert await start()
assert await start(command_config)
assert f"Event to publish found: {expected_result.name}" in caplog.text
assert await pull()
assert "There are now 1 unpublished events."

View File

@ -4,6 +4,7 @@ import pytest
from mobilizon_reshare.cli.commands.recap.main import recap
from mobilizon_reshare.models.publication import PublicationStatus
from mobilizon_reshare.main.publish import CommandConfig
spec = {
# We need three events since recap will print only
@ -18,17 +19,22 @@ spec = {
@pytest.mark.parametrize(
"publisher_class", [pytest.lazy_fixture("mock_publisher_invalid_class")]
"publisher_class", [pytest.lazy_fixture("mock_publisher_class")]
)
@pytest.mark.asyncio
async def test_start_event_from_db(
caplog, mock_publisher_config, mock_now, message_collector, generate_models
async def test_recap_event_from_db(
caplog,
mock_publisher_config,
mock_now,
message_collector,
generate_models,
command_config,
):
await generate_models(spec)
with caplog.at_level(DEBUG):
# calling the recap command
report = await recap()
report = await recap(command_config)
assert report.successful
assert "Found 2 events to recap" in caplog.text
@ -39,3 +45,47 @@ event_1
event_2"""
assert message_collector == [recap_message]
@pytest.mark.asyncio
@pytest.mark.parametrize(
    "dry_run", [True, False]
)  # the behavior should be identical with and without dry-run
async def test_recap_no_event(caplog, mock_now, message_collector, dry_run):
    """With no events in the database, recap returns None and sends nothing."""
    with caplog.at_level(DEBUG):
        # calling the recap command
        report = await recap(CommandConfig(dry_run=dry_run))
        assert report is None
        assert "Found no events" in caplog.text
        assert message_collector == []
@pytest.mark.parametrize(
    "publisher_class", [pytest.lazy_fixture("mock_publisher_invalid_class")]
)
@pytest.mark.asyncio
async def test_recap_event_dry_run(
    caplog, mock_publisher_config, mock_now, message_collector, generate_models
):
    """In dry-run mode the recap content is built but never delivered.

    The invalid publisher fixture would fail a real send, so a successful
    report here proves the send step was skipped.
    """
    await generate_models(spec)
    with caplog.at_level(DEBUG):
        # calling the recap command
        reports = await recap(CommandConfig(dry_run=True))
        assert reports.successful
        assert "Found 2 events to recap" in caplog.text
        # nothing must reach the publisher in dry-run mode
        assert message_collector == []
        recap_message = """Upcoming
event_1
event_2"""
        # each report carries the rendered recap instead of a sent message
        for report in reports.reports:
            assert report.published_content == recap_message

View File

@ -2,6 +2,7 @@ from logging import DEBUG, INFO
import pytest
from mobilizon_reshare.config.command import CommandConfig
from mobilizon_reshare.storage.query.converter import event_from_model, event_to_model
from mobilizon_reshare.storage.query.read import get_all_events
from tests.commands.conftest import simple_event_element, second_event_element
@ -10,18 +11,27 @@ from mobilizon_reshare.main.start import start
from mobilizon_reshare.models.event import Event
from mobilizon_reshare.models.publication import PublicationStatus
one_published_event_specification = {"event": 1, "publications": [{"event_idx": 0, "publisher_idx": 0, "status": PublicationStatus.COMPLETED}], "publisher": ["telegram", "twitter", "mastodon", "zulip"]}
one_published_event_specification = {
"event": 1,
"publications": [
{"event_idx": 0, "publisher_idx": 0, "status": PublicationStatus.COMPLETED}
],
"publisher": ["telegram", "twitter", "mastodon", "zulip"],
}
@pytest.mark.asyncio
@pytest.mark.parametrize(
"dry_run", [True, False]
) # the behavior should be identical with and without dry-run
@pytest.mark.parametrize(
"elements", [[]],
)
async def test_start_no_event(
mock_mobilizon_success_answer, mobilizon_answer, caplog, elements
mock_mobilizon_success_answer, mobilizon_answer, caplog, elements, dry_run
):
with caplog.at_level(DEBUG):
assert await start() is None
assert await start(CommandConfig(dry_run=dry_run)) is None
assert "No event to publish found" in caplog.text
@ -40,10 +50,11 @@ async def test_start_new_event(
mock_publisher_config,
message_collector,
elements,
command_config,
):
with caplog.at_level(DEBUG):
# calling the start command
assert await start() is not None
assert await start(command_config) is not None
# since the mobilizon_answer contains at least one result, one event to publish must be found and published
# by the publisher coordinator
@ -74,7 +85,9 @@ async def test_start_new_event(
assert p.status == PublicationStatus.COMPLETED
# the derived status for the event should be COMPLETED
assert event_from_model(all_events[0]).status == EventPublicationStatus.COMPLETED
assert (
event_from_model(all_events[0]).status == EventPublicationStatus.COMPLETED
)
@pytest.mark.asyncio
@ -91,6 +104,7 @@ async def test_start_event_from_db(
mock_publisher_config,
message_collector,
event_generator,
command_config,
):
event = event_generator()
event_model = event_to_model(event)
@ -98,7 +112,13 @@ async def test_start_event_from_db(
with caplog.at_level(DEBUG):
# calling the start command
assert await start() is not None
result = await start(command_config)
assert result.successful
assert len(result.reports) == 1
assert (
result.reports[0].published_content == "test event|description of the event"
)
# since the db contains at least one event, this has to be picked and published
assert "Event to publish found" in caplog.text
@ -134,6 +154,7 @@ async def test_start_publisher_failure(
message_collector,
event_generator,
mock_notifier_config,
command_config,
):
event = event_generator()
event_model = event_to_model(event)
@ -141,7 +162,11 @@ async def test_start_publisher_failure(
with caplog.at_level(DEBUG):
# calling the start command
assert await start() is not None
result = await start(command_config)
assert not result.successful
assert len(result.reports) == 1
assert result.reports[0].published_content is None
# since the db contains at least one event, this has to be picked and published
@ -179,7 +204,8 @@ async def test_start_second_execution(
caplog,
mock_publisher_config,
message_collector,
generate_models
generate_models,
command_config,
):
await generate_models(one_published_event_specification)
@ -188,7 +214,7 @@ async def test_start_second_execution(
with caplog.at_level(INFO):
# calling the start command
assert await start() is not None
assert await start(command_config) is not None
# verify that the second event gets published
assert "Event to publish found" in caplog.text
@ -197,3 +223,93 @@ async def test_start_second_execution(
]
# I verify that the db event and the new event coming from mobilizon are both in the db
assert len(list(await get_all_events())) == 2
@pytest.mark.parametrize(
    "publisher_class", [pytest.lazy_fixture("mock_publisher_class")]
)
@pytest.mark.asyncio
@pytest.mark.parametrize(
    "elements",
    [[simple_event_element()], [simple_event_element(), simple_event_element()]],
)
async def test_start_dry_run(
    mock_mobilizon_success_answer,
    mobilizon_answer,
    caplog,
    mock_publisher_config,
    message_collector,
    elements,
):
    """Dry-run start saves pulled events but creates no publications."""
    with caplog.at_level(DEBUG):
        # calling the start command
        result = await start(CommandConfig(dry_run=True))
        assert result.successful
        assert len(result.reports) == 1
        assert result.reports[0].published_content == "test event|Some description"
        assert "Event to publish found" in caplog.text
        assert (
            "Executing in dry run mode. No event is going to be published."
            in caplog.text
        )
        assert (
            message_collector == []
        )  # the configured publisher shouldn't be called if in dry run mode
        all_events = (
            await Event.all()
            .prefetch_related("publications")
            .prefetch_related("publications__publisher")
        )
        # the start command should save all the events in the database
        assert len(all_events) == len(elements), all_events
        # it should create no publication
        publications = all_events[0].publications
        assert len(publications) == 0, publications
@pytest.mark.parametrize(
    "publisher_class", [pytest.lazy_fixture("mock_publisher_class")]
)
@pytest.mark.asyncio
@pytest.mark.parametrize(
    "elements",
    [[simple_event_element()], [simple_event_element(), simple_event_element()]],
)
async def test_start_dry_run_second_execution(
    mock_mobilizon_success_answer,
    mobilizon_answer,
    caplog,
    mock_publisher_config,
    message_collector,
    elements,
):
    """A dry run followed by a real run publishes exactly once and stores no duplicates."""
    with caplog.at_level(DEBUG):
        # calling the start command in dry_run
        assert await start(CommandConfig(dry_run=True)) is not None
        assert "Event to publish found" in caplog.text
        assert (
            "Executing in dry run mode. No event is going to be published."
            in caplog.text
        )
        assert (
            message_collector == []
        )  # the configured publisher shouldn't be called if in dry run mode
        # calling the start command in normal mode
        assert await start(CommandConfig(dry_run=False)) is not None
        assert message_collector == [
            "test event|Some description"
        ]  # the publisher should now have published one message
        all_events = (
            await Event.all()
            .prefetch_related("publications")
            .prefetch_related("publications__publisher")
        )
        # verify that the dry run doesn't mistakenly do double saves
        assert len(all_events) == len(elements), all_events

View File

@ -12,6 +12,7 @@ import responses
from tortoise.contrib.test import finalizer, initializer
import mobilizon_reshare
from mobilizon_reshare.config.command import CommandConfig
from mobilizon_reshare.config.config import get_settings
from mobilizon_reshare.event.event import MobilizonEvent, EventPublicationStatus
from mobilizon_reshare.models.event import Event
@ -174,9 +175,7 @@ def event_model_generator():
@pytest.fixture()
def publisher_model_generator():
def _publisher_model_generator(
idx=1,
):
def _publisher_model_generator(idx=1,):
return Publisher(name=f"publisher_{idx}", account_ref=f"account_ref_{idx}")
return _publisher_model_generator
@ -413,10 +412,7 @@ def mock_mobilizon_success_answer(mobilizon_answer, mobilizon_url):
with responses.RequestsMock() as rsps:
rsps.add(
responses.POST,
mobilizon_url,
json=mobilizon_answer,
status=200,
responses.POST, mobilizon_url, json=mobilizon_answer, status=200,
)
yield
@ -428,10 +424,7 @@ def mock_multiple_success_answer(multiple_answers, mobilizon_url):
for answer in multiple_answers:
rsps.add(
responses.POST,
mobilizon_url,
json=answer,
status=200,
responses.POST, mobilizon_url, json=answer, status=200,
)
yield
@ -506,3 +499,8 @@ async def failed_publication(stored_event) -> Publication:
)
await p.save()
return p
@pytest.fixture
def command_config():
    """A default CommandConfig with dry-run disabled, for normal-mode tests."""
    return CommandConfig(dry_run=False)