fix event retry (#152)

* added twitter error handling

* added facebook tests

* added header format test

* added multiple newlines check

* added test list command

* fixed commands structure

* fixed event retry

* fixed publication retry

* added publication tests

* removed unused option

* fixed list begin/end window

* added test retry failures

* linting

* refactored sender

* added timezone freeze

* fixed facebook-sdk and beautifulsoup errors
Simone Robutti 2022-03-06 10:41:02 +01:00 committed by GitHub
parent 8b81ceedd0
commit 1217b17326
24 changed files with 230 additions and 144 deletions

View File

@@ -3,6 +3,7 @@ import logging
 import traceback
 from logging.config import dictConfig
 from pathlib import Path
+import sys
 
 from mobilizon_reshare.config.config import get_settings
 from mobilizon_reshare.storage.db import tear_down, MoReDB
@@ -38,4 +39,4 @@ async def _safe_execution(f):
 
 def safe_execution(f):
     code = asyncio.run(_safe_execution(f))
-    exit(code)
+    sys.exit(code)
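
A brief aside on the exit swap, as a stand-alone sketch (not project code): the bare exit() helper is injected by the site module for interactive use and can be missing in embedded or frozen interpreters, while sys.exit is always importable.

import sys

def finish(code: int) -> None:
    # sys.exit raises SystemExit carrying the process exit code; unlike the
    # interactive exit() builtin, it does not depend on the site module.
    sys.exit(code)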

View File

@@ -1,4 +1,5 @@
-from typing import Iterable
+from datetime import datetime
+from typing import Iterable, Optional
 
 import click
 from arrow import Arrow
@@ -42,8 +43,13 @@ async def list_unpublished_events(frm: Arrow = None, to: Arrow = None):
 
 async def list_events(
-    status: EventPublicationStatus = None, frm: Arrow = None, to: Arrow = None
+    status: EventPublicationStatus = None,
+    frm: Optional[datetime] = None,
+    to: Optional[datetime] = None,
 ):
+    frm = Arrow.fromdatetime(frm) if frm else None
+    to = Arrow.fromdatetime(to) if to else None
     if status is None:
         events = await get_all_events(from_date=frm, to_date=to)
     elif status == EventPublicationStatus.WAITING:
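
A minimal stand-alone sketch of the datetime-to-Arrow conversion both list commands now perform; the to_window helper name is illustrative, not part of this commit:

from datetime import datetime
from typing import Optional, Tuple
from arrow import Arrow

def to_window(
    frm: Optional[datetime], to: Optional[datetime]
) -> Tuple[Optional[Arrow], Optional[Arrow]]:
    # The CLI options are expected to supply plain datetimes; the query layer
    # works with Arrow instances, and None means "no bound on this side".
    return (
        Arrow.fromdatetime(frm) if frm else None,
        Arrow.fromdatetime(to) if to else None,
    )

begin, end = to_window(datetime(2021, 6, 6, 3, 30), None)  # only a lower bound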

View File

@@ -1,4 +1,5 @@
-from typing import Iterable
+from datetime import datetime
+from typing import Iterable, Optional
 
 import click
 from arrow import Arrow
@@ -28,8 +29,13 @@ def pretty(publication: Publication):
 
 async def list_publications(
-    status: PublicationStatus = None, frm: Arrow = None, to: Arrow = None
+    status: PublicationStatus = None,
+    frm: Optional[datetime] = None,
+    to: Optional[datetime] = None,
 ):
+    frm = Arrow.fromdatetime(frm) if frm else None
+    to = Arrow.fromdatetime(to) if to else None
     if status is None:
         publications = await get_all_publications(from_date=frm, to_date=to)
     else:

View File

@@ -36,7 +36,7 @@ def current_version() -> str:
         return fp.read()
 
 
-def get_settings_files_paths():
+def get_settings_files_paths() -> Optional[str]:
     dirs = AppDirs(appname="mobilizon-reshare", version=current_version())
     bundled_settings_path = pkg_resources.resource_filename(
@@ -111,7 +111,7 @@ def build_and_validate_settings():
 # better in the future.
-class CustomConfig(object):
+class CustomConfig:
     @classmethod
     def get_instance(cls):
         if not hasattr(cls, "_instance") or cls._instance is None:

View File

@@ -19,7 +19,6 @@ class EventSelectionStrategy(ABC):
         selected = self._select(published_events, unpublished_events)
         if selected:
             return selected[0]
-        else:
-            return None
+        return None
 
     @abstractmethod

View File

@@ -14,11 +14,11 @@ def html_to_plaintext(content) -> str:
     :return:
     """
     # TODO: support links and quotes
-    soup = BeautifulSoup(content)
+    soup = BeautifulSoup(content, features="html.parser")
     p_list = get_bottom_paragraphs(soup)
 
     if p_list:
         return "\n".join(" ".join(tag.stripped_strings) for tag in p_list)
-    else:
-        return soup.text
+    return soup.text
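
For context, a small self-contained illustration of why the parser is now passed explicitly: without features=..., bs4 guesses a parser, emits a warning, and may pick different backends on different machines. The snippet below is illustrative, not project code:

from bs4 import BeautifulSoup

html = "<p><h1>description of the event</h1><h1>another header</h1></p>"
soup = BeautifulSoup(html, features="html.parser")  # explicit, deterministic parser
print(soup.get_text(" ", strip=True))  # "description of the event another header"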

View File

@@ -1,9 +1,11 @@
 import logging
+from typing import Optional
 from uuid import UUID
 
 from mobilizon_reshare.publishers.coordinator import (
     PublisherCoordinator,
-    PublicationFailureNotifiersCoordinator,
+    PublisherCoordinatorReport,
+    PublicationFailureLoggerCoordinator,
 )
 from mobilizon_reshare.storage.query.exceptions import EventNotFound
 from mobilizon_reshare.storage.query.read import (
@@ -26,7 +28,7 @@ async def retry_event_publications(event_id):
     return PublisherCoordinator(failed_publications).run()
 
 
-async def retry_publication(publication_id):
+async def retry_publication(publication_id) -> Optional[PublisherCoordinatorReport]:
     # TODO test this function
     publication = await get_publication(publication_id)
     if not publication:
@@ -34,10 +36,18 @@ async def retry_publication(publication_id):
         return
 
     logger.info(f"Publication {publication_id} found.")
-    return PublisherCoordinator([publication]).run()
+    reports = PublisherCoordinator([publication]).run()
+    await save_publication_report(reports)
+    for report in reports.reports:
+        if not report.succesful:
+            PublicationFailureLoggerCoordinator(report,).notify_failure()
 
 
-async def retry_event(mobilizon_event_id: UUID = None):
+async def retry_event(
+    mobilizon_event_id: UUID = None,
+) -> Optional[PublisherCoordinatorReport]:
     if mobilizon_event_id is None:
         raise NotImplementedError(
             "Autonomous retry not implemented yet, please specify an event_id"
@@ -54,4 +64,4 @@ async def retry_event(mobilizon_event_id: UUID = None):
     await save_publication_report(reports)
     for report in reports.reports:
         if not report.succesful:
-            PublicationFailureNotifiersCoordinator(report,).notify_failure()
+            PublicationFailureLoggerCoordinator(report,).notify_failure()

View File

@@ -24,7 +24,7 @@ def parse_location(data):
         return f"{addr['description']}, {addr['locality']}, {addr['region']}"
     elif "onlineAddress" in data and data["onlineAddress"]:
         return data["onlineAddress"]
-    else:
-        return None
+    return None

View File

@@ -23,7 +23,7 @@ class Event(Model):
     publications: fields.ReverseRelation["Publication"]
 
     def __str__(self):
-        return self.name
+        return str(self.name)
 
     def __repr__(self):
         return f"{self.id} - {self.name}"

View File

@@ -5,7 +5,6 @@ from dataclasses import dataclass
 from typing import List, Optional
 from uuid import UUID
 
-import arrow
 from dynaconf.utils.boxing import DynaBox
 from jinja2 import Environment, FileSystemLoader, Template
@@ -131,6 +130,7 @@ class AbstractEventFormatter(LoggerMixin, ConfLoaderMixin):
         self._validate_event(event)
         self._validate_message(self.get_message_from_event(event))
 
+    @abstractmethod
     def _preprocess_event(self, event):
         """
         Allows publishers to preprocess events before feeding them to the template

View File

@@ -1,4 +1,5 @@
 import logging
+from abc import abstractmethod, ABC
 from dataclasses import dataclass
 from typing import List, Optional
@@ -77,7 +78,7 @@ class PublisherCoordinator:
         return self._post()
 
-    def _post(self):
+    def _post(self) -> PublisherCoordinatorReport:
         reports = []
 
         for publication in self.publications:
@@ -122,8 +123,7 @@ class PublisherCoordinator:
         for publication in self.publications:
             reasons = []
             reasons = self._safe_run(
-                reasons,
-                publication.publisher.validate_credentials,
+                reasons, publication.publisher.validate_credentials,
             )
             reasons = self._safe_run(
                 reasons, publication.formatter.validate_event, publication.event
@@ -141,7 +141,7 @@ class PublisherCoordinator:
         return errors
 
 
-class AbstractCoordinator:
+class Sender:
     def __init__(self, message: str, platforms: List[AbstractPlatform] = None):
         self.message = message
         self.platforms = platforms
@@ -151,29 +151,41 @@ class AbstractCoordinator:
             try:
                 platform.send(self.message)
             except Exception as e:
-                logger.critical(f"Notifier failed to send message:\n{self.message}")
+                logger.critical(f"Failed to send message:\n{self.message}")
                 logger.exception(e)
 
 
-class AbstractNotifiersCoordinator(AbstractCoordinator):
-    def __init__(self, message: str, notifiers: List[AbstractPlatform] = None):
-        platforms = notifiers or [
+class AbstractNotifiersCoordinator(ABC):
+    def __init__(self, report, notifiers: List[AbstractPlatform] = None):
+        self.platforms = notifiers or [
             get_notifier_class(notifier)() for notifier in get_active_notifiers()
         ]
-        super(AbstractNotifiersCoordinator, self).__init__(message, platforms)
+        self.report = report
+
+    @abstractmethod
+    def notify_failure(self):
+        pass
 
 
 class PublicationFailureNotifiersCoordinator(AbstractNotifiersCoordinator):
-    def __init__(self, report: BasePublicationReport, platforms=None):
-        self.report = report
-        super(PublicationFailureNotifiersCoordinator, self).__init__(
-            message=report.get_failure_message(), notifiers=platforms
-        )
+    """
+    Sends a notification of a failure report to the active platforms
+    """
 
     def notify_failure(self):
        logger.info("Sending failure notifications")
        if self.report.status == PublicationStatus.FAILED:
-            self.send_to_all()
+            Sender(self.report.get_failure_message(), self.platforms).send_to_all()
+
+
+class PublicationFailureLoggerCoordinator(PublicationFailureNotifiersCoordinator):
+    """
+    Logs a report to console
+    """
+
+    def notify_failure(self):
+        if self.report.status == PublicationStatus.FAILED:
+            logger.error(self.report.get_failure_message())
 
 
 class RecapCoordinator:
@@ -194,15 +206,13 @@ class RecapCoordinator:
                 recap_publication.publisher.send(message)
                 reports.append(
                     BasePublicationReport(
-                        status=PublicationStatus.COMPLETED,
-                        reason=None,
+                        status=PublicationStatus.COMPLETED, reason=None,
                     )
                 )
             except PublisherError as e:
                 reports.append(
                     BasePublicationReport(
-                        status=PublicationStatus.FAILED,
-                        reason=str(e),
+                        status=PublicationStatus.FAILED, reason=str(e),
                     )
                 )
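
To make the new split easier to follow, here is a condensed, runnable sketch of the refactored failure-handling classes. The class names mirror the diff; the Report stub, the status enum values beyond FAILED, and the logging setup are stand-ins, not project code:

import logging
from abc import ABC, abstractmethod
from enum import Enum
from typing import List

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("coordinator-sketch")


class PublicationStatus(Enum):  # stand-in for the project's enum
    FAILED = 0
    COMPLETED = 2


class Report:  # stand-in for a publication report
    def __init__(self, status: PublicationStatus, message: str):
        self.status = status
        self.message = message

    def get_failure_message(self) -> str:
        return self.message


class Sender:
    # Pure message fan-out: knows nothing about reports or statuses.
    def __init__(self, message: str, platforms: List = None):
        self.message = message
        self.platforms = platforms or []

    def send_to_all(self):
        for platform in self.platforms:
            platform.send(self.message)


class AbstractNotifiersCoordinator(ABC):
    # Owns the report plus the notifier platforms; subclasses choose the reaction.
    def __init__(self, report: Report, notifiers: List = None):
        self.platforms = notifiers or []
        self.report = report

    @abstractmethod
    def notify_failure(self):
        ...


class PublicationFailureNotifiersCoordinator(AbstractNotifiersCoordinator):
    def notify_failure(self):
        if self.report.status == PublicationStatus.FAILED:
            Sender(self.report.get_failure_message(), self.platforms).send_to_all()


class PublicationFailureLoggerCoordinator(PublicationFailureNotifiersCoordinator):
    def notify_failure(self):  # retry paths log instead of pinging notifiers
        if self.report.status == PublicationStatus.FAILED:
            logger.error(self.report.get_failure_message())


PublicationFailureLoggerCoordinator(
    Report(PublicationStatus.FAILED, "zulip: invalid credentials")
).notify_failure()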

View File

@@ -1,8 +1,6 @@
 class PublisherError(Exception):
     """Generic publisher error"""
 
-    pass
-
 
 class InvalidAttribute(PublisherError):
     """Publisher defined with invalid or missing attribute"""

View File

@@ -56,9 +56,7 @@ class FacebookPlatform(AbstractPlatform):
     name = "facebook"
 
     def _get_api(self) -> facebook.GraphAPI:
-        return facebook.GraphAPI(
-            access_token=self.conf["page_access_token"], version="8.0"
-        )
+        return facebook.GraphAPI(access_token=self.conf["page_access_token"])
 
     def _send(self, message: str, event: Optional[MobilizonEvent] = None):
         try:

View File

@@ -61,18 +61,6 @@ class ZulipPlatform(AbstractPlatform):
     api_uri = "api/v1/"
     name = "zulip"
 
-    def _send_private(
-        self, message: str, event: Optional[MobilizonEvent] = None
-    ) -> Response:
-        """
-        Send private messages
-        """
-        return requests.post(
-            url=urljoin(self.conf.instance, self.api_uri) + "messages",
-            auth=HTTPBasicAuth(self.conf.bot_email, self.conf.bot_token),
-            data={"type": "private", "to": f"[{self.user_id}]", "content": message},
-        )
-
     def _send(self, message: str, event: Optional[MobilizonEvent] = None) -> Response:
         """
         Send stream messages
@@ -110,18 +98,18 @@ class ZulipPlatform(AbstractPlatform):
             raise_error=InvalidBot,
         )
 
-    def _validate_response(self, res: Response) -> dict:
+    def _validate_response(self, response: Response) -> dict:
         try:
-            res.raise_for_status()
+            response.raise_for_status()
         except requests.exceptions.HTTPError as e:
-            self._log_debug(str(res))
+            self._log_debug(str(response))
             self._log_error(
                 str(e), raise_error=HTTPResponseError,
             )
 
         # See https://zulip.com/api/rest-error-handling
         try:
-            data = res.json()
+            data = response.json()
         except Exception as e:
             self._log_error(
                 f"Server returned invalid json data: {str(e)}",
@@ -130,7 +118,7 @@ class ZulipPlatform(AbstractPlatform):
         if data["result"] == "error":
             self._log_error(
-                f"{res.status_code} Error - {data['msg']}", raise_error=ZulipError,
+                f"{response.status_code} Error - {data['msg']}", raise_error=ZulipError,
             )
 
         return data

View File

@@ -1,4 +1,4 @@
-# {{ name }}
+{{ name }}
 
 🕒 {{ begin_datetime.to('local').format('DD MMMM, HH:mm') }} - {{ end_datetime.to('local').format('DD MMMM, HH:mm') }}
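
Facebook posts are plain text, so the leading Markdown "#" would have shown up literally in the published header. A quick illustrative render of the adjusted header line, using a simplified template rather than the bundled one:

from jinja2 import Template

header = Template("{{ name }}\n\n🕒 {{ when }}")
print(header.render(name="test event", when="01 January, 10:00"))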

View File

@@ -78,7 +78,7 @@ def compute_event_status(publications: list[Publication]) -> EventPublicationStatus:
     raise ValueError(f"Illegal combination of PublicationStatus: {unique_statuses}")
 
 
-def publication_from_orm(model: Publication, event: MobilizonEvent):
+def publication_from_orm(model: Publication, event: MobilizonEvent) -> EventPublication:
     # imported here to avoid circular dependencies
     from mobilizon_reshare.publishers.platforms.platform_mapping import (
         get_publisher_class,
@@ -87,9 +87,4 @@ def publication_from_orm(model: Publication, event: MobilizonEvent):
     publisher = get_publisher_class(model.publisher.name)()
     formatter = get_formatter_class(model.publisher.name)()
 
-    return EventPublication(
-        publisher,
-        formatter,
-        event,
-        model.id,
-    )
+    return EventPublication(publisher, formatter, event, model.id,)

View File

@@ -119,17 +119,11 @@ def _add_date_window(
 @atomic(CONNECTION_NAME)
 async def publications_with_status(
     status: PublicationStatus,
-    event_mobilizon_id: Optional[UUID] = None,
     from_date: Optional[Arrow] = None,
     to_date: Optional[Arrow] = None,
-) -> Iterable[EventPublication]:
+) -> Iterable[Publication]:
     query = Publication.filter(status=status)
 
-    if event_mobilizon_id:
-        query = query.prefetch_related("event").filter(
-            event__mobilizon_id=event_mobilizon_id
-        )
-
     return await prefetch_publication_relations(
         _add_date_window(query, "timestamp", from_date, to_date)
     )
@@ -180,7 +174,7 @@ async def build_publications(event: MobilizonEvent) -> list[EventPublication]:
 @atomic(CONNECTION_NAME)
 async def get_failed_publications_for_event(
     event_mobilizon_id: UUID,
-) -> list[MobilizonEvent]:
+) -> list[EventPublication]:
     event = await get_event(event_mobilizon_id)
     failed_publications = list(
         filter(
@@ -190,7 +184,10 @@ async def get_failed_publications_for_event(
     )
     for p in failed_publications:
         await p.fetch_related("publisher")
-    return list(map(partial(publication_from_orm, event=event), failed_publications))
+    mobilizon_event = event_from_model(event)
+    return list(
+        map(partial(publication_from_orm, event=mobilizon_event), failed_publications)
+    )
 
 
 @atomic(CONNECTION_NAME)
@@ -201,6 +198,8 @@ async def get_publication(publication_id):
         )
         # TODO: this is redundant but there's some prefetch problem otherwise
         publication.event = await get_event(publication.event.mobilizon_id)
-        return publication_from_orm(event=event_from_model(publication.event))
+        return publication_from_orm(
+            event=event_from_model(publication.event), model=publication
+        )
     except DoesNotExist:
         return None

View File

@@ -1,4 +1,5 @@
 import pytest
+from arrow import arrow
 
 from mobilizon_reshare.cli.commands.list.list_event import list_events
 from mobilizon_reshare.cli.commands.list.list_publication import list_publications
@@ -60,6 +61,32 @@ async def test_list_publications(capsys, generate_models):
     ]
 
 
+@pytest.mark.asyncio
+async def test_list_publications_from_window(capsys, generate_models):
+    await generate_models(spec)
+    await list_publications(
+        frm=arrow.Arrow(year=2021, month=6, day=6, hour=3, minute=30)
+    )
+    output = capsys.readouterr()
+    assert clean_output(output) == [
+        "00000000-0000-0000-0000-000000000001 2021-06-06T04:00:00+00:00 "
+        "FAILED zulip 00000000-0000-0000-0000-000000000002",
+    ]
+
+
+@pytest.mark.asyncio
+async def test_list_publications_to_window(capsys, generate_models):
+    await generate_models(spec)
+    await list_publications(
+        to=arrow.Arrow(year=2021, month=6, day=6, hour=3, minute=30)
+    )
+    output = capsys.readouterr()
+    assert clean_output(output) == [
+        "00000000-0000-0000-0000-000000000000 2021-06-06T03:00:00+00:00 "
+        "COMPLETED zulip 00000000-0000-0000-0000-000000000001",
+    ]
+
+
 @pytest.mark.asyncio
 async def test_list_publications_with_status(capsys, generate_models):
     await generate_models(spec)

View File

@@ -1,9 +1,9 @@
 import uuid
-from logging import INFO
+from logging import INFO, ERROR
 
 import pytest
 
-from mobilizon_reshare.main.retry import retry_event
+from mobilizon_reshare.main.retry import retry_event, retry_publication
 from mobilizon_reshare.models.publication import PublicationStatus, Publication
@@ -17,13 +17,12 @@ async def test_retry_decision():
     "publisher_class", [pytest.lazy_fixture("mock_publisher_class")]
 )
 @pytest.mark.asyncio
-async def test_retry(
+async def test_retry_event(
     event_with_failed_publication,
     mock_publisher_config,
     message_collector,
     failed_publication,
 ):
-    assert failed_publication.status == PublicationStatus.FAILED
     await retry_event(event_with_failed_publication.mobilizon_id)
     p = await Publication.filter(id=failed_publication.id).first()
     assert p.status == PublicationStatus.COMPLETED, p.id
@@ -35,7 +34,7 @@ async def test_retry(
     "publisher_class", [pytest.lazy_fixture("mock_publisher_class")]
 )
 @pytest.mark.asyncio
-async def test_retry_no_publications(
+async def test_retry_event_no_publications(
     stored_event, mock_publisher_config, message_collector, caplog
 ):
     with caplog.at_level(INFO):
@@ -48,7 +47,9 @@ async def test_retry_no_publications(
     "publisher_class", [pytest.lazy_fixture("mock_publisher_class")]
 )
 @pytest.mark.asyncio
-async def test_retry_missing_event(mock_publisher_config, message_collector, caplog):
+async def test_retry_event_missing_event(
+    mock_publisher_config, message_collector, caplog
+):
     event_id = uuid.uuid4()
     with caplog.at_level(INFO):
         await retry_event(event_id)
@@ -61,7 +62,7 @@ async def test_retry_missing_event(mock_publisher_config, message_collector, caplog):
     "publisher_class", [pytest.lazy_fixture("mock_publisher_class")]
 )
 @pytest.mark.asyncio
-async def test_retry_mixed_publications(
+async def test_retry_event_mixed_publications(
     event_with_failed_publication,
     mock_publisher_config,
     message_collector,
@@ -81,3 +82,77 @@ async def test_retry_mixed_publications(
         assert p.status == PublicationStatus.COMPLETED, p.id
     assert len(message_collector) == 1
     assert message_collector[0] == "test event|description of the event"
+
+
+@pytest.mark.parametrize(
+    "publisher_class", [pytest.lazy_fixture("mock_publisher_class")]
+)
+@pytest.mark.asyncio
+async def test_retry_publication(
+    event_with_failed_publication,
+    mock_publisher_config,
+    message_collector,
+    failed_publication: Publication,
+):
+    await retry_publication(failed_publication.id)
+    p = await Publication.filter(id=failed_publication.id).first()
+    assert p.status == PublicationStatus.COMPLETED, p.id
+    assert len(message_collector) == 1
+    assert message_collector[0] == "test event|description of the event"
+
+
+@pytest.mark.parametrize(
+    "publisher_class", [pytest.lazy_fixture("mock_publisher_class")]
+)
+@pytest.mark.asyncio
+async def test_retry_publication_missing(
+    mock_publisher_config, message_collector, caplog
+):
+    publication_id = uuid.uuid4()
+    with caplog.at_level(INFO):
+        await retry_publication(publication_id)
+        assert f"Publication {publication_id} not found.\n" in caplog.text
+    assert len(message_collector) == 0
+
+
+@pytest.mark.parametrize(
+    "publisher_class", [pytest.lazy_fixture("mock_publisher_invalid_class")]
+)
+@pytest.mark.asyncio
+async def test_event_retry_failure(
+    event_with_failed_publication,
+    mock_publisher_config,
+    failed_publication: Publication,
+    caplog,
+):
+    with caplog.at_level(ERROR):
+        await retry_event(event_with_failed_publication.mobilizon_id)
+        assert (
+            f"Publication {failed_publication.id} failed with status: 0.\nReason: credentials error"
+            in caplog.text
+        )
+    p = await Publication.filter(id=failed_publication.id).first()
+    assert p.status == PublicationStatus.FAILED, p.id
+
+
+@pytest.mark.parametrize(
+    "publisher_class", [pytest.lazy_fixture("mock_publisher_invalid_class")]
+)
+@pytest.mark.asyncio
+async def test_publication_retry_failure(
+    event_with_failed_publication,
+    mock_publisher_config,
+    failed_publication: Publication,
+    caplog,
+):
+    with caplog.at_level(ERROR):
+        await retry_publication(failed_publication.id)
+        assert (
+            f"Publication {failed_publication.id} failed with status: 0.\nReason: credentials error"
            in caplog.text
+        )
+    p = await Publication.filter(id=failed_publication.id).first()
+    assert p.status == PublicationStatus.FAILED, p.id

View File

@@ -33,8 +33,8 @@ with importlib.resources.path(
     os.environ["SECRETS_FOR_DYNACONF"] = str(bundled_secrets_path)
 
 
-def generate_publication_status(published):
-    return PublicationStatus.COMPLETED if published else PublicationStatus.WAITING
+def generate_publication_status(published) -> PublicationStatus:
+    return PublicationStatus.COMPLETED if published else PublicationStatus.FAILED
 
 
 @pytest.fixture(autouse=True)
@@ -56,7 +56,7 @@ def generate_notification_status(published):
 @pytest.fixture(scope="session", autouse=True)
-def set_dynaconf_environment(request) -> None:
+def set_dynaconf_environment() -> None:
     os.environ["ENV_FOR_DYNACONF"] = "testing"
     os.environ["FORCE_ENV_FOR_DYNACONF"] = "testing"
@@ -120,7 +120,7 @@ def event() -> MobilizonEvent:
 @pytest.fixture
-async def stored_event(event):
+async def stored_event(event) -> Event:
     model = event_to_model(event)
     await model.save()
     await model.fetch_related("publications")
@@ -379,7 +379,7 @@ async def event_with_failed_publication(
 @pytest.fixture
-async def failed_publication(stored_event):
+async def failed_publication(stored_event) -> Publication:
     p = Publication(
         event=stored_event,

View File

@@ -13,11 +13,10 @@ end_date = begin_date.shift(hours=1)
 
 @pytest.fixture()
-def event() -> MobilizonEvent:
-    return MobilizonEvent(
+def event_to_format() -> MobilizonEvent:
+    event = MobilizonEvent(
         name="test event",
         description="<p><h1>description of the event</h1><h1>another header</h1></p>",
-        # "<ul><li>element</li></ul>",
         begin_datetime=begin_date,
         end_datetime=end_date,
         mobilizon_link="http://some_link.com/123",
@@ -26,6 +25,8 @@ def event() -> MobilizonEvent:
         location="location",
         last_update_time=begin_date,
     )
+    print("in fix", event.description, id(event))
+    return event
 
 
 @pytest.mark.parametrize(
@@ -33,7 +34,7 @@ def event() -> MobilizonEvent:
     [
         [
             "facebook",
-            f"""# test event
+            f"""test event
 
 🕒 01 January, {begin_date.format('HH:mm')} - 01 January, {end_date.format('HH:mm')}
@@ -61,8 +62,12 @@ Link: http://some_link.com/123
         ],
     ],
 )
-def test_output_format(event, publisher_name, expected_output):
+def test_output_format(
+    event_to_format: MobilizonEvent, publisher_name, expected_output
+):
     assert (
-        get_formatter_class(publisher_name)().get_message_from_event(event).strip()
+        get_formatter_class(publisher_name)()
+        .get_message_from_event(event_to_format)
+        .strip()
         == expected_output.strip()
     )

View File

@@ -84,16 +84,14 @@ def mock_publisher_invalid_response(message_collector):
 
 @pytest.fixture()
 async def mock_active_publishers_config(monkeypatch):
-    p = Publisher(name="zulip")
-    await p.save()
+    publisher = Publisher(name="zulip")
+    await publisher.save()
 
     def _mock_active_pub():
         return ["zulip"]
 
     monkeypatch.setattr(
-        mobilizon_reshare.storage.query.read,
-        "get_active_publishers",
-        _mock_active_pub
+        mobilizon_reshare.storage.query.read, "get_active_publishers", _mock_active_pub
     )
-    return p
+    return publisher

View File

@@ -51,7 +51,7 @@ def failure_report(mock_publisher_invalid, event):
 )
 def test_publication_report_successful(statuses, successful):
     reports = []
-    for i, status in enumerate(statuses):
+    for _, status in enumerate(statuses):
         reports.append(
             EventPublicationReport(reason=None, publication=None, status=status)
         )
@@ -112,12 +112,8 @@ async def mock_publications(
 @pytest.mark.parametrize("num_publications", [2])
 @pytest.mark.asyncio
-async def test_publication_coordinator_run_success(
-    mock_publications,
-):
-    coordinator = PublisherCoordinator(
-        publications=mock_publications,
-    )
+async def test_publication_coordinator_run_success(mock_publications,):
+    coordinator = PublisherCoordinator(publications=mock_publications,)
     report = coordinator.run()
     assert len(report.reports) == 2
     assert report.successful, "\n".join(map(lambda rep: rep.reason, report.reports))
@@ -182,7 +178,7 @@ async def test_notifier_coordinator_error(
     )
     with caplog.at_level(logging.CRITICAL):
         coordinator.notify_failure()
-        assert "Notifier failed to send" in caplog.text
+        assert "Failed to send" in caplog.text
         assert failure_report.get_failure_message() in caplog.text
     # 4 = 2 reports * 2 notifiers
     assert mock_send.call_count == 2

View File

@@ -1,10 +1,9 @@
 from datetime import timedelta
-from uuid import UUID
 
 import arrow
 import pytest
 
-from mobilizon_reshare.event.event import MobilizonEvent, EventPublicationStatus
+from mobilizon_reshare.event.event import EventPublicationStatus
 from mobilizon_reshare.models.publication import PublicationStatus
 from mobilizon_reshare.storage.query.read import (
     get_published_events,
@@ -13,9 +12,9 @@ from mobilizon_reshare.storage.query.read import (
     events_without_publications,
     build_publications,
 )
+from tests import today
 from tests.storage import complete_specification, event_0, event_1, event_3
 from tests.storage import result_publication
-from tests import today
 
 
 @pytest.mark.asyncio
@@ -54,19 +53,11 @@ async def test_get_published_events(generate_models):
     ],
 )
 async def test_publications_with_status(
-    status,
-    mobilizon_id,
-    from_date,
-    to_date,
-    expected_result,
-    generate_models,
+    status, mobilizon_id, from_date, to_date, expected_result, generate_models,
 ):
     await generate_models(complete_specification)
     publications = await publications_with_status(
-        status=status,
-        event_mobilizon_id=mobilizon_id,
-        from_date=from_date,
-        to_date=to_date,
+        status=status, from_date=from_date, to_date=to_date,
     )
 
     assert publications == expected_result
@@ -129,13 +120,7 @@ async def test_event_with_status_window(
 @pytest.mark.parametrize(
     "spec, expected_events",
     [
-        (
-            {"event": 2, "publications": [], "publisher": ["zulip"]},
-            [
-                event_0,
-                event_1,
-            ],
-        ),
+        ({"event": 2, "publications": [], "publisher": ["zulip"]}, [event_0, event_1],),
         (
             {
                 "event": 3,
@@ -155,12 +140,7 @@ async def test_event_with_status_window(
             },
             [event_0],
         ),
-        (
-            complete_specification,
-            [
-                event_3,
-            ],
-        ),
+        (complete_specification, [event_3],),
     ],
 )
 async def test_events_without_publications(spec, expected_events, generate_models):
@@ -174,12 +154,7 @@ async def test_events_without_publications(spec, expected_events, generate_models):
 @pytest.mark.parametrize(
     "mock_active_publishers, spec, event, n_publications",
     [
-        (
-            [],
-            {"event": 2, "publications": [], "publisher": ["zulip"]},
-            event_0,
-            0,
-        ),
+        ([], {"event": 2, "publications": [], "publisher": ["zulip"]}, event_0, 0,),
         (
             ["zulip"],
             {"event": 2, "publications": [], "publisher": ["zulip"]},