decouple dataclasses from models (#181)

* fixed parsing bug

* implemented events and publications endpoints

split endpoints by entity

removed credentials

* add pagination (#179)

* added pagination

* integrated pagination with tortoise

* added test for publications

* removed converter file

* moved publications to dataclasses module

* implemented import pattern on dataclasses to prevent circular imports (see the sketch after this list)

* removed redundant fetch

* removed unused query

* split build_publications

* split failed_publications

* removed redundant query functions

* split publication retrieve

* split all read functions

* removed redundant write function

* fixed lock
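The "import pattern" bullet above refers to the new mobilizon_reshare/dataclasses package introduced further down in this diff: its __init__ re-exports the underscore-prefixed implementation classes under public names, and _EventPublication.from_orm defers its platform_mapping import to call time. A condensed sketch of the package half, paraphrased from the hunks below (not additional code in the commit):

# mobilizon_reshare/dataclasses/__init__.py: public names are aliases of the
# underscore-prefixed implementations; consumers import from the package only.
from mobilizon_reshare.dataclasses.event import _MobilizonEvent
from mobilizon_reshare.dataclasses.event_publication_status import _EventPublicationStatus
from mobilizon_reshare.dataclasses.publication import _EventPublication

EventPublication = _EventPublication
MobilizonEvent = _MobilizonEvent
EventPublicationStatus = _EventPublicationStatus

# Call sites elsewhere in the commit then import only from the package, e.g.:
#     from mobilizon_reshare.dataclasses import MobilizonEvent, EventPublicationStatus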
Authored by Simone Robutti on 2022-12-11 14:15:04 +01:00; committed by GitHub
parent ddc706e201
commit 370e00d187
40 changed files with 466 additions and 486 deletions

View File

@ -19,7 +19,7 @@ from mobilizon_reshare.cli.commands.start.main import start_command as start_mai
from mobilizon_reshare.config.command import CommandConfig from mobilizon_reshare.config.command import CommandConfig
from mobilizon_reshare.config.config import current_version, get_settings from mobilizon_reshare.config.config import current_version, get_settings
from mobilizon_reshare.config.publishers import publisher_names from mobilizon_reshare.config.publishers import publisher_names
from mobilizon_reshare.event.event import EventPublicationStatus from mobilizon_reshare.dataclasses.event import _EventPublicationStatus
from mobilizon_reshare.models.publication import PublicationStatus from mobilizon_reshare.models.publication import PublicationStatus
from mobilizon_reshare.publishers import get_active_publishers from mobilizon_reshare.publishers import get_active_publishers
@ -49,10 +49,10 @@ def print_platforms(ctx, param, value):
status_name_to_enum = { status_name_to_enum = {
"event": { "event": {
"waiting": EventPublicationStatus.WAITING, "waiting": _EventPublicationStatus.WAITING,
"completed": EventPublicationStatus.COMPLETED, "completed": _EventPublicationStatus.COMPLETED,
"failed": EventPublicationStatus.FAILED, "failed": _EventPublicationStatus.FAILED,
"partial": EventPublicationStatus.PARTIAL, "partial": _EventPublicationStatus.PARTIAL,
"all": None, "all": None,
}, },
"publication": { "publication": {

View File

@ -1,6 +1,6 @@
import click import click
from mobilizon_reshare.event.event import MobilizonEvent from mobilizon_reshare.dataclasses import MobilizonEvent
from mobilizon_reshare.models.event import Event from mobilizon_reshare.models.event import Event
from mobilizon_reshare.publishers.platforms.platform_mapping import get_formatter_class from mobilizon_reshare.publishers.platforms.platform_mapping import get_formatter_class

View File

@ -4,21 +4,21 @@ from typing import Iterable, Optional
import click import click
from arrow import Arrow from arrow import Arrow
from mobilizon_reshare.event.event import EventPublicationStatus from mobilizon_reshare.dataclasses import MobilizonEvent
from mobilizon_reshare.event.event import MobilizonEvent from mobilizon_reshare.dataclasses.event import (
from mobilizon_reshare.event.event_selection_strategies import select_unpublished_events _EventPublicationStatus,
from mobilizon_reshare.storage.query.read import (
get_published_events,
events_with_status,
get_all_mobilizon_events, get_all_mobilizon_events,
events_without_publications, get_published_events,
get_mobilizon_events_with_status,
get_mobilizon_events_without_publications,
) )
from mobilizon_reshare.event.event_selection_strategies import select_unpublished_events
status_to_color = { status_to_color = {
EventPublicationStatus.COMPLETED: "green", _EventPublicationStatus.COMPLETED: "green",
EventPublicationStatus.FAILED: "red", _EventPublicationStatus.FAILED: "red",
EventPublicationStatus.PARTIAL: "yellow", _EventPublicationStatus.PARTIAL: "yellow",
EventPublicationStatus.WAITING: "white", _EventPublicationStatus.WAITING: "white",
} }
@ -38,12 +38,14 @@ def pretty(event: MobilizonEvent):
async def list_unpublished_events(frm: Arrow = None, to: Arrow = None): async def list_unpublished_events(frm: Arrow = None, to: Arrow = None):
return select_unpublished_events( return select_unpublished_events(
list(await get_published_events(from_date=frm, to_date=to)), list(await get_published_events(from_date=frm, to_date=to)),
list(await events_without_publications(from_date=frm, to_date=to)), list(
await get_mobilizon_events_without_publications(from_date=frm, to_date=to)
),
) )
async def list_events( async def list_events(
status: Optional[EventPublicationStatus] = None, status: Optional[_EventPublicationStatus] = None,
frm: Optional[datetime] = None, frm: Optional[datetime] = None,
to: Optional[datetime] = None, to: Optional[datetime] = None,
): ):
@ -52,10 +54,12 @@ async def list_events(
to = Arrow.fromdatetime(to) if to else None to = Arrow.fromdatetime(to) if to else None
if status is None: if status is None:
events = await get_all_mobilizon_events(from_date=frm, to_date=to) events = await get_all_mobilizon_events(from_date=frm, to_date=to)
elif status == EventPublicationStatus.WAITING: elif status == _EventPublicationStatus.WAITING:
events = await list_unpublished_events(frm=frm, to=to) events = await list_unpublished_events(frm=frm, to=to)
else: else:
events = await events_with_status([status], from_date=frm, to_date=to) events = await get_mobilizon_events_with_status(
[status], from_date=frm, to_date=to
)
events = list(events) events = list(events)
if events: if events:
show_events(events) show_events(events)

View File

@ -0,0 +1,9 @@
from mobilizon_reshare.dataclasses.event import _MobilizonEvent
from mobilizon_reshare.dataclasses.event_publication_status import (
_EventPublicationStatus,
)
from mobilizon_reshare.dataclasses.publication import _EventPublication
EventPublication = _EventPublication
MobilizonEvent = _MobilizonEvent
EventPublicationStatus = _EventPublicationStatus

View File

@ -1,25 +1,26 @@
from dataclasses import dataclass, asdict from dataclasses import dataclass, asdict
from enum import IntEnum from typing import Optional, Iterable
from typing import Optional
from uuid import UUID from uuid import UUID
import arrow import arrow
from arrow import Arrow
from jinja2 import Template from jinja2 import Template
from mobilizon_reshare.config.config import get_settings from mobilizon_reshare.config.config import get_settings
from mobilizon_reshare.dataclasses.event_publication_status import (
_EventPublicationStatus,
_compute_event_status,
)
from mobilizon_reshare.models.event import Event from mobilizon_reshare.models.event import Event
from mobilizon_reshare.models.publication import PublicationStatus, Publication from mobilizon_reshare.storage.query.read import (
get_all_events,
get_event,
class EventPublicationStatus(IntEnum): get_events_without_publications,
WAITING = 1 )
FAILED = 2
COMPLETED = 3
PARTIAL = 4
@dataclass @dataclass
class MobilizonEvent: class _MobilizonEvent:
"""Class representing an event retrieved from Mobilizon.""" """Class representing an event retrieved from Mobilizon."""
name: str name: str
@ -32,7 +33,7 @@ class MobilizonEvent:
thumbnail_link: Optional[str] = None thumbnail_link: Optional[str] = None
location: Optional[str] = None location: Optional[str] = None
publication_time: Optional[dict[str, arrow.Arrow]] = None publication_time: Optional[dict[str, arrow.Arrow]] = None
status: EventPublicationStatus = EventPublicationStatus.WAITING status: _EventPublicationStatus = _EventPublicationStatus.WAITING
def __post_init__(self): def __post_init__(self):
assert self.begin_datetime.tzinfo == self.end_datetime.tzinfo assert self.begin_datetime.tzinfo == self.end_datetime.tzinfo
@ -41,9 +42,9 @@ class MobilizonEvent:
self.publication_time = {} self.publication_time = {}
if self.publication_time: if self.publication_time:
assert self.status in [ assert self.status in [
EventPublicationStatus.COMPLETED, _EventPublicationStatus.COMPLETED,
EventPublicationStatus.PARTIAL, _EventPublicationStatus.PARTIAL,
EventPublicationStatus.FAILED, _EventPublicationStatus.FAILED,
] ]
def _fill_template(self, pattern: Template) -> str: def _fill_template(self, pattern: Template) -> str:
@ -55,11 +56,11 @@ class MobilizonEvent:
@classmethod @classmethod
def from_model(cls, event: Event): def from_model(cls, event: Event):
publication_status = cls._compute_event_status(list(event.publications)) publication_status = _compute_event_status(list(event.publications))
publication_time = {} publication_time = {}
for pub in event.publications: for pub in event.publications:
if publication_status != EventPublicationStatus.WAITING: if publication_status != _EventPublicationStatus.WAITING:
assert pub.timestamp is not None assert pub.timestamp is not None
publication_time[pub.publisher.name] = arrow.get(pub.timestamp).to( publication_time[pub.publisher.name] = arrow.get(pub.timestamp).to(
"local" "local"
@ -99,23 +100,58 @@ class MobilizonEvent:
kwargs.update({"id": db_id}) kwargs.update({"id": db_id})
return Event(**kwargs) return Event(**kwargs)
@staticmethod @classmethod
def _compute_event_status( async def retrieve(cls, mobilizon_id):
publications: list[Publication], return cls.from_model(await get_event(mobilizon_id))
) -> EventPublicationStatus:
if not publications:
return EventPublicationStatus.WAITING
unique_statuses: set[PublicationStatus] = set(
pub.status for pub in publications async def get_all_mobilizon_events(
from_date: Optional[Arrow] = None, to_date: Optional[Arrow] = None,
) -> list[_MobilizonEvent]:
return [_MobilizonEvent.from_model(event) for event in await get_all_events()]
async def get_published_events(
from_date: Optional[Arrow] = None, to_date: Optional[Arrow] = None
) -> Iterable[_MobilizonEvent]:
"""
Retrieves events that are not waiting. Function could be renamed to something more fitting
:return:
"""
return await get_mobilizon_events_with_status(
[
_EventPublicationStatus.COMPLETED,
_EventPublicationStatus.PARTIAL,
_EventPublicationStatus.FAILED,
],
from_date=from_date,
to_date=to_date,
)
async def get_mobilizon_events_with_status(
status: list[_EventPublicationStatus],
from_date: Optional[Arrow] = None,
to_date: Optional[Arrow] = None,
) -> Iterable[_MobilizonEvent]:
def _filter_event_with_status(event: Event) -> bool:
# This computes the status client-side instead of running in the DB. It shouldn't pose a performance problem
# in the short term, but should be moved to the query if possible.
event_status = _compute_event_status(list(event.publications))
return event_status in status
return map(
_MobilizonEvent.from_model,
filter(_filter_event_with_status, await get_all_events(from_date, to_date)),
)
async def get_mobilizon_events_without_publications(
from_date: Optional[Arrow] = None, to_date: Optional[Arrow] = None,
) -> list[_MobilizonEvent]:
return [
_MobilizonEvent.from_model(event)
for event in await get_events_without_publications(
from_date=from_date, to_date=to_date
) )
]
if unique_statuses == {
PublicationStatus.COMPLETED,
PublicationStatus.FAILED,
}:
return EventPublicationStatus.PARTIAL
elif len(unique_statuses) == 1:
return EventPublicationStatus[unique_statuses.pop().name]
raise ValueError(f"Illegal combination of PublicationStatus: {unique_statuses}")

View File

@ -0,0 +1,27 @@
from enum import IntEnum
from mobilizon_reshare.models.publication import Publication, PublicationStatus
class _EventPublicationStatus(IntEnum):
WAITING = 1
FAILED = 2
COMPLETED = 3
PARTIAL = 4
def _compute_event_status(publications: list[Publication],) -> _EventPublicationStatus:
if not publications:
return _EventPublicationStatus.WAITING
unique_statuses: set[PublicationStatus] = set(pub.status for pub in publications)
if unique_statuses == {
PublicationStatus.COMPLETED,
PublicationStatus.FAILED,
}:
return _EventPublicationStatus.PARTIAL
elif len(unique_statuses) == 1:
return _EventPublicationStatus[unique_statuses.pop().name]
raise ValueError(f"Illegal combination of PublicationStatus: {unique_statuses}")

View File

@ -0,0 +1,72 @@
from dataclasses import dataclass
from functools import partial
from typing import List, Iterator
from uuid import UUID
from tortoise.transactions import atomic
from mobilizon_reshare.dataclasses.event import _MobilizonEvent
from mobilizon_reshare.models.publication import Publication
from mobilizon_reshare.publishers.abstract import (
AbstractPlatform,
AbstractEventFormatter,
)
from mobilizon_reshare.storage.query.read import (
get_event,
prefetch_publication_relations,
)
@dataclass
class BasePublication:
publisher: AbstractPlatform
formatter: AbstractEventFormatter
@dataclass
class _EventPublication(BasePublication):
event: _MobilizonEvent
id: UUID
@classmethod
def from_orm(cls, model: Publication, event: _MobilizonEvent):
# imported here to avoid circular dependencies
from mobilizon_reshare.publishers.platforms.platform_mapping import (
get_publisher_class,
get_formatter_class,
)
publisher = get_publisher_class(model.publisher.name)()
formatter = get_formatter_class(model.publisher.name)()
return cls(publisher, formatter, event, model.id,)
@classmethod
async def retrieve(cls, publication_id):
publication = await prefetch_publication_relations(
Publication.get(id=publication_id)
)
event = _MobilizonEvent.from_model(publication.event)
return cls.from_orm(publication, event)
@dataclass
class RecapPublication(BasePublication):
events: List[_MobilizonEvent]
@atomic()
async def build_publications_for_event(
event: _MobilizonEvent, publishers: Iterator[str]
) -> list[_EventPublication]:
publication_models = await event.to_model().build_publications(publishers)
return [_EventPublication.from_orm(m, event) for m in publication_models]
async def get_failed_publications_for_event(
event: _MobilizonEvent,
) -> List[_EventPublication]:
event_model = await get_event(event.mobilizon_id)
failed_publications = await event_model.get_failed_publications()
return list(
map(partial(_EventPublication.from_orm, event=event), failed_publications)
)
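The retry command (further down in this diff) consumes this class through EventPublication.retrieve and treats a missing row as tortoise's DoesNotExist; a minimal usage sketch, with the function name retry_sketch being illustrative:

from tortoise.exceptions import DoesNotExist

from mobilizon_reshare.dataclasses import EventPublication

async def retry_sketch(publication_id):
    try:
        # retrieve() prefetches the publication's publisher/event relations and
        # wraps the row into the dataclass via from_orm.
        publication = await EventPublication.retrieve(publication_id)
    except DoesNotExist:
        # mirrors main/retry.py below: a missing publication is logged and skipped
        return None
    return publication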

View File

@ -5,7 +5,7 @@ from typing import List, Optional
import arrow import arrow
from mobilizon_reshare.config.config import get_settings from mobilizon_reshare.config.config import get_settings
from mobilizon_reshare.event.event import MobilizonEvent from mobilizon_reshare.dataclasses import MobilizonEvent
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)

View File

@ -2,10 +2,20 @@ import logging.config
from typing import Optional, Iterator from typing import Optional, Iterator
from mobilizon_reshare.config.command import CommandConfig from mobilizon_reshare.config.command import CommandConfig
from mobilizon_reshare.event.event import MobilizonEvent from mobilizon_reshare.dataclasses import MobilizonEvent
from mobilizon_reshare.dataclasses.event import (
get_published_events,
get_mobilizon_events_without_publications,
)
from mobilizon_reshare.dataclasses.publication import (
_EventPublication,
build_publications_for_event,
)
from mobilizon_reshare.event.event_selection_strategies import select_event_to_publish from mobilizon_reshare.event.event_selection_strategies import select_event_to_publish
from mobilizon_reshare.publishers import get_active_publishers from mobilizon_reshare.publishers import get_active_publishers
from mobilizon_reshare.publishers.abstract import EventPublication from mobilizon_reshare.publishers.coordinators.event_publishing.dry_run import (
DryRunPublisherCoordinator,
)
from mobilizon_reshare.publishers.coordinators.event_publishing.notify import ( from mobilizon_reshare.publishers.coordinators.event_publishing.notify import (
PublicationFailureNotifiersCoordinator, PublicationFailureNotifiersCoordinator,
) )
@ -13,21 +23,13 @@ from mobilizon_reshare.publishers.coordinators.event_publishing.publish import (
PublisherCoordinatorReport, PublisherCoordinatorReport,
PublisherCoordinator, PublisherCoordinator,
) )
from mobilizon_reshare.storage.query.read import (
get_published_events,
build_publications,
events_without_publications,
)
from mobilizon_reshare.storage.query.write import save_publication_report from mobilizon_reshare.storage.query.write import save_publication_report
from mobilizon_reshare.publishers.coordinators.event_publishing.dry_run import (
DryRunPublisherCoordinator,
)
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
async def publish_publications( async def publish_publications(
publications: list[EventPublication], publications: list[_EventPublication],
) -> PublisherCoordinatorReport: ) -> PublisherCoordinatorReport:
report = PublisherCoordinator(publications).run() report = PublisherCoordinator(publications).run()
@ -39,7 +41,7 @@ async def publish_publications(
return report return report
def perform_dry_run(publications: list[EventPublication]): def perform_dry_run(publications: list[_EventPublication]):
return DryRunPublisherCoordinator(publications).run() return DryRunPublisherCoordinator(publications).run()
@ -53,7 +55,7 @@ async def publish_event(
if not (publishers and all(publishers)): if not (publishers and all(publishers)):
publishers = get_active_publishers() publishers = get_active_publishers()
publications = await build_publications(event, publishers) publications = await build_publications_for_event(event, publishers)
if command_config.dry_run: if command_config.dry_run:
logger.info("Executing in dry run mode. No event is going to be published.") logger.info("Executing in dry run mode. No event is going to be published.")
return perform_dry_run(publications) return perform_dry_run(publications)
@ -70,7 +72,7 @@ async def select_and_publish(
:return: :return:
""" """
if unpublished_events is None: if unpublished_events is None:
unpublished_events = await events_without_publications() unpublished_events = await get_mobilizon_events_without_publications()
event = select_event_to_publish( event = select_event_to_publish(
list(await get_published_events()), unpublished_events, list(await get_published_events()), unpublished_events,

View File

@ -1,6 +1,6 @@
import logging.config import logging.config
from mobilizon_reshare.event.event import MobilizonEvent from mobilizon_reshare.dataclasses import MobilizonEvent
from mobilizon_reshare.mobilizon.events import get_mobilizon_future_events from mobilizon_reshare.mobilizon.events import get_mobilizon_future_events
from mobilizon_reshare.storage.query.write import create_unpublished_events from mobilizon_reshare.storage.query.write import create_unpublished_events

View File

@ -4,31 +4,32 @@ from typing import Optional, List
from arrow import now from arrow import now
from mobilizon_reshare.config.command import CommandConfig from mobilizon_reshare.config.command import CommandConfig
from mobilizon_reshare.event.event import EventPublicationStatus, MobilizonEvent from mobilizon_reshare.dataclasses import EventPublicationStatus
from mobilizon_reshare.dataclasses import MobilizonEvent
from mobilizon_reshare.dataclasses.event import get_mobilizon_events_with_status
from mobilizon_reshare.dataclasses.publication import RecapPublication
from mobilizon_reshare.publishers import get_active_publishers from mobilizon_reshare.publishers import get_active_publishers
from mobilizon_reshare.publishers.abstract import RecapPublication from mobilizon_reshare.publishers.coordinators import BaseCoordinatorReport
from mobilizon_reshare.publishers.coordinators.event_publishing.notify import ( from mobilizon_reshare.publishers.coordinators.event_publishing.notify import (
PublicationFailureNotifiersCoordinator, PublicationFailureNotifiersCoordinator,
) )
from mobilizon_reshare.publishers.coordinators.recap_publishing.dry_run import (
DryRunRecapCoordinator,
)
from mobilizon_reshare.publishers.coordinators.recap_publishing.recap import ( from mobilizon_reshare.publishers.coordinators.recap_publishing.recap import (
RecapCoordinator, RecapCoordinator,
) )
from mobilizon_reshare.publishers.coordinators import BaseCoordinatorReport
from mobilizon_reshare.publishers.platforms.platform_mapping import ( from mobilizon_reshare.publishers.platforms.platform_mapping import (
get_publisher_class, get_publisher_class,
get_formatter_class, get_formatter_class,
) )
from mobilizon_reshare.storage.query.read import events_with_status
from mobilizon_reshare.publishers.coordinators.recap_publishing.dry_run import (
DryRunRecapCoordinator,
)
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
async def select_events_to_recap() -> List[MobilizonEvent]: async def select_events_to_recap() -> List[MobilizonEvent]:
return list( return list(
await events_with_status( await get_mobilizon_events_with_status(
status=[EventPublicationStatus.COMPLETED], from_date=now() status=[EventPublicationStatus.COMPLETED], from_date=now()
) )
) )

View File

@ -2,22 +2,22 @@ import logging
from typing import Optional from typing import Optional
from uuid import UUID from uuid import UUID
from tortoise.exceptions import DoesNotExist
from mobilizon_reshare.dataclasses import MobilizonEvent, EventPublication
from mobilizon_reshare.dataclasses.publication import get_failed_publications_for_event
from mobilizon_reshare.main.publish import publish_publications from mobilizon_reshare.main.publish import publish_publications
from mobilizon_reshare.publishers.coordinators.event_publishing.publish import ( from mobilizon_reshare.publishers.coordinators.event_publishing.publish import (
PublisherCoordinatorReport, PublisherCoordinatorReport,
) )
from mobilizon_reshare.storage.query.exceptions import EventNotFound from mobilizon_reshare.storage.query.exceptions import EventNotFound
from mobilizon_reshare.storage.query.read import (
get_failed_publications_for_event,
get_publication,
)
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
async def retry_event_publications(event_id) -> Optional[PublisherCoordinatorReport]: async def retry_event_publications(event_id) -> Optional[PublisherCoordinatorReport]:
event = await MobilizonEvent.retrieve(event_id)
failed_publications = await get_failed_publications_for_event(event_id) failed_publications = await get_failed_publications_for_event(event)
if not failed_publications: if not failed_publications:
logger.info("No failed publications found.") logger.info("No failed publications found.")
return return
@ -27,8 +27,9 @@ async def retry_event_publications(event_id) -> Optional[PublisherCoordinatorRep
async def retry_publication(publication_id) -> Optional[PublisherCoordinatorReport]: async def retry_publication(publication_id) -> Optional[PublisherCoordinatorReport]:
publication = await get_publication(publication_id) try:
if not publication: publication = await EventPublication.retrieve(publication_id)
except DoesNotExist:
logger.info(f"Publication {publication_id} not found.") logger.info(f"Publication {publication_id} not found.")
return return

View File

@ -8,7 +8,7 @@ import arrow
import requests import requests
from mobilizon_reshare.config.config import get_settings from mobilizon_reshare.config.config import get_settings
from mobilizon_reshare.event.event import MobilizonEvent, EventPublicationStatus from mobilizon_reshare.dataclasses import MobilizonEvent, _EventPublicationStatus
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -43,7 +43,7 @@ def parse_event(data):
thumbnail_link=parse_picture(data), thumbnail_link=parse_picture(data),
location=parse_location(data), location=parse_location(data),
publication_time=None, publication_time=None,
status=EventPublicationStatus.WAITING, status=_EventPublicationStatus.WAITING,
last_update_time=arrow.get(data["updatedAt"]) if "updatedAt" in data else None, last_update_time=arrow.get(data["updatedAt"]) if "updatedAt" in data else None,
) )

View File

@ -1,5 +1,8 @@
from typing import Iterator
from tortoise import fields from tortoise import fields
from tortoise.models import Model from tortoise.models import Model
from tortoise.transactions import atomic
from mobilizon_reshare.models import WithPydantic from mobilizon_reshare.models import WithPydantic
from mobilizon_reshare.models.publication import PublicationStatus, Publication from mobilizon_reshare.models.publication import PublicationStatus, Publication
@ -42,3 +45,17 @@ class Event(Model, WithPydantic):
publisher_id=publisher.id, publisher_id=publisher.id,
publisher=publisher, publisher=publisher,
) )
async def build_publications(self, publishers: Iterator[str]):
return [
await self.build_publication_by_publisher_name(name) for name in publishers
]
@atomic()
async def get_failed_publications(self,) -> list[Publication]:
return list(
filter(
lambda publications: publications.status == PublicationStatus.FAILED,
self.publications,
)
)

View File

@ -1,17 +1,14 @@
import inspect import inspect
import logging import logging
from abc import ABC, abstractmethod from abc import ABC, abstractmethod
from dataclasses import dataclass from typing import Optional
from typing import List, Optional
from uuid import UUID
from dynaconf.utils.boxing import DynaBox from dynaconf.utils.boxing import DynaBox
from jinja2 import Environment, FileSystemLoader, Template from jinja2 import Environment, FileSystemLoader, Template
from mobilizon_reshare.config.config import get_settings from mobilizon_reshare.config.config import get_settings
from mobilizon_reshare.event.event import MobilizonEvent
from .exceptions import InvalidAttribute from .exceptions import InvalidAttribute
from ..models.publication import Publication from ..dataclasses import _MobilizonEvent
JINJA_ENV = Environment(loader=FileSystemLoader("/")) JINJA_ENV = Environment(loader=FileSystemLoader("/"))
@ -84,10 +81,10 @@ class AbstractPlatform(ABC, LoggerMixin, ConfLoaderMixin):
pass pass
@abstractmethod @abstractmethod
def _send(self, message: str, event: Optional[MobilizonEvent] = None): def _send(self, message: str, event: Optional[_MobilizonEvent] = None):
raise NotImplementedError # pragma: no cover raise NotImplementedError # pragma: no cover
def send(self, message: str, event: Optional[MobilizonEvent] = None): def send(self, message: str, event: Optional[_MobilizonEvent] = None):
""" """
Sends a message to the target channel Sends a message to the target channel
""" """
@ -110,7 +107,7 @@ class AbstractPlatform(ABC, LoggerMixin, ConfLoaderMixin):
class AbstractEventFormatter(LoggerMixin, ConfLoaderMixin): class AbstractEventFormatter(LoggerMixin, ConfLoaderMixin):
@abstractmethod @abstractmethod
def _validate_event(self, event: MobilizonEvent) -> None: def _validate_event(self, event: _MobilizonEvent) -> None:
""" """
Validates publisher's event. Validates publisher's event.
Should raise ``PublisherError`` (or one of its subclasses) if event Should raise ``PublisherError`` (or one of its subclasses) if event
@ -127,7 +124,7 @@ class AbstractEventFormatter(LoggerMixin, ConfLoaderMixin):
""" """
raise NotImplementedError # pragma: no cover raise NotImplementedError # pragma: no cover
def validate_event(self, event: MobilizonEvent) -> None: def validate_event(self, event: _MobilizonEvent) -> None:
self._validate_event(event) self._validate_event(event)
self._validate_message(self.get_message_from_event(event)) self._validate_message(self.get_message_from_event(event))
@ -138,7 +135,7 @@ class AbstractEventFormatter(LoggerMixin, ConfLoaderMixin):
""" """
return event return event
def get_message_from_event(self, event: MobilizonEvent) -> str: def get_message_from_event(self, event: _MobilizonEvent) -> str:
""" """
Retrieves a message from the event itself. Retrieves a message from the event itself.
""" """
@ -167,7 +164,7 @@ class AbstractEventFormatter(LoggerMixin, ConfLoaderMixin):
) )
return JINJA_ENV.get_template(template_path) return JINJA_ENV.get_template(template_path)
def get_recap_fragment(self, event: MobilizonEvent) -> str: def get_recap_fragment(self, event: _MobilizonEvent) -> str:
""" """
Retrieves the fragment that describes a single event inside the event recap. Retrieves the fragment that describes a single event inside the event recap.
""" """
@ -176,32 +173,3 @@ class AbstractEventFormatter(LoggerMixin, ConfLoaderMixin):
def _preprocess_message(self, message: str): def _preprocess_message(self, message: str):
return message return message
@dataclass
class BasePublication:
publisher: AbstractPlatform
formatter: AbstractEventFormatter
@dataclass
class EventPublication(BasePublication):
event: MobilizonEvent
id: UUID
@classmethod
def from_orm(cls, model: Publication, event: MobilizonEvent):
# imported here to avoid circular dependencies
from mobilizon_reshare.publishers.platforms.platform_mapping import (
get_publisher_class,
get_formatter_class,
)
publisher = get_publisher_class(model.publisher.name)()
formatter = get_formatter_class(model.publisher.name)()
return cls(publisher, formatter, event, model.id,)
@dataclass
class RecapPublication(BasePublication):
events: List[MobilizonEvent]

View File

@ -3,17 +3,16 @@ import logging
from dataclasses import dataclass from dataclasses import dataclass
from typing import List, Optional from typing import List, Optional
from mobilizon_reshare.dataclasses.publication import _EventPublication
from mobilizon_reshare.models.publication import PublicationStatus from mobilizon_reshare.models.publication import PublicationStatus
from mobilizon_reshare.publishers.abstract import EventPublication
from mobilizon_reshare.publishers.coordinators import BasePublicationReport from mobilizon_reshare.publishers.coordinators import BasePublicationReport
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@dataclass @dataclass
class EventPublicationReport(BasePublicationReport): class EventPublicationReport(BasePublicationReport):
publication: EventPublication publication: _EventPublication
published_content: Optional[str] = dataclasses.field(default=None) published_content: Optional[str] = dataclasses.field(default=None)
def get_failure_message(self): def get_failure_message(self):
@ -29,7 +28,7 @@ class EventPublicationReport(BasePublicationReport):
class BaseEventPublishingCoordinator: class BaseEventPublishingCoordinator:
def __init__(self, publications: List[EventPublication]): def __init__(self, publications: List[_EventPublication]):
self.publications = publications self.publications = publications
def _safe_run(self, reasons, f, *args, **kwargs): def _safe_run(self, reasons, f, *args, **kwargs):

View File

@ -1,9 +1,10 @@
import dataclasses import dataclasses
import logging
from dataclasses import dataclass from dataclasses import dataclass
from typing import Sequence from typing import Sequence
import logging
from mobilizon_reshare.dataclasses.publication import _EventPublication
from mobilizon_reshare.models.publication import PublicationStatus from mobilizon_reshare.models.publication import PublicationStatus
from mobilizon_reshare.publishers.abstract import EventPublication
from mobilizon_reshare.publishers.coordinators import BaseCoordinatorReport from mobilizon_reshare.publishers.coordinators import BaseCoordinatorReport
from mobilizon_reshare.publishers.coordinators.event_publishing import ( from mobilizon_reshare.publishers.coordinators.event_publishing import (
BaseEventPublishingCoordinator, BaseEventPublishingCoordinator,
@ -17,7 +18,7 @@ logger = logging.getLogger(__name__)
@dataclass @dataclass
class PublisherCoordinatorReport(BaseCoordinatorReport): class PublisherCoordinatorReport(BaseCoordinatorReport):
reports: Sequence[EventPublicationReport] reports: Sequence[EventPublicationReport]
publications: Sequence[EventPublication] = dataclasses.field(default_factory=list) publications: Sequence[_EventPublication] = dataclasses.field(default_factory=list)
def __str__(self): def __str__(self):
platform_messages = [] platform_messages = []

View File

@ -3,7 +3,7 @@ from dataclasses import dataclass
from typing import Optional, Sequence, List from typing import Optional, Sequence, List
from mobilizon_reshare.models.publication import PublicationStatus from mobilizon_reshare.models.publication import PublicationStatus
from mobilizon_reshare.publishers.abstract import RecapPublication from mobilizon_reshare.dataclasses.publication import RecapPublication
from mobilizon_reshare.publishers.coordinators import ( from mobilizon_reshare.publishers.coordinators import (
BasePublicationReport, BasePublicationReport,
BaseCoordinatorReport, BaseCoordinatorReport,

View File

@ -4,7 +4,7 @@ import facebook
import pkg_resources import pkg_resources
from facebook import GraphAPIError from facebook import GraphAPIError
from mobilizon_reshare.event.event import MobilizonEvent from mobilizon_reshare.dataclasses import MobilizonEvent
from mobilizon_reshare.formatting.description import html_to_plaintext from mobilizon_reshare.formatting.description import html_to_plaintext
from mobilizon_reshare.publishers.abstract import ( from mobilizon_reshare.publishers.abstract import (
AbstractPlatform, AbstractPlatform,

View File

@ -5,7 +5,7 @@ import pkg_resources
import requests import requests
from requests import Response from requests import Response
from mobilizon_reshare.event.event import MobilizonEvent from mobilizon_reshare.dataclasses import MobilizonEvent
from mobilizon_reshare.publishers.abstract import ( from mobilizon_reshare.publishers.abstract import (
AbstractPlatform, AbstractPlatform,
AbstractEventFormatter, AbstractEventFormatter,

View File

@ -6,7 +6,7 @@ import requests
from bs4 import BeautifulSoup from bs4 import BeautifulSoup
from requests import Response from requests import Response
from mobilizon_reshare.event.event import MobilizonEvent from mobilizon_reshare.dataclasses import MobilizonEvent
from mobilizon_reshare.publishers.abstract import ( from mobilizon_reshare.publishers.abstract import (
AbstractEventFormatter, AbstractEventFormatter,
AbstractPlatform, AbstractPlatform,

View File

@ -4,7 +4,7 @@ import pkg_resources
from tweepy import OAuthHandler, API, TweepyException from tweepy import OAuthHandler, API, TweepyException
from tweepy.models import Status from tweepy.models import Status
from mobilizon_reshare.event.event import MobilizonEvent from mobilizon_reshare.dataclasses import MobilizonEvent
from mobilizon_reshare.publishers.abstract import ( from mobilizon_reshare.publishers.abstract import (
AbstractPlatform, AbstractPlatform,
AbstractEventFormatter, AbstractEventFormatter,

View File

@ -6,7 +6,7 @@ import requests
from requests import Response from requests import Response
from requests.auth import HTTPBasicAuth from requests.auth import HTTPBasicAuth
from mobilizon_reshare.event.event import MobilizonEvent from mobilizon_reshare.dataclasses import MobilizonEvent
from mobilizon_reshare.formatting.description import html_to_markdown from mobilizon_reshare.formatting.description import html_to_markdown
from mobilizon_reshare.publishers.abstract import ( from mobilizon_reshare.publishers.abstract import (
AbstractPlatform, AbstractPlatform,

View File

@ -1,63 +1,16 @@
from functools import partial from typing import Iterable, Optional
from typing import Iterable, Optional, Iterator
from uuid import UUID from uuid import UUID
from arrow import Arrow from arrow import Arrow
from tortoise.exceptions import DoesNotExist
from tortoise.queryset import QuerySet from tortoise.queryset import QuerySet
from tortoise.transactions import atomic from tortoise.transactions import atomic
from mobilizon_reshare.event.event import MobilizonEvent, EventPublicationStatus
from mobilizon_reshare.models.event import Event from mobilizon_reshare.models.event import Event
from mobilizon_reshare.models.publication import Publication, PublicationStatus from mobilizon_reshare.models.publication import Publication, PublicationStatus
from mobilizon_reshare.models.publisher import Publisher from mobilizon_reshare.models.publisher import Publisher
from mobilizon_reshare.publishers.abstract import EventPublication
from mobilizon_reshare.storage.query.exceptions import EventNotFound from mobilizon_reshare.storage.query.exceptions import EventNotFound
async def get_published_events(
from_date: Optional[Arrow] = None, to_date: Optional[Arrow] = None
) -> Iterable[MobilizonEvent]:
"""
Retrieves events that are not waiting. Function could be renamed to something more fitting
:return:
"""
return await events_with_status(
[
EventPublicationStatus.COMPLETED,
EventPublicationStatus.PARTIAL,
EventPublicationStatus.FAILED,
],
from_date=from_date,
to_date=to_date,
)
async def events_with_status(
status: list[EventPublicationStatus],
from_date: Optional[Arrow] = None,
to_date: Optional[Arrow] = None,
) -> Iterable[MobilizonEvent]:
def _filter_event_with_status(event: Event) -> bool:
# This computes the status client-side instead of running in the DB. It shouldn't pose a performance problem
# in the short term, but should be moved to the query if possible.
event_status = MobilizonEvent._compute_event_status(list(event.publications))
return event_status in status
query = Event.all()
return map(
MobilizonEvent.from_model,
filter(
_filter_event_with_status,
await prefetch_event_relations(
_add_date_window(query, "begin_datetime", from_date, to_date)
),
),
)
async def get_all_publications( async def get_all_publications(
from_date: Optional[Arrow] = None, to_date: Optional[Arrow] = None, from_date: Optional[Arrow] = None, to_date: Optional[Arrow] = None,
) -> Iterable[Publication]: ) -> Iterable[Publication]:
@ -66,12 +19,6 @@ async def get_all_publications(
) )
async def get_all_mobilizon_events(
from_date: Optional[Arrow] = None, to_date: Optional[Arrow] = None,
) -> list[MobilizonEvent]:
return [MobilizonEvent.from_model(event) for event in await get_all_events()]
async def get_all_events( async def get_all_events(
from_date: Optional[Arrow] = None, to_date: Optional[Arrow] = None from_date: Optional[Arrow] = None, to_date: Optional[Arrow] = None
): ):
@ -96,7 +43,12 @@ async def prefetch_publication_relations(
queryset: QuerySet[Publication], queryset: QuerySet[Publication],
) -> list[Publication]: ) -> list[Publication]:
publication = ( publication = (
await queryset.prefetch_related("publisher", "event") await queryset.prefetch_related(
"publisher",
"event",
"event__publications",
"event__publications__publisher",
)
.order_by("timestamp") .order_by("timestamp")
.distinct() .distinct()
) )
@ -129,16 +81,6 @@ async def publications_with_status(
) )
async def events_without_publications(
from_date: Optional[Arrow] = None, to_date: Optional[Arrow] = None,
) -> list[MobilizonEvent]:
query = Event.filter(publications__id=None)
events = await prefetch_event_relations(
_add_date_window(query, "begin_datetime", from_date, to_date)
)
return [MobilizonEvent.from_model(event) for event in events]
async def get_event(event_mobilizon_id: UUID) -> Event: async def get_event(event_mobilizon_id: UUID) -> Event:
events = await prefetch_event_relations( events = await prefetch_event_relations(
Event.filter(mobilizon_id=event_mobilizon_id) Event.filter(mobilizon_id=event_mobilizon_id)
@ -149,73 +91,10 @@ async def get_event(event_mobilizon_id: UUID) -> Event:
return events[0] return events[0]
async def get_event_publications( async def get_events_without_publications(
mobilizon_event: MobilizonEvent, from_date: Optional[Arrow] = None, to_date: Optional[Arrow] = None,
) -> list[EventPublication]: ) -> list[Event]:
event = await get_event(mobilizon_event.mobilizon_id) query = Event.filter(publications__id=None)
return [EventPublication.from_orm(p, mobilizon_event) for p in event.publications] return await prefetch_event_relations(
_add_date_window(query, "begin_datetime", from_date, to_date)
async def get_mobilizon_event(event_mobilizon_id: UUID) -> MobilizonEvent:
return MobilizonEvent.from_model(await get_event(event_mobilizon_id))
async def get_publisher_by_name(name) -> Publisher:
return await Publisher.filter(name=name).first()
async def is_known(event: MobilizonEvent) -> bool:
try:
await get_event(event.mobilizon_id)
return True
except EventNotFound:
return False
@atomic()
async def build_publications(
event: MobilizonEvent, publishers: Iterator[str]
) -> list[EventPublication]:
event_model = await get_event(event.mobilizon_id)
models = [
await event_model.build_publication_by_publisher_name(name)
for name in publishers
]
return [EventPublication.from_orm(m, event) for m in models]
@atomic()
async def get_failed_publications_for_event(
event_mobilizon_id: UUID,
) -> list[EventPublication]:
event = await get_event(event_mobilizon_id)
failed_publications = list(
filter(
lambda publications: publications.status == PublicationStatus.FAILED,
event.publications,
)
) )
for p in failed_publications:
await p.fetch_related("publisher")
mobilizon_event = MobilizonEvent.from_model(event)
return list(
map(
partial(EventPublication.from_orm, event=mobilizon_event),
failed_publications,
)
)
@atomic()
async def get_publication(publication_id: UUID):
try:
publication = await prefetch_publication_relations(
Publication.get(id=publication_id).first()
)
# TODO: this is redundant but there's some prefetch problem otherwise
publication.event = await get_event(publication.event.mobilizon_id)
return EventPublication.from_orm(
event=MobilizonEvent.from_model(publication.event), model=publication
)
except DoesNotExist:
return None
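After this change, storage/query/read.py deals only in ORM rows, while the MobilizonEvent-level reads moved to mobilizon_reshare.dataclasses.event; a small sketch of the resulting layering (the function name layering_sketch is illustrative):

from mobilizon_reshare.dataclasses.event import get_mobilizon_events_without_publications
from mobilizon_reshare.storage.query.read import get_events_without_publications

async def layering_sketch():
    # model layer: plain tortoise Event rows, no dataclass conversion
    event_rows = await get_events_without_publications()
    # dataclass layer: the same query, each row wrapped by _MobilizonEvent.from_model
    events = await get_mobilizon_events_without_publications()
    return event_rows, events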

View File

@ -1,33 +1,32 @@
import logging import logging
from typing import Iterable, Optional from typing import Iterable
import arrow import arrow
from tortoise.transactions import atomic from tortoise.transactions import atomic
from mobilizon_reshare.event.event import MobilizonEvent from mobilizon_reshare.dataclasses import MobilizonEvent
from mobilizon_reshare.dataclasses.event import (
get_mobilizon_events_without_publications,
)
from mobilizon_reshare.models.event import Event from mobilizon_reshare.models.event import Event
from mobilizon_reshare.models.publication import Publication from mobilizon_reshare.models.publication import Publication
from mobilizon_reshare.models.publisher import Publisher from mobilizon_reshare.models.publisher import Publisher
from mobilizon_reshare.publishers.coordinators.event_publishing import (
EventPublicationReport,
)
from mobilizon_reshare.publishers.coordinators.event_publishing.publish import ( from mobilizon_reshare.publishers.coordinators.event_publishing.publish import (
PublisherCoordinatorReport, PublisherCoordinatorReport,
) )
from mobilizon_reshare.storage.query.read import ( from mobilizon_reshare.storage.query.read import get_event
events_without_publications,
is_known,
get_publisher_by_name,
get_event,
)
async def create_publisher(name: str, account_ref: Optional[str] = None) -> None:
await Publisher.create(name=name, account_ref=account_ref)
@atomic() @atomic()
async def upsert_publication(publication_report, event): async def upsert_publication(
publication_report: EventPublicationReport, event: MobilizonEvent
):
publisher = await get_publisher_by_name( publisher_model = await (
name=publication_report.publication.publisher.name Publisher.get(name=publication_report.publication.publisher.name).first()
) )
old_publication = await Publication.filter( old_publication = await Publication.filter(
id=publication_report.publication.id id=publication_report.publication.id
@ -44,7 +43,7 @@ async def upsert_publication(publication_report, event):
await Publication.create( await Publication.create(
id=publication_report.publication.id, id=publication_report.publication.id,
event_id=event.id, event_id=event.id,
publisher_id=publisher.id, publisher_id=publisher_model.id,
status=publication_report.status, status=publication_report.status,
reason=publication_report.reason, reason=publication_report.reason,
timestamp=arrow.now().datetime, timestamp=arrow.now().datetime,
@ -76,7 +75,7 @@ async def create_unpublished_events(
""" """
# There are three cases: # There are three cases:
for event in events_from_mobilizon: for event in events_from_mobilizon:
if not await is_known(event): if not await Event.exists(mobilizon_id=event.mobilizon_id):
# Either an event is unknown # Either an event is unknown
await event.to_model().save() await event.to_model().save()
else: else:
@ -86,7 +85,7 @@ async def create_unpublished_events(
await event.to_model(db_id=event_model.id).save(force_update=True) await event.to_model(db_id=event_model.id).save(force_update=True)
# Or it's known and unchanged, in which case we do nothing. # Or it's known and unchanged, in which case we do nothing.
return await events_without_publications() return await get_mobilizon_events_without_publications()
@atomic() @atomic()
@ -95,4 +94,4 @@ async def update_publishers(names: Iterable[str],) -> None:
known_publisher_names = set(p.name for p in await Publisher.all()) known_publisher_names = set(p.name for p in await Publisher.all())
for name in names.difference(known_publisher_names): for name in names.difference(known_publisher_names):
logging.info(f"Creating {name} publisher") logging.info(f"Creating {name} publisher")
await create_publisher(name) await Publisher.create(name=name, account_ref=None)

poetry.lock (generated, 76 changed lines)
View File

@ -38,7 +38,7 @@ python-versions = "*"
[[package]] [[package]]
name = "anyio" name = "anyio"
version = "3.6.2" version = "3.6.1"
description = "High level compatibility layer for multiple asynchronous event loop implementations" description = "High level compatibility layer for multiple asynchronous event loop implementations"
category = "main" category = "main"
optional = false optional = false
@ -51,7 +51,7 @@ sniffio = ">=1.1"
[package.extras] [package.extras]
doc = ["packaging", "sphinx-rtd-theme", "sphinx-autodoc-typehints (>=1.2.0)"] doc = ["packaging", "sphinx-rtd-theme", "sphinx-autodoc-typehints (>=1.2.0)"]
test = ["coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "contextlib2", "uvloop (<0.15)", "mock (>=4)", "uvloop (>=0.15)"] test = ["coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "contextlib2", "uvloop (<0.15)", "mock (>=4)", "uvloop (>=0.15)"]
trio = ["trio (>=0.16,<0.22)"] trio = ["trio (>=0.16)"]
[[package]] [[package]]
name = "appdirs" name = "appdirs"
@ -85,16 +85,16 @@ tests = ["pytest", "pytest-asyncio", "mypy (>=0.800)"]
[[package]] [[package]]
name = "asyncpg" name = "asyncpg"
version = "0.27.0" version = "0.26.0"
description = "An asyncio PostgreSQL driver" description = "An asyncio PostgreSQL driver"
category = "main" category = "main"
optional = false optional = false
python-versions = ">=3.7.0" python-versions = ">=3.6.0"
[package.extras] [package.extras]
dev = ["Cython (>=0.29.24,<0.30.0)", "pytest (>=6.0)", "Sphinx (>=4.1.2,<4.2.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "flake8 (>=5.0.4,<5.1.0)", "uvloop (>=0.15.3)"] dev = ["Cython (>=0.29.24,<0.30.0)", "pytest (>=6.0)", "Sphinx (>=4.1.2,<4.2.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "pycodestyle (>=2.7.0,<2.8.0)", "flake8 (>=3.9.2,<3.10.0)", "uvloop (>=0.15.3)"]
docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)"] docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)"]
test = ["flake8 (>=5.0.4,<5.1.0)", "uvloop (>=0.15.3)"] test = ["pycodestyle (>=2.7.0,<2.8.0)", "flake8 (>=3.9.2,<3.10.0)", "uvloop (>=0.15.3)"]
[[package]] [[package]]
name = "asynctest" name = "asynctest"
@ -128,7 +128,7 @@ tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>
[[package]] [[package]]
name = "babel" name = "babel"
version = "2.11.0" version = "2.10.3"
description = "Internationalization utilities" description = "Internationalization utilities"
category = "dev" category = "dev"
optional = false optional = false
@ -154,7 +154,7 @@ lxml = ["lxml"]
[[package]] [[package]]
name = "certifi" name = "certifi"
version = "2022.12.7" version = "2022.9.24"
description = "Python package for providing Mozilla's CA Bundle." description = "Python package for providing Mozilla's CA Bundle."
category = "main" category = "main"
optional = false optional = false
@ -162,11 +162,11 @@ python-versions = ">=3.6"
[[package]] [[package]]
name = "charset-normalizer" name = "charset-normalizer"
version = "2.1.1" version = "2.0.12"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
category = "main" category = "main"
optional = false optional = false
python-versions = ">=3.6.0" python-versions = ">=3.5.0"
[package.extras] [package.extras]
unicode_backport = ["unicodedata2"] unicode_backport = ["unicodedata2"]
@ -184,11 +184,11 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""}
[[package]] [[package]]
name = "colorama" name = "colorama"
version = "0.4.6" version = "0.4.5"
description = "Cross-platform colored terminal text." description = "Cross-platform colored terminal text."
category = "main" category = "main"
optional = false optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
[[package]] [[package]]
name = "coverage" name = "coverage"
@ -265,7 +265,7 @@ requests = "*"
[[package]] [[package]]
name = "fastapi" name = "fastapi"
version = "0.85.2" version = "0.85.1"
description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production"
category = "main" category = "main"
optional = false optional = false
@ -278,8 +278,8 @@ starlette = "0.20.4"
[package.extras] [package.extras]
all = ["email-validator (>=1.1.1,<2.0.0)", "itsdangerous (>=1.1.0,<3.0.0)", "jinja2 (>=2.11.2,<4.0.0)", "orjson (>=3.2.1,<4.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "pyyaml (>=5.3.1,<7.0.0)", "requests (>=2.24.0,<3.0.0)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0)", "uvicorn[standard] (>=0.12.0,<0.19.0)"] all = ["email-validator (>=1.1.1,<2.0.0)", "itsdangerous (>=1.1.0,<3.0.0)", "jinja2 (>=2.11.2,<4.0.0)", "orjson (>=3.2.1,<4.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "pyyaml (>=5.3.1,<7.0.0)", "requests (>=2.24.0,<3.0.0)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0)", "uvicorn[standard] (>=0.12.0,<0.19.0)"]
dev = ["autoflake (>=1.4.0,<2.0.0)", "flake8 (>=3.8.3,<6.0.0)", "pre-commit (>=2.17.0,<3.0.0)", "uvicorn[standard] (>=0.12.0,<0.19.0)"] dev = ["autoflake (>=1.4.0,<2.0.0)", "flake8 (>=3.8.3,<6.0.0)", "pre-commit (>=2.17.0,<3.0.0)", "uvicorn[standard] (>=0.12.0,<0.19.0)"]
doc = ["mdx-include (>=1.4.1,<2.0.0)", "mkdocs-markdownextradata-plugin (>=0.1.7,<0.3.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "pyyaml (>=5.3.1,<7.0.0)", "typer[all] (>=0.6.1,<0.7.0)"] doc = ["mdx-include (>=1.4.1,<2.0.0)", "mkdocs-markdownextradata-plugin (>=0.1.7,<0.3.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "pyyaml (>=5.3.1,<7.0.0)", "typer (>=0.4.1,<0.7.0)"]
test = ["anyio[trio] (>=3.2.1,<4.0.0)", "black (==22.8.0)", "databases[sqlite] (>=0.3.2,<0.7.0)", "email-validator (>=1.1.1,<2.0.0)", "flake8 (>=3.8.3,<6.0.0)", "flask (>=1.1.2,<3.0.0)", "httpx (>=0.23.0,<0.24.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.982)", "orjson (>=3.2.1,<4.0.0)", "passlib[bcrypt] (>=1.7.2,<2.0.0)", "peewee (>=3.13.3,<4.0.0)", "pytest-cov (>=2.12.0,<5.0.0)", "pytest (>=7.1.3,<8.0.0)", "python-jose[cryptography] (>=3.3.0,<4.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "pyyaml (>=5.3.1,<7.0.0)", "requests (>=2.24.0,<3.0.0)", "sqlalchemy (>=1.3.18,<=1.4.41)", "types-orjson (==3.6.2)", "types-ujson (==5.5.0)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0)"] test = ["anyio[trio] (>=3.2.1,<4.0.0)", "black (==22.8.0)", "databases[sqlite] (>=0.3.2,<0.7.0)", "email-validator (>=1.1.1,<2.0.0)", "flake8 (>=3.8.3,<6.0.0)", "flask (>=1.1.2,<3.0.0)", "httpx (>=0.23.0,<0.24.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.971)", "orjson (>=3.2.1,<4.0.0)", "passlib[bcrypt] (>=1.7.2,<2.0.0)", "peewee (>=3.13.3,<4.0.0)", "pytest-cov (>=2.12.0,<4.0.0)", "pytest (>=7.1.3,<8.0.0)", "python-jose[cryptography] (>=3.3.0,<4.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "pyyaml (>=5.3.1,<7.0.0)", "requests (>=2.24.0,<3.0.0)", "sqlalchemy (>=1.3.18,<1.5.0)", "types-orjson (==3.6.2)", "types-ujson (==5.4.0)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0)"]
[[package]] [[package]]
name = "fastapi-pagination" name = "fastapi-pagination"
@ -311,24 +311,24 @@ scylla-driver = ["scylla-driver (>=3.25.6,<4.0.0)"]
[[package]] [[package]]
name = "h11" name = "h11"
version = "0.14.0" version = "0.12.0"
description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
category = "main" category = "main"
optional = false optional = false
python-versions = ">=3.7" python-versions = ">=3.6"
[[package]] [[package]]
name = "httpcore"
-version = "0.16.2"
+version = "0.15.0"
description = "A minimal low-level HTTP client."
category = "dev"
optional = false
python-versions = ">=3.7"
[package.dependencies]
-anyio = ">=3.0,<5.0"
+anyio = ">=3.0.0,<4.0.0"
certifi = "*"
-h11 = ">=0.13,<0.15"
+h11 = ">=0.11,<0.13"
sniffio = ">=1.0.0,<2.0.0"
[package.extras]
@@ -337,7 +337,7 @@ socks = ["socksio (>=1.0.0,<2.0.0)"]
[[package]]
name = "httpx"
-version = "0.23.1"
+version = "0.23.0"
description = "The next generation HTTP client."
category = "dev"
optional = false
@@ -345,13 +345,13 @@ python-versions = ">=3.7"
[package.dependencies]
certifi = "*"
-httpcore = ">=0.15.0,<0.17.0"
+httpcore = ">=0.15.0,<0.16.0"
rfc3986 = {version = ">=1.3,<2", extras = ["idna2008"]}
sniffio = "*"
[package.extras]
-brotli = ["brotli", "brotlicffi"]
+brotli = ["brotlicffi", "brotli"]
-cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<13)"]
+cli = ["click (>=8.0.0,<9.0.0)", "rich (>=10,<13)", "pygments (>=2.0.0,<3.0.0)"]
http2 = ["h2 (>=3,<5)"]
socks = ["socksio (>=1.0.0,<2.0.0)"]
@@ -373,7 +373,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[[package]]
name = "importlib-metadata"
-version = "5.1.0"
+version = "5.0.0"
description = "Read metadata from Python packages"
category = "dev"
optional = false
@@ -385,7 +385,7 @@ zipp = ">=0.5"
[package.extras]
docs = ["sphinx (>=3.5)", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "furo", "jaraco.tidelift (>=1.4)"]
perf = ["ipython"]
-testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "flake8 (<5)", "pytest-cov", "pytest-enabler (>=1.3)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "pytest-flake8", "importlib-resources (>=1.3)"]
+testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "flake8 (<5)", "pytest-cov", "pytest-enabler (>=1.3)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "importlib-resources (>=1.3)"]
[[package]]
name = "iniconfig"
@@ -640,7 +640,7 @@ unidecode = ["Unidecode (>=1.1.1)"]
[[package]]
name = "pytz"
-version = "2022.6"
+version = "2022.4"
description = "World timezone definitions, modern and historical"
category = "main"
optional = false
@@ -930,11 +930,11 @@ python-versions = ">=3.7"
[[package]]
name = "tomlkit"
-version = "0.11.6"
+version = "0.11.5"
description = "Style preserving TOML library"
category = "main"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.6,<4.0"
[[package]]
name = "tortoise-orm"
@@ -995,11 +995,11 @@ python-versions = ">=3.5"
[[package]]
name = "urllib3"
-version = "1.26.13"
+version = "1.26.12"
description = "HTTP library with thread-safe connection pooling, file post, and more."
category = "main"
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4"
[package.extras]
brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"]
@@ -1024,7 +1024,7 @@ standard = ["websockets (>=10.0)", "httptools (>=0.4.0)", "watchgod (>=0.6)", "p
[[package]]
name = "zipp"
-version = "3.11.0"
+version = "3.9.0"
description = "Backport of pathlib-compatible object wrapper for zip files"
category = "dev"
optional = false
@@ -1032,7 +1032,7 @@ python-versions = ">=3.7"
[package.extras]
docs = ["sphinx (>=3.5)", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "furo", "jaraco.tidelift (>=1.4)"]
-testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "flake8 (<5)", "pytest-cov", "pytest-enabler (>=1.3)", "jaraco.itertools", "func-timeout", "jaraco.functools", "more-itertools", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "pytest-flake8"]
+testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "flake8 (<5)", "pytest-cov", "pytest-enabler (>=1.3)", "jaraco.itertools", "func-timeout", "jaraco.functools", "more-itertools", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"]
[metadata]
lock-version = "1.1"
@@ -1069,7 +1069,10 @@ beautifulsoup4 = [
{file = "beautifulsoup4-4.10.0.tar.gz", hash = "sha256:c23ad23c521d818955a4151a67d81580319d4bf548d3d49f4223ae041ff98891"},
]
certifi = []
-charset-normalizer = []
+charset-normalizer = [
+{file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"},
+{file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"},
+]
click = []
colorama = []
coverage = []
@@ -1083,7 +1086,10 @@ dynaconf = []
facebook-sdk = []
fastapi = []
fastapi-pagination = []
-h11 = []
+h11 = [
+{file = "h11-0.12.0-py3-none-any.whl", hash = "sha256:36a3cb8c0a032f56e2da7084577878a035d3b61d104230d4bd49c0c6b555a9c6"},
+{file = "h11-0.12.0.tar.gz", hash = "sha256:47222cb6067e4a307d535814917cd98fd0a57b6788ce715755fa2b6c28b56042"},
+]
httpcore = []
httpx = []
idna = []
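
The pins above form one compatible chain: httpx 0.23.0 accepts httpcore >=0.15.0,<0.16.0, and httpcore 0.15.0 accepts h11 >=0.11,<0.13, which is why the h11 0.12.0 hashes reappear in the metadata files section. A quick sanity check for an environment installed from this lock might look like the sketch below; the assertions are only illustrative.

from importlib.metadata import version

# Versions taken from the lock entries above.
assert version("httpx") == "0.23.0"
assert version("httpcore") == "0.15.0"
assert version("h11").startswith("0.12.")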

View File

@@ -3,7 +3,7 @@ from arrow import arrow
from mobilizon_reshare.cli.commands.list.list_event import list_events
from mobilizon_reshare.cli.commands.list.list_publication import list_publications
-from mobilizon_reshare.event.event import EventPublicationStatus
+from mobilizon_reshare.dataclasses.event import _EventPublicationStatus
from mobilizon_reshare.models.publication import PublicationStatus
spec = {
@@ -40,7 +40,7 @@ async def test_list_events(capsys, generate_models):
@pytest.mark.asyncio
async def test_list_events_with_status(capsys, generate_models):
await generate_models(spec)
-await list_events(status=EventPublicationStatus.WAITING)
+await list_events(status=_EventPublicationStatus.WAITING)
output = capsys.readouterr()
assert clean_output(output) == [
"event_0 WAITING 00000000-0000-0000-0000-000000000000"
@@ -114,7 +114,7 @@ async def test_list_publications_empty(capsys, generate_models):
@pytest.mark.asyncio
async def test_list_events_empty_with_status(capsys, generate_models):
-await list_events(status=EventPublicationStatus.FAILED)
+await list_events(status=_EventPublicationStatus.FAILED)
output = capsys.readouterr()
assert clean_output(output) == ["No event found with status: FAILED"]
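
The rename above runs through the whole suite: the status enum formerly imported as EventPublicationStatus from mobilizon_reshare.event.event now lives in the dataclasses package, with _EventPublicationStatus as the internal name. A minimal sketch of the new import surface, assuming a database and settings already configured as in these tests:

import asyncio

from mobilizon_reshare.cli.commands.list.list_event import list_events
from mobilizon_reshare.dataclasses.event import _EventPublicationStatus

async def show_waiting_events():
    # Print only the events that are still waiting to be published.
    await list_events(status=_EventPublicationStatus.WAITING)

asyncio.run(show_waiting_events())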

View File

@@ -2,8 +2,9 @@ from logging import DEBUG
import pytest
+from mobilizon_reshare.dataclasses import EventPublicationStatus
+from mobilizon_reshare.dataclasses import MobilizonEvent
from mobilizon_reshare.main.publish import select_and_publish, publish_event
-from mobilizon_reshare.event.event import EventPublicationStatus, MobilizonEvent
from mobilizon_reshare.models.event import Event
from mobilizon_reshare.models.publication import PublicationStatus
from mobilizon_reshare.storage.query.read import get_all_publications

View File

@@ -2,16 +2,16 @@ from logging import DEBUG, INFO
import pytest
-from mobilizon_reshare.storage.query.read import (
+from mobilizon_reshare.dataclasses.event import (
get_all_mobilizon_events,
-events_without_publications,
+get_mobilizon_events_without_publications,
)
+from mobilizon_reshare.main.pull import pull
+from mobilizon_reshare.main.start import start
from tests.commands.conftest import (
second_event_element,
first_event_element,
)
-from mobilizon_reshare.main.pull import pull
-from mobilizon_reshare.main.start import start
from tests.conftest import event_0, event_1
empty_specification = {"event": 0, "publications": [], "publisher": []}
@@ -74,7 +74,7 @@ async def test_pull(
assert (
f"There are now {len(expected_result)} unpublished events." in caplog.text
)
-assert expected_result == await events_without_publications()
+assert expected_result == await get_mobilizon_events_without_publications()
@pytest.mark.asyncio
@@ -113,7 +113,7 @@ async def test_pull_start(
with caplog.at_level(INFO):
assert await pull() == expected_pull
assert expected_pull == await get_all_mobilizon_events()
-assert expected_pull == await events_without_publications()
+assert expected_pull == await get_mobilizon_events_without_publications()
report = await start(command_config)
assert report.successful
@@ -127,7 +127,8 @@ async def test_pull_start(
event.mobilizon_id for event in await get_all_mobilizon_events()
)
assert (pull_ids - publish_ids) == set(
-event.mobilizon_id for event in await events_without_publications()
+event.mobilizon_id
+for event in await get_mobilizon_events_without_publications()
)
@@ -191,7 +192,10 @@ async def test_multiple_pull(
assert await pull()
assert f"There are now {len(expected_first)} unpublished events." in caplog.text
assert expected_first == await get_all_mobilizon_events()
-assert await events_without_publications() == await get_all_mobilizon_events()
+assert (
+await get_mobilizon_events_without_publications()
+== await get_all_mobilizon_events()
+)
# I clean the message collector
message_collector.data = []
@@ -204,4 +208,7 @@ async def test_multiple_pull(
assert set(event.mobilizon_id for event in expected_last) == set(
event.mobilizon_id for event in await get_all_mobilizon_events()
)
-assert await events_without_publications() == await get_all_mobilizon_events()
+assert (
+await get_mobilizon_events_without_publications()
+== await get_all_mobilizon_events()
+)
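
Both read helpers used in these assertions now come from mobilizon_reshare.dataclasses.event. A short sketch of how they compose, assuming (as the tests do) that each returns a list of MobilizonEvent; the wrapper name unpublished_ratio is illustrative:

from mobilizon_reshare.dataclasses.event import (
    get_all_mobilizon_events,
    get_mobilizon_events_without_publications,
)

async def unpublished_ratio() -> float:
    # Compare everything that has been pulled with what still lacks a publication.
    all_events = await get_all_mobilizon_events()
    unpublished = await get_mobilizon_events_without_publications()
    return len(unpublished) / len(all_events) if all_events else 0.0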

View File

@@ -3,12 +3,13 @@ from logging import DEBUG, INFO
import pytest
from mobilizon_reshare.config.command import CommandConfig
-from mobilizon_reshare.storage.query.read import get_all_mobilizon_events
-from tests.commands.conftest import simple_event_element, second_event_element
-from mobilizon_reshare.event.event import EventPublicationStatus, MobilizonEvent
+from mobilizon_reshare.dataclasses import EventPublicationStatus
+from mobilizon_reshare.dataclasses import MobilizonEvent
+from mobilizon_reshare.dataclasses.event import get_all_mobilizon_events
from mobilizon_reshare.main.start import start
from mobilizon_reshare.models.event import Event
from mobilizon_reshare.models.publication import PublicationStatus
+from tests.commands.conftest import simple_event_element, second_event_element
one_published_event_specification = {
"event": 1,

View File

@@ -14,7 +14,7 @@ from tortoise import Tortoise
import mobilizon_reshare
from mobilizon_reshare.config.command import CommandConfig
from mobilizon_reshare.config.config import get_settings
-from mobilizon_reshare.event.event import MobilizonEvent, EventPublicationStatus
+from mobilizon_reshare.dataclasses import MobilizonEvent, EventPublicationStatus
from mobilizon_reshare.models.event import Event
from mobilizon_reshare.models.notification import Notification, NotificationStatus
from mobilizon_reshare.models.publication import Publication, PublicationStatus

View File

@@ -4,18 +4,10 @@ from uuid import UUID
import arrow
import pytest
-from mobilizon_reshare.event.event import MobilizonEvent
+from mobilizon_reshare.dataclasses import MobilizonEvent
from mobilizon_reshare.publishers.platforms.platform_mapping import get_formatter_class
-begin_date = arrow.get(
-datetime(
-year=2021,
-month=1,
-day=1,
-hour=11,
-minute=30,
-)
-)
+begin_date = arrow.get(datetime(year=2021, month=1, day=1, hour=11, minute=30,))
end_date = begin_date.shift(hours=1)

View File

@@ -3,7 +3,7 @@ from uuid import UUID
import arrow
import pytest
-from mobilizon_reshare.event.event import MobilizonEvent
+from mobilizon_reshare.dataclasses import MobilizonEvent
from mobilizon_reshare.mobilizon.events import (
get_mobilizon_future_events,
MobilizonRequestFailed,

View File

@@ -5,7 +5,9 @@ import arrow
import pytest
import tortoise.timezone
-from mobilizon_reshare.event.event import EventPublicationStatus, MobilizonEvent
+from mobilizon_reshare.dataclasses import EventPublicationStatus
+from mobilizon_reshare.dataclasses import MobilizonEvent
+from mobilizon_reshare.dataclasses.event_publication_status import _compute_event_status
from mobilizon_reshare.models.event import Event
from mobilizon_reshare.models.publication import PublicationStatus
@@ -191,4 +193,4 @@ async def test_mobilizon_event_compute_status_partial(
)
await publication.save()
publications.append(publication)
-assert MobilizonEvent._compute_event_status(publications) == expected_result
+assert _compute_event_status(publications) == expected_result
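
_compute_event_status is no longer a private classmethod on MobilizonEvent but a module-level helper in mobilizon_reshare.dataclasses.event_publication_status, which is what the updated assertion exercises. A hedged sketch of a caller, assuming EventPublicationStatus is the public alias of the internal enum and that publications are saved Publication models as built above:

from mobilizon_reshare.dataclasses import EventPublicationStatus
from mobilizon_reshare.dataclasses.event_publication_status import _compute_event_status

def is_fully_published(publications) -> bool:
    # COMPLETED is expected only when every publication attached to the event succeeded.
    return _compute_event_status(publications) == EventPublicationStatus.COMPLETED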

View File

@@ -5,7 +5,7 @@ from uuid import UUID
import arrow
import pytest
-from mobilizon_reshare.event.event import MobilizonEvent
+from mobilizon_reshare.dataclasses import MobilizonEvent
from mobilizon_reshare.publishers.abstract import (
AbstractPlatform,
AbstractEventFormatter,

View File

@@ -1,17 +1,20 @@
import logging
from datetime import timedelta
-from uuid import UUID
from unittest.mock import MagicMock
+from uuid import UUID
import pytest
-from mobilizon_reshare.event.event import MobilizonEvent
+from mobilizon_reshare.dataclasses import MobilizonEvent
+from mobilizon_reshare.dataclasses.publication import (
+_EventPublication,
+RecapPublication,
+)
from mobilizon_reshare.models.publication import (
PublicationStatus,
Publication as PublicationModel,
)
from mobilizon_reshare.models.publisher import Publisher
-from mobilizon_reshare.publishers.abstract import EventPublication, RecapPublication
from mobilizon_reshare.publishers.coordinators.event_publishing.notify import (
PublicationFailureNotifiersCoordinator,
)
@@ -31,7 +34,7 @@ def failure_report(mock_publisher_invalid, event):
return EventPublicationReport(
status=PublicationStatus.FAILED,
reason="some failure",
-publication=EventPublication(
+publication=_EventPublication(
publisher=mock_publisher_invalid,
formatter=None,
event=event,
@@ -103,7 +106,7 @@ async def mock_publications(
timestamp=today + timedelta(hours=i),
reason=None,
)
-publication = EventPublication.from_orm(publication, test_event)
+publication = _EventPublication.from_orm(publication, test_event)
publication.publisher = mock_publisher_valid
publication.formatter = mock_formatter_valid
result.append(publication)
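
EventPublication and RecapPublication have moved out of publishers.abstract: the publication dataclasses now live in mobilizon_reshare.dataclasses.publication, with _EventPublication as the internal name, and the fixture above wraps ORM rows through its from_orm constructor. Schematically, under the same assumptions as that fixture (a saved Publication model plus its event dataclass):

from mobilizon_reshare.dataclasses import MobilizonEvent
from mobilizon_reshare.dataclasses.publication import _EventPublication
from mobilizon_reshare.models.publication import Publication

def to_publication_dataclass(model: Publication, event: MobilizonEvent) -> _EventPublication:
    # Pair the stored publication row with the event it belongs to.
    return _EventPublication.from_orm(model, event)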

View File

@@ -3,6 +3,7 @@ import requests
import responses
from mobilizon_reshare.config.config import get_settings
+from mobilizon_reshare.dataclasses.publication import build_publications_for_event
from mobilizon_reshare.models.publication import PublicationStatus
from mobilizon_reshare.publishers.coordinators.event_publishing.publish import (
PublisherCoordinator,
@@ -14,7 +15,7 @@ from mobilizon_reshare.publishers.exceptions import (
HTTPResponseError,
)
from mobilizon_reshare.publishers.platforms.zulip import ZulipFormatter, ZulipPublisher
-from mobilizon_reshare.storage.query.read import build_publications, get_all_publishers
+from mobilizon_reshare.storage.query.read import get_all_publishers
one_publication_specification = {
"event": 1,
@@ -104,7 +105,7 @@ async def setup_db(generate_models):
async def unsaved_publications(setup_db, event):
await event.to_model().save()
publishers = [p.name for p in await get_all_publishers()]
-return await build_publications(event, publishers)
+return await build_publications_for_event(event, publishers)
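
build_publications (formerly in storage.query.read) is now build_publications_for_event in mobilizon_reshare.dataclasses.publication, while get_all_publishers stays in the storage layer. The unsaved_publications fixture above already shows the pattern; isolated, it looks roughly like this (the wrapper name publications_for is illustrative):

from mobilizon_reshare.dataclasses.publication import build_publications_for_event
from mobilizon_reshare.storage.query.read import get_all_publishers

async def publications_for(event):
    # One unsaved publication per active publisher for the given event.
    publisher_names = [publisher.name for publisher in await get_all_publishers()]
    return await build_publications_for_event(event, publisher_names)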
@pytest.mark.asyncio @pytest.mark.asyncio

View File

@@ -1,23 +1,20 @@
from datetime import timedelta
-from uuid import UUID
import arrow
import pytest
-from mobilizon_reshare.event.event import EventPublicationStatus
-from mobilizon_reshare.models.publication import PublicationStatus
-from mobilizon_reshare.publishers.abstract import EventPublication
-from mobilizon_reshare.storage.query.read import (
+from mobilizon_reshare.dataclasses.event import (
+_EventPublicationStatus,
get_published_events,
-events_with_status,
-publications_with_status,
-events_without_publications,
-build_publications,
-get_event_publications,
+get_mobilizon_events_with_status,
+get_mobilizon_events_without_publications,
)
+from mobilizon_reshare.dataclasses.publication import build_publications_for_event
+from mobilizon_reshare.models.publication import PublicationStatus
+from mobilizon_reshare.storage.query.read import publications_with_status
from tests import today
-from tests.storage import complete_specification
from tests.conftest import event_0, event_1, event_3
+from tests.storage import complete_specification
from tests.storage import result_publication
@@ -57,7 +54,7 @@ async def test_get_published_events(generate_models):
],
)
async def test_publications_with_status(
status, mobilizon_id, from_date, to_date, expected_result, generate_models,
):
await generate_models(complete_specification)
publications = await publications_with_status(
@@ -70,11 +67,11 @@ async def test_publications_with_status(
@pytest.mark.asyncio
@pytest.mark.parametrize(
"status, expected_events_count",
-[(EventPublicationStatus.COMPLETED, 2), (EventPublicationStatus.PARTIAL, 1)],
+[(_EventPublicationStatus.COMPLETED, 2), (_EventPublicationStatus.PARTIAL, 1)],
)
async def test_event_with_status(generate_models, status, expected_events_count):
await generate_models(complete_specification)
-result = list(await events_with_status([status]))
+result = list(await get_mobilizon_events_with_status([status]))
assert len(result) == expected_events_count
@@ -84,37 +81,39 @@ async def test_event_with_status(generate_models, status, expected_events_count)
"status, expected_events_count, begin_window, end_window",
[
(
-EventPublicationStatus.COMPLETED,
+_EventPublicationStatus.COMPLETED,
2,
arrow.get(today + timedelta(hours=-1)),
None,
),
(
-EventPublicationStatus.COMPLETED,
+_EventPublicationStatus.COMPLETED,
1,
arrow.get(today + timedelta(hours=1)),
None,
),
(
-EventPublicationStatus.COMPLETED,
+_EventPublicationStatus.COMPLETED,
1,
arrow.get(today + timedelta(hours=-2)),
arrow.get(today + timedelta(hours=1)),
),
(
-EventPublicationStatus.COMPLETED,
+_EventPublicationStatus.COMPLETED,
0,
arrow.get(today + timedelta(hours=-2)),
arrow.get(today + timedelta(hours=0)),
),
],
)
async def test_event_with_status_window(
generate_models, status, expected_events_count, begin_window, end_window
):
await generate_models(complete_specification)
result = list(
-await events_with_status([status], from_date=begin_window, to_date=end_window)
+await get_mobilizon_events_with_status(
+[status], from_date=begin_window, to_date=end_window
+)
)
assert len(result) == expected_events_count
@@ -126,30 +125,30 @@ async def test_event_with_status_window(
[
({"event": 2, "publications": [], "publisher": ["zulip"]}, [event_0, event_1],),
(
{
"event": 3,
"publications": [
{
"event_idx": 1,
"publisher_idx": 0,
"status": PublicationStatus.FAILED,
},
{
"event_idx": 2,
"publisher_idx": 0,
"status": PublicationStatus.COMPLETED,
},
],
"publisher": ["zulip"],
},
[event_0],
),
(complete_specification, [event_3],),
],
)
async def test_events_without_publications(spec, expected_events, generate_models):
await generate_models(spec)
-unpublished_events = list(await events_without_publications())
+unpublished_events = list(await get_mobilizon_events_without_publications())
assert len(unpublished_events) == len(expected_events)
assert unpublished_events == expected_events
@@ -160,82 +159,35 @@ async def test_events_without_publications(spec, expected_events, generate_model
[
([], {"event": 2, "publications": [], "publisher": ["zulip"]}, event_0, 0,),
(
["zulip"],
{"event": 2, "publications": [], "publisher": ["zulip"]},
event_0,
1,
),
(
["telegram", "zulip", "mastodon", "facebook"],
{
"event": 2,
"publications": [],
"publisher": ["telegram", "zulip", "mastodon", "facebook"],
},
event_0,
4,
),
],
indirect=["mock_active_publishers"],
)
async def test_build_publications(
mock_active_publishers, spec, event, n_publications, generate_models
):
await generate_models(spec)
-publications = list(await build_publications(event, mock_active_publishers))
+publications = list(
+await build_publications_for_event(event, mock_active_publishers)
+)
assert len(publications) == n_publications
for p in publications:
assert p.event == event
assert p.publisher.name in mock_active_publishers
-@pytest.mark.asyncio
-@pytest.mark.parametrize(
-"mock_active_publishers, spec, event, publications_ids",
-[
-(
-["telegram", "zulip", "mastodon", "facebook"],
-{"event": 2, "publications": [], "publisher": ["zulip"]},
-event_0,
-[],
-),
-(
-["telegram", "zulip", "mastodon", "facebook"],
-{
-"event": 2,
-"publications": [
-{
-"event_idx": 1,
-"publisher_idx": 0,
-"status": PublicationStatus.COMPLETED,
-},
-{
-"event_idx": 0,
-"publisher_idx": 0,
-"status": PublicationStatus.FAILED,
-},
-],
-"publisher": ["zulip"],
-},
-event_1,
-# This tuples are made like so: (event_mobilizon_id, publication_id)
-[(UUID(int=1), UUID(int=0))],
-),
-],
-indirect=["mock_active_publishers"],
-)
-async def test_get_event_publications(
-mock_active_publishers, spec, event, publications_ids, generate_models
-):
-await generate_models(spec)
-publications = list(await get_event_publications(event))
-assert len(publications) == len(publications_ids)
-for i, p in enumerate(publications):
-assert p.event.mobilizon_id == publications_ids[i][0]
-assert p.id == publications_ids[i][1]
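
get_mobilizon_events_with_status keeps the optional from_date/to_date window exercised by the parametrized cases above. A short sketch of a windowed query, using Arrow instants as the tests do; the one-day window is an arbitrary example:

import arrow

from mobilizon_reshare.dataclasses.event import (
    _EventPublicationStatus,
    get_mobilizon_events_with_status,
)

async def completed_in_last_day():
    now = arrow.now()
    return await get_mobilizon_events_with_status(
        [_EventPublicationStatus.COMPLETED],
        from_date=now.shift(days=-1),
        to_date=now,
    )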

View File

@@ -2,7 +2,7 @@ from uuid import UUID
import pytest
-from mobilizon_reshare.storage.query.read import get_all_mobilizon_events
+from mobilizon_reshare.dataclasses.event import get_all_mobilizon_events
@pytest.mark.asyncio

View File

@@ -2,9 +2,9 @@ from uuid import UUID
import pytest
+from mobilizon_reshare.dataclasses.publication import _EventPublication
from mobilizon_reshare.models.publication import PublicationStatus, Publication
from mobilizon_reshare.models.publisher import Publisher
-from mobilizon_reshare.publishers.abstract import EventPublication
from mobilizon_reshare.publishers.coordinators.event_publishing.publish import (
EventPublicationReport,
PublisherCoordinatorReport,
@@ -18,8 +18,8 @@ from mobilizon_reshare.storage.query.write import (
update_publishers,
create_unpublished_events,
)
-from tests.storage import complete_specification
from tests.conftest import event_6, event_0, event_1, event_2, event_3, event_3_updated
+from tests.storage import complete_specification
two_publishers_specification = {"publisher": ["telegram", "twitter"]}
@@ -144,7 +144,7 @@ async def test_create_unpublished_events(
EventPublicationReport(
status=PublicationStatus.COMPLETED,
reason="",
-publication=EventPublication(
+publication=_EventPublication(
id=UUID(int=6),
formatter=TelegramFormatter(),
event=event_1,