Add publication crud (#178)

* fixed parsing bug

* implemented events and publications endpoints

split endpoints by entity

removed credentials

* add pagination (#179)

* added pagination

* integrated pagination with tortoise

* added test for publications

* removed converter file

* updated dependencies
This commit is contained in:
Simone Robutti 2022-12-07 21:46:57 +01:00 committed by GitHub
parent 081ca87857
commit ddc706e201
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
32 changed files with 385 additions and 231 deletions

View File

@ -1,8 +1,8 @@
import click import click
from mobilizon_reshare.event.event import MobilizonEvent
from mobilizon_reshare.models.event import Event from mobilizon_reshare.models.event import Event
from mobilizon_reshare.publishers.platforms.platform_mapping import get_formatter_class from mobilizon_reshare.publishers.platforms.platform_mapping import get_formatter_class
from mobilizon_reshare.storage.query.converter import event_from_model
async def format_event(event_id, publisher_name: str): async def format_event(event_id, publisher_name: str):
@ -12,6 +12,6 @@ async def format_event(event_id, publisher_name: str):
if not event: if not event:
click.echo(f"Event with mobilizon_id {event_id} not found.") click.echo(f"Event with mobilizon_id {event_id} not found.")
return return
event = event_from_model(event) event = MobilizonEvent.from_model(event)
message = get_formatter_class(publisher_name)().get_message_from_event(event) message = get_formatter_class(publisher_name)().get_message_from_event(event)
click.echo(message) click.echo(message)

View File

@ -10,7 +10,7 @@ from mobilizon_reshare.event.event_selection_strategies import select_unpublishe
from mobilizon_reshare.storage.query.read import ( from mobilizon_reshare.storage.query.read import (
get_published_events, get_published_events,
events_with_status, events_with_status,
get_all_events, get_all_mobilizon_events,
events_without_publications, events_without_publications,
) )
@ -51,7 +51,7 @@ async def list_events(
frm = Arrow.fromdatetime(frm) if frm else None frm = Arrow.fromdatetime(frm) if frm else None
to = Arrow.fromdatetime(to) if to else None to = Arrow.fromdatetime(to) if to else None
if status is None: if status is None:
events = await get_all_events(from_date=frm, to_date=to) events = await get_all_mobilizon_events(from_date=frm, to_date=to)
elif status == EventPublicationStatus.WAITING: elif status == EventPublicationStatus.WAITING:
events = await list_unpublished_events(frm=frm, to=to) events = await list_unpublished_events(frm=frm, to=to)
else: else:

View File

@ -7,6 +7,8 @@ import arrow
from jinja2 import Template from jinja2 import Template
from mobilizon_reshare.config.config import get_settings from mobilizon_reshare.config.config import get_settings
from mobilizon_reshare.models.event import Event
from mobilizon_reshare.models.publication import PublicationStatus, Publication
class EventPublicationStatus(IntEnum): class EventPublicationStatus(IntEnum):
@ -50,3 +52,70 @@ class MobilizonEvent:
def format(self, pattern: Template) -> str: def format(self, pattern: Template) -> str:
return self._fill_template(pattern) return self._fill_template(pattern)
@classmethod
def from_model(cls, event: Event):
    """Build a MobilizonEvent from its tortoise ``Event`` model.

    Stored datetimes are converted to local-timezone arrow objects;
    per-publisher publication times are collected only once the event
    has left the WAITING state.
    """
    status = cls._compute_event_status(list(event.publications))
    publication_time = {}
    # The WAITING check does not depend on the loop variable, so it is
    # hoisted out of the loop; behavior is unchanged because the loop
    # body was a no-op while the event was still WAITING.
    if status != EventPublicationStatus.WAITING:
        for publication in event.publications:
            assert publication.timestamp is not None
            publication_time[publication.publisher.name] = arrow.get(
                publication.timestamp
            ).to("local")
    return cls(
        name=event.name,
        description=event.description,
        begin_datetime=arrow.get(event.begin_datetime).to("local"),
        end_datetime=arrow.get(event.end_datetime).to("local"),
        mobilizon_link=event.mobilizon_link,
        mobilizon_id=event.mobilizon_id,
        thumbnail_link=event.thumbnail_link,
        location=event.location,
        publication_time=publication_time,
        status=status,
        last_update_time=arrow.get(event.last_update_time).to("local"),
    )
def to_model(self, db_id: Optional[UUID] = None) -> Event:
    """Convert this MobilizonEvent into a tortoise ``Event`` model.

    :param db_id: when given, the resulting model reuses this primary
        key so saving it updates an existing row instead of inserting.
    """

    def _as_datetime(moment):
        # Arrow -> datetime in the same timezone, as the ORM expects.
        return moment.astimezone(moment.tzinfo)

    kwargs = dict(
        name=self.name,
        description=self.description,
        mobilizon_id=self.mobilizon_id,
        mobilizon_link=self.mobilizon_link,
        thumbnail_link=self.thumbnail_link,
        location=self.location,
        begin_datetime=_as_datetime(self.begin_datetime),
        end_datetime=_as_datetime(self.end_datetime),
        last_update_time=_as_datetime(self.last_update_time),
    )
    if db_id is not None:
        kwargs["id"] = db_id
    return Event(**kwargs)
@staticmethod
def _compute_event_status(
    publications: list[Publication],
) -> EventPublicationStatus:
    """Derive the event-level status from its publications.

    - no publications at all -> WAITING
    - a mix of COMPLETED and FAILED -> PARTIAL
    - one uniform status -> the EventPublicationStatus of the same name
    - any other combination is a programming error -> ValueError
    """
    if not publications:
        return EventPublicationStatus.WAITING
    unique_statuses: set[PublicationStatus] = {
        publication.status for publication in publications
    }
    if unique_statuses == {PublicationStatus.COMPLETED, PublicationStatus.FAILED}:
        return EventPublicationStatus.PARTIAL
    if len(unique_statuses) == 1:
        return EventPublicationStatus[unique_statuses.pop().name]
    raise ValueError(f"Illegal combination of PublicationStatus: {unique_statuses}")

View File

@ -0,0 +1,7 @@
from tortoise.contrib.pydantic import pydantic_model_creator
class WithPydantic:
    """Mixin for tortoise models that can emit a pydantic schema of themselves."""

    @classmethod
    def to_pydantic(cls):
        # Derive the pydantic model straight from the tortoise model definition.
        schema = pydantic_model_creator(cls)
        return schema

View File

@ -1,11 +1,12 @@
from tortoise import fields from tortoise import fields
from tortoise.models import Model from tortoise.models import Model
from mobilizon_reshare.models import WithPydantic
from mobilizon_reshare.models.publication import PublicationStatus, Publication from mobilizon_reshare.models.publication import PublicationStatus, Publication
from mobilizon_reshare.models.publisher import Publisher from mobilizon_reshare.models.publisher import Publisher
class Event(Model): class Event(Model, WithPydantic):
id = fields.UUIDField(pk=True) id = fields.UUIDField(pk=True)
name = fields.TextField() name = fields.TextField()
description = fields.TextField(null=True) description = fields.TextField(null=True)

View File

@ -17,9 +17,7 @@ class Notification(Model):
message = fields.TextField() message = fields.TextField()
target = fields.ForeignKeyField( target = fields.ForeignKeyField("models.Publisher", null=True, related_name=False,)
"models.Publisher", related_name="notifications", null=True
)
publication = fields.ForeignKeyField( publication = fields.ForeignKeyField(
"models.Publication", related_name="notifications", null=True "models.Publication", related_name="notifications", null=True

View File

@ -3,13 +3,15 @@ from enum import IntEnum
from tortoise import fields from tortoise import fields
from tortoise.models import Model from tortoise.models import Model
from mobilizon_reshare.models import WithPydantic
class PublicationStatus(IntEnum): class PublicationStatus(IntEnum):
FAILED = 0 FAILED = 0
COMPLETED = 1 COMPLETED = 1
class Publication(Model): class Publication(Model, WithPydantic):
id = fields.UUIDField(pk=True) id = fields.UUIDField(pk=True)
status = fields.IntEnumField(PublicationStatus) status = fields.IntEnumField(PublicationStatus)

View File

@ -11,6 +11,7 @@ from jinja2 import Environment, FileSystemLoader, Template
from mobilizon_reshare.config.config import get_settings from mobilizon_reshare.config.config import get_settings
from mobilizon_reshare.event.event import MobilizonEvent from mobilizon_reshare.event.event import MobilizonEvent
from .exceptions import InvalidAttribute from .exceptions import InvalidAttribute
from ..models.publication import Publication
JINJA_ENV = Environment(loader=FileSystemLoader("/")) JINJA_ENV = Environment(loader=FileSystemLoader("/"))
@ -188,6 +189,18 @@ class EventPublication(BasePublication):
event: MobilizonEvent event: MobilizonEvent
id: UUID id: UUID
@classmethod
def from_orm(cls, model: Publication, event: MobilizonEvent):
    """Build an EventPublication from its ORM model and its event."""
    # imported here to avoid circular dependencies
    from mobilizon_reshare.publishers.platforms.platform_mapping import (
        get_publisher_class,
        get_formatter_class,
    )

    publisher_name = model.publisher.name
    return cls(
        get_publisher_class(publisher_name)(),
        get_formatter_class(publisher_name)(),
        event,
        model.id,
    )
@dataclass @dataclass
class RecapPublication(BasePublication): class RecapPublication(BasePublication):

View File

@ -35,11 +35,13 @@ class TelegramFormatter(AbstractEventFormatter):
_conf = ("publisher", "telegram") _conf = ("publisher", "telegram")
def _validate_event(self, event: MobilizonEvent) -> None: def _validate_event(self, event: MobilizonEvent) -> None:
description = event.description description = event.description
if not (description and description.strip()): if not (description and description.strip()):
self._log_error("No description was found", raise_error=InvalidEvent) self._log_error("No description was found", raise_error=InvalidEvent)
def _validate_message(self, message: str) -> None: def _validate_message(self, message: str) -> None:
if ( if (
len("".join(BeautifulSoup(message, "html.parser").findAll(text=True))) len("".join(BeautifulSoup(message, "html.parser").findAll(text=True)))
>= 4096 >= 4096
@ -74,7 +76,8 @@ class TelegramFormatter(AbstractEventFormatter):
tag.unwrap() tag.unwrap()
# cleaning html trailing whitespace # cleaning html trailing whitespace
for tag in html.findAll("a"): for tag in html.findAll("a"):
tag["href"] = tag["href"].replace(" ", "").strip().lstrip() if "href" in tag:
tag["href"] = tag["href"].replace(" ", "").strip().lstrip()
s = str(html) s = str(html)
return re.sub(r"\n{2,}", "\n\n", s).strip() # remove multiple newlines return re.sub(r"\n{2,}", "\n\n", s).strip() # remove multiple newlines
@ -103,7 +106,6 @@ class TelegramPlatform(AbstractPlatform):
def _validate_response(self, res): def _validate_response(self, res):
try: try:
res.raise_for_status() res.raise_for_status()
except requests.exceptions.HTTPError as e: except requests.exceptions.HTTPError as e:
self._log_error( self._log_error(
@ -113,6 +115,7 @@ class TelegramPlatform(AbstractPlatform):
try: try:
data = res.json() data = res.json()
except Exception as e: except Exception as e:
self._log_error( self._log_error(
f"Server returned invalid json data: {str(e)}", f"Server returned invalid json data: {str(e)}",

View File

@ -65,8 +65,12 @@ class MoReDB:
logging.info("Updated database to latest version") logging.info("Updated database to latest version")
async def setup(self): async def setup(self):
tortoise_config = get_tortoise_orm()
Tortoise.init_models(
tortoise_config["apps"]["models"]["models"], "models", _init_relations=True
)
await self._implement_db_changes() await self._implement_db_changes()
await Tortoise.init(config=get_tortoise_orm(),) await Tortoise.init(config=tortoise_config)
await Tortoise.generate_schemas() await Tortoise.generate_schemas()
await update_publishers(publisher_names) await update_publishers(publisher_names)

View File

@ -1,82 +0,0 @@
from typing import Optional
from uuid import UUID
import arrow
import tortoise.timezone
from mobilizon_reshare.event.event import EventPublicationStatus, MobilizonEvent
from mobilizon_reshare.models.event import Event
from mobilizon_reshare.models.publication import Publication, PublicationStatus
from mobilizon_reshare.publishers.abstract import EventPublication
def event_from_model(event: Event):
    """Convert a tortoise ``Event`` model into a MobilizonEvent."""
    status = compute_event_status(list(event.publications))
    publication_time = {}
    # The WAITING test is loop-invariant, so it is hoisted; the loop
    # body did nothing while the event was still WAITING.
    if status != EventPublicationStatus.WAITING:
        for publication in event.publications:
            assert publication.timestamp is not None
            publication_time[publication.publisher.name] = arrow.get(
                publication.timestamp
            ).to("local")
    return MobilizonEvent(
        name=event.name,
        description=event.description,
        begin_datetime=arrow.get(event.begin_datetime).to("local"),
        end_datetime=arrow.get(event.end_datetime).to("local"),
        mobilizon_link=event.mobilizon_link,
        mobilizon_id=event.mobilizon_id,
        thumbnail_link=event.thumbnail_link,
        location=event.location,
        publication_time=publication_time,
        status=status,
        last_update_time=arrow.get(event.last_update_time).to("local"),
    )
def event_to_model(event: MobilizonEvent, db_id: Optional[UUID] = None) -> Event:
    """Convert a MobilizonEvent into a tortoise ``Event`` model.

    :param db_id: optional primary key to reuse, so that saving the
        result updates an existing row rather than inserting a new one.
    """

    def _as_datetime(moment):
        # Arrow -> datetime in the same timezone, as the ORM expects.
        return moment.astimezone(moment.tzinfo)

    kwargs = dict(
        name=event.name,
        description=event.description,
        mobilizon_id=event.mobilizon_id,
        mobilizon_link=event.mobilizon_link,
        thumbnail_link=event.thumbnail_link,
        location=event.location,
        begin_datetime=_as_datetime(event.begin_datetime),
        end_datetime=_as_datetime(event.end_datetime),
        last_update_time=_as_datetime(event.last_update_time),
    )
    if db_id is not None:
        kwargs["id"] = db_id
    return Event(**kwargs)
def compute_event_status(publications: list[Publication]) -> EventPublicationStatus:
    """Derive an event-level status from its publications.

    No publications -> WAITING; COMPLETED+FAILED mix -> PARTIAL; one
    uniform status maps to the EventPublicationStatus of the same name;
    any other combination raises ValueError.
    """
    if not publications:
        return EventPublicationStatus.WAITING
    unique_statuses: set[PublicationStatus] = {
        publication.status for publication in publications
    }
    if unique_statuses == {PublicationStatus.COMPLETED, PublicationStatus.FAILED}:
        return EventPublicationStatus.PARTIAL
    if len(unique_statuses) == 1:
        return EventPublicationStatus[unique_statuses.pop().name]
    raise ValueError(f"Illegal combination of PublicationStatus: {unique_statuses}")
def publication_from_orm(model: Publication, event: MobilizonEvent) -> EventPublication:
    """Build an EventPublication from its ORM model and its event."""
    # imported here to avoid circular dependencies
    from mobilizon_reshare.publishers.platforms.platform_mapping import (
        get_publisher_class,
        get_formatter_class,
    )

    publisher_name = model.publisher.name
    return EventPublication(
        get_publisher_class(publisher_name)(),
        get_formatter_class(publisher_name)(),
        event,
        model.id,
    )

View File

@ -12,11 +12,7 @@ from mobilizon_reshare.models.event import Event
from mobilizon_reshare.models.publication import Publication, PublicationStatus from mobilizon_reshare.models.publication import Publication, PublicationStatus
from mobilizon_reshare.models.publisher import Publisher from mobilizon_reshare.models.publisher import Publisher
from mobilizon_reshare.publishers.abstract import EventPublication from mobilizon_reshare.publishers.abstract import EventPublication
from mobilizon_reshare.storage.query.converter import (
event_from_model,
compute_event_status,
publication_from_orm,
)
from mobilizon_reshare.storage.query.exceptions import EventNotFound from mobilizon_reshare.storage.query.exceptions import EventNotFound
@ -46,13 +42,13 @@ async def events_with_status(
def _filter_event_with_status(event: Event) -> bool: def _filter_event_with_status(event: Event) -> bool:
# This computes the status client-side instead of running in the DB. It shouldn't pose a performance problem # This computes the status client-side instead of running in the DB. It shouldn't pose a performance problem
# in the short term, but should be moved to the query if possible. # in the short term, but should be moved to the query if possible.
event_status = compute_event_status(list(event.publications)) event_status = MobilizonEvent._compute_event_status(list(event.publications))
return event_status in status return event_status in status
query = Event.all() query = Event.all()
return map( return map(
event_from_model, MobilizonEvent.from_model,
filter( filter(
_filter_event_with_status, _filter_event_with_status,
await prefetch_event_relations( await prefetch_event_relations(
@ -70,15 +66,18 @@ async def get_all_publications(
) )
async def get_all_events( async def get_all_mobilizon_events(
from_date: Optional[Arrow] = None, to_date: Optional[Arrow] = None, from_date: Optional[Arrow] = None, to_date: Optional[Arrow] = None,
) -> list[MobilizonEvent]: ) -> list[MobilizonEvent]:
return [ return [MobilizonEvent.from_model(event) for event in await get_all_events()]
event_from_model(event)
for event in await prefetch_event_relations(
_add_date_window(Event.all(), "begin_datetime", from_date, to_date) async def get_all_events(
) from_date: Optional[Arrow] = None, to_date: Optional[Arrow] = None
] ):
return await prefetch_event_relations(
_add_date_window(Event.all(), "begin_datetime", from_date, to_date)
)
async def get_all_publishers() -> list[Publisher]: async def get_all_publishers() -> list[Publisher]:
@ -137,7 +136,7 @@ async def events_without_publications(
events = await prefetch_event_relations( events = await prefetch_event_relations(
_add_date_window(query, "begin_datetime", from_date, to_date) _add_date_window(query, "begin_datetime", from_date, to_date)
) )
return [event_from_model(event) for event in events] return [MobilizonEvent.from_model(event) for event in events]
async def get_event(event_mobilizon_id: UUID) -> Event: async def get_event(event_mobilizon_id: UUID) -> Event:
@ -154,11 +153,11 @@ async def get_event_publications(
mobilizon_event: MobilizonEvent, mobilizon_event: MobilizonEvent,
) -> list[EventPublication]: ) -> list[EventPublication]:
event = await get_event(mobilizon_event.mobilizon_id) event = await get_event(mobilizon_event.mobilizon_id)
return [publication_from_orm(p, mobilizon_event) for p in event.publications] return [EventPublication.from_orm(p, mobilizon_event) for p in event.publications]
async def get_mobilizon_event(event_mobilizon_id: UUID) -> MobilizonEvent: async def get_mobilizon_event(event_mobilizon_id: UUID) -> MobilizonEvent:
return event_from_model(await get_event(event_mobilizon_id)) return MobilizonEvent.from_model(await get_event(event_mobilizon_id))
async def get_publisher_by_name(name) -> Publisher: async def get_publisher_by_name(name) -> Publisher:
@ -182,7 +181,7 @@ async def build_publications(
await event_model.build_publication_by_publisher_name(name) await event_model.build_publication_by_publisher_name(name)
for name in publishers for name in publishers
] ]
return [publication_from_orm(m, event) for m in models] return [EventPublication.from_orm(m, event) for m in models]
@atomic() @atomic()
@ -198,9 +197,12 @@ async def get_failed_publications_for_event(
) )
for p in failed_publications: for p in failed_publications:
await p.fetch_related("publisher") await p.fetch_related("publisher")
mobilizon_event = event_from_model(event) mobilizon_event = MobilizonEvent.from_model(event)
return list( return list(
map(partial(publication_from_orm, event=mobilizon_event), failed_publications) map(
partial(EventPublication.from_orm, event=mobilizon_event),
failed_publications,
)
) )
@ -212,8 +214,8 @@ async def get_publication(publication_id: UUID):
) )
# TODO: this is redundant but there's some prefetch problem otherwise # TODO: this is redundant but there's some prefetch problem otherwise
publication.event = await get_event(publication.event.mobilizon_id) publication.event = await get_event(publication.event.mobilizon_id)
return publication_from_orm( return EventPublication.from_orm(
event=event_from_model(publication.event), model=publication event=MobilizonEvent.from_model(publication.event), model=publication
) )
except DoesNotExist: except DoesNotExist:
return None return None

View File

@ -11,7 +11,6 @@ from mobilizon_reshare.models.publisher import Publisher
from mobilizon_reshare.publishers.coordinators.event_publishing.publish import ( from mobilizon_reshare.publishers.coordinators.event_publishing.publish import (
PublisherCoordinatorReport, PublisherCoordinatorReport,
) )
from mobilizon_reshare.storage.query.converter import event_to_model
from mobilizon_reshare.storage.query.read import ( from mobilizon_reshare.storage.query.read import (
events_without_publications, events_without_publications,
is_known, is_known,
@ -79,14 +78,12 @@ async def create_unpublished_events(
for event in events_from_mobilizon: for event in events_from_mobilizon:
if not await is_known(event): if not await is_known(event):
# Either an event is unknown # Either an event is unknown
await event_to_model(event).save() await event.to_model().save()
else: else:
# Or it's known and changed # Or it's known and changed
event_model = await get_event(event.mobilizon_id) event_model = await get_event(event.mobilizon_id)
if event.last_update_time > event_model.last_update_time: if event.last_update_time > event_model.last_update_time:
await event_to_model(event=event, db_id=event_model.id).save( await event.to_model(db_id=event_model.id).save(force_update=True)
force_update=True
)
# Or it's known and unchanged, in which case we do nothing. # Or it's known and unchanged, in which case we do nothing.
return await events_without_publications() return await events_without_publications()

View File

@ -0,0 +1,10 @@
from fastapi_pagination import Page
from fastapi_pagination.ext.tortoise import paginate
from mobilizon_reshare.models.event import Event
def register_endpoints(app):
    """Attach the paginated /events listing route to *app*."""
    # Response schema: a pagination page of the pydantic view of Event.
    event_page = Page[Event.to_pydantic()]

    @app.get("/events", status_code=200, response_model=event_page)
    async def get_events():
        return await paginate(Event, prefetch_related=True)

View File

@ -1,15 +1,17 @@
import logging import logging
from fastapi import FastAPI from fastapi import FastAPI
from tortoise.contrib.pydantic import pydantic_model_creator from fastapi_pagination import add_pagination
from mobilizon_reshare.models.event import Event
from mobilizon_reshare.storage.db import init as init_db, get_db_url from mobilizon_reshare.storage.db import init as init_db, get_db_url
from mobilizon_reshare.web.backend.events.endpoints import (
register_endpoints as register_event_endpoints,
)
from mobilizon_reshare.web.backend.publications.endpoints import (
register_endpoints as register_publication_endpoints,
)
app = FastAPI() app = FastAPI()
event_pydantic = pydantic_model_creator(Event)
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -23,15 +25,20 @@ def check_database():
def register_endpoints(app): def register_endpoints(app):
@app.get("/events", status_code=200)
async def get_event():
return await event_pydantic.from_queryset(Event.all()) register_event_endpoints(app)
register_publication_endpoints(app)
def init_endpoints(app):
register_endpoints(app)
add_pagination(app)
@app.on_event("startup") @app.on_event("startup")
async def init_app(init_logging=True): async def init_app(init_logging=True):
check_database() check_database()
await init_db(init_logging=init_logging) await init_db(init_logging=init_logging)
register_endpoints(app) init_endpoints(app)
return app return app

View File

@ -0,0 +1,12 @@
from fastapi_pagination import Page
from fastapi_pagination.ext.tortoise import paginate
from mobilizon_reshare.models.publication import Publication
def register_endpoints(app):
    """Attach the paginated /publications listing route to *app*."""
    # Response schema: a pagination page of the pydantic view of Publication.
    publication_page = Page[Publication.to_pydantic()]

    @app.get("/publications", status_code=200, response_model=publication_page)
    async def get_publications():
        return await paginate(Publication, prefetch_related=True)

181
poetry.lock generated
View File

@ -14,8 +14,8 @@ tomlkit = "*"
tortoise-orm = "*" tortoise-orm = "*"
[package.extras] [package.extras]
asyncmy = ["asyncmy"]
asyncpg = ["asyncpg"] asyncpg = ["asyncpg"]
asyncmy = ["asyncmy"]
[[package]] [[package]]
name = "aiosqlite" name = "aiosqlite"
@ -154,7 +154,7 @@ lxml = ["lxml"]
[[package]] [[package]]
name = "certifi" name = "certifi"
version = "2022.9.24" version = "2022.12.7"
description = "Python package for providing Mozilla's CA Bundle." description = "Python package for providing Mozilla's CA Bundle."
category = "main" category = "main"
optional = false optional = false
@ -221,10 +221,10 @@ optional = false
python-versions = "*" python-versions = "*"
[package.extras] [package.extras]
tests = ["pytest-pydocstyle (>=2.2.0)", "pytest-pycodestyle (>=2.2.0)", "pytest (>=6)", "pytest-pydocstyle (>=2)", "pytest-pycodestyle (>=2)", "pytest (==5.4.3)", "tox (>=3.7.0)", "sphinx (>=3)", "pytest-isort (>=1.2.0)", "pytest-cov (>=2.10.1)", "mock (>=1.3.0)", "check-manifest (>=0.42)"] all = ["Sphinx (>=3)", "sphinx-rtd-theme (>=0.2)", "check-manifest (>=0.42)", "mock (>=1.3.0)", "pytest-cov (>=2.10.1)", "pytest-isort (>=1.2.0)", "sphinx (>=3)", "tox (>=3.7.0)", "numpy (>=1.13.0)", "numpy (>=1.15.0)", "numpy (>=1.18.0)", "pytest (==5.4.3)", "pytest-pycodestyle (>=2)", "pytest-pydocstyle (>=2)", "pytest (>=6)", "pytest-pycodestyle (>=2.2.0)", "pytest-pydocstyle (>=2.2.0)", "numpy (>=1.20.0)"]
numpy = ["numpy (>=1.20.0)", "numpy (>=1.18.0)", "numpy (>=1.15.0)", "numpy (>=1.13.0)"] docs = ["Sphinx (>=3)", "sphinx-rtd-theme (>=0.2)"]
docs = ["sphinx-rtd-theme (>=0.2)", "Sphinx (>=3)"] numpy = ["numpy (>=1.13.0)", "numpy (>=1.15.0)", "numpy (>=1.18.0)", "numpy (>=1.20.0)"]
all = ["numpy (>=1.20.0)", "pytest-pydocstyle (>=2.2.0)", "pytest-pycodestyle (>=2.2.0)", "pytest (>=6)", "pytest-pydocstyle (>=2)", "pytest-pycodestyle (>=2)", "pytest (==5.4.3)", "numpy (>=1.18.0)", "numpy (>=1.15.0)", "numpy (>=1.13.0)", "tox (>=3.7.0)", "sphinx (>=3)", "pytest-isort (>=1.2.0)", "pytest-cov (>=2.10.1)", "mock (>=1.3.0)", "check-manifest (>=0.42)", "sphinx-rtd-theme (>=0.2)", "Sphinx (>=3)"] tests = ["check-manifest (>=0.42)", "mock (>=1.3.0)", "pytest-cov (>=2.10.1)", "pytest-isort (>=1.2.0)", "sphinx (>=3)", "tox (>=3.7.0)", "pytest (==5.4.3)", "pytest-pycodestyle (>=2)", "pytest-pydocstyle (>=2)", "pytest (>=6)", "pytest-pycodestyle (>=2.2.0)", "pytest-pydocstyle (>=2.2.0)"]
[[package]] [[package]]
name = "docutils" name = "docutils"
@ -281,26 +281,54 @@ dev = ["autoflake (>=1.4.0,<2.0.0)", "flake8 (>=3.8.3,<6.0.0)", "pre-commit (>=2
doc = ["mdx-include (>=1.4.1,<2.0.0)", "mkdocs-markdownextradata-plugin (>=0.1.7,<0.3.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "pyyaml (>=5.3.1,<7.0.0)", "typer[all] (>=0.6.1,<0.7.0)"] doc = ["mdx-include (>=1.4.1,<2.0.0)", "mkdocs-markdownextradata-plugin (>=0.1.7,<0.3.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "pyyaml (>=5.3.1,<7.0.0)", "typer[all] (>=0.6.1,<0.7.0)"]
test = ["anyio[trio] (>=3.2.1,<4.0.0)", "black (==22.8.0)", "databases[sqlite] (>=0.3.2,<0.7.0)", "email-validator (>=1.1.1,<2.0.0)", "flake8 (>=3.8.3,<6.0.0)", "flask (>=1.1.2,<3.0.0)", "httpx (>=0.23.0,<0.24.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.982)", "orjson (>=3.2.1,<4.0.0)", "passlib[bcrypt] (>=1.7.2,<2.0.0)", "peewee (>=3.13.3,<4.0.0)", "pytest-cov (>=2.12.0,<5.0.0)", "pytest (>=7.1.3,<8.0.0)", "python-jose[cryptography] (>=3.3.0,<4.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "pyyaml (>=5.3.1,<7.0.0)", "requests (>=2.24.0,<3.0.0)", "sqlalchemy (>=1.3.18,<=1.4.41)", "types-orjson (==3.6.2)", "types-ujson (==5.5.0)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0)"] test = ["anyio[trio] (>=3.2.1,<4.0.0)", "black (==22.8.0)", "databases[sqlite] (>=0.3.2,<0.7.0)", "email-validator (>=1.1.1,<2.0.0)", "flake8 (>=3.8.3,<6.0.0)", "flask (>=1.1.2,<3.0.0)", "httpx (>=0.23.0,<0.24.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.982)", "orjson (>=3.2.1,<4.0.0)", "passlib[bcrypt] (>=1.7.2,<2.0.0)", "peewee (>=3.13.3,<4.0.0)", "pytest-cov (>=2.12.0,<5.0.0)", "pytest (>=7.1.3,<8.0.0)", "python-jose[cryptography] (>=3.3.0,<4.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "pyyaml (>=5.3.1,<7.0.0)", "requests (>=2.24.0,<3.0.0)", "sqlalchemy (>=1.3.18,<=1.4.41)", "types-orjson (==3.6.2)", "types-ujson (==5.5.0)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0)"]
[[package]]
name = "fastapi-pagination"
version = "0.11.0"
description = "FastAPI pagination"
category = "main"
optional = false
python-versions = ">=3.8,<4.0"
[package.dependencies]
fastapi = ">=0.80.0"
pydantic = ">=1.9.1"
[package.extras]
sqlalchemy = ["SQLAlchemy (>=1.3.20)", "sqlakeyset (>=1.0.1659142803,<2.0.0)"]
asyncpg = ["SQLAlchemy (>=1.3.20)", "asyncpg (>=0.24.0)"]
all = ["SQLAlchemy (>=1.3.20)", "databases (>=0.6.0)", "orm (>=0.3.1)", "tortoise-orm (>=0.16.18,<0.20.0)", "asyncpg (>=0.24.0)", "ormar (>=0.11.2)", "django (<5.0.0)", "piccolo (>=0.89,<0.98)", "motor (>=2.5.1,<4.0.0)", "mongoengine (>=0.23.1,<0.25.0)", "sqlmodel (>=0.0.8,<0.0.9)", "pony (>=0.7.16,<0.8.0)", "beanie (>=1.11.9,<2.0.0)", "sqlakeyset (>=1.0.1659142803,<2.0.0)", "scylla-driver (>=3.25.6,<4.0.0)"]
databases = ["databases (>=0.6.0)"]
orm = ["databases (>=0.6.0)", "orm (>=0.3.1)"]
django = ["databases (>=0.6.0)", "django (<5.0.0)"]
tortoise = ["tortoise-orm (>=0.16.18,<0.20.0)"]
ormar = ["ormar (>=0.11.2)"]
piccolo = ["piccolo (>=0.89,<0.98)"]
motor = ["motor (>=2.5.1,<4.0.0)"]
mongoengine = ["mongoengine (>=0.23.1,<0.25.0)"]
sqlmodel = ["sqlmodel (>=0.0.8,<0.0.9)", "sqlakeyset (>=1.0.1659142803,<2.0.0)"]
beanie = ["beanie (>=1.11.9,<2.0.0)"]
scylla-driver = ["scylla-driver (>=3.25.6,<4.0.0)"]
[[package]] [[package]]
name = "h11" name = "h11"
version = "0.12.0" version = "0.14.0"
description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
category = "main" category = "main"
optional = false optional = false
python-versions = ">=3.6" python-versions = ">=3.7"
[[package]] [[package]]
name = "httpcore" name = "httpcore"
version = "0.15.0" version = "0.16.2"
description = "A minimal low-level HTTP client." description = "A minimal low-level HTTP client."
category = "dev" category = "dev"
optional = false optional = false
python-versions = ">=3.7" python-versions = ">=3.7"
[package.dependencies] [package.dependencies]
anyio = ">=3.0.0,<4.0.0" anyio = ">=3.0,<5.0"
certifi = "*" certifi = "*"
h11 = ">=0.11,<0.13" h11 = ">=0.13,<0.15"
sniffio = ">=1.0.0,<2.0.0" sniffio = ">=1.0.0,<2.0.0"
[package.extras] [package.extras]
@ -309,7 +337,7 @@ socks = ["socksio (>=1.0.0,<2.0.0)"]
[[package]] [[package]]
name = "httpx" name = "httpx"
version = "0.23.0" version = "0.23.1"
description = "The next generation HTTP client." description = "The next generation HTTP client."
category = "dev" category = "dev"
optional = false optional = false
@ -317,13 +345,13 @@ python-versions = ">=3.7"
[package.dependencies] [package.dependencies]
certifi = "*" certifi = "*"
httpcore = ">=0.15.0,<0.16.0" httpcore = ">=0.15.0,<0.17.0"
rfc3986 = {version = ">=1.3,<2", extras = ["idna2008"]} rfc3986 = {version = ">=1.3,<2", extras = ["idna2008"]}
sniffio = "*" sniffio = "*"
[package.extras] [package.extras]
brotli = ["brotlicffi", "brotli"] brotli = ["brotli", "brotlicffi"]
cli = ["click (>=8.0.0,<9.0.0)", "rich (>=10,<13)", "pygments (>=2.0.0,<3.0.0)"] cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<13)"]
http2 = ["h2 (>=3,<5)"] http2 = ["h2 (>=3,<5)"]
socks = ["socksio (>=1.0.0,<2.0.0)"] socks = ["socksio (>=1.0.0,<2.0.0)"]
@ -345,7 +373,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[[package]] [[package]]
name = "importlib-metadata" name = "importlib-metadata"
version = "5.0.0" version = "5.1.0"
description = "Read metadata from Python packages" description = "Read metadata from Python packages"
category = "dev" category = "dev"
optional = false optional = false
@ -357,7 +385,7 @@ zipp = ">=0.5"
[package.extras] [package.extras]
docs = ["sphinx (>=3.5)", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "furo", "jaraco.tidelift (>=1.4)"] docs = ["sphinx (>=3.5)", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "furo", "jaraco.tidelift (>=1.4)"]
perf = ["ipython"] perf = ["ipython"]
testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "flake8 (<5)", "pytest-cov", "pytest-enabler (>=1.3)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "importlib-resources (>=1.3)"] testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "flake8 (<5)", "pytest-cov", "pytest-enabler (>=1.3)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "pytest-flake8", "importlib-resources (>=1.3)"]
[[package]] [[package]]
name = "iniconfig" name = "iniconfig"
@ -456,8 +484,8 @@ optional = false
python-versions = ">=3.6" python-versions = ">=3.6"
[package.extras] [package.extras]
testing = ["pytest-benchmark", "pytest"] dev = ["pre-commit", "tox"]
dev = ["tox", "pre-commit"] testing = ["pytest", "pytest-benchmark"]
[[package]] [[package]]
name = "pockets" name = "pockets"
@ -597,11 +625,11 @@ six = ">=1.5"
[[package]] [[package]]
name = "python-slugify" name = "python-slugify"
version = "6.1.2" version = "7.0.0"
description = "A Python slugify application that also handles Unicode" description = "A Python slugify application that also handles Unicode"
category = "dev" category = "dev"
optional = false optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" python-versions = ">=3.7"
[package.dependencies] [package.dependencies]
text-unidecode = ">=1.3" text-unidecode = ">=1.3"
@ -967,11 +995,11 @@ python-versions = ">=3.5"
[[package]] [[package]]
name = "urllib3" name = "urllib3"
version = "1.26.12" version = "1.26.13"
description = "HTTP library with thread-safe connection pooling, file post, and more." description = "HTTP library with thread-safe connection pooling, file post, and more."
category = "main" category = "main"
optional = false optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
[package.extras] [package.extras]
brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"]
@ -996,7 +1024,7 @@ standard = ["websockets (>=10.0)", "httptools (>=0.4.0)", "watchgod (>=0.6)", "p
[[package]] [[package]]
name = "zipp" name = "zipp"
version = "3.10.0" version = "3.11.0"
description = "Backport of pathlib-compatible object wrapper for zip files" description = "Backport of pathlib-compatible object wrapper for zip files"
category = "dev" category = "dev"
optional = false optional = false
@ -1004,71 +1032,123 @@ python-versions = ">=3.7"
[package.extras] [package.extras]
docs = ["sphinx (>=3.5)", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "furo", "jaraco.tidelift (>=1.4)"] docs = ["sphinx (>=3.5)", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "furo", "jaraco.tidelift (>=1.4)"]
testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "flake8 (<5)", "pytest-cov", "pytest-enabler (>=1.3)", "jaraco.itertools", "func-timeout", "jaraco.functools", "more-itertools", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"] testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "flake8 (<5)", "pytest-cov", "pytest-enabler (>=1.3)", "jaraco.itertools", "func-timeout", "jaraco.functools", "more-itertools", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "pytest-flake8"]
[metadata] [metadata]
lock-version = "1.1" lock-version = "1.1"
python-versions = "^3.9" python-versions = "^3.9"
content-hash = "2aa23f0b2cdc0fbe76d3a7430e3a8ce65bd037dd737d64c1c22a6b6db0d8e66b" content-hash = "cd97a84eb76bc864f6c1470619296845aef3480ed6f574803a0c9aa00e4db189"
[metadata.files] [metadata.files]
aerich = [] aerich = []
aiosqlite = [] aiosqlite = [
{file = "aiosqlite-0.17.0-py3-none-any.whl", hash = "sha256:6c49dc6d3405929b1d08eeccc72306d3677503cc5e5e43771efc1e00232e8231"},
{file = "aiosqlite-0.17.0.tar.gz", hash = "sha256:f0e6acc24bc4864149267ac82fb46dfb3be4455f99fe21df82609cc6e6baee51"},
]
alabaster = [] alabaster = []
anyio = [] anyio = []
appdirs = [] appdirs = [
arrow = [] {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"},
{file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"},
]
arrow = [
{file = "arrow-1.1.1-py3-none-any.whl", hash = "sha256:77a60a4db5766d900a2085ce9074c5c7b8e2c99afeaa98ad627637ff6f292510"},
{file = "arrow-1.1.1.tar.gz", hash = "sha256:dee7602f6c60e3ec510095b5e301441bc56288cb8f51def14dcb3079f623823a"},
]
asgiref = [] asgiref = []
asyncpg = [] asyncpg = []
asynctest = [] asynctest = [
{file = "asynctest-0.13.0-py3-none-any.whl", hash = "sha256:5da6118a7e6d6b54d83a8f7197769d046922a44d2a99c21382f0a6e4fadae676"},
{file = "asynctest-0.13.0.tar.gz", hash = "sha256:c27862842d15d83e6a34eb0b2866c323880eb3a75e4485b079ea11748fd77fac"},
]
atomicwrites = [] atomicwrites = []
attrs = [] attrs = []
babel = [] babel = []
beautifulsoup4 = [] beautifulsoup4 = [
{file = "beautifulsoup4-4.10.0-py3-none-any.whl", hash = "sha256:9a315ce70049920ea4572a4055bc4bd700c940521d36fc858205ad4fcde149bf"},
{file = "beautifulsoup4-4.10.0.tar.gz", hash = "sha256:c23ad23c521d818955a4151a67d81580319d4bf548d3d49f4223ae041ff98891"},
]
certifi = [] certifi = []
charset-normalizer = [] charset-normalizer = []
click = [] click = []
colorama = [] colorama = []
coverage = [] coverage = []
css-html-js-minify = [] css-html-js-minify = []
dictdiffer = [] dictdiffer = [
{file = "dictdiffer-0.9.0-py2.py3-none-any.whl", hash = "sha256:442bfc693cfcadaf46674575d2eba1c53b42f5e404218ca2c2ff549f2df56595"},
{file = "dictdiffer-0.9.0.tar.gz", hash = "sha256:17bacf5fbfe613ccf1b6d512bd766e6b21fb798822a133aa86098b8ac9997578"},
]
docutils = [] docutils = []
dynaconf = [] dynaconf = []
facebook-sdk = [] facebook-sdk = []
fastapi = [] fastapi = []
fastapi-pagination = []
h11 = [] h11 = []
httpcore = [] httpcore = []
httpx = [] httpx = []
idna = [] idna = []
imagesize = [] imagesize = []
importlib-metadata = [] importlib-metadata = []
iniconfig = [] iniconfig = [
{file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"},
{file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"},
]
iso8601 = [] iso8601 = []
jinja2 = [] jinja2 = []
lxml = [] lxml = []
markdownify = [] markdownify = []
markupsafe = [] markupsafe = []
oauthlib = [] oauthlib = []
packaging = [] packaging = [
pluggy = [] {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"},
{file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"},
]
pluggy = [
{file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"},
{file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"},
]
pockets = [] pockets = []
py = [] py = [
{file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"},
{file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"},
]
pydantic = [] pydantic = []
pygments = [] pygments = []
pyparsing = [] pyparsing = []
pypika-tortoise = [] pypika-tortoise = []
pytest = [] pytest = [
pytest-asyncio = [] {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"},
{file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"},
]
pytest-asyncio = [
{file = "pytest-asyncio-0.15.1.tar.gz", hash = "sha256:2564ceb9612bbd560d19ca4b41347b54e7835c2f792c504f698e05395ed63f6f"},
{file = "pytest_asyncio-0.15.1-py3-none-any.whl", hash = "sha256:3042bcdf1c5d978f6b74d96a151c4cfb9dcece65006198389ccd7e6c60eb1eea"},
]
pytest-cov = [] pytest-cov = []
pytest-lazy-fixture = [] pytest-lazy-fixture = [
python-dateutil = [] {file = "pytest-lazy-fixture-0.6.3.tar.gz", hash = "sha256:0e7d0c7f74ba33e6e80905e9bfd81f9d15ef9a790de97993e34213deb5ad10ac"},
{file = "pytest_lazy_fixture-0.6.3-py3-none-any.whl", hash = "sha256:e0b379f38299ff27a653f03eaa69b08a6fd4484e46fd1c9907d984b9f9daeda6"},
]
python-dateutil = [
{file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
{file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
]
python-slugify = [] python-slugify = []
pytz = [] pytz = []
requests = [] requests = []
requests-oauthlib = [] requests-oauthlib = []
responses = [] responses = [
rfc3986 = [] {file = "responses-0.13.4-py2.py3-none-any.whl", hash = "sha256:d8d0f655710c46fd3513b9202a7f0dcedd02ca0f8cf4976f27fa8ab5b81e656d"},
six = [] {file = "responses-0.13.4.tar.gz", hash = "sha256:9476775d856d3c24ae660bbebe29fb6d789d4ad16acd723efbfb6ee20990b899"},
]
rfc3986 = [
{file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"},
{file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"},
]
six = [
{file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
{file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
]
sniffio = [] sniffio = []
snowballstemmer = [] snowballstemmer = []
soupsieve = [] soupsieve = []
@ -1083,12 +1163,21 @@ sphinxcontrib-napoleon = []
sphinxcontrib-qthelp = [] sphinxcontrib-qthelp = []
sphinxcontrib-serializinghtml = [] sphinxcontrib-serializinghtml = []
starlette = [] starlette = []
text-unidecode = [] text-unidecode = [
toml = [] {file = "text-unidecode-1.3.tar.gz", hash = "sha256:bad6603bb14d279193107714b288be206cac565dfa49aa5b105294dd5c4aab93"},
{file = "text_unidecode-1.3-py2.py3-none-any.whl", hash = "sha256:1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8"},
]
toml = [
{file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
{file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
]
tomli = [] tomli = []
tomlkit = [] tomlkit = []
tortoise-orm = [] tortoise-orm = []
tweepy = [] tweepy = [
{file = "tweepy-4.4.0-py2.py3-none-any.whl", hash = "sha256:cf02c4fbbd027fbc7172c24d03f53f061329ac040b22d201e59592a1cff86364"},
{file = "tweepy-4.4.0.tar.gz", hash = "sha256:8d4b4520271b796fa7efc4c5d5ef3228af4d79f6a4d3ace3900b2778ed8f6f1c"},
]
typing-extensions = [] typing-extensions = []
unidecode = [] unidecode = []
urllib3 = [] urllib3 = []

View File

@ -25,6 +25,7 @@ facebook-sdk = "~3.1"
aerich = "~0.6" aerich = "~0.6"
fastapi = "~0.85" fastapi = "~0.85"
uvicorn = "~0.17" uvicorn = "~0.17"
fastapi-pagination = "^0.11.0"
[tool.poetry.dev-dependencies] [tool.poetry.dev-dependencies]
responses = "~0.13" responses = "~0.13"

View File

@ -7,13 +7,12 @@ from mobilizon_reshare.publishers.platforms.platform_mapping import (
get_formatter_class, get_formatter_class,
name_to_formatter_class, name_to_formatter_class,
) )
from mobilizon_reshare.storage.query.converter import event_to_model
@pytest.mark.parametrize("publisher_name", name_to_formatter_class.keys()) @pytest.mark.parametrize("publisher_name", name_to_formatter_class.keys())
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_format_event(runner, event, capsys, publisher_name): async def test_format_event(runner, event, capsys, publisher_name):
event_model = event_to_model(event) event_model = event.to_model()
await event_model.save() await event_model.save()
await format_event( await format_event(
event_id=str(event_model.mobilizon_id), publisher_name=publisher_name event_id=str(event_model.mobilizon_id), publisher_name=publisher_name

View File

@ -3,8 +3,7 @@ from logging import DEBUG
import pytest import pytest
from mobilizon_reshare.main.publish import select_and_publish, publish_event from mobilizon_reshare.main.publish import select_and_publish, publish_event
from mobilizon_reshare.storage.query.converter import event_from_model from mobilizon_reshare.event.event import EventPublicationStatus, MobilizonEvent
from mobilizon_reshare.event.event import EventPublicationStatus
from mobilizon_reshare.models.event import Event from mobilizon_reshare.models.event import Event
from mobilizon_reshare.models.publication import PublicationStatus from mobilizon_reshare.models.publication import PublicationStatus
from mobilizon_reshare.storage.query.read import get_all_publications from mobilizon_reshare.storage.query.read import get_all_publications
@ -75,7 +74,9 @@ async def test_select_and_publish_new_event(
assert p.status == PublicationStatus.COMPLETED assert p.status == PublicationStatus.COMPLETED
# the derived status for the event should be COMPLETED # the derived status for the event should be COMPLETED
assert event_from_model(event).status == EventPublicationStatus.COMPLETED assert (
MobilizonEvent.from_model(event).status == EventPublicationStatus.COMPLETED
)
@pytest.mark.asyncio @pytest.mark.asyncio

View File

@ -3,7 +3,7 @@ from logging import DEBUG, INFO
import pytest import pytest
from mobilizon_reshare.storage.query.read import ( from mobilizon_reshare.storage.query.read import (
get_all_events, get_all_mobilizon_events,
events_without_publications, events_without_publications,
) )
from tests.commands.conftest import ( from tests.commands.conftest import (
@ -40,7 +40,7 @@ async def test_pull_no_event(
assert "Pulled 0 events from Mobilizon." in caplog.text assert "Pulled 0 events from Mobilizon." in caplog.text
assert "There are now 0 unpublished events." in caplog.text assert "There are now 0 unpublished events." in caplog.text
assert expected_result == await get_all_events() assert expected_result == await get_all_mobilizon_events()
@pytest.mark.asyncio @pytest.mark.asyncio
@ -69,7 +69,7 @@ async def test_pull(
with caplog.at_level(DEBUG): with caplog.at_level(DEBUG):
assert await pull() == expected_result assert await pull() == expected_result
assert f"Pulled {len(elements)} events from Mobilizon." in caplog.text assert f"Pulled {len(elements)} events from Mobilizon." in caplog.text
assert expected_result == await get_all_events() assert expected_result == await get_all_mobilizon_events()
assert ( assert (
f"There are now {len(expected_result)} unpublished events." in caplog.text f"There are now {len(expected_result)} unpublished events." in caplog.text
@ -112,7 +112,7 @@ async def test_pull_start(
with caplog.at_level(INFO): with caplog.at_level(INFO):
assert await pull() == expected_pull assert await pull() == expected_pull
assert expected_pull == await get_all_events() assert expected_pull == await get_all_mobilizon_events()
assert expected_pull == await events_without_publications() assert expected_pull == await events_without_publications()
report = await start(command_config) report = await start(command_config)
@ -123,7 +123,9 @@ async def test_pull_start(
pull_ids = set(event.mobilizon_id for event in expected_pull) pull_ids = set(event.mobilizon_id for event in expected_pull)
publish_ids = {expected_publish.mobilizon_id} publish_ids = {expected_publish.mobilizon_id}
assert pull_ids == set(event.mobilizon_id for event in await get_all_events()) assert pull_ids == set(
event.mobilizon_id for event in await get_all_mobilizon_events()
)
assert (pull_ids - publish_ids) == set( assert (pull_ids - publish_ids) == set(
event.mobilizon_id for event in await events_without_publications() event.mobilizon_id for event in await events_without_publications()
) )
@ -188,8 +190,8 @@ async def test_multiple_pull(
with caplog.at_level(DEBUG): with caplog.at_level(DEBUG):
assert await pull() assert await pull()
assert f"There are now {len(expected_first)} unpublished events." in caplog.text assert f"There are now {len(expected_first)} unpublished events." in caplog.text
assert expected_first == await get_all_events() assert expected_first == await get_all_mobilizon_events()
assert await events_without_publications() == await get_all_events() assert await events_without_publications() == await get_all_mobilizon_events()
# I clean the message collector # I clean the message collector
message_collector.data = [] message_collector.data = []
@ -200,6 +202,6 @@ async def test_multiple_pull(
assert f"There are now {len(expected_last)} unpublished events." in caplog.text assert f"There are now {len(expected_last)} unpublished events." in caplog.text
assert set(event.mobilizon_id for event in expected_last) == set( assert set(event.mobilizon_id for event in expected_last) == set(
event.mobilizon_id for event in await get_all_events() event.mobilizon_id for event in await get_all_mobilizon_events()
) )
assert await events_without_publications() == await get_all_events() assert await events_without_publications() == await get_all_mobilizon_events()

View File

@ -3,10 +3,9 @@ from logging import DEBUG, INFO
import pytest import pytest
from mobilizon_reshare.config.command import CommandConfig from mobilizon_reshare.config.command import CommandConfig
from mobilizon_reshare.storage.query.converter import event_from_model, event_to_model from mobilizon_reshare.storage.query.read import get_all_mobilizon_events
from mobilizon_reshare.storage.query.read import get_all_events
from tests.commands.conftest import simple_event_element, second_event_element from tests.commands.conftest import simple_event_element, second_event_element
from mobilizon_reshare.event.event import EventPublicationStatus from mobilizon_reshare.event.event import EventPublicationStatus, MobilizonEvent
from mobilizon_reshare.main.start import start from mobilizon_reshare.main.start import start
from mobilizon_reshare.models.event import Event from mobilizon_reshare.models.event import Event
from mobilizon_reshare.models.publication import PublicationStatus from mobilizon_reshare.models.publication import PublicationStatus
@ -86,7 +85,8 @@ async def test_start_new_event(
# the derived status for the event should be COMPLETED # the derived status for the event should be COMPLETED
assert ( assert (
event_from_model(all_events[0]).status == EventPublicationStatus.COMPLETED MobilizonEvent.from_model(all_events[0]).status
== EventPublicationStatus.COMPLETED
) )
@ -107,7 +107,7 @@ async def test_start_event_from_db(
command_config, command_config,
): ):
event = event_generator() event = event_generator()
event_model = event_to_model(event) event_model = event.to_model()
await event_model.save() await event_model.save()
with caplog.at_level(DEBUG): with caplog.at_level(DEBUG):
@ -136,7 +136,10 @@ async def test_start_event_from_db(
assert p.status == PublicationStatus.COMPLETED assert p.status == PublicationStatus.COMPLETED
# the derived status for the event should be COMPLETED # the derived status for the event should be COMPLETED
assert event_from_model(event_model).status == EventPublicationStatus.COMPLETED assert (
MobilizonEvent.from_model(event_model).status
== EventPublicationStatus.COMPLETED
)
@pytest.mark.asyncio @pytest.mark.asyncio
@ -157,7 +160,7 @@ async def test_start_publisher_failure(
command_config, command_config,
): ):
event = event_generator() event = event_generator()
event_model = event_to_model(event) event_model = event.to_model()
await event_model.save() await event_model.save()
with caplog.at_level(DEBUG): with caplog.at_level(DEBUG):
@ -188,7 +191,10 @@ async def test_start_publisher_failure(
for _ in range(2) for _ in range(2)
] # 2 publications failed * 2 notifiers ] # 2 publications failed * 2 notifiers
# the derived status for the event should be FAILED # the derived status for the event should be FAILED
assert event_from_model(event_model).status == EventPublicationStatus.FAILED assert (
MobilizonEvent.from_model(event_model).status
== EventPublicationStatus.FAILED
)
@pytest.mark.asyncio @pytest.mark.asyncio
@ -222,7 +228,7 @@ async def test_start_second_execution(
"event_1|desc_1", "event_1|desc_1",
] ]
# I verify that the db event and the new event coming from mobilizon are both in the db # I verify that the db event and the new event coming from mobilizon are both in the db
assert len(list(await get_all_events())) == 2 assert len(list(await get_all_mobilizon_events())) == 2
@pytest.mark.parametrize( @pytest.mark.parametrize(

View File

@ -24,8 +24,6 @@ from mobilizon_reshare.publishers.abstract import (
AbstractEventFormatter, AbstractEventFormatter,
) )
from mobilizon_reshare.publishers.exceptions import PublisherError, InvalidResponse from mobilizon_reshare.publishers.exceptions import PublisherError, InvalidResponse
from mobilizon_reshare.storage.query.converter import event_to_model
from mobilizon_reshare.storage.query.write import get_publisher_by_name
from tests import today from tests import today
with importlib.resources.path( with importlib.resources.path(
@ -122,7 +120,7 @@ def event() -> MobilizonEvent:
@pytest.fixture @pytest.fixture
async def stored_event(event) -> Event: async def stored_event(event) -> Event:
model = event_to_model(event) model = event.to_model()
await model.save() await model.save()
await model.fetch_related("publications") await model.fetch_related("publications")
return model return model
@ -195,8 +193,10 @@ def event_model_generator():
@pytest.fixture() @pytest.fixture()
def publisher_model_generator(): def publisher_model_generator():
def _publisher_model_generator(idx=1,): def _publisher_model_generator(idx=1, name=None):
return Publisher(name=f"publisher_{idx}", account_ref=f"account_ref_{idx}") return Publisher(
name=name or f"publisher_{idx}", account_ref=f"account_ref_{idx}"
)
return _publisher_model_generator return _publisher_model_generator
@ -509,13 +509,13 @@ async def event_with_failed_publication(
@pytest.fixture @pytest.fixture
async def failed_publication(stored_event) -> Publication: async def failed_publication(stored_event, mock_publisher) -> Publication:
p = Publication( p = Publication(
event=stored_event, event=stored_event,
status=PublicationStatus.FAILED, status=PublicationStatus.FAILED,
timestamp=arrow.now().datetime, timestamp=arrow.now().datetime,
publisher=await get_publisher_by_name("mock"), publisher=mock_publisher,
) )
await p.save() await p.save()
return p return p
@ -524,3 +524,10 @@ async def failed_publication(stored_event) -> Publication:
@pytest.fixture @pytest.fixture
def command_config(): def command_config():
return CommandConfig(dry_run=False) return CommandConfig(dry_run=False)
@pytest.fixture()
async def mock_publisher(publisher_model_generator):
publisher = await publisher_model_generator(name="mock")
await publisher.save()
return publisher

View File

@ -5,14 +5,9 @@ import arrow
import pytest import pytest
import tortoise.timezone import tortoise.timezone
from mobilizon_reshare.event.event import EventPublicationStatus from mobilizon_reshare.event.event import EventPublicationStatus, MobilizonEvent
from mobilizon_reshare.models.event import Event from mobilizon_reshare.models.event import Event
from mobilizon_reshare.models.publication import PublicationStatus from mobilizon_reshare.models.publication import PublicationStatus
from mobilizon_reshare.storage.query.converter import (
event_from_model,
event_to_model,
compute_event_status,
)
@pytest.mark.asyncio @pytest.mark.asyncio
@ -93,7 +88,7 @@ async def test_event_sort_by_date(event_model_generator):
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_mobilizon_event_to_model(event): async def test_mobilizon_event_to_model(event):
event_model = event_to_model(event) event_model = event.to_model()
await event_model.save() await event_model.save()
event_db = await Event.all().first() event_db = await Event.all().first()
@ -141,7 +136,7 @@ async def test_mobilizon_event_from_model(
.prefetch_related("publications__publisher") .prefetch_related("publications__publisher")
.first() .first()
) )
event = event_from_model(event=event_db) event = MobilizonEvent.from_model(event=event_db)
begin_date_utc = arrow.Arrow(year=2021, month=1, day=1, hour=11, minute=30) begin_date_utc = arrow.Arrow(year=2021, month=1, day=1, hour=11, minute=30)
@ -196,4 +191,4 @@ async def test_mobilizon_event_compute_status_partial(
) )
await publication.save() await publication.save()
publications.append(publication) publications.append(publication)
assert compute_event_status(publications) == expected_result assert MobilizonEvent._compute_event_status(publications) == expected_result

View File

@ -23,10 +23,6 @@ from mobilizon_reshare.publishers.coordinators.event_publishing.publish import (
from mobilizon_reshare.publishers.coordinators.recap_publishing.recap import ( from mobilizon_reshare.publishers.coordinators.recap_publishing.recap import (
RecapCoordinator, RecapCoordinator,
) )
from mobilizon_reshare.storage.query.converter import (
event_to_model,
publication_from_orm,
)
from tests import today from tests import today
@ -96,7 +92,7 @@ async def mock_publications(
): ):
result = [] result = []
for i in range(num_publications): for i in range(num_publications):
event = event_to_model(test_event) event = test_event.to_model()
await event.save() await event.save()
publisher = Publisher(name="telegram") publisher = Publisher(name="telegram")
await publisher.save() await publisher.save()
@ -107,7 +103,7 @@ async def mock_publications(
timestamp=today + timedelta(hours=i), timestamp=today + timedelta(hours=i),
reason=None, reason=None,
) )
publication = publication_from_orm(publication, test_event) publication = EventPublication.from_orm(publication, test_event)
publication.publisher = mock_publisher_valid publication.publisher = mock_publisher_valid
publication.formatter = mock_formatter_valid publication.formatter = mock_formatter_valid
result.append(publication) result.append(publication)

View File

@ -14,7 +14,6 @@ from mobilizon_reshare.publishers.exceptions import (
HTTPResponseError, HTTPResponseError,
) )
from mobilizon_reshare.publishers.platforms.zulip import ZulipFormatter, ZulipPublisher from mobilizon_reshare.publishers.platforms.zulip import ZulipFormatter, ZulipPublisher
from mobilizon_reshare.storage.query.converter import event_to_model
from mobilizon_reshare.storage.query.read import build_publications, get_all_publishers from mobilizon_reshare.storage.query.read import build_publications, get_all_publishers
one_publication_specification = { one_publication_specification = {
@ -103,7 +102,7 @@ async def setup_db(generate_models):
@pytest.fixture @pytest.fixture
@pytest.mark.asyncio @pytest.mark.asyncio
async def unsaved_publications(setup_db, event): async def unsaved_publications(setup_db, event):
await event_to_model(event).save() await event.to_model().save()
publishers = [p.name for p in await get_all_publishers()] publishers = [p.name for p in await get_all_publishers()]
return await build_publications(event, publishers) return await build_publications(event, publishers)

View File

@ -2,8 +2,7 @@ from uuid import UUID
import pytest import pytest
from mobilizon_reshare.storage.query.converter import event_to_model from mobilizon_reshare.storage.query.read import get_all_mobilizon_events
from mobilizon_reshare.storage.query.read import get_all_events
@pytest.mark.asyncio @pytest.mark.asyncio
@ -12,6 +11,6 @@ async def test_get_all_events(event_generator):
event_generator(mobilizon_id=UUID(int=i), published=False) for i in range(4) event_generator(mobilizon_id=UUID(int=i), published=False) for i in range(4)
] ]
for e in all_events: for e in all_events:
await event_to_model(e).save() await e.to_model().save()
assert list(await get_all_events()) == all_events assert list(await get_all_mobilizon_events()) == all_events

View File

@ -3,7 +3,7 @@ import urllib3
from httpx import AsyncClient from httpx import AsyncClient
from mobilizon_reshare.storage import db from mobilizon_reshare.storage import db
from mobilizon_reshare.web.backend.main import app, register_endpoints from mobilizon_reshare.web.backend.main import app, init_endpoints
@pytest.fixture(scope="session") @pytest.fixture(scope="session")
@ -13,7 +13,8 @@ def anyio_backend():
@pytest.fixture() @pytest.fixture()
async def client(): async def client():
register_endpoints(app) init_endpoints(app)
async with AsyncClient(app=app, base_url="http://test") as client: async with AsyncClient(app=app, base_url="http://test") as client:
yield client yield client

View File

@ -3,7 +3,7 @@ import json
import pytest import pytest
from httpx import AsyncClient from httpx import AsyncClient
from mobilizon_reshare.web.backend.main import event_pydantic from mobilizon_reshare.models.event import Event
@pytest.mark.anyio @pytest.mark.anyio
@ -13,4 +13,5 @@ async def test_events(client: AsyncClient, event_model_generator):
response = await client.get("/events") response = await client.get("/events")
assert response.status_code == 200 assert response.status_code == 200
assert response.json()[0] == [json.loads(event_pydantic.from_orm(event).json())][0] expected = await Event.to_pydantic().from_tortoise_orm(event)
assert response.json()["items"][0] == json.loads(expected.json())

View File

@ -0,0 +1,15 @@
import json
import pytest
from httpx import AsyncClient
from mobilizon_reshare.models.publication import Publication
@pytest.mark.asyncio
async def test_publication(client: AsyncClient, failed_publication):
response = await client.get("/publications")
assert response.status_code == 200
expected = await Publication.to_pydantic().from_tortoise_orm(failed_publication)
assert response.json()["items"][0] == json.loads(expected.json())