mirror of https://git.sr.ht/~tsileo/microblog.pub synced 2025-06-05 21:59:23 +02:00

2 Commits

Author SHA1 Message Date
0f1fdd3944 Add missing migration 2022-09-21 19:19:12 +02:00
254588f7c0 Bootstrap support for quote URL 2022-09-21 19:18:44 +02:00
18 changed files with 291 additions and 534 deletions

View File

@ -0,0 +1,34 @@
"""Add support for quote URL
Revision ID: c3027d0e18dc
Revises: 604d125ea2fb
Create Date: 2022-09-21 07:08:24.568124+00:00
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = 'c3027d0e18dc'
down_revision = '604d125ea2fb'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('inbox', schema=None) as batch_op:
batch_op.add_column(sa.Column('quoted_inbox_object_id', sa.Integer(), nullable=True))
batch_op.create_foreign_key('fk_quoted_inbox_object_id', 'inbox', ['quoted_inbox_object_id'], ['id'])
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('inbox', schema=None) as batch_op:
batch_op.drop_constraint('fk_quoted_inbox_object_id', type_='foreignkey')
batch_op.drop_column('quoted_inbox_object_id')
# ### end Alembic commands ###
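
A minimal sketch (not part of the commit) of how the new column is meant to be used once this migration has run: resolving a quoted note is a plain self-join on the inbox table. The helper below is hypothetical; it only relies on the quoted_inbox_object_id column added above and on the InboxObject model updated later in this diff.

from sqlalchemy import select

from app import models
from app.database import AsyncSession


async def resolve_quoted_object(
    db_session: AsyncSession,
    inbox_object: models.InboxObject,
) -> models.InboxObject | None:
    # No quote attached to this inbox object
    if inbox_object.quoted_inbox_object_id is None:
        return None
    # Self-join on the inbox table via the new foreign key
    return (
        await db_session.scalars(
            select(models.InboxObject).where(
                models.InboxObject.id == inbox_object.quoted_inbox_object_id
            )
        )
    ).one_or_none()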

View File

@ -6,6 +6,7 @@ from typing import Any
import httpx
from loguru import logger
from markdown import markdown
from app import config
from app.config import ALSO_KNOWN_AS
@ -13,7 +14,6 @@ from app.config import AP_CONTENT_TYPE # noqa: F401
from app.config import MOVED_TO
from app.httpsig import auth
from app.key import get_pubkey_as_pem
from app.source import dedup_tags
from app.source import hashtagify
from app.utils.url import check_url
@ -53,26 +53,15 @@ AS_EXTENDED_CTX = [
]
class FetchError(Exception):
def __init__(self, url: str, resp: httpx.Response | None = None) -> None:
resp_part = ""
if resp:
resp_part = f", got HTTP {resp.status_code}: {resp.text}"
message = f"Failed to fetch {url}{resp_part}"
super().__init__(message)
self.resp = resp
self.url = url
class ObjectIsGoneError(FetchError):
class ObjectIsGoneError(Exception):
pass
class ObjectNotFoundError(FetchError):
class ObjectNotFoundError(Exception):
pass
class ObjectUnavailableError(FetchError):
class ObjectUnavailableError(Exception):
pass
@ -101,19 +90,6 @@ class VisibilityEnum(str, enum.Enum):
_LOCAL_ACTOR_SUMMARY, _LOCAL_ACTOR_TAGS = hashtagify(config.CONFIG.summary)
_LOCAL_ACTOR_METADATA = []
if config.CONFIG.metadata:
for kv in config.CONFIG.metadata:
kv_value, kv_tags = hashtagify(kv.value)
_LOCAL_ACTOR_METADATA.append(
{
"name": kv.key,
"type": "PropertyValue",
"value": kv_value,
}
)
_LOCAL_ACTOR_TAGS.extend(kv_tags)
ME = {
"@context": AS_EXTENDED_CTX,
@ -126,7 +102,7 @@ ME = {
"outbox": config.BASE_URL + "/outbox",
"preferredUsername": config.USERNAME,
"name": config.CONFIG.name,
"summary": _LOCAL_ACTOR_SUMMARY,
"summary": markdown(_LOCAL_ACTOR_SUMMARY, extensions=["mdx_linkify"]),
"endpoints": {
# For compat with servers expecting a sharedInbox...
"sharedInbox": config.BASE_URL
@ -134,7 +110,16 @@ ME = {
},
"url": config.ID + "/", # XXX: the path is important for Mastodon compat
"manuallyApprovesFollowers": config.CONFIG.manually_approves_followers,
"attachment": _LOCAL_ACTOR_METADATA,
"attachment": [
{
"name": kv.key,
"type": "PropertyValue",
"value": markdown(kv.value, extensions=["mdx_linkify", "fenced_code"]),
}
for kv in config.CONFIG.metadata
]
if config.CONFIG.metadata
else [],
"icon": {
"mediaType": mimetypes.guess_type(config.CONFIG.icon_url)[0],
"type": "Image",
@ -145,7 +130,7 @@ ME = {
"owner": config.ID,
"publicKeyPem": get_pubkey_as_pem(config.KEY_PATH),
},
"tag": dedup_tags(_LOCAL_ACTOR_TAGS),
"tag": _LOCAL_ACTOR_TAGS,
}
if ALSO_KNOWN_AS:
@ -185,17 +170,13 @@ async def fetch(
# Special handling for deleted object
if resp.status_code == 410:
raise ObjectIsGoneError(url, resp)
raise ObjectIsGoneError(f"{url} is gone")
elif resp.status_code in [401, 403]:
raise ObjectUnavailableError(url, resp)
raise ObjectUnavailableError(f"not allowed to fetch {url}")
elif resp.status_code == 404:
raise ObjectNotFoundError(url, resp)
try:
resp.raise_for_status()
except httpx.HTTPError as http_error:
raise FetchError(url, resp) from http_error
raise ObjectNotFoundError(f"{url} not found")
resp.raise_for_status()
try:
return resp.json()
except json.JSONDecodeError:
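
A minimal usage sketch, not part of the diff: how a caller might guard an ap.fetch() call with the exception classes defined in this hunk. The wrapper function and log messages are hypothetical.

from loguru import logger

from app import activitypub as ap


async def fetch_or_none(url: str) -> ap.RawObject | None:
    try:
        return await ap.fetch(url)
    except ap.ObjectIsGoneError:
        # The remote object was deleted (HTTP 410)
        logger.info(f"{url} is gone")
    except ap.ObjectUnavailableError:
        # We are not allowed to fetch it (HTTP 401/403)
        logger.info(f"not allowed to fetch {url}")
    except ap.ObjectNotFoundError:
        # HTTP 404
        logger.info(f"{url} not found")
    return None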

View File

@ -1,7 +1,6 @@
import hashlib
import typing
from dataclasses import dataclass
from datetime import timedelta
from functools import cached_property
from typing import Union
from urllib.parse import urlparse
@ -13,8 +12,6 @@ from sqlalchemy.orm import joinedload
from app import activitypub as ap
from app import media
from app.database import AsyncSession
from app.utils.datetime import as_utc
from app.utils.datetime import now
if typing.TYPE_CHECKING:
from app.models import Actor as ActorModel
@ -192,64 +189,26 @@ async def fetch_actor(
if existing_actor:
if existing_actor.is_deleted:
raise ap.ObjectNotFoundError(f"{actor_id} was deleted")
if now() - as_utc(existing_actor.updated_at) > timedelta(hours=24):
logger.info(
f"Refreshing {actor_id=} last updated {existing_actor.updated_at}"
)
try:
ap_actor = await ap.fetch(actor_id)
await update_actor_if_needed(
db_session,
existing_actor,
RemoteActor(ap_actor),
)
return existing_actor
except Exception:
logger.exception(f"Failed to refresh {actor_id}")
# If we fail to refresh the actor, return the cached one
return existing_actor
else:
return existing_actor
if save_if_not_found:
ap_actor = await ap.fetch(actor_id)
# Some software uses the URL where we expect the ID
if actor_id == ap_actor.get("url"):
# Which means we may already have it in the DB
existing_actor_by_url = (
await db_session.scalars(
select(models.Actor).where(
models.Actor.ap_id == ap.get_id(ap_actor),
)
)
).one_or_none()
if existing_actor_by_url:
# Update the actor as we had to fetch it anyway
await update_actor_if_needed(
db_session,
existing_actor_by_url,
RemoteActor(ap_actor),
)
return existing_actor_by_url
return await save_actor(db_session, ap_actor)
return existing_actor
else:
raise ap.ObjectNotFoundError(actor_id)
if save_if_not_found:
ap_actor = await ap.fetch(actor_id)
# Some software uses the URL where we expect the ID
if actor_id == ap_actor.get("url"):
# Which means we may already have it in the DB
existing_actor_by_url = (
await db_session.scalars(
select(models.Actor).where(
models.Actor.ap_id == ap.get_id(ap_actor),
)
)
).one_or_none()
if existing_actor_by_url:
return existing_actor_by_url
async def update_actor_if_needed(
db_session: AsyncSession,
actor_in_db: "ActorModel",
ra: RemoteActor,
) -> None:
# Check if we actually need to update the actor in DB
if _actor_hash(ra) != _actor_hash(actor_in_db):
actor_in_db.ap_actor = ra.ap_actor
actor_in_db.handle = ra.handle
actor_in_db.ap_type = ra.ap_type
actor_in_db.updated_at = now()
await db_session.flush()
return await save_actor(db_session, ap_actor)
else:
raise ap.ObjectNotFoundError
@dataclass

View File

@ -1,11 +1,12 @@
import hashlib
import mimetypes
from datetime import datetime
from functools import cached_property
from typing import Any
from typing import Optional
import pydantic
from bs4 import BeautifulSoup # type: ignore
from loguru import logger
from markdown import markdown
from app import activitypub as ap
@ -75,6 +76,10 @@ class Object:
def tags(self) -> list[ap.RawObject]:
return ap.as_list(self.ap_object.get("tag", []))
@property
def quote_url(self) -> str | None:
return self.ap_object.get("quoteUrl")
@cached_property
def inlined_images(self) -> set[str]:
image_urls: set[str] = set()
@ -156,7 +161,7 @@ class Object:
@cached_property
def url(self) -> str | None:
obj_url = self.ap_object.get("url")
if isinstance(obj_url, str) and obj_url:
if isinstance(obj_url, str):
return obj_url
elif obj_url:
for u in ap.as_list(obj_url):
@ -277,22 +282,17 @@ class Attachment(BaseModel):
proxied_url: str | None = None
resized_url: str | None = None
@property
def mimetype(self) -> str:
mimetype = self.media_type
if not mimetype:
mimetype, _ = mimetypes.guess_type(self.url)
if not mimetype:
return "unknown"
return mimetype.split("/")[-1]
class RemoteObject(Object):
def __init__(self, raw_object: ap.RawObject, actor: Actor):
def __init__(
self,
raw_object: ap.RawObject,
actor: Actor,
quoted_object: Object | None = None,
):
self._raw_object = raw_object
self._actor = actor
self._quoted_object = quoted_object
if self._actor.ap_id != ap.get_actor_id(self._raw_object):
raise ValueError(f"Invalid actor {self._actor.ap_id}")
@ -302,6 +302,7 @@ class RemoteObject(Object):
cls,
raw_object: ap.RawObject,
actor: Actor | None = None,
fetch_quoted_url: bool = True,
):
# Pre-fetch the actor
actor_id = ap.get_actor_id(raw_object)
@ -318,7 +319,17 @@ class RemoteObject(Object):
ap_actor=await ap.fetch(ap.get_actor_id(raw_object)),
)
return cls(raw_object, _actor)
quoted_object: Object | None = None
if quote_url := raw_object.get("quoteUrl"):
try:
quoted_object = await RemoteObject.from_raw_object(
await ap.fetch(quote_url),
fetch_quoted_url=fetch_quoted_url,
)
except Exception:
logger.exception(f"Failed to fetch {quote_url=}")
return cls(raw_object, _actor, quoted_object=quoted_object)
@property
def og_meta(self) -> list[dict[str, Any]] | None:
@ -331,3 +342,9 @@ class RemoteObject(Object):
@property
def actor(self) -> Actor:
return self._actor
@property
def quoted_object(self) -> Optional["RemoteObject"]:
if self._quoted_object:
return self._quoted_object
return None
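
A hedged illustration, not from the diff: a raw ActivityPub note carrying a quoteUrl is turned into a RemoteObject via from_raw_object() above, which also resolves the quoted note. The payload and URLs are made up.

from app.ap_object import RemoteObject

raw_note = {
    "type": "Note",
    "id": "https://example.com/notes/2",
    "attributedTo": "https://example.com/users/alice",
    "content": "<p>check this out</p>",
    "quoteUrl": "https://example.com/notes/1",
}

# Inside an async context: fetches the actor, then the quoted note (if any)
ro = await RemoteObject.from_raw_object(raw_note)
print(ro.quote_url)                # "https://example.com/notes/1"
if ro.quoted_object:
    print(ro.quoted_object.ap_id)  # AP ID of the quoted note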

View File

@ -24,7 +24,6 @@ from app.actor import Actor
from app.actor import RemoteActor
from app.actor import fetch_actor
from app.actor import save_actor
from app.actor import update_actor_if_needed
from app.ap_object import RemoteObject
from app.config import BASE_URL
from app.config import BLOCKED_SERVERS
@ -33,7 +32,6 @@ from app.config import MANUALLY_APPROVES_FOLLOWERS
from app.config import set_moved_to
from app.database import AsyncSession
from app.outgoing_activities import new_outgoing_activity
from app.source import dedup_tags
from app.source import markdownify
from app.uploads import upload_to_attachment
from app.utils import opengraph
@ -290,7 +288,6 @@ async def _send_undo(db_session: AsyncSession, ap_object_id: str) -> None:
raise ValueError("Should never happen")
outbox_object_to_undo.undone_by_outbox_object_id = outbox_object.id
outbox_object_to_undo.is_deleted = True
if outbox_object_to_undo.ap_type == "Follow":
if not outbox_object_to_undo.activity_object_ap_id:
@ -349,7 +346,6 @@ async def fetch_conversation_root(
db_session: AsyncSession,
obj: AnyboxObject | RemoteObject,
is_root: bool = False,
depth: int = 0,
) -> str:
"""Some softwares do not set the context/conversation field (like Misskey).
This means we have to track conversation ourselves. To do so, we fetch
@ -357,13 +353,12 @@ async def fetch_conversation_root(
- use the context field if set
- or build a custom conversation ID
"""
logger.info(f"Fetching convo root for ap_id={obj.ap_id}/{depth=}")
if obj.ap_context:
return obj.ap_context
if not obj.in_reply_to or is_root or depth > 10:
# Use the root AP ID if there's no context
return f"microblogpub:root:{obj.ap_id}"
if not obj.in_reply_to or is_root:
if obj.ap_context:
return obj.ap_context
else:
# Use the root AP ID if there's no context
return f"microblogpub:root:{obj.ap_id}"
else:
in_reply_to_object: AnyboxObject | RemoteObject | None = (
await get_anybox_object_by_ap_id(db_session, obj.in_reply_to)
@ -376,24 +371,20 @@ async def fetch_conversation_root(
)
in_reply_to_object = RemoteObject(raw_reply, actor=raw_reply_actor)
except (
ap.FetchError,
ap.ObjectNotFoundError,
ap.ObjectIsGoneError,
ap.NotAnObjectError,
ap.ObjectUnavailableError,
):
return await fetch_conversation_root(
db_session, obj, is_root=True, depth=depth + 1
)
return await fetch_conversation_root(db_session, obj, is_root=True)
except httpx.HTTPStatusError as http_status_error:
if 400 <= http_status_error.response.status_code < 500:
# We may not have access; in this case, consider it the root
return await fetch_conversation_root(
db_session, obj, is_root=True, depth=depth + 1
)
return await fetch_conversation_root(db_session, obj, is_root=True)
else:
raise
return await fetch_conversation_root(
db_session, in_reply_to_object, depth=depth + 1
)
return await fetch_conversation_root(db_session, in_reply_to_object)
async def send_move(
@ -552,7 +543,7 @@ async def send_create(
"context": context,
"conversation": context,
"url": outbox_object_id(note_id),
"tag": dedup_tags(tags),
"tag": tags,
"summary": content_warning,
"inReplyTo": in_reply_to,
"sensitive": is_sensitive,
@ -572,7 +563,7 @@ async def send_create(
for tag in tags:
if tag["type"] == "Hashtag":
tagged_object = models.TaggedOutboxObject(
tag=tag["name"][1:].lower(),
tag=tag["name"][1:],
outbox_object_id=outbox_object.id,
)
db_session.add(tagged_object)
@ -1078,17 +1069,7 @@ async def _revert_side_effect_for_deleted_object(
) -> None:
is_delete_needs_to_be_forwarded = False
# Delete related notifications
notif_deletion_result = await db_session.execute(
delete(models.Notification)
.where(models.Notification.inbox_object_id == deleted_ap_object.id)
.execution_options(synchronize_session=False)
)
logger.info(
f"Deleted {notif_deletion_result.rowcount} notifications" # type: ignore
)
# Decrement/refresh the replies counter if needed
# Decrement the replies counter if needed
if deleted_ap_object.in_reply_to:
replied_object = await get_anybox_object_by_ap_id(
db_session,
@ -1500,7 +1481,7 @@ async def _handle_update_activity(
)
# Update the actor
await update_actor_if_needed(db_session, from_actor, updated_actor)
from_actor.ap_actor = updated_actor.ap_actor
elif (ap_type := wrapped_object["type"]) in [
"Question",
"Note",
@ -1523,7 +1504,6 @@ async def _handle_update_activity(
# Everything looks correct, update the object in the inbox
logger.info(f"Updating {existing_object.ap_id}")
existing_object.ap_object = wrapped_object
existing_object.updated_at = now()
else:
# TODO(ts): support updating objects
logger.info(f'Cannot update {wrapped_object["type"]}')
@ -1534,24 +1514,8 @@ async def _handle_create_activity(
from_actor: models.Actor,
create_activity: models.InboxObject,
forwarded_by_actor: models.Actor | None = None,
relates_to_inbox_object: models.InboxObject | None = None,
) -> None:
logger.info("Processing Create activity")
# Some PeerTube activities make no sense to process
if (
ap_object_type := ap.as_list(
(await ap.get_object(create_activity.ap_object))["type"]
)[0]
) in ["CacheFile"]:
logger.info(f"Dropping Create activity for {ap_object_type} object")
await db_session.delete(create_activity)
return None
if relates_to_inbox_object:
logger.warning(f"{relates_to_inbox_object.ap_id} is already in the inbox")
return None
wrapped_object = ap.unwrap_activity(create_activity.ap_object)
if create_activity.actor.ap_id != ap.get_actor_id(wrapped_object):
raise ValueError("Object actor does not match activity")
@ -1602,14 +1566,6 @@ async def _handle_read_activity(
if not wrapped_object_actor.is_blocked:
ro = RemoteObject(wrapped_object, actor=wrapped_object_actor)
# Check if we already know about this object
if await get_inbox_object_by_ap_id(
db_session,
ro.ap_id,
):
logger.info(f"{ro.ap_id} is already in the inbox, skipping processing")
return None
# Then process it like it's coming from a forwarded activity
await _process_note_object(db_session, read_activity, wrapped_object_actor, ro)
@ -1620,8 +1576,11 @@ async def _process_note_object(
from_actor: models.Actor,
ro: RemoteObject,
forwarded_by_actor: models.Actor | None = None,
) -> None:
if parent_activity.ap_type not in ["Create", "Read"]:
process_quoted_url: bool = True,
) -> models.InboxObject:
if process_quoted_url and parent_activity.quote_url == ro.ap_id:
logger.info(f"Processing quoted URL for {parent_activity.ap_id}")
elif parent_activity.ap_type not in ["Create", "Read"]:
raise ValueError(f"Unexpected parent activity {parent_activity.ap_id}")
ap_published_at = now()
@ -1664,6 +1623,7 @@ async def _process_note_object(
),
# We may already have some replies in DB
replies_count=await _get_replies_count(db_session, ro.ap_id),
quoted_inbox_object_id=None,
)
db_session.add(inbox_object)
@ -1744,6 +1704,28 @@ async def _process_note_object(
)
db_session.add(notif)
await db_session.flush()
if ro.quote_url and process_quoted_url:
try:
quoted_raw_object = await ap.fetch(ro.quote_url)
quoted_object_actor = await fetch_actor(
db_session, ap.get_actor_id(quoted_raw_object)
)
quoted_ro = RemoteObject(quoted_raw_object, quoted_object_actor)
quoted_inbox_object = await _process_note_object(
db_session,
inbox_object,
from_actor=quoted_object_actor,
ro=quoted_ro,
process_quoted_url=False,
)
inbox_object.quoted_inbox_object_id = quoted_inbox_object.id
except Exception:
logger.exception("Failed to process quoted object")
return inbox_object
async def _handle_vote_answer(
db_session: AsyncSession,
@ -1993,7 +1975,7 @@ async def save_to_inbox(
except ap.ObjectNotFoundError:
logger.warning("Actor not found")
return
except ap.FetchError:
except httpx.HTTPStatusError:
logger.exception("Failed to fetch actor")
return
@ -2101,11 +2083,7 @@ async def save_to_inbox(
if activity_ro.ap_type == "Create":
await _handle_create_activity(
db_session,
actor,
inbox_object,
forwarded_by_actor=forwarded_by_actor,
relates_to_inbox_object=relates_to_inbox_object,
db_session, actor, inbox_object, forwarded_by_actor=forwarded_by_actor
)
elif activity_ro.ap_type == "Read":
await _handle_read_activity(db_session, actor, inbox_object)

View File

@ -88,12 +88,8 @@ def _body_digest(body: bytes) -> str:
return "SHA-256=" + base64.b64encode(h.digest()).decode("utf-8")
async def _get_public_key(
db_session: AsyncSession,
key_id: str,
should_skip_cache: bool = False,
) -> Key:
if not should_skip_cache and (cached_key := _KEY_CACHE.get(key_id)):
async def _get_public_key(db_session: AsyncSession, key_id: str) -> Key:
if cached_key := _KEY_CACHE.get(key_id):
logger.info(f"Key {key_id} found in cache")
return cached_key
@ -105,18 +101,15 @@ async def _get_public_key(
select(models.Actor).where(models.Actor.ap_id == key_id.split("#")[0])
)
).one_or_none()
if not should_skip_cache:
if existing_actor and existing_actor.public_key_id == key_id:
k = Key(existing_actor.ap_id, key_id)
k.load_pub(existing_actor.public_key_as_pem)
logger.info(f"Found {key_id} on an existing actor")
_KEY_CACHE[key_id] = k
return k
if existing_actor and existing_actor.public_key_id == key_id:
k = Key(existing_actor.ap_id, key_id)
k.load_pub(existing_actor.public_key_as_pem)
logger.info(f"Found {key_id} on an existing actor")
_KEY_CACHE[key_id] = k
return k
# Fetch it
from app import activitypub as ap
from app.actor import RemoteActor
from app.actor import update_actor_if_needed
# Without signing the request as if it's the first contact, the 2 servers
# might race to fetch each other's keys
@ -140,12 +133,6 @@ async def _get_public_key(
f"failed to fetch requested key {key_id}: got {actor['publicKey']}"
)
if should_skip_cache and actor["type"] != "Key" and existing_actor:
# We had to skip the cache, which means the actor key probably changed
# and we want to update our cached version
await update_actor_if_needed(db_session, existing_actor, RemoteActor(actor))
await db_session.commit()
_KEY_CACHE[key_id] = k
return k
@ -229,17 +216,7 @@ async def httpsig_checker(
has_valid_signature = _verify_h(
signed_string, base64.b64decode(hsig["signature"]), k.pubkey
)
# If the signature is not valid, we may have to update the cached actor
if not has_valid_signature:
logger.info("Invalid signature, trying to refresh actor")
try:
k = await _get_public_key(db_session, hsig["keyId"], should_skip_cache=True)
has_valid_signature = _verify_h(
signed_string, base64.b64decode(hsig["signature"]), k.pubkey
)
except Exception:
logger.exception("Failed to refresh actor")
# FIXME: fetch/update the user if the signature is wrong
httpsig_info = HTTPSigInfo(
has_valid_signature=has_valid_signature,

View File

@ -763,7 +763,7 @@ async def tag_by_name(
_: httpsig.HTTPSigInfo = Depends(httpsig.httpsig_checker),
) -> ActivityPubResponse | templates.TemplateResponse:
where = [
models.TaggedOutboxObject.tag == tag.lower(),
models.TaggedOutboxObject.tag == tag,
models.OutboxObject.visibility == ap.VisibilityEnum.PUBLIC,
models.OutboxObject.is_deleted.is_(False),
]
@ -789,7 +789,7 @@ async def tag_by_name(
return ActivityPubResponse(
{
"@context": ap.AS_CTX,
"id": BASE_URL + f"/t/{tag.lower()}",
"id": BASE_URL + f"/t/{tag}",
"type": "OrderedCollection",
"totalItems": tagged_count,
"orderedItems": [
@ -883,48 +883,6 @@ async def post_remote_follow(
)
@app.get("/remote_interaction")
async def remote_interaction(
request: Request,
ap_id: str,
db_session: AsyncSession = Depends(get_db_session),
) -> templates.TemplateResponse:
outbox_object = await boxes.get_outbox_object_by_ap_id(
db_session,
ap_id,
)
if not outbox_object:
raise HTTPException(status_code=404)
return await templates.render_template(
db_session,
request,
"remote_interact.html",
{"outbox_object": outbox_object},
)
@app.post("/remote_interaction")
async def post_remote_interaction(
request: Request,
csrf_check: None = Depends(verify_csrf_token),
profile: str = Form(),
ap_id: str = Form(),
) -> RedirectResponse:
if not profile.startswith("@"):
profile = f"@{profile}"
remote_follow_template = await get_remote_follow_template(profile)
if not remote_follow_template:
# TODO(ts): error message to user
raise HTTPException(status_code=404)
return RedirectResponse(
remote_follow_template.format(uri=ap_id),
status_code=302,
)
@app.get("/.well-known/webfinger")
async def wellknown_webfinger(resource: str) -> JSONResponse:
"""Exposes/servers WebFinger data."""
@ -1221,7 +1179,6 @@ async def robots_file():
Disallow: /followers
Disallow: /following
Disallow: /admin
Disallow: /remote_interaction
Disallow: /remote_follow"""

View File

@ -113,6 +113,18 @@ class InboxObject(Base, BaseObject):
uselist=False,
)
quoted_inbox_object_id = Column(
Integer,
ForeignKey("inbox.id", name="fk_quoted_inbox_object_id"),
nullable=True,
)
quoted_inbox_object: Mapped[Optional["InboxObject"]] = relationship(
"InboxObject",
foreign_keys=quoted_inbox_object_id,
remote_side=id,
uselist=False,
)
undone_by_inbox_object_id = Column(Integer, ForeignKey("inbox.id"), nullable=True)
# Link the outbox AP ID to allow undo without any extra query
@ -147,6 +159,12 @@ class InboxObject(Base, BaseObject):
def is_from_inbox(self) -> bool:
return True
@property
def quoted_object(self) -> Optional["InboxObject"]:
if self.quoted_inbox_object_id:
return self.quoted_inbox_object
return None
class OutboxObject(Base, BaseObject):
__tablename__ = "outbox"
@ -281,6 +299,10 @@ class OutboxObject(Base, BaseObject):
def is_from_outbox(self) -> bool:
return True
@property
def quoted_object(self) -> Optional["InboxObject"]:
return None
class Follower(Base):
__tablename__ = "follower"

View File

@ -388,7 +388,7 @@ nav.flexbox {
margin-right: 0px;
}
}
a:not(.label-btn) {
a {
color: $primary-color;
text-decoration: none;
&:hover, &:active {
@ -396,29 +396,23 @@ nav.flexbox {
text-decoration: underline;
}
}
a.active:not(.label-btn) {
a.active {
color: $secondary-color;
font-weight: bold;
}
}
// after nav.flexbox to override default behavior
a.label-btn {
color: $form-text-color;
&:hover {
text-decoration: none;
color: $form-text-color;
}
}
.ap-object {
margin: 15px 0;
padding: 20px;
nav {
color: $muted-color;
}
.in-reply-to {
display: inline;
color: $muted-color;
&:hover {
color: $secondary-color;
text-decoration: underline;
}
}
nav {
color: $muted-color;
}
.e-content, .activity-og-meta {

View File

@ -1,123 +1,52 @@
import re
import typing
from mistletoe import Document # type: ignore
from mistletoe.html_renderer import HTMLRenderer # type: ignore
from mistletoe.span_token import SpanToken # type: ignore
from pygments import highlight # type: ignore
from pygments.formatters import HtmlFormatter # type: ignore
from pygments.lexers import get_lexer_by_name as get_lexer # type: ignore
from pygments.lexers import guess_lexer # type: ignore
from markdown import markdown
from sqlalchemy import select
from app import webfinger
from app.config import BASE_URL
from app.config import CODE_HIGHLIGHTING_THEME
from app.database import AsyncSession
from app.utils import emoji
if typing.TYPE_CHECKING:
from app.actor import Actor
_FORMATTER = HtmlFormatter(style=CODE_HIGHLIGHTING_THEME)
def _set_a_attrs(attrs, new=False):
attrs[(None, "target")] = "_blank"
attrs[(None, "class")] = "external"
attrs[(None, "rel")] = "noopener"
attrs[(None, "title")] = attrs[(None, "href")]
return attrs
_HASHTAG_REGEX = re.compile(r"(#[\d\w]+)")
_MENTION_REGEX = re.compile(r"(@[\d\w_.+-]+@[\d\w-]+\.[\d\w\-.]+)")
_URL_REGEX = re.compile(
"(https?:\\/\\/(?:www\\.)?[-a-zA-Z0-9@:%._\\+~#=]{1,256}\\.[a-zA-Z0-9()]{1,6}\\b(?:[-a-zA-Z0-9()@:%_\\+.~#?&\\/=]*))" # noqa: E501
)
_MENTION_REGEX = re.compile(r"@[\d\w_.+-]+@[\d\w-]+\.[\d\w\-.]+")
class AutoLink(SpanToken):
parse_inner = False
precedence = 10
pattern = _URL_REGEX
def __init__(self, match_obj: re.Match) -> None:
self.target = match_obj.group()
def hashtagify(content: str) -> tuple[str, list[dict[str, str]]]:
tags = []
hashtags = re.findall(_HASHTAG_REGEX, content)
hashtags = sorted(set(hashtags), reverse=True) # unique tags, longest first
for hashtag in hashtags:
tag = hashtag[1:]
link = f'<a href="{BASE_URL}/t/{tag}" class="mention hashtag" rel="tag">#<span>{tag}</span></a>' # noqa: E501
tags.append(dict(href=f"{BASE_URL}/t/{tag}", name=hashtag, type="Hashtag"))
content = content.replace(hashtag, link)
return content, tags
class Mention(SpanToken):
parse_inner = False
precedence = 10
pattern = _MENTION_REGEX
def __init__(self, match_obj: re.Match) -> None:
self.target = match_obj.group()
class Hashtag(SpanToken):
parse_inner = False
precedence = 10
pattern = _HASHTAG_REGEX
def __init__(self, match_obj: re.Match) -> None:
self.target = match_obj.group()
class CustomRenderer(HTMLRenderer):
def __init__(
self,
mentioned_actors: dict[str, "Actor"] = {},
enable_mentionify: bool = True,
enable_hashtagify: bool = True,
) -> None:
extra_tokens = []
if enable_mentionify:
extra_tokens.append(Mention)
if enable_hashtagify:
extra_tokens.append(Hashtag)
super().__init__(AutoLink, *extra_tokens)
self.tags: list[dict[str, str]] = []
self.mentioned_actors = mentioned_actors
def render_auto_link(self, token: AutoLink) -> str:
template = '<a href="{target}" rel="noopener">{inner}</a>'
target = self.escape_url(token.target)
return template.format(target=target, inner=target)
def render_mention(self, token: Mention) -> str:
mention = token.target
actor = self.mentioned_actors.get(mention)
if not actor:
return mention
self.tags.append(dict(type="Mention", href=actor.ap_id, name=mention))
link = f'<span class="h-card"><a href="{actor.url}" class="u-url mention">{actor.handle}</a></span>' # noqa: E501
return link
def render_hashtag(self, token: Hashtag) -> str:
tag = token.target[1:]
link = f'<a href="{BASE_URL}/t/{tag.lower()}" class="mention hashtag" rel="tag">#<span>{tag}</span></a>' # noqa: E501
self.tags.append(
dict(
href=f"{BASE_URL}/t/{tag.lower()}",
name=token.target.lower(),
type="Hashtag",
)
)
return link
def render_block_code(self, token: typing.Any) -> str:
code = token.children[0].content
lexer = get_lexer(token.language) if token.language else guess_lexer(code)
return highlight(code, lexer, _FORMATTER)
async def _prefetch_mentioned_actors(
async def _mentionify(
db_session: AsyncSession,
content: str,
) -> dict[str, "Actor"]:
) -> tuple[str, list[dict[str, str]], list["Actor"]]:
from app import models
from app.actor import fetch_actor
actors = {}
tags = []
mentioned_actors = []
for mention in re.findall(_MENTION_REGEX, content):
if mention in actors:
continue
_, username, domain = mention.split("@")
actor = (
await db_session.execute(
@ -134,27 +63,12 @@ async def _prefetch_mentioned_actors(
continue
actor = await fetch_actor(db_session, actor_url)
actors[mention] = actor
mentioned_actors.append(actor)
tags.append(dict(type="Mention", href=actor.ap_id, name=mention))
return actors
def hashtagify(
content: str,
) -> tuple[str, list[dict[str, str]]]:
tags = []
with CustomRenderer(
mentioned_actors={},
enable_mentionify=False,
enable_hashtagify=True,
) as renderer:
rendered_content = renderer.render(Document(content))
tags.extend(renderer.tags)
# Handle custom emoji
tags.extend(emoji.tags(content))
return rendered_content, tags
link = f'<span class="h-card"><a href="{actor.url}" class="u-url mention">{actor.handle}</a></span>' # noqa: E501
content = content.replace(mention, link)
return content, tags, mentioned_actors
async def markdownify(
@ -168,33 +82,17 @@ async def markdownify(
"""
tags = []
mentioned_actors: dict[str, "Actor"] = {}
mentioned_actors: list["Actor"] = []
if enable_hashtagify:
content, hashtag_tags = hashtagify(content)
tags.extend(hashtag_tags)
if enable_mentionify:
mentioned_actors = await _prefetch_mentioned_actors(db_session, content)
with CustomRenderer(
mentioned_actors=mentioned_actors,
enable_mentionify=enable_mentionify,
enable_hashtagify=enable_hashtagify,
) as renderer:
rendered_content = renderer.render(Document(content))
tags.extend(renderer.tags)
content, mention_tags, mentioned_actors = await _mentionify(db_session, content)
tags.extend(mention_tags)
# Handle custom emoji
tags.extend(emoji.tags(content))
return rendered_content, tags, list(mentioned_actors.values())
content = markdown(content, extensions=["mdx_linkify", "fenced_code"])
def dedup_tags(tags: list[dict[str, str]]) -> list[dict[str, str]]:
idx = set()
deduped_tags = []
for tag in tags:
tag_idx = (tag["type"], tag["name"])
if tag_idx in idx:
continue
idx.add(tag_idx)
deduped_tags.append(tag)
return deduped_tags
return content, tags, mentioned_actors
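
For orientation, a hedged sketch (not part of the diff) of what the plain-regex variant of hashtagify() shown above returns for a short post; the exact HTML depends on BASE_URL, assumed here to be https://example.com.

content, tags = hashtagify("hello #fediverse")

# content: the post with the hashtag turned into a link, roughly
#   'hello <a href="https://example.com/t/fediverse" class="mention hashtag" rel="tag">#<span>fediverse</span></a>'
# tags: the matching ActivityPub tag objects
#   [{"href": "https://example.com/t/fediverse", "name": "#fediverse", "type": "Hashtag"}]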

View File

@ -12,16 +12,18 @@
{% for outbox_object in outbox %}
{% if outbox_object.ap_type == "Announce" %}
<div class="actor-action">You shared <span title="{{ outbox_object.ap_published_at.isoformat() }}">{{ outbox_object.ap_published_at | timeago }}</span></div>
<div class="actor-action">You shared</div>
{{ utils.display_object(outbox_object.relates_to_anybox_object) }}
{% elif outbox_object.ap_type == "Like" %}
<div class="actor-action">You liked <span title="{{ outbox_object.ap_published_at.isoformat() }}">{{ outbox_object.ap_published_at | timeago }}</span></div>
<div class="actor-action">You liked</div>
{{ utils.display_object(outbox_object.relates_to_anybox_object) }}
{% elif outbox_object.ap_type == "Follow" %}
<div class="actor-action">You followed <span title="{{ outbox_object.ap_published_at.isoformat() }}">{{ outbox_object.ap_published_at | timeago }}</span></div>
<div class="actor-action">You followed</div>
{{ utils.display_actor(outbox_object.relates_to_actor, actors_metadata) }}
{% elif outbox_object.ap_type in ["Article", "Note", "Video", "Question"] %}
{{ utils.display_object(outbox_object) }}
{% else %}
Implement {{ outbox_object.ap_type }}
{% endif %}
{% endfor %}

View File

@ -1,26 +0,0 @@
{%- import "utils.html" as utils with context -%}
{% extends "layout.html" %}
{% block head %}
<title>Interact from your instance</title>
{% endblock %}
{% block content %}
{% include "header.html" %}
<div class="box">
<h2>Interact with this object</h2>
</div>
{{ utils.display_object(outbox_object) }}
<div class="box">
<form class="form" action="{{ url_for("post_remote_interaction") }}" method="POST">
{{ utils.embed_csrf_token() }}
<input type="text" name="profile" placeholder="you@instance.tld" autofocus>
<input type="hidden" name="ap_id" value="{{ outbox_object.ap_id }}">
<input type="submit" value="interact from your instance">
</form>
</div>
{% endblock %}

View File

@ -219,9 +219,7 @@
{% if metadata.is_following %}
<li>already following</li>
<li>{{ admin_undo_button(metadata.outbox_follow_ap_id, "unfollow")}}</li>
{% if not with_details %}
<li>{{ admin_profile_button(actor.ap_id) }}</li>
{% endif %}
<li>{{ admin_profile_button(actor.ap_id) }}</li>
{% elif metadata.is_follow_request_sent %}
<li>follow request sent</li>
<li>{{ admin_undo_button(metadata.outbox_follow_ap_id, "undo follow") }}</li>
@ -233,7 +231,7 @@
{% if not metadata.is_following and not with_details %}
<li>{{ admin_profile_button(actor.ap_id) }}</li>
{% endif %}
{% elif actor.is_from_db and not with_details and not metadata.is_following %}
{% elif actor.is_from_db and not with_details %}
<li>{{ admin_profile_button(actor.ap_id) }}</li>
{% endif %}
{% if actor.moved_to %}
@ -263,9 +261,6 @@
<li>rejected</li>
{% endif %}
{% endif %}
{% if with_details %}
<li><a href="{{ actor.url }}" class="label-btn">remote profile</a></li>
{% endif %}
</ul>
</nav>
</div>
@ -343,13 +338,11 @@
{% elif attachment.type == "Audio" or (attachment | has_media_type("audio")) %}
<audio controls preload="metadata" src="{{ attachment.url | media_proxy_url }}"{% if attachment.name%} title="{{ attachment.name }}"{% endif %} class="attachment"></audio>
{% elif attachment.type == "Link" %}
<a href="{{ attachment.url }}" class="attachment">{{ attachment.url | truncate(64, True) }}</a> ({{ attachment.mimetype}})
<a href="{{ attachment.url }}" class="attachment">{{ attachment.url }}</a>
{% else %}
<a href="{{ attachment.url | media_proxy_url }}"{% if attachment.name %} title="{{ attachment.url }}"{% endif %} class="attachment">
{% if attachment.name %}{{ attachment.name }}{% else %}{{ attachment.url | truncate(64, True) }}{% endif %}
</a> ({{ attachment.mimetype }})
<a href="{{ attachment.url | media_proxy_url }}"{% if attachment.name %} title="{{ attachment.name }}"{% endif %} class="attachment">{{ attachment.url }}</a>
{% endif %}
{% if object.sensitive and (attachment.type == "Image" or (attachment | has_media_type("image")) or attachment.type == "Video" or (attachment | has_media_type("video"))) %}
{% if object.sensitive %}
</div>
</div>
</div>
@ -376,9 +369,9 @@
{% endif %}
{% if object.in_reply_to %}
<p class="in-reply-to">in reply to <a href="{% if is_admin and object.is_in_reply_to_from_inbox %}{{ url_for("get_lookup") }}?query={% endif %}{{ object.in_reply_to }}" title="{{ object.in_reply_to }}" rel="nofollow">
this {{ object.ap_type|lower }}
</a></p>
<a href="{% if is_admin and object.is_in_reply_to_from_inbox %}{{ url_for("get_lookup") }}?query={% endif %}{{ object.in_reply_to }}" title="{{ object.in_reply_to }}" class="in-reply-to" rel="nofollow">
in reply to {{ object.in_reply_to|truncate(64, True) }}
</a>
{% endif %}
{% if object.ap_type == "Article" %}
@ -402,6 +395,13 @@
{{ object.content | clean_html(object) | safe }}
</div>
{% if object.quoted_object %}
<div class="ap-object-expanded ap-quoted-object">
{{ display_object(object.quoted_object) }}
</div>
{% endif %}
{% if object.ap_type == "Question" %}
{% set can_vote = is_admin and object.is_from_inbox and not object.is_poll_ended and not object.voted_for_answers %}
{% if can_vote %}
@ -468,16 +468,6 @@
<li>
<div><a href="{{ object.url }}"{% if object.is_from_inbox %} rel="nofollow"{% endif %} class="object-permalink u-url u-uid">permalink</a></div>
</li>
{% if object.is_from_outbox and is_object_page and not is_admin and not request.url.path.startswith("/remote_interaction") %}
<li>
<a class="label-btn" href="{{ request.url_for("remote_interaction") }}?ap_id={{ object.ap_id }}">
interact from your instance
</a>
</li>
{% endif %}
{% if not is_article_mode %}
<li>
<time class="dt-published" datetime="{{ object.ap_published_at.replace(microsecond=0).isoformat() }}" title="{{ object.ap_published_at.replace(microsecond=0).isoformat() }}">{{ object.ap_published_at | timeago }}</time>
@ -573,7 +563,7 @@
{% if object.visibility in [visibility_enum.PUBLIC, visibility_enum.UNLISTED] %}
<li>
{% if object.announced_via_outbox_object_ap_id %}
{{ admin_undo_button(object.announced_via_outbox_object_ap_id, "unshare") }}
{{ admin_undo_button(object.liked_via_outbox_object_ap_id, "unshare") }}
{% else %}
{{ admin_announce_button(object.ap_id, permalink_id=object.permalink_id) }}
{% endif %}

View File

@ -9,7 +9,6 @@ from bs4 import BeautifulSoup # type: ignore
from loguru import logger
from pydantic import BaseModel
from app import activitypub as ap
from app import ap_object
from app import config
from app.actor import LOCAL_ACTOR
@ -67,15 +66,11 @@ async def external_urls(
tags_hrefs = set()
for tag in ro.tags:
if tag_href := tag.get("href"):
tags_hrefs.add(tag_href)
if tag_href and tag_href not in filter(None, [ro.quote_url]):
tags_hrefs.add(tag_href)
if tag.get("type") == "Mention":
if tag["href"] != LOCAL_ACTOR.ap_id:
try:
mentioned_actor = await fetch_actor(db_session, tag["href"])
except (ap.FetchError, ap.NotAnObjectError):
tags_hrefs.add(tag["href"])
continue
mentioned_actor = await fetch_actor(db_session, tag["href"])
tags_hrefs.add(mentioned_actor.url)
tags_hrefs.add(mentioned_actor.ap_id)
else:
@ -90,22 +85,18 @@ async def external_urls(
if not h:
continue
try:
ph = urlparse(h)
mimetype, _ = mimetypes.guess_type(h)
if (
ph.scheme in {"http", "https"}
and ph.hostname != note_host
and is_url_valid(h)
and (
not mimetype
or mimetype.split("/")[0] not in ["image", "video", "audio"]
)
):
urls.add(h)
except Exception:
logger.exception(f"Failed to check {h}")
continue
ph = urlparse(h)
mimetype, _ = mimetypes.guess_type(h)
if (
ph.scheme in {"http", "https"}
and ph.hostname != note_host
and is_url_valid(h)
and (
not mimetype
or mimetype.split("/")[0] not in ["image", "video", "audio"]
)
):
urls.add(h)
return urls - tags_hrefs

View File

@ -58,10 +58,6 @@ def is_url_valid(url: str) -> bool:
logger.warning(f"{parsed.hostname} is blocked")
return False
if parsed.hostname.endswith(".onion"):
logger.warning(f"{url} is an onion service")
return False
ip_address = _getaddrinfo(
parsed.hostname, parsed.port or (80 if parsed.scheme == "http" else 443)
)

poetry.lock (generated)
View File

@ -197,7 +197,7 @@ python-versions = "~=3.7"
[[package]]
name = "certifi"
version = "2022.9.24"
version = "2022.9.14"
description = "Python package for providing Mozilla's CA Bundle."
category = "main"
optional = false
@ -286,11 +286,11 @@ doc = ["sphinx", "sphinx-rtd-theme", "sphinxcontrib-spelling"]
[[package]]
name = "faker"
version = "15.0.0"
version = "14.2.0"
description = "Faker is a Python package that generates fake data for you."
category = "dev"
optional = false
python-versions = ">=3.7"
python-versions = ">=3.6"
[package.dependencies]
python-dateutil = ">=2.4"
@ -463,7 +463,7 @@ socks = ["socksio (>=1.0.0,<2.0.0)"]
[[package]]
name = "humanize"
version = "4.4.0"
version = "4.3.0"
description = "Python humanize utilities"
category = "main"
optional = false
@ -582,7 +582,7 @@ source = ["Cython (>=0.29.7)"]
[[package]]
name = "mako"
version = "1.2.3"
version = "1.2.2"
description = "A super-fast templating language that borrows the best ideas from the existing templating languages."
category = "main"
optional = false
@ -648,14 +648,6 @@ BeautifulSoup4 = ">=4.6.0"
html5lib = ">=1.0.1"
requests = ">=2.18.4"
[[package]]
name = "mistletoe"
version = "0.9.0"
description = "A fast, extensible Markdown parser in pure Python."
category = "main"
optional = false
python-versions = "~=3.5"
[[package]]
name = "mypy"
version = "0.960"
@ -1126,7 +1118,7 @@ python-versions = "*"
[[package]]
name = "types-pillow"
version = "9.2.2"
version = "9.2.1"
description = "Typing stubs for Pillow"
category = "dev"
optional = false
@ -1142,7 +1134,7 @@ python-versions = "*"
[[package]]
name = "types-requests"
version = "2.28.11"
version = "2.28.10"
description = "Typing stubs for requests"
category = "dev"
optional = false
@ -1161,7 +1153,7 @@ python-versions = "*"
[[package]]
name = "types-urllib3"
version = "1.26.25"
version = "1.26.24"
description = "Typing stubs for urllib3"
category = "dev"
optional = false
@ -1283,7 +1275,7 @@ dev = ["pytest (>=4.6.2)", "black (>=19.3b0)"]
[metadata]
lock-version = "1.1"
python-versions = "^3.10"
content-hash = "bc8585a0da6f4d4e54afafde1da287ed75ed6544981d11bba561a7678bc31b8f"
content-hash = "84b3a6dcfc055fb0712c6abbf1bf94d9526eda940c4ddb0bd275664e68a4c3e3"
[metadata.files]
aiosqlite = [
@ -1437,8 +1429,8 @@ cachetools = [
{file = "cachetools-5.2.0.tar.gz", hash = "sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757"},
]
certifi = [
{file = "certifi-2022.9.24-py3-none-any.whl", hash = "sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382"},
{file = "certifi-2022.9.24.tar.gz", hash = "sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14"},
{file = "certifi-2022.9.14-py3-none-any.whl", hash = "sha256:e232343de1ab72c2aa521b625c80f699e356830fd0e2c620b465b304b17b0516"},
{file = "certifi-2022.9.14.tar.gz", hash = "sha256:36973885b9542e6bd01dea287b2b4b3b21236307c56324fcc3f1160f2d655ed5"},
]
cffi = [
{file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"},
@ -1530,8 +1522,8 @@ factory-boy = [
{file = "factory_boy-3.2.1.tar.gz", hash = "sha256:a98d277b0c047c75eb6e4ab8508a7f81fb03d2cb21986f627913546ef7a2a55e"},
]
faker = [
{file = "Faker-15.0.0-py3-none-any.whl", hash = "sha256:84c83f0ac1a2c8ecabd784c501aa0ef1d082d4aee52c3d797d586081c166434c"},
{file = "Faker-15.0.0.tar.gz", hash = "sha256:245fc7d23470dc57164bd9a59b7b1126e16289ffcf813d88a6c8e9b8a37ea3fb"},
{file = "Faker-14.2.0-py3-none-any.whl", hash = "sha256:e02c55a5b0586caaf913cc6c254b3de178e08b031c5922e590fd033ebbdbfd02"},
{file = "Faker-14.2.0.tar.gz", hash = "sha256:6db56e2c43a2b74250d1c332ef25fef7dc07dcb6c5fab5329dd7b4467b8ed7b9"},
]
fastapi = [
{file = "fastapi-0.78.0-py3-none-any.whl", hash = "sha256:15fcabd5c78c266fa7ae7d8de9b384bfc2375ee0503463a6febbe3bab69d6f65"},
@ -1667,8 +1659,8 @@ httpx = [
{file = "httpx-0.23.0.tar.gz", hash = "sha256:f28eac771ec9eb4866d3fb4ab65abd42d38c424739e80c08d8d20570de60b0ef"},
]
humanize = [
{file = "humanize-4.4.0-py3-none-any.whl", hash = "sha256:8830ebf2d65d0395c1bd4c79189ad71e023f277c2c7ae00f263124432e6f2ffa"},
{file = "humanize-4.4.0.tar.gz", hash = "sha256:efb2584565cc86b7ea87a977a15066de34cdedaf341b11c851cfcfd2b964779c"},
{file = "humanize-4.3.0-py3-none-any.whl", hash = "sha256:5dd159c9910cd57b94072e4d7decae097f0eb84c4645153706929a7f127cb2ef"},
{file = "humanize-4.3.0.tar.gz", hash = "sha256:0dfac79fe8c1c0c734c14177b07b857bad9ae30dd50daa0a14e2c3d8054ee0c4"},
]
hyperframe = []
idna = [
@ -1784,8 +1776,8 @@ lxml = [
{file = "lxml-4.9.1.tar.gz", hash = "sha256:fe749b052bb7233fe5d072fcb549221a8cb1a16725c47c37e42b0b9cb3ff2c3f"},
]
mako = [
{file = "Mako-1.2.3-py3-none-any.whl", hash = "sha256:c413a086e38cd885088d5e165305ee8eed04e8b3f8f62df343480da0a385735f"},
{file = "Mako-1.2.3.tar.gz", hash = "sha256:7fde96466fcfeedb0eed94f187f20b23d85e4cb41444be0e542e2c8c65c396cd"},
{file = "Mako-1.2.2-py3-none-any.whl", hash = "sha256:8efcb8004681b5f71d09c983ad5a9e6f5c40601a6ec469148753292abc0da534"},
{file = "Mako-1.2.2.tar.gz", hash = "sha256:3724869b363ba630a272a5f89f68c070352137b8fd1757650017b7e06fda163f"},
]
markdown = []
markupsafe = [
@ -1840,10 +1832,6 @@ mdx-linkify = [
mf2py = [
{file = "mf2py-1.1.2.tar.gz", hash = "sha256:84f1f8f2ff3f1deb1c30be497e7ccd805452996a662fd4a77f09e0105bede2c9"},
]
mistletoe = [
{file = "mistletoe-0.9.0-py3-none-any.whl", hash = "sha256:11316e2fe0be422a8248293ad0efbee9ad0c6f3683b2f45bc6b989ea17a68c74"},
{file = "mistletoe-0.9.0.tar.gz", hash = "sha256:3cb96d78226d08f0d3bf09efcaf330d23902492006e18b2c06558e8b86bf7faf"},
]
mypy = [
{file = "mypy-0.960-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3a3e525cd76c2c4f90f1449fd034ba21fcca68050ff7c8397bb7dd25dd8b8248"},
{file = "mypy-0.960-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7a76dc4f91e92db119b1be293892df8379b08fd31795bb44e0ff84256d34c251"},
@ -2205,18 +2193,18 @@ types-markdown = [
{file = "types_Markdown-3.4.2-py3-none-any.whl", hash = "sha256:7dd9fe8bd2645ec984f438b255f42b795507b5cd2a998cc1970ec13f443dc832"},
]
types-pillow = [
{file = "types-Pillow-9.2.2.tar.gz", hash = "sha256:6b525b0951ada076f3aefe2347e0bff6231283ce959ad8577a4416604acc673c"},
{file = "types_Pillow-9.2.2-py3-none-any.whl", hash = "sha256:bfe14afa1e9047a52b3dc326c93b4f57db72641794e194f11b511f68b0061814"},
{file = "types-Pillow-9.2.1.tar.gz", hash = "sha256:9781104ee2176f680576523fa2a2b83b134957aec6f4d62582cc9e74c93a60b4"},
{file = "types_Pillow-9.2.1-py3-none-any.whl", hash = "sha256:d63743ef631e47f8d8669590ea976162321a9a7604588b424b6306533453fb63"},
]
types-python-dateutil = []
types-requests = [
{file = "types-requests-2.28.11.tar.gz", hash = "sha256:7ee827eb8ce611b02b5117cfec5da6455365b6a575f5e3ff19f655ba603e6b4e"},
{file = "types_requests-2.28.11-py3-none-any.whl", hash = "sha256:af5f55e803cabcfb836dad752bd6d8a0fc8ef1cd84243061c0e27dee04ccf4fd"},
{file = "types-requests-2.28.10.tar.gz", hash = "sha256:97d8f40aa1ffe1e58c3726c77d63c182daea9a72d9f1fa2cafdea756b2a19f2c"},
{file = "types_requests-2.28.10-py3-none-any.whl", hash = "sha256:45b485725ed58752f2b23461252f1c1ad9205b884a1e35f786bb295525a3e16a"},
]
types-tabulate = []
types-urllib3 = [
{file = "types-urllib3-1.26.25.tar.gz", hash = "sha256:5aef0e663724eef924afa8b320b62ffef2c1736c1fa6caecfc9bc6c8ae2c3def"},
{file = "types_urllib3-1.26.25-py3-none-any.whl", hash = "sha256:c1d78cef7bd581e162e46c20a57b2e1aa6ebecdcf01fd0713bb90978ff3e3427"},
{file = "types-urllib3-1.26.24.tar.gz", hash = "sha256:a1b3aaea7dda3eb1b51699ee723aadd235488e4dc4648e030f09bc429ecff42f"},
{file = "types_urllib3-1.26.24-py3-none-any.whl", hash = "sha256:cf7918503d02d3576e503bbfb419b0e047c4617653bba09624756ab7175e15c9"},
]
typing-extensions = [
{file = "typing_extensions-4.3.0-py3-none-any.whl", hash = "sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02"},

View File

@ -45,7 +45,6 @@ boussole = "^2.0.0"
uvicorn = {extras = ["standard"], version = "^0.18.3"}
Brotli = "^1.0.9"
greenlet = "^1.1.3"
mistletoe = "^0.9.0"
[tool.poetry.dev-dependencies]
black = "^22.3.0"

View File

@ -179,7 +179,7 @@ def test_send_create_activity__with_attachment(
outbox_object = db.execute(select(models.OutboxObject)).scalar_one()
assert outbox_object.ap_type == "Note"
assert outbox_object.summary is None
assert outbox_object.content == "<p>hello</p>\n"
assert outbox_object.content == "<p>hello</p>"
assert len(outbox_object.attachments) == 1
attachment = outbox_object.attachments[0]
assert attachment.type == "Document"
@ -227,7 +227,7 @@ def test_send_create_activity__no_content_with_cw_and_attachments(
outbox_object = db.execute(select(models.OutboxObject)).scalar_one()
assert outbox_object.ap_type == "Note"
assert outbox_object.summary is None
assert outbox_object.content == "<p>cw</p>\n"
assert outbox_object.content == "<p>cw</p>"
assert len(outbox_object.attachments) == 1