mirror of https://git.sr.ht/~tsileo/microblog.pub synced 2025-06-05 21:59:23 +02:00

25 Commits

Author SHA1 Message Date
b5b56e9ed5 More actor refresh improvements 2022-10-09 11:36:00 +02:00
9a36b0edf5 Fix conversation processing 2022-10-07 19:50:14 +02:00
20f996d165 Tweak HTTP sig handling 2022-10-07 19:00:18 +02:00
602da69083 Support actor refresh while checking HTTP sig 2022-10-07 12:05:28 +02:00
f6cfe06f66 Force refresh actor once in a while 2022-10-07 08:55:05 +02:00
c8a9793638 Make hashtag case insensitive 2022-10-05 20:27:21 +02:00
5eaa0f291b More Markdown improvements 2022-10-05 20:05:16 +02:00
881d0ad899 Switch Markdown parser 2022-10-04 20:26:01 +02:00
5a20b9d23a More CSS tweaks for the in reply to section 2022-10-03 20:05:06 +02:00
919a61f75d Tweak in reply to link 2022-10-03 19:21:08 +02:00
7faa4655f8 Make 'in reply to' more user-friendly by hiding the URL behind object type 2022-10-03 19:12:28 +02:00
cf6a891349 Improve/fix non-media attachment display 2022-09-30 09:07:07 +02:00
58b383ba4e Don't try to contact onion services 2022-09-29 09:16:35 +02:00
57fc5ef913 Improve OG meta processing 2022-09-29 09:10:05 +02:00
5348398b23 Update deps 2022-09-29 08:42:53 +02:00
572a84b4bd Fix/imprive Undo support 2022-09-29 08:41:24 +02:00
992cd55d7b Tweak processing 2022-09-26 21:41:34 +02:00
6216b316e8 Add remote interaction button 2022-09-23 20:09:05 +02:00
96eae971b8 Prevent processing duplicate objects 2022-09-23 09:13:59 +02:00
928bdafeea Tweak Create processing for CacheFile 2022-09-23 09:01:50 +02:00
dc89aeb70b Fix permalink 2022-09-23 09:00:23 +02:00
25d3daa6d2 Improve inbox delete side effects 2022-09-22 19:56:36 +02:00
715df3c563 Update deps 2022-09-21 21:01:37 +02:00
cb5d21baeb More admin profile related tweaks 2022-09-21 21:00:17 +02:00
8d0b5d1114 Fix double profile button in the admin 2022-09-21 19:35:48 +02:00
16 changed files with 529 additions and 167 deletions

View File

@@ -6,7 +6,6 @@ from typing import Any
 import httpx
 from loguru import logger
-from markdown import markdown

 from app import config
 from app.config import ALSO_KNOWN_AS
@@ -14,6 +13,7 @@ from app.config import AP_CONTENT_TYPE  # noqa: F401
 from app.config import MOVED_TO
 from app.httpsig import auth
 from app.key import get_pubkey_as_pem
+from app.source import dedup_tags
 from app.source import hashtagify
 from app.utils.url import check_url
@@ -53,15 +53,26 @@ AS_EXTENDED_CTX = [
 ]


-class ObjectIsGoneError(Exception):
+class FetchError(Exception):
+    def __init__(self, url: str, resp: httpx.Response | None = None) -> None:
+        resp_part = ""
+        if resp:
+            resp_part = f", got HTTP {resp.status_code}: {resp.text}"
+        message = f"Failed to fetch {url}{resp_part}"
+        super().__init__(message)
+        self.resp = resp
+        self.url = url
+
+
+class ObjectIsGoneError(FetchError):
     pass


-class ObjectNotFoundError(Exception):
+class ObjectNotFoundError(FetchError):
     pass


-class ObjectUnavailableError(Exception):
+class ObjectUnavailableError(FetchError):
     pass
@@ -90,6 +101,19 @@ class VisibilityEnum(str, enum.Enum):
 _LOCAL_ACTOR_SUMMARY, _LOCAL_ACTOR_TAGS = hashtagify(config.CONFIG.summary)

+_LOCAL_ACTOR_METADATA = []
+if config.CONFIG.metadata:
+    for kv in config.CONFIG.metadata:
+        kv_value, kv_tags = hashtagify(kv.value)
+        _LOCAL_ACTOR_METADATA.append(
+            {
+                "name": kv.key,
+                "type": "PropertyValue",
+                "value": kv_value,
+            }
+        )
+        _LOCAL_ACTOR_TAGS.extend(kv_tags)

 ME = {
     "@context": AS_EXTENDED_CTX,
@@ -102,7 +126,7 @@ ME = {
     "outbox": config.BASE_URL + "/outbox",
     "preferredUsername": config.USERNAME,
     "name": config.CONFIG.name,
-    "summary": markdown(_LOCAL_ACTOR_SUMMARY, extensions=["mdx_linkify"]),
+    "summary": _LOCAL_ACTOR_SUMMARY,
     "endpoints": {
         # For compat with servers expecting a sharedInbox...
         "sharedInbox": config.BASE_URL
@@ -110,16 +134,7 @@ ME = {
     },
     "url": config.ID + "/",  # XXX: the path is important for Mastodon compat
     "manuallyApprovesFollowers": config.CONFIG.manually_approves_followers,
-    "attachment": [
-        {
-            "name": kv.key,
-            "type": "PropertyValue",
-            "value": markdown(kv.value, extensions=["mdx_linkify", "fenced_code"]),
-        }
-        for kv in config.CONFIG.metadata
-    ]
-    if config.CONFIG.metadata
-    else [],
+    "attachment": _LOCAL_ACTOR_METADATA,
     "icon": {
         "mediaType": mimetypes.guess_type(config.CONFIG.icon_url)[0],
         "type": "Image",
@@ -130,7 +145,7 @@ ME = {
         "owner": config.ID,
         "publicKeyPem": get_pubkey_as_pem(config.KEY_PATH),
     },
-    "tag": _LOCAL_ACTOR_TAGS,
+    "tag": dedup_tags(_LOCAL_ACTOR_TAGS),
 }

 if ALSO_KNOWN_AS:
@@ -170,13 +185,17 @@ async def fetch(
     # Special handling for deleted object
     if resp.status_code == 410:
-        raise ObjectIsGoneError(f"{url} is gone")
+        raise ObjectIsGoneError(url, resp)
     elif resp.status_code in [401, 403]:
-        raise ObjectUnavailableError(f"not allowed to fetch {url}")
+        raise ObjectUnavailableError(url, resp)
     elif resp.status_code == 404:
-        raise ObjectNotFoundError(f"{url} not found")
+        raise ObjectNotFoundError(url, resp)

-    resp.raise_for_status()
+    try:
+        resp.raise_for_status()
+    except httpx.HTTPError as http_error:
+        raise FetchError(url, resp) from http_error

     try:
         return resp.json()
     except json.JSONDecodeError:
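With the new `FetchError` base class, callers no longer need to enumerate the 410/404/403 cases or catch raw httpx errors separately. A minimal sketch of the calling pattern under that hierarchy (the `fetch_json` and `fetch_or_none` helpers below are illustrative stand-ins, not the project's API):

import httpx


class FetchError(Exception):
    def __init__(self, url: str, resp: httpx.Response | None = None) -> None:
        resp_part = f", got HTTP {resp.status_code}" if resp is not None else ""
        super().__init__(f"Failed to fetch {url}{resp_part}")
        self.url = url
        self.resp = resp


class ObjectIsGoneError(FetchError):
    pass


async def fetch_json(url: str) -> dict:
    async with httpx.AsyncClient() as client:
        resp = await client.get(url, headers={"Accept": "application/activity+json"})
    if resp.status_code == 410:
        raise ObjectIsGoneError(url, resp)
    try:
        resp.raise_for_status()
    except httpx.HTTPError as http_error:
        raise FetchError(url, resp) from http_error
    return resp.json()


async def fetch_or_none(url: str) -> dict | None:
    try:
        return await fetch_json(url)
    except FetchError:
        # Gone/not-found/unavailable and plain HTTP failures all land here
        return None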

View File

@@ -1,6 +1,7 @@
 import hashlib
 import typing
 from dataclasses import dataclass
+from datetime import timedelta
 from functools import cached_property
 from typing import Union
 from urllib.parse import urlparse
@@ -12,6 +13,8 @@ from sqlalchemy.orm import joinedload
 from app import activitypub as ap
 from app import media
 from app.database import AsyncSession
+from app.utils.datetime import as_utc
+from app.utils.datetime import now

 if typing.TYPE_CHECKING:
     from app.models import Actor as ActorModel
@@ -189,8 +192,26 @@ async def fetch_actor(
     if existing_actor:
         if existing_actor.is_deleted:
             raise ap.ObjectNotFoundError(f"{actor_id} was deleted")

+        if now() - as_utc(existing_actor.updated_at) > timedelta(hours=24):
+            logger.info(
+                f"Refreshing {actor_id=} last updated {existing_actor.updated_at}"
+            )
+            try:
+                ap_actor = await ap.fetch(actor_id)
+                await update_actor_if_needed(
+                    db_session,
+                    existing_actor,
+                    RemoteActor(ap_actor),
+                )
+                return existing_actor
+            except Exception:
+                logger.exception(f"Failed to refresh {actor_id}")
+                # If we fail to refresh the actor, return the cached one
+                return existing_actor
-        return existing_actor
+        else:
+            return existing_actor
     else:
         if save_if_not_found:
             ap_actor = await ap.fetch(actor_id)
             # Some software uses a URL where we expect an ID
@@ -204,11 +225,31 @@ async def fetch_actor(
                 )
             ).one_or_none()
             if existing_actor_by_url:
+                # Update the actor as we had to fetch it anyway
+                await update_actor_if_needed(
+                    db_session,
+                    existing_actor_by_url,
+                    RemoteActor(ap_actor),
+                )
                 return existing_actor_by_url

             return await save_actor(db_session, ap_actor)
         else:
-            raise ap.ObjectNotFoundError
+            raise ap.ObjectNotFoundError(actor_id)


+async def update_actor_if_needed(
+    db_session: AsyncSession,
+    actor_in_db: "ActorModel",
+    ra: RemoteActor,
+) -> None:
+    # Check if we actually need to update the actor in the DB
+    if _actor_hash(ra) != _actor_hash(actor_in_db):
+        actor_in_db.ap_actor = ra.ap_actor
+        actor_in_db.handle = ra.handle
+        actor_in_db.ap_type = ra.ap_type
+        actor_in_db.updated_at = now()
+        await db_session.flush()


 @dataclass
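The refresh gate is a plain age check on `updated_at`: cached actors older than 24 hours are re-fetched, and any failure falls back to the cached copy. A standalone sketch of the same staleness test (function names here are illustrative, not the project's API):

from datetime import datetime, timedelta, timezone


def now() -> datetime:
    return datetime.now(timezone.utc)


def as_utc(dt: datetime) -> datetime:
    # SQLite returns naive datetimes; pin them to UTC before comparing.
    return dt.replace(tzinfo=timezone.utc) if dt.tzinfo is None else dt


def needs_refresh(updated_at: datetime, max_age: timedelta = timedelta(hours=24)) -> bool:
    # Refresh a cached actor at most once per max_age window.
    return now() - as_utc(updated_at) > max_age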

View File

@@ -1,4 +1,5 @@
 import hashlib
+import mimetypes
 from datetime import datetime
 from functools import cached_property
 from typing import Any
@@ -155,7 +156,7 @@ class Object:
     @cached_property
     def url(self) -> str | None:
         obj_url = self.ap_object.get("url")
-        if isinstance(obj_url, str):
+        if isinstance(obj_url, str) and obj_url:
             return obj_url
         elif obj_url:
             for u in ap.as_list(obj_url):
@@ -276,6 +277,17 @@ class Attachment(BaseModel):
     proxied_url: str | None = None
     resized_url: str | None = None

+    @property
+    def mimetype(self) -> str:
+        mimetype = self.media_type
+        if not mimetype:
+            mimetype, _ = mimetypes.guess_type(self.url)
+        if not mimetype:
+            return "unknown"
+        return mimetype.split("/")[-1]
+

 class RemoteObject(Object):
     def __init__(self, raw_object: ap.RawObject, actor: Actor):
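The new `mimetype` property prefers the `mediaType` advertised by the remote server and only falls back to guessing from the URL's file extension. A minimal sketch of the same fallback, independent of the project's models:

import mimetypes


def attachment_mimetype(media_type: str | None, url: str) -> str:
    # Prefer the declared media type, fall back to the file extension,
    # and keep only the subtype for display (e.g. "mp4", "pdf").
    mimetype = media_type
    if not mimetype:
        mimetype, _ = mimetypes.guess_type(url)
    if not mimetype:
        return "unknown"
    return mimetype.split("/")[-1]


print(attachment_mimetype(None, "https://example.com/video.mp4"))  # mp4
print(attachment_mimetype("application/pdf", "https://example.com/doc"))  # pdf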

View File

@@ -24,6 +24,7 @@ from app.actor import Actor
 from app.actor import RemoteActor
 from app.actor import fetch_actor
 from app.actor import save_actor
+from app.actor import update_actor_if_needed
 from app.ap_object import RemoteObject
 from app.config import BASE_URL
 from app.config import BLOCKED_SERVERS
@@ -32,6 +33,7 @@ from app.config import MANUALLY_APPROVES_FOLLOWERS
 from app.config import set_moved_to
 from app.database import AsyncSession
 from app.outgoing_activities import new_outgoing_activity
+from app.source import dedup_tags
 from app.source import markdownify
 from app.uploads import upload_to_attachment
 from app.utils import opengraph
@@ -288,6 +290,7 @@ async def _send_undo(db_session: AsyncSession, ap_object_id: str) -> None:
         raise ValueError("Should never happen")

     outbox_object_to_undo.undone_by_outbox_object_id = outbox_object.id
+    outbox_object_to_undo.is_deleted = True

     if outbox_object_to_undo.ap_type == "Follow":
         if not outbox_object_to_undo.activity_object_ap_id:
@@ -346,6 +349,7 @@ async def fetch_conversation_root(
     db_session: AsyncSession,
     obj: AnyboxObject | RemoteObject,
     is_root: bool = False,
+    depth: int = 0,
 ) -> str:
     """Some software (like Misskey) does not set the context/conversation field.
     This means we have to track conversations ourselves. To do so, we fetch
@@ -353,10 +357,11 @@ async def fetch_conversation_root(
     - use the context field if set
     - or build a custom conversation ID
     """
-    if not obj.in_reply_to or is_root:
-        if obj.ap_context:
-            return obj.ap_context
-        else:
+    logger.info(f"Fetching convo root for ap_id={obj.ap_id}/{depth=}")
+    if obj.ap_context:
+        return obj.ap_context
+
+    if not obj.in_reply_to or is_root or depth > 10:
         # Use the root AP ID if there's no context
         return f"microblogpub:root:{obj.ap_id}"
     else:
@@ -371,20 +376,24 @@ async def fetch_conversation_root(
            )
            in_reply_to_object = RemoteObject(raw_reply, actor=raw_reply_actor)
        except (
-           ap.ObjectNotFoundError,
-           ap.ObjectIsGoneError,
+           ap.FetchError,
            ap.NotAnObjectError,
-           ap.ObjectUnavailableError,
        ):
-           return await fetch_conversation_root(db_session, obj, is_root=True)
+           return await fetch_conversation_root(
+               db_session, obj, is_root=True, depth=depth + 1
+           )
        except httpx.HTTPStatusError as http_status_error:
            if 400 <= http_status_error.response.status_code < 500:
                # We may not have access, in this case consider it the root
-               return await fetch_conversation_root(db_session, obj, is_root=True)
+               return await fetch_conversation_root(
+                   db_session, obj, is_root=True, depth=depth + 1
+               )
            else:
                raise

-       return await fetch_conversation_root(db_session, in_reply_to_object)
+       return await fetch_conversation_root(
+           db_session, in_reply_to_object, depth=depth + 1
+       )


 async def send_move(
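The new `depth` parameter bounds the reply-chain walk so a very long (or adversarial) thread cannot trigger unbounded recursion; after ten hops the current object is simply treated as the conversation root. A self-contained sketch of the same guard (the `Note` dataclass is a simplified stand-in for the project's objects):

from dataclasses import dataclass


@dataclass
class Note:
    ap_id: str
    in_reply_to: "Note | None" = None


def conversation_root(note: Note, depth: int = 0, max_depth: int = 10) -> str:
    # Walk up the reply chain, but give up after max_depth hops and
    # use the current object as the conversation root.
    if note.in_reply_to is None or depth > max_depth:
        return f"microblogpub:root:{note.ap_id}"
    return conversation_root(note.in_reply_to, depth=depth + 1)


root = Note("https://example.com/notes/1")
reply = Note("https://example.com/notes/2", in_reply_to=root)
print(conversation_root(reply))  # microblogpub:root:https://example.com/notes/1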
@@ -543,7 +552,7 @@ async def send_create(
         "context": context,
         "conversation": context,
         "url": outbox_object_id(note_id),
-        "tag": tags,
+        "tag": dedup_tags(tags),
         "summary": content_warning,
         "inReplyTo": in_reply_to,
         "sensitive": is_sensitive,
@@ -563,7 +572,7 @@ async def send_create(
     for tag in tags:
         if tag["type"] == "Hashtag":
             tagged_object = models.TaggedOutboxObject(
-                tag=tag["name"][1:],
+                tag=tag["name"][1:].lower(),
                 outbox_object_id=outbox_object.id,
             )
             db_session.add(tagged_object)
@@ -1069,7 +1078,17 @@ async def _revert_side_effect_for_deleted_object(
 ) -> None:
     is_delete_needs_to_be_forwarded = False

-    # Decrement the replies counter if needed
+    # Delete related notifications
+    notif_deletion_result = await db_session.execute(
+        delete(models.Notification)
+        .where(models.Notification.inbox_object_id == deleted_ap_object.id)
+        .execution_options(synchronize_session=False)
+    )
+    logger.info(
+        f"Deleted {notif_deletion_result.rowcount} notifications"  # type: ignore
+    )
+
+    # Decrement/refresh the replies counter if needed
     if deleted_ap_object.in_reply_to:
         replied_object = await get_anybox_object_by_ap_id(
             db_session,
@@ -1481,7 +1500,7 @@ async def _handle_update_activity(
         )

         # Update the actor
-        from_actor.ap_actor = updated_actor.ap_actor
+        await update_actor_if_needed(db_session, from_actor, updated_actor)
     elif (ap_type := wrapped_object["type"]) in [
         "Question",
         "Note",
@@ -1504,6 +1523,7 @@ async def _handle_update_activity(
             # Everything looks correct, update the object in the inbox
             logger.info(f"Updating {existing_object.ap_id}")
             existing_object.ap_object = wrapped_object
+            existing_object.updated_at = now()
     else:
         # TODO(ts): support updating objects
         logger.info(f'Cannot update {wrapped_object["type"]}')
@@ -1514,8 +1534,24 @@ async def _handle_create_activity(
     from_actor: models.Actor,
     create_activity: models.InboxObject,
     forwarded_by_actor: models.Actor | None = None,
+    relates_to_inbox_object: models.InboxObject | None = None,
 ) -> None:
     logger.info("Processing Create activity")
+
+    # Some PeerTube activities make no sense to process
+    if (
+        ap_object_type := ap.as_list(
+            (await ap.get_object(create_activity.ap_object))["type"]
+        )[0]
+    ) in ["CacheFile"]:
+        logger.info(f"Dropping Create activity for {ap_object_type} object")
+        await db_session.delete(create_activity)
+        return None
+
+    if relates_to_inbox_object:
+        logger.warning(f"{relates_to_inbox_object.ap_id} is already in the inbox")
+        return None
+
     wrapped_object = ap.unwrap_activity(create_activity.ap_object)
     if create_activity.actor.ap_id != ap.get_actor_id(wrapped_object):
         raise ValueError("Object actor does not match activity")
@@ -1566,6 +1602,14 @@ async def _handle_read_activity(
     if not wrapped_object_actor.is_blocked:
         ro = RemoteObject(wrapped_object, actor=wrapped_object_actor)

+        # Check if we already know about this object
+        if await get_inbox_object_by_ap_id(
+            db_session,
+            ro.ap_id,
+        ):
+            logger.info(f"{ro.ap_id} is already in the inbox, skipping processing")
+            return None
+
         # Then process it like it's coming from a forwarded activity
         await _process_note_object(db_session, read_activity, wrapped_object_actor, ro)
@@ -1949,7 +1993,7 @@ async def save_to_inbox(
     except ap.ObjectNotFoundError:
         logger.warning("Actor not found")
         return
-    except httpx.HTTPStatusError:
+    except ap.FetchError:
         logger.exception("Failed to fetch actor")
         return
@@ -2057,7 +2101,11 @@ async def save_to_inbox(
     if activity_ro.ap_type == "Create":
         await _handle_create_activity(
-            db_session, actor, inbox_object, forwarded_by_actor=forwarded_by_actor
+            db_session,
+            actor,
+            inbox_object,
+            forwarded_by_actor=forwarded_by_actor,
+            relates_to_inbox_object=relates_to_inbox_object,
         )
     elif activity_ro.ap_type == "Read":
         await _handle_read_activity(db_session, actor, inbox_object)
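Duplicate deliveries are now short-circuited in two places: a Create whose object is already in the inbox is dropped, and a Read-wrapped object is only processed if it is not already known. A minimal sketch of that idempotency guard (the in-memory dict stands in for the database lookup by AP ID):

class Inbox:
    def __init__(self) -> None:
        self._seen: dict[str, dict] = {}

    def save(self, activity: dict) -> bool:
        # Idempotency guard: process each object (keyed by its AP ID) only once.
        ap_id = activity["id"]
        if ap_id in self._seen:
            print(f"{ap_id} is already in the inbox, skipping processing")
            return False
        self._seen[ap_id] = activity
        return True


inbox = Inbox()
note = {"id": "https://example.com/notes/1", "type": "Note"}
assert inbox.save(note) is True
assert inbox.save(note) is False  # duplicate delivery is ignored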

View File

@@ -88,8 +88,12 @@ def _body_digest(body: bytes) -> str:
     return "SHA-256=" + base64.b64encode(h.digest()).decode("utf-8")


-async def _get_public_key(db_session: AsyncSession, key_id: str) -> Key:
-    if cached_key := _KEY_CACHE.get(key_id):
+async def _get_public_key(
+    db_session: AsyncSession,
+    key_id: str,
+    should_skip_cache: bool = False,
+) -> Key:
+    if not should_skip_cache and (cached_key := _KEY_CACHE.get(key_id)):
         logger.info(f"Key {key_id} found in cache")
         return cached_key
@@ -101,6 +105,7 @@ async def _get_public_key(db_session: AsyncSession, key_id: str) -> Key:
             select(models.Actor).where(models.Actor.ap_id == key_id.split("#")[0])
         )
     ).one_or_none()
+    if not should_skip_cache:
         if existing_actor and existing_actor.public_key_id == key_id:
             k = Key(existing_actor.ap_id, key_id)
             k.load_pub(existing_actor.public_key_as_pem)
@@ -110,6 +115,8 @@ async def _get_public_key(db_session: AsyncSession, key_id: str) -> Key:
     # Fetch it
     from app import activitypub as ap
+    from app.actor import RemoteActor
+    from app.actor import update_actor_if_needed

     # Without signing the request as if it's the first contact, the 2 servers
     # might race to fetch each other's key
@@ -133,6 +140,12 @@ async def _get_public_key(db_session: AsyncSession, key_id: str) -> Key:
             f"failed to fetch requested key {key_id}: got {actor['publicKey']}"
         )

+    if should_skip_cache and actor["type"] != "Key" and existing_actor:
+        # We had to skip the cache, which means the actor key probably changed
+        # and we want to update our cached version
+        await update_actor_if_needed(db_session, existing_actor, RemoteActor(actor))
+        await db_session.commit()
+
     _KEY_CACHE[key_id] = k
     return k
@@ -216,7 +229,17 @@ async def httpsig_checker(
     has_valid_signature = _verify_h(
         signed_string, base64.b64decode(hsig["signature"]), k.pubkey
     )
-    # FIXME: fetch/update the user if the signature is wrong
+
+    # If the signature is not valid, we may have to update the cached actor
+    if not has_valid_signature:
+        logger.info("Invalid signature, trying to refresh actor")
+        try:
+            k = await _get_public_key(db_session, hsig["keyId"], should_skip_cache=True)
+            has_valid_signature = _verify_h(
+                signed_string, base64.b64decode(hsig["signature"]), k.pubkey
+            )
+        except Exception:
+            logger.exception("Failed to refresh actor")

     httpsig_info = HTTPSigInfo(
         has_valid_signature=has_valid_signature,
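The retry path is a standard cache-invalidation pattern: verify the HTTP signature against the cached key first, and only bypass the cache (re-fetching the actor and its key) when verification fails, e.g. after a key rotation. A simplified sketch with a pluggable key loader (names are illustrative, not the project's API):

from typing import Callable

# (key_id, skip_cache) -> public key PEM
KeyLoader = Callable[[str, bool], str]


def verify_with_refresh(
    key_id: str,
    verify: Callable[[str], bool],
    load_key: KeyLoader,
) -> bool:
    # First attempt uses the cached key.
    if verify(load_key(key_id, False)):
        return True
    # The actor may have rotated its key: bypass the cache and retry once.
    try:
        return verify(load_key(key_id, True))
    except Exception:
        return False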

View File

@@ -763,7 +763,7 @@ async def tag_by_name(
     _: httpsig.HTTPSigInfo = Depends(httpsig.httpsig_checker),
 ) -> ActivityPubResponse | templates.TemplateResponse:
     where = [
-        models.TaggedOutboxObject.tag == tag,
+        models.TaggedOutboxObject.tag == tag.lower(),
         models.OutboxObject.visibility == ap.VisibilityEnum.PUBLIC,
         models.OutboxObject.is_deleted.is_(False),
     ]
@@ -789,7 +789,7 @@ async def tag_by_name(
     return ActivityPubResponse(
         {
             "@context": ap.AS_CTX,
-            "id": BASE_URL + f"/t/{tag}",
+            "id": BASE_URL + f"/t/{tag.lower()}",
             "type": "OrderedCollection",
             "totalItems": tagged_count,
             "orderedItems": [
@@ -883,6 +883,48 @@ async def post_remote_follow(
     )


+@app.get("/remote_interaction")
+async def remote_interaction(
+    request: Request,
+    ap_id: str,
+    db_session: AsyncSession = Depends(get_db_session),
+) -> templates.TemplateResponse:
+    outbox_object = await boxes.get_outbox_object_by_ap_id(
+        db_session,
+        ap_id,
+    )
+    if not outbox_object:
+        raise HTTPException(status_code=404)
+
+    return await templates.render_template(
+        db_session,
+        request,
+        "remote_interact.html",
+        {"outbox_object": outbox_object},
+    )
+
+
+@app.post("/remote_interaction")
+async def post_remote_interaction(
+    request: Request,
+    csrf_check: None = Depends(verify_csrf_token),
+    profile: str = Form(),
+    ap_id: str = Form(),
+) -> RedirectResponse:
+    if not profile.startswith("@"):
+        profile = f"@{profile}"
+
+    remote_follow_template = await get_remote_follow_template(profile)
+    if not remote_follow_template:
+        # TODO(ts): error message to user
+        raise HTTPException(status_code=404)
+
+    return RedirectResponse(
+        remote_follow_template.format(uri=ap_id),
+        status_code=302,
+    )
+
+
 @app.get("/.well-known/webfinger")
 async def wellknown_webfinger(resource: str) -> JSONResponse:
     """Exposes/serves WebFinger data."""
@@ -1179,6 +1221,7 @@ async def robots_file():
 Disallow: /followers
 Disallow: /following
 Disallow: /admin
+Disallow: /remote_interaction
 Disallow: /remote_follow"""
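The remote-interaction flow mirrors Mastodon-style remote follows: look up the visitor's handle via WebFinger, find the link with rel "http://ostatus.org/schema/1.0/subscribe", and redirect with the object's AP ID substituted for the "{uri}" placeholder. A hedged sketch of the template lookup (the WebFinger request and field names follow the spec, but `get_subscribe_template` is a simplified stand-in for the project's `get_remote_follow_template` helper):

import httpx


def get_subscribe_template(profile: str) -> str | None:
    # profile is "@user@instance.tld" (or "user@instance.tld")
    username, _, domain = profile.lstrip("@").partition("@")
    resp = httpx.get(
        f"https://{domain}/.well-known/webfinger",
        params={"resource": f"acct:{username}@{domain}"},
    )
    resp.raise_for_status()
    for link in resp.json().get("links", []):
        if link.get("rel") == "http://ostatus.org/schema/1.0/subscribe":
            # e.g. "https://instance.tld/authorize_interaction?uri={uri}"
            return link.get("template")
    return None


# Redirect target for interacting with a local object from the visitor's instance:
# get_subscribe_template("@you@instance.tld").format(uri="https://myblog.tld/o/abc123")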

View File

@@ -388,7 +388,7 @@ nav.flexbox {
       margin-right: 0px;
     }
   }
-  a {
+  a:not(.label-btn) {
     color: $primary-color;
     text-decoration: none;
     &:hover, &:active {
@@ -396,25 +396,31 @@ nav.flexbox {
       text-decoration: underline;
     }
   }
-  a.active {
+  a.active:not(.label-btn) {
     color: $secondary-color;
     font-weight: bold;
   }
 }

+// after nav.flexbox to override default behavior
+a.label-btn {
+  color: $form-text-color;
+  &:hover {
+    text-decoration: none;
+    color: $form-text-color;
+  }
+}
+
 .ap-object {
   margin: 15px 0;
   padding: 20px;
-  .in-reply-to {
-    color: $muted-color;
-    &:hover {
-      color: $secondary-color;
-      text-decoration: underline;
-    }
-  }
   nav {
     color: $muted-color;
   }
+  .in-reply-to {
+    display: inline;
+    color: $muted-color;
+  }
   .e-content, .activity-og-meta {
     a:hover {
       text-decoration: underline;

View File

@@ -1,52 +1,123 @@
 import re
 import typing

-from markdown import markdown
+from mistletoe import Document  # type: ignore
+from mistletoe.html_renderer import HTMLRenderer  # type: ignore
+from mistletoe.span_token import SpanToken  # type: ignore
+from pygments import highlight  # type: ignore
+from pygments.formatters import HtmlFormatter  # type: ignore
+from pygments.lexers import get_lexer_by_name as get_lexer  # type: ignore
+from pygments.lexers import guess_lexer  # type: ignore
 from sqlalchemy import select

 from app import webfinger
 from app.config import BASE_URL
+from app.config import CODE_HIGHLIGHTING_THEME
 from app.database import AsyncSession
 from app.utils import emoji

 if typing.TYPE_CHECKING:
     from app.actor import Actor

+_FORMATTER = HtmlFormatter(style=CODE_HIGHLIGHTING_THEME)

-def _set_a_attrs(attrs, new=False):
-    attrs[(None, "target")] = "_blank"
-    attrs[(None, "class")] = "external"
-    attrs[(None, "rel")] = "noopener"
-    attrs[(None, "title")] = attrs[(None, "href")]
-    return attrs
-
 _HASHTAG_REGEX = re.compile(r"(#[\d\w]+)")
-_MENTION_REGEX = re.compile(r"@[\d\w_.+-]+@[\d\w-]+\.[\d\w\-.]+")
+_MENTION_REGEX = re.compile(r"(@[\d\w_.+-]+@[\d\w-]+\.[\d\w\-.]+)")
+_URL_REGEX = re.compile(
+    "(https?:\\/\\/(?:www\\.)?[-a-zA-Z0-9@:%._\\+~#=]{1,256}\\.[a-zA-Z0-9()]{1,6}\\b(?:[-a-zA-Z0-9()@:%_\\+.~#?&\\/=]*))"  # noqa: E501
+)


-def hashtagify(content: str) -> tuple[str, list[dict[str, str]]]:
-    tags = []
-    hashtags = re.findall(_HASHTAG_REGEX, content)
-    hashtags = sorted(set(hashtags), reverse=True)  # unique tags, longest first
-    for hashtag in hashtags:
-        tag = hashtag[1:]
-        link = f'<a href="{BASE_URL}/t/{tag}" class="mention hashtag" rel="tag">#<span>{tag}</span></a>'  # noqa: E501
-        tags.append(dict(href=f"{BASE_URL}/t/{tag}", name=hashtag, type="Hashtag"))
-        content = content.replace(hashtag, link)
-    return content, tags
+class AutoLink(SpanToken):
+    parse_inner = False
+    precedence = 10
+    pattern = _URL_REGEX
+
+    def __init__(self, match_obj: re.Match) -> None:
+        self.target = match_obj.group()
+
+
+class Mention(SpanToken):
+    parse_inner = False
+    precedence = 10
+    pattern = _MENTION_REGEX
+
+    def __init__(self, match_obj: re.Match) -> None:
+        self.target = match_obj.group()
+
+
+class Hashtag(SpanToken):
+    parse_inner = False
+    precedence = 10
+    pattern = _HASHTAG_REGEX
+
+    def __init__(self, match_obj: re.Match) -> None:
+        self.target = match_obj.group()
+
+
+class CustomRenderer(HTMLRenderer):
+    def __init__(
+        self,
+        mentioned_actors: dict[str, "Actor"] = {},
+        enable_mentionify: bool = True,
+        enable_hashtagify: bool = True,
+    ) -> None:
+        extra_tokens = []
+        if enable_mentionify:
+            extra_tokens.append(Mention)
+        if enable_hashtagify:
+            extra_tokens.append(Hashtag)
+        super().__init__(AutoLink, *extra_tokens)
+        self.tags: list[dict[str, str]] = []
+        self.mentioned_actors = mentioned_actors
+
+    def render_auto_link(self, token: AutoLink) -> str:
+        template = '<a href="{target}" rel="noopener">{inner}</a>'
+        target = self.escape_url(token.target)
+        return template.format(target=target, inner=target)
+
+    def render_mention(self, token: Mention) -> str:
+        mention = token.target
+        actor = self.mentioned_actors.get(mention)
+        if not actor:
+            return mention
+        self.tags.append(dict(type="Mention", href=actor.ap_id, name=mention))
+        link = f'<span class="h-card"><a href="{actor.url}" class="u-url mention">{actor.handle}</a></span>'  # noqa: E501
+        return link
+
+    def render_hashtag(self, token: Hashtag) -> str:
+        tag = token.target[1:]
+        link = f'<a href="{BASE_URL}/t/{tag.lower()}" class="mention hashtag" rel="tag">#<span>{tag}</span></a>'  # noqa: E501
+        self.tags.append(
+            dict(
+                href=f"{BASE_URL}/t/{tag.lower()}",
+                name=token.target.lower(),
+                type="Hashtag",
+            )
+        )
+        return link
+
+    def render_block_code(self, token: typing.Any) -> str:
+        code = token.children[0].content
+        lexer = get_lexer(token.language) if token.language else guess_lexer(code)
+        return highlight(code, lexer, _FORMATTER)


-async def _mentionify(
+async def _prefetch_mentioned_actors(
     db_session: AsyncSession,
     content: str,
-) -> tuple[str, list[dict[str, str]], list["Actor"]]:
+) -> dict[str, "Actor"]:
     from app import models
     from app.actor import fetch_actor

-    tags = []
-    mentioned_actors = []
+    actors = {}
     for mention in re.findall(_MENTION_REGEX, content):
+        if mention in actors:
+            continue
         _, username, domain = mention.split("@")
         actor = (
             await db_session.execute(
@@ -63,12 +134,27 @@ async def _mentionify(
             continue
         actor = await fetch_actor(db_session, actor_url)

-        mentioned_actors.append(actor)
-        tags.append(dict(type="Mention", href=actor.ap_id, name=mention))
+        actors[mention] = actor

-        link = f'<span class="h-card"><a href="{actor.url}" class="u-url mention">{actor.handle}</a></span>'  # noqa: E501
-        content = content.replace(mention, link)
-    return content, tags, mentioned_actors
+    return actors


+def hashtagify(
+    content: str,
+) -> tuple[str, list[dict[str, str]]]:
+    tags = []
+    with CustomRenderer(
+        mentioned_actors={},
+        enable_mentionify=False,
+        enable_hashtagify=True,
+    ) as renderer:
+        rendered_content = renderer.render(Document(content))
+        tags.extend(renderer.tags)
+
+    # Handle custom emoji
+    tags.extend(emoji.tags(content))
+
+    return rendered_content, tags


 async def markdownify(
@@ -82,17 +168,33 @@ async def markdownify(
     """
     tags = []
-    mentioned_actors: list["Actor"] = []
-    if enable_hashtagify:
-        content, hashtag_tags = hashtagify(content)
-        tags.extend(hashtag_tags)
+    mentioned_actors: dict[str, "Actor"] = {}
     if enable_mentionify:
-        content, mention_tags, mentioned_actors = await _mentionify(db_session, content)
-        tags.extend(mention_tags)
+        mentioned_actors = await _prefetch_mentioned_actors(db_session, content)
+
+    with CustomRenderer(
+        mentioned_actors=mentioned_actors,
+        enable_mentionify=enable_mentionify,
+        enable_hashtagify=enable_hashtagify,
+    ) as renderer:
+        rendered_content = renderer.render(Document(content))
+        tags.extend(renderer.tags)

     # Handle custom emoji
     tags.extend(emoji.tags(content))

-    content = markdown(content, extensions=["mdx_linkify", "fenced_code"])
-    return content, tags, mentioned_actors
+    return rendered_content, tags, list(mentioned_actors.values())
+
+
+def dedup_tags(tags: list[dict[str, str]]) -> list[dict[str, str]]:
+    idx = set()
+    deduped_tags = []
+    for tag in tags:
+        tag_idx = (tag["type"], tag["name"])
+        if tag_idx in idx:
+            continue
+
+        idx.add(tag_idx)
+        deduped_tags.append(tag)
+
+    return deduped_tags
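The Markdown pipeline now hangs hashtag, mention, and auto-link handling off mistletoe's extension points: each pattern becomes a custom SpanToken and the renderer collects the ActivityPub tags as a side effect while emitting HTML. A small self-contained example of that mechanism, following the same pattern as the diff but trimmed to hashtags only (output shown in the comment is approximate and assumes mistletoe is installed):

import re

from mistletoe import Document
from mistletoe.html_renderer import HTMLRenderer
from mistletoe.span_token import SpanToken


class Hashtag(SpanToken):
    parse_inner = False
    pattern = re.compile(r"(#[\w]+)")

    def __init__(self, match_obj: re.Match) -> None:
        self.target = match_obj.group()


class HashtagRenderer(HTMLRenderer):
    def __init__(self) -> None:
        super().__init__(Hashtag)  # register the extra span token

    def render_hashtag(self, token: Hashtag) -> str:
        tag = token.target[1:]
        return f'<a href="/t/{tag.lower()}" rel="tag">#{tag}</a>'


with HashtagRenderer() as renderer:
    html = renderer.render(Document("Hello #Fediverse"))
# html is roughly '<p>Hello <a href="/t/fediverse" rel="tag">#Fediverse</a></p>\n'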

View File

@@ -12,18 +12,16 @@
 {% for outbox_object in outbox %}
 {% if outbox_object.ap_type == "Announce" %}
-  <div class="actor-action">You shared</div>
+  <div class="actor-action">You shared <span title="{{ outbox_object.ap_published_at.isoformat() }}">{{ outbox_object.ap_published_at | timeago }}</span></div>
   {{ utils.display_object(outbox_object.relates_to_anybox_object) }}
 {% elif outbox_object.ap_type == "Like" %}
-  <div class="actor-action">You liked</div>
+  <div class="actor-action">You liked <span title="{{ outbox_object.ap_published_at.isoformat() }}">{{ outbox_object.ap_published_at | timeago }}</span></div>
   {{ utils.display_object(outbox_object.relates_to_anybox_object) }}
 {% elif outbox_object.ap_type == "Follow" %}
-  <div class="actor-action">You followed</div>
+  <div class="actor-action">You followed <span title="{{ outbox_object.ap_published_at.isoformat() }}">{{ outbox_object.ap_published_at | timeago }}</span></div>
   {{ utils.display_actor(outbox_object.relates_to_actor, actors_metadata) }}
 {% elif outbox_object.ap_type in ["Article", "Note", "Video", "Question"] %}
   {{ utils.display_object(outbox_object) }}
-{% else %}
-  Implement {{ outbox_object.ap_type }}
 {% endif %}
 {% endfor %}

View File

@ -0,0 +1,26 @@
{%- import "utils.html" as utils with context -%}
{% extends "layout.html" %}
{% block head %}
<title>Interact from your instance</title>
{% endblock %}
{% block content %}
{% include "header.html" %}
<div class="box">
<h2>Interact with this object</h2>
</div>
{{ utils.display_object(outbox_object) }}
<div class="box">
<form class="form" action="{{ url_for("post_remote_interaction") }}" method="POST">
{{ utils.embed_csrf_token() }}
<input type="text" name="profile" placeholder="you@instance.tld" autofocus>
<input type="hidden" name="ap_id" value="{{ outbox_object.ap_id }}">
<input type="submit" value="interact from your instance">
</form>
</div>
{% endblock %}

View File

@@ -219,7 +219,9 @@
 {% if metadata.is_following %}
 <li>already following</li>
 <li>{{ admin_undo_button(metadata.outbox_follow_ap_id, "unfollow")}}</li>
+{% if not with_details %}
 <li>{{ admin_profile_button(actor.ap_id) }}</li>
+{% endif %}
 {% elif metadata.is_follow_request_sent %}
 <li>follow request sent</li>
 <li>{{ admin_undo_button(metadata.outbox_follow_ap_id, "undo follow") }}</li>
@@ -231,7 +233,7 @@
 {% if not metadata.is_following and not with_details %}
 <li>{{ admin_profile_button(actor.ap_id) }}</li>
 {% endif %}
-{% elif actor.is_from_db and not with_details %}
+{% elif actor.is_from_db and not with_details and not metadata.is_following %}
 <li>{{ admin_profile_button(actor.ap_id) }}</li>
 {% endif %}
 {% if actor.moved_to %}
@@ -261,6 +263,9 @@
 <li>rejected</li>
 {% endif %}
 {% endif %}
+{% if with_details %}
+<li><a href="{{ actor.url }}" class="label-btn">remote profile</a></li>
+{% endif %}
 </ul>
 </nav>
 </div>
@@ -338,11 +343,13 @@
 {% elif attachment.type == "Audio" or (attachment | has_media_type("audio")) %}
 <audio controls preload="metadata" src="{{ attachment.url | media_proxy_url }}"{% if attachment.name %} title="{{ attachment.name }}"{% endif %} class="attachment"></audio>
 {% elif attachment.type == "Link" %}
-<a href="{{ attachment.url }}" class="attachment">{{ attachment.url }}</a>
+<a href="{{ attachment.url }}" class="attachment">{{ attachment.url | truncate(64, True) }}</a> ({{ attachment.mimetype }})
 {% else %}
-<a href="{{ attachment.url | media_proxy_url }}"{% if attachment.name %} title="{{ attachment.name }}"{% endif %} class="attachment">{{ attachment.url }}</a>
+<a href="{{ attachment.url | media_proxy_url }}"{% if attachment.name %} title="{{ attachment.url }}"{% endif %} class="attachment">
+  {% if attachment.name %}{{ attachment.name }}{% else %}{{ attachment.url | truncate(64, True) }}{% endif %}
+</a> ({{ attachment.mimetype }})
 {% endif %}
-{% if object.sensitive %}
+{% if object.sensitive and (attachment.type == "Image" or (attachment | has_media_type("image")) or attachment.type == "Video" or (attachment | has_media_type("video"))) %}
 </div>
 </div>
 </div>
@@ -369,9 +376,9 @@
 {% endif %}

 {% if object.in_reply_to %}
-<a href="{% if is_admin and object.is_in_reply_to_from_inbox %}{{ url_for("get_lookup") }}?query={% endif %}{{ object.in_reply_to }}" title="{{ object.in_reply_to }}" class="in-reply-to" rel="nofollow">
-  in reply to {{ object.in_reply_to|truncate(64, True) }}
-</a>
+<p class="in-reply-to">in reply to <a href="{% if is_admin and object.is_in_reply_to_from_inbox %}{{ url_for("get_lookup") }}?query={% endif %}{{ object.in_reply_to }}" title="{{ object.in_reply_to }}" rel="nofollow">
+  this {{ object.ap_type|lower }}
+</a></p>
 {% endif %}

 {% if object.ap_type == "Article" %}
@@ -461,6 +468,16 @@
 <li>
   <div><a href="{{ object.url }}"{% if object.is_from_inbox %} rel="nofollow"{% endif %} class="object-permalink u-url u-uid">permalink</a></div>
 </li>
+
+{% if object.is_from_outbox and is_object_page and not is_admin and not request.url.path.startswith("/remote_interaction") %}
+<li>
+  <a class="label-btn" href="{{ request.url_for("remote_interaction") }}?ap_id={{ object.ap_id }}">
+    interact from your instance
+  </a>
+</li>
+{% endif %}
+
 {% if not is_article_mode %}
 <li>
   <time class="dt-published" datetime="{{ object.ap_published_at.replace(microsecond=0).isoformat() }}" title="{{ object.ap_published_at.replace(microsecond=0).isoformat() }}">{{ object.ap_published_at | timeago }}</time>
@@ -556,7 +573,7 @@
 {% if object.visibility in [visibility_enum.PUBLIC, visibility_enum.UNLISTED] %}
 <li>
   {% if object.announced_via_outbox_object_ap_id %}
-  {{ admin_undo_button(object.liked_via_outbox_object_ap_id, "unshare") }}
+  {{ admin_undo_button(object.announced_via_outbox_object_ap_id, "unshare") }}
   {% else %}
   {{ admin_announce_button(object.ap_id, permalink_id=object.permalink_id) }}
   {% endif %}

View File

@@ -9,6 +9,7 @@ from bs4 import BeautifulSoup  # type: ignore
 from loguru import logger
 from pydantic import BaseModel

+from app import activitypub as ap
 from app import ap_object
 from app import config
 from app.actor import LOCAL_ACTOR
@@ -69,7 +70,12 @@ async def external_urls(
                 tags_hrefs.add(tag_href)
             if tag.get("type") == "Mention":
                 if tag["href"] != LOCAL_ACTOR.ap_id:
-                    mentioned_actor = await fetch_actor(db_session, tag["href"])
+                    try:
+                        mentioned_actor = await fetch_actor(db_session, tag["href"])
+                    except (ap.FetchError, ap.NotAnObjectError):
+                        tags_hrefs.add(tag["href"])
+                        continue
+
                     tags_hrefs.add(mentioned_actor.url)
                     tags_hrefs.add(mentioned_actor.ap_id)
                 else:
@@ -84,6 +90,7 @@ async def external_urls(
         if not h:
             continue

+        try:
             ph = urlparse(h)
             mimetype, _ = mimetypes.guess_type(h)
             if (
@@ -96,6 +103,9 @@ async def external_urls(
                 )
             ):
                 urls.add(h)
+        except Exception:
+            logger.exception(f"Failed to check {h}")
+            continue

     return urls - tags_hrefs

View File

@@ -58,6 +58,10 @@ def is_url_valid(url: str) -> bool:
         logger.warning(f"{parsed.hostname} is blocked")
         return False

+    if parsed.hostname.endswith(".onion"):
+        logger.warning(f"{url} is an onion service")
+        return False
+
     ip_address = _getaddrinfo(
         parsed.hostname, parsed.port or (80 if parsed.scheme == "http" else 443)
     )
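Rejecting onion services at URL-validation time keeps the server from attempting requests to Tor-only hosts it cannot reach anyway. A standalone sketch of that host check (assuming the same reject-and-log policy):

from urllib.parse import urlparse


def is_onion_url(url: str) -> bool:
    hostname = urlparse(url).hostname
    return bool(hostname) and hostname.endswith(".onion")


assert is_onion_url("http://exampleonionaddress.onion/inbox") is True
assert is_onion_url("https://example.com/inbox") is False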

poetry.lock (generated)
View File

@ -197,7 +197,7 @@ python-versions = "~=3.7"
[[package]] [[package]]
name = "certifi" name = "certifi"
version = "2022.9.14" version = "2022.9.24"
description = "Python package for providing Mozilla's CA Bundle." description = "Python package for providing Mozilla's CA Bundle."
category = "main" category = "main"
optional = false optional = false
@ -286,11 +286,11 @@ doc = ["sphinx", "sphinx-rtd-theme", "sphinxcontrib-spelling"]
[[package]] [[package]]
name = "faker" name = "faker"
version = "14.2.0" version = "15.0.0"
description = "Faker is a Python package that generates fake data for you." description = "Faker is a Python package that generates fake data for you."
category = "dev" category = "dev"
optional = false optional = false
python-versions = ">=3.6" python-versions = ">=3.7"
[package.dependencies] [package.dependencies]
python-dateutil = ">=2.4" python-dateutil = ">=2.4"
@ -463,7 +463,7 @@ socks = ["socksio (>=1.0.0,<2.0.0)"]
[[package]] [[package]]
name = "humanize" name = "humanize"
version = "4.3.0" version = "4.4.0"
description = "Python humanize utilities" description = "Python humanize utilities"
category = "main" category = "main"
optional = false optional = false
@ -582,7 +582,7 @@ source = ["Cython (>=0.29.7)"]
[[package]] [[package]]
name = "mako" name = "mako"
version = "1.2.2" version = "1.2.3"
description = "A super-fast templating language that borrows the best ideas from the existing templating languages." description = "A super-fast templating language that borrows the best ideas from the existing templating languages."
category = "main" category = "main"
optional = false optional = false
@ -648,6 +648,14 @@ BeautifulSoup4 = ">=4.6.0"
html5lib = ">=1.0.1" html5lib = ">=1.0.1"
requests = ">=2.18.4" requests = ">=2.18.4"
[[package]]
name = "mistletoe"
version = "0.9.0"
description = "A fast, extensible Markdown parser in pure Python."
category = "main"
optional = false
python-versions = "~=3.5"
[[package]] [[package]]
name = "mypy" name = "mypy"
version = "0.960" version = "0.960"
@ -1118,7 +1126,7 @@ python-versions = "*"
[[package]] [[package]]
name = "types-pillow" name = "types-pillow"
version = "9.2.1" version = "9.2.2"
description = "Typing stubs for Pillow" description = "Typing stubs for Pillow"
category = "dev" category = "dev"
optional = false optional = false
@ -1134,7 +1142,7 @@ python-versions = "*"
[[package]] [[package]]
name = "types-requests" name = "types-requests"
version = "2.28.10" version = "2.28.11"
description = "Typing stubs for requests" description = "Typing stubs for requests"
category = "dev" category = "dev"
optional = false optional = false
@ -1153,7 +1161,7 @@ python-versions = "*"
[[package]] [[package]]
name = "types-urllib3" name = "types-urllib3"
version = "1.26.24" version = "1.26.25"
description = "Typing stubs for urllib3" description = "Typing stubs for urllib3"
category = "dev" category = "dev"
optional = false optional = false
@ -1275,7 +1283,7 @@ dev = ["pytest (>=4.6.2)", "black (>=19.3b0)"]
[metadata] [metadata]
lock-version = "1.1" lock-version = "1.1"
python-versions = "^3.10" python-versions = "^3.10"
content-hash = "84b3a6dcfc055fb0712c6abbf1bf94d9526eda940c4ddb0bd275664e68a4c3e3" content-hash = "bc8585a0da6f4d4e54afafde1da287ed75ed6544981d11bba561a7678bc31b8f"
[metadata.files] [metadata.files]
aiosqlite = [ aiosqlite = [
@ -1429,8 +1437,8 @@ cachetools = [
{file = "cachetools-5.2.0.tar.gz", hash = "sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757"}, {file = "cachetools-5.2.0.tar.gz", hash = "sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757"},
] ]
certifi = [ certifi = [
{file = "certifi-2022.9.14-py3-none-any.whl", hash = "sha256:e232343de1ab72c2aa521b625c80f699e356830fd0e2c620b465b304b17b0516"}, {file = "certifi-2022.9.24-py3-none-any.whl", hash = "sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382"},
{file = "certifi-2022.9.14.tar.gz", hash = "sha256:36973885b9542e6bd01dea287b2b4b3b21236307c56324fcc3f1160f2d655ed5"}, {file = "certifi-2022.9.24.tar.gz", hash = "sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14"},
] ]
cffi = [ cffi = [
{file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"},
@ -1522,8 +1530,8 @@ factory-boy = [
{file = "factory_boy-3.2.1.tar.gz", hash = "sha256:a98d277b0c047c75eb6e4ab8508a7f81fb03d2cb21986f627913546ef7a2a55e"}, {file = "factory_boy-3.2.1.tar.gz", hash = "sha256:a98d277b0c047c75eb6e4ab8508a7f81fb03d2cb21986f627913546ef7a2a55e"},
] ]
faker = [ faker = [
{file = "Faker-14.2.0-py3-none-any.whl", hash = "sha256:e02c55a5b0586caaf913cc6c254b3de178e08b031c5922e590fd033ebbdbfd02"}, {file = "Faker-15.0.0-py3-none-any.whl", hash = "sha256:84c83f0ac1a2c8ecabd784c501aa0ef1d082d4aee52c3d797d586081c166434c"},
{file = "Faker-14.2.0.tar.gz", hash = "sha256:6db56e2c43a2b74250d1c332ef25fef7dc07dcb6c5fab5329dd7b4467b8ed7b9"}, {file = "Faker-15.0.0.tar.gz", hash = "sha256:245fc7d23470dc57164bd9a59b7b1126e16289ffcf813d88a6c8e9b8a37ea3fb"},
] ]
fastapi = [ fastapi = [
{file = "fastapi-0.78.0-py3-none-any.whl", hash = "sha256:15fcabd5c78c266fa7ae7d8de9b384bfc2375ee0503463a6febbe3bab69d6f65"}, {file = "fastapi-0.78.0-py3-none-any.whl", hash = "sha256:15fcabd5c78c266fa7ae7d8de9b384bfc2375ee0503463a6febbe3bab69d6f65"},
@ -1659,8 +1667,8 @@ httpx = [
{file = "httpx-0.23.0.tar.gz", hash = "sha256:f28eac771ec9eb4866d3fb4ab65abd42d38c424739e80c08d8d20570de60b0ef"}, {file = "httpx-0.23.0.tar.gz", hash = "sha256:f28eac771ec9eb4866d3fb4ab65abd42d38c424739e80c08d8d20570de60b0ef"},
] ]
humanize = [ humanize = [
{file = "humanize-4.3.0-py3-none-any.whl", hash = "sha256:5dd159c9910cd57b94072e4d7decae097f0eb84c4645153706929a7f127cb2ef"}, {file = "humanize-4.4.0-py3-none-any.whl", hash = "sha256:8830ebf2d65d0395c1bd4c79189ad71e023f277c2c7ae00f263124432e6f2ffa"},
{file = "humanize-4.3.0.tar.gz", hash = "sha256:0dfac79fe8c1c0c734c14177b07b857bad9ae30dd50daa0a14e2c3d8054ee0c4"}, {file = "humanize-4.4.0.tar.gz", hash = "sha256:efb2584565cc86b7ea87a977a15066de34cdedaf341b11c851cfcfd2b964779c"},
] ]
hyperframe = [] hyperframe = []
idna = [ idna = [
@ -1776,8 +1784,8 @@ lxml = [
{file = "lxml-4.9.1.tar.gz", hash = "sha256:fe749b052bb7233fe5d072fcb549221a8cb1a16725c47c37e42b0b9cb3ff2c3f"}, {file = "lxml-4.9.1.tar.gz", hash = "sha256:fe749b052bb7233fe5d072fcb549221a8cb1a16725c47c37e42b0b9cb3ff2c3f"},
] ]
mako = [ mako = [
{file = "Mako-1.2.2-py3-none-any.whl", hash = "sha256:8efcb8004681b5f71d09c983ad5a9e6f5c40601a6ec469148753292abc0da534"}, {file = "Mako-1.2.3-py3-none-any.whl", hash = "sha256:c413a086e38cd885088d5e165305ee8eed04e8b3f8f62df343480da0a385735f"},
{file = "Mako-1.2.2.tar.gz", hash = "sha256:3724869b363ba630a272a5f89f68c070352137b8fd1757650017b7e06fda163f"}, {file = "Mako-1.2.3.tar.gz", hash = "sha256:7fde96466fcfeedb0eed94f187f20b23d85e4cb41444be0e542e2c8c65c396cd"},
] ]
markdown = [] markdown = []
markupsafe = [ markupsafe = [
@ -1832,6 +1840,10 @@ mdx-linkify = [
mf2py = [ mf2py = [
{file = "mf2py-1.1.2.tar.gz", hash = "sha256:84f1f8f2ff3f1deb1c30be497e7ccd805452996a662fd4a77f09e0105bede2c9"}, {file = "mf2py-1.1.2.tar.gz", hash = "sha256:84f1f8f2ff3f1deb1c30be497e7ccd805452996a662fd4a77f09e0105bede2c9"},
] ]
mistletoe = [
{file = "mistletoe-0.9.0-py3-none-any.whl", hash = "sha256:11316e2fe0be422a8248293ad0efbee9ad0c6f3683b2f45bc6b989ea17a68c74"},
{file = "mistletoe-0.9.0.tar.gz", hash = "sha256:3cb96d78226d08f0d3bf09efcaf330d23902492006e18b2c06558e8b86bf7faf"},
]
mypy = [ mypy = [
{file = "mypy-0.960-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3a3e525cd76c2c4f90f1449fd034ba21fcca68050ff7c8397bb7dd25dd8b8248"}, {file = "mypy-0.960-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3a3e525cd76c2c4f90f1449fd034ba21fcca68050ff7c8397bb7dd25dd8b8248"},
{file = "mypy-0.960-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7a76dc4f91e92db119b1be293892df8379b08fd31795bb44e0ff84256d34c251"}, {file = "mypy-0.960-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7a76dc4f91e92db119b1be293892df8379b08fd31795bb44e0ff84256d34c251"},
@ -2193,18 +2205,18 @@ types-markdown = [
{file = "types_Markdown-3.4.2-py3-none-any.whl", hash = "sha256:7dd9fe8bd2645ec984f438b255f42b795507b5cd2a998cc1970ec13f443dc832"}, {file = "types_Markdown-3.4.2-py3-none-any.whl", hash = "sha256:7dd9fe8bd2645ec984f438b255f42b795507b5cd2a998cc1970ec13f443dc832"},
] ]
types-pillow = [ types-pillow = [
{file = "types-Pillow-9.2.1.tar.gz", hash = "sha256:9781104ee2176f680576523fa2a2b83b134957aec6f4d62582cc9e74c93a60b4"}, {file = "types-Pillow-9.2.2.tar.gz", hash = "sha256:6b525b0951ada076f3aefe2347e0bff6231283ce959ad8577a4416604acc673c"},
{file = "types_Pillow-9.2.1-py3-none-any.whl", hash = "sha256:d63743ef631e47f8d8669590ea976162321a9a7604588b424b6306533453fb63"}, {file = "types_Pillow-9.2.2-py3-none-any.whl", hash = "sha256:bfe14afa1e9047a52b3dc326c93b4f57db72641794e194f11b511f68b0061814"},
] ]
types-python-dateutil = [] types-python-dateutil = []
types-requests = [ types-requests = [
{file = "types-requests-2.28.10.tar.gz", hash = "sha256:97d8f40aa1ffe1e58c3726c77d63c182daea9a72d9f1fa2cafdea756b2a19f2c"}, {file = "types-requests-2.28.11.tar.gz", hash = "sha256:7ee827eb8ce611b02b5117cfec5da6455365b6a575f5e3ff19f655ba603e6b4e"},
{file = "types_requests-2.28.10-py3-none-any.whl", hash = "sha256:45b485725ed58752f2b23461252f1c1ad9205b884a1e35f786bb295525a3e16a"}, {file = "types_requests-2.28.11-py3-none-any.whl", hash = "sha256:af5f55e803cabcfb836dad752bd6d8a0fc8ef1cd84243061c0e27dee04ccf4fd"},
] ]
types-tabulate = [] types-tabulate = []
types-urllib3 = [ types-urllib3 = [
{file = "types-urllib3-1.26.24.tar.gz", hash = "sha256:a1b3aaea7dda3eb1b51699ee723aadd235488e4dc4648e030f09bc429ecff42f"}, {file = "types-urllib3-1.26.25.tar.gz", hash = "sha256:5aef0e663724eef924afa8b320b62ffef2c1736c1fa6caecfc9bc6c8ae2c3def"},
{file = "types_urllib3-1.26.24-py3-none-any.whl", hash = "sha256:cf7918503d02d3576e503bbfb419b0e047c4617653bba09624756ab7175e15c9"}, {file = "types_urllib3-1.26.25-py3-none-any.whl", hash = "sha256:c1d78cef7bd581e162e46c20a57b2e1aa6ebecdcf01fd0713bb90978ff3e3427"},
] ]
typing-extensions = [ typing-extensions = [
{file = "typing_extensions-4.3.0-py3-none-any.whl", hash = "sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02"}, {file = "typing_extensions-4.3.0-py3-none-any.whl", hash = "sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02"},

View File

@@ -45,6 +45,7 @@ boussole = "^2.0.0"
 uvicorn = {extras = ["standard"], version = "^0.18.3"}
 Brotli = "^1.0.9"
 greenlet = "^1.1.3"
+mistletoe = "^0.9.0"

 [tool.poetry.dev-dependencies]
 black = "^22.3.0"

View File

@@ -179,7 +179,7 @@ def test_send_create_activity__with_attachment(
     outbox_object = db.execute(select(models.OutboxObject)).scalar_one()
     assert outbox_object.ap_type == "Note"
     assert outbox_object.summary is None
-    assert outbox_object.content == "<p>hello</p>"
+    assert outbox_object.content == "<p>hello</p>\n"
     assert len(outbox_object.attachments) == 1

     attachment = outbox_object.attachments[0]
     assert attachment.type == "Document"
@@ -227,7 +227,7 @@ def test_send_create_activity__no_content_with_cw_and_attachments(
     outbox_object = db.execute(select(models.OutboxObject)).scalar_one()
     assert outbox_object.ap_type == "Note"
     assert outbox_object.summary is None
-    assert outbox_object.content == "<p>cw</p>"
+    assert outbox_object.content == "<p>cw</p>\n"
     assert len(outbox_object.attachments) == 1
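The updated expectations follow from the Markdown parser switch: mistletoe's HTML renderer emits a trailing newline after the final block element, which the previous parser did not. A quick check of that behavior (assuming mistletoe is installed):

import mistletoe

# mistletoe appends a newline after the rendered document,
# hence the "<p>hello</p>\n" expected by the updated tests.
assert mistletoe.markdown("hello") == "<p>hello</p>\n"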