Compare commits

...

164 Commits

Author SHA1 Message Date
Thomas Sileo 9c8693ea55 Quick hotfix for retries 2023-07-14 17:50:26 +02:00
Thomas Sileo febd8c3d26 Upgrade deps 2023-07-03 20:36:24 +02:00
Thomas Sileo a5290af5c8 Fix proxy by forwarding content-encoding 2023-07-03 20:29:10 +02:00
Thomas Sileo 2cec800332 Fix for pruned Move objects 2023-07-03 20:25:03 +02:00
Thomas Sileo 3c07494809 Make CSRF expiration configurable and increase default value 2023-06-09 22:22:37 +02:00
Thomas Sileo 2433fa01cd Fix typing 2023-06-09 22:22:12 +02:00
Thomas Sileo 3169890a39 Update deps 2023-06-09 21:58:23 +02:00
Thomas Sileo 4e1bb330aa Fix OAuth introspection endpoint 2023-02-03 08:55:31 +01:00
Thomas Sileo 625f399309 Fix OAuth introspection endpoint 2023-02-03 08:32:50 +01:00
Thomas Sileo 2bd6c98538 Add OAuth 2.0 introspection endpoint 2023-02-01 20:12:53 +01:00
Thomas Sileo f13376de84 More docs tweaks 2023-01-20 08:38:19 +01:00
Alexey Shpakovsky c97070e3d8 Add documentation about image_url and custom favicon
Also expand documentation about custom templates
2023-01-20 08:35:08 +01:00
João Costa c1692a296d Use object name in the RSS feed title if possible
Articles have a title stored in the object name. It makes sense to also use
this title in the RSS entry.
2023-01-20 08:30:26 +01:00
Thomas Sileo ce6f9238f3 Use newer security context instead of identity for LD sig 2023-01-14 10:54:22 +01:00
Thomas Sileo 3f129855d1 LD sig hack 2023-01-14 10:32:36 +01:00
Thomas Sileo 3fc567861b Tweak README 2023-01-07 09:42:33 +01:00
Thomas Sileo 7b784e3011 Tweak code highlight 2023-01-06 21:21:53 +01:00
Thomas Sileo 5d1ae0c9cd More doc tweaks 2023-01-05 19:47:56 +01:00
Thomas Sileo 88dd2443d7 Fix doc for migration/move support 2023-01-05 19:44:56 +01:00
Thomas Sileo 4045902068 Proper mf2 for the articles listing 2023-01-02 09:48:08 +01:00
Thomas Sileo 20109b45da Fail gracefully when looking up reply actor 2023-01-02 09:34:31 +01:00
Thomas Sileo 94d14fbef3 Tweak webfinger endpoint 2023-01-01 15:33:59 +01:00
Thomas Sileo f34e0b376b Fix webfinger support for custom domains 2022-12-31 19:23:22 +01:00
Thomas Sileo 51c596dd1d Improve webmentions 2022-12-31 16:53:05 +01:00
Thomas Sileo dfc7ab0470 Document how to run on subdomains 2022-12-26 10:48:28 +01:00
Thomas Sileo 5d35d5c0a0 Fix attachment scaling 2022-12-26 10:21:20 +01:00
Thomas Sileo 17921c1097 Default to Python 3.11 in the Dockerfile 2022-12-24 09:50:51 +01:00
Thomas Sileo 24147aedef Tweak CSS for small attachments 2022-12-24 09:50:27 +01:00
Thomas Sileo 673baf0d7f Patch invoke for Python 3.11 support 2022-12-23 09:32:40 +01:00
Thomas Sileo 9c65919070 Tweak feeds 2022-12-23 09:25:50 +01:00
Thomas Sileo c506299089 Fix webfinger logic to fetch handle 2022-12-19 21:17:34 +01:00
Thomas Sileo adbdf6f320 Fix webfinger domain support 2022-12-19 21:07:08 +01:00
Thomas Sileo f34bce180c Add support for custom webfinger domain 2022-12-19 20:49:19 +01:00
Thomas Sileo 0b86df413a Support creating note via C2S 2022-12-18 16:05:41 +01:00
Thomas Sileo ed214cf0e7 Add OAuth refresh token support 2022-12-18 12:55:24 +01:00
Thomas Sileo 3fb36d6119 C2S API for the inbox 2022-12-18 10:52:06 +01:00
Thomas Sileo 1de108b019 Tweak OAuth2 registration params 2022-12-16 22:05:45 +01:00
Thomas Sileo 7b506f2519 More AP C2S support 2022-12-16 20:20:40 +01:00
Thomas Sileo 5cf54c2782 Add support for OAuth 2.0 dynamic client registration 2022-12-16 19:23:22 +01:00
Thomas Sileo db6016394b Fix CSP IndieAuth redirection issue 2022-12-16 09:22:40 +01:00
Thomas Sileo 573a76c0c5 Fix admin redirect 2022-12-15 22:27:14 +01:00
Thomas Sileo 3097dbebe9 Improve Webfinger 2022-12-15 22:14:24 +01:00
Thomas Sileo e378ec94e0 Update deps 2022-12-15 21:07:29 +01:00
Thomas Sileo 15dd7e184b Allow to hide shares from actors 2022-12-12 20:48:05 +01:00
Thomas Sileo 22410862f3 Tweak/fix opengraph parsing 2022-12-11 18:15:30 +01:00
Thomas Sileo 7621a19489 Check browser support before returning webp pictures 2022-12-11 16:15:25 +01:00
Thomas Sileo cad78fe5e8 Document the new follows import task 2022-12-06 20:26:15 +01:00
Thomas Sileo 6a47b6cf4c Update AUTHORS 2022-12-06 19:40:06 +01:00
João Costa 9d6ed4cd28 Fix og:title always being empty on articles 2022-12-06 19:38:44 +01:00
Thomas Sileo 0f10bfddac Oops add missing file 2022-12-05 22:01:37 +01:00
Thomas Sileo 26efd09304 Add task to import Mastodon following export 2022-12-05 21:58:13 +01:00
Thomas Sileo f2e531cf1a Fix Makefile: add --it to the self-destruct command 2022-12-05 21:12:59 +01:00
Thomas Sileo 5d95fd44ac Fix webmention discovery 2022-12-04 12:06:15 +01:00
Thomas Sileo a337b32bcd Blocking server also blocks subdomains 2022-12-04 11:51:52 +01:00
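The change above implies that blocking example.com also covers sub.example.com. A minimal sketch of that kind of suffix check (the helper name and config shape are assumptions, not the project's exact code):

    # Illustrative only: a blocked server also covers its subdomains.
    def is_hostname_blocked(hostname: str, blocked_servers: set[str]) -> bool:
        hostname = hostname.lower()
        return any(
            hostname == blocked or hostname.endswith("." + blocked)
            for blocked in blocked_servers
        )

    assert is_hostname_blocked("sub.evil.example", {"evil.example"})
    assert not is_hostname_blocked("notevil.example", {"evil.example"})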
Thomas Sileo e8fcf5a9a2 Tweak video mode 2022-12-03 19:57:13 +01:00
Thomas Sileo 7525744f82 Test new GIF mode for videos without sound 2022-12-03 19:47:11 +01:00
Thomas Sileo 7d3fc35a24 More proxy client tweaks 2022-12-02 19:40:58 +01:00
Thomas Sileo 73dceee0f5 Fix proxy client 2022-12-02 19:28:59 +01:00
Thomas Sileo 34c7cdb5fb Fix Undo{Announce} recipients 2022-12-02 18:48:23 +01:00
Thomas Sileo 0527e34476 Tweak proxy client 2022-12-02 18:48:05 +01:00
Thomas Sileo a82f619e89 Revert "fix unshare by getting recipients from Announce activity instead of Undo"
This reverts commit dcd44ec3b6.
2022-12-02 18:12:24 +01:00
Thomas Sileo a68b3e7318 Don't insert an empty div on the index when there's no pages 2022-11-30 20:11:20 +01:00
Thomas Sileo 436d5ccf1b Tweak in reply to this xyz text 2022-11-30 19:30:26 +01:00
Thomas Sileo a273f26549 Only show local delete for local replies 2022-11-30 17:49:36 +01:00
Thomas Sileo 9d357446d2 Tweak logging 2022-11-30 17:37:08 +01:00
Alexey Shpakovsky 6cabff21db Document running from subpath 2022-11-30 14:14:09 +01:00
João Costa 5df4d420de Whitelist object types in the index query
Select the outbox object types that we want to show on the notes page
instead of removing objects that we don't want to show.
That way, it's easier to ensure that there are no objects messing up the
object count/empty checks.

Partially fixes https://todo.sr.ht/~tsileo/microblog.pub/65
2022-11-30 14:10:28 +01:00
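A hypothetical sketch of the allowlist approach this commit describes, using an assumed SQLAlchemy model shape rather than the project's actual one:

    from sqlalchemy import Boolean, Column, Integer, String, select
    from sqlalchemy.orm import declarative_base

    Base = declarative_base()

    class OutboxObject(Base):  # assumed shape, not microblog.pub's real model
        __tablename__ = "outbox"
        id = Column(Integer, primary_key=True)
        ap_type = Column(String)
        is_deleted = Column(Boolean, default=False)

    # Whitelist the types shown on the notes page instead of excluding the
    # ones we don't want, so stray types cannot skew the count/empty checks.
    INDEX_ALLOWED_AP_TYPES = ["Note", "Article", "Question", "Announce"]

    index_query = select(OutboxObject).where(
        OutboxObject.ap_type.in_(INDEX_ALLOWED_AP_TYPES),
        OutboxObject.is_deleted.is_(False),
    )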
Jane 68884d9afa Use <details> element for sensitive text
The sensitive text feature was implemented with <label> and hidden
checkbox <input> elements. There were two issues with this
implementation:
1. The user couldn't navigate to the "show/hide more" button using
   keyboard.
2. The label indicates two actions at the same time ("show/hide more"),
   making it unclear what the function of the checkbox was and what the
   current show/collapse state was.

As it is generally preferable to use built-in HTML elements for the
best semantics, this commit moves to use the <details> and <summary>
elements for the sensitive text feature. The browser will open/collapse
the content in <details> automatically when the user clicks on the
<summary>, and keyboard navigation support is built-in.

This commit also changes the button to display "show more" or "show
less" depending on the state for visual clarity. This button is hidden
from the accessibility tree using `aria-label="false"`, as the <details>
element already exposes its state to the tree and we want to avoid
duplicated information.

A few caveats:
* The "show/hide sensitive content" button for sensitive attachments
  hasn't been changed yet as I'd like to get more feedback about the new
  implementation.
* As the summary/content warning text itself is also part of the
  <summary> tag, the user can now also click on them to toggle the
  visibility of the sensitive text. This may not be desirable as the
  current interface does not make it clear that this could happen; the
  user may try to select some text from the summary and be surprised
  by the sensitive text being expanded. One way to improve this would
  be to add an event listener to the summary text and call
  `preventDefault`, but this would introduce JavaScript code.
2022-11-30 12:26:34 +01:00
Thomas Sileo 46a592b11e Switch back to HTTP1 for the media proxy client 2022-11-30 12:26:31 +01:00
Thomas Sileo 5f0b8f5dfd Tweak media proxy client 2022-11-28 20:58:16 +01:00
Thomas Sileo 5adb2bca9a Revert "Update deps"
This reverts commit 08cc74d928.
2022-11-28 20:35:53 +01:00
Thomas Sileo 08cc74d928 Update deps 2022-11-28 20:30:37 +01:00
Thomas Sileo 578581b4dc More mf2 improvements for shares/reposts 2022-11-27 16:29:49 +01:00
Thomas Sileo ec36272bb4 Allow to disable certain notification type 2022-11-27 12:11:42 +01:00
Thomas Sileo e30e0de10e No more HTTP sig check on the actor profile 2022-11-27 11:36:15 +01:00
Thomas Sileo e672d9b9f0 Update AUTHORS 2022-11-27 11:33:46 +01:00
Sam dcd44ec3b6 fix unshare by getting recipients from Announce activity instead of Undo 2022-11-27 11:31:45 +01:00
Sam 71a4ea2425 fix typo on deleted object ap_type 2022-11-27 11:29:54 +01:00
Thomas Sileo 441e3d90b1 Fix formatting 2022-11-23 21:58:59 +01:00
Alexey Shpakovsky d9b9f596d3 Skip custom emojis which don't match emoji regexp
Otherwise, emojis containing forbidden symbols (for example, `-`)
appear in the "emoji selector" on the admin/new page, but are not
replaced with the emoji image on submit.

Also add a note to documentation mentioning allowed characters.
2022-11-23 21:54:02 +01:00
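A self-contained sketch of the filtering this commit describes; the shortcode pattern is an assumption, not the project's exact regexp:

    import re

    # Assumed shortcode rule: letters, digits and underscores only.
    EMOJI_SHORTCODE_RE = re.compile(r"^[a-zA-Z0-9_]+$")

    def usable_custom_emojis(filenames: list[str]) -> list[str]:
        """Keep only emoji files whose stem can be matched again on submit;
        a file like "some-emoji.png" would show up in the picker but never
        be replaced, so it is skipped here."""
        usable = []
        for name in filenames:
            stem = name.rsplit(".", 1)[0]
            if EMOJI_SHORTCODE_RE.match(stem):
                usable.append(stem)
        return usable

    print(usable_custom_emojis(["party_blob.png", "some-emoji.png"]))  # ['party_blob']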
Thomas Sileo 2cc4eda143 Bootstrap stream customization (API may change) 2022-11-22 20:30:35 +01:00
Thomas Sileo bd065446bf Hack in HTTP sig to drop Delete requests early on 2022-11-21 21:43:12 +01:00
Thomas Sileo 8475f5bccd Fix admin session timeout 2022-11-21 20:43:51 +01:00
Thomas Sileo a435cd33c9 Allow to delete webmentions 2022-11-20 11:56:58 +01:00
Thomas Sileo d692ec060f Tweak webmention processing 2022-11-20 11:31:00 +01:00
Thomas Sileo 4c6eb51ae2 Proper mf2 for replies 2022-11-20 11:12:34 +01:00
Thomas Sileo d36102255f Merge branch 'v2' into indieweb-merge-part2 2022-11-20 10:48:43 +01:00
Thomas Sileo cdbc545d5e Add a flag on new notifications 2022-11-20 10:13:17 +01:00
Thomas Sileo fbc46e0517 More logging for the admin session 2022-11-20 10:02:28 +01:00
Thomas Sileo ef4608f348 Switch back the proxy client to HTTP2 mode 2022-11-20 09:49:19 +01:00
Thomas Sileo 4638b98fa8 Regenerate AUTHORS file 2022-11-20 09:49:00 +01:00
Cocoa a9f41d6be7 Put 'with_icon' param in the correct macro call
Fix for https://todo.sr.ht/~tsileo/microblog.pub/66
2022-11-20 09:47:54 +01:00
Thomas Sileo 59dfc3d128 Update the install guide 2022-11-19 08:38:51 +01:00
Thomas Sileo 822280c280 Tweak proxy client (increased timeout, no more HTTP2) 2022-11-19 08:32:44 +01:00
Thomas Sileo c83dd30f41 Increase admin session validity to 3 days 2022-11-19 08:16:53 +01:00
Thomas Sileo 9d312bc229 Fix typing 2022-11-19 08:15:36 +01:00
Kevin Wallace b37b77ad34 Make local actor icon optional
If a remote actor has no icon, we show our local default icon.

If we have no icon, we should allow remote instances to show their
default icon, instead of sending ours.
2022-11-19 08:12:49 +01:00
Thomas Sileo 9ee3f3b971 More progress on webmention replies 2022-11-19 08:12:33 +01:00
Thomas Sileo 066f5ec900 Merge branch 'v2' into indieweb-merge-part2 2022-11-18 20:36:58 +01:00
Kevin Wallace a2254f2674 Add return type to hmac_sha256 2022-11-18 20:30:29 +01:00
Kevin Wallace 2151733e4f Add robots meta tags on pages in robots.txt
Useful when app is at a non-root path and we're not handling top-level
/robots.txt requests.
2022-11-18 20:30:29 +01:00
Kevin Wallace 3cff4e4507 Use BASE_URL when generating {proxied,resized}_image_url
Necessary when running at a non-root path
2022-11-18 20:30:29 +01:00
Thomas Sileo 120f92a9ed Display Webmention as replies when applicable 2022-11-18 20:20:58 +01:00
Thomas Sileo ae8029cd22 Fix template 2022-11-17 21:12:16 +01:00
Thomas Sileo 434fd98cd9 Merge IndieWeb likes/reposts with their AP counterpart 2022-11-17 21:03:24 +01:00
Thomas Sileo 89c90fba56 Start to merge IndieWeb and AP interactions 2022-11-17 09:18:06 +01:00
Thomas Sileo e29fe0a079 Fix DM admin page showing deleted objects 2022-11-15 23:07:10 +01:00
Thomas Sileo c5aee435f4 Tweak README 2022-11-15 22:22:56 +01:00
Thomas Sileo 224f5d3f55 Add AUTHORS file 2022-11-15 22:20:28 +01:00
Thomas Sileo 6583feb87d Tweak the documentation about contributions 2022-11-15 22:17:55 +01:00
Thomas Sileo 04e75c78e0 Handle inbox delete handler for actors 2022-11-15 21:47:51 +01:00
Thomas Sileo 68c27e083f Allow to click on picture to see the original one 2022-11-14 21:23:41 +01:00
Thomas Sileo d52528584a Tweak template for the local delete button 2022-11-13 18:32:38 +01:00
Thomas Sileo d352dc104a Add local delete option
Useful for removing replies showing up on the public website.
2022-11-13 18:19:52 +01:00
Thomas Sileo 0c5ce67d4e Tweak remote instance redirection 2022-11-13 17:37:19 +01:00
Kevin Wallace 9db7bdf0fb remote follow: use HTML redirect to work around CSP issue
In Chrome, I get the following when trying to use the remote follow form:

    Refused to send form data to 'https://example.com/remote_follow'
    because it violates the following Content Security Policy directive:
    "form-action 'self'".

It seems some browsers (but notably not Firefox) apply the form-action
policy to the redirect target in addition to the initial form
submission endpoint.  See:

    https://github.com/w3c/webappsec-csp/issues/8

In that thread, this workaround is suggested.
2022-11-13 17:11:02 +01:00
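A minimal sketch of the suggested workaround, assuming a FastAPI-style handler (the real endpoint and template differ): instead of answering the form POST with a 302 to the remote instance, return a small HTML page that performs the redirect client-side, so form-action 'self' is never applied to the remote target.

    from html import escape

    from fastapi.responses import HTMLResponse

    def html_redirect(url: str) -> HTMLResponse:
        """Return a 200 HTML response that immediately navigates to `url`."""
        safe_url = escape(url, quote=True)
        body = (
            "<!DOCTYPE html>"
            f'<html><head><meta http-equiv="refresh" content="0;url={safe_url}">'
            f'</head><body><a href="{safe_url}">Continue</a></body></html>'
        )
        return HTMLResponse(content=body, status_code=200)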
Thomas Sileo 793a939046 Fix OG metadata scraping and improve workers 2022-11-13 13:00:22 +01:00
Thomas Sileo c3eb44add7 Improve mention parsing 2022-11-12 10:04:37 +01:00
Thomas Sileo 9b75020c91 Fix for profile image URL support 2022-11-12 09:26:28 +01:00
Thomas Sileo 36a1a6bd9c Fix for processing objects from Birdsite LIVE 2022-11-12 09:01:56 +01:00
Thomas Sileo 164cd9bd00 Webfinger strips extra space 2022-11-11 15:25:55 +01:00
Thomas Sileo 698a2bae11 Follow up fixes for the image URL support 2022-11-11 15:13:45 +01:00
Alexey Shpakovsky 4613997fe3 Add option to set image_url ("background image") for user
While this option is not used anywhere in microblog.pub itself, some
other servers do occasionally use it when showing remote profiles.

Also, this image _can_ be used in microblog.pub - just add this:

	<img src="{{ local_actor.image_url }}">

in the appropriate place of your template!
2022-11-11 15:08:17 +01:00
Thomas Sileo 4c995957a6 Merge branch 'test-css-tweak' into v2 2022-11-11 15:07:40 +01:00
Thomas Sileo 5c98b8dbfb Revert "Minor styling tweaks: piccalil.li's modern CSS Reset swyx.io's 100 Bytes of CSS to look great everywhere"
This reverts commit a339ff93b1.
2022-11-11 15:07:18 +01:00
Thomas Sileo 48d5914851 Tweak orientation hint for attachments 2022-11-11 14:56:56 +01:00
Ash McAllan 8f00e522d7 pass through width and height of attachments to allow styling based on media orientation 2022-11-11 14:20:59 +01:00
Thomas Sileo 62c9327500 Add support for setting a custom CSP 2022-11-09 21:26:43 +01:00
Cassio Zen a339ff93b1 Minor styling tweaks: piccalil.li's modern CSS Reset swyx.io's 100 Bytes of CSS to look great everywhere 2022-11-09 20:39:27 +01:00
Thomas Sileo afd253a1b4 Fix OG image URL 2022-11-09 09:29:25 +01:00
Thomas Sileo 509e10e79b Fix active URL in the navbar 2022-11-09 08:15:29 +01:00
Thomas Sileo d96ec913d4 Add support for displaying events from Mobilizon 2022-11-07 20:35:23 +01:00
Thomas Sileo 5b505b0e37 Update deps 2022-11-07 18:53:52 +01:00
Thomas Sileo 530491ff10 Fix typing 2022-11-07 18:53:45 +01:00
Kevin Wallace 48740ea8cb Allow templates to be overridden in data/templates/
I'd like to customize my instance's theme beyond what's possible with
_theme.scss.  This patch would allow me to do that, and keep my changes
self-contained in data/ without maintaining a local patchset over
app/templates/.

For utils.html, I've also added scoped blocks around the body of every
macro.  This allows the macros to be overridden individually in
data/templates/utils.html, without copying the whole file.  For example,
to only override the display of a specific actor's name/icon:

    {% extends "app/utils.html" %}
    {% block display_actor %}
    {% if actor.ap_id == "https://me.example.com" %}
    <!-- custom actor display -->
    {% else %}
    {{ super() }}
    {% endif %}
    {% endblock %}
2022-11-07 18:46:21 +01:00
Thomas Sileo 0d7c121781 Fix formatting 2022-11-06 16:57:04 +01:00
Kevin Wallace a4cfd65009 Sign media URLs to avoid becoming an open proxy
Signatures are valid for ~1 week.
2022-11-04 19:36:26 +01:00
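A hedged sketch of one way to sign proxied media URLs with an expiry, using only the standard library; the project's actual signing scheme and key handling differ:

    import hashlib
    import hmac
    import time

    SECRET_KEY = b"change-me"   # assumed: loaded from the instance config
    MAX_AGE = 7 * 24 * 3600     # signatures valid for roughly one week

    def sign_media_url(url: str) -> str:
        """Append an expiry timestamp and an HMAC over (url, expiry)."""
        expires = int(time.time()) + MAX_AGE
        sig = hmac.new(
            SECRET_KEY, f"{url}:{expires}".encode(), hashlib.sha256
        ).hexdigest()
        sep = "&" if "?" in url else "?"
        return f"{url}{sep}expires={expires}&sig={sig}"

    def verify_media_url(url: str, expires: int, sig: str) -> bool:
        expected = hmac.new(
            SECRET_KEY, f"{url}:{expires}".encode(), hashlib.sha256
        ).hexdigest()
        return hmac.compare_digest(expected, sig) and time.time() < expires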
Thomas Sileo 540b9d1470 Minor tweaks about non-root handling 2022-11-04 19:28:21 +01:00
Kevin Wallace 1c076049cf Fix URL generation when not at domain root 2022-11-04 19:22:30 +01:00
Kevin Wallace 242bf7b515 fixup! Fix URL generation when not at domain root
Oops -- missed these two!  Sorry for the noise; let me know if you'd
like me to squash and resubmit.
2022-11-04 19:22:30 +01:00
Kevin Wallace 2843155501 Allow actor id to be specified in config
This is useful if the actor won't be at the root of the domain.
2022-11-04 19:22:30 +01:00
Thomas Sileo 0badf0bc1f Fix permalink for Questions 2022-11-03 22:38:29 +01:00
Thomas Sileo 32692a7dcd First shot at supporting custom handler 2022-11-02 08:51:21 +01:00
Thomas Sileo 817dd98c5c Update deps 2022-11-01 19:11:47 +01:00
Thomas Sileo b6f0cd01d3 Less HTML restrictions for local content 2022-10-30 18:47:24 +01:00
Thomas Sileo c985dd84c3 Add slugify helper 2022-10-30 17:51:57 +01:00
Thomas Sileo 3d049da2e5 Add slug support for Article 2022-10-30 17:50:59 +01:00
Thomas Sileo fd5293a05c Fix password reset task 2022-10-23 16:40:56 +02:00
Thomas Sileo 3729500e3e Improve Block support 2022-10-23 16:37:32 +02:00
Thomas Sileo 2853bf2a28 Fix tag dedup 2022-10-20 19:39:55 +02:00
Thomas Sileo 0144a1c0d4 Tweak Mistletoe autolink 2022-10-19 21:09:30 +02:00
Thomas Sileo d93bcf6128 Complete the switch to mistletoe 2022-10-19 20:46:01 +02:00
Josh Washburne 647add2bab Added the ability to use a custom favicon. 2022-10-19 09:03:58 +02:00
Thomas Sileo f50a233ce9 Improved Block support 2022-10-18 21:39:09 +02:00
Thomas Sileo d909bf93a0 Tweak/fix install steps in the docs 2022-10-18 19:26:04 +02:00
Thomas Sileo 8e7fbcc501 Tweak actor refresh 2022-10-11 20:49:06 +02:00
Thomas Sileo 7a665df2b5 Tweak README 2022-10-10 11:05:36 +02:00
Thomas Sileo b5b56e9ed5 More actor refresh improvements 2022-10-09 11:36:00 +02:00
Thomas Sileo 9a36b0edf5 Fix conversation processing 2022-10-07 19:50:14 +02:00
Thomas Sileo 20f996d165 Tweak HTTP sig handling 2022-10-07 19:00:18 +02:00
Thomas Sileo 602da69083 Support actor refresh while checking HTTP sig 2022-10-07 12:05:28 +02:00
Thomas Sileo f6cfe06f66 Force refresh actor once in a while 2022-10-07 08:55:05 +02:00
Thomas Sileo c8a9793638 Make hashtag case insensitive 2022-10-05 20:27:21 +02:00
Thomas Sileo 5eaa0f291b More Markdown improvements 2022-10-05 20:05:16 +02:00
77 changed files with 5628 additions and 2373 deletions

AUTHORS (new file, 11 lines)
View File

@ -0,0 +1,11 @@
Thomas Sileo <t@a4.io>
Kevin Wallace <doof@doof.net>
Miguel Jacq <mig@mig5.net>
Alexey Shpakovsky <alexey@shpakovsky.ru>
Josh Washburne <josh@jodh.us>
João Costa <jdpc557@gmail.com>
Sam <samr1.dev@pm.me>
Ash McAllan <acegiak@gmail.com>
Cassio Zen <cassio@hey.com>
Cocoa <momijizukamori@gmail.com>
Jane <jane@janeirl.dev>

View File

@ -1,4 +1,4 @@
FROM python:3.10-slim as python-base
FROM python:3.11-slim as python-base
ENV PYTHONUNBUFFERED=1 \
PYTHONDONTWRITEBYTECODE=1 \
POETRY_HOME="/opt/poetry" \

View File

@ -12,32 +12,36 @@ config:
.PHONY: update
update:
-docker run --volume `pwd`/data:/app/data --volume `pwd`/app/static:/app/app/static microblogpub/microblogpub inv update --no-update-deps
-docker run --rm --volume `pwd`/data:/app/data --volume `pwd`/app/static:/app/app/static microblogpub/microblogpub inv update --no-update-deps
.PHONY: prune-old-data
prune-old-data:
-docker run --volume `pwd`/data:/app/data --volume `pwd`/app/static:/app/app/static microblogpub/microblogpub inv prune-old-data
-docker run --rm --volume `pwd`/data:/app/data --volume `pwd`/app/static:/app/app/static microblogpub/microblogpub inv prune-old-data
.PHONY: webfinger
webfinger:
-docker run --volume `pwd`/data:/app/data --volume `pwd`/app/static:/app/app/static microblogpub/microblogpub inv webfinger $(account)
-docker run --rm --volume `pwd`/data:/app/data --volume `pwd`/app/static:/app/app/static microblogpub/microblogpub inv webfinger $(account)
.PHONY: move-to
move-to:
-docker run --volume `pwd`/data:/app/data --volume `pwd`/app/static:/app/app/static microblogpub/microblogpub inv move-to $(account)
-docker run --rm --volume `pwd`/data:/app/data --volume `pwd`/app/static:/app/app/static microblogpub/microblogpub inv move-to $(account)
.PHONY: self-destruct
self-destruct:
-docker run --volume `pwd`/data:/app/data --volume `pwd`/app/static:/app/app/static microblogpub/microblogpub inv self-destruct
-docker run --rm --it --volume `pwd`/data:/app/data --volume `pwd`/app/static:/app/app/static microblogpub/microblogpub inv self-destruct
.PHONY: reset-password
reset-password:
-docker run --volume `pwd`/data:/app/data --volume `pwd`/app/static:/app/app/static microblogpub/microblogpub inv reset-password
-docker run --rm -it --volume `pwd`/data:/app/data --volume `pwd`/app/static:/app/app/static microblogpub/microblogpub inv reset-password
.PHONY: check-config
check-config:
-docker run --volume `pwd`/data:/app/data --volume `pwd`/app/static:/app/app/static microblogpub/microblogpub inv check-config
-docker run --rm --volume `pwd`/data:/app/data --volume `pwd`/app/static:/app/app/static microblogpub/microblogpub inv check-config
.PHONY: compile-scss
compile-scss:
-docker run --volume `pwd`/data:/app/data --volume `pwd`/app/static:/app/app/static microblogpub/microblogpub inv compile-scss
-docker run --rm --volume `pwd`/data:/app/data --volume `pwd`/app/static:/app/app/static microblogpub/microblogpub inv compile-scss
.PHONY: import-mastodon-following-accounts
import-mastodon-following-accounts:
-docker run --rm --volume `pwd`/data:/app/data --volume `pwd`/app/static:/app/app/static microblogpub/microblogpub inv import-mastodon-following-accounts $(path)

View File

@ -10,6 +10,7 @@ Instances in the wild:
- [microblog.pub](https://microblog.pub/) (follow to get updated about the project)
- [hexa.ninja](https://hexa.ninja) (theme customization example)
- [testing.microblog.pub](https://testing.microblog.pub/)
- [Irish Left Archive](https://posts.leftarchive.ie/) (another theme customization example)
There are still some rough edges, but the server is mostly functional.
@ -22,7 +23,7 @@ There are still some rough edges, but the server is mostly functional.
- Author notes in Markdown, with code highlighting support
- Dedicated section for articles/blog posts (enabled when the first article is posted)
- Lightweight
- Uses SQLite, and no external dependencies except Python 3.10+
- Uses SQLite, and Python 3.10+
- Can be deployed on small VPS
- Privacy-aware
- EXIF metadata (like GPS location) are stripped before storage
@ -58,7 +59,7 @@ All the development takes place on [sourcehut](https://sr.ht/~tsileo/microblog.p
- [Issue tracker](https://todo.sr.ht/~tsileo/microblog.pub)
- [Mailing list](https://sr.ht/~tsileo/microblog.pub/lists)
Contributions are welcomed, check out the [documentation](https://docs.microblog.pub) for more details.
Contributions are welcomed, check out the [contributing section of the documentation](https://docs.microblog.pub/developer_guide.html#contributing) for more details.
## License

View File

@ -0,0 +1,48 @@
"""Add a slug field for outbox objects
Revision ID: b28c0551c236
Revises: 604d125ea2fb
Create Date: 2022-10-30 14:09:14.540461+00:00
"""
import sqlalchemy as sa
from sqlalchemy import select
from sqlalchemy.orm.session import Session
from alembic import op
# revision identifiers, used by Alembic.
revision = 'b28c0551c236'
down_revision = '604d125ea2fb'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('outbox', schema=None) as batch_op:
batch_op.add_column(sa.Column('slug', sa.String(), nullable=True))
batch_op.create_index(batch_op.f('ix_outbox_slug'), ['slug'], unique=False)
# ### end Alembic commands ###
# Backfill the slug for existing articles
from app.models import OutboxObject
from app.utils.text import slugify
sess = Session(op.get_bind())
articles = sess.execute(select(OutboxObject).where(
OutboxObject.ap_type == "Article")
).scalars()
for article in articles:
title = article.ap_object["name"]
article.slug = slugify(title)
sess.commit()
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('outbox', schema=None) as batch_op:
batch_op.drop_index(batch_op.f('ix_outbox_slug'))
batch_op.drop_column('slug')
# ### end Alembic commands ###

View File

@ -0,0 +1,32 @@
"""Add Webmention.webmention_type
Revision ID: fadfd359ce78
Revises: b28c0551c236
Create Date: 2022-11-16 19:42:56.925512+00:00
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = 'fadfd359ce78'
down_revision = 'b28c0551c236'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('webmention', schema=None) as batch_op:
batch_op.add_column(sa.Column('webmention_type', sa.Enum('UNKNOWN', 'LIKE', 'REPLY', 'REPOST', name='webmentiontype'), nullable=True))
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('webmention', schema=None) as batch_op:
batch_op.drop_column('webmention_type')
# ### end Alembic commands ###

View File

@ -0,0 +1,32 @@
"""Add option to hide announces from actor
Revision ID: 9b404c47970a
Revises: fadfd359ce78
Create Date: 2022-12-12 19:26:36.912763+00:00
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '9b404c47970a'
down_revision = 'fadfd359ce78'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('actor', schema=None) as batch_op:
batch_op.add_column(sa.Column('are_announces_hidden_from_stream', sa.Boolean(), server_default='0', nullable=False))
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('actor', schema=None) as batch_op:
batch_op.drop_column('are_announces_hidden_from_stream')
# ### end Alembic commands ###

View File

@ -0,0 +1,48 @@
"""Add OAuth client
Revision ID: 4ab54becec04
Revises: 9b404c47970a
Create Date: 2022-12-16 17:30:54.520477+00:00
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '4ab54becec04'
down_revision = '9b404c47970a'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('oauth_client',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
sa.Column('client_name', sa.String(), nullable=False),
sa.Column('redirect_uris', sa.JSON(), nullable=True),
sa.Column('client_uri', sa.String(), nullable=True),
sa.Column('logo_uri', sa.String(), nullable=True),
sa.Column('scope', sa.String(), nullable=True),
sa.Column('client_id', sa.String(), nullable=False),
sa.Column('client_secret', sa.String(), nullable=False),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('client_secret')
)
with op.batch_alter_table('oauth_client', schema=None) as batch_op:
batch_op.create_index(batch_op.f('ix_oauth_client_client_id'), ['client_id'], unique=True)
batch_op.create_index(batch_op.f('ix_oauth_client_id'), ['id'], unique=False)
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('oauth_client', schema=None) as batch_op:
batch_op.drop_index(batch_op.f('ix_oauth_client_id'))
batch_op.drop_index(batch_op.f('ix_oauth_client_client_id'))
op.drop_table('oauth_client')
# ### end Alembic commands ###

View File

@ -0,0 +1,36 @@
"""Add OAuth refresh token support
Revision ID: a209f0333f5a
Revises: 4ab54becec04
Create Date: 2022-12-18 11:26:31.976348+00:00
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = 'a209f0333f5a'
down_revision = '4ab54becec04'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('indieauth_access_token', schema=None) as batch_op:
batch_op.add_column(sa.Column('refresh_token', sa.String(), nullable=True))
batch_op.add_column(sa.Column('was_refreshed', sa.Boolean(), server_default='0', nullable=False))
batch_op.create_index(batch_op.f('ix_indieauth_access_token_refresh_token'), ['refresh_token'], unique=True)
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('indieauth_access_token', schema=None) as batch_op:
batch_op.drop_index(batch_op.f('ix_indieauth_access_token_refresh_token'))
batch_op.drop_column('was_refreshed')
batch_op.drop_column('refresh_token')
# ### end Alembic commands ###

View File

@ -6,7 +6,6 @@ from typing import Any
import httpx
from loguru import logger
from markdown import markdown
from app import config
from app.config import ALSO_KNOWN_AS
@ -14,6 +13,7 @@ from app.config import AP_CONTENT_TYPE # noqa: F401
from app.config import MOVED_TO
from app.httpsig import auth
from app.key import get_pubkey_as_pem
from app.source import dedup_tags
from app.source import hashtagify
from app.utils.url import check_url
@ -101,6 +101,19 @@ class VisibilityEnum(str, enum.Enum):
_LOCAL_ACTOR_SUMMARY, _LOCAL_ACTOR_TAGS = hashtagify(config.CONFIG.summary)
_LOCAL_ACTOR_METADATA = []
if config.CONFIG.metadata:
for kv in config.CONFIG.metadata:
kv_value, kv_tags = hashtagify(kv.value)
_LOCAL_ACTOR_METADATA.append(
{
"name": kv.key,
"type": "PropertyValue",
"value": kv_value,
}
)
_LOCAL_ACTOR_TAGS.extend(kv_tags)
ME = {
"@context": AS_EXTENDED_CTX,
@ -113,7 +126,7 @@ ME = {
"outbox": config.BASE_URL + "/outbox",
"preferredUsername": config.USERNAME,
"name": config.CONFIG.name,
"summary": markdown(_LOCAL_ACTOR_SUMMARY, extensions=["mdx_linkify"]),
"summary": _LOCAL_ACTOR_SUMMARY,
"endpoints": {
# For compat with servers expecting a sharedInbox...
"sharedInbox": config.BASE_URL
@ -121,35 +134,35 @@ ME = {
},
"url": config.ID + "/", # XXX: the path is important for Mastodon compat
"manuallyApprovesFollowers": config.CONFIG.manually_approves_followers,
"attachment": [
{
"name": kv.key,
"type": "PropertyValue",
"value": markdown(kv.value, extensions=["mdx_linkify", "fenced_code"]),
}
for kv in config.CONFIG.metadata
]
if config.CONFIG.metadata
else [],
"icon": {
"mediaType": mimetypes.guess_type(config.CONFIG.icon_url)[0],
"type": "Image",
"url": config.CONFIG.icon_url,
},
"attachment": _LOCAL_ACTOR_METADATA,
"publicKey": {
"id": f"{config.ID}#main-key",
"owner": config.ID,
"publicKeyPem": get_pubkey_as_pem(config.KEY_PATH),
},
"tag": _LOCAL_ACTOR_TAGS,
"tag": dedup_tags(_LOCAL_ACTOR_TAGS),
}
if config.CONFIG.icon_url:
ME["icon"] = {
"mediaType": mimetypes.guess_type(config.CONFIG.icon_url)[0],
"type": "Image",
"url": config.CONFIG.icon_url,
}
if ALSO_KNOWN_AS:
ME["alsoKnownAs"] = [ALSO_KNOWN_AS]
if MOVED_TO:
ME["movedTo"] = MOVED_TO
if config.CONFIG.image_url:
ME["image"] = {
"mediaType": mimetypes.guess_type(config.CONFIG.image_url)[0],
"type": "Image",
"url": config.CONFIG.image_url,
}
class NotAnObjectError(Exception):
def __init__(self, url: str, resp: httpx.Response | None = None) -> None:

View File

@ -1,17 +1,25 @@
import hashlib
import typing
from dataclasses import dataclass
from datetime import timedelta
from functools import cached_property
from typing import Union
from urllib.parse import urlparse
import httpx
from loguru import logger
from sqlalchemy import select
from sqlalchemy.orm import joinedload
from app import activitypub as ap
from app import media
from app.config import BASE_URL
from app.config import USER_AGENT
from app.config import USERNAME
from app.config import WEBFINGER_DOMAIN
from app.database import AsyncSession
from app.utils.datetime import as_utc
from app.utils.datetime import now
if typing.TYPE_CHECKING:
from app.models import Actor as ActorModel
@ -23,7 +31,38 @@ def _handle(raw_actor: ap.RawObject) -> str:
if not domain.hostname:
raise ValueError(f"Invalid actor ID {ap_id}")
return f'@{raw_actor["preferredUsername"]}@{domain.hostname}' # type: ignore
handle = f'@{raw_actor["preferredUsername"]}@{domain.hostname}' # type: ignore
# TODO: cleanup this
# Next, check for custom webfinger domains
resp: httpx.Response | None = None
for url in {
f"https://{domain.hostname}/.well-known/webfinger",
f"http://{domain.hostname}/.well-known/webfinger",
}:
try:
logger.info(f"Webfinger {handle} at {url}")
resp = httpx.get(
url,
params={"resource": f"acct:{handle[1:]}"},
headers={
"User-Agent": USER_AGENT,
},
follow_redirects=True,
)
resp.raise_for_status()
break
except Exception:
logger.exception(f"Failed to webfinger {handle}")
if resp:
try:
json_resp = resp.json()
if json_resp.get("subject", "").startswith("acct:"):
return "@" + json_resp["subject"].removeprefix("acct:")
except Exception:
logger.exception(f"Failed to parse webfinger response for {handle}")
return handle
class Actor:
@ -57,7 +96,7 @@ class Actor:
return self.name
return self.preferred_username
@property
@cached_property
def handle(self) -> str:
return _handle(self.ap_actor)
@ -79,11 +118,21 @@ class Actor:
@property
def icon_url(self) -> str | None:
return self.ap_actor.get("icon", {}).get("url")
if icon := self.ap_actor.get("icon"):
return icon.get("url")
return None
@property
def icon_media_type(self) -> str | None:
return self.ap_actor.get("icon", {}).get("mediaType")
if icon := self.ap_actor.get("icon"):
return icon.get("mediaType")
return None
@property
def image_url(self) -> str | None:
if image := self.ap_actor.get("image"):
return image.get("url")
return None
@property
def public_key_as_pem(self) -> str:
@ -98,14 +147,14 @@ class Actor:
if self.icon_url:
return media.proxied_media_url(self.icon_url)
else:
return "/static/nopic.png"
return BASE_URL + "/static/nopic.png"
@property
def resized_icon_url(self) -> str:
if self.icon_url:
return media.resized_media_url(self.icon_url, 50)
else:
return "/static/nopic.png"
return BASE_URL + "/static/nopic.png"
@property
def tags(self) -> list[ap.RawObject]:
@ -129,13 +178,18 @@ class Actor:
class RemoteActor(Actor):
def __init__(self, ap_actor: ap.RawObject) -> None:
def __init__(self, ap_actor: ap.RawObject, handle: str | None = None) -> None:
if (ap_type := ap_actor.get("type")) not in ap.ACTOR_TYPES:
raise ValueError(f"Unexpected actor type: {ap_type}")
self._ap_actor = ap_actor
self._ap_type = ap_type
if handle is None:
handle = _handle(ap_actor)
self._handle = handle
@property
def ap_actor(self) -> ap.RawObject:
return self._ap_actor
@ -148,8 +202,12 @@ class RemoteActor(Actor):
def is_from_db(self) -> bool:
return False
@property
def handle(self) -> str:
return self._handle
LOCAL_ACTOR = RemoteActor(ap_actor=ap.ME)
LOCAL_ACTOR = RemoteActor(ap_actor=ap.ME, handle=f"@{USERNAME}@{WEBFINGER_DOMAIN}")
async def save_actor(db_session: AsyncSession, ap_actor: ap.RawObject) -> "ActorModel":
@ -189,26 +247,64 @@ async def fetch_actor(
if existing_actor:
if existing_actor.is_deleted:
raise ap.ObjectNotFoundError(f"{actor_id} was deleted")
return existing_actor
else:
if save_if_not_found:
ap_actor = await ap.fetch(actor_id)
# Some softwares uses URL when we expect ID
if actor_id == ap_actor.get("url"):
# Which mean we may already have it in DB
existing_actor_by_url = (
await db_session.scalars(
select(models.Actor).where(
models.Actor.ap_id == ap.get_id(ap_actor),
)
)
).one_or_none()
if existing_actor_by_url:
return existing_actor_by_url
return await save_actor(db_session, ap_actor)
if now() - as_utc(existing_actor.updated_at) > timedelta(hours=24):
logger.info(
f"Refreshing {actor_id=} last updated {existing_actor.updated_at}"
)
try:
ap_actor = await ap.fetch(actor_id)
await update_actor_if_needed(
db_session,
existing_actor,
RemoteActor(ap_actor),
)
return existing_actor
except Exception:
logger.exception(f"Failed to refresh {actor_id}")
# If we fail to refresh the actor, return the cached one
return existing_actor
else:
raise ap.ObjectNotFoundError(actor_id)
return existing_actor
if save_if_not_found:
ap_actor = await ap.fetch(actor_id)
# Some software uses the URL when we expect the ID, or uses a different casing
# (like Birdsite LIVE), which means we may already have it in DB
existing_actor_by_url = (
await db_session.scalars(
select(models.Actor).where(
models.Actor.ap_id == ap.get_id(ap_actor),
)
)
).one_or_none()
if existing_actor_by_url:
# Update the actor as we had to fetch it anyway
await update_actor_if_needed(
db_session,
existing_actor_by_url,
RemoteActor(ap_actor),
)
return existing_actor_by_url
return await save_actor(db_session, ap_actor)
else:
raise ap.ObjectNotFoundError(actor_id)
async def update_actor_if_needed(
db_session: AsyncSession,
actor_in_db: "ActorModel",
ra: RemoteActor,
) -> None:
# Check if we actually need to update the actor in DB
if _actor_hash(ra) != _actor_hash(actor_in_db):
actor_in_db.ap_actor = ra.ap_actor
actor_in_db.handle = ra.handle
actor_in_db.ap_type = ra.ap_type
actor_in_db.updated_at = now()
await db_session.flush()
@dataclass
@ -217,9 +313,11 @@ class ActorMetadata:
is_following: bool
is_follower: bool
is_follow_request_sent: bool
is_follow_request_rejected: bool
outbox_follow_ap_id: str | None
inbox_follow_ap_id: str | None
moved_to: typing.Optional["ActorModel"]
has_blocked_local_actor: bool
ActorsMetadata = dict[str, ActorMetadata]
@ -262,6 +360,26 @@ async def get_actors_metadata(
)
)
}
rejected_follow_requests = {
reject.activity_object_ap_id
for reject in await db_session.execute(
select(models.InboxObject.activity_object_ap_id).where(
models.InboxObject.ap_type == "Reject",
models.InboxObject.ap_actor_id.in_(ap_actor_ids),
)
)
}
blocks = {
block.ap_actor_id
for block in await db_session.execute(
select(models.InboxObject.ap_actor_id).where(
models.InboxObject.ap_type == "Block",
models.InboxObject.undone_by_inbox_object_id.is_(None),
models.InboxObject.ap_actor_id.in_(ap_actor_ids),
)
)
}
idx: ActorsMetadata = {}
for actor in actors:
if not actor.ap_id:
@ -284,9 +402,15 @@ async def get_actors_metadata(
is_following=actor.ap_id in following,
is_follower=actor.ap_id in followers,
is_follow_request_sent=actor.ap_id in sent_follow_requests,
is_follow_request_rejected=bool(
sent_follow_requests[actor.ap_id] in rejected_follow_requests
)
if actor.ap_id in sent_follow_requests
else False,
outbox_follow_ap_id=sent_follow_requests.get(actor.ap_id),
inbox_follow_ap_id=followers.get(actor.ap_id),
moved_to=moved_to,
has_blocked_local_actor=actor.ap_id in blocks,
)
return idx
@ -311,6 +435,9 @@ def _actor_hash(actor: Actor) -> bytes:
if actor.icon_url:
h.update(actor.icon_url.encode())
if actor.image_url:
h.update(actor.image_url.encode())
if actor.attachments:
for a in actor.attachments:
if a.get("type") != "PropertyValue":

View File

@ -1,4 +1,5 @@
from datetime import datetime
from urllib.parse import quote
import httpx
from fastapi import APIRouter
@ -11,6 +12,7 @@ from fastapi.exceptions import HTTPException
from fastapi.responses import RedirectResponse
from loguru import logger
from sqlalchemy import and_
from sqlalchemy import delete
from sqlalchemy import func
from sqlalchemy import or_
from sqlalchemy import select
@ -25,8 +27,11 @@ from app.actor import fetch_actor
from app.actor import get_actors_metadata
from app.boxes import get_inbox_object_by_ap_id
from app.boxes import get_outbox_object_by_ap_id
from app.boxes import send_block
from app.boxes import send_follow
from app.boxes import send_unblock
from app.config import EMOJIS
from app.config import SESSION_TIMEOUT
from app.config import generate_csrf_token
from app.config import session_serializer
from app.config import verify_csrf_token
@ -55,18 +60,23 @@ async def user_session_or_redirect(
_RedirectToLoginPage = HTTPException(
status_code=302,
headers={"Location": request.url_for("login") + f"?redirect={redirect_url}"},
headers={
"Location": request.url_for("login") + f"?redirect={quote(redirect_url)}"
},
)
if not session:
logger.info("No existing admin session")
raise _RedirectToLoginPage
try:
loaded_session = session_serializer.loads(session, max_age=3600 * 12)
loaded_session = session_serializer.loads(session, max_age=SESSION_TIMEOUT)
except Exception:
logger.exception("Failed to validate admin session")
raise _RedirectToLoginPage
if not loaded_session.get("is_logged_in"):
logger.info(f"Admin session invalidated: {loaded_session}")
raise _RedirectToLoginPage
return None
@ -179,8 +189,11 @@ async def admin_new(
content += f"{in_reply_to_object.actor.handle} "
for tag in in_reply_to_object.tags:
if tag.get("type") == "Mention" and tag["name"] != LOCAL_ACTOR.handle:
mentioned_actor = await fetch_actor(db_session, tag["href"])
content += f"{mentioned_actor.handle} "
try:
mentioned_actor = await fetch_actor(db_session, tag["href"])
content += f"{mentioned_actor.handle} "
except Exception:
logger.exception(f"Failed to lookup {mentioned_actor}")
# Copy the content warning if any
if in_reply_to_object.summary:
@ -340,6 +353,7 @@ async def admin_inbox(
"Update",
"Undo",
"Read",
"Reject",
"Add",
"Remove",
"EmojiReact",
@ -436,6 +450,7 @@ async def admin_direct_messages(
models.InboxObject.ap_context.is_not(None),
# Skip transient objects like poll replies
models.InboxObject.is_transient.is_(False),
models.InboxObject.is_deleted.is_(False),
)
.group_by(models.InboxObject.ap_context, models.InboxObject.actor_id)
)
@ -458,6 +473,7 @@ async def admin_direct_messages(
models.OutboxObject.ap_context.is_not(None),
# Skip transient objects like poll replies
models.OutboxObject.is_transient.is_(False),
models.OutboxObject.is_deleted.is_(False),
)
.group_by(models.OutboxObject.ap_context)
)
@ -713,13 +729,9 @@ async def get_notifications(
actors_metadata = await get_actors_metadata(
db_session, [notif.actor for notif in notifications if notif.actor]
)
for notif in notifications:
notif.is_new = False
await db_session.commit()
more_unread_count = 0
next_cursor = None
if notifications and remaining_count > page_size:
decoded_next_cursor = notifications[-1].created_at
next_cursor = pagination.encode_cursor(decoded_next_cursor)
@ -733,7 +745,8 @@ async def get_notifications(
)
)
return await templates.render_template(
# Render the template before we change the new flag on notifications
tpl_resp = await templates.render_template(
db_session,
request,
"notifications.html",
@ -745,6 +758,13 @@ async def get_notifications(
},
)
if len({notif.id for notif in notifications if notif.is_new}):
for notif in notifications:
notif.is_new = False
await db_session.commit()
return tpl_resp
@router.get("/object")
async def admin_object(
@ -847,6 +867,66 @@ async def admin_profile(
)
@router.post("/actions/force_delete")
async def admin_actions_force_delete(
request: Request,
ap_object_id: str = Form(),
redirect_url: str = Form(),
csrf_check: None = Depends(verify_csrf_token),
db_session: AsyncSession = Depends(get_db_session),
) -> RedirectResponse:
ap_object_to_delete = await get_inbox_object_by_ap_id(db_session, ap_object_id)
if not ap_object_to_delete:
raise ValueError(f"Cannot find {ap_object_id}")
logger.info(f"Deleting {ap_object_to_delete.ap_type}/{ap_object_to_delete.ap_id}")
await boxes._revert_side_effect_for_deleted_object(
db_session,
None,
ap_object_to_delete,
None,
)
ap_object_to_delete.is_deleted = True
await db_session.commit()
return RedirectResponse(redirect_url, status_code=302)
@router.post("/actions/force_delete_webmention")
async def admin_actions_force_delete_webmention(
request: Request,
webmention_id: int = Form(),
redirect_url: str = Form(),
csrf_check: None = Depends(verify_csrf_token),
db_session: AsyncSession = Depends(get_db_session),
) -> RedirectResponse:
webmention = await boxes.get_webmention_by_id(db_session, webmention_id)
if not webmention:
raise ValueError(f"Cannot find {webmention_id}")
if not webmention.outbox_object:
raise ValueError(f"Missing related outbox object for {webmention_id}")
# TODO: move this
logger.info(f"Deleting {webmention_id}")
webmention.is_deleted = True
await db_session.flush()
from app.webmentions import _handle_webmention_side_effects
await _handle_webmention_side_effects(
db_session, webmention, webmention.outbox_object
)
# Delete related notifications
notif_deletion_result = await db_session.execute(
delete(models.Notification)
.where(models.Notification.webmention_id == webmention.id)
.execution_options(synchronize_session=False)
)
logger.info(
f"Deleted {notif_deletion_result.rowcount} notifications" # type: ignore
)
await db_session.commit()
return RedirectResponse(redirect_url, status_code=302)
@router.post("/actions/follow")
async def admin_actions_follow(
request: Request,
@ -868,10 +948,7 @@ async def admin_actions_block(
csrf_check: None = Depends(verify_csrf_token),
db_session: AsyncSession = Depends(get_db_session),
) -> RedirectResponse:
logger.info(f"Blocking {ap_actor_id}")
actor = await fetch_actor(db_session, ap_actor_id)
actor.is_blocked = True
await db_session.commit()
await send_block(db_session, ap_actor_id)
return RedirectResponse(redirect_url, status_code=302)
@ -884,8 +961,34 @@ async def admin_actions_unblock(
db_session: AsyncSession = Depends(get_db_session),
) -> RedirectResponse:
logger.info(f"Unblocking {ap_actor_id}")
await send_unblock(db_session, ap_actor_id)
return RedirectResponse(redirect_url, status_code=302)
@router.post("/actions/hide_announces")
async def admin_actions_hide_announces(
request: Request,
ap_actor_id: str = Form(),
redirect_url: str = Form(),
csrf_check: None = Depends(verify_csrf_token),
db_session: AsyncSession = Depends(get_db_session),
) -> RedirectResponse:
actor = await fetch_actor(db_session, ap_actor_id)
actor.is_blocked = False
actor.are_announces_hidden_from_stream = True
await db_session.commit()
return RedirectResponse(redirect_url, status_code=302)
@router.post("/actions/show_announces")
async def admin_actions_show_announces(
request: Request,
ap_actor_id: str = Form(),
redirect_url: str = Form(),
csrf_check: None = Depends(verify_csrf_token),
db_session: AsyncSession = Depends(get_db_session),
) -> RedirectResponse:
actor = await fetch_actor(db_session, ap_actor_id)
actor.are_announces_hidden_from_stream = False
await db_session.commit()
return RedirectResponse(redirect_url, status_code=302)
@ -1082,7 +1185,7 @@ async def admin_actions_new(
elif name:
ap_type = "Article"
public_id = await boxes.send_create(
public_id, _ = await boxes.send_create(
db_session,
ap_type=ap_type,
source=content,

View File

@ -6,12 +6,13 @@ from typing import Any
import pydantic
from bs4 import BeautifulSoup # type: ignore
from markdown import markdown
from mistletoe import markdown # type: ignore
from app import activitypub as ap
from app.actor import LOCAL_ACTOR
from app.actor import Actor
from app.actor import RemoteActor
from app.config import ID
from app.media import proxied_media_url
from app.utils.datetime import now
from app.utils.datetime import parse_isoformat
@ -96,6 +97,9 @@ class Object:
def attachments(self) -> list["Attachment"]:
attachments = []
for obj in ap.as_list(self.ap_object.get("attachment", [])):
if obj.get("type") == "PropertyValue":
continue
if obj.get("type") == "Link":
attachments.append(
Attachment.parse_obj(
@ -176,7 +180,7 @@ class Object:
# PeerTube returns the content as markdown
if self.ap_object.get("mediaType") == "text/markdown":
content = markdown(content, extensions=["mdx_linkify"])
content = markdown(content)
return content
@ -209,6 +213,15 @@ class Object:
def in_reply_to(self) -> str | None:
return self.ap_object.get("inReplyTo")
@property
def is_local_reply(self) -> bool:
if not self.in_reply_to:
return False
return bool(
self.in_reply_to.startswith(ID) and self.content # Hide votes from Question
)
@property
def is_in_reply_to_from_inbox(self) -> bool | None:
if not self.in_reply_to:
@ -277,6 +290,9 @@ class Attachment(BaseModel):
proxied_url: str | None = None
resized_url: str | None = None
width: int | None = None
height: int | None = None
@property
def mimetype(self) -> str:
mimetype = self.media_type

View File

@ -1,4 +1,5 @@
"""Actions related to the AP inbox/outbox."""
import datetime
import uuid
from collections import defaultdict
from dataclasses import dataclass
@ -24,14 +25,17 @@ from app.actor import Actor
from app.actor import RemoteActor
from app.actor import fetch_actor
from app.actor import save_actor
from app.actor import update_actor_if_needed
from app.ap_object import RemoteObject
from app.config import BASE_URL
from app.config import BLOCKED_SERVERS
from app.config import ID
from app.config import MANUALLY_APPROVES_FOLLOWERS
from app.config import set_moved_to
from app.config import stream_visibility_callback
from app.customization import ObjectInfo
from app.database import AsyncSession
from app.outgoing_activities import new_outgoing_activity
from app.source import dedup_tags
from app.source import markdownify
from app.uploads import upload_to_attachment
from app.utils import opengraph
@ -39,10 +43,24 @@ from app.utils import webmentions
from app.utils.datetime import as_utc
from app.utils.datetime import now
from app.utils.datetime import parse_isoformat
from app.utils.facepile import WebmentionReply
from app.utils.text import slugify
from app.utils.url import is_hostname_blocked
AnyboxObject = models.InboxObject | models.OutboxObject
def is_notification_enabled(notification_type: models.NotificationType) -> bool:
"""Checks if a given notification type is enabled."""
if notification_type.value == "pending_incoming_follower":
# This one cannot be disabled as it would prevent manually reviewing
# follow requests.
return True
if notification_type.value in config.CONFIG.disabled_notifications:
return False
return True
def allocate_outbox_id() -> str:
return uuid.uuid4().hex
@ -61,6 +79,7 @@ async def save_outbox_object(
source: str | None = None,
is_transient: bool = False,
conversation: str | None = None,
slug: str | None = None,
) -> models.OutboxObject:
ro = await RemoteObject.from_raw_object(raw_object)
@ -80,6 +99,7 @@ async def save_outbox_object(
source=source,
is_transient=is_transient,
conversation=conversation,
slug=slug,
)
db_session.add(outbox_object)
await db_session.flush()
@ -88,6 +108,88 @@ async def save_outbox_object(
return outbox_object
async def send_unblock(db_session: AsyncSession, ap_actor_id: str) -> None:
actor = await fetch_actor(db_session, ap_actor_id)
block_activity = (
await db_session.scalars(
select(models.OutboxObject).where(
models.OutboxObject.activity_object_ap_id == actor.ap_id,
models.OutboxObject.is_deleted.is_(False),
)
)
).one_or_none()
if not block_activity:
raise ValueError(f"No Block activity for {ap_actor_id}")
await _send_undo(db_session, block_activity.ap_id)
await db_session.commit()
async def send_block(db_session: AsyncSession, ap_actor_id: str) -> None:
logger.info(f"Blocking {ap_actor_id}")
actor = await fetch_actor(db_session, ap_actor_id)
actor.is_blocked = True
# 1. Unfollow the actor
following = (
await db_session.scalars(
select(models.Following)
.options(joinedload(models.Following.outbox_object))
.where(
models.Following.ap_actor_id == actor.ap_id,
)
)
).one_or_none()
if following:
await _send_undo(db_session, following.outbox_object.ap_id)
# 2. If the blocked actor is a follower, reject the follow request
follower = (
await db_session.scalars(
select(models.Follower)
.options(joinedload(models.Follower.inbox_object))
.where(
models.Follower.ap_actor_id == actor.ap_id,
)
)
).one_or_none()
if follower:
await _send_reject(db_session, actor, follower.inbox_object)
await db_session.delete(follower)
# 3. Send a block
block_id = allocate_outbox_id()
block = {
"@context": ap.AS_EXTENDED_CTX,
"id": outbox_object_id(block_id),
"type": "Block",
"actor": LOCAL_ACTOR.ap_id,
"object": actor.ap_id,
}
outbox_object = await save_outbox_object(
db_session,
block_id,
block,
)
if not outbox_object.id:
raise ValueError("Should never happen")
await new_outgoing_activity(db_session, actor.inbox_url, outbox_object.id)
# 4. Create a notification
if is_notification_enabled(models.NotificationType.BLOCK):
notif = models.Notification(
notification_type=models.NotificationType.BLOCK,
actor_id=actor.id,
outbox_object_id=outbox_object.id,
)
db_session.add(notif)
await db_session.commit()
async def send_delete(db_session: AsyncSession, ap_object_id: str) -> None:
outbox_object_to_delete = await get_outbox_object_by_ap_id(db_session, ap_object_id)
if not outbox_object_to_delete:
@ -115,7 +217,7 @@ async def send_delete(db_session: AsyncSession, ap_object_id: str) -> None:
raise ValueError("Should never happen")
outbox_object_to_delete.is_deleted = True
await db_session.commit()
await db_session.flush()
# Compute the original recipients
recipients = await _compute_recipients(
@ -130,14 +232,17 @@ async def send_delete(db_session: AsyncSession, ap_object_id: str) -> None:
db_session, outbox_object_to_delete.in_reply_to
)
if replied_object:
new_replies_count = await _get_replies_count(
db_session, replied_object.ap_id
)
if replied_object.is_from_outbox:
# Different helper here because we also count webmentions
new_replies_count = await _get_outbox_replies_count(
db_session, replied_object # type: ignore
)
else:
new_replies_count = await _get_replies_count(
db_session, replied_object.ap_id
)
replied_object.replies_count = new_replies_count
if replied_object.replies_count < 0:
logger.warning("negative replies count for {replied_object.ap_id}")
replied_object.replies_count = 0
else:
logger.info(f"{outbox_object_to_delete.in_reply_to} not found")
@ -264,7 +369,7 @@ async def _send_undo(db_session: AsyncSession, ap_object_id: str) -> None:
if not outbox_object_to_undo:
raise ValueError(f"{ap_object_id} not found in the outbox")
if outbox_object_to_undo.ap_type not in ["Follow", "Like", "Announce"]:
if outbox_object_to_undo.ap_type not in ["Follow", "Like", "Announce", "Block"]:
raise ValueError(
f"Cannot build Undo for {outbox_object_to_undo.ap_type} activity"
)
@ -334,9 +439,36 @@ async def _send_undo(db_session: AsyncSession, ap_object_id: str) -> None:
announced_object.announced_via_outbox_object_ap_id = None
# Send the Undo to the original recipients
recipients = await _compute_recipients(db_session, outbox_object.ap_object)
recipients = await _compute_recipients(
db_session, outbox_object_to_undo.ap_object
)
for rcp in recipients:
await new_outgoing_activity(db_session, rcp, outbox_object.id)
elif outbox_object_to_undo.ap_type == "Block":
if not outbox_object_to_undo.activity_object_ap_id:
raise ValueError(f"Invalid block activity {outbox_object_to_undo.ap_id}")
# Send the Undo to the blocked actor
blocked_actor = await fetch_actor(
db_session, outbox_object_to_undo.activity_object_ap_id
)
blocked_actor.is_blocked = False
await new_outgoing_activity(
db_session,
blocked_actor.inbox_url, # type: ignore
outbox_object.id,
)
if is_notification_enabled(models.NotificationType.UNBLOCK):
notif = models.Notification(
notification_type=models.NotificationType.UNBLOCK,
actor_id=blocked_actor.id,
outbox_object_id=outbox_object.id,
)
db_session.add(notif)
else:
raise ValueError("Should never happen")
@ -347,6 +479,7 @@ async def fetch_conversation_root(
db_session: AsyncSession,
obj: AnyboxObject | RemoteObject,
is_root: bool = False,
depth: int = 0,
) -> str:
"""Some softwares do not set the context/conversation field (like Misskey).
This means we have to track conversation ourselves. To do so, we fetch
@ -354,12 +487,13 @@ async def fetch_conversation_root(
- use the context field if set
- or build a custom conversation ID
"""
if not obj.in_reply_to or is_root:
if obj.ap_context:
return obj.ap_context
else:
# Use the root AP ID if there's no context
return f"microblogpub:root:{obj.ap_id}"
logger.info(f"Fetching convo root for ap_id={obj.ap_id}/{depth=}")
if obj.ap_context:
return obj.ap_context
if not obj.in_reply_to or is_root or depth > 10:
# Use the root AP ID if there's no context
return f"microblogpub:root:{obj.ap_id}"
else:
in_reply_to_object: AnyboxObject | RemoteObject | None = (
await get_anybox_object_by_ap_id(db_session, obj.in_reply_to)
@ -375,15 +509,21 @@ async def fetch_conversation_root(
ap.FetchError,
ap.NotAnObjectError,
):
return await fetch_conversation_root(db_session, obj, is_root=True)
return await fetch_conversation_root(
db_session, obj, is_root=True, depth=depth + 1
)
except httpx.HTTPStatusError as http_status_error:
if 400 <= http_status_error.response.status_code < 500:
# We may not have access; in this case, consider it the root
return await fetch_conversation_root(db_session, obj, is_root=True)
return await fetch_conversation_root(
db_session, obj, is_root=True, depth=depth + 1
)
else:
raise
return await fetch_conversation_root(db_session, in_reply_to_object)
return await fetch_conversation_root(
db_session, in_reply_to_object, depth=depth + 1
)
async def send_move(
@ -452,7 +592,7 @@ async def send_create(
poll_answers: list[str] | None = None,
poll_duration_in_minutes: int | None = None,
name: str | None = None,
) -> str:
) -> tuple[str, models.OutboxObject]:
note_id = allocate_outbox_id()
published = now().replace(microsecond=0).isoformat().replace("+00:00", "Z")
context = f"{ID}/contexts/" + uuid.uuid4().hex
@ -499,6 +639,9 @@ async def send_create(
else:
raise ValueError(f"Unhandled visibility {visibility}")
slug = None
url = outbox_object_id(note_id)
extra_obj_attrs = {}
if ap_type == "Question":
if not poll_answers or len(poll_answers) < 2:
@ -528,6 +671,8 @@ async def send_create(
if not name:
raise ValueError("Article must have a name")
slug = slugify(name)
url = f"{BASE_URL}/articles/{note_id[:7]}/{slug}"
extra_obj_attrs = {"name": name}
obj = {
@ -541,8 +686,8 @@ async def send_create(
"published": published,
"context": context,
"conversation": context,
"url": outbox_object_id(note_id),
"tag": tags,
"url": url,
"tag": dedup_tags(tags),
"summary": content_warning,
"inReplyTo": in_reply_to,
"sensitive": is_sensitive,
@ -555,6 +700,7 @@ async def send_create(
obj,
source=source,
conversation=conversation,
slug=slug,
)
if not outbox_object.id:
raise ValueError("Should never happen")
@ -562,7 +708,7 @@ async def send_create(
for tag in tags:
if tag["type"] == "Hashtag":
tagged_object = models.TaggedOutboxObject(
tag=tag["name"][1:],
tag=tag["name"][1:].lower(),
outbox_object_id=outbox_object.id,
)
db_session.add(tagged_object)
@ -621,7 +767,7 @@ async def send_create(
await db_session.commit()
return note_id
return note_id, outbox_object
async def send_vote(
@ -804,7 +950,7 @@ async def compute_all_known_recipients(db_session: AsyncSession) -> set[str]:
}
async def _get_following(db_session: AsyncSession) -> list[models.Follower]:
async def _get_following(db_session: AsyncSession) -> list[models.Following]:
return (
(
await db_session.scalars(
@ -924,6 +1070,32 @@ async def get_outbox_object_by_ap_id(
) # type: ignore
async def get_outbox_object_by_slug_and_short_id(
db_session: AsyncSession,
slug: str,
short_id: str,
) -> models.OutboxObject | None:
return (
(
await db_session.execute(
select(models.OutboxObject)
.options(
joinedload(models.OutboxObject.outbox_object_attachments).options(
joinedload(models.OutboxObjectAttachment.upload)
)
)
.where(
models.OutboxObject.public_id.like(f"{short_id}%"),
models.OutboxObject.slug == slug,
models.OutboxObject.is_deleted.is_(False),
)
)
)
.unique()
.scalar_one_or_none()
)
async def get_anybox_object_by_ap_id(
db_session: AsyncSession, ap_id: str
) -> AnyboxObject | None:
@ -933,6 +1105,20 @@ async def get_anybox_object_by_ap_id(
return await get_inbox_object_by_ap_id(db_session, ap_id)
async def get_webmention_by_id(
db_session: AsyncSession, webmention_id: int
) -> models.Webmention | None:
return (
await db_session.execute(
select(models.Webmention)
.where(models.Webmention.id == webmention_id)
.options(
joinedload(models.Webmention.outbox_object),
)
)
).scalar_one_or_none() # type: ignore
async def _handle_delete_activity(
db_session: AsyncSession,
from_actor: models.Actor,
@ -1000,6 +1186,23 @@ async def _handle_delete_activity(
logger.info("Removing actor from follower")
await db_session.delete(follower)
# Also mark Follow activities for this actor as deleted
follow_activities = (
await db_session.scalars(
select(models.OutboxObject).where(
models.OutboxObject.ap_type == "Follow",
models.OutboxObject.relates_to_actor_id
== ap_object_to_delete.id,
models.OutboxObject.is_deleted.is_(False),
)
)
).all()
for follow_activity in follow_activities:
logger.info(
f"Marking Follow activity {follow_activity.ap_id} as deleted"
)
follow_activity.is_deleted = True
following = (
await db_session.scalars(
select(models.Following).where(
@ -1060,9 +1263,70 @@ async def _get_replies_count(
)
async def _get_outbox_replies_count(
db_session: AsyncSession,
outbox_object: models.OutboxObject,
) -> int:
return (await _get_replies_count(db_session, outbox_object.ap_id)) + (
await db_session.scalar(
select(func.count(models.Webmention.id)).where(
models.Webmention.is_deleted.is_(False),
models.Webmention.outbox_object_id == outbox_object.id,
models.Webmention.webmention_type == models.WebmentionType.REPLY,
)
)
)
async def _get_outbox_likes_count(
db_session: AsyncSession,
outbox_object: models.OutboxObject,
) -> int:
return (
await db_session.scalar(
select(func.count(models.InboxObject.id)).where(
models.InboxObject.ap_type == "Like",
models.InboxObject.relates_to_outbox_object_id == outbox_object.id,
models.InboxObject.is_deleted.is_(False),
)
)
) + (
await db_session.scalar(
select(func.count(models.Webmention.id)).where(
models.Webmention.is_deleted.is_(False),
models.Webmention.outbox_object_id == outbox_object.id,
models.Webmention.webmention_type == models.WebmentionType.LIKE,
)
)
)
async def _get_outbox_announces_count(
db_session: AsyncSession,
outbox_object: models.OutboxObject,
) -> int:
return (
await db_session.scalar(
select(func.count(models.InboxObject.id)).where(
models.InboxObject.ap_type == "Announce",
models.InboxObject.relates_to_outbox_object_id == outbox_object.id,
models.InboxObject.is_deleted.is_(False),
)
)
) + (
await db_session.scalar(
select(func.count(models.Webmention.id)).where(
models.Webmention.is_deleted.is_(False),
models.Webmention.outbox_object_id == outbox_object.id,
models.Webmention.webmention_type == models.WebmentionType.REPOST,
)
)
)
async def _revert_side_effect_for_deleted_object(
db_session: AsyncSession,
delete_activity: models.InboxObject,
delete_activity: models.InboxObject | None,
deleted_ap_object: models.InboxObject,
forwarded_by_actor: models.Actor | None,
) -> None:
@ -1090,8 +1354,8 @@ async def _revert_side_effect_for_deleted_object(
# also needs to be forwarded
is_delete_needs_to_be_forwarded = True
new_replies_count = await _get_replies_count(
db_session, replied_object.ap_id
new_replies_count = await _get_outbox_replies_count(
db_session, replied_object # type: ignore
)
await db_session.execute(
@ -1099,7 +1363,7 @@ async def _revert_side_effect_for_deleted_object(
.where(
models.OutboxObject.id == replied_object.id,
)
.values(replies_count=new_replies_count)
.values(replies_count=new_replies_count - 1)
)
else:
new_replies_count = await _get_replies_count(
@ -1111,7 +1375,7 @@ async def _revert_side_effect_for_deleted_object(
.where(
models.InboxObject.id == replied_object.id,
)
.values(replies_count=new_replies_count)
.values(replies_count=new_replies_count - 1)
)
if deleted_ap_object.ap_type == "Like" and deleted_ap_object.activity_object_ap_id:
@ -1121,15 +1385,16 @@ async def _revert_side_effect_for_deleted_object(
)
if related_object:
if related_object.is_from_outbox:
likes_count = await _get_outbox_likes_count(db_session, related_object)
await db_session.execute(
update(models.OutboxObject)
.where(
models.OutboxObject.id == related_object.id,
)
.values(likes_count=models.OutboxObject.likes_count - 1)
.values(likes_count=likes_count - 1)
)
elif (
deleted_ap_object.ap_type == "Annouce"
deleted_ap_object.ap_type == "Announce"
and deleted_ap_object.activity_object_ap_id
):
related_object = await get_outbox_object_by_ap_id(
@ -1138,12 +1403,15 @@ async def _revert_side_effect_for_deleted_object(
)
if related_object:
if related_object.is_from_outbox:
announces_count = await _get_outbox_announces_count(
db_session, related_object
)
await db_session.execute(
update(models.OutboxObject)
.where(
models.OutboxObject.id == related_object.id,
)
.values(announces_count=models.OutboxObject.announces_count - 1)
.values(announces_count=announces_count - 1)
)
# Delete any Like/Announce
@ -1158,7 +1426,8 @@ async def _revert_side_effect_for_deleted_object(
# If it's a local reply, it was forwarded, so we also need to forward
# the Delete activity if possible
if (
delete_activity.activity_object_ap_id == deleted_ap_object.ap_id
delete_activity
and delete_activity.activity_object_ap_id == deleted_ap_object.ap_id
and delete_activity.has_ld_signature
and is_delete_needs_to_be_forwarded
):
@ -1271,11 +1540,12 @@ async def _send_accept(
raise ValueError("Should never happen")
await new_outgoing_activity(db_session, from_actor.inbox_url, outbox_activity.id)
notif = models.Notification(
notification_type=models.NotificationType.NEW_FOLLOWER,
actor_id=from_actor.id,
)
db_session.add(notif)
if is_notification_enabled(models.NotificationType.NEW_FOLLOWER):
notif = models.Notification(
notification_type=models.NotificationType.NEW_FOLLOWER,
actor_id=from_actor.id,
)
db_session.add(notif)
async def send_reject(
@ -1314,11 +1584,12 @@ async def _send_reject(
raise ValueError("Should never happen")
await new_outgoing_activity(db_session, from_actor.inbox_url, outbox_activity.id)
notif = models.Notification(
notification_type=models.NotificationType.REJECTED_FOLLOWER,
actor_id=from_actor.id,
)
db_session.add(notif)
if is_notification_enabled(models.NotificationType.REJECTED_FOLLOWER):
notif = models.Notification(
notification_type=models.NotificationType.REJECTED_FOLLOWER,
actor_id=from_actor.id,
)
db_session.add(notif)
async def _handle_undo_activity(
@ -1344,11 +1615,12 @@ async def _handle_undo_activity(
models.Follower.inbox_object_id == ap_activity_to_undo.id
)
)
notif = models.Notification(
notification_type=models.NotificationType.UNFOLLOW,
actor_id=from_actor.id,
)
db_session.add(notif)
if is_notification_enabled(models.NotificationType.UNFOLLOW):
notif = models.Notification(
notification_type=models.NotificationType.UNFOLLOW,
actor_id=from_actor.id,
)
db_session.add(notif)
elif ap_activity_to_undo.ap_type == "Like":
if not ap_activity_to_undo.activity_object_ap_id:
@ -1364,14 +1636,21 @@ async def _handle_undo_activity(
)
return
liked_obj.likes_count = models.OutboxObject.likes_count - 1
notif = models.Notification(
notification_type=models.NotificationType.UNDO_LIKE,
actor_id=from_actor.id,
outbox_object_id=liked_obj.id,
inbox_object_id=ap_activity_to_undo.id,
liked_obj.likes_count = (
await _get_outbox_likes_count(
db_session,
liked_obj,
)
- 1
)
db_session.add(notif)
if is_notification_enabled(models.NotificationType.UNDO_LIKE):
notif = models.Notification(
notification_type=models.NotificationType.UNDO_LIKE,
actor_id=from_actor.id,
outbox_object_id=liked_obj.id,
inbox_object_id=ap_activity_to_undo.id,
)
db_session.add(notif)
elif ap_activity_to_undo.ap_type == "Announce":
if not ap_activity_to_undo.activity_object_ap_id:
@ -1389,13 +1668,22 @@ async def _handle_undo_activity(
announced_obj_from_outbox.announces_count = (
models.OutboxObject.announces_count - 1
)
notif = models.Notification(
notification_type=models.NotificationType.UNDO_ANNOUNCE,
actor_id=from_actor.id,
outbox_object_id=announced_obj_from_outbox.id,
inbox_object_id=ap_activity_to_undo.id,
)
db_session.add(notif)
if is_notification_enabled(models.NotificationType.UNDO_ANNOUNCE):
notif = models.Notification(
notification_type=models.NotificationType.UNDO_ANNOUNCE,
actor_id=from_actor.id,
outbox_object_id=announced_obj_from_outbox.id,
inbox_object_id=ap_activity_to_undo.id,
)
db_session.add(notif)
elif ap_activity_to_undo.ap_type == "Block":
if is_notification_enabled(models.NotificationType.UNBLOCKED):
notif = models.Notification(
notification_type=models.NotificationType.UNBLOCKED,
actor_id=from_actor.id,
inbox_object_id=ap_activity_to_undo.id,
)
db_session.add(notif)
else:
logger.warning(f"Don't know how to undo {ap_activity_to_undo.ap_type} activity")
@ -1459,12 +1747,13 @@ async def _handle_move_activity(
else:
logger.info(f"Already following target {new_actor_id}")
notif = models.Notification(
notification_type=models.NotificationType.MOVE,
actor_id=new_actor.id,
inbox_object_id=move_activity.id,
)
db_session.add(notif)
if is_notification_enabled(models.NotificationType.MOVE):
notif = models.Notification(
notification_type=models.NotificationType.MOVE,
actor_id=new_actor.id,
inbox_object_id=move_activity.id,
)
db_session.add(notif)
async def _handle_update_activity(
@ -1490,7 +1779,7 @@ async def _handle_update_activity(
)
# Update the actor
from_actor.ap_actor = updated_actor.ap_actor
await update_actor_if_needed(db_session, from_actor, updated_actor)
elif (ap_type := wrapped_object["type"]) in [
"Question",
"Note",
@ -1513,6 +1802,7 @@ async def _handle_update_activity(
# Everything looks correct, update the object in the inbox
logger.info(f"Updating {existing_object.ap_id}")
existing_object.ap_object = wrapped_object
existing_object.updated_at = now()
else:
# TODO(ts): support updating objects
logger.info(f'Cannot update {wrapped_object["type"]}')
@ -1621,16 +1911,26 @@ async def _process_note_object(
is_from_following = ro.actor.ap_id in {f.ap_actor_id for f in following}
is_reply = bool(ro.in_reply_to)
is_local_reply = (
ro.in_reply_to
and ro.in_reply_to.startswith(BASE_URL)
and ro.content # Hide votes from Question
)
is_local_reply = ro.is_local_reply
is_mention = False
hashtags = []
tags = ro.ap_object.get("tag", [])
for tag in ap.as_list(tags):
if tag.get("name") == LOCAL_ACTOR.handle or tag.get("href") == LOCAL_ACTOR.url:
is_mention = True
if tag.get("type") == "Hashtag":
if tag_name := tag.get("name"):
hashtags.append(tag_name)
object_info = ObjectInfo(
is_reply=is_reply,
is_local_reply=is_local_reply,
is_mention=is_mention,
is_from_following=is_from_following,
hashtags=hashtags,
actor_handle=ro.actor.handle,
remote_object=ro,
)
inbox_object = models.InboxObject(
server=urlparse(ro.ap_id).hostname,
@ -1648,9 +1948,7 @@ async def _process_note_object(
activity_object_ap_id=ro.activity_object_ap_id,
og_meta=await opengraph.og_meta_from_note(db_session, ro),
# Hide replies from the stream
is_hidden_from_stream=not (
(not is_reply and is_from_following) or is_mention or is_local_reply
),
is_hidden_from_stream=not stream_visibility_callback(object_info),
# We may already have some replies in DB
replies_count=await _get_replies_count(db_session, ro.ap_id),
)
@ -1676,8 +1974,8 @@ async def _process_note_object(
replied_object, # type: ignore # outbox check below
)
else:
new_replies_count = await _get_replies_count(
db_session, replied_object.ap_id
new_replies_count = await _get_outbox_replies_count(
db_session, replied_object # type: ignore
)
await db_session.execute(
@ -1725,7 +2023,7 @@ async def _process_note_object(
inbox_object_id=parent_activity.id,
)
if is_mention:
if is_mention and is_notification_enabled(models.NotificationType.MENTION):
notif = models.Notification(
notification_type=models.NotificationType.MENTION,
actor_id=from_actor.id,
@ -1824,13 +2122,14 @@ async def _handle_announce_activity(
models.OutboxObject.announces_count + 1
)
notif = models.Notification(
notification_type=models.NotificationType.ANNOUNCE,
actor_id=actor.id,
outbox_object_id=relates_to_outbox_object.id,
inbox_object_id=announce_activity.id,
)
db_session.add(notif)
if is_notification_enabled(models.NotificationType.ANNOUNCE):
notif = models.Notification(
notification_type=models.NotificationType.ANNOUNCE,
actor_id=actor.id,
outbox_object_id=relates_to_outbox_object.id,
inbox_object_id=announce_activity.id,
)
db_session.add(notif)
else:
# Only show the announce in the stream if it comes from an actor
# in the following collection
@ -1906,7 +2205,10 @@ async def _handle_announce_activity(
db_session.add(announced_inbox_object)
await db_session.flush()
announce_activity.relates_to_inbox_object_id = announced_inbox_object.id
announce_activity.is_hidden_from_stream = not is_from_following
announce_activity.is_hidden_from_stream = (
not is_from_following
or announce_activity.actor.are_announces_hidden_from_stream
)
async def _handle_like_activity(
@ -1923,13 +2225,40 @@ async def _handle_like_activity(
)
await db_session.delete(like_activity)
else:
relates_to_outbox_object.likes_count = models.OutboxObject.likes_count + 1
relates_to_outbox_object.likes_count = await _get_outbox_likes_count(
db_session,
relates_to_outbox_object,
)
if is_notification_enabled(models.NotificationType.LIKE):
notif = models.Notification(
notification_type=models.NotificationType.LIKE,
actor_id=actor.id,
outbox_object_id=relates_to_outbox_object.id,
inbox_object_id=like_activity.id,
)
db_session.add(notif)
async def _handle_block_activity(
db_session: AsyncSession,
actor: models.Actor,
block_activity: models.InboxObject,
):
if block_activity.activity_object_ap_id != LOCAL_ACTOR.ap_id:
logger.warning(
"Received invalid Block activity "
f"{block_activity.activity_object_ap_id=}"
)
await db_session.delete(block_activity)
return
# Create a notification
if is_notification_enabled(models.NotificationType.BLOCKED):
notif = models.Notification(
notification_type=models.NotificationType.LIKE,
notification_type=models.NotificationType.BLOCKED,
actor_id=actor.id,
outbox_object_id=relates_to_outbox_object.id,
inbox_object_id=like_activity.id,
inbox_object_id=block_activity.id,
)
db_session.add(notif)
@ -1986,7 +2315,7 @@ async def save_to_inbox(
logger.exception("Failed to fetch actor")
return
if actor.server in BLOCKED_SERVERS:
if is_hostname_blocked(actor.server):
logger.warning(f"Server {actor.server} is blocked")
return
@ -1994,8 +2323,10 @@ async def save_to_inbox(
await _process_transient_object(db_session, raw_object, actor)
return None
if actor.is_blocked:
logger.warning("Actor {actor.ap_id} is blocked, ignoring object")
# If we just blocked an actor, we still want to process any Undo sent as a
# side effect
if actor.is_blocked and ap.as_list(raw_object["type"])[0] != "Undo":
logger.warning(f"Actor {actor.ap_id} is blocked, ignoring object")
return None
raw_object_id = ap.get_id(raw_object)
@ -2132,12 +2463,13 @@ async def save_to_inbox(
if activity_ro.ap_type == "Accept"
else models.NotificationType.FOLLOW_REQUEST_REJECTED
)
notif = models.Notification(
notification_type=notif_type,
actor_id=actor.id,
inbox_object_id=inbox_object.id,
)
db_session.add(notif)
if is_notification_enabled(notif_type):
notif = models.Notification(
notification_type=notif_type,
actor_id=actor.id,
inbox_object_id=inbox_object.id,
)
db_session.add(notif)
if activity_ro.ap_type == "Accept":
following = models.Following(
@ -2198,6 +2530,12 @@ async def save_to_inbox(
elif activity_ro.ap_type == "View":
# View is used by Peertube, there's nothing useful we can do with it
await db_session.delete(inbox_object)
elif activity_ro.ap_type == "Block":
await _handle_block_activity(
db_session,
actor,
inbox_object,
)
else:
logger.warning(f"Received an unknown {inbox_object.ap_type} object")
@ -2304,11 +2642,21 @@ async def fetch_actor_collection(db_session: AsyncSession, url: str) -> list[Act
@dataclass
class ReplyTreeNode:
ap_object: AnyboxObject
ap_object: AnyboxObject | None
wm_reply: WebmentionReply | None
children: list["ReplyTreeNode"]
is_requested: bool = False
is_root: bool = False
@property
def published_at(self) -> datetime.datetime:
if self.ap_object:
return self.ap_object.ap_published_at # type: ignore
elif self.wm_reply:
return self.wm_reply.published_at
else:
raise ValueError(f"Should never happen: {self}")
async def get_replies_tree(
db_session: AsyncSession,
@ -2331,7 +2679,9 @@ async def get_replies_tree(
.where(
models.InboxObject.conversation
== requested_object.conversation,
models.InboxObject.ap_type.in_(["Note", "Page", "Article"]),
models.InboxObject.ap_type.in_(
["Note", "Page", "Article", "Question"]
),
models.InboxObject.is_deleted.is_(False),
models.InboxObject.visibility.in_(allowed_visibility),
)
@ -2349,7 +2699,9 @@ async def get_replies_tree(
models.OutboxObject.conversation
== requested_object.conversation,
models.OutboxObject.is_deleted.is_(False),
models.OutboxObject.ap_type.in_(["Note", "Page", "Article"]),
models.OutboxObject.ap_type.in_(
["Note", "Page", "Article", "Question"]
),
models.OutboxObject.visibility.in_(allowed_visibility),
)
.options(
@ -2378,6 +2730,7 @@ async def get_replies_tree(
for child in index.get(node.ap_object.ap_id, []): # type: ignore
child_node = ReplyTreeNode(
ap_object=child,
wm_reply=None,
is_requested=child.ap_id == requested_object.ap_id, # type: ignore
children=[],
)
@ -2386,7 +2739,7 @@ async def get_replies_tree(
return sorted(
children,
key=lambda node: node.ap_object.ap_published_at, # type: ignore
key=lambda node: node.published_at,
)
if None in nodes_by_in_reply_to:
@ -2399,6 +2752,7 @@ async def get_replies_tree(
root_node = ReplyTreeNode(
ap_object=root_ap_object,
wm_reply=None,
is_root=True,
is_requested=root_ap_object.ap_id == requested_object.ap_id,
children=[],

View File

@ -1,4 +1,5 @@
import hashlib
import hmac
import os
import secrets
from pathlib import Path
@ -12,8 +13,11 @@ from fastapi import HTTPException
from fastapi import Request
from itsdangerous import URLSafeTimedSerializer
from loguru import logger
from markdown import markdown
from mistletoe import markdown # type: ignore
from app.customization import _CUSTOM_ROUTES
from app.customization import _StreamVisibilityCallback
from app.customization import default_stream_visibility_callback
from app.utils.emoji import _load_emojis
from app.utils.version import get_version_commit
@ -40,11 +44,14 @@ except FileNotFoundError:
JS_HASH = "none"
try:
# To keep things simple, we keep a single hash for these files
js_data_common = (ROOT_DIR / "app" / "static" / "common-admin.js").read_bytes()
js_data_new = (ROOT_DIR / "app" / "static" / "new.js").read_bytes()
JS_HASH = hashlib.md5(
js_data_common + js_data_new, usedforsecurity=False
).hexdigest()
dat = b""
for j in [
ROOT_DIR / "app" / "static" / "common.js",
ROOT_DIR / "app" / "static" / "common-admin.js",
ROOT_DIR / "app" / "static" / "new.js",
]:
dat += j.read_bytes()
JS_HASH = hashlib.md5(dat, usedforsecurity=False).hexdigest()
except FileNotFoundError:
pass
@ -89,7 +96,8 @@ class Config(pydantic.BaseModel):
name: str
summary: str
https: bool
icon_url: str
icon_url: str | None = None
image_url: str | None = None
secret: str
debug: bool = False
trusted_hosts: list[str] = ["127.0.0.1"]
@ -107,10 +115,22 @@ class Config(pydantic.BaseModel):
inbox_retention_days: int = 15
custom_content_security_policy: str | None = None
webfinger_domain: str | None = None
# Config items to make tests easier
sqlalchemy_database: str | None = None
key_path: str | None = None
session_timeout: int = 3600 * 24 * 3 # in seconds, 3 days by default
csrf_token_exp: int = 3600
disabled_notifications: list[str] = []
# Only set when the app is served on a non-root path
id: str | None = None
def load_config() -> Config:
try:
@ -145,7 +165,16 @@ CONFIG = load_config()
DOMAIN = CONFIG.domain
_SCHEME = "https" if CONFIG.https else "http"
ID = f"{_SCHEME}://{DOMAIN}"
# When running the app on a path, the ID may be set by the config, but in this
# case, a valid webfinger must be served on the root domain
if CONFIG.id:
ID = CONFIG.id
USERNAME = CONFIG.username
# Allow using @handle@webfinger-domain.tld while hosting the server at domain.tld
WEBFINGER_DOMAIN = CONFIG.webfinger_domain or DOMAIN
MANUALLY_APPROVES_FOLLOWERS = CONFIG.manually_approves_followers
HIDES_FOLLOWERS = CONFIG.hides_followers
HIDES_FOLLOWING = CONFIG.hides_following
@ -155,12 +184,12 @@ if CONFIG.privacy_replace:
BLOCKED_SERVERS = {blocked_server.hostname for blocked_server in CONFIG.blocked_servers}
ALSO_KNOWN_AS = CONFIG.also_known_as
CUSTOM_CONTENT_SECURITY_POLICY = CONFIG.custom_content_security_policy
INBOX_RETENTION_DAYS = CONFIG.inbox_retention_days
SESSION_TIMEOUT = CONFIG.session_timeout
CUSTOM_FOOTER = (
markdown(
CONFIG.custom_footer.replace("{version}", VERSION), extensions=["mdx_linkify"]
)
markdown(CONFIG.custom_footer.replace("{version}", VERSION))
if CONFIG.custom_footer
else None
)
@ -177,7 +206,9 @@ if CONFIG.emoji:
EMOJIS = CONFIG.emoji
# Emoji template for the FE
EMOJI_TPL = '<img src="/static/twemoji/{filename}.svg" alt="{raw}" class="emoji">'
EMOJI_TPL = (
'<img src="{base_url}/static/twemoji/{filename}.svg" alt="{raw}" class="emoji">'
)
_load_emojis(ROOT_DIR, BASE_URL)
@ -186,6 +217,31 @@ CODE_HIGHLIGHTING_THEME = CONFIG.code_highlighting_theme
MOVED_TO = _get_moved_to()
_NavBarItem = tuple[str, str]
class NavBarItems:
EXTRA_NAVBAR_ITEMS: list[_NavBarItem] = []
INDEX_NAVBAR_ITEM: _NavBarItem | None = None
NOTES_PATH = "/"
def load_custom_routes() -> None:
try:
from data import custom_routes # type: ignore # noqa: F401
except ImportError:
pass
for path, custom_handler in _CUSTOM_ROUTES.items():
# If a handler wants to replace the root, move the index to /notes
if path == "/":
NavBarItems.NOTES_PATH = "/notes"
NavBarItems.INDEX_NAVBAR_ITEM = (path, custom_handler.title)
else:
if custom_handler.show_in_navbar:
NavBarItems.EXTRA_NAVBAR_ITEMS.append((path, custom_handler.title))
session_serializer = URLSafeTimedSerializer(
CONFIG.secret,
salt=f"{ID}.session",
@ -208,7 +264,7 @@ def verify_csrf_token(
if redirect_url:
please_try_again = f'<a href="{redirect_url}">please try again</a>'
try:
csrf_serializer.loads(csrf_token, max_age=1800)
csrf_serializer.loads(csrf_token, max_age=CONFIG.csrf_token_exp)
except (itsdangerous.BadData, itsdangerous.SignatureExpired):
logger.exception("Failed to verify CSRF token")
raise HTTPException(
@ -216,3 +272,18 @@ def verify_csrf_token(
detail=f"The security token has expired, {please_try_again}",
)
return None
def hmac_sha256() -> hmac.HMAC:
return hmac.new(CONFIG.secret.encode(), digestmod=hashlib.sha256)
stream_visibility_callback: _StreamVisibilityCallback
try:
from data.stream import ( # type: ignore # noqa: F401, E501
custom_stream_visibility_callback,
)
stream_visibility_callback = custom_stream_visibility_callback
except ImportError:
stream_visibility_callback = default_stream_visibility_callback

155
app/customization.py Normal file
View File

@ -0,0 +1,155 @@
from dataclasses import dataclass
from pathlib import Path
from typing import TYPE_CHECKING
from typing import Any
from typing import Callable
from fastapi import APIRouter
from fastapi import Depends
from fastapi import Request
from loguru import logger
from starlette.responses import JSONResponse
if TYPE_CHECKING:
from app.ap_object import RemoteObject
_DATA_DIR = Path().parent.resolve() / "data"
_Handler = Callable[..., Any]
class HTMLPage:
def __init__(
self,
title: str,
html_file: str,
show_in_navbar: bool,
) -> None:
self.title = title
self.html_file = _DATA_DIR / html_file
self.show_in_navbar = show_in_navbar
class RawHandler:
def __init__(
self,
title: str,
handler: Any,
show_in_navbar: bool,
) -> None:
self.title = title
self.handler = handler
self.show_in_navbar = show_in_navbar
_CUSTOM_ROUTES: dict[str, HTMLPage | RawHandler] = {}
def register_html_page(
path: str,
*,
title: str,
html_file: str,
show_in_navbar: bool = True,
) -> None:
if path in _CUSTOM_ROUTES:
raise ValueError(f"{path} is already registered")
_CUSTOM_ROUTES[path] = HTMLPage(title, html_file, show_in_navbar)
def register_raw_handler(
path: str,
*,
title: str,
handler: _Handler,
show_in_navbar: bool = True,
) -> None:
if path in _CUSTOM_ROUTES:
raise ValueError(f"{path} is already registered")
_CUSTOM_ROUTES[path] = RawHandler(title, handler, show_in_navbar)
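For illustration, a hypothetical data/custom_routes.py using the helper above might look like the sketch below; the /about path and about.html file are assumptions for the example, not part of this changeset.

# data/custom_routes.py -- hypothetical example, not part of this changeset
from app.customization import register_html_page

register_html_page(
    "/about",  # assumed path; registering "/" would move the index to /notes
    title="About",
    html_file="about.html",  # resolved against the data/ directory via _DATA_DIR
    show_in_navbar=True,
)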
class ActivityPubResponse(JSONResponse):
media_type = "application/activity+json"
def _custom_page_handler(path: str, html_page: HTMLPage) -> Any:
from app import templates
from app.actor import LOCAL_ACTOR
from app.config import is_activitypub_requested
from app.database import AsyncSession
from app.database import get_db_session
async def _handler(
request: Request,
db_session: AsyncSession = Depends(get_db_session),
) -> templates.TemplateResponse | ActivityPubResponse:
if path == "/" and is_activitypub_requested(request):
return ActivityPubResponse(LOCAL_ACTOR.ap_actor)
return await templates.render_template(
db_session,
request,
"custom_page.html",
{
"page_content": html_page.html_file.read_text(),
"title": html_page.title,
},
)
return _handler
def get_custom_router() -> APIRouter | None:
if not _CUSTOM_ROUTES:
return None
router = APIRouter()
for path, handler in _CUSTOM_ROUTES.items():
if isinstance(handler, HTMLPage):
router.add_api_route(
path, _custom_page_handler(path, handler), methods=["GET"]
)
else:
router.add_api_route(path, handler.handler)
return router
@dataclass
class ObjectInfo:
# Is it a reply?
is_reply: bool
# Is it a reply to an outbox object
is_local_reply: bool
# Is the object mentioning the local actor
is_mention: bool
# Is it from someone the local actor is following
is_from_following: bool
# List of hashtags, e.g. #microblogpub
hashtags: list[str]
# @dev@microblog.pub
actor_handle: str
remote_object: "RemoteObject"
_StreamVisibilityCallback = Callable[[ObjectInfo], bool]
def default_stream_visibility_callback(object_info: ObjectInfo) -> bool:
result = (
(not object_info.is_reply and object_info.is_from_following)
or object_info.is_mention
or object_info.is_local_reply
)
logger.info(f"{object_info=}/{result=}")
return result
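As a sketch of how this new hook can be overridden, a hypothetical data/stream.py (matching the import attempted in app/config.py) could look like the following; the muted hashtag is purely illustrative and not part of this changeset.

# data/stream.py -- hypothetical override of the stream visibility hook
from app.customization import ObjectInfo


def custom_stream_visibility_callback(object_info: ObjectInfo) -> bool:
    # Hide anything carrying a (hypothetical) muted hashtag
    if any(tag.lower() == "#cats" for tag in object_info.hashtags):
        return False
    # Otherwise keep the default behaviour: posts from followings,
    # mentions of the local actor, and replies to outbox objects
    return (
        (not object_info.is_reply and object_info.is_from_following)
        or object_info.is_mention
        or object_info.is_local_reply
    )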

View File

@ -1,5 +1,6 @@
import base64
import hashlib
import json
import typing
from dataclasses import dataclass
from datetime import datetime
@ -22,12 +23,12 @@ from sqlalchemy import select
from app import activitypub as ap
from app import config
from app.config import BLOCKED_SERVERS
from app.config import KEY_PATH
from app.database import AsyncSession
from app.database import get_db_session
from app.key import Key
from app.utils.datetime import now
from app.utils.url import is_hostname_blocked
_KEY_CACHE: MutableMapping[str, Key] = LFUCache(256)
@ -88,8 +89,12 @@ def _body_digest(body: bytes) -> str:
return "SHA-256=" + base64.b64encode(h.digest()).decode("utf-8")
async def _get_public_key(db_session: AsyncSession, key_id: str) -> Key:
if cached_key := _KEY_CACHE.get(key_id):
async def _get_public_key(
db_session: AsyncSession,
key_id: str,
should_skip_cache: bool = False,
) -> Key:
if not should_skip_cache and (cached_key := _KEY_CACHE.get(key_id)):
logger.info(f"Key {key_id} found in cache")
return cached_key
@ -101,15 +106,18 @@ async def _get_public_key(db_session: AsyncSession, key_id: str) -> Key:
select(models.Actor).where(models.Actor.ap_id == key_id.split("#")[0])
)
).one_or_none()
if existing_actor and existing_actor.public_key_id == key_id:
k = Key(existing_actor.ap_id, key_id)
k.load_pub(existing_actor.public_key_as_pem)
logger.info(f"Found {key_id} on an existing actor")
_KEY_CACHE[key_id] = k
return k
if not should_skip_cache:
if existing_actor and existing_actor.public_key_id == key_id:
k = Key(existing_actor.ap_id, key_id)
k.load_pub(existing_actor.public_key_as_pem)
logger.info(f"Found {key_id} on an existing actor")
_KEY_CACHE[key_id] = k
return k
# Fetch it
from app import activitypub as ap
from app.actor import RemoteActor
from app.actor import update_actor_if_needed
# Without signing the request as if it were the first contact, the 2 servers
# might race to fetch each other's keys
@ -133,6 +141,12 @@ async def _get_public_key(db_session: AsyncSession, key_id: str) -> Key:
f"failed to fetch requested key {key_id}: got {actor['publicKey']}"
)
if should_skip_cache and actor["type"] != "Key" and existing_actor:
# We had to skip the cache, which means the actor key probably changed
# and we want to update our cached version
await update_actor_if_needed(db_session, existing_actor, RemoteActor(actor))
await db_session.commit()
_KEY_CACHE[key_id] = k
return k
@ -170,7 +184,7 @@ async def httpsig_checker(
)
server = urlparse(key_id).hostname
if server in BLOCKED_SERVERS:
if is_hostname_blocked(server):
return HTTPSigInfo(
has_valid_signature=False,
server=server,
@ -185,6 +199,32 @@ async def httpsig_checker(
server=server,
)
# Try to drop Delete activity spam early on; this prevents making extra
# HTTP requests trying to fetch an unavailable actor to verify the HTTP sig
try:
if request.method == "POST" and request.url.path.endswith("/inbox"):
from app import models # TODO: solve this circular import
activity = json.loads(body)
actor_id = ap.get_id(activity["actor"])
if (
ap.as_list(activity["type"])[0] == "Delete"
and actor_id == ap.get_id(activity["object"])
and not (
await db_session.scalars(
select(models.Actor).where(
models.Actor.ap_id == actor_id,
)
)
).one_or_none()
):
logger.info(f"Dropping Delete activity early for {body=}")
raise fastapi.HTTPException(status_code=202)
except fastapi.HTTPException as http_exc:
raise http_exc
except Exception:
logger.exception("Failed to check for Delete spam")
# logger.debug(f"hsig={hsig}")
signed_string, signature_date = _build_signed_string(
hsig["headers"],
@ -216,7 +256,17 @@ async def httpsig_checker(
has_valid_signature = _verify_h(
signed_string, base64.b64decode(hsig["signature"]), k.pubkey
)
# FIXME: fetch/update the user if the signature is wrong
# If the signature is not valid, we may have to update the cached actor
if not has_valid_signature:
logger.info("Invalid signature, trying to refresh actor")
try:
k = await _get_public_key(db_session, hsig["keyId"], should_skip_cache=True)
has_valid_signature = _verify_h(
signed_string, base64.b64decode(hsig["signature"]), k.pubkey
)
except Exception:
logger.exception("Failed to refresh actor")
httpsig_info = HTTPSigInfo(
has_valid_signature=has_valid_signature,

View File

@ -3,7 +3,6 @@ import traceback
from datetime import datetime
from datetime import timedelta
import httpx
from loguru import logger
from sqlalchemy import func
from sqlalchemy import select
@ -61,7 +60,7 @@ def _set_next_try(
if not outgoing_activity.tries:
raise ValueError("Should never happen")
if outgoing_activity.tries == _MAX_RETRIES:
if outgoing_activity.tries >= _MAX_RETRIES:
outgoing_activity.is_errored = True
outgoing_activity.next_try = None
else:
@ -108,6 +107,7 @@ async def process_next_incoming_activity(
next_activity.tries = next_activity.tries + 1
next_activity.last_try = now()
await db_session.commit()
if next_activity.ap_object and next_activity.sent_by_ap_actor_id:
try:
@ -120,13 +120,16 @@ async def process_next_incoming_activity(
),
timeout=60,
)
except httpx.TimeoutException as exc:
url = exc._request.url if exc._request else None
logger.error(f"Failed, HTTP timeout when fetching {url}")
except asyncio.exceptions.TimeoutError:
logger.error("Activity took too long to process")
await db_session.rollback()
await db_session.refresh(next_activity)
next_activity.error = traceback.format_exc()
_set_next_try(next_activity)
except Exception:
logger.exception("Failed")
await db_session.rollback()
await db_session.refresh(next_activity)
next_activity.error = traceback.format_exc()
_set_next_try(next_activity)
else:

View File

@ -10,9 +10,12 @@ from fastapi import Form
from fastapi import HTTPException
from fastapi import Request
from fastapi.responses import JSONResponse
from fastapi.responses import RedirectResponse
from fastapi.security import HTTPBasic
from fastapi.security import HTTPBasicCredentials
from loguru import logger
from pydantic import BaseModel
from sqlalchemy import select
from sqlalchemy.orm import joinedload
from app import config
from app import models
@ -21,9 +24,12 @@ from app.admin import user_session_or_redirect
from app.config import verify_csrf_token
from app.database import AsyncSession
from app.database import get_db_session
from app.redirect import redirect
from app.utils import indieauth
from app.utils.datetime import now
basic_auth = HTTPBasic()
router = APIRouter()
@ -38,9 +44,55 @@ async def well_known_authorization_server(
"code_challenge_methods_supported": ["S256"],
"revocation_endpoint": request.url_for("indieauth_revocation_endpoint"),
"revocation_endpoint_auth_methods_supported": ["none"],
"registration_endpoint": request.url_for("oauth_registration_endpoint"),
"introspection_endpoint": request.url_for("oauth_introspection_endpoint"),
}
class OAuthRegisterClientRequest(BaseModel):
client_name: str
redirect_uris: list[str] | str
client_uri: str | None = None
logo_uri: str | None = None
scope: str | None = None
@router.post("/oauth/register")
async def oauth_registration_endpoint(
register_client_request: OAuthRegisterClientRequest,
db_session: AsyncSession = Depends(get_db_session),
) -> JSONResponse:
"""Implements OAuth 2.0 Dynamic Registration."""
client = models.OAuthClient(
client_name=register_client_request.client_name,
redirect_uris=[register_client_request.redirect_uris]
if isinstance(register_client_request.redirect_uris, str)
else register_client_request.redirect_uris,
client_uri=register_client_request.client_uri,
logo_uri=register_client_request.logo_uri,
scope=register_client_request.scope,
client_id=secrets.token_hex(16),
client_secret=secrets.token_hex(32),
)
db_session.add(client)
await db_session.commit()
return JSONResponse(
content={
**register_client_request.dict(),
"client_id_issued_at": int(client.created_at.timestamp()), # type: ignore
"grant_types": ["authorization_code", "refresh_token"],
"client_secret_expires_at": 0,
"client_id": client.client_id,
"client_secret": client.client_secret,
},
status_code=201,
)
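A hypothetical client-side sketch of registering against this endpoint; the instance URL, client metadata and scope values are placeholders, not part of this changeset.

# Hypothetical sketch: registering an OAuth client (placeholder values)
import httpx

resp = httpx.post(
    "https://yourdomain.tld/oauth/register",
    json={
        "client_name": "my-client",
        "redirect_uris": ["https://client.example/callback"],
        "scope": "read write",
    },
)
resp.raise_for_status()
creds = resp.json()
# creds["client_id"] and creds["client_secret"] are then used for the
# authorization code flow and for authenticating introspection requests.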
@router.get("/auth")
async def indieauth_authorization_endpoint(
request: Request,
@ -56,12 +108,29 @@ async def indieauth_authorization_endpoint(
code_challenge = request.query_params.get("code_challenge", "")
code_challenge_method = request.query_params.get("code_challenge_method", "")
# Check if the authorization request is coming from an OAuth client
registered_client = (
await db_session.scalars(
select(models.OAuthClient).where(
models.OAuthClient.client_id == client_id,
)
)
).one_or_none()
if registered_client:
client = {
"name": registered_client.client_name,
"logo": registered_client.logo_uri,
"url": registered_client.client_uri,
}
else:
client = await indieauth.get_client_id_data(client_id) # type: ignore
return await templates.render_template(
db_session,
request,
"indieauth_flow.html",
dict(
client=await indieauth.get_client_id_data(client_id),
client=client,
scopes=scope,
redirect_uri=redirect_uri,
state=state,
@ -80,7 +149,7 @@ async def indieauth_flow(
db_session: AsyncSession = Depends(get_db_session),
csrf_check: None = Depends(verify_csrf_token),
_: None = Depends(user_session_or_redirect),
) -> RedirectResponse:
) -> templates.TemplateResponse:
form_data = await request.form()
logger.info(f"{form_data=}")
@ -114,9 +183,8 @@ async def indieauth_flow(
db_session.add(auth_request)
await db_session.commit()
return RedirectResponse(
redirect_uri + f"?code={code}&state={state}&iss={iss}",
status_code=302,
return await redirect(
request, db_session, redirect_uri + f"?code={code}&state={state}&iss={iss}"
)
@ -207,29 +275,54 @@ async def indieauth_token_endpoint(
form_data = await request.form()
logger.info(f"{form_data=}")
grant_type = form_data.get("grant_type", "authorization_code")
if grant_type != "authorization_code":
if grant_type not in ["authorization_code", "refresh_token"]:
raise ValueError(f"Invalid grant_type {grant_type}")
code = form_data["code"]
# These must match the params from the first request
client_id = form_data["client_id"]
redirect_uri = form_data["redirect_uri"]
# code_verifier is optional for backward compat
code_verifier = form_data.get("code_verifier")
is_code_valid, auth_code_request = await _check_auth_code(
db_session,
code=code,
client_id=client_id,
redirect_uri=redirect_uri,
code_verifier=code_verifier,
)
if not is_code_valid or (auth_code_request and not auth_code_request.scope):
return JSONResponse(
content={"error": "invalid_grant"},
status_code=400,
if grant_type == "authorization_code":
code = form_data["code"]
redirect_uri = form_data["redirect_uri"]
# code_verifier is optional for backward compat
is_code_valid, auth_code_request = await _check_auth_code(
db_session,
code=code,
client_id=client_id,
redirect_uri=redirect_uri,
code_verifier=code_verifier,
)
if not is_code_valid or (auth_code_request and not auth_code_request.scope):
return JSONResponse(
content={"error": "invalid_grant"},
status_code=400,
)
elif grant_type == "refresh_token":
refresh_token = form_data["refresh_token"]
access_token = (
await db_session.scalars(
select(models.IndieAuthAccessToken)
.where(
models.IndieAuthAccessToken.refresh_token == refresh_token,
models.IndieAuthAccessToken.was_refreshed.is_(False),
)
.options(
joinedload(
models.IndieAuthAccessToken.indieauth_authorization_request
)
)
)
).one_or_none()
if not access_token:
raise ValueError("invalid refresh token")
if access_token.indieauth_authorization_request.client_id != client_id:
raise ValueError("invalid client ID")
auth_code_request = access_token.indieauth_authorization_request
access_token.was_refreshed = True
if not auth_code_request:
raise ValueError("Should never happen")
@ -237,6 +330,7 @@ async def indieauth_token_endpoint(
access_token = models.IndieAuthAccessToken(
indieauth_authorization_request_id=auth_code_request.id,
access_token=secrets.token_urlsafe(32),
refresh_token=secrets.token_urlsafe(32),
expires_in=3600,
scope=auth_code_request.scope,
)
@ -246,6 +340,7 @@ async def indieauth_token_endpoint(
return JSONResponse(
content={
"access_token": access_token.access_token,
"refresh_token": access_token.refresh_token,
"token_type": "Bearer",
"scope": auth_code_request.scope,
"me": config.ID + "/",
@ -261,8 +356,10 @@ async def _check_access_token(
) -> tuple[bool, models.IndieAuthAccessToken | None]:
access_token_info = (
await db_session.scalars(
select(models.IndieAuthAccessToken).where(
models.IndieAuthAccessToken.access_token == token
select(models.IndieAuthAccessToken)
.where(models.IndieAuthAccessToken.access_token == token)
.options(
joinedload(models.IndieAuthAccessToken.indieauth_authorization_request)
)
)
).one_or_none()
@ -285,6 +382,9 @@ async def _check_access_token(
@dataclass(frozen=True)
class AccessTokenInfo:
scopes: list[str]
client_id: str | None
access_token: str
exp: int
async def verify_access_token(
@ -311,9 +411,71 @@ async def verify_access_token(
return AccessTokenInfo(
scopes=access_token.scope.split(),
client_id=(
access_token.indieauth_authorization_request.client_id
if access_token.indieauth_authorization_request
else None
),
access_token=access_token.access_token,
exp=int(
(
access_token.created_at.replace(tzinfo=timezone.utc)
+ timedelta(seconds=access_token.expires_in)
).timestamp()
),
)
async def check_access_token(
request: Request,
db_session: AsyncSession = Depends(get_db_session),
) -> AccessTokenInfo | None:
token = request.headers.get("Authorization", "").removeprefix("Bearer ")
if not token:
return None
is_token_valid, access_token = await _check_access_token(db_session, token)
if not is_token_valid:
return None
if not access_token or not access_token.scope:
raise ValueError("Should never happen")
access_token_info = AccessTokenInfo(
scopes=access_token.scope.split(),
client_id=(
access_token.indieauth_authorization_request.client_id
if access_token.indieauth_authorization_request
else None
),
access_token=access_token.access_token,
exp=int(
(
access_token.created_at.replace(tzinfo=timezone.utc)
+ timedelta(seconds=access_token.expires_in)
).timestamp()
),
)
logger.info(
"Authenticated with access token from client_id="
f"{access_token_info.client_id} scopes={access_token.scope}"
)
return access_token_info
async def enforce_access_token(
request: Request,
db_session: AsyncSession = Depends(get_db_session),
) -> AccessTokenInfo:
maybe_access_token_info = await check_access_token(request, db_session)
if not maybe_access_token_info:
raise HTTPException(status_code=401, detail="access token required")
return maybe_access_token_info
@router.post("/revoke_token")
async def indieauth_revocation_endpoint(
request: Request,
@ -333,3 +495,58 @@ async def indieauth_revocation_endpoint(
content={},
status_code=200,
)
@router.post("/token_introspection")
async def oauth_introspection_endpoint(
request: Request,
credentials: HTTPBasicCredentials = Depends(basic_auth),
db_session: AsyncSession = Depends(get_db_session),
token: str = Form(),
) -> JSONResponse:
registered_client = (
await db_session.scalars(
select(models.OAuthClient).where(
models.OAuthClient.client_id == credentials.username,
models.OAuthClient.client_secret == credentials.password,
)
)
).one_or_none()
if not registered_client:
raise HTTPException(status_code=401, detail="unauthenticated")
access_token = (
await db_session.scalars(
select(models.IndieAuthAccessToken)
.where(models.IndieAuthAccessToken.access_token == token)
.join(
models.IndieAuthAuthorizationRequest,
models.IndieAuthAccessToken.indieauth_authorization_request_id
== models.IndieAuthAuthorizationRequest.id,
)
.where(
models.IndieAuthAuthorizationRequest.client_id == credentials.username
)
)
).one_or_none()
if not access_token:
return JSONResponse(content={"active": False})
is_token_valid, _ = await _check_access_token(db_session, token)
if not is_token_valid:
return JSONResponse(content={"active": False})
return JSONResponse(
content={
"active": True,
"client_id": credentials.username,
"scope": access_token.scope,
"exp": int(
(
access_token.created_at.replace(tzinfo=timezone.utc)
+ timedelta(seconds=access_token.expires_in)
).timestamp()
),
},
status_code=200,
)
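For reference, a hypothetical introspection request from a registered client; the credentials and token below are placeholders.

# Hypothetical sketch: introspecting an access token (placeholder values)
import httpx

resp = httpx.post(
    "https://yourdomain.tld/token_introspection",
    auth=("client-id", "client-secret"),  # HTTP Basic, as enforced above
    data={"token": "the-access-token"},
)
print(resp.json())  # e.g. {"active": true, "client_id": ..., "scope": ..., "exp": ...}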

View File

@ -23,6 +23,13 @@ requests_loader = pyld.documentloader.requests.requests_document_loader()
def _loader(url, options={}):
# See https://github.com/digitalbazaar/pyld/issues/133
options["headers"]["Accept"] = "application/ld+json"
# XXX: temp fix/hack as it seems to be down for now
if url == "https://w3id.org/identity/v1":
url = (
"https://raw.githubusercontent.com/web-payments/web-payments.org"
"/master/contexts/identity-v1.jsonld"
)
return requests_loader(url, options)
@ -34,7 +41,7 @@ def _options_hash(doc: ap.RawObject) -> str:
for k in ["type", "id", "signatureValue"]:
if k in doc:
del doc[k]
doc["@context"] = "https://w3id.org/identity/v1"
doc["@context"] = "https://w3id.org/security/v1"
normalized = jsonld.normalize(
doc, {"algorithm": "URDNA2015", "format": "application/nquads"}
)

View File

@ -48,6 +48,7 @@ from app import boxes
from app import config
from app import httpsig
from app import indieauth
from app import media
from app import micropub
from app import models
from app import templates
@ -61,8 +62,10 @@ from app.config import DOMAIN
from app.config import ID
from app.config import USER_AGENT
from app.config import USERNAME
from app.config import WEBFINGER_DOMAIN
from app.config import is_activitypub_requested
from app.config import verify_csrf_token
from app.customization import get_custom_router
from app.database import AsyncSession
from app.database import async_session
from app.database import get_db_session
@ -71,12 +74,15 @@ from app.templates import is_current_user_admin
from app.uploads import UPLOAD_DIR
from app.utils import pagination
from app.utils.emoji import EMOJIS_BY_NAME
from app.utils.facepile import Face
from app.utils.facepile import WebmentionReply
from app.utils.facepile import merge_faces
from app.utils.highlight import HIGHLIGHT_CSS_HASH
from app.utils.url import check_url
from app.webfinger import get_remote_follow_template
# Only images <1MB will be cached, so 64MB of data will be cached
_RESIZED_CACHE: MutableMapping[tuple[str, int], tuple[bytes, str, Any]] = LFUCache(64)
# Only images <1MB will be cached, so 32MB of data will be cached
_RESIZED_CACHE: MutableMapping[tuple[str, int], tuple[bytes, str, Any]] = LFUCache(32)
# TODO(ts):
@ -135,9 +141,15 @@ class CustomMiddleware:
headers["x-frame-options"] = "DENY"
headers["permissions-policy"] = "interest-cohort=()"
headers["content-security-policy"] = (
f"default-src 'self'; "
f"style-src 'self' 'sha256-{HIGHLIGHT_CSS_HASH}'; "
f"frame-ancestors 'none'; base-uri 'self'; form-action 'self';"
(
f"default-src 'self'; "
f"style-src 'self' 'sha256-{HIGHLIGHT_CSS_HASH}'; "
f"frame-ancestors 'none'; base-uri 'self'; form-action 'self';"
)
if not config.CUSTOM_CONTENT_SECURITY_POLICY
else config.CUSTOM_CONTENT_SECURITY_POLICY.format(
HIGHLIGHT_CSS_HASH=HIGHLIGHT_CSS_HASH
)
)
if not DEBUG:
headers["strict-transport-security"] = "max-age=63072000;"
@ -192,6 +204,9 @@ app.include_router(admin.unauthenticated_router, prefix="/admin")
app.include_router(indieauth.router)
app.include_router(micropub.router)
app.include_router(webmentions.router)
config.load_custom_routes()
if custom_router := get_custom_router():
app.include_router(custom_router)
# XXX: order matters, the proxy middleware needs to be last
app.add_middleware(CustomMiddleware)
@ -243,11 +258,34 @@ class ActivityPubResponse(JSONResponse):
media_type = "application/activity+json"
@app.get("/")
async def redirect_to_remote_instance(
request: Request,
db_session: AsyncSession,
url: str,
) -> templates.TemplateResponse:
"""
Similar to RedirectResponse, but uses a 200 response with HTML.
Needed for remote redirects on form submission endpoints,
since our CSP policy disallows remote form submission.
https://github.com/w3c/webappsec-csp/issues/8#issuecomment-810108984
"""
return await templates.render_template(
db_session,
request,
"redirect_to_remote_instance.html",
{
"request": request,
"url": url,
},
headers={"Refresh": "0;url=" + url},
)
@app.get(config.NavBarItems.NOTES_PATH)
async def index(
request: Request,
db_session: AsyncSession = Depends(get_db_session),
_: httpsig.HTTPSigInfo = Depends(httpsig.httpsig_checker),
page: int | None = None,
) -> templates.TemplateResponse | ActivityPubResponse:
if is_activitypub_requested(request):
@ -259,7 +297,7 @@ async def index(
models.OutboxObject.visibility == ap.VisibilityEnum.PUBLIC,
models.OutboxObject.is_deleted.is_(False),
models.OutboxObject.is_hidden_from_homepage.is_(False),
models.OutboxObject.ap_type != "Article",
models.OutboxObject.ap_type.in_(["Announce", "Note", "Video", "Question"]),
)
q = select(models.OutboxObject).where(*where)
total_count = await db_session.scalar(
@ -427,7 +465,12 @@ async def followers(
_: httpsig.HTTPSigInfo = Depends(httpsig.httpsig_checker),
) -> ActivityPubResponse | templates.TemplateResponse:
if is_activitypub_requested(request):
if config.HIDES_FOLLOWERS:
maybe_access_token_info = await indieauth.check_access_token(
request,
db_session,
)
if config.HIDES_FOLLOWERS and not maybe_access_token_info:
return ActivityPubResponse(
await _empty_followx_collection(
db_session=db_session,
@ -486,7 +529,12 @@ async def following(
_: httpsig.HTTPSigInfo = Depends(httpsig.httpsig_checker),
) -> ActivityPubResponse | templates.TemplateResponse:
if is_activitypub_requested(request):
if config.HIDES_FOLLOWING:
maybe_access_token_info = await indieauth.check_access_token(
request,
db_session,
)
if config.HIDES_FOLLOWING and not maybe_access_token_info:
return ActivityPubResponse(
await _empty_followx_collection(
db_session=db_session,
@ -542,22 +590,34 @@ async def following(
@app.get("/outbox")
async def outbox(
request: Request,
db_session: AsyncSession = Depends(get_db_session),
_: httpsig.HTTPSigInfo = Depends(httpsig.httpsig_checker),
) -> ActivityPubResponse:
maybe_access_token_info = await indieauth.check_access_token(
request,
db_session,
)
# Default restrictions unless the request is authenticated with an access token
restricted_where = [
models.OutboxObject.visibility == ap.VisibilityEnum.PUBLIC,
models.OutboxObject.ap_type.in_(["Create", "Note", "Article", "Announce"]),
]
# By design, we only show the last 20 public activities in the outbox
outbox_objects = (
await db_session.scalars(
select(models.OutboxObject)
.where(
models.OutboxObject.visibility == ap.VisibilityEnum.PUBLIC,
models.OutboxObject.is_deleted.is_(False),
models.OutboxObject.ap_type.in_(["Create", "Announce"]),
*([] if maybe_access_token_info else restricted_where),
)
.order_by(models.OutboxObject.ap_published_at.desc())
.limit(20)
)
).all()
return ActivityPubResponse(
{
"@context": ap.AS_EXTENDED_CTX,
@ -572,6 +632,49 @@ async def outbox(
)
@app.post("/outbox")
async def post_outbox(
request: Request,
db_session: AsyncSession = Depends(get_db_session),
access_token_info: indieauth.AccessTokenInfo = Depends(
indieauth.enforce_access_token
),
) -> ActivityPubResponse:
payload = await request.json()
logger.info(f"{payload=}")
if payload.get("type") == "Create":
assert payload["actor"] == ID
obj = payload["object"]
to_and_cc = obj.get("to", []) + obj.get("cc", [])
if ap.AS_PUBLIC in obj.get("to", []) and ID + "/followers" in to_and_cc:
visibility = ap.VisibilityEnum.PUBLIC
elif ap.AS_PUBLIC in to_and_cc and ID + "/followers" in to_and_cc:
visibility = ap.VisibilityEnum.UNLISTED
else:
visibility = ap.VisibilityEnum.DIRECT
object_id, outbox_object = await boxes.send_create(
db_session,
ap_type=obj["type"],
source=obj["content"],
uploads=[],
in_reply_to=obj.get("inReplyTo"),
visibility=visibility,
content_warning=obj.get("summary"),
is_sensitive=obj.get("sensitive", False),
)
else:
raise ValueError("TODO")
return ActivityPubResponse(
outbox_object.ap_object,
status_code=201,
headers={"Location": boxes.outbox_object_id(object_id)},
)
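A minimal C2S sketch posting a public note to this endpoint, assuming an access token was obtained through the IndieAuth/OAuth flow; the instance URL and token are placeholders.

# Hypothetical C2S sketch: creating a public Note via POST /outbox
import httpx

server = "https://yourdomain.tld"  # placeholder instance URL (must match the server ID)
activity = {
    "type": "Create",
    "actor": server,
    "object": {
        "type": "Note",
        "content": "Hello from C2S!",
        "to": ["https://www.w3.org/ns/activitystreams#Public"],
        "cc": [server + "/followers"],  # public visibility per the checks above
    },
}
resp = httpx.post(
    server + "/outbox",
    json=activity,
    headers={"Authorization": "Bearer the-access-token"},
)
print(resp.status_code, resp.headers.get("Location"))  # 201 + the new object URL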
@app.get("/featured")
async def featured(
db_session: AsyncSession = Depends(get_db_session),
@ -609,6 +712,14 @@ async def _check_outbox_object_acl(
if templates.is_current_user_admin(request):
return None
maybe_access_token_info = await indieauth.check_access_token(
request,
db_session,
)
if maybe_access_token_info:
# TODO: check scopes
return None
if ap_object.visibility in [
ap.VisibilityEnum.PUBLIC,
ap.VisibilityEnum.UNLISTED,
@ -632,13 +743,75 @@ async def _check_outbox_object_acl(
raise HTTPException(status_code=404)
async def _fetch_likes(
db_session: AsyncSession,
outbox_object: models.OutboxObject,
) -> list[models.InboxObject]:
return (
(
await db_session.scalars(
select(models.InboxObject)
.where(
models.InboxObject.ap_type == "Like",
models.InboxObject.activity_object_ap_id == outbox_object.ap_id,
models.InboxObject.is_deleted.is_(False),
)
.options(joinedload(models.InboxObject.actor))
.order_by(models.InboxObject.ap_published_at.desc())
.limit(10)
)
)
.unique()
.all()
)
async def _fetch_shares(
db_session: AsyncSession,
outbox_object: models.OutboxObject,
) -> list[models.InboxObject]:
return (
(
await db_session.scalars(
select(models.InboxObject)
.filter(
models.InboxObject.ap_type == "Announce",
models.InboxObject.activity_object_ap_id == outbox_object.ap_id,
models.InboxObject.is_deleted.is_(False),
)
.options(joinedload(models.InboxObject.actor))
.order_by(models.InboxObject.ap_published_at.desc())
.limit(10)
)
)
.unique()
.all()
)
async def _fetch_webmentions(
db_session: AsyncSession,
outbox_object: models.OutboxObject,
) -> list[models.Webmention]:
return (
await db_session.scalars(
select(models.Webmention)
.filter(
models.Webmention.outbox_object_id == outbox_object.id,
models.Webmention.is_deleted.is_(False),
)
.limit(50)
)
).all()
@app.get("/o/{public_id}")
async def outbox_by_public_id(
public_id: str,
request: Request,
db_session: AsyncSession = Depends(get_db_session),
httpsig_info: httpsig.HTTPSigInfo = Depends(httpsig.httpsig_checker),
) -> ActivityPubResponse | templates.TemplateResponse:
) -> ActivityPubResponse | templates.TemplateResponse | RedirectResponse:
maybe_object = (
(
await db_session.execute(
@ -665,69 +838,148 @@ async def outbox_by_public_id(
if is_activitypub_requested(request):
return ActivityPubResponse(maybe_object.ap_object)
if maybe_object.ap_type == "Article":
return RedirectResponse(
f"{BASE_URL}/articles/{public_id[:7]}/{maybe_object.slug}",
status_code=301,
)
replies_tree = await boxes.get_replies_tree(
db_session,
maybe_object,
is_current_user_admin=is_current_user_admin(request),
)
likes = (
(
await db_session.scalars(
select(models.InboxObject)
.where(
models.InboxObject.ap_type == "Like",
models.InboxObject.activity_object_ap_id == maybe_object.ap_id,
models.InboxObject.is_deleted.is_(False),
)
.options(joinedload(models.InboxObject.actor))
.order_by(models.InboxObject.ap_published_at.desc())
.limit(10)
)
)
.unique()
.all()
)
shares = (
(
await db_session.scalars(
select(models.InboxObject)
.filter(
models.InboxObject.ap_type == "Announce",
models.InboxObject.activity_object_ap_id == maybe_object.ap_id,
models.InboxObject.is_deleted.is_(False),
)
.options(joinedload(models.InboxObject.actor))
.order_by(models.InboxObject.ap_published_at.desc())
.limit(10)
)
)
.unique()
.all()
)
webmentions = (
await db_session.scalars(
select(models.Webmention)
.filter(
models.Webmention.outbox_object_id == maybe_object.id,
models.Webmention.is_deleted.is_(False),
)
.limit(10)
)
).all()
webmentions = await _fetch_webmentions(db_session, maybe_object)
likes = await _fetch_likes(db_session, maybe_object)
shares = await _fetch_shares(db_session, maybe_object)
return await templates.render_template(
db_session,
request,
"object.html",
{
"replies_tree": replies_tree,
"replies_tree": _merge_replies(replies_tree, webmentions),
"outbox_object": maybe_object,
"likes": likes,
"shares": shares,
"webmentions": webmentions,
"likes": _merge_faces_from_inbox_object_and_webmentions(
likes,
webmentions,
models.WebmentionType.LIKE,
),
"shares": _merge_faces_from_inbox_object_and_webmentions(
shares,
webmentions,
models.WebmentionType.REPOST,
),
"webmentions": _filter_webmentions(webmentions),
},
)
def _filter_webmentions(
webmentions: list[models.Webmention],
) -> list[models.Webmention]:
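# Keep only webmentions that are not already surfaced as likes, reposts or replies.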
return [
wm
for wm in webmentions
if wm.webmention_type
not in [
models.WebmentionType.LIKE,
models.WebmentionType.REPOST,
models.WebmentionType.REPLY,
]
]
def _merge_faces_from_inbox_object_and_webmentions(
inbox_objects: list[models.InboxObject],
webmentions: list[models.Webmention],
webmention_type: models.WebmentionType,
) -> list[Face]:
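# Build facepile entries from both inbox activities and webmentions of the given type.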
wm_faces = []
for wm in webmentions:
if wm.webmention_type != webmention_type:
continue
if face := Face.from_webmention(wm):
wm_faces.append(face)
return merge_faces(
[Face.from_inbox_object(obj) for obj in inbox_objects] + wm_faces
)
def _merge_replies(
reply_tree_node: boxes.ReplyTreeNode,
webmentions: list[models.Webmention],
) -> boxes.ReplyTreeNode:
# TODO: return None as we update the object in place
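# Webmention replies are wrapped as extra tree nodes (with no AP object)
# and sorted into the existing children by publication date below.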
webmention_replies = []
for wm in [
wm for wm in webmentions if wm.webmention_type == models.WebmentionType.REPLY
]:
if rep := WebmentionReply.from_webmention(wm):
webmention_replies.append(
boxes.ReplyTreeNode(
ap_object=None,
wm_reply=rep,
is_requested=False,
children=[],
)
)
reply_tree_node.children = sorted(
reply_tree_node.children + webmention_replies,
key=lambda node: node.published_at,
reverse=True,
)
return reply_tree_node
@app.get("/articles/{short_id}/{slug}")
async def article_by_slug(
short_id: str,
slug: str,
request: Request,
db_session: AsyncSession = Depends(get_db_session),
httpsig_info: httpsig.HTTPSigInfo = Depends(httpsig.httpsig_checker),
) -> ActivityPubResponse | templates.TemplateResponse | RedirectResponse:
maybe_object = await boxes.get_outbox_object_by_slug_and_short_id(
db_session, slug, short_id
)
if not maybe_object:
raise HTTPException(status_code=404)
await _check_outbox_object_acl(request, db_session, maybe_object, httpsig_info)
if is_activitypub_requested(request):
return ActivityPubResponse(maybe_object.ap_object)
replies_tree = await boxes.get_replies_tree(
db_session,
maybe_object,
is_current_user_admin=is_current_user_admin(request),
)
likes = await _fetch_likes(db_session, maybe_object)
shares = await _fetch_shares(db_session, maybe_object)
webmentions = await _fetch_webmentions(db_session, maybe_object)
return await templates.render_template(
db_session,
request,
"object.html",
{
"replies_tree": _merge_replies(replies_tree, webmentions),
"outbox_object": maybe_object,
"likes": _merge_faces_from_inbox_object_and_webmentions(
likes,
webmentions,
models.WebmentionType.LIKE,
),
"shares": _merge_faces_from_inbox_object_and_webmentions(
shares,
webmentions,
models.WebmentionType.REPOST,
),
"webmentions": _filter_webmentions(webmentions),
},
)
@ -763,7 +1015,7 @@ async def tag_by_name(
_: httpsig.HTTPSigInfo = Depends(httpsig.httpsig_checker),
) -> ActivityPubResponse | templates.TemplateResponse:
where = [
models.TaggedOutboxObject.tag == tag,
models.TaggedOutboxObject.tag == tag.lower(),
models.OutboxObject.visibility == ap.VisibilityEnum.PUBLIC,
models.OutboxObject.is_deleted.is_(False),
]
@ -789,7 +1041,7 @@ async def tag_by_name(
return ActivityPubResponse(
{
"@context": ap.AS_CTX,
"id": BASE_URL + f"/t/{tag}",
"id": BASE_URL + f"/t/{tag.lower()}",
"type": "OrderedCollection",
"totalItems": tagged_count,
"orderedItems": [
@ -837,6 +1089,78 @@ def emoji_by_name(name: str) -> ActivityPubResponse:
return ActivityPubResponse({"@context": ap.AS_EXTENDED_CTX, **emoji})
@app.get("/inbox")
async def get_inbox(
request: Request,
db_session: AsyncSession = Depends(get_db_session),
access_token_info: indieauth.AccessTokenInfo = Depends(
indieauth.enforce_access_token
),
page: bool | None = None,
next_cursor: str | None = None,
) -> ActivityPubResponse:
where = [
models.InboxObject.ap_type.in_(
["Create", "Follow", "Like", "Announce", "Undo", "Update"]
)
]
total_items = await db_session.scalar(
select(func.count(models.InboxObject.id)).where(*where)
)
if not page and not next_cursor:
return ActivityPubResponse(
{
"@context": ap.AS_CTX,
"id": ID + "/inbox",
"first": ID + "/inbox?page=true",
"type": "OrderedCollection",
"totalItems": total_items,
}
)
q = (
select(models.InboxObject)
.where(*where)
.order_by(models.InboxObject.created_at.desc())
) # type: ignore
if next_cursor:
q = q.where(
models.InboxObject.created_at
< pagination.decode_cursor(next_cursor) # type: ignore
)
q = q.limit(20)
items = [item for item in (await db_session.scalars(q)).all()]
next_cursor = None
if (
items
and await db_session.scalar(
select(func.count(models.InboxObject.id)).where(
*where, models.InboxObject.created_at < items[-1].created_at
)
)
> 0
):
next_cursor = pagination.encode_cursor(items[-1].created_at)
collection_page = {
"@context": ap.AS_CTX,
"id": (
ID + "/inbox?page=true"
if not next_cursor
else ID + f"/inbox?next_cursor={next_cursor}"
),
"partOf": ID + "/inbox",
"type": "OrderedCollectionPage",
"orderedItems": [item.ap_object for item in items],
}
if next_cursor:
collection_page["next"] = ID + f"/inbox?next_cursor={next_cursor}"
return ActivityPubResponse(collection_page)
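The C2S inbox above is paged with an opaque next_cursor. A minimal client-side sketch of walking the collection (the helper name, the Bearer-token header and the use of httpx are assumptions, not part of the codebase):
import httpx
async def fetch_inbox_items(base_url: str, access_token: str) -> list[dict]:
    # Hypothetical client helper: walk the OrderedCollection pages until
    # the server stops returning a "next" link.
    items: list[dict] = []
    headers = {"Authorization": f"Bearer {access_token}"}
    async with httpx.AsyncClient() as client:
        url = base_url + "/inbox?page=true"
        while url:
            resp = await client.get(url, headers=headers)
            resp.raise_for_status()
            page = resp.json()
            items.extend(page.get("orderedItems", []))
            # "next" is an absolute URL built from ID + the encoded cursor
            url = page.get("next")
    return items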
@app.post("/inbox")
async def inbox(
request: Request,
@ -866,9 +1190,10 @@ async def get_remote_follow(
@app.post("/remote_follow")
async def post_remote_follow(
request: Request,
db_session: AsyncSession = Depends(get_db_session),
csrf_check: None = Depends(verify_csrf_token),
profile: str = Form(),
) -> RedirectResponse:
) -> templates.TemplateResponse:
if not profile.startswith("@"):
profile = f"@{profile}"
@ -877,9 +1202,10 @@ async def post_remote_follow(
# TODO(ts): error message to user
raise HTTPException(status_code=404)
return RedirectResponse(
return await redirect_to_remote_instance(
request,
db_session,
remote_follow_template.format(uri=ID),
status_code=302,
)
@ -907,10 +1233,11 @@ async def remote_interaction(
@app.post("/remote_interaction")
async def post_remote_interaction(
request: Request,
db_session: AsyncSession = Depends(get_db_session),
csrf_check: None = Depends(verify_csrf_token),
profile: str = Form(),
ap_id: str = Form(),
) -> RedirectResponse:
) -> templates.TemplateResponse:
if not profile.startswith("@"):
profile = f"@{profile}"
@ -919,21 +1246,26 @@ async def post_remote_interaction(
# TODO(ts): error message to user
raise HTTPException(status_code=404)
return RedirectResponse(
remote_follow_template.format(uri=ap_id),
status_code=302,
return await redirect_to_remote_instance(
request,
db_session,
remote_follow_template.format(uri=ap_id),
)
@app.get("/.well-known/webfinger")
async def wellknown_webfinger(resource: str) -> JSONResponse:
"""Exposes/servers WebFinger data."""
if resource not in [f"acct:{USERNAME}@{DOMAIN}", ID]:
if resource not in [
f"acct:{USERNAME}@{WEBFINGER_DOMAIN}",
ID,
f"acct:{USERNAME}@{DOMAIN}",
]:
logger.info(f"Got invalid req for {resource}")
raise HTTPException(status_code=404)
out = {
"subject": f"acct:{USERNAME}@{DOMAIN}",
"subject": f"acct:{USERNAME}@{WEBFINGER_DOMAIN}",
"aliases": [ID],
"links": [
{
@ -997,11 +1329,11 @@ async def nodeinfo(
)
proxy_client = httpx.AsyncClient(follow_redirects=True, http2=True)
async def _proxy_get(
request: starlette.requests.Request, url: str, stream: bool
proxy_client: httpx.AsyncClient,
request: starlette.requests.Request,
url: str,
stream: bool,
) -> httpx.Response:
# Request the URL (and filter request headers)
proxy_req = proxy_client.build_request(
@ -1042,21 +1374,35 @@ def _add_cache_control(headers: dict[str, str]) -> dict[str, str]:
return {**headers, "Cache-Control": "max-age=31536000"}
@app.get("/proxy/media/{encoded_url}")
@app.get("/proxy/media/{exp}/{sig}/{encoded_url}")
async def serve_proxy_media(
request: Request,
exp: int,
sig: str,
encoded_url: str,
background_tasks: fastapi.BackgroundTasks,
) -> StreamingResponse | PlainTextResponse:
# Decode the base64-encoded URL
url = base64.urlsafe_b64decode(encoded_url).decode()
check_url(url)
media.verify_proxied_media_sig(exp, url, sig)
proxy_resp = await _proxy_get(request, url, stream=True)
proxy_client = httpx.AsyncClient(
follow_redirects=True,
timeout=httpx.Timeout(timeout=10.0),
transport=httpx.AsyncHTTPTransport(retries=1),
)
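# The per-request client is closed by a background task, i.e. only after the
# streaming response has been fully sent.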
async def _close_proxy_client():
await proxy_client.aclose()
background_tasks.add_task(_close_proxy_client)
proxy_resp = await _proxy_get(proxy_client, request, url, stream=True)
if proxy_resp.status_code >= 300:
logger.info(f"failed to proxy {url}, got {proxy_resp.status_code}")
await proxy_resp.aclose()
return PlainTextResponse(
"proxy error",
status_code=proxy_resp.status_code,
)
@ -1067,6 +1413,7 @@ async def serve_proxy_media(
_filter_proxy_resp_headers(
proxy_resp,
[
"content-encoding",
"content-length",
"content-type",
"content-range",
@ -1082,20 +1429,26 @@ async def serve_proxy_media(
)
@app.get("/proxy/media/{encoded_url}/{size}")
@app.get("/proxy/media/{exp}/{sig}/{encoded_url}/{size}")
async def serve_proxy_media_resized(
request: Request,
exp: int,
sig: str,
encoded_url: str,
size: int,
background_tasks: fastapi.BackgroundTasks,
) -> PlainTextResponse:
if size not in {50, 740}:
raise ValueError("Unsupported size")
is_webp_supported = "image/webp" in request.headers.get("accept")
# Decode the base64-encoded URL
url = base64.urlsafe_b64decode(encoded_url).decode()
check_url(url)
media.verify_proxied_media_sig(exp, url, sig)
if cached_resp := _RESIZED_CACHE.get((url, size)):
if (cached_resp := _RESIZED_CACHE.get((url, size))) and is_webp_supported:
resized_content, resized_mimetype, resp_headers = cached_resp
return PlainTextResponse(
resized_content,
@ -1103,11 +1456,21 @@ async def serve_proxy_media_resized(
headers=resp_headers,
)
proxy_resp = await _proxy_get(request, url, stream=False)
proxy_client = httpx.AsyncClient(
follow_redirects=True,
timeout=httpx.Timeout(timeout=10.0),
transport=httpx.AsyncHTTPTransport(retries=1),
)
async def _close_proxy_client():
await proxy_client.aclose()
background_tasks.add_task(_close_proxy_client)
proxy_resp = await _proxy_get(proxy_client, request, url, stream=False)
if proxy_resp.status_code >= 300:
logger.info(f"failed to proxy {url}, got {proxy_resp.status_code}")
await proxy_resp.aclose()
return PlainTextResponse(
"proxy error",
status_code=proxy_resp.status_code,
)
@ -1133,10 +1496,10 @@ async def serve_proxy_media_resized(
is_webp = False
try:
resized_buf = BytesIO()
i.save(resized_buf, format="webp")
is_webp = True
i.save(resized_buf, format="webp" if is_webp_supported else i.format)
is_webp = is_webp_supported
except Exception:
logger.exception("Failed to convert to webp")
logger.exception("Failed to create thumbnail")
resized_buf = BytesIO()
i.save(resized_buf, format=i.format)
resized_buf.seek(0)
@ -1194,6 +1557,7 @@ async def serve_attachment(
@app.get("/attachments/thumbnails/{content_hash}/{filename}")
async def serve_attachment_thumbnail(
request: Request,
content_hash: str,
filename: str,
db_session: AsyncSession = Depends(get_db_session),
@ -1208,11 +1572,20 @@ async def serve_attachment_thumbnail(
if not upload or not upload.has_thumbnail:
raise HTTPException(status_code=404)
return FileResponse(
UPLOAD_DIR / (content_hash + "_resized"),
media_type="image/webp",
headers={"Cache-Control": "max-age=31536000"},
)
is_webp_supported = "image/webp" in request.headers.get("accept", "")
if is_webp_supported:
return FileResponse(
UPLOAD_DIR / (content_hash + "_resized"),
media_type="image/webp",
headers={"Cache-Control": "max-age=31536000"},
)
else:
return FileResponse(
UPLOAD_DIR / content_hash,
media_type=upload.content_type,
headers={"Cache-Control": "max-age=31536000"},
)
@app.get("/robots.txt", response_class=PlainTextResponse)
@ -1271,22 +1644,27 @@ async def json_feed(
],
}
)
return {
"version": "https://jsonfeed.org/version/1",
result = {
"version": "https://jsonfeed.org/version/1.1",
"title": f"{LOCAL_ACTOR.display_name}'s microblog'",
"home_page_url": LOCAL_ACTOR.url,
"feed_url": BASE_URL + "/feed.json",
"author": {
"name": LOCAL_ACTOR.display_name,
"url": LOCAL_ACTOR.url,
"avatar": LOCAL_ACTOR.icon_url,
},
"authors": [
{
"name": LOCAL_ACTOR.display_name,
"url": LOCAL_ACTOR.url,
}
],
"items": data,
}
if LOCAL_ACTOR.icon_url:
result["authors"][0]["avatar"] = LOCAL_ACTOR.icon_url # type: ignore
return result
async def _gen_rss_feed(
db_session: AsyncSession,
is_rss: bool,
):
fg = FeedGenerator()
fg.id(BASE_URL + "/feed.rss")
@ -1294,7 +1672,8 @@ async def _gen_rss_feed(
fg.description(f"{LOCAL_ACTOR.display_name}'s microblog")
fg.author({"name": LOCAL_ACTOR.display_name})
fg.link(href=LOCAL_ACTOR.url, rel="alternate")
fg.logo(LOCAL_ACTOR.icon_url)
if LOCAL_ACTOR.icon_url:
fg.logo(LOCAL_ACTOR.icon_url)
fg.language("en")
outbox_objects = await _get_outbox_for_feed(db_session)
@ -1316,8 +1695,12 @@ async def _gen_rss_feed(
fe = fg.add_entry()
fe.id(outbox_object.url)
if outbox_object.name is not None:
fe.title(outbox_object.name)
elif not is_rss: # Atom feeds require a title
fe.title(outbox_object.url)
fe.link(href=outbox_object.url)
fe.title(outbox_object.url)
fe.description(content)
fe.content(content)
fe.published(outbox_object.ap_published_at.replace(tzinfo=timezone.utc))
@ -1330,7 +1713,7 @@ async def rss_feed(
db_session: AsyncSession = Depends(get_db_session),
) -> PlainTextResponse:
return PlainTextResponse(
(await _gen_rss_feed(db_session)).rss_str(),
(await _gen_rss_feed(db_session, is_rss=True)).rss_str(),
headers={"Content-Type": "application/rss+xml"},
)
@ -1340,6 +1723,6 @@ async def atom_feed(
db_session: AsyncSession = Depends(get_db_session),
) -> PlainTextResponse:
return PlainTextResponse(
(await _gen_rss_feed(db_session)).atom_str(),
(await _gen_rss_feed(db_session, is_rss=False)).atom_str(),
headers={"Content-Type": "application/atom+xml"},
)

View File

@ -1,15 +1,44 @@
import base64
import time
from app.config import BASE_URL
from app.config import hmac_sha256
SUPPORTED_RESIZE = [50, 740]
EXPIRY_PERIOD = 86400
EXPIRY_LENGTH = 7
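# Expiry values are expressed in EXPIRY_PERIOD (one day) units, so a signed
# proxy URL stays valid for roughly EXPIRY_LENGTH days.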
class InvalidProxySignatureError(Exception):
pass
def proxied_media_sig(expires: int, url: str) -> str:
hm = hmac_sha256()
hm.update(f"{expires}".encode())
hm.update(b"|")
hm.update(url.encode())
return base64.urlsafe_b64encode(hm.digest()).decode()
def verify_proxied_media_sig(expires: int, url: str, sig: str) -> None:
now = int(time.time() / EXPIRY_PERIOD)
expected = proxied_media_sig(expires, url)
if now > expires or sig != expected:
raise InvalidProxySignatureError("invalid or expired media")
def proxied_media_url(url: str) -> str:
if url.startswith(BASE_URL):
return url
expires = int(time.time() / EXPIRY_PERIOD) + EXPIRY_LENGTH
sig = proxied_media_sig(expires, url)
return "/proxy/media/" + base64.urlsafe_b64encode(url.encode()).decode()
return (
BASE_URL
+ f"/proxy/media/{expires}/{sig}/"
+ base64.urlsafe_b64encode(url.encode()).decode()
)
def resized_media_url(url: str, size: int) -> str:

View File

@ -132,7 +132,7 @@ async def post_micropub_endpoint(
h = form_data["h"]
entry_type = f"h-{h}"
logger.info(f"Creating {entry_type}")
logger.info(f"Creating {entry_type=} with {access_token_info=}")
if entry_type != "h-entry":
return JSONResponse(
@ -150,7 +150,7 @@ async def post_micropub_endpoint(
else:
content = form_data["content"]
public_id = await send_create(
public_id, _ = await send_create(
db_session,
"Note",
content,

View File

@ -1,4 +1,5 @@
import enum
from datetime import datetime
from typing import Any
from typing import Optional
from typing import Union
@ -54,6 +55,10 @@ class Actor(Base, BaseActor):
is_blocked = Column(Boolean, nullable=False, default=False, server_default="0")
is_deleted = Column(Boolean, nullable=False, default=False, server_default="0")
are_announces_hidden_from_stream = Column(
Boolean, nullable=False, default=False, server_default="0"
)
@property
def is_from_db(self) -> bool:
return True
@ -158,6 +163,7 @@ class OutboxObject(Base, BaseObject):
is_hidden_from_homepage = Column(Boolean, nullable=False, default=False)
public_id = Column(String, nullable=False, index=True)
slug = Column(String, nullable=True, index=True)
ap_type = Column(String, nullable=False, index=True)
ap_id: Mapped[str] = Column(String, nullable=False, unique=True, index=True)
@ -250,6 +256,8 @@ class OutboxObject(Base, BaseObject):
"mediaType": attachment.upload.content_type,
"name": attachment.alt or attachment.filename,
"url": url,
"width": attachment.upload.width,
"height": attachment.upload.height,
"proxiedUrl": url,
"resizedUrl": BASE_URL
+ (
@ -281,6 +289,13 @@ class OutboxObject(Base, BaseObject):
def is_from_outbox(self) -> bool:
return True
@property
def url(self) -> str | None:
# XXX: rewrite old URL here for compat
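# e.g. an Article once served from /o/<public_id> now lives at
# /articles/<short_id>/<slug> (see the redirect in outbox_by_public_id)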
if self.ap_type == "Article" and self.slug and self.public_id:
return f"{BASE_URL}/articles/{self.public_id[:7]}/{self.slug}"
return super().url
class Follower(Base):
__tablename__ = "follower"
@ -422,7 +437,7 @@ class OutboxObjectAttachment(Base):
outbox_object_id = Column(Integer, ForeignKey("outbox.id"), nullable=False)
upload_id = Column(Integer, ForeignKey("upload.id"), nullable=False)
upload = relationship(Upload, uselist=False)
upload: Mapped["Upload"] = relationship(Upload, uselist=False)
class IndieAuthAuthorizationRequest(Base):
@ -445,17 +460,53 @@ class IndieAuthAccessToken(Base):
__tablename__ = "indieauth_access_token"
id = Column(Integer, primary_key=True, index=True)
created_at = Column(DateTime(timezone=True), nullable=False, default=now)
created_at: Mapped[datetime] = Column(
DateTime(timezone=True), nullable=False, default=now
)
# Will be null for personal access tokens
indieauth_authorization_request_id = Column(
Integer, ForeignKey("indieauth_authorization_request.id"), nullable=True
)
indieauth_authorization_request = relationship(
IndieAuthAuthorizationRequest,
uselist=False,
)
access_token = Column(String, nullable=False, unique=True, index=True)
expires_in = Column(Integer, nullable=False)
access_token: Mapped[str] = Column(String, nullable=False, unique=True, index=True)
refresh_token = Column(String, nullable=True, unique=True, index=True)
expires_in: Mapped[int] = Column(Integer, nullable=False)
scope = Column(String, nullable=False)
is_revoked = Column(Boolean, nullable=False, default=False)
was_refreshed = Column(Boolean, nullable=False, default=False, server_default="0")
class OAuthClient(Base):
__tablename__ = "oauth_client"
id = Column(Integer, primary_key=True, index=True)
created_at = Column(DateTime(timezone=True), nullable=False, default=now)
# Request
client_name = Column(String, nullable=False)
redirect_uris: Mapped[list[str]] = Column(JSON, nullable=True)
# Optional from request
client_uri = Column(String, nullable=True)
logo_uri = Column(String, nullable=True)
scope = Column(String, nullable=True)
# Response
client_id = Column(String, nullable=False, unique=True, index=True)
client_secret = Column(String, nullable=False, unique=True)
@enum.unique
class WebmentionType(str, enum.Enum):
UNKNOWN = "unknown"
LIKE = "like"
REPLY = "reply"
REPOST = "repost"
class Webmention(Base):
@ -474,6 +525,8 @@ class Webmention(Base):
outbox_object_id = Column(Integer, ForeignKey("outbox.id"), nullable=False)
outbox_object = relationship(OutboxObject, uselist=False)
webmention_type = Column(Enum(WebmentionType), nullable=True)
@property
def as_facepile_item(self) -> webmentions.Webmention | None:
if not self.source_microformats:
@ -483,6 +536,7 @@ class Webmention(Base):
self.source_microformats["items"], self.source
)
except Exception:
# TODO: return a facepile with the unknown image
logger.warning(
f"Failed to generate facefile item for Webmention id={self.id}"
)
@ -551,6 +605,14 @@ class NotificationType(str, enum.Enum):
UPDATED_WEBMENTION = "updated_webmention"
DELETED_WEBMENTION = "deleted_webmention"
# incoming
BLOCKED = "blocked"
UNBLOCKED = "unblocked"
# outgoing
BLOCK = "block"
UNBLOCK = "unblock"
class Notification(Base):
__tablename__ = "notifications"

View File

@ -151,7 +151,7 @@ def _set_next_try(
if not outgoing_activity.tries:
raise ValueError("Should never happen")
if outgoing_activity.tries == _MAX_RETRIES:
if outgoing_activity.tries >= _MAX_RETRIES:
outgoing_activity.is_errored = True
outgoing_activity.next_try = None
else:

View File

@ -102,6 +102,8 @@ async def _prune_old_inbox_objects(
models.InboxObject.ap_type.in_(["Note"]),
)
),
# Keep Move objects as they are linked to notifications
models.InboxObject.ap_type.not_in(["Move"]),
# Filter by retention days
models.InboxObject.ap_published_at
< now() - timedelta(days=INBOX_RETENTION_DAYS),

app/redirect.py Normal file (28 lines)
View File

@ -0,0 +1,28 @@
from fastapi import Request
from app import templates
from app.database import AsyncSession
async def redirect(
request: Request,
db_session: AsyncSession,
url: str,
) -> templates.TemplateResponse:
"""
Similar to RedirectResponse, but uses a 200 response with HTML.
Needed for remote redirects on form submission endpoints,
since our CSP policy disallows remote form submission.
https://github.com/w3c/webappsec-csp/issues/8#issuecomment-810108984
"""
return await templates.render_template(
db_session,
request,
"redirect.html",
{
"request": request,
"url": url,
},
headers={"Refresh": "0;url=" + url},
)
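A sketch of how a form-handling route in main.py could return this helper instead of a 302 (the route path and target URL below are illustrative only, not part of the codebase):
from app.redirect import redirect as redirect_with_refresh
@app.post("/example-form")  # hypothetical route, for illustration only
async def example_form(
    request: Request,
    db_session: AsyncSession = Depends(get_db_session),
) -> templates.TemplateResponse:
    # Renders redirect.html with a 200 status and a Refresh header,
    # which keeps the CSP form-action policy satisfied.
    return await redirect_with_refresh(
        request,
        db_session,
        "https://remote.example/authorize_interaction",
    )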

View File

@ -51,17 +51,20 @@ $code-highlight-background: #f0f0f0;
.p-summary {
display: inline-block;
}
label {
.show-more-btn {
margin-left: 5px;
}
.show-more-state {
display: none;
summary {
display: inline-block;
}
.show-more-state ~ .obj-content {
margin-top: 0;
summary::-webkit-details-marker {
display: none
}
.show-more-state:checked ~ .obj-content {
display: none;
&:not([open]) .show-more-btn::after {
content: 'show more';
}
&[open] .show-more-btn::after {
content: 'show less';
}
}
.sensitive-attachment {
@ -429,8 +432,7 @@ a.label-btn {
.activity-attachment {
margin: 30px 0 20px 0;
img, audio, video {
width: 100%;
max-width: 740px;
max-width: calc(min(740px, 100%));
}
}
img.inline-img {
@ -456,7 +458,7 @@ a.label-btn {
border: 2px dashed $secondary-color;
}
.error-box {
.error-box, .scolor {
color: $secondary-color;
}
@ -467,6 +469,9 @@ a.label-btn {
span {
color: $muted-color;
}
span.new {
color: $secondary-color;
}
}
.actor-metadata {
color: $muted-color;
@ -531,3 +536,36 @@ a.label-btn {
text-decoration: underline;
}
}
.ap-place {
h3 {
display: inline;
font-weight: normal;
}
h3::after {
content: ': ';
}
}
.margin-top-20 {
margin-top: 20px;
}
.video-wrapper {
position: relative;
}
.video-gif-overlay {
display: none;
}
.video-gif-mode + .video-gif-overlay {
display: block;
position: absolute;
top: 5px;
left: 5px;
padding: 0 3px;
font-size: 0.8em;
background: rgba(0,0,0,.5);
color: #fff;
}

View File

@ -1,13 +1,14 @@
import re
import typing
from loguru import logger
from mistletoe import Document # type: ignore
from mistletoe.block_token import CodeFence # type: ignore
from mistletoe.html_renderer import HTMLRenderer # type: ignore
from mistletoe.span_token import SpanToken # type: ignore
from pygments import highlight # type: ignore
from pygments.formatters import HtmlFormatter # type: ignore
from pygments.lexers import get_lexer_by_name as get_lexer # type: ignore
from pygments.lexers import guess_lexer # type: ignore
from pygments.util import ClassNotFound # type: ignore
from sqlalchemy import select
from app import webfinger
@ -21,15 +22,16 @@ if typing.TYPE_CHECKING:
_FORMATTER = HtmlFormatter(style=CODE_HIGHLIGHTING_THEME)
_HASHTAG_REGEX = re.compile(r"(#[\d\w]+)")
_MENTION_REGEX = re.compile(r"@[\d\w_.+-]+@[\d\w-]+\.[\d\w\-.]+")
_MENTION_REGEX = re.compile(r"(@[\d\w_.+-]+@[\d\w-]+\.[\d\w\-.]+)")
_URL_REGEX = re.compile(
"(https?:\\/\\/(?:www\\.)?[-a-zA-Z0-9@:%._\\+~#=]{1,256}\\.[a-zA-Z0-9()]{1,6}\\b(?:[-a-zA-Z0-9()@:%_\\+.~#?&\\/=]*))" # noqa: E501
)
class AutoLink(SpanToken):
parse_inner = False
precedence = 10
pattern = re.compile(
"(https?:\\/\\/(?:www\\.)?[-a-zA-Z0-9@:%._\\+~#=]{1,256}\\.[a-zA-Z0-9()]{1,6}\\b(?:[-a-zA-Z0-9()@:%_\\+.~#?&\\/=]*))" # noqa: E501
)
precedence = 1
pattern = _URL_REGEX
def __init__(self, match_obj: re.Match) -> None:
self.target = match_obj.group()
@ -38,7 +40,7 @@ class AutoLink(SpanToken):
class Mention(SpanToken):
parse_inner = False
precedence = 10
pattern = re.compile(r"(@[\d\w_.+-]+@[\d\w-]+\.[\d\w\-.]+)")
pattern = _MENTION_REGEX
def __init__(self, match_obj: re.Match) -> None:
self.target = match_obj.group()
@ -47,7 +49,7 @@ class Mention(SpanToken):
class Hashtag(SpanToken):
parse_inner = False
precedence = 10
pattern = re.compile(r"(#[\d\w]+)")
pattern = _HASHTAG_REGEX
def __init__(self, match_obj: re.Match) -> None:
self.target = match_obj.group()
@ -77,27 +79,41 @@ class CustomRenderer(HTMLRenderer):
def render_mention(self, token: Mention) -> str:
mention = token.target
suffix = ""
if mention.endswith("."):
mention = mention[:-1]
suffix = "."
actor = self.mentioned_actors.get(mention)
if not actor:
return mention
self.tags.append(dict(type="Mention", href=actor.ap_id, name=mention))
link = f'<span class="h-card"><a href="{actor.url}" class="u-url mention">{actor.handle}</a></span>' # noqa: E501
link = f'<span class="h-card"><a href="{actor.url}" class="u-url mention">{actor.handle}</a></span>{suffix}' # noqa: E501
return link
def render_hashtag(self, token: Hashtag) -> str:
tag = token.target[1:]
link = f'<a href="{BASE_URL}/t/{tag}" class="mention hashtag" rel="tag">#<span>{tag}</span></a>' # noqa: E501
link = f'<a href="{BASE_URL}/t/{tag.lower()}" class="mention hashtag" rel="tag">#<span>{tag}</span></a>' # noqa: E501
self.tags.append(
dict(href=f"{BASE_URL}/t/{tag}", name=token.target, type="Hashtag")
dict(
href=f"{BASE_URL}/t/{tag.lower()}",
name=token.target.lower(),
type="Hashtag",
)
)
return link
def render_block_code(self, token: typing.Any) -> str:
def render_block_code(self, token: CodeFence) -> str:
lexer_attr = ""
try:
lexer = get_lexer(token.language)
lexer_attr = f' data-microblogpub-lexer="{lexer.aliases[0]}"'
except ClassNotFound:
pass
code = token.children[0].content
lexer = get_lexer(token.language) if token.language else guess_lexer(code)
return highlight(code, lexer, _FORMATTER)
return f"<pre><code{lexer_attr}>\n{code}\n</code></pre>"
async def _prefetch_mentioned_actors(
@ -113,38 +129,50 @@ async def _prefetch_mentioned_actors(
if mention in actors:
continue
_, username, domain = mention.split("@")
actor = (
await db_session.execute(
select(models.Actor).where(
models.Actor.handle == mention,
models.Actor.is_deleted.is_(False),
)
)
).scalar_one_or_none()
if not actor:
actor_url = await webfinger.get_actor_url(mention)
if not actor_url:
# FIXME(ts): raise an error?
continue
actor = await fetch_actor(db_session, actor_url)
# XXX: the regex catches stuff like `@toto@example.com.`
if mention.endswith("."):
mention = mention[:-1]
actors[mention] = actor
try:
_, username, domain = mention.split("@")
actor = (
await db_session.execute(
select(models.Actor).where(
models.Actor.handle == mention,
models.Actor.is_deleted.is_(False),
)
)
).scalar_one_or_none()
if not actor:
actor_url = await webfinger.get_actor_url(mention)
if not actor_url:
# FIXME(ts): raise an error?
continue
actor = await fetch_actor(db_session, actor_url)
actors[mention] = actor
except Exception:
logger.exception(f"Failed to prefetch {mention}")
return actors
def hashtagify(content: str) -> tuple[str, list[dict[str, str]]]:
# TODO: fix this, switch to mistletoe?
def hashtagify(
content: str,
) -> tuple[str, list[dict[str, str]]]:
tags = []
hashtags = re.findall(_HASHTAG_REGEX, content)
hashtags = sorted(set(hashtags), reverse=True) # unique tags, longest first
for hashtag in hashtags:
tag = hashtag[1:]
link = f'<a href="{BASE_URL}/t/{tag}" class="mention hashtag" rel="tag">#<span>{tag}</span></a>' # noqa: E501
tags.append(dict(href=f"{BASE_URL}/t/{tag}", name=hashtag, type="Hashtag"))
content = content.replace(hashtag, link)
return content, tags
with CustomRenderer(
mentioned_actors={},
enable_mentionify=False,
enable_hashtagify=True,
) as renderer:
rendered_content = renderer.render(Document(content))
tags.extend(renderer.tags)
# Handle custom emoji
tags.extend(emoji.tags(content))
return rendered_content, tags
async def markdownify(
@ -173,4 +201,18 @@ async def markdownify(
# Handle custom emoji
tags.extend(emoji.tags(content))
return rendered_content, tags, list(mentioned_actors.values())
return rendered_content, dedup_tags(tags), list(mentioned_actors.values())
def dedup_tags(tags: list[dict[str, str]]) -> list[dict[str, str]]:
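# Deduplicate tags by (type, name) while preserving their original order.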
idx = set()
deduped_tags = []
for tag in tags:
tag_idx = (tag["type"], tag["name"])
if tag_idx in idx:
continue
idx.add(tag_idx)
deduped_tags.append(tag)
return deduped_tags

app/static/common.js Normal file (32 lines)
View File

@ -0,0 +1,32 @@
function hasAudio (video) {
return video.mozHasAudio ||
Boolean(video.webkitAudioDecodedByteCount) ||
Boolean(video.audioTracks && video.audioTracks.length);
}
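// Short, silent videos are treated like GIFs: controls are hidden, the video
// loops, and playback is toggled by hovering instead of autoplaying.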
function setVideoInGIFMode(video) {
if (!hasAudio(video)) {
if (typeof video.loop == 'boolean' && video.duration <= 10.0) {
video.classList.add("video-gif-mode");
video.loop = true;
video.controls = false;
video.addEventListener("mouseover", () => {
video.play();
})
video.addEventListener("mouseleave", () => {
video.pause();
})
}
};
}
var items = document.getElementsByTagName("video")
for (var i = 0; i < items.length; i++) {
if (items[i].duration) {
setVideoInGIFMode(items[i]);
} else {
items[i].addEventListener("loadeddata", function() {
setVideoInGIFMode(this);
});
}
}

View File

@ -1,4 +1,3 @@
import base64
from datetime import datetime
from datetime import timezone
from functools import lru_cache
@ -28,6 +27,7 @@ from app.ap_object import Object
from app.config import BASE_URL
from app.config import CUSTOM_FOOTER
from app.config import DEBUG
from app.config import SESSION_TIMEOUT
from app.config import VERSION
from app.config import generate_csrf_token
from app.config import session_serializer
@ -39,7 +39,7 @@ from app.utils.highlight import HIGHLIGHT_CSS
from app.utils.highlight import highlight
_templates = Jinja2Templates(
directory="app/templates",
directory=["data/templates", "app/templates"], # type: ignore # bad typing
trim_blocks=True,
lstrip_blocks=True,
)
@ -59,13 +59,8 @@ def _filter_domain(text: str) -> str:
def _media_proxy_url(url: str | None) -> str:
if not url:
return "/static/nopic.png"
if url.startswith(BASE_URL):
return url
encoded_url = base64.urlsafe_b64encode(url.encode()).decode()
return f"/proxy/media/{encoded_url}"
return BASE_URL + "/static/nopic.png"
return proxied_media_url(url)
def is_current_user_admin(request: Request) -> bool:
@ -75,10 +70,10 @@ def is_current_user_admin(request: Request) -> bool:
try:
loaded_session = session_serializer.loads(
session_cookie,
max_age=3600 * 12,
max_age=SESSION_TIMEOUT,
)
except Exception:
pass
logger.exception("Failed to validate session timeout")
else:
is_admin = loaded_session.get("is_logged_in")
@ -91,6 +86,7 @@ async def render_template(
template: str,
template_args: dict[str, Any] | None = None,
status_code: int = 200,
headers: dict[str, str] | None = None,
) -> TemplateResponse:
if template_args is None:
template_args = {}
@ -135,6 +131,7 @@ async def render_template(
**template_args,
},
status_code=status_code,
headers=headers,
)
@ -291,6 +288,10 @@ ALLOWED_ATTRIBUTES: dict[str, list[str] | Callable[[str, str, str], bool]] = {
}
def _allow_all_attributes(tag: Any, name: Any, value: Any) -> bool:
return True
@lru_cache(maxsize=256)
def _update_inline_imgs(content):
soup = BeautifulSoup(content, "html5lib")
@ -320,7 +321,11 @@ def _clean_html(html: str, note: Object) -> str:
_update_inline_imgs(highlight(html))
),
tags=ALLOWED_TAGS,
attributes=ALLOWED_ATTRIBUTES,
attributes=(
_allow_all_attributes
if note.ap_id.startswith(config.ID)
else ALLOWED_ATTRIBUTES
),
strip=True,
),
note,
@ -331,6 +336,14 @@ def _clean_html(html: str, note: Object) -> str:
raise
def _clean_html_wm(html: str) -> str:
return bleach.clean(
html,
attributes=ALLOWED_ATTRIBUTES,
strip=True,
)
def _timeago(original_dt: datetime) -> str:
dt = original_dt
if dt.tzinfo:
@ -380,7 +393,7 @@ def _html2text(content: str) -> str:
def _replace_emoji(u: str, _) -> str:
filename = "-".join(hex(ord(c))[2:] for c in u)
return config.EMOJI_TPL.format(filename=filename, raw=u)
return config.EMOJI_TPL.format(base_url=BASE_URL, filename=filename, raw=u)
def _emojify(text: str, is_local: bool) -> str:
@ -407,6 +420,7 @@ def _poll_item_pct(item: ap.RawObject, voters_count: int) -> int:
_templates.env.filters["domain"] = _filter_domain
_templates.env.filters["media_proxy_url"] = _media_proxy_url
_templates.env.filters["clean_html"] = _clean_html
_templates.env.filters["clean_html_wm"] = _clean_html_wm
_templates.env.filters["timeago"] = _timeago
_templates.env.filters["format_date"] = _format_date
_templates.env.filters["has_media_type"] = _has_media_type
@ -421,3 +435,5 @@ _templates.env.globals["CSS_HASH"] = config.CSS_HASH
_templates.env.globals["BASE_URL"] = config.BASE_URL
_templates.env.globals["HIDES_FOLLOWERS"] = config.HIDES_FOLLOWERS
_templates.env.globals["HIDES_FOLLOWING"] = config.HIDES_FOLLOWING
_templates.env.globals["NAVBAR_ITEMS"] = config.NavBarItems
_templates.env.globals["ICON_URL"] = config.CONFIG.icon_url

View File

@ -90,5 +90,5 @@
</p>
</form>
</div>
<script src="/static/new.js?v={{ JS_HASH }}"></script>
<script src="{{ BASE_URL }}/static/new.js?v={{ JS_HASH }}"></script>
{% endblock %}

View File

@ -11,8 +11,8 @@
<ul class="h-feed" id="articles">
<data class="p-name" value="{{ local_actor.display_name}}'s articles"></data>
{% for outbox_object in objects %}
<li>
<span class="muted">{{ outbox_object.ap_published_at.strftime("%b %d, %Y") }}</span> <a href="{{ outbox_object.url }}">{{ outbox_object.name }}</a>
<li class="h-entry">
<time class="muted dt-published" datetime="{{ outbox_object.ap_published_at.isoformat() }}">{{ outbox_object.ap_published_at.strftime("%b %d, %Y") }}</time> <a href="{{ outbox_object.url }}" class="u-url u-uid p-name">{{ outbox_object.name }}</a>
</li>
{% endfor %}
</ul>

View File

@ -0,0 +1,30 @@
{%- import "utils.html" as utils with context -%}
{% extends "layout.html" %}
{% block head %}
<title>{{ title }}</title>
{% if request.url.path == "/" %}
<link rel="indieauth-metadata" href="{{ url_for("well_known_authorization_server") }}">
<link rel="authorization_endpoint" href="{{ url_for("indieauth_authorization_endpoint") }}">
<link rel="token_endpoint" href="{{ url_for("indieauth_token_endpoint") }}">
<link rel="micropub" href="{{ url_for("micropub_endpoint") }}">
<link rel="alternate" href="{{ local_actor.url }}" title="ActivityPub profile" type="application/activity+json">
<meta content="profile" property="og:type" />
<meta content="{{ local_actor.url }}" property="og:url" />
<meta content="{{ local_actor.display_name }}'s microblog" property="og:site_name" />
<meta content="Homepage" property="og:title" />
<meta content="{{ local_actor.summary | html2text | trim }}" property="og:description" />
<meta content="{{ ICON_URL }}" property="og:image" />
<meta content="summary" property="twitter:card" />
<meta content="{{ local_actor.handle }}" property="profile:username" />
{% endif %}
{% endblock %}
{% block content %}
{% include "header.html" %}
<div class="box">
{{ page_content | safe }}
</div>
{% endblock %}

View File

@ -3,6 +3,7 @@
{% block head %}
<title>{{ local_actor.display_name }}'s followers</title>
<meta name="robots" content="noindex, nofollow">
{% endblock %}
{% block content %}

View File

@ -3,6 +3,7 @@
{% block head %}
<title>{{ local_actor.display_name }}'s follows</title>
<meta name="robots" content="noindex, nofollow">
{% endblock %}
{% block content %}

View File

@ -25,13 +25,21 @@
</div>
{%- macro header_link(url, text) -%}
{% set url_for = request.app.router.url_path_for(url) %}
<a href="{{ url_for }}" {% if request.url.path == url_for %}class="active"{% endif %}>{{ text }}</a>
{% set url_for = BASE_URL + request.app.router.url_path_for(url) %}
<a href="{{ url_for }}" {% if BASE_URL + request.url.path == url_for %}class="active"{% endif %}>{{ text }}</a>
{% endmacro %}
{%- macro navbar_item_link(navbar_item) -%}
{% set url_for = BASE_URL + navbar_item[0] %}
<a href="{{ navbar_item[0] }}" {% if BASE_URL + request.url.path == url_for %}class="active"{% endif %}>{{ navbar_item[1] }}</a>
{% endmacro %}
<div class="public-top-menu">
<nav class="flexbox">
<ul>
{% if NAVBAR_ITEMS.INDEX_NAVBAR_ITEM %}
<li>{{ navbar_item_link(NAVBAR_ITEMS.INDEX_NAVBAR_ITEM) }}</li>
{% endif %}
<li>{{ header_link("index", "Notes") }}</li>
{% if articles_count %}
<li>{{ header_link("articles", "Articles") }}</li>
@ -43,6 +51,9 @@
<li>{{ header_link("following", "Following") }} <span class="counter">{{ following_count }}</span></li>
{% endif %}
<li>{{ header_link("get_remote_follow", "Remote follow") }}</li>
{% for navbar_item in NAVBAR_ITEMS.EXTRA_NAVBAR_ITEMS %}
{{ navbar_item_link(navbar_item) }}
{% endfor %}
</ul>
</nav>
</div>

View File

@ -13,7 +13,7 @@
<meta content="{{ local_actor.display_name }}'s microblog" property="og:site_name" />
<meta content="Homepage" property="og:title" />
<meta content="{{ local_actor.summary | html2text | trim }}" property="og:description" />
<meta content="{{ local_actor.url }}" property="og:image" />
<meta content="{{ ICON_URL }}" property="og:image" />
<meta content="summary" property="twitter:card" />
<meta content="{{ local_actor.handle }}" property="profile:username" />
{% endblock %}
@ -26,24 +26,30 @@
<div class="h-feed">
<data class="p-name" value="{{ local_actor.display_name}}'s notes"></data>
{% for outbox_object in objects %}
{% if outbox_object.ap_type in ["Note", "Article", "Video", "Question"] %}
{% if outbox_object.ap_type in ["Note", "Video", "Question"] %}
{{ utils.display_object(outbox_object) }}
{% elif outbox_object.ap_type == "Announce" %}
<div class="shared-header"><strong>{{ utils.display_tiny_actor_icon(local_actor) }} {{ local_actor.display_name | clean_html(local_actor) | safe }}</strong> shared <span title="{{ outbox_object.ap_published_at.isoformat() }}">{{ outbox_object.ap_published_at | timeago }}</span></div>
{{ utils.display_object(outbox_object.relates_to_anybox_object) }}
<div class="h-entry" id="{{ outbox_object.permalink_id }}">
<div class="shared-header"><strong><a class="p-author h-card" href="{{ local_actor.url }}">{{ utils.display_tiny_actor_icon(local_actor) }} {{ local_actor.display_name | clean_html(local_actor) | safe }}</a></strong> shared <span title="{{ outbox_object.ap_published_at.isoformat() }}">{{ outbox_object.ap_published_at | timeago }}</span></div>
<div class="h-cite u-repost-of">
{{ utils.display_object(outbox_object.relates_to_anybox_object, is_h_entry=False) }}
</div>
</div>
{% endif %}
{% endfor %}
</div>
<div class="box">
{% if has_previous_page %}
<a href="{{ url_for("index") }}?page={{ current_page - 1 }}">Previous</a>
{% endif %}
{% if has_previous_page or has_next_page %}
<div class="box">
{% if has_previous_page %}
<a href="{{ url_for("index") }}?page={{ current_page - 1 }}">Previous</a>
{% endif %}
{% if has_next_page %}
<a href="{{ url_for("index") }}?page={{ current_page + 1 }}">Next</a>
{% endif %}
</div>
{% if has_next_page %}
<a href="{{ url_for("index") }}?page={{ current_page + 1 }}">Next</a>
{% endif %}
</div>
{% endif %}
{% else %}
<div class="empty-state">

View File

@ -10,8 +10,12 @@
{% endif %}
<div class="indieauth-details">
<div>
<a class="lcolor" href="{{ client.url }}">{{ client.name }}</a>
<p>wants you to login as <strong class="lcolor">{{ me }}</strong> with the following redirect URI: <code>{{ redirect_uri }}</code>.</p>
{% if client.url %}
<a class="scolor" href="{{ client.url }}">{{ client.name }}</a>
{% else %}
<span class="scolor">{{ client.name }}</span>
{% endif %}
<p>wants you to login{% if me %} as <strong class="lcolor">{{ me }}</strong>{% endif %} with the following redirect URI: <code>{{ redirect_uri }}</code>.</p>
<form method="POST" action="{{ url_for('indieauth_flow') }}" class="form">

View File

@ -4,11 +4,11 @@
<meta charset="utf-8">
<meta http-equiv="x-ua-compatible" content="ie=edge">
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
<link rel="stylesheet" href="/static/css/main.css?v={{ CSS_HASH }}">
<link rel="stylesheet" href="{{ BASE_URL }}/static/css/main.css?v={{ CSS_HASH }}">
<link rel="alternate" title="{{ local_actor.display_name}}'s microblog" type="application/json" href="{{ url_for("json_feed") }}" />
<link rel="alternate" href="{{ url_for("rss_feed") }}" type="application/rss+xml" title="{{ local_actor.display_name}}'s microblog">
<link rel="alternate" href="{{ url_for("atom_feed") }}" type="application/atom+xml" title="{{ local_actor.display_name}}'s microblog">
<link rel="icon" type="image/x-icon" href="/static/favicon.ico">
<link rel="icon" type="image/x-icon" href="{{ BASE_URL }}/static/favicon.ico">
<style>{{ highlight_css }}</style>
{% block head %}{% endblock %}
</head>
@ -18,8 +18,8 @@
{% if is_admin %}
<div id="admin">
{% macro admin_link(url, text) %}
{% set url_for = request.app.router.url_path_for(url) %}
<a href="{{ url_for }}" {% if request.url.path == url_for %}class="active"{% endif %}>{{ text }}</a>
{% set url_for = BASE_URL + request.app.router.url_path_for(url) %}
<a href="{{ url_for }}" {% if BASE_URL + request.url.path == url_for %}class="active"{% endif %}>{{ text }}</a>
{% endmacro %}
<div class="admin-menu">
<nav class="flexbox">
@ -53,7 +53,8 @@
</div>
</footer>
{% if is_admin %}
<script src="/static/common-admin.js?v={{ JS_HASH }}"></script>
<script src="{{ BASE_URL }}/static/common-admin.js?v={{ JS_HASH }}"></script>
{% endif %}
<script src="{{ BASE_URL }}/static/common.js?v={{ JS_HASH }}"></script>
</body>
</html>

View File

@ -1,5 +1,8 @@
{%- import "utils.html" as utils with context -%}
{% extends "layout.html" %}
{% block head %}
<meta name="robots" content="noindex, nofollow">
{% endblock %}
{% block main_tag %} class="main-flex"{% endblock %}
{% block content %}
<div class="centered">
@ -7,7 +10,7 @@
{% if error %}
<p class="primary-color">Invalid password.</p>
{% endif %}
<form class="form" action="/admin/login" method="POST">
<form class="form" action="{{ BASE_URL }}/admin/login" method="POST">
<input type="hidden" name="csrf_token" value="{{ csrf_token }}">
<input type="hidden" name="redirect" value="{{ redirect }}">
<input type="password" placeholder="password" name="password" autofocus>

View File

@ -10,6 +10,9 @@
<a href="{{ url_for("admin_profile") }}?actor_id={{ notif.actor.ap_id }}">
{% if with_icon %}{{ utils.display_tiny_actor_icon(notif.actor) }}{% endif %} {{ notif.actor.display_name | clean_html(notif.actor) | safe }}</a> {{ text }}
<span title="{{ notif.created_at.isoformat() }}">{{ notif.created_at | timeago }}</span>
{% if notif.is_new %}
<span class="new">new</span>
{% endif %}
</div>
{% endmacro %}
@ -36,7 +39,19 @@
{%- elif notif.notification_type.value == "follow_request_rejected" %}
{{ notif_actor_action(notif, "rejected your follow request") }}
{{ utils.display_actor(notif.actor, actors_metadata) }}
{%- elif notif.notification_type.value == "move" %}
{% elif notif.notification_type.value == "blocked" %}
{{ notif_actor_action(notif, "blocked you") }}
{{ utils.display_actor(notif.actor, actors_metadata) }}
{% elif notif.notification_type.value == "unblocked" %}
{{ notif_actor_action(notif, "unblocked you") }}
{{ utils.display_actor(notif.actor, actors_metadata) }}
{% elif notif.notification_type.value == "block" %}
{{ notif_actor_action(notif, "was blocked") }}
{{ utils.display_actor(notif.actor, actors_metadata) }}
{% elif notif.notification_type.value == "unblock" %}
{{ notif_actor_action(notif, "was unblocked") }}
{{ utils.display_actor(notif.actor, actors_metadata) }}
{%- elif notif.notification_type.value == "move" and notif.inbox_object %}
{# for move notif, the actor is the target and the inbox object the Move activity #}
<div class="actor-action">
<a href="{{ url_for("admin_profile") }}?actor_id={{ notif.inbox_object.actor.ap_id }}">
@ -54,8 +69,8 @@
{{ notif_actor_action(notif, "shared a post", with_icon=True) }}
{{ utils.display_object(notif.outbox_object) }}
{% elif notif.notification_type.value == "undo_announce" %}
{{ notif_actor_action(notif, "unshared a post") }}
{{ utils.display_object(notif.outbox_object, with_icon=True) }}
{{ notif_actor_action(notif, "unshared a post", with_icon=True) }}
{{ utils.display_object(notif.outbox_object) }}
{% elif notif.notification_type.value == "mention" %}
{{ notif_actor_action(notif, "mentioned you") }}
{{ utils.display_object(notif.inbox_object) }}

View File

@ -15,7 +15,7 @@
<meta content="article" property="og:type" />
<meta content="{{ outbox_object.url }}" property="og:url" />
<meta content="{{ local_actor.display_name }}'s microblog" property="og:site_name" />
<meta content="{% if outbox_object.name %}{{ name }}{% else %}Note{% endif %}" property="og:title" />
<meta content="{% if outbox_object.name %}{{ outbox_object.name }}{% else %}Note{% endif %}" property="og:title" />
<meta content="{{ excerpt }}" property="og:description" />
<meta content="{{ local_actor.icon_url }}" property="og:image" />
<meta content="summary" property="twitter:card" />
@ -31,9 +31,16 @@
{% macro display_replies_tree(replies_tree_node) %}
{% if replies_tree_node.is_requested %}
{{ utils.display_object(replies_tree_node.ap_object, likes=likes, shares=shares, webmentions=webmentions, expanded=not replies_tree_node.is_root, is_object_page=True) }}
{{ utils.display_object(replies_tree_node.ap_object, likes=likes, shares=shares, webmentions=webmentions, expanded=not replies_tree_node.is_root, is_object_page=True, is_h_entry=False) }}
{% else %}
{{ utils.display_object(replies_tree_node.ap_object) }}
{% if replies_tree_node.wm_reply %}
{# u-comment h-cite is displayed by default for webmention #}
{{ utils.display_webmention_reply(replies_tree_node.wm_reply) }}
{% else %}
<div class="u-comment h-cite">
{{ utils.display_object(replies_tree_node.ap_object, is_h_entry=False) }}
</div>
{% endif %}
{% endif %}
{% for child in replies_tree_node.children %}
@ -42,6 +49,8 @@
{% endmacro %}
<div class="h-entry">
{{ display_replies_tree(replies_tree) }}
</div>
{% endblock %}

View File

@ -0,0 +1,15 @@
{%- import "utils.html" as utils with context -%}
{% extends "layout.html" %}
{% block head %}
<title>{{ local_actor.display_name }}'s microblog - Redirect</title>
{% endblock %}
{% block content %}
{% include "header.html" %}
<div class="box">
<p>You are being redirected to: <a href="{{ url }}">{{ url }}</a></p>
</div>
{% endblock %}

View File

@ -0,0 +1,15 @@
{%- import "utils.html" as utils with context -%}
{% extends "layout.html" %}
{% block head %}
<title>{{ local_actor.display_name }}'s microblog - Redirect</title>
{% endblock %}
{% block content %}
{% include "header.html" %}
<div class="box">
<p>You are being redirected to your instance: <a href="{{ url }}">{{ url }}</a></p>
</div>
{% endblock %}

View File

@ -3,6 +3,7 @@
{% block head %}
<title>Remote follow {{ local_actor.display_name }}</title>
<meta name="robots" content="noindex, nofollow">
{% endblock %}
{% block content %}

View File

@ -3,6 +3,7 @@
{% block head %}
<title>Interact from your instance</title>
<meta name="robots" content="noindex, nofollow">
{% endblock %}
{% block content %}

View File

@ -1,168 +1,254 @@
{% macro embed_csrf_token() %}
{% block embed_csrf_token scoped %}
<input type="hidden" name="csrf_token" value="{{ csrf_token }}">
{% endblock %}
{% endmacro %}
{% macro embed_redirect_url(permalink_id=None) %}
{% block embed_redirect_url scoped %}
<input type="hidden" name="redirect_url" value="{{ request.url }}{% if permalink_id %}#{{ permalink_id }}{% endif %}">
{% endblock %}
{% endmacro %}
{% macro admin_block_button(actor) %}
{% block admin_block_button scoped %}
<form action="{{ request.url_for("admin_actions_block") }}" method="POST">
{{ embed_csrf_token() }}
{{ embed_redirect_url() }}
<input type="hidden" name="ap_actor_id" value="{{ actor.ap_id }}">
<input type="submit" value="block">
</form>
{% endblock %}
{% endmacro %}
{% macro admin_unblock_button(actor) %}
{% block admin_unblock_button scoped %}
<form action="{{ request.url_for("admin_actions_unblock") }}" method="POST">
{{ embed_csrf_token() }}
{{ embed_redirect_url() }}
<input type="hidden" name="ap_actor_id" value="{{ actor.ap_id }}">
<input type="submit" value="unblock">
</form>
{% endblock %}
{% endmacro %}
{% macro admin_hide_shares_button(actor) %}
{% block admin_hide_shares_button scoped %}
<form action="{{ request.url_for("admin_actions_hide_announces") }}" method="POST">
{{ embed_csrf_token() }}
{{ embed_redirect_url() }}
<input type="hidden" name="ap_actor_id" value="{{ actor.ap_id }}">
<input type="submit" value="hide shares">
</form>
{% endblock %}
{% endmacro %}
{% macro admin_show_shares_button(actor) %}
{% block admin_show_shares_button scoped %}
<form action="{{ request.url_for("admin_actions_show_announces") }}" method="POST">
{{ embed_csrf_token() }}
{{ embed_redirect_url() }}
<input type="hidden" name="ap_actor_id" value="{{ actor.ap_id }}">
<input type="submit" value="show shares">
</form>
{% endblock %}
{% endmacro %}
{% macro admin_follow_button(actor) %}
{% block admin_follow_button scoped %}
<form action="{{ request.url_for("admin_actions_follow") }}" method="POST">
{{ embed_csrf_token() }}
{{ embed_redirect_url() }}
<input type="hidden" name="ap_actor_id" value="{{ actor.ap_id }}">
<input type="submit" value="follow">
</form>
{% endblock %}
{% endmacro %}
{% macro admin_accept_incoming_follow_button(notif) %}
{% block admin_accept_incoming_follow_button scoped %}
<form action="{{ request.url_for("admin_actions_accept_incoming_follow") }}" method="POST">
{{ embed_csrf_token() }}
{{ embed_redirect_url() }}
<input type="hidden" name="notification_id" value="{{ notif.id }}">
<input type="submit" value="accept follow">
</form>
{% endblock %}
{% endmacro %}
{% macro admin_reject_incoming_follow_button(notif) %}
{% block admin_reject_incoming_follow_button scoped %}
<form action="{{ request.url_for("admin_actions_reject_incoming_follow") }}" method="POST">
{{ embed_csrf_token() }}
{{ embed_redirect_url() }}
<input type="hidden" name="notification_id" value="{{ notif.id }}">
<input type="submit" value="reject follow">
</form>
{% endblock %}
{% endmacro %}
{% macro admin_like_button(ap_object_id, permalink_id) %}
{% block admin_like_button scoped %}
<form action="{{ request.url_for("admin_actions_like") }}" method="POST">
{{ embed_csrf_token() }}
{{ embed_redirect_url(permalink_id) }}
<input type="hidden" name="ap_object_id" value="{{ ap_object_id }}">
<input type="submit" value="like">
</form>
{% endblock %}
{% endmacro %}
{% macro admin_bookmark_button(ap_object_id, permalink_id) %}
{% block admin_bookmark_button scoped %}
<form action="{{ request.url_for("admin_actions_bookmark") }}" method="POST">
{{ embed_csrf_token() }}
{{ embed_redirect_url(permalink_id) }}
<input type="hidden" name="ap_object_id" value="{{ ap_object_id }}">
<input type="submit" value="bookmark">
</form>
{% endblock %}
{% endmacro %}
{% macro admin_unbookmark_button(ap_object_id, permalink_id) %}
{% block admin_unbookmark_button scoped %}
<form action="{{ request.url_for("admin_actions_unbookmark") }}" method="POST">
{{ embed_csrf_token() }}
{{ embed_redirect_url(permalink_id) }}
<input type="hidden" name="ap_object_id" value="{{ ap_object_id }}">
<input type="submit" value="unbookmark">
</form>
{% endblock %}
{% endmacro %}
{% macro admin_pin_button(ap_object_id, permalink_id) %}
{% block admin_pin_button scoped %}
<form action="{{ request.url_for("admin_actions_pin") }}" method="POST">
{{ embed_csrf_token() }}
{{ embed_redirect_url(permalink_id) }}
<input type="hidden" name="ap_object_id" value="{{ ap_object_id }}">
<input type="submit" value="pin">
</form>
{% endblock %}
{% endmacro %}
{% macro admin_unpin_button(ap_object_id, permalink_id) %}
{% block admin_unpin_button scoped %}
<form action="{{ request.url_for("admin_actions_unpin") }}" method="POST">
{{ embed_csrf_token() }}
{{ embed_redirect_url(permalink_id) }}
<input type="hidden" name="ap_object_id" value="{{ ap_object_id }}">
<input type="submit" value="unpin">
</form>
{% endblock %}
{% endmacro %}
{% macro admin_delete_button(ap_object) %}
{% block admin_delete_button scoped %}
<form action="{{ request.url_for("admin_actions_delete") }}" class="object-delete-form" method="POST">
{{ embed_csrf_token() }}
<input type="hidden" name="redirect_url" value="{% if request.url.path.endswith("/" + ap_object.public_id) or (request.url.path == "/admin/object" and request.query_params.ap_id.endswith("/" + ap_object.public_id)) %}{{ request.base_url}}{% else %}{{ request.url }}{% endif %}">
<input type="hidden" name="ap_object_id" value="{{ ap_object.ap_id }}">
<input type="submit" value="delete">
</form>
{% endblock %}
{% endmacro %}
{% macro admin_force_delete_button(ap_object_id, permalink_id=None) %}
{% block admin_force_delete_button scoped %}
<form action="{{ request.url_for("admin_actions_force_delete") }}" class="object-delete-form" method="POST">
{{ embed_csrf_token() }}
{{ embed_redirect_url(permalink_id) }}
<input type="hidden" name="ap_object_id" value="{{ ap_object_id }}">
<input type="submit" value="local delete">
</form>
{% endblock %}
{% endmacro %}
{% macro admin_force_delete_webmention_button(webmention_id, permalink_id=None) %}
{% block admin_force_delete_webmention_button scoped %}
<form action="{{ request.url_for("admin_actions_force_delete_webmention") }}" class="object-delete-form" method="POST">
{{ embed_csrf_token() }}
{{ embed_redirect_url(permalink_id) }}
<input type="hidden" name="webmention_id" value="{{ webmention_id }}">
<input type="submit" value="local delete">
</form>
{% endblock %}
{% endmacro %}
{% macro admin_announce_button(ap_object_id, permalink_id=None) %}
{% block admin_announce_button scoped %}
<form action="{{ request.url_for("admin_actions_announce") }}" method="POST">
{{ embed_csrf_token() }}
{{ embed_redirect_url(permalink_id) }}
<input type="hidden" name="ap_object_id" value="{{ ap_object_id }}">
<input type="submit" value="share">
</form>
{% endblock %}
{% endmacro %}
{% macro admin_undo_button(ap_object_id, action="undo", permalink_id=None) %}
{% block admin_undo_button scoped %}
<form action="{{ request.url_for("admin_actions_undo") }}" method="POST">
{{ embed_csrf_token() }}
{{ embed_redirect_url(permalink_id) }}
<input type="hidden" name="ap_object_id" value="{{ ap_object_id }}">
<input type="submit" value="{{ action }}">
</form>
{% endblock %}
{% endmacro %}
{% macro admin_reply_button(ap_object_id) %}
<form action="/admin/new" method="GET">
{% block admin_reply_button scoped %}
<form action="{{ BASE_URL }}/admin/new" method="GET">
<input type="hidden" name="in_reply_to" value="{{ ap_object_id }}">
<button type="submit">reply</button>
</form>
{% endblock %}
{% endmacro %}
{% macro admin_dm_button(actor_handle) %}
<form action="/admin/new" method="GET">
{% block admin_dm_button scoped %}
<form action="{{ BASE_URL }}/admin/new" method="GET">
<input type="hidden" name="with_content" value="{{ actor_handle }}">
<input type="hidden" name="with_visibility" value="DIRECT">
<button type="submit">direct message</button>
</form>
{% endblock %}
{% endmacro %}
{% macro admin_mention_button(actor_handle) %}
<form action="/admin/new" method="GET">
{% block admin_mention_button scoped %}
<form action="{{ BASE_URL }}/admin/new" method="GET">
<input type="hidden" name="with_content" value="{{ actor_handle }}">
<button type="submit">mention</button>
</form>
{% endblock %}
{% endmacro %}
{% macro admin_profile_button(ap_actor_id) %}
{% block admin_profile_button scoped %}
<form action="{{ url_for("admin_profile") }}" method="GET">
<input type="hidden" name="actor_id" value="{{ ap_actor_id }}">
<button type="submit">profile</button>
</form>
{% endblock %}
{% endmacro %}
{% macro admin_expand_button(ap_object) %}
{% block admin_expand_button scoped %}
{# TODO turn these into a regular link and append permalink ID if it's a reply #}
<form action="{{ url_for("admin_object") }}" method="GET">
<input type="hidden" name="ap_id" value="{{ ap_object.ap_id }}">
<button type="submit">expand</button>
</form>
{% endblock %}
{% endmacro %}
{% macro display_box_filters(route) %}
{% block display_box_filters scoped %}
<nav class="flexbox box">
<ul>
<li>Filter by</li>
@ -179,13 +265,17 @@
{% endif %}
</ul>
</nav>
{% endblock %}
{% endmacro %}
{% macro display_tiny_actor_icon(actor) %}
<img class="tiny-actor-icon" src="{{ actor.resized_icon_url }}" alt="{{ actor.display_name }}'s avatar">
{% block display_tiny_actor_icon scoped %}
<img class="tiny-actor-icon" src="{{ actor.resized_icon_url }}" alt="">
{% endblock %}
{% endmacro %}
{% macro actor_action(inbox_object, text, with_icon=False) %}
{% block actor_action scoped %}
<div class="actor-action">
<a href="{{ url_for("admin_profile") }}?actor_id={{ inbox_object.actor.ap_id }}">
{% if with_icon %}{{ display_tiny_actor_icon(inbox_object.actor) }}{% endif %} {{ inbox_object.actor.display_name | clean_html(inbox_object.actor) | safe }}
@ -193,9 +283,11 @@
<span title="{{ inbox_object.ap_published_at.isoformat() }}">{{ inbox_object.ap_published_at | timeago }}</span>
</div>
{% endblock %}
{% endmacro %}
{% macro display_actor(actor, actors_metadata={}, embedded=False, with_details=False, pending_incoming_follow_notif=None) %}
{% block display_actor scoped %}
{% set metadata = actors_metadata.get(actor.ap_id) %}
{% if not embedded %}
@ -216,6 +308,9 @@
<div>
<nav class="flexbox actor-metadata">
<ul>
{% if metadata.has_blocked_local_actor %}
<li>blocked you</li>
{% endif %}
{% if metadata.is_following %}
<li>already following</li>
<li>{{ admin_undo_button(metadata.outbox_follow_ap_id, "unfollow")}}</li>
@ -223,8 +318,15 @@
<li>{{ admin_profile_button(actor.ap_id) }}</li>
{% endif %}
{% elif metadata.is_follow_request_sent %}
<li>follow request sent</li>
<li>{{ admin_undo_button(metadata.outbox_follow_ap_id, "undo follow") }}</li>
{% if metadata.is_follow_request_rejected %}
<li>follow request rejected</li>
{% if not metadata.has_blocked_local_actor %}
<li>{{ admin_follow_button(actor) }}</li>
{% endif %}
{% else %}
<li>follow request sent</li>
<li>{{ admin_undo_button(metadata.outbox_follow_ap_id, "undo follow") }}</li>
{% endif %}
{% elif not actor.moved_to %}
<li>{{ admin_follow_button(actor) }}</li>
{% endif %}
@ -263,6 +365,11 @@
<li>rejected</li>
{% endif %}
{% endif %}
{% if actor.are_announces_hidden_from_stream %}
<li>{{ admin_show_shares_button(actor) }}</li>
{% else %}
<li>{{ admin_hide_shares_button(actor) }}</li>
{% endif %}
{% if with_details %}
<li><a href="{{ actor.url }}" class="label-btn">remote profile</a></li>
{% endif %}
@ -296,9 +403,11 @@
</div>
{% endif %}
{% endblock %}
{% endmacro %}
{% macro display_og_meta(object) %}
{% block display_og_meta scoped %}
{% if object.og_meta %}
{% for og_meta in object.og_meta[:1] %}
<div class="activity-og-meta">
@ -316,32 +425,44 @@
</div>
{% endfor %}
{% endif %}
{% endblock %}
{% endmacro %}
{% macro display_attachments(object) %}
{% block display_attachments scoped %}
{% for attachment in object.attachments %}
{% if attachment.type != "PropertyValue" %}
{% set orientation = "unknown" %}
{% if attachment.width %}
{% set orientation = "portrait" if attachment.width < attachment.height else "landscape" %}
{% endif %}
{% if object.sensitive and (attachment.type == "Image" or (attachment | has_media_type("image")) or attachment.type == "Video" or (attachment | has_media_type("video"))) %}
<div class="attachment-wrapper">
<label for="{{attachment.proxied_url}}" class="label-btn show-hide-sensitive-btn">show/hide sensitive content</label>
<div>
<div class="sensitive-attachment">
<input class="sensitive-attachment-state" type="checkbox" id="{{attachment.proxied_url}}" aria-hidden="true">
<div class="sensitive-attachment-box">
<div class="sensitive-attachment-box attachment-orientation-{{orientation}}">
<div></div>
{% else %}
<div class="attachment-item">
<div class="attachment-item attachment-orientation-{{orientation}}">
{% endif %}
{% if attachment.type == "Image" or (attachment | has_media_type("image")) %}
{% if attachment.url not in object.inlined_images %}
<img src="{{ attachment.resized_url or attachment.proxied_url }}"{% if attachment.name %} title="{{ attachment.name }}" alt="{{ attachment.name }}"{% endif %} class="attachment">
<a class="media-link" href="{{ attachment.proxied_url }}" target="_blank">
<img src="{{ attachment.resized_url or attachment.proxied_url }}"{% if attachment.name %} title="{{ attachment.name }}" alt="{{ attachment.name }}"{% endif %} class="attachment u-photo">
</a>
{% endif %}
{% elif attachment.type == "Video" or (attachment | has_media_type("video")) %}
<video controls preload="metadata" src="{{ attachment.url | media_proxy_url }}"{% if attachment.name %} title="{{ attachment.name }}"{% endif %}></video>
<div class="video-wrapper">
<video controls preload="metadata" src="{{ attachment.url | media_proxy_url }}"{% if attachment.name %} title="{{ attachment.name }}"{% endif %} class="u-video"></video>
<div class="video-gif-overlay">GIF</div>
</div>
{% elif attachment.type == "Audio" or (attachment | has_media_type("audio")) %}
<audio controls preload="metadata" src="{{ attachment.url | media_proxy_url }}"{% if attachment.name%} title="{{ attachment.name }}"{% endif %} class="attachment"></audio>
<audio controls preload="metadata" src="{{ attachment.url | media_proxy_url }}"{% if attachment.name%} title="{{ attachment.name }}"{% endif %} class="attachment u-audio"></audio>
{% elif attachment.type == "Link" %}
<a href="{{ attachment.url }}" class="attachment">{{ attachment.url | truncate(64, True) }}</a> ({{ attachment.mimetype}})
{% else %}
@ -357,13 +478,60 @@
{% else %}
</div>
{% endif %}
{% endif %}
{% endfor %}
{% endblock %}
{% endmacro %}
{% macro display_object(object, likes=[], shares=[], webmentions=[], expanded=False, actors_metadata={}, is_object_page=False) %}
{% macro display_webmention_reply(wm_reply) %}
{% block display_webmention_reply scoped %}
<div class="ap-object u-comment h-cite">
<div class="actor-box h-card p-author">
<div class="icon-box">
<img src="{{ wm_reply.face.picture_url }}" alt="{{ wm_reply.face.name }}'s avatar" class="actor-icon u-photo">
</div>
<a href="{{ wm_reply.face.url }}" class="u-url">
<div><strong class="p-name">{{ wm_reply.face.name | clean_html_wm | safe }}</strong></div>
<div class="actor-handle">{{ wm_reply.face.url | truncate(64, True) }}</div>
</a>
</div>
<p class="in-reply-to">in reply to <a href="{{ wm_reply.in_reply_to }}" title="{{ wm_reply.in_reply_to }}" rel="nofollow">
this object
</a></p>
<div class="obj-content margin-top-20">
<div class="e-content">
{{ wm_reply.content | clean_html_wm | safe }}
</div>
</div>
<nav class="flexbox activity-bar margin-top-20">
<ul>
<li>
<div><a href="{{ wm_reply.url }}" rel="nofollow" class="object-permalink u-url u-uid">permalink</a></div>
</li>
<li>
<time class="dt-published" datetime="{{ wm_reply.published_at.replace(microsecond=0).isoformat() }}" title="{{ wm_reply.published_at.replace(microsecond=0).isoformat() }}">{{ wm_reply.published_at | timeago }}</time>
</li>
{% if is_admin %}
<li>
{{ admin_force_delete_webmention_button(wm_reply.webmention_id) }}
</li>
{% endif %}
</ul>
</nav>
</div>
{% endblock %}
{% endmacro %}
{% macro display_object(object, likes=[], shares=[], webmentions=[], expanded=False, actors_metadata={}, is_object_page=False, is_h_entry=True) %}
{% block display_object scoped %}
{% set is_article_mode = object.is_from_outbox and object.ap_type == "Article" and is_object_page %}
{% if object.ap_type in ["Note", "Article", "Video", "Page", "Question"] %}
<div class="ap-object {% if expanded %}ap-object-expanded {% endif %}h-entry" id="{{ object.permalink_id }}">
{% if object.ap_type in ["Note", "Article", "Video", "Page", "Question", "Event"] %}
<div class="ap-object {% if expanded %}ap-object-expanded {% endif %}{% if is_h_entry %}h-entry{% endif %}" id="{{ object.permalink_id }}">
{% if is_article_mode %}
<data class="h-card">
@ -377,25 +545,48 @@
{% if object.in_reply_to %}
<p class="in-reply-to">in reply to <a href="{% if is_admin and object.is_in_reply_to_from_inbox %}{{ url_for("get_lookup") }}?query={% endif %}{{ object.in_reply_to }}" title="{{ object.in_reply_to }}" rel="nofollow">
this {{ object.ap_type|lower }}
this object
</a></p>
{% endif %}
{% if object.ap_type == "Article" %}
{% if object.ap_type in ["Article", "Event"] %}
<h2 class="p-name no-margin-top">{{ object.name }}</h2>
{% endif %}
{% if object.ap_type == "Event" %}
{% if object.ap_object.get("endTime") and object.ap_object.get("startTime") %}
<p>On {{ object.ap_object.startTime | parse_datetime | format_date }}
(ends {{ object.ap_object.endTime | parse_datetime | format_date }})</p>
{% endif %}
{% endif %}
{% if object.ap_object.get("location") %}
{% set loc = object.ap_object.get("location") %}
{% if loc.type == "Place" and loc.latitude and loc.longitude %}
<div class="ap-place">
<h3>Location</h3>
{% if loc.name %}{{ loc.name }}{% endif %}
<span class="h-geo">
<data class="p-latitude" value="{{ loc.latitude}}"></data>
<data class="p-longitude" value="{{ loc.longitude }}"></data>
<a href="https://www.openstreetmap.org/?mlat={{ loc.latitude }}&mlon={{ loc.longitude }}#map=16/{{loc.latitude}}/{{loc.longitude}}">{{loc.latitude}},{{loc.longitude}}</a>
</span>
</div>
{% endif %}
{% endif %}
{% if is_article_mode %}
<time class="dt-published muted" datetime="{{ object.ap_published_at.replace(microsecond=0).isoformat() }}" title="{{ object.ap_published_at.replace(microsecond=0).isoformat() }}">{{ object.ap_published_at.strftime("%b %d, %Y") }}</time>
{% endif %}
{% if object.summary %}
<div class="show-more-wrapper">
<div class="p-summary">
<p>{{ object.summary | clean_html(object) | safe }}</p>
</div>
<label for="show-more-{{ object.permalink_id }}" class="show-more-btn">show/hide more</label>
<input class="show-more-state" type="checkbox" aria-hidden="true" id="show-more-{{ object.permalink_id }}" checked>
<details class="show-more-wrapper">
<summary>
<div class="p-summary">
<p>{{ object.summary | clean_html(object) | safe }}</p>
</div>
<span class="show-more-btn" aria-hidden="true"></span>
</summary>
{% endif %}
<div class="obj-content">
<div class="e-content">
@ -456,7 +647,7 @@
</div>
{% if object.summary %}
</div>
</details>
{% endif %}
<div class="activity-attachment">
@ -591,6 +782,11 @@
{{ admin_expand_button(object) }}
</li>
{% endif %}
{% if object.is_from_inbox and not object.announced_via_outbox_object_ap_id and object.is_local_reply %}
<li>
{{ admin_force_delete_button(object.ap_id) }}
</li>
{% endif %}
</ul>
</nav>
{% endif %}
@ -602,8 +798,8 @@
<div class="interactions-block">Likes
<div class="facepile-wrapper">
{% for like in likes %}
<a href="{% if is_admin %}{{ url_for("admin_profile") }}?actor_id={{ like.actor.ap_id }}{% else %}{{ like.actor.url }}{% endif %}" title="{{ like.actor.handle }}" rel="noreferrer">
<img src="{{ like.actor.resized_icon_url }}" alt="{{ like.actor.handle}}">
<a href="{% if is_admin and like.ap_actor_id %}{{ url_for("admin_profile") }}?actor_id={{ like.ap_actor_id }}{% else %}{{ like.url }}{% endif %}" title="{{ like.name }}" rel="noreferrer">
<img src="{{ like.picture_url }}" alt="{{ like.name }}">
</a>
{% endfor %}
{% if object.likes_count > likes | length %}
@ -619,8 +815,8 @@
<div class="interactions-block">Shares
<div class="facepile-wrapper">
{% for share in shares %}
<a href="{% if is_admin %}{{ url_for("admin_profile") }}?actor_id={{ share.actor.ap_id }}{% else %}{{ share.actor.url }}{% endif %}" title="{{ share.actor.handle }}" rel="noreferrer">
<img src="{{ share.actor.resized_icon_url }}" alt="{{ share.actor.handle}}">
<a href="{% if is_admin and share.ap_actor_id %}{{ url_for("admin_profile") }}?actor_id={{ share.ap_actor_id }}{% else %}{{ share.url }}{% endif %}" title="{{ share.name }}" rel="noreferrer">
<img src="{{ share.picture_url }}" alt="{{ share.name }}">
</a>
{% endfor %}
{% if object.announces_count > shares | length %}
@ -653,4 +849,5 @@
</div>
{% endif %}
{% endblock %}
{% endmacro %}

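For context, the new `Event` handling in `display_object` above renders the object's `name`, an optional `startTime`/`endTime` pair, and a `Place` location with coordinates linked to OpenStreetMap. A minimal, hypothetical ActivityStreams object that would exercise those branches (all values invented for illustration):

```python
sample_event = {
    "@context": "https://www.w3.org/ns/activitystreams",
    "type": "Event",
    "name": "Fediverse meetup",
    "startTime": "2023-01-20T18:00:00+01:00",
    "endTime": "2023-01-20T20:00:00+01:00",
    "location": {
        "type": "Place",
        "name": "Some venue",
        "latitude": 48.8566,
        "longitude": 2.3522,
    },
}
```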
View File

@ -60,7 +60,7 @@ async def save_upload(db_session: AsyncSession, f: UploadFile) -> models.Upload:
destination_image.putdata(original_image.getdata())
destination_image.save(
dest_filename,
format=_original_image.format,
format=_original_image.format, # type: ignore
)
with open(dest_filename, "rb") as dest_f:

View File

@ -0,0 +1,32 @@
from typing import Any
from typing import Awaitable
from typing import Callable
from fastapi import Depends
from fastapi import Request
from fastapi.responses import JSONResponse
from app.actor import LOCAL_ACTOR
from app.config import is_activitypub_requested
from app.database import AsyncSession
from app.database import get_db_session
_Handler = Callable[[Request, AsyncSession], Awaitable[Any]]
def build_custom_index_handler(handler: _Handler) -> _Handler:
async def custom_index(
request: Request,
db_session: AsyncSession = Depends(get_db_session),
) -> Any:
# Serve the AP actor if requested
if is_activitypub_requested(request):
return JSONResponse(
LOCAL_ACTOR.ap_actor,
media_type="application/activity+json",
)
# Defer to the custom handler
return await handler(request, db_session)
return custom_index
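A hypothetical sketch of how a handler matching the `_Handler` signature above might be used; the wiring of the returned coroutine onto the `/` route is not part of this diff:

```python
from typing import Any

from fastapi import Request
from fastapi.responses import PlainTextResponse

from app.database import AsyncSession


async def my_home_page(request: Request, db_session: AsyncSession) -> Any:
    # Hypothetical custom landing page served to regular (non-ActivityPub) clients
    return PlainTextResponse("Welcome!")


# The wrapped handler keeps serving the ActivityPub actor document when the
# request negotiates an ActivityPub content type, and defers to my_home_page
# otherwise.
custom_index = build_custom_index_handler(my_home_page)
```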

View File

@ -23,6 +23,8 @@ def _load_emojis(root_dir: Path, base_url: str) -> None:
mt = mimetypes.guess_type(emoji.name)[0]
if mt and mt.startswith("image/"):
name = emoji.name.split(".")[0]
if not re.match(EMOJI_REGEX, f":{name}:"):
continue
ap_emoji: "RawObject" = {
"type": "Emoji",
"name": f":{name}:",

172
app/utils/facepile.py Normal file
View File

@ -0,0 +1,172 @@
import datetime
from dataclasses import dataclass
from datetime import timezone
from typing import Any
from typing import Optional
from loguru import logger
from app import media
from app.models import InboxObject
from app.models import Webmention
from app.utils.datetime import parse_isoformat
from app.utils.url import must_make_abs
@dataclass
class Face:
ap_actor_id: str | None
url: str
name: str
picture_url: str
created_at: datetime.datetime
@classmethod
def from_inbox_object(cls, like: InboxObject) -> "Face":
return cls(
ap_actor_id=like.actor.ap_id,
url=like.actor.url, # type: ignore
name=like.actor.handle, # type: ignore
picture_url=like.actor.resized_icon_url,
created_at=like.created_at, # type: ignore
)
@classmethod
def from_webmention(cls, webmention: Webmention) -> Optional["Face"]:
items = webmention.source_microformats.get("items", []) # type: ignore
for item in items:
if item["type"][0] == "h-card":
try:
return cls(
ap_actor_id=None,
url=(
must_make_abs(
item["properties"]["url"][0], webmention.source
)
if item["properties"].get("url")
else webmention.source
),
name=item["properties"]["name"][0],
picture_url=media.resized_media_url(
must_make_abs(
item["properties"]["photo"][0], webmention.source
), # type: ignore
50,
),
created_at=webmention.created_at, # type: ignore
)
except Exception:
logger.exception(
f"Failed to build Face for webmention id={webmention.id}"
)
break
elif item["type"][0] == "h-entry":
author = item["properties"]["author"][0]
try:
return cls(
ap_actor_id=None,
url=webmention.source,
name=author["properties"]["name"][0],
picture_url=media.resized_media_url(
must_make_abs(
author["properties"]["photo"][0], webmention.source
), # type: ignore
50,
),
created_at=webmention.created_at, # type: ignore
)
except Exception:
logger.exception(
f"Failed to build Face for webmention id={webmention.id}"
)
break
return None
def merge_faces(faces: list[Face]) -> list[Face]:
return sorted(
faces,
key=lambda f: f.created_at,
reverse=True,
)[:10]
def _parse_face(webmention: Webmention, items: list[dict[str, Any]]) -> Face | None:
for item in items:
if item["type"][0] == "h-card":
try:
return Face(
ap_actor_id=None,
url=(
must_make_abs(item["properties"]["url"][0], webmention.source)
if item["properties"].get("url")
else webmention.source
),
name=item["properties"]["name"][0],
picture_url=media.resized_media_url(
must_make_abs(
item["properties"]["photo"][0], webmention.source
), # type: ignore
50,
),
created_at=webmention.created_at, # type: ignore
)
except Exception:
logger.exception(
f"Failed to build Face for webmention id={webmention.id}"
)
break
return None
@dataclass
class WebmentionReply:
face: Face
content: str
url: str
published_at: datetime.datetime
in_reply_to: str
webmention_id: int
@classmethod
def from_webmention(cls, webmention: Webmention) -> Optional["WebmentionReply"]:
items = webmention.source_microformats.get("items", []) # type: ignore
for item in items:
if item["type"][0] == "h-entry":
try:
face = _parse_face(webmention, item["properties"].get("author", []))
if not face:
logger.info(
"Failed to build WebmentionReply/Face for "
f"webmention id={webmention.id}"
)
break
if "published" in item["properties"]:
published_at = (
parse_isoformat(item["properties"]["published"][0])
.astimezone(timezone.utc)
.replace(tzinfo=None)
)
else:
published_at = webmention.created_at # type: ignore
return cls(
face=face,
content=item["properties"]["content"][0]["html"],
url=must_make_abs(
item["properties"]["url"][0], webmention.source
),
published_at=published_at,
in_reply_to=webmention.target, # type: ignore
webmention_id=webmention.id, # type: ignore
)
except Exception:
logger.exception(
f"Failed to build Face for webmention id={webmention.id}"
)
break
return None

View File

@ -32,23 +32,22 @@ def highlight(html: str) -> str:
# If this comes from a microblog.pub instance we may have the language
# in the class name
if "class" in code.attrs and code.attrs["class"][0].startswith("language-"):
if "data-microblogpub-lexer" in code.attrs:
try:
lexer = get_lexer_by_name(
code.attrs["class"][0].removeprefix("language-")
)
lexer = get_lexer_by_name(code.attrs["data-microblogpub-lexer"])
except Exception:
lexer = guess_lexer(code_content)
else:
lexer = guess_lexer(code_content)
# Replace the code with Pygment output
# XXX: the HTML escaping causes issue with Python type annotations
code_content = code_content.replace(") -&gt; ", ") -> ")
code.parent.replaceWith(
BeautifulSoup(
phighlight(code_content, lexer, _FORMATTER), "html5lib"
).body.next
)
# Replace the code with Pygment output
# XXX: the HTML escaping causes issue with Python type annotations
code_content = code_content.replace(") -&gt; ", ") -> ")
code.parent.replaceWith(
BeautifulSoup(
phighlight(code_content, lexer, _FORMATTER), "html5lib"
).body.next
)
else:
code.name = "div"
code["class"] = code.get("class", []) + ["highlight"]
return soup.body.encode_contents().decode()

View File

@ -10,7 +10,7 @@ from app.utils.url import make_abs
class IndieAuthClient:
logo: str | None
name: str
url: str
url: str | None
def _get_prop(props: dict[str, Any], name: str, default=None) -> Any:

32
app/utils/mastodon.py Normal file
View File

@ -0,0 +1,32 @@
from pathlib import Path
from loguru import logger
from app.webfinger import get_actor_url
def _load_mastodon_following_accounts_csv_file(path: str) -> list[str]:
handles = []
for line in Path(path).read_text().splitlines()[1:]:
handle = line.split(",")[0]
handles.append(handle)
return handles
async def get_actor_urls_from_following_accounts_csv_file(
path: str,
) -> list[tuple[str, str]]:
actor_urls = []
for handle in _load_mastodon_following_accounts_csv_file(path):
try:
actor_url = await get_actor_url(handle)
except Exception:
logger.error("Failed to fetch actor URL for {handle=}")
else:
if actor_url:
actor_urls.append((handle, actor_url))
else:
logger.info(f"No actor URL found for {handle=}")
return actor_urls
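For reference, the loader above skips the header row and keeps the first column of each line as the handle. A rough, illustrative sketch of the CSV layout it expects (the header names follow Mastodon's export format and are an assumption here):

```python
# Illustrative sample only; real files come from Mastodon's "Follows" export.
SAMPLE_CSV = """\
Account address,Show boosts,Notify on new posts,Languages
alice@example.social,true,false,
bob@other.instance,true,false,
"""

def parse_sample(csv_text: str) -> list[str]:
    # Mirrors _load_mastodon_following_accounts_csv_file: skip the header,
    # keep the first comma-separated field of each remaining line.
    return [line.split(",")[0] for line in csv_text.splitlines()[1:] if line]

assert parse_sample(SAMPLE_CSV) == ["alice@example.social", "bob@other.instance"]
```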

View File

@ -1,12 +1,15 @@
import asyncio
import mimetypes
import re
import signal
from concurrent.futures import TimeoutError
from typing import Any
from urllib.parse import urlparse
import httpx
from bs4 import BeautifulSoup # type: ignore
from loguru import logger
from pebble import concurrent # type: ignore
from pydantic import BaseModel
from app import activitypub as ap
@ -29,7 +32,11 @@ class OpenGraphMeta(BaseModel):
site_name: str
@concurrent.process(timeout=5)
def _scrap_og_meta(url: str, html: str) -> OpenGraphMeta | None:
# Prevent SIGTERM from bubbling up to the worker
signal.signal(signal.SIGTERM, signal.SIG_IGN)
soup = BeautifulSoup(html, "html5lib")
ogs = {
og.attrs["property"]: og.attrs.get("content")
@ -55,9 +62,20 @@ def _scrap_og_meta(url: str, html: str) -> OpenGraphMeta | None:
if u := raw.get(maybe_rel):
raw[maybe_rel] = make_abs(u, url)
if not is_url_valid(raw[maybe_rel]):
logger.info(f"Invalid url {raw[maybe_rel]}")
if maybe_rel == "url":
raw["url"] = url
elif maybe_rel == "image":
raw["image"] = None
return OpenGraphMeta.parse_obj(raw)
def scrap_og_meta(url: str, html: str) -> OpenGraphMeta | None:
return _scrap_og_meta(url, html).result()
async def external_urls(
db_session: AsyncSession,
ro: ap_object.RemoteObject | OutboxObject | InboxObject,
@ -126,7 +144,10 @@ async def _og_meta_from_url(url: str) -> OpenGraphMeta | None:
return None
try:
return _scrap_og_meta(url, resp.text)
return scrap_og_meta(url, resp.text)
except TimeoutError:
logger.info(f"Timed out when scraping OG meta for {url}")
return None
except Exception:
logger.info(f"Failed to scrap OG meta for {url}")
return None

8
app/utils/text.py Normal file
View File

@ -0,0 +1,8 @@
import re
import unicodedata
def slugify(text: str) -> str:
value = unicodedata.normalize("NFKC", text)
value = re.sub(r"[^\w\s-]", "", value.lower())
return re.sub(r"[-\s]+", "-", value).strip("-_")
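A quick sketch of the expected behavior of the new `slugify` helper above (derived from the regexes shown; presumably used for article permalinks, see the `/articles/` slug lookup later in this changeset):

```python
assert slugify("Hello, World!") == "hello-world"
assert slugify("  Écriture   de tests  ") == "écriture-de-tests"
```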

View File

@ -21,6 +21,13 @@ def make_abs(url: str | None, parent: str) -> str | None:
)
def must_make_abs(url: str | None, parent: str) -> str:
abs_url = make_abs(url, parent)
if not abs_url:
raise ValueError("missing URL")
return abs_url
class InvalidURLError(Exception):
pass
@ -54,7 +61,7 @@ def is_url_valid(url: str) -> bool:
if not parsed.hostname or parsed.hostname.lower() in ["localhost"]:
return False
if parsed.hostname in BLOCKED_SERVERS:
if is_hostname_blocked(parsed.hostname):
logger.warning(f"{parsed.hostname} is blocked")
return False
@ -81,3 +88,11 @@ def check_url(url: str) -> None:
raise InvalidURLError(f'"{url}" is invalid')
return None
@functools.lru_cache(maxsize=256)
def is_hostname_blocked(hostname: str) -> bool:
for blocked_hostname in BLOCKED_SERVERS:
if hostname == blocked_hostname or hostname.endswith(f".{blocked_hostname}"):
return True
return False
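Taken together, the two helpers added to `app/utils/url.py` behave roughly as sketched below (hostnames and URLs are illustrative only):

```python
# must_make_abs resolves a possibly relative URL against its parent document
# and raises ValueError instead of returning None when nothing can be resolved:
#   must_make_abs("/avatar.png", "https://example.com/notes/1")
#     -> "https://example.com/avatar.png"  (roughly; resolution is make_abs's job)
#   must_make_abs(None, "https://example.com/notes/1") -> ValueError

# is_hostname_blocked matches blocklist entries and their subdomains,
# assuming "blocked.example" is listed in BLOCKED_SERVERS:
#   is_hostname_blocked("blocked.example")      -> True
#   is_hostname_blocked("sub.blocked.example")  -> True
#   is_hostname_blocked("unrelated.example")    -> False
```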

View File

@ -24,7 +24,7 @@ async def _discover_webmention_endoint(url: str) -> str | None:
follow_redirects=True,
)
resp.raise_for_status()
except (httpx.HTTPError, httpx.HTTPStatusError):
except Exception:
logger.exception(f"Failed to discover webmention endpoint for {url}")
return None

View File

@ -69,5 +69,5 @@ class Worker(Generic[T]):
logger.info("stopping loop")
async def _shutdown(self, sig: signal.Signals) -> None:
logger.info(f"Caught {signal=}")
logger.info(f"Caught {sig=}")
self._stop_event.set()

View File

@ -1,3 +1,4 @@
import xml.etree.ElementTree as ET
from typing import Any
from urllib.parse import urlparse
@ -8,32 +9,85 @@ from app import config
from app.utils.url import check_url
async def get_webfinger_via_host_meta(host: str) -> str | None:
resp: httpx.Response | None = None
is_404 = False
async with httpx.AsyncClient() as client:
for i, proto in enumerate({"http", "https"}):
try:
url = f"{proto}://{host}/.well-known/host-meta"
check_url(url)
resp = await client.get(
url,
headers={
"User-Agent": config.USER_AGENT,
},
follow_redirects=True,
)
resp.raise_for_status()
break
except httpx.HTTPStatusError as http_error:
logger.exception("HTTP error")
if http_error.response.status_code in [403, 404, 410]:
is_404 = True
continue
raise
except httpx.HTTPError:
logger.exception("req failed")
# If we tried https first and the domain is "http only"
if i == 0:
continue
break
if is_404:
return None
if resp:
tree = ET.fromstring(resp.text)
maybe_link = tree.find(
"./{http://docs.oasis-open.org/ns/xri/xrd-1.0}Link[@rel='lrdd']"
)
if maybe_link is not None:
return maybe_link.attrib.get("template")
return None
async def webfinger(
resource: str,
webfinger_url: str | None = None,
) -> dict[str, Any] | None: # noqa: C901
"""Mastodon-like WebFinger resolution to retrieve the activity stream Actor URL."""
resource = resource.strip()
logger.info(f"performing webfinger resolution for {resource}")
protos = ["https", "http"]
if resource.startswith("http://"):
protos.reverse()
host = urlparse(resource).netloc
elif resource.startswith("https://"):
host = urlparse(resource).netloc
urls = []
host = None
if webfinger_url:
urls = [webfinger_url]
else:
if resource.startswith("acct:"):
resource = resource[5:]
if resource.startswith("@"):
resource = resource[1:]
_, host = resource.split("@", 1)
resource = "acct:" + resource
if resource.startswith("http://"):
host = urlparse(resource).netloc
url = f"http://{host}/.well-known/webfinger"
elif resource.startswith("https://"):
host = urlparse(resource).netloc
url = f"https://{host}/.well-known/webfinger"
else:
protos = ["https", "http"]
_, host = resource.split("@", 1)
urls = [f"{proto}://{host}/.well-known/webfinger" for proto in protos]
if resource.startswith("acct:"):
resource = resource[5:]
if resource.startswith("@"):
resource = resource[1:]
resource = "acct:" + resource
is_404 = False
resp: httpx.Response | None = None
async with httpx.AsyncClient() as client:
for i, proto in enumerate(protos):
for i, url in enumerate(urls):
try:
url = f"{proto}://{host}/.well-known/webfinger"
check_url(url)
resp = await client.get(
url,
@ -57,7 +111,14 @@ async def webfinger(
if i == 0:
continue
break
if is_404:
if not webfinger_url and host:
if webfinger_url := (await get_webfinger_via_host_meta(host)):
return await webfinger(
resource,
webfinger_url=webfinger_url,
)
return None
if resp:

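For context, the host-meta fallback above looks for an `lrdd` link in an XRD document. A small, self-contained sketch of that parsing step (the sample XML is illustrative, not taken from any specific server):

```python
import xml.etree.ElementTree as ET

SAMPLE_HOST_META = """\
<?xml version="1.0" encoding="UTF-8"?>
<XRD xmlns="http://docs.oasis-open.org/ns/xri/xrd-1.0">
  <Link rel="lrdd" template="https://social.example/.well-known/webfinger?resource={uri}"/>
</XRD>
"""

tree = ET.fromstring(SAMPLE_HOST_META)
link = tree.find("./{http://docs.oasis-open.org/ns/xri/xrd-1.0}Link[@rel='lrdd']")
if link is not None:
    print(link.attrib.get("template"))
    # -> https://social.example/.well-known/webfinger?resource={uri}
```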
View File

@ -1,3 +1,5 @@
from urllib.parse import urlparse
import httpx
from bs4 import BeautifulSoup # type: ignore
from fastapi import APIRouter
@ -6,13 +8,21 @@ from fastapi import HTTPException
from fastapi import Request
from fastapi.responses import JSONResponse
from loguru import logger
from sqlalchemy import func
from sqlalchemy import select
from app import models
from app.boxes import _get_outbox_announces_count
from app.boxes import _get_outbox_likes_count
from app.boxes import _get_outbox_replies_count
from app.boxes import get_outbox_object_by_ap_id
from app.boxes import get_outbox_object_by_slug_and_short_id
from app.boxes import is_notification_enabled
from app.database import AsyncSession
from app.database import get_db_session
from app.utils import microformats
from app.utils.facepile import Face
from app.utils.facepile import WebmentionReply
from app.utils.url import check_url
from app.utils.url import is_url_valid
@ -47,6 +57,7 @@ async def webmention_endpoint(
check_url(source)
check_url(target)
parsed_target_url = urlparse(target)
except Exception:
logger.exception("Invalid webmention request")
raise HTTPException(status_code=400, detail="Invalid payload")
@ -65,6 +76,16 @@ async def webmention_endpoint(
logger.info("Found existing Webmention, will try to update or delete")
mentioned_object = await get_outbox_object_by_ap_id(db_session, target)
if not mentioned_object and parsed_target_url.path.startswith("/articles/"):
try:
_, _, short_id, slug = parsed_target_url.path.split("/")
mentioned_object = await get_outbox_object_by_slug_and_short_id(
db_session, slug, short_id
)
except Exception:
logger.exception(f"Failed to match {target}")
if not mentioned_object:
logger.info(f"Invalid target {target=}")
@ -90,15 +111,21 @@ async def webmention_endpoint(
logger.warning(f"target {target=} not found in source")
if existing_webmention_in_db:
logger.info("Deleting existing Webmention")
mentioned_object.webmentions_count = mentioned_object.webmentions_count - 1
existing_webmention_in_db.is_deleted = True
await db_session.flush()
notif = models.Notification(
notification_type=models.NotificationType.DELETED_WEBMENTION,
outbox_object_id=mentioned_object.id,
webmention_id=existing_webmention_in_db.id,
# Revert side effects
await _handle_webmention_side_effects(
db_session, existing_webmention_in_db, mentioned_object
)
db_session.add(notif)
if is_notification_enabled(models.NotificationType.DELETED_WEBMENTION):
notif = models.Notification(
notification_type=models.NotificationType.DELETED_WEBMENTION,
outbox_object_id=mentioned_object.id,
webmention_id=existing_webmention_in_db.id,
)
db_session.add(notif)
await db_session.commit()
@ -110,36 +137,96 @@ async def webmention_endpoint(
else:
return JSONResponse(content={}, status_code=200)
webmention_type = models.WebmentionType.UNKNOWN
webmention: models.Webmention
if existing_webmention_in_db:
# Undelete if needed
existing_webmention_in_db.is_deleted = False
existing_webmention_in_db.source_microformats = data
await db_session.flush()
webmention = existing_webmention_in_db
notif = models.Notification(
notification_type=models.NotificationType.UPDATED_WEBMENTION,
outbox_object_id=mentioned_object.id,
webmention_id=existing_webmention_in_db.id,
)
db_session.add(notif)
if is_notification_enabled(models.NotificationType.UPDATED_WEBMENTION):
notif = models.Notification(
notification_type=models.NotificationType.UPDATED_WEBMENTION,
outbox_object_id=mentioned_object.id,
webmention_id=existing_webmention_in_db.id,
)
db_session.add(notif)
else:
new_webmention = models.Webmention(
source=source,
target=target,
source_microformats=data,
outbox_object_id=mentioned_object.id,
webmention_type=webmention_type,
)
db_session.add(new_webmention)
await db_session.flush()
webmention = new_webmention
notif = models.Notification(
notification_type=models.NotificationType.NEW_WEBMENTION,
outbox_object_id=mentioned_object.id,
webmention_id=new_webmention.id,
)
db_session.add(notif)
if is_notification_enabled(models.NotificationType.NEW_WEBMENTION):
notif = models.Notification(
notification_type=models.NotificationType.NEW_WEBMENTION,
outbox_object_id=mentioned_object.id,
webmention_id=new_webmention.id,
)
db_session.add(notif)
mentioned_object.webmentions_count = mentioned_object.webmentions_count + 1
# Determine the webmention type
for item in data.get("items", []):
if target in item.get("properties", {}).get(
"in-reply-to", []
) and WebmentionReply.from_webmention(webmention):
webmention_type = models.WebmentionType.REPLY
break
elif target in item.get("properties", {}).get(
"like-of", []
) and Face.from_webmention(webmention):
webmention_type = models.WebmentionType.LIKE
break
elif target in item.get("properties", {}).get(
"repost-of", []
) and Face.from_webmention(webmention):
webmention_type = models.WebmentionType.REPOST
break
if webmention_type != models.WebmentionType.UNKNOWN:
webmention.webmention_type = webmention_type
await db_session.flush()
# Handle side effect
await _handle_webmention_side_effects(db_session, webmention, mentioned_object)
await db_session.commit()
return JSONResponse(content={}, status_code=200)
async def _handle_webmention_side_effects(
db_session: AsyncSession,
webmention: models.Webmention,
mentioned_object: models.OutboxObject,
) -> None:
if webmention.webmention_type == models.WebmentionType.UNKNOWN:
# TODO: recount everything
mentioned_object.webmentions_count = await db_session.scalar(
select(func.count(models.Webmention.id)).where(
models.Webmention.is_deleted.is_(False),
models.Webmention.outbox_object_id == mentioned_object.id,
models.Webmention.webmention_type == models.WebmentionType.UNKNOWN,
)
)
elif webmention.webmention_type == models.WebmentionType.LIKE:
mentioned_object.likes_count = await _get_outbox_likes_count(
db_session, mentioned_object
)
elif webmention.webmention_type == models.WebmentionType.REPOST:
mentioned_object.announces_count = await _get_outbox_announces_count(
db_session, mentioned_object
)
elif webmention.webmention_type == models.WebmentionType.REPLY:
mentioned_object.replies_count = await _get_outbox_replies_count(
db_session, mentioned_object
)
else:
raise ValueError(f"Unhandled {webmention.webmention_type} webmention")
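The type-detection loop above, together with `WebmentionReply.from_webmention` from `app/utils/facepile.py`, operates on parsed microformats2 data. A hand-written, illustrative example of what a reply webmention's `source_microformats` could look like for this code path (all URLs and values are made up):

```python
source_microformats = {
    "items": [
        {
            "type": ["h-entry"],
            "properties": {
                "in-reply-to": ["https://yourdomain.tld/o/abc123"],
                "url": ["https://commenter.example/replies/42"],
                "published": ["2022-12-01T10:00:00+00:00"],
                "content": [{"html": "<p>Great post!</p>", "value": "Great post!"}],
                "author": [
                    {
                        "type": ["h-card"],
                        "properties": {
                            "name": ["Alice"],
                            "url": ["https://commenter.example"],
                            "photo": ["https://commenter.example/avatar.jpg"],
                        },
                    }
                ],
            },
        }
    ]
}
# If the webmention target appears in "in-reply-to" and a Face can be built from
# the embedded h-card, the webmention is stored as WebmentionType.REPLY and the
# target object's replies_count is recomputed.
```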

1
data/templates/app Symbolic link
View File

@ -0,0 +1 @@
../../app/templates/

View File

@ -5,6 +5,7 @@ admin_password = "$2b$12$OwCyZM33uXQUVrChgER.h.qgFJ4fBp6tdFwArR3Lm1LV8NgMvIxVa"
name = "test"
summary = "<p>Hello</p>"
https = false
id = "http://localhost:8000"
icon_url = "https://localhost:8000/static/nopic.png"
secret = "1dd4079e0474d1a519052b8fe3cb5fa6"
debug = true

View File

@ -58,3 +58,24 @@ And check out the result by starting a static server using Python standard libra
cd docs/dist
python -m http.server 8001
```
## Contributing
Contributions/patches are welcome, but please start a discussion in a [ticket](https://todo.sr.ht/~tsileo/microblog.pub) or a [thread in the mailing list](https://lists.sr.ht/~tsileo/microblog.pub-devel) before working on anything substantial.
### Patches
Please ensure your code passes the code quality checks:
```bash
inv autoformat
inv lint
```
And that the test suite passes:
```bash
inv tests
```
Please also consider adding new test cases if needed.

View File

@ -89,6 +89,12 @@ Setup config.
poetry run inv configuration-wizard
```
Set up the database.
```bash
poetry run inv migrate-db
```
Grab your virtualenv path.
```bash
@ -185,6 +191,72 @@ http {
}
```
## (Advanced) Running on a subdomain
It is possible to run microblogpub on a subdomain (`sub.domain.tld`) while remaining reachable from the root domain (`domain.tld`) via the `name@domain.tld` handle.
This requires forwarding/proxying requests from the root domain to the subdomain, for example using NGINX:
```nginx
location /.well-known/webfinger {
add_header Access-Control-Allow-Origin '*';
return 301 https://sub.domain.tld$request_uri;
}
```
And updating `data/profile.toml` to specify the root domain as the webfinger domain:
```toml
webfinger_domain = "domain.tld"
```
Once configured correctly, people will be able to follow you using `name@domain.tld`, while using `sub.domain.tld` for the web interface.
## (Advanced) Running from subpath
It is possible to configure microblogpub to run from a subpath.
To achieve this, apply the following configuration _between_ the config and start steps,
i.e. _after_ you run `make config` or `poetry run inv configuration-wizard`,
but _before_ you run `docker compose up` or `poetry run supervisord`.
Changing these settings on an instance that already has posts or has been seen by other instances will likely break links to those posts and break federation (i.e. links to your instance, posts and profile from other instances).
The following steps explain how to make the instance available at `https://example.com/subdir`.
Change them to your actual domain and subdir.
* Edit the `data/profile.toml` file and add this line:
id = "https://example.com/subdir"
* Edit the `misc/*-supervisord.conf` file that is relevant to you (it depends on how you start microblogpub - if in doubt, make the same change in all of them): in the `[program:uvicorn]` section, in the line starting with `command`, add this argument at the very end: ` --root-path /subdir`
The above two steps are enough to configure microblogpub.
Next, you also need to configure the reverse proxy.
This might differ slightly if you plan to host other services on the same domain, but for the [NGINX config shown above](#reverse-proxy), the following changes are enough:
* Add subdir to location, so location block starts like this:
location /subdir {
* Add `/` at the end of `proxy_pass` directive, like this:
proxy_pass http://localhost:8000/;
These two changes will instruct NGINX that requests sent to `https://example.com/subdir/...` should be forwarded to `http://localhost:8000/...`.
* Inside the `server` block, add redirects for the well-known URLs (add these lines after `client_max_body_size`; remember to replace `subdir` with your actual subdir!):
location /.well-known/webfinger { return 301 /subdir$request_uri; }
location /.well-known/nodeinfo { return 301 /subdir$request_uri; }
location /.well-known/oauth-authorization-server { return 301 /subdir$request_uri; }
* Optionally, [check robots.txt from a running microblogpub instance](https://microblog.pub/robots.txt) and integrate it into the robots.txt file at the root of your server - remember to prepend `subdir` to the URLs, so for example `Disallow: /admin` becomes `Disallow: /subdir/admin`.
## YunoHost edition
[YunoHost](https://yunohost.org/) support is a work in progress.
[YunoHost](https://yunohost.org/) support is available (although it is not an official package for now): <https://git.sr.ht/~tsileo/microblog.pub_ynh>.
## Available tutorial/guides
- [Opalstack](https://community.opalstack.com/d/1055-howto-install-and-run-microblogpub-on-opalstack), thanks to [@defulmere@mastodon.social](https://mastodon.online/@defulmere).

View File

@ -63,7 +63,7 @@ nav a:hover, main a:hover, header p a:hover {
max-width: 960px;
margin: 50px auto;
}
pre code {
pre {
padding: 10px;
overflow: auto;
display: block;

View File

@ -25,9 +25,10 @@ As these two config items define your ActivityPub handle `@handle@domain`.
You can tweak your profile by tweaking these items:
- `name`
- `summary` (using Markdown)
- `icon_url`
- `name`: The name shown with your profile.
- `summary`: The summary or 'bio' part of your profile, written in Markdown.
- `icon_url`: Your profile image or avatar.
- `image_url`: This provides a 'header' or 'banner' image. Note that it is not shown by the default Microblog.pub templates. It will be used by Mastodon (which uses a 3:1 ratio image) and Pleroma. Pixelfed and Peertube, for example, don't show these images by default.
Whenever one of these config items is updated, an `Update` activity will be sent to all known servers to update your remote profile.
@ -35,6 +36,15 @@ The server will need to be restarted for taking changes into account.
Before restarting the server, you can ensure you haven't made any mistakes by running the [configuration checking task](/user_guide.html#configuration-checking).
Note that currently `image_url` is not used anywhere in microblog.pub itself, but other clients/servers do occasionally use it when showing remote profiles as a background image.
Also, this image _can_ be used in microblog.pub - just add this:
```html
<img src="{{ local_actor.image_url | media_proxy_url }}">
```
to an appropriate place in your template (most likely `header.html`).
For more information, see the section about [custom templates](/user_guide.html#custom-templates) later in this document.
### Profile metadata
@ -98,6 +108,39 @@ privacy_replace = [
]
```
### Disabling certain notification types
All notifications are enabled by default.
You can disable specific notifications by adding their types to the `disabled_notifications` list.
This example disables likes and shares notifications:
```toml
disabled_notifications = ["like", "announce"]
```
#### Available notification types
- `new_follower`
- `rejected_follower`
- `unfollow`
- `follow_request_accepted`
- `follow_request_rejected`
- `move`
- `like`
- `undo_like`
- `announce`
- `undo_announce`
- `mention`
- `new_webmention`
- `updated_webmention`
- `deleted_webmention`
- `blocked`
- `unblocked`
- `block`
- `unblock`
### Customization
#### Default emoji
@ -113,6 +156,7 @@ You can copy/paste them from [getemoji.com](https://getemoji.com/).
#### Custom emoji
You can add custom emoji in the `data/custom_emoji` directory and they will be picked automatically.
Do not use exotic characters in the filename - only letters, numbers, and the underscore symbol `_` are allowed.
#### Custom CSS
@ -127,9 +171,48 @@ $secondary-color: #32cd32;
See `app/scss/main.scss` to see what variables can be overridden.
You will need to [recompile CSS](#recompiling-css-files) after making any CSS changes (for the actual CSS files to be updated) and restart microblog.pub (for the CSS link in HTML documents to get a new checksum - otherwise, browsers that downloaded the old CSS will keep using it).
#### Custom favicon
By default, the microblog.pub favicon is a square in the `$primary-color` CSS color (see the section above on how to redefine CSS colors).
You can change it to any icon you like - just save the desired file as `data/favicon.ico`.
After that, run the "[recompile CSS](#recompiling-css-files)" task to copy it to `app/static/favicon.ico`.
#### Custom templates
If you'd like to customize your instance's theme beyond CSS, you can modify the app's HTML by placing templates in `data/templates` that override the defaults in `app/templates`.
Templates are written using the [Jinja](https://jinja.palletsprojects.com/en/latest/templates/) templating language.
Moreover, `utils.html` has scoped blocks around the body of every macro.
This allows macros to be overridden individually in `data/templates/utils.html`, without copying the whole file.
For example, to override only the display of a specific actor's name/icon, you can create a `data/templates/utils.html` file with the following content:
```jinja
{% extends "app/utils.html" %}
{% block display_actor %}
{% if actor.ap_id == "https://me.example.com" %}
<!-- custom actor display -->
{% else %}
{{ super() }}
{% endif %}
{% endblock %}
```
#### Custom Content Security Policy (CSP)
You can override the default Content Security Policy by adding a line in `data/profile.toml`:
```toml
custom_content_security_policy = "default-src 'self'; style-src 'self' 'sha256-{HIGHLIGHT_CSS_HASH}'; frame-ancestors 'none'; base-uri 'self'; form-action 'self';"
```
This example outputs the default CSP; note that `{HIGHLIGHT_CSS_HASH}` will be dynamically replaced with the correct value (the hash of the CSS needed for syntax highlighting).
#### Code highlighting theme
You can switch to one of the [styles supported by Pygments](https://pygments.org/styles/) by adding a line in `profile.toml`:
You can switch to one of the [styles supported by Pygments](https://pygments.org/styles/) by adding a line in `data/profile.toml`:
```toml
code_highlighting_theme = "solarized-dark"
@ -272,7 +355,7 @@ First you need to grab the "ActivityPub actor URL" for your existing account:
```bash
# For a Python install
poetry run inv webfinger username@domain.tld
poetry run inv webfinger username@instance-you-want-to-move-from.tld
```
Edit the config.
@ -281,7 +364,7 @@ Edit the config.
```bash
# For a Docker install
make account=username@domain.tld webfinger
make account=username@instance-you-want-to-move-from.tld webfinger
```
Edit the config.
@ -291,11 +374,35 @@ Edit the config.
And add a reference to your old/existing account in `profile.toml`:
```toml
also_known_as = "my@old-account.com"
also_known_as = "https://instance-you-want-to-move-form.tld/users/username"
```
Restart the server, and you should be able to complete the move from your existing account.
Note that if you already have a redirect in place on Mastodon, you may have to remove it before initiating the migration.
## Import follows from Mastodon
You can import the list of follows/following accounts from Mastodon.
It requires downloading the "Follows" CSV file from your Mastodon instance via "Settings" / "Import and export" / "Data export".
Then you need to run the import task:
### Python edition
```bash
# For a Python install
poetry run inv import-mastodon-following-accounts following_accounts.csv
```
### Docker edition
```bash
# For a Docker install
make path=following_accounts.csv import-mastodon-following-accounts
```
## Tasks
### Configuration checking
@ -333,13 +440,13 @@ make compile-scss
### Password reset
If you have lost your password, you can generate a new one using the `password-reset` task.
If you have lost your password, you can generate a new one using the `reset-password` task.
#### Python edition
```bash
# shutdown supervisord
poetry run inv password-reset
poetry run inv reset-password
# edit data/profile.toml
# restart supervisord
```
@ -348,7 +455,7 @@ poetry run inv password-reset
```bash
docker compose stop
make password-reset
make reset-password
# edit data/profile.toml
docker compose up -d
```
@ -451,6 +558,7 @@ make self-destruct
If the server is not (re)starting, you can:
- [Ensure that the configuration is valid](/user_guide.html#configuration-checking)
- [Verify if you haven't any syntax error in the custom theme by recompiling the CSS](/user_guide.html#recompiling-css-files)
- Look at the log files
- [Ensure that the configuration is valid](/user_guide.html#configuration-checking).
- [Verify if you haven't any syntax error in the custom theme by recompiling the CSS](/user_guide.html#recompiling-css-files).
- Look at the log files (in `data/uvicorn.log`, `data/incoming.log` and `data/outgoing.log`).
- If the CSS is not working, ensure your reverse proxy is serving the static file correctly.

3731
poetry.lock generated

File diff suppressed because it is too large.

View File

@ -14,11 +14,10 @@ bcrypt = "^3.2.2"
itsdangerous = "^2.1.2"
python-multipart = "^0.0.5"
tomli = "^2.0.1"
httpx = {extras = ["http2"], version = "^0.23.0"}
httpx = {version = "0.23.0", extras = ["http2"]}
SQLAlchemy = {extras = ["asyncio"], version = "^1.4.39"}
alembic = "^1.8.0"
bleach = "^5.0.0"
Markdown = "^3.3.7"
prompt-toolkit = "^3.0.29"
tomli-w = "^1.0.0"
python-dateutil = "^2.8.2"
@ -27,7 +26,6 @@ html5lib = "^1.1"
mf2py = "^1.1.2"
Pygments = "^2.12.0"
loguru = "^0.6.0"
mdx-linkify = "^2.1"
Pillow = "^9.1.1"
blurhash-python = "^1.1.3"
html2text = "^2020.1.16"
@ -46,6 +44,7 @@ uvicorn = {extras = ["standard"], version = "^0.18.3"}
Brotli = "^1.0.9"
greenlet = "^1.1.3"
mistletoe = "^0.9.0"
Pebble = "^5.0.2"
[tool.poetry.dev-dependencies]
black = "^22.3.0"

View File

@ -1,19 +1,115 @@
import re
import shutil
import typing
from pathlib import Path
from typing import Any
from jinja2 import Environment
from jinja2 import FileSystemLoader
from jinja2 import select_autoescape
from markdown import markdown
from mistletoe import Document # type: ignore
from mistletoe import HTMLRenderer # type: ignore
from mistletoe import block_token # type: ignore
from pygments import highlight # type: ignore
from pygments.formatters import HtmlFormatter # type: ignore
from pygments.lexers import get_lexer_by_name as get_lexer # type: ignore
from pygments.lexers import guess_lexer # type: ignore
from app.config import VERSION
from app.source import CustomRenderer
from app.utils.datetime import now
_FORMATTER = HtmlFormatter()
_FORMATTER.noclasses = True
def markdownify(content: str) -> str:
return markdown(
content, extensions=["mdx_linkify", "fenced_code", "codehilite", "toc"]
)
class DocRenderer(CustomRenderer):
def __init__(
self,
depth=5,
omit_title=True,
filter_conds=[],
) -> None:
super().__init__(
enable_mentionify=False,
enable_hashtagify=False,
)
self._headings: list[tuple[int, str, str]] = []
self._ids: set[str] = set()
self.depth = depth
self.omit_title = omit_title
self.filter_conds = filter_conds
@property
def toc(self):
"""
Returns table of contents as a block_token.List instance.
"""
def get_indent(level):
if self.omit_title:
level -= 1
return " " * 4 * (level - 1)
def build_list_item(heading):
level, content, title_id = heading
template = '{indent}- <a href="#{id}" rel="nofollow">{content}</a>\n'
return template.format(
indent=get_indent(level), content=content, id=title_id
)
lines = [build_list_item(heading) for heading in self._headings]
items = block_token.tokenize(lines)
return items[0]
def render_heading(self, token):
"""
Overrides super().render_heading; stores rendered heading first,
then returns it.
"""
template = '<h{level} id="{id}">{inner}</h{level}>'
inner = self.render_inner(token)
title_id = inner.lower().replace(" ", "-")
if title_id in self._ids:
i = 1
while 1:
title_id = f"{title_id}_{i}"
if title_id not in self._ids:
break
self._ids.add(title_id)
rendered = template.format(level=token.level, inner=inner, id=title_id)
content = self.parse_rendered_heading(rendered)
if not (
self.omit_title
and token.level == 1
or token.level > self.depth
or any(cond(content) for cond in self.filter_conds)
):
self._headings.append((token.level, content, title_id))
return rendered
@staticmethod
def parse_rendered_heading(rendered):
"""
Helper method; converts rendered heading to plain text.
"""
return re.sub(r"<.+?>", "", rendered)
def render_block_code(self, token: typing.Any) -> str:
code = token.children[0].content
lexer = get_lexer(token.language) if token.language else guess_lexer(code)
return highlight(code, lexer, _FORMATTER)
def markdownify(content: str) -> tuple[str, Any]:
with DocRenderer() as renderer:
rendered_content = renderer.render(Document(content))
with HTMLRenderer() as html_renderer:
toc = html_renderer.render(renderer.toc)
return rendered_content, toc
def main() -> None:
@ -30,32 +126,36 @@ def main() -> None:
last_updated = now().replace(second=0, microsecond=0).isoformat()
readme = Path("README.md")
content, toc = markdownify(readme.read_text().removeprefix("# microblog.pub"))
template.stream(
content=markdownify(readme.read_text().removeprefix("# microblog.pub")),
content=content,
version=VERSION,
path="/",
last_updated=last_updated,
).dump("docs/dist/index.html")
install = Path("docs/install.md")
content, toc = markdownify(install.read_text())
template.stream(
content=markdownify(install.read_text()),
content=content.replace("[TOC]", toc),
version=VERSION,
path="/installing.html",
last_updated=last_updated,
).dump("docs/dist/installing.html")
user_guide = Path("docs/user_guide.md")
content, toc = markdownify(user_guide.read_text())
template.stream(
content=markdownify(user_guide.read_text()),
content=content.replace("[TOC]", toc),
version=VERSION,
path="/user_guide.html",
last_updated=last_updated,
).dump("docs/dist/user_guide.html")
developer_guide = Path("docs/developer_guide.md")
content, toc = markdownify(developer_guide.read_text())
template.stream(
content=markdownify(developer_guide.read_text()),
content=content.replace("[TOC]", toc),
version=VERSION,
path="/developer_guide.html",
last_updated=last_updated,

View File

@ -75,9 +75,10 @@ def main() -> None:
proto = "http"
print("Note that you can put your icon/avatar in the static/ directory")
dat["icon_url"] = prompt(
if icon_url := prompt(
"icon URL: ", default=f'{proto}://{dat["domain"]}/static/nopic.png'
)
):
dat["icon_url"] = icon_url
dat["secret"] = os.urandom(16).hex()
with config_file.open("w") as f:

View File

@ -1,17 +1,50 @@
import asyncio
import io
import shutil
import tarfile
from collections import namedtuple
from contextlib import contextmanager
from inspect import getfullargspec
from pathlib import Path
from typing import Generator
from typing import Optional
from unittest.mock import patch
import httpx
import invoke # type: ignore
from invoke import Context # type: ignore
from invoke import run # type: ignore
from invoke import task # type: ignore
def fix_annotations():
"""
Pyinvoke doesn't accept annotations by default; this fixes that
Based on: @zelo's fix in https://github.com/pyinvoke/invoke/pull/606
Context in: https://github.com/pyinvoke/invoke/issues/357
Python 3.11 https://github.com/pyinvoke/invoke/issues/833
"""
ArgSpec = namedtuple("ArgSpec", ["args", "defaults"])
def patched_inspect_getargspec(func):
spec = getfullargspec(func)
return ArgSpec(spec.args, spec.defaults)
org_task_argspec = invoke.tasks.Task.argspec
def patched_task_argspec(*args, **kwargs):
with patch(
target="inspect.getargspec", new=patched_inspect_getargspec, create=True
):
return org_task_argspec(*args, **kwargs)
invoke.tasks.Task.argspec = patched_task_argspec
fix_annotations()
@task
def generate_db_migration(ctx, message):
# type: (Context, str) -> None
@ -45,7 +78,12 @@ def compile_scss(ctx, watch=False):
# type: (Context, bool) -> None
from app.utils.favicon import build_favicon
build_favicon()
favicon_file = Path("data/favicon.ico")
if not favicon_file.exists():
build_favicon()
else:
shutil.copy2(favicon_file, "app/static/favicon.ico")
theme_file = Path("data/_theme.scss")
if not theme_file.exists():
theme_file.write_text("// override vars for theming here")
@ -347,3 +385,40 @@ def check_config(ctx):
sys.exit(1)
else:
print("Config is OK")
@task
def import_mastodon_following_accounts(ctx, path):
# type: (Context, str) -> None
from loguru import logger
from app.boxes import _get_following
from app.boxes import _send_follow
from app.database import async_session
from app.utils.mastodon import get_actor_urls_from_following_accounts_csv_file
async def _import_following() -> int:
count = 0
async with async_session() as db_session:
followings = {
following.ap_actor_id for following in await _get_following(db_session)
}
for (
handle,
actor_url,
) in await get_actor_urls_from_following_accounts_csv_file(path):
if actor_url in followings:
logger.info(f"Already following {handle}")
continue
logger.info(f"Importing {actor_url=}")
await _send_follow(db_session, actor_url)
count += 1
await db_session.commit()
return count
count = asyncio.run(_import_following())
logger.info(f"Import done, {count} follow requests sent")

View File

@ -68,6 +68,20 @@ def build_accept_activity(
}
def build_block_activity(
from_remote_actor: actor.RemoteActor,
for_remote_actor: actor.RemoteActor,
outbox_public_id: str | None = None,
) -> ap.RawObject:
return {
"@context": ap.AS_CTX,
"type": "Block",
"id": from_remote_actor.ap_id + "/block/" + (outbox_public_id or uuid4().hex),
"actor": from_remote_actor.ap_id,
"object": for_remote_actor.ap_id,
}
def build_move_activity(
from_remote_actor: actor.RemoteActor,
for_remote_object: actor.RemoteActor,

View File

@ -20,12 +20,16 @@ async def test_fetch_actor(async_db_session: AsyncSession, respx_mock) -> None:
public_key="pk",
)
respx_mock.get(ra.ap_id).mock(return_value=httpx.Response(200, json=ra.ap_actor))
respx_mock.get(
"https://example.com/.well-known/webfinger",
params={"resource": "acct%3Atoto%40example.com"},
).mock(return_value=httpx.Response(200, json={"subject": "acct:toto@example.com"}))
# When fetching this actor for the first time
saved_actor = await fetch_actor(async_db_session, ra.ap_id)
# Then it has been fetched and saved in DB
assert respx.calls.call_count == 1
assert respx.calls.call_count == 2
assert (
await async_db_session.execute(select(models.Actor))
).scalar_one().ap_id == saved_actor.ap_id
@ -38,7 +42,7 @@ async def test_fetch_actor(async_db_session: AsyncSession, respx_mock) -> None:
assert (
await async_db_session.execute(select(func.count(models.Actor.id)))
).scalar_one() == 1
assert respx.calls.call_count == 1
assert respx.calls.call_count == 2
def test_sqlalchemy_factory(db: Session) -> None:

View File

@ -423,3 +423,53 @@ def test_inbox__move_activity(
).scalar_one()
assert notif.actor.ap_id == new_ra.ap_id
assert notif.inbox_object_id == inbox_activity.id
def test_inbox__block_activity(
db: Session,
client: TestClient,
respx_mock: respx.MockRouter,
) -> None:
# Given a remote actor
ra = setup_remote_actor(respx_mock)
# Which is followed by the local actor
setup_remote_actor_as_following(ra)
# When receiving a Block activity
follow_activity = RemoteObject(
factories.build_block_activity(
from_remote_actor=ra,
for_remote_actor=LOCAL_ACTOR,
),
ra,
)
with mock_httpsig_checker(ra):
response = client.post(
"/inbox",
headers={"Content-Type": ap.AS_CTX},
json=follow_activity.ap_object,
)
# Then the server returns a 202
assert response.status_code == 202
run_process_next_incoming_activity()
# And the actor was saved in DB
saved_actor = db.execute(select(models.Actor)).scalar_one()
assert saved_actor.ap_id == ra.ap_id
# And the Block activity was saved in the inbox
inbox_activity = db.execute(
select(models.InboxObject).where(models.InboxObject.ap_type == "Block")
).scalar_one()
# And a notification was created
notif = db.execute(
select(models.Notification).where(
models.Notification.notification_type == models.NotificationType.BLOCKED
)
).scalar_one()
assert notif.actor.ap_id == ra.ap_id
assert notif.inbox_object_id == inbox_activity.id

19
tests/test_utils.py Normal file
View File

@ -0,0 +1,19 @@
from unittest import mock
import pytest
from app.utils.url import is_hostname_blocked
@pytest.mark.parametrize(
"hostname,should_be_blocked",
[
("example.com", True),
("subdomain.example.com", True),
("example.xyz", False),
],
)
def test_is_hostname_blocked(hostname: str, should_be_blocked: bool) -> None:
with mock.patch("app.utils.url.BLOCKED_SERVERS", ["example.com"]):
is_hostname_blocked.cache_clear()
assert is_hostname_blocked(hostname) is should_be_blocked