2018-05-07 14:46:59 +02:00
|
|
|
from django import template
|
|
|
|
from bs4 import BeautifulSoup
|
|
|
|
from urllib import parse
|
|
|
|
from django.urls import reverse
|
2018-11-07 19:07:21 +01:00
|
|
|
from django.utils.translation import gettext as _
|
2018-06-06 21:21:00 +02:00
|
|
|
from pdb import set_trace
|
2018-05-07 14:46:59 +02:00
|
|
|
|
|
|
|
# Filter registry for this module; Django's {% load %} machinery looks for
# a module-level `register` to pick up the @register.filter definitions below.
register = template.Library()
|
|
|
|
|
2018-06-06 21:21:00 +02:00
|
|
|
@register.filter
def pdb(element):
    '''Debug-only filter: drop into the Python debugger during rendering.

    Usage in a template: ``{{ something|pdb }}``.  Halts template rendering
    at ``set_trace()`` so the developer can inspect *element* interactively,
    then returns *element* unchanged.  NOTE(review): must not be reachable
    in production templates — it blocks the worker on an interactive prompt.
    '''
    set_trace()
    return element
|
|
|
|
|
2018-05-07 14:46:59 +02:00
|
|
|
@register.filter
def relink_tags(value):
    '''Treat the text as html, and replace tag links with app-internal tag links.

    Currently, this only works for tags in toots coming from Mastodon servers,
    not necessarily GNU Social, Pleroma, or other fediverse servers, because
    it relies on the markup that Mastodon puts on tags.

    FIXME: handle arbitrary tag links
    '''
    # Undo the &apos; entity before parsing so apostrophes survive the
    # html.parser round-trip as plain text.
    value = value.replace('&apos;', "'")
    soup = BeautifulSoup(value, 'html.parser')
    for link in soup.find_all('a', class_='hashtag'):
        try:
            # Mastodon marks tags up as <a class="hashtag"><span>tag</span></a>;
            # use the span's text as the tag name for our own 'tag' route.
            link['href'] = reverse('tag', args=[link.span.string])
        except Exception:
            # Best-effort: a link without the expected <span> child
            # (AttributeError) or with no matching URL pattern
            # (NoReverseMatch) is left as-is rather than breaking the toot.
            # Was a bare `except:`, which also swallowed KeyboardInterrupt
            # and SystemExit.
            continue
    return soup.decode(formatter='html')
|
2018-05-07 14:46:59 +02:00
|
|
|
|
2018-05-09 03:47:17 +02:00
|
|
|
@register.filter
def relink_mentions(value):
    '''Treat the text as html, and replace mention links with app-internal links.

    Currently, this only works for mentions in toots coming from Mastodon servers,
    not necessarily GNU Social, Pleroma, or other fediverse servers, because
    it relies on the markup that Mastodon puts on mentions.

    FIXME: handle arbitrary mention links
    '''
    # Undo the &apos; entity before parsing so apostrophes survive the
    # html.parser round-trip as plain text.
    value = value.replace('&apos;', "'")
    soup = BeautifulSoup(value, 'html.parser')
    for link in soup.find_all('a', class_='mention'):
        try:
            # Mastodon profile URLs look like https://instance/@user;
            # strip the leading "/@" from the path to get the username.
            parsed = parse.urlparse(link['href'])
            instance = parsed.netloc
            user = parsed.path[2:]
            link['href'] = reverse('user', args=[user + '@' + instance])
        except Exception:
            # Best-effort: a mention anchor without an href (KeyError) or
            # with no matching URL pattern (NoReverseMatch) keeps its
            # original target.  The href lookup used to sit outside the
            # try, so a missing attribute crashed the whole filter; it is
            # now covered, and the bare `except:` is narrowed.
            continue
    return soup.decode(formatter='html')
|
2018-05-09 03:47:17 +02:00
|
|
|
|
2018-05-07 14:46:59 +02:00
|
|
|
@register.filter
def relink_toot(value):
    '''Rewrite a toot's mention links and tag links to app-internal URLs.

    Convenience composition of ``relink_mentions`` and ``relink_tags`` so
    templates can apply both in one filter.
    '''
    with_local_mentions = relink_mentions(value)
    return relink_tags(with_local_mentions)
|
2018-05-23 14:27:45 +02:00
|
|
|
|
|
|
|
@register.filter
def localuser(value):
    '''Convert a remote user link to local.

    Maps e.g. ``https://example.social/@alice`` (or ``.../users/alice``)
    to this app's own user page via the ``user`` route, using the
    ``alice@example.social`` form.  On any parse or reverse failure the
    original value is returned unchanged.
    '''
    try:
        parsed = parse.urlparse(value)
        instance = parsed.netloc
        if parsed.path.startswith('/@'):
            # Mastodon "pretty" profile URL: /@username
            user = parsed.path[2:]
        else:
            # Fall back to the last path segment, e.g. /users/username
            user = parsed.path.split('/')[-1]
        local = reverse('user', args=[user + '@' + instance])
    except Exception:
        # Not a parseable URL, or no matching route: keep the value as-is.
        # Narrowed from a bare `except:`, which also caught
        # KeyboardInterrupt/SystemExit.
        local = value
    return local
|
2018-08-17 01:21:51 +02:00
|
|
|
|
|
|
|
@register.filter
def fix_emojos(value, emojos):
    '''Replace recognized custom emoji :shortcodes: in value with image tags.

    ``emojos`` is an iterable of mappings, each expected to provide the keys
    ``shortcode`` and ``url``.  Entries missing those keys, or that are not
    mappings at all, are skipped so the filter stays best-effort.
    '''
    for emojo in emojos:
        try:
            value = value.replace(
                ":%(shortcode)s:" % emojo,
                '<img src="%(url)s" title=":%(shortcode)s:" alt=":%(shortcode)s:" class="emoji">' % emojo)
        except (KeyError, TypeError, ValueError):
            # Malformed emojo entry: only catch what %-formatting can raise
            # (missing key, non-mapping operand, bad format) instead of the
            # previous bare `except:`.
            continue
    return value
|