from celery import Celery, Task
from app import app as flaskapp
from app import db
from model import Session, Account, TwitterArchive, Post, OAuthToken,\
        MastodonInstance, MisskeyInstance
import libforget.twitter
import libforget.mastodon
import libforget.misskey
from datetime import timedelta, datetime, timezone
from time import time
from zipfile import ZipFile
from io import BytesIO, TextIOWrapper
import json
from kombu import Queue
import version
from libforget.exceptions import PermanentError, TemporaryError
import redis
from functools import wraps
import pickle
import logging

app = Celery(
    'tasks',
    broker=flaskapp.config['CELERY_BROKER'],
    task_serializer='pickle',
    task_soft_time_limit=600,
    task_time_limit=1200,
)

app.conf.task_queues = (
    Queue('default', routing_key='celery'),
    Queue('high_prio', routing_key='high'),
    Queue('higher_prio', routing_key='higher'),
)
app.conf.task_default_queue = 'default'
app.conf.task_default_exchange = 'celery'
app.conf.task_default_exchange_type = 'direct'

sentry = None

if 'SENTRY_DSN' in flaskapp.config:
    from raven import Client
    from raven.contrib.celery import register_signal, register_logger_signal
    sentry = Client(
        flaskapp.config['SENTRY_DSN'],
        release=version.get_versions()['version'])
    register_logger_signal(sentry)
    register_signal(sentry)


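# Task base class that closes the SQLAlchemy session once a task finishes,
# so worker processes do not leak open sessions between tasks.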
class DBTask(Task):
    def __call__(self, *args, **kwargs):
        try:
            super().__call__(*args, **kwargs)
        finally:
            db.session.close()


app.Task = DBTask

r = None


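# Decorator that wraps a task in a short-lived Redis lock keyed on the task
# name and its pickled arguments, so at most one copy runs at a time.
# Duplicate invocations are skipped (returning None); the lock expires after
# five minutes in case a worker dies without releasing it.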
def unique(fun):
    global r
    if not r:
        r = redis.StrictRedis.from_url(flaskapp.config['REDIS_URI'])

    @wraps(fun)
    def wrapper(*args, **kwargs):
        logging.info('Checking for dupes for unique task %s',
                     (fun.__name__, args, kwargs))
        key = 'celery_unique_lock:epoch1:{}:{}'.format(
            fun.__name__, pickle.dumps((args, kwargs)))
        has_lock = False
        result = None
        try:
            if r.set(key, 1, nx=True, ex=60 * 5):
                logging.info('No dupes for unique, running task %s',
                             (fun.__name__, args, kwargs))
                has_lock = True
                result = fun(*args, **kwargs)
            else:
                logging.info('Unique task has a dupe, skipping %s',
                             (fun.__name__, args, kwargs))
        finally:
            if has_lock:
                r.delete(key)
        return result

    return wrapper


def noop(*args, **kwargs):
    pass


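# Mark an account dormant because its remote counterpart is suspended or
# otherwise inaccessible, storing a human-readable reason for the user.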
def make_dormant(acc):
    acc.reason = '''
        Your account was temporarily disabled because your {service}
        account was suspended or otherwise inaccessible. By logging into
        it, you have reactivated your account, but be aware that some posts
        may be missing from Forget's database, and it may take some time to
        get back in sync.
        '''.format(service=acc.service)
    acc.dormant = True


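# Fetch a batch of posts for an account. The historic fetch pages backwards
# through the account's history via max_id; once complete, each later call
# fetches the batch of posts newer than the previous batch's end via since_id.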
@app.task(autoretry_for=(TemporaryError, ))
@unique
def fetch_acc(id_):
    account = Account.query.get(id_)
    print("Fetching {}".format(account))
    try:
        if not account.fetch_history_complete:
            oldest = (db.session.query(Post)
                      .with_parent(account, 'posts')
                      .order_by(db.asc(Post.created_at))
                      .first())
            # ^ None if this is our first fetch ever, otherwise the oldest post
            if oldest:
                max_id = oldest.remote_id
            else:
                max_id = None
            since_id = None
        elif account.fetch_current_batch_end:
            oldest = (db.session.query(Post)
                      .with_parent(account, 'posts')
                      .filter(Post.created_at >
                              account.fetch_current_batch_end.created_at)
                      .order_by(db.asc(Post.created_at))
                      .first())
            # ^ None if this is our first fetch of this batch,
            # otherwise oldest of this batch
            if oldest:
                max_id = oldest.remote_id
            else:
                max_id = None
            since_id = account.fetch_current_batch_end.remote_id
        else:
            # we shouldn't get here unless the user had no posts on the
            # service last time we fetched
            max_id = None
            latest = (db.session.query(Post)
                      .with_parent(account, 'posts')
                      .order_by(db.desc(Post.created_at))
                      .limit(1)
                      .scalar())
            # ^ should be None considering the user has no posts
            # will be the latest post in the off chance that something goes weird
            if latest:
                since_id = latest.remote_id
            else:
                since_id = None

        fetch_posts = noop
        if account.service == 'twitter':
            fetch_posts = libforget.twitter.fetch_posts
        elif account.service == 'mastodon':
            fetch_posts = libforget.mastodon.fetch_posts
        elif account.service == 'misskey':
            fetch_posts = libforget.misskey.fetch_posts
        posts = fetch_posts(account, max_id, since_id)

        if posts is None:
            # ???
            raise TemporaryError("Fetching posts went horribly wrong")

        if len([post for post in posts
                if post.remote_id not in (max_id, since_id)]) == 0:
            # if there are no posts other than the edges
            # we either finished the historic fetch
            # or we finished the current batch
            account.fetch_history_complete = True
            batch_end = (Post.query.with_parent(account, 'posts')
                         .order_by(db.desc(Post.created_at)).first())
            if batch_end:
                account.fetch_current_batch_end_id = batch_end.id
            else:
                account.fetch_current_batch_end_id = None
            db.session.commit()

        else:
            for post in posts:
                db.session.merge(post)
            db.session.commit()

        if not account.fetch_history_complete:
            # reschedule immediately if we're still doing the historic fetch
            print("{} is not done fetching history, rescheduling."
                  .format(account))
            fetch_acc.apply_async((id_,), countdown=1)

    except TemporaryError:
        db.session.rollback()
        account.backoff()
    except PermanentError:
        db.session.rollback()
        make_dormant(account)
        if sentry:
            sentry.captureException()
    finally:
        db.session.rollback()
        account.touch_fetch()
        db.session.commit()


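# Import one month of tweets from an uploaded Twitter archive. Each monthly
# data file in the archive starts with a (presumably JavaScript) header line,
# which is skipped before the remainder is parsed as JSON.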
@app.task()
def import_twitter_archive_month(archive_id, month_path):
    ta = TwitterArchive.query.get(archive_id)

    try:

        with ZipFile(BytesIO(ta.body), 'r') as zipfile:
            with TextIOWrapper(zipfile.open(month_path, 'r')) as f:

                # seek past header
                f.readline()

                tweets = json.load(f)

        for tweet in tweets:
            post = libforget.twitter.post_from_api_tweet_object(tweet)

            existing_post = db.session.query(Post).get(post.id)

            if post.author_id != ta.account_id or\
               existing_post and existing_post.author_id != ta.account_id:
                raise Exception("Shenanigans!")

            post = db.session.merge(post)

        ta.chunks_successful = TwitterArchive.chunks_successful + 1
        db.session.commit()

    except Exception as e:
        db.session.rollback()
        ta.chunks_failed = TwitterArchive.chunks_failed + 1
        db.session.commit()
        raise e


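# Delete a single eligible post for this account, honouring the keep-latest,
# keep-younger, favourites, media and direct-message policies. Posts are
# refreshed right before deletion so the decision uses current state.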
@app.task()
@unique
def delete_from_account(account_id):
    account = Account.query.get(account_id)
    if account.next_delete > datetime.now(timezone.utc):
        return

    latest_n_posts = (Post.query.with_parent(account, 'posts')
                      .order_by(db.desc(Post.created_at))
                      .limit(account.policy_keep_latest)
                      .cte(name='latest'))
    posts = (
        Post.query.with_parent(account, 'posts')
        .filter(Post.created_at + account.policy_keep_younger <= db.func.now())
        .filter(~Post.id.in_(db.select((latest_n_posts.c.id, )))))

    if account.policy_keep_favourites != 'none':
        posts = posts.filter(db.or_(
            Post.favourite == (account.policy_keep_favourites == 'deleteonly'),
            Post.is_reblog))
    if account.policy_keep_media != 'none':
        posts = posts.filter(db.or_(
            Post.has_media == (account.policy_keep_media == 'deleteonly'),
            Post.is_reblog))
    if account.policy_keep_direct:
        posts = posts.filter(~Post.direct)

    limit = 100
    if account.service == 'mastodon':
        limit = 10

    posts = posts.order_by(db.func.random()).limit(limit).all()

    to_delete = None

    def is_eligible(post):
        return (post.is_reblog or (
            (account.policy_keep_favourites == 'none' or
             (account.policy_keep_favourites == 'keeponly'
              and not post.favourite) or
             (account.policy_keep_favourites == 'deleteonly'
              and post.favourite)) and
            (account.policy_keep_media == 'none' or
             (account.policy_keep_media == 'keeponly' and not post.has_media)
             or (account.policy_keep_media == 'deleteonly' and post.has_media))
            and (not account.policy_keep_direct or not post.direct)))

    try:
        action = noop
        if account.service == 'twitter':
            action = libforget.twitter.delete
            posts = refresh_posts(posts)
            to_delete = next(filter(is_eligible, posts), None)
        elif account.service == 'mastodon':
            action = libforget.mastodon.delete
            for post in posts:
                refreshed = refresh_posts((post, ))
                if refreshed and is_eligible(refreshed[0]):
                    to_delete = refreshed[0]
                    break
        elif account.service == 'misskey':
            action = libforget.misskey.delete
            posts = refresh_posts(posts)
            to_delete = next(filter(is_eligible, posts), None)

        if to_delete:
            print("Deleting {}".format(to_delete))
            account.touch_delete()
            action(to_delete)
            account.reset_backoff()
        else:
            account.next_delete = db.func.now() + timedelta(minutes=3)

    except TemporaryError:
        db.session.rollback()
        account.backoff()

    finally:
        db.session.commit()


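# Re-fetch the given posts from their home service so favourite, media and
# deletion state are current; dispatches on the service of the first post.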
def refresh_posts(posts):
    posts = list(posts)
    if len(posts) == 0:
        return []

    if posts[0].service == 'twitter':
        return libforget.twitter.refresh_posts(posts)
    elif posts[0].service == 'mastodon':
        return libforget.mastodon.refresh_posts(posts)
    elif posts[0].service == 'misskey':
        return libforget.misskey.refresh_posts(posts)


@app.task()
@unique
def refresh_account(account_id):
    account = Account.query.get(account_id)

    print("Refreshing account {}".format(account))

    try:
        limit = 100
        if account.service == 'mastodon':
            limit = 3
        posts = (Post.query.with_parent(account, 'posts')
                 .order_by(db.asc(Post.updated_at)).limit(limit).all())

        posts = refresh_posts(posts)
        account.touch_refresh()
        account.reset_backoff()
    except TemporaryError:
        db.session.rollback()
        account.backoff()
    except PermanentError:
        db.session.rollback()
        make_dormant(account)
        if sentry:
            sentry.captureException()
    except Exception as e:
        db.session.rollback()
        account.backoff()
        db.session.commit()
        raise e
    finally:
        db.session.commit()


@app.task
@unique
def periodic_cleanup():
    # delete sessions after 48 hours
    (Session.query.filter(
        Session.updated_at < (db.func.now() - timedelta(hours=48)))
     .delete(synchronize_session=False))

    # delete twitter archives after 3 days
    (TwitterArchive.query.filter(
        TwitterArchive.updated_at < (db.func.now() - timedelta(days=3)))
     .delete(synchronize_session=False))

    # delete anonymous oauth tokens after 1 day
    (OAuthToken.query.filter(
        OAuthToken.updated_at < (db.func.now() - timedelta(days=1)))
     .filter(OAuthToken.account_id == None)  # noqa: E711
     .delete(synchronize_session=False))

    # disable and log out users with no tokens
    unreachable = (
        Account.query.outerjoin(Account.tokens)
        .group_by(Account).having(db.func.count(OAuthToken.token) == 0)
        .filter(Account.policy_enabled == True))  # noqa: E712
    for account in unreachable:
        account.force_log_out()
        account.policy_enabled = False
        account.reason = """
            Your account was disabled because Forget no longer had access to
            your {service} account. Perhaps you had revoked it? By logging in,
            you have restored access and you can now re-enable Forget if you
            wish.
            """.format(service=account.service.capitalize())

    db.session.commit()


@app.task
@unique
def queue_fetch_for_most_stale_accounts(
        min_staleness=timedelta(minutes=2), limit=20):
    accs = (Account.query.join(Account.tokens).group_by(Account)
            .filter(Account.last_fetch < db.func.now() - min_staleness)
            .filter(Account.backoff_until < db.func.now())
            .filter(~Account.dormant)
            .order_by(db.asc(Account.last_fetch)).limit(limit))
    for acc in accs:
        fetch_acc.s(acc.id).delay()
    db.session.commit()


@app.task
@unique
def queue_deletes():
    eligible_accounts = (
        Account.query.filter(Account.policy_enabled == True)  # noqa: E712
        .filter(Account.next_delete < db.func.now())
        .filter(Account.backoff_until < db.func.now())
        .filter(~Account.dormant))
    for account in eligible_accounts:
        delete_from_account.s(account.id).apply_async()


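# Refresh the account that owns the single stalest post (oldest updated_at)
# among non-dormant accounts that still have an OAuth token and are not
# backing off.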
@app.task
@unique
def refresh_account_with_oldest_post():
    then = time()
    post = db.session.query(Post).from_statement(db.text("""
        SELECT posts.id, posts.author_id
        FROM posts, accounts, oauth_tokens
        WHERE accounts.id = posts.author_id
        AND accounts.id = oauth_tokens.account_id
        AND accounts.backoff_until < now()
        AND NOT accounts.dormant
        ORDER BY posts.updated_at ASC
        LIMIT 1;
    """).columns(Post.id, Post.author_id)).one_or_none()
    if post:
        aid = post.author_id
        refresh_account(aid)
        now = time()
        logging.info(
            'Refreshed posts for {} for having oldest post in {}s'
            .format(aid, now - then))


@app.task
@unique
def refresh_account_with_longest_time_since_refresh():
    acc = (Account.query.join(Account.tokens).group_by(Account)
           .filter(Account.backoff_until < db.func.now())
           .filter(~Account.dormant)
           .order_by(db.asc(Account.last_refresh)).first())
    if acc:
        refresh_account(acc.id)


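# Bump each active account's instance popularity score (weighted up when the
# account has deletion enabled and for each open session), then renormalise
# all scores so the highest is 20.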
@app.task
def update_mastodon_instances_popularity():
    # bump score for each active account
    for acct in (Account.query.options(db.joinedload(Account.sessions))
                 .filter(~Account.dormant)
                 .filter(Account.id.like('mastodon:%'))):
        instance = MastodonInstance.query.get(acct.mastodon_instance)
        if not instance:
            instance = MastodonInstance(
                instance=acct.mastodon_instance, popularity=10)
            db.session.add(instance)
        amount = 0.01
        if acct.policy_enabled:
            amount = 0.5
        for _ in acct.sessions:
            amount += 0.1
        instance.bump(amount / max(1, instance.popularity))

    # normalise scores so the top is 20
    top_pop = (db.session.query(db.func.max(MastodonInstance.popularity))
               .scalar())
    MastodonInstance.query.update({
        MastodonInstance.popularity:
            MastodonInstance.popularity * 20 / top_pop
    })
    db.session.commit()


@app.task
def update_misskey_instances_popularity():
    # bump score for each active account
    for acct in (Account.query.options(db.joinedload(Account.sessions))
                 .filter(~Account.dormant)
                 .filter(Account.id.like('misskey:%'))):
        instance = MisskeyInstance.query.get(acct.misskey_instance)
        if not instance:
            instance = MisskeyInstance(
                instance=acct.misskey_instance, popularity=10)
            db.session.add(instance)
        amount = 0.01
        if acct.policy_enabled:
            amount = 0.5
        for _ in acct.sessions:
            amount += 0.1
        instance.bump(amount / max(1, instance.popularity))

    # normalise scores so the top is 20
    top_pop = (db.session.query(db.func.max(MisskeyInstance.popularity))
               .scalar())
    MisskeyInstance.query.update({
        MisskeyInstance.popularity:
            MisskeyInstance.popularity * 20 / top_pop
    })
    db.session.commit()


app.add_periodic_task(40, queue_fetch_for_most_stale_accounts)
app.add_periodic_task(9, queue_deletes)
app.add_periodic_task(6, refresh_account_with_oldest_post)
app.add_periodic_task(50, refresh_account_with_longest_time_since_refresh)
app.add_periodic_task(300, periodic_cleanup)
app.add_periodic_task(300, update_mastodon_instances_popularity)
app.add_periodic_task(300, update_misskey_instances_popularity)


if __name__ == '__main__':
    app.worker_main()