code quality fixes

also ran yapf on tasks.py
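(For reference, yapf is normally run in place as `yapf -i tasks.py`; the exact invocation and style settings used for this commit are not recorded here.)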
codl 2018-08-14 03:47:48 +02:00
parent 49d87fd6d4
commit c54390801c
No known key found for this signature in database
GPG Key ID: 6CD7C8891ED1233A
4 changed files with 71 additions and 86 deletions

.codacy.yml (new file, 1 line)

@@ -0,0 +1 @@
+exclude_paths: ['version.py', 'versioneer.py']

tasks.py (153 changed lines)

@@ -10,23 +10,23 @@ from zipfile import ZipFile
 from io import BytesIO, TextIOWrapper
 import json
 from kombu import Queue
-import random
 import version
 from libforget.exceptions import PermanentError, TemporaryError
 import redis
 from functools import wraps
 import pickle

-app = Celery('tasks', broker=flaskapp.config['CELERY_BROKER'],
-             task_serializer='pickle',
-             task_soft_time_limit=600,
-             task_time_limit=1200,
-             )
-
+app = Celery(
+    'tasks',
+    broker=flaskapp.config['CELERY_BROKER'],
+    task_serializer='pickle',
+    task_soft_time_limit=600,
+    task_time_limit=1200,
+)
 app.conf.task_queues = (
     Queue('default', routing_key='celery'),
     Queue('high_prio', routing_key='high'),
     Queue('higher_prio', routing_key='higher'),
 )
 app.conf.task_default_queue = 'default'
 app.conf.task_default_exchange = 'celery'
@@ -37,7 +37,9 @@ sentry = None

 if 'SENTRY_DSN' in flaskapp.config:
     from raven import Client
     from raven.contrib.celery import register_signal, register_logger_signal
-    sentry = Client(flaskapp.config['SENTRY_DSN'], release=version.get_versions()['version'])
+    sentry = Client(
+        flaskapp.config['SENTRY_DSN'],
+        release=version.get_versions()['version'])
     register_logger_signal(sentry)
     register_signal(sentry)
@@ -53,6 +55,8 @@ class DBTask(Task):
 app.Task = DBTask
+
 r = None

+
 def unique(fun):
     global r
     if not r:
@@ -60,10 +64,11 @@ def unique(fun):

     @wraps(fun)
     def wrapper(*args, **kwargs):
-        key = 'celery_unique_lock:{}'.format(pickle.dumps((fun.__name__, args, kwargs)))
+        key = 'celery_unique_lock:{}'.format(
+            pickle.dumps((fun.__name__, args, kwargs)))
         has_lock = False
         try:
-            if r.set(key, 1, nx=True, ex=60*5):
+            if r.set(key, 1, nx=True, ex=60 * 5):
                 has_lock = True
                 return fun(*args, **kwargs)
         finally:
@@ -73,7 +78,6 @@ def unique(fun):

     return wrapper

-
 def noop(*args, **kwargs):
     pass
@@ -89,16 +93,16 @@ def make_dormant(acc):
     acc.dormant = True


-@app.task(autoretry_for=(TemporaryError,))
+@app.task(autoretry_for=(TemporaryError, ))
 @unique
 def fetch_acc(id_, cursor=None):
     account = Account.query.get(id_)
     print(f'fetching {account}')
     try:
         action = noop
-        if(account.service == 'twitter'):
+        if (account.service == 'twitter'):
             action = libforget.twitter.fetch_acc
-        elif(account.service == 'mastodon'):
+        elif (account.service == 'mastodon'):
             action = libforget.mastodon.fetch_acc
         cursor = action(account, cursor)
         if cursor:
@@ -158,37 +162,28 @@ def delete_from_account(account_id):
     if account.next_delete > datetime.now(timezone.utc):
         return

-    latest_n_posts = (Post.query.with_parent(account)
-                      .order_by(db.desc(Post.created_at))
-                      .limit(account.policy_keep_latest)
-                      .cte(name='latest'))
+    latest_n_posts = (Post.query.with_parent(account).order_by(
+        db.desc(Post.created_at)).limit(account.policy_keep_latest)
+                      .cte(name='latest'))
     posts = (
         Post.query.with_parent(account)
-        .filter(
-            Post.created_at + account.policy_keep_younger <= db.func.now())
-        .filter(~Post.id.in_(db.select((latest_n_posts.c.id,))))
-        .order_by(db.func.random())
-        .limit(100).with_for_update().all())
+        .filter(Post.created_at + account.policy_keep_younger <= db.func.now())
+        .filter(~Post.id.in_(db.select((latest_n_posts.c.id, )))).order_by(
+            db.func.random()).limit(100).with_for_update().all())

     to_delete = None

     def is_eligible(post):
-        return (
-            post.is_reblog or (
-                (
-                    account.policy_keep_favourites == 'none' or
-                    (account.policy_keep_favourites == 'keeponly' and not post.favourite) or
-                    (account.policy_keep_favourites == 'deleteonly' and post.favourite)
-                ) and (
-                    account.policy_keep_media == 'none' or
-                    (account.policy_keep_media == 'keeponly' and not post.has_media) or
-                    (account.policy_keep_media == 'deleteonly' and post.has_media)
-                ) and (
-                    not account.policy_keep_direct or not post.direct
-                )
-            )
-        )
+        return (post.is_reblog or (
+            (account.policy_keep_favourites == 'none' or
+             (account.policy_keep_favourites == 'keeponly'
+              and not post.favourite) or
+             (account.policy_keep_favourites == 'deleteonly'
+              and post.favourite)) and
+            (account.policy_keep_media == 'none' or
+             (account.policy_keep_media == 'keeponly' and not post.has_media)
+             or (account.policy_keep_media == 'deleteonly' and post.has_media))
+            and (not account.policy_keep_direct or not post.direct)))

     try:
         action = noop
@@ -199,7 +194,7 @@ def delete_from_account(account_id):
         elif account.service == 'mastodon':
             action = libforget.mastodon.delete
         for post in posts:
-            refreshed = refresh_posts((post,))
+            refreshed = refresh_posts((post, ))
             if refreshed and is_eligible(refreshed[0]):
                 to_delete = refreshed[0]
                 break
@@ -212,7 +207,6 @@ def delete_from_account(account_id):
         else:
             account.next_delete = db.func.now() + timedelta(minutes=3)
-
     except TemporaryError:
         db.session.rollback()
         account.backoff()
@@ -241,8 +235,8 @@ def refresh_account(account_id):
     limit = 100
     if account.service == 'mastodon':
         limit = 3
-    posts = (Post.query.with_parent(account)
-             .order_by(db.asc(Post.updated_at)).limit(limit).all())
+    posts = (Post.query.with_parent(account).order_by(
+        db.asc(Post.updated_at)).limit(limit).all())
     posts = refresh_posts(posts)

     account.touch_refresh()
@@ -263,27 +257,26 @@ def refresh_account(account_id):
 @unique
 def periodic_cleanup():
     # delete sessions after 48 hours
-    (Session.query
-     .filter(Session.updated_at < (db.func.now() - timedelta(hours=48)))
+    (Session.query.filter(
+        Session.updated_at < (db.func.now() - timedelta(hours=48)))
      .delete(synchronize_session=False))

     # delete twitter archives after 3 days
-    (TwitterArchive.query
-     .filter(TwitterArchive.updated_at < (db.func.now() - timedelta(days=3)))
+    (TwitterArchive.query.filter(
+        TwitterArchive.updated_at < (db.func.now() - timedelta(days=3)))
      .delete(synchronize_session=False))

     # delete anonymous oauth tokens after 1 day
-    (OAuthToken.query
-     .filter(OAuthToken.updated_at < (db.func.now() - timedelta(days=1)))
-     .filter(OAuthToken.account_id == None)  # noqa: E711
+    (OAuthToken.query.filter(
+        OAuthToken.updated_at < (db.func.now() - timedelta(days=1))).filter(
+            OAuthToken.account_id == None)  # noqa: E711
      .delete(synchronize_session=False))

     # disable and log out users with no tokens
     unreachable = (
-        Account.query
-        .outerjoin(Account.tokens)
+        Account.query.outerjoin(Account.tokens)
         .group_by(Account).having(db.func.count(OAuthToken.token) == 0)
         .filter(Account.policy_enabled == True))  # noqa: E712
     for account in unreachable:
         account.force_log_out()
         account.policy_enabled = False
@@ -300,14 +293,11 @@ def periodic_cleanup():
 @unique
 def queue_fetch_for_most_stale_accounts(
         min_staleness=timedelta(minutes=2), limit=20):
-    accs = (Account.query
-            .join(Account.tokens).group_by(Account)
+    accs = (Account.query.join(Account.tokens).group_by(Account)
            .filter(Account.last_fetch < db.func.now() - min_staleness)
            .filter(Account.backoff_until < db.func.now())
-            .filter(~Account.dormant)
-            .order_by(db.asc(Account.last_fetch))
-            .limit(limit)
-            )
+            .filter(~Account.dormant).order_by(db.asc(
+                Account.last_fetch)).limit(limit))
     for acc in accs:
         fetch_acc.s(acc.id).delay()
     db.session.commit()
@@ -317,10 +307,10 @@ def queue_fetch_for_most_stale_accounts(
 @unique
 def queue_deletes():
     eligible_accounts = (
-        Account.query.filter(Account.policy_enabled == True) # noqa: E712
+        Account.query.filter(Account.policy_enabled == True)  # noqa: E712
         .filter(Account.next_delete < db.func.now())
         .filter(Account.backoff_until < db.func.now())
         .filter(~Account.dormant))

     for account in eligible_accounts:
         delete_from_account.s(account.id).apply_async()
@@ -330,8 +320,8 @@ def queue_deletes():
 def refresh_account_with_oldest_post():
     post = (Post.query.outerjoin(Post.author).join(Account.tokens)
             .filter(Account.backoff_until < db.func.now())
-            .filter(~Account.dormant)
-            .group_by(Post).order_by(db.asc(Post.updated_at)).first())
+            .filter(~Account.dormant).group_by(Post).order_by(
+                db.asc(Post.updated_at)).first())

     refresh_account(post.author_id)
@@ -339,24 +329,22 @@ def refresh_account_with_oldest_post():
 @unique
 def refresh_account_with_longest_time_since_refresh():
     acc = (Account.query.join(Account.tokens).group_by(Account)
           .filter(Account.backoff_until < db.func.now())
-          .filter(~Account.dormant)
-          .order_by(db.asc(Account.last_refresh)).first())
+          .filter(~Account.dormant).order_by(db.asc(
+              Account.last_refresh)).first())
     refresh_account(acc.id)


 @app.task
 def update_mastodon_instances_popularity():
     # bump score for each active account
-    for acct in (
-            Account.query
-            .options(db.joinedload(Account.sessions))
-            .filter(~Account.dormant)
-            .filter(Account.id.like('mastodon:%'))):
+    for acct in (Account.query.options(db.joinedload(Account.sessions))
+                 .filter(~Account.dormant).filter(
+                     Account.id.like('mastodon:%'))):
         instance = MastodonInstance.query.get(acct.mastodon_instance)
         if not instance:
-            instance = MastodonInstance(instance=acct.mastodon_instance,
-                                        popularity=10)
+            instance = MastodonInstance(
+                instance=acct.mastodon_instance, popularity=10)
             db.session.add(instance)
         amount = 0.01
         if acct.policy_enabled:
@@ -365,15 +353,12 @@ def update_mastodon_instances_popularity():
             amount += 0.1
         instance.bump(amount / max(1, instance.popularity))

     # normalise scores so the top is 20
-    top_pop = (
-        db.session.query(db.func.max(MastodonInstance.popularity))
-        .scalar()
-    )
-
+    top_pop = (db.session.query(db.func.max(MastodonInstance.popularity))
+               .scalar())
     MastodonInstance.query.update({
         MastodonInstance.popularity:
         MastodonInstance.popularity * 20 / top_pop
     })

     db.session.commit()
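For context on the decorator touched in the hunks above: `unique` uses a Redis `SET` with `nx=True` (set only if the key is absent) and an `ex` expiry as a best-effort lock, so only one copy of a given task invocation runs at a time. A minimal standalone sketch of that pattern follows; the local Redis connection and the key prefix are illustrative assumptions, not the project's exact wiring:

    import pickle
    from functools import wraps

    import redis

    r = redis.StrictRedis()  # assumes a Redis server on localhost:6379


    def unique(fun):
        # Run fun only if no identical call is already in flight.
        @wraps(fun)
        def wrapper(*args, **kwargs):
            # Key derived from the function name and its arguments.
            key = 'unique_lock:{}'.format(
                pickle.dumps((fun.__name__, args, kwargs)))
            has_lock = False
            try:
                # nx=True: only set if the key does not exist yet;
                # ex=60 * 5: expire after 5 minutes as a safety net.
                if r.set(key, 1, nx=True, ex=60 * 5):
                    has_lock = True
                    return fun(*args, **kwargs)
            finally:
                if has_lock:
                    r.delete(key)
        return wrapper

If the lock is already held, the wrapper returns None and the duplicate invocation is skipped; the expiry guards against a crashed worker never releasing the key.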


@@ -5,6 +5,6 @@ from unittest.mock import patch

 def test_doit():
     with patch('sys.exit') as _exit:
-        with patch('sys.argv') as argv:
+        with patch('sys.argv'):
             doit.run(dodo)
     _exit.assert_called_with(0)


@@ -1,4 +1,3 @@
-import re
 import version