code quality fixes

also ran yapf on tasks.py
codl 2018-08-14 03:47:48 +02:00
parent 49d87fd6d4
commit c54390801c
GPG Key ID: 6CD7C8891ED1233A
4 changed files with 71 additions and 86 deletions

.codacy.yml (new file, 1 line added)

@@ -0,0 +1 @@
exclude_paths: ['version.py', 'versioneer.py']

tasks.py (113 changed lines)

@@ -10,15 +10,15 @@ from zipfile import ZipFile
from io import BytesIO, TextIOWrapper
import json
from kombu import Queue
import random
import version
from libforget.exceptions import PermanentError, TemporaryError
import redis
from functools import wraps
import pickle
app = Celery('tasks', broker=flaskapp.config['CELERY_BROKER'],
app = Celery(
    'tasks',
    broker=flaskapp.config['CELERY_BROKER'],
    task_serializer='pickle',
    task_soft_time_limit=600,
    task_time_limit=1200,
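
A note on the two limits kept in this config: task_soft_time_limit=600 raises SoftTimeLimitExceeded inside the task so it can clean up, while task_time_limit=1200 terminates the worker process outright. A minimal sketch of handling the soft limit (the task body and helper names are hypothetical, not from this diff):

    from celery.exceptions import SoftTimeLimitExceeded

    @app.task
    def example_task():
        try:
            do_long_running_work()  # hypothetical; may exceed the 600 s soft limit
        except SoftTimeLimitExceeded:
            # roughly 600 s remain before the 1200 s hard limit kills
            # the worker process, so clean-up has to be quick
            clean_up()  # hypothetical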
@@ -37,7 +37,9 @@ sentry = None
if 'SENTRY_DSN' in flaskapp.config:
    from raven import Client
    from raven.contrib.celery import register_signal, register_logger_signal
    sentry = Client(flaskapp.config['SENTRY_DSN'], release=version.get_versions()['version'])
    sentry = Client(
        flaskapp.config['SENTRY_DSN'],
        release=version.get_versions()['version'])
    register_logger_signal(sentry)
    register_signal(sentry)
@@ -53,6 +55,8 @@ class DBTask(Task):
app.Task = DBTask

r = None


def unique(fun):
    global r
    if not r:
@@ -60,7 +64,8 @@ def unique(fun):
    @wraps(fun)
    def wrapper(*args, **kwargs):
        key = 'celery_unique_lock:{}'.format(pickle.dumps((fun.__name__, args, kwargs)))
        key = 'celery_unique_lock:{}'.format(
            pickle.dumps((fun.__name__, args, kwargs)))
        has_lock = False
        try:
            if r.set(key, 1, nx=True, ex=60 * 5):
@@ -73,7 +78,6 @@ def unique(fun):
    return wrapper


def noop(*args, **kwargs):
    pass
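
The unique decorator reformatted above is a standard Redis lock: set(key, 1, nx=True, ex=60 * 5) succeeds only if the key is absent and auto-expires after five minutes, so a crashed worker cannot hold the lock forever. The same pattern, shown standalone for readability (the connection setup is assumed, as in the module's actual code):

    import pickle
    from functools import wraps

    import redis

    r = redis.StrictRedis()

    def unique(fun):
        @wraps(fun)
        def wrapper(*args, **kwargs):
            # one lock per distinct (function, arguments) combination
            key = 'celery_unique_lock:{}'.format(
                pickle.dumps((fun.__name__, args, kwargs)))
            has_lock = False
            try:
                # nx=True: set only if absent; ex: expire after 5 minutes
                if r.set(key, 1, nx=True, ex=60 * 5):
                    has_lock = True
                    return fun(*args, **kwargs)
            finally:
                if has_lock:
                    r.delete(key)
        return wrapper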
@@ -158,37 +162,28 @@ def delete_from_account(account_id):
    if account.next_delete > datetime.now(timezone.utc):
        return

    latest_n_posts = (Post.query.with_parent(account)
                      .order_by(db.desc(Post.created_at))
                      .limit(account.policy_keep_latest)
    latest_n_posts = (Post.query.with_parent(account).order_by(
        db.desc(Post.created_at)).limit(account.policy_keep_latest)
                      .cte(name='latest'))
    posts = (
        Post.query.with_parent(account)
        .filter(
            Post.created_at + account.policy_keep_younger <= db.func.now())
        .filter(~Post.id.in_(db.select((latest_n_posts.c.id,))))
        .order_by(db.func.random())
        .limit(100).with_for_update().all())
        .filter(Post.created_at + account.policy_keep_younger <= db.func.now())
        .filter(~Post.id.in_(db.select((latest_n_posts.c.id, )))).order_by(
            db.func.random()).limit(100).with_for_update().all())

    to_delete = None

    def is_eligible(post):
        return (
            post.is_reblog or
            (
                (
                    account.policy_keep_favourites == 'none' or
                    (account.policy_keep_favourites == 'keeponly' and not post.favourite) or
                    (account.policy_keep_favourites == 'deleteonly' and post.favourite)
                ) and (
                    account.policy_keep_media == 'none' or
                    (account.policy_keep_media == 'keeponly' and not post.has_media) or
                    (account.policy_keep_media == 'deleteonly' and post.has_media)
                ) and (
                    not account.policy_keep_direct or not post.direct
                )
            )
        )
        return (post.is_reblog or (
            (account.policy_keep_favourites == 'none' or
             (account.policy_keep_favourites == 'keeponly'
              and not post.favourite) or
             (account.policy_keep_favourites == 'deleteonly'
              and post.favourite)) and
            (account.policy_keep_media == 'none' or
             (account.policy_keep_media == 'keeponly' and not post.has_media)
             or (account.policy_keep_media == 'deleteonly' and post.has_media))
            and (not account.policy_keep_direct or not post.direct)))

    try:
        action = noop
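
The reflowed boolean in is_eligible is hard to scan in either the old or the new layout; it applies the same three-way policy check twice, once to favourites and once to media. Factored out, the rule reads like this (an illustrative refactor, not part of the commit):

    def _matches_policy(setting, flag):
        # 'none':       the attribute does not matter
        # 'keeponly':   posts WITH the attribute are kept
        # 'deleteonly': only posts WITH the attribute may be deleted
        return (setting == 'none'
                or (setting == 'keeponly' and not flag)
                or (setting == 'deleteonly' and flag))

    def is_eligible(post):
        return post.is_reblog or (
            _matches_policy(account.policy_keep_favourites, post.favourite)
            and _matches_policy(account.policy_keep_media, post.has_media)
            and (not account.policy_keep_direct or not post.direct))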
@@ -212,7 +207,6 @@ def delete_from_account(account_id):
        else:
            account.next_delete = db.func.now() + timedelta(minutes=3)
    except TemporaryError:
        db.session.rollback()
        account.backoff()
@@ -241,8 +235,8 @@ def refresh_account(account_id):
    limit = 100
    if account.service == 'mastodon':
        limit = 3
    posts = (Post.query.with_parent(account)
             .order_by(db.asc(Post.updated_at)).limit(limit).all())
    posts = (Post.query.with_parent(account).order_by(
        db.asc(Post.updated_at)).limit(limit).all())
    posts = refresh_posts(posts)
    account.touch_refresh()
@@ -263,25 +257,24 @@ def refresh_account(account_id):
@unique
def periodic_cleanup():
    # delete sessions after 48 hours
    (Session.query
     .filter(Session.updated_at < (db.func.now() - timedelta(hours=48)))
    (Session.query.filter(
        Session.updated_at < (db.func.now() - timedelta(hours=48)))
     .delete(synchronize_session=False))

    # delete twitter archives after 3 days
    (TwitterArchive.query
     .filter(TwitterArchive.updated_at < (db.func.now() - timedelta(days=3)))
    (TwitterArchive.query.filter(
        TwitterArchive.updated_at < (db.func.now() - timedelta(days=3)))
     .delete(synchronize_session=False))

    # delete anonymous oauth tokens after 1 day
    (OAuthToken.query
     .filter(OAuthToken.updated_at < (db.func.now() - timedelta(days=1)))
     .filter(OAuthToken.account_id == None)  # noqa: E711
    (OAuthToken.query.filter(
        OAuthToken.updated_at < (db.func.now() - timedelta(days=1))).filter(
            OAuthToken.account_id == None)  # noqa: E711
     .delete(synchronize_session=False))

    # disable and log out users with no tokens
    unreachable = (
        Account.query
        .outerjoin(Account.tokens)
        Account.query.outerjoin(Account.tokens)
        .group_by(Account).having(db.func.count(OAuthToken.token) == 0)
        .filter(Account.policy_enabled == True))  # noqa: E712
    for account in unreachable:
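
The # noqa markers above are deliberate: inside a SQLAlchemy filter, == is overloaded to build SQL, so == None and == True are column expressions that render as IS NULL and IS TRUE, not the identity comparisons flake8 expects. The noqa-free alternative would use .is_() (a style option, not what this commit does):

    (OAuthToken.query
     .filter(OAuthToken.updated_at < (db.func.now() - timedelta(days=1)))
     .filter(OAuthToken.account_id.is_(None))  # same SQL as == None
     .delete(synchronize_session=False))

    unreachable = (Account.query.outerjoin(Account.tokens)
                   .group_by(Account)
                   .having(db.func.count(OAuthToken.token) == 0)
                   .filter(Account.policy_enabled.is_(True)))  # same as == True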
@@ -300,14 +293,11 @@ def periodic_cleanup():
@unique
def queue_fetch_for_most_stale_accounts(
        min_staleness=timedelta(minutes=2), limit=20):
    accs = (Account.query
            .join(Account.tokens).group_by(Account)
    accs = (Account.query.join(Account.tokens).group_by(Account)
            .filter(Account.last_fetch < db.func.now() - min_staleness)
            .filter(Account.backoff_until < db.func.now())
            .filter(~Account.dormant)
            .order_by(db.asc(Account.last_fetch))
            .limit(limit)
            )
            .filter(~Account.dormant).order_by(db.asc(
                Account.last_fetch)).limit(limit))
    for acc in accs:
        fetch_acc.s(acc.id).delay()
    db.session.commit()
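
fetch_acc.s(acc.id).delay() builds a Celery signature and then enqueues it; outside of workflow composition the shorter spelling is equivalent:

    fetch_acc.delay(acc.id)  # same as fetch_acc.s(acc.id).delay()

    # the signature form pays off when composing, e.g. (assuming
    # refresh_account is also a registered task, as the hunk headers suggest):
    from celery import chain
    chain(fetch_acc.s(acc.id), refresh_account.si(acc.id)).delay()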
@@ -330,8 +320,8 @@ def queue_deletes():
def refresh_account_with_oldest_post():
    post = (Post.query.outerjoin(Post.author).join(Account.tokens)
            .filter(Account.backoff_until < db.func.now())
            .filter(~Account.dormant)
            .group_by(Post).order_by(db.asc(Post.updated_at)).first())
            .filter(~Account.dormant).group_by(Post).order_by(
                db.asc(Post.updated_at)).first())
    refresh_account(post.author_id)
@@ -340,23 +330,21 @@ def refresh_account_with_oldest_post():
def refresh_account_with_longest_time_since_refresh():
    acc = (Account.query.join(Account.tokens).group_by(Account)
           .filter(Account.backoff_until < db.func.now())
           .filter(~Account.dormant)
           .order_by(db.asc(Account.last_refresh)).first())
           .filter(~Account.dormant).order_by(db.asc(
               Account.last_refresh)).first())
    refresh_account(acc.id)


@app.task
def update_mastodon_instances_popularity():
    # bump score for each active account
    for acct in (
            Account.query
            .options(db.joinedload(Account.sessions))
            .filter(~Account.dormant)
            .filter(Account.id.like('mastodon:%'))):
    for acct in (Account.query.options(db.joinedload(Account.sessions))
                 .filter(~Account.dormant).filter(
                     Account.id.like('mastodon:%'))):
        instance = MastodonInstance.query.get(acct.mastodon_instance)
        if not instance:
            instance = MastodonInstance(instance=acct.mastodon_instance,
                                        popularity=10)
            instance = MastodonInstance(
                instance=acct.mastodon_instance, popularity=10)
            db.session.add(instance)
        amount = 0.01
        if acct.policy_enabled:
@@ -365,12 +353,9 @@ def update_mastodon_instances_popularity():
            amount += 0.1
        instance.bump(amount / max(1, instance.popularity))

    # normalise scores so the top is 20
    top_pop = (
        db.session.query(db.func.max(MastodonInstance.popularity))
        .scalar()
    )
    top_pop = (db.session.query(db.func.max(MastodonInstance.popularity))
               .scalar())
    MastodonInstance.query.update({
        MastodonInstance.popularity:
        MastodonInstance.popularity * 20 / top_pop
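
The final update is a linear rescale: every popularity is multiplied by 20 / top_pop, so the most popular instance lands exactly on 20 and relative order is preserved. With made-up numbers:

    scores = {'mastodon.social': 80.0, 'example.town': 8.0}  # hypothetical
    top_pop = max(scores.values())                           # 80.0
    scores = {k: v * 20 / top_pop for k, v in scores.items()}
    # -> {'mastodon.social': 20.0, 'example.town': 2.0}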


@@ -5,6 +5,6 @@ from unittest.mock import patch

def test_doit():
    with patch('sys.exit') as _exit:
        with patch('sys.argv') as argv:
        with patch('sys.argv'):
            doit.run(dodo)
    _exit.assert_called_with(0)
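
The only change in this hunk drops an unused binding: the mock is applied purely for its side effect, so naming it tripped the linter's unused-variable check. Side by side:

    with patch('sys.argv') as argv:  # before: 'argv' bound but never read
        doit.run(dodo)
    with patch('sys.argv'):          # after: side effect only
        doit.run(dodo)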


@@ -1,4 +1,3 @@
import re
import version