An attempt at avoiding Mastodon rate limits

Most tasks don't need to be retried if they have failed; they will be
queued up again in the next batch.
This commit is contained in:
codl 2017-09-16 20:08:06 +02:00
parent 8d82780e32
commit 7c8c49a2ae
No known key found for this signature in database
GPG Key ID: 6CD7C8891ED1233A
1 changed file with 4 additions and 4 deletions

View File

@ -121,7 +121,7 @@ def import_twitter_archive_month(archive_id, month_path):
raise e
@app.task(autoretry_for=(TemporaryError,))
@app.task()
def delete_from_account(account_id):
account = Account.query.get(account_id)
latest_n_posts = (Post.query.with_parent(account)
@ -180,14 +180,14 @@ def refresh_posts(posts):
return lib.mastodon.refresh_posts(posts)
@app.task(autoretry_for=(TemporaryError,))
@app.task()
def refresh_account(account_id):
account = Account.query.get(account_id)
try:
limit = 100
if account.service == 'mastodon':
limit = 5
limit = 3
posts = (Post.query.with_parent(account)
.order_by(db.asc(Post.updated_at)).limit(limit).all())
@ -290,7 +290,7 @@ def refresh_account_with_longest_time_since_refresh():
app.add_periodic_task(120, periodic_cleanup)
app.add_periodic_task(40, queue_fetch_for_most_stale_accounts)
app.add_periodic_task(15, queue_deletes)
app.add_periodic_task(17, queue_deletes)
app.add_periodic_task(60, refresh_account_with_oldest_post)
app.add_periodic_task(180, refresh_account_with_longest_time_since_refresh)