Convert fetchArticleIDsForStatusesWithoutArticlesNewerThanCutoffDate to async await.

Brent Simmons 2024-04-04 18:44:18 -07:00
parent e523e06efe
commit 2b6bb5c615
6 changed files with 116 additions and 128 deletions
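
The change replaces a completion-handler API with an async throwing one. As background, the sketch below shows the general completion-handler-to-async conversion pattern using withCheckedThrowingContinuation; the function names are hypothetical and are not part of this commit.

    // Minimal sketch of the conversion pattern; not NetNewsWire code.
    // `legacyFetchIDs` stands in for any completion-based API.
    func legacyFetchIDs(_ completion: @escaping (Result<Set<String>, Error>) -> Void) {
        completion(.success(["article-1", "article-2"]))
    }

    // The async wrapper suspends until the completion handler fires,
    // then returns the value or rethrows the error.
    func fetchIDs() async throws -> Set<String> {
        try await withCheckedThrowingContinuation { continuation in
            legacyFetchIDs { result in
                continuation.resume(with: result)
            }
        }
    }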


@@ -760,10 +760,11 @@ public enum FetchType {
 	}

 	/// Fetch articleIDs for articles that we should have, but don't. These articles are either (starred) or (newer than the article cutoff date).
-	public func fetchArticleIDsForStatusesWithoutArticlesNewerThanCutoffDate(_ completion: @escaping ArticleIDsCompletionBlock) {
-		database.fetchArticleIDsForStatusesWithoutArticlesNewerThanCutoffDate(completion)
+	public func fetchArticleIDsForStatusesWithoutArticlesNewerThanCutoffDate() async throws -> Set<String>? {
+
+		try await database.articleIDsForStatusesWithoutArticlesNewerThanCutoffDate()
 	}

 	public func unreadCount(for feed: Feed) -> Int {
 		return unreadCounts[feed.feedID] ?? 0
 	}
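
A caller-side sketch of the new method (the wrapping function here is hypothetical, shown only to illustrate the new shape): since the result is optional and the call can throw, callers default the set and handle failures with do/catch.

    // Hypothetical caller; only the account method itself is from this commit.
    func logMissingArticleCount(for account: Account) async {
        do {
            let articleIDs = try await account.fetchArticleIDsForStatusesWithoutArticlesNewerThanCutoffDate() ?? Set<String>()
            print("Missing \(articleIDs.count) articles")
        } catch {
            print("Fetching missing article IDs failed: \(error)")
        }
    }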


@@ -1375,67 +1375,61 @@ private extension FeedbinAccountDelegate {
 			}
 		}
 	}

 	func refreshMissingArticles(_ account: Account, completion: @escaping ((Result<Void, Error>) -> Void)) {
 		os_log(.debug, log: log, "Refreshing missing articles...")

-		account.fetchArticleIDsForStatusesWithoutArticlesNewerThanCutoffDate { result in
-			Task { @MainActor in
-
-				@MainActor func process(_ fetchedArticleIDs: Set<String>) {
-					let group = DispatchGroup()
-					var errorOccurred = false
-
-					let articleIDs = Array(fetchedArticleIDs)
-					let chunkedArticleIDs = articleIDs.chunked(into: 100)
-
-					for chunk in chunkedArticleIDs {
-						group.enter()
-						self.caller.retrieveEntries(articleIDs: chunk) { result in
-							switch result {
-							case .success(let entries):
-								self.processEntries(account: account, entries: entries) { error in
-									Task { @MainActor in
-										group.leave()
-										if error != nil {
-											errorOccurred = true
-										}
-									}
-								}
-							case .failure(let error):
-								errorOccurred = true
-								os_log(.error, log: self.log, "Refresh missing articles failed: %@.", error.localizedDescription)
-								group.leave()
-							}
-						}
-					}
-
-					group.notify(queue: DispatchQueue.main) {
-						Task { @MainActor in
-							self.refreshProgress.completeTask()
-							os_log(.debug, log: self.log, "Done refreshing missing articles.")
-							if errorOccurred {
-								completion(.failure(FeedbinAccountDelegateError.unknown))
-							} else {
-								completion(.success(()))
-							}
-						}
-					}
-				}
-
-				switch result {
-				case .success(let fetchedArticleIDs):
-					process(fetchedArticleIDs)
-				case .failure(let error):
-					self.refreshProgress.completeTask()
-					completion(.failure(error))
-				}
-			}
-		}
+		Task { @MainActor in
+
+			do {
+				let fetchedArticleIDs = try await account.fetchArticleIDsForStatusesWithoutArticlesNewerThanCutoffDate() ?? Set<String>()
+
+				let group = DispatchGroup()
+				var errorOccurred = false
+
+				let articleIDs = Array(fetchedArticleIDs)
+				let chunkedArticleIDs = articleIDs.chunked(into: 100)
+
+				for chunk in chunkedArticleIDs {
+					group.enter()
+					self.caller.retrieveEntries(articleIDs: chunk) { result in
+						switch result {
+						case .success(let entries):
+							self.processEntries(account: account, entries: entries) { error in
+								Task { @MainActor in
+									group.leave()
+									if error != nil {
+										errorOccurred = true
+									}
+								}
+							}
+						case .failure(let error):
+							errorOccurred = true
+							os_log(.error, log: self.log, "Refresh missing articles failed: %@.", error.localizedDescription)
+							group.leave()
+						}
+					}
+				}
+
+				group.notify(queue: DispatchQueue.main) {
+					Task { @MainActor in
+						self.refreshProgress.completeTask()
+						os_log(.debug, log: self.log, "Done refreshing missing articles.")
+						if errorOccurred {
+							completion(.failure(FeedbinAccountDelegateError.unknown))
+						} else {
+							completion(.success(()))
+						}
+					}
+				}
+			} catch {
+				self.refreshProgress.completeTask()
+				completion(.failure(error))
+			}
+		}
 	}
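
The chunking above relies on an Array.chunked(into:) helper that is not part of this diff. A common implementation looks like the sketch below (an assumption about the project's extension, included only so the loop reads on its own):

    extension Array {
        // Splits the array into consecutive slices of at most `size` elements.
        func chunked(into size: Int) -> [[Element]] {
            stride(from: 0, to: count, by: size).map {
                Array(self[$0..<Swift.min($0 + size, count)])
            }
        }
    }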


@@ -12,26 +12,26 @@ import os.log
 final class FeedlyFetchIdsForMissingArticlesOperation: FeedlyOperation, FeedlyEntryIdentifierProviding {

 	private let account: Account
-	private let log: OSLog

 	private(set) var entryIds = Set<String>()

-	init(account: Account, log: OSLog) {
+	init(account: Account) {
 		self.account = account
-		self.log = log
 	}

 	override func run() {
-		account.fetchArticleIDsForStatusesWithoutArticlesNewerThanCutoffDate { result in
-			MainActor.assumeIsolated {
-				switch result {
-				case .success(let articleIds):
-					self.entryIds.formUnion(articleIds)
-					self.didFinish()
-
-				case .failure(let error):
-					self.didFinish(with: error)
-				}
+		Task { @MainActor in
+			do {
+				if let articleIDs = try await account.fetchArticleIDsForStatusesWithoutArticlesNewerThanCutoffDate() {
+					self.entryIds.formUnion(articleIDs)
+				}
+				self.didFinish()
+			} catch {
+				self.didFinish(with: error)
 			}
 		}
 	}


@@ -97,7 +97,7 @@ final class FeedlySyncAllOperation: FeedlyOperation {
 		self.operationQueue.add(getStarred)

 		// Now all the possible article ids we need have a status, fetch the article ids for missing articles.
-		let getMissingIds = FeedlyFetchIdsForMissingArticlesOperation(account: account, log: log)
+		let getMissingIds = FeedlyFetchIdsForMissingArticlesOperation(account: account)
 		getMissingIds.delegate = self
 		getMissingIds.downloadProgress = downloadProgress
 		getMissingIds.addDependency(getAllArticleIds)
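
The comment above describes an ordering constraint: the missing-IDs operation must not run until the earlier operations have recorded a status for every article ID the account should have. With Foundation's OperationQueue that guarantee comes from addDependency; the sketch below uses plain BlockOperations as stand-ins, since the project's own operation types may differ.

    import Foundation

    let queue = OperationQueue()

    let recordStatuses = BlockOperation { print("statuses recorded") }
    let fetchMissingIDs = BlockOperation { print("fetching IDs for missing articles") }

    // fetchMissingIDs will not start until recordStatuses has finished,
    // mirroring getMissingIds.addDependency(getAllArticleIds) above.
    fetchMissingIDs.addDependency(recordStatuses)

    queue.addOperations([recordStatuses, fetchMissingIDs], waitUntilFinished: true)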


@@ -317,56 +317,52 @@ final class NewsBlurAccountDelegate: AccountDelegate {
 	func refreshMissingStories(for account: Account, completion: @escaping (Result<Void, Error>) -> Void) {
 		os_log(.debug, log: log, "Refreshing missing stories...")

-		account.fetchArticleIDsForStatusesWithoutArticlesNewerThanCutoffDate { result in
-			MainActor.assumeIsolated {
-
-				@MainActor func process(_ fetchedHashes: Set<String>) {
-					let group = DispatchGroup()
-					var errorOccurred = false
-
-					let storyHashes = Array(fetchedHashes).map {
-						NewsBlurStoryHash(hash: $0, timestamp: Date())
-					}
-					let chunkedStoryHashes = storyHashes.chunked(into: 100)
-
-					for chunk in chunkedStoryHashes {
-						group.enter()
-						self.caller.retrieveStories(hashes: chunk) { result in
-							switch result {
-							case .success((let stories, _)):
-								self.processStories(account: account, stories: stories) { result in
-									group.leave()
-									if case .failure = result {
-										errorOccurred = true
-									}
-								}
-							case .failure(let error):
-								errorOccurred = true
-								os_log(.error, log: self.log, "Refresh missing stories failed: %@.", error.localizedDescription)
-								group.leave()
-							}
-						}
-					}
-
-					group.notify(queue: DispatchQueue.main) {
-						self.refreshProgress.completeTask()
-						os_log(.debug, log: self.log, "Done refreshing missing stories.")
-						if errorOccurred {
-							completion(.failure(NewsBlurError.unknown))
-						} else {
-							completion(.success(()))
-						}
-					}
-				}
-
-				switch result {
-				case .success(let fetchedArticleIDs):
-					process(fetchedArticleIDs)
-				case .failure(let error):
-					self.refreshProgress.completeTask()
-					completion(.failure(error))
-				}
-			}
-		}
+		Task { @MainActor in
+
+			do {
+				let fetchedArticleIDs = try await account.fetchArticleIDsForStatusesWithoutArticlesNewerThanCutoffDate() ?? Set<String>()
+
+				let group = DispatchGroup()
+				var errorOccurred = false
+
+				let storyHashes = Array(fetchedArticleIDs).map {
+					NewsBlurStoryHash(hash: $0, timestamp: Date())
+				}
+				let chunkedStoryHashes = storyHashes.chunked(into: 100)
+
+				for chunk in chunkedStoryHashes {
+					group.enter()
+					self.caller.retrieveStories(hashes: chunk) { result in
+						switch result {
+						case .success((let stories, _)):
+							self.processStories(account: account, stories: stories) { result in
+								group.leave()
+								if case .failure = result {
+									errorOccurred = true
+								}
+							}
+						case .failure(let error):
+							errorOccurred = true
+							os_log(.error, log: self.log, "Refresh missing stories failed: %@.", error.localizedDescription)
+							group.leave()
+						}
+					}
+				}
+
+				group.notify(queue: DispatchQueue.main) {
+					self.refreshProgress.completeTask()
+					os_log(.debug, log: self.log, "Done refreshing missing stories.")
+					if errorOccurred {
+						completion(.failure(NewsBlurError.unknown))
+					} else {
+						completion(.success(()))
+					}
+				}
+			} catch {
+				self.refreshProgress.completeTask()
+				completion(.failure(error))
+			}
+		}
 	}


@@ -1195,55 +1195,52 @@ private extension ReaderAPIAccountDelegate {
 	}

 	func refreshMissingArticles(_ account: Account, completion: @escaping VoidCompletionBlock) {
-		account.fetchArticleIDsForStatusesWithoutArticlesNewerThanCutoffDate { articleIDsResult in
-			MainActor.assumeIsolated {
-
-				@MainActor func process(_ fetchedArticleIDs: Set<String>) {
-					guard !fetchedArticleIDs.isEmpty else {
-						completion()
-						return
-					}
-
-					os_log(.debug, log: self.log, "Refreshing missing articles...")
-					let group = DispatchGroup()
-
-					let articleIDs = Array(fetchedArticleIDs)
-					let chunkedArticleIDs = articleIDs.chunked(into: 150)
-
-					self.refreshProgress.addToNumberOfTasksAndRemaining(chunkedArticleIDs.count - 1)
-
-					for chunk in chunkedArticleIDs {
-						group.enter()
-						self.caller.retrieveEntries(articleIDs: chunk) { result in
-							self.refreshProgress.completeTask()
-
-							switch result {
-							case .success(let entries):
-								self.processEntries(account: account, entries: entries) {
-									group.leave()
-								}
-
-							case .failure(let error):
-								os_log(.error, log: self.log, "Refresh missing articles failed: %@.", error.localizedDescription)
-								group.leave()
-							}
-						}
-					}
-
-					group.notify(queue: DispatchQueue.main) {
-						self.refreshProgress.completeTask()
-						os_log(.debug, log: self.log, "Done refreshing missing articles.")
-						completion()
-					}
-				}
-
-				switch articleIDsResult {
-				case .success(let articleIDs):
-					process(articleIDs)
-				case .failure:
-					self.refreshProgress.completeTask()
-					completion()
-				}
-			}
-		}
+		Task { @MainActor in
+
+			do {
+				let fetchedArticleIDs = try await account.fetchArticleIDsForStatusesWithoutArticlesNewerThanCutoffDate() ?? Set<String>()
+
+				guard !fetchedArticleIDs.isEmpty else {
+					completion()
+					return
+				}
+
+				os_log(.debug, log: self.log, "Refreshing missing articles...")
+				let group = DispatchGroup()
+
+				let articleIDs = Array(fetchedArticleIDs)
+				let chunkedArticleIDs = articleIDs.chunked(into: 150)
+
+				self.refreshProgress.addToNumberOfTasksAndRemaining(chunkedArticleIDs.count - 1)
+
+				for chunk in chunkedArticleIDs {
+					group.enter()
+					self.caller.retrieveEntries(articleIDs: chunk) { result in
+						self.refreshProgress.completeTask()
+
+						switch result {
+						case .success(let entries):
+							self.processEntries(account: account, entries: entries) {
+								group.leave()
+							}
+
+						case .failure(let error):
+							os_log(.error, log: self.log, "Refresh missing articles failed: %@.", error.localizedDescription)
+							group.leave()
+						}
+					}
+				}
+
+				group.notify(queue: DispatchQueue.main) {
+					self.refreshProgress.completeTask()
+					os_log(.debug, log: self.log, "Done refreshing missing articles.")
+					completion()
+				}
+			} catch {
+				self.refreshProgress.completeTask()
+				completion()
+			}
+		}
 	}
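
After this change the delegates still fan out over chunks with a DispatchGroup inside an async context. A rough sketch of the same fan-out expressed with a task group, shown only for comparison and not part of this commit; the async retrieveEntries/processEntries stand-ins are hypothetical, it reuses the chunked(into:) helper sketched earlier, and error handling is simplified.

    // Hypothetical async stand-ins for the completion-based calls above.
    func retrieveEntries(articleIDs: [String]) async throws -> [String] { articleIDs }
    func processEntries(_ entries: [String]) async throws { }

    func refreshMissingArticles(articleIDs: Set<String>) async throws {
        try await withThrowingTaskGroup(of: Void.self) { group in
            for chunk in Array(articleIDs).chunked(into: 150) {
                group.addTask {
                    let entries = try await retrieveEntries(articleIDs: chunk)
                    try await processEntries(entries)
                }
            }
            // Surfaces the first error, if any, once all chunks complete.
            try await group.waitForAll()
        }
    }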