Finish making Account and AccountManager compatible with DatabaseQueue changes.
commit 51393967e2
parent 9516392c61
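The hunks below make the synchronous fetch methods throwing and move the async fetch methods from plain article-set callbacks to Result-based callbacks. As a point of reference, here is a minimal sketch of the two block shapes implied by the names in this diff; the typealias definitions themselves are assumptions, not shown in the commit:

// Sketch only — assumed shapes for the callback typealiases referenced below.
struct Article: Hashable {}        // stand-in for Articles.Article
struct DatabaseError: Error {}     // stand-in for the ArticlesDatabase error type

// Old shape: the completion receives the fetched articles directly.
typealias ArticleSetBlock = (Set<Article>) -> Void

// New shape: the completion receives a Result, so database failures reach the caller.
typealias ArticleSetResultBlock = (Result<Set<Article>, DatabaseError>) -> Void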
@@ -624,7 +624,7 @@ public final class Account: DisplayNameProvider, UnreadCountProvider, Container,
 	public func fetchArticles(_ fetchType: FetchType) throws -> Set<Article> {
 		switch fetchType {
 		case .starred:
-			return fetchStarredArticles()
+			return try fetchStarredArticles()
 		case .unread:
 			return try fetchUnreadArticles()
 		case .today:
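Because Account.fetchArticles(_:) is now throwing, callers wrap it in do/catch. A hypothetical call site, not part of this commit:

// Hypothetical caller: fetch starred articles, falling back to an empty set
// if the database throws.
func starredArticlesOrEmpty(in account: Account) -> Set<Article> {
	do {
		return try account.fetchArticles(.starred)
	}
	catch {
		print("starred fetch failed: \(error)")
		return Set<Article>()
	}
}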
@@ -784,7 +784,7 @@ public final class Account: DisplayNameProvider, UnreadCountProvider, Container,
 	/// Update statuses specified by articleIDs — set a key and value.
 	/// This updates the database, and sends a .StatusesDidChange notification.
 	/// Any statuses that don’t exist will be automatically created.
-	func mark(articleIDs: Set<String>, statusKey: ArticleStatus.Key, flag: Bool, completion: @escaping DatabaseCompletionBlock? = nil) {
+	func mark(articleIDs: Set<String>, statusKey: ArticleStatus.Key, flag: Bool, completion: DatabaseCompletionBlock? = nil) {
 		// TODO
 	}

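The only change in this hunk is dropping @escaping from the optional completion parameter. Swift allows @escaping only on parameters whose declared type is a bare function type; an optional closure is wrapped in Optional and already escapes implicitly, so the attribute is rejected there. A short self-contained illustration of that rule (the names are hypothetical, not from the commit):

import Foundation

typealias CompletionBlock = () -> Void

// Bare function type: @escaping is required because the closure outlives the call.
func runLater(_ completion: @escaping CompletionBlock) {
	DispatchQueue.main.async { completion() }
}

// Optional closure: implicitly escaping, so @escaping must not (and cannot) be written.
func runLaterIfWanted(_ completion: CompletionBlock? = nil) {
	DispatchQueue.main.async { completion?() }
}

// func broken(_ completion: @escaping CompletionBlock? = nil) { }   // does not compile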
@@ -853,7 +853,7 @@ public final class Account: DisplayNameProvider, UnreadCountProvider, Container,
 	public func debugRunSearch() {
 		#if DEBUG
 		let t1 = Date()
-		let articles = fetchArticlesMatching("Brent NetNewsWire")
+		let articles = try! fetchArticlesMatching("Brent NetNewsWire")
 		let t2 = Date()
 		print(t2.timeIntervalSince(t1))
 		print(articles.count)
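debugRunSearch() is compiled only under DEBUG, so the new try! simply traps if the search throws. For contrast, a non-trapping form of the same kind of call, using a stand-in function rather than the real fetchArticlesMatching(_:):

// Stand-in for a throwing search. try? turns a thrown error into nil instead
// of trapping the way try! does.
func stubSearch(_ searchString: String) throws -> Set<String> {
	return [searchString]
}

func debugPrintSearchCount() {
	if let matches = try? stubSearch("Brent NetNewsWire") {
		print(matches.count)
	}
}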
@@ -940,8 +940,8 @@ extension Account: WebFeedMetadataDelegate {

 private extension Account {

-	func fetchStarredArticles() -> Set<Article> {
-		return database.fetchStarredArticles(flattenedWebFeeds().webFeedIDs())
+	func fetchStarredArticles() throws -> Set<Article> {
+		return try database.fetchStarredArticles(flattenedWebFeeds().webFeedIDs())
 	}

 	func fetchStarredArticlesAsync(_ completion: @escaping ArticleSetResultBlock) {
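fetchStarredArticles() becomes a thin re-throwing wrapper: it adds throws to its signature and forwards the database call with try, without catching anything itself. A minimal self-contained sketch of that propagation, using stand-in types rather than the real Account and ArticlesDatabase API:

// Stand-in types. An error thrown by the database layer propagates through the
// wrapper because the wrapper is itself marked `throws` and uses `try`.
struct StubDatabaseError: Error {}

struct StubArticlesDatabase {
	func fetchStarredArticles(_ webFeedIDs: Set<String>) throws -> Set<String> {
		throw StubDatabaseError()                     // pretend the fetch failed
	}
}

struct StubAccount {
	let database = StubArticlesDatabase()

	func fetchStarredArticles() throws -> Set<String> {
		return try database.fetchStarredArticles(["feed1", "feed2"])   // no do/catch: the error keeps going up
	}
}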
@@ -987,9 +987,14 @@ private extension Account {
 	}

 	func fetchArticlesAsync(webFeed: WebFeed, _ completion: @escaping ArticleSetResultBlock) {
-		database.fetchArticlesAsync(webFeed.webFeedID) { [weak self] (articles) in
+		database.fetchArticlesAsync(webFeed.webFeedID) { [weak self] articleSetResult in
+			switch articleSetResult {
+			case .success(let articles):
 				self?.validateUnreadCount(webFeed, articles)
-			completion(articles)
+				completion(.success(articles))
+			case .failure(let databaseError):
+				completion(.failure(databaseError))
+			}
 		}
 	}

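This hunk and the two that follow share one shape: switch on the database result, run the unread-count validation only on success, and forward either case to the caller. As an aside, the same forwarding can be expressed with Result.map; a self-contained sketch with stand-in types, not code from this commit:

// Stand-in types. Mapping the success value is equivalent to the explicit
// switch above: the side effect runs only on success, and a failure is
// forwarded to the completion untouched.
struct StubArticle: Hashable {}
struct StubDatabaseError: Error {}

func forward(_ result: Result<Set<StubArticle>, StubDatabaseError>,
             validatingWith validate: (Set<StubArticle>) -> Void,
             to completion: (Result<Set<StubArticle>, StubDatabaseError>) -> Void) {
	completion(result.map { articles in
		validate(articles)
		return articles
	})
}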
@@ -1040,9 +1045,14 @@ private extension Account {

 	func fetchArticlesAsync(forContainer container: Container, _ completion: @escaping ArticleSetResultBlock) {
 		let webFeeds = container.flattenedWebFeeds()
-		database.fetchArticlesAsync(webFeeds.webFeedIDs()) { [weak self] (articles) in
+		database.fetchArticlesAsync(webFeeds.webFeedIDs()) { [weak self] (articleSetResult) in
+			switch articleSetResult {
+			case .success(let articles):
 				self?.validateUnreadCountsAfterFetchingUnreadArticles(webFeeds, articles)
-			completion(articles)
+				completion(.success(articles))
+			case .failure(let databaseError):
+				completion(.failure(databaseError))
+			}
 		}
 	}

@@ -1055,9 +1065,14 @@ private extension Account {

 	func fetchUnreadArticlesAsync(forContainer container: Container, _ completion: @escaping ArticleSetResultBlock) {
 		let webFeeds = container.flattenedWebFeeds()
-		database.fetchUnreadArticlesAsync(webFeeds.webFeedIDs()) { [weak self] (articles) in
+		database.fetchUnreadArticlesAsync(webFeeds.webFeedIDs()) { [weak self] (articleSetResult) in
+			switch articleSetResult {
+			case .success(let articles):
 				self?.validateUnreadCountsAfterFetchingUnreadArticles(webFeeds, articles)
-			completion(articles)
+				completion(.success(articles))
+			case .failure(let databaseError):
+				completion(.failure(databaseError))
+			}
 		}
 	}


@@ -9,6 +9,7 @@
 import Foundation
 import RSCore
 import Articles
+import ArticlesDatabase

 // Main thread only.

@@ -255,17 +256,17 @@ public final class AccountManager: UnreadCountProvider {

 	// These fetch articles from active accounts and return a merged Set<Article>.

-	public func fetchArticles(_ fetchType: FetchType) -> Set<Article> {
+	public func fetchArticles(_ fetchType: FetchType) throws -> Set<Article> {
 		precondition(Thread.isMainThread)

 		var articles = Set<Article>()
 		for account in activeAccounts {
-			articles.formUnion(account.fetchArticles(fetchType))
+			articles.formUnion(try account.fetchArticles(fetchType))
 		}
 		return articles
 	}

-	public func fetchArticlesAsync(_ fetchType: FetchType, _ completion: @escaping ArticleSetBlock) {
+	public func fetchArticlesAsync(_ fetchType: FetchType, _ completion: @escaping ArticleSetResultBlock) {
 		precondition(Thread.isMainThread)

 		var allFetchedArticles = Set<Article>()
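In the merged synchronous fetch, formUnion(try account.fetchArticles(fetchType)) means the first account whose fetch throws aborts the loop and the error propagates out of AccountManager.fetchArticles(_:). A hypothetical call site, not part of this commit:

// Hypothetical caller of the merged, now-throwing fetch.
func todayArticlesOrEmpty(from accountManager: AccountManager) -> Set<Article> {
	do {
		return try accountManager.fetchArticles(.today)
	}
	catch {
		print("merged fetch failed: \(error)")
		return Set<Article>()
	}
}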
@@ -273,11 +274,18 @@ public final class AccountManager: UnreadCountProvider {
 		var accountsReporting = 0

 		for account in activeAccounts {
-			account.fetchArticlesAsync(fetchType) { (articles) in
-				allFetchedArticles.formUnion(articles)
+			account.fetchArticlesAsync(fetchType) { (articleSetResult) in
 				accountsReporting += 1
+
+				switch articleSetResult {
+				case .success(let articles):
+					allFetchedArticles.formUnion(articles)
 					if accountsReporting == numberOfAccounts {
-					completion(allFetchedArticles)
+						completion(.success(allFetchedArticles))
+					}
+				case .failure(let databaseError):
+					completion(.failure(databaseError))
+					return
 				}
 			}
 		}
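A hypothetical call site for the Result-based async API above. AccountManager.shared is assumed from the rest of the app; it is not part of this diff:

// Hypothetical caller: merge unread articles across all active accounts and
// handle a database failure reported by any one of them.
AccountManager.shared.fetchArticlesAsync(.unread) { articleSetResult in
	switch articleSetResult {
	case .success(let articles):
		print("merged \(articles.count) unread articles")
	case .failure(let databaseError):
		print("merged fetch failed: \(databaseError)")
	}
}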