2020-03-14 16:44:23 -04:00
|
|
|
|
//
|
2020-03-21 17:16:35 -04:00
|
|
|
|
// NewsBlurAccountDelegate+Internal.swift
|
2020-03-14 16:44:23 -04:00
|
|
|
|
// Mostly adapted from FeedbinAccountDelegate.swift
|
|
|
|
|
// Account
|
|
|
|
|
//
|
|
|
|
|
// Created by Anh Quang Do on 2020-03-14.
|
|
|
|
|
// Copyright (c) 2020 Ranchero Software, LLC. All rights reserved.
|
|
|
|
|
//
|
|
|
|
|
|
|
|
|
|
import Articles
|
|
|
|
|
import RSCore
|
|
|
|
|
import RSDatabase
|
|
|
|
|
import RSParser
|
|
|
|
|
import RSWeb
|
|
|
|
|
import SyncDatabase
|
|
|
|
|
import os.log
|
|
|
|
|
|
|
|
|
|
extension NewsBlurAccountDelegate {
|
|
|
|
|
/// Refreshes the complete feed/folder list from NewsBlur.
///
/// Retrieves the subscription list from the service, then syncs folders,
/// feeds, and feed-folder relationships into `account` inside a single
/// batch update so observers are notified only once.
func refreshFeeds(for account: Account, completion: @escaping (Result<Void, Error>) -> Void) {
	os_log(.debug, log: log, "Refreshing feeds...")

	caller.retrieveFeeds { result in
		switch result {
		case .success(let (feeds, folders)):
			// One batch update: folders first, then feeds, then the
			// relationships that tie them together.
			BatchUpdate.shared.perform {
				self.syncFolders(account, folders)
				self.syncFeeds(account, feeds)
				self.syncFeedFolderRelationship(account, folders)
			}
			self.refreshProgress.completeTask()
			completion(.success(()))

		case .failure(let error):
			completion(.failure(error))
		}
	}
}
|
|
|
|
|
|
|
|
|
|
/// Brings the account's folders in line with the folder list reported by NewsBlur.
///
/// Folders that no longer exist at NewsBlur are removed locally (their feeds
/// are first moved to the top level of the account), and folders NewsBlur has
/// that we don't are created. Must be called on the main thread.
func syncFolders(_ account: Account, _ folders: [NewsBlurFolder]?) {
	guard let folders = folders else { return }
	assert(Thread.isMainThread)

	os_log(.debug, log: log, "Syncing folders with %ld folders.", folders.count)

	let remoteFolderNames = folders.map { $0.name }

	// Delete any local folders not present at NewsBlur. Each of the folder's
	// feeds is re-homed at the account level before the folder goes away.
	if let localFolders = account.folders {
		for localFolder in localFolders where !remoteFolderNames.contains(localFolder.name ?? "") {
			for feed in localFolder.topLevelWebFeeds {
				account.addWebFeed(feed)
				clearFolderRelationship(for: feed, withFolderName: localFolder.name ?? "")
			}
			account.removeFolder(localFolder)
		}
	}

	let localFolderNames = account.folders?.map { $0.name ?? "" } ?? [String]()

	// Create any folders NewsBlur has but we don't. The single-space name is
	// the account-level folder, which has no local Folder counterpart.
	for remoteName in remoteFolderNames where !localFolderNames.contains(remoteName) && remoteName != " " {
		_ = account.ensureFolder(with: remoteName)
	}
}
|
|
|
|
|
|
|
|
|
|
/// Brings the account's feeds in line with the subscriptions reported by NewsBlur.
///
/// Unsubscribed feeds are removed (from folders and from the account's top
/// level), existing feeds have their metadata refreshed, and new feeds are
/// created and added in a single pass. Must be called on the main thread.
func syncFeeds(_ account: Account, _ feeds: [NewsBlurFeed]?) {
	guard let feeds = feeds else { return }
	assert(Thread.isMainThread)

	os_log(.debug, log: log, "Syncing feeds with %ld feeds.", feeds.count)

	let subscribedFeedIDs = feeds.map { String($0.feedID) }

	// Drop feeds that are no longer in the NewsBlur subscriptions —
	// first from every folder, then from the account's top level.
	if let folders = account.folders {
		for folder in folders {
			for feed in folder.topLevelWebFeeds where !subscribedFeedIDs.contains(feed.webFeedID) {
				folder.removeWebFeed(feed)
			}
		}
	}
	for feed in account.topLevelWebFeeds where !subscribedFeedIDs.contains(feed.webFeedID) {
		account.removeWebFeed(feed)
	}

	// Refresh metadata on feeds we already have; collect the ones we don't.
	var missingFeeds = Set<NewsBlurFeed>()
	for subscription in feeds {
		if let existingFeed = account.existingWebFeed(withWebFeedID: String(subscription.feedID)) {
			existingFeed.name = subscription.name
			// A server-side rename wins over a locally edited name.
			existingFeed.editedName = nil
			existingFeed.homePageURL = subscription.homePageURL
			existingFeed.externalID = String(subscription.feedID)
			existingFeed.faviconURL = subscription.faviconURL
		} else {
			missingFeeds.insert(subscription)
		}
	}

	// Add the missing feeds in one go, so we don’t trigger various
	// rebuilding things that Account does.
	for subscription in missingFeeds {
		let newFeed = account.createWebFeed(with: subscription.name, url: subscription.feedURL, webFeedID: String(subscription.feedID), homePageURL: subscription.homePageURL)
		newFeed.externalID = String(subscription.feedID)
		account.addWebFeed(newFeed)
	}
}
|
|
|
|
|
|
|
|
|
|
/// Syncs which feeds live in which folders, based on the folder
/// relationships reported by NewsBlur.
///
/// - Parameters:
///   - account: The account whose folder membership is updated.
///   - folders: Folders from NewsBlur; `nil` is a no-op.
/// - Note: NewsBlur represents the account-level (top) folder with the name
///   `" "` (a single space); it has no local `Folder` counterpart.
func syncFeedFolderRelationship(_ account: Account, _ folders: [NewsBlurFolder]?) {
	guard let folders = folders else { return }
	assert(Thread.isMainThread)

	os_log(.debug, log: log, "Syncing folders with %ld folders.", folders.count)

	// Set up some structures to make syncing easier
	let relationships = folders.map({ $0.asRelationships }).flatMap { $0 }
	let folderDict = nameToFolderDictionary(with: account.folders)
	// Group the flat relationship list by folder name.
	let newsBlurFolderDict = relationships.reduce([String: [NewsBlurFolderRelationship]]()) { (dict, relationship) in
		var feedInFolders = dict
		if var feedInFolder = feedInFolders[relationship.folderName] {
			feedInFolder.append(relationship)
			feedInFolders[relationship.folderName] = feedInFolder
		} else {
			feedInFolders[relationship.folderName] = [relationship]
		}
		return feedInFolders
	}

	// Sync the folders
	for (folderName, folderRelationships) in newsBlurFolderDict {
		let newsBlurFolderFeedIDs = folderRelationships.map { String($0.feedID) }

		// Handle account-level folder: remove top-level feeds NewsBlur
		// no longer lists there.
		if folderName == " " {
			for feed in account.topLevelWebFeeds {
				if !newsBlurFolderFeedIDs.contains(feed.webFeedID) {
					account.removeWebFeed(feed)
				}
			}
		}

		// Must be `continue`, not `return`: this is a plain for-loop, so a
		// `return` here (as in the forEach-based Feedbin code this was
		// adapted from, where `return` meant "next element") would abort
		// syncing of every remaining folder — and the account-level " "
		// entry, which is never in folderDict, would trigger it every sync.
		guard let folder = folderDict[folderName] else { continue }

		// Move any feeds not in the folder to the account
		for feed in folder.topLevelWebFeeds {
			if !newsBlurFolderFeedIDs.contains(feed.webFeedID) {
				folder.removeWebFeed(feed)
				clearFolderRelationship(for: feed, withFolderName: folder.name ?? "")
				account.addWebFeed(feed)
			}
		}

		// Add any feeds not in the folder
		let folderFeedIds = folder.topLevelWebFeeds.map { $0.webFeedID }

		for relationship in folderRelationships {
			let folderFeedID = String(relationship.feedID)
			if !folderFeedIds.contains(folderFeedID) {
				guard let feed = account.existingWebFeed(withWebFeedID: folderFeedID) else {
					continue
				}
				saveFolderRelationship(for: feed, withFolderName: folderName, id: relationship.folderName)
				folder.addWebFeed(feed)
			}
		}
	}
}
|
|
|
|
|
|
|
|
|
|
/// Removes the stored folder-relationship entry for `folderName` from the
/// feed, if the feed has a relationship dictionary at all.
func clearFolderRelationship(for feed: WebFeed, withFolderName folderName: String) {
	guard var relationships = feed.folderRelationship else { return }
	relationships[folderName] = nil
	feed.folderRelationship = relationships
}
|
|
|
|
|
|
|
|
|
|
/// Records `id` under `folderName` in the feed's folder-relationship
/// dictionary, creating the dictionary if the feed doesn't have one yet.
func saveFolderRelationship(for feed: WebFeed, withFolderName folderName: String, id: String) {
	var relationships = feed.folderRelationship ?? [:]
	relationships[folderName] = id
	feed.folderRelationship = relationships
}
|
|
|
|
|
|
|
|
|
|
/// Builds a folder-name → folder lookup table.
///
/// A `nil` folder name maps from the empty string. If two folders share a
/// name, only the first one encountered is kept; `nil` input yields an
/// empty dictionary.
func nameToFolderDictionary(with folders: Set<Folder>?) -> [String: Folder] {
	guard let folders = folders else { return [String: Folder]() }

	var lookup = [String: Folder]()
	for folder in folders where lookup[folder.name ?? ""] == nil {
		lookup[folder.name ?? ""] = folder
	}
	return lookup
}
|
|
|
|
|
|
|
|
|
|
/// Downloads and processes the stories for the given hashes, 100 at a time
/// (the NewsBlur API limit), recursing until every hash has been handled.
///
/// - Parameters:
///   - account: The account the fetched stories are stored into.
///   - hashes: Story hashes still left to fetch; `nil` or empty ends the recursion.
///   - updateFetchDate: When the recursion ends, recorded as
///     `lastArticleFetchStartTime` in the account metadata.
///   - completion: Called with `.success` once all hashes are processed, or
///     with the first error encountered.
func refreshUnreadStories(for account: Account, hashes: [NewsBlurStoryHash]?, updateFetchDate: Date?, completion: @escaping (Result<Void, Error>) -> Void) {
	// Base case: nothing left to fetch. Record the fetch window (if we were
	// given one) and report success.
	guard let hashes = hashes, !hashes.isEmpty else {
		if let lastArticleFetch = updateFetchDate {
			self.accountMetadata?.lastArticleFetchStartTime = lastArticleFetch
			self.accountMetadata?.lastArticleFetchEndTime = Date()
		}
		completion(.success(()))
		return
	}

	// Take at most the first 100 hashes for this round.
	let numberOfStories = min(hashes.count, 100) // api limit
	let hashesToFetch = Array(hashes[..<numberOfStories])

	caller.retrieveStories(hashes: hashesToFetch) { result in
		switch result {
		case .success((let stories, let date)):
			self.processStories(account: account, stories: stories) { result in
				self.refreshProgress.completeTask()

				if case .failure(let error) = result {
					completion(.failure(error))
					return
				}

				// Recurse on the remaining hashes, carrying forward the
				// server-provided date for the final metadata update.
				self.refreshUnreadStories(for: account, hashes: Array(hashes[numberOfStories...]), updateFetchDate: date) { result in
					os_log(.debug, log: self.log, "Done refreshing stories.")
					switch result {
					case .success:
						completion(.success(()))
					case .failure(let error):
						completion(.failure(error))
					}
				}
			}
		case .failure(let error):
			completion(.failure(error))
		}
	}
}
|
|
|
|
|
|
|
|
|
|
/// Converts NewsBlur stories into `ParsedItem`s for the article pipeline.
/// A `nil` story list yields an empty set.
func mapStoriesToParsedItems(stories: [NewsBlurStory]?) -> Set<ParsedItem> {
	guard let stories = stories else { return Set<ParsedItem>() }

	var parsedItems = Set<ParsedItem>()
	for story in stories {
		// NewsBlur supplies only an author name — no URL, avatar, or email.
		let authors = Set([ParsedAuthor(name: story.authorName, url: nil, avatarURL: nil, emailAddress: nil)])
		let item = ParsedItem(syncServiceID: story.storyID, uniqueID: String(story.storyID), feedURL: String(story.feedID), url: story.url, externalURL: nil, title: story.title, language: nil, contentHTML: story.contentHTML, contentText: nil, summary: nil, imageURL: story.imageURL, bannerImageURL: nil, datePublished: story.datePublished, dateModified: nil, authors: authors, tags: Set(story.tags ?? []), attachments: nil)
		parsedItems.insert(item)
	}
	return parsedItems
}
|
|
|
|
|
|
|
|
|
|
/// Sends pending story status changes (e.g. read/starred) to NewsBlur.
///
/// The statuses' article IDs are chunked (group size 1 when `throttle` is
/// set, otherwise 5 — the API limit) and each chunk is sent via `apiCall`.
/// Successful chunks are deleted from the sync database; failed chunks are
/// reset so they will be retried later.
///
/// - Parameters:
///   - statuses: Pending sync statuses; empty completes immediately with success.
///   - throttle: When `true`, sends one story hash per call instead of five.
///   - apiCall: The NewsBlur API call to invoke per chunk of story hashes.
///   - completion: Called on the main queue; `.failure(NewsBlurError.unknown)`
///     if any chunk failed, `.success` otherwise.
func sendStoryStatuses(_ statuses: [SyncStatus],
					   throttle: Bool,
					   apiCall: ([String], @escaping (Result<Void, Error>) -> Void) -> Void,
					   completion: @escaping (Result<Void, Error>) -> Void) {
	guard !statuses.isEmpty else {
		completion(.success(()))
		return
	}

	// The group tracks all in-flight chunk calls; completion fires only
	// after every chunk has reported back.
	let group = DispatchGroup()
	var errorOccurred = false

	let storyHashes = statuses.compactMap { $0.articleID }
	let storyHashGroups = storyHashes.chunked(into: throttle ? 1 : 5) // api limit
	for storyHashGroup in storyHashGroups {
		group.enter()
		apiCall(storyHashGroup) { result in
			switch result {
			case .success:
				// Sent successfully — remove these statuses from the queue.
				self.database.deleteSelectedForProcessing(storyHashGroup.map { String($0) } )
				group.leave()
			case .failure(let error):
				errorOccurred = true
				os_log(.error, log: self.log, "Story status sync call failed: %@.", error.localizedDescription)
				// Put the statuses back so a later sync retries them.
				self.database.resetSelectedForProcessing(storyHashGroup.map { String($0) } )
				group.leave()
			}
		}
	}

	group.notify(queue: DispatchQueue.main) {
		if errorOccurred {
			completion(.failure(NewsBlurError.unknown))
		} else {
			completion(.success(()))
		}
	}
}
|
|
|
|
|
|
|
|
|
|
/// Applies NewsBlur's unread story hashes to the local account.
///
/// Hashes that have a pending (not yet uploaded) local read-status change
/// are excluded, so local edits are not clobbered by stale server state.
/// Errors from fetching current unread IDs are silently ignored (best-effort
/// sync); database errors are logged.
func syncStoryReadState(account: Account, hashes: [NewsBlurStoryHash]?) {
	guard let hashes = hashes else { return }

	database.selectPendingReadStatusArticleIDs() { result in
		// Applies the server's unread set, skipping stories with pending
		// local changes.
		func process(_ pendingStoryHashes: Set<String>) {

			let newsBlurUnreadStoryHashes = Set(hashes.map { $0.hash } )
			let updatableNewsBlurUnreadStoryHashes = newsBlurUnreadStoryHashes.subtracting(pendingStoryHashes)

			account.fetchUnreadArticleIDs { articleIDsResult in
				guard let currentUnreadArticleIDs = try? articleIDsResult.get() else {
					return
				}

				// Mark articles as unread
				let deltaUnreadArticleIDs = updatableNewsBlurUnreadStoryHashes.subtracting(currentUnreadArticleIDs)
				account.markAsUnread(deltaUnreadArticleIDs)

				// Mark articles as read
				let deltaReadArticleIDs = currentUnreadArticleIDs.subtracting(updatableNewsBlurUnreadStoryHashes)
				account.markAsRead(deltaReadArticleIDs)
			}
		}

		switch result {
		case .success(let pendingArticleIDs):
			process(pendingArticleIDs)
		case .failure(let error):
			os_log(.error, log: self.log, "Sync Story Read Status failed: %@.", error.localizedDescription)
		}
	}
}
|
|
|
|
|
|
|
|
|
|
/// Applies NewsBlur's starred story hashes to the local account.
///
/// Hashes that have a pending (not yet uploaded) local starred-status change
/// are excluded, so local edits are not clobbered by stale server state.
/// Errors from fetching current starred IDs are silently ignored
/// (best-effort sync); database errors are logged.
func syncStoryStarredState(account: Account, hashes: [NewsBlurStoryHash]?) {
	guard let hashes = hashes else { return }

	database.selectPendingStarredStatusArticleIDs() { result in
		switch result {
		case .success(let pendingArticleIDs):
			// Skip stories whose starred status has pending local changes.
			let newsBlurStarredStoryHashes = Set(hashes.map { $0.hash })
			let updatableNewsBlurStarredStoryHashes = newsBlurStarredStoryHashes.subtracting(pendingArticleIDs)

			account.fetchStarredArticleIDs { articleIDsResult in
				guard let currentStarredArticleIDs = try? articleIDsResult.get() else {
					return
				}

				// Star anything NewsBlur has starred that we haven't…
				let deltaStarredArticleIDs = updatableNewsBlurStarredStoryHashes.subtracting(currentStarredArticleIDs)
				account.markAsStarred(deltaStarredArticleIDs)

				// …and unstar anything we have starred that NewsBlur hasn't.
				let deltaUnstarredArticleIDs = currentStarredArticleIDs.subtracting(updatableNewsBlurStarredStoryHashes)
				account.markAsUnstarred(deltaUnstarredArticleIDs)
			}

		case .failure(let error):
			os_log(.error, log: self.log, "Sync Story Starred Status failed: %@.", error.localizedDescription)
		}
	}
}
|
2020-03-21 17:16:35 -04:00
|
|
|
|
|
|
|
|
|
/// Creates a local web feed for a feed just subscribed at NewsBlur, adds it
/// to `container`, optionally renames it, and downloads its initial articles.
///
/// - Parameters:
///   - account: The account to create the feed in.
///   - feed: The NewsBlur feed returned by the subscribe call; `nil` fails
///     with `NewsBlurError.invalidParameter`.
///   - name: Optional custom name; when present, the feed is renamed before
///     the initial download.
///   - container: Where the new feed is placed (account or folder).
///   - completion: Receives the created `WebFeed` on success.
func createFeed(account: Account, feed: NewsBlurFeed?, name: String?, container: Container, completion: @escaping (Result<WebFeed, Error>) -> Void) {
	guard let feed = feed else {
		completion(.failure(NewsBlurError.invalidParameter))
		return
	}

	// Account mutation happens on the main queue.
	DispatchQueue.main.async {
		let webFeed = account.createWebFeed(with: feed.name, url: feed.feedURL, webFeedID: String(feed.feedID), homePageURL: feed.homePageURL)
		webFeed.externalID = String(feed.feedID)
		webFeed.faviconURL = feed.faviconURL

		account.addWebFeed(webFeed, to: container) { result in
			switch result {
			case .success:
				if let name = name {
					// Apply the custom name before fetching articles.
					account.renameWebFeed(webFeed, to: name) { result in
						switch result {
						case .success:
							self.initialFeedDownload(account: account, feed: webFeed, completion: completion)
						case .failure(let error):
							completion(.failure(error))
						}
					}
				} else {
					self.initialFeedDownload(account: account, feed: webFeed, completion: completion)
				}
			case .failure(let error):
				completion(.failure(error))
			}
		}
	}
}
|
|
|
|
|
|
|
|
|
|
/// Downloads one page of stories for `feed` and recurses to the next page.
///
/// Recursion stops when the server returns no stories for a page, or when
/// `processStories` reports no stories newer than the cutoff (roughly the
/// last three months).
///
/// - Parameters:
///   - account: The account the stories are stored into.
///   - feed: The feed whose stories are fetched.
///   - page: 1-based page index for the NewsBlur stories endpoint.
///   - completion: Called once all (recent) pages are processed, or with the
///     first error encountered.
func downloadFeed(account: Account, feed: WebFeed, page: Int, completion: @escaping (Result<Void, Error>) -> Void) {
	refreshProgress.addToNumberOfTasksAndRemaining(1)

	caller.retrieveStories(feedID: feed.webFeedID, page: page) { result in
		switch result {
		case .success((let stories, _)):
			// No more stories
			guard let stories = stories, stories.count > 0 else {
				self.refreshProgress.completeTask()

				completion(.success(()))
				return
			}

			// Only keep stories from roughly the last three months.
			let since: Date? = Calendar.current.date(byAdding: .month, value: -3, to: Date())

			self.processStories(account: account, stories: stories, since: since) { result in
				self.refreshProgress.completeTask()

				if case .failure(let error) = result {
					completion(.failure(error))
					return
				}

				// No more recent stories: everything on this page was older
				// than the cutoff, so stop paging.
				if case .success(let hasStories) = result, !hasStories {
					completion(.success(()))
					return
				}

				// Fetch the next page.
				self.downloadFeed(account: account, feed: feed, page: page + 1, completion: completion)
			}

		case .failure(let error):
			completion(.failure(error))
		}
	}
}
|
|
|
|
|
|
|
|
|
|
/// Performs the first-time article download for a newly added feed, then
/// refreshes article statuses and any missing stories.
///
/// - Parameters:
///   - account: The account the feed belongs to.
///   - feed: The newly created feed.
///   - completion: Receives the feed on success (delivered on the main queue).
func initialFeedDownload(account: Account, feed: WebFeed, completion: @escaping (Result<WebFeed, Error>) -> Void) {
	refreshProgress.addToNumberOfTasksAndRemaining(1)

	// Download the initial articles
	downloadFeed(account: account, feed: feed, page: 1) { result in
		// NOTE(review): the download result is not inspected here — the chain
		// proceeds to the status refresh regardless; confirm this is intended.
		self.refreshArticleStatus(for: account) { result in
			switch result {
			case .success:
				self.refreshMissingStories(for: account) { result in
					switch result {
					case .success:
						self.refreshProgress.completeTask()

						// Deliver the success on the main queue.
						DispatchQueue.main.async {
							completion(.success(feed))
						}

					case .failure(let error):
						completion(.failure(error))
					}
				}

			case .failure(let error):
				completion(.failure(error))
			}
		}
	}
}
|
2020-03-21 18:29:20 -04:00
|
|
|
|
|
|
|
|
|
/// Deletes a feed at NewsBlur, then removes it locally from the account or
/// from the folder it was deleted out of.
///
/// - Parameters:
///   - account: The account the feed belongs to.
///   - feed: The feed to delete; must have an `externalID` (the NewsBlur feed ID).
///   - container: The folder the feed is being removed from, or `nil`/the
///     account for a top-level feed.
///   - completion: Called on the main queue with the outcome; failures are
///     wrapped via `AccountError.wrappedError`.
func deleteFeed(for account: Account, with feed: WebFeed, from container: Container?, completion: @escaping (Result<Void, Error>) -> Void) {
	// This error should never happen
	guard let feedID = feed.externalID else {
		completion(.failure(NewsBlurError.invalidParameter))
		return
	}

	refreshProgress.addToNumberOfTasksAndRemaining(1)

	// Passing a folder name tells NewsBlur which folder to remove the feed from.
	let folderName = (container as? Folder)?.name
	caller.deleteFeed(feedID: feedID, folder: folderName) { result in
		self.refreshProgress.completeTask()

		switch result {
		case .success:
			DispatchQueue.main.async {
				let feedID = feed.webFeedID

				// Top-level feed: remove it from the account itself.
				if folderName == nil {
					account.removeWebFeed(feed)
				}

				// Foldered feed: remove it from the matching folder(s).
				if let folders = account.folders {
					for folder in folders where folderName != nil && folder.name == folderName {
						folder.removeWebFeed(feed)
					}
				}

				// If the feed still exists elsewhere in the account, clear its
				// metadata rather than leaving stale state behind.
				if account.existingWebFeed(withWebFeedID: feedID) != nil {
					account.clearWebFeedMetadata(feed)
				}

				completion(.success(()))
			}

		case .failure(let error):
			DispatchQueue.main.async {
				let wrappedError = AccountError.wrappedError(error: error, account: account)
				completion(.failure(wrappedError))
			}
		}
	}
}
|
2020-03-14 16:44:23 -04:00
|
|
|
|
}
|