//
//  FeedbinAccountDelegate.swift
//  Account
//
//  Created by Maurice Parker on 5/2/19.
//  Copyright © 2019 Ranchero Software, LLC. All rights reserved.
//

#if os(macOS)
import AppKit
#else
import UIKit
import RSCore
#endif
import Articles
import RSCore
import RSParser
import RSWeb
import SyncDatabase
import os.log

public enum FeedbinAccountDelegateError: String, Error {
	case invalidParameter = "There was an invalid parameter passed."
}

final class FeedbinAccountDelegate: AccountDelegate {

	private let database: SyncDatabase

	private let caller: FeedbinAPICaller
	private var log = OSLog(subsystem: Bundle.main.bundleIdentifier!, category: "Feedbin")

	let supportsSubFolders = false
	let server: String? = "api.feedbin.com"
	var opmlImportInProgress = false

	var credentials: Credentials? {
		didSet {
			caller.credentials = credentials
		}
	}

	weak var accountMetadata: AccountMetadata? {
		didSet {
			caller.accountMetadata = accountMetadata
		}
	}

	init(dataFolder: String, transport: Transport?) {

		let databaseFilePath = (dataFolder as NSString).appendingPathComponent("Sync.sqlite3")
		database = SyncDatabase(databaseFilePath: databaseFilePath)

		if transport != nil {
			caller = FeedbinAPICaller(transport: transport!)
		} else {
			let sessionConfiguration = URLSessionConfiguration.default
			sessionConfiguration.requestCachePolicy = .reloadIgnoringLocalCacheData
			sessionConfiguration.timeoutIntervalForRequest = 60.0
			sessionConfiguration.httpShouldSetCookies = false
			sessionConfiguration.httpCookieAcceptPolicy = .never
			sessionConfiguration.httpMaximumConnectionsPerHost = 1
			sessionConfiguration.httpCookieStorage = nil
			sessionConfiguration.urlCache = nil

			if let userAgentHeaders = UserAgent.headers() {
				sessionConfiguration.httpAdditionalHeaders = userAgentHeaders
			}

			caller = FeedbinAPICaller(transport: URLSession(configuration: sessionConfiguration))
		}
	}

	var refreshProgress = DownloadProgress(numberOfTasks: 0)

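	// Refreshes the whole account: folders (tags), feeds, and taggings first, then articles and
	// article statuses. Completion is always called on the main queue.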
	func refreshAll(for account: Account, completion: @escaping (Result<Void, Error>) -> Void) {

		refreshProgress.addToNumberOfTasksAndRemaining(6)

		refreshAccount(account) { result in
			switch result {
			case .success():

				self.refreshArticles(account) {
					self.refreshArticleStatus(for: account) {
						self.refreshMissingArticles(account) {
							self.refreshProgress.clear()
							DispatchQueue.main.async {
								completion(.success(()))
							}
						}
					}
				}

			case .failure(let error):
				DispatchQueue.main.async {
					self.refreshProgress.clear()
					let wrappedError = AccountError.wrappedError(error: error, account: account)
					completion(.failure(wrappedError))
				}
			}

		}

	}

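	// Pushes locally queued read/unread and starred/unstarred status changes to Feedbin.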
	func sendArticleStatus(for account: Account, completion: @escaping (() -> Void)) {

		os_log(.debug, log: log, "Sending article statuses...")

		let syncStatuses = database.selectForProcessing()

		let createUnreadStatuses = syncStatuses.filter { $0.key == ArticleStatus.Key.read && $0.flag == false }
		let deleteUnreadStatuses = syncStatuses.filter { $0.key == ArticleStatus.Key.read && $0.flag == true }
		let createStarredStatuses = syncStatuses.filter { $0.key == ArticleStatus.Key.starred && $0.flag == true }
		let deleteStarredStatuses = syncStatuses.filter { $0.key == ArticleStatus.Key.starred && $0.flag == false }

		let group = DispatchGroup()

		group.enter()
		sendArticleStatuses(createUnreadStatuses, apiCall: caller.createUnreadEntries) {
			group.leave()
		}

		group.enter()
		sendArticleStatuses(deleteUnreadStatuses, apiCall: caller.deleteUnreadEntries) {
			group.leave()
		}

		group.enter()
		sendArticleStatuses(createStarredStatuses, apiCall: caller.createStarredEntries) {
			group.leave()
		}

		group.enter()
		sendArticleStatuses(deleteStarredStatuses, apiCall: caller.deleteStarredEntries) {
			group.leave()
		}

		group.notify(queue: DispatchQueue.main) {
			os_log(.debug, log: self.log, "Done sending article statuses.")
			completion()
		}

	}

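	// Retrieves the current unread and starred article IDs from Feedbin and applies them to the local account.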
	func refreshArticleStatus(for account: Account, completion: @escaping (() -> Void)) {

		os_log(.debug, log: log, "Refreshing article statuses...")

		let group = DispatchGroup()

		group.enter()
		caller.retrieveUnreadEntries() { result in
			switch result {
			case .success(let articleIDs):
				self.syncArticleReadState(account: account, articleIDs: articleIDs)
				group.leave()
			case .failure(let error):
				os_log(.info, log: self.log, "Retrieving unread entries failed: %@.", error.localizedDescription)
				group.leave()
			}
		}

		group.enter()
		caller.retrieveStarredEntries() { result in
			switch result {
			case .success(let articleIDs):
				self.syncArticleStarredState(account: account, articleIDs: articleIDs)
				group.leave()
			case .failure(let error):
				os_log(.info, log: self.log, "Retrieving starred entries failed: %@.", error.localizedDescription)
				group.leave()
			}
		}

		group.notify(queue: DispatchQueue.main) {
			os_log(.debug, log: self.log, "Done refreshing article statuses.")
			completion()
		}

	}

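	// Uploads an OPML file to Feedbin. Large imports finish asynchronously, so the result may be
	// polled via checkImportResult before completion is called.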
	func importOPML(for account: Account, opmlFile: URL, completion: @escaping (Result<Void, Error>) -> Void) {

		var fileData: Data?

		do {
			fileData = try Data(contentsOf: opmlFile)
		} catch {
			completion(.failure(error))
			return
		}

		guard let opmlData = fileData else {
			completion(.success(()))
			return
		}

		os_log(.debug, log: log, "Begin importing OPML...")
		opmlImportInProgress = true

		caller.importOPML(opmlData: opmlData) { result in
			switch result {
			case .success(let importResult):
				if importResult.complete {
					os_log(.debug, log: self.log, "Import OPML done.")
					self.opmlImportInProgress = false
					DispatchQueue.main.async {
						completion(.success(()))
					}
				} else {
					self.checkImportResult(opmlImportResultID: importResult.importResultID, completion: completion)
				}
			case .failure(let error):
				os_log(.debug, log: self.log, "Import OPML failed.")
				self.opmlImportInProgress = false
				DispatchQueue.main.async {
					let wrappedError = AccountError.wrappedError(error: error, account: account)
					completion(.failure(wrappedError))
				}
			}
		}

	}

	func renameFolder(for account: Account, with folder: Folder, to name: String, completion: @escaping (Result<Void, Error>) -> Void) {

		caller.renameTag(oldName: folder.name ?? "", newName: name) { result in
			switch result {
			case .success:
				DispatchQueue.main.async {
					folder.name = name
					completion(.success(()))
				}
			case .failure(let error):
				DispatchQueue.main.async {
					let wrappedError = AccountError.wrappedError(error: error, account: account)
					completion(.failure(wrappedError))
				}
			}
		}

	}

	func deleteFolder(for account: Account, with folder: Folder, completion: @escaping (Result<Void, Error>) -> Void) {

		// Feedbin uses tags and if at least one feed isn't tagged, then the folder doesn't exist on their system
		guard folder.hasAtLeastOneFeed() else {
			account.deleteFolder(folder)
			return
		}

		// After we successfully delete at Feedbin, we add all the feeds to the account to save them. We then
		// delete the folder. We then sync the taggings we received on the delete to remove any feeds from
		// the account that might be in another folder.
		caller.deleteTag(name: folder.name ?? "") { result in
			switch result {
			case .success(let taggings):
				DispatchQueue.main.sync {
					BatchUpdate.shared.perform {
						for feed in folder.topLevelFeeds {
							account.addFeed(feed)
							self.clearFolderRelationship(for: feed, withFolderName: folder.name ?? "")
						}
						account.deleteFolder(folder)
					}
					completion(.success(()))
				}
				self.syncTaggings(account, taggings)
			case .failure(let error):
				DispatchQueue.main.async {
					let wrappedError = AccountError.wrappedError(error: error, account: account)
					completion(.failure(wrappedError))
				}
			}
		}

	}

	func createFeed(for account: Account, url: String, name: String?, container: Container, completion: @escaping (Result<Feed, Error>) -> Void) {

		caller.createSubscription(url: url) { result in
			switch result {
			case .success(let subResult):
				switch subResult {
				case .created(let subscription):
					self.createFeed(account: account, subscription: subscription, name: name, container: container, completion: completion)
				case .multipleChoice(let choices):
					self.decideBestFeedChoice(account: account, url: url, name: name, container: container, choices: choices, completion: completion)
				case .alreadySubscribed:
					DispatchQueue.main.async {
						completion(.failure(AccountError.createErrorAlreadySubscribed))
					}
				case .notFound:
					DispatchQueue.main.async {
						completion(.failure(AccountError.createErrorNotFound))
					}
				}
			case .failure(let error):
				DispatchQueue.main.async {
					let wrappedError = AccountError.wrappedError(error: error, account: account)
					completion(.failure(wrappedError))
				}
			}
		}

	}

	func renameFeed(for account: Account, with feed: Feed, to name: String, completion: @escaping (Result<Void, Error>) -> Void) {

		// This error should never happen
		guard let subscriptionID = feed.subscriptionID else {
			completion(.failure(FeedbinAccountDelegateError.invalidParameter))
			return
		}

		caller.renameSubscription(subscriptionID: subscriptionID, newName: name) { result in
			switch result {
			case .success:
				DispatchQueue.main.async {
					feed.editedName = name
					completion(.success(()))
				}
			case .failure(let error):
				DispatchQueue.main.async {
					let wrappedError = AccountError.wrappedError(error: error, account: account)
					completion(.failure(wrappedError))
				}
			}
		}

	}

	func deleteFeed(for account: Account, with feed: Feed, completion: @escaping (Result<Void, Error>) -> Void) {

		// This error should never happen
		guard let subscriptionID = feed.subscriptionID else {
			completion(.failure(FeedbinAccountDelegateError.invalidParameter))
			return
		}

		caller.deleteSubscription(subscriptionID: subscriptionID) { result in
			switch result {
			case .success:
				DispatchQueue.main.async {
					account.removeFeed(feed)
					if let folders = account.folders {
						for folder in folders {
							folder.removeFeed(feed)
						}
					}
					completion(.success(()))
				}
			case .failure(let error):
				DispatchQueue.main.async {
					let wrappedError = AccountError.wrappedError(error: error, account: account)
					completion(.failure(wrappedError))
				}
			}
		}

	}

	func addFeed(for account: Account, to container: Container, with feed: Feed, completion: @escaping (Result<Void, Error>) -> Void) {

		if let folder = container as? Folder, let feedID = Int(feed.feedID) {
			caller.createTagging(feedID: feedID, name: folder.name ?? "") { result in
				switch result {
				case .success(let taggingID):
					DispatchQueue.main.async {
						self.saveFolderRelationship(for: feed, withFolderName: folder.name ?? "", id: String(taggingID))
						account.removeFeed(feed)
						folder.addFeed(feed)
						completion(.success(()))
					}
				case .failure(let error):
					DispatchQueue.main.async {
						let wrappedError = AccountError.wrappedError(error: error, account: account)
						completion(.failure(wrappedError))
					}
				}
			}
		} else {
			if let account = container as? Account {
				account.addFeedIfNotInAnyFolder(feed)
			}
			DispatchQueue.main.async {
				completion(.success(()))
			}
		}

	}

	func removeFeed(for account: Account, from container: Container, with feed: Feed, completion: @escaping (Result<Void, Error>) -> Void) {

		if let folder = container as? Folder, let feedTaggingID = feed.folderRelationship?[folder.name ?? ""] {
			caller.deleteTagging(taggingID: feedTaggingID) { result in
				switch result {
				case .success:
					DispatchQueue.main.async {
						folder.removeFeed(feed)
						account.addFeedIfNotInAnyFolder(feed)
						completion(.success(()))
					}
				case .failure(let error):
					DispatchQueue.main.async {
						let wrappedError = AccountError.wrappedError(error: error, account: account)
						completion(.failure(wrappedError))
					}
				}
			}
		} else {
			if let account = container as? Account {
				account.removeFeed(feed)
			}
			completion(.success(()))
		}

	}

	func restoreFeed(for account: Account, feed: Feed, container: Container, completion: @escaping (Result<Void, Error>) -> Void) {

		createFeed(for: account, url: feed.url, name: feed.editedName, container: container) { result in
			switch result {
			case .success:
				completion(.success(()))
			case .failure(let error):
				completion(.failure(error))
			}
		}

	}

	func restoreFolder(for account: Account, folder: Folder, completion: @escaping (Result<Void, Error>) -> Void) {

		account.addFolder(folder)
		let group = DispatchGroup()

		for feed in folder.topLevelFeeds {

			group.enter()
			addFeed(for: account, to: folder, with: feed) { result in
				if account.topLevelFeeds.contains(feed) {
					account.removeFeed(feed)
				}
				group.leave()
			}

		}

		group.notify(queue: DispatchQueue.main) {
			completion(.success(()))
		}

	}

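	// Queues status changes in the local sync database and applies them to the account immediately.
	// Once more than 100 changes are pending, they are pushed to Feedbin.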
	func markArticles(for account: Account, articles: Set<Article>, statusKey: ArticleStatus.Key, flag: Bool) -> Set<Article>? {

		let syncStatuses = articles.map { article in
			return SyncStatus(articleID: article.articleID, key: statusKey, flag: flag)
		}
		database.insertStatuses(syncStatuses)

		if database.selectPendingCount() > 100 {
			sendArticleStatus(for: account) {}
		}

		return account.update(articles, statusKey: statusKey, flag: flag)

	}

	func accountDidInitialize(_ account: Account) {
		credentials = try? account.retrieveBasicCredentials()
		accountMetadata = account.metadata
	}

	static func validateCredentials(transport: Transport, credentials: Credentials, completion: @escaping (Result<Bool, Error>) -> Void) {

		let caller = FeedbinAPICaller(transport: transport)
		caller.credentials = credentials
		caller.validateCredentials() { result in
			DispatchQueue.main.async {
				completion(result)
			}
		}

	}

}

// MARK: Private

private extension FeedbinAccountDelegate {

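	// Refreshes folders (Feedbin tags) first, then hands off to refreshFeeds for subscriptions, taggings, and icons.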
	func refreshAccount(_ account: Account, completion: @escaping (Result<Void, Error>) -> Void) {

		caller.retrieveTags { result in
			switch result {
			case .success(let tags):
				BatchUpdate.shared.perform {
					self.syncFolders(account, tags)
				}
				self.refreshProgress.completeTask()
				self.refreshFeeds(account, completion: completion)
			case .failure(let error):
				completion(.failure(error))
			}
		}

	}

	func checkImportResult(opmlImportResultID: Int, completion: @escaping (Result<Void, Error>) -> Void) {

		DispatchQueue.main.async {

			Timer.scheduledTimer(withTimeInterval: 15, repeats: true) { timer in

				os_log(.debug, log: self.log, "Checking status of OPML import...")

				self.caller.retrieveOPMLImportResult(importID: opmlImportResultID) { result in
					switch result {
					case .success(let importResult):
						if let result = importResult, result.complete {
							os_log(.debug, log: self.log, "Checking status of OPML import successfully completed.")
							timer.invalidate()
							self.opmlImportInProgress = false
							DispatchQueue.main.async {
								completion(.success(()))
							}
						}
					case .failure(let error):
						os_log(.debug, log: self.log, "Import OPML check failed.")
						timer.invalidate()
						self.opmlImportInProgress = false
						DispatchQueue.main.async {
							completion(.failure(error))
						}
					}
				}

			}

		}

	}

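	// Reconciles local folders with Feedbin tags: deletes folders that no longer have a matching tag
	// and creates folders for new tags.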
	func syncFolders(_ account: Account, _ tags: [FeedbinTag]?) {

		guard let tags = tags else { return }

		os_log(.debug, log: log, "Syncing folders with %ld tags.", tags.count)

		let tagNames = tags.map { $0.name }

		// Delete any folders not at Feedbin
		if let folders = account.folders {
			folders.forEach { folder in
				if !tagNames.contains(folder.name ?? "") {
					DispatchQueue.main.sync {
						for feed in folder.topLevelFeeds {
							account.addFeed(feed)
							clearFolderRelationship(for: feed, withFolderName: folder.name ?? "")
						}
						account.deleteFolder(folder)
					}
				}
			}
		}

		let folderNames: [String] = {
			if let folders = account.folders {
				return folders.map { $0.name ?? "" }
			} else {
				return [String]()
			}
		}()

		// Make any folders Feedbin has, but we don't
		tagNames.forEach { tagName in
			if !folderNames.contains(tagName) {
				DispatchQueue.main.sync {
					_ = account.ensureFolder(with: tagName)
				}
			}
		}

	}

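	// Retrieves subscriptions, taggings, and icons from Feedbin and syncs them into the account in one batch update.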
	func refreshFeeds(_ account: Account, completion: @escaping (Result<Void, Error>) -> Void) {

		caller.retrieveSubscriptions { result in
			switch result {
			case .success(let subscriptions):

				self.refreshProgress.completeTask()
				self.caller.retrieveTaggings { result in
					switch result {
					case .success(let taggings):

						self.refreshProgress.completeTask()
						self.caller.retrieveIcons { result in
							switch result {
							case .success(let icons):

								BatchUpdate.shared.perform {
									self.syncFeeds(account, subscriptions)
									self.syncTaggings(account, taggings)
									self.syncFavicons(account, icons)
								}

								self.refreshProgress.completeTask()
								completion(.success(()))

							case .failure(let error):
								completion(.failure(error))
							}
						}

					case .failure(let error):
						completion(.failure(error))
					}
				}

			case .failure(let error):
				completion(.failure(error))
			}
		}

	}

	func syncFeeds(_ account: Account, _ subscriptions: [FeedbinSubscription]?) {

		guard let subscriptions = subscriptions else { return }

		os_log(.debug, log: log, "Syncing feeds with %ld subscriptions.", subscriptions.count)

		let subFeedIds = subscriptions.map { String($0.feedID) }

		// Remove any feeds that are no longer in the subscriptions
		if let folders = account.folders {
			for folder in folders {
				for feed in folder.topLevelFeeds {
					if !subFeedIds.contains(feed.feedID) {
						DispatchQueue.main.sync {
							folder.removeFeed(feed)
						}
					}
				}
			}
		}

		for feed in account.topLevelFeeds {
			if !subFeedIds.contains(feed.feedID) {
				DispatchQueue.main.sync {
					account.removeFeed(feed)
				}
			}
		}

		// Add any feeds we don't have and update any we do
		subscriptions.forEach { subscription in

			let subFeedId = String(subscription.feedID)

			DispatchQueue.main.sync {
				if let feed = account.idToFeedDictionary[subFeedId] {
					feed.name = subscription.name
					feed.homePageURL = subscription.homePageURL
				} else {
					let feed = account.createFeed(with: subscription.name, url: subscription.url, feedID: subFeedId, homePageURL: subscription.homePageURL)
					feed.subscriptionID = String(subscription.subscriptionID)
					account.addFeed(feed)
				}
			}

		}

	}

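	// Reconciles folder membership with Feedbin taggings, moving feeds in and out of folders as needed.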
	func syncTaggings(_ account: Account, _ taggings: [FeedbinTagging]?) {

		guard let taggings = taggings else { return }

		os_log(.debug, log: log, "Syncing taggings with %ld taggings.", taggings.count)

		// Set up some structures to make syncing easier
		let folderDict: [String: Folder] = {
			if let folders = account.folders {
				return Dictionary(uniqueKeysWithValues: folders.map { ($0.name ?? "", $0) } )
			} else {
				return [String: Folder]()
			}
		}()

		let taggingsDict = taggings.reduce([String: [FeedbinTagging]]()) { (dict, tagging) in
			var taggedFeeds = dict
			if var taggedFeed = taggedFeeds[tagging.name] {
				taggedFeed.append(tagging)
				taggedFeeds[tagging.name] = taggedFeed
			} else {
				taggedFeeds[tagging.name] = [tagging]
			}
			return taggedFeeds
		}

		// Sync the folders
		for (folderName, groupedTaggings) in taggingsDict {

			guard let folder = folderDict[folderName] else { return }

			let taggingFeedIDs = groupedTaggings.map { String($0.feedID) }

			// Move any feeds not in the folder to the account
			for feed in folder.topLevelFeeds {
				if !taggingFeedIDs.contains(feed.feedID) {
					DispatchQueue.main.sync {
						folder.removeFeed(feed)
						clearFolderRelationship(for: feed, withFolderName: folder.name ?? "")
						account.addFeed(feed)
					}
				}
			}

			// Add any feeds not in the folder
			let folderFeedIds = folder.topLevelFeeds.map { $0.feedID }

			for tagging in groupedTaggings {
				let taggingFeedID = String(tagging.feedID)
				if !folderFeedIds.contains(taggingFeedID) {
					guard let feed = account.idToFeedDictionary[taggingFeedID] else {
						continue
					}
					DispatchQueue.main.sync {
						saveFolderRelationship(for: feed, withFolderName: folderName, id: String(tagging.taggingID))
						folder.addFeed(feed)
					}
				}
			}

		}

		let taggedFeedIDs = Set(taggings.map { String($0.feedID) })

		// Remove all feeds from the account container that have a tag
		DispatchQueue.main.sync {
			for feed in account.topLevelFeeds {
				if taggedFeedIDs.contains(feed.feedID) {
					account.removeFeed(feed)
				}
			}
		}

	}

	func syncFavicons(_ account: Account, _ icons: [FeedbinIcon]?) {

		guard let icons = icons else { return }

		os_log(.debug, log: log, "Syncing favicons with %ld icons.", icons.count)

		let iconDict = Dictionary(uniqueKeysWithValues: icons.map { ($0.host, $0.url) } )

		for feed in account.flattenedFeeds() {
			for (key, value) in iconDict {
				if feed.homePageURL?.contains(key) ?? false {
					DispatchQueue.main.sync {
						feed.faviconURL = value
					}
					break
				}
			}
		}

	}

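	// Sends a batch of status changes using the supplied API call, chunking article IDs into groups of 1000.
	// Successfully sent statuses are deleted from the sync database; failed ones are reset for a later retry.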
	func sendArticleStatuses(_ statuses: [SyncStatus],
							 apiCall: ([Int], @escaping (Result<Void, Error>) -> Void) -> Void,
							 completion: @escaping (() -> Void)) {

		guard !statuses.isEmpty else {
			completion()
			return
		}

		let group = DispatchGroup()

		let articleIDs = statuses.compactMap { Int($0.articleID) }
		let articleIDGroups = articleIDs.chunked(into: 1000)
		for articleIDGroup in articleIDGroups {

			group.enter()
			apiCall(articleIDGroup) { result in
				switch result {
				case .success:
					self.database.deleteSelectedForProcessing(articleIDGroup.map { String($0) } )
					group.leave()
				case .failure(let error):
					os_log(.error, log: self.log, "Article status sync call failed: %@.", error.localizedDescription)
					self.database.resetSelectedForProcessing(articleIDGroup.map { String($0) } )
					group.leave()
				}
			}

		}

		group.notify(queue: DispatchQueue.main) {
			completion()
		}

	}

	func clearFolderRelationship(for feed: Feed, withFolderName folderName: String) {
		if var folderRelationship = feed.folderRelationship {
			folderRelationship[folderName] = nil
			feed.folderRelationship = folderRelationship
		}
	}

	func saveFolderRelationship(for feed: Feed, withFolderName folderName: String, id: String) {
		if var folderRelationship = feed.folderRelationship {
			folderRelationship[folderName] = id
			feed.folderRelationship = folderRelationship
		} else {
			feed.folderRelationship = [folderName: id]
		}
	}

	func decideBestFeedChoice(account: Account, url: String, name: String?, container: Container, choices: [FeedbinSubscriptionChoice], completion: @escaping (Result<Feed, Error>) -> Void) {

		let feedSpecifiers: [FeedSpecifier] = choices.map { choice in
			let source = url == choice.url ? FeedSpecifier.Source.UserEntered : FeedSpecifier.Source.HTMLLink
			let specifier = FeedSpecifier(title: choice.name, urlString: choice.url, source: source)
			return specifier
		}

		if let bestSpecifier = FeedSpecifier.bestFeed(in: Set(feedSpecifiers)) {
			if let bestSubscription = choices.filter({ bestSpecifier.urlString == $0.url }).first {
				createFeed(for: account, url: bestSubscription.url, name: name, container: container, completion: completion)
			} else {
				DispatchQueue.main.async {
					completion(.failure(FeedbinAccountDelegateError.invalidParameter))
				}
			}
		} else {
			DispatchQueue.main.async {
				completion(.failure(FeedbinAccountDelegateError.invalidParameter))
			}
		}

	}

	func createFeed(account: Account, subscription sub: FeedbinSubscription, name: String?, container: Container, completion: @escaping (Result<Feed, Error>) -> Void) {

		DispatchQueue.main.async {

			let feed = account.createFeed(with: sub.name, url: sub.url, feedID: String(sub.feedID), homePageURL: sub.homePageURL)
			feed.subscriptionID = String(sub.subscriptionID)

			container.addFeed(feed) { result in
				switch result {
				case .success:
					if let name = name {
						account.renameFeed(feed, to: name) { result in
							switch result {
							case .success:
								self.initialFeedDownload(account: account, feed: feed, completion: completion)
							case .failure(let error):
								completion(.failure(error))
							}
						}
					} else {
						self.initialFeedDownload(account: account, feed: feed, completion: completion)
					}
				case .failure(let error):
					completion(.failure(error))
				}
			}

		}

	}

	func initialFeedDownload(account: Account, feed: Feed, completion: @escaping (Result<Feed, Error>) -> Void) {

		// Download the initial articles
		self.caller.retrieveEntries(feedID: feed.feedID) { result in

			switch result {
			case .success(let (entries, page)):

				self.processEntries(account: account, entries: entries) {
					self.refreshArticles(account, page: page) {
						self.refreshArticleStatus(for: account) {
							self.refreshMissingArticles(account) {
								DispatchQueue.main.async {
									completion(.success(feed))
								}
							}
						}
					}
				}

			case .failure(let error):
				completion(.failure(error))
			}

		}

	}

	func refreshArticles(_ account: Account, completion: @escaping (() -> Void)) {

		os_log(.debug, log: log, "Refreshing articles...")

		caller.retrieveEntries() { result in

			switch result {
			case .success(let (entries, page, lastPageNumber)):

				if let last = lastPageNumber {
					self.refreshProgress.addToNumberOfTasksAndRemaining(last - 1)
				}

				self.processEntries(account: account, entries: entries) {

					self.refreshProgress.completeTask()
					self.refreshArticles(account, page: page) {
						os_log(.debug, log: self.log, "Done refreshing articles.")
						completion()
					}

				}

			case .failure(let error):
				os_log(.error, log: self.log, "Refresh articles failed: %@.", error.localizedDescription)
				completion()
			}

		}

	}

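	// Fetches articles that have a status locally but no article record yet, in chunks of 100 IDs.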
	func refreshMissingArticles(_ account: Account, completion: @escaping (() -> Void)) {

		os_log(.debug, log: log, "Refreshing missing articles...")
		let articleIDs = Array(account.fetchArticleIDsForStatusesWithoutArticles())

		let group = DispatchGroup()

		let chunkedArticleIDs = articleIDs.chunked(into: 100)

		for chunk in chunkedArticleIDs {

			group.enter()
			caller.retrieveEntries(articleIDs: chunk) { result in

				switch result {
				case .success(let entries):

					self.processEntries(account: account, entries: entries) {
						group.leave()
					}

				case .failure(let error):
					os_log(.error, log: self.log, "Refresh missing articles failed: %@.", error.localizedDescription)
					group.leave()
				}

			}

		}

		group.notify(queue: DispatchQueue.main) {
			self.refreshProgress.completeTask()
			os_log(.debug, log: self.log, "Done refreshing missing articles.")
			completion()
		}

	}

	func refreshArticles(_ account: Account, page: String?, completion: @escaping (() -> Void)) {

		guard let page = page else {
			completion()
			return
		}

		caller.retrieveEntries(page: page) { result in

			switch result {
			case .success(let (entries, nextPage)):

				self.processEntries(account: account, entries: entries) {
					self.refreshProgress.completeTask()
					self.refreshArticles(account, page: nextPage, completion: completion)
				}

			case .failure(let error):
				os_log(.error, log: self.log, "Refresh articles for additional pages failed: %@.", error.localizedDescription)
				completion()
			}

		}

	}

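	// Maps Feedbin entries to ParsedItems, groups them by feed, and updates each matching feed in the account.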
	func processEntries(account: Account, entries: [FeedbinEntry]?, completion: @escaping (() -> Void)) {

		let parsedItems = mapEntriesToParsedItems(entries: entries)
		let parsedMap = Dictionary(grouping: parsedItems, by: { item in item.feedURL } )

		let group = DispatchGroup()

		for (feedID, mapItems) in parsedMap {

			group.enter()

			if let feed = account.idToFeedDictionary[feedID] {
				DispatchQueue.main.async {
					account.update(feed, parsedItems: Set(mapItems), defaultRead: true) {
						group.leave()
					}
				}
			} else {
				group.leave()
			}

		}

		group.notify(queue: DispatchQueue.main) {
			completion()
		}

	}

	func mapEntriesToParsedItems(entries: [FeedbinEntry]?) -> Set<ParsedItem> {

		guard let entries = entries else {
			return Set<ParsedItem>()
		}

		let parsedItems: [ParsedItem] = entries.map { entry in
			let authors = Set([ParsedAuthor(name: entry.authorName, url: entry.jsonFeed?.jsonFeedAuthor?.url, avatarURL: entry.jsonFeed?.jsonFeedAuthor?.avatarURL, emailAddress: nil)])
			return ParsedItem(syncServiceID: String(entry.articleID), uniqueID: String(entry.articleID), feedURL: String(entry.feedID), url: nil, externalURL: entry.url, title: entry.title, contentHTML: entry.contentHTML, contentText: nil, summary: entry.summary, imageURL: nil, bannerImageURL: nil, datePublished: entry.parseDatePublished(), dateModified: nil, authors: authors, tags: nil, attachments: nil)
		}

		return Set(parsedItems)

	}

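	// Diffs Feedbin's unread article IDs against local unread state and updates statuses in both directions,
	// recording statuses for articles that haven't been downloaded yet. syncArticleStarredState does the same for starred.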
	func syncArticleReadState(account: Account, articleIDs: [Int]?) {

		guard let articleIDs = articleIDs else {
			return
		}

		let feedbinUnreadArticleIDs = Set(articleIDs.map { String($0) } )
		let currentUnreadArticleIDs = account.fetchUnreadArticleIDs()

		// Mark articles as unread
		let deltaUnreadArticleIDs = feedbinUnreadArticleIDs.subtracting(currentUnreadArticleIDs)
		let markUnreadArticles = account.fetchArticles(forArticleIDs: deltaUnreadArticleIDs)
		DispatchQueue.main.async {
			_ = account.update(markUnreadArticles, statusKey: .read, flag: false)
		}

		// Save any unread statuses for articles we haven't yet received
		let markUnreadArticleIDs = Set(markUnreadArticles.map { $0.articleID })
		let missingUnreadArticleIDs = deltaUnreadArticleIDs.subtracting(markUnreadArticleIDs)
		if !missingUnreadArticleIDs.isEmpty {
			DispatchQueue.main.async {
				account.ensureStatuses(missingUnreadArticleIDs, .read, false)
			}
		}

		// Mark articles as read
		let deltaReadArticleIDs = currentUnreadArticleIDs.subtracting(feedbinUnreadArticleIDs)
		let markReadArticles = account.fetchArticles(forArticleIDs: deltaReadArticleIDs)
		DispatchQueue.main.async {
			_ = account.update(markReadArticles, statusKey: .read, flag: true)
		}

		// Save any read statuses for articles we haven't yet received
		let markReadArticleIDs = Set(markReadArticles.map { $0.articleID })
		let missingReadArticleIDs = deltaReadArticleIDs.subtracting(markReadArticleIDs)
		if !missingReadArticleIDs.isEmpty {
			DispatchQueue.main.async {
				account.ensureStatuses(missingReadArticleIDs, .read, true)
			}
		}

	}

	func syncArticleStarredState(account: Account, articleIDs: [Int]?) {

		guard let articleIDs = articleIDs else {
			return
		}

		let feedbinStarredArticleIDs = Set(articleIDs.map { String($0) } )
		let currentStarredArticleIDs = account.fetchStarredArticleIDs()

		// Mark articles as starred
		let deltaStarredArticleIDs = feedbinStarredArticleIDs.subtracting(currentStarredArticleIDs)
		let markStarredArticles = account.fetchArticles(forArticleIDs: deltaStarredArticleIDs)
		DispatchQueue.main.async {
			_ = account.update(markStarredArticles, statusKey: .starred, flag: true)
		}

		// Save any starred statuses for articles we haven't yet received
		let markStarredArticleIDs = Set(markStarredArticles.map { $0.articleID })
		let missingStarredArticleIDs = deltaStarredArticleIDs.subtracting(markStarredArticleIDs)
		if !missingStarredArticleIDs.isEmpty {
			DispatchQueue.main.async {
				account.ensureStatuses(missingStarredArticleIDs, .starred, true)
			}
		}

		// Mark articles as unstarred
		let deltaUnstarredArticleIDs = currentStarredArticleIDs.subtracting(feedbinStarredArticleIDs)
		let markUnstarredArticles = account.fetchArticles(forArticleIDs: deltaUnstarredArticleIDs)
		DispatchQueue.main.async {
			_ = account.update(markUnstarredArticles, statusKey: .starred, flag: false)
		}

		// Save any unstarred statuses for articles we haven't yet received
		let markUnstarredArticleIDs = Set(markUnstarredArticles.map { $0.articleID })
		let missingUnstarredArticleIDs = deltaUnstarredArticleIDs.subtracting(markUnstarredArticleIDs)
		if !missingUnstarredArticleIDs.isEmpty {
			DispatchQueue.main.async {
				account.ensureStatuses(missingUnstarredArticleIDs, .starred, false)
			}
		}

	}

}