//
//  FeedlyAccountDelegate.swift
//  Account
//
//  Created by Kiel Gillard on 3/9/19.
//  Copyright © 2019 Ranchero Software, LLC. All rights reserved.
//

import Articles
import RSCore
import RSParser
import RSWeb
import SyncDatabase
import os.log

final class FeedlyAccountDelegate: AccountDelegate {

	/// Feedly has a sandbox API and a production API.
	/// Clients refer to this property when they need to know which environment to point to.
	/// The value of this property must match any `OAuthAuthorizationClient` used.
	/// Currently this always returns the cloud API, but it is left stubbed out for now.
	static var environment: FeedlyAPICaller.API {
		return .cloud
	}
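
	// Illustrative sketch only: the doc comment above implies a second, sandbox case.
	// A hypothetical sandbox build might select it as below; `.sandbox` and the
	// `FEEDLY_SANDBOX` compilation condition are assumptions in this sketch, not
	// something defined in this file.
	//
	//     static var environment: FeedlyAPICaller.API {
	//         #if FEEDLY_SANDBOX
	//         return .sandbox
	//         #else
	//         return .cloud
	//         #endif
	//     }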

	// TODO: Kiel, if you decide not to support OPML import you will have to disallow it in the behaviors
	// See https://developer.feedly.com/v3/opml/
	var behaviors: AccountBehaviors = [.disallowFeedInRootFolder]

	let isOPMLImportSupported = false

	var isOPMLImportInProgress = false

	var server: String? {
		return caller.server
	}

	var credentials: Credentials? {
		didSet {
			// https://developer.feedly.com/v3/developer/
			if let devToken = ProcessInfo.processInfo.environment["FEEDLY_DEV_ACCESS_TOKEN"], !devToken.isEmpty {
				caller.credentials = Credentials(type: .oauthAccessToken, username: "Developer", secret: devToken)
			} else {
				caller.credentials = credentials
			}
		}
	}
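
	// To exercise the developer-token path above during local development, set the
	// `FEEDLY_DEV_ACCESS_TOKEN` environment variable for the process (for example, in
	// the Xcode scheme's Run > Arguments > Environment Variables). The token value
	// itself comes from https://developer.feedly.com/v3/developer/.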

	let oauthAuthorizationClient: OAuthAuthorizationClient

	var accountMetadata: AccountMetadata?

	var refreshProgress = DownloadProgress(numberOfTasks: 0)

	internal let caller: FeedlyAPICaller

	private let log = OSLog(subsystem: Bundle.main.bundleIdentifier!, category: "Feedly")

	private let database: SyncDatabase

	private weak var currentSyncAllOperation: FeedlySyncAllOperation?

	private let operationQueue: OperationQueue

	init(dataFolder: String, transport: Transport?, api: FeedlyAPICaller.API) {
		self.operationQueue = OperationQueue()
		// Many operations have their own operation queues, such as the sync all operation.
		// Making this a serial queue at this higher level of abstraction means we can ensure,
		// for example, a `FeedlyRefreshAccessTokenOperation` occurs before a `FeedlySyncAllOperation`,
		// improving our ability to debug, reason about and predict the behaviour of the code.
		self.operationQueue.maxConcurrentOperationCount = 1

		if let transport = transport {
			self.caller = FeedlyAPICaller(transport: transport, api: api)
		} else {
			let sessionConfiguration = URLSessionConfiguration.default
			sessionConfiguration.requestCachePolicy = .reloadIgnoringLocalCacheData
			sessionConfiguration.timeoutIntervalForRequest = 60.0
			sessionConfiguration.httpShouldSetCookies = false
			sessionConfiguration.httpCookieAcceptPolicy = .never
			sessionConfiguration.httpMaximumConnectionsPerHost = 1
			sessionConfiguration.httpCookieStorage = nil
			sessionConfiguration.urlCache = nil

			if let userAgentHeaders = UserAgent.headers() {
				sessionConfiguration.httpAdditionalHeaders = userAgentHeaders
			}

			let session = URLSession(configuration: sessionConfiguration)
			self.caller = FeedlyAPICaller(transport: session, api: api)
		}

		let databaseFilePath = (dataFolder as NSString).appendingPathComponent("Sync.sqlite3")
		self.database = SyncDatabase(databaseFilePath: databaseFilePath)
		self.oauthAuthorizationClient = api.oauthAuthorizationClient
	}
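
	// A minimal construction sketch (not from the original file). `dataFolder` below is a
	// placeholder path; passing `nil` for the transport makes the delegate build its own
	// URLSession, as implemented above.
	//
	//     let delegate = FeedlyAccountDelegate(dataFolder: dataFolder,
	//                                          transport: nil,
	//                                          api: FeedlyAccountDelegate.environment)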

	// MARK: Account API

	func refreshAll(for account: Account, completion: @escaping (Result<Void, Error>) -> Void) {
		assert(Thread.isMainThread)

		guard currentSyncAllOperation == nil else {
			os_log(.debug, log: log, "Ignoring refreshAll: Feedly sync already in progress.")
			completion(.success(()))
			return
		}

		guard let credentials = credentials else {
			os_log(.debug, log: log, "Ignoring refreshAll: Feedly account has no credentials.")
			completion(.failure(FeedlyAccountDelegateError.notLoggedIn))
			return
		}

		let log = self.log

		let operation = FeedlySyncAllOperation(account: account, credentials: credentials, caller: caller, database: database, lastSuccessfulFetchStartDate: accountMetadata?.lastArticleFetchStartTime, downloadProgress: refreshProgress, log: log)

		operation.downloadProgress = refreshProgress

		let date = Date()
		operation.syncCompletionHandler = { [weak self] result in
			if case .success = result {
				self?.accountMetadata?.lastArticleFetchStartTime = date
				self?.accountMetadata?.lastArticleFetchEndTime = Date()
			}

			os_log(.debug, log: log, "Sync took %{public}.3f seconds", -date.timeIntervalSinceNow)
			completion(result)
		}
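
		// `currentSyncAllOperation` is declared `weak`, so it clears itself once the queue
		// releases the finished operation; the guard at the top of this method uses it to
		// coalesce overlapping refresh requests.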
		currentSyncAllOperation = operation

		operationQueue.addOperation(operation)
	}

	func sendArticleStatus(for account: Account, completion: @escaping ((Result<Void, Error>) -> Void)) {
		// Ensure remote articles have the same status as they do locally.
		let send = FeedlySendArticleStatusesOperation(database: database, service: caller, log: log)
		send.completionBlock = {
			DispatchQueue.main.async {
				completion(.success(()))
			}
		}
		operationQueue.addOperation(send)
	}

	/// Attempts to ensure local articles have the same status as they do remotely.
	/// So if the user is using another client roughly simultaneously with this app,
	/// this app does its part to ensure the articles have a consistent status between both.
	///
	/// - Parameter account: The account whose articles have a remote status.
	/// - Parameter completion: Called on the main queue.
	func refreshArticleStatus(for account: Account, completion: @escaping ((Result<Void, Error>) -> Void)) {
		guard let credentials = credentials else {
			return completion(.success(()))
		}

		let group = DispatchGroup()

		let ingestUnread = FeedlyIngestUnreadArticleIdsOperation(account: account, credentials: credentials, service: caller, newerThan: nil, log: log)

		group.enter()
		ingestUnread.completionBlock = {
			group.leave()
		}

		let ingestStarred = FeedlyIngestStarredArticleIdsOperation(account: account, credentials: credentials, service: caller, newerThan: nil, log: log)

		group.enter()
		ingestStarred.completionBlock = {
			group.leave()
		}

		group.notify(queue: .main) {
			completion(.success(()))
		}
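
		// The delegate's operation queue is serial, so the unread ingest finishes before the
		// starred ingest starts; the dispatch group above fires the completion once both
		// completion blocks have run.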
		operationQueue.addOperations([ingestUnread, ingestStarred], waitUntilFinished: false)
	}

	func importOPML(for account: Account, opmlFile: URL, completion: @escaping (Result<Void, Error>) -> Void) {
		let data: Data

		do {
			data = try Data(contentsOf: opmlFile)
		} catch {
			completion(.failure(error))
			return
		}

		os_log(.debug, log: log, "Begin importing OPML...")
		isOPMLImportInProgress = true
		refreshProgress.addToNumberOfTasksAndRemaining(1)

		caller.importOpml(data) { result in
			switch result {
			case .success:
				os_log(.debug, log: self.log, "Import OPML done.")
				self.refreshProgress.completeTask()
				self.isOPMLImportInProgress = false
				DispatchQueue.main.async {
					completion(.success(()))
				}
			case .failure(let error):
				os_log(.debug, log: self.log, "Import OPML failed.")
				self.refreshProgress.completeTask()
				self.isOPMLImportInProgress = false
				DispatchQueue.main.async {
					let wrappedError = AccountError.wrappedError(error: error, account: account)
					completion(.failure(wrappedError))
				}
			}
		}
	}

	func addFolder(for account: Account, name: String, completion: @escaping (Result<Folder, Error>) -> Void) {

		let progress = refreshProgress
		progress.addToNumberOfTasksAndRemaining(1)

		caller.createCollection(named: name) { result in
			progress.completeTask()

			switch result {
			case .success(let collection):
				if let folder = account.ensureFolder(with: collection.label) {
					folder.externalID = collection.id
					completion(.success(folder))
				} else {
					// Is the name empty? Or one of the global resource names?
					completion(.failure(FeedlyAccountDelegateError.unableToAddFolder(name)))
				}
			case .failure(let error):
				completion(.failure(error))
			}
		}
	}

	func renameFolder(for account: Account, with folder: Folder, to name: String, completion: @escaping (Result<Void, Error>) -> Void) {
		guard let id = folder.externalID else {
			return DispatchQueue.main.async {
				completion(.failure(FeedlyAccountDelegateError.unableToRenameFolder(folder.nameForDisplay, name)))
			}
		}

		let nameBefore = folder.name

		caller.renameCollection(with: id, to: name) { result in
			switch result {
			case .success(let collection):
				folder.name = collection.label
				completion(.success(()))
			case .failure(let error):
				folder.name = nameBefore
				completion(.failure(error))
			}
		}
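
		// Optimistically apply the new name right away; the failure case above restores
		// `nameBefore` if Feedly rejects the rename.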
		folder.name = name
	}

	func removeFolder(for account: Account, with folder: Folder, completion: @escaping (Result<Void, Error>) -> Void) {
		guard let id = folder.externalID else {
			return DispatchQueue.main.async {
				completion(.failure(FeedlyAccountDelegateError.unableToRemoveFolder(folder.nameForDisplay)))
			}
		}

		let progress = refreshProgress
		progress.addToNumberOfTasksAndRemaining(1)

		caller.deleteCollection(with: id) { result in
			progress.completeTask()

			switch result {
			case .success:
				account.removeFolder(folder)
				completion(.success(()))
			case .failure(let error):
				completion(.failure(error))
			}
		}
	}

	func createWebFeed(for account: Account, url: String, name: String?, container: Container, completion: @escaping (Result<WebFeed, Error>) -> Void) {

		do {
			guard let credentials = credentials else {
				throw FeedlyAccountDelegateError.notLoggedIn
			}

			let addNewFeed = try FeedlyAddNewFeedOperation(account: account,
				credentials: credentials,
				url: url,
				feedName: name,
				searchService: caller,
				addToCollectionService: caller,
				syncUnreadIdsService: caller,
				getStreamContentsService: caller,
				container: container,
				progress: refreshProgress,
				log: log)

			addNewFeed.addCompletionHandler = { result in
				completion(result)
			}

			operationQueue.addOperation(addNewFeed)

		} catch {
			DispatchQueue.main.async {
				completion(.failure(error))
			}
		}
	}

	func renameWebFeed(for account: Account, with feed: WebFeed, to name: String, completion: @escaping (Result<Void, Error>) -> Void) {
		let folderCollectionIds = account.folders?.filter { $0.has(feed) }.compactMap { $0.externalID }
		guard let collectionIds = folderCollectionIds, let collectionId = collectionIds.first else {
			completion(.failure(FeedlyAccountDelegateError.unableToRenameFeed(feed.nameForDisplay, name)))
			return
		}

		let feedId = FeedlyFeedResourceId(id: feed.webFeedID)
		let editedNameBefore = feed.editedName

		// Adding an existing feed updates it.
		// Updating feed name in one folder/collection updates it for all folders/collections.
		caller.addFeed(with: feedId, title: name, toCollectionWith: collectionId) { result in
			switch result {
			case .success:
				completion(.success(()))

			case .failure(let error):
				feed.editedName = editedNameBefore
				completion(.failure(error))
			}
		}

		// optimistically set the name
		feed.editedName = name
	}

	func addWebFeed(for account: Account, with feed: WebFeed, to container: Container, completion: @escaping (Result<Void, Error>) -> Void) {

		do {
			guard let credentials = credentials else {
				throw FeedlyAccountDelegateError.notLoggedIn
			}

			let resource = FeedlyFeedResourceId(id: feed.webFeedID)
			let addExistingFeed = try FeedlyAddExistingFeedOperation(account: account,
				credentials: credentials,
				resource: resource,
				service: caller,
				container: container,
				progress: refreshProgress,
				log: log)

			addExistingFeed.addCompletionHandler = { result in
				completion(result)
			}

			operationQueue.addOperation(addExistingFeed)

		} catch {
			DispatchQueue.main.async {
				completion(.failure(error))
			}
		}
	}

	func removeWebFeed(for account: Account, with feed: WebFeed, from container: Container, completion: @escaping (Result<Void, Error>) -> Void) {
		guard let folder = container as? Folder, let collectionId = folder.externalID else {
			return DispatchQueue.main.async {
				completion(.failure(FeedlyAccountDelegateError.unableToRemoveFeed(feed)))
			}
		}

		caller.removeFeed(feed.webFeedID, fromCollectionWith: collectionId) { result in
			switch result {
			case .success:
				completion(.success(()))
			case .failure(let error):
				folder.addWebFeed(feed)
				completion(.failure(error))
			}
		}
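
		// Optimistically remove the feed locally; the failure case above adds it back
		// if the Feedly call does not succeed.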
		folder.removeWebFeed(feed)
	}

	func moveWebFeed(for account: Account, with feed: WebFeed, from: Container, to: Container, completion: @escaping (Result<Void, Error>) -> Void) {
		guard let from = from as? Folder, let to = to as? Folder else {
			return DispatchQueue.main.async {
				completion(.failure(FeedlyAccountDelegateError.addFeedChooseFolder))
			}
		}

		addWebFeed(for: account, with: feed, to: to) { [weak self] addResult in
			switch addResult {
			// now that we have added the feed, remove it from the other collection
			case .success:
				self?.removeWebFeed(for: account, with: feed, from: from) { removeResult in
					switch removeResult {
					case .success:
						completion(.success(()))
					case .failure:
						from.addWebFeed(feed)
						completion(.failure(FeedlyAccountDelegateError.unableToMoveFeedBetweenFolders(feed, from, to)))
					}
				}
			case .failure(let error):
				from.addWebFeed(feed)
				to.removeWebFeed(feed)
				completion(.failure(error))
			}
		}

		// optimistically move the feed, undoing as appropriate to the failure
		from.removeWebFeed(feed)
		to.addWebFeed(feed)
	}

	func restoreWebFeed(for account: Account, feed: WebFeed, container: Container, completion: @escaping (Result<Void, Error>) -> Void) {
		if let existingFeed = account.existingWebFeed(withURL: feed.url) {
			account.addWebFeed(existingFeed, to: container) { result in
				switch result {
				case .success:
					completion(.success(()))
				case .failure(let error):
					completion(.failure(error))
				}
			}
		} else {
			createWebFeed(for: account, url: feed.url, name: feed.editedName, container: container) { result in
				switch result {
				case .success:
					completion(.success(()))
				case .failure(let error):
					completion(.failure(error))
				}
			}
		}
	}

	func restoreFolder(for account: Account, folder: Folder, completion: @escaping (Result<Void, Error>) -> Void) {
		let group = DispatchGroup()

		for feed in folder.topLevelWebFeeds {

			folder.topLevelWebFeeds.remove(feed)

			group.enter()
			restoreWebFeed(for: account, feed: feed, container: folder) { result in
				group.leave()
				switch result {
				case .success:
					break
				case .failure(let error):
					os_log(.error, log: self.log, "Restore folder feed error: %@.", error.localizedDescription)
				}
			}
		}

		group.notify(queue: .main) {
			account.addFolder(folder)
			completion(.success(()))
		}
	}

	func markArticles(for account: Account, articles: Set<Article>, statusKey: ArticleStatus.Key, flag: Bool) -> Set<Article>? {

		let syncStatuses = articles.map { article in
			return SyncStatus(articleID: article.articleID, key: statusKey, flag: flag)
		}

		database.insertStatuses(syncStatuses)
		os_log(.debug, log: log, "Marking %@ as %@.", articles.map { $0.title }, syncStatuses)
		database.selectPendingCount { result in
			if let count = try? result.get(), count > 100 {
				self.sendArticleStatus(for: account) { _ in }
			}
		}

		return try? account.update(articles, statusKey: statusKey, flag: flag)
	}

	func accountDidInitialize(_ account: Account) {
		credentials = try? account.retrieveCredentials(type: .oauthAccessToken)

		let refreshAccessToken = FeedlyRefreshAccessTokenOperation(account: account, service: self, oauthClient: oauthAuthorizationClient, log: log)
		operationQueue.addOperation(refreshAccessToken)
	}

	func accountWillBeDeleted(_ account: Account) {
		let logout = FeedlyLogoutOperation(account: account, service: caller, log: log)
		// Dispatch on the main queue because the lifetime of the account delegate is uncertain.
		OperationQueue.main.addOperation(logout)
	}

	static func validateCredentials(transport: Transport, credentials: Credentials, endpoint: URL?, completion: @escaping (Result<Credentials?, Error>) -> Void) {
		assertionFailure("An `account` instance should enqueue an \(FeedlyRefreshAccessTokenOperation.self) instead.")
		completion(.success(credentials))
	}

	// MARK: Suspend and Resume (for iOS)

	/// Suspend all network activity
	func suspendNetwork() {
		caller.suspend()
		operationQueue.cancelAllOperations()
	}

	/// Suspend the SQLite database
	func suspendDatabase() {
		database.suspend()
	}

	/// Resume the SQLite database and become ready to issue network requests again.
	func resume() {
		database.resume()
		caller.resume()
	}
}
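
// A rough usage sketch (not part of the original file): an iOS host app would call the
// suspend hooks above when moving to the background and `resume()` when returning to the
// foreground. The `delegate` constant below is a hypothetical reference to this instance.
//
//     // Entering the background:
//     delegate.suspendNetwork()
//     delegate.suspendDatabase()
//
//     // Returning to the foreground:
//     delegate.resume()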