2019-09-28 06:44:58 +02:00
|
|
|
//
|
|
|
|
// FeedWranglerAccountDelegate.swift
|
|
|
|
// Account
|
|
|
|
//
|
|
|
|
// Created by Jonathan Bennett on 2019-08-29.
|
|
|
|
// Copyright © 2019 Ranchero Software, LLC. All rights reserved.
|
|
|
|
//
|
|
|
|
|
|
|
|
import Articles
|
|
|
|
import RSCore
|
|
|
|
import RSParser
|
|
|
|
import RSWeb
|
|
|
|
import SyncDatabase
|
|
|
|
import os.log
|
|
|
|
|
|
|
|
final class FeedWranglerAccountDelegate: AccountDelegate {

	var behaviors: AccountBehaviors = []

	var isOPMLImportInProgress = false

	var server: String? = FeedWranglerConfig.clientPath

	/// Mirrors any credential change into the API caller so subsequent
	/// requests authenticate with the current token.
	var credentials: Credentials? {
		didSet {
			caller.credentials = credentials
		}
	}

	var accountMetadata: AccountMetadata?
	var refreshProgress = DownloadProgress(numberOfTasks: 0)

	private let caller: FeedWranglerAPICaller
	private let log = OSLog(subsystem: Bundle.main.bundleIdentifier!, category: "Feed Wrangler")
	private let database: SyncDatabase

	/// - Parameters:
	///   - dataFolder: Folder in which the sync database is stored.
	///   - transport: Injected transport (used by tests). When nil, a
	///     cache-free, cookie-free `URLSession` is built for the caller.
	init(dataFolder: String, transport: Transport?) {
		if let transport = transport {
			caller = FeedWranglerAPICaller(transport: transport)
		} else {
			// No caching or cookies: the sync database is the source of truth,
			// and one connection per host keeps request ordering predictable.
			let sessionConfiguration = URLSessionConfiguration.default
			sessionConfiguration.requestCachePolicy = .reloadIgnoringLocalCacheData
			sessionConfiguration.timeoutIntervalForRequest = 60.0
			sessionConfiguration.httpShouldSetCookies = false
			sessionConfiguration.httpCookieAcceptPolicy = .never
			sessionConfiguration.httpMaximumConnectionsPerHost = 1
			sessionConfiguration.httpCookieStorage = nil
			sessionConfiguration.urlCache = nil

			if let userAgentHeaders = UserAgent.headers() {
				sessionConfiguration.httpAdditionalHeaders = userAgentHeaders
			}

			let session = URLSession(configuration: sessionConfiguration)
			caller = FeedWranglerAPICaller(transport: session)
		}

		database = SyncDatabase(databaseFilePath: dataFolder.appending("/DB.sqlite3"))
	}

	/// Best-effort server-side logout before the account goes away;
	/// the result is intentionally ignored.
	func accountWillBeDeleted(_ account: Account) {
		caller.logout { _ in }
	}

	/// Feed Wrangler has no push support; just acknowledge the notification.
	func receiveRemoteNotification(for account: Account, userInfo: [AnyHashable : Any], completion: @escaping () -> Void) {
		completion()
	}

	/// Runs a full refresh as a fixed pipeline of six steps:
	/// credentials → subscriptions → send statuses → refresh statuses →
	/// refresh articles → refresh missing articles.
	///
	/// `completion` is always called on the main queue. On failure, any
	/// outstanding progress is cleared so the UI doesn't show a stuck refresh.
	func refreshAll(for account: Account, completion: @escaping (Result<Void, Error>) -> Void) {
		refreshProgress.addToNumberOfTasksAndRemaining(6)

		self.refreshCredentials(for: account) {
			self.refreshProgress.completeTask()
			self.refreshSubscriptions(for: account) { result in
				self.refreshProgress.completeTask()

				switch result {
				case .success:
					self.sendArticleStatus(for: account) { result in
						self.refreshProgress.completeTask()

						switch result {
						case .success:
							self.refreshArticleStatus(for: account) { result in
								self.refreshProgress.completeTask()

								switch result {
								case .success:
									self.refreshArticles(for: account) { result in
										self.refreshProgress.completeTask()

										switch result {
										case .success:
											self.refreshMissingArticles(for: account) { result in
												self.refreshProgress.completeTask()

												switch result {
												case .success:
													DispatchQueue.main.async {
														completion(.success(()))
													}

												case .failure(let error):
													self.refreshAllDidFail(error, completion: completion)
												}
											}

										case .failure(let error):
											self.refreshAllDidFail(error, completion: completion)
										}
									}

								case .failure(let error):
									self.refreshAllDidFail(error, completion: completion)
								}
							}

						case .failure(let error):
							self.refreshAllDidFail(error, completion: completion)
						}
					}

				case .failure(let error):
					self.refreshAllDidFail(error, completion: completion)
				}
			}
		}
	}

	/// Shared failure path for `refreshAll`: clears remaining progress tasks
	/// (otherwise the progress indicator never finishes after an error) and
	/// reports the error on the main queue, matching the success path.
	private func refreshAllDidFail(_ error: Error, completion: @escaping (Result<Void, Error>) -> Void) {
		refreshProgress.clear()
		DispatchQueue.main.async {
			completion(.failure(error))
		}
	}

	/// Reloads the stored Feed Wrangler token into `credentials`.
	/// Keychain errors are deliberately swallowed (`try?`) — best-effort.
	func refreshCredentials(for account: Account, completion: @escaping (() -> Void)) {
		os_log(.debug, log: log, "Refreshing credentials...")
		// MARK: TODO
		credentials = try? account.retrieveCredentials(type: .feedWranglerToken)
		completion()
	}

	/// Fetches the server's subscription list and reconciles it with the
	/// account's local web feeds via `syncFeeds`.
	func refreshSubscriptions(for account: Account, completion: @escaping ((Result<Void, Error>) -> Void)) {
		os_log(.debug, log: log, "Refreshing subscriptions...")
		caller.retrieveSubscriptions { result in
			switch result {
			case .success(let subscriptions):
				self.syncFeeds(account, subscriptions)
				completion(.success(()))

			case .failure(let error):
				os_log(.debug, log: self.log, "Failed to refresh subscriptions: %@", error.localizedDescription)
				completion(.failure(error))
			}
		}
	}

	/// Pages through the server's feed items, syncing each page into the
	/// account, and recursing until the server returns an empty page.
	func refreshArticles(for account: Account, page: Int = 0, completion: @escaping ((Result<Void, Error>) -> Void)) {
		os_log(.debug, log: log, "Refreshing articles, page: %d...", page)

		caller.retrieveFeedItems(page: page) { result in
			switch result {
			case .success(let items):
				self.syncFeedItems(account, items) {
					if items.isEmpty {
						// An empty page means we've paged past the end.
						completion(.success(()))
					} else {
						self.refreshArticles(for: account, page: (page + 1), completion: completion)
					}
				}

			case .failure(let error):
				completion(.failure(error))
			}
		}
	}

	/// Downloads full content for articles we have statuses for but no
	/// article record, fetching in chunks of 100 IDs.
	func refreshMissingArticles(for account: Account, completion: @escaping ((Result<Void, Error>)-> Void)) {
		account.fetchArticleIDsForStatusesWithoutArticlesNewerThanCutoffDate { articleIDsResult in

			func process(_ fetchedArticleIDs: Set<String>) {
				os_log(.debug, log: self.log, "Refreshing missing articles...")
				let group = DispatchGroup()

				let articleIDs = Array(fetchedArticleIDs)
				let chunkedArticleIDs = articleIDs.chunked(into: 100)

				for chunk in chunkedArticleIDs {
					group.enter()
					self.caller.retrieveEntries(articleIDs: chunk) { result in
						switch result {
						case .success(let entries):
							self.syncFeedItems(account, entries) {
								group.leave()
							}

						case .failure(let error):
							// Best-effort: log and keep going with the other chunks.
							os_log(.error, log: self.log, "Refresh missing articles failed: %@", error.localizedDescription)
							group.leave()
						}
					}
				}

				group.notify(queue: DispatchQueue.main) {
					// NOTE(review): refreshAll also completes a task after this
					// call returns, so a task may be completed twice here —
					// confirm DownloadProgress clamps at zero remaining.
					self.refreshProgress.completeTask()
					os_log(.debug, log: self.log, "Done refreshing missing articles.")
					completion(.success(()))
				}
			}

			switch articleIDsResult {
			case .success(let articleIDs):
				process(articleIDs)
			case .failure(let databaseError):
				self.refreshProgress.completeTask()
				completion(.failure(databaseError))
			}
		}
	}

	/// Pushes locally-recorded read/starred status changes to the server,
	/// grouped by article so each article gets a single call.
	func sendArticleStatus(for account: Account, completion: @escaping VoidResultCompletionBlock) {
		os_log(.debug, log: log, "Sending article status...")

		database.selectForProcessing { result in

			func processStatuses(_ syncStatuses: [SyncStatus]) {
				// NOTE(review): processed statuses are not deleted from the
				// sync database here — confirm cleanup happens downstream.
				let articleStatuses = Dictionary(grouping: syncStatuses, by: { $0.articleID })
				let group = DispatchGroup()

				articleStatuses.forEach { articleID, statuses in
					group.enter()
					self.caller.updateArticleStatus(articleID, statuses) {
						group.leave()
					}
				}

				group.notify(queue: DispatchQueue.main) {
					os_log(.debug, log: self.log, "Done sending article statuses.")
					completion(.success(()))
				}
			}

			switch result {
			case .success(let syncStatuses):
				processStatuses(syncStatuses)
			case .failure(let databaseError):
				completion(.failure(databaseError))
			}
		}
	}

	/// Pulls unread and starred item lists from the server (in parallel) and
	/// reconciles local article statuses with them. Individual fetch failures
	/// are logged but don't fail the whole operation.
	func refreshArticleStatus(for account: Account, completion: @escaping ((Result<Void, Error>) -> Void)) {
		os_log(.debug, log: log, "Refreshing article status...")
		let group = DispatchGroup()

		// unread
		group.enter()
		caller.retrieveAllUnreadFeedItems { result in
			switch result {
			case .success(let items):
				self.syncArticleReadState(account, items)
				group.leave()

			case .failure(let error):
				os_log(.info, log: self.log, "Retrieving unread entries failed: %@.", error.localizedDescription)
				group.leave()
			}
		}

		// starred
		group.enter()
		caller.retrieveAllStarredFeedItems { result in
			switch result {
			case .success(let items):
				self.syncArticleStarredState(account, items)
				group.leave()

			case .failure(let error):
				os_log(.info, log: self.log, "Retrieving starred entries failed: %@.", error.localizedDescription)
				group.leave()
			}
		}

		group.notify(queue: DispatchQueue.main) {
			os_log(.debug, log: self.log, "Done refreshing article statuses.")
			completion(.success(()))
		}
	}

	// MARK: Unsupported operations — Feed Wrangler has no OPML import or folders.

	func importOPML(for account: Account, opmlFile: URL, completion: @escaping (Result<Void, Error>) -> Void) {
		fatalError()
	}

	func createFolder(for account: Account, name: String, completion: @escaping (Result<Folder, Error>) -> Void) {
		fatalError()
	}

	func renameFolder(for account: Account, with folder: Folder, to name: String, completion: @escaping (Result<Void, Error>) -> Void) {
		fatalError()
	}

	func removeFolder(for account: Account, with folder: Folder, completion: @escaping (Result<Void, Error>) -> Void) {
		fatalError()
	}

	/// Subscribes to `url` on the server, then mirrors the new subscription
	/// into the local account (including an initial article download).
	func createWebFeed(for account: Account, url: String, name: String?, container: Container, completion: @escaping (Result<WebFeed, Error>) -> Void) {
		refreshProgress.addToNumberOfTasksAndRemaining(2)

		self.refreshCredentials(for: account) {
			self.refreshProgress.completeTask()
			self.caller.addSubscription(url: url) { result in
				self.refreshProgress.completeTask()

				switch result {
				case .success(let subscription):
					self.addFeedWranglerSubscription(account: account, subscription: subscription, name: name, container: container, completion: completion)

				case .failure(let error):
					DispatchQueue.main.async {
						completion(.failure(error))
					}
				}
			}
		}
	}

	/// Creates the local `WebFeed` for a server subscription, applies the
	/// optional custom name, and kicks off the initial article download.
	/// Runs on the main queue because it mutates account model objects.
	private func addFeedWranglerSubscription(account: Account, subscription sub: FeedWranglerSubscription, name: String?, container: Container, completion: @escaping (Result<WebFeed, Error>) -> Void) {
		DispatchQueue.main.async {
			let feed = account.createWebFeed(with: sub.title, url: sub.feedURL, webFeedID: String(sub.feedID), homePageURL: sub.siteURL)

			account.addWebFeed(feed, to: container) { result in
				switch result {
				case .success:
					if let name = name {
						account.renameWebFeed(feed, to: name) { result in
							switch result {
							case .success:
								self.initialFeedDownload(account: account, feed: feed, completion: completion)

							case .failure(let error):
								completion(.failure(error))
							}
						}
					} else {
						self.initialFeedDownload(account: account, feed: feed, completion: completion)
					}

				case .failure(let error):
					completion(.failure(error))
				}
			}
		}
	}

	/// Fetches the first page of articles for a newly-added feed and syncs
	/// them in; reports the feed itself on success (main queue).
	private func initialFeedDownload(account: Account, feed: WebFeed, completion: @escaping (Result<WebFeed, Error>) -> Void) {

		self.caller.retrieveFeedItems(page: 0, feed: feed) { results in
			switch results {
			case .success(let entries):
				self.syncFeedItems(account, entries) {
					DispatchQueue.main.async {
						completion(.success(feed))
					}
				}

			case .failure(let error):
				DispatchQueue.main.async {
					completion(.failure(error))
				}
			}
		}
	}

	/// Renames the subscription on the server first; only on success is the
	/// local `editedName` updated (main queue).
	func renameWebFeed(for account: Account, with feed: WebFeed, to name: String, completion: @escaping (Result<Void, Error>) -> Void) {
		refreshProgress.addToNumberOfTasksAndRemaining(2)

		self.refreshCredentials(for: account) {
			self.refreshProgress.completeTask()
			self.caller.renameSubscription(feedID: feed.webFeedID, newName: name) { result in
				self.refreshProgress.completeTask()

				switch result {
				case .success:
					DispatchQueue.main.async {
						feed.editedName = name
						completion(.success(()))
					}

				case .failure(let error):
					DispatchQueue.main.async {
						let wrappedError = AccountError.wrappedError(error: error, account: account)
						completion(.failure(wrappedError))
					}
				}
			}
		}
	}

	func addWebFeed(for account: Account, with feed: WebFeed, to container: Container, completion: @escaping (Result<Void, Error>) -> Void) {
		// just add to account, folders are not supported
		DispatchQueue.main.async {
			account.addFeedIfNotInAnyFolder(feed)
			completion(.success(()))
		}
	}

	/// Removes the subscription on the server first; only on success is the
	/// local feed (and its metadata) removed (main queue).
	func removeWebFeed(for account: Account, with feed: WebFeed, from container: Container, completion: @escaping (Result<Void, Error>) -> Void) {
		refreshProgress.addToNumberOfTasksAndRemaining(2)

		self.refreshCredentials(for: account) {
			self.refreshProgress.completeTask()
			self.caller.removeSubscription(feedID: feed.webFeedID) { result in
				self.refreshProgress.completeTask()

				switch result {
				case .success:
					DispatchQueue.main.async {
						account.clearWebFeedMetadata(feed)
						account.removeWebFeed(feed)
						completion(.success(()))
					}

				case .failure(let error):
					DispatchQueue.main.async {
						let wrappedError = AccountError.wrappedError(error: error, account: account)
						completion(.failure(wrappedError))
					}
				}
			}
		}
	}

	func moveWebFeed(for account: Account, with feed: WebFeed, from: Container, to: Container, completion: @escaping (Result<Void, Error>) -> Void) {
		fatalError()
	}

	func restoreWebFeed(for account: Account, feed: WebFeed, container: Container, completion: @escaping (Result<Void, Error>) -> Void) {
		fatalError()
	}

	func restoreFolder(for account: Account, folder: Folder, completion: @escaping (Result<Void, Error>) -> Void) {
		fatalError()
	}

	/// Records status changes locally, queues them in the sync database, and
	/// opportunistically pushes them to the server if any are pending.
	/// Returns the articles whose statuses actually changed (nil on failure).
	func markArticles(for account: Account, articles: Set<Article>, statusKey: ArticleStatus.Key, flag: Bool) -> Set<Article>? {
		let syncStatuses = articles.map { SyncStatus(articleID: $0.articleID, key: statusKey, flag: flag)}
		database.insertStatuses(syncStatuses)

		database.selectPendingCount { result in
			if let count = try? result.get(), count > 0 {
				self.sendArticleStatus(for: account) { _ in }
			}
		}

		return try? account.update(articles, statusKey: statusKey, flag: flag)
	}

	func accountDidInitialize(_ account: Account) {
		credentials = try? account.retrieveCredentials(type: .feedWranglerToken)
	}

	/// Validates credentials with a throwaway caller; the result is delivered
	/// on the main queue.
	static func validateCredentials(transport: Transport, credentials: Credentials, endpoint: URL? = nil, completion: @escaping (Result<Credentials?, Error>) -> Void) {
		let caller = FeedWranglerAPICaller(transport: transport)
		caller.credentials = credentials
		caller.validateCredentials { result in
			DispatchQueue.main.async {
				completion(result)
			}
		}
	}

	// MARK: Suspend and Resume (for iOS)

	/// Suspend all network activity
	func suspendNetwork() {
		caller.cancelAll()
	}

	/// Suspend the SQLLite databases
	func suspendDatabase() {
		database.suspend()
	}

	/// Make sure no SQLite databases are open and we are ready to issue network requests.
	func resume() {
		database.resume()
	}
}
|
|
|
|
|
|
|
|
// MARK: Private
|
|
|
|
// MARK: Private

private extension FeedWranglerAccountDelegate {

	/// Reconciles the account's local web feeds with the server's
	/// subscription list: removes orphans, updates existing feeds, and
	/// creates feeds for any new subscriptions. Main thread only.
	func syncFeeds(_ account: Account, _ subscriptions: [FeedWranglerSubscription]) {
		assert(Thread.isMainThread)

		let remoteFeedIDs = subscriptions.map { String($0.feedID) }

		// Drop any top-level feed the server no longer knows about.
		let orphanedFeeds = account.topLevelWebFeeds.filter { !remoteFeedIDs.contains($0.webFeedID) }
		account.removeFeeds(orphanedFeeds)

		var newSubscriptions = Set<FeedWranglerSubscription>()
		for subscription in subscriptions {
			let remoteID = String(subscription.feedID)

			if let existingFeed = account.existingWebFeed(withWebFeedID: remoteID) {
				// Refresh metadata on feeds we already have.
				existingFeed.name = subscription.title
				existingFeed.editedName = nil
				existingFeed.homePageURL = subscription.siteURL
				existingFeed.externalID = nil // MARK: TODO What should this be?
			} else {
				newSubscriptions.insert(subscription)
			}
		}

		for subscription in newSubscriptions {
			let remoteID = String(subscription.feedID)
			let newFeed = account.createWebFeed(with: subscription.title, url: subscription.feedURL, webFeedID: remoteID, homePageURL: subscription.siteURL)
			newFeed.externalID = nil
			account.addWebFeed(newFeed)
		}
	}

	/// Converts Feed Wrangler items into `ParsedItem`s, groups them by feed,
	/// and merges them into the account (unseen items default to read).
	func syncFeedItems(_ account: Account, _ feedItems: [FeedWranglerFeedItem], completion: @escaping VoidCompletionBlock) {
		let parsedItems = feedItems.map { (item: FeedWranglerFeedItem) -> ParsedItem in
			let identifier = String(item.feedItemID)
			// let authors = ...
			return ParsedItem(syncServiceID: identifier, uniqueID: identifier, feedURL: String(item.feedID), url: nil, externalURL: item.url, title: item.title, language: nil, contentHTML: item.body, contentText: nil, summary: nil, imageURL: nil, bannerImageURL: nil, datePublished: item.publishedDate, dateModified: item.updatedDate, authors: nil, tags: nil, attachments: nil)
		}

		let itemsByFeedID = Dictionary(grouping: parsedItems, by: { $0.feedURL }).mapValues { Set($0) }
		account.update(webFeedIDsAndItems: itemsByFeedID, defaultRead: true) { _ in
			completion()
		}
	}

	/// Makes local read state agree with the server's unread list: everything
	/// the server reports unread becomes unread; anything unread locally but
	/// absent from the server list becomes read.
	func syncArticleReadState(_ account: Account, _ unreadFeedItems: [FeedWranglerFeedItem]) {
		let serverUnreadIDs = Set(unreadFeedItems.map { String($0.feedItemID) })
		account.fetchUnreadArticleIDs { articleIDsResult in
			guard let localUnreadIDs = try? articleIDsResult.get() else {
				return
			}
			account.markAsUnread(serverUnreadIDs)

			let nowReadIDs = localUnreadIDs.subtracting(serverUnreadIDs)
			account.markAsRead(nowReadIDs)
		}
	}

	/// Makes local starred state agree with the server's starred list, using
	/// the same reconcile-by-subtraction approach as the read state.
	func syncArticleStarredState(_ account: Account, _ starredFeedItems: [FeedWranglerFeedItem]) {
		let serverStarredIDs = Set(starredFeedItems.map { String($0.feedItemID) })
		account.fetchStarredArticleIDs { articleIDsResult in
			guard let localStarredIDs = try? articleIDsResult.get() else {
				return
			}

			account.markAsStarred(serverStarredIDs)

			let nowUnstarredIDs = localStarredIDs.subtracting(serverStarredIDs)
			account.markAsUnstarred(nowUnstarredIDs)
		}
	}

	/// Placeholder for a generalized status-sync routine; intentionally does
	/// nothing yet.
	func syncArticleState(_ account: Account, key: ArticleStatus.Key, flag: Bool, serverFeedItems: [FeedWranglerFeedItem]) {
		let _ /*serverFeedItemIDs*/ = serverFeedItems.map { String($0.feedID) }

		// todo generalize this logic
	}
}
|