start supporting article sync

This commit is contained in:
Jonathan Bennett 2019-10-17 01:05:18 -04:00
parent c7d0d23146
commit 09faf1a0c2
5 changed files with 394 additions and 7 deletions

View File

@ -13,6 +13,8 @@
3BF6112423572A62000EF978 /* FeedWranglerSubscription.swift in Sources */ = {isa = PBXBuildFile; fileRef = 3BF6112323572A62000EF978 /* FeedWranglerSubscription.swift */; };
3BF6112623572E43000EF978 /* FeedWranglerSubscriptionsRequest.swift in Sources */ = {isa = PBXBuildFile; fileRef = 3BF6112523572E43000EF978 /* FeedWranglerSubscriptionsRequest.swift */; };
3BF6119023577173000EF978 /* FeedWranglerGenericResult.swift in Sources */ = {isa = PBXBuildFile; fileRef = 3BF6118F23577173000EF978 /* FeedWranglerGenericResult.swift */; };
3BF611922357877E000EF978 /* FeedWranglerFeedItem.swift in Sources */ = {isa = PBXBuildFile; fileRef = 3BF611912357877E000EF978 /* FeedWranglerFeedItem.swift */; };
3BF6119423578F55000EF978 /* FeedWranglerFeedItemsRequest.swift in Sources */ = {isa = PBXBuildFile; fileRef = 3BF6119323578F55000EF978 /* FeedWranglerFeedItemsRequest.swift */; };
5107A099227DE42E00C7C3C5 /* AccountCredentialsTest.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5107A098227DE42E00C7C3C5 /* AccountCredentialsTest.swift */; };
5107A09B227DE49500C7C3C5 /* TestAccountManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5107A09A227DE49500C7C3C5 /* TestAccountManager.swift */; };
5107A09D227DE77700C7C3C5 /* TestTransport.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5107A09C227DE77700C7C3C5 /* TestTransport.swift */; };
@ -197,6 +199,8 @@
3BF6112323572A62000EF978 /* FeedWranglerSubscription.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedWranglerSubscription.swift; sourceTree = "<group>"; };
3BF6112523572E43000EF978 /* FeedWranglerSubscriptionsRequest.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedWranglerSubscriptionsRequest.swift; sourceTree = "<group>"; };
3BF6118F23577173000EF978 /* FeedWranglerGenericResult.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedWranglerGenericResult.swift; sourceTree = "<group>"; };
3BF611912357877E000EF978 /* FeedWranglerFeedItem.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedWranglerFeedItem.swift; sourceTree = "<group>"; };
3BF6119323578F55000EF978 /* FeedWranglerFeedItemsRequest.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedWranglerFeedItemsRequest.swift; sourceTree = "<group>"; };
5107A098227DE42E00C7C3C5 /* AccountCredentialsTest.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AccountCredentialsTest.swift; sourceTree = "<group>"; };
5107A09A227DE49500C7C3C5 /* TestAccountManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TestAccountManager.swift; sourceTree = "<group>"; };
5107A09C227DE77700C7C3C5 /* TestTransport.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TestTransport.swift; sourceTree = "<group>"; };
@ -358,6 +362,8 @@
3BF6112323572A62000EF978 /* FeedWranglerSubscription.swift */,
3BF6112523572E43000EF978 /* FeedWranglerSubscriptionsRequest.swift */,
3BF6118F23577173000EF978 /* FeedWranglerGenericResult.swift */,
3BF611912357877E000EF978 /* FeedWranglerFeedItem.swift */,
3BF6119323578F55000EF978 /* FeedWranglerFeedItemsRequest.swift */,
);
path = FeedWrangler;
sourceTree = "<group>";
@ -939,6 +945,7 @@
9E1D154F233371DD00F4944C /* FeedlyGetCollectionsOperation.swift in Sources */,
9EAEC626233318400085D7C9 /* FeedlyStream.swift in Sources */,
9EAEC60C2332FE830085D7C9 /* FeedlyCollection.swift in Sources */,
3BF6119423578F55000EF978 /* FeedWranglerFeedItemsRequest.swift in Sources */,
51E3EB41229AF61B00645299 /* AccountError.swift in Sources */,
9E1D155D233447F000F4944C /* FeedlyUpdateAccountFeedsWithItemsOperation.swift in Sources */,
51E59599228C77BC00FCC42B /* FeedbinUnreadEntry.swift in Sources */,
@ -964,6 +971,7 @@
3BF6112623572E43000EF978 /* FeedWranglerSubscriptionsRequest.swift in Sources */,
841974011F6DD1EC006346C4 /* Folder.swift in Sources */,
510BD111232C3801002692E4 /* AccountMetadataFile.swift in Sources */,
3BF611922357877E000EF978 /* FeedWranglerFeedItem.swift in Sources */,
846E774F1F6EF9C000A165E2 /* LocalAccountDelegate.swift in Sources */,
515E4EB52324FF8C0057B0E7 /* CredentialsManager.swift in Sources */,
844B297F210CE37E004020B3 /* UnreadCountProvider.swift in Sources */,

View File

@ -9,6 +9,7 @@
import Foundation
import SyncDatabase
import RSWeb
final class FeedWranglerAPICaller: NSObject {
@ -125,4 +126,161 @@ final class FeedWranglerAPICaller: NSObject {
}
}
}
// MARK: FeedItems
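// Fetches a single page of feed items from feed_items/list, offset by page * FeedWranglerConfig.pageSize.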
func retrieveFeedItems(page: Int = 0, completion: @escaping (Result<[FeedWranglerFeedItem], Error>) -> Void) {
guard var components = URLComponents(url: FeedWranglerConfig.clientURL.appendingPathComponent("feed_items/list"), resolvingAgainstBaseURL: false) else {
completion(.failure(TransportError.noURL))
return
}
// todo: handle initial sync better
components.queryItems = [
URLQueryItem(name: "read", value: "false"),
URLQueryItem(name: "offset", value: String(page * FeedWranglerConfig.pageSize)),
// URLQueryItem(name: "created_since", value: feedID),
// URLQueryItem(name: "updated_since", value: feedID),
]
guard let url = components.url else {
completion(.failure(TransportError.noURL))
return
}
let request = URLRequest(url: url, credentials: credentials)
transport.send(request: request, resultType: FeedWranglerFeedItemsRequest.self) { result in
switch result {
case .success(let (_, results)):
completion(.success(results?.feedItems ?? []))
case .failure(let error):
completion(.failure(error))
}
}
}
func retrieveUnreadFeedItems(page: Int = 0, completion: @escaping (Result<[FeedWranglerFeedItem], Error>) -> Void) {
guard var components = URLComponents(url: FeedWranglerConfig.clientURL.appendingPathComponent("feed_items/list"), resolvingAgainstBaseURL: false) else {
completion(.failure(TransportError.noURL))
return
}
components.queryItems = [
URLQueryItem(name: "read", value: "false"),
URLQueryItem(name: "offset", value: String(page * FeedWranglerConfig.pageSize)),
// URLQueryItem(name: "created_since", value: feedID),
// URLQueryItem(name: "updated_since", value: feedID),
]
guard let url = components.url else {
completion(.failure(TransportError.noURL))
return
}
let request = URLRequest(url: url, credentials: credentials)
transport.send(request: request, resultType: FeedWranglerFeedItemsRequest.self) { result in
switch result {
case .success(let (_, results)):
completion(.success(results?.feedItems ?? []))
case .failure(let error):
completion(.failure(error))
}
}
}
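// Pages through unread items recursively, accumulating results until the server returns an empty page.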
func retrieveAllUnreadFeedItems(foundItems: [FeedWranglerFeedItem] = [], page: Int = 0, completion: @escaping (Result<[FeedWranglerFeedItem], Error>) -> Void) {
retrieveUnreadFeedItems(page: page) { result in
switch result {
case .success(let newItems):
if newItems.count > 0 {
self.retrieveAllUnreadFeedItems(foundItems: foundItems + newItems, page: (page + 1), completion: completion)
} else {
completion(.success(foundItems + newItems))
}
case .failure(let error):
completion(.failure(error))
}
}
}
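// Fetches a single page of starred items (starred=true) from feed_items/list.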
func retrieveStarredFeedItems(page: Int = 0, completion: @escaping (Result<[FeedWranglerFeedItem], Error>) -> Void) {
guard var components = URLComponents(url: FeedWranglerConfig.clientURL.appendingPathComponent("feed_items/list"), resolvingAgainstBaseURL: false) else {
completion(.failure(TransportError.noURL))
return
}
components.queryItems = [
URLQueryItem(name: "starred", value: "true"),
URLQueryItem(name: "offset", value: String(page * FeedWranglerConfig.pageSize)),
// URLQueryItem(name: "created_since", value: feedID),
// URLQueryItem(name: "updated_since", value: feedID),
]
guard let url = components.url else {
completion(.failure(TransportError.noURL))
return
}
let request = URLRequest(url: url, credentials: credentials)
transport.send(request: request, resultType: FeedWranglerFeedItemsRequest.self) { result in
switch result {
case .success(let (_, results)):
completion(.success(results?.feedItems ?? []))
case .failure(let error):
completion(.failure(error))
}
}
}
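// Pages through starred items recursively, mirroring retrieveAllUnreadFeedItems.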
func retrieveAllStarredFeedItems(foundItems: [FeedWranglerFeedItem] = [], page: Int = 0, completion: @escaping (Result<[FeedWranglerFeedItem], Error>) -> Void) {
retrieveStarredFeedItems(page: page) { result in
switch result {
case .success(let newItems):
if newItems.count > 0 {
self.retrieveAllStarredFeedItems(foundItems: foundItems + newItems, page: (page + 1), completion: completion)
} else {
completion(.success(foundItems + newItems))
}
case .failure(let error):
completion(.failure(error))
}
}
}
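// Translates pending SyncStatus records into read/starred query parameters on feed_items/update.
// userDeleted has no Feed Wrangler equivalent, so those statuses are dropped.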
func updateArticleStatus(_ articleID: String, _ statuses: [SyncStatus], completion: @escaping () -> Void) {
guard var components = URLComponents(url: FeedWranglerConfig.clientURL.appendingPathComponent("feed_items/update"), resolvingAgainstBaseURL: false) else {
completion()
return
}
var queryItems = statuses.compactMap { status -> URLQueryItem? in
switch status.key {
case .read:
return URLQueryItem(name: "read", value: status.flag.description)
case .starred:
return URLQueryItem(name: "starred", value: status.flag.description)
case .userDeleted:
return nil
}
}
queryItems.append(URLQueryItem(name: "feed_item_id", value: articleID))
components.queryItems = (components.queryItems ?? []) + queryItems
guard let url = components.url else {
completion()
return
}
let request = URLRequest(url: url, credentials: credentials)
transport.send(request: request, resultType: FeedWranglerGenericResult.self) { result in
completion()
}
}
}

View File

@ -53,7 +53,7 @@ final class FeedWranglerAccountDelegate: AccountDelegate {
caller = FeedWranglerAPICaller(transport: session)
}
database = SyncDatabase(databaseFilePath: dataFolder.appending("/Sync.sqlite3"))
database = SyncDatabase(databaseFilePath: dataFolder.appending("/DB.sqlite3"))
}
func refreshAll(for account: Account, completion: @escaping (Result<Void, Error>) -> Void) {
@ -100,9 +100,25 @@ final class FeedWranglerAccountDelegate: AccountDelegate {
}
}
func refreshArticles(for account: Account, completion: @escaping (() -> Void)) {
os_log(.debug, log: log, "Refreshing articles...")
completion()
func refreshArticles(for account: Account, page: Int = 0, completion: @escaping (() -> Void)) {
os_log(.debug, log: log, "Refreshing articles, page: %d...", page)
caller.retrieveFeedItems(page: page) { result in
switch result {
case .success(let items):
self.syncFeedItems(account, items) {
if items.count == 0 {
completion()
} else {
self.refreshArticles(for: account, page: (page + 1), completion: completion)
}
}
case .failure:
// TODO Handle error
completion()
}
}
}
func refreshMissingArticles(for account: Account, completion: @escaping (() -> Void)) {
@ -112,12 +128,60 @@ final class FeedWranglerAccountDelegate: AccountDelegate {
func sendArticleStatus(for account: Account, completion: @escaping (() -> Void)) {
os_log(.debug, log: log, "Sending article status...")
completion()
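// Group the pending statuses by article and send one update request per article.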
let syncStatuses = database.selectForProcessing()
let articleStatuses = Dictionary(grouping: syncStatuses, by: { $0.articleID })
let group = DispatchGroup()
articleStatuses.forEach { articleID, statuses in
group.enter()
caller.updateArticleStatus(articleID, statuses) {
group.leave()
}
}
group.notify(queue: DispatchQueue.main) {
os_log(.debug, log: self.log, "Done sending article statuses.")
completion()
}
}
func refreshArticleStatus(for account: Account, completion: @escaping (() -> Void)) {
os_log(.debug, log: log, "Refreshing article status...")
completion()
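// Fetch the complete unread and starred item lists, then reconcile local statuses against them.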
let group = DispatchGroup()
group.enter()
caller.retrieveAllUnreadFeedItems { result in
switch result {
case .success(let items):
self.syncArticleReadState(account, items)
group.leave()
case .failure(let error):
os_log(.info, log: self.log, "Retrieving unread entries failed: %@.", error.localizedDescription)
group.leave()
}
}
// starred
group.enter()
caller.retrieveAllStarredFeedItems { result in
switch result {
case .success(let items):
self.syncArticleStarredState(account, items)
group.leave()
case .failure(let error):
os_log(.info, log: self.log, "Retrieving starred entries failed: %@.", error.localizedDescription)
group.leave()
}
}
group.notify(queue: DispatchQueue.main) {
os_log(.debug, log: self.log, "Done refreshing article statuses.")
completion()
}
}
func importOPML(for account: Account, opmlFile: URL, completion: @escaping (Result<Void, Error>) -> Void) {
@ -208,7 +272,14 @@ final class FeedWranglerAccountDelegate: AccountDelegate {
}
func markArticles(for account: Account, articles: Set<Article>, statusKey: ArticleStatus.Key, flag: Bool) -> Set<Article>? {
fatalError()
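// Record the status changes in the sync database, then push them to Feed Wrangler in the background.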
let syncStatuses = articles.map { SyncStatus(articleID: $0.articleID, key: statusKey, flag: flag)}
database.insertStatuses(syncStatuses)
if database.selectPendingCount() > 0 {
sendArticleStatus(for: account) {} // do it in the background
}
return account.update(articles, statusKey: statusKey, flag: flag)
}
func accountDidInitialize(_ account: Account) {
@ -257,4 +328,67 @@ private extension FeedWranglerAccountDelegate {
account.addFeed(feed)
}
}
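// Converts Feed Wrangler items into ParsedItems, groups them by feed, and hands them to the account.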
func syncFeedItems(_ account: Account, _ feedItems: [FeedWranglerFeedItem], completion: @escaping (() -> Void)) {
let parsedItems = feedItems.map { (item: FeedWranglerFeedItem) -> ParsedItem in
let itemID = String(item.feedItemID)
// let authors = ...
let parsedItem = ParsedItem(syncServiceID: itemID, uniqueID: itemID, feedURL: String(item.feedID), url: nil, externalURL: item.url, title: item.title, contentHTML: item.body, contentText: nil, summary: nil, imageURL: nil, bannerImageURL: nil, datePublished: item.publishedDate, dateModified: item.updatedDate, authors: nil, tags: nil, attachments: nil)
return parsedItem
}
let feedIDsAndItems = Dictionary(grouping: parsedItems, by: { $0.feedURL }).mapValues { Set($0) }
account.update(feedIDsAndItems: feedIDsAndItems, defaultRead: true, completion: completion)
}
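// Items unread on the server become unread locally; locally unread items missing from the server list are marked read.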
func syncArticleReadState(_ account: Account, _ unreadFeedItems: [FeedWranglerFeedItem]) {
let unreadServerItemIDs = Set(unreadFeedItems.map { String($0.feedItemID) })
let unreadLocalItemIDs = account.fetchUnreadArticleIDs()
// unread if unread on server
let unreadDiffItemIDs = unreadServerItemIDs.subtracting(unreadLocalItemIDs)
let unreadFoundArticles = account.fetchArticles(.articleIDs(unreadDiffItemIDs))
account.update(unreadFoundArticles, statusKey: .read, flag: false)
let unreadFoundItemIDs = Set(unreadFoundArticles.map { $0.articleID })
let missingArticleIDs = unreadDiffItemIDs.subtracting(unreadFoundItemIDs)
account.ensureStatuses(missingArticleIDs, true, .read, false)
let readItemIDs = unreadLocalItemIDs.subtracting(unreadServerItemIDs)
let readArticles = account.fetchArticles(.articleIDs(readItemIDs))
account.update(readArticles, statusKey: .read, flag: true)
let foundReadArticleIDs = Set(readArticles.map { $0.articleID })
let readMissingIDs = readItemIDs.subtracting(foundReadArticleIDs)
account.ensureStatuses(readMissingIDs, true, .read, true)
}
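// Mirrors syncArticleReadState for the starred flag.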
func syncArticleStarredState(_ account: Account, _ unreadFeedItems: [FeedWranglerFeedItem]) {
let unreadServerItemIDs = Set(unreadFeedItems.map { String($0.feedItemID) })
let unreadLocalItemIDs = account.fetchUnreadArticleIDs()
// starred if starred on server
let unreadDiffItemIDs = unreadServerItemIDs.subtracting(unreadLocalItemIDs)
let unreadFoundArticles = account.fetchArticles(.articleIDs(unreadDiffItemIDs))
account.update(unreadFoundArticles, statusKey: .starred, flag: true)
let unreadFoundItemIDs = Set(unreadFoundArticles.map { $0.articleID })
let missingArticleIDs = unreadDiffItemIDs.subtracting(unreadFoundItemIDs)
account.ensureStatuses(missingArticleIDs, true, .starred, true)
let readItemIDs = unreadLocalItemIDs.subtracting(unreadServerItemIDs)
let readArticles = account.fetchArticles(.articleIDs(readItemIDs))
account.update(readArticles, statusKey: .starred, flag: false)
let foundReadArticleIDs = Set(readArticles.map { $0.articleID })
let readMissingIDs = readItemIDs.subtracting(foundReadArticleIDs)
account.ensureStatuses(readMissingIDs, true, .starred, false)
}
func syncArticleState(_ account: Account, key: ArticleStatus.Key, flag: Bool, serverFeedItems: [FeedWranglerFeedItem]) {
let serverFeedItemIDs = serverFeedItems.map { String($0.feedItemID) }
// todo generalize this logic
}
}

View File

@ -0,0 +1,62 @@
//
// FeedWranglerFeedItem.swift
// Account
//
// Created by Jonathan Bennett on 2019-10-16.
// Copyright © 2019 Ranchero Software, LLC. All rights reserved.
//
import Foundation
struct FeedWranglerFeedItem: Hashable, Codable {
let feedItemID: Int
let publishedAt: Int
let createdAt: Int
let versionKey: Int
let updatedAt: Int
let url: String
let title: String
let starred: Bool
let read: Bool
let readLater: Bool
let body: String
let author: String?
let feedID: Int
let feedName: String
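// Feed Wrangler timestamps are Unix epoch seconds; expose them as Dates for the rest of the sync code.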
var publishedDate: Date {
get {
Date(timeIntervalSince1970: Double(publishedAt))
}
}
var createdDate: Date {
get {
Date(timeIntervalSince1970: Double(createdAt))
}
}
var updatedDate: Date {
get {
Date(timeIntervalSince1970: Double(updatedAt))
}
}
enum CodingKeys: String, CodingKey {
case feedItemID = "feed_item_id"
case publishedAt = "published_at"
case createdAt = "created_at"
case versionKey = "version_key"
case updatedAt = "updated_at"
case url = "url"
case title = "title"
case starred = "starred"
case read = "read"
case readLater = "read_later"
case body = "body"
case author = "author"
case feedID = "feed_id"
case feedName = "feed_name"
}
}

View File

@ -0,0 +1,25 @@
//
// FeedWranglerFeedItemsRequest.swift
// Account
//
// Created by Jonathan Bennett on 2019-10-16.
// Copyright © 2019 Ranchero Software, LLC. All rights reserved.
//
import Foundation
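// Response envelope for feed_items/list: a page of feed items plus the count/result/error fields.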
struct FeedWranglerFeedItemsRequest: Hashable, Codable {
let count: Int
let feedItems: [FeedWranglerFeedItem]
let error: String?
let result: String
enum CodingKeys: String, CodingKey {
case count = "count"
case feedItems = "feed_items"
case error = "error"
case result = "result"
}
}