Fix starred story parsing using incorrect JSON field
This commit is contained in:
parent
b7e7e17656
commit
bde6a607ba
|
@ -11,7 +11,8 @@
|
|||
179DB0B17A6C51B95ABC1741 /* NewsBlurStoryStatusChange.swift in Sources */ = {isa = PBXBuildFile; fileRef = 179DB5B421C5433B45C5F13E /* NewsBlurStoryStatusChange.swift */; };
|
||||
179DB28CF49F73A945EBF5DB /* NewsBlurLoginResponse.swift in Sources */ = {isa = PBXBuildFile; fileRef = 179DB088236E3236010462E8 /* NewsBlurLoginResponse.swift */; };
|
||||
179DB49A960F8B78C4924458 /* NewsBlurGenericCodingKeys.swift in Sources */ = {isa = PBXBuildFile; fileRef = 179DB66D933E976C29159DEE /* NewsBlurGenericCodingKeys.swift */; };
|
||||
179DBED55C9B4D6A413486C1 /* NewsBlurUnreadStory.swift in Sources */ = {isa = PBXBuildFile; fileRef = 179DB818180A51098A9816B2 /* NewsBlurUnreadStory.swift */; };
|
||||
179DB96B984E67DC101E470D /* NewsBlurAccountDelegate+Private.swift in Sources */ = {isa = PBXBuildFile; fileRef = 179DB55DC2CAD332D4376416 /* NewsBlurAccountDelegate+Private.swift */; };
|
||||
179DBED55C9B4D6A413486C1 /* NewsBlurStoryHash.swift in Sources */ = {isa = PBXBuildFile; fileRef = 179DB818180A51098A9816B2 /* NewsBlurStoryHash.swift */; };
|
||||
179DBF4DE2562D4C532F6008 /* NewsBlurFeed.swift in Sources */ = {isa = PBXBuildFile; fileRef = 179DB1B909672E0E807B5E8C /* NewsBlurFeed.swift */; };
|
||||
3B3A33E7238D3D6800314204 /* Secrets.swift in Sources */ = {isa = PBXBuildFile; fileRef = 3B3A33E6238D3D6800314204 /* Secrets.swift */; };
|
||||
3B826DA72385C81C00FC1ADB /* FeedWranglerAuthorizationResult.swift in Sources */ = {isa = PBXBuildFile; fileRef = 3B826D9E2385C81C00FC1ADB /* FeedWranglerAuthorizationResult.swift */; };
|
||||
|
@ -230,10 +231,11 @@
|
|||
/* Begin PBXFileReference section */
|
||||
179DB088236E3236010462E8 /* NewsBlurLoginResponse.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = NewsBlurLoginResponse.swift; sourceTree = "<group>"; };
|
||||
179DB1B909672E0E807B5E8C /* NewsBlurFeed.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = NewsBlurFeed.swift; sourceTree = "<group>"; };
|
||||
179DB55DC2CAD332D4376416 /* NewsBlurAccountDelegate+Private.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "NewsBlurAccountDelegate+Private.swift"; sourceTree = "<group>"; };
|
||||
179DB5B421C5433B45C5F13E /* NewsBlurStoryStatusChange.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = NewsBlurStoryStatusChange.swift; sourceTree = "<group>"; };
|
||||
179DB66D933E976C29159DEE /* NewsBlurGenericCodingKeys.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = NewsBlurGenericCodingKeys.swift; sourceTree = "<group>"; };
|
||||
179DB7399814F6FB3247825C /* NewsBlurStory.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = NewsBlurStory.swift; sourceTree = "<group>"; };
|
||||
179DB818180A51098A9816B2 /* NewsBlurUnreadStory.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = NewsBlurUnreadStory.swift; sourceTree = "<group>"; };
|
||||
179DB818180A51098A9816B2 /* NewsBlurStoryHash.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = NewsBlurStoryHash.swift; sourceTree = "<group>"; };
|
||||
3B3A33E6238D3D6800314204 /* Secrets.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = Secrets.swift; path = ../../Shared/Secrets.swift; sourceTree = "<group>"; };
|
||||
3B826D9E2385C81C00FC1ADB /* FeedWranglerAuthorizationResult.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = FeedWranglerAuthorizationResult.swift; sourceTree = "<group>"; };
|
||||
3B826D9F2385C81C00FC1ADB /* FeedWranglerFeedItem.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = FeedWranglerFeedItem.swift; sourceTree = "<group>"; };
|
||||
|
@ -458,7 +460,7 @@
|
|||
179DB1B909672E0E807B5E8C /* NewsBlurFeed.swift */,
|
||||
179DB7399814F6FB3247825C /* NewsBlurStory.swift */,
|
||||
179DB66D933E976C29159DEE /* NewsBlurGenericCodingKeys.swift */,
|
||||
179DB818180A51098A9816B2 /* NewsBlurUnreadStory.swift */,
|
||||
179DB818180A51098A9816B2 /* NewsBlurStoryHash.swift */,
|
||||
179DB5B421C5433B45C5F13E /* NewsBlurStoryStatusChange.swift */,
|
||||
);
|
||||
path = Models;
|
||||
|
@ -558,6 +560,7 @@
|
|||
769F2A8DF190549E24B5D110 /* NewsBlurAccountDelegate.swift */,
|
||||
769F275FD5D942502C5B4716 /* NewsBlurAPICaller.swift */,
|
||||
179DBD810D353D9CED7C3BED /* Models */,
|
||||
179DB55DC2CAD332D4376416 /* NewsBlurAccountDelegate+Private.swift */,
|
||||
);
|
||||
path = NewsBlur;
|
||||
sourceTree = "<group>";
|
||||
|
@ -1153,8 +1156,9 @@
|
|||
179DBF4DE2562D4C532F6008 /* NewsBlurFeed.swift in Sources */,
|
||||
179DB02FFBC17AC9798F0EBC /* NewsBlurStory.swift in Sources */,
|
||||
179DB49A960F8B78C4924458 /* NewsBlurGenericCodingKeys.swift in Sources */,
|
||||
179DBED55C9B4D6A413486C1 /* NewsBlurUnreadStory.swift in Sources */,
|
||||
179DBED55C9B4D6A413486C1 /* NewsBlurStoryHash.swift in Sources */,
|
||||
179DB0B17A6C51B95ABC1741 /* NewsBlurStoryStatusChange.swift in Sources */,
|
||||
179DB96B984E67DC101E470D /* NewsBlurAccountDelegate+Private.swift in Sources */,
|
||||
);
|
||||
runOnlyForDeploymentPostprocessing = 0;
|
||||
};
|
||||
|
|
|
@ -0,0 +1,64 @@
|
|||
//
|
||||
// NewsBlurStoryHash.swift
|
||||
// Account
|
||||
//
|
||||
// Created by Anh Quang Do on 2020-03-13.
|
||||
// Copyright (c) 2020 Ranchero Software, LLC. All rights reserved.
|
||||
//
|
||||
|
||||
import Foundation
|
||||
import RSCore
|
||||
import RSParser
|
||||
|
||||
typealias NewsBlurStoryHash = NewsBlurStoryHashesResponse.StoryHash
|
||||
|
||||
struct NewsBlurStoryHashesResponse: Decodable {
|
||||
typealias StoryHashDictionary = [String: [StoryHash]]
|
||||
|
||||
var unread: StoryHashDictionary?
|
||||
var starred: StoryHashDictionary?
|
||||
|
||||
struct StoryHash: Hashable, Codable {
|
||||
var hash: String
|
||||
var timestamp: Date
|
||||
}
|
||||
}
|
||||
|
||||
extension NewsBlurStoryHashesResponse {
|
||||
private enum CodingKeys: String, CodingKey {
|
||||
case unread = "unread_feed_story_hashes"
|
||||
case starred = "starred_story_hashes"
|
||||
}
|
||||
|
||||
init(from decoder: Decoder) throws {
|
||||
let container = try decoder.container(keyedBy: CodingKeys.self)
|
||||
|
||||
// Parse unread
|
||||
if let unreadContainer = try? container.nestedContainer(keyedBy: NewsBlurGenericCodingKeys.self, forKey: .unread) {
|
||||
self.unread = try NewsBlurStoryHashesResponse.extractHashes(container: unreadContainer)
|
||||
}
|
||||
|
||||
// Parse starred
|
||||
if let starredContainer = try? container.nestedContainer(keyedBy: NewsBlurGenericCodingKeys.self, forKey: .starred) {
|
||||
self.starred = try NewsBlurStoryHashesResponse.extractHashes(container: starredContainer)
|
||||
}
|
||||
}
|
||||
|
||||
static func extractHashes<Key>(container: KeyedDecodingContainer<Key>) throws -> StoryHashDictionary where Key: CodingKey {
|
||||
var dict: StoryHashDictionary = [:]
|
||||
for key in container.allKeys {
|
||||
dict[key.stringValue] = []
|
||||
var hashArrayContainer = try container.nestedUnkeyedContainer(forKey: key)
|
||||
while !hashArrayContainer.isAtEnd {
|
||||
var hashContainer = try hashArrayContainer.nestedUnkeyedContainer()
|
||||
let hash = try hashContainer.decode(String.self)
|
||||
let timestamp = try hashContainer.decode(Date.self)
|
||||
let storyHash = StoryHash(hash: hash, timestamp: timestamp)
|
||||
|
||||
dict[key.stringValue]?.append(storyHash)
|
||||
}
|
||||
}
|
||||
|
||||
return dict
|
||||
}
|
||||
}
|
|
@ -1,50 +0,0 @@
|
|||
//
|
||||
// NewsBlurUnreadStory.swift
|
||||
// Account
|
||||
//
|
||||
// Created by Anh Quang Do on 2020-03-13.
|
||||
// Copyright (c) 2020 Ranchero Software, LLC. All rights reserved.
|
||||
//
|
||||
|
||||
import Foundation
|
||||
import RSCore
|
||||
import RSParser
|
||||
|
||||
typealias NewsBlurStoryHash = NewsBlurUnreadStoryHashesResponse.StoryHash
|
||||
|
||||
struct NewsBlurUnreadStoryHashesResponse: Decodable {
|
||||
let feeds: [String: [StoryHash]]
|
||||
|
||||
struct StoryHash: Hashable, Codable {
|
||||
var hash: String
|
||||
var timestamp: Date
|
||||
}
|
||||
}
|
||||
|
||||
extension NewsBlurUnreadStoryHashesResponse {
|
||||
private enum CodingKeys: String, CodingKey {
|
||||
case feeds = "unread_feed_story_hashes"
|
||||
}
|
||||
|
||||
init(from decoder: Decoder) throws {
|
||||
let container = try decoder.container(keyedBy: CodingKeys.self)
|
||||
|
||||
// Parse feeds
|
||||
var feeds: [String: [StoryHash]] = [:]
|
||||
let feedContainer = try container.nestedContainer(keyedBy: NewsBlurGenericCodingKeys.self, forKey: .feeds)
|
||||
try feedContainer.allKeys.forEach { key in
|
||||
feeds[key.stringValue] = []
|
||||
var hashArrayContainer = try feedContainer.nestedUnkeyedContainer(forKey: key)
|
||||
while !hashArrayContainer.isAtEnd {
|
||||
var hashContainer = try hashArrayContainer.nestedUnkeyedContainer()
|
||||
let hash = try hashContainer.decode(String.self)
|
||||
let timestamp = try hashContainer.decode(Date.self)
|
||||
let storyHash = StoryHash(hash: hash, timestamp: timestamp)
|
||||
|
||||
feeds[key.stringValue]?.append(storyHash)
|
||||
}
|
||||
}
|
||||
|
||||
self.feeds = feeds
|
||||
}
|
||||
}
|
|
@ -205,7 +205,7 @@ extension NewsBlurAPICaller {
|
|||
}
|
||||
|
||||
let request = URLRequest(url: callURL, credentials: credentials)
|
||||
transport.send(request: request, resultType: NewsBlurUnreadStoryHashesResponse.self, dateDecoding: .secondsSince1970) { result in
|
||||
transport.send(request: request, resultType: NewsBlurStoryHashesResponse.self, dateDecoding: .secondsSince1970) { result in
|
||||
if self.suspended {
|
||||
completion(.failure(TransportError.suspended))
|
||||
return
|
||||
|
@ -213,7 +213,8 @@ extension NewsBlurAPICaller {
|
|||
|
||||
switch result {
|
||||
case .success((_, let payload)):
|
||||
completion(.success(payload?.feeds.values.flatMap { $0 }))
|
||||
let hashes = payload?.unread ?? payload?.starred
|
||||
completion(.success(hashes?.values.flatMap { $0 }))
|
||||
case .failure(let error):
|
||||
completion(.failure(error))
|
||||
}
|
||||
|
|
|
@ -0,0 +1,374 @@
|
|||
//
|
||||
// NewsBlurAccountDelegate+Private.swift
|
||||
// Mostly adapted from FeedbinAccountDelegate.swift
|
||||
// Account
|
||||
//
|
||||
// Created by Anh Quang Do on 2020-03-14.
|
||||
// Copyright (c) 2020 Ranchero Software, LLC. All rights reserved.
|
||||
//
|
||||
|
||||
import Articles
|
||||
import RSCore
|
||||
import RSDatabase
|
||||
import RSParser
|
||||
import RSWeb
|
||||
import SyncDatabase
|
||||
import os.log
|
||||
|
||||
extension NewsBlurAccountDelegate {
|
||||
func refreshFeeds(for account: Account, completion: @escaping (Result<Void, Error>) -> Void) {
|
||||
os_log(.debug, log: log, "Refreshing feeds...")
|
||||
|
||||
caller.retrieveFeeds { result in
|
||||
switch result {
|
||||
case .success((let feeds, let folders)):
|
||||
BatchUpdate.shared.perform {
|
||||
self.syncFolders(account, folders)
|
||||
self.syncFeeds(account, feeds)
|
||||
self.syncFeedFolderRelationship(account, folders)
|
||||
}
|
||||
|
||||
self.refreshProgress.completeTask()
|
||||
completion(.success(()))
|
||||
case .failure(let error):
|
||||
completion(.failure(error))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func syncFolders(_ account: Account, _ folders: [NewsBlurFolder]?) {
|
||||
guard let folders = folders else { return }
|
||||
assert(Thread.isMainThread)
|
||||
|
||||
os_log(.debug, log: log, "Syncing folders with %ld folders.", folders.count)
|
||||
|
||||
let folderNames = folders.map { $0.name }
|
||||
|
||||
// Delete any folders not at NewsBlur
|
||||
if let folders = account.folders {
|
||||
folders.forEach { folder in
|
||||
if !folderNames.contains(folder.name ?? "") {
|
||||
for feed in folder.topLevelWebFeeds {
|
||||
account.addWebFeed(feed)
|
||||
clearFolderRelationship(for: feed, withFolderName: folder.name ?? "")
|
||||
}
|
||||
account.removeFolder(folder)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let accountFolderNames: [String] = {
|
||||
if let folders = account.folders {
|
||||
return folders.map { $0.name ?? "" }
|
||||
} else {
|
||||
return [String]()
|
||||
}
|
||||
}()
|
||||
|
||||
// Make any folders NewsBlur has, but we don't
|
||||
folderNames.forEach { folderName in
|
||||
if !accountFolderNames.contains(folderName) {
|
||||
_ = account.ensureFolder(with: folderName)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func syncFeeds(_ account: Account, _ feeds: [NewsBlurFeed]?) {
|
||||
guard let feeds = feeds else { return }
|
||||
assert(Thread.isMainThread)
|
||||
|
||||
os_log(.debug, log: log, "Syncing feeds with %ld feeds.", feeds.count)
|
||||
|
||||
let subFeedIds = feeds.map { String($0.feedID) }
|
||||
|
||||
// Remove any feeds that are no longer in the subscriptions
|
||||
if let folders = account.folders {
|
||||
for folder in folders {
|
||||
for feed in folder.topLevelWebFeeds {
|
||||
if !subFeedIds.contains(feed.webFeedID) {
|
||||
folder.removeWebFeed(feed)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for feed in account.topLevelWebFeeds {
|
||||
if !subFeedIds.contains(feed.webFeedID) {
|
||||
account.removeWebFeed(feed)
|
||||
}
|
||||
}
|
||||
|
||||
// Add any feeds we don't have and update any we do
|
||||
var feedsToAdd = Set<NewsBlurFeed>()
|
||||
feeds.forEach { feed in
|
||||
let subFeedId = String(feed.feedID)
|
||||
|
||||
if let webFeed = account.existingWebFeed(withWebFeedID: subFeedId) {
|
||||
webFeed.name = feed.name
|
||||
// If the name has been changed on the server remove the locally edited name
|
||||
webFeed.editedName = nil
|
||||
webFeed.homePageURL = feed.homepageURL
|
||||
webFeed.subscriptionID = String(feed.feedID)
|
||||
webFeed.faviconURL = feed.faviconURL
|
||||
}
|
||||
else {
|
||||
feedsToAdd.insert(feed)
|
||||
}
|
||||
}
|
||||
|
||||
// Actually add feeds all in one go, so we don’t trigger various rebuilding things that Account does.
|
||||
feedsToAdd.forEach { feed in
|
||||
let webFeed = account.createWebFeed(with: feed.name, url: feed.feedURL, webFeedID: String(feed.feedID), homePageURL: feed.homepageURL)
|
||||
webFeed.subscriptionID = String(feed.feedID)
|
||||
account.addWebFeed(webFeed)
|
||||
}
|
||||
}
|
||||
|
||||
func syncFeedFolderRelationship(_ account: Account, _ folders: [NewsBlurFolder]?) {
|
||||
guard let folders = folders else { return }
|
||||
assert(Thread.isMainThread)
|
||||
|
||||
os_log(.debug, log: log, "Syncing folders with %ld folders.", folders.count)
|
||||
|
||||
// Set up some structures to make syncing easier
|
||||
let relationships = folders.map({ $0.asRelationships }).flatMap { $0 }
|
||||
let folderDict = nameToFolderDictionary(with: account.folders)
|
||||
let foldersDict = relationships.reduce([String: [NewsBlurFolderRelationship]]()) { (dict, relationship) in
|
||||
var feedInFolders = dict
|
||||
if var feedInFolder = feedInFolders[relationship.folderName] {
|
||||
feedInFolder.append(relationship)
|
||||
feedInFolders[relationship.folderName] = feedInFolder
|
||||
} else {
|
||||
feedInFolders[relationship.folderName] = [relationship]
|
||||
}
|
||||
return feedInFolders
|
||||
}
|
||||
|
||||
// Sync the folders
|
||||
for (folderName, folderRelationships) in foldersDict {
|
||||
guard let folder = folderDict[folderName] else { return }
|
||||
|
||||
let folderFeedIDs = folderRelationships.map { String($0.feedID) }
|
||||
|
||||
// Move any feeds not in the folder to the account
|
||||
for feed in folder.topLevelWebFeeds {
|
||||
if !folderFeedIDs.contains(feed.webFeedID) {
|
||||
folder.removeWebFeed(feed)
|
||||
clearFolderRelationship(for: feed, withFolderName: folder.name ?? "")
|
||||
account.addWebFeed(feed)
|
||||
}
|
||||
}
|
||||
|
||||
// Add any feeds not in the folder
|
||||
let folderFeedIds = folder.topLevelWebFeeds.map { $0.webFeedID }
|
||||
|
||||
for relationship in folderRelationships {
|
||||
let folderFeedID = String(relationship.feedID)
|
||||
if !folderFeedIds.contains(folderFeedID) {
|
||||
guard let feed = account.existingWebFeed(withWebFeedID: folderFeedID) else {
|
||||
continue
|
||||
}
|
||||
saveFolderRelationship(for: feed, withFolderName: folderName, id: relationship.folderName)
|
||||
folder.addWebFeed(feed)
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
let folderFeedIDs = Set(relationships.map { String($0.feedID) })
|
||||
|
||||
// Remove all feeds from the account container that have a tag
|
||||
for feed in account.topLevelWebFeeds {
|
||||
if folderFeedIDs.contains(feed.webFeedID) {
|
||||
account.removeWebFeed(feed)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func clearFolderRelationship(for feed: WebFeed, withFolderName folderName: String) {
|
||||
if var folderRelationship = feed.folderRelationship {
|
||||
folderRelationship[folderName] = nil
|
||||
feed.folderRelationship = folderRelationship
|
||||
}
|
||||
}
|
||||
|
||||
func saveFolderRelationship(for feed: WebFeed, withFolderName folderName: String, id: String) {
|
||||
if var folderRelationship = feed.folderRelationship {
|
||||
folderRelationship[folderName] = id
|
||||
feed.folderRelationship = folderRelationship
|
||||
} else {
|
||||
feed.folderRelationship = [folderName: id]
|
||||
}
|
||||
}
|
||||
|
||||
func nameToFolderDictionary(with folders: Set<Folder>?) -> [String: Folder] {
|
||||
guard let folders = folders else {
|
||||
return [String: Folder]()
|
||||
}
|
||||
|
||||
var d = [String: Folder]()
|
||||
for folder in folders {
|
||||
let name = folder.name ?? ""
|
||||
if d[name] == nil {
|
||||
d[name] = folder
|
||||
}
|
||||
}
|
||||
return d
|
||||
}
|
||||
|
||||
func refreshUnreadStories(for account: Account, hashes: [NewsBlurStoryHash]?, updateFetchDate: Date?, completion: @escaping (Result<Void, Error>) -> Void) {
|
||||
guard let hashes = hashes, !hashes.isEmpty else {
|
||||
if let lastArticleFetch = updateFetchDate {
|
||||
self.accountMetadata?.lastArticleFetchStartTime = lastArticleFetch
|
||||
self.accountMetadata?.lastArticleFetchEndTime = Date()
|
||||
}
|
||||
completion(.success(()))
|
||||
return
|
||||
}
|
||||
|
||||
let numberOfStories = min(hashes.count, 100) // api limit
|
||||
let hashesToFetch = Array(hashes[..<numberOfStories])
|
||||
|
||||
caller.retrieveStories(hashes: hashesToFetch) { result in
|
||||
switch result {
|
||||
case .success(let stories):
|
||||
self.processStories(account: account, stories: stories) { error in
|
||||
self.refreshProgress.completeTask()
|
||||
|
||||
if let error = error {
|
||||
completion(.failure(error))
|
||||
return
|
||||
}
|
||||
|
||||
self.refreshUnreadStories(for: account, hashes: Array(hashes[numberOfStories...]), updateFetchDate: updateFetchDate) { result in
|
||||
os_log(.debug, log: self.log, "Done refreshing stories.")
|
||||
switch result {
|
||||
case .success:
|
||||
completion(.success(()))
|
||||
case .failure(let error):
|
||||
completion(.failure(error))
|
||||
}
|
||||
}
|
||||
}
|
||||
case .failure(let error):
|
||||
completion(.failure(error))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func mapStoriesToParsedItems(stories: [NewsBlurStory]?) -> Set<ParsedItem> {
|
||||
guard let stories = stories else { return Set<ParsedItem>() }
|
||||
|
||||
let parsedItems: [ParsedItem] = stories.map { story in
|
||||
let author = Set([ParsedAuthor(name: story.authorName, url: nil, avatarURL: nil, emailAddress: nil)])
|
||||
return ParsedItem(syncServiceID: story.storyID, uniqueID: String(story.storyID), feedURL: String(story.feedID), url: story.url, externalURL: nil, title: story.title, contentHTML: story.contentHTML, contentText: nil, summary: nil, imageURL: nil, bannerImageURL: nil, datePublished: story.datePublished, dateModified: nil, authors: author, tags: nil, attachments: nil)
|
||||
}
|
||||
|
||||
return Set(parsedItems)
|
||||
}
|
||||
|
||||
func sendStoryStatuses(_ statuses: [SyncStatus],
|
||||
throttle: Bool,
|
||||
apiCall: ([String], @escaping (Result<Void, Error>) -> Void) -> Void,
|
||||
completion: @escaping (Result<Void, Error>) -> Void) {
|
||||
guard !statuses.isEmpty else {
|
||||
completion(.success(()))
|
||||
return
|
||||
}
|
||||
|
||||
let group = DispatchGroup()
|
||||
var errorOccurred = false
|
||||
|
||||
let storyHashes = statuses.compactMap { $0.articleID }
|
||||
let storyHashGroups = storyHashes.chunked(into: throttle ? 1 : 5) // api limit
|
||||
for storyHashGroup in storyHashGroups {
|
||||
group.enter()
|
||||
apiCall(storyHashGroup) { result in
|
||||
switch result {
|
||||
case .success:
|
||||
self.database.deleteSelectedForProcessing(storyHashGroup.map { String($0) } )
|
||||
group.leave()
|
||||
case .failure(let error):
|
||||
errorOccurred = true
|
||||
os_log(.error, log: self.log, "Story status sync call failed: %@.", error.localizedDescription)
|
||||
self.database.resetSelectedForProcessing(storyHashGroup.map { String($0) } )
|
||||
group.leave()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
group.notify(queue: DispatchQueue.main) {
|
||||
if errorOccurred {
|
||||
completion(.failure(NewsBlurError.unknown))
|
||||
} else {
|
||||
completion(.success(()))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func syncStoryReadState(account: Account, hashes: [NewsBlurStoryHash]?) {
|
||||
guard let hashes = hashes else { return }
|
||||
|
||||
database.selectPendingReadStatusArticleIDs() { result in
|
||||
func process(_ pendingStoryHashes: Set<String>) {
|
||||
|
||||
let newsBlurUnreadStoryHashes = Set(hashes.map { $0.hash } )
|
||||
let updatableNewsBlurUnreadStoryHashes = newsBlurUnreadStoryHashes.subtracting(pendingStoryHashes)
|
||||
|
||||
account.fetchUnreadArticleIDs { articleIDsResult in
|
||||
guard let currentUnreadArticleIDs = try? articleIDsResult.get() else {
|
||||
return
|
||||
}
|
||||
|
||||
// Mark articles as unread
|
||||
let deltaUnreadArticleIDs = updatableNewsBlurUnreadStoryHashes.subtracting(currentUnreadArticleIDs)
|
||||
account.markAsUnread(deltaUnreadArticleIDs)
|
||||
|
||||
// Mark articles as read
|
||||
let deltaReadArticleIDs = currentUnreadArticleIDs.subtracting(updatableNewsBlurUnreadStoryHashes)
|
||||
account.markAsRead(deltaReadArticleIDs)
|
||||
}
|
||||
}
|
||||
|
||||
switch result {
|
||||
case .success(let pendingArticleIDs):
|
||||
process(pendingArticleIDs)
|
||||
case .failure(let error):
|
||||
os_log(.error, log: self.log, "Sync Story Read Status failed: %@.", error.localizedDescription)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func syncStoryStarredState(account: Account, hashes: [NewsBlurStoryHash]?) {
|
||||
guard let hashes = hashes else { return }
|
||||
|
||||
database.selectPendingStarredStatusArticleIDs() { result in
|
||||
func process(_ pendingStoryHashes: Set<String>) {
|
||||
|
||||
let newsBlurStarredStoryHashes = Set(hashes.map { $0.hash } )
|
||||
let updatableNewsBlurUnreadStoryHashes = newsBlurStarredStoryHashes.subtracting(pendingStoryHashes)
|
||||
|
||||
account.fetchStarredArticleIDs { articleIDsResult in
|
||||
guard let currentStarredArticleIDs = try? articleIDsResult.get() else {
|
||||
return
|
||||
}
|
||||
|
||||
// Mark articles as starred
|
||||
let deltaStarredArticleIDs = updatableNewsBlurUnreadStoryHashes.subtracting(currentStarredArticleIDs)
|
||||
account.markAsStarred(deltaStarredArticleIDs)
|
||||
|
||||
// Mark articles as unstarred
|
||||
let deltaUnstarredArticleIDs = currentStarredArticleIDs.subtracting(updatableNewsBlurUnreadStoryHashes)
|
||||
account.markAsUnstarred(deltaUnstarredArticleIDs)
|
||||
}
|
||||
}
|
||||
|
||||
switch result {
|
||||
case .success(let pendingArticleIDs):
|
||||
process(pendingArticleIDs)
|
||||
case .failure(let error):
|
||||
os_log(.error, log: self.log, "Sync Story Starred Status failed: %@.", error.localizedDescription)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -29,9 +29,9 @@ final class NewsBlurAccountDelegate: AccountDelegate {
|
|||
var accountMetadata: AccountMetadata? = nil
|
||||
var refreshProgress = DownloadProgress(numberOfTasks: 0)
|
||||
|
||||
private let caller: NewsBlurAPICaller
|
||||
private let log = OSLog(subsystem: Bundle.main.bundleIdentifier!, category: "NewsBlur")
|
||||
private let database: SyncDatabase
|
||||
let caller: NewsBlurAPICaller
|
||||
let log = OSLog(subsystem: Bundle.main.bundleIdentifier!, category: "NewsBlur")
|
||||
let database: SyncDatabase
|
||||
|
||||
init(dataFolder: String, transport: Transport?) {
|
||||
if let transport = transport {
|
||||
|
@ -341,361 +341,3 @@ final class NewsBlurAccountDelegate: AccountDelegate {
|
|||
database.resume()
|
||||
}
|
||||
}
|
||||
|
||||
extension NewsBlurAccountDelegate {
|
||||
private func refreshFeeds(for account: Account, completion: @escaping (Result<Void, Error>) -> Void) {
|
||||
os_log(.debug, log: log, "Refreshing feeds...")
|
||||
|
||||
caller.retrieveFeeds { result in
|
||||
switch result {
|
||||
case .success((let feeds, let folders)):
|
||||
BatchUpdate.shared.perform {
|
||||
self.syncFolders(account, folders)
|
||||
self.syncFeeds(account, feeds)
|
||||
self.syncFeedFolderRelationship(account, folders)
|
||||
}
|
||||
|
||||
self.refreshProgress.completeTask()
|
||||
completion(.success(()))
|
||||
case .failure(let error):
|
||||
completion(.failure(error))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private func syncFolders(_ account: Account, _ folders: [NewsBlurFolder]?) {
|
||||
guard let folders = folders else { return }
|
||||
assert(Thread.isMainThread)
|
||||
|
||||
os_log(.debug, log: log, "Syncing folders with %ld folders.", folders.count)
|
||||
|
||||
let folderNames = folders.map { $0.name }
|
||||
|
||||
// Delete any folders not at NewsBlur
|
||||
if let folders = account.folders {
|
||||
folders.forEach { folder in
|
||||
if !folderNames.contains(folder.name ?? "") {
|
||||
for feed in folder.topLevelWebFeeds {
|
||||
account.addWebFeed(feed)
|
||||
clearFolderRelationship(for: feed, withFolderName: folder.name ?? "")
|
||||
}
|
||||
account.removeFolder(folder)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let accountFolderNames: [String] = {
|
||||
if let folders = account.folders {
|
||||
return folders.map { $0.name ?? "" }
|
||||
} else {
|
||||
return [String]()
|
||||
}
|
||||
}()
|
||||
|
||||
// Make any folders NewsBlur has, but we don't
|
||||
folderNames.forEach { folderName in
|
||||
if !accountFolderNames.contains(folderName) {
|
||||
_ = account.ensureFolder(with: folderName)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private func syncFeeds(_ account: Account, _ feeds: [NewsBlurFeed]?) {
|
||||
guard let feeds = feeds else { return }
|
||||
assert(Thread.isMainThread)
|
||||
|
||||
os_log(.debug, log: log, "Syncing feeds with %ld feeds.", feeds.count)
|
||||
|
||||
let subFeedIds = feeds.map { String($0.feedID) }
|
||||
|
||||
// Remove any feeds that are no longer in the subscriptions
|
||||
if let folders = account.folders {
|
||||
for folder in folders {
|
||||
for feed in folder.topLevelWebFeeds {
|
||||
if !subFeedIds.contains(feed.webFeedID) {
|
||||
folder.removeWebFeed(feed)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for feed in account.topLevelWebFeeds {
|
||||
if !subFeedIds.contains(feed.webFeedID) {
|
||||
account.removeWebFeed(feed)
|
||||
}
|
||||
}
|
||||
|
||||
// Add any feeds we don't have and update any we do
|
||||
var feedsToAdd = Set<NewsBlurFeed>()
|
||||
feeds.forEach { feed in
|
||||
let subFeedId = String(feed.feedID)
|
||||
|
||||
if let webFeed = account.existingWebFeed(withWebFeedID: subFeedId) {
|
||||
webFeed.name = feed.name
|
||||
// If the name has been changed on the server remove the locally edited name
|
||||
webFeed.editedName = nil
|
||||
webFeed.homePageURL = feed.homepageURL
|
||||
webFeed.subscriptionID = String(feed.feedID)
|
||||
webFeed.faviconURL = feed.faviconURL
|
||||
}
|
||||
else {
|
||||
feedsToAdd.insert(feed)
|
||||
}
|
||||
}
|
||||
|
||||
// Actually add feeds all in one go, so we don’t trigger various rebuilding things that Account does.
|
||||
feedsToAdd.forEach { feed in
|
||||
let webFeed = account.createWebFeed(with: feed.name, url: feed.feedURL, webFeedID: String(feed.feedID), homePageURL: feed.homepageURL)
|
||||
webFeed.subscriptionID = String(feed.feedID)
|
||||
account.addWebFeed(webFeed)
|
||||
}
|
||||
}
|
||||
|
||||
private func syncFeedFolderRelationship(_ account: Account, _ folders: [NewsBlurFolder]?) {
|
||||
guard let folders = folders else { return }
|
||||
assert(Thread.isMainThread)
|
||||
|
||||
os_log(.debug, log: log, "Syncing folders with %ld folders.", folders.count)
|
||||
|
||||
// Set up some structures to make syncing easier
|
||||
let relationships = folders.map({ $0.asRelationships }).flatMap { $0 }
|
||||
let folderDict = nameToFolderDictionary(with: account.folders)
|
||||
let foldersDict = relationships.reduce([String: [NewsBlurFolderRelationship]]()) { (dict, relationship) in
|
||||
var feedInFolders = dict
|
||||
if var feedInFolder = feedInFolders[relationship.folderName] {
|
||||
feedInFolder.append(relationship)
|
||||
feedInFolders[relationship.folderName] = feedInFolder
|
||||
} else {
|
||||
feedInFolders[relationship.folderName] = [relationship]
|
||||
}
|
||||
return feedInFolders
|
||||
}
|
||||
|
||||
// Sync the folders
|
||||
for (folderName, folderRelationships) in foldersDict {
|
||||
guard let folder = folderDict[folderName] else { return }
|
||||
|
||||
let folderFeedIDs = folderRelationships.map { String($0.feedID) }
|
||||
|
||||
// Move any feeds not in the folder to the account
|
||||
for feed in folder.topLevelWebFeeds {
|
||||
if !folderFeedIDs.contains(feed.webFeedID) {
|
||||
folder.removeWebFeed(feed)
|
||||
clearFolderRelationship(for: feed, withFolderName: folder.name ?? "")
|
||||
account.addWebFeed(feed)
|
||||
}
|
||||
}
|
||||
|
||||
// Add any feeds not in the folder
|
||||
let folderFeedIds = folder.topLevelWebFeeds.map { $0.webFeedID }
|
||||
|
||||
for relationship in folderRelationships {
|
||||
let folderFeedID = String(relationship.feedID)
|
||||
if !folderFeedIds.contains(folderFeedID) {
|
||||
guard let feed = account.existingWebFeed(withWebFeedID: folderFeedID) else {
|
||||
continue
|
||||
}
|
||||
saveFolderRelationship(for: feed, withFolderName: folderName, id: relationship.folderName)
|
||||
folder.addWebFeed(feed)
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
let folderFeedIDs = Set(relationships.map { String($0.feedID) })
|
||||
|
||||
// Remove all feeds from the account container that have a tag
|
||||
for feed in account.topLevelWebFeeds {
|
||||
if folderFeedIDs.contains(feed.webFeedID) {
|
||||
account.removeWebFeed(feed)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private func clearFolderRelationship(for feed: WebFeed, withFolderName folderName: String) {
|
||||
if var folderRelationship = feed.folderRelationship {
|
||||
folderRelationship[folderName] = nil
|
||||
feed.folderRelationship = folderRelationship
|
||||
}
|
||||
}
|
||||
|
||||
private func saveFolderRelationship(for feed: WebFeed, withFolderName folderName: String, id: String) {
|
||||
if var folderRelationship = feed.folderRelationship {
|
||||
folderRelationship[folderName] = id
|
||||
feed.folderRelationship = folderRelationship
|
||||
} else {
|
||||
feed.folderRelationship = [folderName: id]
|
||||
}
|
||||
}
|
||||
|
||||
private func nameToFolderDictionary(with folders: Set<Folder>?) -> [String: Folder] {
|
||||
guard let folders = folders else {
|
||||
return [String: Folder]()
|
||||
}
|
||||
|
||||
var d = [String: Folder]()
|
||||
for folder in folders {
|
||||
let name = folder.name ?? ""
|
||||
if d[name] == nil {
|
||||
d[name] = folder
|
||||
}
|
||||
}
|
||||
return d
|
||||
}
|
||||
|
||||
/// Fetches full stories for the given hashes in batches, recursing until the
/// list is exhausted, then calls `completion`.
/// - Parameters:
///   - account: The account the stories are written into.
///   - hashes: Remaining story hashes to fetch; nil/empty ends the recursion.
///   - updateFetchDate: When non-nil, recorded as the article-fetch start time once done.
///   - completion: Called with the first failure encountered, or success when all batches land.
private func refreshUnreadStories(for account: Account, hashes: [NewsBlurStoryHash]?, updateFetchDate: Date?, completion: @escaping (Result<Void, Error>) -> Void) {
	// Base case: nothing left to fetch. Record the fetch window (if requested) and finish.
	guard let hashes = hashes, !hashes.isEmpty else {
		if let lastArticleFetch = updateFetchDate {
			self.accountMetadata?.lastArticleFetchStartTime = lastArticleFetch
			self.accountMetadata?.lastArticleFetchEndTime = Date()
		}
		completion(.success(()))
		return
	}

	// The API accepts at most 100 hashes per request.
	let batchSize = min(hashes.count, 100)
	let batch = Array(hashes[..<batchSize])

	caller.retrieveStories(hashes: batch) { result in
		switch result {
		case .failure(let error):
			completion(.failure(error))
		case .success(let stories):
			self.processStories(account: account, stories: stories) { error in
				self.refreshProgress.completeTask()

				if let error = error {
					completion(.failure(error))
					return
				}

				// Recurse on the remainder of the hash list.
				self.refreshUnreadStories(for: account, hashes: Array(hashes[batchSize...]), updateFetchDate: updateFetchDate) { result in
					os_log(.debug, log: self.log, "Done refreshing stories.")
					switch result {
					case .success:
						completion(.success(()))
					case .failure(let error):
						completion(.failure(error))
					}
				}
			}
		}
	}
}
|
||||
|
||||
/// Converts NewsBlur story models into a set of `ParsedItem`s suitable for the
/// account's update pipeline. Returns an empty set for nil input.
private func mapStoriesToParsedItems(stories: [NewsBlurStory]?) -> Set<ParsedItem> {
	guard let stories = stories else { return Set<ParsedItem>() }

	var parsedItems = Set<ParsedItem>()
	for story in stories {
		let authors = Set([ParsedAuthor(name: story.authorName, url: nil, avatarURL: nil, emailAddress: nil)])
		let item = ParsedItem(syncServiceID: story.storyID, uniqueID: String(story.storyID), feedURL: String(story.feedID), url: story.url, externalURL: nil, title: story.title, contentHTML: story.contentHTML, contentText: nil, summary: nil, imageURL: nil, bannerImageURL: nil, datePublished: story.datePublished, dateModified: nil, authors: authors, tags: nil, attachments: nil)
		parsedItems.insert(item)
	}

	return parsedItems
}
|
||||
|
||||
/// Pushes pending story status changes to the service in small batches.
/// - Parameters:
///   - statuses: Pending sync statuses; their `articleID`s are the story hashes to send.
///   - throttle: When true, send one hash per call instead of five (API rate limit).
///   - apiCall: The service call to invoke for each batch of hashes.
///   - completion: Success when every batch succeeded; `NewsBlurError.unknown` if any failed.
private func sendStoryStatuses(_ statuses: [SyncStatus],
							   throttle: Bool,
							   apiCall: ([String], @escaping (Result<Void, Error>) -> Void) -> Void,
							   completion: @escaping (Result<Void, Error>) -> Void) {
	guard !statuses.isEmpty else {
		completion(.success(()))
		return
	}

	let dispatchGroup = DispatchGroup()
	var anyCallFailed = false

	let storyHashes = statuses.compactMap { $0.articleID }
	// api limit: one hash per request when throttled, five otherwise.
	for hashBatch in storyHashes.chunked(into: throttle ? 1 : 5) {
		dispatchGroup.enter()
		apiCall(hashBatch) { result in
			switch result {
			case .success:
				// Sent successfully — clear these from the local sync queue.
				self.database.deleteSelectedForProcessing(hashBatch.map { String($0) })
				dispatchGroup.leave()
			case .failure(let error):
				anyCallFailed = true
				os_log(.error, log: self.log, "Story status sync call failed: %@.", error.localizedDescription)
				// Put them back so a later sync pass can retry.
				self.database.resetSelectedForProcessing(hashBatch.map { String($0) })
				dispatchGroup.leave()
			}
		}
	}

	dispatchGroup.notify(queue: DispatchQueue.main) {
		if anyCallFailed {
			completion(.failure(NewsBlurError.unknown))
		} else {
			completion(.success(()))
		}
	}
}
|
||||
|
||||
/// Reconciles local read/unread state with the server's unread story hashes.
/// Hashes with a pending local status change are excluded so in-flight local
/// edits are not clobbered by stale server state.
private func syncStoryReadState(account: Account, hashes: [NewsBlurStoryHash]?) {
	guard let hashes = hashes else { return }

	database.selectPendingReadStatusArticleIDs() { result in
		switch result {
		case .success(let pendingStoryHashes):
			let serverUnreadHashes = Set(hashes.map { $0.hash })
			let updatableUnreadHashes = serverUnreadHashes.subtracting(pendingStoryHashes)

			account.fetchUnreadArticleIDs { articleIDsResult in
				guard let currentUnreadArticleIDs = try? articleIDsResult.get() else {
					return
				}

				// Server says unread, we say read → mark unread locally.
				account.markAsUnread(updatableUnreadHashes.subtracting(currentUnreadArticleIDs))

				// We say unread, server doesn't → mark read locally.
				account.markAsRead(currentUnreadArticleIDs.subtracting(updatableUnreadHashes))
			}
		case .failure(let error):
			os_log(.error, log: self.log, "Sync Story Read Status failed: %@.", error.localizedDescription)
		}
	}
}
|
||||
|
||||
/// Reconciles local starred state with the server's starred story hashes.
/// Hashes with a pending local status change are excluded so in-flight local
/// edits are not clobbered by stale server state.
private func syncStoryStarredState(account: Account, hashes: [NewsBlurStoryHash]?) {
	guard let hashes = hashes else { return }

	database.selectPendingStarredStatusArticleIDs() { result in
		switch result {
		case .success(let pendingStoryHashes):
			let serverStarredHashes = Set(hashes.map { $0.hash })
			let updatableStarredHashes = serverStarredHashes.subtracting(pendingStoryHashes)

			account.fetchStarredArticleIDs { articleIDsResult in
				guard let currentStarredArticleIDs = try? articleIDsResult.get() else {
					return
				}

				// Server says starred, we don't → star locally.
				account.markAsStarred(updatableStarredHashes.subtracting(currentStarredArticleIDs))

				// We say starred, server doesn't → unstar locally.
				account.markAsUnstarred(currentStarredArticleIDs.subtracting(updatableStarredHashes))
			}
		case .failure(let error):
			os_log(.error, log: self.log, "Sync Story Starred Status failed: %@.", error.localizedDescription)
		}
	}
}
|
||||
}
|
||||
|
|
Loading…
Reference in New Issue