Display flat feed list

Anh Do 2020-03-13 18:57:38 -04:00
parent f4a0c56a54
commit 8e99f8deea
3 changed files with 107 additions and 54 deletions

@@ -18,7 +18,7 @@ struct NewsBlurFeedsResponse: Decodable {
     struct Feed: Hashable, Codable {
         let title: String
-        let feedId: Int
+        let feedID: Int
         let feedURL: String
         let siteURL: String?
         let favicon: String?
@@ -26,7 +26,7 @@ struct NewsBlurFeedsResponse: Decodable {
     struct Folder: Hashable, Codable {
         let name: String
-        let feedIds: [Int]
+        let feedIDs: [Int]
     }
 }
@@ -53,7 +53,7 @@ extension NewsBlurFeedsResponse {
         let folderContainer = try container.nestedContainer(keyedBy: NewsBlurGenericCodingKeys.self, forKey: .folders)
         try folderContainer.allKeys.forEach { key in
             let subscriptionIds = try folderContainer.decode([Int].self, forKey: key)
-            let folder = Folder(name: key.stringValue, feedIds: subscriptionIds)
+            let folder = Folder(name: key.stringValue, feedIDs: subscriptionIds)
             folders.append(folder)
         }
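
The folders payload is keyed by folder name rather than by a fixed field, which is why the decoder above walks allKeys of a nested container. A minimal, self-contained sketch of that dynamic-key pattern, using a hypothetical AnyStringKey in place of NewsBlurGenericCodingKeys and made-up sample JSON:

import Foundation

// Hypothetical stand-in for NewsBlurGenericCodingKeys: a CodingKey that accepts any string.
struct AnyStringKey: CodingKey {
    let stringValue: String
    let intValue: Int? = nil
    init?(stringValue: String) { self.stringValue = stringValue }
    init?(intValue: Int) { return nil }
}

struct FolderList: Decodable {
    struct Folder { let name: String; let feedIDs: [Int] }
    let folders: [Folder]

    init(from decoder: Decoder) throws {
        let container = try decoder.container(keyedBy: AnyStringKey.self)
        folders = try container.allKeys.map { key in
            // Each key is a folder name; its value is the array of feed IDs in that folder.
            Folder(name: key.stringValue, feedIDs: try container.decode([Int].self, forKey: key))
        }
    }
}

let sample = Data(#"{"News": [101, 102], "Tech": [103]}"#.utf8)
let folderList = try JSONDecoder().decode(FolderList.self, from: sample)
print(folderList.folders.map { "\($0.name): \($0.feedIDs)" })
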
@@ -66,7 +66,7 @@ extension NewsBlurFeedsResponse {
 extension NewsBlurFeedsResponse.Feed {
     private enum CodingKeys: String, CodingKey {
         case title = "feed_title"
-        case feedId = "id"
+        case feedID = "id"
         case feedURL = "feed_address"
         case siteURL = "feed_link"
         case favicon = "favicon_url"
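
The renamed feedID property still decodes from NewsBlur's "id" key; only the Swift-side name changes, while the JSON mapping lives in CodingKeys. A small sketch with a trimmed-down stand-in struct and a made-up payload:

import Foundation

// Trimmed-down stand-in for NewsBlurFeedsResponse.Feed, reusing the key mapping shown above.
struct SampleFeed: Decodable {
    let title: String
    let feedID: Int
    let feedURL: String

    private enum CodingKeys: String, CodingKey {
        case title = "feed_title"
        case feedID = "id"
        case feedURL = "feed_address"
    }
}

let payload = Data("""
{"feed_title": "Example Blog", "id": 42, "feed_address": "https://example.com/feed.xml"}
""".utf8)

let feed = try JSONDecoder().decode(SampleFeed.self, from: payload)
print(feed.feedID) // 42
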

@@ -16,8 +16,8 @@ struct NewsBlurStoriesResponse: Decodable {
     let stories: [Story]
     struct Story: Decodable {
-        let storyId: String
-        let feedId: Int
+        let storyID: String
+        let feedID: Int
         let title: String?
         let url: String?
         let authorName: String?
@@ -34,8 +34,8 @@ extension NewsBlurStoriesResponse {
 extension NewsBlurStoriesResponse.Story {
     private enum CodingKeys: String, CodingKey {
-        case storyId = "story_hash"
-        case feedId = "story_feed_id"
+        case storyID = "story_hash"
+        case feedID = "story_feed_id"
         case title = "story_title"
         case url = "story_permalink"
         case authorName = "story_authors"

@@ -139,38 +139,6 @@ final class NewsBlurAccountDelegate: AccountDelegate {
         }
     }
-    func refreshUnreadStories(for account: Account, hashes: [NewsBlurStoryHash]?, updateFetchDate: Date?, completion: @escaping (Result<Void, Error>) -> Void) {
-        guard let hashes = hashes, !hashes.isEmpty else {
-            if let lastArticleFetch = updateFetchDate {
-                self.accountMetadata?.lastArticleFetchStartTime = lastArticleFetch
-                self.accountMetadata?.lastArticleFetchEndTime = Date()
-            }
-            completion(.success(()))
-            return
-        }
-        let numberOfStories = min(hashes.count, 100) // api limit
-        let hashesToFetch = Array(hashes[..<numberOfStories])
-        caller.retrieveStories(hashes: hashesToFetch) { result in
-            switch result {
-            case .success(let stories):
-                self.processStories(account: account, stories: stories) { error in
-                    self.refreshProgress.completeTask()
-                    if let error = error {
-                        completion(.failure(error))
-                        return
-                    }
-                    self.refreshUnreadStories(for: account, hashes: Array(hashes[numberOfStories...]), updateFetchDate: updateFetchDate, completion: completion)
-                }
-            case .failure(let error):
-                completion(.failure(error))
-            }
-        }
-    }
     func refreshMissingStories(for account: Account, completion: @escaping (Result<Void, Error>)-> Void) {
         completion(.success(()))
     }
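
refreshUnreadStories, removed here and re-added below as a private helper, pages through story hashes at most 100 at a time and recurses on the remainder. A minimal sketch of that pagination pattern, with a hypothetical retrieveStories standing in for caller.retrieveStories(hashes:completion:):

import Foundation

// Hypothetical stand-in for caller.retrieveStories(hashes:completion:).
func retrieveStories(hashes: [String], completion: @escaping (Result<Void, Error>) -> Void) {
    // Pretend the network call succeeded.
    DispatchQueue.global().async { completion(.success(())) }
}

func refreshInBatches(hashes: [String], completion: @escaping (Result<Void, Error>) -> Void) {
    guard !hashes.isEmpty else {
        completion(.success(()))
        return
    }

    let batchSize = min(hashes.count, 100) // mirrors the "api limit" noted in the real code
    let batch = Array(hashes[..<batchSize])

    retrieveStories(hashes: batch) { result in
        switch result {
        case .success:
            // Recurse on whatever remains after this batch.
            refreshInBatches(hashes: Array(hashes[batchSize...]), completion: completion)
        case .failure(let error):
            completion(.failure(error))
        }
    }
}
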
@@ -181,19 +149,6 @@ final class NewsBlurAccountDelegate: AccountDelegate {
         account.update(webFeedIDsAndItems: webFeedIDsAndItems, defaultRead: true, completion: completion)
     }
-    func mapStoriesToParsedItems(stories: [NewsBlurStory]?) -> Set<ParsedItem> {
-        guard let stories = stories else {
-            return Set<ParsedItem>()
-        }
-        let parsedItems: [ParsedItem] = stories.map { story in
-            let author = Set([ParsedAuthor(name: story.authorName, url: nil, avatarURL: nil, emailAddress: nil)])
-            return ParsedItem(syncServiceID: story.storyId, uniqueID: String(story.storyId), feedURL: String(story.feedId), url: story.url, externalURL: nil, title: story.title, contentHTML: story.contentHTML, contentText: nil, summary: nil, imageURL: nil, bannerImageURL: nil, datePublished: story.datePublished, dateModified: nil, authors: author, tags: nil, attachments: nil)
-        }
-        return Set(parsedItems)
-    }
     func importOPML(for account: Account, opmlFile: URL, completion: @escaping (Result<Void, Error>) -> ()) {
         completion(.success(()))
     }
@@ -277,11 +232,109 @@ extension NewsBlurAccountDelegate {
         caller.retrieveFeeds { result in
             switch result {
             case .success(let feeds):
-                print(feeds)
+                self.refreshProgress.completeTask()
+                self.syncFeeds(account, feeds)
                 completion(.success(()))
             case .failure(let error):
                 completion(.failure(error))
             }
         }
     }
+    private func syncFeeds(_ account: Account, _ feeds: [NewsBlurFeed]?) {
+        guard let feeds = feeds else { return }
+        os_log(.debug, log: log, "Syncing feeds with %ld feeds.", feeds.count)
+        let subFeedIds = feeds.map { String($0.feedID) }
+        // Remove any feeds that are no longer in the subscriptions
+        if let folders = account.folders {
+            for folder in folders {
+                for feed in folder.topLevelWebFeeds {
+                    if !subFeedIds.contains(feed.webFeedID) {
+                        folder.removeWebFeed(feed)
+                    }
+                }
+            }
+        }
+        for feed in account.topLevelWebFeeds {
+            if !subFeedIds.contains(feed.webFeedID) {
+                account.removeWebFeed(feed)
+            }
+        }
+        // Add any feeds we don't have and update any we do
+        var feedsToAdd = Set<NewsBlurFeed>()
+        feeds.forEach { feed in
+            let subFeedId = String(feed.feedID)
+            if let webFeed = account.existingWebFeed(withWebFeedID: subFeedId) {
+                webFeed.name = feed.title
+                // If the name has been changed on the server remove the locally edited name
+                webFeed.editedName = nil
+                webFeed.homePageURL = feed.siteURL
+                webFeed.subscriptionID = String(feed.feedID)
+                webFeed.faviconURL = feed.favicon
+                webFeed.iconURL = feed.favicon
+            }
+            else {
+                feedsToAdd.insert(feed)
+            }
+        }
+        // Actually add feeds all in one go, so we dont trigger various rebuilding things that Account does.
+        feedsToAdd.forEach { feed in
+            let webFeed = account.createWebFeed(with: feed.title, url: feed.feedURL, webFeedID: String(feed.feedID), homePageURL: feed.siteURL)
+            webFeed.subscriptionID = String(feed.feedID)
+            account.addWebFeed(webFeed)
+        }
+    }
+    private func refreshUnreadStories(for account: Account, hashes: [NewsBlurStoryHash]?, updateFetchDate: Date?, completion: @escaping (Result<Void, Error>) -> Void) {
+        guard let hashes = hashes, !hashes.isEmpty else {
+            if let lastArticleFetch = updateFetchDate {
+                self.accountMetadata?.lastArticleFetchStartTime = lastArticleFetch
+                self.accountMetadata?.lastArticleFetchEndTime = Date()
+            }
+            completion(.success(()))
+            return
+        }
+        let numberOfStories = min(hashes.count, 100) // api limit
+        let hashesToFetch = Array(hashes[..<numberOfStories])
+        caller.retrieveStories(hashes: hashesToFetch) { result in
+            switch result {
+            case .success(let stories):
+                self.processStories(account: account, stories: stories) { error in
+                    self.refreshProgress.completeTask()
+                    if let error = error {
+                        completion(.failure(error))
+                        return
+                    }
+                    self.refreshUnreadStories(for: account, hashes: Array(hashes[numberOfStories...]), updateFetchDate: updateFetchDate, completion: completion)
+                }
+            case .failure(let error):
+                completion(.failure(error))
+            }
+        }
+    }
+    private func mapStoriesToParsedItems(stories: [NewsBlurStory]?) -> Set<ParsedItem> {
+        guard let stories = stories else {
+            return Set<ParsedItem>()
+        }
+        let parsedItems: [ParsedItem] = stories.map { story in
+            let author = Set([ParsedAuthor(name: story.authorName, url: nil, avatarURL: nil, emailAddress: nil)])
+            return ParsedItem(syncServiceID: story.storyID, uniqueID: String(story.storyID), feedURL: String(story.feedID), url: story.url, externalURL: nil, title: story.title, contentHTML: story.contentHTML, contentText: nil, summary: nil, imageURL: nil, bannerImageURL: nil, datePublished: story.datePublished, dateModified: nil, authors: author, tags: nil, attachments: nil)
+        }
+        return Set(parsedItems)
+    }
}
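
The new syncFeeds step is what turns NewsBlur's flat feed list into local account state: feeds the server no longer reports are removed, feeds we already have are updated in place, and unknown feeds are collected and added in one batch. A simplified, self-contained sketch of that reconcile logic, using hypothetical ServerFeed/LocalFeed value types rather than the real Account and WebFeed APIs:

// Hypothetical types; the real code above works against Account and WebFeed.
struct ServerFeed { let feedID: Int; let title: String; let feedURL: String }
struct LocalFeed { let feedID: String; var name: String }

func reconcile(local: [LocalFeed], server: [ServerFeed]) -> (kept: [LocalFeed], added: [ServerFeed]) {
    let serverIDs = Set(server.map { String($0.feedID) })

    // Drop anything the server no longer lists among the subscriptions.
    var kept = local.filter { serverIDs.contains($0.feedID) }

    // Update the feeds we already have from the server copy (assumes unique feed IDs).
    let titlesByID = Dictionary(uniqueKeysWithValues: server.map { (String($0.feedID), $0.title) })
    for index in kept.indices {
        if let title = titlesByID[kept[index].feedID] {
            kept[index].name = title
        }
    }

    // Collect the feeds we don't have yet so they can be added in a single pass.
    let localIDs = Set(kept.map { $0.feedID })
    let added = server.filter { !localIDs.contains(String($0.feedID)) }

    return (kept, added)
}

Batching the additions at the end mirrors the comment in syncFeeds about adding feeds all in one go, so Account's rebuilding work is not triggered once per feed.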