Merge branch 'ios-candidate'

commit ad7dde8564

@@ -31,10 +31,9 @@ public extension Notification.Name {
    static let AccountDidDownloadArticles = Notification.Name(rawValue: "AccountDidDownloadArticles")
    static let AccountStateDidChange = Notification.Name(rawValue: "AccountStateDidChange")
    static let StatusesDidChange = Notification.Name(rawValue: "StatusesDidChange")
    static let WebFeedMetadataDidChange = Notification.Name(rawValue: "WebFeedMetadataDidChange")
}

public enum AccountType: Int {
public enum AccountType: Int, Codable {
    // Raw values should not change since they’re stored on disk.
    case onMyMac = 1
    case feedly = 16
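Note: adding Codable to the Int-backed AccountType gives it a compiler-synthesized conformance that encodes the raw value, so the integers listed above are exactly what gets persisted. A minimal round-trip sketch, not from this commit (the SavedAccountRecord type and the ID string are illustrative):

    import Foundation

    // Hypothetical container type, shown only to illustrate the synthesized coding.
    struct SavedAccountRecord: Codable {
        let accountID: String
        let type: AccountType
    }

    let record = SavedAccountRecord(accountID: "example-account", type: .feedly)
    let data = try! JSONEncoder().encode(record)   // {"accountID":"example-account","type":16}
    let decoded = try! JSONDecoder().decode(SavedAccountRecord.self, from: data)
    assert(decoded.type == .feedly)                // raw value 16 round-trips unchanged
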
@@ -199,8 +198,6 @@ public final class Account: DisplayNameProvider, UnreadCountProvider, Container,
    typealias WebFeedMetadataDictionary = [String: WebFeedMetadata]
    var webFeedMetadata = WebFeedMetadataDictionary()

    var startingUp = true

    public var unreadCount = 0 {
        didSet {
            if unreadCount != oldValue {
@@ -230,7 +227,7 @@ public final class Account: DisplayNameProvider, UnreadCountProvider, Container,
    var refreshProgress: DownloadProgress {
        return delegate.refreshProgress
    }

    init?(dataFolder: String, type: AccountType, accountID: String, transport: Transport? = nil) {
        switch type {
        case .onMyMac:
@@ -287,7 +284,6 @@ public final class Account: DisplayNameProvider, UnreadCountProvider, Container,
        }

        self.delegate.accountDidInitialize(self)
        startingUp = false
    }

    // MARK: - API
@@ -414,11 +410,8 @@ public final class Account: DisplayNameProvider, UnreadCountProvider, Container,
    }

    public func suspendDatabase() {
        database.suspend()
        database.cancelAndSuspend()
        save()
        metadataFile.suspend()
        webFeedMetadataFile.suspend()
        opmlFile.suspend()
    }

    /// Re-open the SQLite database and allow database calls.
@@ -430,12 +423,7 @@ public final class Account: DisplayNameProvider, UnreadCountProvider, Container,

    /// Reload OPML, etc.
    public func resume() {
        metadataFile.resume()
        webFeedMetadataFile.resume()
        opmlFile.resume()
        metadataFile.load()
        webFeedMetadataFile.load()
        opmlFile.load()
        fetchAllUnreadCounts()
    }

    public func save() {
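Note: suspendDatabase() and resume() form the pause/unpause pair an iOS host app would drive around backgrounding. The diff swaps suspend() for cancelAndSuspend(), which by its name also cancels queued database calls rather than only suspending, while resume() reopens the metadata/OPML files, reloads them, and refetches unread counts. A hedged sketch of a call site — the function names and the accounts collection are illustrative, not part of this commit:

    // Hypothetical host-app glue code.
    func appDidEnterBackground(accounts: [Account]) {
        for account in accounts {
            account.suspendDatabase()   // cancel in-flight SQLite calls, then suspend
        }
    }

    func appWillEnterForeground(accounts: [Account]) {
        for account in accounts {
            account.resume()            // resume files, reload OPML/metadata, refetch unread counts
        }
    }
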
@@ -447,7 +435,7 @@ public final class Account: DisplayNameProvider, UnreadCountProvider, Container,
    public func prepareForDeletion() {
        delegate.accountWillBeDeleted(self)
    }

    func loadOPMLItems(_ items: [RSOPMLItem], parentFolder: Folder?) {
        var feedsToAdd = Set<WebFeed>()
@@ -487,14 +475,6 @@ public final class Account: DisplayNameProvider, UnreadCountProvider, Container,

    }

    public func resetWebFeedMetadataAndUnreadCounts() {
        for feed in flattenedWebFeeds() {
            feed.metadata = webFeedMetadata(feedURL: feed.url, webFeedID: feed.webFeedID)
        }
        fetchAllUnreadCounts()
        NotificationCenter.default.post(name: .WebFeedMetadataDidChange, object: self, userInfo: nil)
    }

    public func markArticles(_ articles: Set<Article>, statusKey: ArticleStatus.Key, flag: Bool) -> Set<Article>? {
        return delegate.markArticles(for: self, articles: articles, statusKey: statusKey, flag: flag)
    }
@@ -605,22 +585,7 @@ public final class Account: DisplayNameProvider, UnreadCountProvider, Container,
    }

    public func updateUnreadCounts(for webFeeds: Set<WebFeed>, completion: VoidCompletionBlock? = nil) {
        if webFeeds.isEmpty {
            completion?()
            return
        }

        database.fetchUnreadCounts(for: webFeeds.webFeedIDs()) { unreadCountDictionaryResult in
            if let unreadCountDictionary = try? unreadCountDictionaryResult.get() {
                for webFeed in webFeeds {
                    if let unreadCount = unreadCountDictionary[webFeed.webFeedID] {
                        webFeed.unreadCount = unreadCount
                    }
                }
            }
            completion?()
        }
        fetchUnreadCounts(for: webFeeds, completion: completion)
    }

    public func fetchArticles(_ fetchType: FetchType) throws -> Set<Article> {
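Note: updateUnreadCounts(for:completion:) no longer queries the database inline; it forwards to the private fetchUnreadCounts(for:completion:) helper added later in this diff, which picks a fetch strategy by feed count. A hedged usage sketch — feedsAffectedByLastRefresh() is an illustrative placeholder, not an API from this commit:

    let changedFeeds: Set<WebFeed> = feedsAffectedByLastRefresh()
    account.updateUnreadCounts(for: changedFeeds) {
        // Called once each feed's unreadCount has been refreshed from the
        // articles database, or immediately when changedFeeds is empty.
        print("Unread counts are up to date.")
    }
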
@@ -689,7 +654,7 @@ public final class Account: DisplayNameProvider, UnreadCountProvider, Container,
        database.fetchStarredArticleIDsAsync(webFeedIDs: flattenedWebFeeds().webFeedIDs(), completion: completion)
    }

    /// Fetch articleIDs for articles that we should have, but don’t. These articles are not userDeleted, and they are either (starred) or (unread and newer than the article cutoff date).
    /// Fetch articleIDs for articles that we should have, but don’t. These articles are not userDeleted, and they are either (starred) or (newer than the article cutoff date).
    public func fetchArticleIDsForStatusesWithoutArticlesNewerThanCutoffDate(_ completion: @escaping ArticleIDsCompletionBlock) {
        database.fetchArticleIDsForStatusesWithoutArticlesNewerThanCutoffDate(completion)
    }
@@ -705,9 +670,7 @@ public final class Account: DisplayNameProvider, UnreadCountProvider, Container,
    public func structureDidChange() {
        // Feeds were added or deleted. Or folders added or deleted.
        // Or feeds inside folders were added or deleted.
        if !startingUp {
            opmlFile.markAsDirty()
        }
        opmlFile.markAsDirty()
        flattenedWebFeedsNeedUpdate = true
        webFeedDictionaryNeedsUpdate = true
    }
@@ -791,6 +754,24 @@ public final class Account: DisplayNameProvider, UnreadCountProvider, Container,
        return updatedArticles
    }

    /// Make sure statuses exist. Any existing statuses won’t be touched.
    /// All created statuses will be marked as read and not starred.
    /// Sends a .StatusesDidChange notification.
    func createStatusesIfNeeded(articleIDs: Set<String>, completion: DatabaseCompletionBlock? = nil) {
        guard !articleIDs.isEmpty else {
            completion?(nil)
            return
        }
        database.createStatusesIfNeeded(articleIDs: articleIDs) { error in
            if let error = error {
                completion?(error)
                return
            }
            self.noteStatusesForArticleIDsDidChange(articleIDs)
            completion?(nil)
        }
    }

    /// Mark articleIDs statuses based on statusKey and flag.
    /// Will create statuses in the database and in memory as needed. Sends a .StatusesDidChange notification.
    func mark(articleIDs: Set<String>, statusKey: ArticleStatus.Key, flag: Bool, completion: DatabaseCompletionBlock? = nil) {
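Note: createStatusesIfNeeded(articleIDs:completion:) is the guard sync code can run before flipping read/starred flags: it returns early for an empty set, otherwise creates any missing statuses (read, not starred) and posts .StatusesDidChange. A hedged sketch of chaining it with mark(articleIDs:statusKey:flag:), assuming the .read status key; the IDs are illustrative:

    let unreadIDsFromService: Set<String> = ["article-1", "article-2"]
    account.createStatusesIfNeeded(articleIDs: unreadIDsFromService) { error in
        if let error = error {
            // Surface or log the database error; don't mark anything.
            return
        }
        // Statuses now exist, so marking them unread is safe.
        account.mark(articleIDs: unreadIDsFromService, statusKey: .read, flag: false)
    }
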
@@ -1206,26 +1187,69 @@ private extension Account {
        NotificationCenter.default.post(name: .StatusesDidChange, object: self, userInfo: [UserInfoKey.articleIDs: articleIDs])
    }

    func fetchAllUnreadCounts() {
    /// Fetch unread counts for zero or more feeds.
    ///
    /// Uses the most efficient method based on how many feeds were passed in.
    func fetchUnreadCounts(for feeds: Set<WebFeed>, completion: VoidCompletionBlock?) {
        if feeds.isEmpty {
            completion?()
            return
        }
        if feeds.count == 1, let feed = feeds.first {
            fetchUnreadCount(feed, completion)
        }
        else if feeds.count < 10 {
            fetchUnreadCounts(feeds, completion)
        }
        else {
            fetchAllUnreadCounts(completion)
        }
    }

    func fetchUnreadCount(_ feed: WebFeed, _ completion: VoidCompletionBlock?) {
        database.fetchUnreadCount(feed.webFeedID) { result in
            if let unreadCount = try? result.get() {
                feed.unreadCount = unreadCount
            }
            completion?()
        }
    }

    func fetchUnreadCounts(_ feeds: Set<WebFeed>, _ completion: VoidCompletionBlock?) {
        let webFeedIDs = Set(feeds.map { $0.webFeedID })
        database.fetchUnreadCounts(for: webFeedIDs) { result in
            if let unreadCountDictionary = try? result.get() {
                self.processUnreadCounts(unreadCountDictionary: unreadCountDictionary, feeds: feeds)
            }
            completion?()
        }
    }

    func fetchAllUnreadCounts(_ completion: VoidCompletionBlock? = nil) {
        fetchingAllUnreadCounts = true
        database.fetchAllUnreadCounts { result in
            guard let unreadCountDictionary = try? result.get() else {
                completion?()
                return
            }
            self.processUnreadCounts(unreadCountDictionary: unreadCountDictionary, feeds: self.flattenedWebFeeds())

        database.fetchAllNonZeroUnreadCounts { (unreadCountDictionaryResult) in
            if let unreadCountDictionary = try? unreadCountDictionaryResult.get() {
                self.flattenedWebFeeds().forEach{ (feed) in
                    // When the unread count is zero, it won’t appear in unreadCountDictionary.
                    if let unreadCount = unreadCountDictionary[feed.webFeedID] {
                        feed.unreadCount = unreadCount
                    }
                    else {
                        feed.unreadCount = 0
                    }
                }
                self.fetchingAllUnreadCounts = false
                self.updateUnreadCount()

            self.fetchingAllUnreadCounts = false
            self.updateUnreadCount()
            if !self.isUnreadCountsInitialized {
                self.isUnreadCountsInitialized = true
                self.postUnreadCountDidInitializeNotification()
            }
            completion?()
        }
    }

    func processUnreadCounts(unreadCountDictionary: UnreadCountDictionary, feeds: Set<WebFeed>) {
        for feed in feeds {
            // When the unread count is zero, it won’t appear in unreadCountDictionary.
            let unreadCount = unreadCountDictionary[feed.webFeedID] ?? 0
            feed.unreadCount = unreadCount
        }
    }
}
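Note: the replacement for the old monolithic fetchAllUnreadCounts() is a small strategy ladder — one feed gets a single-feed query, fewer than ten feeds get a targeted multi-feed query, and anything larger falls back to fetching every count (also the path that flips isUnreadCountsInitialized and posts the did-initialize notification). processUnreadCounts then treats a missing dictionary entry as zero, since feeds with no unread articles are omitted. A simplified restatement of the dispatch rule, not the shipping code:

    // Empty sets never reach this point; fetchUnreadCounts(for:completion:) returns early.
    enum UnreadCountFetchStrategy: Equatable {
        case singleFeed     // one targeted query
        case someFeeds      // small batch query
        case allFeeds       // a full fetch is cheaper for large sets
    }

    func strategy(forFeedCount count: Int) -> UnreadCountFetchStrategy {
        precondition(count > 0)
        if count == 1 { return .singleFeed }
        if count < 10 { return .someFeeds }
        return .allFeeds
    }

    assert(strategy(forFeedCount: 1) == .singleFeed)
    assert(strategy(forFeedCount: 9) == .someFeeds)
    assert(strategy(forFeedCount: 250) == .allFeeds)
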
@@ -1243,13 +1267,13 @@ extension Account {

extension Account: OPMLRepresentable {

    public func OPMLString(indentLevel: Int, strictConformance: Bool) -> String {
    public func OPMLString(indentLevel: Int, allowCustomAttributes: Bool) -> String {
        var s = ""
        for feed in topLevelWebFeeds.sorted(by: { $0.nameForDisplay < $1.nameForDisplay }) {
            s += feed.OPMLString(indentLevel: indentLevel + 1, strictConformance: strictConformance)
        for feed in topLevelWebFeeds.sorted() {
            s += feed.OPMLString(indentLevel: indentLevel + 1, allowCustomAttributes: allowCustomAttributes)
        }
        for folder in folders!.sorted(by: { $0.nameForDisplay < $1.nameForDisplay }) {
            s += folder.OPMLString(indentLevel: indentLevel + 1, strictConformance: strictConformance)
        for folder in folders!.sorted() {
            s += folder.OPMLString(indentLevel: indentLevel + 1, allowCustomAttributes: allowCustomAttributes)
        }
        return s
    }
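Note: two things change in the OPML output path — the parameter is renamed from strictConformance to allowCustomAttributes, and the explicit sort-by-display-name closures become plain sorted() calls, which only compiles if WebFeed and Folder conform to Comparable (presumably added elsewhere in this branch; it is not in this hunk). A hedged sketch of what such a conformance typically looks like, purely illustrative:

    import Foundation

    // Illustrative only — not the conformance that ships with this commit.
    extension WebFeed: Comparable {
        public static func < (lhs: WebFeed, rhs: WebFeed) -> Bool {
            // Case-insensitive, locale-aware ordering by the user-visible name.
            return lhs.nameForDisplay.localizedStandardCompare(rhs.nameForDisplay) == .orderedAscending
        }
    }
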
@@ -18,7 +18,6 @@
3B826DAE2385C81C00FC1ADB /* FeedWranglerSubscriptionsRequest.swift in Sources */ = {isa = PBXBuildFile; fileRef = 3B826DA52385C81C00FC1ADB /* FeedWranglerSubscriptionsRequest.swift */; };
3B826DAF2385C81C00FC1ADB /* FeedWranglerGenericResult.swift in Sources */ = {isa = PBXBuildFile; fileRef = 3B826DA62385C81C00FC1ADB /* FeedWranglerGenericResult.swift */; };
3BC23AB92385ECB100371CBA /* FeedWranglerSubscriptionResult.swift in Sources */ = {isa = PBXBuildFile; fileRef = 3BC23AB82385ECB100371CBA /* FeedWranglerSubscriptionResult.swift */; };
5107A099227DE42E00C7C3C5 /* AccountCredentialsTest.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5107A098227DE42E00C7C3C5 /* AccountCredentialsTest.swift */; };
5107A09B227DE49500C7C3C5 /* TestAccountManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5107A09A227DE49500C7C3C5 /* TestAccountManager.swift */; };
5107A09D227DE77700C7C3C5 /* TestTransport.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5107A09C227DE77700C7C3C5 /* TestTransport.swift */; };
510BD111232C3801002692E4 /* AccountMetadataFile.swift in Sources */ = {isa = PBXBuildFile; fileRef = 510BD110232C3801002692E4 /* AccountMetadataFile.swift */; };
@@ -88,10 +87,8 @@
84EAC4822148CC6300F154AB /* RSDatabase.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 84EAC4812148CC6300F154AB /* RSDatabase.framework */; };
84F1F06E2243524700DA0616 /* AccountMetadata.swift in Sources */ = {isa = PBXBuildFile; fileRef = 84AF4EA3222CFDD100F6A800 /* AccountMetadata.swift */; };
84F73CF1202788D90000BCEF /* ArticleFetcher.swift in Sources */ = {isa = PBXBuildFile; fileRef = 84F73CF0202788D80000BCEF /* ArticleFetcher.swift */; };
9E0260CB236FF99A00D122D3 /* FeedlyRefreshAccessTokenOperationTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9E0260CA236FF99A00D122D3 /* FeedlyRefreshAccessTokenOperationTests.swift */; };
9E03C11C235D921400FB6D9E /* FeedlyOperationTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9E03C11B235D921400FB6D9E /* FeedlyOperationTests.swift */; };
9E03C11E235D976500FB6D9E /* FeedlyGetCollectionsOperationTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9E03C11D235D976500FB6D9E /* FeedlyGetCollectionsOperationTests.swift */; };
9E03C120235E62A500FB6D9E /* FeedlyMirrorCollectionsAsFoldersOperationTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9E03C11F235E62A500FB6D9E /* FeedlyMirrorCollectionsAsFoldersOperationTests.swift */; };
9E03C122235E62E100FB6D9E /* FeedlyTestSupport.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9E03C121235E62E100FB6D9E /* FeedlyTestSupport.swift */; };
9E12B0202334696A00ADE5A0 /* FeedlyCreateFeedsForCollectionFoldersOperation.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9E12B01F2334696A00ADE5A0 /* FeedlyCreateFeedsForCollectionFoldersOperation.swift */; };
9E1773D32345700F0056A5A8 /* FeedlyLink.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9E1773D22345700E0056A5A8 /* FeedlyLink.swift */; };
@@ -99,7 +96,6 @@
9E1773D7234575AB0056A5A8 /* FeedlyTag.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9E1773D6234575AB0056A5A8 /* FeedlyTag.swift */; };
9E1773D923458D590056A5A8 /* FeedlyResourceId.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9E1773D823458D590056A5A8 /* FeedlyResourceId.swift */; };
9E1773DB234593CF0056A5A8 /* FeedlyResourceIdTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9E1773DA234593CF0056A5A8 /* FeedlyResourceIdTests.swift */; };
9E1AF38B2353D41A008BD1D5 /* FeedlySetStarredArticlesOperation.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9E1AF38A2353D41A008BD1D5 /* FeedlySetStarredArticlesOperation.swift */; };
9E1D154D233370D800F4944C /* FeedlySyncAllOperation.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9E1D154C233370D800F4944C /* FeedlySyncAllOperation.swift */; };
9E1D154F233371DD00F4944C /* FeedlyGetCollectionsOperation.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9E1D154E233371DD00F4944C /* FeedlyGetCollectionsOperation.swift */; };
9E1D15512334282100F4944C /* FeedlyMirrorCollectionsAsFoldersOperation.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9E1D15502334282100F4944C /* FeedlyMirrorCollectionsAsFoldersOperation.swift */; };
@@ -110,28 +106,22 @@
9E1D155D233447F000F4944C /* FeedlyUpdateAccountFeedsWithItemsOperation.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9E1D155C233447F000F4944C /* FeedlyUpdateAccountFeedsWithItemsOperation.swift */; };
9E1FF8602368216B00834C24 /* TestGetStreamIdsService.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9E1FF85F2368216B00834C24 /* TestGetStreamIdsService.swift */; };
9E1FF8622368219B00834C24 /* TestGetPagedStreamIdsService.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9E1FF8612368219B00834C24 /* TestGetPagedStreamIdsService.swift */; };
9E1FF8642368EC2400834C24 /* FeedlySyncAllOperationTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9E1FF8632368EC2400834C24 /* FeedlySyncAllOperationTests.swift */; };
9E1FF8662368ED7E00834C24 /* TestMarkArticlesService.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9E1FF8652368ED7E00834C24 /* TestMarkArticlesService.swift */; };
9E1FF8682368EE4900834C24 /* TestGetCollectionsService.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9E1FF8672368EE4900834C24 /* TestGetCollectionsService.swift */; };
9E3CFFFD2368202000BA7365 /* FeedlySyncUnreadStatusesOperationTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9E3CFFFC2368202000BA7365 /* FeedlySyncUnreadStatusesOperationTests.swift */; };
9E4828F223617F4A00D68691 /* FeedlySetUnreadArticlesOperationTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9E4828F123617F4A00D68691 /* FeedlySetUnreadArticlesOperationTests.swift */; };
9E489E8D2360652C004372EE /* FeedlyGetStreamIdsOperationTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9E489E8C2360652C004372EE /* FeedlyGetStreamIdsOperationTests.swift */; };
9E489E912360ED30004372EE /* FeedlyOrganiseParsedItemsByFeedOperationTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9E489E902360ED30004372EE /* FeedlyOrganiseParsedItemsByFeedOperationTests.swift */; };
9E489E93236101FC004372EE /* FeedlyUpdateAccountFeedsWithItemsOperationTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9E489E92236101FC004372EE /* FeedlyUpdateAccountFeedsWithItemsOperationTests.swift */; };
9E44C90F23C6FF3600CCC286 /* FeedlyIngestStreamArticleIdsOperation.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9E44C90E23C6FF3600CCC286 /* FeedlyIngestStreamArticleIdsOperation.swift */; };
9E5ABE9A236BE6BD00B5DE9F /* feedly-1-initial in Resources */ = {isa = PBXBuildFile; fileRef = 9E5ABE99236BE6BC00B5DE9F /* feedly-1-initial */; };
9E5DE60E23C3F4B70064DA30 /* FeedlyFetchIdsForMissingArticlesOperation.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9E5DE60D23C3F4B70064DA30 /* FeedlyFetchIdsForMissingArticlesOperation.swift */; };
9E5EC15923E01D8A00A4E503 /* FeedlyCollectionParser.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9E5EC15823E01D8A00A4E503 /* FeedlyCollectionParser.swift */; };
9E5EC15B23E01DEF00A4E503 /* FeedlyRTLTextSanitizer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9E5EC15A23E01DEF00A4E503 /* FeedlyRTLTextSanitizer.swift */; };
9E5EC15D23E0D58500A4E503 /* FeedlyFeedParser.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9E5EC15C23E0D58500A4E503 /* FeedlyFeedParser.swift */; };
9E672394236F7CA0000BE141 /* FeedlyRefreshAccessTokenOperation.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9E672393236F7CA0000BE141 /* FeedlyRefreshAccessTokenOperation.swift */; };
9E672396236F7E68000BE141 /* OAuthAcessTokenRefreshing.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9E672395236F7E68000BE141 /* OAuthAcessTokenRefreshing.swift */; };
9E713653233AD63E00765C84 /* FeedlySetUnreadArticlesOperation.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9E713652233AD63E00765C84 /* FeedlySetUnreadArticlesOperation.swift */; };
9E7299D723505E9600DAEFB7 /* FeedlyAddFeedToCollectionOperation.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9E7299D623505E9600DAEFB7 /* FeedlyAddFeedToCollectionOperation.swift */; };
9E7299D9235062A200DAEFB7 /* FeedlyResourceProviding.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9E7299D8235062A200DAEFB7 /* FeedlyResourceProviding.swift */; };
9E784EBE237E890600099B1B /* FeedlyLogoutOperation.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9E784EBD237E890600099B1B /* FeedlyLogoutOperation.swift */; };
9E784EC0237E8BE100099B1B /* FeedlyLogoutOperationTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9E784EBF237E8BE100099B1B /* FeedlyLogoutOperationTests.swift */; };
9E79F7742395C9F00031DB98 /* feedly-add-new-feed in Resources */ = {isa = PBXBuildFile; fileRef = 9E79F7732395C9EF0031DB98 /* feedly-add-new-feed */; };
9E7F88AC235EDDC2009AB9DF /* FeedlyCreateFeedsForCollectionFoldersOperationTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9E7F88AB235EDDC2009AB9DF /* FeedlyCreateFeedsForCollectionFoldersOperationTests.swift */; };
9E7F88AE235FBB11009AB9DF /* FeedlyGetStreamContentsOperationTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9E7F88AD235FBB11009AB9DF /* FeedlyGetStreamContentsOperationTests.swift */; };
9E84DC472359A23200D6E809 /* FeedlySyncUnreadStatusesOperation.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9E84DC462359A23200D6E809 /* FeedlySyncUnreadStatusesOperation.swift */; };
9E84DC472359A23200D6E809 /* FeedlyIngestUnreadArticleIdsOperation.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9E84DC462359A23200D6E809 /* FeedlyIngestUnreadArticleIdsOperation.swift */; };
9E84DC492359A73600D6E809 /* FeedlyCheckpointOperation.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9E84DC482359A73600D6E809 /* FeedlyCheckpointOperation.swift */; };
9E85C8E42366FE0100D0F1F7 /* FeedlySyncStarredArticlesOperationTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9E85C8E32366FE0100D0F1F7 /* FeedlySyncStarredArticlesOperationTests.swift */; };
9E85C8E62366FED600D0F1F7 /* TestGetStreamContentsService.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9E85C8E52366FED600D0F1F7 /* TestGetStreamContentsService.swift */; };
9E85C8E82366FF4200D0F1F7 /* TestGetPagedStreamContentsService.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9E85C8E72366FF4200D0F1F7 /* TestGetPagedStreamContentsService.swift */; };
9E85C8EB236700E600D0F1F7 /* FeedlyGetEntriesOperation.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9E85C8E9236700AD00D0F1F7 /* FeedlyGetEntriesOperation.swift */; };
@@ -142,7 +132,7 @@
9EA643CF2391D3560018A28C /* FeedlyAddExistingFeedOperation.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9EA643CE2391D3550018A28C /* FeedlyAddExistingFeedOperation.swift */; };
9EA643D3239305680018A28C /* FeedlySearchOperation.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9EA643D2239305680018A28C /* FeedlySearchOperation.swift */; };
9EA643D5239306AC0018A28C /* FeedlyFeedsSearchResponse.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9EA643D4239306AC0018A28C /* FeedlyFeedsSearchResponse.swift */; };
9EA643D923945CE00018A28C /* FeedlyAddNewFeedOperationTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9EA643D823945CE00018A28C /* FeedlyAddNewFeedOperationTests.swift */; };
9EAADA1023C93144003A801F /* TestGetEntriesService.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9EAADA0F23C93144003A801F /* TestGetEntriesService.swift */; };
9EAEC60C2332FE830085D7C9 /* FeedlyCollection.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9EAEC60B2332FE830085D7C9 /* FeedlyCollection.swift */; };
9EAEC60E2332FEC20085D7C9 /* FeedlyFeed.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9EAEC60D2332FEC20085D7C9 /* FeedlyFeed.swift */; };
9EAEC624233315F60085D7C9 /* FeedlyEntry.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9EAEC623233315F60085D7C9 /* FeedlyEntry.swift */; };
@@ -150,10 +140,9 @@
9EAEC62823331C350085D7C9 /* FeedlyCategory.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9EAEC62723331C350085D7C9 /* FeedlyCategory.swift */; };
9EAEC62A23331EE70085D7C9 /* FeedlyOrigin.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9EAEC62923331EE70085D7C9 /* FeedlyOrigin.swift */; };
9EB1D576238E6A3900A753D7 /* FeedlyAddNewFeedOperation.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9EB1D575238E6A3900A753D7 /* FeedlyAddNewFeedOperation.swift */; };
9EC228552362C17F00766EF8 /* FeedlySetStarredArticlesOperationTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9EC228542362C17F00766EF8 /* FeedlySetStarredArticlesOperationTests.swift */; };
9EBD49C023C67602005AD5CD /* FeedlyDownloadArticlesOperation.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9EBD49BF23C67602005AD5CD /* FeedlyDownloadArticlesOperation.swift */; };
9EBD49C223C67784005AD5CD /* FeedlyEntryIdentifierProviding.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9EBD49C123C67784005AD5CD /* FeedlyEntryIdentifierProviding.swift */; };
9EC228572362C7F900766EF8 /* FeedlyCheckpointOperationTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9EC228562362C7F900766EF8 /* FeedlyCheckpointOperationTests.swift */; };
9EC228592362D0EA00766EF8 /* FeedlySendArticleStatusesOperationTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9EC228582362D0EA00766EF8 /* FeedlySendArticleStatusesOperationTests.swift */; };
9EC2285B23639A6500766EF8 /* FeedlySyncStreamContentsOperationTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9EC2285A23639A6500766EF8 /* FeedlySyncStreamContentsOperationTests.swift */; };
9EC688EA232B973C00A8D0A2 /* FeedlyAPICaller.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9EC688E9232B973C00A8D0A2 /* FeedlyAPICaller.swift */; };
9EC688EC232C583300A8D0A2 /* FeedlyAccountDelegate+OAuth.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9EC688EB232C583300A8D0A2 /* FeedlyAccountDelegate+OAuth.swift */; };
9EC688EE232C58E800A8D0A2 /* OAuthAuthorizationCodeGranting.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9EC688ED232C58E800A8D0A2 /* OAuthAuthorizationCodeGranting.swift */; };
@@ -173,11 +162,15 @@
9EEAE073235D01AE00E3FEE4 /* FeedlyGetStreamIdsService.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9EEAE072235D01AE00E3FEE4 /* FeedlyGetStreamIdsService.swift */; };
9EEAE075235D01C400E3FEE4 /* FeedlyMarkArticlesService.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9EEAE074235D01C400E3FEE4 /* FeedlyMarkArticlesService.swift */; };
9EEEF71F23545CB4009E9D80 /* FeedlySendArticleStatusesOperation.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9EEEF71E23545CB4009E9D80 /* FeedlySendArticleStatusesOperation.swift */; };
9EEEF7212355277F009E9D80 /* FeedlySyncStarredArticlesOperation.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9EEEF7202355277F009E9D80 /* FeedlySyncStarredArticlesOperation.swift */; };
9EEEF7212355277F009E9D80 /* FeedlyIngestStarredArticleIdsOperation.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9EEEF7202355277F009E9D80 /* FeedlyIngestStarredArticleIdsOperation.swift */; };
9EF1B10323584B4C000A486A /* FeedlySyncStreamContentsOperation.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9EF1B10223584B4C000A486A /* FeedlySyncStreamContentsOperation.swift */; };
9EF1B10723590D61000A486A /* FeedlyGetStreamIdsOperation.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9EF1B10623590D61000A486A /* FeedlyGetStreamIdsOperation.swift */; };
9EF1B10923590E93000A486A /* FeedlyStreamIds.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9EF1B10823590E93000A486A /* FeedlyStreamIds.swift */; };
9EF35F7A234E830E003AE2AE /* FeedlyCompoundOperation.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9EF35F79234E830E003AE2AE /* FeedlyCompoundOperation.swift */; };
9EF2602C23C91FFE006D160C /* FeedlyGetUpdatedArticleIdsOperation.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9EF2602B23C91FFE006D160C /* FeedlyGetUpdatedArticleIdsOperation.swift */; };
9EF58EB023E1606000992A2B /* FeedlyTextSanitizationTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9EF58EAF23E1606000992A2B /* FeedlyTextSanitizationTests.swift */; };
9EF58EB223E1647400992A2B /* FeedlyCollectionParserTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9EF58EB123E1647400992A2B /* FeedlyCollectionParserTests.swift */; };
9EF58EB423E1655300992A2B /* FeedlyFeedParserTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9EF58EB323E1655300992A2B /* FeedlyFeedParserTests.swift */; };
9EF58EB623E1669F00992A2B /* FeedlyEntryParserTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9EF58EB523E1669F00992A2B /* FeedlyEntryParserTests.swift */; };
/* End PBXBuildFile section */

/* Begin PBXContainerItemProxy section */
@@ -322,7 +315,6 @@
9E1773D6234575AB0056A5A8 /* FeedlyTag.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlyTag.swift; sourceTree = "<group>"; };
9E1773D823458D590056A5A8 /* FeedlyResourceId.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlyResourceId.swift; sourceTree = "<group>"; };
9E1773DA234593CF0056A5A8 /* FeedlyResourceIdTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlyResourceIdTests.swift; sourceTree = "<group>"; };
9E1AF38A2353D41A008BD1D5 /* FeedlySetStarredArticlesOperation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlySetStarredArticlesOperation.swift; sourceTree = "<group>"; };
9E1D154C233370D800F4944C /* FeedlySyncAllOperation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlySyncAllOperation.swift; sourceTree = "<group>"; };
9E1D154E233371DD00F4944C /* FeedlyGetCollectionsOperation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlyGetCollectionsOperation.swift; sourceTree = "<group>"; };
9E1D15502334282100F4944C /* FeedlyMirrorCollectionsAsFoldersOperation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlyMirrorCollectionsAsFoldersOperation.swift; sourceTree = "<group>"; };
@@ -336,15 +328,17 @@
9E1FF8632368EC2400834C24 /* FeedlySyncAllOperationTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlySyncAllOperationTests.swift; sourceTree = "<group>"; };
9E1FF8652368ED7E00834C24 /* TestMarkArticlesService.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TestMarkArticlesService.swift; sourceTree = "<group>"; };
9E1FF8672368EE4900834C24 /* TestGetCollectionsService.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TestGetCollectionsService.swift; sourceTree = "<group>"; };
9E3CFFFC2368202000BA7365 /* FeedlySyncUnreadStatusesOperationTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlySyncUnreadStatusesOperationTests.swift; sourceTree = "<group>"; };
9E4828F123617F4A00D68691 /* FeedlySetUnreadArticlesOperationTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlySetUnreadArticlesOperationTests.swift; sourceTree = "<group>"; };
9E44C90E23C6FF3600CCC286 /* FeedlyIngestStreamArticleIdsOperation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlyIngestStreamArticleIdsOperation.swift; sourceTree = "<group>"; };
9E489E8C2360652C004372EE /* FeedlyGetStreamIdsOperationTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlyGetStreamIdsOperationTests.swift; sourceTree = "<group>"; };
9E489E902360ED30004372EE /* FeedlyOrganiseParsedItemsByFeedOperationTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlyOrganiseParsedItemsByFeedOperationTests.swift; sourceTree = "<group>"; };
9E489E92236101FC004372EE /* FeedlyUpdateAccountFeedsWithItemsOperationTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlyUpdateAccountFeedsWithItemsOperationTests.swift; sourceTree = "<group>"; };
9E5ABE99236BE6BC00B5DE9F /* feedly-1-initial */ = {isa = PBXFileReference; lastKnownFileType = folder; path = "feedly-1-initial"; sourceTree = "<group>"; };
9E5DE60D23C3F4B70064DA30 /* FeedlyFetchIdsForMissingArticlesOperation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlyFetchIdsForMissingArticlesOperation.swift; sourceTree = "<group>"; };
9E5EC15823E01D8A00A4E503 /* FeedlyCollectionParser.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlyCollectionParser.swift; sourceTree = "<group>"; };
9E5EC15A23E01DEF00A4E503 /* FeedlyRTLTextSanitizer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlyRTLTextSanitizer.swift; sourceTree = "<group>"; };
9E5EC15C23E0D58500A4E503 /* FeedlyFeedParser.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlyFeedParser.swift; sourceTree = "<group>"; };
9E672393236F7CA0000BE141 /* FeedlyRefreshAccessTokenOperation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlyRefreshAccessTokenOperation.swift; sourceTree = "<group>"; };
9E672395236F7E68000BE141 /* OAuthAcessTokenRefreshing.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OAuthAcessTokenRefreshing.swift; sourceTree = "<group>"; };
9E713652233AD63E00765C84 /* FeedlySetUnreadArticlesOperation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlySetUnreadArticlesOperation.swift; sourceTree = "<group>"; };
9E7299D623505E9600DAEFB7 /* FeedlyAddFeedToCollectionOperation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlyAddFeedToCollectionOperation.swift; sourceTree = "<group>"; };
9E7299D8235062A200DAEFB7 /* FeedlyResourceProviding.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlyResourceProviding.swift; sourceTree = "<group>"; };
9E784EBD237E890600099B1B /* FeedlyLogoutOperation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlyLogoutOperation.swift; sourceTree = "<group>"; };
@@ -352,9 +346,8 @@
9E79F7732395C9EF0031DB98 /* feedly-add-new-feed */ = {isa = PBXFileReference; lastKnownFileType = folder; path = "feedly-add-new-feed"; sourceTree = "<group>"; };
9E7F88AB235EDDC2009AB9DF /* FeedlyCreateFeedsForCollectionFoldersOperationTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlyCreateFeedsForCollectionFoldersOperationTests.swift; sourceTree = "<group>"; };
9E7F88AD235FBB11009AB9DF /* FeedlyGetStreamContentsOperationTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlyGetStreamContentsOperationTests.swift; sourceTree = "<group>"; };
9E84DC462359A23200D6E809 /* FeedlySyncUnreadStatusesOperation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlySyncUnreadStatusesOperation.swift; sourceTree = "<group>"; };
9E84DC462359A23200D6E809 /* FeedlyIngestUnreadArticleIdsOperation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlyIngestUnreadArticleIdsOperation.swift; sourceTree = "<group>"; };
9E84DC482359A73600D6E809 /* FeedlyCheckpointOperation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlyCheckpointOperation.swift; sourceTree = "<group>"; };
9E85C8E32366FE0100D0F1F7 /* FeedlySyncStarredArticlesOperationTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlySyncStarredArticlesOperationTests.swift; sourceTree = "<group>"; };
9E85C8E52366FED600D0F1F7 /* TestGetStreamContentsService.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TestGetStreamContentsService.swift; sourceTree = "<group>"; };
9E85C8E72366FF4200D0F1F7 /* TestGetPagedStreamContentsService.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TestGetPagedStreamContentsService.swift; sourceTree = "<group>"; };
9E85C8E9236700AD00D0F1F7 /* FeedlyGetEntriesOperation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlyGetEntriesOperation.swift; sourceTree = "<group>"; };
@@ -366,6 +359,7 @@
9EA643D2239305680018A28C /* FeedlySearchOperation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlySearchOperation.swift; sourceTree = "<group>"; };
9EA643D4239306AC0018A28C /* FeedlyFeedsSearchResponse.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlyFeedsSearchResponse.swift; sourceTree = "<group>"; };
9EA643D823945CE00018A28C /* FeedlyAddNewFeedOperationTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlyAddNewFeedOperationTests.swift; sourceTree = "<group>"; };
9EAADA0F23C93144003A801F /* TestGetEntriesService.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TestGetEntriesService.swift; sourceTree = "<group>"; };
9EAEC60B2332FE830085D7C9 /* FeedlyCollection.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlyCollection.swift; sourceTree = "<group>"; };
9EAEC60D2332FEC20085D7C9 /* FeedlyFeed.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlyFeed.swift; sourceTree = "<group>"; };
9EAEC623233315F60085D7C9 /* FeedlyEntry.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlyEntry.swift; sourceTree = "<group>"; };
@@ -373,7 +367,8 @@
9EAEC62723331C350085D7C9 /* FeedlyCategory.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlyCategory.swift; sourceTree = "<group>"; };
9EAEC62923331EE70085D7C9 /* FeedlyOrigin.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlyOrigin.swift; sourceTree = "<group>"; };
9EB1D575238E6A3900A753D7 /* FeedlyAddNewFeedOperation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlyAddNewFeedOperation.swift; sourceTree = "<group>"; };
9EC228542362C17F00766EF8 /* FeedlySetStarredArticlesOperationTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlySetStarredArticlesOperationTests.swift; sourceTree = "<group>"; };
9EBD49BF23C67602005AD5CD /* FeedlyDownloadArticlesOperation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlyDownloadArticlesOperation.swift; sourceTree = "<group>"; };
9EBD49C123C67784005AD5CD /* FeedlyEntryIdentifierProviding.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlyEntryIdentifierProviding.swift; sourceTree = "<group>"; };
9EC228562362C7F900766EF8 /* FeedlyCheckpointOperationTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlyCheckpointOperationTests.swift; sourceTree = "<group>"; };
9EC228582362D0EA00766EF8 /* FeedlySendArticleStatusesOperationTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlySendArticleStatusesOperationTests.swift; sourceTree = "<group>"; };
9EC2285A23639A6500766EF8 /* FeedlySyncStreamContentsOperationTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlySyncStreamContentsOperationTests.swift; sourceTree = "<group>"; };
@@ -396,11 +391,15 @@
9EEAE072235D01AE00E3FEE4 /* FeedlyGetStreamIdsService.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlyGetStreamIdsService.swift; sourceTree = "<group>"; };
9EEAE074235D01C400E3FEE4 /* FeedlyMarkArticlesService.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlyMarkArticlesService.swift; sourceTree = "<group>"; };
9EEEF71E23545CB4009E9D80 /* FeedlySendArticleStatusesOperation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlySendArticleStatusesOperation.swift; sourceTree = "<group>"; };
9EEEF7202355277F009E9D80 /* FeedlySyncStarredArticlesOperation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlySyncStarredArticlesOperation.swift; sourceTree = "<group>"; };
9EEEF7202355277F009E9D80 /* FeedlyIngestStarredArticleIdsOperation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlyIngestStarredArticleIdsOperation.swift; sourceTree = "<group>"; };
9EF1B10223584B4C000A486A /* FeedlySyncStreamContentsOperation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlySyncStreamContentsOperation.swift; sourceTree = "<group>"; };
9EF1B10623590D61000A486A /* FeedlyGetStreamIdsOperation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlyGetStreamIdsOperation.swift; sourceTree = "<group>"; };
9EF1B10823590E93000A486A /* FeedlyStreamIds.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlyStreamIds.swift; sourceTree = "<group>"; };
9EF35F79234E830E003AE2AE /* FeedlyCompoundOperation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlyCompoundOperation.swift; sourceTree = "<group>"; };
9EF2602B23C91FFE006D160C /* FeedlyGetUpdatedArticleIdsOperation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlyGetUpdatedArticleIdsOperation.swift; sourceTree = "<group>"; };
9EF58EAF23E1606000992A2B /* FeedlyTextSanitizationTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlyTextSanitizationTests.swift; sourceTree = "<group>"; };
9EF58EB123E1647400992A2B /* FeedlyCollectionParserTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlyCollectionParserTests.swift; sourceTree = "<group>"; };
9EF58EB323E1655300992A2B /* FeedlyFeedParserTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlyFeedParserTests.swift; sourceTree = "<group>"; };
9EF58EB523E1669F00992A2B /* FeedlyEntryParserTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedlyEntryParserTests.swift; sourceTree = "<group>"; };
D511EEB5202422BB00712EC3 /* Account_project_debug.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; path = Account_project_debug.xcconfig; sourceTree = "<group>"; };
D511EEB6202422BB00712EC3 /* Account_target.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; path = Account_target.xcconfig; sourceTree = "<group>"; };
D511EEB7202422BB00712EC3 /* Account_project_release.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; path = Account_project_release.xcconfig; sourceTree = "<group>"; };
@@ -648,6 +647,7 @@
9E85C8E72366FF4200D0F1F7 /* TestGetPagedStreamContentsService.swift */,
9E1FF8612368219B00834C24 /* TestGetPagedStreamIdsService.swift */,
9E1FF8672368EE4900834C24 /* TestGetCollectionsService.swift */,
9EAADA0F23C93144003A801F /* TestGetEntriesService.swift */,
9E1FF8652368ED7E00834C24 /* TestMarkArticlesService.swift */,
9E03C11B235D921400FB6D9E /* FeedlyOperationTests.swift */,
9E03C11D235D976500FB6D9E /* FeedlyGetCollectionsOperationTests.swift */,
@@ -657,19 +657,19 @@
9E489E8C2360652C004372EE /* FeedlyGetStreamIdsOperationTests.swift */,
9E489E902360ED30004372EE /* FeedlyOrganiseParsedItemsByFeedOperationTests.swift */,
9E489E92236101FC004372EE /* FeedlyUpdateAccountFeedsWithItemsOperationTests.swift */,
9E4828F123617F4A00D68691 /* FeedlySetUnreadArticlesOperationTests.swift */,
9EC228542362C17F00766EF8 /* FeedlySetStarredArticlesOperationTests.swift */,
9EC228562362C7F900766EF8 /* FeedlyCheckpointOperationTests.swift */,
9EC228582362D0EA00766EF8 /* FeedlySendArticleStatusesOperationTests.swift */,
9EC2285A23639A6500766EF8 /* FeedlySyncStreamContentsOperationTests.swift */,
9E85C8E32366FE0100D0F1F7 /* FeedlySyncStarredArticlesOperationTests.swift */,
9E3CFFFC2368202000BA7365 /* FeedlySyncUnreadStatusesOperationTests.swift */,
9E1FF8632368EC2400834C24 /* FeedlySyncAllOperationTests.swift */,
9EC804E2236C18AB0057CFCB /* FeedlySyncAllMockResponseProvider.swift */,
9E1773DA234593CF0056A5A8 /* FeedlyResourceIdTests.swift */,
9E0260CA236FF99A00D122D3 /* FeedlyRefreshAccessTokenOperationTests.swift */,
9E784EBF237E8BE100099B1B /* FeedlyLogoutOperationTests.swift */,
9EA643D823945CE00018A28C /* FeedlyAddNewFeedOperationTests.swift */,
9EF58EAF23E1606000992A2B /* FeedlyTextSanitizationTests.swift */,
9EF58EB123E1647400992A2B /* FeedlyCollectionParserTests.swift */,
9EF58EB323E1655300992A2B /* FeedlyFeedParserTests.swift */,
9EF58EB523E1669F00992A2B /* FeedlyEntryParserTests.swift */,
9E79F7732395C9EF0031DB98 /* feedly-add-new-feed */,
9E5ABE99236BE6BC00B5DE9F /* feedly-1-initial */,
9EC804E4236C1A7F0057CFCB /* feedly-2-changestatuses */,
@@ -704,7 +704,7 @@
isa = PBXGroup;
children = (
9E1D1554233431A600F4944C /* FeedlyOperation.swift */,
9EF35F79234E830E003AE2AE /* FeedlyCompoundOperation.swift */,
9E1D154C233370D800F4944C /* FeedlySyncAllOperation.swift */,
9EA643D2239305680018A28C /* FeedlySearchOperation.swift */,
9E7299D623505E9600DAEFB7 /* FeedlyAddFeedToCollectionOperation.swift */,
9EB1D575238E6A3900A753D7 /* FeedlyAddNewFeedOperation.swift */,
@@ -718,16 +718,17 @@
9E1D155A2334423300F4944C /* FeedlyOrganiseParsedItemsByFeedOperation.swift */,
9E1D155C233447F000F4944C /* FeedlyUpdateAccountFeedsWithItemsOperation.swift */,
9E85C8E9236700AD00D0F1F7 /* FeedlyGetEntriesOperation.swift */,
9E713652233AD63E00765C84 /* FeedlySetUnreadArticlesOperation.swift */,
9E1AF38A2353D41A008BD1D5 /* FeedlySetStarredArticlesOperation.swift */,
9EEEF71E23545CB4009E9D80 /* FeedlySendArticleStatusesOperation.swift */,
9E84DC482359A73600D6E809 /* FeedlyCheckpointOperation.swift */,
9EF1B10223584B4C000A486A /* FeedlySyncStreamContentsOperation.swift */,
9EEEF7202355277F009E9D80 /* FeedlySyncStarredArticlesOperation.swift */,
9E84DC462359A23200D6E809 /* FeedlySyncUnreadStatusesOperation.swift */,
9E1D154C233370D800F4944C /* FeedlySyncAllOperation.swift */,
9E44C90E23C6FF3600CCC286 /* FeedlyIngestStreamArticleIdsOperation.swift */,
9EEEF7202355277F009E9D80 /* FeedlyIngestStarredArticleIdsOperation.swift */,
9E84DC462359A23200D6E809 /* FeedlyIngestUnreadArticleIdsOperation.swift */,
9EF2602B23C91FFE006D160C /* FeedlyGetUpdatedArticleIdsOperation.swift */,
9E672393236F7CA0000BE141 /* FeedlyRefreshAccessTokenOperation.swift */,
9E784EBD237E890600099B1B /* FeedlyLogoutOperation.swift */,
9E5DE60D23C3F4B70064DA30 /* FeedlyFetchIdsForMissingArticlesOperation.swift */,
9EBD49BF23C67602005AD5CD /* FeedlyDownloadArticlesOperation.swift */,
);
path = Operations;
sourceTree = "<group>";
@@ -737,7 +738,9 @@
children = (
9E1773D823458D590056A5A8 /* FeedlyResourceId.swift */,
9EAEC60B2332FE830085D7C9 /* FeedlyCollection.swift */,
9E5EC15823E01D8A00A4E503 /* FeedlyCollectionParser.swift */,
9EAEC60D2332FEC20085D7C9 /* FeedlyFeed.swift */,
9E5EC15C23E0D58500A4E503 /* FeedlyFeedParser.swift */,
9EAEC623233315F60085D7C9 /* FeedlyEntry.swift */,
9E1773D4234570E30056A5A8 /* FeedlyEntryParser.swift */,
9EAEC625233318400085D7C9 /* FeedlyStream.swift */,
@@ -747,6 +750,8 @@
9E1773D22345700E0056A5A8 /* FeedlyLink.swift */,
9E1773D6234575AB0056A5A8 /* FeedlyTag.swift */,
9EA643D4239306AC0018A28C /* FeedlyFeedsSearchResponse.swift */,
9EBD49C123C67784005AD5CD /* FeedlyEntryIdentifierProviding.swift */,
9E5EC15A23E01DEF00A4E503 /* FeedlyRTLTextSanitizer.swift */,
);
path = Models;
sourceTree = "<group>";
@@ -839,11 +844,11 @@
848934F51F62484F00CEBD24 = {
CreatedOnToolsVersion = 9.0;
LastSwiftMigration = 0900;
ProvisioningStyle = Automatic;
ProvisioningStyle = Manual;
};
848934FE1F62484F00CEBD24 = {
CreatedOnToolsVersion = 9.0;
ProvisioningStyle = Automatic;
ProvisioningStyle = Manual;
};
};
};
@@ -975,7 +980,6 @@
buildActionMask = 2147483647;
files = (
84C8B3F41F89DE430053CCA6 /* DataExtensions.swift in Sources */,
9EF35F7A234E830E003AE2AE /* FeedlyCompoundOperation.swift in Sources */,
552032F9229D5D5A009559E0 /* ReaderAPISubscription.swift in Sources */,
84C3654A1F899F3B001EC85C /* CombinedRefreshProgress.swift in Sources */,
9EC688EE232C58E800A8D0A2 /* OAuthAuthorizationCodeGranting.swift in Sources */,
@@ -994,27 +998,31 @@
51E5959B228C781500FCC42B /* FeedbinStarredEntry.swift in Sources */,
846E77451F6EF9B900A165E2 /* Container.swift in Sources */,
9EA643D3239305680018A28C /* FeedlySearchOperation.swift in Sources */,
9E5EC15D23E0D58500A4E503 /* FeedlyFeedParser.swift in Sources */,
9E1D15532334304B00F4944C /* FeedlyGetStreamContentsOperation.swift in Sources */,
9E12B0202334696A00ADE5A0 /* FeedlyCreateFeedsForCollectionFoldersOperation.swift in Sources */,
552032FD229D5D5A009559E0 /* ReaderAPITagging.swift in Sources */,
9EAEC62A23331EE70085D7C9 /* FeedlyOrigin.swift in Sources */,
9E5EC15B23E01DEF00A4E503 /* FeedlyRTLTextSanitizer.swift in Sources */,
511B9804237CD4270028BCAA /* FeedIdentifier.swift in Sources */,
84F73CF1202788D90000BCEF /* ArticleFetcher.swift in Sources */,
9E713653233AD63E00765C84 /* FeedlySetUnreadArticlesOperation.swift in Sources */,
841974251F6DDCE4006346C4 /* AccountDelegate.swift in Sources */,
510BD113232C3E9D002692E4 /* WebFeedMetadataFile.swift in Sources */,
5165D73122837F3400D9D53D /* InitialFeedDownloader.swift in Sources */,
9E784EBE237E890600099B1B /* FeedlyLogoutOperation.swift in Sources */,
9EEEF71F23545CB4009E9D80 /* FeedlySendArticleStatusesOperation.swift in Sources */,
9EBD49C223C67784005AD5CD /* FeedlyEntryIdentifierProviding.swift in Sources */,
846E77541F6F00E300A165E2 /* AccountManager.swift in Sources */,
515E4EB72324FF8C0057B0E7 /* Credentials.swift in Sources */,
51E490362288C37100C791F0 /* FeedbinDate.swift in Sources */,
9EEAE06E235D002D00E3FEE4 /* FeedlyGetCollectionsService.swift in Sources */,
5165D72922835F7A00D9D53D /* FeedSpecifier.swift in Sources */,
9E85C8ED2367020700D0F1F7 /* FeedlyGetEntriesService.swift in Sources */,
9E5EC15923E01D8A00A4E503 /* FeedlyCollectionParser.swift in Sources */,
9E84DC492359A73600D6E809 /* FeedlyCheckpointOperation.swift in Sources */,
9E85C8EB236700E600D0F1F7 /* FeedlyGetEntriesOperation.swift in Sources */,
9E1D154D233370D800F4944C /* FeedlySyncAllOperation.swift in Sources */,
9E44C90F23C6FF3600CCC286 /* FeedlyIngestStreamArticleIdsOperation.swift in Sources */,
844B297D2106C7EC004020B3 /* WebFeed.swift in Sources */,
3B826DA72385C81C00FC1ADB /* FeedWranglerAuthorizationResult.swift in Sources */,
9E964EBA23754B4000A7AF2E /* OAuthAccountAuthorizationOperation.swift in Sources */,
@@ -1031,10 +1039,10 @@
9EF1B10323584B4C000A486A /* FeedlySyncStreamContentsOperation.swift in Sources */,
5154367B228EEB28005E1CDF /* FeedbinImportResult.swift in Sources */,
84B2D4D02238CD8A00498ADA /* WebFeedMetadata.swift in Sources */,
9E84DC472359A23200D6E809 /* FeedlySyncUnreadStatusesOperation.swift in Sources */,
9E84DC472359A23200D6E809 /* FeedlyIngestUnreadArticleIdsOperation.swift in Sources */,
9EAEC624233315F60085D7C9 /* FeedlyEntry.swift in Sources */,
9EEAE073235D01AE00E3FEE4 /* FeedlyGetStreamIdsService.swift in Sources */,
9EEEF7212355277F009E9D80 /* FeedlySyncStarredArticlesOperation.swift in Sources */,
9EEEF7212355277F009E9D80 /* FeedlyIngestStarredArticleIdsOperation.swift in Sources */,
3BC23AB92385ECB100371CBA /* FeedWranglerSubscriptionResult.swift in Sources */,
5144EA49227B497600D19003 /* FeedbinAPICaller.swift in Sources */,
84B99C9F1FAE8D3200ECDEDB /* ContainerPath.swift in Sources */,
@@ -1044,6 +1052,7 @@
55203300229D5D5A009559E0 /* ReaderAPICaller.swift in Sources */,
9E1D154F233371DD00F4944C /* FeedlyGetCollectionsOperation.swift in Sources */,
9EAEC626233318400085D7C9 /* FeedlyStream.swift in Sources */,
9E5DE60E23C3F4B70064DA30 /* FeedlyFetchIdsForMissingArticlesOperation.swift in Sources */,
3B826DA92385C81C00FC1ADB /* FeedWranglerAPICaller.swift in Sources */,
9EAEC60C2332FE830085D7C9 /* FeedlyCollection.swift in Sources */,
51E3EB41229AF61B00645299 /* AccountError.swift in Sources */,
@@ -1052,6 +1061,7 @@
552032F8229D5D5A009559E0 /* ReaderAPIEntry.swift in Sources */,
552032FB229D5D5A009559E0 /* ReaderAPITag.swift in Sources */,
5165D72822835F7800D9D53D /* FeedFinder.swift in Sources */,
9EBD49C023C67602005AD5CD /* FeedlyDownloadArticlesOperation.swift in Sources */,
51D58755227F53BE00900287 /* FeedbinTag.swift in Sources */,
9E1D155B2334423300F4944C /* FeedlyOrganiseParsedItemsByFeedOperation.swift in Sources */,
552032FE229D5D5A009559E0 /* ReaderAPIAccountDelegate.swift in Sources */,
@@ -1079,10 +1089,10 @@
9E1773D5234570E30056A5A8 /* FeedlyEntryParser.swift in Sources */,
51BFDECE238B508D00216323 /* ContainerIdentifier.swift in Sources */,
9E1D1555233431A600F4944C /* FeedlyOperation.swift in Sources */,
9E1AF38B2353D41A008BD1D5 /* FeedlySetStarredArticlesOperation.swift in Sources */,
84F1F06E2243524700DA0616 /* AccountMetadata.swift in Sources */,
9EF1B10723590D61000A486A /* FeedlyGetStreamIdsOperation.swift in Sources */,
84245C851FDDD8CB0074AFBB /* FeedbinSubscription.swift in Sources */,
9EF2602C23C91FFE006D160C /* FeedlyGetUpdatedArticleIdsOperation.swift in Sources */,
3B826DAA2385C81C00FC1ADB /* FeedWranglerSubscription.swift in Sources */,
3B826DAC2385C81C00FC1ADB /* FeedWranglerAccountDelegate.swift in Sources */,
);
@@ -1094,38 +1104,26 @@
files = (
9EC228572362C7F900766EF8 /* FeedlyCheckpointOperationTests.swift in Sources */,
9E03C122235E62E100FB6D9E /* FeedlyTestSupport.swift in Sources */,
9E3CFFFD2368202000BA7365 /* FeedlySyncUnreadStatusesOperationTests.swift in Sources */,
9E784EC0237E8BE100099B1B /* FeedlyLogoutOperationTests.swift in Sources */,
9EC228552362C17F00766EF8 /* FeedlySetStarredArticlesOperationTests.swift in Sources */,
9E03C120235E62A500FB6D9E /* FeedlyMirrorCollectionsAsFoldersOperationTests.swift in Sources */,
9E489E912360ED30004372EE /* FeedlyOrganiseParsedItemsByFeedOperationTests.swift in Sources */,
9E0260CB236FF99A00D122D3 /* FeedlyRefreshAccessTokenOperationTests.swift in Sources */,
9EAADA1023C93144003A801F /* TestGetEntriesService.swift in Sources */,
9EF58EB423E1655300992A2B /* FeedlyFeedParserTests.swift in Sources */,
9E1FF8622368219B00834C24 /* TestGetPagedStreamIdsService.swift in Sources */,
9EA643D923945CE00018A28C /* FeedlyAddNewFeedOperationTests.swift in Sources */,
9E7F88AC235EDDC2009AB9DF /* FeedlyCreateFeedsForCollectionFoldersOperationTests.swift in Sources */,
9E03C11E235D976500FB6D9E /* FeedlyGetCollectionsOperationTests.swift in Sources */,
9E85C8E62366FED600D0F1F7 /* TestGetStreamContentsService.swift in Sources */,
9E1FF8662368ED7E00834C24 /* TestMarkArticlesService.swift in Sources */,
9E03C11C235D921400FB6D9E /* FeedlyOperationTests.swift in Sources */,
9E1FF8642368EC2400834C24 /* FeedlySyncAllOperationTests.swift in Sources */,
9E1FF8602368216B00834C24 /* TestGetStreamIdsService.swift in Sources */,
9E85C8E82366FF4200D0F1F7 /* TestGetPagedStreamContentsService.swift in Sources */,
9EC228592362D0EA00766EF8 /* FeedlySendArticleStatusesOperationTests.swift in Sources */,
9EF58EB023E1606000992A2B /* FeedlyTextSanitizationTests.swift in Sources */,
5165D7122282080C00D9D53D /* AccountFeedbinFolderContentsSyncTest.swift in Sources */,
9E489E93236101FC004372EE /* FeedlyUpdateAccountFeedsWithItemsOperationTests.swift in Sources */,
9E489E8D2360652C004372EE /* FeedlyGetStreamIdsOperationTests.swift in Sources */,
51D5875E227F643C00900287 /* AccountFeedbinFolderSyncTest.swift in Sources */,
9EF58EB623E1669F00992A2B /* FeedlyEntryParserTests.swift in Sources */,
9EC804E3236C18AB0057CFCB /* FeedlySyncAllMockResponseProvider.swift in Sources */,
9E1FF8682368EE4900834C24 /* TestGetCollectionsService.swift in Sources */,
9E4828F223617F4A00D68691 /* FeedlySetUnreadArticlesOperationTests.swift in Sources */,
5107A09B227DE49500C7C3C5 /* TestAccountManager.swift in Sources */,
513323082281070D00C30F19 /* AccountFeedbinSyncTest.swift in Sources */,
9E85C8E42366FE0100D0F1F7 /* FeedlySyncStarredArticlesOperationTests.swift in Sources */,
5107A09D227DE77700C7C3C5 /* TestTransport.swift in Sources */,
5107A099227DE42E00C7C3C5 /* AccountCredentialsTest.swift in Sources */,
9EC2285B23639A6500766EF8 /* FeedlySyncStreamContentsOperationTests.swift in Sources */,
9E1773DB234593CF0056A5A8 /* FeedlyResourceIdTests.swift in Sources */,
9E7F88AE235FBB11009AB9DF /* FeedlyGetStreamContentsOperationTests.swift in Sources */,
9EF58EB223E1647400992A2B /* FeedlyCollectionParserTests.swift in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};

@ -14,25 +14,28 @@ import Foundation
user interface as much as possible. For example some sync services don't allow
feeds to be in the root folder of the account.
*/
public struct AccountBehaviors: OptionSet {
public typealias AccountBehaviors = [AccountBehavior]

public enum AccountBehavior: Equatable {

/**
Account doesn't support copies of a feed that are in a folder to be made to the root folder.
*/
public static let disallowFeedCopyInRootFolder = AccountBehaviors(rawValue: 1)
case disallowFeedCopyInRootFolder

/**
Account doesn't support feeds in the root folder.
*/
public static let disallowFeedInRootFolder = AccountBehaviors(rawValue: 2)
case disallowFeedInRootFolder

/**
Account doesn't support OPML imports
*/
public static let disallowOPMLImports = AccountBehaviors(rawValue: 3)
case disallowOPMLImports

public let rawValue: Int
public init(rawValue: Int) {
self.rawValue = rawValue
}
/**
Account doesn't allow mark as read after a period of days
*/
case disallowMarkAsUnreadAfterPeriod(Int)

}
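Review note: the OptionSet-based AccountBehaviors above is replaced by a typealias for an array of an Equatable enum, which lets a behavior carry an associated value (the mark-as-unread period). A minimal sketch of how a call site might check behaviors under the new shape; the sample values and the comment bodies are illustrative, not taken from the commit.

```swift
// Sketch only: checking behaviors with the array-of-enum representation.
let behaviors: AccountBehaviors = [.disallowFeedInRootFolder, .disallowMarkAsUnreadAfterPeriod(31)]

// Equatable cases still work with Array.contains(_:).
if behaviors.contains(.disallowFeedInRootFolder) {
    // Prompt the user to pick a folder instead of the account's root.
}

// Associated values are read by pattern matching rather than a raw value.
for behavior in behaviors {
    if case .disallowMarkAsUnreadAfterPeriod(let days) = behavior {
        print("Articles older than \(days) days cannot be marked unread.")
    }
}
```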
@ -16,6 +16,23 @@ public enum AccountError: LocalizedError {
case opmlImportInProgress
case wrappedError(error: Error, account: Account)

public var account: Account? {
if case .wrappedError(_, let account) = self {
return account
} else {
return nil
}
}

public var isCredentialsError: Bool {
if case .wrappedError(let error, _) = self {
if case TransportError.httpError(let status) = error {
return isCredentialsError(status: status)
}
}
return false
}

public var errorDescription: String? {
switch self {
case .createErrorNotFound:
@ -27,7 +44,7 @@ public enum AccountError: LocalizedError {
case .wrappedError(let error, let account):
switch error {
case TransportError.httpError(let status):
if status == 401 {
if isCredentialsError(status: status) {
let localizedText = NSLocalizedString("Your “%@” credentials are invalid or expired.", comment: "Invalid or expired")
return NSString.localizedStringWithFormat(localizedText as NSString, account.nameForDisplay) as String
} else {
@ -48,7 +65,7 @@ public enum AccountError: LocalizedError {
case .wrappedError(let error, _):
switch error {
case TransportError.httpError(let status):
if status == 401 || status == 403 {
if isCredentialsError(status: status) {
return NSLocalizedString("Please update your credentials for this account, or ensure that your account with this service is still valid.", comment: "Expired credentials")
} else {
return NSLocalizedString("Please try again later.", comment: "Try later")
@ -61,8 +78,19 @@ public enum AccountError: LocalizedError {
}
}

private func unknownError(_ error: Error, _ account: Account) -> String {
}

// MARK: Private

private extension AccountError {

func unknownError(_ error: Error, _ account: Account) -> String {
let localizedText = NSLocalizedString("An error occurred while processing the “%@” account: %@", comment: "Unknown error")
return NSString.localizedStringWithFormat(localizedText as NSString, account.nameForDisplay, error.localizedDescription) as String
}

func isCredentialsError(status: Int) -> Bool {
return status == 401 || status == 403
}

}
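Review note: a hedged sketch of how a caller might consume the new AccountError helpers (the wrapped-error case, isCredentialsError, and the LocalizedError strings). The function and the print statements are hypothetical stand-ins for real UI code; only the AccountError members shown in the diff above are used.

```swift
import Foundation

// Sketch only: deciding how to react to an error wrapped for an account.
func describe(_ error: Error, in account: Account) {
    let wrapped = AccountError.wrappedError(error: error, account: account)
    if wrapped.isCredentialsError {
        // True when the wrapped error is TransportError.httpError with a 401 or 403 status.
        print("Ask for new credentials: \(wrapped.errorDescription ?? "")")
    } else {
        print("Show a plain alert: \(wrapped.errorDescription ?? wrapped.localizedDescription)")
    }
}
```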
@ -90,14 +90,6 @@ public final class AccountManager: UnreadCountProvider {
return CombinedRefreshProgress(downloadProgressArray: downloadProgressArray)
}

public convenience init() {
let appGroup = Bundle.main.object(forInfoDictionaryKey: "AppGroup") as! String
let accountsURL = FileManager.default.containerURL(forSecurityApplicationGroupIdentifier: appGroup)
let accountsFolder = accountsURL!.appendingPathComponent("Accounts").absoluteString
let accountsFolderPath = accountsFolder.suffix(from: accountsFolder.index(accountsFolder.startIndex, offsetBy: 7))
self.init(accountsFolder: String(accountsFolderPath))
}

public init(accountsFolder: String) {
self.accountsFolder = accountsFolder

@ -272,6 +264,11 @@ public final class AccountManager: UnreadCountProvider {
var allFetchedArticles = Set<Article>()
let numberOfAccounts = activeAccounts.count
var accountsReporting = 0

guard numberOfAccounts > 0 else {
completion(.success(allFetchedArticles))
return
}

for account in activeAccounts {
account.fetchArticlesAsync(fetchType) { (articleSetResult) in
@ -389,7 +386,7 @@ private struct AccountSpecifier {

init?(folderPath: String) {
if !FileManager.default.rs_fileIsFolder(folderPath) {
if !FileManager.default.isFolder(atPath: folderPath) {
return nil
}

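Review note: the new guard above exists because the aggregate fetch only calls its completion handler after every active account has reported, so with zero accounts it would otherwise never fire. A hedged, self-contained sketch of that counting pattern follows; the function, the `Set<String>` payload, and the fetcher shape are illustrative assumptions, not the commit's code.

```swift
import Foundation

// Sketch only: call completion exactly once, after every source reports.
func fetchArticleIDs(from fetchers: [((Set<String>) -> Void) -> Void],
                     completion: @escaping (Set<String>) -> Void) {
    var collected = Set<String>()
    let expected = fetchers.count
    var reported = 0

    // Without this guard, completion would never fire when there are no fetchers.
    guard expected > 0 else {
        completion(collected)
        return
    }

    for fetch in fetchers {
        fetch { articleIDs in
            collected.formUnion(articleIDs)
            reported += 1
            if reported == expected {
                completion(collected)
            }
        }
    }
}
```

Calling it with an empty array returns immediately with an empty set, which is the case the new guard addresses.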
@ -16,41 +16,26 @@ final class AccountMetadataFile {

private let fileURL: URL
private let account: Account
private lazy var managedFile = ManagedResourceFile(fileURL: fileURL, load: loadCallback, save: saveCallback)

private var isDirty = false {
didSet {
queueSaveToDiskIfNeeded()
}
}
private let saveQueue = CoalescingQueue(name: "Save Queue", interval: 0.5)

init(filename: String, account: Account) {
self.fileURL = URL(fileURLWithPath: filename)
self.account = account
}

func markAsDirty() {
managedFile.markAsDirty()
isDirty = true
}

func load() {
managedFile.load()
}

func save() {
managedFile.saveIfNecessary()
}

func suspend() {
managedFile.suspend()
}

func resume() {
managedFile.resume()
}

}

private extension AccountMetadataFile {

func loadCallback() {

let errorPointer: NSErrorPointer = nil
let fileCoordinator = NSFileCoordinator(filePresenter: managedFile)
let fileCoordinator = NSFileCoordinator()

fileCoordinator.coordinate(readingItemAt: fileURL, options: [], error: errorPointer, byAccessor: { readURL in
if let fileData = try? Data(contentsOf: readURL) {
@ -63,17 +48,16 @@ private extension AccountMetadataFile {
if let error = errorPointer?.pointee {
os_log(.error, log: log, "Read from disk coordination failed: %@.", error.localizedDescription)
}

}

func saveCallback() {
func save() {
guard !account.isDeleted else { return }

let encoder = PropertyListEncoder()
encoder.outputFormat = .binary

let errorPointer: NSErrorPointer = nil
let fileCoordinator = NSFileCoordinator(filePresenter: managedFile)
let fileCoordinator = NSFileCoordinator()

fileCoordinator.coordinate(writingItemAt: fileURL, options: [], error: errorPointer, byAccessor: { writeURL in
do {
@ -90,3 +74,18 @@ private extension AccountMetadataFile {
}

}

private extension AccountMetadataFile {

func queueSaveToDiskIfNeeded() {
saveQueue.add(self, #selector(saveToDiskIfNeeded))
}

@objc func saveToDiskIfNeeded() {
if isDirty {
isDirty = false
save()
}
}

}
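Review note: the diff above replaces AccountMetadataFile's hand-rolled dirty flag and CoalescingQueue bookkeeping with RSCore's ManagedResourceFile, which takes load and save callbacks and owns coordination plus suspend/resume. A hedged sketch of the wrapper shape this implies; it uses only the ManagedResourceFile calls visible in the diff, and the callback bodies are deliberately left as comments because the real implementation encodes AccountMetadata.

```swift
import Foundation
import RSCore

// Sketch only: the shape of a ManagedResourceFile-backed metadata file.
final class SimpleMetadataFile {

    private let fileURL: URL
    private lazy var managedFile = ManagedResourceFile(fileURL: fileURL,
                                                       load: loadCallback,
                                                       save: saveCallback)

    init(filename: String) {
        self.fileURL = URL(fileURLWithPath: filename)
    }

    // Mark dirty on every change; ManagedResourceFile coalesces the writes.
    func markAsDirty() { managedFile.markAsDirty() }

    func load() { managedFile.load() }
    func save() { managedFile.saveIfNecessary() }

    // Called around app suspension, mirroring Account.suspend/resume.
    func suspend() { managedFile.suspend() }
    func resume() { managedFile.resume() }
}

private extension SimpleMetadataFile {

    func loadCallback() {
        // Read and decode fileURL here (the diff uses NSFileCoordinator + a property-list decoder).
    }

    func saveCallback() {
        // Encode and write fileURL here (the diff uses PropertyListEncoder with .binary output).
    }
}
```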
@ -9,6 +9,7 @@
import XCTest
@testable import Account
import RSWeb
import RSCore

class FeedlyAddNewFeedOperationTests: XCTestCase {

@ -42,17 +43,17 @@ class FeedlyAddNewFeedOperationTests: XCTestCase {
let getCollections = FeedlyGetCollectionsOperation(service: caller, log: support.log)

let mirrorCollectionsAsFolders = FeedlyMirrorCollectionsAsFoldersOperation(account: account, collectionsProvider: getCollections, log: support.log)
mirrorCollectionsAsFolders.addDependency(getCollections)

MainThreadOperationQueue.shared.make(mirrorCollectionsAsFolders, dependOn: getCollections)

let createFolders = FeedlyCreateFeedsForCollectionFoldersOperation(account: account, feedsAndFoldersProvider: mirrorCollectionsAsFolders, log: support.log)
createFolders.addDependency(mirrorCollectionsAsFolders)
MainThreadOperationQueue.shared.make(createFolders, dependOn: mirrorCollectionsAsFolders)

let completionExpectation = expectation(description: "Did Finish")
createFolders.completionBlock = {
createFolders.completionBlock = { _ in
completionExpectation.fulfill()
}

OperationQueue.main.addOperations([getCollections, mirrorCollectionsAsFolders, createFolders], waitUntilFinished: false)
MainThreadOperationQueue.shared.addOperations([getCollections, mirrorCollectionsAsFolders, createFolders])

waitForExpectations(timeout: 2)

@ -89,6 +90,7 @@ class FeedlyAddNewFeedOperationTests: XCTestCase {
}

let progress = DownloadProgress(numberOfTasks: 0)
let container = support.makeTestDatabaseContainer()
let _ = expectationForCompletion(of: progress)

let addNewFeed = try! FeedlyAddNewFeedOperation(account: account,
@ -99,17 +101,18 @@ class FeedlyAddNewFeedOperationTests: XCTestCase {
addToCollectionService: caller,
syncUnreadIdsService: caller,
getStreamContentsService: caller,
database: container.database,
container: folder,
progress: progress,
log: support.log)

// If this expectation is not fulfilled, the operation is not calling `didFinish`.
let completionExpectation = expectation(description: "Did Finish")
addNewFeed.completionBlock = {
addNewFeed.completionBlock = { _ in
completionExpectation.fulfill()
}

OperationQueue.main.addOperation(addNewFeed)
MainThreadOperationQueue.shared.addOperation(addNewFeed)

XCTAssert(progress.numberRemaining > 0)

@ -120,12 +123,13 @@ class FeedlyAddNewFeedOperationTests: XCTestCase {
XCTAssert(progress.isComplete)
}

func testAddNewFeedSuccess() {
func testAddNewFeedSuccess() throws {
guard let folder = getFolderByLoadingInitialContent() else {
return
}

let progress = DownloadProgress(numberOfTasks: 0)
let container = support.makeTestDatabaseContainer()
let _ = expectationForCompletion(of: progress)

let subdirectory = "feedly-add-new-feed"
@ -145,17 +149,18 @@ class FeedlyAddNewFeedOperationTests: XCTestCase {
addToCollectionService: caller,
syncUnreadIdsService: caller,
getStreamContentsService: caller,
database: container.database,
container: folder,
progress: progress,
log: support.log)

// If this expectation is not fulfilled, the operation is not calling `didFinish`.
let completionExpectation = expectation(description: "Did Finish")
addNewFeed.completionBlock = {
addNewFeed.completionBlock = { _ in
completionExpectation.fulfill()
}

OperationQueue.main.addOperation(addNewFeed)
MainThreadOperationQueue.shared.addOperation(addNewFeed)

XCTAssert(progress.numberRemaining > 0)

@ -163,7 +168,7 @@ class FeedlyAddNewFeedOperationTests: XCTestCase {

XCTAssert(progress.isComplete)

support.checkArticles(in: account, againstItemsInStreamInJSONNamed: "feedStream", subdirectory: subdirectory)
try support.checkArticles(in: account, againstItemsInStreamInJSONNamed: "feedStream", subdirectory: subdirectory)
support.checkUnreadStatuses(in: account, againstIdsInStreamInJSONNamed: "unreadIds", subdirectory: subdirectory, testCase: self)
}

@ -191,6 +196,7 @@ class FeedlyAddNewFeedOperationTests: XCTestCase {
}

let progress = DownloadProgress(numberOfTasks: 0)
let container = support.makeTestDatabaseContainer()
let _ = expectationForCompletion(of: progress)

let subdirectory = "feedly-add-new-feed"
@ -220,17 +226,18 @@ class FeedlyAddNewFeedOperationTests: XCTestCase {
addToCollectionService: service,
syncUnreadIdsService: caller,
getStreamContentsService: caller,
database: container.database,
container: folder,
progress: progress,
log: support.log)

// If this expectation is not fulfilled, the operation is not calling `didFinish`.
let completionExpectation = expectation(description: "Did Finish")
addNewFeed.completionBlock = {
addNewFeed.completionBlock = { _ in
completionExpectation.fulfill()
}

OperationQueue.main.addOperation(addNewFeed)
MainThreadOperationQueue.shared.addOperation(addNewFeed)

XCTAssert(progress.numberRemaining > 0)

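Review note: this and the following test diffs apply the same mechanical migration — dependencies, enqueueing, and cancellation move from Foundation's OperationQueue/addDependency onto RSCore's MainThreadOperationQueue, and completionBlock now receives the finished operation. A hedged sketch of the resulting pattern, using only calls that appear in these diffs; it assumes MainThreadOperation is a class-constrained protocol that exposes completionBlock, as the diffs suggest.

```swift
import XCTest
import RSCore

// Sketch only: the queueing pattern the migrated tests converge on.
// `first` and `second` stand for any MainThreadOperation (e.g. the Feedly operations above).
func runChained(_ first: MainThreadOperation, _ second: MainThreadOperation, expectation: XCTestExpectation) {
    // Dependencies are declared on the queue, not with Operation.addDependency.
    MainThreadOperationQueue.shared.make(second, dependOn: first)

    // completionBlock now receives the finished operation; the tests ignore it with `_ in`.
    second.completionBlock = { _ in
        expectation.fulfill()
    }

    // Adding (and, elsewhere, cancelOperations) goes through the shared queue.
    MainThreadOperationQueue.shared.addOperations([first, second])
}
```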
@ -8,6 +8,7 @@

import XCTest
@testable import Account
import RSCore

class FeedlyCheckpointOperationTests: XCTestCase {

@ -28,11 +29,11 @@ class FeedlyCheckpointOperationTests: XCTestCase {
operation.checkpointDelegate = delegate

let didFinishExpectation = expectation(description: "Did Finish")
operation.completionBlock = {
operation.completionBlock = { _ in
didFinishExpectation.fulfill()
}

OperationQueue.main.addOperation(operation)
MainThreadOperationQueue.shared.add(operation)

waitForExpectations(timeout: 2)
}
@ -48,13 +49,13 @@ class FeedlyCheckpointOperationTests: XCTestCase {
operation.checkpointDelegate = delegate

let didFinishExpectation = expectation(description: "Did Finish")
operation.completionBlock = {
operation.completionBlock = { _ in
didFinishExpectation.fulfill()
}

OperationQueue.main.addOperation(operation)
MainThreadOperationQueue.shared.add(operation)

operation.cancel()
MainThreadOperationQueue.shared.cancelOperations([operation])

waitForExpectations(timeout: 1)
}

@ -0,0 +1,28 @@
//
// FeedlyCollectionParserTests.swift
// AccountTests
//
// Created by Kiel Gillard on 29/1/20.
// Copyright © 2020 Ranchero Software, LLC. All rights reserved.
//

import XCTest
@testable import Account

class FeedlyCollectionParserTests: XCTestCase {

func testParsing() {
let collection = FeedlyCollection(feeds: [], label: "Test Collection", id: "test/collection/1")
let parser = FeedlyCollectionParser(collection: collection)
XCTAssertEqual(parser.folderName, collection.label)
XCTAssertEqual(parser.externalID, collection.id)
}

func testSanitization() {
let name = "Test Collection"
let collection = FeedlyCollection(feeds: [], label: "<div style=\"direction:rtl;text-align:right\">\(name)</div>", id: "test/collection/1")
let parser = FeedlyCollectionParser(collection: collection)
XCTAssertEqual(parser.folderName, name)
XCTAssertEqual(parser.externalID, collection.id)
}
}

@ -8,6 +8,7 @@

import XCTest
@testable import Account
import RSCore

class FeedlyCreateFeedsForCollectionFoldersOperationTests: XCTestCase {

@ -54,13 +55,13 @@ class FeedlyCreateFeedsForCollectionFoldersOperationTests: XCTestCase {

let createFeeds = FeedlyCreateFeedsForCollectionFoldersOperation(account: account, feedsAndFoldersProvider: provider, log: support.log)
let completionExpectation = expectation(description: "Did Finish")
createFeeds.completionBlock = {
createFeeds.completionBlock = { _ in
completionExpectation.fulfill()
}

XCTAssertTrue(account.flattenedWebFeeds().isEmpty, "Expected empty account.")

OperationQueue.main.addOperation(createFeeds)
MainThreadOperationQueue.shared.addOperation(createFeeds)

waitForExpectations(timeout: 2)

@ -125,13 +126,13 @@ class FeedlyCreateFeedsForCollectionFoldersOperationTests: XCTestCase {

let createFeeds = FeedlyCreateFeedsForCollectionFoldersOperation(account: account, feedsAndFoldersProvider: provider, log: support.log)
let completionExpectation = expectation(description: "Did Finish")
createFeeds.completionBlock = {
createFeeds.completionBlock = { _ in
completionExpectation.fulfill()
}

XCTAssertTrue(account.flattenedWebFeeds().isEmpty, "Expected empty account.")

OperationQueue.main.addOperation(createFeeds)
MainThreadOperationQueue.shared.addOperation(createFeeds)

waitForExpectations(timeout: 2)
}
@ -149,11 +150,11 @@ class FeedlyCreateFeedsForCollectionFoldersOperationTests: XCTestCase {

let removeFeeds = FeedlyCreateFeedsForCollectionFoldersOperation(account: account, feedsAndFoldersProvider: provider, log: support.log)
let completionExpectation = expectation(description: "Did Finish")
removeFeeds.completionBlock = {
removeFeeds.completionBlock = { _ in
completionExpectation.fulfill()
}

OperationQueue.main.addOperation(removeFeeds)
MainThreadOperationQueue.shared.addOperation(removeFeeds)

waitForExpectations(timeout: 2)

@ -0,0 +1,196 @@
//
// FeedlyEntryParserTests.swift
// AccountTests
//
// Created by Kiel Gillard on 29/1/20.
// Copyright © 2020 Ranchero Software, LLC. All rights reserved.
//

import XCTest
@testable import Account

class FeedlyEntryParserTests: XCTestCase {

func testParsing() {
let content = FeedlyEntry.Content(content: "Test Content", direction: .leftToRight)
let summary = FeedlyEntry.Content(content: "Test Summary", direction: .leftToRight)
let origin = FeedlyOrigin(title: "Test Feed", streamId: "tests://feeds/1", htmlUrl: nil)
let canonicalLink = FeedlyLink(href: "tests://feeds/1/entries/1", type: "text/html")
let tags = [
FeedlyTag(id: "tests/tags/1", label: "Tag 1"),
FeedlyTag(id: "tests/tags/2", label: "Tag 2")
]
let entry = FeedlyEntry(id: "tests/feeds/1/entries/1",
title: "Test Entry 1",
content: content,
summary: summary,
author: "Bob Alice",
crawled: .distantPast,
recrawled: Date(timeIntervalSinceReferenceDate: 0),
origin: origin,
canonical: [canonicalLink],
alternate: nil,
unread: false,
tags: tags,
categories: nil,
enclosure: nil)

let parser = FeedlyEntryParser(entry: entry)

XCTAssertEqual(parser.id, entry.id)
XCTAssertEqual(parser.feedUrl, origin.streamId)
XCTAssertEqual(parser.externalUrl, canonicalLink.href)
XCTAssertEqual(parser.title, entry.title)
XCTAssertEqual(parser.contentHMTL, content.content)
XCTAssertEqual(parser.summary, summary.content)
XCTAssertEqual(parser.datePublished, .distantPast)
XCTAssertEqual(parser.dateModified, Date(timeIntervalSinceReferenceDate: 0))

guard let item = parser.parsedItemRepresentation else {
XCTFail("Expected a parsed item representation.")
return
}

XCTAssertEqual(item.syncServiceID, entry.id)
XCTAssertEqual(item.uniqueID, entry.id)

// The following is not an error.
// The feedURL must match the webFeedID for the article to be connected to its matching feed.
XCTAssertEqual(item.feedURL, origin.streamId)
XCTAssertEqual(item.title, entry.title)
XCTAssertEqual(item.contentHTML, content.content)
XCTAssertEqual(item.contentText, nil, "Is it now free of HTML characters?")
XCTAssertEqual(item.summary, summary.content)
XCTAssertEqual(item.datePublished, entry.crawled)
XCTAssertEqual(item.dateModified, entry.recrawled)

let expectedTags = Set(tags.compactMap { $0.label })
XCTAssertEqual(item.tags, expectedTags)

let expectedAuthors = Set([entry.author])
let calculatedAuthors = Set(item.authors?.compactMap { $0.name } ?? [])
XCTAssertEqual(calculatedAuthors, expectedAuthors)
}

func testSanitization() {
let content = FeedlyEntry.Content(content: "<div style=\"direction:rtl;text-align:right\">Test Content</div>", direction: .rightToLeft)
let summaryContent = "Test Summary"
let summary = FeedlyEntry.Content(content: "<div style=\"direction:rtl;text-align:right\">\(summaryContent)</div>", direction: .rightToLeft)
let origin = FeedlyOrigin(title: "Test Feed", streamId: "tests://feeds/1", htmlUrl: nil)
let title = "Test Entry 1"
let entry = FeedlyEntry(id: "tests/feeds/1/entries/1",
title: "<div style=\"direction:rtl;text-align:right\">\(title)</div>",
content: content,
summary: summary,
author: nil,
crawled: .distantPast,
recrawled: nil,
origin: origin,
canonical: nil,
alternate: nil,
unread: false,
tags: nil,
categories: nil,
enclosure: nil)

let parser = FeedlyEntryParser(entry: entry)

// These should be sanitized
XCTAssertEqual(parser.title, title)
XCTAssertEqual(parser.summary, summaryContent)

// These should not be sanitized because it is supposed to be HTML content.
XCTAssertEqual(parser.contentHMTL, content.content)
}

func testLocatesCanonicalExternalUrl() {
let canonicalLink = FeedlyLink(href: "tests://feeds/1/entries/1", type: "text/html")
let alternateLink = FeedlyLink(href: "tests://feeds/1/entries/alternate/1", type: "text/html")
let entry = FeedlyEntry(id: "tests/feeds/1/entries/1",
title: "Test Entry 1",
content: nil,
summary: nil,
author: nil,
crawled: .distantPast,
recrawled: Date(timeIntervalSinceReferenceDate: 0),
origin: nil,
canonical: [canonicalLink],
alternate: [alternateLink],
unread: false,
tags: nil,
categories: nil,
enclosure: nil)

let parser = FeedlyEntryParser(entry: entry)

XCTAssertEqual(parser.externalUrl, canonicalLink.href)
}

func testLocatesAlternateExternalUrl() {
let canonicalLink = FeedlyLink(href: "tests://feeds/1/entries/1", type: "text/json")
let alternateLink = FeedlyLink(href: "tests://feeds/1/entries/alternate/1", type: nil)
let entry = FeedlyEntry(id: "tests/feeds/1/entries/1",
title: "Test Entry 1",
content: nil,
summary: nil,
author: nil,
crawled: .distantPast,
recrawled: Date(timeIntervalSinceReferenceDate: 0),
origin: nil,
canonical: [canonicalLink],
alternate: [alternateLink],
unread: false,
tags: nil,
categories: nil,
enclosure: nil)

let parser = FeedlyEntryParser(entry: entry)

XCTAssertEqual(parser.externalUrl, alternateLink.href)
}

func testContentPreferredToSummary() {
let content = FeedlyEntry.Content(content: "Test Content", direction: .leftToRight)
let summary = FeedlyEntry.Content(content: "Test Summary", direction: .leftToRight)
let entry = FeedlyEntry(id: "tests/feeds/1/entries/1",
title: "Test Entry 1",
content: content,
summary: summary,
author: nil,
crawled: .distantPast,
recrawled: Date(timeIntervalSinceReferenceDate: 0),
origin: nil,
canonical: nil,
alternate: nil,
unread: false,
tags: nil,
categories: nil,
enclosure: nil)

let parser = FeedlyEntryParser(entry: entry)

XCTAssertEqual(parser.contentHMTL, content.content)
}

func testSummaryUsedAsContentWhenContentMissing() {
let summary = FeedlyEntry.Content(content: "Test Summary", direction: .leftToRight)
let entry = FeedlyEntry(id: "tests/feeds/1/entries/1",
title: "Test Entry 1",
content: nil,
summary: summary,
author: nil,
crawled: .distantPast,
recrawled: Date(timeIntervalSinceReferenceDate: 0),
origin: nil,
canonical: nil,
alternate: nil,
unread: false,
tags: nil,
categories: nil,
enclosure: nil)

let parser = FeedlyEntryParser(entry: entry)

XCTAssertEqual(parser.contentHMTL, summary.content)
}
}

@ -0,0 +1,41 @@
//
// FeedlyFeedParserTests.swift
// AccountTests
//
// Created by Kiel Gillard on 29/1/20.
// Copyright © 2020 Ranchero Software, LLC. All rights reserved.
//

import XCTest
@testable import Account

class FeedlyFeedParserTests: XCTestCase {

func testParsing() {
let name = "Test Feed"
let website = "tests://nnw/feed/1"
let url = "tests://nnw/feed.xml"
let id = "feed/\(url)"
let updated = Date.distantPast
let feed = FeedlyFeed(id: id, title: name, updated: updated, website: website)
let parser = FeedlyFeedParser(feed: feed)
XCTAssertEqual(parser.title, name)
XCTAssertEqual(parser.homePageURL, website)
XCTAssertEqual(parser.url, url)
XCTAssertEqual(parser.webFeedID, id)
}

func testSanitization() {
let name = "Test Feed"
let website = "tests://nnw/feed/1"
let url = "tests://nnw/feed.xml"
let id = "feed/\(url)"
let updated = Date.distantPast
let feed = FeedlyFeed(id: id, title: "<div style=\"direction:rtl;text-align:right\">\(name)</div>", updated: updated, website: website)
let parser = FeedlyFeedParser(feed: feed)
XCTAssertEqual(parser.title, name)
XCTAssertEqual(parser.homePageURL, website)
XCTAssertEqual(parser.url, url)
XCTAssertEqual(parser.webFeedID, id)
}
}

@ -9,6 +9,7 @@
import XCTest
@testable import Account
import os.log
import RSCore

class FeedlyGetCollectionsOperationTests: XCTestCase {

@ -20,11 +21,11 @@ class FeedlyGetCollectionsOperationTests: XCTestCase {

let getCollections = FeedlyGetCollectionsOperation(service: caller, log: support.log)
let completionExpectation = expectation(description: "Did Finish")
getCollections.completionBlock = {
getCollections.completionBlock = { _ in
completionExpectation.fulfill()
}

OperationQueue.main.addOperation(getCollections)
MainThreadOperationQueue.shared.add(getCollections)

waitForExpectations(timeout: 2)

@ -78,11 +79,11 @@ class FeedlyGetCollectionsOperationTests: XCTestCase {
getCollections.delegate = delegate

let completionExpectation = expectation(description: "Did Finish")
getCollections.completionBlock = {
getCollections.completionBlock = { _ in
completionExpectation.fulfill()
}

OperationQueue.main.addOperation(getCollections)
MainThreadOperationQueue.shared.add(getCollections)

waitForExpectations(timeout: 2)

@ -8,6 +8,7 @@

import XCTest
@testable import Account
import RSCore

class FeedlyGetStreamContentsOperationTests: XCTestCase {

@ -35,11 +36,11 @@ class FeedlyGetStreamContentsOperationTests: XCTestCase {
service.mockResult = .failure(URLError(.fileDoesNotExist))

let completionExpectation = expectation(description: "Did Finish")
getStreamContents.completionBlock = {
getStreamContents.completionBlock = { _ in
completionExpectation.fulfill()
}

OperationQueue.main.addOperation(getStreamContents)
MainThreadOperationQueue.shared.addOperation(getStreamContents)

waitForExpectations(timeout: 2)

@ -68,11 +69,11 @@ class FeedlyGetStreamContentsOperationTests: XCTestCase {
}

let completionExpectation = expectation(description: "Did Finish")
getStreamContents.completionBlock = {
getStreamContents.completionBlock = { _ in
completionExpectation.fulfill()
}

OperationQueue.main.addOperation(getStreamContents)
MainThreadOperationQueue.shared.addOperation(getStreamContents)

waitForExpectations(timeout: 2)

@ -100,11 +101,11 @@ class FeedlyGetStreamContentsOperationTests: XCTestCase {
let getStreamContents = FeedlyGetStreamContentsOperation(account: account, resource: resource, service: caller, continuation: nil, newerThan: nil, unreadOnly: nil, log: support.log)

let completionExpectation = expectation(description: "Did Finish")
getStreamContents.completionBlock = {
getStreamContents.completionBlock = { _ in
completionExpectation.fulfill()
}

OperationQueue.main.addOperation(getStreamContents)
MainThreadOperationQueue.shared.addOperation(getStreamContents)

waitForExpectations(timeout: 2)

@ -8,6 +8,7 @@

import XCTest
@testable import Account
import RSCore

class FeedlyGetStreamIdsOperationTests: XCTestCase {

@ -35,11 +36,11 @@ class FeedlyGetStreamIdsOperationTests: XCTestCase {
service.mockResult = .failure(URLError(.fileDoesNotExist))

let completionExpectation = expectation(description: "Did Finish")
getStreamIds.completionBlock = {
getStreamIds.completionBlock = { _ in
completionExpectation.fulfill()
}

OperationQueue.main.addOperation(getStreamIds)
MainThreadOperationQueue.shared.addOperation(getStreamIds)

waitForExpectations(timeout: 2)

@ -68,11 +69,11 @@ class FeedlyGetStreamIdsOperationTests: XCTestCase {
}

let completionExpectation = expectation(description: "Did Finish")
getStreamIds.completionBlock = {
getStreamIds.completionBlock = { _ in
completionExpectation.fulfill()
}

OperationQueue.main.addOperation(getStreamIds)
MainThreadOperationQueue.shared.addOperation(getStreamIds)

waitForExpectations(timeout: 2)

@ -95,11 +96,11 @@ class FeedlyGetStreamIdsOperationTests: XCTestCase {
let getStreamIds = FeedlyGetStreamIdsOperation(account: account, resource: resource, service: caller, continuation: nil, newerThan: nil, unreadOnly: nil, log: support.log)

let completionExpectation = expectation(description: "Did Finish")
getStreamIds.completionBlock = {
getStreamIds.completionBlock = { _ in
completionExpectation.fulfill()
}

OperationQueue.main.addOperation(getStreamIds)
MainThreadOperationQueue.shared.addOperation(getStreamIds)

waitForExpectations(timeout: 2)

@ -8,6 +8,7 @@

import XCTest
@testable import Account
import RSCore

class FeedlyLogoutOperationTests: XCTestCase {

@ -68,18 +69,17 @@ class FeedlyLogoutOperationTests: XCTestCase {

// If this expectation is not fulfilled, the operation is not calling `didFinish`.
let completionExpectation = expectation(description: "Did Finish")
logout.completionBlock = {
logout.completionBlock = { _ in
completionExpectation.fulfill()
}

OperationQueue.main.addOperation(logout)
MainThreadOperationQueue.shared.addOperation(logout)

logout.cancel()
MainThreadOperationQueue.shared.cancelOperations([logout])

waitForExpectations(timeout: 1)

XCTAssertTrue(logout.isCancelled)
XCTAssertTrue(logout.isFinished)
XCTAssertTrue(logout.isCanceled)

do {
let accountAccessToken = try account.retrieveCredentials(type: .oauthAccessToken)
@ -101,15 +101,15 @@ class FeedlyLogoutOperationTests: XCTestCase {

// If this expectation is not fulfilled, the operation is not calling `didFinish`.
let completionExpectation = expectation(description: "Did Finish")
logout.completionBlock = {
logout.completionBlock = { _ in
completionExpectation.fulfill()
}

OperationQueue.main.addOperation(logout)
MainThreadOperationQueue.shared.addOperation(logout)

waitForExpectations(timeout: 1)

XCTAssertFalse(logout.isCancelled)
XCTAssertFalse(logout.isCanceled)

do {
let accountAccessToken = try account.retrieveCredentials(type: .oauthAccessToken)
@ -147,15 +147,15 @@ class FeedlyLogoutOperationTests: XCTestCase {

// If this expectation is not fulfilled, the operation is not calling `didFinish`.
let completionExpectation = expectation(description: "Did Finish")
logout.completionBlock = {
logout.completionBlock = { _ in
completionExpectation.fulfill()
}

OperationQueue.main.addOperation(logout)
MainThreadOperationQueue.shared.addOperation(logout)

waitForExpectations(timeout: 1)

XCTAssertFalse(logout.isCancelled)
XCTAssertFalse(logout.isCanceled)

do {
let accountAccessToken = try account.retrieveCredentials(type: .oauthAccessToken)
@ -193,15 +193,15 @@ class FeedlyLogoutOperationTests: XCTestCase {

// If this expectation is not fulfilled, the operation is not calling `didFinish`.
let completionExpectation = expectation(description: "Did Finish")
logout.completionBlock = {
logout.completionBlock = { _ in
completionExpectation.fulfill()
}

OperationQueue.main.addOperation(logout)
MainThreadOperationQueue.shared.addOperation(logout)

waitForExpectations(timeout: 1)

XCTAssertFalse(logout.isCancelled)
XCTAssertFalse(logout.isCanceled)

do {
let accountAccessToken = try account.retrieveCredentials(type: .oauthAccessToken)

@ -8,6 +8,7 @@

import XCTest
@testable import Account
import RSCore

class FeedlyMirrorCollectionsAsFoldersOperationTests: XCTestCase {

@ -37,14 +38,14 @@ class FeedlyMirrorCollectionsAsFoldersOperationTests: XCTestCase {
let provider = CollectionsProvider()
let mirrorOperation = FeedlyMirrorCollectionsAsFoldersOperation(account: account, collectionsProvider: provider, log: support.log)
let completionExpectation = expectation(description: "Did Finish")
mirrorOperation.completionBlock = {
mirrorOperation.completionBlock = { _ in
completionExpectation.fulfill()
}

XCTAssertTrue(mirrorOperation.collectionsAndFolders.isEmpty)
XCTAssertTrue(mirrorOperation.feedsAndFolders.isEmpty)

OperationQueue.main.addOperation(mirrorOperation)
MainThreadOperationQueue.shared.addOperation(mirrorOperation)

waitForExpectations(timeout: 2)

@ -69,11 +70,11 @@ class FeedlyMirrorCollectionsAsFoldersOperationTests: XCTestCase {
do {
let addFolders = FeedlyMirrorCollectionsAsFoldersOperation(account: account, collectionsProvider: provider, log: support.log)
let completionExpectation = expectation(description: "Did Finish")
addFolders.completionBlock = {
addFolders.completionBlock = { _ in
completionExpectation.fulfill()
}

OperationQueue.main.addOperation(addFolders)
MainThreadOperationQueue.shared.addOperation(addFolders)

waitForExpectations(timeout: 2)
}
@ -83,11 +84,11 @@ class FeedlyMirrorCollectionsAsFoldersOperationTests: XCTestCase {

let removeFolders = FeedlyMirrorCollectionsAsFoldersOperation(account: account, collectionsProvider: provider, log: support.log)
let completionExpectation = expectation(description: "Did Finish")
removeFolders.completionBlock = {
removeFolders.completionBlock = { _ in
completionExpectation.fulfill()
}

OperationQueue.main.addOperation(removeFolders)
MainThreadOperationQueue.shared.addOperation(removeFolders)

waitForExpectations(timeout: 2)

@ -131,11 +132,11 @@ class FeedlyMirrorCollectionsAsFoldersOperationTests: XCTestCase {
let provider = CollectionsAndFeedsProvider()
let mirrorOperation = FeedlyMirrorCollectionsAsFoldersOperation(account: account, collectionsProvider: provider, log: support.log)
let completionExpectation = expectation(description: "Did Finish")
mirrorOperation.completionBlock = {
mirrorOperation.completionBlock = { _ in
completionExpectation.fulfill()
}

OperationQueue.main.addOperation(mirrorOperation)
MainThreadOperationQueue.shared.addOperation(mirrorOperation)

waitForExpectations(timeout: 2)

@ -172,14 +173,14 @@ class FeedlyMirrorCollectionsAsFoldersOperationTests: XCTestCase {
let addFoldersAndFeeds = FeedlyMirrorCollectionsAsFoldersOperation(account: account, collectionsProvider: provider, log: support.log)

let createFeeds = FeedlyCreateFeedsForCollectionFoldersOperation(account: account, feedsAndFoldersProvider: addFoldersAndFeeds, log: support.log)
createFeeds.addDependency(addFoldersAndFeeds)
MainThreadOperationQueue.shared.make(createFeeds, dependOn: addFoldersAndFeeds)

let completionExpectation = expectation(description: "Did Finish")
createFeeds.completionBlock = {
createFeeds.completionBlock = { _ in
completionExpectation.fulfill()
}

OperationQueue.main.addOperations([addFoldersAndFeeds, createFeeds], waitUntilFinished: false)
MainThreadOperationQueue.shared.addOperations([addFoldersAndFeeds, createFeeds])

waitForExpectations(timeout: 2)

@ -192,11 +193,11 @@ class FeedlyMirrorCollectionsAsFoldersOperationTests: XCTestCase {

let removeFolders = FeedlyMirrorCollectionsAsFoldersOperation(account: account, collectionsProvider: provider, log: support.log)
let completionExpectation = expectation(description: "Did Finish")
removeFolders.completionBlock = {
removeFolders.completionBlock = { _ in
completionExpectation.fulfill()
}

OperationQueue.main.addOperation(removeFolders)
MainThreadOperationQueue.shared.addOperation(removeFolders)

waitForExpectations(timeout: 2)

@ -9,6 +9,7 @@
import XCTest
@testable import Account
import RSWeb
import RSCore

class FeedlyOperationTests: XCTestCase {

@ -21,14 +22,15 @@ class FeedlyOperationTests: XCTestCase {
var didCallMainExpectation: XCTestExpectation?
var mockError: Error?

override func main() {
override func run() {
super.run()
// Should always call on main thread.
XCTAssertTrue(Thread.isMainThread)

didCallMainExpectation?.fulfill()

if let error = mockError {
didFinish(error)
didFinish(with: error)
} else {
didFinish()
}
@ -50,7 +52,7 @@ class FeedlyOperationTests: XCTestCase {
let testOperation = TestOperation()
testOperation.didCallMainExpectation = expectation(description: "Did Call Main")

OperationQueue.main.addOperation(testOperation)
MainThreadOperationQueue.shared.add(testOperation)

waitForExpectations(timeout: 2)
}
@ -65,7 +67,7 @@ class FeedlyOperationTests: XCTestCase {

testOperation.delegate = delegate

OperationQueue.main.addOperation(testOperation)
MainThreadOperationQueue.shared.add(testOperation)

waitForExpectations(timeout: 2)

@ -81,23 +83,18 @@ class FeedlyOperationTests: XCTestCase {
testOperation.didCallMainExpectation = expectation(description: "Did Call Main")

let completionExpectation = expectation(description: "Operation Completed")
testOperation.completionBlock = {
testOperation.completionBlock = { _ in
completionExpectation.fulfill()
}

XCTAssertTrue(testOperation.isReady)
XCTAssertFalse(testOperation.isFinished)
XCTAssertFalse(testOperation.isExecuting)
XCTAssertFalse(testOperation.isCancelled)

XCTAssertFalse(testOperation.isCanceled)

OperationQueue.main.addOperation(testOperation)
MainThreadOperationQueue.shared.add(testOperation)

waitForExpectations(timeout: 2)

XCTAssertTrue(testOperation.isReady)
XCTAssertTrue(testOperation.isFinished)
XCTAssertFalse(testOperation.isExecuting)
XCTAssertFalse(testOperation.isCancelled)
XCTAssertFalse(testOperation.isCanceled)
}

func testOperationCancellationFlags() {
@ -106,43 +103,37 @@ class FeedlyOperationTests: XCTestCase {
testOperation.didCallMainExpectation?.isInverted = true

let completionExpectation = expectation(description: "Operation Completed")
testOperation.completionBlock = {
testOperation.completionBlock = { _ in
completionExpectation.fulfill()
}

XCTAssertTrue(testOperation.isReady)
XCTAssertFalse(testOperation.isFinished)
XCTAssertFalse(testOperation.isExecuting)
XCTAssertFalse(testOperation.isCancelled)
XCTAssertFalse(testOperation.isCanceled)

OperationQueue.main.addOperation(testOperation)
MainThreadOperationQueue.shared.add(testOperation)

testOperation.cancel()
MainThreadOperationQueue.shared.cancelOperations([testOperation])

waitForExpectations(timeout: 2)

XCTAssertTrue(testOperation.isReady)
XCTAssertTrue(testOperation.isFinished)
XCTAssertFalse(testOperation.isExecuting)
XCTAssertTrue(testOperation.isCancelled)
XCTAssertTrue(testOperation.isCanceled)
}

func testDependency() {
let testOperation = TestOperation()
testOperation.didCallMainExpectation = expectation(description: "Did Call Main")

let dependencyExpectation = expectation(description: "Did Call Dependency")
let blockOperation = BlockOperation {
dependencyExpectation.fulfill()
}

blockOperation.addDependency(testOperation)

XCTAssertTrue(blockOperation.dependencies.contains(testOperation))

OperationQueue.main.addOperations([testOperation, blockOperation], waitUntilFinished: false)

waitForExpectations(timeout: 2)
// let testOperation = TestOperation()
// testOperation.didCallMainExpectation = expectation(description: "Did Call Main")
//
// let dependencyExpectation = expectation(description: "Did Call Dependency")
// let blockOperation = BlockOperation {
// dependencyExpectation.fulfill()
// }
//
// MainThreadOperationQueue.shared.make(blockOperation, dependOn: testOperation)
//
// //XCTAssertTrue(blockOperation.dependencies.contains(testOperation))
//
// MainThreadOperationQueue.shared.addOperations([testOperation, blockOperation])
//
// waitForExpectations(timeout: 2)
}

func testProgressReporting() {
@ -174,15 +165,15 @@ class FeedlyOperationTests: XCTestCase {
testOperation.downloadProgress = progress

let completionExpectation = expectation(description: "Operation Completed")
testOperation.completionBlock = {
testOperation.completionBlock = { _ in
completionExpectation.fulfill()
}

OperationQueue.main.addOperation(testOperation)
MainThreadOperationQueue.shared.add(testOperation)

XCTAssertTrue(progress.numberRemaining == 1)
testOperation.cancel()
XCTAssertTrue(progress.numberRemaining == 1)
MainThreadOperationQueue.shared.cancelOperations([testOperation])
XCTAssertTrue(progress.numberRemaining == 0)

waitForExpectations(timeout: 2)

@ -200,11 +191,11 @@ class FeedlyOperationTests: XCTestCase {
testOperation.downloadProgress = progress

let completionExpectation = expectation(description: "Operation Completed")
testOperation.completionBlock = {
testOperation.completionBlock = { _ in
completionExpectation.fulfill()
}

OperationQueue.main.addOperation(testOperation)
MainThreadOperationQueue.shared.add(testOperation)

XCTAssertTrue(progress.numberRemaining == 1)

@ -225,11 +216,11 @@ class FeedlyOperationTests: XCTestCase {
testOperation.downloadProgress = progress

let completionExpectation = expectation(description: "Operation Completed")
testOperation.completionBlock = {
testOperation.completionBlock = { _ in
completionExpectation.fulfill()
}

OperationQueue.main.addOperation(testOperation)
MainThreadOperationQueue.shared.add(testOperation)

XCTAssertTrue(progress.numberRemaining == 1)

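Review note: the TestOperation changes above outline the new FeedlyOperation surface — subclasses override run() instead of Operation.main(), report completion through didFinish()/didFinish(with:), and cancellation is observed via isCanceled rather than Foundation's isCancelled. A hedged minimal subclass using only the members that appear in this test diff; the assert stands in for the test's main-thread expectation.

```swift
import Foundation

// Sketch only: a minimal FeedlyOperation subclass in the new style.
final class NoopFeedlyOperation: FeedlyOperation {

    var mockError: Error?

    override func run() {
        super.run()

        // Runs on the main thread, as TestOperation asserts above.
        assert(Thread.isMainThread)

        if let error = mockError {
            didFinish(with: error)   // finish with an error
        } else {
            didFinish()              // finish successfully
        }
    }
}
```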
@ -9,6 +9,7 @@
import XCTest
@testable import Account
import RSParser
import RSCore

class FeedlyOrganiseParsedItemsByFeedOperationTests: XCTestCase {

@ -28,6 +29,7 @@ class FeedlyOrganiseParsedItemsByFeedOperationTests: XCTestCase {
}

struct TestParsedItemsProvider: FeedlyParsedItemProviding {
let parsedItemProviderName = "TestParsedItemsProvider"
var resource: FeedlyResourceId
var parsedEntries: Set<ParsedItem>
}
@ -41,17 +43,16 @@ class FeedlyOrganiseParsedItemsByFeedOperationTests: XCTestCase {
let organise = FeedlyOrganiseParsedItemsByFeedOperation(account: account, parsedItemProvider: provider, log: support.log)

let completionExpectation = expectation(description: "Did Finish")
organise.completionBlock = {
organise.completionBlock = { _ in
completionExpectation.fulfill()
}

OperationQueue.main.addOperation(organise)
MainThreadOperationQueue.shared.addOperation(organise)

waitForExpectations(timeout: 2)

let itemsAndFeedIds = organise.parsedItemsKeyedByFeedId
XCTAssertEqual(itemsAndFeedIds, entries)
XCTAssertEqual(resource.id, organise.providerName)
}

func testGroupsOneEntryByFeedId() {
@ -63,17 +64,16 @@ class FeedlyOrganiseParsedItemsByFeedOperationTests: XCTestCase {
let organise = FeedlyOrganiseParsedItemsByFeedOperation(account: account, parsedItemProvider: provider, log: support.log)

let completionExpectation = expectation(description: "Did Finish")
organise.completionBlock = {
organise.completionBlock = { _ in
completionExpectation.fulfill()
}

OperationQueue.main.addOperation(organise)
MainThreadOperationQueue.shared.addOperation(organise)

waitForExpectations(timeout: 2)

let itemsAndFeedIds = organise.parsedItemsKeyedByFeedId
XCTAssertEqual(itemsAndFeedIds, entries)
XCTAssertEqual(resource.id, organise.providerName)
}

func testGroupsManyEntriesByFeedId() {
@ -85,16 +85,15 @@ class FeedlyOrganiseParsedItemsByFeedOperationTests: XCTestCase {
let organise = FeedlyOrganiseParsedItemsByFeedOperation(account: account, parsedItemProvider: provider, log: support.log)

let completionExpectation = expectation(description: "Did Finish")
organise.completionBlock = {
organise.completionBlock = { _ in
completionExpectation.fulfill()
}

OperationQueue.main.addOperation(organise)
MainThreadOperationQueue.shared.addOperation(organise)

waitForExpectations(timeout: 2)

let itemsAndFeedIds = organise.parsedItemsKeyedByFeedId
XCTAssertEqual(itemsAndFeedIds, entries)
XCTAssertEqual(resource.id, organise.providerName)
}
}

@ -9,6 +9,7 @@
import XCTest
@testable import Account
import RSWeb
import RSCore

class FeedlyRefreshAccessTokenOperationTests: XCTestCase {

@ -56,17 +57,17 @@ class FeedlyRefreshAccessTokenOperationTests: XCTestCase {

// If this expectation is not fulfilled, the operation is not calling `didFinish`.
let completionExpectation = expectation(description: "Did Finish")
refresh.completionBlock = {
refresh.completionBlock = { _ in
completionExpectation.fulfill()
}

OperationQueue.main.addOperation(refresh)
MainThreadOperationQueue.shared.addOperation(refresh)

refresh.cancel()
MainThreadOperationQueue.shared.cancelOperations([refresh])

waitForExpectations(timeout: 1)

XCTAssertTrue(refresh.isCancelled)
XCTAssertTrue(refresh.isCanceled)
}

class TestRefreshTokenDelegate: FeedlyOperationDelegate {
@ -95,11 +96,11 @@ class FeedlyRefreshAccessTokenOperationTests: XCTestCase {

// If this expectation is not fulfilled, the operation is not calling `didFinish`.
let completionExpectation = expectation(description: "Did Finish")
refresh.completionBlock = {
refresh.completionBlock = { _ in
completionExpectation.fulfill()
}

OperationQueue.main.addOperation(refresh)
MainThreadOperationQueue.shared.addOperation(refresh)

waitForExpectations(timeout: 1)

@ -142,11 +143,11 @@ class FeedlyRefreshAccessTokenOperationTests: XCTestCase {

// If this expectation is not fulfilled, the operation is not calling `didFinish`.
let completionExpectation = expectation(description: "Did Finish")
refresh.completionBlock = {
refresh.completionBlock = { _ in
completionExpectation.fulfill()
}

OperationQueue.main.addOperation(refresh)
MainThreadOperationQueue.shared.addOperation(refresh)

waitForExpectations(timeout: 1)

@ -196,11 +197,11 @@ class FeedlyRefreshAccessTokenOperationTests: XCTestCase {

// If this expectation is not fulfilled, the operation is not calling `didFinish`.
let completionExpectation = expectation(description: "Did Finish")
refresh.completionBlock = {
refresh.completionBlock = { _ in
completionExpectation.fulfill()
}

OperationQueue.main.addOperation(refresh)
MainThreadOperationQueue.shared.addOperation(refresh)

waitForExpectations(timeout: 1)

@ -10,6 +10,7 @@ import XCTest
@testable import Account
import SyncDatabase
import Articles
import RSCore

class FeedlySendArticleStatusesOperationTests: XCTestCase {

@ -36,11 +37,11 @@ class FeedlySendArticleStatusesOperationTests: XCTestCase {
let send = FeedlySendArticleStatusesOperation(database: container.database, service: service, log: support.log)

let didFinishExpectation = expectation(description: "Did Finish")
send.completionBlock = {
send.completionBlock = { _ in
didFinishExpectation.fulfill()
}

OperationQueue.main.addOperation(send)
MainThreadOperationQueue.shared.addOperation(send)

waitForExpectations(timeout: 2)
}
@ -50,7 +51,8 @@ class FeedlySendArticleStatusesOperationTests: XCTestCase {
let statuses = articleIds.map { SyncStatus(articleID: $0, key: .read, flag: false) }

let insertExpectation = expectation(description: "Inserted Statuses")
container.database.insertStatuses(statuses) {
container.database.insertStatuses(statuses) { error in
XCTAssertNil(error)
insertExpectation.fulfill()
}

@ -66,15 +68,25 @@ class FeedlySendArticleStatusesOperationTests: XCTestCase {
let send = FeedlySendArticleStatusesOperation(database: container.database, service: service, log: support.log)

let didFinishExpectation = expectation(description: "Did Finish")
send.completionBlock = {
send.completionBlock = { _ in
didFinishExpectation.fulfill()
}

OperationQueue.main.addOperation(send)
MainThreadOperationQueue.shared.addOperation(send)

waitForExpectations(timeout: 2)

XCTAssertEqual(container.database.selectPendingCount(), 0)
let selectPendingCountExpectation = expectation(description: "Did Select Pending Count")
container.database.selectPendingCount { result in
do {
let statusCount = try result.get()
XCTAssertEqual(statusCount, 0)
selectPendingCountExpectation.fulfill()
} catch {
XCTFail("Error unwrapping database result: \(error)")
}
}
waitForExpectations(timeout: 2)
}
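Review note: selectPendingCount is now asynchronous and delivers a Result, so each test wraps it in an expectation as shown above. A hedged sketch of a reusable test helper for that pattern; the helper name is hypothetical, and it assumes the database handle is the SyncDatabase type that `container.database` appears to be in these tests.

```swift
import XCTest
import SyncDatabase

// Sketch only: packaging the asynchronous selectPendingCount call in an expectation.
extension XCTestCase {

    func assertPendingStatusCount(_ expected: Int, in database: SyncDatabase, timeout: TimeInterval = 2) {
        let countExpectation = expectation(description: "Did Select Pending Count")
        database.selectPendingCount { result in
            do {
                let statusCount = try result.get()
                XCTAssertEqual(statusCount, expected)
            } catch {
                XCTFail("Error unwrapping database result: \(error)")
            }
            countExpectation.fulfill()
        }
        wait(for: [countExpectation], timeout: timeout)
    }
}
```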

func testSendUnreadFailure() {
@ -82,7 +94,8 @@ class FeedlySendArticleStatusesOperationTests: XCTestCase {
let statuses = articleIds.map { SyncStatus(articleID: $0, key: .read, flag: false) }

let insertExpectation = expectation(description: "Inserted Statuses")
container.database.insertStatuses(statuses) {
container.database.insertStatuses(statuses) { error in
XCTAssertNil(error)
insertExpectation.fulfill()
}

@ -98,15 +111,25 @@ class FeedlySendArticleStatusesOperationTests: XCTestCase {
let send = FeedlySendArticleStatusesOperation(database: container.database, service: service, log: support.log)

let didFinishExpectation = expectation(description: "Did Finish")
send.completionBlock = {
send.completionBlock = { _ in
didFinishExpectation.fulfill()
}

OperationQueue.main.addOperation(send)
MainThreadOperationQueue.shared.addOperation(send)

waitForExpectations(timeout: 2)

XCTAssertEqual(container.database.selectPendingCount(), statuses.count)
let selectPendingCountExpectation = expectation(description: "Did Select Pending Count")
container.database.selectPendingCount { result in
do {
let statusCount = try result.get()
XCTAssertEqual(statusCount, statuses.count)
selectPendingCountExpectation.fulfill()
} catch {
XCTFail("Error unwrapping database result: \(error)")
}
}
waitForExpectations(timeout: 2)
}

func testSendReadSuccess() {
@ -114,7 +137,8 @@ class FeedlySendArticleStatusesOperationTests: XCTestCase {
let statuses = articleIds.map { SyncStatus(articleID: $0, key: .read, flag: true) }

let insertExpectation = expectation(description: "Inserted Statuses")
container.database.insertStatuses(statuses) {
container.database.insertStatuses(statuses) { error in
XCTAssertNil(error)
insertExpectation.fulfill()
}

@ -130,15 +154,25 @@ class FeedlySendArticleStatusesOperationTests: XCTestCase {
let send = FeedlySendArticleStatusesOperation(database: container.database, service: service, log: support.log)

let didFinishExpectation = expectation(description: "Did Finish")
send.completionBlock = {
send.completionBlock = { _ in
didFinishExpectation.fulfill()
}

OperationQueue.main.addOperation(send)
MainThreadOperationQueue.shared.addOperation(send)

waitForExpectations(timeout: 2)

XCTAssertEqual(container.database.selectPendingCount(), 0)
let selectPendingCountExpectation = expectation(description: "Did Select Pending Count")
container.database.selectPendingCount { result in
do {
let statusCount = try result.get()
XCTAssertEqual(statusCount, 0)
selectPendingCountExpectation.fulfill()
} catch {
XCTFail("Error unwrapping database result: \(error)")
}
}
waitForExpectations(timeout: 2)
}

func testSendReadFailure() {
@ -146,7 +180,8 @@ class FeedlySendArticleStatusesOperationTests: XCTestCase {
let statuses = articleIds.map { SyncStatus(articleID: $0, key: .read, flag: true) }

let insertExpectation = expectation(description: "Inserted Statuses")
container.database.insertStatuses(statuses) {
container.database.insertStatuses(statuses) { error in
XCTAssertNil(error)
insertExpectation.fulfill()
}

@ -162,15 +197,25 @@ class FeedlySendArticleStatusesOperationTests: XCTestCase {
let send = FeedlySendArticleStatusesOperation(database: container.database, service: service, log: support.log)

let didFinishExpectation = expectation(description: "Did Finish")
send.completionBlock = {
send.completionBlock = { _ in
didFinishExpectation.fulfill()
}

OperationQueue.main.addOperation(send)
MainThreadOperationQueue.shared.addOperation(send)

waitForExpectations(timeout: 2)

XCTAssertEqual(container.database.selectPendingCount(), statuses.count)
let selectPendingCountExpectation = expectation(description: "Did Select Pending Count")
container.database.selectPendingCount { result in
do {
let statusCount = try result.get()
XCTAssertEqual(statusCount, statuses.count)
selectPendingCountExpectation.fulfill()
|
||||
} catch {
|
||||
XCTFail("Error unwrapping database result: \(error)")
|
||||
}
|
||||
}
|
||||
waitForExpectations(timeout: 2)
|
||||
}
|
||||
|
||||
func testSendStarredSuccess() {
|
||||
|
@ -178,7 +223,8 @@ class FeedlySendArticleStatusesOperationTests: XCTestCase {
|
|||
let statuses = articleIds.map { SyncStatus(articleID: $0, key: .starred, flag: true) }
|
||||
|
||||
let insertExpectation = expectation(description: "Inserted Statuses")
|
||||
container.database.insertStatuses(statuses) {
|
||||
container.database.insertStatuses(statuses) { error in
|
||||
XCTAssertNil(error)
|
||||
insertExpectation.fulfill()
|
||||
}
|
||||
|
||||
|
@ -194,15 +240,25 @@ class FeedlySendArticleStatusesOperationTests: XCTestCase {
|
|||
let send = FeedlySendArticleStatusesOperation(database: container.database, service: service, log: support.log)
|
||||
|
||||
let didFinishExpectation = expectation(description: "Did Finish")
|
||||
send.completionBlock = {
|
||||
send.completionBlock = { _ in
|
||||
didFinishExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(send)
|
||||
MainThreadOperationQueue.shared.addOperation(send)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
|
||||
XCTAssertEqual(container.database.selectPendingCount(), 0)
|
||||
let selectPendingCountExpectation = expectation(description: "Did Select Pending Count")
|
||||
container.database.selectPendingCount { result in
|
||||
do {
|
||||
let statusCount = try result.get()
|
||||
XCTAssertEqual(statusCount, 0)
|
||||
selectPendingCountExpectation.fulfill()
|
||||
} catch {
|
||||
XCTFail("Error unwrapping database result: \(error)")
|
||||
}
|
||||
}
|
||||
waitForExpectations(timeout: 2)
|
||||
}
|
||||
|
||||
func testSendStarredFailure() {
|
||||
|
@ -210,7 +266,8 @@ class FeedlySendArticleStatusesOperationTests: XCTestCase {
|
|||
let statuses = articleIds.map { SyncStatus(articleID: $0, key: .starred, flag: true) }
|
||||
|
||||
let insertExpectation = expectation(description: "Inserted Statuses")
|
||||
container.database.insertStatuses(statuses) {
|
||||
container.database.insertStatuses(statuses) { error in
|
||||
XCTAssertNil(error)
|
||||
insertExpectation.fulfill()
|
||||
}
|
||||
|
||||
|
@ -226,15 +283,25 @@ class FeedlySendArticleStatusesOperationTests: XCTestCase {
|
|||
let send = FeedlySendArticleStatusesOperation(database: container.database, service: service, log: support.log)
|
||||
|
||||
let didFinishExpectation = expectation(description: "Did Finish")
|
||||
send.completionBlock = {
|
||||
send.completionBlock = { _ in
|
||||
didFinishExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(send)
|
||||
MainThreadOperationQueue.shared.addOperation(send)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
|
||||
XCTAssertEqual(container.database.selectPendingCount(), statuses.count)
|
||||
let selectPendingCountExpectation = expectation(description: "Did Select Pending Count")
|
||||
container.database.selectPendingCount { result in
|
||||
do {
|
||||
let statusCount = try result.get()
|
||||
XCTAssertEqual(statusCount, statuses.count)
|
||||
selectPendingCountExpectation.fulfill()
|
||||
} catch {
|
||||
XCTFail("Error unwrapping database result: \(error)")
|
||||
}
|
||||
}
|
||||
waitForExpectations(timeout: 2)
|
||||
}
|
||||
|
||||
func testSendUnstarredSuccess() {
|
||||
|
@ -242,7 +309,8 @@ class FeedlySendArticleStatusesOperationTests: XCTestCase {
|
|||
let statuses = articleIds.map { SyncStatus(articleID: $0, key: .starred, flag: false) }
|
||||
|
||||
let insertExpectation = expectation(description: "Inserted Statuses")
|
||||
container.database.insertStatuses(statuses) {
|
||||
container.database.insertStatuses(statuses) { error in
|
||||
XCTAssertNil(error)
|
||||
insertExpectation.fulfill()
|
||||
}
|
||||
|
||||
|
@ -258,15 +326,25 @@ class FeedlySendArticleStatusesOperationTests: XCTestCase {
|
|||
let send = FeedlySendArticleStatusesOperation(database: container.database, service: service, log: support.log)
|
||||
|
||||
let didFinishExpectation = expectation(description: "Did Finish")
|
||||
send.completionBlock = {
|
||||
send.completionBlock = { _ in
|
||||
didFinishExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(send)
|
||||
MainThreadOperationQueue.shared.addOperation(send)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
|
||||
XCTAssertEqual(container.database.selectPendingCount(), 0)
|
||||
let selectPendingCountExpectation = expectation(description: "Did Select Pending Count")
|
||||
container.database.selectPendingCount { result in
|
||||
do {
|
||||
let statusCount = try result.get()
|
||||
XCTAssertEqual(statusCount, 0)
|
||||
selectPendingCountExpectation.fulfill()
|
||||
} catch {
|
||||
XCTFail("Error unwrapping database result: \(error)")
|
||||
}
|
||||
}
|
||||
waitForExpectations(timeout: 2)
|
||||
}
|
||||
|
||||
func testSendUnstarredFailure() {
|
||||
|
@ -274,7 +352,8 @@ class FeedlySendArticleStatusesOperationTests: XCTestCase {
|
|||
let statuses = articleIds.map { SyncStatus(articleID: $0, key: .starred, flag: false) }
|
||||
|
||||
let insertExpectation = expectation(description: "Inserted Statuses")
|
||||
container.database.insertStatuses(statuses) {
|
||||
container.database.insertStatuses(statuses) { error in
|
||||
XCTAssertNil(error)
|
||||
insertExpectation.fulfill()
|
||||
}
|
||||
|
||||
|
@ -290,15 +369,25 @@ class FeedlySendArticleStatusesOperationTests: XCTestCase {
|
|||
let send = FeedlySendArticleStatusesOperation(database: container.database, service: service, log: support.log)
|
||||
|
||||
let didFinishExpectation = expectation(description: "Did Finish")
|
||||
send.completionBlock = {
|
||||
send.completionBlock = { _ in
|
||||
didFinishExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(send)
|
||||
MainThreadOperationQueue.shared.addOperation(send)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
|
||||
XCTAssertEqual(container.database.selectPendingCount(), statuses.count)
|
||||
let selectPendingCountExpectation = expectation(description: "Did Select Pending Count")
|
||||
container.database.selectPendingCount { result in
|
||||
do {
|
||||
let expectedCount = try result.get()
|
||||
XCTAssertEqual(expectedCount, statuses.count)
|
||||
selectPendingCountExpectation.fulfill()
|
||||
} catch {
|
||||
XCTFail("Error unwrapping database result: \(error)")
|
||||
}
|
||||
}
|
||||
waitForExpectations(timeout: 2)
|
||||
}
|
||||
|
||||
func testSendAllSuccess() {
|
||||
|
@ -313,7 +402,8 @@ class FeedlySendArticleStatusesOperationTests: XCTestCase {
|
|||
}
|
||||
|
||||
let insertExpectation = expectation(description: "Inserted Statuses")
|
||||
container.database.insertStatuses(statuses) {
|
||||
container.database.insertStatuses(statuses) { error in
|
||||
XCTAssertNil(error)
|
||||
insertExpectation.fulfill()
|
||||
}
|
||||
|
||||
|
@ -339,14 +429,25 @@ class FeedlySendArticleStatusesOperationTests: XCTestCase {
|
|||
let send = FeedlySendArticleStatusesOperation(database: container.database, service: service, log: support.log)
|
||||
|
||||
let didFinishExpectation = expectation(description: "Did Finish")
|
||||
send.completionBlock = {
|
||||
send.completionBlock = { _ in
|
||||
didFinishExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(send)
|
||||
MainThreadOperationQueue.shared.addOperation(send)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
XCTAssertEqual(container.database.selectPendingCount(), 0)
|
||||
|
||||
let selectPendingCountExpectation = expectation(description: "Did Select Pending Count")
|
||||
container.database.selectPendingCount { result in
|
||||
do {
|
||||
let statusCount = try result.get()
|
||||
XCTAssertEqual(statusCount, 0)
|
||||
selectPendingCountExpectation.fulfill()
|
||||
} catch {
|
||||
XCTFail("Error unwrapping database result: \(error)")
|
||||
}
|
||||
}
|
||||
waitForExpectations(timeout: 2)
|
||||
}
|
||||
|
||||
func testSendAllFailure() {
|
||||
|
@ -361,7 +462,8 @@ class FeedlySendArticleStatusesOperationTests: XCTestCase {
|
|||
}
|
||||
|
||||
let insertExpectation = expectation(description: "Inserted Statuses")
|
||||
container.database.insertStatuses(statuses) {
|
||||
container.database.insertStatuses(statuses) { error in
|
||||
XCTAssertNil(error)
|
||||
insertExpectation.fulfill()
|
||||
}
|
||||
|
||||
|
@ -388,14 +490,24 @@ class FeedlySendArticleStatusesOperationTests: XCTestCase {
|
|||
let send = FeedlySendArticleStatusesOperation(database: container.database, service: service, log: support.log)
|
||||
|
||||
let didFinishExpectation = expectation(description: "Did Finish")
|
||||
send.completionBlock = {
|
||||
send.completionBlock = { _ in
|
||||
didFinishExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(send)
|
||||
MainThreadOperationQueue.shared.addOperation(send)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
|
||||
XCTAssertEqual(container.database.selectPendingCount(), statuses.count)
|
||||
let selectPendingCountExpectation = expectation(description: "Did Select Pending Count")
|
||||
container.database.selectPendingCount { result in
|
||||
do {
|
||||
let statusCount = try result.get()
|
||||
XCTAssertEqual(statusCount, statuses.count)
|
||||
selectPendingCountExpectation.fulfill()
|
||||
} catch {
|
||||
XCTFail("Error unwrapping database result: \(error)")
|
||||
}
|
||||
}
|
||||
waitForExpectations(timeout: 2)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,437 +0,0 @@
|
|||
//
|
||||
// FeedlySetStarredArticlesOperationTests.swift
|
||||
// AccountTests
|
||||
//
|
||||
// Created by Kiel Gillard on 25/10/19.
|
||||
// Copyright © 2019 Ranchero Software, LLC. All rights reserved.
|
||||
//
|
||||
|
||||
import XCTest
|
||||
@testable import Account
|
||||
import RSParser
|
||||
|
||||
class FeedlySetStarredArticlesOperationTests: XCTestCase {
|
||||
|
||||
private var account: Account!
|
||||
private let support = FeedlyTestSupport()
|
||||
|
||||
override func setUp() {
|
||||
super.setUp()
|
||||
account = support.makeTestAccount()
|
||||
}
|
||||
|
||||
override func tearDown() {
|
||||
if let account = account {
|
||||
support.destroy(account)
|
||||
}
|
||||
super.tearDown()
|
||||
}
|
||||
|
||||
// MARK: - Ensuring Unread Status By Id
|
||||
|
||||
struct TestStarredArticleProvider: FeedlyStarredEntryIdProviding {
|
||||
var entryIds: Set<String>
|
||||
}
|
||||
|
||||
func testEmptyArticleIds() {
|
||||
let testIds = Set<String>()
|
||||
let provider = TestStarredArticleProvider(entryIds: testIds)
|
||||
|
||||
let setStarred = FeedlySetStarredArticlesOperation(account: account, allStarredEntryIdsProvider: provider, log: support.log)
|
||||
|
||||
let completionExpectation = expectation(description: "Did Finish")
|
||||
setStarred.completionBlock = {
|
||||
completionExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(setStarred)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
|
||||
let fetchIdsExpectation = expectation(description: "Fetch Article Ids")
|
||||
account.fetchStarredArticleIDs { accountArticlesIDs in
|
||||
XCTAssertTrue(accountArticlesIDs.isEmpty)
|
||||
XCTAssertEqual(accountArticlesIDs, testIds)
|
||||
fetchIdsExpectation.fulfill()
|
||||
}
|
||||
waitForExpectations(timeout: 2)
|
||||
}
|
||||
|
||||
func testSetOneArticleIdStarred() {
|
||||
let testIds = Set<String>(["feed/0/article/0"])
|
||||
let provider = TestStarredArticleProvider(entryIds: testIds)
|
||||
|
||||
let setStarred = FeedlySetStarredArticlesOperation(account: account, allStarredEntryIdsProvider: provider, log: support.log)
|
||||
|
||||
let completionExpectation = expectation(description: "Did Finish")
|
||||
setStarred.completionBlock = {
|
||||
completionExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(setStarred)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
|
||||
let fetchIdsExpectation = expectation(description: "Fetch Article Ids")
|
||||
account.fetchStarredArticleIDs { accountArticlesIDs in
|
||||
XCTAssertEqual(accountArticlesIDs.count, testIds.count)
|
||||
fetchIdsExpectation.fulfill()
|
||||
}
|
||||
waitForExpectations(timeout: 2)
|
||||
}
|
||||
|
||||
func testSetManyArticleIdsStarred() {
|
||||
let testIds = Set<String>((0..<10_000).map { "feed/0/article/\($0)" })
|
||||
let provider = TestStarredArticleProvider(entryIds: testIds)
|
||||
|
||||
let setStarred = FeedlySetStarredArticlesOperation(account: account, allStarredEntryIdsProvider: provider, log: support.log)
|
||||
|
||||
let completionExpectation = expectation(description: "Did Finish")
|
||||
setStarred.completionBlock = {
|
||||
completionExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(setStarred)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
|
||||
let fetchIdsExpectation = expectation(description: "Fetch Article Ids")
|
||||
account.fetchStarredArticleIDs { accountArticlesIDs in
|
||||
XCTAssertEqual(accountArticlesIDs.count, testIds.count)
|
||||
fetchIdsExpectation.fulfill()
|
||||
}
|
||||
waitForExpectations(timeout: 2)
|
||||
}
|
||||
|
||||
func testSetSomeArticleIdsUnstarred() {
|
||||
let initialStarredIds = Set<String>((0..<1000).map { "feed/0/article/\($0)" })
|
||||
|
||||
do {
|
||||
let provider = TestStarredArticleProvider(entryIds: initialStarredIds)
|
||||
let setStarred = FeedlySetStarredArticlesOperation(account: account, allStarredEntryIdsProvider: provider, log: support.log)
|
||||
|
||||
let completionExpectation = expectation(description: "Did Finish Setting Initial Unreads")
|
||||
setStarred.completionBlock = {
|
||||
completionExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(setStarred)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
}
|
||||
|
||||
let remainingStarredIds = Set(initialStarredIds.enumerated().filter { $0.offset % 2 > 0 }.map { $0.element })
|
||||
let provider = TestStarredArticleProvider(entryIds: remainingStarredIds)
|
||||
let setStarred = FeedlySetStarredArticlesOperation(account: account, allStarredEntryIdsProvider: provider, log: support.log)
|
||||
|
||||
let completionExpectation = expectation(description: "Did Finish")
|
||||
setStarred.completionBlock = {
|
||||
completionExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(setStarred)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
|
||||
let fetchIdsExpectation = expectation(description: "Fetch Article Ids")
|
||||
account.fetchStarredArticleIDs { remainingAccountArticlesIDs in
|
||||
XCTAssertEqual(remainingAccountArticlesIDs, remainingStarredIds)
|
||||
fetchIdsExpectation.fulfill()
|
||||
}
|
||||
waitForExpectations(timeout: 2)
|
||||
}
|
||||
|
||||
func testSetAllArticleIdsUnstarred() {
|
||||
let initialStarredIds = Set<String>((0..<1000).map { "feed/0/article/\($0)" })
|
||||
|
||||
do {
|
||||
let provider = TestStarredArticleProvider(entryIds: initialStarredIds)
|
||||
let setStarred = FeedlySetStarredArticlesOperation(account: account, allStarredEntryIdsProvider: provider, log: support.log)
|
||||
|
||||
let completionExpectation = expectation(description: "Did Finish Setting Initial Unreads")
|
||||
setStarred.completionBlock = {
|
||||
completionExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(setStarred)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
}
|
||||
|
||||
let remainingStarredIds = Set<String>()
|
||||
let provider = TestStarredArticleProvider(entryIds: remainingStarredIds)
|
||||
let setStarred = FeedlySetStarredArticlesOperation(account: account, allStarredEntryIdsProvider: provider, log: support.log)
|
||||
|
||||
let completionExpectation = expectation(description: "Did Finish")
|
||||
setStarred.completionBlock = {
|
||||
completionExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(setStarred)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
|
||||
let fetchIdsExpectation = expectation(description: "Fetch Article Ids")
|
||||
account.fetchStarredArticleIDs { remainingAccountArticlesIDs in
|
||||
XCTAssertEqual(remainingAccountArticlesIDs, remainingStarredIds)
|
||||
fetchIdsExpectation.fulfill()
|
||||
}
|
||||
waitForExpectations(timeout: 2)
|
||||
}
|
||||
|
||||
// MARK: - Updating Article Unread Status
|
||||
|
||||
struct TestItemsByFeedProvider: FeedlyParsedItemsByFeedProviding {
|
||||
var providerName: String
|
||||
var parsedItemsKeyedByFeedId: [String: Set<ParsedItem>]
|
||||
}
|
||||
|
||||
func testSetAllArticlesStarred() {
|
||||
let testItemsAndFeeds = support.makeParsedItemTestDataFor(numberOfFeeds: 5, numberOfItemsInFeeds: 100)
|
||||
|
||||
do {
|
||||
let resource = FeedlyCategoryResourceId(id: "user/12345/category/6789")
|
||||
let provider = TestItemsByFeedProvider(providerName: resource.id, parsedItemsKeyedByFeedId: testItemsAndFeeds)
|
||||
let update = FeedlyUpdateAccountFeedsWithItemsOperation(account: account, organisedItemsProvider: provider, log: support.log)
|
||||
|
||||
let completionExpectation = expectation(description: "Did Finish")
|
||||
update.completionBlock = {
|
||||
completionExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(update)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
}
|
||||
|
||||
let testItems = Set(testItemsAndFeeds.flatMap { $0.value })
|
||||
let remainingStarredIds = Set(testItems.compactMap { $0.syncServiceID })
|
||||
XCTAssertEqual(testItems.count, remainingStarredIds.count, "Not every item has a value for \(\ParsedItem.syncServiceID).")
|
||||
|
||||
let provider = TestStarredArticleProvider(entryIds: remainingStarredIds)
|
||||
let setStarred = FeedlySetStarredArticlesOperation(account: account, allStarredEntryIdsProvider: provider, log: support.log)
|
||||
|
||||
let completionExpectation = expectation(description: "Did Finish")
|
||||
setStarred.completionBlock = {
|
||||
completionExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(setStarred)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
|
||||
let fetchIdsExpectation = expectation(description: "Fetch Article Ids")
|
||||
account.fetchStarredArticleIDs { accountArticlesIDs in
|
||||
XCTAssertEqual(accountArticlesIDs, remainingStarredIds)
|
||||
|
||||
let idsOfStarredArticles = Set(self.account
|
||||
.fetchArticles(.articleIDs(remainingStarredIds))
|
||||
.filter { $0.status.boolStatus(forKey: .starred) == true }
|
||||
.map { $0.articleID })
|
||||
|
||||
XCTAssertEqual(idsOfStarredArticles, remainingStarredIds)
|
||||
}
|
||||
waitForExpectations(timeout: 2)
|
||||
}
|
||||
|
||||
func testSetManyArticlesUnread() {
|
||||
let testItemsAndFeeds = support.makeParsedItemTestDataFor(numberOfFeeds: 5, numberOfItemsInFeeds: 100)
|
||||
|
||||
do {
|
||||
let resource = FeedlyCategoryResourceId(id: "user/12345/category/6789")
|
||||
let provider = TestItemsByFeedProvider(providerName: resource.id, parsedItemsKeyedByFeedId: testItemsAndFeeds)
|
||||
let update = FeedlyUpdateAccountFeedsWithItemsOperation(account: account, organisedItemsProvider: provider, log: support.log)
|
||||
|
||||
let completionExpectation = expectation(description: "Did Finish")
|
||||
update.completionBlock = {
|
||||
completionExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(update)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
}
|
||||
|
||||
let testItems = Set(testItemsAndFeeds.flatMap { $0.value })
|
||||
let unreadItems = testItems
|
||||
.enumerated()
|
||||
.filter { $0.offset % 2 > 0 }
|
||||
.map { $0.element }
|
||||
|
||||
let remainingStarredIds = Set(unreadItems.compactMap { $0.syncServiceID })
|
||||
XCTAssertEqual(unreadItems.count, remainingStarredIds.count, "Not every item has a value for \(\ParsedItem.syncServiceID).")
|
||||
|
||||
let provider = TestStarredArticleProvider(entryIds: remainingStarredIds)
|
||||
let setStarred = FeedlySetStarredArticlesOperation(account: account, allStarredEntryIdsProvider: provider, log: support.log)
|
||||
|
||||
let completionExpectation = expectation(description: "Did Finish")
|
||||
setStarred.completionBlock = {
|
||||
completionExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(setStarred)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
|
||||
let fetchIdsExpectation = expectation(description: "Fetch Article Ids")
|
||||
account.fetchStarredArticleIDs { accountArticlesIDs in
|
||||
XCTAssertEqual(accountArticlesIDs, remainingStarredIds)
|
||||
|
||||
let idsOfStarredArticles = Set(self.account
|
||||
.fetchArticles(.articleIDs(remainingStarredIds))
|
||||
.filter { $0.status.boolStatus(forKey: .starred) == true }
|
||||
.map { $0.articleID })
|
||||
|
||||
XCTAssertEqual(idsOfStarredArticles, remainingStarredIds)
|
||||
}
|
||||
waitForExpectations(timeout: 2)
|
||||
}
|
||||
|
||||
func testSetOneArticleUnread() {
|
||||
let testItemsAndFeeds = support.makeParsedItemTestDataFor(numberOfFeeds: 5, numberOfItemsInFeeds: 100)
|
||||
|
||||
do {
|
||||
let resource = FeedlyCategoryResourceId(id: "user/12345/category/6789")
|
||||
let provider = TestItemsByFeedProvider(providerName: resource.id, parsedItemsKeyedByFeedId: testItemsAndFeeds)
|
||||
let update = FeedlyUpdateAccountFeedsWithItemsOperation(account: account, organisedItemsProvider: provider, log: support.log)
|
||||
|
||||
let completionExpectation = expectation(description: "Did Finish")
|
||||
update.completionBlock = {
|
||||
completionExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(update)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
}
|
||||
|
||||
let testItems = Set(testItemsAndFeeds.flatMap { $0.value })
|
||||
// Since the test data is completely under the developer's control, not having at least one can be a programmer error.
|
||||
let remainingStarredIds = Set([testItems.compactMap { $0.syncServiceID }.first!])
|
||||
let provider = TestStarredArticleProvider(entryIds: remainingStarredIds)
|
||||
let setStarred = FeedlySetStarredArticlesOperation(account: account, allStarredEntryIdsProvider: provider, log: support.log)
|
||||
|
||||
let completionExpectation = expectation(description: "Did Finish")
|
||||
setStarred.completionBlock = {
|
||||
completionExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(setStarred)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
|
||||
let fetchIdsExpectation = expectation(description: "Fetch Article Ids")
|
||||
account.fetchStarredArticleIDs { accountArticlesIDs in
|
||||
XCTAssertEqual(accountArticlesIDs, remainingStarredIds)
|
||||
|
||||
let idsOfStarredArticles = Set(self.account
|
||||
.fetchArticles(.articleIDs(remainingStarredIds))
|
||||
.filter { $0.status.boolStatus(forKey: .starred) == true }
|
||||
.map { $0.articleID })
|
||||
|
||||
XCTAssertEqual(idsOfStarredArticles, remainingStarredIds)
|
||||
fetchIdsExpectation.fulfill()
|
||||
}
|
||||
waitForExpectations(timeout: 2)
|
||||
}
|
||||
|
||||
func testSetNoArticlesRead() {
|
||||
let testItemsAndFeeds = support.makeParsedItemTestDataFor(numberOfFeeds: 5, numberOfItemsInFeeds: 100)
|
||||
|
||||
do {
|
||||
let resource = FeedlyCategoryResourceId(id: "user/12345/category/6789")
|
||||
let provider = TestItemsByFeedProvider(providerName: resource.id, parsedItemsKeyedByFeedId: testItemsAndFeeds)
|
||||
|
||||
let update = FeedlyUpdateAccountFeedsWithItemsOperation(account: account, organisedItemsProvider: provider, log: support.log)
|
||||
|
||||
let completionExpectation = expectation(description: "Did Finish")
|
||||
update.completionBlock = {
|
||||
completionExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(update)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
}
|
||||
|
||||
let remainingStarredIds = Set<String>()
|
||||
let provider = TestStarredArticleProvider(entryIds: remainingStarredIds)
|
||||
let setStarred = FeedlySetStarredArticlesOperation(account: account, allStarredEntryIdsProvider: provider, log: support.log)
|
||||
|
||||
let completionExpectation = expectation(description: "Did Finish")
|
||||
setStarred.completionBlock = {
|
||||
completionExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(setStarred)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
|
||||
let fetchIdsExpectation = expectation(description: "Fetch Article Ids")
|
||||
account.fetchStarredArticleIDs { accountArticlesIDs in
|
||||
XCTAssertEqual(accountArticlesIDs, remainingStarredIds)
|
||||
|
||||
let idsOfStarredArticles = Set(self.account
|
||||
.fetchArticles(.articleIDs(remainingStarredIds))
|
||||
.filter { $0.status.boolStatus(forKey: .starred) == true }
|
||||
.map { $0.articleID })
|
||||
|
||||
XCTAssertEqual(idsOfStarredArticles, remainingStarredIds)
|
||||
fetchIdsExpectation.fulfill()
|
||||
}
|
||||
waitForExpectations(timeout: 2)
|
||||
}
|
||||
|
||||
func testSetAllArticlesAndArticleIdsWithSomeArticlesIngested() {
|
||||
let testItemsAndFeeds = support.makeParsedItemTestDataFor(numberOfFeeds: 5, numberOfItemsInFeeds: 100)
|
||||
let someItemsAndFeeds = Dictionary(uniqueKeysWithValues: testItemsAndFeeds.enumerated().filter { $0.offset % 2 > 0 }.map { $0.element })
|
||||
|
||||
do {
|
||||
let resource = FeedlyCategoryResourceId(id: "user/12345/category/6789")
|
||||
let provider = TestItemsByFeedProvider(providerName: resource.id, parsedItemsKeyedByFeedId: someItemsAndFeeds)
|
||||
let update = FeedlyUpdateAccountFeedsWithItemsOperation(account: account, organisedItemsProvider: provider, log: support.log)
|
||||
|
||||
let completionExpectation = expectation(description: "Did Finish")
|
||||
update.completionBlock = {
|
||||
completionExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(update)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
}
|
||||
|
||||
let testItems = Set(testItemsAndFeeds.flatMap { $0.value })
|
||||
let remainingStarredIds = Set(testItems.compactMap { $0.syncServiceID })
|
||||
XCTAssertEqual(testItems.count, remainingStarredIds.count, "Not every item has a value for \(\ParsedItem.syncServiceID).")
|
||||
|
||||
let provider = TestStarredArticleProvider(entryIds: remainingStarredIds)
|
||||
let setStarred = FeedlySetStarredArticlesOperation(account: account, allStarredEntryIdsProvider: provider, log: support.log)
|
||||
|
||||
let completionExpectation = expectation(description: "Did Finish")
|
||||
setStarred.completionBlock = {
|
||||
completionExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(setStarred)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
|
||||
let fetchIdsExpectation = expectation(description: "Fetch Article Ids")
|
||||
account.fetchStarredArticleIDs { accountArticlesIDs in
|
||||
XCTAssertEqual(accountArticlesIDs, remainingStarredIds)
|
||||
|
||||
let someTestItems = Set(someItemsAndFeeds.flatMap { $0.value })
|
||||
let someRemainingStarredIdsOfIngestedArticles = Set(someTestItems.compactMap { $0.syncServiceID })
|
||||
let idsOfStarredArticles = Set(self.account
|
||||
.fetchArticles(.articleIDs(someRemainingStarredIdsOfIngestedArticles))
|
||||
.filter { $0.status.boolStatus(forKey: .starred) == true }
|
||||
.map { $0.articleID })
|
||||
|
||||
XCTAssertEqual(idsOfStarredArticles, someRemainingStarredIdsOfIngestedArticles)
|
||||
|
||||
fetchIdsExpectation.fulfill()
|
||||
}
|
||||
waitForExpectations(timeout: 2)
|
||||
}
|
||||
}
|
|
@ -1,435 +0,0 @@
|
|||
//
|
||||
// FeedlySetUnreadArticlesOperationTests.swift
|
||||
// AccountTests
|
||||
//
|
||||
// Created by Kiel Gillard on 24/10/19.
|
||||
// Copyright © 2019 Ranchero Software, LLC. All rights reserved.
|
||||
//
|
||||
|
||||
import XCTest
|
||||
@testable import Account
|
||||
import RSParser
|
||||
|
||||
class FeedlySetUnreadArticlesOperationTests: XCTestCase {
|
||||
|
||||
private var account: Account!
|
||||
private let support = FeedlyTestSupport()
|
||||
|
||||
override func setUp() {
|
||||
super.setUp()
|
||||
account = support.makeTestAccount()
|
||||
}
|
||||
|
||||
override func tearDown() {
|
||||
if let account = account {
|
||||
support.destroy(account)
|
||||
}
|
||||
super.tearDown()
|
||||
}
|
||||
|
||||
// MARK: - Ensuring Unread Status By Id
|
||||
|
||||
struct TestUnreadArticleIdProvider: FeedlyUnreadEntryIdProviding {
|
||||
var entryIds: Set<String>
|
||||
}
|
||||
|
||||
func testEmptyArticleIds() {
|
||||
let testIds = Set<String>()
|
||||
let provider = TestUnreadArticleIdProvider(entryIds: testIds)
|
||||
|
||||
let setUnread = FeedlySetUnreadArticlesOperation(account: account, allUnreadIdsProvider: provider, log: support.log)
|
||||
|
||||
let completionExpectation = expectation(description: "Did Finish")
|
||||
setUnread.completionBlock = {
|
||||
completionExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(setUnread)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
|
||||
let fetchIdsExpectation = expectation(description: "Fetched Articles Ids")
|
||||
account.fetchUnreadArticleIDs { accountArticlesIDs in
|
||||
XCTAssertTrue(accountArticlesIDs.isEmpty)
|
||||
XCTAssertEqual(accountArticlesIDs.count, testIds.count)
|
||||
fetchIdsExpectation.fulfill()
|
||||
}
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
}
|
||||
|
||||
func testSetOneArticleIdUnread() {
|
||||
let testIds = Set<String>(["feed/0/article/0"])
|
||||
let provider = TestUnreadArticleIdProvider(entryIds: testIds)
|
||||
|
||||
let setUnread = FeedlySetUnreadArticlesOperation(account: account, allUnreadIdsProvider: provider, log: support.log)
|
||||
|
||||
let completionExpectation = expectation(description: "Did Finish")
|
||||
setUnread.completionBlock = {
|
||||
completionExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(setUnread)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
|
||||
let fetchIdsExpectation = expectation(description: "Fetched Articles Ids")
|
||||
account.fetchUnreadArticleIDs { accountArticlesIDs in
|
||||
XCTAssertEqual(accountArticlesIDs.count, testIds.count)
|
||||
fetchIdsExpectation.fulfill()
|
||||
}
|
||||
waitForExpectations(timeout: 2)
|
||||
}
|
||||
|
||||
func testSetManyArticleIdsUnread() {
|
||||
let testIds = Set<String>((0..<10_000).map { "feed/0/article/\($0)" })
|
||||
let provider = TestUnreadArticleIdProvider(entryIds: testIds)
|
||||
|
||||
let setUnread = FeedlySetUnreadArticlesOperation(account: account, allUnreadIdsProvider: provider, log: support.log)
|
||||
|
||||
let completionExpectation = expectation(description: "Did Finish")
|
||||
setUnread.completionBlock = {
|
||||
completionExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(setUnread)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
|
||||
let fetchIdsExpectation = expectation(description: "Fetched Articles Ids")
|
||||
account.fetchUnreadArticleIDs { accountArticlesIDs in
|
||||
XCTAssertEqual(accountArticlesIDs.count, testIds.count)
|
||||
fetchIdsExpectation.fulfill()
|
||||
}
|
||||
waitForExpectations(timeout: 2)
|
||||
}
|
||||
|
||||
func testSetSomeArticleIdsRead() {
|
||||
let initialUnreadIds = Set<String>((0..<1000).map { "feed/0/article/\($0)" })
|
||||
|
||||
do {
|
||||
let provider = TestUnreadArticleIdProvider(entryIds: initialUnreadIds)
|
||||
let setUnread = FeedlySetUnreadArticlesOperation(account: account, allUnreadIdsProvider: provider, log: support.log)
|
||||
|
||||
let completionExpectation = expectation(description: "Did Finish Setting Initial Unreads")
|
||||
setUnread.completionBlock = {
|
||||
completionExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(setUnread)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
}
|
||||
|
||||
let remainingUnreadIds = Set(initialUnreadIds.enumerated().filter { $0.offset % 2 > 0 }.map { $0.element })
|
||||
let provider = TestUnreadArticleIdProvider(entryIds: remainingUnreadIds)
|
||||
let setUnread = FeedlySetUnreadArticlesOperation(account: account, allUnreadIdsProvider: provider, log: support.log)
|
||||
|
||||
let completionExpectation = expectation(description: "Did Finish")
|
||||
setUnread.completionBlock = {
|
||||
completionExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(setUnread)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
|
||||
let fetchIdsExpectation = expectation(description: "Fetched Articles Ids")
|
||||
account.fetchUnreadArticleIDs { remainingAccountArticlesIDs in
|
||||
XCTAssertEqual(remainingAccountArticlesIDs, remainingUnreadIds)
|
||||
fetchIdsExpectation.fulfill()
|
||||
}
|
||||
waitForExpectations(timeout: 2)
|
||||
}
|
||||
|
||||
func testSetAllArticleIdsRead() {
|
||||
let initialUnreadIds = Set<String>((0..<1000).map { "feed/0/article/\($0)" })
|
||||
|
||||
do {
|
||||
let provider = TestUnreadArticleIdProvider(entryIds: initialUnreadIds)
|
||||
let setUnread = FeedlySetUnreadArticlesOperation(account: account, allUnreadIdsProvider: provider, log: support.log)
|
||||
|
||||
let completionExpectation = expectation(description: "Did Finish Setting Initial Unreads")
|
||||
setUnread.completionBlock = {
|
||||
completionExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(setUnread)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
}
|
||||
|
||||
let remainingUnreadIds = Set<String>()
|
||||
let provider = TestUnreadArticleIdProvider(entryIds: remainingUnreadIds)
|
||||
let setUnread = FeedlySetUnreadArticlesOperation(account: account, allUnreadIdsProvider: provider, log: support.log)
|
||||
|
||||
let completionExpectation = expectation(description: "Did Finish")
|
||||
setUnread.completionBlock = {
|
||||
completionExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(setUnread)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
|
||||
let fetchIdsExpectation = expectation(description: "Fetched Articles Ids")
|
||||
account.fetchUnreadArticleIDs { remainingAccountArticlesIDs in
|
||||
XCTAssertEqual(remainingAccountArticlesIDs, remainingUnreadIds)
|
||||
fetchIdsExpectation.fulfill()
|
||||
}
|
||||
waitForExpectations(timeout: 2)
|
||||
}
|
||||
|
||||
// MARK: - Updating Article Unread Status
|
||||
|
||||
struct TestItemsByFeedProvider: FeedlyParsedItemsByFeedProviding {
|
||||
var providerName: String
|
||||
var parsedItemsKeyedByFeedId: [String: Set<ParsedItem>]
|
||||
}
|
||||
|
||||
func testSetAllArticlesUnread() {
|
||||
let testItemsAndFeeds = support.makeParsedItemTestDataFor(numberOfFeeds: 5, numberOfItemsInFeeds: 100)
|
||||
|
||||
do {
|
||||
let resource = FeedlyCategoryResourceId(id: "user/12345/category/6789")
|
||||
let provider = TestItemsByFeedProvider(providerName: resource.id, parsedItemsKeyedByFeedId: testItemsAndFeeds)
|
||||
let update = FeedlyUpdateAccountFeedsWithItemsOperation(account: account, organisedItemsProvider: provider, log: support.log)
|
||||
|
||||
let completionExpectation = expectation(description: "Did Finish")
|
||||
update.completionBlock = {
|
||||
completionExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(update)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
}
|
||||
|
||||
let testItems = Set(testItemsAndFeeds.flatMap { $0.value })
|
||||
let remainingUnreadIds = Set(testItems.compactMap { $0.syncServiceID })
|
||||
XCTAssertEqual(testItems.count, remainingUnreadIds.count, "Not every item has a value for \(\ParsedItem.syncServiceID).")
|
||||
|
||||
let provider = TestUnreadArticleIdProvider(entryIds: remainingUnreadIds)
|
||||
let setUnread = FeedlySetUnreadArticlesOperation(account: account, allUnreadIdsProvider: provider, log: support.log)
|
||||
|
||||
let completionExpectation = expectation(description: "Did Finish")
|
||||
setUnread.completionBlock = {
|
||||
completionExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(setUnread)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
|
||||
let fetchIdsExpectation = expectation(description: "Fetched Articles Ids")
|
||||
account.fetchUnreadArticleIDs { accountArticlesIDs in
|
||||
XCTAssertEqual(accountArticlesIDs, remainingUnreadIds)
|
||||
let idsOfUnreadArticles = Set(self.account
|
||||
.fetchArticles(.articleIDs(remainingUnreadIds))
|
||||
.filter { $0.status.boolStatus(forKey: .read) == false }
|
||||
.map { $0.articleID })
|
||||
|
||||
XCTAssertEqual(idsOfUnreadArticles, remainingUnreadIds)
|
||||
fetchIdsExpectation.fulfill()
|
||||
}
|
||||
waitForExpectations(timeout: 2)
|
||||
}
|
||||
|
||||
func testSetManyArticlesUnread() {
|
||||
let testItemsAndFeeds = support.makeParsedItemTestDataFor(numberOfFeeds: 5, numberOfItemsInFeeds: 100)
|
||||
|
||||
do {
|
||||
let resource = FeedlyCategoryResourceId(id: "user/12345/category/6789")
|
||||
let provider = TestItemsByFeedProvider(providerName: resource.id, parsedItemsKeyedByFeedId: testItemsAndFeeds)
|
||||
let update = FeedlyUpdateAccountFeedsWithItemsOperation(account: account, organisedItemsProvider: provider, log: support.log)
|
||||
|
||||
let completionExpectation = expectation(description: "Did Finish")
|
||||
update.completionBlock = {
|
||||
completionExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(update)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
}
|
||||
|
||||
let testItems = Set(testItemsAndFeeds.flatMap { $0.value })
|
||||
let unreadItems = testItems
|
||||
.enumerated()
|
||||
.filter { $0.offset % 2 > 0 }
|
||||
.map { $0.element }
|
||||
|
||||
let remainingUnreadIds = Set(unreadItems.compactMap { $0.syncServiceID })
|
||||
XCTAssertEqual(unreadItems.count, remainingUnreadIds.count, "Not every item has a value for \(\ParsedItem.syncServiceID).")
|
||||
|
||||
let provider = TestUnreadArticleIdProvider(entryIds: remainingUnreadIds)
|
||||
let setUnread = FeedlySetUnreadArticlesOperation(account: account, allUnreadIdsProvider: provider, log: support.log)
|
||||
|
||||
let completionExpectation = expectation(description: "Did Finish")
|
||||
setUnread.completionBlock = {
|
||||
completionExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(setUnread)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
|
||||
let fetchIdsExpectation = expectation(description: "Fetched Articles Ids")
|
||||
account.fetchUnreadArticleIDs { accountArticlesIDs in
|
||||
XCTAssertEqual(accountArticlesIDs, remainingUnreadIds)
|
||||
|
||||
let idsOfUnreadArticles = Set(self.account
|
||||
.fetchArticles(.articleIDs(remainingUnreadIds))
|
||||
.filter { $0.status.boolStatus(forKey: .read) == false }
|
||||
.map { $0.articleID })
|
||||
|
||||
XCTAssertEqual(idsOfUnreadArticles, remainingUnreadIds)
|
||||
fetchIdsExpectation.fulfill()
|
||||
}
|
||||
}
|
||||
|
||||
func testSetOneArticleUnread() {
|
||||
let testItemsAndFeeds = support.makeParsedItemTestDataFor(numberOfFeeds: 5, numberOfItemsInFeeds: 100)
|
||||
|
||||
do {
|
||||
let resource = FeedlyCategoryResourceId(id: "user/12345/category/6789")
|
||||
let provider = TestItemsByFeedProvider(providerName: resource.id, parsedItemsKeyedByFeedId: testItemsAndFeeds)
|
||||
let update = FeedlyUpdateAccountFeedsWithItemsOperation(account: account, organisedItemsProvider: provider, log: support.log)
|
||||
|
||||
let completionExpectation = expectation(description: "Did Finish")
|
||||
update.completionBlock = {
|
||||
completionExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(update)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
}
|
||||
|
||||
let testItems = Set(testItemsAndFeeds.flatMap { $0.value })
|
||||
// Since the test data is completely under the developer's control, not having at least one can be a programmer error.
|
||||
let remainingUnreadIds = Set([testItems.compactMap { $0.syncServiceID }.first!])
|
||||
let provider = TestUnreadArticleIdProvider(entryIds: remainingUnreadIds)
|
||||
let setUnread = FeedlySetUnreadArticlesOperation(account: account, allUnreadIdsProvider: provider, log: support.log)
|
||||
|
||||
let completionExpectation = expectation(description: "Did Finish")
|
||||
setUnread.completionBlock = {
|
||||
completionExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(setUnread)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
|
||||
let fetchIdsExpectation = expectation(description: "Fetched Articles Ids")
|
||||
account.fetchUnreadArticleIDs { accountArticlesIDs in
|
||||
XCTAssertEqual(accountArticlesIDs, remainingUnreadIds)
|
||||
|
||||
let idsOfUnreadArticles = Set(self.account
|
||||
.fetchArticles(.articleIDs(remainingUnreadIds))
|
||||
.filter { $0.status.boolStatus(forKey: .read) == false }
|
||||
.map { $0.articleID })
|
||||
|
||||
XCTAssertEqual(idsOfUnreadArticles, remainingUnreadIds)
|
||||
fetchIdsExpectation.fulfill()
|
||||
}
|
||||
waitForExpectations(timeout: 2)
|
||||
}
|
||||
|
||||
func testSetNoArticlesRead() {
|
||||
let testItemsAndFeeds = support.makeParsedItemTestDataFor(numberOfFeeds: 5, numberOfItemsInFeeds: 100)
|
||||
|
||||
do {
|
||||
let resource = FeedlyCategoryResourceId(id: "user/12345/category/6789")
|
||||
let provider = TestItemsByFeedProvider(providerName: resource.id, parsedItemsKeyedByFeedId: testItemsAndFeeds)
|
||||
|
||||
let update = FeedlyUpdateAccountFeedsWithItemsOperation(account: account, organisedItemsProvider: provider, log: support.log)
|
||||
|
||||
let completionExpectation = expectation(description: "Did Finish")
|
||||
update.completionBlock = {
|
||||
completionExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(update)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
}
|
||||
|
||||
let remainingUnreadIds = Set<String>()
|
||||
let provider = TestUnreadArticleIdProvider(entryIds: remainingUnreadIds)
|
||||
let setUnread = FeedlySetUnreadArticlesOperation(account: account, allUnreadIdsProvider: provider, log: support.log)
|
||||
|
||||
let completionExpectation = expectation(description: "Did Finish")
|
||||
setUnread.completionBlock = {
|
||||
completionExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(setUnread)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
|
||||
let fetchIdsExpectation = expectation(description: "Fetched Articles Ids")
|
||||
account.fetchUnreadArticleIDs { accountArticlesIDs in
|
||||
XCTAssertEqual(accountArticlesIDs, remainingUnreadIds)
|
||||
|
||||
let idsOfUnreadArticles = Set(self.account
|
||||
.fetchArticles(.articleIDs(remainingUnreadIds))
|
||||
.filter { $0.status.boolStatus(forKey: .read) == false }
|
||||
.map { $0.articleID })
|
||||
|
||||
XCTAssertEqual(idsOfUnreadArticles, remainingUnreadIds)
|
||||
fetchIdsExpectation.fulfill()
|
||||
}
|
||||
}
|
||||
|
||||
func testSetAllArticlesAndArticleIdsWithSomeArticlesIngested() {
|
||||
let testItemsAndFeeds = support.makeParsedItemTestDataFor(numberOfFeeds: 5, numberOfItemsInFeeds: 100)
|
||||
let someItemsAndFeeds = Dictionary(uniqueKeysWithValues: testItemsAndFeeds.enumerated().filter { $0.offset % 2 > 0 }.map { $0.element })
|
||||
|
||||
do {
|
||||
let resource = FeedlyCategoryResourceId(id: "user/12345/category/6789")
|
||||
let provider = TestItemsByFeedProvider(providerName: resource.id, parsedItemsKeyedByFeedId: someItemsAndFeeds)
|
||||
let update = FeedlyUpdateAccountFeedsWithItemsOperation(account: account, organisedItemsProvider: provider, log: support.log)
|
||||
|
||||
let completionExpectation = expectation(description: "Did Finish")
|
||||
update.completionBlock = {
|
||||
completionExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(update)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
}
|
||||
|
||||
let testItems = Set(testItemsAndFeeds.flatMap { $0.value })
|
||||
let remainingUnreadIds = Set(testItems.compactMap { $0.syncServiceID })
|
||||
XCTAssertEqual(testItems.count, remainingUnreadIds.count, "Not every item has a value for \(\ParsedItem.syncServiceID).")
|
||||
|
||||
let provider = TestUnreadArticleIdProvider(entryIds: remainingUnreadIds)
|
||||
let setUnread = FeedlySetUnreadArticlesOperation(account: account, allUnreadIdsProvider: provider, log: support.log)
|
||||
|
||||
let completionExpectation = expectation(description: "Did Finish")
|
||||
setUnread.completionBlock = {
|
||||
completionExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(setUnread)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
|
||||
let fetchIdsExpectation = expectation(description: "Fetched Articles Ids")
|
||||
account.fetchUnreadArticleIDs { accountArticlesIDs in
|
||||
XCTAssertEqual(accountArticlesIDs, remainingUnreadIds)
|
||||
|
||||
let someTestItems = Set(someItemsAndFeeds.flatMap { $0.value })
|
||||
let someRemainingUnreadIdsOfIngestedArticles = Set(someTestItems.compactMap { $0.syncServiceID })
|
||||
let idsOfUnreadArticles = Set(self.account
|
||||
.fetchArticles(.articleIDs(someRemainingUnreadIdsOfIngestedArticles))
|
||||
.filter { $0.status.boolStatus(forKey: .read) == false }
|
||||
.map { $0.articleID })
|
||||
|
||||
XCTAssertEqual(idsOfUnreadArticles, someRemainingUnreadIdsOfIngestedArticles)
|
||||
fetchIdsExpectation.fulfill()
|
||||
}
|
||||
}
|
||||
}
|
|
@ -9,6 +9,7 @@
|
|||
import XCTest
|
||||
@testable import Account
|
||||
import RSWeb
|
||||
import RSCore
|
||||
|
||||
class FeedlySyncAllOperationTests: XCTestCase {
|
||||
|
||||
|
@ -57,29 +58,35 @@ class FeedlySyncAllOperationTests: XCTestCase {
|
|||
getGlobalStreamContents.getStreamContentsExpectation = expectation(description: "Get Contents of global.all")
|
||||
getGlobalStreamContents.getStreamContentsExpectation?.isInverted = true
|
||||
|
||||
let getStarredContents = TestGetStreamContentsService()
|
||||
getStarredContents.getStreamContentsExpectation = expectation(description: "Get Contents of global.saved")
|
||||
getStarredContents.getStreamContentsExpectation?.isInverted = true
|
||||
let getStarredIds = TestGetStreamIdsService()
|
||||
getStarredIds.getStreamIdsExpectation = expectation(description: "Get Ids of global.saved")
|
||||
getStarredIds.getStreamIdsExpectation?.isInverted = true
|
||||
|
||||
let getEntriesService = TestGetEntriesService()
|
||||
getEntriesService.getEntriesExpectation = expectation(description: "Get Entries")
|
||||
getEntriesService.getEntriesExpectation?.isInverted = true
|
||||
|
||||
let progress = DownloadProgress(numberOfTasks: 0)
|
||||
let _ = expectationForCompletion(of: progress)
|
||||
|
||||
let container = support.makeTestDatabaseContainer()
|
||||
let syncAll = FeedlySyncAllOperation(account: account,
|
||||
credentials: support.accessToken,
|
||||
lastSuccessfulFetchStartDate: nil,
|
||||
markArticlesService: markArticlesService,
|
||||
getUnreadService: getStreamIdsService,
|
||||
getCollectionsService: getCollectionsService,
|
||||
getStreamContentsService: getGlobalStreamContents,
|
||||
getStarredArticlesService: getStarredContents,
|
||||
database: container.database,
|
||||
downloadProgress: progress,
|
||||
log: support.log)
|
||||
credentials: support.accessToken,
|
||||
lastSuccessfulFetchStartDate: nil,
|
||||
markArticlesService: markArticlesService,
|
||||
getUnreadService: getStreamIdsService,
|
||||
getCollectionsService: getCollectionsService,
|
||||
getStreamContentsService: getGlobalStreamContents,
|
||||
getStarredService: getStarredIds,
|
||||
getStreamIdsService: getStreamIdsService,
|
||||
getEntriesService: getEntriesService,
|
||||
database: container.database,
|
||||
downloadProgress: progress,
|
||||
log: support.log)
|
||||
|
||||
// If this expectation is not fulfilled, the operation is not calling `didFinish`.
|
||||
let completionExpectation = expectation(description: "Did Finish")
|
||||
syncAll.completionBlock = {
|
||||
syncAll.completionBlock = { _ in
|
||||
completionExpectation.fulfill()
|
||||
}
|
||||
|
||||
|
@ -96,7 +103,7 @@ class FeedlySyncAllOperationTests: XCTestCase {
|
|||
syncCompletionExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(syncAll)
|
||||
MainThreadOperationQueue.shared.addOperation(syncAll)
|
||||
|
||||
XCTAssertTrue(progress.numberOfTasks > 1)
|
||||
|
||||
|
@ -114,18 +121,18 @@ class FeedlySyncAllOperationTests: XCTestCase {
|
|||
return caller
|
||||
}()
|
||||
|
||||
func testSyncing() {
|
||||
func testSyncing() throws {
|
||||
performInitialSync()
|
||||
verifyInitialSync()
|
||||
try verifyInitialSync()
|
||||
|
||||
performChangeStatuses()
|
||||
verifyChangeStatuses()
|
||||
try verifyChangeStatuses()
|
||||
|
||||
performChangeStatusesAgain()
|
||||
verifyChangeStatusesAgain()
|
||||
try verifyChangeStatusesAgain()
|
||||
|
||||
performAddFeedsAndFolders()
|
||||
verifyAddFeedsAndFolders()
|
||||
try verifyAddFeedsAndFolders()
|
||||
}
|
||||
|
||||
// MARK: 1 - Initial Sync
|
||||
|
@ -149,11 +156,11 @@ class FeedlySyncAllOperationTests: XCTestCase {
|
|||
|
||||
// If this expectation is not fulfilled, the operation is not calling `didFinish`.
|
||||
let completionExpectation = expectation(description: "Did Finish")
|
||||
syncAll.completionBlock = {
|
||||
syncAll.completionBlock = { _ in
|
||||
completionExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(syncAll)
|
||||
MainThreadOperationQueue.shared.addOperation(syncAll)
|
||||
|
||||
XCTAssertTrue(progress.numberOfTasks > 1)
|
||||
|
||||
|
@ -166,15 +173,15 @@ class FeedlySyncAllOperationTests: XCTestCase {
|
|||
loadMockData(inSubdirectoryNamed: "feedly-1-initial")
|
||||
}
|
||||
|
||||
func verifyInitialSync() {
|
||||
func verifyInitialSync() throws {
|
||||
let subdirectory = "feedly-1-initial"
|
||||
support.checkFoldersAndFeeds(in: account, againstCollectionsAndFeedsInJSONNamed: "collections", subdirectory: subdirectory)
|
||||
support.checkArticles(in: account, againstItemsInStreamInJSONNamed: "global.all", subdirectory: subdirectory)
support.checkArticles(in: account, againstItemsInStreamInJSONNamed: "global.all@MTZkOTdkZWQ1NzM6NTE2OjUzYjgyNmEy", subdirectory: subdirectory)
try support.checkArticles(in: account, againstItemsInStreamInJSONNamed: "global.all", subdirectory: subdirectory)
try support.checkArticles(in: account, againstItemsInStreamInJSONNamed: "global.all@MTZkOTdkZWQ1NzM6NTE2OjUzYjgyNmEy", subdirectory: subdirectory)
support.checkUnreadStatuses(in: account, againstIdsInStreamInJSONNamed: "unreadIds", subdirectory: subdirectory, testCase: self)
support.checkUnreadStatuses(in: account, againstIdsInStreamInJSONNamed: "unreadIds@MTZkOTRhOTNhZTQ6MzExOjUzYjgyNmEy", subdirectory: subdirectory, testCase: self)
support.checkStarredStatuses(in: account, againstItemsInStreamInJSONNamed: "starred", subdirectory: subdirectory, testCase: self)
support.checkArticles(in: account, againstItemsInStreamInJSONNamed: "starred", subdirectory: subdirectory)
try support.checkArticles(in: account, againstItemsInStreamInJSONNamed: "starred", subdirectory: subdirectory)
}

// MARK: 2 - Change Statuses

@@ -183,14 +190,14 @@ class FeedlySyncAllOperationTests: XCTestCase {
loadMockData(inSubdirectoryNamed: "feedly-2-changestatuses")
}

func verifyChangeStatuses() {
func verifyChangeStatuses() throws {
let subdirectory = "feedly-2-changestatuses"
support.checkFoldersAndFeeds(in: account, againstCollectionsAndFeedsInJSONNamed: "collections", subdirectory: subdirectory)
support.checkArticles(in: account, againstItemsInStreamInJSONNamed: "global.all", subdirectory: subdirectory)
try support.checkArticles(in: account, againstItemsInStreamInJSONNamed: "global.all", subdirectory: subdirectory)
support.checkUnreadStatuses(in: account, againstIdsInStreamInJSONNamed: "unreadIds", subdirectory: subdirectory, testCase: self)
support.checkUnreadStatuses(in: account, againstIdsInStreamInJSONNamed: "unreadIds@MTZkOTJkNjIwM2Q6MTEzYjpkNDUwNjA3MQ==", subdirectory: subdirectory, testCase: self)
support.checkStarredStatuses(in: account, againstItemsInStreamInJSONNamed: "starred", subdirectory: subdirectory, testCase: self)
support.checkArticles(in: account, againstItemsInStreamInJSONNamed: "starred", subdirectory: subdirectory)
try support.checkArticles(in: account, againstItemsInStreamInJSONNamed: "starred", subdirectory: subdirectory)
}

// MARK: 3 - Change Statuses Again

@@ -199,14 +206,14 @@ class FeedlySyncAllOperationTests: XCTestCase {
loadMockData(inSubdirectoryNamed: "feedly-3-changestatusesagain")
}

func verifyChangeStatusesAgain() {
func verifyChangeStatusesAgain() throws {
let subdirectory = "feedly-3-changestatusesagain"
support.checkFoldersAndFeeds(in: account, againstCollectionsAndFeedsInJSONNamed: "collections", subdirectory: subdirectory)
support.checkArticles(in: account, againstItemsInStreamInJSONNamed: "global.all", subdirectory: subdirectory)
try support.checkArticles(in: account, againstItemsInStreamInJSONNamed: "global.all", subdirectory: subdirectory)
support.checkUnreadStatuses(in: account, againstIdsInStreamInJSONNamed: "unreadIds", subdirectory: subdirectory, testCase: self)
support.checkUnreadStatuses(in: account, againstIdsInStreamInJSONNamed: "unreadIds@MTZkOGRlMjVmM2M6M2YyOmQ0NTA2MDcx", subdirectory: subdirectory, testCase: self)
support.checkStarredStatuses(in: account, againstItemsInStreamInJSONNamed: "starred", subdirectory: subdirectory, testCase: self)
support.checkArticles(in: account, againstItemsInStreamInJSONNamed: "starred", subdirectory: subdirectory)
try support.checkArticles(in: account, againstItemsInStreamInJSONNamed: "starred", subdirectory: subdirectory)
}

// MARK: 4 - Add Feeds and Folders

@@ -215,14 +222,14 @@ class FeedlySyncAllOperationTests: XCTestCase {
loadMockData(inSubdirectoryNamed: "feedly-4-addfeedsandfolders")
}

func verifyAddFeedsAndFolders() {
func verifyAddFeedsAndFolders() throws {
let subdirectory = "feedly-4-addfeedsandfolders"
support.checkFoldersAndFeeds(in: account, againstCollectionsAndFeedsInJSONNamed: "collections", subdirectory: subdirectory)
support.checkArticles(in: account, againstItemsInStreamInJSONNamed: "global.all", subdirectory: subdirectory)
try support.checkArticles(in: account, againstItemsInStreamInJSONNamed: "global.all", subdirectory: subdirectory)
support.checkUnreadStatuses(in: account, againstIdsInStreamInJSONNamed: "unreadIds", subdirectory: subdirectory, testCase: self)
support.checkUnreadStatuses(in: account, againstIdsInStreamInJSONNamed: "unreadIds@MTZkOTE3YTRlMzQ6YWZjOmQ0NTA2MDcx", subdirectory: subdirectory, testCase: self)
support.checkStarredStatuses(in: account, againstItemsInStreamInJSONNamed: "starred", subdirectory: subdirectory, testCase: self)
support.checkArticles(in: account, againstItemsInStreamInJSONNamed: "starred", subdirectory: subdirectory)
try support.checkArticles(in: account, againstItemsInStreamInJSONNamed: "starred", subdirectory: subdirectory)
}

// MARK: 5 - Remove Feeds and Folders

@@ -231,14 +238,14 @@ class FeedlySyncAllOperationTests: XCTestCase {
loadMockData(inSubdirectoryNamed: "feedly-5-removefeedsandfolders")
}

func verifyRemoveFeedsAndFolders() {
func verifyRemoveFeedsAndFolders() throws {
let subdirectory = "feedly-5-removefeedsandfolders"
support.checkFoldersAndFeeds(in: account, againstCollectionsAndFeedsInJSONNamed: "collections", subdirectory: subdirectory)
support.checkArticles(in: account, againstItemsInStreamInJSONNamed: "global.all", subdirectory: subdirectory)
try support.checkArticles(in: account, againstItemsInStreamInJSONNamed: "global.all", subdirectory: subdirectory)
support.checkUnreadStatuses(in: account, againstIdsInStreamInJSONNamed: "unreadIds", subdirectory: subdirectory, testCase: self)
support.checkUnreadStatuses(in: account, againstIdsInStreamInJSONNamed: "unreadIds@MTZkOGRlMjVmM2M6M2YxOmQ0NTA2MDcx", subdirectory: subdirectory, testCase: self)
support.checkStarredStatuses(in: account, againstItemsInStreamInJSONNamed: "starred", subdirectory: subdirectory, testCase: self)
support.checkArticles(in: account, againstItemsInStreamInJSONNamed: "starred", subdirectory: subdirectory)
try support.checkArticles(in: account, againstItemsInStreamInJSONNamed: "starred", subdirectory: subdirectory)
}

// MARK: Downloading Test Data

@@ -260,13 +267,13 @@ class FeedlySyncAllOperationTests: XCTestCase {

// If this expectation is not fulfilled, the operation is not calling `didFinish`.
let completionExpectation = expectation(description: "Did Finish")
syncAll.completionBlock = {
syncAll.completionBlock = { _ in
completionExpectation.fulfill()
}

lastSuccessfulFetchStartDate = Date()

OperationQueue.main.addOperation(syncAll)
MainThreadOperationQueue.shared.addOperation(syncAll)

XCTAssertTrue(progress.numberOfTasks > 1)
@ -1,174 +0,0 @@
|
|||
//
|
||||
// FeedlySyncStarredArticlesOperationTests.swift
|
||||
// AccountTests
|
||||
//
|
||||
// Created by Kiel Gillard on 28/10/19.
|
||||
// Copyright © 2019 Ranchero Software, LLC. All rights reserved.
|
||||
//
|
||||
|
||||
import XCTest
|
||||
@testable import Account
|
||||
|
||||
class FeedlySyncStarredArticlesOperationTests: XCTestCase {
|
||||
|
||||
private var account: Account!
|
||||
private let support = FeedlyTestSupport()
|
||||
|
||||
override func setUp() {
|
||||
super.setUp()
|
||||
account = support.makeTestAccount()
|
||||
}
|
||||
|
||||
override func tearDown() {
|
||||
if let account = account {
|
||||
support.destroy(account)
|
||||
}
|
||||
super.tearDown()
|
||||
}
|
||||
|
||||
func testIngestsOnePageSuccess() {
|
||||
let service = TestGetStreamContentsService()
|
||||
let resource = FeedlyCategoryResourceId(id: "user/1234/category/5678")
|
||||
let items = service.makeMockFeedlyEntryItem()
|
||||
service.mockResult = .success(FeedlyStream(id: resource.id, updated: nil, continuation: nil, items: items))
|
||||
|
||||
let getStreamContentsExpectation = expectation(description: "Did Get Page of Stream Contents")
|
||||
getStreamContentsExpectation.expectedFulfillmentCount = 1
|
||||
|
||||
service.getStreamContentsExpectation = getStreamContentsExpectation
|
||||
service.parameterTester = { serviceResource, continuation, serviceNewerThan, serviceUnreadOnly in
|
||||
XCTAssertEqual(serviceResource.id, resource.id)
|
||||
XCTAssertNil(serviceNewerThan)
|
||||
XCTAssertNil(continuation)
|
||||
XCTAssertNil(serviceUnreadOnly)
|
||||
}
|
||||
|
||||
let syncStarred = FeedlySyncStarredArticlesOperation(account: account, resource: resource, service: service, log: support.log)
|
||||
|
||||
let completionExpectation = expectation(description: "Did Finish")
|
||||
syncStarred.completionBlock = {
|
||||
completionExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(syncStarred)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
|
||||
let expectedArticleIds = Set(items.map { $0.id })
|
||||
let fetchIdsExpectation = expectation(description: "Fetch Article Ids")
|
||||
account.fetchStarredArticleIDs { starredArticleIds in
|
||||
let missingIds = expectedArticleIds.subtracting(starredArticleIds)
|
||||
XCTAssertTrue(missingIds.isEmpty, "These article ids were not marked as starred.")
|
||||
|
||||
// Fetch articles directly because account.fetchArticles(.starred) fetches starred articles for feeds subscribed to.
|
||||
let expectedArticles = self.account.fetchArticles(.articleIDs(expectedArticleIds))
|
||||
XCTAssertEqual(expectedArticles.count, expectedArticleIds.count, "Did not fetch all the articles.")
|
||||
|
||||
let starredArticles = self.account.fetchArticles(.articleIDs(starredArticleIds))
|
||||
XCTAssertEqual(expectedArticleIds.count, expectedArticles.count)
|
||||
let missingArticles = expectedArticles.subtracting(starredArticles)
|
||||
XCTAssertTrue(missingArticles.isEmpty, "These articles should be starred and fetched.")
|
||||
XCTAssertEqual(expectedArticles, starredArticles)
|
||||
|
||||
fetchIdsExpectation.fulfill()
|
||||
}
|
||||
waitForExpectations(timeout: 2)
|
||||
}
|
||||
|
||||
func testIngestsOnePageFailure() {
|
||||
let service = TestGetStreamContentsService()
|
||||
let resource = FeedlyCategoryResourceId(id: "user/1234/category/5678")
|
||||
|
||||
service.mockResult = .failure(URLError(.timedOut))
|
||||
|
||||
let getStreamContentsExpectation = expectation(description: "Did Get Page of Stream Contents")
|
||||
getStreamContentsExpectation.expectedFulfillmentCount = 1
|
||||
|
||||
service.getStreamContentsExpectation = getStreamContentsExpectation
|
||||
service.parameterTester = { serviceResource, continuation, serviceNewerThan, serviceUnreadOnly in
|
||||
XCTAssertEqual(serviceResource.id, resource.id)
|
||||
XCTAssertNil(serviceNewerThan)
|
||||
XCTAssertNil(continuation)
|
||||
XCTAssertNil(serviceUnreadOnly)
|
||||
}
|
||||
|
||||
let syncStarred = FeedlySyncStarredArticlesOperation(account: account, resource: resource, service: service, log: support.log)
|
||||
|
||||
let completionExpectation = expectation(description: "Did Finish")
|
||||
syncStarred.completionBlock = {
|
||||
completionExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(syncStarred)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
|
||||
let fetchIdsExpectation = expectation(description: "Fetch Article Ids")
|
||||
account.fetchStarredArticleIDs { starredArticleIds in
|
||||
XCTAssertTrue(starredArticleIds.isEmpty)
|
||||
fetchIdsExpectation.fulfill()
|
||||
}
|
||||
waitForExpectations(timeout: 2)
|
||||
}
|
||||
|
||||
func testIngestsManyPagesSuccess() {
|
||||
let service = TestGetPagedStreamContentsService()
|
||||
let resource = FeedlyCategoryResourceId(id: "user/1234/category/5678")
|
||||
|
||||
let continuations = (1...10).map { "\($0)" }
|
||||
service.addAtLeastOnePage(for: resource, continuations: continuations, numberOfEntriesPerPage: 10)
|
||||
|
||||
let getStreamContentsExpectation = expectation(description: "Did Get Page of Stream Contents")
|
||||
getStreamContentsExpectation.expectedFulfillmentCount = 1 + continuations.count
|
||||
|
||||
var remainingContinuations = Set(continuations)
|
||||
let getStreamPageExpectation = expectation(description: "Did Request Page")
|
||||
getStreamPageExpectation.expectedFulfillmentCount = 1 + continuations.count
|
||||
|
||||
service.getStreamContentsExpectation = getStreamContentsExpectation
|
||||
service.parameterTester = { serviceResource, continuation, serviceNewerThan, serviceUnreadOnly in
|
||||
XCTAssertEqual(serviceResource.id, resource.id)
|
||||
XCTAssertNil(serviceNewerThan)
|
||||
XCTAssertNil(serviceUnreadOnly)
|
||||
|
||||
if let continuation = continuation {
|
||||
XCTAssertTrue(remainingContinuations.contains(continuation))
|
||||
remainingContinuations.remove(continuation)
|
||||
}
|
||||
|
||||
getStreamPageExpectation.fulfill()
|
||||
}
|
||||
|
||||
let syncStarred = FeedlySyncStarredArticlesOperation(account: account, resource: resource, service: service, log: support.log)
|
||||
|
||||
let completionExpectation = expectation(description: "Did Finish")
|
||||
syncStarred.completionBlock = {
|
||||
completionExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(syncStarred)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
|
||||
// Find articles inserted.
|
||||
let expectedArticleIds = Set(service.pages.values.map { $0.items }.flatMap { $0 }.map { $0.id })
|
||||
let fetchIdsExpectation = expectation(description: "Fetch Article Ids")
|
||||
account.fetchStarredArticleIDs { starredArticleIds in
|
||||
let missingIds = expectedArticleIds.subtracting(starredArticleIds)
|
||||
XCTAssertTrue(missingIds.isEmpty, "These article ids were not marked as starred.")
|
||||
|
||||
// Fetch articles directly because account.fetchArticles(.starred) fetches starred articles for feeds subscribed to.
|
||||
let expectedArticles = self.account.fetchArticles(.articleIDs(expectedArticleIds))
|
||||
XCTAssertEqual(expectedArticles.count, expectedArticleIds.count, "Did not fetch all the articles.")
|
||||
|
||||
let starredArticles = self.account.fetchArticles(.articleIDs(starredArticleIds))
|
||||
XCTAssertEqual(expectedArticleIds.count, expectedArticles.count)
|
||||
let missingArticles = expectedArticles.subtracting(starredArticles)
|
||||
XCTAssertTrue(missingArticles.isEmpty, "These articles should be starred and fetched.")
|
||||
XCTAssertEqual(expectedArticles, starredArticles)
|
||||
|
||||
fetchIdsExpectation.fulfill()
|
||||
}
|
||||
waitForExpectations(timeout: 2)
|
||||
}
|
||||
}
|
|
@ -8,6 +8,7 @@
|
|||
|
||||
import XCTest
|
||||
@testable import Account
|
||||
import RSCore
|
||||
|
||||
class FeedlySyncStreamContentsOperationTests: XCTestCase {
|
||||
|
||||
|
@ -26,7 +27,7 @@ class FeedlySyncStreamContentsOperationTests: XCTestCase {
|
|||
super.tearDown()
|
||||
}
|
||||
|
||||
func testIngestsOnePageSuccess() {
|
||||
func testIngestsOnePageSuccess() throws {
|
||||
let service = TestGetStreamContentsService()
|
||||
let resource = FeedlyCategoryResourceId(id: "user/1234/category/5678")
|
||||
let newerThan: Date? = Date(timeIntervalSinceReferenceDate: 0)
|
||||
|
@ -44,19 +45,19 @@ class FeedlySyncStreamContentsOperationTests: XCTestCase {
|
|||
XCTAssertNil(serviceUnreadOnly)
|
||||
}
|
||||
|
||||
let syncStreamContents = FeedlySyncStreamContentsOperation(account: account, resource: resource, service: service, newerThan: newerThan, log: support.log)
|
||||
let syncStreamContents = FeedlySyncStreamContentsOperation(account: account, resource: resource, service: service, isPagingEnabled: true, newerThan: newerThan, log: support.log)
|
||||
|
||||
let completionExpectation = expectation(description: "Did Finish")
|
||||
syncStreamContents.completionBlock = {
|
||||
syncStreamContents.completionBlock = { _ in
|
||||
completionExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(syncStreamContents)
|
||||
MainThreadOperationQueue.shared.addOperation(syncStreamContents)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
|
||||
let expectedArticleIds = Set(items.map { $0.id })
|
||||
let expectedArticles = account.fetchArticles(.articleIDs(expectedArticleIds))
|
||||
let expectedArticles = try account.fetchArticles(.articleIDs(expectedArticleIds))
|
||||
XCTAssertEqual(expectedArticles.count, expectedArticleIds.count, "Did not fetch all the articles.")
|
||||
}
|
||||
|
||||
|
@ -78,19 +79,19 @@ class FeedlySyncStreamContentsOperationTests: XCTestCase {
|
|||
XCTAssertNil(serviceUnreadOnly)
|
||||
}
|
||||
|
||||
let syncStreamContents = FeedlySyncStreamContentsOperation(account: account, resource: resource, service: service, newerThan: newerThan, log: support.log)
|
||||
let syncStreamContents = FeedlySyncStreamContentsOperation(account: account, resource: resource, service: service, isPagingEnabled: true, newerThan: newerThan, log: support.log)
|
||||
|
||||
let completionExpectation = expectation(description: "Did Finish")
|
||||
syncStreamContents.completionBlock = {
|
||||
syncStreamContents.completionBlock = { _ in
|
||||
completionExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(syncStreamContents)
|
||||
MainThreadOperationQueue.shared.addOperation(syncStreamContents)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
}
|
||||
|
||||
func testIngestsManyPagesSuccess() {
|
||||
func testIngestsManyPagesSuccess() throws {
|
||||
let service = TestGetPagedStreamContentsService()
|
||||
let resource = FeedlyCategoryResourceId(id: "user/1234/category/5678")
|
||||
let newerThan: Date? = Date(timeIntervalSinceReferenceDate: 0)
|
||||
|
@ -119,20 +120,20 @@ class FeedlySyncStreamContentsOperationTests: XCTestCase {
|
|||
getStreamPageExpectation.fulfill()
|
||||
}
|
||||
|
||||
let syncStreamContents = FeedlySyncStreamContentsOperation(account: account, resource: resource, service: service, newerThan: newerThan, log: support.log)
|
||||
let syncStreamContents = FeedlySyncStreamContentsOperation(account: account, resource: resource, service: service, isPagingEnabled: true, newerThan: newerThan, log: support.log)
|
||||
|
||||
let completionExpectation = expectation(description: "Did Finish")
|
||||
syncStreamContents.completionBlock = {
|
||||
syncStreamContents.completionBlock = { _ in
|
||||
completionExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(syncStreamContents)
|
||||
MainThreadOperationQueue.shared.addOperation(syncStreamContents)
|
||||
|
||||
waitForExpectations(timeout: 30)
|
||||
|
||||
// Find articles inserted.
|
||||
let articleIds = Set(service.pages.values.map { $0.items }.flatMap { $0 }.map { $0.id })
|
||||
let articles = account.fetchArticles(.articleIDs(articleIds))
|
||||
let articles = try account.fetchArticles(.articleIDs(articleIds))
|
||||
XCTAssertEqual(articleIds.count, articles.count)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,152 +0,0 @@
|
|||
//
|
||||
// FeedlySyncUnreadStatusesOperationTests.swift
|
||||
// AccountTests
|
||||
//
|
||||
// Created by Kiel Gillard on 29/10/19.
|
||||
// Copyright © 2019 Ranchero Software, LLC. All rights reserved.
|
||||
//
|
||||
|
||||
import XCTest
|
||||
@testable import Account
|
||||
|
||||
class FeedlySyncUnreadStatusesOperationTests: XCTestCase {
|
||||
|
||||
private var account: Account!
|
||||
private let support = FeedlyTestSupport()
|
||||
|
||||
override func setUp() {
|
||||
super.setUp()
|
||||
account = support.makeTestAccount()
|
||||
}
|
||||
|
||||
override func tearDown() {
|
||||
if let account = account {
|
||||
support.destroy(account)
|
||||
}
|
||||
super.tearDown()
|
||||
}
|
||||
|
||||
func testIngestsOnePageSuccess() {
|
||||
let service = TestGetStreamIdsService()
|
||||
let resource = FeedlyCategoryResourceId(id: "user/1234/category/5678")
|
||||
let ids = [UUID().uuidString]
|
||||
service.mockResult = .success(FeedlyStreamIds(continuation: nil, ids: ids))
|
||||
|
||||
let getStreamIdsExpectation = expectation(description: "Did Get Page of Stream Ids")
|
||||
getStreamIdsExpectation.expectedFulfillmentCount = 1
|
||||
|
||||
service.getStreamIdsExpectation = getStreamIdsExpectation
|
||||
service.parameterTester = { serviceResource, continuation, serviceNewerThan, serviceUnreadOnly in
|
||||
XCTAssertEqual(serviceResource.id, resource.id)
|
||||
XCTAssertNil(serviceNewerThan)
|
||||
XCTAssertNil(continuation)
|
||||
XCTAssertEqual(serviceUnreadOnly, true)
|
||||
}
|
||||
|
||||
let syncUnreads = FeedlySyncUnreadStatusesOperation(account: account, resource: resource, service: service, newerThan: nil, log: support.log)
|
||||
|
||||
let completionExpectation = expectation(description: "Did Finish")
|
||||
syncUnreads.completionBlock = {
|
||||
completionExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(syncUnreads)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
|
||||
let expectedArticleIds = Set(ids)
|
||||
let fetchIdsExpectation = expectation(description: "Fetch Article Ids")
|
||||
account.fetchUnreadArticleIDs { unreadArticleIds in
|
||||
let missingIds = expectedArticleIds.subtracting(unreadArticleIds)
|
||||
XCTAssertTrue(missingIds.isEmpty, "These article ids were not marked as unread.")
|
||||
fetchIdsExpectation.fulfill()
|
||||
}
|
||||
waitForExpectations(timeout: 2)
|
||||
}
|
||||
|
||||
func testIngestsOnePageFailure() {
|
||||
let service = TestGetStreamIdsService()
|
||||
let resource = FeedlyCategoryResourceId(id: "user/1234/category/5678")
|
||||
|
||||
service.mockResult = .failure(URLError(.timedOut))
|
||||
|
||||
let getStreamIdsExpectation = expectation(description: "Did Get Page of Stream Contents")
|
||||
getStreamIdsExpectation.expectedFulfillmentCount = 1
|
||||
|
||||
service.getStreamIdsExpectation = getStreamIdsExpectation
|
||||
service.parameterTester = { serviceResource, continuation, serviceNewerThan, serviceUnreadOnly in
|
||||
XCTAssertEqual(serviceResource.id, resource.id)
|
||||
XCTAssertNil(serviceNewerThan)
|
||||
XCTAssertNil(continuation)
|
||||
XCTAssertEqual(serviceUnreadOnly, true)
|
||||
}
|
||||
|
||||
let syncUnreads = FeedlySyncUnreadStatusesOperation(account: account, resource: resource, service: service, newerThan: nil, log: support.log)
|
||||
|
||||
let completionExpectation = expectation(description: "Did Finish")
|
||||
syncUnreads.completionBlock = {
|
||||
completionExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(syncUnreads)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
|
||||
let fetchIdsExpectation = expectation(description: "Fetch Article Ids")
|
||||
account.fetchUnreadArticleIDs { unreadArticleIds in
|
||||
XCTAssertTrue(unreadArticleIds.isEmpty)
|
||||
fetchIdsExpectation.fulfill()
|
||||
}
|
||||
waitForExpectations(timeout: 2)
|
||||
}
|
||||
|
||||
func testIngestsManyPagesSuccess() {
|
||||
let service = TestGetPagedStreamIdsService()
|
||||
let resource = FeedlyCategoryResourceId(id: "user/1234/category/5678")
|
||||
|
||||
let continuations = (1...10).map { "\($0)" }
|
||||
service.addAtLeastOnePage(for: resource, continuations: continuations, numberOfEntriesPerPage: 1000)
|
||||
|
||||
let getStreamIdsExpectation = expectation(description: "Did Get Page of Stream Contents")
|
||||
getStreamIdsExpectation.expectedFulfillmentCount = 1 + continuations.count
|
||||
|
||||
var remainingContinuations = Set(continuations)
|
||||
let getStreamPageExpectation = expectation(description: "Did Request Page")
|
||||
getStreamPageExpectation.expectedFulfillmentCount = 1 + continuations.count
|
||||
|
||||
service.getStreamIdsExpectation = getStreamIdsExpectation
|
||||
service.parameterTester = { serviceResource, continuation, serviceNewerThan, serviceUnreadOnly in
|
||||
XCTAssertEqual(serviceResource.id, resource.id)
|
||||
XCTAssertNil(serviceNewerThan)
|
||||
XCTAssertEqual(serviceUnreadOnly, true)
|
||||
|
||||
if let continuation = continuation {
|
||||
XCTAssertTrue(remainingContinuations.contains(continuation))
|
||||
remainingContinuations.remove(continuation)
|
||||
}
|
||||
|
||||
getStreamPageExpectation.fulfill()
|
||||
}
|
||||
|
||||
let syncUnreads = FeedlySyncUnreadStatusesOperation(account: account, resource: resource, service: service, newerThan: nil, log: support.log)
|
||||
|
||||
let completionExpectation = expectation(description: "Did Finish")
|
||||
syncUnreads.completionBlock = {
|
||||
completionExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(syncUnreads)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
|
||||
// Find statuses inserted.
|
||||
let expectedArticleIds = Set(service.pages.values.map { $0.ids }.flatMap { $0 })
|
||||
let fetchIdsExpectation = expectation(description: "Fetch Article Ids")
|
||||
account.fetchUnreadArticleIDs { unreadArticleIds in
|
||||
let missingIds = expectedArticleIds.subtracting(unreadArticleIds)
|
||||
XCTAssertTrue(missingIds.isEmpty, "These article ids were not marked as unread.")
|
||||
fetchIdsExpectation.fulfill()
|
||||
}
|
||||
waitForExpectations(timeout: 2)
|
||||
}
|
||||
}
|
|
@@ -141,13 +141,13 @@ class FeedlyTestSupport {
XCTAssertTrue(missingFeedIds.isEmpty, "Feeds with these ids were not found in the \"\(label)\" folder.")
}

func checkArticles(in account: Account, againstItemsInStreamInJSONNamed name: String, subdirectory: String? = nil) {
func checkArticles(in account: Account, againstItemsInStreamInJSONNamed name: String, subdirectory: String? = nil) throws {
let stream = testJSON(named: name, subdirectory: subdirectory) as! [String:Any]
checkArticles(in: account, againstItemsInStreamInJSONPayload: stream)
try checkArticles(in: account, againstItemsInStreamInJSONPayload: stream)
}

func checkArticles(in account: Account, againstItemsInStreamInJSONPayload stream: [String: Any]) {
checkArticles(in: account, correspondToStreamItemsIn: stream)
func checkArticles(in account: Account, againstItemsInStreamInJSONPayload stream: [String: Any]) throws {
try checkArticles(in: account, correspondToStreamItemsIn: stream)
}

private struct ArticleItem {

@@ -188,13 +188,13 @@ class FeedlyTestSupport {
}

/// Awkwardly titled to make it clear the JSON given is from a stream response.
func checkArticles(in testAccount: Account, correspondToStreamItemsIn stream: [String: Any]) {
func checkArticles(in testAccount: Account, correspondToStreamItemsIn stream: [String: Any]) throws {

let items = stream["items"] as! [[String: Any]]
let articleItems = items.map { ArticleItem(item: $0) }
let itemIds = Set(articleItems.map { $0.id })

let articles = testAccount.fetchArticles(.articleIDs(itemIds))
let articles = try testAccount.fetchArticles(.articleIDs(itemIds))
let articleIds = Set(articles.map { $0.articleID })

let missing = itemIds.subtracting(articleIds)

@@ -220,12 +220,17 @@ class FeedlyTestSupport {
func checkUnreadStatuses(in testAccount: Account, correspondToIdsInJSONPayload streamIds: [String: Any], testCase: XCTestCase) {
let ids = Set(streamIds["ids"] as! [String])
let fetchIdsExpectation = testCase.expectation(description: "Fetch Article Ids")
testAccount.fetchUnreadArticleIDs { articleIds in
// Unread statuses can be paged from Feedly.
// Instead of joining test data, the best we can do is
// make sure that these ids are marked as unread (a subset of the total).
XCTAssertTrue(ids.isSubset(of: articleIds), "Some articles in `ids` are not marked as unread.")
fetchIdsExpectation.fulfill()
testAccount.fetchUnreadArticleIDs { articleIdsResult in
do {
let articleIds = try articleIdsResult.get()
// Unread statuses can be paged from Feedly.
// Instead of joining test data, the best we can do is
// make sure that these ids are marked as unread (a subset of the total).
XCTAssertTrue(ids.isSubset(of: articleIds), "Some articles in `ids` are not marked as unread.")
fetchIdsExpectation.fulfill()
} catch {
XCTFail("Error unwrapping article IDs: \(error)")
}
}
testCase.wait(for: [fetchIdsExpectation], timeout: 2)
}

@@ -239,12 +244,17 @@ class FeedlyTestSupport {
let items = stream["items"] as! [[String: Any]]
let ids = Set(items.map { $0["id"] as! String })
let fetchIdsExpectation = testCase.expectation(description: "Fetch Article Ids")
testAccount.fetchStarredArticleIDs { articleIds in
// Starred articles can be paged from Feedly.
// Instead of joining test data, the best we can do is
// make sure that these articles are marked as starred (a subset of the total).
XCTAssertTrue(ids.isSubset(of: articleIds), "Some articles in `ids` are not marked as starred.")
fetchIdsExpectation.fulfill()
testAccount.fetchStarredArticleIDs { articleIdsResult in
do {
let articleIds = try articleIdsResult.get()
// Starred articles can be paged from Feedly.
// Instead of joining test data, the best we can do is
// make sure that these articles are marked as starred (a subset of the total).
XCTAssertTrue(ids.isSubset(of: articleIds), "Some articles in `ids` are not marked as starred.")
fetchIdsExpectation.fulfill()
} catch {
XCTFail("Error unwrapping article IDs: \(error)")
}
}
testCase.wait(for: [fetchIdsExpectation], timeout: 2)
}
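Note on the pattern above: each helper now unwraps the Result-based fetch callback with its own do/catch. A possible way to centralise that, not part of this commit, using only standard XCTest and Swift.Result APIs:

import XCTest

func unwrap<T>(_ result: Result<T, Error>, fulfilling expectation: XCTestExpectation, check: (T) -> Void) {
    do {
        // Run the caller's assertions on the success value, then mark the expectation done.
        check(try result.get())
        expectation.fulfill()
    } catch {
        XCTFail("Error unwrapping result: \(error)")
    }
}

// e.g. testAccount.fetchUnreadArticleIDs { unwrap($0, fulfilling: fetchIdsExpectation) { XCTAssertTrue(ids.isSubset(of: $0)) } }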
@@ -0,0 +1,38 @@
//
// FeedlyTextSanitizationTests.swift
// AccountTests
//
// Created by Kiel Gillard on 29/1/20.
// Copyright © 2020 Ranchero Software, LLC. All rights reserved.
//

import XCTest
@testable import Account

class FeedlyTextSanitizationTests: XCTestCase {

func testRTLSanitization() {

let targetsAndExpectations: [(target: String?, expectation: String?)] = [
(nil, nil),
("", ""),
(" ", " "),
("text", "text"),
("<div style=\"direction:rtl;text-align:right\">", "<div style=\"direction:rtl;text-align:right\">"),
("</div>", "</div>"),
("<div style=\"direction:rtl;text-align:right\">text", "<div style=\"direction:rtl;text-align:right\">text"),
("text</div>", "text</div>"),
("<div style=\"direction:rtl;text-align:right\"></div>", ""),
("<DIV style=\"direction:rtl;text-align:right\"></div>", "<DIV style=\"direction:rtl;text-align:right\"></div>"),
("<div style=\"direction:rtl;text-align:right\"></DIV>", "<div style=\"direction:rtl;text-align:right\"></DIV>"),
("<div style=\"direction:rtl;text-align:right\">text</div>", "text"),
]

let sanitizer = FeedlyRTLTextSanitizer()

for (target, expectation) in targetsAndExpectations {
let calculated = sanitizer.sanitize(target)
XCTAssertEqual(expectation, calculated)
}
}
}
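The table of targets and expectations pins the sanitizer down: the wrapper is stripped only when the exact lowercase opening tag and a trailing </div> are both present, and whitespace-only input passes through untouched. A minimal sketch consistent with those expectations (the real FeedlyRTLTextSanitizer in the Account module may differ in detail):

struct RTLTextSanitizerSketch {
    private let openingTag = "<div style=\"direction:rtl;text-align:right\">"
    private let closingTag = "</div>"

    func sanitize(_ source: String?) -> String? {
        guard let source = source, !source.isEmpty else {
            return source
        }
        // Strip the wrapper only when both the exact opening and closing tags are present (case-sensitive).
        guard source.hasPrefix(openingTag) && source.hasSuffix(closingTag) else {
            return source
        }
        return String(source.dropFirst(openingTag.count).dropLast(closingTag.count))
    }
}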
@ -9,6 +9,7 @@
|
|||
import XCTest
|
||||
@testable import Account
|
||||
import RSParser
|
||||
import RSCore
|
||||
|
||||
class FeedlyUpdateAccountFeedsWithItemsOperationTests: XCTestCase {
|
||||
|
||||
|
@ -28,23 +29,23 @@ class FeedlyUpdateAccountFeedsWithItemsOperationTests: XCTestCase {
|
|||
}
|
||||
|
||||
struct TestItemsByFeedProvider: FeedlyParsedItemsByFeedProviding {
|
||||
var providerName: String
|
||||
var parsedItemsByFeedProviderName: String
|
||||
var parsedItemsKeyedByFeedId: [String: Set<ParsedItem>]
|
||||
}
|
||||
|
||||
func testUpdateAccountWithEmptyItems() {
|
||||
func testUpdateAccountWithEmptyItems() throws {
|
||||
let testItems = support.makeParsedItemTestDataFor(numberOfFeeds: 0, numberOfItemsInFeeds: 0)
|
||||
let resource = FeedlyCategoryResourceId(id: "user/12345/category/6789")
|
||||
let provider = TestItemsByFeedProvider(providerName: resource.id, parsedItemsKeyedByFeedId: testItems)
|
||||
let provider = TestItemsByFeedProvider(parsedItemsByFeedProviderName: resource.id, parsedItemsKeyedByFeedId: testItems)
|
||||
|
||||
let update = FeedlyUpdateAccountFeedsWithItemsOperation(account: account, organisedItemsProvider: provider, log: support.log)
|
||||
|
||||
let completionExpectation = expectation(description: "Did Finish")
|
||||
update.completionBlock = {
|
||||
update.completionBlock = { _ in
|
||||
completionExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(update)
|
||||
MainThreadOperationQueue.shared.addOperation(update)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
|
||||
|
@ -52,23 +53,23 @@ class FeedlyUpdateAccountFeedsWithItemsOperationTests: XCTestCase {
|
|||
let articleIds = Set(entries.compactMap { $0.syncServiceID })
|
||||
XCTAssertEqual(articleIds.count, entries.count, "Not every item has a value for \(\ParsedItem.syncServiceID).")
|
||||
|
||||
let accountArticles = account.fetchArticles(.articleIDs(articleIds))
|
||||
let accountArticles = try account.fetchArticles(.articleIDs(articleIds))
|
||||
XCTAssertTrue(accountArticles.isEmpty)
|
||||
}
|
||||
|
||||
func testUpdateAccountWithOneItem() {
|
||||
func testUpdateAccountWithOneItem() throws {
|
||||
let testItems = support.makeParsedItemTestDataFor(numberOfFeeds: 1, numberOfItemsInFeeds: 1)
|
||||
let resource = FeedlyCategoryResourceId(id: "user/12345/category/6789")
|
||||
let provider = TestItemsByFeedProvider(providerName: resource.id, parsedItemsKeyedByFeedId: testItems)
|
||||
let provider = TestItemsByFeedProvider(parsedItemsByFeedProviderName: resource.id, parsedItemsKeyedByFeedId: testItems)
|
||||
|
||||
let update = FeedlyUpdateAccountFeedsWithItemsOperation(account: account, organisedItemsProvider: provider, log: support.log)
|
||||
|
||||
let completionExpectation = expectation(description: "Did Finish")
|
||||
update.completionBlock = {
|
||||
update.completionBlock = { _ in
|
||||
completionExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(update)
|
||||
MainThreadOperationQueue.shared.addOperation(update)
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
|
||||
|
@ -76,7 +77,7 @@ class FeedlyUpdateAccountFeedsWithItemsOperationTests: XCTestCase {
|
|||
let articleIds = Set(entries.compactMap { $0.syncServiceID })
|
||||
XCTAssertEqual(articleIds.count, entries.count, "Not every item has a value for \(\ParsedItem.syncServiceID).")
|
||||
|
||||
let accountArticles = account.fetchArticles(.articleIDs(articleIds))
|
||||
let accountArticles = try account.fetchArticles(.articleIDs(articleIds))
|
||||
XCTAssertTrue(accountArticles.count == entries.count)
|
||||
|
||||
let accountArticleIds = Set(accountArticles.map { $0.articleID })
|
||||
|
@ -84,19 +85,19 @@ class FeedlyUpdateAccountFeedsWithItemsOperationTests: XCTestCase {
|
|||
XCTAssertTrue(missingIds.isEmpty)
|
||||
}
|
||||
|
||||
func testUpdateAccountWithManyItems() {
|
||||
func testUpdateAccountWithManyItems() throws {
|
||||
let testItems = support.makeParsedItemTestDataFor(numberOfFeeds: 100, numberOfItemsInFeeds: 100)
|
||||
let resource = FeedlyCategoryResourceId(id: "user/12345/category/6789")
|
||||
let provider = TestItemsByFeedProvider(providerName: resource.id, parsedItemsKeyedByFeedId: testItems)
|
||||
let provider = TestItemsByFeedProvider(parsedItemsByFeedProviderName: resource.id, parsedItemsKeyedByFeedId: testItems)
|
||||
|
||||
let update = FeedlyUpdateAccountFeedsWithItemsOperation(account: account, organisedItemsProvider: provider, log: support.log)
|
||||
|
||||
let completionExpectation = expectation(description: "Did Finish")
|
||||
update.completionBlock = {
|
||||
update.completionBlock = { _ in
|
||||
completionExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(update)
|
||||
MainThreadOperationQueue.shared.addOperation(update)
|
||||
|
||||
waitForExpectations(timeout: 10) // 10,000 articles takes ~ three seconds for me.
|
||||
|
||||
|
@ -104,7 +105,7 @@ class FeedlyUpdateAccountFeedsWithItemsOperationTests: XCTestCase {
|
|||
let articleIds = Set(entries.compactMap { $0.syncServiceID })
|
||||
XCTAssertEqual(articleIds.count, entries.count, "Not every item has a value for \(\ParsedItem.syncServiceID).")
|
||||
|
||||
let accountArticles = account.fetchArticles(.articleIDs(articleIds))
|
||||
let accountArticles = try account.fetchArticles(.articleIDs(articleIds))
|
||||
XCTAssertTrue(accountArticles.count == entries.count)
|
||||
|
||||
let accountArticleIds = Set(accountArticles.map { $0.articleID })
|
||||
|
@ -112,21 +113,21 @@ class FeedlyUpdateAccountFeedsWithItemsOperationTests: XCTestCase {
|
|||
XCTAssertTrue(missingIds.isEmpty)
|
||||
}
|
||||
|
||||
func testCancelUpdateAccount() {
|
||||
func testCancelUpdateAccount() throws {
|
||||
let testItems = support.makeParsedItemTestDataFor(numberOfFeeds: 1, numberOfItemsInFeeds: 1)
|
||||
let resource = FeedlyCategoryResourceId(id: "user/12345/category/6789")
|
||||
let provider = TestItemsByFeedProvider(providerName: resource.id, parsedItemsKeyedByFeedId: testItems)
|
||||
let provider = TestItemsByFeedProvider(parsedItemsByFeedProviderName: resource.id, parsedItemsKeyedByFeedId: testItems)
|
||||
|
||||
let update = FeedlyUpdateAccountFeedsWithItemsOperation(account: account, organisedItemsProvider: provider, log: support.log)
|
||||
|
||||
let completionExpectation = expectation(description: "Did Finish")
|
||||
update.completionBlock = {
|
||||
update.completionBlock = { _ in
|
||||
completionExpectation.fulfill()
|
||||
}
|
||||
|
||||
OperationQueue.main.addOperation(update)
|
||||
MainThreadOperationQueue.shared.addOperation(update)
|
||||
|
||||
update.cancel()
|
||||
MainThreadOperationQueue.shared.cancelOperations([update])
|
||||
|
||||
waitForExpectations(timeout: 2)
|
||||
|
||||
|
@ -134,7 +135,7 @@ class FeedlyUpdateAccountFeedsWithItemsOperationTests: XCTestCase {
|
|||
let articleIds = Set(entries.compactMap { $0.syncServiceID })
|
||||
XCTAssertEqual(articleIds.count, entries.count, "Not every item has a value for \(\ParsedItem.syncServiceID).")
|
||||
|
||||
let accountArticles = account.fetchArticles(.articleIDs(articleIds))
|
||||
let accountArticles = try account.fetchArticles(.articleIDs(articleIds))
|
||||
XCTAssertTrue(accountArticles.isEmpty)
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -0,0 +1,26 @@
//
// TestGetEntriesService.swift
// AccountTests
//
// Created by Kiel Gillard on 11/1/20.
// Copyright © 2020 Ranchero Software, LLC. All rights reserved.
//

import XCTest
@testable import Account

final class TestGetEntriesService: FeedlyGetEntriesService {
var mockResult: Result<[FeedlyEntry], Error>?
var getEntriesExpectation: XCTestExpectation?

func getEntries(for ids: Set<String>, completion: @escaping (Result<[FeedlyEntry], Error>) -> ()) {
guard let result = mockResult else {
XCTFail("Missing mock result. Test may time out because the completion will not be called.")
return
}
DispatchQueue.main.async {
completion(result)
self.getEntriesExpectation?.fulfill()
}
}
}
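A hypothetical test method using this mock, written inside an XCTestCase and mirroring how the other Test*Service mocks in this commit are driven (the names and entry id are invented for illustration):

func testGetEntriesFailure() {
    let service = TestGetEntriesService()
    service.mockResult = .failure(URLError(.timedOut))

    let getEntriesExpectation = expectation(description: "Did Get Entries")
    service.getEntriesExpectation = getEntriesExpectation

    service.getEntries(for: Set(["entry/1"])) { result in
        // The mock should hand the configured failure straight back.
        if case .success = result {
            XCTFail("Expected the mocked failure to be passed through.")
        }
    }

    waitForExpectations(timeout: 2)
}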
@@ -12,7 +12,7 @@ public protocol ContainerIdentifiable {
var containerID: ContainerIdentifier? { get }
}

public enum ContainerIdentifier: Hashable {
public enum ContainerIdentifier: Hashable, Equatable {
case smartFeedController
case account(String) // accountID
case folder(String, String) // accountID, folderName

@@ -55,3 +55,47 @@ public enum ContainerIdentifier: Hashable {
}

}

extension ContainerIdentifier: Encodable {
enum CodingKeys: CodingKey {
case type
case accountID
case folderName
}

public func encode(to encoder: Encoder) throws {
var container = encoder.container(keyedBy: CodingKeys.self)
switch self {
case .smartFeedController:
try container.encode("smartFeedController", forKey: .type)
case .account(let accountID):
try container.encode("account", forKey: .type)
try container.encode(accountID, forKey: .accountID)
case .folder(let accountID, let folderName):
try container.encode("folder", forKey: .type)
try container.encode(accountID, forKey: .accountID)
try container.encode(folderName, forKey: .folderName)
}
}
}

extension ContainerIdentifier: Decodable {

public init(from decoder: Decoder) throws {
let container = try decoder.container(keyedBy: CodingKeys.self)
let type = try container.decode(String.self, forKey: .type)

switch type {
case "smartFeedController":
self = .smartFeedController
case "account":
let accountID = try container.decode(String.self, forKey: .accountID)
self = .account(accountID)
default:
let accountID = try container.decode(String.self, forKey: .accountID)
let folderName = try container.decode(String.self, forKey: .folderName)
self = .folder(accountID, folderName)
}
}

}
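A quick round trip exercising the conformances above, for example in a unit test or playground (the accountID and folder name are invented; this snippet is not part of the commit):

import Foundation

let original = ContainerIdentifier.folder("account-1", "Tech")
let data = try JSONEncoder().encode(original)
let decoded = try JSONDecoder().decode(ContainerIdentifier.self, from: data)
assert(decoded == original) // Equatable comes for free with Hashable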
@@ -124,7 +124,7 @@ private extension FeedFinder {
}

static func isHTML(_ data: Data) -> Bool {
return (data as NSData).rs_dataIsProbablyHTML()
return data.isProbablyHTML
}

static func downloadFeedSpecifiers(_ downloadFeedSpecifiers: Set<FeedSpecifier>, feedSpecifiers: [String: FeedSpecifier], completion: @escaping (Result<Set<FeedSpecifier>, Error>) -> Void) {

@@ -69,10 +69,10 @@ private extension FeedSpecifier {
score = score + 50
}

if urlString.rs_caseInsensitiveContains("comments") {
if urlString.caseInsensitiveContains("comments") {
score = score - 10
}
if urlString.rs_caseInsensitiveContains("rss") {
if urlString.caseInsensitiveContains("rss") {
score = score + 5
}
if urlString.hasSuffix("/feed/") {

@@ -81,15 +81,15 @@ private extension FeedSpecifier {
if urlString.hasSuffix("/feed") {
score = score + 4
}
if urlString.rs_caseInsensitiveContains("json") {
if urlString.caseInsensitiveContains("json") {
score = score + 6
}

if let title = title {
if title.rs_caseInsensitiveContains("comments") {
if title.caseInsensitiveContains("comments") {
score = score - 10
}
if title.rs_caseInsensitiveContains("json") {
if title.caseInsensitiveContains("json") {
score = score + 1
}
}
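The new call sites assume a String helper with the same semantics as the old rs_caseInsensitiveContains, presumably provided by RSCore. A sketch of what it amounts to:

import Foundation

extension String {
    // True when `other` occurs anywhere in the string, ignoring case.
    func caseInsensitiveContains(_ other: String) -> Bool {
        return range(of: other, options: .caseInsensitive) != nil
    }
}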
@@ -29,16 +29,14 @@ class HTMLFeedFinder {
}
}

if let bodyLinks = RSHTMLLinkParser.htmlLinks(with: parserData) {
let bodyLinks = RSHTMLLinkParser.htmlLinks(with: parserData)
for oneBodyLink in bodyLinks {

if linkMightBeFeed(oneBodyLink) {
let normalizedURL = oneBodyLink.urlString.rs_normalizedURL()
if linkMightBeFeed(oneBodyLink), let normalizedURL = oneBodyLink.urlString?.normalizedURL {
let oneFeedSpecifier = FeedSpecifier(title: oneBodyLink.text, urlString: normalizedURL, source: .HTMLLink)
addFeedSpecifier(oneFeedSpecifier)
}
}
}
}
}

@@ -79,5 +79,19 @@ public enum FeedIdentifier: CustomStringConvertible, Hashable {
return nil
}
}


// MARK: - Hashable

public func hash(into hasher: inout Hasher) {
switch self {
case .smartFeed(let id):
hasher.combine(id)
case .script(let id):
hasher.combine(id)
case .webFeed(_, let webFeedID):
hasher.combine(webFeedID)
case .folder(_, let folderName):
hasher.combine(folderName)
}
}
}
@@ -837,14 +837,7 @@ private extension FeedbinAccountDelegate {
os_log(.debug, log: log, "Syncing taggings with %ld taggings.", taggings.count)

// Set up some structures to make syncing easier
let folderDict: [String: Folder] = {
if let folders = account.folders {
return Dictionary(uniqueKeysWithValues: folders.map { ($0.name ?? "", $0) } )
} else {
return [String: Folder]()
}
}()

let folderDict = nameToFolderDictionary(with: account.folders)
let taggingsDict = taggings.reduce([String: [FeedbinTagging]]()) { (dict, tagging) in
var taggedFeeds = dict
if var taggedFeed = taggedFeeds[tagging.name] {

@@ -897,7 +890,22 @@ private extension FeedbinAccountDelegate {
}
}
}


func nameToFolderDictionary(with folders: Set<Folder>?) -> [String: Folder] {
guard let folders = folders else {
return [String: Folder]()
}

var d = [String: Folder]()
for folder in folders {
let name = folder.name ?? ""
if d[name] == nil {
d[name] = folder
}
}
return d
}

func sendArticleStatuses(_ statuses: [SyncStatus],
apiCall: ([Int], @escaping (Result<Void, Error>) -> Void) -> Void,
completion: @escaping ((Result<Void, Error>) -> Void)) {
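Presumably the point of replacing the inline Dictionary(uniqueKeysWithValues:) above: that initializer traps at runtime when two folders share a name, whereas nameToFolderDictionary(with:) keeps the first folder seen for each name. The difference in miniature (standard library only; the sample names are invented):

let names = ["News", "News"]
// Dictionary(uniqueKeysWithValues: names.map { ($0, $0) })                         // traps: duplicate key "News"
let first = Dictionary(names.map { ($0, $0) }, uniquingKeysWith: { a, _ in a })     // keeps the first value per key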
@ -1225,7 +1233,7 @@ private extension FeedbinAccountDelegate {
|
|||
|
||||
let parsedItems: [ParsedItem] = entries.map { entry in
|
||||
let authors = Set([ParsedAuthor(name: entry.authorName, url: entry.jsonFeed?.jsonFeedAuthor?.url, avatarURL: entry.jsonFeed?.jsonFeedAuthor?.avatarURL, emailAddress: nil)])
|
||||
return ParsedItem(syncServiceID: String(entry.articleID), uniqueID: String(entry.articleID), feedURL: String(entry.feedID), url: nil, externalURL: entry.url, title: entry.title, contentHTML: entry.contentHTML, contentText: nil, summary: entry.summary, imageURL: nil, bannerImageURL: nil, datePublished: entry.parsedDatePublished, dateModified: nil, authors: authors, tags: nil, attachments: nil)
|
||||
return ParsedItem(syncServiceID: String(entry.articleID), uniqueID: String(entry.articleID), feedURL: String(entry.feedID), url: entry.url, externalURL: nil, title: entry.title, contentHTML: entry.contentHTML, contentText: nil, summary: entry.summary, imageURL: nil, bannerImageURL: nil, datePublished: entry.parsedDatePublished, dateModified: nil, authors: authors, tags: nil, attachments: nil)
|
||||
}
|
||||
|
||||
return Set(parsedItems)
|
||||
|
@ -1237,20 +1245,38 @@ private extension FeedbinAccountDelegate {
|
|||
return
|
||||
}
|
||||
|
||||
let feedbinUnreadArticleIDs = Set(articleIDs.map { String($0) } )
|
||||
account.fetchUnreadArticleIDs { articleIDsResult in
|
||||
guard let currentUnreadArticleIDs = try? articleIDsResult.get() else {
|
||||
return
|
||||
database.selectPendingReadStatusArticleIDs() { result in
|
||||
|
||||
func process(_ pendingArticleIDs: Set<String>) {
|
||||
|
||||
let feedbinUnreadArticleIDs = Set(articleIDs.map { String($0) } )
|
||||
let updatableFeedbinUnreadArticleIDs = feedbinUnreadArticleIDs.subtracting(pendingArticleIDs)
|
||||
|
||||
account.fetchUnreadArticleIDs { articleIDsResult in
|
||||
guard let currentUnreadArticleIDs = try? articleIDsResult.get() else {
|
||||
return
|
||||
}
|
||||
|
||||
// Mark articles as unread
|
||||
let deltaUnreadArticleIDs = updatableFeedbinUnreadArticleIDs.subtracting(currentUnreadArticleIDs)
|
||||
account.markAsUnread(deltaUnreadArticleIDs)
|
||||
|
||||
// Mark articles as read
|
||||
let deltaReadArticleIDs = currentUnreadArticleIDs.subtracting(updatableFeedbinUnreadArticleIDs)
|
||||
account.markAsRead(deltaReadArticleIDs)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// Mark articles as unread
|
||||
let deltaUnreadArticleIDs = feedbinUnreadArticleIDs.subtracting(currentUnreadArticleIDs)
|
||||
account.markAsUnread(deltaUnreadArticleIDs)
|
||||
|
||||
// Mark articles as read
|
||||
let deltaReadArticleIDs = currentUnreadArticleIDs.subtracting(feedbinUnreadArticleIDs)
|
||||
account.markAsRead(deltaReadArticleIDs)
|
||||
|
||||
switch result {
|
||||
case .success(let pendingArticleIDs):
|
||||
process(pendingArticleIDs)
|
||||
case .failure(let error):
|
||||
os_log(.error, log: self.log, "Sync Article Read Status failed: %@.", error.localizedDescription)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func syncArticleStarredState(account: Account, articleIDs: [Int]?) {
|
||||
|
@ -1258,20 +1284,38 @@ private extension FeedbinAccountDelegate {
|
|||
return
|
||||
}
|
||||
|
||||
let feedbinStarredArticleIDs = Set(articleIDs.map { String($0) } )
|
||||
account.fetchStarredArticleIDs { articleIDsResult in
|
||||
guard let currentStarredArticleIDs = try? articleIDsResult.get() else {
|
||||
return
|
||||
database.selectPendingStarredStatusArticleIDs() { result in
|
||||
|
||||
func process(_ pendingArticleIDs: Set<String>) {
|
||||
|
||||
let feedbinStarredArticleIDs = Set(articleIDs.map { String($0) } )
|
||||
let updatableFeedbinUnreadArticleIDs = feedbinStarredArticleIDs.subtracting(pendingArticleIDs)
|
||||
|
||||
account.fetchStarredArticleIDs { articleIDsResult in
|
||||
guard let currentStarredArticleIDs = try? articleIDsResult.get() else {
|
||||
return
|
||||
}
|
||||
|
||||
// Mark articles as starred
|
||||
let deltaStarredArticleIDs = updatableFeedbinUnreadArticleIDs.subtracting(currentStarredArticleIDs)
|
||||
account.markAsStarred(deltaStarredArticleIDs)
|
||||
|
||||
// Mark articles as unstarred
|
||||
let deltaUnstarredArticleIDs = currentStarredArticleIDs.subtracting(updatableFeedbinUnreadArticleIDs)
|
||||
account.markAsUnstarred(deltaUnstarredArticleIDs)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
switch result {
|
||||
case .success(let pendingArticleIDs):
|
||||
process(pendingArticleIDs)
|
||||
case .failure(let error):
|
||||
os_log(.error, log: self.log, "Sync Article Starred Status failed: %@.", error.localizedDescription)
|
||||
}
|
||||
|
||||
// Mark articles as starred
|
||||
let deltaStarredArticleIDs = feedbinStarredArticleIDs.subtracting(currentStarredArticleIDs)
|
||||
account.markAsStarred(deltaStarredArticleIDs)
|
||||
|
||||
// Mark articles as unstarred
|
||||
let deltaUnstarredArticleIDs = currentStarredArticleIDs.subtracting(feedbinStarredArticleIDs)
|
||||
account.markAsUnstarred(deltaUnstarredArticleIDs)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func deleteTagging(for account: Account, with feed: WebFeed, from container: Container?, completion: @escaping (Result<Void, Error>) -> Void) {
|
||||
|
|
|
@@ -637,7 +637,7 @@ extension FeedlyAPICaller: FeedlyGetStreamIdsService {
}

queryItems.append(contentsOf: [
URLQueryItem(name: "count", value: "1000"),
URLQueryItem(name: "count", value: "10000"),
URLQueryItem(name: "streamId", value: resource.id),
])

@@ -25,7 +25,7 @@ final class FeedlyAccountDelegate: AccountDelegate {

// TODO: Kiel, if you decide not to support OPML import you will have to disallow it in the behaviors
// See https://developer.feedly.com/v3/opml/
var behaviors: AccountBehaviors = [.disallowFeedInRootFolder]
var behaviors: AccountBehaviors = [.disallowFeedInRootFolder, .disallowMarkAsUnreadAfterPeriod(31)]

let isOPMLImportSupported = false

@@ -57,16 +57,14 @@ final class FeedlyAccountDelegate: AccountDelegate {
private let log = OSLog(subsystem: Bundle.main.bundleIdentifier!, category: "Feedly")
private let database: SyncDatabase

private weak var currentSyncAllOperation: FeedlySyncAllOperation?
private let operationQueue: OperationQueue
private weak var currentSyncAllOperation: MainThreadOperation?
private let operationQueue = MainThreadOperationQueue()

init(dataFolder: String, transport: Transport?, api: FeedlyAPICaller.API) {
self.operationQueue = OperationQueue()
// Many operations have their own operation queues, such as the sync all operation.
// Making this a serial queue at this higher level of abstraction means we can ensure,
// for example, a `FeedlyRefreshAccessTokenOperation` occurs before a `FeedlySyncAllOperation`,
// improving our ability to debug, reason about and predict the behaviour of the code.
self.operationQueue.maxConcurrentOperationCount = 1

if let transport = transport {
self.caller = FeedlyAPICaller(transport: transport, api: api)

@@ -129,28 +127,25 @@ final class FeedlyAccountDelegate: AccountDelegate {

currentSyncAllOperation = operation

operationQueue.addOperation(operation)
operationQueue.add(operation)
}

func sendArticleStatus(for account: Account, completion: @escaping ((Result<Void, Error>) -> Void)) {
// Ensure remote articles have the same status as they do locally.
let send = FeedlySendArticleStatusesOperation(database: database, service: caller, log: log)
send.completionBlock = {
send.completionBlock = { operation in
// TODO: not call with success if operation was canceled? Not sure.
DispatchQueue.main.async {
completion(.success(()))
}
}
operationQueue.addOperation(send)
operationQueue.add(send)
}

/// Attempts to ensure local articles have the same status as they do remotely.
/// So if the user is using another client roughly simultaneously with this app,
/// this app does its part to ensure the articles have a consistent status between both.
///
/// Feedly has no API that allows the app to fetch the identifiers of unread articles only.
/// The only way to identify unread articles is to pull all of the article data,
/// which is effectively equivalent of a full refresh.
///
/// - Parameter account: The account whose articles have a remote status.
/// - Parameter completion: Call on the main queue.
func refreshArticleStatus(for account: Account, completion: @escaping ((Result<Void, Error>) -> Void)) {

@@ -160,18 +155,18 @@ final class FeedlyAccountDelegate: AccountDelegate {

let group = DispatchGroup()

let syncUnread = FeedlySyncUnreadStatusesOperation(account: account, credentials: credentials, service: caller, newerThan: nil, log: log)
let ingestUnread = FeedlyIngestUnreadArticleIdsOperation(account: account, credentials: credentials, service: caller, database: database, newerThan: nil, log: log)

group.enter()
syncUnread.completionBlock = {
ingestUnread.completionBlock = { _ in
group.leave()

}

let syncStarred = FeedlySyncStarredArticlesOperation(account: account, credentials: credentials, service: caller, log: log)
let ingestStarred = FeedlyIngestStarredArticleIdsOperation(account: account, credentials: credentials, service: caller, database: database, newerThan: nil, log: log)

group.enter()
syncStarred.completionBlock = {
ingestStarred.completionBlock = { _ in
group.leave()
}

@@ -179,7 +174,7 @@ final class FeedlyAccountDelegate: AccountDelegate {
completion(.success(()))
}

operationQueue.addOperations([syncUnread, syncStarred], waitUntilFinished: false)
operationQueue.addOperations([ingestUnread, ingestStarred])
}

func importOPML(for account: Account, opmlFile: URL, completion: @escaping (Result<Void, Error>) -> Void) {
@ -301,6 +296,7 @@ final class FeedlyAccountDelegate: AccountDelegate {
|
|||
addToCollectionService: caller,
|
||||
syncUnreadIdsService: caller,
|
||||
getStreamContentsService: caller,
|
||||
database: database,
|
||||
container: container,
|
||||
progress: refreshProgress,
|
||||
log: log)
|
||||
|
@ -309,7 +305,7 @@ final class FeedlyAccountDelegate: AccountDelegate {
|
|||
completion(result)
|
||||
}
|
||||
|
||||
operationQueue.addOperation(addNewFeed)
|
||||
operationQueue.add(addNewFeed)
|
||||
|
||||
} catch {
|
||||
DispatchQueue.main.async {
|
||||
|
@ -366,7 +362,7 @@ final class FeedlyAccountDelegate: AccountDelegate {
|
|||
completion(result)
|
||||
}
|
||||
|
||||
operationQueue.addOperation(addExistingFeed)
|
||||
operationQueue.add(addExistingFeed)
|
||||
|
||||
} catch {
|
||||
DispatchQueue.main.async {
|
||||
|
@ -498,13 +494,13 @@ final class FeedlyAccountDelegate: AccountDelegate {
|
|||
credentials = try? account.retrieveCredentials(type: .oauthAccessToken)
|
||||
|
||||
let refreshAccessToken = FeedlyRefreshAccessTokenOperation(account: account, service: self, oauthClient: oauthAuthorizationClient, log: log)
|
||||
operationQueue.addOperation(refreshAccessToken)
|
||||
operationQueue.add(refreshAccessToken)
|
||||
}
|
||||
|
||||
func accountWillBeDeleted(_ account: Account) {
|
||||
let logout = FeedlyLogoutOperation(account: account, service: caller, log: log)
|
||||
// Dispatch on the main queue because the lifetime of the account delegate is uncertain.
|
||||
OperationQueue.main.addOperation(logout)
|
||||
// Dispatch on the shared queue because the lifetime of the account delegate is uncertain.
|
||||
MainThreadOperationQueue.shared.add(logout)
|
||||
}
|
||||
|
||||
static func validateCredentials(transport: Transport, credentials: Credentials, endpoint: URL?, completion: @escaping (Result<Credentials?, Error>) -> Void) {
|
||||
|
|
|
@@ -10,7 +10,6 @@ import Foundation

struct FeedlyFeedContainerValidator {
var container: Container
var userId: String?

func getValidContainer() throws -> (Folder, String) {
guard let folder = container as? Folder else {

@@ -21,16 +20,6 @@ struct FeedlyFeedContainerValidator {
throw FeedlyAccountDelegateError.addFeedInvalidFolder(folder)
}

guard let userId = userId else {
throw FeedlyAccountDelegateError.notLoggedIn
}

let uncategorized = FeedlyCategoryResourceId.Global.uncategorized(for: userId)

guard collectionId != uncategorized.id else {
throw FeedlyAccountDelegateError.addFeedInvalidFolder(folder)
}

return (folder, collectionId)
}
}
|
@@ -9,6 +9,6 @@
import Foundation

struct FeedlyCategory: Decodable {
var label: String
var id: String
let label: String
let id: String
}
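Since the struct stays Decodable with matching property names, decoding is unchanged by the var-to-let move; for example (sample JSON invented, in the same id format used throughout these tests):

import Foundation

let json = Data(#"{"label": "Tech", "id": "user/1234/category/5678"}"#.utf8)
let category = try JSONDecoder().decode(FeedlyCategory.self, from: json)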
@@ -9,7 +9,7 @@
import Foundation

struct FeedlyCollection: Codable {
var feeds: [FeedlyFeed]
var label: String
var id: String
let feeds: [FeedlyFeed]
let label: String
let id: String
}
@@ -0,0 +1,23 @@
//
// FeedlyCollectionParser.swift
// Account
//
// Created by Kiel Gillard on 28/1/20.
// Copyright © 2020 Ranchero Software, LLC. All rights reserved.
//

import Foundation

struct FeedlyCollectionParser {
let collection: FeedlyCollection

private let rightToLeftTextSantizer = FeedlyRTLTextSanitizer()

var folderName: String {
return rightToLeftTextSantizer.sanitize(collection.label) ?? ""
}

var externalID: String {
return collection.id
}
}
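Hypothetical usage of the new parser (the collection values are invented, and this assumes the synthesized memberwise initializers are visible at the call site, as they appear to be elsewhere in the Account module):

let collection = FeedlyCollection(feeds: [], label: "Technology", id: "user/1234/category/5678")
let parser = FeedlyCollectionParser(collection: collection)
// parser.folderName == "Technology" (RTL wrapper stripped when present); parser.externalID == collection.id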
@ -8,84 +8,58 @@
|
|||
|
||||
import Foundation
|
||||
|
||||
enum Direction: String, Codable {
|
||||
case leftToRight = "ltr"
|
||||
case rightToLeft = "rtl"
|
||||
}
|
||||
|
||||
struct FeedlyEntry: Decodable {
|
||||
/// the unique, immutable ID for this particular article.
|
||||
var id: String
|
||||
let id: String
|
||||
|
||||
/// the article’s title. This string does not contain any HTML markup.
|
||||
var title: String?
|
||||
let title: String?
|
||||
|
||||
struct Content: Codable {
|
||||
var content: String?
|
||||
var direction: Direction?
|
||||
struct Content: Decodable {
|
||||
|
||||
enum Direction: String, Decodable {
|
||||
case leftToRight = "ltr"
|
||||
case rightToLeft = "rtl"
|
||||
}
|
||||
|
||||
let content: String?
|
||||
let direction: Direction?
|
||||
}
|
||||
|
||||
/// This object typically has two values: “content” for the content itself, and “direction” (“ltr” for left-to-right, “rtl” for right-to-left). The content itself contains sanitized HTML markup.
|
||||
var content: Content?
|
||||
let content: Content?
|
||||
|
||||
/// The article summary, as a content object. See the content object above.
|
||||
var summary: Content?
|
||||
let summary: Content?
|
||||
|
||||
/// the author’s name
|
||||
var author: String?
|
||||
let author: String?
|
||||
|
||||
/// the immutable timestamp, in ms, when this article was processed by the feedly Cloud servers.
|
||||
var crawled: Date
|
||||
let crawled: Date
|
||||
|
||||
/// the timestamp, in ms, when this article was re-processed and updated by the feedly Cloud servers.
|
||||
var recrawled: Date?
|
||||
let recrawled: Date?
|
||||
|
||||
/// the timestamp, in ms, when this article was published, as reported by the RSS feed (often inaccurate).
|
||||
// var published: Date
|
||||
|
||||
/// the timestamp, in ms, when this article was updated, as reported by the RSS feed
|
||||
// var updated: Date?
|
||||
|
||||
/// the feed from which this article was crawled. If present, “streamId” will contain the feed id, “title” will contain the feed title, and “htmlUrl” will contain the feed’s website.
|
||||
var origin: FeedlyOrigin?
|
||||
let origin: FeedlyOrigin?
|
||||
|
||||
/// Used to help find the URL to visit an article on a web site.
|
||||
/// See https://groups.google.com/forum/#!searchin/feedly-cloud/feed$20url%7Csort:date/feedly-cloud/Rx3dVd4aTFQ/Hf1ZfLJoCQAJ
|
||||
var canonical: [FeedlyLink]?
|
||||
let canonical: [FeedlyLink]?
|
||||
|
||||
/// a list of alternate links for this article. Each link object contains a media type and a URL. Typically, a single object is present, with a link to the original web page.
|
||||
var alternate: [FeedlyLink]?
|
||||
//
|
||||
// // var origin:
|
||||
// // Optional origin object the feed from which this article was crawled. If present, “streamId” will contain the feed id, “title” will contain the feed title, and “htmlUrl” will contain the feed’s website.
|
||||
// var keywords: [String]?
|
||||
//
|
||||
// /// an image URL for this entry. If present, “url” will contain the image URL, “width” and “height” its dimension, and “contentType” its MIME type.
|
||||
// var visual: Image?
|
||||
//
|
||||
let alternate: [FeedlyLink]?
|
||||
|
||||
/// Was this entry read by the user? If an Authorization header is not provided, this will always return false. If an Authorization header is provided, it will reflect if the user has read this entry or not.
|
||||
var unread: Bool
|
||||
//
|
||||
/// a list of tag objects (“id” and “label”) that the user added to this entry. This value is only returned if an Authorization header is provided, and at least one tag has been added. If the entry has been explicitly marked as read (not the feed itself), the “global.read” tag will be present.
|
||||
var tags: [FeedlyTag]?
|
||||
//
|
||||
/// a list of category objects (“id” and “label”) that the user associated with the feed of this entry. This value is only returned if an Authorization header is provided.
|
||||
var categories: [FeedlyCategory]?
|
||||
//
|
||||
// /// an indicator of how popular this entry is. The higher the number, the more readers have read, saved or shared this particular entry.
|
||||
// var engagement: Int?
|
||||
//
|
||||
// /// Timestamp for tagged articles, contains the timestamp when the article was tagged by the user. This will only be returned when the entry is returned through the streams API.
|
||||
// var actionTimestamp: Date?
|
||||
//
|
||||
/// A list of media links (videos, images, sound etc) provided by the feed. Some entries do not have a summary or content, only a collection of media links.
|
||||
var enclosure: [FeedlyLink]?
|
||||
//
|
||||
// /// The article fingerprint. This value might change if the article is updated.
|
||||
// var fingerprint: String
|
||||
|
||||
// originId
|
||||
// string the unique id of this post in the RSS feed (not necessarily a URL!)
|
||||
// sid
|
||||
// Optional string an internal search id.
|
||||
let unread: Bool
|
||||
|
||||
/// a list of tag objects (“id” and “label”) that the user added to this entry. This value is only returned if an Authorization header is provided, and at least one tag has been added. If the entry has been explicitly marked as read (not the feed itself), the “global.read” tag will be present.
|
||||
let tags: [FeedlyTag]?
|
||||
|
||||
/// a list of category objects (“id” and “label”) that the user associated with the feed of this entry. This value is only returned if an Authorization header is provided.
|
||||
let categories: [FeedlyCategory]?
|
||||
|
||||
/// A list of media links (videos, images, sound etc) provided by the feed. Some entries do not have a summary or content, only a collection of media links.
|
||||
let enclosure: [FeedlyLink]?
|
||||
}
|
||||
|
|
|
@@ -0,0 +1,29 @@
//
// FeedlyEntryIdentifierProviding.swift
// Account
//
// Created by Kiel Gillard on 9/1/20.
// Copyright © 2020 Ranchero Software, LLC. All rights reserved.
//

import Foundation

protocol FeedlyEntryIdentifierProviding: class {
var entryIds: Set<String> { get }
}

final class FeedlyEntryIdentifierProvider: FeedlyEntryIdentifierProviding {
private (set) var entryIds: Set<String>

init(entryIds: Set<String> = Set()) {
self.entryIds = entryIds
}

func addEntryIds(from provider: FeedlyEntryIdentifierProviding) {
entryIds.formUnion(provider.entryIds)
}

func addEntryIds(in articleIds: [String]) {
entryIds.formUnion(articleIds)
}
}

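A small editorial sketch, not in the diff, showing how the provider type accumulates ids from several sources before a download pass; the id strings are made up.

    let missing = FeedlyEntryIdentifierProvider(entryIds: ["entry/1", "entry/2"])
    let updated = FeedlyEntryIdentifierProvider(entryIds: ["entry/2", "entry/3"])

    let combined = FeedlyEntryIdentifierProvider()
    combined.addEntryIds(from: missing)
    combined.addEntryIds(from: updated)
    // combined.entryIds now holds "entry/1", "entry/2" and "entry/3"; the Set collapses the duplicate.
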
@ -11,16 +11,20 @@ import Articles
|
|||
import RSParser
|
||||
|
||||
struct FeedlyEntryParser {
|
||||
var entry: FeedlyEntry
|
||||
let entry: FeedlyEntry
|
||||
|
||||
private let rightToLeftTextSantizer = FeedlyRTLTextSanitizer()
|
||||
|
||||
var id: String {
|
||||
return entry.id
|
||||
}
|
||||
|
||||
var feedUrl: String {
|
||||
/// When ingesting articles, the feedURL must match a feed's `webFeedID` for the article to be reachable between it and its matching feed. It reminds me of a foreign key.
|
||||
var feedUrl: String? {
|
||||
guard let id = entry.origin?.streamId else {
|
||||
assertionFailure()
|
||||
return ""
|
||||
// At this point, check Feedly's API isn't glitching or the response has not changed structure.
|
||||
assertionFailure("Entries need to be traceable to a feed or this entry will be dropped.")
|
||||
return nil
|
||||
}
|
||||
return id
|
||||
}
|
||||
|
@ -36,7 +40,7 @@ struct FeedlyEntryParser {
|
|||
}
|
||||
|
||||
var title: String? {
|
||||
return entry.title
|
||||
return rightToLeftTextSantizer.sanitize(entry.title)
|
||||
}
|
||||
|
||||
var contentHMTL: String? {
|
||||
|
@ -49,7 +53,7 @@ struct FeedlyEntryParser {
|
|||
}
|
||||
|
||||
var summary: String? {
|
||||
return entry.summary?.content
|
||||
return rightToLeftTextSantizer.sanitize(entry.summary?.content)
|
||||
}
|
||||
|
||||
var datePublished: Date {
|
||||
|
@ -67,6 +71,7 @@ struct FeedlyEntryParser {
|
|||
return Set([ParsedAuthor(name: name, url: nil, avatarURL: nil, emailAddress: nil)])
|
||||
}
|
||||
|
||||
/// While there is not yet a tagging interface, articles can still be searched for by tags.
|
||||
var tags: Set<String>? {
|
||||
guard let labels = entry.tags?.compactMap({ $0.label }), !labels.isEmpty else {
|
||||
return nil
|
||||
|
@ -82,7 +87,11 @@ struct FeedlyEntryParser {
|
|||
return attachments.isEmpty ? nil : Set(attachments)
|
||||
}
|
||||
|
||||
var parsedItemRepresentation: ParsedItem {
|
||||
var parsedItemRepresentation: ParsedItem? {
|
||||
guard let feedUrl = feedUrl else {
|
||||
return nil
|
||||
}
|
||||
|
||||
return ParsedItem(syncServiceID: id,
|
||||
uniqueID: id, // This value seems to get ignored or replaced.
|
||||
feedURL: feedUrl,
|
||||
|
|
|
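An editorial sketch of why feedUrl, and with it parsedItemRepresentation, became optional: an entry without an origin stream id cannot be tied to a web feed, so the parser now drops it via compactMap instead of ingesting it with an empty feed URL. FeedlyOrigin is the struct changed later in this diff; the values and the helper function are illustrative.

    let origin = FeedlyOrigin(title: "inessential.com",
                              streamId: "feed/https://inessential.com/xml/rss.xml",
                              htmlUrl: "https://inessential.com/")

    // Mirrors the guard in FeedlyEntryParser.feedUrl: no stream id means the article would be unreachable.
    func feedUrl(for origin: FeedlyOrigin?) -> String? {
        guard let id = origin?.streamId else { return nil }
        return id
    }

    feedUrl(for: origin) // "feed/https://inessential.com/xml/rss.xml"
    feedUrl(for: nil)    // nil, so the caller's compactMap drops the entry
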
@@ -9,8 +9,8 @@
import Foundation

struct FeedlyFeed: Codable {
var id: String
var title: String?
var updated: Date?
var website: String?
let id: String
let title: String?
let updated: Date?
let website: String?
}

@@ -0,0 +1,32 @@
//
// FeedlyFeedParser.swift
// Account
//
// Created by Kiel Gillard on 29/1/20.
// Copyright © 2020 Ranchero Software, LLC. All rights reserved.
//

import Foundation

struct FeedlyFeedParser {
let feed: FeedlyFeed

private let rightToLeftTextSantizer = FeedlyRTLTextSanitizer()

var title: String? {
return rightToLeftTextSantizer.sanitize(feed.title) ?? ""
}

var webFeedID: String {
return feed.id
}

var url: String {
let resource = FeedlyFeedResourceId(id: feed.id)
return resource.url
}

var homePageURL: String? {
return feed.website
}
}

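An editorial sketch of how the new parser exposes a FeedlyFeed to account code, assuming the FeedlyFeed and FeedlyFeedParser types above and Feedly's "feed/<xml-url>" id convention; the values are illustrative.

    let feed = FeedlyFeed(id: "feed/https://inessential.com/xml/rss.xml",
                          title: "inessential.com",
                          updated: nil,
                          website: "https://inessential.com/")
    let parser = FeedlyFeedParser(feed: feed)
    parser.webFeedID   // "feed/https://inessential.com/xml/rss.xml"
    parser.url         // the XML URL recovered from the resource id
    parser.homePageURL // "https://inessential.com/"
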
@@ -11,9 +11,9 @@ import Foundation
struct FeedlyFeedsSearchResponse: Decodable {

struct Feed: Decodable {
var title: String
var feedId: String
let title: String
let feedId: String
}

var results: [Feed]
let results: [Feed]
}

@@ -9,10 +9,10 @@
import Foundation

struct FeedlyLink: Decodable {
var href: String
let href: String

/// The mime type of the resource located by `href`.
/// When `nil`, it's probably a web page?
/// https://groups.google.com/forum/#!searchin/feedly-cloud/feed$20url%7Csort:date/feedly-cloud/Rx3dVd4aTFQ/Hf1ZfLJoCQAJ
var type: String?
let type: String?
}

@@ -9,7 +9,7 @@
import Foundation

struct FeedlyOrigin: Decodable {
var title: String?
var streamId: String
var htmlUrl: String
let title: String?
let streamId: String?
let htmlUrl: String?
}

@@ -0,0 +1,28 @@
//
// FeedlyRTLTextSanitizer.swift
// Account
//
// Created by Kiel Gillard on 28/1/20.
// Copyright © 2020 Ranchero Software, LLC. All rights reserved.
//

import Foundation

struct FeedlyRTLTextSanitizer {
private let rightToLeftPrefix = "<div style=\"direction:rtl;text-align:right\">"
private let rightToLeftSuffix = "</div>"

func sanitize(_ sourceText: String?) -> String? {
guard let source = sourceText, !source.isEmpty else {
return sourceText
}

guard source.hasPrefix(rightToLeftPrefix) && source.hasSuffix(rightToLeftSuffix) else {
return source
}

let start = source.index(source.startIndex, offsetBy: rightToLeftPrefix.indices.count)
let end = source.index(source.endIndex, offsetBy: -rightToLeftSuffix.indices.count)
return String(source[start..<end])
}
}

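An editorial sketch of what the sanitizer does, using the struct added above: Feedly wraps right-to-left titles in a styled div, and sanitize strips exactly that wrapper while leaving everything else alone.

    let sanitizer = FeedlyRTLTextSanitizer()

    // The RTL wrapper div is removed...
    sanitizer.sanitize("<div style=\"direction:rtl;text-align:right\">مرحبا</div>") // "مرحبا"

    // ...while plain text, empty strings and nil pass through unchanged.
    sanitizer.sanitize("Hello") // "Hello"
    sanitizer.sanitize("")      // ""
    sanitizer.sanitize(nil)     // nil
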
@@ -17,7 +17,7 @@ protocol FeedlyResourceId {

/// The Feed Resource is documented here: https://developer.feedly.com/cloud/
struct FeedlyFeedResourceId: FeedlyResourceId {
var id: String
let id: String

/// The location of the kind of resource a concrete type represents.
/// If the concrete type cannot strip the resource type from the Id, it should just return the Id
@@ -45,7 +45,7 @@ extension FeedlyFeedResourceId {
}

struct FeedlyCategoryResourceId: FeedlyResourceId {
var id: String
let id: String

enum Global {

@@ -72,7 +72,7 @@ struct FeedlyCategoryResourceId: FeedlyResourceId {
}

struct FeedlyTagResourceId: FeedlyResourceId {
var id: String
let id: String

enum Global {

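An editorial sketch of the id-to-URL mapping the comment above describes, assuming Feedly's "feed/<xml-url>" convention; the shipping implementation is FeedlyFeedResourceId.url, and the helper name here is hypothetical.

    func urlFromFeedResourceId(_ id: String) -> String {
        let prefix = "feed/"
        // Strip the resource type when present; otherwise fall back to returning the id itself.
        return id.hasPrefix(prefix) ? String(id.dropFirst(prefix.count)) : id
    }

    urlFromFeedResourceId("feed/https://daringfireball.net/feeds/json") // "https://daringfireball.net/feeds/json"
    urlFromFeedResourceId("user/123/category/global.all")               // returned unchanged
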
@@ -9,16 +9,16 @@
import Foundation

struct FeedlyStream: Decodable {
var id: String
let id: String

/// Of the most recent entry for this stream (regardless of continuation, newerThan, etc).
var updated: Date?
let updated: Date?

/// Optional string the continuation id to pass to the next stream call, for pagination.
/// the continuation id to pass to the next stream call, for pagination.
/// This id guarantees that no entry will be duplicated in a stream (meaning, there is no need to de-duplicate entries returned by this call).
/// If this value is not returned, it means the end of the stream has been reached.
var continuation: String?
var items: [FeedlyEntry]
let continuation: String?
let items: [FeedlyEntry]

var isStreamEnd: Bool {
return continuation == nil

@@ -9,8 +9,8 @@
import Foundation

struct FeedlyStreamIds: Decodable {
var continuation: String?
var ids: [String]
let continuation: String?
let ids: [String]

var isStreamEnd: Bool {
return continuation == nil

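An editorial sketch of how one page of stream ids is decoded and checked for further pages, assuming the FeedlyStreamIds struct above; the JSON literal is illustrative.

    let json = Data("""
    {"continuation": "16f2948e7c1:0:abc", "ids": ["entry/abc", "entry/def"]}
    """.utf8)

    if let page = try? JSONDecoder().decode(FeedlyStreamIds.self, from: json) {
        if page.isStreamEnd {
            // No continuation came back: every id has been collected.
        } else {
            // Pass page.continuation to the next getStreamIds call.
        }
    }
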
@@ -9,6 +9,6 @@
import Foundation

struct FeedlyTag: Decodable {
var id: String
var label: String?
let id: String
let label: String?
}

|
|||
|
||||
import Foundation
|
||||
import AuthenticationServices
|
||||
import RSCore
|
||||
|
||||
public protocol OAuthAccountAuthorizationOperationDelegate: class {
|
||||
func oauthAccountAuthorizationOperation(_ operation: OAuthAccountAuthorizationOperation, didCreate account: Account)
|
||||
func oauthAccountAuthorizationOperation(_ operation: OAuthAccountAuthorizationOperation, didFailWith error: Error)
|
||||
}
|
||||
|
||||
public final class OAuthAccountAuthorizationOperation: Operation, ASWebAuthenticationPresentationContextProviding {
|
||||
|
||||
@objc public final class OAuthAccountAuthorizationOperation: NSObject, MainThreadOperation, ASWebAuthenticationPresentationContextProviding {
|
||||
|
||||
public var isCanceled: Bool = false {
|
||||
didSet {
|
||||
if isCanceled {
|
||||
cancel()
|
||||
}
|
||||
}
|
||||
}
|
||||
public var id: Int?
|
||||
public weak var operationDelegate: MainThreadOperationDelegate?
|
||||
public var name: String?
|
||||
public var completionBlock: MainThreadOperation.MainThreadOperationCompletionBlock?
|
||||
|
||||
public weak var presentationAnchor: ASPresentationAnchor?
|
||||
public weak var delegate: OAuthAccountAuthorizationOperationDelegate?
|
||||
|
||||
|
@ -28,15 +41,9 @@ public final class OAuthAccountAuthorizationOperation: Operation, ASWebAuthentic
|
|||
self.oauthClient = Account.oauthAuthorizationClient(for: accountType)
|
||||
}
|
||||
|
||||
override public func main() {
|
||||
assert(Thread.isMainThread)
|
||||
public func run() {
|
||||
assert(presentationAnchor != nil, "\(self) outlived presentation anchor.")
|
||||
|
||||
guard !isCancelled else {
|
||||
didFinish()
|
||||
return
|
||||
}
|
||||
|
||||
let request = Account.oauthAuthorizationCodeGrantRequest(for: accountType)
|
||||
|
||||
guard let url = request.url else {
|
||||
|
@ -63,13 +70,12 @@ public final class OAuthAccountAuthorizationOperation: Operation, ASWebAuthentic
|
|||
session.start()
|
||||
}
|
||||
|
||||
override public func cancel() {
|
||||
public func cancel() {
|
||||
session?.cancel()
|
||||
super.cancel()
|
||||
}
|
||||
|
||||
private func didEndAuthentication(url: URL?, error: Error?) {
|
||||
guard !isCancelled else {
|
||||
guard !isCanceled else {
|
||||
didFinish()
|
||||
return
|
||||
}
|
||||
|
@ -102,7 +108,7 @@ public final class OAuthAccountAuthorizationOperation: Operation, ASWebAuthentic
|
|||
}
|
||||
|
||||
private func didEndRequestingAccessToken(_ result: Result<OAuthAuthorizationGrant, Error>) {
|
||||
guard !isCancelled else {
|
||||
guard !isCanceled else {
|
||||
didFinish()
|
||||
return
|
||||
}
|
||||
|
@ -140,48 +146,12 @@ public final class OAuthAccountAuthorizationOperation: Operation, ASWebAuthentic
|
|||
|
||||
private func didFinish() {
|
||||
assert(Thread.isMainThread)
|
||||
assert(!isFinished, "Finished operation is attempting to finish again.")
|
||||
self.isExecutingOperation = false
|
||||
self.isFinishedOperation = true
|
||||
operationDelegate?.operationDidComplete(self)
|
||||
}
|
||||
|
||||
private func didFinish(_ error: Error) {
|
||||
assert(Thread.isMainThread)
|
||||
assert(!isFinished, "Finished operation is attempting to finish again.")
|
||||
delegate?.oauthAccountAuthorizationOperation(self, didFailWith: error)
|
||||
didFinish()
|
||||
}
|
||||
|
||||
override public func start() {
|
||||
isExecutingOperation = true
|
||||
DispatchQueue.main.async {
|
||||
self.main()
|
||||
}
|
||||
}
|
||||
|
||||
override public var isExecuting: Bool {
|
||||
return isExecutingOperation
|
||||
}
|
||||
|
||||
private var isExecutingOperation = false {
|
||||
willSet {
|
||||
willChangeValue(for: \.isExecuting)
|
||||
}
|
||||
didSet {
|
||||
didChangeValue(for: \.isExecuting)
|
||||
}
|
||||
}
|
||||
|
||||
override public var isFinished: Bool {
|
||||
return isFinishedOperation
|
||||
}
|
||||
|
||||
private var isFinishedOperation = false {
|
||||
willSet {
|
||||
willChangeValue(for: \.isFinished)
|
||||
}
|
||||
didSet {
|
||||
didChangeValue(for: \.isFinished)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -9,20 +9,20 @@
|
|||
import Foundation
|
||||
import os.log
|
||||
import RSWeb
|
||||
import RSCore
|
||||
|
||||
class FeedlyAddExistingFeedOperation: FeedlyOperation, FeedlyOperationDelegate, FeedlyCheckpointOperationDelegate {
|
||||
private let operationQueue: OperationQueue
|
||||
|
||||
|
||||
private let operationQueue = MainThreadOperationQueue()
|
||||
var addCompletionHandler: ((Result<Void, Error>) -> ())?
|
||||
|
||||
|
||||
init(account: Account, credentials: Credentials, resource: FeedlyFeedResourceId, service: FeedlyAddFeedToCollectionService, container: Container, progress: DownloadProgress, log: OSLog) throws {
|
||||
|
||||
let validator = FeedlyFeedContainerValidator(container: container, userId: credentials.username)
|
||||
let validator = FeedlyFeedContainerValidator(container: container)
|
||||
let (folder, collectionId) = try validator.getValidContainer()
|
||||
|
||||
self.operationQueue = OperationQueue()
|
||||
self.operationQueue.isSuspended = true
|
||||
|
||||
self.operationQueue.suspend()
|
||||
|
||||
super.init()
|
||||
|
||||
self.downloadProgress = progress
|
||||
|
@ -30,31 +30,28 @@ class FeedlyAddExistingFeedOperation: FeedlyOperation, FeedlyOperationDelegate,
|
|||
let addRequest = FeedlyAddFeedToCollectionOperation(account: account, folder: folder, feedResource: resource, feedName: nil, collectionId: collectionId, service: service)
|
||||
addRequest.delegate = self
|
||||
addRequest.downloadProgress = progress
|
||||
self.operationQueue.addOperation(addRequest)
|
||||
self.operationQueue.add(addRequest)
|
||||
|
||||
let createFeeds = FeedlyCreateFeedsForCollectionFoldersOperation(account: account, feedsAndFoldersProvider: addRequest, log: log)
|
||||
createFeeds.downloadProgress = progress
|
||||
createFeeds.addDependency(addRequest)
|
||||
self.operationQueue.addOperation(createFeeds)
|
||||
self.operationQueue.add(createFeeds)
|
||||
|
||||
let finishOperation = FeedlyCheckpointOperation()
|
||||
finishOperation.checkpointDelegate = self
|
||||
finishOperation.downloadProgress = progress
|
||||
finishOperation.addDependency(createFeeds)
|
||||
self.operationQueue.addOperation(finishOperation)
|
||||
self.operationQueue.add(finishOperation)
|
||||
}
|
||||
|
||||
override func cancel() {
|
||||
override func run() {
|
||||
operationQueue.resume()
|
||||
}
|
||||
|
||||
override func didCancel() {
|
||||
operationQueue.cancelAllOperations()
|
||||
super.cancel()
|
||||
didFinish()
|
||||
}
|
||||
|
||||
override func main() {
|
||||
guard !isCancelled else {
|
||||
return
|
||||
}
|
||||
operationQueue.isSuspended = false
|
||||
addCompletionHandler = nil
|
||||
super.didCancel()
|
||||
}
|
||||
|
||||
func feedlyOperation(_ operation: FeedlyOperation, didFailWith error: Error) {
|
||||
|
@ -65,7 +62,7 @@ class FeedlyAddExistingFeedOperation: FeedlyOperation, FeedlyOperationDelegate,
|
|||
}
|
||||
|
||||
func feedlyCheckpointOperationDidReachCheckpoint(_ operation: FeedlyCheckpointOperation) {
|
||||
guard !isCancelled else {
|
||||
guard !isCanceled else {
|
||||
return
|
||||
}
|
||||
|
||||
|
|
|
@ -13,13 +13,14 @@ protocol FeedlyAddFeedToCollectionService {
|
|||
}
|
||||
|
||||
final class FeedlyAddFeedToCollectionOperation: FeedlyOperation, FeedlyFeedsAndFoldersProviding, FeedlyResourceProviding {
|
||||
|
||||
let feedName: String?
|
||||
let collectionId: String
|
||||
let service: FeedlyAddFeedToCollectionService
|
||||
let account: Account
|
||||
let folder: Folder
|
||||
let feedResource: FeedlyFeedResourceId
|
||||
|
||||
|
||||
init(account: Account, folder: Folder, feedResource: FeedlyFeedResourceId, feedName: String? = nil, collectionId: String, service: FeedlyAddFeedToCollectionService) {
|
||||
self.account = account
|
||||
self.folder = folder
|
||||
|
@ -35,23 +36,23 @@ final class FeedlyAddFeedToCollectionOperation: FeedlyOperation, FeedlyFeedsAndF
|
|||
return feedResource
|
||||
}
|
||||
|
||||
override func main() {
|
||||
guard !isCancelled else {
|
||||
return didFinish()
|
||||
}
|
||||
|
||||
override func run() {
|
||||
service.addFeed(with: feedResource, title: feedName, toCollectionWith: collectionId) { [weak self] result in
|
||||
guard let self = self else {
|
||||
return
|
||||
}
|
||||
guard !self.isCancelled else {
|
||||
return self.didFinish()
|
||||
if self.isCanceled {
|
||||
self.didFinish()
|
||||
return
|
||||
}
|
||||
self.didCompleteRequest(result)
|
||||
}
|
||||
}
|
||||
|
||||
private func didCompleteRequest(_ result: Result<[FeedlyFeed], Error>) {
|
||||
}
|
||||
|
||||
private extension FeedlyAddFeedToCollectionOperation {
|
||||
|
||||
func didCompleteRequest(_ result: Result<[FeedlyFeed], Error>) {
|
||||
switch result {
|
||||
case .success(let feedlyFeeds):
|
||||
feedsAndFolders = [(feedlyFeeds, folder)]
|
||||
|
@ -59,13 +60,13 @@ final class FeedlyAddFeedToCollectionOperation: FeedlyOperation, FeedlyFeedsAndF
|
|||
let feedsWithCreatedFeedId = feedlyFeeds.filter { $0.id == resource.id }
|
||||
|
||||
if feedsWithCreatedFeedId.isEmpty {
|
||||
didFinish(AccountError.createErrorNotFound)
|
||||
didFinish(with: AccountError.createErrorNotFound)
|
||||
} else {
|
||||
didFinish()
|
||||
}
|
||||
|
||||
case .failure(let error):
|
||||
didFinish(error)
|
||||
didFinish(with: error)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -8,75 +8,71 @@
|
|||
|
||||
import Foundation
|
||||
import os.log
|
||||
import SyncDatabase
|
||||
import RSWeb
|
||||
import RSCore
|
||||
|
||||
class FeedlyAddNewFeedOperation: FeedlyOperation, FeedlyOperationDelegate, FeedlySearchOperationDelegate, FeedlyCheckpointOperationDelegate {
|
||||
private let operationQueue: OperationQueue
|
||||
|
||||
private let operationQueue = MainThreadOperationQueue()
|
||||
private let folder: Folder
|
||||
private let collectionId: String
|
||||
private let url: String
|
||||
private let account: Account
|
||||
private let credentials: Credentials
|
||||
private let database: SyncDatabase
|
||||
private let feedName: String?
|
||||
private let addToCollectionService: FeedlyAddFeedToCollectionService
|
||||
private let syncUnreadIdsService: FeedlyGetStreamIdsService
|
||||
private let getStreamContentsService: FeedlyGetStreamContentsService
|
||||
private let log: OSLog
|
||||
|
||||
private var feedResourceId: FeedlyFeedResourceId?
|
||||
var addCompletionHandler: ((Result<WebFeed, Error>) -> ())?
|
||||
|
||||
init(account: Account, credentials: Credentials, url: String, feedName: String?, searchService: FeedlySearchService, addToCollectionService: FeedlyAddFeedToCollectionService, syncUnreadIdsService: FeedlyGetStreamIdsService, getStreamContentsService: FeedlyGetStreamContentsService, container: Container, progress: DownloadProgress, log: OSLog) throws {
|
||||
|
||||
init(account: Account, credentials: Credentials, url: String, feedName: String?, searchService: FeedlySearchService, addToCollectionService: FeedlyAddFeedToCollectionService, syncUnreadIdsService: FeedlyGetStreamIdsService, getStreamContentsService: FeedlyGetStreamContentsService, database: SyncDatabase, container: Container, progress: DownloadProgress, log: OSLog) throws {
|
||||
|
||||
let validator = FeedlyFeedContainerValidator(container: container, userId: credentials.username)
|
||||
|
||||
let validator = FeedlyFeedContainerValidator(container: container)
|
||||
(self.folder, self.collectionId) = try validator.getValidContainer()
|
||||
|
||||
self.url = url
|
||||
self.operationQueue = OperationQueue()
|
||||
self.operationQueue.isSuspended = true
|
||||
self.operationQueue.suspend()
|
||||
self.account = account
|
||||
self.credentials = credentials
|
||||
self.database = database
|
||||
self.feedName = feedName
|
||||
self.addToCollectionService = addToCollectionService
|
||||
self.syncUnreadIdsService = syncUnreadIdsService
|
||||
self.getStreamContentsService = getStreamContentsService
|
||||
self.log = log
|
||||
|
||||
|
||||
super.init()
|
||||
|
||||
|
||||
self.downloadProgress = progress
|
||||
|
||||
let search = FeedlySearchOperation(query: url, locale: .current, service: searchService)
|
||||
search.delegate = self
|
||||
search.searchDelegate = self
|
||||
search.downloadProgress = progress
|
||||
self.operationQueue.addOperation(search)
|
||||
self.operationQueue.add(search)
|
||||
}
|
||||
|
||||
override func cancel() {
|
||||
override func run() {
|
||||
operationQueue.resume()
|
||||
}
|
||||
|
||||
override func didCancel() {
|
||||
operationQueue.cancelAllOperations()
|
||||
super.cancel()
|
||||
|
||||
didFinish()
|
||||
|
||||
// Operation should silently cancel.
|
||||
addCompletionHandler = nil
|
||||
super.didCancel()
|
||||
}
|
||||
|
||||
override func main() {
|
||||
guard !isCancelled else {
|
||||
return
|
||||
}
|
||||
operationQueue.isSuspended = false
|
||||
}
|
||||
|
||||
private var feedResourceId: FeedlyFeedResourceId?
|
||||
|
||||
|
||||
func feedlySearchOperation(_ operation: FeedlySearchOperation, didGet response: FeedlyFeedsSearchResponse) {
|
||||
guard !isCancelled else {
|
||||
guard !isCanceled else {
|
||||
return
|
||||
}
|
||||
guard let first = response.results.first else {
|
||||
return didFinish(AccountError.createErrorNotFound)
|
||||
return didFinish(with: AccountError.createErrorNotFound)
|
||||
}
|
||||
|
||||
let feedResourceId = FeedlyFeedResourceId(id: first.feedId)
|
||||
|
@ -85,42 +81,47 @@ class FeedlyAddNewFeedOperation: FeedlyOperation, FeedlyOperationDelegate, Feedl
|
|||
let addRequest = FeedlyAddFeedToCollectionOperation(account: account, folder: folder, feedResource: feedResourceId, feedName: feedName, collectionId: collectionId, service: addToCollectionService)
|
||||
addRequest.delegate = self
|
||||
addRequest.downloadProgress = downloadProgress
|
||||
self.operationQueue.addOperation(addRequest)
|
||||
operationQueue.add(addRequest)
|
||||
|
||||
let createFeeds = FeedlyCreateFeedsForCollectionFoldersOperation(account: account, feedsAndFoldersProvider: addRequest, log: log)
|
||||
createFeeds.delegate = self
|
||||
createFeeds.addDependency(addRequest)
|
||||
createFeeds.downloadProgress = downloadProgress
|
||||
self.operationQueue.addOperation(createFeeds)
|
||||
operationQueue.add(createFeeds)
|
||||
|
||||
let syncUnread = FeedlySyncUnreadStatusesOperation(account: account, credentials: credentials, service: syncUnreadIdsService, newerThan: nil, log: log)
|
||||
let syncUnread = FeedlyIngestUnreadArticleIdsOperation(account: account, credentials: credentials, service: syncUnreadIdsService, database: database, newerThan: nil, log: log)
|
||||
syncUnread.addDependency(createFeeds)
|
||||
syncUnread.downloadProgress = downloadProgress
|
||||
self.operationQueue.addOperation(syncUnread)
|
||||
syncUnread.delegate = self
|
||||
operationQueue.add(syncUnread)
|
||||
|
||||
let syncFeed = FeedlySyncStreamContentsOperation(account: account, resource: feedResourceId, service: getStreamContentsService, newerThan: nil, log: log)
|
||||
let syncFeed = FeedlySyncStreamContentsOperation(account: account, resource: feedResourceId, service: getStreamContentsService, isPagingEnabled: false, newerThan: nil, log: log)
|
||||
syncFeed.addDependency(syncUnread)
|
||||
syncFeed.downloadProgress = downloadProgress
|
||||
self.operationQueue.addOperation(syncFeed)
|
||||
syncFeed.delegate = self
|
||||
operationQueue.add(syncFeed)
|
||||
|
||||
let finishOperation = FeedlyCheckpointOperation()
|
||||
finishOperation.checkpointDelegate = self
|
||||
finishOperation.downloadProgress = downloadProgress
|
||||
finishOperation.addDependency(syncFeed)
|
||||
self.operationQueue.addOperation(finishOperation)
|
||||
finishOperation.delegate = self
|
||||
operationQueue.add(finishOperation)
|
||||
}
|
||||
|
||||
func feedlyOperation(_ operation: FeedlyOperation, didFailWith error: Error) {
|
||||
addCompletionHandler?(.failure(error))
|
||||
addCompletionHandler = nil
|
||||
|
||||
os_log(.debug, log: log, "Unable to add new feed: %{public}@.", error as NSError)
|
||||
|
||||
cancel()
|
||||
}
|
||||
|
||||
func feedlyCheckpointOperationDidReachCheckpoint(_ operation: FeedlyCheckpointOperation) {
|
||||
guard !isCancelled else {
|
||||
guard !isCanceled else {
|
||||
return
|
||||
}
|
||||
|
||||
defer {
|
||||
didFinish()
|
||||
}
|
||||
|
@ -128,14 +129,12 @@ class FeedlyAddNewFeedOperation: FeedlyOperation, FeedlyOperationDelegate, Feedl
|
|||
guard let handler = addCompletionHandler else {
|
||||
return
|
||||
}
|
||||
|
||||
if let feedResource = feedResourceId, let feed = folder.existingWebFeed(withWebFeedID: feedResource.id) {
|
||||
handler(.success(feed))
|
||||
|
||||
} else {
|
||||
}
|
||||
else {
|
||||
handler(.failure(AccountError.createErrorNotFound))
|
||||
}
|
||||
|
||||
addCompletionHandler = nil
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -12,15 +12,14 @@ protocol FeedlyCheckpointOperationDelegate: class {
func feedlyCheckpointOperationDidReachCheckpoint(_ operation: FeedlyCheckpointOperation)
}

/// Single responsibility is to let the delegate know an instance is executing. The semantics are up to the delegate.
/// Let the delegate know an instance is executing. The semantics are up to the delegate.
final class FeedlyCheckpointOperation: FeedlyOperation {

weak var checkpointDelegate: FeedlyCheckpointOperationDelegate?

override func main() {
defer { didFinish() }
guard !isCancelled else {
return

override func run() {
defer {
didFinish()
}
checkpointDelegate?.feedlyCheckpointOperationDidReachCheckpoint(self)
}

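An editorial sketch of the pattern the operations in this branch use: a checkpoint is queued as the final, dependent operation so its delegate learns when the whole batch has run. SyncCoordinator is a hypothetical delegate; FeedlyCheckpointOperation and MainThreadOperationQueue are the types used in the diff.

    final class SyncCoordinator: FeedlyCheckpointOperationDelegate {
        func feedlyCheckpointOperationDidReachCheckpoint(_ operation: FeedlyCheckpointOperation) {
            // Everything the checkpoint depends on has finished.
        }
    }

    let queue = MainThreadOperationQueue()
    let coordinator = SyncCoordinator()

    let work = FeedlyCheckpointOperation()  // stands in for any operation doing real work
    let checkpoint = FeedlyCheckpointOperation()
    checkpoint.checkpointDelegate = coordinator
    checkpoint.addDependency(work)          // the checkpoint runs only after `work` finishes
    queue.add(work)
    queue.add(checkpoint)
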
@ -15,17 +15,17 @@ final class FeedlyCreateFeedsForCollectionFoldersOperation: FeedlyOperation {
|
|||
let account: Account
|
||||
let feedsAndFoldersProvider: FeedlyFeedsAndFoldersProviding
|
||||
let log: OSLog
|
||||
|
||||
|
||||
init(account: Account, feedsAndFoldersProvider: FeedlyFeedsAndFoldersProviding, log: OSLog) {
|
||||
self.feedsAndFoldersProvider = feedsAndFoldersProvider
|
||||
self.account = account
|
||||
self.log = log
|
||||
}
|
||||
|
||||
override func main() {
|
||||
defer { didFinish() }
|
||||
|
||||
guard !isCancelled else { return }
|
||||
override func run() {
|
||||
defer {
|
||||
didFinish()
|
||||
}
|
||||
|
||||
let pairs = feedsAndFoldersProvider.feedsAndFolders
|
||||
|
||||
|
@ -68,9 +68,11 @@ final class FeedlyCreateFeedsForCollectionFoldersOperation: FeedlyOperation {
|
|||
}
|
||||
|
||||
// No existing feed, so create a new one.
|
||||
let id = collectionFeed.id
|
||||
let url = FeedlyFeedResourceId(id: id).url
|
||||
let feed = account.createWebFeed(with: collectionFeed.title, url: url, webFeedID: id, homePageURL: collectionFeed.website)
|
||||
let parser = FeedlyFeedParser(feed: collectionFeed)
|
||||
let feed = account.createWebFeed(with: parser.title,
|
||||
url: parser.url,
|
||||
webFeedID: parser.webFeedID,
|
||||
homePageURL: parser.homePageURL)
|
||||
|
||||
// So the same feed isn't created more than once.
|
||||
feedsAdded.insert(feed)
|
||||
|
|
|
@ -0,0 +1,97 @@
|
|||
//
|
||||
// FeedlyDownloadArticlesOperation.swift
|
||||
// Account
|
||||
//
|
||||
// Created by Kiel Gillard on 9/1/20.
|
||||
// Copyright © 2020 Ranchero Software, LLC. All rights reserved.
|
||||
//
|
||||
|
||||
import Foundation
|
||||
import os.log
|
||||
import RSCore
|
||||
import RSWeb
|
||||
|
||||
class FeedlyDownloadArticlesOperation: FeedlyOperation {
|
||||
|
||||
private let account: Account
|
||||
private let log: OSLog
|
||||
private let missingArticleEntryIdProvider: FeedlyEntryIdentifierProviding
|
||||
private let updatedArticleEntryIdProvider: FeedlyEntryIdentifierProviding
|
||||
private let getEntriesService: FeedlyGetEntriesService
|
||||
private let operationQueue = MainThreadOperationQueue()
|
||||
private let finishOperation: FeedlyCheckpointOperation
|
||||
|
||||
init(account: Account, missingArticleEntryIdProvider: FeedlyEntryIdentifierProviding, updatedArticleEntryIdProvider: FeedlyEntryIdentifierProviding, getEntriesService: FeedlyGetEntriesService, log: OSLog) {
|
||||
self.account = account
|
||||
self.operationQueue.suspend()
|
||||
self.missingArticleEntryIdProvider = missingArticleEntryIdProvider
|
||||
self.updatedArticleEntryIdProvider = updatedArticleEntryIdProvider
|
||||
self.getEntriesService = getEntriesService
|
||||
self.finishOperation = FeedlyCheckpointOperation()
|
||||
self.log = log
|
||||
super.init()
|
||||
self.finishOperation.checkpointDelegate = self
|
||||
self.operationQueue.add(self.finishOperation)
|
||||
}
|
||||
|
||||
override func run() {
|
||||
var articleIds = missingArticleEntryIdProvider.entryIds
|
||||
articleIds.formUnion(updatedArticleEntryIdProvider.entryIds)
|
||||
|
||||
os_log(.debug, log: log, "Requesting %{public}i articles.", articleIds.count)
|
||||
|
||||
let feedlyAPILimitBatchSize = 1000
|
||||
for articleIds in Array(articleIds).chunked(into: feedlyAPILimitBatchSize) {
|
||||
|
||||
let provider = FeedlyEntryIdentifierProvider(entryIds: Set(articleIds))
|
||||
let getEntries = FeedlyGetEntriesOperation(account: account, service: getEntriesService, provider: provider, log: log)
|
||||
getEntries.delegate = self
|
||||
self.operationQueue.add(getEntries)
|
||||
|
||||
let organiseByFeed = FeedlyOrganiseParsedItemsByFeedOperation(account: account,
|
||||
parsedItemProvider: getEntries,
|
||||
log: log)
|
||||
organiseByFeed.delegate = self
|
||||
organiseByFeed.addDependency(getEntries)
|
||||
self.operationQueue.add(organiseByFeed)
|
||||
|
||||
let updateAccount = FeedlyUpdateAccountFeedsWithItemsOperation(account: account,
|
||||
organisedItemsProvider: organiseByFeed,
|
||||
log: log)
|
||||
|
||||
updateAccount.delegate = self
|
||||
updateAccount.addDependency(organiseByFeed)
|
||||
self.operationQueue.add(updateAccount)
|
||||
|
||||
finishOperation.addDependency(updateAccount)
|
||||
}
|
||||
|
||||
operationQueue.resume()
|
||||
}
|
||||
|
||||
override func didCancel() {
|
||||
// TODO: fix error on below line: "Expression type '()' is ambiguous without more context"
|
||||
//os_log(.debug, log: log, "Cancelling %{public}@.", self)
|
||||
operationQueue.cancelAllOperations()
|
||||
super.didCancel()
|
||||
}
|
||||
}
|
||||
|
||||
extension FeedlyDownloadArticlesOperation: FeedlyCheckpointOperationDelegate {
|
||||
|
||||
func feedlyCheckpointOperationDidReachCheckpoint(_ operation: FeedlyCheckpointOperation) {
|
||||
didFinish()
|
||||
}
|
||||
}
|
||||
|
||||
extension FeedlyDownloadArticlesOperation: FeedlyOperationDelegate {
|
||||
|
||||
func feedlyOperation(_ operation: FeedlyOperation, didFailWith error: Error) {
|
||||
assert(Thread.isMainThread)
|
||||
|
||||
// Having this log is useful for debugging missing required JSON keys in the response from Feedly, for example.
|
||||
os_log(.debug, log: log, "%{public}@ failed with error: %{public}@.", String(describing: operation), error as NSError)
|
||||
|
||||
cancel()
|
||||
}
|
||||
}
|
|
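The operation above batches its requests because Feedly caps how many entries can be fetched at once (feedlyAPILimitBatchSize is 1000). The chunked(into:) helper is assumed to come from RSCore; an editorial equivalent is sketched here so the batching is concrete.

    extension Array {
        func chunked(into size: Int) -> [[Element]] {
            stride(from: 0, to: count, by: size).map {
                Array(self[$0 ..< Swift.min($0 + size, count)])
            }
        }
    }

    let articleIds = (1...2500).map { "entry/\($0)" }
    let batches = articleIds.chunked(into: 1000)
    // batches.count == 3: two full batches of 1000 and one of 500.
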
@ -0,0 +1,36 @@
|
|||
//
|
||||
// FeedlyFetchIdsForMissingArticlesOperation.swift
|
||||
// Account
|
||||
//
|
||||
// Created by Kiel Gillard on 7/1/20.
|
||||
// Copyright © 2020 Ranchero Software, LLC. All rights reserved.
|
||||
//
|
||||
|
||||
import Foundation
|
||||
import os.log
|
||||
|
||||
final class FeedlyFetchIdsForMissingArticlesOperation: FeedlyOperation, FeedlyEntryIdentifierProviding {
|
||||
|
||||
private let account: Account
|
||||
private let log: OSLog
|
||||
|
||||
private(set) var entryIds = Set<String>()
|
||||
|
||||
init(account: Account, log: OSLog) {
|
||||
self.account = account
|
||||
self.log = log
|
||||
}
|
||||
|
||||
override func run() {
|
||||
account.fetchArticleIDsForStatusesWithoutArticlesNewerThanCutoffDate { result in
|
||||
switch result {
|
||||
case .success(let articleIds):
|
||||
self.entryIds.formUnion(articleIds)
|
||||
self.didFinish()
|
||||
|
||||
case .failure(let error):
|
||||
self.didFinish(with: error)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -13,25 +13,20 @@ protocol FeedlyCollectionProviding: class {
|
|||
var collections: [FeedlyCollection] { get }
|
||||
}
|
||||
|
||||
/// Single responsibility is to get Collections from Feedly.
|
||||
/// Get Collections from Feedly.
|
||||
final class FeedlyGetCollectionsOperation: FeedlyOperation, FeedlyCollectionProviding {
|
||||
|
||||
let service: FeedlyGetCollectionsService
|
||||
let log: OSLog
|
||||
|
||||
private(set) var collections = [FeedlyCollection]()
|
||||
|
||||
|
||||
init(service: FeedlyGetCollectionsService, log: OSLog) {
|
||||
self.service = service
|
||||
self.log = log
|
||||
}
|
||||
|
||||
override func main() {
|
||||
guard !isCancelled else {
|
||||
didFinish()
|
||||
return
|
||||
}
|
||||
|
||||
override func run() {
|
||||
os_log(.debug, log: log, "Requesting collections.")
|
||||
|
||||
service.getCollections { result in
|
||||
|
@ -43,7 +38,7 @@ final class FeedlyGetCollectionsOperation: FeedlyOperation, FeedlyCollectionProv
|
|||
|
||||
case .failure(let error):
|
||||
os_log(.debug, log: self.log, "Unable to request collections: %{public}@.", error as NSError)
|
||||
self.didFinish(error)
|
||||
self.didFinish(with: error)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -8,15 +8,17 @@
|
|||
|
||||
import Foundation
|
||||
import os.log
|
||||
import RSParser
|
||||
|
||||
/// Get full entries for the entry identifiers.
|
||||
final class FeedlyGetEntriesOperation: FeedlyOperation, FeedlyEntryProviding, FeedlyParsedItemProviding {
|
||||
|
||||
/// Single responsibility is to get full entries for the entry identifiers.
|
||||
final class FeedlyGetEntriesOperation: FeedlyOperation, FeedlyEntryProviding {
|
||||
let account: Account
|
||||
let service: FeedlyGetEntriesService
|
||||
let provider: FeedlyEntryIdenifierProviding
|
||||
let provider: FeedlyEntryIdentifierProviding
|
||||
let log: OSLog
|
||||
|
||||
init(account: Account, service: FeedlyGetEntriesService, provider: FeedlyEntryIdenifierProviding, log: OSLog) {
|
||||
|
||||
init(account: Account, service: FeedlyGetEntriesService, provider: FeedlyEntryIdentifierProviding, log: OSLog) {
|
||||
self.account = account
|
||||
self.service = service
|
||||
self.provider = provider
|
||||
|
@ -25,12 +27,35 @@ final class FeedlyGetEntriesOperation: FeedlyOperation, FeedlyEntryProviding {
|
|||
|
||||
private (set) var entries = [FeedlyEntry]()
|
||||
|
||||
override func main() {
|
||||
guard !isCancelled else {
|
||||
didFinish()
|
||||
return
|
||||
private var storedParsedEntries: Set<ParsedItem>?
|
||||
|
||||
var parsedEntries: Set<ParsedItem> {
|
||||
if let entries = storedParsedEntries {
|
||||
return entries
|
||||
}
|
||||
|
||||
let parsed = Set(entries.compactMap {
|
||||
FeedlyEntryParser(entry: $0).parsedItemRepresentation
|
||||
})
|
||||
|
||||
// TODO: Fix the below. There’s an error on the os.log line: "Expression type '()' is ambiguous without more context"
|
||||
// if parsed.count != entries.count {
|
||||
// let entryIds = Set(entries.map { $0.id })
|
||||
// let parsedIds = Set(parsed.map { $0.uniqueID })
|
||||
// let difference = entryIds.subtracting(parsedIds)
|
||||
// os_log(.debug, log: log, "%{public}@ dropping articles with ids: %{public}@.", self, difference)
|
||||
// }
|
||||
|
||||
storedParsedEntries = parsed
|
||||
|
||||
return parsed
|
||||
}
|
||||
|
||||
var parsedItemProviderName: String {
|
||||
return name ?? String(describing: Self.self)
|
||||
}
|
||||
|
||||
override func run() {
|
||||
service.getEntries(for: provider.entryIds) { result in
|
||||
switch result {
|
||||
case .success(let entries):
|
||||
|
@ -39,7 +64,7 @@ final class FeedlyGetEntriesOperation: FeedlyOperation, FeedlyEntryProviding {
|
|||
|
||||
case .failure(let error):
|
||||
os_log(.debug, log: self.log, "Unable to get entries: %{public}@.", error as NSError)
|
||||
self.didFinish(error)
|
||||
self.didFinish(with: error)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -15,7 +15,7 @@ protocol FeedlyEntryProviding {
|
|||
}
|
||||
|
||||
protocol FeedlyParsedItemProviding {
|
||||
var resource: FeedlyResourceId { get }
|
||||
var parsedItemProviderName: String { get }
|
||||
var parsedEntries: Set<ParsedItem> { get }
|
||||
}
|
||||
|
||||
|
@ -23,7 +23,7 @@ protocol FeedlyGetStreamContentsOperationDelegate: class {
|
|||
func feedlyGetStreamContentsOperation(_ operation: FeedlyGetStreamContentsOperation, didGetContentsOf stream: FeedlyStream)
|
||||
}
|
||||
|
||||
/// Single responsibility is to get the stream content of a Collection from Feedly.
|
||||
/// Get the stream content of a Collection from Feedly.
|
||||
final class FeedlyGetStreamContentsOperation: FeedlyOperation, FeedlyEntryProviding, FeedlyParsedItemProviding {
|
||||
|
||||
struct ResourceProvider: FeedlyResourceProviding {
|
||||
|
@ -32,13 +32,13 @@ final class FeedlyGetStreamContentsOperation: FeedlyOperation, FeedlyEntryProvid
|
|||
|
||||
let resourceProvider: FeedlyResourceProviding
|
||||
|
||||
var resource: FeedlyResourceId {
|
||||
return resourceProvider.resource
|
||||
var parsedItemProviderName: String {
|
||||
return resourceProvider.resource.id
|
||||
}
|
||||
|
||||
var entries: [FeedlyEntry] {
|
||||
guard let entries = stream?.items else {
|
||||
assert(isFinished, "This should only be called when the operation finishes without error.")
|
||||
// assert(isFinished, "This should only be called when the operation finishes without error.")
|
||||
assertionFailure("Has this operation been addeded as a dependency on the caller?")
|
||||
return []
|
||||
}
|
||||
|
@ -50,7 +50,17 @@ final class FeedlyGetStreamContentsOperation: FeedlyOperation, FeedlyEntryProvid
|
|||
return entries
|
||||
}
|
||||
|
||||
let parsed = Set(entries.map { FeedlyEntryParser(entry: $0).parsedItemRepresentation })
|
||||
let parsed = Set(entries.compactMap {
|
||||
FeedlyEntryParser(entry: $0).parsedItemRepresentation
|
||||
})
|
||||
|
||||
if parsed.count != entries.count {
|
||||
let entryIds = Set(entries.map { $0.id })
|
||||
let parsedIds = Set(parsed.map { $0.uniqueID })
|
||||
let difference = entryIds.subtracting(parsedIds)
|
||||
os_log(.debug, log: log, "Dropping articles with ids: %{public}@.", difference)
|
||||
}
|
||||
|
||||
storedParsedEntries = parsed
|
||||
|
||||
return parsed
|
||||
|
@ -72,7 +82,7 @@ final class FeedlyGetStreamContentsOperation: FeedlyOperation, FeedlyEntryProvid
|
|||
let log: OSLog
|
||||
|
||||
weak var streamDelegate: FeedlyGetStreamContentsOperationDelegate?
|
||||
|
||||
|
||||
init(account: Account, resource: FeedlyResourceId, service: FeedlyGetStreamContentsService, continuation: String? = nil, newerThan: Date?, unreadOnly: Bool? = nil, log: OSLog) {
|
||||
self.account = account
|
||||
self.resourceProvider = ResourceProvider(resource: resource)
|
||||
|
@ -87,12 +97,7 @@ final class FeedlyGetStreamContentsOperation: FeedlyOperation, FeedlyEntryProvid
|
|||
self.init(account: account, resource: resourceProvider.resource, service: service, newerThan: newerThan, unreadOnly: unreadOnly, log: log)
|
||||
}
|
||||
|
||||
override func main() {
|
||||
guard !isCancelled else {
|
||||
didFinish()
|
||||
return
|
||||
}
|
||||
|
||||
override func run() {
|
||||
service.getStreamContents(for: resourceProvider.resource, continuation: continuation, newerThan: newerThan, unreadOnly: unreadOnly) { result in
|
||||
switch result {
|
||||
case .success(let stream):
|
||||
|
@ -104,7 +109,7 @@ final class FeedlyGetStreamContentsOperation: FeedlyOperation, FeedlyEntryProvid
|
|||
|
||||
case .failure(let error):
|
||||
os_log(.debug, log: self.log, "Unable to get stream contents: %{public}@.", error as NSError)
|
||||
self.didFinish(error)
|
||||
self.didFinish(with: error)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -9,21 +9,15 @@
|
|||
import Foundation
|
||||
import os.log
|
||||
|
||||
protocol FeedlyEntryIdenifierProviding: class {
|
||||
var resource: FeedlyResourceId { get }
|
||||
var entryIds: Set<String> { get }
|
||||
}
|
||||
|
||||
protocol FeedlyGetStreamIdsOperationDelegate: class {
|
||||
func feedlyGetStreamIdsOperation(_ operation: FeedlyGetStreamIdsOperation, didGet streamIds: FeedlyStreamIds)
|
||||
}
|
||||
|
||||
/// Single responsibility is to get the stream ids from Feedly.
|
||||
final class FeedlyGetStreamIdsOperation: FeedlyOperation, FeedlyEntryIdenifierProviding, FeedlyUnreadEntryIdProviding {
|
||||
final class FeedlyGetStreamIdsOperation: FeedlyOperation, FeedlyEntryIdentifierProviding {
|
||||
|
||||
var entryIds: Set<String> {
|
||||
guard let ids = streamIds?.ids else {
|
||||
assert(isFinished, "This should only be called when the operation finishes without error.")
|
||||
assertionFailure("Has this operation been addeded as a dependency on the caller?")
|
||||
return []
|
||||
}
|
||||
|
@ -39,7 +33,7 @@ final class FeedlyGetStreamIdsOperation: FeedlyOperation, FeedlyEntryIdenifierPr
|
|||
let unreadOnly: Bool?
|
||||
let newerThan: Date?
|
||||
let log: OSLog
|
||||
|
||||
|
||||
init(account: Account, resource: FeedlyResourceId, service: FeedlyGetStreamIdsService, continuation: String? = nil, newerThan: Date? = nil, unreadOnly: Bool?, log: OSLog) {
|
||||
self.account = account
|
||||
self.resource = resource
|
||||
|
@ -52,12 +46,7 @@ final class FeedlyGetStreamIdsOperation: FeedlyOperation, FeedlyEntryIdenifierPr
|
|||
|
||||
weak var streamIdsDelegate: FeedlyGetStreamIdsOperationDelegate?
|
||||
|
||||
override func main() {
|
||||
guard !isCancelled else {
|
||||
didFinish()
|
||||
return
|
||||
}
|
||||
|
||||
override func run() {
|
||||
service.getStreamIds(for: resource, continuation: continuation, newerThan: newerThan, unreadOnly: unreadOnly) { result in
|
||||
switch result {
|
||||
case .success(let stream):
|
||||
|
@ -69,7 +58,7 @@ final class FeedlyGetStreamIdsOperation: FeedlyOperation, FeedlyEntryIdenifierPr
|
|||
|
||||
case .failure(let error):
|
||||
os_log(.debug, log: self.log, "Unable to get stream ids: %{public}@.", error as NSError)
|
||||
self.didFinish(error)
|
||||
self.didFinish(with: error)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,79 @@
|
|||
//
|
||||
// FeedlyGetUpdatedArticleIdsOperation.swift
|
||||
// Account
|
||||
//
|
||||
// Created by Kiel Gillard on 11/1/20.
|
||||
// Copyright © 2020 Ranchero Software, LLC. All rights reserved.
|
||||
//
|
||||
|
||||
import Foundation
|
||||
import os.log
|
||||
|
||||
/// Single responsibility is to identify articles that have changed since a particular date.
|
||||
///
|
||||
/// Typically, it pages through the article ids of the global.all stream.
|
||||
/// When all the article ids are collected, it is the responsibility of another operation to download them when appropriate.
|
||||
class FeedlyGetUpdatedArticleIdsOperation: FeedlyOperation, FeedlyEntryIdentifierProviding {
|
||||
|
||||
private let account: Account
|
||||
private let resource: FeedlyResourceId
|
||||
private let service: FeedlyGetStreamIdsService
|
||||
private let newerThan: Date?
|
||||
private let log: OSLog
|
||||
|
||||
init(account: Account, resource: FeedlyResourceId, service: FeedlyGetStreamIdsService, newerThan: Date?, log: OSLog) {
|
||||
self.account = account
|
||||
self.resource = resource
|
||||
self.service = service
|
||||
self.newerThan = newerThan
|
||||
self.log = log
|
||||
}
|
||||
|
||||
convenience init(account: Account, credentials: Credentials, service: FeedlyGetStreamIdsService, newerThan: Date?, log: OSLog) {
|
||||
let all = FeedlyCategoryResourceId.Global.all(for: credentials.username)
|
||||
self.init(account: account, resource: all, service: service, newerThan: newerThan, log: log)
|
||||
}
|
||||
|
||||
var entryIds: Set<String> {
|
||||
return storedUpdatedArticleIds
|
||||
}
|
||||
|
||||
private var storedUpdatedArticleIds = Set<String>()
|
||||
|
||||
override func run() {
|
||||
getStreamIds(nil)
|
||||
}
|
||||
|
||||
private func getStreamIds(_ continuation: String?) {
|
||||
guard let date = newerThan else {
|
||||
os_log(.debug, log: log, "No date provided so everything must be new (nothing is updated).")
|
||||
didFinish()
|
||||
return
|
||||
}
|
||||
|
||||
service.getStreamIds(for: resource, continuation: continuation, newerThan: date, unreadOnly: nil, completion: didGetStreamIds(_:))
|
||||
}
|
||||
|
||||
private func didGetStreamIds(_ result: Result<FeedlyStreamIds, Error>) {
|
||||
guard !isCanceled else {
|
||||
didFinish()
|
||||
return
|
||||
}
|
||||
|
||||
switch result {
|
||||
case .success(let streamIds):
|
||||
storedUpdatedArticleIds.formUnion(streamIds.ids)
|
||||
|
||||
guard let continuation = streamIds.continuation else {
|
||||
os_log(.debug, log: log, "%{public}i articles updated since last successful sync start date.", storedUpdatedArticleIds.count)
|
||||
didFinish()
|
||||
return
|
||||
}
|
||||
|
||||
getStreamIds(continuation)
|
||||
|
||||
case .failure(let error):
|
||||
didFinish(with: error)
|
||||
}
|
||||
}
|
||||
}
|
|
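An editorial sketch of the paging contract this operation, and the ingest operations that follow, rely on: keep requesting pages and accumulating ids until a page comes back with no continuation. The canned pages stand in for FeedlyGetStreamIdsService responses; FeedlyStreamIds is the struct from earlier in this diff.

    let pages = [
        FeedlyStreamIds(continuation: "page-2", ids: ["entry/1", "entry/2"]),
        FeedlyStreamIds(continuation: nil, ids: ["entry/3"])
    ]

    var collected = Set<String>()
    for page in pages {
        collected.formUnion(page.ids)
        if page.isStreamEnd { break } // no continuation: the stream is exhausted
        // The real code passes page.continuation to the next getStreamIds call.
    }
    // collected now holds "entry/1", "entry/2" and "entry/3".
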
@ -0,0 +1,148 @@
|
|||
//
|
||||
// FeedlyIngestStarredArticleIdsOperation.swift
|
||||
// Account
|
||||
//
|
||||
// Created by Kiel Gillard on 15/10/19.
|
||||
// Copyright © 2019 Ranchero Software, LLC. All rights reserved.
|
||||
//
|
||||
|
||||
import Foundation
|
||||
import os.log
|
||||
import SyncDatabase
|
||||
|
||||
/// Clone locally the remote starred article state.
|
||||
///
|
||||
/// Typically, it pages through the article ids of the global.saved stream.
|
||||
/// When all the article ids are collected, a status is created for each.
|
||||
/// The article ids previously marked as starred but not collected become unstarred.
|
||||
/// So this operation has side effects *for the entire account* it operates on.
|
||||
final class FeedlyIngestStarredArticleIdsOperation: FeedlyOperation {
|
||||
|
||||
private let account: Account
|
||||
private let resource: FeedlyResourceId
|
||||
private let service: FeedlyGetStreamIdsService
|
||||
private let database: SyncDatabase
|
||||
private var remoteEntryIds = Set<String>()
|
||||
private let log: OSLog
|
||||
|
||||
convenience init(account: Account, credentials: Credentials, service: FeedlyGetStreamIdsService, database: SyncDatabase, newerThan: Date?, log: OSLog) {
|
||||
let resource = FeedlyTagResourceId.Global.saved(for: credentials.username)
|
||||
self.init(account: account, resource: resource, service: service, database: database, newerThan: newerThan, log: log)
|
||||
}
|
||||
|
||||
init(account: Account, resource: FeedlyResourceId, service: FeedlyGetStreamIdsService, database: SyncDatabase, newerThan: Date?, log: OSLog) {
|
||||
self.account = account
|
||||
self.resource = resource
|
||||
self.service = service
|
||||
self.database = database
|
||||
self.log = log
|
||||
}
|
||||
|
||||
override func run() {
|
||||
getStreamIds(nil)
|
||||
}
|
||||
|
||||
private func getStreamIds(_ continuation: String?) {
|
||||
service.getStreamIds(for: resource, continuation: continuation, newerThan: nil, unreadOnly: nil, completion: didGetStreamIds(_:))
|
||||
}
|
||||
|
||||
private func didGetStreamIds(_ result: Result<FeedlyStreamIds, Error>) {
|
||||
guard !isCanceled else {
|
||||
didFinish()
|
||||
return
|
||||
}
|
||||
|
||||
switch result {
|
||||
case .success(let streamIds):
|
||||
|
||||
remoteEntryIds.formUnion(streamIds.ids)
|
||||
|
||||
guard let continuation = streamIds.continuation else {
|
||||
removeEntryIdsWithPendingStatus()
|
||||
return
|
||||
}
|
||||
|
||||
getStreamIds(continuation)
|
||||
|
||||
case .failure(let error):
|
||||
didFinish(with: error)
|
||||
}
|
||||
}
|
||||
|
||||
/// Do not override pending statuses with the remote statuses of the same articles, otherwise an article will temporarily re-acquire the remote status before the pending status is pushed and subsequently pulled.
|
||||
private func removeEntryIdsWithPendingStatus() {
|
||||
guard !isCanceled else {
|
||||
didFinish()
|
||||
return
|
||||
}
|
||||
|
||||
database.selectPendingStarredStatusArticleIDs { result in
|
||||
switch result {
|
||||
case .success(let pendingArticleIds):
|
||||
self.remoteEntryIds.subtract(pendingArticleIds)
|
||||
|
||||
self.updateStarredStatuses()
|
||||
|
||||
case .failure(let error):
|
||||
self.didFinish(with: error)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private func updateStarredStatuses() {
|
||||
guard !isCanceled else {
|
||||
didFinish()
|
||||
return
|
||||
}
|
||||
|
||||
account.fetchStarredArticleIDs { result in
|
||||
switch result {
|
||||
case .success(let localStarredArticleIDs):
|
||||
self.processStarredArticleIDs(localStarredArticleIDs)
|
||||
|
||||
case .failure(let error):
|
||||
self.didFinish(with: error)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func processStarredArticleIDs(_ localStarredArticleIDs: Set<String>) {
|
||||
guard !isCanceled else {
|
||||
didFinish()
|
||||
return
|
||||
}
|
||||
|
||||
let remoteStarredArticleIDs = remoteEntryIds
|
||||
|
||||
let group = DispatchGroup()
|
||||
|
||||
final class StarredStatusResults {
|
||||
var markAsStarredError: Error?
|
||||
var markAsUnstarredError: Error?
|
||||
}
|
||||
|
||||
let results = StarredStatusResults()
|
||||
|
||||
group.enter()
|
||||
account.markAsStarred(remoteStarredArticleIDs) { error in
|
||||
results.markAsStarredError = error
|
||||
group.leave()
|
||||
}
|
||||
|
||||
let deltaUnstarredArticleIDs = localStarredArticleIDs.subtracting(remoteStarredArticleIDs)
|
||||
group.enter()
|
||||
account.markAsUnstarred(deltaUnstarredArticleIDs) { error in
|
||||
results.markAsUnstarredError = error
|
||||
group.leave()
|
||||
}
|
||||
|
||||
group.notify(queue: .main) {
|
||||
let markingError = results.markAsStarredError ?? results.markAsUnstarredError
|
||||
guard let error = markingError else {
|
||||
self.didFinish()
|
||||
return
|
||||
}
|
||||
self.didFinish(with: error)
|
||||
}
|
||||
}
|
||||
}
|
|
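An editorial reduction of the reconciliation above to plain set arithmetic, since that is the heart of the operation: the remote ids are marked starred locally, and anything starred locally but absent from the remote set is unstarred. The id values are illustrative.

    let remoteStarred: Set<String> = ["entry/a", "entry/b"]
    let locallyStarred: Set<String> = ["entry/b", "entry/c"]

    // Everything the service reports as starred is (re)marked starred locally...
    let toMarkStarred = remoteStarred                               // entry/a, entry/b
    // ...and anything starred only locally is unstarred.
    let toMarkUnstarred = locallyStarred.subtracting(remoteStarred) // entry/c
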
@ -0,0 +1,71 @@
|
|||
//
|
||||
// FeedlyIngestStreamArticleIdsOperation.swift
|
||||
// Account
|
||||
//
|
||||
// Created by Kiel Gillard on 9/1/20.
|
||||
// Copyright © 2020 Ranchero Software, LLC. All rights reserved.
|
||||
//
|
||||
|
||||
import Foundation
|
||||
import os.log
|
||||
|
||||
/// Ensure a status exists for every article id the user might be interested in.
|
||||
///
|
||||
/// Typically, it pages through the article ids of the global.all stream.
|
||||
/// As the article ids are collected, a default read status is created for each.
|
||||
/// So this operation has side effects *for the entire account* it operates on.
|
||||
class FeedlyIngestStreamArticleIdsOperation: FeedlyOperation {
|
||||
|
||||
private let account: Account
|
||||
private let resource: FeedlyResourceId
|
||||
private let service: FeedlyGetStreamIdsService
|
||||
private let log: OSLog
|
||||
|
||||
init(account: Account, resource: FeedlyResourceId, service: FeedlyGetStreamIdsService, log: OSLog) {
|
||||
self.account = account
|
||||
self.resource = resource
|
||||
self.service = service
|
||||
self.log = log
|
||||
}
|
||||
|
||||
convenience init(account: Account, credentials: Credentials, service: FeedlyGetStreamIdsService, log: OSLog) {
|
||||
let all = FeedlyCategoryResourceId.Global.all(for: credentials.username)
|
||||
self.init(account: account, resource: all, service: service, log: log)
|
||||
}
|
||||
|
||||
override func run() {
|
||||
getStreamIds(nil)
|
||||
}
|
||||
|
||||
private func getStreamIds(_ continuation: String?) {
|
||||
service.getStreamIds(for: resource, continuation: continuation, newerThan: nil, unreadOnly: nil, completion: didGetStreamIds(_:))
|
||||
}
|
||||
|
||||
private func didGetStreamIds(_ result: Result<FeedlyStreamIds, Error>) {
|
||||
guard !isCanceled else {
|
||||
didFinish()
|
||||
return
|
||||
}
|
||||
|
||||
switch result {
|
||||
case .success(let streamIds):
|
||||
account.createStatusesIfNeeded(articleIDs: Set(streamIds.ids)) { databaseError in
|
||||
|
||||
if let error = databaseError {
|
||||
self.didFinish(with: error)
|
||||
return
|
||||
}
|
||||
|
||||
guard let continuation = streamIds.continuation else {
|
||||
os_log(.debug, log: self.log, "Reached end of stream for %@", self.resource.id)
|
||||
self.didFinish()
|
||||
return
|
||||
}
|
||||
|
||||
self.getStreamIds(continuation)
|
||||
}
|
||||
case .failure(let error):
|
||||
didFinish(with: error)
|
||||
}
|
||||
}
|
||||
}
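// Illustrative sketch (not part of this commit): the continuation-based paging loop that
// run()/getStreamIds(_:) implement above, written against a hypothetical fetchPage function
// so it stands alone.
struct Page {
    let ids: [String]
    let continuation: String?   // nil once the final page has been reached
}

func ingestAllPages(continuation: String? = nil,
                    fetchPage: @escaping (String?, @escaping (Page) -> Void) -> Void,
                    process: @escaping ([String]) -> Void) {
    fetchPage(continuation) { page in
        process(page.ids)                                   // e.g. createStatusesIfNeeded(articleIDs:)
        guard let next = page.continuation else { return }  // end of stream
        ingestAllPages(continuation: next, fetchPage: fetchPage, process: process)
    }
}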
|
|
@@ -0,0 +1,148 @@
|
|||
//
|
||||
// FeedlyIngestUnreadArticleIdsOperation.swift
|
||||
// Account
|
||||
//
|
||||
// Created by Kiel Gillard on 18/10/19.
|
||||
// Copyright © 2019 Ranchero Software, LLC. All rights reserved.
|
||||
//
|
||||
|
||||
import Foundation
|
||||
import os.log
|
||||
import RSParser
|
||||
import SyncDatabase
|
||||
|
||||
/// Clone locally the remote unread article state.
|
||||
///
|
||||
/// Typically, it pages through the unread article ids of the global.all stream.
|
||||
/// When all the unread article ids are collected, a status is created for each.
|
||||
/// The article ids previously marked as unread but not collected become read.
|
||||
/// So this operation has side effects *for the entire account* it operates on.
|
||||
final class FeedlyIngestUnreadArticleIdsOperation: FeedlyOperation {
|
||||
|
||||
private let account: Account
|
||||
private let resource: FeedlyResourceId
|
||||
private let service: FeedlyGetStreamIdsService
|
||||
private let database: SyncDatabase
|
||||
private var remoteEntryIds = Set<String>()
|
||||
private let log: OSLog
|
||||
|
||||
convenience init(account: Account, credentials: Credentials, service: FeedlyGetStreamIdsService, database: SyncDatabase, newerThan: Date?, log: OSLog) {
|
||||
let resource = FeedlyCategoryResourceId.Global.all(for: credentials.username)
|
||||
self.init(account: account, resource: resource, service: service, database: database, newerThan: newerThan, log: log)
|
||||
}
|
||||
|
||||
init(account: Account, resource: FeedlyResourceId, service: FeedlyGetStreamIdsService, database: SyncDatabase, newerThan: Date?, log: OSLog) {
|
||||
self.account = account
|
||||
self.resource = resource
|
||||
self.service = service
|
||||
self.database = database
|
||||
self.log = log
|
||||
}
|
||||
|
||||
override func run() {
|
||||
getStreamIds(nil)
|
||||
}
|
||||
|
||||
private func getStreamIds(_ continuation: String?) {
|
||||
service.getStreamIds(for: resource, continuation: continuation, newerThan: nil, unreadOnly: true, completion: didGetStreamIds(_:))
|
||||
}
|
||||
|
||||
private func didGetStreamIds(_ result: Result<FeedlyStreamIds, Error>) {
|
||||
guard !isCanceled else {
|
||||
didFinish()
|
||||
return
|
||||
}
|
||||
|
||||
switch result {
|
||||
case .success(let streamIds):
|
||||
|
||||
remoteEntryIds.formUnion(streamIds.ids)
|
||||
|
||||
guard let continuation = streamIds.continuation else {
|
||||
removeEntryIdsWithPendingStatus()
|
||||
return
|
||||
}
|
||||
|
||||
getStreamIds(continuation)
|
||||
|
||||
case .failure(let error):
|
||||
didFinish(with: error)
|
||||
}
|
||||
}
|
||||
|
||||
/// Do not override pending statuses with the remote statuses of the same articles; otherwise an article will temporarily re-acquire the remote status before the pending status is pushed and subsequently pulled.
|
||||
private func removeEntryIdsWithPendingStatus() {
|
||||
guard !isCanceled else {
|
||||
didFinish()
|
||||
return
|
||||
}
|
||||
|
||||
database.selectPendingReadStatusArticleIDs { result in
|
||||
switch result {
|
||||
case .success(let pendingArticleIds):
|
||||
self.remoteEntryIds.subtract(pendingArticleIds)
|
||||
|
||||
self.updateUnreadStatuses()
|
||||
|
||||
case .failure(let error):
|
||||
self.didFinish(with: error)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private func updateUnreadStatuses() {
|
||||
guard !isCanceled else {
|
||||
didFinish()
|
||||
return
|
||||
}
|
||||
|
||||
account.fetchUnreadArticleIDs { result in
|
||||
switch result {
|
||||
case .success(let localUnreadArticleIDs):
|
||||
self.processUnreadArticleIDs(localUnreadArticleIDs)
|
||||
|
||||
case .failure(let error):
|
||||
self.didFinish(with: error)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private func processUnreadArticleIDs(_ localUnreadArticleIDs: Set<String>) {
|
||||
guard !isCanceled else {
|
||||
didFinish()
|
||||
return
|
||||
}
|
||||
|
||||
let remoteUnreadArticleIDs = remoteEntryIds
|
||||
let group = DispatchGroup()
|
||||
|
||||
final class ReadStatusResults {
|
||||
var markAsUnreadError: Error?
|
||||
var markAsReadError: Error?
|
||||
}
|
||||
|
||||
let results = ReadStatusResults()
|
||||
|
||||
group.enter()
|
||||
account.markAsUnread(remoteUnreadArticleIDs) { error in
|
||||
results.markAsUnreadError = error
|
||||
group.leave()
|
||||
}
|
||||
|
||||
let articleIDsToMarkRead = localUnreadArticleIDs.subtracting(remoteUnreadArticleIDs)
|
||||
group.enter()
|
||||
account.markAsRead(articleIDsToMarkRead) { error in
|
||||
results.markAsReadError = error
|
||||
group.leave()
|
||||
}
|
||||
|
||||
group.notify(queue: .main) {
|
||||
let markingError = results.markAsReadError ?? results.markAsUnreadError
|
||||
guard let error = markingError else {
|
||||
self.didFinish()
|
||||
return
|
||||
}
|
||||
self.didFinish(with: error)
|
||||
}
|
||||
}
|
||||
}
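// Illustrative sketch (not part of this commit): why removeEntryIdsWithPendingStatus()
// subtracts pending ids before the remote unread state is applied. The ids are hypothetical.
let remoteUnread: Set<String> = ["1", "2", "3"]   // Feedly still reports 3 as unread
let pendingLocal: Set<String> = ["3"]             // the user just read 3; the change hasn't been pushed yet

// Without the subtraction, 3 would briefly flip back to unread and only return to read
// after the pending status is pushed and pulled again.
let applyAsUnread = remoteUnread.subtracting(pendingLocal)   // ["1", "2"]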
|
|
@@ -14,6 +14,7 @@ protocol FeedlyLogoutService {
|
|||
}
|
||||
|
||||
final class FeedlyLogoutOperation: FeedlyOperation {
|
||||
|
||||
let service: FeedlyLogoutService
|
||||
let account: Account
|
||||
let log: OSLog
|
||||
|
@@ -24,11 +25,7 @@ final class FeedlyLogoutOperation: FeedlyOperation {
|
|||
self.log = log
|
||||
}
|
||||
|
||||
override func main() {
|
||||
guard !isCancelled else {
|
||||
didFinish()
|
||||
return
|
||||
}
|
||||
override func run() {
|
||||
os_log("Requesting logout of %{public}@ account.", "\(account.type)")
|
||||
service.logout(completion: didCompleteLogout(_:))
|
||||
}
|
||||
|
@@ -48,7 +45,7 @@ final class FeedlyLogoutOperation: FeedlyOperation {
|
|||
|
||||
case .failure(let error):
|
||||
os_log("Logout failed because %{public}@.", error as NSError)
|
||||
didFinish(error)
|
||||
didFinish(with: error)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -9,61 +9,55 @@
|
|||
import Foundation
|
||||
import os.log
|
||||
|
||||
protocol FeedlyCollectionsAndFoldersProviding: class {
|
||||
var collectionsAndFolders: [(FeedlyCollection, Folder)] { get }
|
||||
}
|
||||
|
||||
protocol FeedlyFeedsAndFoldersProviding {
|
||||
var feedsAndFolders: [([FeedlyFeed], Folder)] { get }
|
||||
}
|
||||
|
||||
/// Single responsibility is accurately reflect Collections from Feedly as Folders.
|
||||
final class FeedlyMirrorCollectionsAsFoldersOperation: FeedlyOperation, FeedlyCollectionsAndFoldersProviding, FeedlyFeedsAndFoldersProviding {
|
||||
/// Reflect Collections from Feedly as Folders.
|
||||
final class FeedlyMirrorCollectionsAsFoldersOperation: FeedlyOperation, FeedlyFeedsAndFoldersProviding {
|
||||
|
||||
let account: Account
|
||||
let collectionsProvider: FeedlyCollectionProviding
|
||||
let log: OSLog
|
||||
|
||||
private(set) var collectionsAndFolders = [(FeedlyCollection, Folder)]()
|
||||
private(set) var feedsAndFolders = [([FeedlyFeed], Folder)]()
|
||||
|
||||
|
||||
init(account: Account, collectionsProvider: FeedlyCollectionProviding, log: OSLog) {
|
||||
self.collectionsProvider = collectionsProvider
|
||||
self.account = account
|
||||
self.log = log
|
||||
}
|
||||
|
||||
override func main() {
|
||||
defer { didFinish() }
|
||||
|
||||
guard !isCancelled else { return }
|
||||
override func run() {
|
||||
defer {
|
||||
didFinish()
|
||||
}
|
||||
|
||||
let localFolders = account.folders ?? Set()
|
||||
let collections = collectionsProvider.collections
|
||||
|
||||
let pairs = collections.compactMap { collection -> (FeedlyCollection, Folder)? in
|
||||
guard let folder = account.ensureFolder(with: collection.label) else {
|
||||
feedsAndFolders = collections.compactMap { collection -> ([FeedlyFeed], Folder)? in
|
||||
let parser = FeedlyCollectionParser(collection: collection)
|
||||
guard let folder = account.ensureFolder(with: parser.folderName) else {
|
||||
assertionFailure("Why wasn't a folder created?")
|
||||
return nil
|
||||
}
|
||||
folder.externalID = collection.id
|
||||
return (collection, folder)
|
||||
}
|
||||
|
||||
collectionsAndFolders = pairs
|
||||
os_log(.debug, log: log, "Ensured %i folders for %i collections.", pairs.count, collections.count)
|
||||
|
||||
feedsAndFolders = pairs.map { (collection, folder) -> (([FeedlyFeed], Folder)) in
|
||||
folder.externalID = parser.externalID
|
||||
return (collection.feeds, folder)
|
||||
}
|
||||
|
||||
// Remove folders without a corresponding collection
|
||||
let collectionFolders = Set(pairs.map { $0.1 })
|
||||
let foldersWithoutCollections = localFolders.subtracting(collectionFolders)
|
||||
for unmatched in foldersWithoutCollections {
|
||||
account.removeFolder(unmatched)
|
||||
}
|
||||
os_log(.debug, log: log, "Ensured %i folders for %i collections.", feedsAndFolders.count, collections.count)
|
||||
|
||||
os_log(.debug, log: log, "Removed %i folders: %@", foldersWithoutCollections.count, foldersWithoutCollections.map { $0.externalID ?? $0.nameForDisplay })
|
||||
// Remove folders without a corresponding collection
|
||||
let collectionFolders = Set(feedsAndFolders.map { $0.1 })
|
||||
let foldersWithoutCollections = localFolders.subtracting(collectionFolders)
|
||||
|
||||
if !foldersWithoutCollections.isEmpty {
|
||||
for unmatched in foldersWithoutCollections {
|
||||
account.removeFolder(unmatched)
|
||||
}
|
||||
|
||||
os_log(.debug, log: log, "Removed %i folders: %@", foldersWithoutCollections.count, foldersWithoutCollections.map { $0.externalID ?? $0.nameForDisplay })
|
||||
}
|
||||
}
|
||||
}
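// Illustrative sketch (not part of this commit): the reconciliation run() performs,
// reduced to sets of names. The collection and folder names are hypothetical.
let remoteCollections: Set<String> = ["News", "Tech"]           // Feedly collections
let localFolders: Set<String> = ["News", "Tech", "Old Stuff"]   // folders in the account

// ensureFolder(with:) is called for "News" and "Tech"; "Old Stuff" has no
// corresponding collection, so removeFolder(_:) is called for it.
let foldersToRemove = localFolders.subtracting(remoteCollections)   // ["Old Stuff"]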
|
||||
|
|
|
@@ -8,97 +8,55 @@
|
|||
|
||||
import Foundation
|
||||
import RSWeb
|
||||
import RSCore
|
||||
|
||||
protocol FeedlyOperationDelegate: class {
|
||||
func feedlyOperation(_ operation: FeedlyOperation, didFailWith error: Error)
|
||||
}
|
||||
|
||||
/// Abstract class common to all the tasks required to ingest content from Feedly into NetNewsWire.
|
||||
/// Each task should try to have a single responsibility so they can be easily composed with others.
|
||||
class FeedlyOperation: Operation {
|
||||
|
||||
/// Abstract base class for Feedly sync operations.
|
||||
///
|
||||
/// Normally we don’t do inheritance — but in this case
|
||||
/// it’s the best option.
|
||||
class FeedlyOperation: MainThreadOperation {
|
||||
|
||||
weak var delegate: FeedlyOperationDelegate?
|
||||
|
||||
var downloadProgress: DownloadProgress? {
|
||||
didSet {
|
||||
guard downloadProgress == nil || !isExecuting else {
|
||||
fatalError("\(\FeedlyOperation.downloadProgress) was set to late. Set before operation starts executing.")
|
||||
}
|
||||
oldValue?.completeTask()
|
||||
downloadProgress?.addToNumberOfTasksAndRemaining(1)
|
||||
}
|
||||
}
|
||||
|
||||
func didFinish() {
|
||||
assert(Thread.isMainThread)
|
||||
assert(!isFinished, "Finished operation is attempting to finish again.")
|
||||
|
||||
downloadProgress = nil
|
||||
|
||||
isExecutingOperation = false
|
||||
isFinishedOperation = true
|
||||
|
||||
// MainThreadOperation
|
||||
var isCanceled = false {
|
||||
didSet {
|
||||
if isCanceled {
|
||||
didCancel()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func didFinish(_ error: Error) {
|
||||
assert(Thread.isMainThread)
|
||||
assert(!isFinished, "Finished operation is attempting to finish again.")
|
||||
var id: Int?
|
||||
weak var operationDelegate: MainThreadOperationDelegate?
|
||||
var name: String?
|
||||
var completionBlock: MainThreadOperation.MainThreadOperationCompletionBlock?
|
||||
|
||||
func run() {
|
||||
}
|
||||
|
||||
func didFinish() {
|
||||
if !isCanceled {
|
||||
operationDelegate?.operationDidComplete(self)
|
||||
}
|
||||
downloadProgress?.completeTask()
|
||||
}
|
||||
|
||||
func didFinish(with error: Error) {
|
||||
delegate?.feedlyOperation(self, didFailWith: error)
|
||||
didFinish()
|
||||
}
|
||||
|
||||
override func cancel() {
|
||||
// If the operation never started, disown the download progress.
|
||||
if !isExecuting && !isFinished, downloadProgress != nil {
|
||||
DispatchQueue.main.async {
|
||||
self.downloadProgress = nil
|
||||
}
|
||||
}
|
||||
super.cancel()
|
||||
}
|
||||
|
||||
override func start() {
|
||||
guard !isCancelled else {
|
||||
isExecutingOperation = false
|
||||
isFinishedOperation = true
|
||||
|
||||
if downloadProgress != nil {
|
||||
DispatchQueue.main.async {
|
||||
self.downloadProgress = nil
|
||||
}
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
isExecutingOperation = true
|
||||
DispatchQueue.main.async {
|
||||
self.main()
|
||||
}
|
||||
}
|
||||
|
||||
override var isExecuting: Bool {
|
||||
return isExecutingOperation
|
||||
}
|
||||
|
||||
private var isExecutingOperation = false {
|
||||
willSet {
|
||||
willChangeValue(for: \.isExecuting)
|
||||
}
|
||||
didSet {
|
||||
didChangeValue(for: \.isExecuting)
|
||||
}
|
||||
}
|
||||
|
||||
override var isFinished: Bool {
|
||||
return isFinishedOperation
|
||||
}
|
||||
|
||||
private var isFinishedOperation = false {
|
||||
willSet {
|
||||
willChangeValue(for: \.isFinished)
|
||||
}
|
||||
didSet {
|
||||
didChangeValue(for: \.isFinished)
|
||||
}
|
||||
|
||||
func didCancel() {
|
||||
didFinish()
|
||||
}
|
||||
}
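// Illustrative sketch (not part of this commit): the shape of a concrete subclass,
// using only the FeedlyOperation API introduced here (run(), isCanceled, didFinish(),
// didFinish(with:)). The async work is a hypothetical placeholder.
final class ExampleFeedlyOperation: FeedlyOperation {

    override func run() {
        guard !isCanceled else {
            didFinish()
            return
        }
        performSomeAsyncWork { error in
            if let error = error {
                self.didFinish(with: error)   // also reports the error to the FeedlyOperationDelegate
            } else {
                self.didFinish()              // signals the operationDelegate that this operation completed
            }
        }
    }

    private func performSomeAsyncWork(completion: @escaping (Error?) -> Void) {
        DispatchQueue.main.async { completion(nil) }
    }
}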
|
||||
|
|
|
@@ -11,23 +11,24 @@ import RSParser
|
|||
import os.log
|
||||
|
||||
protocol FeedlyParsedItemsByFeedProviding {
|
||||
var providerName: String { get }
|
||||
var parsedItemsByFeedProviderName: String { get }
|
||||
var parsedItemsKeyedByFeedId: [String: Set<ParsedItem>] { get }
|
||||
}
|
||||
|
||||
/// Single responsibility is to group articles by their feeds.
|
||||
/// Group articles by their feeds.
|
||||
final class FeedlyOrganiseParsedItemsByFeedOperation: FeedlyOperation, FeedlyParsedItemsByFeedProviding {
|
||||
|
||||
private let account: Account
|
||||
private let parsedItemProvider: FeedlyParsedItemProviding
|
||||
private let log: OSLog
|
||||
|
||||
var parsedItemsKeyedByFeedId: [String : Set<ParsedItem>] {
|
||||
assert(Thread.isMainThread) // Needs to be on main thread because Feed is a main-thread-only model type.
|
||||
return itemsKeyedByFeedId
|
||||
var parsedItemsByFeedProviderName: String {
|
||||
return name ?? String(describing: Self.self)
|
||||
}
|
||||
|
||||
var providerName: String {
|
||||
return parsedItemProvider.resource.id
|
||||
var parsedItemsKeyedByFeedId: [String : Set<ParsedItem>] {
|
||||
precondition(Thread.isMainThread) // Needs to be on main thread because Feed is a main-thread-only model type.
|
||||
return itemsKeyedByFeedId
|
||||
}
|
||||
|
||||
private var itemsKeyedByFeedId = [String: Set<ParsedItem>]()
|
||||
|
@@ -38,11 +39,11 @@ final class FeedlyOrganiseParsedItemsByFeedOperation: FeedlyOperation, FeedlyPar
|
|||
self.log = log
|
||||
}
|
||||
|
||||
override func main() {
|
||||
defer { didFinish() }
|
||||
|
||||
guard !isCancelled else { return }
|
||||
|
||||
override func run() {
|
||||
defer {
|
||||
didFinish()
|
||||
}
|
||||
|
||||
let items = parsedItemProvider.parsedEntries
|
||||
var dict = [String: Set<ParsedItem>](minimumCapacity: items.count)
|
||||
|
||||
|
@@ -57,11 +58,9 @@ final class FeedlyOrganiseParsedItemsByFeedOperation: FeedlyOperation, FeedlyPar
|
|||
}
|
||||
}()
|
||||
dict[key] = value
|
||||
|
||||
guard !isCancelled else { return }
|
||||
}
|
||||
|
||||
os_log(.debug, log: log, "Grouped %i items by %i feeds for %@", items.count, dict.count, parsedItemProvider.resource.id)
|
||||
os_log(.debug, log: log, "Grouped %i items by %i feeds for %@", items.count, dict.count, parsedItemProvider.parsedItemProviderName)
|
||||
|
||||
itemsKeyedByFeedId = dict
|
||||
}
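// Illustrative sketch (not part of this commit): the kind of grouping run() builds,
// expressed with Dictionary(grouping:). Item is a hypothetical stand-in for ParsedItem,
// grouped here by its feed URL.
struct Item: Hashable {
    let feedURL: String
    let title: String
}

let sampleItems: Set<Item> = [
    Item(feedURL: "https://example.com/a.xml", title: "First"),
    Item(feedURL: "https://example.com/a.xml", title: "Second"),
    Item(feedURL: "https://example.com/b.xml", title: "Third"),
]

// Same shape as parsedItemsKeyedByFeedId: [feed id: Set of parsed items].
let grouped = Dictionary(grouping: sampleItems, by: { $0.feedURL }).mapValues { Set($0) }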
|
||||
|
|
|
@@ -11,6 +11,7 @@ import os.log
|
|||
import RSWeb
|
||||
|
||||
final class FeedlyRefreshAccessTokenOperation: FeedlyOperation {
|
||||
|
||||
let service: OAuthAccessTokenRefreshing
|
||||
let oauthClient: OAuthAuthorizationClient
|
||||
let account: Account
|
||||
|
@@ -23,12 +24,7 @@ final class FeedlyRefreshAccessTokenOperation: FeedlyOperation {
|
|||
self.log = log
|
||||
}
|
||||
|
||||
override func main() {
|
||||
guard !isCancelled else {
|
||||
didFinish()
|
||||
return
|
||||
}
|
||||
|
||||
override func run() {
|
||||
let refreshToken: Credentials
|
||||
|
||||
do {
|
||||
|
@@ -40,7 +36,7 @@ final class FeedlyRefreshAccessTokenOperation: FeedlyOperation {
|
|||
refreshToken = credentials
|
||||
|
||||
} catch {
|
||||
didFinish(error)
|
||||
didFinish(with: error)
|
||||
return
|
||||
}
|
||||
|
||||
|
@@ -70,11 +66,11 @@ final class FeedlyRefreshAccessTokenOperation: FeedlyOperation {
|
|||
|
||||
didFinish()
|
||||
} catch {
|
||||
didFinish(error)
|
||||
didFinish(with: error)
|
||||
}
|
||||
|
||||
case .failure(let error):
|
||||
didFinish(error)
|
||||
didFinish(with: error)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -13,7 +13,7 @@ protocol FeedlyRequestStreamsOperationDelegate: class {
|
|||
func feedlyRequestStreamsOperation(_ operation: FeedlyRequestStreamsOperation, enqueue collectionStreamOperation: FeedlyGetStreamContentsOperation)
|
||||
}
|
||||
|
||||
/// Single responsibility is to create one stream request operation for one Feedly collection.
|
||||
/// Create one stream request operation for one Feedly collection.
|
||||
/// This is the start of the process of refreshing the entire contents of a Folder.
|
||||
final class FeedlyRequestStreamsOperation: FeedlyOperation {
|
||||
|
||||
|
@@ -25,7 +25,7 @@ final class FeedlyRequestStreamsOperation: FeedlyOperation {
|
|||
let log: OSLog
|
||||
let newerThan: Date?
|
||||
let unreadOnly: Bool?
|
||||
|
||||
|
||||
init(account: Account, collectionsProvider: FeedlyCollectionProviding, newerThan: Date?, unreadOnly: Bool?, service: FeedlyGetStreamContentsService, log: OSLog) {
|
||||
self.account = account
|
||||
self.service = service
|
||||
|
@@ -35,10 +35,10 @@ final class FeedlyRequestStreamsOperation: FeedlyOperation {
|
|||
self.log = log
|
||||
}
|
||||
|
||||
override func main() {
|
||||
defer { didFinish() }
|
||||
|
||||
guard !isCancelled else { return }
|
||||
override func run() {
|
||||
defer {
|
||||
didFinish()
|
||||
}
|
||||
|
||||
assert(queueDelegate != nil, "This is not particularly useful unless the `queueDelegate` is non-nil.")
|
||||
|
||||
|
|
|
@@ -16,27 +16,22 @@ protocol FeedlySearchOperationDelegate: class {
|
|||
func feedlySearchOperation(_ operation: FeedlySearchOperation, didGet response: FeedlyFeedsSearchResponse)
|
||||
}
|
||||
|
||||
/// Single responsibility is to find one and only one feed for a given query (usually, a URL).
|
||||
/// Find one and only one feed for a given query (usually, a URL).
|
||||
/// What happens when a feed is found for the URL is delegated to the `searchDelegate`.
|
||||
class FeedlySearchOperation: FeedlyOperation {
|
||||
|
||||
let query: String
|
||||
let locale: Locale
|
||||
let searchService: FeedlySearchService
|
||||
|
||||
weak var searchDelegate: FeedlySearchOperationDelegate?
|
||||
|
||||
|
||||
init(query: String, locale: Locale = .current, service: FeedlySearchService) {
|
||||
self.query = query
|
||||
self.locale = locale
|
||||
self.searchService = service
|
||||
}
|
||||
|
||||
override func main() {
|
||||
guard !isCancelled else {
|
||||
didFinish()
|
||||
return
|
||||
}
|
||||
|
||||
override func run() {
|
||||
searchService.getFeeds(for: query, count: 1, locale: locale.identifier) { result in
|
||||
switch result {
|
||||
case .success(let response):
|
||||
|
@@ -45,7 +40,7 @@ class FeedlySearchOperation: FeedlyOperation {
|
|||
self.didFinish()
|
||||
|
||||
case .failure(let error):
|
||||
self.didFinish(error)
|
||||
self.didFinish(with: error)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -11,27 +11,29 @@ import Articles
|
|||
import SyncDatabase
|
||||
import os.log
|
||||
|
||||
/// Single responsibility is to take changes to statuses of articles locally and apply them to the corresponding the articles remotely.
|
||||
|
||||
/// Take changes to statuses of articles locally and apply them to the corresponding articles remotely.
|
||||
final class FeedlySendArticleStatusesOperation: FeedlyOperation {
|
||||
|
||||
private let database: SyncDatabase
|
||||
private let log: OSLog
|
||||
private let service: FeedlyMarkArticlesService
|
||||
|
||||
|
||||
init(database: SyncDatabase, service: FeedlyMarkArticlesService, log: OSLog) {
|
||||
self.database = database
|
||||
self.service = service
|
||||
self.log = log
|
||||
}
|
||||
|
||||
override func main() {
|
||||
guard !isCancelled else {
|
||||
didFinish()
|
||||
return
|
||||
}
|
||||
|
||||
override func run() {
|
||||
os_log(.debug, log: log, "Sending article statuses...")
|
||||
|
||||
database.selectForProcessing { result in
|
||||
if self.isCanceled {
|
||||
self.didFinish()
|
||||
return
|
||||
}
|
||||
|
||||
switch result {
|
||||
case .success(let syncStatuses):
|
||||
self.processStatuses(syncStatuses)
|
||||
|
|
|
@@ -1,92 +0,0 @@
|
|||
//
|
||||
// FeedlySetStarredArticlesOperation.swift
|
||||
// Account
|
||||
//
|
||||
// Created by Kiel Gillard on 14/10/19.
|
||||
// Copyright © 2019 Ranchero Software, LLC. All rights reserved.
|
||||
//
|
||||
|
||||
import Foundation
|
||||
import os.log
|
||||
|
||||
protocol FeedlyStarredEntryIdProviding {
|
||||
var entryIds: Set<String> { get }
|
||||
}
|
||||
|
||||
/// Single responsibility is to associate a starred status for ingested and remote
|
||||
/// articles identfied by the provided identifiers *for the entire account.*
|
||||
final class FeedlySetStarredArticlesOperation: FeedlyOperation {
|
||||
private let account: Account
|
||||
private let allStarredEntryIdsProvider: FeedlyStarredEntryIdProviding
|
||||
private let log: OSLog
|
||||
|
||||
init(account: Account, allStarredEntryIdsProvider: FeedlyStarredEntryIdProviding, log: OSLog) {
|
||||
self.account = account
|
||||
self.allStarredEntryIdsProvider = allStarredEntryIdsProvider
|
||||
self.log = log
|
||||
}
|
||||
|
||||
override func main() {
|
||||
guard !isCancelled else {
|
||||
didFinish()
|
||||
return
|
||||
}
|
||||
|
||||
account.fetchStarredArticleIDs { result in
|
||||
switch result {
|
||||
case .success(let localStarredArticleIDs):
|
||||
self.processStarredArticleIDs(localStarredArticleIDs)
|
||||
|
||||
case .failure(let error):
|
||||
self.didFinish(error)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private extension FeedlySetStarredArticlesOperation {
|
||||
|
||||
func processStarredArticleIDs(_ localStarredArticleIDs: Set<String>) {
|
||||
guard !isCancelled else {
|
||||
didFinish()
|
||||
return
|
||||
}
|
||||
|
||||
let remoteStarredArticleIDs = allStarredEntryIdsProvider.entryIds
|
||||
guard !remoteStarredArticleIDs.isEmpty else {
|
||||
didFinish()
|
||||
return
|
||||
}
|
||||
|
||||
let group = DispatchGroup()
|
||||
|
||||
final class StarredStatusResults {
|
||||
var markAsStarredError: Error?
|
||||
var markAsUnstarredError: Error?
|
||||
}
|
||||
|
||||
let results = StarredStatusResults()
|
||||
|
||||
group.enter()
|
||||
account.markAsStarred(remoteStarredArticleIDs) { error in
|
||||
results.markAsStarredError = error
|
||||
group.leave()
|
||||
}
|
||||
|
||||
let deltaUnstarredArticleIDs = localStarredArticleIDs.subtracting(remoteStarredArticleIDs)
|
||||
group.enter()
|
||||
account.markAsUnstarred(deltaUnstarredArticleIDs) { error in
|
||||
results.markAsUnstarredError = error
|
||||
group.leave()
|
||||
}
|
||||
|
||||
group.notify(queue: .main) {
|
||||
let markingError = results.markAsStarredError ?? results.markAsUnstarredError
|
||||
guard let error = markingError else {
|
||||
self.didFinish()
|
||||
return
|
||||
}
|
||||
self.didFinish(error)
|
||||
}
|
||||
}
|
||||
}
|
|
@@ -1,92 +0,0 @@
|
|||
//
|
||||
// FeedlySetUnreadArticlesOperation.swift
|
||||
// Account
|
||||
//
|
||||
// Created by Kiel Gillard on 25/9/19.
|
||||
// Copyright © 2019 Ranchero Software, LLC. All rights reserved.
|
||||
//
|
||||
|
||||
import Foundation
|
||||
import os.log
|
||||
|
||||
protocol FeedlyUnreadEntryIdProviding {
|
||||
var entryIds: Set<String> { get }
|
||||
}
|
||||
|
||||
/// Single responsibility is to associate a read status for ingested and remote articles
|
||||
/// where the provided article identifers identify the unread articles *for the entire account.*
|
||||
final class FeedlySetUnreadArticlesOperation: FeedlyOperation {
|
||||
private let account: Account
|
||||
private let allUnreadIdsProvider: FeedlyUnreadEntryIdProviding
|
||||
private let log: OSLog
|
||||
|
||||
init(account: Account, allUnreadIdsProvider: FeedlyUnreadEntryIdProviding, log: OSLog) {
|
||||
self.account = account
|
||||
self.allUnreadIdsProvider = allUnreadIdsProvider
|
||||
self.log = log
|
||||
}
|
||||
|
||||
override func main() {
|
||||
guard !isCancelled else {
|
||||
didFinish()
|
||||
return
|
||||
}
|
||||
|
||||
account.fetchUnreadArticleIDs { result in
|
||||
switch result {
|
||||
case .success(let localUnreadArticleIDs):
|
||||
self.processUnreadArticleIDs(localUnreadArticleIDs)
|
||||
|
||||
case .failure(let error):
|
||||
self.didFinish(error)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private extension FeedlySetUnreadArticlesOperation {
|
||||
|
||||
private func processUnreadArticleIDs(_ localUnreadArticleIDs: Set<String>) {
|
||||
guard !isCancelled else {
|
||||
didFinish()
|
||||
return
|
||||
}
|
||||
|
||||
let remoteUnreadArticleIDs = allUnreadIdsProvider.entryIds
|
||||
guard !remoteUnreadArticleIDs.isEmpty else {
|
||||
didFinish()
|
||||
return
|
||||
}
|
||||
|
||||
let group = DispatchGroup()
|
||||
|
||||
final class ReadStatusResults {
|
||||
var markAsUnreadError: Error?
|
||||
var markAsReadError: Error?
|
||||
}
|
||||
|
||||
let results = ReadStatusResults()
|
||||
|
||||
group.enter()
|
||||
account.markAsUnread(remoteUnreadArticleIDs) { error in
|
||||
results.markAsUnreadError = error
|
||||
group.leave()
|
||||
}
|
||||
|
||||
let articleIDsToMarkRead = localUnreadArticleIDs.subtracting(remoteUnreadArticleIDs)
|
||||
group.enter()
|
||||
account.markAsRead(articleIDsToMarkRead) { error in
|
||||
results.markAsReadError = error
|
||||
group.leave()
|
||||
}
|
||||
|
||||
group.notify(queue: .main) {
|
||||
let markingError = results.markAsReadError ?? results.markAsUnreadError
|
||||
guard let error = markingError else {
|
||||
self.didFinish()
|
||||
return
|
||||
}
|
||||
self.didFinish(error)
|
||||
}
|
||||
}
|
||||
}
|
|
@@ -10,20 +10,33 @@ import Foundation
|
|||
import os.log
|
||||
import SyncDatabase
|
||||
import RSWeb
|
||||
import RSCore
|
||||
|
||||
/// Single responsibility is to compose the operations necessary to get the entire set of articles, feeds and folders with the statuses the user expects between now and a certain date in the past.
|
||||
/// Compose the operations necessary to get the entire set of articles, feeds and folders with the statuses the user expects between now and a certain date in the past.
|
||||
final class FeedlySyncAllOperation: FeedlyOperation {
|
||||
private let operationQueue: OperationQueue
|
||||
|
||||
private let operationQueue = MainThreadOperationQueue()
|
||||
private let log: OSLog
|
||||
let syncUUID: UUID
|
||||
|
||||
var syncCompletionHandler: ((Result<Void, Error>) -> ())?
|
||||
|
||||
init(account: Account, credentials: Credentials, lastSuccessfulFetchStartDate: Date?, markArticlesService: FeedlyMarkArticlesService, getUnreadService: FeedlyGetStreamIdsService, getCollectionsService: FeedlyGetCollectionsService, getStreamContentsService: FeedlyGetStreamContentsService, getStarredArticlesService: FeedlyGetStreamContentsService, database: SyncDatabase, downloadProgress: DownloadProgress, log: OSLog) {
|
||||
/// These requests to Feedly determine which articles to download:
|
||||
/// 1. The set of all article ids we might need or show.
|
||||
/// 2. The set of all unread article ids we might need or show (a subset of 1).
|
||||
/// 3. The set of all article ids changed since the last sync (a subset of 1).
|
||||
/// 4. The set of all starred article ids.
|
||||
///
|
||||
/// On the response for 1, create statuses for each article id.
|
||||
/// On the response for 2, create unread statuses for each article id and mark as read those no longer in the response.
|
||||
/// On the response for 4, create starred statuses for each article id and mark as unstarred those no longer in the response.
|
||||
///
|
||||
/// Download articles for the union of statuses that lack a corresponding article and those included in 3 (changed since the last successful sync).
|
||||
///
|
||||
init(account: Account, credentials: Credentials, lastSuccessfulFetchStartDate: Date?, markArticlesService: FeedlyMarkArticlesService, getUnreadService: FeedlyGetStreamIdsService, getCollectionsService: FeedlyGetCollectionsService, getStreamContentsService: FeedlyGetStreamContentsService, getStarredService: FeedlyGetStreamIdsService, getStreamIdsService: FeedlyGetStreamIdsService, getEntriesService: FeedlyGetEntriesService, database: SyncDatabase, downloadProgress: DownloadProgress, log: OSLog) {
|
||||
self.syncUUID = UUID()
|
||||
self.log = log
|
||||
self.operationQueue = OperationQueue()
|
||||
self.operationQueue.isSuspended = true
|
||||
self.operationQueue.suspend()
|
||||
|
||||
super.init()
|
||||
|
||||
|
@@ -33,91 +46,99 @@ final class FeedlySyncAllOperation: FeedlyOperation {
|
|||
let sendArticleStatuses = FeedlySendArticleStatusesOperation(database: database, service: markArticlesService, log: log)
|
||||
sendArticleStatuses.delegate = self
|
||||
sendArticleStatuses.downloadProgress = downloadProgress
|
||||
self.operationQueue.addOperation(sendArticleStatuses)
|
||||
self.operationQueue.add(sendArticleStatuses)
|
||||
|
||||
// Get all the Collections the user has.
|
||||
let getCollections = FeedlyGetCollectionsOperation(service: getCollectionsService, log: log)
|
||||
getCollections.delegate = self
|
||||
getCollections.downloadProgress = downloadProgress
|
||||
getCollections.addDependency(sendArticleStatuses)
|
||||
self.operationQueue.addOperation(getCollections)
|
||||
self.operationQueue.add(getCollections)
|
||||
|
||||
// Ensure a folder exists for each Collection, removing Folders without a corresponding Collection.
|
||||
let mirrorCollectionsAsFolders = FeedlyMirrorCollectionsAsFoldersOperation(account: account, collectionsProvider: getCollections, log: log)
|
||||
mirrorCollectionsAsFolders.delegate = self
|
||||
mirrorCollectionsAsFolders.addDependency(getCollections)
|
||||
self.operationQueue.addOperation(mirrorCollectionsAsFolders)
|
||||
self.operationQueue.add(mirrorCollectionsAsFolders)
|
||||
|
||||
// Ensure feeds are created and grouped by their folders.
|
||||
let createFeedsOperation = FeedlyCreateFeedsForCollectionFoldersOperation(account: account, feedsAndFoldersProvider: mirrorCollectionsAsFolders, log: log)
|
||||
createFeedsOperation.delegate = self
|
||||
createFeedsOperation.addDependency(mirrorCollectionsAsFolders)
|
||||
self.operationQueue.addOperation(createFeedsOperation)
|
||||
self.operationQueue.add(createFeedsOperation)
|
||||
|
||||
let getAllArticleIds = FeedlyIngestStreamArticleIdsOperation(account: account, credentials: credentials, service: getStreamIdsService, log: log)
|
||||
getAllArticleIds.delegate = self
|
||||
getAllArticleIds.downloadProgress = downloadProgress
|
||||
getAllArticleIds.addDependency(createFeedsOperation)
|
||||
self.operationQueue.add(getAllArticleIds)
|
||||
|
||||
// Get each page of unread article ids in the global.all stream for the last 31 days (nil = Feedly API default).
|
||||
let getUnread = FeedlySyncUnreadStatusesOperation(account: account, credentials: credentials, service: getUnreadService, newerThan: nil, log: log)
|
||||
let getUnread = FeedlyIngestUnreadArticleIdsOperation(account: account, credentials: credentials, service: getUnreadService, database: database, newerThan: nil, log: log)
|
||||
getUnread.delegate = self
|
||||
getUnread.addDependency(createFeedsOperation)
|
||||
getUnread.addDependency(getAllArticleIds)
|
||||
getUnread.downloadProgress = downloadProgress
|
||||
self.operationQueue.addOperation(getUnread)
|
||||
self.operationQueue.add(getUnread)
|
||||
|
||||
// Get each page of the global.all stream until we get either the content from the last sync or the last 31 days.
|
||||
let getStreamContents = FeedlySyncStreamContentsOperation(account: account, credentials: credentials, service: getStreamContentsService, newerThan: lastSuccessfulFetchStartDate, log: log)
|
||||
getStreamContents.delegate = self
|
||||
getStreamContents.downloadProgress = downloadProgress
|
||||
getStreamContents.addDependency(getUnread)
|
||||
self.operationQueue.addOperation(getStreamContents)
|
||||
// Get each page of the article ids which have been updated since the last successful fetch start date.
|
||||
// If the date is nil, this operation provides an empty set (everything is new, nothing is updated).
|
||||
let getUpdated = FeedlyGetUpdatedArticleIdsOperation(account: account, credentials: credentials, service: getStreamIdsService, newerThan: lastSuccessfulFetchStartDate, log: log)
|
||||
getUpdated.delegate = self
|
||||
getUpdated.downloadProgress = downloadProgress
|
||||
getUpdated.addDependency(createFeedsOperation)
|
||||
self.operationQueue.add(getUpdated)
|
||||
|
||||
// Get each and every starred article.
|
||||
let syncStarred = FeedlySyncStarredArticlesOperation(account: account, credentials: credentials, service: getStarredArticlesService, log: log)
|
||||
syncStarred.delegate = self
|
||||
syncStarred.downloadProgress = downloadProgress
|
||||
syncStarred.addDependency(createFeedsOperation)
|
||||
self.operationQueue.addOperation(syncStarred)
|
||||
// Get each page of the article ids for starred articles.
|
||||
let getStarred = FeedlyIngestStarredArticleIdsOperation(account: account, credentials: credentials, service: getStarredService, database: database, newerThan: nil, log: log)
|
||||
getStarred.delegate = self
|
||||
getStarred.downloadProgress = downloadProgress
|
||||
getStarred.addDependency(createFeedsOperation)
|
||||
self.operationQueue.add(getStarred)
|
||||
|
||||
// Now all the possible article ids we need have a status, fetch the article ids for missing articles.
|
||||
let getMissingIds = FeedlyFetchIdsForMissingArticlesOperation(account: account, log: log)
|
||||
getMissingIds.delegate = self
|
||||
getMissingIds.downloadProgress = downloadProgress
|
||||
getMissingIds.addDependency(getAllArticleIds)
|
||||
getMissingIds.addDependency(getUnread)
|
||||
getMissingIds.addDependency(getStarred)
|
||||
getMissingIds.addDependency(getUpdated)
|
||||
self.operationQueue.add(getMissingIds)
|
||||
|
||||
// Download all the missing and updated articles
|
||||
let downloadMissingArticles = FeedlyDownloadArticlesOperation(account: account,
|
||||
missingArticleEntryIdProvider: getMissingIds,
|
||||
updatedArticleEntryIdProvider: getUpdated,
|
||||
getEntriesService: getEntriesService,
|
||||
log: log)
|
||||
downloadMissingArticles.delegate = self
|
||||
downloadMissingArticles.downloadProgress = downloadProgress
|
||||
downloadMissingArticles.addDependency(getMissingIds)
|
||||
downloadMissingArticles.addDependency(getUpdated)
|
||||
self.operationQueue.add(downloadMissingArticles)
|
||||
|
||||
// Once this operation's dependencies, their dependencies etc finish, we can finish.
|
||||
let finishOperation = FeedlyCheckpointOperation()
|
||||
finishOperation.checkpointDelegate = self
|
||||
finishOperation.downloadProgress = downloadProgress
|
||||
finishOperation.addDependency(getStreamContents)
|
||||
finishOperation.addDependency(syncStarred)
|
||||
|
||||
self.operationQueue.addOperation(finishOperation)
|
||||
finishOperation.addDependency(downloadMissingArticles)
|
||||
self.operationQueue.add(finishOperation)
|
||||
}
|
||||
|
||||
convenience init(account: Account, credentials: Credentials, caller: FeedlyAPICaller, database: SyncDatabase, lastSuccessfulFetchStartDate: Date?, downloadProgress: DownloadProgress, log: OSLog) {
|
||||
|
||||
let newerThan: Date? = {
|
||||
if let date = lastSuccessfulFetchStartDate {
|
||||
return date
|
||||
} else {
|
||||
return Calendar.current.date(byAdding: .day, value: -31, to: Date())
|
||||
}
|
||||
}()
|
||||
|
||||
self.init(account: account, credentials: credentials, lastSuccessfulFetchStartDate: newerThan, markArticlesService: caller, getUnreadService: caller, getCollectionsService: caller, getStreamContentsService: caller, getStarredArticlesService: caller, database: database, downloadProgress: downloadProgress, log: log)
|
||||
self.init(account: account, credentials: credentials, lastSuccessfulFetchStartDate: lastSuccessfulFetchStartDate, markArticlesService: caller, getUnreadService: caller, getCollectionsService: caller, getStreamContentsService: caller, getStarredService: caller, getStreamIdsService: caller, getEntriesService: caller, database: database, downloadProgress: downloadProgress, log: log)
|
||||
}
|
||||
|
||||
override func cancel() {
|
||||
override func run() {
|
||||
os_log(.debug, log: log, "Starting sync %{public}@", syncUUID.uuidString)
|
||||
operationQueue.resume()
|
||||
}
|
||||
|
||||
override func didCancel() {
|
||||
os_log(.debug, log: log, "Cancelling sync %{public}@", syncUUID.uuidString)
|
||||
self.operationQueue.cancelAllOperations()
|
||||
|
||||
super.cancel()
|
||||
|
||||
didFinish()
|
||||
|
||||
// Operation should silently cancel.
|
||||
syncCompletionHandler = nil
|
||||
}
|
||||
|
||||
override func main() {
|
||||
guard !isCancelled else {
|
||||
// override of cancel calls didFinish().
|
||||
return
|
||||
}
|
||||
|
||||
os_log(.debug, log: log, "Starting sync %{public}@", syncUUID.uuidString)
|
||||
operationQueue.isSuspended = false
|
||||
super.didCancel()
|
||||
}
|
||||
}
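// Illustrative sketch (not part of this commit): the set arithmetic described in the
// init comment above, with hypothetical article ids.
let idsWithStatusButNoArticle: Set<String> = ["100", "101"]   // have a status locally, article not yet stored
let idsChangedSinceLastSync: Set<String> = ["101", "102"]     // step 3: updated on Feedly since the last sync

// FeedlyDownloadArticlesOperation is fed the union of the two sets.
let idsToDownload = idsWithStatusButNoArticle.union(idsChangedSinceLastSync)   // ["100", "101", "102"]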
|
||||
|
||||
|
@@ -138,7 +159,9 @@ extension FeedlySyncAllOperation: FeedlyOperationDelegate {
|
|||
|
||||
func feedlyOperation(_ operation: FeedlyOperation, didFailWith error: Error) {
|
||||
assert(Thread.isMainThread)
|
||||
os_log(.debug, log: log, "%{public}@ failed with error: %{public}@.", operation, error as NSError)
|
||||
|
||||
// Having this log is useful for debugging missing required JSON keys in the response from Feedly, for example.
|
||||
os_log(.debug, log: log, "%{public}@ failed with error: %{public}@.", String(describing: operation), error as NSError)
|
||||
|
||||
syncCompletionHandler?(.failure(error))
|
||||
syncCompletionHandler = nil
|
||||
|
|
|
@@ -1,150 +0,0 @@
|
|||
//
|
||||
// FeedlySyncStarredArticlesOperation.swift
|
||||
// Account
|
||||
//
|
||||
// Created by Kiel Gillard on 15/10/19.
|
||||
// Copyright © 2019 Ranchero Software, LLC. All rights reserved.
|
||||
//
|
||||
|
||||
import Foundation
|
||||
import os.log
|
||||
import RSParser
|
||||
|
||||
final class FeedlySyncStarredArticlesOperation: FeedlyOperation, FeedlyOperationDelegate, FeedlyGetStreamContentsOperationDelegate, FeedlyCheckpointOperationDelegate {
|
||||
private let account: Account
|
||||
private let operationQueue: OperationQueue
|
||||
private let service: FeedlyGetStreamContentsService
|
||||
private let log: OSLog
|
||||
|
||||
private let setStatuses: FeedlySetStarredArticlesOperation
|
||||
private let finishOperation: FeedlyCheckpointOperation
|
||||
|
||||
/// Buffers every starred/saved entry from every page.
|
||||
private class StarredEntryProvider: FeedlyEntryProviding, FeedlyStarredEntryIdProviding, FeedlyParsedItemProviding {
|
||||
var resource: FeedlyResourceId
|
||||
|
||||
private(set) var parsedEntries = Set<ParsedItem>()
|
||||
private(set) var entries = [FeedlyEntry]()
|
||||
|
||||
init(resource: FeedlyResourceId) {
|
||||
self.resource = resource
|
||||
}
|
||||
|
||||
func addEntries(from provider: FeedlyEntryProviding & FeedlyParsedItemProviding) {
|
||||
entries.append(contentsOf: provider.entries)
|
||||
parsedEntries.formUnion(provider.parsedEntries)
|
||||
}
|
||||
|
||||
var entryIds: Set<String> {
|
||||
return Set(entries.map { $0.id })
|
||||
}
|
||||
}
|
||||
|
||||
private let entryProvider: StarredEntryProvider
|
||||
|
||||
convenience init(account: Account, credentials: Credentials, service: FeedlyGetStreamContentsService, log: OSLog) {
|
||||
let saved = FeedlyTagResourceId.Global.saved(for: credentials.username)
|
||||
self.init(account: account, resource: saved, service: service, log: log)
|
||||
}
|
||||
|
||||
init(account: Account, resource: FeedlyResourceId, service: FeedlyGetStreamContentsService, log: OSLog) {
|
||||
self.account = account
|
||||
self.service = service
|
||||
self.operationQueue = OperationQueue()
|
||||
self.operationQueue.isSuspended = true
|
||||
self.finishOperation = FeedlyCheckpointOperation()
|
||||
self.log = log
|
||||
|
||||
let provider = StarredEntryProvider(resource: resource)
|
||||
self.entryProvider = provider
|
||||
self.setStatuses = FeedlySetStarredArticlesOperation(account: account,
|
||||
allStarredEntryIdsProvider: provider,
|
||||
log: log)
|
||||
|
||||
super.init()
|
||||
|
||||
let getFirstPage = FeedlyGetStreamContentsOperation(account: account,
|
||||
resource: resource,
|
||||
service: service,
|
||||
newerThan: nil,
|
||||
log: log)
|
||||
|
||||
let organiseByFeed = FeedlyOrganiseParsedItemsByFeedOperation(account: account,
|
||||
parsedItemProvider: provider,
|
||||
log: log)
|
||||
|
||||
let updateAccount = FeedlyUpdateAccountFeedsWithItemsOperation(account: account,
|
||||
organisedItemsProvider: organiseByFeed,
|
||||
log: log)
|
||||
|
||||
getFirstPage.delegate = self
|
||||
getFirstPage.streamDelegate = self
|
||||
|
||||
setStatuses.addDependency(getFirstPage)
|
||||
setStatuses.delegate = self
|
||||
|
||||
organiseByFeed.addDependency(setStatuses)
|
||||
organiseByFeed.delegate = self
|
||||
|
||||
updateAccount.addDependency(organiseByFeed)
|
||||
updateAccount.delegate = self
|
||||
|
||||
finishOperation.checkpointDelegate = self
|
||||
finishOperation.addDependency(updateAccount)
|
||||
|
||||
let operations = [getFirstPage, setStatuses, organiseByFeed, updateAccount, finishOperation]
|
||||
operationQueue.addOperations(operations, waitUntilFinished: false)
|
||||
}
|
||||
|
||||
override func cancel() {
|
||||
os_log(.debug, log: log, "Canceling sync starred articles")
|
||||
operationQueue.cancelAllOperations()
|
||||
super.cancel()
|
||||
didFinish()
|
||||
}
|
||||
|
||||
override func main() {
|
||||
guard !isCancelled else {
|
||||
// override of cancel calls didFinish().
|
||||
return
|
||||
}
|
||||
|
||||
operationQueue.isSuspended = false
|
||||
}
|
||||
|
||||
func feedlyGetStreamContentsOperation(_ operation: FeedlyGetStreamContentsOperation, didGetContentsOf stream: FeedlyStream) {
|
||||
guard !isCancelled else {
|
||||
os_log(.debug, log: log, "Cancelled starred stream contents for %@", stream.id)
|
||||
return
|
||||
}
|
||||
|
||||
entryProvider.addEntries(from: operation)
|
||||
os_log(.debug, log: log, "Collecting %i items from %@", stream.items.count, stream.id)
|
||||
|
||||
guard let continuation = stream.continuation else {
|
||||
return
|
||||
}
|
||||
|
||||
let nextPageOperation = FeedlyGetStreamContentsOperation(account: operation.account,
|
||||
resource: operation.resource,
|
||||
service: operation.service,
|
||||
continuation: continuation,
|
||||
newerThan: operation.newerThan,
|
||||
log: log)
|
||||
nextPageOperation.delegate = self
|
||||
nextPageOperation.streamDelegate = self
|
||||
|
||||
setStatuses.addDependency(nextPageOperation)
|
||||
operationQueue.addOperation(nextPageOperation)
|
||||
}
|
||||
|
||||
func feedlyCheckpointOperationDidReachCheckpoint(_ operation: FeedlyCheckpointOperation) {
|
||||
didFinish()
|
||||
}
|
||||
|
||||
func feedlyOperation(_ operation: FeedlyOperation, didFailWith error: Error) {
|
||||
os_log(.debug, log: log, "%{public}@ failing and cancelling other operations because %{public}@.", operation, error.localizedDescription)
|
||||
operationQueue.cancelAllOperations()
|
||||
didFinish(error)
|
||||
}
|
||||
}
|
|
@@ -9,61 +9,59 @@
|
|||
import Foundation
|
||||
import os.log
|
||||
import RSParser
|
||||
import RSCore
|
||||
import RSWeb
|
||||
|
||||
final class FeedlySyncStreamContentsOperation: FeedlyOperation, FeedlyOperationDelegate, FeedlyGetStreamContentsOperationDelegate, FeedlyCheckpointOperationDelegate {
|
||||
|
||||
private let account: Account
|
||||
private let resource: FeedlyResourceId
|
||||
private let operationQueue: OperationQueue
|
||||
private let operationQueue = MainThreadOperationQueue()
|
||||
private let service: FeedlyGetStreamContentsService
|
||||
private let newerThan: Date?
|
||||
private let isPagingEnabled: Bool
|
||||
private let log: OSLog
|
||||
private let finishOperation: FeedlyCheckpointOperation
|
||||
|
||||
init(account: Account, resource: FeedlyResourceId, service: FeedlyGetStreamContentsService, newerThan: Date?, log: OSLog) {
|
||||
init(account: Account, resource: FeedlyResourceId, service: FeedlyGetStreamContentsService, isPagingEnabled: Bool, newerThan: Date?, log: OSLog) {
|
||||
self.account = account
|
||||
self.resource = resource
|
||||
self.service = service
|
||||
self.operationQueue = OperationQueue()
|
||||
self.operationQueue.isSuspended = true
|
||||
self.isPagingEnabled = isPagingEnabled
|
||||
self.operationQueue.suspend()
|
||||
self.newerThan = newerThan
|
||||
self.log = log
|
||||
self.finishOperation = FeedlyCheckpointOperation()
|
||||
|
||||
super.init()
|
||||
|
||||
self.operationQueue.addOperation(self.finishOperation)
|
||||
self.operationQueue.add(self.finishOperation)
|
||||
self.finishOperation.checkpointDelegate = self
|
||||
enqueueOperations(for: nil)
|
||||
}
|
||||
|
||||
convenience init(account: Account, credentials: Credentials, service: FeedlyGetStreamContentsService, newerThan: Date?, log: OSLog) {
|
||||
let all = FeedlyCategoryResourceId.Global.all(for: credentials.username)
|
||||
self.init(account: account, resource: all, service: service, newerThan: newerThan, log: log)
|
||||
self.init(account: account, resource: all, service: service, isPagingEnabled: true, newerThan: newerThan, log: log)
|
||||
}
|
||||
|
||||
override func cancel() {
|
||||
override func run() {
|
||||
operationQueue.resume()
|
||||
}
|
||||
|
||||
override func didCancel() {
|
||||
os_log(.debug, log: log, "Canceling sync stream contents")
|
||||
operationQueue.cancelAllOperations()
|
||||
super.cancel()
|
||||
didFinish()
|
||||
super.didCancel()
|
||||
}
|
||||
|
||||
override func main() {
|
||||
guard !isCancelled else {
|
||||
// override of cancel calls didFinish().
|
||||
return
|
||||
}
|
||||
|
||||
operationQueue.isSuspended = false
|
||||
}
|
||||
|
||||
|
||||
func enqueueOperations(for continuation: String?) {
|
||||
os_log(.debug, log: log, "Requesting page for %@", resource.id)
|
||||
let operations = pageOperations(for: continuation)
|
||||
operationQueue.addOperations(operations, waitUntilFinished: false)
|
||||
operationQueue.addOperations(operations)
|
||||
}
|
||||
|
||||
func pageOperations(for continuation: String?) -> [Operation] {
|
||||
func pageOperations(for continuation: String?) -> [MainThreadOperation] {
|
||||
let getPage = FeedlyGetStreamContentsOperation(account: account,
|
||||
resource: resource,
|
||||
service: service,
|
||||
|
@@ -72,37 +70,33 @@ final class FeedlySyncStreamContentsOperation: FeedlyOperation, FeedlyOperationD
|
|||
log: log)
|
||||
|
||||
|
||||
let organiseByFeed = FeedlyOrganiseParsedItemsByFeedOperation(account: account,
|
||||
parsedItemProvider: getPage,
|
||||
log: log)
|
||||
let organiseByFeed = FeedlyOrganiseParsedItemsByFeedOperation(account: account, parsedItemProvider: getPage, log: log)
|
||||
|
||||
let updateAccount = FeedlyUpdateAccountFeedsWithItemsOperation(account: account,
|
||||
organisedItemsProvider: organiseByFeed,
|
||||
log: log)
|
||||
let updateAccount = FeedlyUpdateAccountFeedsWithItemsOperation(account: account, organisedItemsProvider: organiseByFeed, log: log)
|
||||
|
||||
getPage.delegate = self
|
||||
getPage.streamDelegate = self
|
||||
|
||||
|
||||
organiseByFeed.addDependency(getPage)
|
||||
organiseByFeed.delegate = self
|
||||
|
||||
|
||||
updateAccount.addDependency(organiseByFeed)
|
||||
updateAccount.delegate = self
|
||||
|
||||
|
||||
finishOperation.addDependency(updateAccount)
|
||||
|
||||
|
||||
return [getPage, organiseByFeed, updateAccount]
|
||||
}
|
||||
|
||||
func feedlyGetStreamContentsOperation(_ operation: FeedlyGetStreamContentsOperation, didGetContentsOf stream: FeedlyStream) {
|
||||
guard !isCancelled else {
|
||||
guard !isCanceled else {
|
||||
os_log(.debug, log: log, "Cancelled requesting page for %@", resource.id)
|
||||
return
|
||||
}
|
||||
|
||||
os_log(.debug, log: log, "Ingesting %i items from %@", stream.items.count, stream.id)
|
||||
|
||||
guard let continuation = stream.continuation else {
|
||||
guard isPagingEnabled, let continuation = stream.continuation else {
|
||||
os_log(.debug, log: log, "Reached end of stream for %@", stream.id)
|
||||
return
|
||||
}
|
||||
|
@@ -117,6 +111,6 @@ final class FeedlySyncStreamContentsOperation: FeedlyOperation, FeedlyOperationD
|
|||
|
||||
func feedlyOperation(_ operation: FeedlyOperation, didFailWith error: Error) {
|
||||
operationQueue.cancelAllOperations()
|
||||
didFinish(error)
|
||||
didFinish(with: error)
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -1,131 +0,0 @@
|
|||
//
|
||||
// FeedlySyncUnreadStatusesOperation.swift
|
||||
// Account
|
||||
//
|
||||
// Created by Kiel Gillard on 18/10/19.
|
||||
// Copyright © 2019 Ranchero Software, LLC. All rights reserved.
|
||||
//
|
||||
|
||||
import Foundation
|
||||
import os.log
|
||||
import RSParser
|
||||
|
||||
/// Makes one or more requests to get the complete set of unread article ids to update the status of those articles *for the entire account.*
|
||||
final class FeedlySyncUnreadStatusesOperation: FeedlyOperation, FeedlyOperationDelegate, FeedlyGetStreamIdsOperationDelegate, FeedlyCheckpointOperationDelegate {
|
||||
private let account: Account
|
||||
private let resource: FeedlyResourceId
|
||||
private let operationQueue: OperationQueue
|
||||
private let service: FeedlyGetStreamIdsService
|
||||
private let log: OSLog
|
||||
|
||||
/// Buffers every unread article id from every page of the resource's stream.
|
||||
private class UnreadEntryIdsProvider: FeedlyUnreadEntryIdProviding {
|
||||
let resource: FeedlyResourceId
|
||||
private(set) var entryIds = Set<String>()
|
||||
|
||||
init(resource: FeedlyResourceId) {
|
||||
self.resource = resource
|
||||
}
|
||||
|
||||
func addEntryIds(from provider: FeedlyEntryIdenifierProviding) {
|
||||
entryIds.formUnion(provider.entryIds)
|
||||
}
|
||||
}
|
||||
|
||||
private let unreadEntryIdsProvider: UnreadEntryIdsProvider
|
||||
private let setStatuses: FeedlySetUnreadArticlesOperation
|
||||
|
||||
convenience init(account: Account, credentials: Credentials, service: FeedlyGetStreamIdsService, newerThan: Date?, log: OSLog) {
|
||||
let resource = FeedlyCategoryResourceId.Global.all(for: credentials.username)
|
||||
self.init(account: account, resource: resource, service: service, newerThan: newerThan, log: log)
|
||||
}
|
||||
|
||||
init(account: Account, resource: FeedlyResourceId, service: FeedlyGetStreamIdsService, newerThan: Date?, log: OSLog) {
|
||||
self.account = account
|
||||
self.resource = resource
|
||||
self.service = service
|
||||
self.operationQueue = OperationQueue()
|
||||
self.operationQueue.isSuspended = true
|
||||
self.log = log
|
||||
|
||||
let provider = UnreadEntryIdsProvider(resource: resource)
|
||||
self.unreadEntryIdsProvider = provider
|
||||
self.setStatuses = FeedlySetUnreadArticlesOperation(account: account,
|
||||
allUnreadIdsProvider: unreadEntryIdsProvider,
|
||||
log: log)
|
||||
|
||||
super.init()
|
||||
|
||||
let getFirstPageOfUnreadIds = FeedlyGetStreamIdsOperation(account: account,
|
||||
resource: resource,
|
||||
service: service,
|
||||
newerThan: nil,
|
||||
unreadOnly: true,
|
||||
log: log)
|
||||
|
||||
getFirstPageOfUnreadIds.delegate = self
|
||||
getFirstPageOfUnreadIds.streamIdsDelegate = self
|
||||
|
||||
setStatuses.addDependency(getFirstPageOfUnreadIds)
|
||||
setStatuses.delegate = self
|
||||
|
||||
let finishOperation = FeedlyCheckpointOperation()
|
||||
finishOperation.checkpointDelegate = self
|
||||
finishOperation.addDependency(setStatuses)
|
||||
|
||||
let operations = [getFirstPageOfUnreadIds, setStatuses, finishOperation]
|
||||
operationQueue.addOperations(operations, waitUntilFinished: false)
|
||||
}
|
||||
|
||||
override func cancel() {
|
||||
os_log(.debug, log: log, "Canceling sync unread statuses")
|
||||
operationQueue.cancelAllOperations()
|
||||
super.cancel()
|
||||
didFinish()
|
||||
}
|
||||
|
||||
override func main() {
|
||||
guard !isCancelled else {
|
||||
// override of cancel calls didFinish().
|
||||
return
|
||||
}
|
||||
|
||||
operationQueue.isSuspended = false
|
||||
}
|
||||
|
||||
func feedlyGetStreamIdsOperation(_ operation: FeedlyGetStreamIdsOperation, didGet streamIds: FeedlyStreamIds) {
|
||||
guard !isCancelled else {
|
||||
os_log(.debug, log: log, "Cancelled unread stream ids.")
|
||||
return
|
||||
}
|
||||
|
||||
os_log(.debug, log: log, "Collecting %i unread article ids from %@", streamIds.ids.count, resource.id)
|
||||
unreadEntryIdsProvider.addEntryIds(from: operation)
|
||||
|
||||
guard let continuation = streamIds.continuation else {
|
||||
return
|
||||
}
|
||||
|
||||
let nextPageOperation = FeedlyGetStreamIdsOperation(account: operation.account,
|
||||
resource: operation.resource,
|
||||
service: operation.service,
|
||||
continuation: continuation,
|
||||
newerThan: operation.newerThan,
|
||||
unreadOnly: operation.unreadOnly,
|
||||
log: log)
|
||||
nextPageOperation.delegate = self
|
||||
nextPageOperation.streamIdsDelegate = self
|
||||
|
||||
setStatuses.addDependency(nextPageOperation)
|
||||
operationQueue.addOperation(nextPageOperation)
|
||||
}
|
||||
|
||||
func feedlyCheckpointOperationDidReachCheckpoint(_ operation: FeedlyCheckpointOperation) {
|
||||
didFinish()
|
||||
}
|
||||
|
||||
func feedlyOperation(_ operation: FeedlyOperation, didFailWith error: Error) {
|
||||
operationQueue.cancelAllOperations()
|
||||
didFinish(error)
|
||||
}
|
||||
}
|
|
@@ -10,34 +10,29 @@ import Foundation
import RSParser
import os.log

/// Single responsibility is to combine the articles with their feeds for a specific account.
/// Combine the articles with their feeds for a specific account.
final class FeedlyUpdateAccountFeedsWithItemsOperation: FeedlyOperation {

private let account: Account
private let organisedItemsProvider: FeedlyParsedItemsByFeedProviding
private let log: OSLog

init(account: Account, organisedItemsProvider: FeedlyParsedItemsByFeedProviding, log: OSLog) {
self.account = account
self.organisedItemsProvider = organisedItemsProvider
self.log = log
}

override func main() {
precondition(Thread.isMainThread) // Needs to be on main thread because Feed is a main-thread-only model type.
guard !isCancelled else {
didFinish()
return
}

override func run() {
let webFeedIDsAndItems = organisedItemsProvider.parsedItemsKeyedByFeedId

account.update(webFeedIDsAndItems: webFeedIDsAndItems, defaultRead: true) { databaseError in
if let error = databaseError {
self.didFinish(error)
self.didFinish(with: error)
return
}

os_log(.debug, log: self.log, "Updated %i feeds for \"%@\"", webFeedIDsAndItems.count, self.organisedItemsProvider.providerName)
os_log(.debug, log: self.log, "Updated %i feeds for \"%@\"", webFeedIDsAndItems.count, self.organisedItemsProvider.parsedItemsByFeedProviderName)
self.didFinish()
}
}

@ -173,36 +173,47 @@ private extension Folder {
extension Folder: OPMLRepresentable {

public func OPMLString(indentLevel: Int, strictConformance: Bool) -> String {
public func OPMLString(indentLevel: Int, allowCustomAttributes: Bool) -> String {

let attrExternalID: String = {
if !strictConformance, let externalID = externalID {
return " nnw_externalID=\"\(externalID)\""
if allowCustomAttributes, let externalID = externalID {
return " nnw_externalID=\"\(externalID.escapingSpecialXMLCharacters)\""
} else {
return ""
}
}()

let escapedTitle = nameForDisplay.rs_stringByEscapingSpecialXMLCharacters()
let escapedTitle = nameForDisplay.escapingSpecialXMLCharacters
var s = "<outline text=\"\(escapedTitle)\" title=\"\(escapedTitle)\"\(attrExternalID)>\n"
s = s.rs_string(byPrependingNumberOfTabs: indentLevel)
s = s.prepending(tabCount: indentLevel)

var hasAtLeastOneChild = false

for feed in topLevelWebFeeds.sorted(by: { $0.nameForDisplay < $1.nameForDisplay }) {
s += feed.OPMLString(indentLevel: indentLevel + 1, strictConformance: strictConformance)
for feed in topLevelWebFeeds.sorted() {
s += feed.OPMLString(indentLevel: indentLevel + 1, allowCustomAttributes: allowCustomAttributes)
hasAtLeastOneChild = true
}

if !hasAtLeastOneChild {
s = "<outline text=\"\(escapedTitle)\" title=\"\(escapedTitle)\"\(attrExternalID)/>\n"
s = s.rs_string(byPrependingNumberOfTabs: indentLevel)
s = s.prepending(tabCount: indentLevel)
return s
}

s = s + NSString.rs_string(withNumberOfTabs: indentLevel) + "</outline>\n"
s = s + String(tabCount: indentLevel) + "</outline>\n"

return s
}
}

// MARK: Set

extension Set where Element == Folder {

func sorted() -> Array<Folder> {
return sorted(by: { (folder1, folder2) -> Bool in
return folder1.nameForDisplay.localizedStandardCompare(folder2.nameForDisplay) == .orderedAscending
})
}

}

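Both Folder and WebFeed (further down) lean on an `escapingSpecialXMLCharacters` string helper when building OPML. That helper lives in RSCore, not in this diff; a minimal stand-in covering the usual five XML entities might look like this (an illustrative sketch, not the RSCore implementation):

```swift
import Foundation

extension String {
    // Escape the five characters that are special in XML text and attribute values.
    // The ampersand must be replaced first so the other entities aren't double-escaped.
    var escapingSpecialXMLCharacters: String {
        var s = replacingOccurrences(of: "&", with: "&amp;")
        s = s.replacingOccurrences(of: "<", with: "&lt;")
        s = s.replacingOccurrences(of: ">", with: "&gt;")
        s = s.replacingOccurrences(of: "\"", with: "&quot;")
        s = s.replacingOccurrences(of: "'", with: "&#39;")
        return s
    }
}
```
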
@ -53,12 +53,12 @@ extension LocalAccountRefresher: DownloadSessionDelegate {
return nil
}

let request = NSMutableURLRequest(url: url)
var request = URLRequest(url: url)
if let conditionalGetInfo = feed.conditionalGetInfo {
conditionalGetInfo.addRequestHeadersToURLRequest(request)
conditionalGetInfo.addRequestHeadersToURLRequest(&request)
}

return request as URLRequest
return request
}

func downloadSession(_ downloadSession: DownloadSession, downloadDidCompleteForRepresentedObject representedObject: AnyObject, response: URLResponse?, data: Data, error: NSError?, completion: @escaping () -> Void) {

@ -73,7 +73,7 @@ extension LocalAccountRefresher: DownloadSessionDelegate {
return
}

let dataHash = (data as NSData).rs_md5HashString()
let dataHash = data.md5String
if dataHash == feed.contentHash {
completion()
return

@ -137,6 +137,6 @@ private extension Data {

func isDefinitelyNotFeed() -> Bool {
// We only detect a few image types for now. This should get fleshed-out at some later date.
return (self as NSData).rs_dataIsImage()
return self.isImage
}
}

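The refresher now builds a plain `URLRequest` and lets RSWeb's conditional-get info add its headers in place. The underlying HTTP pattern, sketched with hand-rolled header fields (the header names below are standard HTTP; the real values come from `HTTPConditionalGetInfo`):

```swift
import Foundation

// Build a conditional GET so an unchanged feed comes back as 304 Not Modified
// instead of a full download.
func makeConditionalGetRequest(url: URL, lastETag: String?, lastModifiedDate: String?) -> URLRequest {
    var request = URLRequest(url: url)
    if let lastETag = lastETag {
        request.setValue(lastETag, forHTTPHeaderField: "If-None-Match")
    }
    if let lastModifiedDate = lastModifiedDate {
        request.setValue(lastModifiedDate, forHTTPHeaderField: "If-Modified-Since")
    }
    return request
}
```
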
@ -17,62 +17,40 @@ final class OPMLFile {
private let fileURL: URL
private let account: Account
private lazy var managedFile = ManagedResourceFile(fileURL: fileURL, load: loadCallback, save: saveCallback)

private var isDirty = false {
didSet {
queueSaveToDiskIfNeeded()
}
}
private let saveQueue = CoalescingQueue(name: "Save Queue", interval: 0.5)

init(filename: String, account: Account) {
self.fileURL = URL(fileURLWithPath: filename)
self.account = account
}

func markAsDirty() {
managedFile.markAsDirty()
isDirty = true
}

func load() {
managedFile.load()
}

func save() {
managedFile.saveIfNecessary()
}

func suspend() {
managedFile.suspend()
}

func resume() {
managedFile.resume()
}

}

private extension OPMLFile {

func loadCallback() {
guard let fileData = opmlFileData() else {
guard let fileData = opmlFileData(), let opmlItems = parsedOPMLItems(fileData: fileData) else {
return
}

// Don't rebuild the account if the OPML hasn't changed since the last save
guard let opml = String(data: fileData, encoding: .utf8), opml != opmlDocument() else {
return
}

guard let opmlItems = parsedOPMLItems(fileData: fileData) else { return }

BatchUpdate.shared.perform {
account.topLevelWebFeeds.removeAll()
account.loadOPMLItems(opmlItems, parentFolder: nil)
}
}

func saveCallback() {
func save() {
guard !account.isDeleted else { return }

let opmlDocumentString = opmlDocument()

let errorPointer: NSErrorPointer = nil
let fileCoordinator = NSFileCoordinator(filePresenter: managedFile)
let fileCoordinator = NSFileCoordinator()

fileCoordinator.coordinate(writingItemAt: fileURL, options: [], error: errorPointer, byAccessor: { writeURL in
do {

@ -85,12 +63,28 @@ private extension OPMLFile {
if let error = errorPointer?.pointee {
os_log(.error, log: log, "OPML save to disk coordination failed: %@.", error.localizedDescription)
}

}

}

private extension OPMLFile {

func queueSaveToDiskIfNeeded() {
saveQueue.add(self, #selector(saveToDiskIfNeeded))
}

@objc func saveToDiskIfNeeded() {
if isDirty {
isDirty = false
save()
}
}

func opmlFileData() -> Data? {
var fileData: Data? = nil
let errorPointer: NSErrorPointer = nil
let fileCoordinator = NSFileCoordinator(filePresenter: managedFile)
let fileCoordinator = NSFileCoordinator()

fileCoordinator.coordinate(readingItemAt: fileURL, options: [], error: errorPointer, byAccessor: { readURL in
do {

@ -125,7 +119,7 @@ private extension OPMLFile {
}

func opmlDocument() -> String {
let escapedTitle = account.nameForDisplay.rs_stringByEscapingSpecialXMLCharacters()
let escapedTitle = account.nameForDisplay.escapingSpecialXMLCharacters
let openingText =
"""
<?xml version="1.0" encoding="UTF-8"?>

@ -138,7 +132,7 @@ private extension OPMLFile {

"""

let middleText = account.OPMLString(indentLevel: 0, strictConformance: false)
let middleText = account.OPMLString(indentLevel: 0, allowCustomAttributes: true)

let closingText =
"""

@ -598,14 +598,7 @@ private extension ReaderAPIAccountDelegate {
os_log(.debug, log: log, "Syncing taggings with %ld subscriptions.", subscriptions.count)

// Set up some structures to make syncing easier
let folderDict: [String: Folder] = {
if let folders = account.folders {
return Dictionary(uniqueKeysWithValues: folders.map { ($0.name ?? "", $0) } )
} else {
return [String: Folder]()
}
}()

let folderDict = nameToFolderDictionary(with: account.folders)
let taggingsDict = subscriptions.reduce([String: [ReaderAPISubscription]]()) { (dict, subscription) in
var taggedFeeds = dict

@ -667,6 +660,21 @@ private extension ReaderAPIAccountDelegate {

}

func nameToFolderDictionary(with folders: Set<Folder>?) -> [String: Folder] {
guard let folders = folders else {
return [String: Folder]()
}

var d = [String: Folder]()
for folder in folders {
let name = folder.name ?? ""
if d[name] == nil {
d[name] = folder
}
}
return d
}

func sendArticleStatuses(_ statuses: [SyncStatus],
apiCall: ([Int], @escaping (Result<Void, Error>) -> Void) -> Void,
completion: @escaping (() -> Void)) {

@ -42,8 +42,8 @@ public final class WebFeed: Feed, Renamable, Hashable {
return metadata.homePageURL
}
set {
if let url = newValue {
metadata.homePageURL = url.rs_normalizedURL()
if let url = newValue, !url.isEmpty {
metadata.homePageURL = url.normalizedURL
}
else {
metadata.homePageURL = nil

@ -245,7 +245,7 @@ public final class WebFeed: Feed, Renamable, Hashable {

extension WebFeed: OPMLRepresentable {

public func OPMLString(indentLevel: Int, strictConformance: Bool) -> String {
public func OPMLString(indentLevel: Int, allowCustomAttributes: Bool) -> String {
// https://github.com/brentsimmons/NetNewsWire/issues/527
// Don’t use nameForDisplay because that can result in a feed name "Untitled" written to disk,
// which NetNewsWire may take later to be the actual name.

@ -256,16 +256,16 @@ extension WebFeed: OPMLRepresentable {
if nameToUse == nil {
nameToUse = ""
}
let escapedName = nameToUse!.rs_stringByEscapingSpecialXMLCharacters()
let escapedName = nameToUse!.escapingSpecialXMLCharacters

var escapedHomePageURL = ""
if let homePageURL = homePageURL {
escapedHomePageURL = homePageURL.rs_stringByEscapingSpecialXMLCharacters()
escapedHomePageURL = homePageURL.escapingSpecialXMLCharacters
}
let escapedFeedURL = url.rs_stringByEscapingSpecialXMLCharacters()
let escapedFeedURL = url.escapingSpecialXMLCharacters

var s = "<outline text=\"\(escapedName)\" title=\"\(escapedName)\" description=\"\" type=\"rss\" version=\"RSS\" htmlUrl=\"\(escapedHomePageURL)\" xmlUrl=\"\(escapedFeedURL)\"/>\n"
s = s.rs_string(byPrependingNumberOfTabs: indentLevel)
s = s.prepending(tabCount: indentLevel)

return s
}

@ -276,4 +276,14 @@ extension Set where Element == WebFeed {
func webFeedIDs() -> Set<String> {
return Set<String>(map { $0.webFeedID })
}

func sorted() -> Array<WebFeed> {
return sorted(by: { (webFeed1, webFeed2) -> Bool in
if webFeed1.nameForDisplay.localizedStandardCompare(webFeed2.nameForDisplay) == .orderedSame {
return webFeed1.url < webFeed2.url
}
return webFeed1.nameForDisplay.localizedStandardCompare(webFeed2.nameForDisplay) == .orderedAscending
})
}

}

@ -16,41 +16,26 @@ final class WebFeedMetadataFile {
private let fileURL: URL
private let account: Account
private lazy var managedFile = ManagedResourceFile(fileURL: fileURL, load: loadCallback, save: saveCallback)

private var isDirty = false {
didSet {
queueSaveToDiskIfNeeded()
}
}
private let saveQueue = CoalescingQueue(name: "Save Queue", interval: 0.5)

init(filename: String, account: Account) {
self.fileURL = URL(fileURLWithPath: filename)
self.account = account
}

func markAsDirty() {
managedFile.markAsDirty()
isDirty = true
}

func load() {
managedFile.load()
}

func save() {
managedFile.saveIfNecessary()
}

func suspend() {
managedFile.suspend()
}

func resume() {
managedFile.resume()
}

}

private extension WebFeedMetadataFile {

func loadCallback() {

let errorPointer: NSErrorPointer = nil
let fileCoordinator = NSFileCoordinator(filePresenter: managedFile)
let fileCoordinator = NSFileCoordinator()

fileCoordinator.coordinate(readingItemAt: fileURL, options: [], error: errorPointer, byAccessor: { readURL in
if let fileData = try? Data(contentsOf: readURL) {

@ -58,19 +43,14 @@ private extension WebFeedMetadataFile {
account.webFeedMetadata = (try? decoder.decode(Account.WebFeedMetadataDictionary.self, from: fileData)) ?? Account.WebFeedMetadataDictionary()
}
account.webFeedMetadata.values.forEach { $0.delegate = account }
if !account.startingUp {
account.resetWebFeedMetadataAndUnreadCounts()
}
})

if let error = errorPointer?.pointee {
os_log(.error, log: log, "Read from disk coordination failed: %@.", error.localizedDescription)
}

}

func saveCallback() {
func save() {
guard !account.isDeleted else { return }

let feedMetadata = metadataForOnlySubscribedToFeeds()

@ -79,7 +59,7 @@ private extension WebFeedMetadataFile {
encoder.outputFormat = .binary

let errorPointer: NSErrorPointer = nil
let fileCoordinator = NSFileCoordinator(filePresenter: managedFile)
let fileCoordinator = NSFileCoordinator()

fileCoordinator.coordinate(writingItemAt: fileURL, options: [], error: errorPointer, byAccessor: { writeURL in
do {

@ -94,7 +74,22 @@ private extension WebFeedMetadataFile {
os_log(.error, log: log, "Save to disk coordination failed: %@.", error.localizedDescription)
}
}

}

private extension WebFeedMetadataFile {

func queueSaveToDiskIfNeeded() {
saveQueue.add(self, #selector(saveToDiskIfNeeded))
}

@objc func saveToDiskIfNeeded() {
if isDirty {
isDirty = false
save()
}
}

private func metadataForOnlySubscribedToFeeds() -> Account.WebFeedMetadataDictionary {
let webFeedIDs = account.idToWebFeedDictionary.keys
return account.webFeedMetadata.filter { (feedID: String, metadata: WebFeedMetadata) -> Bool in

@ -23,13 +23,12 @@ public struct Article: Hashable {
public let externalURL: String?
public let summary: String?
public let imageURL: String?
public let bannerImageURL: String?
public let datePublished: Date?
public let dateModified: Date?
public let authors: Set<Author>?
public let status: ArticleStatus

public init(accountID: String, articleID: String?, webFeedID: String, uniqueID: String, title: String?, contentHTML: String?, contentText: String?, url: String?, externalURL: String?, summary: String?, imageURL: String?, bannerImageURL: String?, datePublished: Date?, dateModified: Date?, authors: Set<Author>?, status: ArticleStatus) {
public init(accountID: String, articleID: String?, webFeedID: String, uniqueID: String, title: String?, contentHTML: String?, contentText: String?, url: String?, externalURL: String?, summary: String?, imageURL: String?, datePublished: Date?, dateModified: Date?, authors: Set<Author>?, status: ArticleStatus) {
self.accountID = accountID
self.webFeedID = webFeedID
self.uniqueID = uniqueID

@ -40,7 +39,6 @@ public struct Article: Hashable {
self.externalURL = externalURL
self.summary = summary
self.imageURL = imageURL
self.bannerImageURL = bannerImageURL
self.datePublished = datePublished
self.dateModified = dateModified
self.authors = authors

@ -26,7 +26,7 @@ public func databaseIDWithString(_ s: String) -> String {
return identifier
}

let identifier = (s as NSString).rs_md5Hash()
let identifier = s.md5String
databaseIDCache[s] = identifier
return identifier
}

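`md5String` here replaces the old NSString/NSData MD5 category. For reference, an equivalent helper can be written against CryptoKit's `Insecure.MD5` (a sketch assuming CryptoKit availability, i.e. iOS 13/macOS 10.15; the project actually gets this helper from RSCore):

```swift
import CryptoKit
import Foundation

extension Data {
    // Hex-encoded MD5 digest; "Insecure" because MD5 is used only as a cheap content hash.
    var md5String: String {
        Insecure.MD5.hash(data: self).map { String(format: "%02x", $0) }.joined()
    }
}

extension String {
    var md5String: String { Data(utf8).md5String }
}
```
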
@ -9,7 +9,7 @@ PROVISIONING_PROFILE_SPECIFIER =
#include? "../../../SharedXcodeSettings/DeveloperSettings.xcconfig"

SDKROOT = macosx
MACOSX_DEPLOYMENT_TARGET = 10.13
MACOSX_DEPLOYMENT_TARGET = 10.14
IPHONEOS_DEPLOYMENT_TARGET = 13.0
SUPPORTED_PLATFORMS = macosx iphoneos iphonesimulator

@ -18,7 +18,6 @@ SWIFT_VERSION = 5.1
COMBINE_HIDPI_IMAGES = YES

COPY_PHASE_STRIP = NO
MACOSX_DEPLOYMENT_TARGET = 10.13
ALWAYS_SEARCH_USER_PATHS = NO
CURRENT_PROJECT_VERSION = 1
VERSION_INFO_PREFIX =

@ -45,6 +45,7 @@ public final class ArticlesDatabase {
private let articlesTable: ArticlesTable
private let queue: DatabaseQueue
private let operationQueue = MainThreadOperationQueue()

public init(databaseFilePath: String, accountID: String) {
let queue = DatabaseQueue(databasePath: databaseFilePath)

@ -136,13 +137,36 @@ public final class ArticlesDatabase {
}

// MARK: - Unread Counts

public func fetchUnreadCounts(for webFeedIDs: Set<String>, _ completion: @escaping UnreadCountDictionaryCompletionBlock) {
articlesTable.fetchUnreadCounts(webFeedIDs, completion)

/// Fetch all non-zero unread counts.
public func fetchAllUnreadCounts(_ completion: @escaping UnreadCountDictionaryCompletionBlock) {
let operation = FetchAllUnreadCountsOperation(databaseQueue: queue, cutoffDate: articlesTable.articleCutoffDate)
operationQueue.cancelOperations(named: operation.name!)
operation.completionBlock = { operation in
let fetchOperation = operation as! FetchAllUnreadCountsOperation
completion(fetchOperation.result)
}
operationQueue.add(operation)
}

public func fetchAllNonZeroUnreadCounts(_ completion: @escaping UnreadCountDictionaryCompletionBlock) {
articlesTable.fetchAllUnreadCounts(completion)
/// Fetch unread count for a single feed.
public func fetchUnreadCount(_ webFeedID: String, _ completion: @escaping SingleUnreadCountCompletionBlock) {
let operation = FetchFeedUnreadCountOperation(webFeedID: webFeedID, databaseQueue: queue, cutoffDate: articlesTable.articleCutoffDate)
operation.completionBlock = { operation in
let fetchOperation = operation as! FetchFeedUnreadCountOperation
completion(fetchOperation.result)
}
operationQueue.add(operation)
}

/// Fetch non-zero unread counts for given webFeedIDs.
public func fetchUnreadCounts(for webFeedIDs: Set<String>, _ completion: @escaping UnreadCountDictionaryCompletionBlock) {
let operation = FetchUnreadCountsForFeedsOperation(webFeedIDs: webFeedIDs, databaseQueue: queue, cutoffDate: articlesTable.articleCutoffDate)
operation.completionBlock = { operation in
let fetchOperation = operation as! FetchUnreadCountsForFeedsOperation
completion(fetchOperation.result)
}
operationQueue.add(operation)
}

public func fetchUnreadCountForToday(for webFeedIDs: Set<String>, completion: @escaping SingleUnreadCountCompletionBlock) {

@ -176,7 +200,7 @@ public final class ArticlesDatabase {
articlesTable.fetchStarredArticleIDsAsync(webFeedIDs, completion)
}

/// Fetch articleIDs for articles that we should have, but don’t. These articles are not userDeleted, and they are either (starred) or (unread and newer than the article cutoff date).
/// Fetch articleIDs for articles that we should have, but don’t. These articles are not userDeleted, and they are either (starred) or (newer than the article cutoff date).
public func fetchArticleIDsForStatusesWithoutArticlesNewerThanCutoffDate(_ completion: @escaping ArticleIDsCompletionBlock) {
articlesTable.fetchArticleIDsForStatusesWithoutArticlesNewerThanCutoffDate(completion)
}

@ -189,17 +213,31 @@ public final class ArticlesDatabase {
articlesTable.mark(articleIDs, statusKey, flag, completion)
}

/// Create statuses for specified articleIDs. For existing statuses, don’t do anything.
/// For newly-created statuses, mark them as read and not-starred.
public func createStatusesIfNeeded(articleIDs: Set<String>, completion: @escaping DatabaseCompletionBlock) {
articlesTable.createStatusesIfNeeded(articleIDs, completion)
}

// MARK: - Suspend and Resume (for iOS)

/// Cancel current operations and close the database.
public func cancelAndSuspend() {
cancelOperations()
suspend()
}

/// Close the database and stop running database calls.
/// Any pending calls will complete first.
public func suspend() {
operationQueue.suspend()
queue.suspend()
}

/// Open the database and allow for running database calls again.
public func resume() {
queue.resume()
operationQueue.resume()
}

// MARK: - Caches

@ -216,6 +254,7 @@ public final class ArticlesDatabase {

/// Calls the various clean-up functions.
public func cleanupDatabaseAtStartup(subscribedToWebFeedIDs: Set<String>) {
articlesTable.deleteOldArticles()
articlesTable.deleteArticlesNotInSubscribedToFeedIDs(subscribedToWebFeedIDs)
}
}

@ -245,4 +284,10 @@ private extension ArticlesDatabase {
// 24 hours previous. This is used by the Today smart feed, which should not actually empty out at midnight.
return Date(timeIntervalSinceNow: -(60 * 60 * 24)) // This does not need to be more precise.
}

// MARK: - Operations

func cancelOperations() {
operationQueue.cancelAllOperations()
}
}

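Call sites get a fully asynchronous, operation-backed unread-count API plus an explicit suspend/resume lifecycle out of this change. A hypothetical caller might look like the following (names and error handling are illustrative; the completion delivers a `Result`, as the operations above show, and the `ArticlesDatabase` module is assumed to be importable):

```swift
import ArticlesDatabase // assumption: the framework defined in this commit

// Refresh all unread counts, then suspend/resume around iOS backgrounding.
func refreshUnreadCounts(using database: ArticlesDatabase) {
    database.fetchAllUnreadCounts { result in
        if let unreadCounts = try? result.get() {
            print("Non-zero unread counts for \(unreadCounts.count) feeds")
        }
    }
}

func applicationDidEnterBackground(_ database: ArticlesDatabase) {
    database.cancelAndSuspend() // cancel in-flight operations, close SQLite
}

func applicationWillEnterForeground(_ database: ArticlesDatabase) {
    database.resume() // reopen the database and the operation queue
}
```
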
@ -8,10 +8,10 @@
/* Begin PBXBuildFile section */
51C451FF2264CF2100C03939 /* RSParser.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 51C451FE2264CF2100C03939 /* RSParser.framework */; };
84116B8923E01E86000B2E98 /* FetchFeedUnreadCountOperation.swift in Sources */ = {isa = PBXBuildFile; fileRef = 84116B8823E01E86000B2E98 /* FetchFeedUnreadCountOperation.swift */; };
841D4D742106B59F00DD04E6 /* Articles.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 841D4D732106B59F00DD04E6 /* Articles.framework */; };
84288A001F6A3C4400395871 /* DatabaseObject+Database.swift in Sources */ = {isa = PBXBuildFile; fileRef = 842889FF1F6A3C4400395871 /* DatabaseObject+Database.swift */; };
84288A021F6A3D8000395871 /* RelatedObjectsMap+Database.swift in Sources */ = {isa = PBXBuildFile; fileRef = 84288A011F6A3D8000395871 /* RelatedObjectsMap+Database.swift */; };
843577161F744FC800F460AE /* DatabaseArticle.swift in Sources */ = {isa = PBXBuildFile; fileRef = 843577151F744FC800F460AE /* DatabaseArticle.swift */; };
843577221F749C6200F460AE /* ArticleChangesTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 843577211F749C6200F460AE /* ArticleChangesTests.swift */; };
843702C31F70D15D00B18807 /* ParsedArticle+Database.swift in Sources */ = {isa = PBXBuildFile; fileRef = 843702C21F70D15D00B18807 /* ParsedArticle+Database.swift */; };
843CB9961F34174100EE6581 /* Author+Database.swift in Sources */ = {isa = PBXBuildFile; fileRef = 84F20F901F1810DD00D8E682 /* Author+Database.swift */; };

@ -20,9 +20,11 @@
845580671F0AEBCD003CCFA1 /* Constants.swift in Sources */ = {isa = PBXBuildFile; fileRef = 845580661F0AEBCD003CCFA1 /* Constants.swift */; };
845580761F0AF670003CCFA1 /* Article+Database.swift in Sources */ = {isa = PBXBuildFile; fileRef = 845580751F0AF670003CCFA1 /* Article+Database.swift */; };
8455807A1F0AF67D003CCFA1 /* ArticleStatus+Database.swift in Sources */ = {isa = PBXBuildFile; fileRef = 845580791F0AF67D003CCFA1 /* ArticleStatus+Database.swift */; };
84611DCC23E62FE200BC630C /* FetchUnreadCountsForFeedsOperation.swift in Sources */ = {isa = PBXBuildFile; fileRef = 84611DCB23E62FE200BC630C /* FetchUnreadCountsForFeedsOperation.swift */; };
8477ACBC2221E76F00DF7F37 /* SearchTable.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8477ACBB2221E76F00DF7F37 /* SearchTable.swift */; };
848E3EB920FBCFD20004B7ED /* RSCore.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 848E3EB820FBCFD20004B7ED /* RSCore.framework */; };
848E3EBD20FBCFDE0004B7ED /* RSDatabase.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 848E3EBC20FBCFDE0004B7ED /* RSDatabase.framework */; };
84C242C923DEB45C00C50516 /* FetchAllUnreadCountsOperation.swift in Sources */ = {isa = PBXBuildFile; fileRef = 84C242C823DEB45C00C50516 /* FetchAllUnreadCountsOperation.swift */; };
84E156EA1F0AB80500F8CC05 /* ArticlesDatabase.swift in Sources */ = {isa = PBXBuildFile; fileRef = 84E156E91F0AB80500F8CC05 /* ArticlesDatabase.swift */; };
84E156EC1F0AB80E00F8CC05 /* ArticlesTable.swift in Sources */ = {isa = PBXBuildFile; fileRef = 84E156EB1F0AB80E00F8CC05 /* ArticlesTable.swift */; };
84E156EE1F0AB81400F8CC05 /* StatusesTable.swift in Sources */ = {isa = PBXBuildFile; fileRef = 84E156ED1F0AB81400F8CC05 /* StatusesTable.swift */; };

@ -112,10 +114,10 @@
/* Begin PBXFileReference section */
518B2EA7235130CD00400001 /* ArticlesDatabase_project_test.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; path = ArticlesDatabase_project_test.xcconfig; sourceTree = "<group>"; };
51C451FE2264CF2100C03939 /* RSParser.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; path = RSParser.framework; sourceTree = BUILT_PRODUCTS_DIR; };
84116B8823E01E86000B2E98 /* FetchFeedUnreadCountOperation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FetchFeedUnreadCountOperation.swift; sourceTree = "<group>"; };
841D4D732106B59F00DD04E6 /* Articles.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; path = Articles.framework; sourceTree = BUILT_PRODUCTS_DIR; };
842889FF1F6A3C4400395871 /* DatabaseObject+Database.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "DatabaseObject+Database.swift"; sourceTree = "<group>"; };
84288A011F6A3D8000395871 /* RelatedObjectsMap+Database.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "RelatedObjectsMap+Database.swift"; sourceTree = "<group>"; };
843577151F744FC800F460AE /* DatabaseArticle.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DatabaseArticle.swift; sourceTree = "<group>"; };
843577211F749C6200F460AE /* ArticleChangesTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ArticleChangesTests.swift; sourceTree = "<group>"; };
843702C21F70D15D00B18807 /* ParsedArticle+Database.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; name = "ParsedArticle+Database.swift"; path = "Extensions/ParsedArticle+Database.swift"; sourceTree = "<group>"; };
844BEE371F0AB3AA004AB7CD /* ArticlesDatabase.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = ArticlesDatabase.framework; sourceTree = BUILT_PRODUCTS_DIR; };

@ -125,12 +127,14 @@
845580661F0AEBCD003CCFA1 /* Constants.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Constants.swift; sourceTree = "<group>"; };
845580751F0AF670003CCFA1 /* Article+Database.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = "Article+Database.swift"; path = "Extensions/Article+Database.swift"; sourceTree = "<group>"; };
845580791F0AF67D003CCFA1 /* ArticleStatus+Database.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = "ArticleStatus+Database.swift"; path = "Extensions/ArticleStatus+Database.swift"; sourceTree = "<group>"; };
84611DCB23E62FE200BC630C /* FetchUnreadCountsForFeedsOperation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FetchUnreadCountsForFeedsOperation.swift; sourceTree = "<group>"; };
8461461E1F0ABC7300870CB3 /* RSParser.xcodeproj */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.pb-project"; name = RSParser.xcodeproj; path = ../RSParser/RSParser.xcodeproj; sourceTree = "<group>"; };
8477ACBB2221E76F00DF7F37 /* SearchTable.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SearchTable.swift; sourceTree = "<group>"; };
848E3EB820FBCFD20004B7ED /* RSCore.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; path = RSCore.framework; sourceTree = BUILT_PRODUCTS_DIR; };
848E3EBA20FBCFD80004B7ED /* RSParser.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; path = RSParser.framework; sourceTree = BUILT_PRODUCTS_DIR; };
848E3EBC20FBCFDE0004B7ED /* RSDatabase.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; path = RSDatabase.framework; sourceTree = BUILT_PRODUCTS_DIR; };
84BB4B8F1F119C4900858766 /* RSCore.xcodeproj */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.pb-project"; name = RSCore.xcodeproj; path = ../RSCore/RSCore.xcodeproj; sourceTree = "<group>"; };
84C242C823DEB45C00C50516 /* FetchAllUnreadCountsOperation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FetchAllUnreadCountsOperation.swift; sourceTree = "<group>"; };
84E156E81F0AB75600F8CC05 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
84E156E91F0AB80500F8CC05 /* ArticlesDatabase.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ArticlesDatabase.swift; sourceTree = "<group>"; };
84E156EB1F0AB80E00F8CC05 /* ArticlesTable.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ArticlesTable.swift; sourceTree = "<group>"; };

@ -176,9 +180,9 @@
845580661F0AEBCD003CCFA1 /* Constants.swift */,
84E156EB1F0AB80E00F8CC05 /* ArticlesTable.swift */,
8477ACBB2221E76F00DF7F37 /* SearchTable.swift */,
843577151F744FC800F460AE /* DatabaseArticle.swift */,
84E156ED1F0AB81400F8CC05 /* StatusesTable.swift */,
84F20F8E1F180D8700D8E682 /* AuthorsTable.swift */,
84C242C723DEB42700C50516 /* Operations */,
8461462A1F0AC44100870CB3 /* Extensions */,
84E156E81F0AB75600F8CC05 /* Info.plist */,
844BEE441F0AB3AB004AB7CD /* DatabaseTests */,

@ -240,6 +244,16 @@
name = Products;
sourceTree = "<group>";
};
84C242C723DEB42700C50516 /* Operations */ = {
isa = PBXGroup;
children = (
84116B8823E01E86000B2E98 /* FetchFeedUnreadCountOperation.swift */,
84611DCB23E62FE200BC630C /* FetchUnreadCountsForFeedsOperation.swift */,
84C242C823DEB45C00C50516 /* FetchAllUnreadCountsOperation.swift */,
);
path = Operations;
sourceTree = "<group>";
};
84E156F21F0AB83600F8CC05 /* Products */ = {
isa = PBXGroup;
children = (

@ -350,14 +364,14 @@
TargetAttributes = {
844BEE361F0AB3AA004AB7CD = {
CreatedOnToolsVersion = 8.3.2;
DevelopmentTeam = SHJK2V3AJG;
DevelopmentTeam = M8L2WTLA8W;
LastSwiftMigration = 0830;
ProvisioningStyle = Automatic;
ProvisioningStyle = Manual;
};
844BEE3F1F0AB3AB004AB7CD = {
CreatedOnToolsVersion = 8.3.2;
DevelopmentTeam = SHJK2V3AJG;
ProvisioningStyle = Automatic;
DevelopmentTeam = M8L2WTLA8W;
ProvisioningStyle = Manual;
};
};
};

@ -517,12 +531,14 @@
845580671F0AEBCD003CCFA1 /* Constants.swift in Sources */,
843CB9961F34174100EE6581 /* Author+Database.swift in Sources */,
845580761F0AF670003CCFA1 /* Article+Database.swift in Sources */,
84116B8923E01E86000B2E98 /* FetchFeedUnreadCountOperation.swift in Sources */,
8455807A1F0AF67D003CCFA1 /* ArticleStatus+Database.swift in Sources */,
84288A021F6A3D8000395871 /* RelatedObjectsMap+Database.swift in Sources */,
84C242C923DEB45C00C50516 /* FetchAllUnreadCountsOperation.swift in Sources */,
84F20F8F1F180D8700D8E682 /* AuthorsTable.swift in Sources */,
84288A001F6A3C4400395871 /* DatabaseObject+Database.swift in Sources */,
8477ACBC2221E76F00DF7F37 /* SearchTable.swift in Sources */,
843577161F744FC800F460AE /* DatabaseArticle.swift in Sources */,
84611DCC23E62FE200BC630C /* FetchUnreadCountsForFeedsOperation.swift in Sources */,
843702C31F70D15D00B18807 /* ParsedArticle+Database.swift in Sources */,
84E156EC1F0AB80E00F8CC05 /* ArticlesTable.swift in Sources */,
84E156EE1F0AB81400F8CC05 /* StatusesTable.swift in Sources */,

@ -19,14 +19,14 @@ final class ArticlesTable: DatabaseTable {
private let queue: DatabaseQueue
private let statusesTable: StatusesTable
private let authorsLookupTable: DatabaseLookupTable
private var databaseArticlesCache = [String: DatabaseArticle]()
private var articlesCache = [String: Article]()

private lazy var searchTable: SearchTable = {
return SearchTable(queue: queue, articlesTable: self)
}()

// TODO: update articleCutoffDate as time passes and based on user preferences.
private var articleCutoffDate = NSDate.rs_dateWithNumberOfDays(inThePast: 90)!
let articleCutoffDate = Date().bySubtracting(days: 90)

private typealias ArticlesFetchMethod = (FMDatabase) -> Set<Article>

@ -212,6 +212,9 @@ final class ArticlesTable: DatabaseTable {

self.callUpdateArticlesCompletionBlock(newArticles, updatedArticles, completion) //7

self.addArticlesToCache(newArticles)
self.addArticlesToCache(updatedArticles)

// 8. Update search index.
if let newArticles = newArticles {
self.searchTable.indexNewArticles(newArticles, database)

@ -234,31 +237,6 @@ final class ArticlesTable: DatabaseTable {

// MARK: - Unread Counts

func fetchUnreadCounts(_ webFeedIDs: Set<String>, _ completion: @escaping UnreadCountDictionaryCompletionBlock) {
if webFeedIDs.isEmpty {
completion(.success(UnreadCountDictionary()))
return
}

fetchAllUnreadCounts { (unreadCountsResult) in

func createUnreadCountDictionary(_ unreadCountDictionary: UnreadCountDictionary) -> UnreadCountDictionary {
var d = UnreadCountDictionary()
for webFeedID in webFeedIDs {
d[webFeedID] = unreadCountDictionary[webFeedID] ?? 0
}
return d
}

switch unreadCountsResult {
case .success(let unreadCountDictionary):
completion(.success(createUnreadCountDictionary(unreadCountDictionary)))
case .failure(let databaseError):
completion(.failure(databaseError))
}
}
}

func fetchUnreadCount(_ webFeedIDs: Set<String>, _ since: Date, _ completion: @escaping SingleUnreadCountCompletionBlock) {
// Get unread count for today, for instance.
if webFeedIDs.isEmpty {

@ -295,46 +273,6 @@ final class ArticlesTable: DatabaseTable {
}
}

func fetchAllUnreadCounts(_ completion: @escaping UnreadCountDictionaryCompletionBlock) {
// Returns only where unreadCount > 0.

let cutoffDate = articleCutoffDate
queue.runInDatabase { databaseResult in

func makeDatabaseCalls(_ database: FMDatabase) {
let sql = "select distinct feedID, count(*) from articles natural join statuses where read=0 and userDeleted=0 and (starred=1 or (datePublished > ? or (datePublished is null and dateArrived > ?))) group by feedID;"

guard let resultSet = database.executeQuery(sql, withArgumentsIn: [cutoffDate, cutoffDate]) else {
DispatchQueue.main.async {
completion(.success(UnreadCountDictionary()))
}
return
}

var d = UnreadCountDictionary()
while resultSet.next() {
let unreadCount = resultSet.long(forColumnIndex: 1)
if let webFeedID = resultSet.string(forColumnIndex: 0) {
d[webFeedID] = unreadCount
}
}

DispatchQueue.main.async {
completion(.success(d))
}
}

switch databaseResult {
case .success(let database):
makeDatabaseCalls(database)
case .failure(let databaseError):
DispatchQueue.main.async {
completion(.failure(databaseError))
}
}
}
}

func fetchStarredAndUnreadCount(_ webFeedIDs: Set<String>, _ completion: @escaping SingleUnreadCountCompletionBlock) {
if webFeedIDs.isEmpty {
completion(.success(0))

@ -418,6 +356,22 @@ final class ArticlesTable: DatabaseTable {
}
}

func createStatusesIfNeeded(_ articleIDs: Set<String>, _ completion: @escaping DatabaseCompletionBlock) {
queue.runInTransaction { databaseResult in
switch databaseResult {
case .success(let database):
let _ = self.statusesTable.ensureStatusesForArticleIDs(articleIDs, true, database)
DispatchQueue.main.async {
completion(nil)
}
case .failure(let databaseError):
DispatchQueue.main.async {
completion(databaseError)
}
}
}
}

// MARK: - Indexing

func indexUnindexedArticles() {

@ -449,12 +403,30 @@ final class ArticlesTable: DatabaseTable {

func emptyCaches() {
queue.runInDatabase { _ in
self.databaseArticlesCache = [String: DatabaseArticle]()
self.articlesCache = [String: Article]()
}
}

// MARK: - Cleanup

/// Delete articles that we won’t show in the UI any longer
/// — their arrival date is before our 90-day recency window.
/// Keep all starred articles, no matter their age.
func deleteOldArticles() {
queue.runInTransaction { databaseResult in

func makeDatabaseCalls(_ database: FMDatabase) {
let sql = "delete from articles where articleID in (select articleID from articles natural join statuses where dateArrived<? and starred=0);"
let parameters = [self.articleCutoffDate] as [Any]
database.executeUpdate(sql, withArgumentsIn: parameters)
}

if let database = databaseResult.database {
makeDatabaseCalls(database)
}
}
}

/// Delete articles from feeds that are no longer in the current set of subscribed-to feeds.
/// This deletes from the articles and articleStatuses tables,
/// and, via a trigger, it also deletes from the search index.

@ -527,86 +499,55 @@ private extension ArticlesTable {
}

func articlesWithResultSet(_ resultSet: FMResultSet, _ database: FMDatabase) -> Set<Article> {
// 1. Create DatabaseArticles without related objects.
// 2. Then fetch the related objects, given the set of articleIDs.
// 3. Then create set of Articles with DatabaseArticles and related objects and return it.
var cachedArticles = Set<Article>()
var fetchedArticles = Set<Article>()

// 1. Create databaseArticles (intermediate representations).
while resultSet.next() {

let databaseArticles = makeDatabaseArticles(with: resultSet)
if databaseArticles.isEmpty {
return Set<Article>()
}

let articleIDs = databaseArticles.articleIDs()

// 2. Fetch related objects.

let authorsMap = authorsLookupTable.fetchRelatedObjects(for: articleIDs, in: database)

// 3. Create articles with related objects.

let articles = databaseArticles.map { (databaseArticle) -> Article in
return articleWithDatabaseArticle(databaseArticle, authorsMap)
}

return Set(articles)
}

func articleWithDatabaseArticle(_ databaseArticle: DatabaseArticle, _ authorsMap: RelatedObjectsMap?) -> Article {

let articleID = databaseArticle.articleID
let authors = authorsMap?.authors(for: articleID)

return Article(databaseArticle: databaseArticle, accountID: accountID, authors: authors)
}

func makeDatabaseArticles(with resultSet: FMResultSet) -> Set<DatabaseArticle> {
let articles = resultSet.mapToSet { (row) -> DatabaseArticle? in

guard let articleID = row.string(forColumn: DatabaseKey.articleID) else {
guard let articleID = resultSet.string(forColumn: DatabaseKey.articleID) else {
assertionFailure("Expected articleID.")
return nil
continue
}

// Articles are removed from the cache when they’re updated.
// See saveUpdatedArticles.
if let databaseArticle = databaseArticlesCache[articleID] {
return databaseArticle
if let article = articlesCache[articleID] {
cachedArticles.insert(article)
continue
}

// The resultSet is a result of a JOIN query with the statuses table,
// so we can get the statuses at the same time and avoid additional database lookups.
guard let status = statusesTable.statusWithRow(resultSet, articleID: articleID) else {
assertionFailure("Expected status.")
return nil
}
guard let webFeedID = row.string(forColumn: DatabaseKey.feedID) else {
assertionFailure("Expected feedID.")
return nil
}
guard let uniqueID = row.string(forColumn: DatabaseKey.uniqueID) else {
assertionFailure("Expected uniqueID.")
return nil
continue
}

let title = row.string(forColumn: DatabaseKey.title)
let contentHTML = row.string(forColumn: DatabaseKey.contentHTML)
let contentText = row.string(forColumn: DatabaseKey.contentText)
let url = row.string(forColumn: DatabaseKey.url)
let externalURL = row.string(forColumn: DatabaseKey.externalURL)
let summary = row.string(forColumn: DatabaseKey.summary)
let imageURL = row.string(forColumn: DatabaseKey.imageURL)
let bannerImageURL = row.string(forColumn: DatabaseKey.bannerImageURL)
let datePublished = row.date(forColumn: DatabaseKey.datePublished)
let dateModified = row.date(forColumn: DatabaseKey.dateModified)
guard let article = Article(accountID: accountID, row: resultSet, status: status) else {
continue
}
fetchedArticles.insert(article)
}
resultSet.close()

let databaseArticle = DatabaseArticle(articleID: articleID, webFeedID: webFeedID, uniqueID: uniqueID, title: title, contentHTML: contentHTML, contentText: contentText, url: url, externalURL: externalURL, summary: summary, imageURL: imageURL, bannerImageURL: bannerImageURL, datePublished: datePublished, dateModified: dateModified, status: status)
databaseArticlesCache[articleID] = databaseArticle
return databaseArticle
if fetchedArticles.isEmpty {
return cachedArticles
}

return articles
// Fetch authors for non-cached articles. (Articles from the cache already have authors.)
let fetchedArticleIDs = fetchedArticles.articleIDs()
let authorsMap = authorsLookupTable.fetchRelatedObjects(for: fetchedArticleIDs, in: database)
let articlesWithFetchedAuthors = fetchedArticles.map { (article) -> Article in
if let authors = authorsMap?.authors(for: article.articleID) {
return article.byAdding(authors)
}
return article
}

// Add fetchedArticles to cache, now that they have attached authors.
for article in articlesWithFetchedAuthors {
articlesCache[article.articleID] = article
}

return cachedArticles.union(articlesWithFetchedAuthors)
}

func fetchArticlesWithWhereClause(_ database: FMDatabase, whereClause: String, parameters: [AnyObject], withLimits: Bool) -> Set<Article> {

@ -615,8 +556,8 @@ private extension ArticlesTable {
// * Must be either 1) starred or 2) dateArrived must be newer than cutoff date.

if withLimits {
let sql = "select * from articles natural join statuses where \(whereClause) and userDeleted=0 and (starred=1 or (datePublished > ? or (datePublished is null and dateArrived > ?)));"
return articlesWithSQL(sql, parameters + [articleCutoffDate as AnyObject] + [articleCutoffDate as AnyObject], database)
let sql = "select * from articles natural join statuses where \(whereClause) and userDeleted=0 and (starred=1 or dateArrived>?);"
return articlesWithSQL(sql, parameters + [articleCutoffDate as AnyObject], database)
}
else {
let sql = "select * from articles natural join statuses where \(whereClause);"

@ -624,15 +565,15 @@ private extension ArticlesTable {
}
}

func fetchUnreadCount(_ webFeedID: String, _ database: FMDatabase) -> Int {
// Count only the articles that would appear in the UI.
// * Must be unread.
// * Must not be deleted.
// * Must be either 1) starred or 2) dateArrived must be newer than cutoff date.

let sql = "select count(*) from articles natural join statuses where feedID=? and read=0 and userDeleted=0 and (starred=1 or (datePublished > ? or (datePublished is null and dateArrived > ?)));"
return numberWithSQLAndParameters(sql, [webFeedID, articleCutoffDate, articleCutoffDate], in: database)
}
// func fetchUnreadCount(_ webFeedID: String, _ database: FMDatabase) -> Int {
// // Count only the articles that would appear in the UI.
// // * Must be unread.
// // * Must not be deleted.
// // * Must be either 1) starred or 2) dateArrived must be newer than cutoff date.
//
// let sql = "select count(*) from articles natural join statuses where feedID=? and read=0 and userDeleted=0 and (starred=1 or dateArrived>?);"
// return numberWithSQLAndParameters(sql, [webFeedID, articleCutoffDate], in: database)
// }

func fetchArticlesMatching(_ searchString: String, _ database: FMDatabase) -> Set<Article> {
let sql = "select rowid from search where search match ?;"

@ -872,7 +813,6 @@ private extension ArticlesTable {


func saveUpdatedArticles(_ updatedArticles: Set<Article>, _ fetchedArticles: [String: Article], _ database: FMDatabase) {
removeArticlesFromDatabaseArticlesCache(updatedArticles)
saveUpdatedRelatedObjects(updatedArticles, fetchedArticles, database)

for updatedArticle in updatedArticles {

@ -897,10 +837,12 @@ private extension ArticlesTable {
updateRowsWithDictionary(changesDictionary, whereKey: DatabaseKey.articleID, matches: updatedArticle.articleID, database: database)
}

func removeArticlesFromDatabaseArticlesCache(_ updatedArticles: Set<Article>) {
let articleIDs = updatedArticles.articleIDs()
for articleID in articleIDs {
databaseArticlesCache[articleID] = nil
func addArticlesToCache(_ articles: Set<Article>?) {
guard let articles = articles else {
return
}
for article in articles {
articlesCache[article.articleID] = article
}
}

@ -912,9 +854,6 @@ private extension ArticlesTable {
if article.status.starred {
return false
}
if let datePublished = article.datePublished {
return datePublished < articleCutoffDate
}
return article.status.dateArrived < articleCutoffDate
}

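The rewritten `articlesWithResultSet` drops the `DatabaseArticle` intermediate in favor of a straight row-to-`Article` pass plus an in-memory cache, attaching authors only to rows that missed the cache. The control flow, reduced to stand-in types (purely illustrative — the real code works with `FMResultSet` rows and a related-objects lookup table):

```swift
import Foundation

struct FetchedArticle: Hashable {
    let articleID: String
    var authors: Set<String> = []
}

final class ArticleRowCache {
    private var cache = [String: FetchedArticle]()

    // rows: articleIDs read from the result set, in order.
    // fetchAuthors: batched lookup for the ids that missed the cache.
    func articles(rows: [String], fetchAuthors: (Set<String>) -> [String: Set<String>]) -> Set<FetchedArticle> {
        var cached = Set<FetchedArticle>()
        var fetched = Set<FetchedArticle>()

        for articleID in rows {
            if let article = cache[articleID] {
                cached.insert(article) // cache hit: authors already attached
            } else {
                fetched.insert(FetchedArticle(articleID: articleID))
            }
        }
        guard !fetched.isEmpty else { return cached }

        // Attach related objects only for the rows that weren't cached, then cache them.
        let authorsByID = fetchAuthors(Set(fetched.map { $0.articleID }))
        let withAuthors = Set(fetched.map { article -> FetchedArticle in
            var a = article
            a.authors = authorsByID[article.articleID] ?? []
            return a
        })
        withAuthors.forEach { cache[$0.articleID] = $0 }

        return cached.union(withAuthors)
    }
}
```
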
@ -1,44 +0,0 @@
//
// DatabaseArticle.swift
// NetNewsWire
//
// Created by Brent Simmons on 9/21/17.
// Copyright © 2017 Ranchero Software. All rights reserved.
//

import Foundation
import Articles

// Intermediate representation of an Article. Doesn’t include related objects.
// Used by ArticlesTable as part of fetching articles.

struct DatabaseArticle: Hashable {

let articleID: String
let webFeedID: String
let uniqueID: String
let title: String?
let contentHTML: String?
let contentText: String?
let url: String?
let externalURL: String?
let summary: String?
let imageURL: String?
let bannerImageURL: String?
let datePublished: Date?
let dateModified: Date?
let status: ArticleStatus

// MARK: - Hashable

public func hash(into hasher: inout Hasher) {
hasher.combine(articleID)
}
}

extension Set where Element == DatabaseArticle {

func articleIDs() -> Set<String> {
return Set<String>(map { $0.articleID })
}
}
Some files were not shown because too many files have changed in this diff.