NetNewsWire/Frameworks/Account/FeedFinder/FeedFinder.swift

//
//  FeedFinder.swift
//  NetNewsWire
//
//  Created by Brent Simmons on 8/2/16.
//  Copyright © 2016 Ranchero Software, LLC. All rights reserved.
//

import Foundation
import RSParser
import RSWeb
import RSCore

class FeedFinder {

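	/// Find the feeds for a given URL.
	/// If the URL is itself a feed, it's reported directly; otherwise the downloaded HTML
	/// is searched for feed links, falling back to common paths such as /feed/ and /index.xml
	/// when the page declares none.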
	static func find(url: URL, completion: @escaping (Result<Set<FeedSpecifier>, Error>) -> Void) {
		downloadAddingToCache(url) { (data, response, error) in
			if response?.forcedStatusCode == 404 {
				completion(.failure(AccountError.createErrorNotFound))
				return
			}

			if let error = error {
				completion(.failure(error))
				return
			}

			guard let data = data, let response = response else {
				completion(.failure(AccountError.createErrorNotFound))
				return
			}

			if !response.statusIsOK || data.isEmpty {
				completion(.failure(AccountError.createErrorNotFound))
				return
			}

			if FeedFinder.isFeed(data, url.absoluteString) {
				let feedSpecifier = FeedSpecifier(title: nil, urlString: url.absoluteString, source: .UserEntered)
				completion(.success(Set([feedSpecifier])))
				return
			}

			if !FeedFinder.isHTML(data) {
				completion(.failure(AccountError.createErrorNotFound))
				return
			}

			FeedFinder.findFeedsInHTMLPage(htmlData: data, urlString: url.absoluteString, completion: completion)
		}
	}
}
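
// Example call site (an illustrative sketch; the URL and the print-based handling
// below are hypothetical, not part of this file):
//
//     let pageURL = URL(string: "https://example.com/")!
//     FeedFinder.find(url: pageURL) { result in
//         switch result {
//         case .success(let feedSpecifiers):
//             for feedSpecifier in feedSpecifiers {
//                 print(feedSpecifier.urlString)
//             }
//         case .failure(let error):
//             print("No feeds found: \(error)")
//         }
//     }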

private extension FeedFinder {

	static func addFeedSpecifier(_ feedSpecifier: FeedSpecifier, feedSpecifiers: inout [String: FeedSpecifier]) {
		// If there's an existing feed specifier, merge the two so that we have the best data.
		// If one has a title and one doesn't, use that non-nil title. Use the better source.
		if let existingFeedSpecifier = feedSpecifiers[feedSpecifier.urlString] {
			let mergedFeedSpecifier = existingFeedSpecifier.feedSpecifierByMerging(feedSpecifier)
			feedSpecifiers[feedSpecifier.urlString] = mergedFeedSpecifier
		}
		else {
			feedSpecifiers[feedSpecifier.urlString] = feedSpecifier
		}
	}

	static func findFeedsInHTMLPage(htmlData: Data, urlString: String, completion: @escaping (Result<Set<FeedSpecifier>, Error>) -> Void) {
		// Feeds in the <head> section we automatically assume are feeds.
		// If there are none from the <head> section,
		// then possible feeds in the <body> section are downloaded individually
		// and added once we determine they are feeds.

		let possibleFeedSpecifiers = possibleFeedsInHTMLPage(htmlData: htmlData, urlString: urlString)
		var feedSpecifiers = [String: FeedSpecifier]()
		var feedSpecifiersToDownload = Set<FeedSpecifier>()

		var didFindFeedInHTMLHead = false

		for oneFeedSpecifier in possibleFeedSpecifiers {
			if oneFeedSpecifier.source == .HTMLHead {
				addFeedSpecifier(oneFeedSpecifier, feedSpecifiers: &feedSpecifiers)
				didFindFeedInHTMLHead = true
			}
			else {
				if feedSpecifiers[oneFeedSpecifier.urlString] == nil {
					feedSpecifiersToDownload.insert(oneFeedSpecifier)
				}
			}
		}

		if didFindFeedInHTMLHead {
			completion(.success(Set(feedSpecifiers.values)))
			return
		}
		else if feedSpecifiersToDownload.isEmpty {
			completion(.failure(AccountError.createErrorNotFound))
			return
		}
		else {
			downloadFeedSpecifiers(feedSpecifiersToDownload, feedSpecifiers: feedSpecifiers, completion: completion)
		}
	}

	static func possibleFeedsInHTMLPage(htmlData: Data, urlString: String) -> Set<FeedSpecifier> {
		let parserData = ParserData(url: urlString, data: htmlData)
		var feedSpecifiers = HTMLFeedFinder(parserData: parserData).feedSpecifiers

		if feedSpecifiers.isEmpty {
			// Odds are decent it's a WordPress site, and just adding /feed/ will work.
			// It's also fairly common for /index.xml to work.
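			// For example (an illustrative case with a hypothetical site): given
			// "https://example.com/blog", the candidates added below are
			// "https://example.com/blog/feed/" and "https://example.com/blog/index.xml".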
			if let url = URL(string: urlString) {
				let feedURL = url.appendingPathComponent("feed", isDirectory: true)
				let wordpressFeedSpecifier = FeedSpecifier(title: nil, urlString: feedURL.absoluteString, source: .HTMLLink)
				feedSpecifiers.insert(wordpressFeedSpecifier)

				let indexXMLURL = url.appendingPathComponent("index.xml", isDirectory: false)
				let indexXMLFeedSpecifier = FeedSpecifier(title: nil, urlString: indexXMLURL.absoluteString, source: .HTMLLink)
				feedSpecifiers.insert(indexXMLFeedSpecifier)
			}
		}

		return feedSpecifiers
	}

	static func isHTML(_ data: Data) -> Bool {
		return data.isProbablyHTML
	}

	static func downloadFeedSpecifiers(_ downloadFeedSpecifiers: Set<FeedSpecifier>, feedSpecifiers: [String: FeedSpecifier], completion: @escaping (Result<Set<FeedSpecifier>, Error>) -> Void) {
		var resultFeedSpecifiers = feedSpecifiers
		let group = DispatchGroup()

		for downloadFeedSpecifier in downloadFeedSpecifiers {
			guard let url = URL(string: downloadFeedSpecifier.urlString) else {
				continue
			}

			group.enter()
			downloadUsingCache(url) { (data, response, error) in
				if let data = data, let response = response, response.statusIsOK, error == nil {
					if self.isFeed(data, downloadFeedSpecifier.urlString) {
						addFeedSpecifier(downloadFeedSpecifier, feedSpecifiers: &resultFeedSpecifiers)
					}
				}
				group.leave()
			}
		}

		group.notify(queue: DispatchQueue.main) {
			completion(.success(Set(resultFeedSpecifiers.values)))
		}
	}

	static func isFeed(_ data: Data, _ urlString: String) -> Bool {
		let parserData = ParserData(url: urlString, data: data)
		return FeedParser.canParse(parserData)
	}
}