//
//  FeedFinder.swift
//  NetNewsWire
//
//  Created by Brent Simmons on 8/2/16.
//  Copyright © 2016 Ranchero Software, LLC. All rights reserved.
//
import Foundation
|
2017-07-01 17:22:19 -07:00
|
|
|
|
import RSParser
|
2017-05-22 13:27:54 -07:00
|
|
|
|
import RSWeb
|
|
|
|
|
import RSCore
|
|
|
|
|
|
2018-08-25 17:03:10 -07:00
|
|
|
|
class FeedFinder {

	/// Downloads `url` and reports the set of feeds found there.
	///
	/// If the URL itself is a feed, that single feed is returned. If it is an
	/// HTML page, the page is scanned for feeds instead. Any other outcome
	/// (404, download error, empty/non-OK response, non-HTML data) fails with
	/// `AccountError.createErrorNotFound` or the underlying download error.
	/// NOTE(review): the completion queue is whatever `downloadUsingCache`
	/// calls back on — confirm against its implementation if it matters.
	static func find(url: URL, completion: @escaping (Result<Set<FeedSpecifier>, Error>) -> Void) {
		downloadUsingCache(url) { (downloadedData, downloadResponse, downloadError) in
			// A forced 404 is treated as definitive: nothing at this URL.
			if downloadResponse?.forcedStatusCode == 404 {
				completion(.failure(AccountError.createErrorNotFound))
				return
			}

			if let downloadError = downloadError {
				completion(.failure(downloadError))
				return
			}

			// Missing data/response, a non-OK status, and empty data all
			// collapse to the same not-found failure.
			guard let downloadedData = downloadedData,
				  let downloadResponse = downloadResponse,
				  downloadResponse.statusIsOK,
				  !downloadedData.isEmpty else {
				completion(.failure(AccountError.createErrorNotFound))
				return
			}

			// The user may have entered a feed URL directly — done.
			if FeedFinder.isFeed(downloadedData, url.absoluteString) {
				let userEnteredSpecifier = FeedSpecifier(title: nil, urlString: url.absoluteString, source: .UserEntered)
				completion(.success(Set([userEnteredSpecifier])))
				return
			}

			// Not a feed and not an HTML page: nothing more we can do.
			guard FeedFinder.isHTML(downloadedData) else {
				completion(.failure(AccountError.createErrorNotFound))
				return
			}

			FeedFinder.findFeedsInHTMLPage(htmlData: downloadedData, urlString: url.absoluteString, completion: completion)
		}
	}
}
|
|
|
|
|
|
|
|
|
|
private extension FeedFinder {

	/// Records `feedSpecifier` in `feedSpecifiers` keyed by URL string.
	/// When a specifier for the same URL already exists, the two are merged so
	/// we keep the best available data (a non-nil title, the better source).
	static func addFeedSpecifier(_ feedSpecifier: FeedSpecifier, feedSpecifiers: inout [String: FeedSpecifier]) {
		let key = feedSpecifier.urlString
		feedSpecifiers[key] = feedSpecifiers[key].map { $0.feedSpecifierByMerging(feedSpecifier) } ?? feedSpecifier
	}

	/// Scans downloaded HTML for feeds and completes with the result.
	///
	/// Feeds advertised in the <head> section are trusted outright. When none
	/// are found there, candidates from the <body> are downloaded individually
	/// and kept only once confirmed to actually be feeds.
	static func findFeedsInHTMLPage(htmlData: Data, urlString: String, completion: @escaping (Result<Set<FeedSpecifier>, Error>) -> Void) {
		let candidates = possibleFeedsInHTMLPage(htmlData: htmlData, urlString: urlString)

		var confirmedFeedSpecifiers = [String: FeedSpecifier]()
		var candidatesNeedingDownload = Set<FeedSpecifier>()
		var foundFeedInHTMLHead = false

		for candidate in candidates {
			if candidate.source == .HTMLHead {
				addFeedSpecifier(candidate, feedSpecifiers: &confirmedFeedSpecifiers)
				foundFeedInHTMLHead = true
			}
			else if confirmedFeedSpecifiers[candidate.urlString] == nil {
				candidatesNeedingDownload.insert(candidate)
			}
		}

		if foundFeedInHTMLHead {
			completion(.success(Set(confirmedFeedSpecifiers.values)))
		}
		else if candidatesNeedingDownload.isEmpty {
			completion(.failure(AccountError.createErrorNotFound))
		}
		else {
			downloadFeedSpecifiers(candidatesNeedingDownload, feedSpecifiers: confirmedFeedSpecifiers, completion: completion)
		}
	}

	/// Returns candidate feeds parsed out of an HTML page.
	///
	/// When the page advertises no feeds at all, two conventional guesses are
	/// added: /feed/ (odds are decent it’s a WordPress site) and /index.xml,
	/// which is also fairly common.
	static func possibleFeedsInHTMLPage(htmlData: Data, urlString: String) -> Set<FeedSpecifier> {
		let parserData = ParserData(url: urlString, data: htmlData)
		var candidates = HTMLFeedFinder(parserData: parserData).feedSpecifiers

		if candidates.isEmpty, let baseURL = URL(string: urlString) {
			let conventionalGuesses = [
				baseURL.appendingPathComponent("feed", isDirectory: true),
				baseURL.appendingPathComponent("index.xml", isDirectory: false)
			]
			for guessURL in conventionalGuesses {
				candidates.insert(FeedSpecifier(title: nil, urlString: guessURL.absoluteString, source: .HTMLLink))
			}
		}

		return candidates
	}

	/// Heuristic check (via RSCore) for whether `data` looks like HTML.
	static func isHTML(_ data: Data) -> Bool {
		return (data as NSData).rs_dataIsProbablyHTML()
	}

	/// Downloads each candidate and keeps only those that parse as feeds,
	/// merged into `feedSpecifiers`; completes on the main queue with the
	/// combined set once every download has finished.
	/// NOTE(review): `confirmedFeedSpecifiers` is mutated from the download
	/// callbacks without explicit locking — presumably `downloadUsingCache`
	/// calls back serially on one queue; confirm against its implementation.
	static func downloadFeedSpecifiers(_ downloadFeedSpecifiers: Set<FeedSpecifier>, feedSpecifiers: [String: FeedSpecifier], completion: @escaping (Result<Set<FeedSpecifier>, Error>) -> Void) {
		var confirmedFeedSpecifiers = feedSpecifiers
		let group = DispatchGroup()

		for candidate in downloadFeedSpecifiers {
			// Unparseable URL strings are silently skipped.
			guard let candidateURL = URL(string: candidate.urlString) else {
				continue
			}

			group.enter()
			downloadUsingCache(candidateURL) { (data, response, error) in
				// Keep the candidate only when the download succeeded and the
				// payload actually parses as a feed.
				if error == nil, let data = data, let response = response, response.statusIsOK, self.isFeed(data, candidate.urlString) {
					addFeedSpecifier(candidate, feedSpecifiers: &confirmedFeedSpecifiers)
				}
				group.leave()
			}
		}

		group.notify(queue: DispatchQueue.main) {
			completion(.success(Set(confirmedFeedSpecifiers.values)))
		}
	}

	/// True when `data` can be parsed as a feed by FeedParser.
	static func isFeed(_ data: Data, _ urlString: String) -> Bool {
		return FeedParser.canParse(ParserData(url: urlString, data: data))
	}
}
|