diff --git a/src/core/feedsmodelfeed.cpp b/src/core/feedsmodelfeed.cpp
index 979726bc8..a0108a2f1 100755
--- a/src/core/feedsmodelfeed.cpp
+++ b/src/core/feedsmodelfeed.cpp
@@ -177,12 +177,12 @@ QPair FeedsModelFeed::guessFeed(co
   }
 
   QByteArray feed_contents;
-  NetworkResult network_result = NetworkFactory::downloadFile(url,
-                                                              qApp->settings()->value(APP_CFG_FEEDS, "feed_update_timeout", DOWNLOAD_TIMEOUT).toInt(),
-                                                              feed_contents,
-                                                              !username.isEmpty(),
-                                                              username,
-                                                              password);
+  NetworkResult network_result = NetworkFactory::downloadFeedFile(url,
+                                                                  qApp->settings()->value(APP_CFG_FEEDS, "feed_update_timeout", DOWNLOAD_TIMEOUT).toInt(),
+                                                                  feed_contents,
+                                                                  !username.isEmpty(),
+                                                                  username,
+                                                                  password);
   result.second = network_result.first;
 
   if (result.second == QNetworkReply::NoError) {
@@ -395,7 +395,7 @@ QVariant FeedsModelFeed::data(int column, int role) const {
 void FeedsModelFeed::update() {
   QByteArray feed_contents;
   int download_timeout = qApp->settings()->value(APP_CFG_FEEDS, "feed_update_timeout", DOWNLOAD_TIMEOUT).toInt();
-  m_networkError = NetworkFactory::downloadFile(url(), download_timeout, feed_contents, passwordProtected(), username(), password()).first;
+  m_networkError = NetworkFactory::downloadFeedFile(url(), download_timeout, feed_contents, passwordProtected(), username(), password()).first;
   if (m_networkError != QNetworkReply::NoError) {
     qWarning("Error during fetching of new messages for feed '%s' (id %d).",
              qPrintable(url()), id());
diff --git a/src/definitions/definitions.h.in b/src/definitions/definitions.h.in
index 3d0029f4c..7f4dba420 100755
--- a/src/definitions/definitions.h.in
+++ b/src/definitions/definitions.h.in
@@ -39,41 +39,42 @@
 #define APP_DONATE_URL "https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=XMWPLPK893VH4"
 
-#define URI_SCHEME_FEED "feed://"
-#define URI_SCHEME_HTTP "http://"
-#define RELEASES_LIST "https://bitbucket.org/skunkos/rssguard/raw/master/resources/text/UPDATES?at=master"
-#define DEFAULT_LOCALE "en_GB"
-#define DEFAULT_FEED_ENCODING "UTF-8"
-#define DEFAULT_FEED_TYPE "RSS"
-#define URL_REGEXP "^(http|https|feed|ftp):\\/\\/[\\w\\-_]+(\\.[\\w\\-_]+)+([\\w\\-\\.,@?^=%&:/~\\+#]*[\\w\\-\\@?^=%&/~\\+#])?$"
-#define USER_AGENT_HTTP_HEADER "User-Agent"
-#define TEXT_TITLE_LIMIT 30
-#define MAX_ZOOM_FACTOR 10.0
-#define ICON_SIZE_SETTINGS 16
-#define NO_PARENT_CATEGORY -1
-#define ID_RECYCLE_BIN -2
-#define TRAY_ICON_BUBBLE_TIMEOUT 20000
-#define KEY_MESSAGES_VIEW "messages_view_column_"
-#define CLOSE_LOCK_TIMEOUT 3000
-#define DOWNLOAD_TIMEOUT 5000
-#define MESSAGES_VIEW_DEFAULT_COL 170
-#define FEEDS_VIEW_COLUMN_COUNT 2
-#define ELLIPSIS_LENGTH 3
-#define MIN_CATEGORY_NAME_LENGTH 3
-#define INTERNAL_URL_NEWSPAPER "@APP_LOW_NAME@:newspaper"
-#define DEFAULT_AUTO_UPDATE_INTERVAL 15
-#define AUTO_UPDATE_INTERVAL 60000
-#define STARTUP_UPDATE_DELAY 1500
-#define TIMEZONE_OFFSET_LIMIT 6
-#define CHANGE_EVENT_DELAY 250
-#define FLAG_ICON_SUBFOLDER "flags"
-#define SEACRH_MESSAGES_ACTION_NAME "search"
-#define HIGHLIGHTER_ACTION_NAME "highlighter"
-#define SPACER_ACTION_NAME "spacer"
-#define SEPARATOR_ACTION_NAME "separator"
-#define FILTER_WIDTH 150
-#define FILTER_RIGHT_MARGIN 5
-#define FEEDS_VIEW_INDENTATION 10
+#define URI_SCHEME_FEED                    "feed://"
+#define URI_SCHEME_HTTP                    "http://"
+#define RELEASES_LIST                      "https://bitbucket.org/skunkos/rssguard/raw/master/resources/text/UPDATES?at=master"
+#define DEFAULT_LOCALE                     "en_GB"
+#define DEFAULT_FEED_ENCODING              "UTF-8"
+#define DEFAULT_FEED_TYPE                  "RSS"
+#define URL_REGEXP                         "^(http|https|feed|ftp):\\/\\/[\\w\\-_]+(\\.[\\w\\-_]+)+([\\w\\-\\.,@?^=%&:/~\\+#]*[\\w\\-\\@?^=%&/~\\+#])?$"
+#define USER_AGENT_HTTP_HEADER             "User-Agent"
+#define TEXT_TITLE_LIMIT                   30
+#define MAX_ZOOM_FACTOR                    10.0
+#define ICON_SIZE_SETTINGS                 16
+#define NO_PARENT_CATEGORY                 -1
+#define ID_RECYCLE_BIN                     -2
+#define TRAY_ICON_BUBBLE_TIMEOUT           20000
+#define KEY_MESSAGES_VIEW                  "messages_view_column_"
+#define CLOSE_LOCK_TIMEOUT                 3000
+#define DOWNLOAD_TIMEOUT                   5000
+#define MESSAGES_VIEW_DEFAULT_COL          170
+#define FEEDS_VIEW_COLUMN_COUNT            2
+#define ELLIPSIS_LENGTH                    3
+#define MIN_CATEGORY_NAME_LENGTH           3
+#define INTERNAL_URL_NEWSPAPER             "@APP_LOW_NAME@:newspaper"
+#define DEFAULT_AUTO_UPDATE_INTERVAL       15
+#define AUTO_UPDATE_INTERVAL               60000
+#define STARTUP_UPDATE_DELAY               1500
+#define TIMEZONE_OFFSET_LIMIT              6
+#define CHANGE_EVENT_DELAY                 250
+#define FLAG_ICON_SUBFOLDER                "flags"
+#define SEACRH_MESSAGES_ACTION_NAME        "search"
+#define HIGHLIGHTER_ACTION_NAME            "highlighter"
+#define SPACER_ACTION_NAME                 "spacer"
+#define SEPARATOR_ACTION_NAME              "separator"
+#define FILTER_WIDTH                       150
+#define FILTER_RIGHT_MARGIN                5
+#define FEEDS_VIEW_INDENTATION             10
+#define ACCEPT_HEADER_FOR_FEED_DOWNLOADER  "application/atom+xml,application/xml;q=0.9,text/xml;q=0.8,*/*;q=0.7"
 
 #define APP_DB_MYSQL_DRIVER "QMYSQL"
 #define APP_DB_MYSQL_INIT "db_init_mysql.sql"
diff --git a/src/network-web/downloader.cpp b/src/network-web/downloader.cpp
index 7da14441b..be6ebee19 100755
--- a/src/network-web/downloader.cpp
+++ b/src/network-web/downloader.cpp
@@ -24,7 +24,7 @@
 
 Downloader::Downloader(QObject *parent)
   : QObject(parent), m_activeReply(NULL), m_downloadManager(new SilentNetworkAccessManager(this)),
-    m_timer(new QTimer(this)), m_lastOutputData(QByteArray()),
+    m_timer(new QTimer(this)), m_customHeaders(QHash<QByteArray, QByteArray>()), m_lastOutputData(QByteArray()),
     m_lastOutputError(QNetworkReply::NoError), m_lastContentType(QVariant()) {
 
   m_timer->setInterval(DOWNLOAD_TIMEOUT);
@@ -49,6 +49,10 @@ void Downloader::downloadFile(const QString &url, int timeout, bool protected_co
     originatingObject.setProperty("password", password);
     request.setOriginatingObject(&originatingObject);
 
+  foreach (const QByteArray &header_name, m_customHeaders.keys()) {
+    request.setRawHeader(header_name, m_customHeaders.value(header_name));
+  }
+
   // Set url for this request and fire it up.
   m_timer->setInterval(timeout);
 
@@ -126,6 +130,10 @@ QVariant Downloader::lastContentType() const {
   return m_lastContentType;
 }
 
+void Downloader::appendRawHeader(const QByteArray &name, const QByteArray &value) {
+  m_customHeaders.insert(name, value);
+}
+
 QNetworkReply::NetworkError Downloader::lastOutputError() const {
   return m_lastOutputError;
 }
diff --git a/src/network-web/downloader.h b/src/network-web/downloader.h
index c67d49398..9982b09c2 100755
--- a/src/network-web/downloader.h
+++ b/src/network-web/downloader.h
@@ -43,6 +43,8 @@ class Downloader : public QObject {
    QVariant lastContentType() const;
 
   public slots:
+    void appendRawHeader(const QByteArray &name, const QByteArray &value);
+
    // Performs asynchronous download of given file. Redirections are handled.
    void downloadFile(const QString &url, int timeout = DOWNLOAD_TIMEOUT, bool protected_contents = false,
                      const QString &username = QString(), const QString &password = QString());
@@ -69,6 +71,7 @@ class Downloader : public QObject {
    QNetworkReply *m_activeReply;
    SilentNetworkAccessManager *m_downloadManager;
    QTimer *m_timer;
+    QHash<QByteArray, QByteArray> m_customHeaders;
 
    QByteArray m_lastOutputData;
    QNetworkReply::NetworkError m_lastOutputError;
diff --git a/src/network-web/networkfactory.cpp b/src/network-web/networkfactory.cpp
index da3f7484f..93451e8f3 100755
--- a/src/network-web/networkfactory.cpp
+++ b/src/network-web/networkfactory.cpp
@@ -96,9 +96,7 @@ QString NetworkFactory::networkErrorText(QNetworkReply::NetworkError error_code)
   }
 }
 
-QNetworkReply::NetworkError NetworkFactory::downloadIcon(const QString &url,
-                                                         int timeout,
-                                                         QIcon &output) {
+QNetworkReply::NetworkError NetworkFactory::downloadIcon(const QString &url, int timeout, QIcon &output) {
 #if QT_VERSION >= 0x050000
   QString google_s2_with_url = QString("http://www.google.com/s2/favicons?domain=%1").arg(url.toHtmlEscaped());
 #else
@@ -116,6 +114,29 @@ QNetworkReply::NetworkError NetworkFactory::downloadIcon(const QString &url,
   return network_result;
 }
 
+NetworkResult NetworkFactory::downloadFeedFile(const QString &url, int timeout,
+                                               QByteArray &output, bool protected_contents,
+                                               const QString &username, const QString &password) {
+  // Here, we use a "synchronous" approach because we also want a synchronous download API
+  // for some use-cases.
+  Downloader downloader;
+  QEventLoop loop;
+  NetworkResult result;
+
+  downloader.appendRawHeader("Accept", ACCEPT_HEADER_FOR_FEED_DOWNLOADER);
+
+  // We need to quit the event loop when the download finishes.
+  QObject::connect(&downloader, SIGNAL(completed(QNetworkReply::NetworkError)), &loop, SLOT(quit()));
+
+  downloader.downloadFile(url, timeout, protected_contents, username, password);
+  loop.exec();
+  output = downloader.lastOutputData();
+  result.first = downloader.lastOutputError();
+  result.second = downloader.lastContentType();
+
+  return result;
+}
+
 NetworkResult NetworkFactory::downloadFile(const QString &url, int timeout,
                                            QByteArray &output, bool protected_contents,
                                            const QString &username, const QString &password) {
diff --git a/src/network-web/networkfactory.h b/src/network-web/networkfactory.h
index 8bc0dcf2d..af5db3049 100644
--- a/src/network-web/networkfactory.h
+++ b/src/network-web/networkfactory.h
@@ -41,6 +41,10 @@ class NetworkFactory {
    // given URL belongs to.
    static QNetworkReply::NetworkError downloadIcon(const QString &url, int timeout, QIcon &output);
 
+    static NetworkResult downloadFeedFile(const QString &url, int timeout, QByteArray &output,
+                                          bool protected_contents = false, const QString &username = QString(),
+                                          const QString &password = QString());
+
    // Performs SYNCHRONOUS download of file with given URL
    // and given timeout.
    static NetworkResult downloadFile(const QString &url, int timeout, QByteArray &output,
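For review context, a minimal caller-side sketch of the new synchronous helper. The fragment below is illustrative only and not part of the patch: the feed URL is made up, and the enclosing function plus includes of networkfactory.h and definitions.h are assumed. NetworkFactory::downloadFeedFile(), NetworkResult, DOWNLOAD_TIMEOUT and the default arguments all come from the headers changed above.

  // Illustrative only; not part of this patch.
  QByteArray feed_contents;
  NetworkResult result = NetworkFactory::downloadFeedFile("http://example.com/feed.xml",
                                                          DOWNLOAD_TIMEOUT,
                                                          feed_contents);

  if (result.first == QNetworkReply::NoError) {
    // feed_contents now holds the raw feed data, fetched with the feed-specific Accept header;
    // result.second carries the content type reported by the server.
  }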