Mirror of https://github.com/d47081/qBittorrent.git (synced 2025-01-23 13:04:23 +00:00)
Do not mark articles read if download has failed

This change also makes the feed always try to download unread articles that match a download rule even when they are already present in the article list (i.e. failed items are re-downloaded on the next refresh).
parent 050aadd3fa
commit b6c59fd70b
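For orientation before the diff: the idea is that an article no longer marks itself read when its torrent download is merely requested; it waits for the session to report a successful download of its own URL. Below is a minimal, self-contained sketch of that signal/slot wiring under simplified assumptions. Session and Article are illustrative stand-ins for QBtSession and RssArticle, not the real classes, and the real slot also compares against the article link (see the rssarticle hunk further down).

// sketch.cpp -- minimal sketch of the signal/slot wiring this commit introduces.
// Session and Article are simplified stand-ins; build with qmake/CMake so moc runs.
#include <QCoreApplication>
#include <QObject>
#include <QString>
#include <QDebug>

class Session : public QObject {
  Q_OBJECT
signals:
  // Emitted only after a torrent fetched for an RSS rule was added successfully
  // (mirrors QBtSession::newDownloadedTorrentFromRss in the diff below).
  void newDownloadedTorrentFromRss(QString url);
public:
  void simulateSuccessfulDownload(const QString& url) { emit newDownloadedTorrentFromRss(url); }
};

class Article : public QObject {
  Q_OBJECT
public:
  explicit Article(const QString& torrentUrl) : m_torrentUrl(torrentUrl), m_read(false) {}
  bool isRead() const { return m_read; }
signals:
  void articleWasRead();  // lets the owning feed refresh its unread counter
public slots:
  // Mirrors RssArticle::handleTorrentDownloadSuccess: mark read only when the
  // successfully downloaded URL is ours, so a failed download leaves us unread.
  void handleTorrentDownloadSuccess(const QString& url) {
    if (url == m_torrentUrl && !m_read) {
      m_read = true;
      emit articleWasRead();
    }
  }
private:
  QString m_torrentUrl;
  bool m_read;
};

int main(int argc, char** argv) {
  QCoreApplication app(argc, argv);
  Session session;
  Article article("http://example.com/a.torrent");
  // Qt::UniqueConnection keeps retries from stacking duplicate connections.
  QObject::connect(&session, SIGNAL(newDownloadedTorrentFromRss(QString)),
                   &article, SLOT(handleTorrentDownloadSuccess(QString)), Qt::UniqueConnection);
  session.simulateSuccessfulDownload("http://example.com/a.torrent");
  qDebug() << "read after success:" << article.isRead();  // true
  return 0;
}

#include "sketch.moc"  // Q_OBJECT classes live in this .cpp, so include the moc output

In the real code the connection is only made when a download is actually started (see the rssfeed.cpp hunk), and since it uses Qt::UniqueConnection, later retries of the same article do not accumulate duplicate connections.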
@@ -2725,6 +2725,7 @@ void QBtSession::processDownloadedFile(QString url, QString file_path) {
     // Pause torrent if necessary
     if (h.is_valid() && pref.addTorrentsInPause() && Preferences().useAdditionDialog())
       h.pause();
+    emit newDownloadedTorrentFromRss(url);
   }
 }
 
@@ -212,6 +212,7 @@ signals:
   void trackerError(const QString &hash, QString time, QString msg);
   void trackerAuthenticationRequired(const QTorrentHandle& h);
   void newDownloadedTorrent(QString path, QString url);
+  void newDownloadedTorrentFromRss(QString url);
   void newMagnetLink(const QString& link);
   void updateFileSize(const QString &hash);
   void downloadFromUrlFailure(QString url, QString reason);
@@ -120,3 +120,10 @@ const QString& RssArticle::title() const
 {
   return m_title;
 }
+
+void RssArticle::handleTorrentDownloadSuccess(const QString &url) {
+  if (url == m_torrentUrl || url == m_link) {
+    markAsRead();
+    emit articleWasRead();
+  }
+}
@@ -42,7 +42,8 @@ class RssArticle;
 typedef QSharedPointer<RssArticle> RssArticlePtr;
 
 // Item of a rss stream, single information
-class RssArticle {
+class RssArticle : public QObject {
+  Q_OBJECT
 
 public:
   RssArticle(RssFeed* parent, const QString& guid);
@@ -62,6 +63,12 @@ public:
   // Serialization
   QVariantHash toHash() const;
 
+signals:
+  void articleWasRead();
+
+public slots:
+  void handleTorrentDownloadSuccess(const QString& url);
+
   friend RssArticlePtr hashToRssArticle(RssFeed* parent, const QVariantHash& hash);
 
 private:
@@ -114,32 +114,46 @@ void RssFeed::loadItemsFromDisk()
   }
 }
 
-void RssFeed::addArticle(const RssArticlePtr& article)
-{
-  Q_ASSERT(!m_articles.contains(article->guid()));
-  // Update unreadCount
-  if (!article->isRead())
-    ++m_unreadCount;
-  // Insert in hash table
-  m_articles[article->guid()] = article;
-  // Insertion sort
-  RssArticleList::Iterator lowerBound = qLowerBound(m_articlesByDate.begin(), m_articlesByDate.end(), article, rssArticleDateRecentThan);
-  m_articlesByDate.insert(lowerBound, article);
-  const int lbIndex = m_articlesByDate.indexOf(article);
-  // Restrict size
-  const int max_articles = RssSettings().getRSSMaxArticlesPerFeed();
-  if (m_articlesByDate.size() > max_articles) {
-    RssArticlePtr oldestArticle = m_articlesByDate.takeLast();
-    m_articles.remove(oldestArticle->guid());
-    // Update unreadCount
-    if (!oldestArticle->isRead())
-      --m_unreadCount;
-  }
-
-  // Check if article was inserted at the end of the list and will break max_articles limit
-  if (RssSettings().isRssDownloadingEnabled()) {
-    if (lbIndex < max_articles && !article->isRead())
-      downloadArticleTorrentIfMatching(m_manager->downloadRules(), article);
+void RssFeed::addArticle(const RssArticlePtr& article) {
+  int lbIndex = -1;
+  int max_articles = RssSettings().getRSSMaxArticlesPerFeed();
+
+  if (!m_articles.contains(article->guid())) {
+    markAsDirty();
+
+    // Update unreadCount
+    if (!article->isRead())
+      ++m_unreadCount;
+    // Insert in hash table
+    m_articles[article->guid()] = article;
+    // Insertion sort
+    RssArticleList::Iterator lowerBound = qLowerBound(m_articlesByDate.begin(), m_articlesByDate.end(), article, rssArticleDateRecentThan);
+    m_articlesByDate.insert(lowerBound, article);
+    lbIndex = m_articlesByDate.indexOf(article);
+    if (m_articlesByDate.size() > max_articles) {
+      RssArticlePtr oldestArticle = m_articlesByDate.takeLast();
+      m_articles.remove(oldestArticle->guid());
+      // Update unreadCount
+      if (!oldestArticle->isRead())
+        --m_unreadCount;
+    }
+
+    // Check if article was inserted at the end of the list and will break max_articles limit
+    if (RssSettings().isRssDownloadingEnabled()) {
+      if (lbIndex < max_articles && !article->isRead())
+        downloadArticleTorrentIfMatching(m_manager->downloadRules(), article);
+    }
+  }
+  else {
+    // m_articles.contains(article->guid())
+    // Try to download skipped articles
+    if (RssSettings().isRssDownloadingEnabled()) {
+      RssArticlePtr skipped = m_articles.value(article->guid(), RssArticlePtr());
+      if (skipped) {
+        if (!skipped->isRead())
+          downloadArticleTorrentIfMatching(m_manager->downloadRules(), skipped);
+      }
+    }
   }
 }
 
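The addArticle() rework above is what turns the unread state into a retry mechanism: handleNewArticle() no longer drops articles whose GUID is already known (see the hunk further below), and addArticle() now routes such already-known, still-unread articles through the download rules again. A compact sketch of that decision logic follows, using hypothetical simplified types in place of RssArticlePtr and the rule list; it is an illustration of the branch structure, not the project's code.

// retry_sketch.cpp -- simplified decision logic modelled on RssFeed::addArticle().
// ArticleInfo and tryDownload() are illustrative stand-ins, not qBittorrent types.
#include <QHash>
#include <QSharedPointer>
#include <QString>
#include <QDebug>

struct ArticleInfo {
  QString guid;
  bool read;
};
typedef QSharedPointer<ArticleInfo> ArticleInfoPtr;

static void tryDownload(const ArticleInfoPtr& a) {
  // Stand-in for downloadArticleTorrentIfMatching(): the article is marked read
  // elsewhere, and only after a confirmed successful download.
  qDebug() << "running download rules for" << a->guid;
}

static void addArticle(QHash<QString, ArticleInfoPtr>& articles, const ArticleInfoPtr& article,
                       bool downloadingEnabled) {
  if (!articles.contains(article->guid)) {
    // New article: store it, then run it through the rules once.
    articles.insert(article->guid, article);
    if (downloadingEnabled && !article->read)
      tryDownload(article);
  } else if (downloadingEnabled) {
    // Already known from a previous refresh. If it is still unread, its download
    // either failed or was skipped before -- try again.
    ArticleInfoPtr known = articles.value(article->guid);
    if (known && !known->read)
      tryDownload(known);
  }
}

int main() {
  QHash<QString, ArticleInfoPtr> articles;
  ArticleInfoPtr a(new ArticleInfo{QStringLiteral("guid-1"), false});
  addArticle(articles, a, true);  // first sighting: rules run
  addArticle(articles, a, true);  // next refresh, still unread: rules run again
  a->read = true;
  addArticle(articles, a, true);  // read now: nothing to retry
  return 0;
}

Combined with the change that only a successful download marks an article as read, an article whose download failed stays unread and is therefore retried on the next feed refresh.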
@@ -338,15 +352,16 @@ void RssFeed::downloadArticleTorrentIfMatching(RssDownloadRuleList* rules, const
   if (!matching_rule)
     return;
 
-  // Torrent was downloaded, consider article as read
-  article->markAsRead();
   // Download the torrent
   const QString& torrent_url = article->torrentUrl();
   QBtSession::instance()->addConsoleMessage(tr("Automatically downloading %1 torrent from %2 RSS feed...").arg(article->title()).arg(displayName()));
   if (torrent_url.startsWith("magnet:", Qt::CaseInsensitive))
     QBtSession::instance()->addMagnetSkipAddDlg(torrent_url, matching_rule->savePath(), matching_rule->label());
-  else
+  else {
+    connect(QBtSession::instance(), SIGNAL(newDownloadedTorrentFromRss(QString)), article.data(), SLOT(handleTorrentDownloadSuccess(const QString&)), Qt::UniqueConnection);
+    connect(article.data(), SIGNAL(articleWasRead()), SLOT(handleArticleStateChanged()), Qt::UniqueConnection);
     QBtSession::instance()->downloadUrlAndSkipDialog(torrent_url, matching_rule->savePath(), matching_rule->label(), feedCookies());
+  }
 }
 
 void RssFeed::recheckRssItemsForDownload()
@@ -364,12 +379,6 @@ void RssFeed::handleNewArticle(const QString& feedUrl, const QVariantHash& artic
   if (feedUrl != m_url)
     return;
 
-  const QString guid = articleData["id"].toString();
-  if (m_articles.contains(guid))
-    return;
-
-  markAsDirty();
-
   RssArticlePtr article = hashToRssArticle(this, articleData);
   Q_ASSERT(article);
   addArticle(article);
@@ -400,6 +409,10 @@ void RssFeed::handleFeedParsingFinished(const QString& feedUrl, const QString& e
   saveItemsToDisk();
 }
 
+void RssFeed::handleArticleStateChanged() {
+  m_manager->forwardFeedInfosChanged(m_url, displayName(), m_unreadCount);
+}
+
 void RssFeed::decrementUnreadCount()
 {
   --m_unreadCount;
@@ -86,6 +86,7 @@ private slots:
   void handleFeedTitle(const QString& feedUrl, const QString& title);
   void handleNewArticle(const QString& feedUrl, const QVariantHash& article);
   void handleFeedParsingFinished(const QString& feedUrl, const QString& error);
+  void handleArticleStateChanged();
 
 private:
   QString iconUrl() const;