Improve coding style
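The change is mechanical: opening braces move from the end of the statement onto their own line (Allman style) for control statements, switch blocks, try/catch, and braced initializers. A minimal before/after sketch of the convention, taken from the first hunk below:

    // before: opening brace attached to the statement
    if (!m_isRead) {
        m_isRead = true;
    }

    // after: opening brace on its own line
    if (!m_isRead)
    {
        m_isRead = true;
    }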
@@ -126,7 +126,8 @@ QVariantHash Article::data() const
 
 void Article::markAsRead()
 {
-    if (!m_isRead) {
+    if (!m_isRead)
+    {
         m_isRead = true;
         m_data[KeyIsRead] = m_isRead;
         emit read(this);
@@ -82,7 +82,8 @@ namespace
 
         const QJsonObject jsonObj {jsonDoc.object()};
         QVector<RSS::AutoDownloadRule> rules;
-        for (auto it = jsonObj.begin(); it != jsonObj.end(); ++it) {
+        for (auto it = jsonObj.begin(); it != jsonObj.end(); ++it)
+        {
             const QJsonValue jsonVal {it.value()};
             if (!jsonVal.isObject())
                 throw RSS::ParsingError(RSS::AutoDownloader::tr("Invalid data format."));
@@ -177,7 +178,8 @@ QList<AutoDownloadRule> AutoDownloader::rules() const
 
 void AutoDownloader::insertRule(const AutoDownloadRule &rule)
 {
-    if (!hasRule(rule.name())) {
+    if (!hasRule(rule.name()))
+    {
         // Insert new rule
         setRule_impl(rule);
         m_dirty = true;
@@ -185,7 +187,8 @@ void AutoDownloader::insertRule(const AutoDownloadRule &rule)
         emit ruleAdded(rule.name());
         resetProcessingQueue();
     }
-    else if (ruleByName(rule.name()) != rule) {
+    else if (ruleByName(rule.name()) != rule)
+    {
         // Update existing rule
         setRule_impl(rule);
         m_dirty = true;
@@ -211,7 +214,8 @@ bool AutoDownloader::renameRule(const QString &ruleName, const QString &newRuleN
 
 void AutoDownloader::removeRule(const QString &ruleName)
 {
-    if (m_rules.contains(ruleName)) {
+    if (m_rules.contains(ruleName))
+    {
         emit ruleAboutToBeRemoved(ruleName);
         m_rules.remove(ruleName);
         m_dirty = true;
@@ -221,7 +225,8 @@ void AutoDownloader::removeRule(const QString &ruleName)
 
 QByteArray AutoDownloader::exportRules(AutoDownloader::RulesFileFormat format) const
 {
-    switch (format) {
+    switch (format)
+    {
     case RulesFileFormat::Legacy:
         return exportRulesToLegacyFormat();
     default:
@@ -231,7 +236,8 @@ QByteArray AutoDownloader::exportRules(AutoDownloader::RulesFileFormat format) c
 
 void AutoDownloader::importRules(const QByteArray &data, const AutoDownloader::RulesFileFormat format)
 {
-    switch (format) {
+    switch (format)
+    {
     case RulesFileFormat::Legacy:
         importRulesFromLegacyFormat(data);
         break;
@@ -286,8 +292,10 @@ QStringList AutoDownloader::smartEpisodeFilters() const
 {
     const QVariant filtersSetting = SettingsStorage::instance()->loadValue(SettingsKey_SmartEpisodeFilter);
 
-    if (filtersSetting.isNull()) {
-        QStringList filters = {
+    if (filtersSetting.isNull())
+    {
+        QStringList filters =
+        {
             "s(\\d+)e(\\d+)", // Format 1: s01e01
             "(\\d+)x(\\d+)", // Format 2: 01x01
             "(\\d{4}[.\\-]\\d{1,2}[.\\-]\\d{1,2})", // Format 3: 2017.01.01
@@ -375,7 +383,8 @@ void AutoDownloader::addJobForArticle(const Article *article)
 
 void AutoDownloader::processJob(const QSharedPointer<ProcessingJob> &job)
 {
-    for (AutoDownloadRule &rule : m_rules) {
+    for (AutoDownloadRule &rule : m_rules)
+    {
         if (!rule.isEnabled()) continue;
         if (!rule.feedURLs().contains(job->feedURL)) continue;
         if (!rule.accepts(job->articleData)) continue;
@@ -393,13 +402,16 @@ void AutoDownloader::processJob(const QSharedPointer<ProcessingJob> &job)
     const auto torrentURL = job->articleData.value(Article::KeyTorrentURL).toString();
     BitTorrent::Session::instance()->addTorrent(torrentURL, params);
 
-    if (BitTorrent::MagnetUri(torrentURL).isValid()) {
-        if (Feed *feed = Session::instance()->feedByURL(job->feedURL)) {
+    if (BitTorrent::MagnetUri(torrentURL).isValid())
+    {
+        if (Feed *feed = Session::instance()->feedByURL(job->feedURL))
+        {
             if (Article *article = feed->articleByGUID(job->articleData.value(Article::KeyId).toString()))
                 article->markAsRead();
         }
    }
-    else {
+    else
+    {
         // waiting for torrent file downloading
         m_waitingJobs.insert(torrentURL, job);
     }
@@ -423,12 +435,14 @@ void AutoDownloader::load()
 
 void AutoDownloader::loadRules(const QByteArray &data)
 {
-    try {
+    try
+    {
         const auto rules = rulesFromJSON(data);
         for (const auto &rule : rules)
             setRule_impl(rule);
     }
-    catch (const ParsingError &error) {
+    catch (const ParsingError &error)
+    {
         LogMsg(tr("Couldn't load RSS AutoDownloader rules. Reason: %1")
                .arg(error.message()), Log::CRITICAL);
     }
@@ -438,7 +452,8 @@ void AutoDownloader::loadRulesLegacy()
 {
     const SettingsPtr settings = Profile::instance()->applicationSettings(QStringLiteral("qBittorrent-rss"));
     const QVariantHash rules = settings->value(QStringLiteral("download_rules")).toHash();
-    for (const QVariant &ruleVar : rules) {
+    for (const QVariant &ruleVar : rules)
+    {
         const auto rule = AutoDownloadRule::fromLegacyDict(ruleVar.toHash());
         if (!rule.name().isEmpty())
             insertRule(rule);
@@ -475,7 +490,8 @@ void AutoDownloader::resetProcessingQueue()
     m_processingQueue.clear();
     if (!m_processingEnabled) return;
 
-    for (Article *article : asConst(Session::instance()->rootFolder()->articles())) {
+    for (Article *article : asConst(Session::instance()->rootFolder()->articles()))
+    {
         if (!article->isRead() && !article->torrentUrl().isEmpty())
             addJobForArticle(article);
     }
@@ -489,13 +505,16 @@ void AutoDownloader::startProcessing()
 
 void AutoDownloader::setProcessingEnabled(const bool enabled)
 {
-    if (m_processingEnabled != enabled) {
+    if (m_processingEnabled != enabled)
+    {
         m_processingEnabled = enabled;
         SettingsStorage::instance()->storeValue(SettingsKey_ProcessingEnabled, m_processingEnabled);
-        if (m_processingEnabled) {
+        if (m_processingEnabled)
+        {
             startProcessing();
         }
-        else {
+        else
+        {
             m_processingQueue.clear();
             disconnect(Session::instance()->rootFolder(), &Folder::newArticle, this, &AutoDownloader::handleNewArticle);
         }
@@ -64,7 +64,8 @@ namespace
 
     QJsonValue triStateBoolToJsonValue(const TriStateBool triStateBool)
     {
-        switch (static_cast<signed char>(triStateBool)) {
+        switch (static_cast<signed char>(triStateBool))
+        {
         case 0: return false;
         case 1: return true;
         default: return {};
@@ -73,7 +74,8 @@ namespace
 
    TriStateBool addPausedLegacyToTriStateBool(const int val)
    {
-        switch (val) {
+        switch (val)
+        {
         case 1: return TriStateBool::True; // always
         case 2: return TriStateBool::False; // never
         default: return TriStateBool::Undefined; // default
@@ -82,7 +84,8 @@ namespace
 
    int triStateBoolToAddPausedLegacy(const TriStateBool triStateBool)
    {
-        switch (static_cast<signed char>(triStateBool)) {
+        switch (static_cast<signed char>(triStateBool))
+        {
         case 0: return 2; // never
         case 1: return 1; // always
         default: return 0; // default
@@ -164,7 +167,8 @@ QString computeEpisodeName(const QString &article)
        return {};
 
    QStringList ret;
-    for (int i = 1; i <= match.lastCapturedIndex(); ++i) {
+    for (int i = 1; i <= match.lastCapturedIndex(); ++i)
+    {
        const QString cap = match.captured(i);
 
        if (cap.isEmpty())
@@ -199,8 +203,10 @@ QRegularExpression AutoDownloadRule::cachedRegex(const QString &expression, cons
     Q_ASSERT(!expression.isEmpty());
 
     QRegularExpression &regex = m_dataPtr->cachedRegexes[expression];
-    if (regex.pattern().isEmpty()) {
-        regex = QRegularExpression {
+    if (regex.pattern().isEmpty())
+    {
+        regex = QRegularExpression
+        {
             (isRegex ? expression : Utils::String::wildcardToRegex(expression))
             , QRegularExpression::CaseInsensitiveOption};
    }
@@ -212,12 +218,14 @@ bool AutoDownloadRule::matchesExpression(const QString &articleTitle, const QStr
 {
     const QRegularExpression whitespace {"\\s+"};
 
-    if (expression.isEmpty()) {
+    if (expression.isEmpty())
+    {
         // A regex of the form "expr|" will always match, so do the same for wildcards
         return true;
     }
 
-    if (m_dataPtr->useRegex) {
+    if (m_dataPtr->useRegex)
+    {
         const QRegularExpression reg(cachedRegex(expression));
         return reg.match(articleTitle).hasMatch();
     }
@@ -225,7 +233,8 @@ bool AutoDownloadRule::matchesExpression(const QString &articleTitle, const QStr
     // Only match if every wildcard token (separated by spaces) is present in the article name.
     // Order of wildcard tokens is unimportant (if order is important, they should have used *).
     const QStringList wildcards {expression.split(whitespace, QString::SplitBehavior::SkipEmptyParts)};
-    for (const QString &wildcard : wildcards) {
+    for (const QString &wildcard : wildcards)
+    {
         const QRegularExpression reg {cachedRegex(wildcard, false)};
         if (!reg.match(articleTitle).hasMatch())
             return false;
@@ -279,7 +288,8 @@ bool AutoDownloadRule::matchesEpisodeFilterExpression(const QString &articleTitl
     const QStringList episodes {matcher.captured(2).split(';')};
     const int seasonOurs {season.toInt()};
 
-    for (QString episode : episodes) {
+    for (QString episode : episodes)
+    {
         if (episode.isEmpty())
             continue;
 
@@ -287,7 +297,8 @@ bool AutoDownloadRule::matchesEpisodeFilterExpression(const QString &articleTitl
         while ((episode.size() > 1) && episode.startsWith('0'))
             episode = episode.right(episode.size() - 1);
 
-        if (episode.indexOf('-') != -1) { // Range detected
+        if (episode.indexOf('-') != -1)
+        { // Range detected
             const QString partialPattern1 {"\\bs0?(\\d{1,4})[ -_\\.]?e(0?\\d{1,4})(?:\\D|\\b)"};
             const QString partialPattern2 {"\\b(\\d{1,4})x(0?\\d{1,4})(?:\\D|\\b)"};
 
@@ -295,21 +306,25 @@ bool AutoDownloadRule::matchesEpisodeFilterExpression(const QString &articleTitl
             QRegularExpressionMatch matcher = cachedRegex(partialPattern1).match(articleTitle);
             bool matched = matcher.hasMatch();
 
-            if (!matched) {
+            if (!matched)
+            {
                 matcher = cachedRegex(partialPattern2).match(articleTitle);
                 matched = matcher.hasMatch();
             }
 
-            if (matched) {
+            if (matched)
+            {
                 const int seasonTheirs {matcher.captured(1).toInt()};
                 const int episodeTheirs {matcher.captured(2).toInt()};
 
-                if (episode.endsWith('-')) { // Infinite range
+                if (episode.endsWith('-'))
+                { // Infinite range
                     const int episodeOurs {episode.leftRef(episode.size() - 1).toInt()};
                     if (((seasonTheirs == seasonOurs) && (episodeTheirs >= episodeOurs)) || (seasonTheirs > seasonOurs))
                         return true;
                 }
-                else { // Normal range
+                else
+                { // Normal range
                     const QStringList range {episode.split('-')};
                     Q_ASSERT(range.size() == 2);
                     if (range.first().toInt() > range.last().toInt())
@@ -322,7 +337,8 @@ bool AutoDownloadRule::matchesEpisodeFilterExpression(const QString &articleTitl
                 }
             }
         }
-        else { // Single number
+        else
+        { // Single number
             const QString expStr {QString::fromLatin1("\\b(?:s0?%1[ -_\\.]?e0?%2|%1x0?%2)(?:\\D|\\b)").arg(season, episode)};
             if (cachedRegex(expStr).match(articleTitle).hasMatch())
                 return true;
@@ -343,7 +359,8 @@ bool AutoDownloadRule::matchesSmartEpisodeFilter(const QString &articleTitle) co
 
     // See if this episode has been downloaded before
     const bool previouslyMatched = m_dataPtr->previouslyMatchedEpisodes.contains(episodeStr);
-    if (previouslyMatched) {
+    if (previouslyMatched)
+    {
         if (!AutoDownloader::instance()->downloadRepacks())
             return false;
 
@@ -365,7 +382,8 @@ bool AutoDownloadRule::matchesSmartEpisodeFilter(const QString &articleTitle) co
 
     // If this is a REPACK and PROPER download, add the individual entries to the list
     // so we don't download those
-    if (isRepack && isProper) {
+    if (isRepack && isProper)
+    {
         m_dataPtr->lastComputedEpisodes.append(episodeStr + QLatin1String("-REPACK"));
         m_dataPtr->lastComputedEpisodes.append(episodeStr + QLatin1String("-PROPER"));
     }
@@ -378,7 +396,8 @@ bool AutoDownloadRule::matchesSmartEpisodeFilter(const QString &articleTitle) co
 bool AutoDownloadRule::matches(const QVariantHash &articleData) const
 {
     const QDateTime articleDate {articleData[Article::KeyDate].toDateTime()};
-    if (ignoreDays() > 0) {
+    if (ignoreDays() > 0)
+    {
         if (lastMatch().isValid() && (articleDate < lastMatch().addDays(ignoreDays())))
             return false;
     }
@@ -404,7 +423,8 @@ bool AutoDownloadRule::accepts(const QVariantHash &articleData)
     setLastMatch(articleData[Article::KeyDate].toDateTime());
 
     // If there's a matched episode string, add that to the previously matched list
-    if (!m_dataPtr->lastComputedEpisodes.isEmpty()) {
+    if (!m_dataPtr->lastComputedEpisodes.isEmpty())
+    {
         m_dataPtr->previouslyMatchedEpisodes.append(m_dataPtr->lastComputedEpisodes);
         m_dataPtr->lastComputedEpisodes.clear();
     }
@@ -474,10 +494,12 @@ AutoDownloadRule AutoDownloadRule::fromJsonObject(const QJsonObject &jsonObj, co
 
     const QJsonValue previouslyMatchedVal = jsonObj.value(Str_PreviouslyMatched);
     QStringList previouslyMatched;
-    if (previouslyMatchedVal.isString()) {
+    if (previouslyMatchedVal.isString())
+    {
         previouslyMatched << previouslyMatchedVal.toString();
     }
-    else {
+    else
+    {
         for (const QJsonValue &val : asConst(previouslyMatchedVal.toArray()))
             previouslyMatched << val.toString();
     }
@@ -69,7 +69,8 @@ Feed::Feed(const QUuid &uid, const QString &url, const QString &path, Session *s
     m_dataFileName = QString::fromLatin1(m_uid.toRfc4122().toHex()) + QLatin1String(".json");
 
     // Move to new file naming scheme (since v4.1.2)
-    const QString legacyFilename {Utils::Fs::toValidFileSystemName(m_url, false, QLatin1String("_"))
+    const QString legacyFilename
+    {Utils::Fs::toValidFileSystemName(m_url, false, QLatin1String("_"))
                 + QLatin1String(".json")};
     const QDir storageDir {m_session->dataFileStorage()->storageDir()};
     if (!QFile::exists(storageDir.absoluteFilePath(m_dataFileName)))
@@ -106,8 +107,10 @@ QList<Article *> Feed::articles() const
 void Feed::markAsRead()
 {
     const int oldUnreadCount = m_unreadCount;
-    for (Article *article : asConst(m_articles)) {
-        if (!article->isRead()) {
+    for (Article *article : asConst(m_articles))
+    {
+        if (!article->isRead())
+        {
             article->disconnect(this);
             article->markAsRead();
             --m_unreadCount;
@@ -115,7 +118,8 @@ void Feed::markAsRead()
         }
     }
 
-    if (m_unreadCount != oldUnreadCount) {
+    if (m_unreadCount != oldUnreadCount)
+    {
         m_dirty = true;
         store();
         emit unreadCountChanged(this);
@@ -180,7 +184,8 @@ void Feed::handleMaxArticlesPerFeedChanged(const int n)
 
 void Feed::handleIconDownloadFinished(const Net::DownloadResult &result)
 {
-    if (result.status == Net::DownloadStatus::Success) {
+    if (result.status == Net::DownloadStatus::Success)
+    {
         m_iconPath = Utils::Fs::toUniformPath(result.filePath);
         emit iconLoaded(this);
     }
@@ -195,13 +200,15 @@ void Feed::handleDownloadFinished(const Net::DownloadResult &result)
 {
     m_downloadHandler = nullptr; // will be deleted by DownloadManager later
 
-    if (result.status == Net::DownloadStatus::Success) {
+    if (result.status == Net::DownloadStatus::Success)
+    {
         LogMsg(tr("RSS feed at '%1' is successfully downloaded. Starting to parse it.")
                .arg(result.url));
         // Parse the download RSS
         m_parser->parse(result.data);
     }
-    else {
+    else
+    {
         m_isLoading = false;
         m_hasError = true;
 
@@ -216,13 +223,15 @@ void Feed::handleParsingFinished(const RSS::Private::ParsingResult &result)
 {
     m_hasError = !result.error.isEmpty();
 
-    if (!result.title.isEmpty() && (title() != result.title)) {
+    if (!result.title.isEmpty() && (title() != result.title))
+    {
         m_title = result.title;
         m_dirty = true;
         emit titleChanged(this);
     }
 
-    if (!result.lastBuildDate.isEmpty()) {
+    if (!result.lastBuildDate.isEmpty())
+    {
         m_lastBuildDate = result.lastBuildDate;
         m_dirty = true;
     }
@@ -234,7 +243,8 @@ void Feed::handleParsingFinished(const RSS::Private::ParsingResult &result)
     const int newArticlesCount = updateArticles(result.articles);
     store();
 
-    if (m_hasError) {
+    if (m_hasError)
+    {
         LogMsg(tr("Failed to parse RSS feed at '%1'. Reason: %2").arg(m_url, result.error)
                , Log::WARNING);
     }
@@ -249,16 +259,19 @@ void Feed::load()
 {
     QFile file(m_session->dataFileStorage()->storageDir().absoluteFilePath(m_dataFileName));
 
-    if (!file.exists()) {
+    if (!file.exists())
+    {
         loadArticlesLegacy();
         m_dirty = true;
         store(); // convert to new format
     }
-    else if (file.open(QFile::ReadOnly)) {
+    else if (file.open(QFile::ReadOnly))
+    {
         loadArticles(file.readAll());
         file.close();
     }
-    else {
+    else
+    {
         LogMsg(tr("Couldn't read RSS Session data from %1. Error: %2")
                .arg(m_dataFileName, file.errorString())
                , Log::WARNING);
@@ -269,28 +282,33 @@ void Feed::loadArticles(const QByteArray &data)
 {
     QJsonParseError jsonError;
     const QJsonDocument jsonDoc = QJsonDocument::fromJson(data, &jsonError);
-    if (jsonError.error != QJsonParseError::NoError) {
+    if (jsonError.error != QJsonParseError::NoError)
+    {
         LogMsg(tr("Couldn't parse RSS Session data. Error: %1").arg(jsonError.errorString())
                , Log::WARNING);
         return;
     }
 
-    if (!jsonDoc.isArray()) {
+    if (!jsonDoc.isArray())
+    {
         LogMsg(tr("Couldn't load RSS Session data. Invalid data format."), Log::WARNING);
         return;
     }
 
     const QJsonArray jsonArr = jsonDoc.array();
     int i = -1;
-    for (const QJsonValue &jsonVal : jsonArr) {
+    for (const QJsonValue &jsonVal : jsonArr)
+    {
         ++i;
-        if (!jsonVal.isObject()) {
+        if (!jsonVal.isObject())
+        {
             LogMsg(tr("Couldn't load RSS article '%1#%2'. Invalid data format.").arg(m_url).arg(i)
                    , Log::WARNING);
             continue;
         }
 
-        try {
+        try
+        {
             auto article = new Article(this, jsonVal.toObject());
             if (!addArticle(article))
                 delete article;
@@ -304,13 +322,15 @@ void Feed::loadArticlesLegacy()
     const SettingsPtr qBTRSSFeeds = Profile::instance()->applicationSettings(QStringLiteral("qBittorrent-rss-feeds"));
     const QVariantHash allOldItems = qBTRSSFeeds->value("old_items").toHash();
 
-    for (const QVariant &var : asConst(allOldItems.value(m_url).toList())) {
+    for (const QVariant &var : asConst(allOldItems.value(m_url).toList()))
+    {
         auto hash = var.toHash();
         // update legacy keys
         hash[Article::KeyLink] = hash.take(QLatin1String("news_link"));
         hash[Article::KeyTorrentURL] = hash.take(QLatin1String("torrent_url"));
         hash[Article::KeyIsRead] = hash.take(QLatin1String("read"));
-        try {
+        try
+        {
             auto article = new Article(this, hash);
             if (!addArticle(article))
                 delete article;
@@ -353,7 +373,8 @@ bool Feed::addArticle(Article *article)
 
     m_articles[article->guid()] = article;
     m_articlesByDate.insert(lowerBound, article);
-    if (!article->isRead()) {
+    if (!article->isRead())
+    {
         increaseUnreadCount();
         connect(article, &Article::read, this, &Feed::handleArticleRead);
     }
@@ -414,12 +435,14 @@ int Feed::updateArticles(const QList<QVariantHash> &loadedArticles)
     QDateTime dummyPubDate {QDateTime::currentDateTime()};
     QVector<QVariantHash> newArticles;
     newArticles.reserve(loadedArticles.size());
-    for (QVariantHash article : loadedArticles) {
+    for (QVariantHash article : loadedArticles)
+    {
         // If article has no publication date we use feed update time as a fallback.
         // To prevent processing of "out-of-limit" articles we must not assign dates
         // that are earlier than the dates of existing articles.
         const Article *existingArticle = articleByGUID(article[Article::KeyId].toString());
-        if (existingArticle) {
+        if (existingArticle)
+        {
             dummyPubDate = existingArticle->date().addMSecs(-1);
             continue;
         }
@@ -462,7 +485,8 @@ int Feed::updateArticles(const QList<QVariantHash> &loadedArticles)
     int newArticlesCount = 0;
     std::for_each(sortData.crbegin(), sortData.crend(), [this, &newArticlesCount](const ArticleSortAdaptor &a)
     {
-        if (a.second) {
+        if (a.second)
+        {
             addArticle(new Article {this, *a.second});
             ++newArticlesCount;
         }
@@ -482,7 +506,8 @@ QJsonValue Feed::toJsonValue(const bool withData) const
     jsonObj.insert(KEY_UID, uid().toString());
     jsonObj.insert(KEY_URL, url());
 
-    if (withData) {
+    if (withData)
+    {
         jsonObj.insert(KEY_TITLE, title());
         jsonObj.insert(KEY_LASTBUILDDATE, lastBuildDate());
         jsonObj.insert(KEY_ISLOADING, isLoading());
@@ -499,7 +524,8 @@ QJsonValue Feed::toJsonValue(const bool withData) const
 
 void Feed::handleSessionProcessingEnabledChanged(const bool enabled)
 {
-    if (enabled) {
+    if (enabled)
+    {
         downloadIcon();
         disconnect(m_session, &Session::processingStateChanged
                    , this, &Feed::handleSessionProcessingEnabledChanged);
@@ -57,7 +57,8 @@ QList<Article *> Folder::articles() const
 {
     QList<Article *> news;
 
-    for (Item *item : asConst(items())) {
+    for (Item *item : asConst(items()))
+    {
         int n = news.size();
         news << item->articles();
         std::inplace_merge(news.begin(), news.begin() + n, news.end()
@@ -47,7 +47,8 @@ Item::~Item() {}
 
 void Item::setPath(const QString &path)
 {
-    if (path != m_path) {
+    if (path != m_path)
+    {
         m_path = path;
         emit pathChanged(this);
     }
@@ -69,7 +70,8 @@ bool Item::isValidPath(const QString &path)
         QString(R"(\A[^\%1]+(\%1[^\%1]+)*\z)").arg(Item::PathSeparator)
         , QRegularExpression::DontCaptureOption);
 
-    if (path.isEmpty() || !re.match(path).hasMatch()) {
+    if (path.isEmpty() || !re.match(path).hasMatch())
+    {
         qDebug() << "Incorrect RSS Item path:" << path;
         return false;
     }
@@ -93,7 +95,8 @@ QStringList Item::expandPath(const QString &path)
     //     return result;
 
     int index = 0;
-    while ((index = path.indexOf(Item::PathSeparator, index)) >= 0) {
+    while ((index = path.indexOf(Item::PathSeparator, index)) >= 0)
+    {
         result << path.left(index);
         ++index;
     }
@@ -53,7 +53,8 @@ namespace
     // http://www.w3.org/TR/xhtml1/DTD/xhtml-lat1.ent
     // http://www.w3.org/TR/xhtml1/DTD/xhtml-symbol.ent
     // http://www.w3.org/TR/xhtml1/DTD/xhtml-special.ent
-    static const QHash<QString, QString> HTMLEntities {
+    static const QHash<QString, QString> HTMLEntities
+    {
         {"nbsp", " "}, // no-break space = non-breaking space, U+00A0 ISOnum
         {"iexcl", "¡"}, // inverted exclamation mark, U+00A1 ISOnum
         {"cent", "¢"}, // cent sign, U+00A2 ISOnum
@@ -359,17 +360,20 @@ namespace
    // Ported to Qt from KDElibs4
    QDateTime parseDate(const QString &string)
    {
-        const char shortDay[][4] = {
+        const char shortDay[][4] =
+        {
            "Mon", "Tue", "Wed",
            "Thu", "Fri", "Sat",
            "Sun"
        };
-        const char longDay[][10] = {
+        const char longDay[][10] =
+        {
            "Monday", "Tuesday", "Wednesday",
            "Thursday", "Friday", "Saturday",
            "Sunday"
        };
-        const char shortMonth[][4] = {
+        const char shortMonth[][4] =
+        {
            "Jan", "Feb", "Mar", "Apr",
            "May", "Jun", "Jul", "Aug",
            "Sep", "Oct", "Nov", "Dec"
@@ -389,7 +393,8 @@ namespace
        // Also accept obsolete form "Weekday, DD-Mon-YY HH:MM:SS ±hhmm"
        QRegExp rx("^(?:([A-Z][a-z]+),\\s*)?(\\d{1,2})(\\s+|-)([^-\\s]+)(\\s+|-)(\\d{2,4})\\s+(\\d\\d):(\\d\\d)(?::(\\d\\d))?\\s+(\\S+)$");
        QStringList parts;
-        if (!str.indexOf(rx)) {
+        if (!str.indexOf(rx))
+        {
            // Check that if date has '-' separators, both separators are '-'.
            parts = rx.capturedTexts();
            const bool h1 = (parts[3] == QLatin1String("-"));
@@ -397,7 +402,8 @@ namespace
            if (h1 != h2)
                return QDateTime::currentDateTime();
        }
-        else {
+        else
+        {
            // Check for the obsolete form "Wdy Mon DD HH:MM:SS YYYY"
            rx = QRegExp("^([A-Z][a-z]+)\\s+(\\S+)\\s+(\\d\\d)\\s+(\\d\\d):(\\d\\d):(\\d\\d)\\s+(\\d\\d\\d\\d)$");
            if (str.indexOf(rx))
@@ -421,7 +427,8 @@ namespace
            return QDateTime::currentDateTime();
 
        int second = 0;
-        if (!parts[nsec].isEmpty()) {
+        if (!parts[nsec].isEmpty())
+        {
            second = parts[nsec].toInt(&ok[0]);
            if (!ok[0])
                return QDateTime::currentDateTime();
@@ -433,7 +440,8 @@ namespace
        int month = 0;
        for ( ; (month < 12) && (parts[nmonth] != shortMonth[month]); ++month);
        int dayOfWeek = -1;
-        if (!parts[nwday].isEmpty()) {
+        if (!parts[nwday].isEmpty())
+        {
            // Look up the weekday name
            while ((++dayOfWeek < 7) && (shortDay[dayOfWeek] != parts[nwday]));
            if (dayOfWeek >= 7)
@@ -444,7 +452,8 @@ namespace
        //     || (dayOfWeek < 0 && format == RFCDateDay))
        //     return QDateTime;
        const int i = parts[nyear].size();
-        if (i < 4) {
+        if (i < 4)
+        {
            // It's an obsolete year specification with less than 4 digits
            year += ((i == 2) && (year < 50)) ? 2000 : 1900;
        }
@@ -452,9 +461,11 @@ namespace
        // Parse the UTC offset part
        int offset = 0; // set default to '-0000'
        bool negOffset = false;
-        if (parts.count() > 10) {
+        if (parts.count() > 10)
+        {
            rx = QRegExp("^([+-])(\\d\\d)(\\d\\d)$");
-            if (!parts[10].indexOf(rx)) {
+            if (!parts[10].indexOf(rx))
+            {
                // It's a UTC offset ±hhmm
                parts = rx.capturedTexts();
                offset = parts[2].toInt(&ok[0]) * 3600;
@@ -466,13 +477,16 @@ namespace
                if (negOffset)
                    offset = -offset;
            }
-            else {
+            else
+            {
                // Check for an obsolete time zone name
                const QByteArray zone = parts[10].toLatin1();
-                if ((zone.length() == 1) && (isalpha(zone[0])) && (toupper(zone[0]) != 'J')) {
+                if ((zone.length() == 1) && (isalpha(zone[0])) && (toupper(zone[0]) != 'J'))
+                {
                    negOffset = true; // military zone: RFC 2822 treats as '-0000'
                }
-                else if ((zone != "UT") && (zone != "GMT")) { // treated as '+0000'
+                else if ((zone != "UT") && (zone != "GMT"))
+                { // treated as '+0000'
                    offset = (zone == "EDT")
                             ? -4 * 3600
                             : ((zone == "EST") || (zone == "CDT"))
@@ -484,7 +498,8 @@ namespace
                             : (zone == "PST")
                             ? -8 * 3600
                             : 0;
-                    if (!offset) {
+                    if (!offset)
+                    {
                        // Check for any other alphabetic time zone
                        bool nonalpha = false;
                        for (int i = 0, end = zone.size(); (i < end) && !nonalpha; ++i)
@@ -509,7 +524,8 @@ namespace
        if (!result.isValid())
            return QDateTime::currentDateTime(); // invalid date/time
 
-        if (leapSecond) {
+        if (leapSecond)
+        {
            // Validate a leap second time. Leap seconds are inserted after 23:59:59 UTC.
            // Convert the time to UTC and check that it is 00:00:00.
            if ((hour*3600 + minute*60 + 60 - offset + 86400*5) % 86400) // (max abs(offset) is 100 hours)
@@ -548,11 +564,15 @@ void Parser::parse_impl(const QByteArray &feedData)
     xml.setEntityResolver(&resolver);
     bool foundChannel = false;
 
-    while (xml.readNextStartElement()) {
-        if (xml.name() == "rss") {
+    while (xml.readNextStartElement())
+    {
+        if (xml.name() == "rss")
+        {
             // Find channels
-            while (xml.readNextStartElement()) {
-                if (xml.name() == "channel") {
+            while (xml.readNextStartElement())
+            {
+                if (xml.name() == "channel")
+                {
                     parseRSSChannel(xml);
                     foundChannel = true;
                     break;
@@ -563,7 +583,8 @@ void Parser::parse_impl(const QByteArray &feedData)
             }
             break;
         }
-        if (xml.name() == "feed") { // Atom feed
+        if (xml.name() == "feed")
+        { // Atom feed
             parseAtomChannel(xml);
             foundChannel = true;
             break;
@@ -573,10 +594,12 @@ void Parser::parse_impl(const QByteArray &feedData)
         xml.skipCurrentElement();
     }
 
-    if (!foundChannel) {
+    if (!foundChannel)
+    {
         m_result.error = tr("Invalid RSS feed.");
     }
-    else if (xml.hasError()) {
+    else if (xml.hasError())
+    {
         m_result.error = tr("%1 (line: %2, column: %3, offset: %4).")
                          .arg(xml.errorString()).arg(xml.lineNumber())
                          .arg(xml.columnNumber()).arg(xml.characterOffset());
@@ -592,43 +615,53 @@ void Parser::parseRssArticle(QXmlStreamReader &xml)
     QVariantHash article;
     QString altTorrentUrl;
 
-    while (!xml.atEnd()) {
+    while (!xml.atEnd())
+    {
         xml.readNext();
         const QString name(xml.name().toString());
 
         if (xml.isEndElement() && (name == QLatin1String("item")))
             break;
 
-        if (xml.isStartElement()) {
-            if (name == QLatin1String("title")) {
+        if (xml.isStartElement())
+        {
+            if (name == QLatin1String("title"))
+            {
                 article[Article::KeyTitle] = xml.readElementText().trimmed();
             }
-            else if (name == QLatin1String("enclosure")) {
+            else if (name == QLatin1String("enclosure"))
+            {
                 if (xml.attributes().value("type") == QLatin1String("application/x-bittorrent"))
                     article[Article::KeyTorrentURL] = xml.attributes().value(QLatin1String("url")).toString();
                 else if (xml.attributes().value("type").isEmpty())
                     altTorrentUrl = xml.attributes().value(QLatin1String("url")).toString();
             }
-            else if (name == QLatin1String("link")) {
+            else if (name == QLatin1String("link"))
+            {
                 const QString text {xml.readElementText().trimmed()};
                 if (text.startsWith(QLatin1String("magnet:"), Qt::CaseInsensitive))
                     article[Article::KeyTorrentURL] = text; // magnet link instead of a news URL
                 else
                     article[Article::KeyLink] = text;
             }
-            else if (name == QLatin1String("description")) {
+            else if (name == QLatin1String("description"))
+            {
                 article[Article::KeyDescription] = xml.readElementText(QXmlStreamReader::IncludeChildElements);
             }
-            else if (name == QLatin1String("pubDate")) {
+            else if (name == QLatin1String("pubDate"))
+            {
                 article[Article::KeyDate] = parseDate(xml.readElementText().trimmed());
             }
-            else if (name == QLatin1String("author")) {
+            else if (name == QLatin1String("author"))
+            {
                 article[Article::KeyAuthor] = xml.readElementText().trimmed();
             }
-            else if (name == QLatin1String("guid")) {
+            else if (name == QLatin1String("guid"))
+            {
                 article[Article::KeyId] = xml.readElementText().trimmed();
             }
-            else {
+            else
+            {
                 article[name] = xml.readElementText(QXmlStreamReader::IncludeChildElements);
             }
         }
@@ -642,24 +675,31 @@ void Parser::parseRssArticle(QXmlStreamReader &xml)
 
 void Parser::parseRSSChannel(QXmlStreamReader &xml)
 {
-    while (!xml.atEnd()) {
+    while (!xml.atEnd())
+    {
         xml.readNext();
 
-        if (xml.isStartElement()) {
-            if (xml.name() == QLatin1String("title")) {
+        if (xml.isStartElement())
+        {
+            if (xml.name() == QLatin1String("title"))
+            {
                 m_result.title = xml.readElementText();
             }
-            else if (xml.name() == QLatin1String("lastBuildDate")) {
+            else if (xml.name() == QLatin1String("lastBuildDate"))
+            {
                 const QString lastBuildDate = xml.readElementText();
-                if (!lastBuildDate.isEmpty()) {
-                    if (m_result.lastBuildDate == lastBuildDate) {
+                if (!lastBuildDate.isEmpty())
+                {
+                    if (m_result.lastBuildDate == lastBuildDate)
+                    {
                         qDebug() << "The RSS feed has not changed since last time, aborting parsing.";
                         return;
                     }
                     m_result.lastBuildDate = lastBuildDate;
                 }
             }
-            else if (xml.name() == QLatin1String("item")) {
+            else if (xml.name() == QLatin1String("item"))
+            {
                 parseRssArticle(xml);
             }
         }
@@ -671,18 +711,22 @@ void Parser::parseAtomArticle(QXmlStreamReader &xml)
     QVariantHash article;
     bool doubleContent = false;
 
-    while (!xml.atEnd()) {
+    while (!xml.atEnd())
+    {
         xml.readNext();
         const QString name(xml.name().toString());
 
         if (xml.isEndElement() && (name == QLatin1String("entry")))
             break;
 
-        if (xml.isStartElement()) {
-            if (name == QLatin1String("title")) {
+        if (xml.isStartElement())
+        {
+            if (name == QLatin1String("title"))
+            {
                 article[Article::KeyTitle] = xml.readElementText().trimmed();
             }
-            else if (name == QLatin1String("link")) {
+            else if (name == QLatin1String("link"))
+            {
                 const QString link = (xml.attributes().isEmpty()
                                       ? xml.readElementText().trimmed()
                                       : xml.attributes().value(QLatin1String("href")).toString());
@@ -696,8 +740,10 @@ void Parser::parseAtomArticle(QXmlStreamReader &xml)
                 article[Article::KeyLink] = (m_baseUrl.isEmpty() ? link : m_baseUrl + link);
 
             }
-            else if ((name == QLatin1String("summary")) || (name == QLatin1String("content"))) {
-                if (doubleContent) { // Duplicate content -> ignore
+            else if ((name == QLatin1String("summary")) || (name == QLatin1String("content")))
+            {
+                if (doubleContent)
+                { // Duplicate content -> ignore
                     xml.skipCurrentElement();
                     continue;
                 }
@@ -705,28 +751,34 @@ void Parser::parseAtomArticle(QXmlStreamReader &xml)
                 // Try to also parse broken articles, which don't use html '&' escapes
                 // Actually works great for non-broken content too
                 const QString feedText = xml.readElementText(QXmlStreamReader::IncludeChildElements).trimmed();
-                if (!feedText.isEmpty()) {
+                if (!feedText.isEmpty())
+                {
                     article[Article::KeyDescription] = feedText;
                     doubleContent = true;
                 }
             }
-            else if (name == QLatin1String("updated")) {
+            else if (name == QLatin1String("updated"))
+            {
                 // ATOM uses standard compliant date, don't do fancy stuff
                 const QDateTime articleDate = QDateTime::fromString(xml.readElementText().trimmed(), Qt::ISODate);
                 article[Article::KeyDate] = (articleDate.isValid() ? articleDate : QDateTime::currentDateTime());
             }
-            else if (name == QLatin1String("author")) {
-                while (xml.readNextStartElement()) {
+            else if (name == QLatin1String("author"))
+            {
+                while (xml.readNextStartElement())
+                {
                     if (xml.name() == QLatin1String("name"))
                         article[Article::KeyAuthor] = xml.readElementText().trimmed();
                     else
                         xml.skipCurrentElement();
                 }
            }
-            else if (name == QLatin1String("id")) {
+            else if (name == QLatin1String("id"))
+            {
                 article[Article::KeyId] = xml.readElementText().trimmed();
            }
-            else {
+            else
+            {
                 article[name] = xml.readElementText(QXmlStreamReader::IncludeChildElements);
            }
        }
@@ -739,24 +791,31 @@ void Parser::parseAtomChannel(QXmlStreamReader &xml)
 {
     m_baseUrl = xml.attributes().value("xml:base").toString();
 
-    while (!xml.atEnd()) {
+    while (!xml.atEnd())
+    {
         xml.readNext();
 
-        if (xml.isStartElement()) {
-            if (xml.name() == QLatin1String("title")) {
+        if (xml.isStartElement())
+        {
+            if (xml.name() == QLatin1String("title"))
+            {
                 m_result.title = xml.readElementText();
             }
-            else if (xml.name() == QLatin1String("updated")) {
+            else if (xml.name() == QLatin1String("updated"))
+            {
                 const QString lastBuildDate = xml.readElementText();
-                if (!lastBuildDate.isEmpty()) {
-                    if (m_result.lastBuildDate == lastBuildDate) {
+                if (!lastBuildDate.isEmpty())
+                {
+                    if (m_result.lastBuildDate == lastBuildDate)
+                    {
                         qDebug() << "The RSS feed has not changed since last time, aborting parsing.";
                         return;
                     }
                     m_result.lastBuildDate = lastBuildDate;
                 }
            }
-            else if (xml.name() == QLatin1String("entry")) {
+            else if (xml.name() == QLatin1String("entry"))
+            {
                 parseAtomArticle(xml);
            }
        }
@@ -776,14 +835,16 @@ void Parser::addArticle(QVariantHash article)
     if (localId.toString().isEmpty())
         localId = article.value(Article::KeyTitle);
 
-    if (localId.toString().isEmpty()) {
+    if (localId.toString().isEmpty())
+    {
         // The article could not be uniquely identified
         // since it has no appropriate data.
         // Just ignore it.
         return;
     }
 
-    if (m_articleIDs.contains(localId.toString())) {
+    if (m_articleIDs.contains(localId.toString()))
+    {
         // The article could not be uniquely identified
         // since the Feed has duplicate identifiers.
         // Just ignore it.
@@ -97,7 +97,8 @@ Session::Session()
     load();
 
     connect(&m_refreshTimer, &QTimer::timeout, this, &Session::refresh);
-    if (m_processingEnabled) {
+    if (m_processingEnabled)
+    {
         m_refreshTimer.start(m_refreshInterval * MsecsPerMin);
         refresh();
     }
@@ -155,7 +156,8 @@ bool Session::addFolder(const QString &path, QString *error)
 
 bool Session::addFeed(const QString &url, const QString &path, QString *error)
 {
-    if (m_feedsByURL.contains(url)) {
+    if (m_feedsByURL.contains(url))
+    {
         if (error)
             *error = tr("RSS feed with given URL already exists: %1.").arg(url);
         return false;
@@ -174,14 +176,16 @@ bool Session::addFeed(const QString &url, const QString &path, QString *error)
 
 bool Session::moveItem(const QString &itemPath, const QString &destPath, QString *error)
 {
-    if (itemPath.isEmpty()) {
+    if (itemPath.isEmpty())
+    {
         if (error)
             *error = tr("Cannot move root folder.");
         return false;
     }
 
     auto item = m_itemsByPath.value(itemPath);
-    if (!item) {
+    if (!item)
+    {
         if (error)
             *error = tr("Item doesn't exist: %1.").arg(itemPath);
         return false;
@@ -200,7 +204,8 @@ bool Session::moveItem(Item *item, const QString &destPath, QString *error)
         return false;
 
     auto srcFolder = static_cast<Folder *>(m_itemsByPath.value(Item::parentPath(item->path())));
-    if (srcFolder != destFolder) {
+    if (srcFolder != destFolder)
+    {
         srcFolder->removeItem(item);
         destFolder->addItem(item);
     }
@@ -212,14 +217,16 @@ bool Session::moveItem(Item *item, const QString &destPath, QString *error)
 
 bool Session::removeItem(const QString &itemPath, QString *error)
 {
-    if (itemPath.isEmpty()) {
+    if (itemPath.isEmpty())
+    {
         if (error)
             *error = tr("Cannot delete root folder.");
         return false;
     }
 
     auto item = m_itemsByPath.value(itemPath);
-    if (!item) {
+    if (!item)
+    {
         if (error)
             *error = tr("Item doesn't exist: %1.").arg(itemPath);
         return false;
@@ -248,12 +255,14 @@ Item *Session::itemByPath(const QString &path) const
 void Session::load()
 {
     QFile itemsFile(m_confFileStorage->storageDir().absoluteFilePath(FeedsFileName));
-    if (!itemsFile.exists()) {
+    if (!itemsFile.exists())
+    {
         loadLegacy();
         return;
     }
 
-    if (!itemsFile.open(QFile::ReadOnly)) {
+    if (!itemsFile.open(QFile::ReadOnly))
+    {
         Logger::instance()->addMessage(
                 QString("Couldn't read RSS Session data from %1. Error: %2")
                 .arg(itemsFile.fileName(), itemsFile.errorString()), Log::WARNING);
@@ -262,14 +271,16 @@ void Session::load()
 
     QJsonParseError jsonError;
     const QJsonDocument jsonDoc = QJsonDocument::fromJson(itemsFile.readAll(), &jsonError);
-    if (jsonError.error != QJsonParseError::NoError) {
+    if (jsonError.error != QJsonParseError::NoError)
+    {
         Logger::instance()->addMessage(
                 QString("Couldn't parse RSS Session data from %1. Error: %2")
                 .arg(itemsFile.fileName(), jsonError.errorString()), Log::WARNING);
         return;
     }
 
-    if (!jsonDoc.isObject()) {
+    if (!jsonDoc.isObject())
+    {
         Logger::instance()->addMessage(
                 QString("Couldn't load RSS Session data from %1. Invalid data format.")
                 .arg(itemsFile.fileName()), Log::WARNING);
@@ -282,9 +293,11 @@ void Session::load()
 void Session::loadFolder(const QJsonObject &jsonObj, Folder *folder)
 {
     bool updated = false;
-    for (const QString &key : asConst(jsonObj.keys())) {
+    for (const QString &key : asConst(jsonObj.keys()))
+    {
         const QJsonValue val {jsonObj[key]};
-        if (val.isString()) {
+        if (val.isString())
+        {
             // previous format (reduced form) doesn't contain UID
             QString url = val.toString();
             if (url.isEmpty())
@@ -292,31 +305,38 @@ void Session::loadFolder(const QJsonObject &jsonObj, Folder *folder)
             addFeedToFolder(generateUID(), url, key, folder);
             updated = true;
         }
-        else if (val.isObject()) {
+        else if (val.isObject())
+        {
             const QJsonObject valObj {val.toObject()};
-            if (valObj.contains("url")) {
-                if (!valObj["url"].isString()) {
+            if (valObj.contains("url"))
+            {
+                if (!valObj["url"].isString())
+                {
                     LogMsg(tr("Couldn't load RSS Feed '%1'. URL is required.")
                            .arg(QString("%1\\%2").arg(folder->path(), key)), Log::WARNING);
                     continue;
                 }
 
                 QUuid uid;
-                if (valObj.contains("uid")) {
+                if (valObj.contains("uid"))
+                {
                     uid = QUuid {valObj["uid"].toString()};
-                    if (uid.isNull()) {
+                    if (uid.isNull())
+                    {
                         LogMsg(tr("Couldn't load RSS Feed '%1'. UID is invalid.")
                                .arg(QString("%1\\%2").arg(folder->path(), key)), Log::WARNING);
                         continue;
                     }
 
-                    if (m_feedsByUID.contains(uid)) {
+                    if (m_feedsByUID.contains(uid))
+                    {
                         LogMsg(tr("Duplicate RSS Feed UID: %1. Configuration seems to be corrupted.")
                                .arg(uid.toString()), Log::WARNING);
                         continue;
                     }
                 }
-                else {
+                else
+                {
                     // previous format doesn't contain UID
                     uid = generateUID();
                     updated = true;
@@ -324,11 +344,13 @@ void Session::loadFolder(const QJsonObject &jsonObj, Folder *folder)
 
                 addFeedToFolder(uid, valObj["url"].toString(), key, folder);
             }
-            else {
+            else
+            {
                 loadFolder(valObj, addSubfolder(key, folder));
             }
         }
-        else {
+        else
+        {
             LogMsg(tr("Couldn't load RSS Item '%1'. Invalid data format.")
                    .arg(QString::fromLatin1("%1\\%2").arg(folder->path(), key)), Log::WARNING);
         }
@@ -342,13 +364,15 @@ void Session::loadLegacy()
 {
     const QStringList legacyFeedPaths = SettingsStorage::instance()->loadValue("Rss/streamList").toStringList();
     const QStringList feedAliases = SettingsStorage::instance()->loadValue("Rss/streamAlias").toStringList();
-    if (legacyFeedPaths.size() != feedAliases.size()) {
+    if (legacyFeedPaths.size() != feedAliases.size())
+    {
         Logger::instance()->addMessage("Corrupted RSS list, not loading it.", Log::WARNING);
         return;
     }
 
     uint i = 0;
-    for (QString legacyPath : legacyFeedPaths) {
+    for (QString legacyPath : legacyFeedPaths)
+    {
         if (Item::PathSeparator == QString(legacyPath[0]))
             legacyPath.remove(0, 1);
         const QString parentFolderPath = Item::parentPath(legacyPath);
@@ -374,13 +398,15 @@ void Session::store()
 
 Folder *Session::prepareItemDest(const QString &path, QString *error)
 {
-    if (!Item::isValidPath(path)) {
+    if (!Item::isValidPath(path))
+    {
         if (error)
             *error = tr("Incorrect RSS Item path: %1.").arg(path);
         return nullptr;
     }
 
-    if (m_itemsByPath.contains(path)) {
+    if (m_itemsByPath.contains(path))
+    {
         if (error)
             *error = tr("RSS item with given path already exists: %1.").arg(path);
         return nullptr;
@@ -388,7 +414,8 @@ Folder *Session::prepareItemDest(const QString &path, QString *error)
 
     const QString destFolderPath = Item::parentPath(path);
     auto destFolder = qobject_cast<Folder *>(m_itemsByPath.value(destFolderPath));
-    if (!destFolder) {
+    if (!destFolder)
+    {
         if (error)
             *error = tr("Parent folder doesn't exist: %1.").arg(destFolderPath);
         return nullptr;
@@ -413,7 +440,8 @@ Feed *Session::addFeedToFolder(const QUuid &uid, const QString &url, const QStri
 
 void Session::addItem(Item *item, Folder *destFolder)
 {
-    if (auto feed = qobject_cast<Feed *>(item)) {
+    if (auto feed = qobject_cast<Feed *>(item))
+    {
         connect(feed, &Feed::titleChanged, this, &Session::handleFeedTitleChanged);
         connect(feed, &Feed::iconLoaded, this, &Session::feedIconLoaded);
         connect(feed, &Feed::stateChanged, this, &Session::feedStateChanged);
@@ -435,14 +463,17 @@ bool Session::isProcessingEnabled() const
 
 void Session::setProcessingEnabled(bool enabled)
 {
-    if (m_processingEnabled != enabled) {
+    if (m_processingEnabled != enabled)
+    {
         m_processingEnabled = enabled;
         SettingsStorage::instance()->storeValue(SettingsKey_ProcessingEnabled, m_processingEnabled);
-        if (m_processingEnabled) {
+        if (m_processingEnabled)
+        {
             m_refreshTimer.start(m_refreshInterval * MsecsPerMin);
             refresh();
         }
-        else {
+        else
+        {
             m_refreshTimer.stop();
         }
 
@@ -482,7 +513,8 @@ int Session::refreshInterval() const
 
 void Session::setRefreshInterval(const int refreshInterval)
 {
-    if (m_refreshInterval != refreshInterval) {
+    if (m_refreshInterval != refreshInterval)
+    {
         SettingsStorage::instance()->storeValue(SettingsKey_RefreshInterval, refreshInterval);
         m_refreshInterval = refreshInterval;
         m_refreshTimer.start(m_refreshInterval * MsecsPerMin);
@@ -498,7 +530,8 @@ void Session::handleItemAboutToBeDestroyed(Item *item)
 {
     m_itemsByPath.remove(item->path());
     auto feed = qobject_cast<Feed *>(item);
-    if (feed) {
+    if (feed)
+    {
         m_feedsByUID.remove(feed->uid());
         m_feedsByURL.remove(feed->url());
     }
@@ -528,7 +561,8 @@ int Session::maxArticlesPerFeed() const
 
 void Session::setMaxArticlesPerFeed(const int n)
 {
-    if (m_maxArticlesPerFeed != n) {
+    if (m_maxArticlesPerFeed != n)
+    {
         m_maxArticlesPerFeed = n;
         SettingsStorage::instance()->storeValue(SettingsKey_MaxArticlesPerFeed, n);
         emit maxArticlesPerFeedChanged(n);
@@ -34,20 +34,26 @@
  * RSS Session configuration file format (JSON):
  *
  * =============== BEGIN ===============
- * {
- *     "folder1": {
- *         "subfolder1": {
- *             "Feed name 1 (Alias)": {
+ *
+ {
+ *     "folder1":
+ {
+ *         "subfolder1":
+ {
+ *             "Feed name 1 (Alias)":
+ {
  *                 "uid": "feed unique identifier",
 *                 "url": "http://some-feed-url1"
 *             }
- *             "Feed name 2 (Alias)": {
+ *             "Feed name 2 (Alias)":
+ {
 *                 "uid": "feed unique identifier",
 *                 "url": "http://some-feed-url2"
 *             }
 *         },
 *         "subfolder2": {},
- *         "Feed name 3 (Alias)": {
+ *         "Feed name 3 (Alias)":
+ {
 *                 "uid": "feed unique identifier",
 *                 "url": "http://some-feed-url3"
 *             }