Add const to almost all remaining vars and arguments that qualify

Author: thalieht
Date: 2019-02-21 23:31:43 +02:00
parent bb041c0eca
commit 70f1537d9f
35 changed files with 261 additions and 261 deletions
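
The whole commit applies one mechanical pattern: local variables that are initialized once and never reassigned, and by-value function parameters that the body never modifies, gain a const qualifier. A minimal standalone sketch of that pattern, using standard library types rather than qBittorrent's Qt classes, might look like this (the function and names are illustrative, not from the codebase):

#include <iostream>
#include <string>

// const on a by-value parameter documents that the definition never
// reassigns its copy; callers are unaffected because top-level const
// is not part of the function's signature.
int countDashes(const std::string text)
{
    // A local that is initialized once and never reassigned qualifies for const.
    const std::size_t length = text.size();

    int dashes = 0;            // mutated below, so it stays non-const
    for (std::size_t i = 0; i < length; ++i) {
        const bool isDash = (text[i] == '-');
        if (isDash)
            ++dashes;
    }
    return dashes;
}

int main()
{
    std::cout << countDashes("2019-02-21") << '\n';  // prints 2
}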


@@ -396,8 +396,8 @@ namespace
if (!str.indexOf(rx)) {
// Check that if date has '-' separators, both separators are '-'.
parts = rx.capturedTexts();
bool h1 = (parts[3] == QLatin1String("-"));
bool h2 = (parts[5] == QLatin1String("-"));
const bool h1 = (parts[3] == QLatin1String("-"));
const bool h2 = (parts[5] == QLatin1String("-"));
if (h1 != h2)
return QDateTime::currentDateTime();
}
@@ -431,7 +431,7 @@ namespace
return QDateTime::currentDateTime();
}
bool leapSecond = (second == 60);
const bool leapSecond = (second == 60);
if (leapSecond)
second = 59; // apparently a leap second - validate below, once time zone is known
int month = 0;
@@ -447,7 +447,7 @@ namespace
// if (month >= 12 || dayOfWeek >= 7
// || (dayOfWeek < 0 && format == RFCDateDay))
// return QDateTime;
int i = parts[nyear].size();
const int i = parts[nyear].size();
if (i < 4) {
// It's an obsolete year specification with less than 4 digits
year += ((i == 2) && (year < 50)) ? 2000 : 1900;
@@ -462,7 +462,7 @@ namespace
// It's a UTC offset ±hhmm
parts = rx.capturedTexts();
offset = parts[2].toInt(&ok[0]) * 3600;
int offsetMin = parts[3].toInt(&ok[1]);
const int offsetMin = parts[3].toInt(&ok[1]);
if (!ok[0] || !ok[1] || offsetMin > 59)
return {};
offset += offsetMin * 60;
@@ -472,7 +472,7 @@ namespace
}
else {
// Check for an obsolete time zone name
QByteArray zone = parts[10].toLatin1();
const QByteArray zone = parts[10].toLatin1();
if ((zone.length() == 1) && (isalpha(zone[0])) && (toupper(zone[0]) != 'J')) {
negOffset = true; // military zone: RFC 2822 treats as '-0000'
}
@@ -502,11 +502,11 @@ namespace
}
}
QDate qDate(year, month + 1, day); // convert date, and check for out-of-range
const QDate qDate(year, month + 1, day); // convert date, and check for out-of-range
if (!qDate.isValid())
return QDateTime::currentDateTime();
QTime qTime(hour, minute, second);
const QTime qTime(hour, minute, second);
QDateTime result(qDate, qTime, Qt::UTC);
if (offset)
result = result.addSecs(-offset);
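
The hunk above also shows which variables do not qualify: qDate and qTime become const because they are only constructed and read, while the QDateTime result stays non-const because it is reassigned by result.addSecs(-offset). A small sketch of the same distinction, using std::chrono as a hypothetical stand-in for the Qt date types:

#include <chrono>

// Values built once can be const; a value that is later reassigned
// (like `result = result.addSecs(-offset)` above) cannot.
std::chrono::system_clock::time_point shiftedNow(const int offsetSeconds)
{
    const auto now = std::chrono::system_clock::now();   // never reassigned: const
    auto result = now;                                    // reassigned below: non-const
    if (offsetSeconds != 0)
        result = result - std::chrono::seconds(offsetSeconds);
    return result;
}

int main()
{
    const auto t = shiftedNow(3600);
    (void)t;
}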
@@ -528,7 +528,7 @@ using namespace RSS::Private;
const int ParsingResultTypeId = qRegisterMetaType<ParsingResult>();
Parser::Parser(QString lastBuildDate)
Parser::Parser(const QString lastBuildDate)
{
m_result.lastBuildDate = lastBuildDate;
}
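
Note that Parser::Parser(const QString lastBuildDate) adds top-level const to a by-value parameter. Top-level const is ignored in a function's signature, so only the definition needs to change; the declaration in the header can stay as it was. A standalone sketch with a hypothetical class (std::string standing in for QString):

#include <string>

class Parser
{
public:
    // Declaration: top-level const on a value parameter is not part of
    // the signature, so the header can remain unchanged.
    explicit Parser(std::string lastBuildDate);

private:
    std::string m_lastBuildDate;
};

// Definition: const prevents the body from accidentally reassigning the copy.
Parser::Parser(const std::string lastBuildDate)
    : m_lastBuildDate(lastBuildDate)
{
}

int main()
{
    const Parser parser("Tue, 19 Feb 2019 00:00:00 +0000");
    (void)parser;
}

Passing the string by const reference instead would avoid the copy, but that is a separate change from the const-only pattern this commit applies.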
@@ -642,7 +642,7 @@ void Parser::parseRSSChannel(QXmlStreamReader &xml)
m_result.title = xml.readElementText();
}
else if (xml.name() == QLatin1String("lastBuildDate")) {
QString lastBuildDate = xml.readElementText();
const QString lastBuildDate = xml.readElementText();
if (!lastBuildDate.isEmpty()) {
if (m_result.lastBuildDate == lastBuildDate) {
qDebug() << "The RSS feed has not changed since last time, aborting parsing.";
@@ -675,7 +675,7 @@ void Parser::parseAtomArticle(QXmlStreamReader &xml)
article[Article::KeyTitle] = xml.readElementText().trimmed();
}
else if (name == QLatin1String("link")) {
QString link = (xml.attributes().isEmpty()
const QString link = (xml.attributes().isEmpty()
? xml.readElementText().trimmed()
: xml.attributes().value(QLatin1String("href")).toString());
@@ -696,7 +696,7 @@ void Parser::parseAtomArticle(QXmlStreamReader &xml)
// Try to also parse broken articles, which don't use html '&' escapes
// Actually works great for non-broken content too
QString feedText = xml.readElementText(QXmlStreamReader::IncludeChildElements).trimmed();
const QString feedText = xml.readElementText(QXmlStreamReader::IncludeChildElements).trimmed();
if (!feedText.isEmpty()) {
article[Article::KeyDescription] = feedText;
doubleContent = true;
@@ -704,7 +704,7 @@ void Parser::parseAtomArticle(QXmlStreamReader &xml)
}
else if (name == QLatin1String("updated")) {
// ATOM uses standard compliant date, don't do fancy stuff
QDateTime articleDate = QDateTime::fromString(xml.readElementText().trimmed(), Qt::ISODate);
const QDateTime articleDate = QDateTime::fromString(xml.readElementText().trimmed(), Qt::ISODate);
article[Article::KeyDate] = (articleDate.isValid() ? articleDate : QDateTime::currentDateTime());
}
else if (name == QLatin1String("author")) {
@@ -739,7 +739,7 @@ void Parser::parseAtomChannel(QXmlStreamReader &xml)
m_result.title = xml.readElementText();
}
else if (xml.name() == QLatin1String("updated")) {
QString lastBuildDate = xml.readElementText();
const QString lastBuildDate = xml.readElementText();
if (!lastBuildDate.isEmpty()) {
if (m_result.lastBuildDate == lastBuildDate) {
qDebug() << "The RSS feed has not changed since last time, aborting parsing.";


@@ -73,7 +73,7 @@ namespace
QVector<RSS::AutoDownloadRule> rulesFromJSON(const QByteArray &jsonData)
{
QJsonParseError jsonError;
QJsonDocument jsonDoc = QJsonDocument::fromJson(jsonData, &jsonError);
const QJsonDocument jsonDoc = QJsonDocument::fromJson(jsonData, &jsonError);
if (jsonError.error != QJsonParseError::NoError)
throw RSS::ParsingError(jsonError.errorString());
@@ -227,7 +227,7 @@ QByteArray AutoDownloader::exportRules(AutoDownloader::RulesFileFormat format) c
}
}
void AutoDownloader::importRules(const QByteArray &data, AutoDownloader::RulesFileFormat format)
void AutoDownloader::importRules(const QByteArray &data, const AutoDownloader::RulesFileFormat format)
{
switch (format) {
case RulesFileFormat::Legacy:
@@ -333,7 +333,7 @@ void AutoDownloader::process()
void AutoDownloader::handleTorrentDownloadFinished(const QString &url)
{
auto job = m_waitingJobs.take(url);
const auto job = m_waitingJobs.take(url);
if (!job) return;
if (Feed *feed = Session::instance()->feedByURL(job->feedURL))
@@ -387,7 +387,7 @@ void AutoDownloader::processJob(const QSharedPointer<ProcessingJob> &job)
params.addPaused = rule.addPaused();
if (!rule.savePath().isEmpty())
params.useAutoTMM = TriStateBool::False;
auto torrentURL = job->articleData.value(Article::KeyTorrentURL).toString();
const auto torrentURL = job->articleData.value(Article::KeyTorrentURL).toString();
BitTorrent::Session::instance()->addTorrent(torrentURL, params);
if (BitTorrent::MagnetUri(torrentURL).isValid()) {
@@ -434,10 +434,10 @@ void AutoDownloader::loadRules(const QByteArray &data)
void AutoDownloader::loadRulesLegacy()
{
SettingsPtr settings = Profile::instance().applicationSettings(QStringLiteral("qBittorrent-rss"));
const SettingsPtr settings = Profile::instance().applicationSettings(QStringLiteral("qBittorrent-rss"));
const QVariantHash rules = settings->value(QStringLiteral("download_rules")).toHash();
for (const QVariant &ruleVar : rules) {
auto rule = AutoDownloadRule::fromLegacyDict(ruleVar.toHash());
const auto rule = AutoDownloadRule::fromLegacyDict(ruleVar.toHash());
if (!rule.name().isEmpty())
insertRule(rule);
}
@@ -485,7 +485,7 @@ void AutoDownloader::startProcessing()
connect(Session::instance()->rootFolder(), &Folder::newArticle, this, &AutoDownloader::handleNewArticle);
}
void AutoDownloader::setProcessingEnabled(bool enabled)
void AutoDownloader::setProcessingEnabled(const bool enabled)
{
if (m_processingEnabled != enabled) {
m_processingEnabled = enabled;
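
Setters like setProcessingEnabled above take their parameter by const value and guard against redundant assignment. One case where adding const to a value parameter would be counterproductive is a parameter that is meant to be moved into a member, since std::move on a const object silently falls back to a copy. A small standalone sketch (hypothetical Settings class, not qBittorrent code):

#include <string>
#include <utility>

class Settings
{
public:
    // A value parameter that is moved from should not be const:
    // std::move on a const object degrades to a copy.
    void setName(std::string name) { m_name = std::move(name); }

    // The guard-and-assign setter pattern seen above: the parameter can be
    // const, the member it updates obviously cannot.
    void setLimit(const int limit)
    {
        if (m_limit != limit)
            m_limit = limit;
    }

private:
    std::string m_name;
    int m_limit = 0;
};

int main()
{
    Settings s;
    s.setName("feed");
    s.setLimit(50);
}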


@@ -72,7 +72,7 @@ namespace
}
}
TriStateBool addPausedLegacyToTriStateBool(int val)
TriStateBool addPausedLegacyToTriStateBool(const int val)
{
switch (val) {
case 1: return TriStateBool::True; // always
@@ -163,13 +163,13 @@ QString computeEpisodeName(const QString &article)
QStringList ret;
for (int i = 1; i <= match.lastCapturedIndex(); ++i) {
QString cap = match.captured(i);
const QString cap = match.captured(i);
if (cap.isEmpty())
continue;
bool isInt = false;
int x = cap.toInt(&isInt);
const int x = cap.toInt(&isInt);
ret.append(isInt ? QString::number(x) : cap);
}
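
The loop above makes cap and x const even though they live inside the loop body: each iteration creates a fresh object, so const does not stop the loop from working with new data every pass. A sketch of the same idea with standard library types (the function is illustrative only):

#include <cctype>
#include <string>
#include <vector>

// Loop-local values are re-initialized on every iteration, so declaring
// them const is fine; only values mutated within the iteration stay non-const.
std::vector<std::string> nonEmptyUpper(const std::vector<std::string> &parts)
{
    std::vector<std::string> ret;
    for (const std::string &part : parts) {
        const bool isEmpty = part.empty();   // fresh const local per iteration
        if (isEmpty)
            continue;

        std::string upper = part;            // modified below, so non-const
        for (char &c : upper)
            c = static_cast<char>(std::toupper(static_cast<unsigned char>(c)));
        ret.push_back(upper);
    }
    return ret;
}

int main()
{
    const std::vector<std::string> parts {"s01", "", "e02"};
    const auto cleaned = nonEmptyUpper(parts);
    return static_cast<int>(cleaned.size());  // 2
}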
@@ -189,7 +189,7 @@ AutoDownloadRule::AutoDownloadRule(const AutoDownloadRule &other)
AutoDownloadRule::~AutoDownloadRule() {}
QRegularExpression AutoDownloadRule::cachedRegex(const QString &expression, bool isRegex) const
QRegularExpression AutoDownloadRule::cachedRegex(const QString &expression, const bool isRegex) const
{
// Use a cache of regexes so we don't have to continually recompile - big performance increase.
// The cache is cleared whenever the regex/wildcard, must or must not contain fields or
@@ -216,7 +216,7 @@ bool AutoDownloadRule::matchesExpression(const QString &articleTitle, const QStr
}
if (m_dataPtr->useRegex) {
QRegularExpression reg(cachedRegex(expression));
const QRegularExpression reg(cachedRegex(expression));
return reg.match(articleTitle).hasMatch();
}
@@ -600,7 +600,7 @@ bool AutoDownloadRule::isEnabled() const
return m_dataPtr->enabled;
}
void AutoDownloadRule::setEnabled(bool enable)
void AutoDownloadRule::setEnabled(const bool enable)
{
m_dataPtr->enabled = enable;
}
@@ -615,7 +615,7 @@ void AutoDownloadRule::setLastMatch(const QDateTime &lastMatch)
m_dataPtr->lastMatch = lastMatch;
}
void AutoDownloadRule::setIgnoreDays(int d)
void AutoDownloadRule::setIgnoreDays(const int d)
{
m_dataPtr->ignoreDays = d;
}
@@ -640,7 +640,7 @@ bool AutoDownloadRule::useSmartFilter() const
return m_dataPtr->smartFilter;
}
void AutoDownloadRule::setUseSmartFilter(bool enabled)
void AutoDownloadRule::setUseSmartFilter(const bool enabled)
{
m_dataPtr->smartFilter = enabled;
}
@@ -650,7 +650,7 @@ bool AutoDownloadRule::useRegex() const
return m_dataPtr->useRegex;
}
void AutoDownloadRule::setUseRegex(bool enabled)
void AutoDownloadRule::setUseRegex(const bool enabled)
{
m_dataPtr->useRegex = enabled;
m_dataPtr->cachedRegexes.clear();


@@ -108,7 +108,7 @@ QList<Article *> Feed::articles() const
void Feed::markAsRead()
{
auto oldUnreadCount = m_unreadCount;
const int oldUnreadCount = m_unreadCount;
for (Article *article : asConst(m_articles)) {
if (!article->isRead()) {
article->disconnect(this);
@@ -176,7 +176,7 @@ Article *Feed::articleByGUID(const QString &guid) const
return m_articles.value(guid);
}
void Feed::handleMaxArticlesPerFeedChanged(int n)
void Feed::handleMaxArticlesPerFeedChanged(const int n)
{
while (m_articlesByDate.size() > n)
removeOldestArticle();
@@ -270,7 +270,7 @@ void Feed::load()
void Feed::loadArticles(const QByteArray &data)
{
QJsonParseError jsonError;
QJsonDocument jsonDoc = QJsonDocument::fromJson(data, &jsonError);
const QJsonDocument jsonDoc = QJsonDocument::fromJson(data, &jsonError);
if (jsonError.error != QJsonParseError::NoError) {
LogMsg(tr("Couldn't parse RSS Session data. Error: %1").arg(jsonError.errorString())
, Log::WARNING);
@@ -303,7 +303,7 @@ void Feed::loadArticles(const QByteArray &data)
void Feed::loadArticlesLegacy()
{
SettingsPtr qBTRSSFeeds = Profile::instance().applicationSettings(QStringLiteral("qBittorrent-rss-feeds"));
const SettingsPtr qBTRSSFeeds = Profile::instance().applicationSettings(QStringLiteral("qBittorrent-rss-feeds"));
const QVariantHash allOldItems = qBTRSSFeeds->value("old_items").toHash();
for (const QVariant &var : asConst(allOldItems.value(m_url).toList())) {
@@ -348,7 +348,7 @@ bool Feed::addArticle(Article *article)
// Insertion sort
const int maxArticles = m_session->maxArticlesPerFeed();
auto lowerBound = std::lower_bound(m_articlesByDate.begin(), m_articlesByDate.end()
const auto lowerBound = std::lower_bound(m_articlesByDate.begin(), m_articlesByDate.end()
, article->date(), Article::articleDateRecentThan);
if ((lowerBound - m_articlesByDate.begin()) >= maxArticles)
return false; // we reach max articles
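
In the hunk above, const auto lowerBound makes the iterator object itself const: it can still be dereferenced, compared and subtracted, it just cannot be advanced or reseated. A minimal sketch of the same usage with a std::vector (names and limits are illustrative):

#include <algorithm>
#include <vector>

// A const iterator variable can still be compared and subtracted;
// it simply cannot be incremented or reassigned afterwards.
bool wouldExceedLimit(const std::vector<int> &sorted, const int value, const int maxSize)
{
    const auto lowerBound = std::lower_bound(sorted.begin(), sorted.end(), value);
    return (lowerBound - sorted.begin()) >= maxSize;
}

int main()
{
    const std::vector<int> sorted {1, 3, 5, 7};
    return wouldExceedLimit(sorted, 4, 3) ? 1 : 0;  // returns 0
}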
@@ -376,7 +376,7 @@ void Feed::removeOldestArticle()
m_articles.remove(oldestArticle->guid());
m_articlesByDate.removeLast();
bool isRead = oldestArticle->isRead();
const bool isRead = oldestArticle->isRead();
delete oldestArticle;
if (!isRead)
@@ -402,8 +402,8 @@ void Feed::downloadIcon()
// Download the RSS Feed icon
// XXX: This works for most sites but it is not perfect
const QUrl url(m_url);
auto iconUrl = QString("%1://%2/favicon.ico").arg(url.scheme(), url.host());
Net::DownloadHandler *handler = Net::DownloadManager::instance()->download(
const auto iconUrl = QString("%1://%2/favicon.ico").arg(url.scheme(), url.host());
const Net::DownloadHandler *handler = Net::DownloadManager::instance()->download(
Net::DownloadRequest(iconUrl).saveToFile(true));
connect(handler
, static_cast<void (Net::DownloadHandler::*)(const QString &, const QString &)>(&Net::DownloadHandler::downloadFinished)
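
The change to const Net::DownloadHandler *handler makes the pointed-to handler const (which still matches QObject::connect's const sender parameter); it is a different thing from making the pointer itself const. A standalone sketch of the distinction, with a hypothetical Handler type:

#include <string>

struct Handler
{
    std::string name;
};

int main()
{
    Handler a {"a"};
    Handler b {"b"};

    const Handler *pointeeConst = &a;   // may be reseated, pointee is read-only
    pointeeConst = &b;                  // OK
    // pointeeConst->name = "x";        // error: pointee is const

    Handler *const pointerConst = &a;   // pointer fixed, pointee is mutable
    pointerConst->name = "x";           // OK
    // pointerConst = &b;               // error: pointer is const
}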
@@ -494,7 +494,7 @@ QString Feed::iconPath() const
return m_iconPath;
}
QJsonValue Feed::toJsonValue(bool withData) const
QJsonValue Feed::toJsonValue(const bool withData) const
{
QJsonObject jsonObj;
jsonObj.insert(KEY_UID, uid().toString());
@@ -515,7 +515,7 @@ QJsonValue Feed::toJsonValue(bool withData) const
return jsonObj;
}
void Feed::handleSessionProcessingEnabledChanged(bool enabled)
void Feed::handleSessionProcessingEnabledChanged(const bool enabled)
{
if (enabled) {
downloadIcon();


@@ -263,7 +263,7 @@ void Session::load()
}
QJsonParseError jsonError;
QJsonDocument jsonDoc = QJsonDocument::fromJson(itemsFile.readAll(), &jsonError);
const QJsonDocument jsonDoc = QJsonDocument::fromJson(itemsFile.readAll(), &jsonError);
if (jsonError.error != QJsonParseError::NoError) {
Logger::instance()->addMessage(
QString("Couldn't parse RSS Session data from %1. Error: %2")
@@ -482,7 +482,7 @@ uint Session::refreshInterval() const
return m_refreshInterval;
}
void Session::setRefreshInterval(uint refreshInterval)
void Session::setRefreshInterval(const uint refreshInterval)
{
if (m_refreshInterval != refreshInterval) {
SettingsStorage::instance()->storeValue(SettingsKey_RefreshInterval, refreshInterval);
@@ -528,7 +528,7 @@ int Session::maxArticlesPerFeed() const
return m_maxArticlesPerFeed;
}
void Session::setMaxArticlesPerFeed(int n)
void Session::setMaxArticlesPerFeed(const int n)
{
if (m_maxArticlesPerFeed != n) {
m_maxArticlesPerFeed = n;