diff --git a/es-app/src/scrapers/Scraper.cpp b/es-app/src/scrapers/Scraper.cpp
index eb796c141..8500b4703 100644
--- a/es-app/src/scrapers/Scraper.cpp
+++ b/es-app/src/scrapers/Scraper.cpp
@@ -168,6 +168,13 @@ void ScraperHttpRequest::update()
     if (status == HttpReq::REQ_IN_PROGRESS)
         return;
 
+    if (status == HttpReq::REQ_RESOURCE_NOT_FOUND) {
+        LOG(LogWarning)
+            << "ScraperHttpRequest: Server returned HTTP error code 404 (resource not found)";
+        setStatus(ASYNC_DONE);
+        return;
+    }
+
     // Everything else is some sort of error.
     LOG(LogError) << "ScraperHttpRequest network error (status: " << status << ") - "
                   << mReq->getErrorMsg();
diff --git a/es-core/src/HttpReq.cpp b/es-core/src/HttpReq.cpp
index 9b6fb043e..691e640bd 100644
--- a/es-core/src/HttpReq.cpp
+++ b/es-core/src/HttpReq.cpp
@@ -41,6 +41,7 @@ HttpReq::HttpReq(const std::string& url, bool scraperRequest)
     , mHandle(nullptr)
     , mTotalBytes {0}
     , mDownloadedBytes {0}
+    , mScraperRequest {scraperRequest}
 {
     // The multi-handle is cleaned up via a call from GuiScraperSearch after the scraping
     // has been completed for a game, meaning the handle is valid for all curl requests
@@ -81,7 +82,7 @@ HttpReq::HttpReq(const std::string& url, bool scraperRequest)
 
     long connectionTimeout;
 
-    if (scraperRequest) {
+    if (mScraperRequest) {
         connectionTimeout =
             static_cast<long>(Settings::getInstance()->getInt("ScraperConnectionTimeout"));
 
@@ -103,7 +104,7 @@ HttpReq::HttpReq(const std::string& url, bool scraperRequest)
 
     long transferTimeout;
 
-    if (scraperRequest) {
+    if (mScraperRequest) {
         transferTimeout =
             static_cast<long>(Settings::getInstance()->getInt("ScraperTransferTimeout"));
 
@@ -259,10 +260,18 @@ HttpReq::Status HttpReq::status()
                 req->onError(curl_easy_strerror(msg->data.result));
             }
             else if (msg->data.result == CURLE_HTTP_RETURNED_ERROR) {
-                req->mStatus = REQ_BAD_STATUS_CODE;
                 long responseCode;
                 curl_easy_getinfo(msg->easy_handle, CURLINFO_RESPONSE_CODE, &responseCode);
-                req->onError("Server returned HTTP error code " + std::to_string(responseCode));
+
+                if (responseCode == 404 && req->mScraperRequest &&
+                    Settings::getInstance()->getBool("ScraperIgnoreHTTP404Errors")) {
+                    req->mStatus = REQ_RESOURCE_NOT_FOUND;
+                }
+                else {
+                    req->onError("Server returned HTTP error code " +
+                                 std::to_string(responseCode));
+                    req->mStatus = REQ_BAD_STATUS_CODE;
+                }
             }
             else {
                 req->mStatus = REQ_IO_ERROR;
diff --git a/es-core/src/HttpReq.h b/es-core/src/HttpReq.h
index 5aae2270c..0de59915a 100644
--- a/es-core/src/HttpReq.h
+++ b/es-core/src/HttpReq.h
@@ -25,10 +25,11 @@ public:
     enum Status {
         // clang-format off
        REQ_IN_PROGRESS,         // Request is in progress.
-        REQ_SUCCESS,             // Request completed successfully, get it with getContent().
-        REQ_IO_ERROR,            // Some error happened, get it with getErrorMsg().
+        REQ_SUCCESS,             // Request completed successfully.
+        REQ_IO_ERROR,            // An error occurred.
         REQ_FAILED_VERIFICATION, // Peer's certificate or fingerprint wasn't verified correctly.
-        REQ_BAD_STATUS_CODE,     // Some invalid HTTP response status code happened (non-200).
+        REQ_BAD_STATUS_CODE,     // HTTP error response >= 400.
+        REQ_RESOURCE_NOT_FOUND,  // HTTP error code 404 specifically.
         REQ_INVALID_RESPONSE,    // The HTTP response was invalid.
         REQ_UNDEFINED_ERROR
         // clang-format on
@@ -69,6 +70,7 @@ private:
     std::string mErrorMsg;
     std::atomic<long> mTotalBytes;
     std::atomic<long> mDownloadedBytes;
+    bool mScraperRequest;
 };
 
 #endif // ES_CORE_HTTP_REQ_H
diff --git a/es-core/src/Settings.cpp b/es-core/src/Settings.cpp
index 43b6d180c..f2edc2493 100644
--- a/es-core/src/Settings.cpp
+++ b/es-core/src/Settings.cpp
@@ -150,6 +150,7 @@ void Settings::setDefaults()
     mIntMap["ScraperSearchFileHashMaxSize"] = {384, 384};
     mBoolMap["ScraperOverwriteData"] = {true, true};
     mBoolMap["ScraperHaltOnInvalidMedia"] = {true, true};
+    mBoolMap["ScraperIgnoreHTTP404Errors"] = {true, true};
     mBoolMap["ScraperSearchFileHash"] = {true, true};
     mBoolMap["ScraperSearchMetadataName"] = {true, true};
     mBoolMap["ScraperIncludeFolders"] = {true, true};
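
For context, the CURLE_HTTP_RETURNED_ERROR branch patched above is only reachable when
CURLOPT_FAILONERROR is enabled on the easy handle, since libcurl otherwise reports 4xx/5xx
responses as successfully completed transfers. The following is a minimal standalone sketch
of the same 404-detection pattern, reduced to the blocking easy interface (HttpReq itself
runs on the multi interface) and using a hypothetical URL; it is an illustration under those
assumptions, not ES-DE code:

// sketch.cpp: detect an HTTP 404 the same way the patched status() branch does.
#include <curl/curl.h>
#include <cstdio>

int main()
{
    curl_global_init(CURL_GLOBAL_DEFAULT);
    CURL* handle {curl_easy_init()};
    if (handle == nullptr)
        return 1;

    // Hypothetical URL, for illustration only.
    curl_easy_setopt(handle, CURLOPT_URL, "https://example.com/media/missing.png");
    // HEAD request; only the status code matters here.
    curl_easy_setopt(handle, CURLOPT_NOBODY, 1L);
    // Fail the transfer on HTTP responses >= 400 so that curl_easy_perform()
    // returns CURLE_HTTP_RETURNED_ERROR instead of CURLE_OK.
    curl_easy_setopt(handle, CURLOPT_FAILONERROR, 1L);

    CURLcode result {curl_easy_perform(handle)};

    if (result == CURLE_HTTP_RETURNED_ERROR) {
        long responseCode {0};
        curl_easy_getinfo(handle, CURLINFO_RESPONSE_CODE, &responseCode);
        if (responseCode == 404)
            std::printf("404 resource not found, treated as a non-fatal miss\n");
        else
            std::printf("Server returned HTTP error code %ld\n", responseCode);
    }

    curl_easy_cleanup(handle);
    curl_global_cleanup();
    return 0;
}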