Mirror of https://github.com/RetroDECK/ES-DE.git, synced 2025-02-16 20:15:38 +00:00
Added an error popup if incorrect credentials are used when scraping using ScreenScraper
This commit is contained in:
parent 7b4cfefbc4
commit 43060f9b41
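The change follows a simple pattern: the ScreenScraper request flags a credentials failure as a fatal, non-retryable error on its async handle, and the scraper GUI checks that flag and shows a popup instead of retrying or skipping the game. Below is a minimal, self-contained sketch of that pattern; the class names and the main() driver are hypothetical stand-ins for illustration, not the actual ES-DE types.

// Hypothetical stand-ins for AsyncHandle / ScreenScraperRequest / GuiScraperSearch::onSearchError.
#include <iostream>
#include <string>

class AsyncHandle {
public:
    bool getRetry() const { return mRetry; }
    bool getFatalError() const { return mFatalError; }
    const std::string& getStatusString() const { return mError; }

protected:
    // fatalError defaults to false so existing setError() call sites keep their behavior.
    void setError(const std::string& error, bool retry, bool fatalError = false)
    {
        mError = error;
        mRetry = retry;
        mFatalError = fatalError;
    }

private:
    std::string mError;
    bool mRetry {true};
    bool mFatalError {false};
};

class ScraperRequest : public AsyncHandle {
public:
    void process(bool credentialsValid)
    {
        if (!credentialsValid) {
            // Wrong username/password: don't retry, and mark the error as fatal
            // so the GUI presents a popup instead of silently moving on.
            setError("ScreenScraper: Wrong username or password", false, true);
        }
    }
};

// Stand-in for the GUI-side error handler.
void onSearchError(const std::string& error, bool retry, bool fatalError)
{
    if (fatalError) {
        std::cout << "Popup: " << error << '\n'; // a message box in the real frontend
        return;
    }
    std::cout << (retry ? "Retrying after error: " : "Skipping after error: ") << error << '\n';
}

int main()
{
    ScraperRequest req;
    req.process(false); // simulate invalid ScreenScraper credentials
    onSearchError(req.getStatusString(), req.getRetry(), req.getFatalError());
}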
GuiScraperSearch.cpp
@@ -1,6 +1,6 @@
 // SPDX-License-Identifier: MIT
 //
-// ES-DE
+// ES-DE Frontend
 // GuiScraperSearch.cpp
 //
 // User interface for the scraper where the user is able to see an overview
@@ -552,8 +552,16 @@ void GuiScraperSearch::onSearchDone(std::vector<ScraperSearchResult>& results)
 
 void GuiScraperSearch::onSearchError(const std::string& error,
                                      const bool retry,
+                                     const bool fatalError,
                                      HttpReq::Status status)
 {
+    if (fatalError) {
+        LOG(LogWarning) << "GuiScraperSearch: " << Utils::String::replace(error, "\n", "");
+        mWindow->pushGui(new GuiMsgBox(getHelpStyle(), Utils::String::toUpper(error), "OK",
+                                       mCancelCallback, "", nullptr, "", nullptr, nullptr, true));
+        return;
+    }
+
     const int retries {
         glm::clamp(Settings::getInstance()->getInt("ScraperRetryOnErrorCount"), 0, 10)};
     if (retry && mSearchType != MANUAL_MODE && retries > 0 && mRetryCount < retries) {
@@ -798,6 +806,7 @@ void GuiScraperSearch::update(int deltaTime)
         mScraperResults = mSearchHandle->getResults();
         const std::string statusString {mSearchHandle->getStatusString()};
         const bool retryFlag {mSearchHandle->getRetry()};
+        const bool fatalErrorFlag {mSearchHandle->getFatalError()};
 
         // We reset here because onSearchDone in auto mode can call mSkipCallback() which
         // can call another search() which will set our mSearchHandle to something important.
@@ -821,7 +830,7 @@ void GuiScraperSearch::update(int deltaTime)
             }
         }
         else if (status == ASYNC_ERROR) {
-            onSearchError(statusString, retryFlag);
+            onSearchError(statusString, retryFlag, fatalErrorFlag);
         }
     }
 
@@ -855,7 +864,7 @@ void GuiScraperSearch::update(int deltaTime)
         }
         else if (mMDRetrieveURLsHandle->status() == ASYNC_ERROR) {
             onSearchError(mMDRetrieveURLsHandle->getStatusString(),
-                          mMDRetrieveURLsHandle->getRetry());
+                          mMDRetrieveURLsHandle->getRetry(), mSearchHandle->getFatalError());
             mMDRetrieveURLsHandle.reset();
         }
     }
@@ -912,7 +921,8 @@ void GuiScraperSearch::update(int deltaTime)
             }
         }
         else if (mMDResolveHandle->status() == ASYNC_ERROR) {
-            onSearchError(mMDResolveHandle->getStatusString(), mMDResolveHandle->getRetry());
+            onSearchError(mMDResolveHandle->getStatusString(), mMDResolveHandle->getRetry(),
+                          mSearchHandle->getFatalError());
             mMDResolveHandle.reset();
         }
     }
@@ -941,7 +951,7 @@ void GuiScraperSearch::updateThumbnail()
         else {
             mResultThumbnail->setImage("");
             onSearchError("Error downloading thumbnail:\n " + it->second->getErrorMsg(), true,
-                          it->second->status());
+                          mSearchHandle->getFatalError(), it->second->status());
         }
 
         mThumbnailReqMap.erase(it);
GuiScraperSearch.h
@@ -1,6 +1,6 @@
 // SPDX-License-Identifier: MIT
 //
-// ES-DE
+// ES-DE Frontend
 // GuiScraperSearch.h
 //
 // User interface for the scraper where the user is able to see an overview
@@ -109,6 +109,7 @@ private:
 
     void onSearchError(const std::string& error,
                        const bool retry,
+                       const bool fatalError,
                        HttpReq::Status status = HttpReq::REQ_UNDEFINED_ERROR);
     void onSearchDone(std::vector<ScraperSearchResult>& results);
 
Scraper.cpp
@@ -1,6 +1,6 @@
 // SPDX-License-Identifier: MIT
 //
-// ES-DE
+// ES-DE Frontend
 // Scraper.cpp
 //
 // Main scraper logic.
@@ -116,7 +116,7 @@ void ScraperSearchHandle::update()
 
         if (status == ASYNC_ERROR) {
             // Propagate error.
-            setError(req.getStatusString(), req.getRetry());
+            setError(req.getStatusString(), req.getRetry(), req.getFatalError());
 
             // Empty our queue.
             while (!mRequestQueue.empty())
Scraper.h
@@ -1,6 +1,6 @@
 // SPDX-License-Identifier: MIT
 //
-// ES-DE
+// ES-DE Frontend
 // Scraper.h
 //
 // Main scraper logic.
ScreenScraper.cpp
@@ -1,6 +1,6 @@
 // SPDX-License-Identifier: MIT
 //
-// ES-DE
+// ES-DE Frontend
 // ScreenScraper.cpp
 //
 // Functions specifically for scraping from screenscraper.fr
@@ -281,9 +281,17 @@ void ScreenScraperRequest::process(const std::unique_ptr<HttpReq>& req,
         std::stringstream ss;
         ss << "ScreenScraperRequest - Error parsing XML: " << parseResult.description();
 
-        std::string err = ss.str();
+        const size_t maxErrorLength {150};
+
+        std::string err {ss.str()};
+        if (err.length() > maxErrorLength)
+            err = err.substr(0, maxErrorLength) + "...";
         LOG(LogError) << err;
-        setError("ScreenScraper error: \n" + req->getContent(), true);
+
+        std::string content {req->getContent()};
+        if (content.length() > maxErrorLength)
+            content = content.substr(0, maxErrorLength) + "...";
+        setError("ScreenScraper error: \n" + content, true);
 
         return;
     }
@@ -334,25 +342,34 @@ void ScreenScraperRequest::processGame(const pugi::xml_document& xmldoc,
 {
     pugi::xml_node data {xmldoc.child("Data")};
 
     // Check if our username was included in the response (assuming an account is used).
     // It seems as if this information is randomly missing from the server response, which
     // also seems to correlate with missing scraper allowance data. This is however a scraper
     // service issue so we're not attempting to compensate for it here.
+    // The "niveau" tag indicates whether the account is valid (correct username and password).
     if (Settings::getInstance()->getBool("ScraperUseAccountScreenScraper") &&
-        Settings::getInstance()->getString("ScraperUsernameScreenScraper") != "" &&
-        Settings::getInstance()->getString("ScraperPasswordScreenScraper") != "") {
-        std::string userID {data.child("ssuser").child("id").text().get()};
-        if (userID != "") {
-            LOG(LogDebug) << "ScreenScraperRequest::processGame(): Scraping using account \""
-                          << userID << "\"";
+        Settings::getInstance()->getString("ScraperUsernameScreenScraper") != "") {
+        if (data.child("ssuser").child("niveau") != nullptr) {
+            const std::string userID {data.child("ssuser").child("id").text().get()};
+            const std::string userStatus {data.child("ssuser").child("niveau").text().get()};
+            if (userStatus != "0") {
+                LOG(LogDebug) << "ScreenScraperRequest::processGame(): Scraping using account \""
+                              << userID << "\"";
+            }
+            else {
+                LOG(LogError) << "ScreenScraper: Couldn't authenticate user \""
+                              << Settings::getInstance()->getString("ScraperUsernameScreenScraper")
+                              << "\", wrong username or password?";
+
+                setError("ScreenScraper: Wrong username or password", false, true);
+                return;
+            }
         }
         else {
-            LOG(LogDebug)
-                << "ScreenScraperRequest::processGame(): The configured account \""
-                << Settings::getInstance()->getString("ScraperUsernameScreenScraper")
-                << "\" was not included in the scraper response, wrong username or password?";
+            LOG(LogWarning)
+                << "ScreenScraperRequest::processGame(): Invalid server response, missing "
+                   "\"niveau\" tag";
         }
     }
     else {
         LOG(LogDebug) << "ScreenScraperRequest::processGame(): Scraping without a user account";
     }
 
     // Find how many more requests we can make before the scraper request
     // allowance counter is reset. For some strange reason the ssuser information
ScreenScraper.h
@@ -1,6 +1,6 @@
 // SPDX-License-Identifier: MIT
 //
-// ES-DE
+// ES-DE Frontend
 // ScreenScraper.h
 //
 // Functions specifically for scraping from screenscraper.fr
AsyncHandle.h
@@ -1,6 +1,6 @@
 // SPDX-License-Identifier: MIT
 //
-// ES-DE
+// ES-DE Frontend
 // AsyncHandle.h
 //
 // Asynchronous operations used by GuiScraperSearch and Scraper.
@@ -24,6 +24,7 @@ public:
     AsyncHandle()
         : mStatus(ASYNC_IN_PROGRESS)
         , mRetry {true}
+        , mFatalError {false}
     {
     }
     virtual ~AsyncHandle() {}
@@ -38,6 +39,7 @@ public:
     }
 
     const bool getRetry() { return mRetry; }
+    const bool getFatalError() { return mFatalError; }
 
     // User-friendly string of our current status.
     // Will return error message if status() == SEARCH_ERROR.
@@ -57,16 +59,18 @@ public:
 
 protected:
     void setStatus(AsyncHandleStatus status) { mStatus = status; }
-    void setError(const std::string& error, bool retry)
+    void setError(const std::string& error, bool retry, bool fatalError = false)
     {
         setStatus(ASYNC_ERROR);
         mError = error;
         mRetry = retry;
+        mFatalError = fatalError;
     }
 
     std::string mError;
     AsyncHandleStatus mStatus;
     bool mRetry;
+    bool mFatalError;
 };
 
 #endif // ES_CORE_ASYNC_HANDLE_H