Mirror of https://github.com/RetroDECK/ES-DE.git (synced 2025-02-16 20:15:38 +00:00)
Commit ad544d684d (parent 606958bb6b)

Minor cosmetic changes to the scraper log output.

@@ -393,7 +393,7 @@ void TheGamesDBJSONRequest::process(const std::unique_ptr<HttpReq>& req,
 
     if (doc.HasParseError()) {
         std::string err =
-            std::string("TheGamesDBJSONRequest - Error parsing JSON. \n\t") +
+            std::string("TheGamesDBJSONRequest - Error parsing JSON \n\t") +
             GetParseError_En(doc.GetParseError());
         setError(err);
         LOG(LogError) << err;
@@ -412,7 +412,7 @@ void TheGamesDBJSONRequest::process(const std::unique_ptr<HttpReq>& req,
         baseImageUrlLarge = base_url["large"].GetString();
     }
     else {
-        LOG(LogWarning) << "TheGamesDBJSONRequest - No URL path for large images.\n";
+        LOG(LogWarning) << "TheGamesDBJSONRequest - No URL path for large images\n";
         return;
     }
 
@@ -441,7 +441,7 @@ void TheGamesDBJSONRequest::process(const std::unique_ptr<HttpReq>& req,
     // These process steps are for the initial scraping response.
     if (!doc.HasMember("data") || !doc["data"].HasMember("games") ||
             !doc["data"]["games"].IsArray()) {
-        LOG(LogWarning) << "TheGamesDBJSONRequest - Response had no game data.\n";
+        LOG(LogWarning) << "TheGamesDBJSONRequest - Response had no game data\n";
         return;
     }
 
@@ -459,6 +459,6 @@ void TheGamesDBJSONRequest::process(const std::unique_ptr<HttpReq>& req,
     }
 
     if (results.size() == 0) {
-        LOG(LogDebug) << "TheGamesDBJSONRequest::process(): No games found.";
+        LOG(LogDebug) << "TheGamesDBJSONRequest::process(): No games found";
     }
 }
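
Aside: the parsing pattern these hunks touch follows rapidjson's usual flow of Parse(), HasParseError(), and membership checks before the data is used. Below is a minimal standalone sketch of that flow for reference; it is illustration only, not code from this repository, and the sample JSON string is made up.

    #include <rapidjson/document.h>
    #include <rapidjson/error/en.h>

    #include <iostream>
    #include <string>

    int main()
    {
        // Deliberately malformed JSON to exercise the parse-error branch.
        const std::string response = "{ \"data\": { \"games\": [ ] }";

        rapidjson::Document doc;
        doc.Parse(response.c_str());

        if (doc.HasParseError()) {
            // GetParseError_En() maps the error code to a readable message,
            // which is what the scraper appends to its log line.
            std::cerr << "Error parsing JSON: "
                      << rapidjson::GetParseError_En(doc.GetParseError()) << "\n";
            return 1;
        }

        // The same membership checks the scraper performs before reading results.
        if (!doc.HasMember("data") || !doc["data"].HasMember("games") ||
                !doc["data"]["games"].IsArray()) {
            std::cerr << "Response had no game data\n";
            return 1;
        }

        std::cout << "Found " << doc["data"]["games"].Size() << " game entries\n";
        return 0;
    }
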
@@ -19,7 +19,6 @@
 
 #include <rapidjson/document.h>
 #include <rapidjson/error/en.h>
-
 #include <chrono>
 #include <fstream>
 #include <memory>
@@ -127,7 +126,7 @@ bool TheGamesDBJSONRequestResources::saveResource(
 {
 
     if (req == nullptr) {
-        LOG(LogError) << "HTTP request pointer was null.\n";
+        LOG(LogError) << "HTTP request pointer was null\n";
         return true;
     }
     if (req->status() == HttpReq::REQ_IN_PROGRESS) {
@@ -181,7 +180,7 @@ int TheGamesDBJSONRequestResources::loadResource(
 
     if (!doc.HasMember("data") || !doc["data"].HasMember(resource_name.c_str()) ||
             !doc["data"][resource_name.c_str()].IsObject()) {
-        std::string err = "TheGamesDBJSONRequest - Response had no resource data.\n";
+        std::string err = "TheGamesDBJSONRequest - Response had no resource data\n";
         LOG(LogError) << err;
         return 1;
     }
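
Aside: both hunks above are guard clauses in the resource-caching helpers, returning early when the request handle is missing, still in flight, or carrying no usable data. A small sketch of that early-return style follows; the Request type, its status enum, and the saveResource() signature here are hypothetical stand-ins, not the project's HttpReq API.

    #include <iostream>
    #include <memory>
    #include <string>

    // Hypothetical stand-in for an asynchronous HTTP request handle.
    struct Request {
        enum Status { InProgress, Done, Error };
        Status status = InProgress;
        std::string body;
    };

    // Returns true when the caller should stop polling this request.
    bool saveResource(const std::unique_ptr<Request>& req, const std::string& outPath)
    {
        if (req == nullptr) {
            std::cerr << "HTTP request pointer was null\n";
            return true;
        }
        if (req->status == Request::InProgress) {
            // Not an error: the caller simply polls again later.
            return false;
        }
        if (req->status == Request::Error) {
            std::cerr << "Request failed, nothing to save to " << outPath << "\n";
            return true;
        }
        std::cout << "Would write " << req->body.size() << " bytes to " << outPath << "\n";
        return true;
    }

    int main()
    {
        auto req = std::make_unique<Request>();
        req->status = Request::Done;
        req->body = "{\"data\":{}}";
        saveResource(req, "genres.json");
        return 0;
    }
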
@@ -33,7 +33,7 @@ std::unique_ptr<ScraperSearchHandle> startScraperSearch(const ScraperSearchParam
 
     // Check if the scraper in the settings still exists as a registered scraping source.
     if (scraper_request_funcs.find(name) == scraper_request_funcs.end())
-        LOG(LogWarning) << "Configured scraper (" << name << ") unavailable, scraping aborted.";
+        LOG(LogWarning) << "Configured scraper (" << name << ") unavailable, scraping aborted";
     else
         scraper_request_funcs.at(name)(params, handle->mRequestQueue, handle->mResults);
 
@@ -48,7 +48,7 @@ std::unique_ptr<ScraperSearchHandle> startMediaURLsFetch(const std::string& game
     ScraperSearchParams params;
     // Check if the scraper in the settings still exists as a registered scraping source.
     if (scraper_request_funcs.find(name) == scraper_request_funcs.end()) {
-        LOG(LogWarning) << "Configured scraper (" << name << ") unavailable, scraping aborted.";
+        LOG(LogWarning) << "Configured scraper (" << name << ") unavailable, scraping aborted";
     }
     else {
         // Specifically use the TheGamesDB function as this type of request
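
Aside: both call sites guard a lookup in scraper_request_funcs, a registry mapping the scraper name from the settings to the function that generates its requests. A minimal sketch of that lookup-then-dispatch pattern follows; the types and names are invented for the example, and only the find()/at() logic mirrors the lines above.

    #include <functional>
    #include <iostream>
    #include <map>
    #include <string>

    // Hypothetical stand-in for the real search parameters type.
    struct SearchParams { std::string gameName; };

    using ScraperFunc = std::function<void(const SearchParams&)>;

    int main()
    {
        // Registry of scraping sources, keyed by the name stored in the settings.
        std::map<std::string, ScraperFunc> scraperFuncs;
        scraperFuncs["thegamesdb"] = [](const SearchParams& p) {
            std::cout << "Queueing TheGamesDB request for " << p.gameName << "\n";
        };

        const std::string name = "screenscraper"; // e.g. read from the settings

        // Same guard as in the diff: verify the configured scraper is still registered.
        if (scraperFuncs.find(name) == scraperFuncs.end())
            std::cout << "Configured scraper (" << name << ") unavailable, scraping aborted\n";
        else
            scraperFuncs.at(name)(SearchParams{"Super Metroid"});

        return 0;
    }
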
@@ -472,7 +472,7 @@ bool resizeImage(const std::string& path, int maxWidth, int maxHeight)
         image = FreeImage_Load(format, path.c_str());
     }
     else {
-        LOG(LogError) << "File format not supported for image \"" << path << "\"!";
+        LOG(LogError) << "File format not supported for image \"" << path << "\"";
         return false;
     }
 
@@ -493,7 +493,7 @@ bool resizeImage(const std::string& path, int maxWidth, int maxHeight)
     FreeImage_Unload(image);
 
     if (imageRescaled == nullptr) {
-        LOG(LogError) << "Could not resize image. (Not enough memory? Invalid bitdepth?)";
+        LOG(LogError) << "Could not resize image (not enough memory or invalid bitdepth?)";
         return false;
     }
 
@@ -501,7 +501,7 @@ bool resizeImage(const std::string& path, int maxWidth, int maxHeight)
     FreeImage_Unload(imageRescaled);
 
     if (!saved) {
-        LOG(LogError) << "Failed to save resized image.";
+        LOG(LogError) << "Failed to save resized image";
    }
 
     return saved;
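
Aside: the three hunks above all sit in resizeImage(), which follows FreeImage's load, rescale, save, unload sequence. The sketch below walks the same sequence with standard FreeImage calls; the helper name, the width-only target size, and the error-handling details are assumptions made for illustration, not the repository's implementation.

    #include <FreeImage.h>

    #include <iostream>
    #include <string>

    // Hypothetical helper: shrink an image on disk to a maximum width,
    // keeping the aspect ratio, and save it back in place.
    bool resizeToMaxWidth(const std::string& path, int maxWidth)
    {
        FREE_IMAGE_FORMAT format = FreeImage_GetFileType(path.c_str(), 0);
        if (format == FIF_UNKNOWN)
            format = FreeImage_GetFIFFromFilename(path.c_str());

        if (format == FIF_UNKNOWN || !FreeImage_FIFSupportsReading(format)) {
            std::cerr << "File format not supported for image \"" << path << "\"\n";
            return false;
        }

        FIBITMAP* image = FreeImage_Load(format, path.c_str(), 0);
        if (image == nullptr)
            return false;

        const int width = static_cast<int>(FreeImage_GetWidth(image));
        const int height = static_cast<int>(FreeImage_GetHeight(image));
        const int newHeight =
                static_cast<int>(height * (static_cast<double>(maxWidth) / width));

        // Rescale allocates a new bitmap; the original is no longer needed.
        FIBITMAP* imageRescaled = FreeImage_Rescale(image, maxWidth, newHeight, FILTER_BICUBIC);
        FreeImage_Unload(image);

        if (imageRescaled == nullptr) {
            std::cerr << "Could not resize image (not enough memory or invalid bitdepth?)\n";
            return false;
        }

        const bool saved = (FreeImage_Save(format, imageRescaled, path.c_str(), 0) != 0);
        FreeImage_Unload(imageRescaled);

        if (!saved)
            std::cerr << "Failed to save resized image\n";

        return saved;
    }
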
@@ -248,7 +248,7 @@ void ScreenScraperRequest::process(const std::unique_ptr<HttpReq>& req,
         std::string gameName = Utils::String::toUpper((*it).mdl.get("name"));
         if (gameName.substr(0, 12) == "ZZZ(NOTGAME)") {
             LOG(LogWarning) << "ScreenScraperRequest - Received \"ZZZ(notgame)\" as game name, "
-                    "ignoring response.";
+                    "ignoring response";
             results.pop_back();
             return;
         }
@@ -270,7 +270,7 @@ void ScreenScraperRequest::processGame(const pugi::xml_document& xmldoc,
     std::string userID = data.child("ssuser").child("id").text().get();
     if (userID != "") {
         LOG(LogDebug) << "ScreenScraperRequest::processGame(): Scraping using account '" <<
-                userID << "'.";
+                userID << "'";
     }
     else {
         LOG(LogDebug) << "ScreenScraperRequest::processGame(): The configured account '" <<
@@ -291,11 +291,11 @@ void ScreenScraperRequest::processGame(const pugi::xml_document& xmldoc,
     if (maxRequestsPerDay > 0) {
         LOG(LogDebug) << "ScreenScraperRequest::processGame(): Daily scraping allowance: " <<
                 requestsToday << "/" << maxRequestsPerDay << " (" <<
-                scraperRequestAllowance << " remaining).";
+                scraperRequestAllowance << " remaining)";
     }
     else {
         LOG(LogDebug) << "ScreenScraperRequest::processGame(): Daily scraping allowance: "
-                "No statistics were provided with the response.";
+                "No statistics were provided with the response";
     }
 
     if (data.child("jeux"))
@@ -425,7 +425,7 @@ void ScreenScraperRequest::processGame(const pugi::xml_document& xmldoc,
     } // Game.
 
     if (out_results.size() == 0) {
-        LOG(LogDebug) << "ScreenScraperRequest::processGame(): No games found.";
+        LOG(LogDebug) << "ScreenScraperRequest::processGame(): No games found";
     }
 }
 
@@ -481,7 +481,7 @@ void ScreenScraperRequest::processMedia(
     }
     else {
         LOG(LogDebug) << "ScreenScraperRequest::processMedia(): "
-                "Failed to find media XML node with name '" << mediaType << "'.";
+                "Failed to find media XML node with name '" << mediaType << "'";
     }
 }
 
@@ -491,13 +491,13 @@ void ScreenScraperRequest::processList(const pugi::xml_document& xmldoc,
 {
     assert(mRequestQueue != nullptr);
 
-    LOG(LogDebug) << "ScreenScraperRequest::processList(): Processing a list of results.";
+    LOG(LogDebug) << "ScreenScraperRequest::processList(): Processing a list of results";
 
     pugi::xml_node data = xmldoc.child("Data");
     pugi::xml_node game = data.child("jeu");
 
     if (!game) {
-        LOG(LogDebug) << "ScreenScraperRequest::processList(): Found nothing.";
+        LOG(LogDebug) << "ScreenScraperRequest::processList(): Found nothing";
     }
 
     ScreenScraperRequest::ScreenScraperConfig ssConfig;
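
Aside: the ScreenScraper hunks navigate the XML response with pugixml, chaining child() lookups, reading text() values, and testing node handles directly for presence. A self-contained sketch of that traversal style follows; the sample XML and element layout are simplified stand-ins for a real ScreenScraper response.

    #include <pugixml.hpp>

    #include <iostream>
    #include <string>

    int main()
    {
        // Trimmed-down stand-in for a ScreenScraper response body.
        const char* xml =
            "<Data>"
            "  <ssuser><id>example_user</id></ssuser>"
            "  <jeu><nom>Example Game</nom></jeu>"
            "</Data>";

        pugi::xml_document doc;
        pugi::xml_parse_result result = doc.load_string(xml);
        if (!result) {
            std::cerr << "XML parse error: " << result.description() << "\n";
            return 1;
        }

        pugi::xml_node data = doc.child("Data");

        // Same navigation style as processGame(): drill down child by child,
        // then read the text content of the node.
        std::string userID = data.child("ssuser").child("id").text().get();
        if (!userID.empty())
            std::cout << "Scraping using account '" << userID << "'\n";

        // processList() tests the node handle directly; an empty handle is falsy.
        pugi::xml_node game = data.child("jeu");
        if (!game)
            std::cout << "Found nothing\n";
        else
            std::cout << "First game: " << game.child("nom").text().get() << "\n";

        return 0;
    }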