Mirror of https://github.com/RetroDECK/ES-DE.git, synced 2024-11-26 08:05:38 +00:00
Merge pull request #238 from mooware/scraper-use-getgameslist

TheGamesDB scraper should use GetGameList.php

Commit 3c60235363
@@ -231,11 +231,9 @@ void ScraperSearchComponent::onSearchDone(const std::vector<ScraperSearchResult>
 
     mScraperResults = results;
 
-    const int end = results.size() > MAX_SCRAPER_RESULTS ? MAX_SCRAPER_RESULTS : results.size(); // at max display 5
-
     auto font = Font::get(FONT_SIZE_MEDIUM);
     unsigned int color = 0x777777FF;
-    if(end == 0)
+    if(results.empty())
     {
         ComponentListRow row;
         row.addElement(std::make_shared<TextComponent>(mWindow, "NO GAMES FOUND - SKIP", font, color), true);
@@ -247,7 +245,7 @@ void ScraperSearchComponent::onSearchDone(const std::vector<ScraperSearchResult>
         mGrid.resetCursor();
     }else{
         ComponentListRow row;
-        for(int i = 0; i < end; i++)
+        for(size_t i = 0; i < results.size(); i++)
         {
             row.elements.clear();
             row.addElement(std::make_shared<TextComponent>(mWindow, strToUpper(results.at(i).mdl.get("name")), font, color), true);
@@ -74,31 +74,28 @@ const std::map<PlatformId, const char*> gamesdb_platformid_map = boost::assign::
 void thegamesdb_generate_scraper_requests(const ScraperSearchParams& params, std::queue< std::unique_ptr<ScraperRequest> >& requests,
     std::vector<ScraperSearchResult>& results)
 {
-    std::string path = "thegamesdb.net/api/GetGame.php?";
+    std::string path;
     bool usingGameID = false;
 
     std::string cleanName = params.nameOverride;
-    if (cleanName.empty())
+    if (!cleanName.empty() && cleanName.substr(0,3) == "id:")
     {
-        cleanName = params.game->getCleanName();
-        path += "name=" + HttpReq::urlEncode(cleanName);
-    }
-    else
-    {
-        if (cleanName.substr(0,3) == "id:") {
-            std::string gameID = cleanName.substr(3,-1);
-            path += "id=" + HttpReq::urlEncode(gameID);
-            usingGameID = true;
-        }
-        else {
-            path += "exactname=" + HttpReq::urlEncode(cleanName);
-        }
+        std::string gameID = cleanName.substr(3);
+        path = "thegamesdb.net/api/GetGame.php?id=" + HttpReq::urlEncode(gameID);
+        usingGameID = true;
+    }else{
+        if (cleanName.empty())
+            cleanName = params.game->getCleanName();
+        path += "thegamesdb.net/api/GetGamesList.php?name=" + HttpReq::urlEncode(cleanName);
     }
 
-    if(params.system->getPlatformIds().empty() || usingGameID)
+    if(usingGameID)
     {
-        // no platform specified, we're done
+        // if we have the ID already, we don't need the GetGameList request
         requests.push(std::unique_ptr<ScraperRequest>(new TheGamesDBRequest(results, path)));
+    }else if(params.system->getPlatformIds().empty()){
+        // no platform specified, we're done
+        requests.push(std::unique_ptr<ScraperRequest>(new TheGamesDBRequest(requests, results, path)));
     }else{
         // go through the list, we need to split this into multiple requests
         // because TheGamesDB API either sucks or I don't know how to use it properly...
@@ -116,7 +113,7 @@ void thegamesdb_generate_scraper_requests(const ScraperSearchParams& params, std
                 LOG(LogWarning) << "TheGamesDB scraper warning - no support for platform " << getPlatformName(*platformIt);
             }
 
-            requests.push(std::unique_ptr<ScraperRequest>(new TheGamesDBRequest(results, path)));
+            requests.push(std::unique_ptr<ScraperRequest>(new TheGamesDBRequest(requests, results, path)));
         }
     }
 }
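
For orientation, the following is a small, self-contained sketch (not part of the commit) of the path-selection logic introduced above. urlEncode is a do-nothing stand-in for HttpReq::urlEncode, the "some game" fallback stands in for params.game->getCleanName(), and the per-platform splitting handled further down in the function is omitted.

    #include <iostream>
    #include <string>

    // placeholder for HttpReq::urlEncode - performs no real escaping
    static std::string urlEncode(const std::string& s) { return s; }

    int main()
    {
        // nameOverride as it would arrive via ScraperSearchParams;
        // an "id:" prefix forces a direct GetGame lookup
        std::string cleanName = "id:1234";
        std::string path;
        bool usingGameID = false;

        if (!cleanName.empty() && cleanName.substr(0, 3) == "id:")
        {
            // explicit TheGamesDB ID: skip GetGamesList entirely
            path = "thegamesdb.net/api/GetGame.php?id=" + urlEncode(cleanName.substr(3));
            usingGameID = true;
        }
        else
        {
            if (cleanName.empty())
                cleanName = "some game"; // the real code falls back to params.game->getCleanName()
            path = "thegamesdb.net/api/GetGamesList.php?name=" + urlEncode(cleanName);
        }

        std::cout << path << "\n"
                  << (usingGameID ? "-> one GetGame request" : "-> one GetGamesList request per platform") << "\n";
    }

With the override set to "id:1234" this prints thegamesdb.net/api/GetGame.php?id=1234 and a single GetGame request is queued; any other name now goes through GetGamesList.php first.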
@@ -130,19 +127,27 @@ void TheGamesDBRequest::process(const std::unique_ptr<HttpReq>& req, std::vector
     if(!parseResult)
     {
         std::stringstream ss;
-        ss << "GamesDBRequest - Error parsing XML. \n\t" << parseResult.description() << "";
+        ss << "TheGamesDBRequest - Error parsing XML. \n\t" << parseResult.description() << "";
         std::string err = ss.str();
         setError(err);
         LOG(LogError) << err;
         return;
     }
 
-    pugi::xml_node data = doc.child("Data");
+    if (isGameRequest())
+        processGame(doc, results);
+    else
+        processList(doc, results);
+}
+
+void TheGamesDBRequest::processGame(const pugi::xml_document& xmldoc, std::vector<ScraperSearchResult>& results)
+{
+    pugi::xml_node data = xmldoc.child("Data");
 
     std::string baseImageUrl = data.child("baseImgUrl").text().get();
 
     pugi::xml_node game = data.child("Game");
-    while(game && results.size() < MAX_SCRAPER_RESULTS)
+    if(game)
     {
         ScraperSearchResult result;
 
@@ -179,6 +184,27 @@ void TheGamesDBRequest::process(const std::unique_ptr<HttpReq>& req, std::vector
         }
 
         results.push_back(result);
+    }
+}
+
+void TheGamesDBRequest::processList(const pugi::xml_document& xmldoc, std::vector<ScraperSearchResult>& results)
+{
+    assert(mRequestQueue != nullptr);
+
+    pugi::xml_node data = xmldoc.child("Data");
+    pugi::xml_node game = data.child("Game");
+
+    // limit the number of results per platform, not in total.
+    // otherwise if the first platform returns >= 7 games
+    // but the second platform contains the relevant game,
+    // the relevant result would not be shown.
+    for(int i = 0; game && i < MAX_SCRAPER_RESULTS; i++)
+    {
+        std::string id = game.child("id").text().get();
+        std::string path = "thegamesdb.net/api/GetGame.php?id=" + id;
+
+        mRequestQueue->push(std::unique_ptr<ScraperRequest>(new TheGamesDBRequest(results, path)));
+
         game = game.next_sibling("Game");
     }
 }
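
To illustrate what the new processList() does with a GetGamesList response, here is a minimal self-contained sketch (not from the commit). The XML literal is an invented stand-in for a real TheGamesDB reply, the cap of 7 mirrors the ">= 7 games" comment above as a stand-in for MAX_SCRAPER_RESULTS, and printing the URL replaces queuing a follow-up TheGamesDBRequest on mRequestQueue.

    #include <iostream>
    #include <string>
    #include <pugixml.hpp>

    int main()
    {
        // invented stand-in for a GetGamesList.php response body
        const char* xml =
            "<Data>"
            "  <Game><id>136</id><GameTitle>First Match</GameTitle></Game>"
            "  <Game><id>137</id><GameTitle>Second Match</GameTitle></Game>"
            "</Data>";

        pugi::xml_document doc;
        if (!doc.load_string(xml))
            return 1;

        const int maxResults = 7; // stand-in for MAX_SCRAPER_RESULTS

        // walk the <Game> elements the same way processList() does
        pugi::xml_node game = doc.child("Data").child("Game");
        for (int i = 0; game && i < maxResults; i++)
        {
            std::string id = game.child("id").text().get();
            // the real code queues a TheGamesDBRequest for this path instead of printing it
            std::cout << "thegamesdb.net/api/GetGame.php?id=" << id << "\n";
            game = game.next_sibling("Game");
        }
    }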
@@ -2,13 +2,24 @@
 
 #include "scrapers/Scraper.h"
 
+namespace pugi { class xml_document; }
+
 void thegamesdb_generate_scraper_requests(const ScraperSearchParams& params, std::queue< std::unique_ptr<ScraperRequest> >& requests,
     std::vector<ScraperSearchResult>& results);
 
 class TheGamesDBRequest : public ScraperHttpRequest
 {
 public:
-    TheGamesDBRequest(std::vector<ScraperSearchResult>& resultsWrite, const std::string& url) : ScraperHttpRequest(resultsWrite, url) {}
+    // ctor for a GetGameList request
+    TheGamesDBRequest(std::queue< std::unique_ptr<ScraperRequest> >& requestsWrite, std::vector<ScraperSearchResult>& resultsWrite, const std::string& url) : ScraperHttpRequest(resultsWrite, url), mRequestQueue(&requestsWrite) {}
+    // ctor for a GetGame request
+    TheGamesDBRequest(std::vector<ScraperSearchResult>& resultsWrite, const std::string& url) : ScraperHttpRequest(resultsWrite, url), mRequestQueue(nullptr) {}
+
 protected:
     void process(const std::unique_ptr<HttpReq>& req, std::vector<ScraperSearchResult>& results) override;
+    void processList(const pugi::xml_document& xmldoc, std::vector<ScraperSearchResult>& results);
+    void processGame(const pugi::xml_document& xmldoc, std::vector<ScraperSearchResult>& results);
+    bool isGameRequest() { return !mRequestQueue; }
+
+    std::queue< std::unique_ptr<ScraperRequest> >* mRequestQueue;
 };
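
The two constructors above use the request-queue pointer itself as the mode flag: a GetGamesList request remembers the queue so processList() can push follow-up requests, a GetGame request stores nullptr, and isGameRequest() simply checks that pointer. A stripped-down, self-contained sketch of the pattern (toy Request/Result types and example URLs, not the real ES classes):

    #include <iostream>
    #include <memory>
    #include <queue>
    #include <string>
    #include <vector>

    struct Result {};

    struct Request
    {
        // "GetGamesList" mode: keep a pointer to the queue for follow-up requests
        Request(std::queue<std::unique_ptr<Request>>& requests, std::vector<Result>& results, const std::string& url)
            : mUrl(url), mRequestQueue(&requests) {}

        // "GetGame" mode: no queue pointer needed
        Request(std::vector<Result>& results, const std::string& url)
            : mUrl(url), mRequestQueue(nullptr) {}

        bool isGameRequest() const { return !mRequestQueue; }

        std::string mUrl;
        std::queue<std::unique_ptr<Request>>* mRequestQueue;
    };

    int main()
    {
        std::queue<std::unique_ptr<Request>> requests;
        std::vector<Result> results;

        Request list(requests, results, "thegamesdb.net/api/GetGamesList.php?name=zelda");
        Request game(results, "thegamesdb.net/api/GetGame.php?id=1234");

        std::cout << std::boolalpha
                  << "list.isGameRequest() = " << list.isGameRequest() << "\n"  // false
                  << "game.isGameRequest() = " << game.isGameRequest() << "\n"; // true
    }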
@@ -43,13 +43,15 @@ void ScraperSearchHandle::update()
 
     if(!mRequestQueue.empty())
     {
-        auto& req = mRequestQueue.front();
-        AsyncHandleStatus status = req->status();
+        // a request can add more requests to the queue while running,
+        // so be careful with references into the queue
+        auto& req = *(mRequestQueue.front());
+        AsyncHandleStatus status = req.status();
 
         if(status == ASYNC_ERROR)
         {
             // propegate error
-            setError(req->getStatusString());
+            setError(req.getStatusString());
 
             // empty our queue
             while(!mRequestQueue.empty())
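
This last hunk matters because a GetGamesList request can now push new requests onto the very queue that ScraperSearchHandle::update() is walking, so the loop binds its reference to the pointed-to request object rather than to the queue slot. A small self-contained sketch of that pattern (toy Request type and example URLs, not the ES code):

    #include <iostream>
    #include <memory>
    #include <queue>
    #include <string>

    struct Request
    {
        std::string url;
        bool isList; // a "list" request spawns one follow-up request in this toy model
    };

    int main()
    {
        std::queue<std::unique_ptr<Request>> q;
        q.push(std::unique_ptr<Request>(new Request{"GetGamesList.php?name=zelda", true}));

        while (!q.empty())
        {
            // bind to the object, not to the queue element, since processing may grow the queue
            Request& req = *(q.front());

            if (req.isList)
                q.push(std::unique_ptr<Request>(new Request{"GetGame.php?id=1234", false}));

            std::cout << "processed " << req.url << "\n";
            q.pop();
        }
    }

Binding to the heap-allocated request keeps req valid regardless of how the underlying queue container behaves while it grows.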
|