2020-09-21 17:17:34 +00:00
|
|
|
// SPDX-License-Identifier: MIT
|
2020-05-26 16:34:33 +00:00
|
|
|
//
|
2020-09-21 17:17:34 +00:00
|
|
|
// EmulationStation Desktop Edition
|
2020-06-21 12:25:28 +00:00
|
|
|
// Scraper.cpp
|
2020-05-26 16:34:33 +00:00
|
|
|
//
|
2020-06-21 12:25:28 +00:00
|
|
|
// Main scraper logic.
|
|
|
|
// Called from GuiScraperSearch.
|
|
|
|
// Calls either GamesDBJSONScraper or ScreenScraper.
|
2020-05-26 16:34:33 +00:00
|
|
|
//
|
|
|
|
|
2014-06-25 16:29:58 +00:00
|
|
|
#include "scrapers/Scraper.h"
|
2017-11-01 22:21:10 +00:00
|
|
|
|
|
|
|
#include "FileData.h"
|
2019-02-08 02:08:11 +00:00
|
|
|
#include "GamesDBJSONScraper.h"
|
2014-06-25 16:29:58 +00:00
|
|
|
#include "Log.h"
|
2020-07-13 18:10:09 +00:00
|
|
|
#include "ScreenScraper.h"
|
2014-06-25 16:29:58 +00:00
|
|
|
#include "Settings.h"
|
2017-11-01 22:21:10 +00:00
|
|
|
#include "SystemData.h"
|
2021-07-07 18:03:42 +00:00
|
|
|
#include "utils/StringUtil.h"
|
2020-07-13 18:10:09 +00:00
|
|
|
|
2021-02-01 18:27:48 +00:00
|
|
|
#if defined(_WIN64)
|
|
|
|
#include "views/ViewController.h"
|
|
|
|
#endif
|
|
|
|
|
2014-06-25 16:29:58 +00:00
|
|
|
#include <FreeImage.h>
|
2021-07-07 18:03:42 +00:00
|
|
|
#include <cmath>
|
2017-11-01 22:21:10 +00:00
|
|
|
#include <fstream>
|
2014-06-25 16:29:58 +00:00
|
|
|
|
2021-08-17 16:41:45 +00:00
|
|
|
// Registry of all available scraping sources: maps the scraper name (as stored in
// the "Scraper" setting) to the function that generates its search requests.
const std::map<std::string, generate_scraper_requests_func> scraper_request_funcs{
    {"thegamesdb", &thegamesdb_generate_json_scraper_requests},
    {"screenscraper", &screenscraper_generate_scraper_requests}};
|
2014-06-25 16:29:58 +00:00
|
|
|
|
|
|
|
// Starts a scraper search for a single game using the scraping source configured
// in the application settings. Returns a handle that the caller polls via update().
// If the configured source is unknown, the handle is returned with an empty request
// queue (so it completes immediately) and an error is logged.
std::unique_ptr<ScraperSearchHandle> startScraperSearch(const ScraperSearchParams& params)
{
    const std::string& name = Settings::getInstance()->getString("Scraper");
    std::unique_ptr<ScraperSearchHandle> handle(new ScraperSearchHandle());

    // Check if the scraper in the settings still exists as a registered scraping source.
    if (scraper_request_funcs.find(name) == scraper_request_funcs.end()) {
        LOG(LogError) << "Configured scraper (" << name << ") unavailable, scraping aborted";
    }
    else {
        LOG(LogDebug) << "Scraper::startScraperSearch(): Scraping system \""
                      << params.system->getName() << "\", game file \""
                      << params.game->getFileName() << "\"";
        // Queue up the source-specific requests on the handle.
        scraper_request_funcs.at(name)(params, handle->mRequestQueue, handle->mResults);
    }

    return handle;
}
|
|
|
|
|
2020-06-06 11:10:33 +00:00
|
|
|
std::unique_ptr<ScraperSearchHandle> startMediaURLsFetch(const std::string& gameIDs)
|
|
|
|
{
|
2020-06-21 12:25:28 +00:00
|
|
|
const std::string& name = Settings::getInstance()->getString("Scraper");
|
|
|
|
std::unique_ptr<ScraperSearchHandle> handle(new ScraperSearchHandle());
|
|
|
|
|
|
|
|
ScraperSearchParams params;
|
|
|
|
// Check if the scraper in the settings still exists as a registered scraping source.
|
2020-07-13 18:10:09 +00:00
|
|
|
if (scraper_request_funcs.find(name) == scraper_request_funcs.end()) {
|
2021-01-26 16:28:54 +00:00
|
|
|
LOG(LogWarning) << "Configured scraper (" << name << ") unavailable, scraping aborted";
|
2020-07-13 18:10:09 +00:00
|
|
|
}
|
|
|
|
else {
|
2020-06-21 12:25:28 +00:00
|
|
|
// Specifically use the TheGamesDB function as this type of request
|
|
|
|
// will never occur for ScreenScraper.
|
2020-07-13 18:10:09 +00:00
|
|
|
thegamesdb_generate_json_scraper_requests(gameIDs, handle->mRequestQueue, handle->mResults);
|
|
|
|
}
|
2020-06-21 12:25:28 +00:00
|
|
|
|
|
|
|
return handle;
|
2020-06-06 11:10:33 +00:00
|
|
|
}
|
|
|
|
|
2014-06-25 16:29:58 +00:00
|
|
|
std::vector<std::string> getScraperList()
|
|
|
|
{
|
2020-06-21 12:25:28 +00:00
|
|
|
std::vector<std::string> list;
|
|
|
|
for (auto it = scraper_request_funcs.cbegin(); it != scraper_request_funcs.cend(); it++)
|
|
|
|
list.push_back(it->first);
|
2014-06-25 16:29:58 +00:00
|
|
|
|
2020-06-21 12:25:28 +00:00
|
|
|
return list;
|
2014-06-25 16:29:58 +00:00
|
|
|
}
|
|
|
|
|
2019-01-24 18:00:19 +00:00
|
|
|
bool isValidConfiguredScraper()
|
|
|
|
{
|
2020-06-21 12:25:28 +00:00
|
|
|
const std::string& name = Settings::getInstance()->getString("Scraper");
|
|
|
|
return scraper_request_funcs.find(name) != scraper_request_funcs.end();
|
2019-01-24 18:00:19 +00:00
|
|
|
}
|
|
|
|
|
2014-06-25 16:29:58 +00:00
|
|
|
// Drives the front request of the queue one step: propagates errors (draining the
// queue), pops finished requests, and marks the whole search done once the queue
// is empty without errors.
// NOTE(review): req.update() is not called here; progress appears to be driven
// through req.status() — confirm against the AsyncHandle/HttpReq implementations.
void ScraperSearchHandle::update()
{
    if (mStatus == ASYNC_DONE)
        return;

    if (!mRequestQueue.empty()) {
        // A request can add more requests to the queue while running,
        // so be careful with references into the queue.
        auto& req = *(mRequestQueue.front());
        AsyncHandleStatus status = req.status();

        if (status == ASYNC_ERROR) {
            // Propagate error.
            setError(req.getStatusString());

            // Empty our queue.
            while (!mRequestQueue.empty())
                mRequestQueue.pop();

            return;
        }

        // Finished this one, see if we have any more.
        if (status == ASYNC_DONE)
            mRequestQueue.pop();

        // Status == ASYNC_IN_PROGRESS.
    }

    // Check if we finished without any errors and if so set the status flag accordingly.
    if (mRequestQueue.empty() && mStatus != ASYNC_ERROR) {
        setStatus(ASYNC_DONE);
        return;
    }
}
|
|
|
|
|
2020-05-26 16:34:33 +00:00
|
|
|
// ScraperRequest.
// Base class for scraper requests; derived requests append their search results
// to the vector referenced by mResults (owned by the ScraperSearchHandle).
ScraperRequest::ScraperRequest(std::vector<ScraperSearchResult>& resultsWrite)
    : mResults(resultsWrite)
{
}
|
|
|
|
|
2020-05-26 16:34:33 +00:00
|
|
|
// ScraperHttpRequest.
// Immediately kicks off the HTTP request for the supplied URL and marks the
// request as in progress; completion is handled in update().
ScraperHttpRequest::ScraperHttpRequest(std::vector<ScraperSearchResult>& resultsWrite,
                                       const std::string& url)
    : ScraperRequest(resultsWrite)
{
    setStatus(ASYNC_IN_PROGRESS);
    mReq = std::unique_ptr<HttpReq>(new HttpReq(url));
}
|
|
|
|
|
|
|
|
void ScraperHttpRequest::update()
|
|
|
|
{
|
2020-06-21 12:25:28 +00:00
|
|
|
HttpReq::Status status = mReq->status();
|
|
|
|
if (status == HttpReq::REQ_SUCCESS) {
|
|
|
|
// If process() has an error, status will be changed to ASYNC_ERROR.
|
|
|
|
setStatus(ASYNC_DONE);
|
|
|
|
process(mReq, mResults);
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
// Not ready yet.
|
|
|
|
if (status == HttpReq::REQ_IN_PROGRESS)
|
|
|
|
return;
|
|
|
|
|
|
|
|
// Everything else is some sort of error.
|
2021-07-07 18:03:42 +00:00
|
|
|
LOG(LogError) << "ScraperHttpRequest network error (status: " << status << ") - "
|
|
|
|
<< mReq->getErrorMsg();
|
2020-07-31 13:20:55 +00:00
|
|
|
setError("Network error: " + mReq->getErrorMsg());
|
2014-06-25 16:29:58 +00:00
|
|
|
}
|
|
|
|
|
2020-06-06 11:10:33 +00:00
|
|
|
// Download and write the media files to disk.
// Factory returning a handle whose constructor starts the downloads and whose
// update() must be polled until completion.
std::unique_ptr<MDResolveHandle> resolveMetaDataAssets(const ScraperSearchResult& result,
                                                       const ScraperSearchParams& search)
{
    return std::unique_ptr<MDResolveHandle>(new MDResolveHandle(result, search));
}
|
|
|
|
|
2020-05-26 16:34:33 +00:00
|
|
|
// Builds the list of media files to fetch (based on the per-media-type settings
// and the URLs present in the search result), then for each entry either saves
// the already-cached thumbnail image directly to disk or queues an asynchronous
// download via downloadMediaAsync().
MDResolveHandle::MDResolveHandle(const ScraperSearchResult& result,
                                 const ScraperSearchParams& search)
    : mResult(result)
{
    // Describes one media file to fetch: where to get it, its format, which
    // media subdirectory it belongs to, any already-existing file on disk,
    // and whether the saved image should be downscaled.
    struct mediaFileInfoStruct {
        std::string fileURL;
        std::string fileFormat;
        std::string subDirectory;
        std::string existingMediaFile;
        bool resizeFile;
    } mediaFileInfo;

    std::vector<struct mediaFileInfoStruct> scrapeFiles;

    mResult.savedNewMedia = false;

    if (Settings::getInstance()->getBool("Scrape3DBoxes") && result.box3DUrl != "") {
        mediaFileInfo.fileURL = result.box3DUrl;
        mediaFileInfo.fileFormat = result.box3DFormat;
        mediaFileInfo.subDirectory = "3dboxes";
        mediaFileInfo.existingMediaFile = search.game->get3DBoxPath();
        mediaFileInfo.resizeFile = true;
        scrapeFiles.push_back(mediaFileInfo);
    }
    if (Settings::getInstance()->getBool("ScrapeCovers") && result.coverUrl != "") {
        mediaFileInfo.fileURL = result.coverUrl;
        mediaFileInfo.fileFormat = result.coverFormat;
        mediaFileInfo.subDirectory = "covers";
        mediaFileInfo.existingMediaFile = search.game->getCoverPath();
        mediaFileInfo.resizeFile = true;
        scrapeFiles.push_back(mediaFileInfo);
    }
    if (Settings::getInstance()->getBool("ScrapeMarquees") && result.marqueeUrl != "") {
        mediaFileInfo.fileURL = result.marqueeUrl;
        mediaFileInfo.fileFormat = result.marqueeFormat;
        mediaFileInfo.subDirectory = "marquees";
        mediaFileInfo.existingMediaFile = search.game->getMarqueePath();
        mediaFileInfo.resizeFile = true;
        scrapeFiles.push_back(mediaFileInfo);
    }
    if (Settings::getInstance()->getBool("ScrapeScreenshots") && result.screenshotUrl != "") {
        mediaFileInfo.fileURL = result.screenshotUrl;
        mediaFileInfo.fileFormat = result.screenshotFormat;
        mediaFileInfo.subDirectory = "screenshots";
        mediaFileInfo.existingMediaFile = search.game->getScreenshotPath();
        mediaFileInfo.resizeFile = true;
        scrapeFiles.push_back(mediaFileInfo);
    }
    if (Settings::getInstance()->getBool("ScrapeVideos") && result.videoUrl != "") {
        mediaFileInfo.fileURL = result.videoUrl;
        mediaFileInfo.fileFormat = result.videoFormat;
        mediaFileInfo.subDirectory = "videos";
        mediaFileInfo.existingMediaFile = search.game->getVideoPath();
        // Videos are never resized.
        mediaFileInfo.resizeFile = false;
        scrapeFiles.push_back(mediaFileInfo);
#if defined(_WIN64)
        // Required due to the idiotic file locking that exists on this operating system.
        ViewController::get()->onStopVideo();
#endif
    }

    for (auto it = scrapeFiles.cbegin(); it != scrapeFiles.cend(); it++) {

        std::string ext;

        // If we have a file extension returned by the scraper, then use it.
        // Otherwise, try to guess it by the name of the URL, which points to a media file.
        if (!it->fileFormat.empty()) {
            ext = it->fileFormat;
        }
        else {
            size_t dot = it->fileURL.find_last_of('.');

            if (dot != std::string::npos)
                ext = it->fileURL.substr(dot, std::string::npos);
        }

        std::string filePath = getSaveAsPath(search, it->subDirectory, ext);

        // If there is an existing media file on disk and the setting to overwrite data
        // has been set to no, then don't proceed with downloading or saving a new file.
        if (it->existingMediaFile != "" &&
            !Settings::getInstance()->getBool("ScraperOverwriteData"))
            continue;

        // If the image is cached already as the thumbnail, then we don't need
        // to download it again, in this case just save it to disk and resize it.
        if (mResult.thumbnailImageUrl == it->fileURL && mResult.thumbnailImageData.size() > 0) {

            // This is just a temporary workaround to avoid saving media files to disk that
            // are actually just containing error messages from the scraper service. The
            // proper solution is to implement file checksum checks to determine if the
            // server response contains valid media. As for the current approach, if the
            // file is less than 350 bytes, we check if FreeImage can actually detect a
            // valid format, and if not, we present an error message. Black/empty images
            // are sometimes returned from the scraper service and these can actually be
            // less than 350 bytes in size.
            if (Settings::getInstance()->getBool("ScraperHaltOnInvalidMedia") &&
                mResult.thumbnailImageData.size() < 350) {

                FIMEMORY* memoryStream =
                    FreeImage_OpenMemory(reinterpret_cast<BYTE*>(&mResult.thumbnailImageData.at(0)),
                                         static_cast<DWORD>(mResult.thumbnailImageData.size()));

                FREE_IMAGE_FORMAT imageFormat = FreeImage_GetFileTypeFromMemory(memoryStream, 0);
                FreeImage_CloseMemory(memoryStream);

                if (imageFormat == FIF_UNKNOWN) {
                    setError("The file \"" + Utils::FileSystem::getFileName(filePath) +
                             "\" returned by the scraper seems to be invalid as it's less than " +
                             "350 bytes in size");
                    return;
                }
            }

            // Remove any existing media file before attempting to write a new one.
            // This avoids the problem where there's already a file for this media type
            // with a different format/extension (e.g. game.jpg and we're going to write
            // game.png) which would lead to two media files for this game.
            if (it->existingMediaFile != "")
                Utils::FileSystem::removeFile(it->existingMediaFile);

            // If the media directory does not exist, something is wrong, possibly permission
            // problems or the MediaDirectory setting points to a file instead of a directory.
            if (!Utils::FileSystem::isDirectory(Utils::FileSystem::getParent(filePath))) {
                setError("Media directory does not exist and can't be created. "
                         "Permission problems?");
                LOG(LogError) << "Couldn't create media directory: \""
                              << Utils::FileSystem::getParent(filePath) << "\"";
                return;
            }

#if defined(_WIN64)
            std::ofstream stream(Utils::String::stringToWideString(filePath).c_str(),
                                 std::ios_base::out | std::ios_base::binary);
#else
            std::ofstream stream(filePath, std::ios_base::out | std::ios_base::binary);
#endif
            if (!stream || stream.bad()) {
                setError("Failed to open path for writing media file.\nPermission error?");
                return;
            }

            const std::string& content = mResult.thumbnailImageData;
            stream.write(content.data(), content.length());
            stream.close();
            if (stream.bad()) {
                setError("Failed to save media file.\nDisk full?");
                return;
            }

            // Resize it.
            if (it->resizeFile) {
                if (!resizeImage(filePath, it->subDirectory)) {
                    setError("Error saving resized image.\nOut of memory? Disk full?");
                    return;
                }
            }

            mResult.savedNewMedia = true;
        }
        // If it's not cached, then initiate the download.
        else {
            mFuncs.push_back(ResolvePair(downloadMediaAsync(it->fileURL, filePath,
                                                            it->existingMediaFile, it->subDirectory,
                                                            it->resizeFile, mResult.savedNewMedia),
                                         [this, filePath] {}));
        }
    }
}
|
|
|
|
|
|
|
|
// Polls the pending download handles: propagates the first error encountered,
// runs the completion callback and erases each finished entry, and marks the
// whole resolve operation done once no downloads remain.
void MDResolveHandle::update()
{
    if (mStatus == ASYNC_DONE || mStatus == ASYNC_ERROR)
        return;

    auto it = mFuncs.cbegin();
    while (it != mFuncs.cend()) {

        if (it->first->status() == ASYNC_ERROR) {
            setError(it->first->getStatusString());
            return;
        }
        else if (it->first->status() == ASYNC_DONE) {
            // Run the completion callback, then erase; erase() returns the next
            // valid iterator so the loop continues without incrementing.
            it->second();
            it = mFuncs.erase(it);
            continue;
        }
        it++;
    }

    if (mFuncs.empty())
        setStatus(ASYNC_DONE);
}
|
|
|
|
|
2021-07-07 18:03:42 +00:00
|
|
|
// Factory for a single asynchronous media file download; the returned handle
// starts the HTTP request immediately and is polled via update().
std::unique_ptr<MediaDownloadHandle> downloadMediaAsync(const std::string& url,
                                                        const std::string& saveAs,
                                                        const std::string& existingMediaPath,
                                                        const std::string& mediaType,
                                                        const bool resizeFile,
                                                        bool& savedNewMedia)
{
    return std::unique_ptr<MediaDownloadHandle>(new MediaDownloadHandle(
        url, saveAs, existingMediaPath, mediaType, resizeFile, savedNewMedia));
}
|
|
|
|
|
2021-07-07 18:03:42 +00:00
|
|
|
// Starts the HTTP download for a single media file and records where to save it.
// NOTE(review): mSavedNewMediaPtr aliases the caller-owned savedNewMedia flag
// (ScraperSearchResult::savedNewMedia) — the caller must outlive this handle.
MediaDownloadHandle::MediaDownloadHandle(const std::string& url,
                                         const std::string& path,
                                         const std::string& existingMediaPath,
                                         const std::string& mediaType,
                                         const bool resizeFile,
                                         bool& savedNewMedia)
    : mReq(new HttpReq(url))
    , mSavePath(path)
    , mExistingMediaFile(existingMediaPath)
    , mMediaType(mediaType)
    , mResizeFile(resizeFile)
{
    mSavedNewMediaPtr = &savedNewMedia;
}
|
|
|
|
|
2020-08-05 20:38:44 +00:00
|
|
|
// Polls the download and, once complete, validates the response, writes the
// media file to disk, optionally resizes it, and flags that new media was saved.
void MediaDownloadHandle::update()
{
    if (mReq->status() == HttpReq::REQ_IN_PROGRESS)
        return;

    if (mReq->status() != HttpReq::REQ_SUCCESS) {
        std::stringstream ss;
        ss << "Network error: " << mReq->getErrorMsg();
        setError(ss.str());
        return;
    }

    // This seems to take care of a strange race condition where the media saving and
    // resizing would sometimes take place twice.
    if (mStatus == ASYNC_DONE)
        return;

    // Download is done, save it to disk.

    // This is just a temporary workaround to avoid saving media files to disk that are
    // actually just containing error messages from the scraper service. The proper solution
    // is to implement file checksum checks to determine if the server response contains valid
    // media. As for the current approach, if the file is less than 350 bytes, we check if
    // FreeImage can actually detect a valid format, and if not, we present an error message.
    // Black/empty images are sometimes returned from the scraper service and these can actually
    // be less than 350 bytes in size.
    if (Settings::getInstance()->getBool("ScraperHaltOnInvalidMedia") &&
        mReq->getContent().size() < 350) {

        FREE_IMAGE_FORMAT imageFormat = FIF_UNKNOWN;

        // Videos can't be format-checked with FreeImage, so they fall through
        // to the error below when this small.
        if (mMediaType != "videos") {
            std::string imageData = mReq->getContent();
            FIMEMORY* memoryStream = FreeImage_OpenMemory(reinterpret_cast<BYTE*>(&imageData.at(0)),
                                                          static_cast<DWORD>(imageData.size()));
            imageFormat = FreeImage_GetFileTypeFromMemory(memoryStream, 0);
            FreeImage_CloseMemory(memoryStream);
        }

        if (imageFormat == FIF_UNKNOWN) {
            setError("The file \"" + Utils::FileSystem::getFileName(mSavePath) +
                     "\" returned by the scraper seems to be invalid as it's less than " +
                     "350 bytes in size");
            return;
        }
    }

    // Remove any existing media file before attempting to write a new one.
    // This avoids the problem where there's already a file for this media type
    // with a different format/extension (e.g. game.jpg and we're going to write
    // game.png) which would lead to two media files for this game.
    if (mExistingMediaFile != "")
        Utils::FileSystem::removeFile(mExistingMediaFile);

    // If the media directory does not exist, something is wrong, possibly permission
    // problems or the MediaDirectory setting points to a file instead of a directory.
    if (!Utils::FileSystem::isDirectory(Utils::FileSystem::getParent(mSavePath))) {
        setError("Media directory does not exist and can't be created. Permission problems?");
        LOG(LogError) << "Couldn't create media directory: \""
                      << Utils::FileSystem::getParent(mSavePath) << "\"";
        return;
    }

#if defined(_WIN64)
    std::ofstream stream(Utils::String::stringToWideString(mSavePath).c_str(),
                         std::ios_base::out | std::ios_base::binary);
#else
    std::ofstream stream(mSavePath, std::ios_base::out | std::ios_base::binary);
#endif
    if (!stream || stream.bad()) {
        setError("Failed to open path for writing media file.\nPermission error?");
        return;
    }

    const std::string& content = mReq->getContent();
    stream.write(content.data(), content.length());
    stream.close();
    if (stream.bad()) {
        setError("Failed to save media file.\nDisk full?");
        return;
    }

    // Resize it.
    if (mResizeFile) {
        if (!resizeImage(mSavePath, mMediaType)) {
            setError("Error saving resized image.\nOut of memory? Disk full?");
            return;
        }
    }

    // If this media file was successfully saved, update savedNewMedia in ScraperSearchResult.
    *mSavedNewMediaPtr = true;

    setStatus(ASYNC_DONE);
}
|
|
|
|
|
2021-05-24 19:34:08 +00:00
|
|
|
bool resizeImage(const std::string& path, const std::string& mediaType)
|
2014-06-25 16:29:58 +00:00
|
|
|
{
|
2021-05-24 19:34:08 +00:00
|
|
|
float maxWidth = 0.0f;
|
|
|
|
float maxHeight = 0.0f;
|
|
|
|
|
|
|
|
if (mediaType == "marquees") {
|
|
|
|
// We don't really need huge marquees.
|
|
|
|
maxWidth = 1000.0f;
|
|
|
|
maxHeight = 600.0f;
|
|
|
|
}
|
|
|
|
else {
|
|
|
|
maxWidth = 2560.0f;
|
|
|
|
maxHeight = 1440.0f;
|
|
|
|
}
|
2020-06-21 12:25:28 +00:00
|
|
|
|
|
|
|
FREE_IMAGE_FORMAT format = FIF_UNKNOWN;
|
2020-06-23 18:07:00 +00:00
|
|
|
FIBITMAP* image = nullptr;
|
2020-06-21 12:25:28 +00:00
|
|
|
|
2021-05-24 19:34:08 +00:00
|
|
|
// Detect the file format.
|
2021-07-07 18:03:42 +00:00
|
|
|
|
|
|
|
#if defined(_WIN64)
|
2021-06-07 22:06:22 +00:00
|
|
|
format = FreeImage_GetFileTypeU(Utils::String::stringToWideString(path).c_str(), 0);
|
|
|
|
if (format == FIF_UNKNOWN)
|
|
|
|
format = FreeImage_GetFIFFromFilenameU(Utils::String::stringToWideString(path).c_str());
|
2021-07-07 18:03:42 +00:00
|
|
|
#else
|
2020-06-21 12:25:28 +00:00
|
|
|
format = FreeImage_GetFileType(path.c_str(), 0);
|
|
|
|
if (format == FIF_UNKNOWN)
|
|
|
|
format = FreeImage_GetFIFFromFilename(path.c_str());
|
2021-07-07 18:03:42 +00:00
|
|
|
#endif
|
2020-06-21 12:25:28 +00:00
|
|
|
if (format == FIF_UNKNOWN) {
|
2020-07-26 21:30:45 +00:00
|
|
|
LOG(LogError) << "Could not detect filetype for image \"" << path << "\"!";
|
2020-06-21 12:25:28 +00:00
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
2021-05-24 19:34:08 +00:00
|
|
|
// Make sure we can read this format, and if so, then load it.
|
2020-06-21 12:25:28 +00:00
|
|
|
if (FreeImage_FIFSupportsReading(format)) {
|
2021-07-07 18:03:42 +00:00
|
|
|
#if defined(_WIN64)
|
2021-06-07 22:06:22 +00:00
|
|
|
image = FreeImage_LoadU(format, Utils::String::stringToWideString(path).c_str());
|
2021-07-07 18:03:42 +00:00
|
|
|
#else
|
2020-06-21 12:25:28 +00:00
|
|
|
image = FreeImage_Load(format, path.c_str());
|
2021-07-07 18:03:42 +00:00
|
|
|
#endif
|
2020-06-21 12:25:28 +00:00
|
|
|
}
|
|
|
|
else {
|
2021-01-26 16:28:54 +00:00
|
|
|
LOG(LogError) << "File format not supported for image \"" << path << "\"";
|
2020-06-21 12:25:28 +00:00
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
2020-11-17 22:06:54 +00:00
|
|
|
float width = static_cast<float>(FreeImage_GetWidth(image));
|
|
|
|
float height = static_cast<float>(FreeImage_GetHeight(image));
|
2020-06-21 12:25:28 +00:00
|
|
|
|
2021-05-24 19:34:08 +00:00
|
|
|
// If the image is smaller than (or the same size as) maxWidth and maxHeight, then don't
|
|
|
|
// do any scaling. It doesn't make sense to upscale the image and waste disk space.
|
|
|
|
if (maxWidth >= width && maxHeight >= height) {
|
2021-07-07 18:03:42 +00:00
|
|
|
LOG(LogDebug) << "Scraper::resizeImage(): Saving image \"" << path
|
|
|
|
<< "\" at its original resolution " << width << "x" << height;
|
2021-03-15 19:11:01 +00:00
|
|
|
FreeImage_Unload(image);
|
2020-06-21 12:25:28 +00:00
|
|
|
return true;
|
2021-03-15 19:11:01 +00:00
|
|
|
}
|
2020-06-21 12:25:28 +00:00
|
|
|
|
2021-05-24 19:34:08 +00:00
|
|
|
float scaleFactor = 0.0f;
|
2020-06-21 12:25:28 +00:00
|
|
|
|
2021-05-24 19:34:08 +00:00
|
|
|
// Calculate how much we should scale.
|
|
|
|
if (width > maxWidth) {
|
|
|
|
scaleFactor = maxWidth / width;
|
|
|
|
if (height * scaleFactor > maxHeight)
|
|
|
|
scaleFactor = maxHeight / height;
|
|
|
|
}
|
|
|
|
else {
|
|
|
|
scaleFactor = maxHeight / height;
|
|
|
|
}
|
|
|
|
|
|
|
|
maxWidth = floorf(width * scaleFactor);
|
|
|
|
maxHeight = floorf(height * scaleFactor);
|
|
|
|
|
|
|
|
// We use Lanczos3 which is the highest quality resampling method available in FreeImage.
|
|
|
|
FIBITMAP* imageRescaled = FreeImage_Rescale(image, static_cast<int>(maxWidth),
|
2021-07-07 18:03:42 +00:00
|
|
|
static_cast<int>(maxHeight), FILTER_LANCZOS3);
|
2020-06-21 12:25:28 +00:00
|
|
|
FreeImage_Unload(image);
|
|
|
|
|
2020-06-23 18:07:00 +00:00
|
|
|
if (imageRescaled == nullptr) {
|
2021-05-24 19:34:08 +00:00
|
|
|
LOG(LogError) << "Couldn't resize image, not enough memory or invalid bit depth?";
|
2020-06-21 12:25:28 +00:00
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
2021-07-07 18:03:42 +00:00
|
|
|
#if defined(_WIN64)
|
2021-06-07 22:06:22 +00:00
|
|
|
bool saved = (FreeImage_SaveU(format, imageRescaled,
|
2021-07-07 18:03:42 +00:00
|
|
|
Utils::String::stringToWideString(path).c_str()) != 0);
|
|
|
|
#else
|
2020-06-21 12:25:28 +00:00
|
|
|
bool saved = (FreeImage_Save(format, imageRescaled, path.c_str()) != 0);
|
2021-07-07 18:03:42 +00:00
|
|
|
#endif
|
2020-06-21 12:25:28 +00:00
|
|
|
FreeImage_Unload(imageRescaled);
|
|
|
|
|
2020-06-25 17:52:38 +00:00
|
|
|
if (!saved) {
|
2021-01-26 16:28:54 +00:00
|
|
|
LOG(LogError) << "Failed to save resized image";
|
2020-06-25 17:52:38 +00:00
|
|
|
}
|
2021-05-24 19:34:08 +00:00
|
|
|
else {
|
|
|
|
LOG(LogDebug) << "Scraper::resizeImage(): Downscaled image \"" << path << "\" from "
|
2021-07-07 18:03:42 +00:00
|
|
|
<< width << "x" << height << " to " << maxWidth << "x" << maxHeight;
|
2021-05-24 19:34:08 +00:00
|
|
|
}
|
2020-06-21 12:25:28 +00:00
|
|
|
|
|
|
|
return saved;
|
2014-06-25 16:29:58 +00:00
|
|
|
}
|
|
|
|
|
2020-05-26 16:34:33 +00:00
|
|
|
std::string getSaveAsPath(const ScraperSearchParams& params,
|
2021-07-07 18:03:42 +00:00
|
|
|
const std::string& filetypeSubdirectory,
|
|
|
|
const std::string& extension)
|
2014-06-25 16:29:58 +00:00
|
|
|
{
|
2020-06-21 12:25:28 +00:00
|
|
|
const std::string systemsubdirectory = params.system->getName();
|
|
|
|
const std::string name = Utils::FileSystem::getStem(params.game->getPath());
|
2021-01-31 18:53:55 +00:00
|
|
|
std::string subFolders;
|
|
|
|
|
|
|
|
// Extract possible subfolders from the path.
|
|
|
|
if (params.system->getSystemEnvData()->mStartPath != "")
|
|
|
|
subFolders = Utils::String::replace(Utils::FileSystem::getParent(params.game->getPath()),
|
2021-07-07 18:03:42 +00:00
|
|
|
params.system->getSystemEnvData()->mStartPath, "");
|
2014-06-25 16:29:58 +00:00
|
|
|
|
2020-06-21 12:25:28 +00:00
|
|
|
std::string path = FileData::getMediaDirectory();
|
2014-06-25 16:29:58 +00:00
|
|
|
|
2020-06-21 12:25:28 +00:00
|
|
|
if (!Utils::FileSystem::exists(path))
|
|
|
|
Utils::FileSystem::createDirectory(path);
|
2014-06-25 16:29:58 +00:00
|
|
|
|
2021-01-31 18:53:55 +00:00
|
|
|
path += systemsubdirectory + "/" + filetypeSubdirectory + subFolders + "/";
|
2014-06-25 16:29:58 +00:00
|
|
|
|
2020-06-21 12:25:28 +00:00
|
|
|
if (!Utils::FileSystem::exists(path))
|
|
|
|
Utils::FileSystem::createDirectory(path);
|
2014-06-25 16:29:58 +00:00
|
|
|
|
2020-06-21 12:25:28 +00:00
|
|
|
path += name + extension;
|
|
|
|
return path;
|
2014-06-25 16:29:58 +00:00
|
|
|
}
|