2020-09-21 17:17:34 +00:00
|
|
|
// SPDX-License-Identifier: MIT
|
2020-05-26 16:34:33 +00:00
|
|
|
//
|
2020-09-21 17:17:34 +00:00
|
|
|
// EmulationStation Desktop Edition
|
2020-06-21 12:25:28 +00:00
|
|
|
// Scraper.cpp
|
2020-05-26 16:34:33 +00:00
|
|
|
//
|
2020-06-21 12:25:28 +00:00
|
|
|
// Main scraper logic.
|
|
|
|
// Called from GuiScraperSearch.
|
|
|
|
// Calls either GamesDBJSONScraper or ScreenScraper.
|
2020-05-26 16:34:33 +00:00
|
|
|
//
|
|
|
|
|
2014-06-25 16:29:58 +00:00
|
|
|
#include "scrapers/Scraper.h"
|
2017-11-01 22:21:10 +00:00
|
|
|
|
2020-07-10 16:32:23 +00:00
|
|
|
#include "utils/StringUtil.h"
|
2017-11-01 22:21:10 +00:00
|
|
|
#include "FileData.h"
|
2019-02-08 02:08:11 +00:00
|
|
|
#include "GamesDBJSONScraper.h"
|
2014-06-25 16:29:58 +00:00
|
|
|
#include "Log.h"
|
2020-07-13 18:10:09 +00:00
|
|
|
#include "ScreenScraper.h"
|
2014-06-25 16:29:58 +00:00
|
|
|
#include "Settings.h"
|
2017-11-01 22:21:10 +00:00
|
|
|
#include "SystemData.h"
|
2020-07-13 18:10:09 +00:00
|
|
|
|
2014-06-25 16:29:58 +00:00
|
|
|
#include <FreeImage.h>
|
2017-11-01 22:21:10 +00:00
|
|
|
#include <fstream>
|
2014-06-25 16:29:58 +00:00
|
|
|
|
2017-11-03 00:33:08 +00:00
|
|
|
const std::map<std::string, generate_scraper_requests_func> scraper_request_funcs {
|
2020-07-27 10:11:30 +00:00
|
|
|
{ "thegamesdb", &thegamesdb_generate_json_scraper_requests },
|
|
|
|
{ "screenscraper", &screenscraper_generate_scraper_requests }
|
2017-11-03 00:33:08 +00:00
|
|
|
};
|
2014-06-25 16:29:58 +00:00
|
|
|
|
|
|
|
std::unique_ptr<ScraperSearchHandle> startScraperSearch(const ScraperSearchParams& params)
|
|
|
|
{
|
2020-06-21 12:25:28 +00:00
|
|
|
const std::string& name = Settings::getInstance()->getString("Scraper");
|
|
|
|
std::unique_ptr<ScraperSearchHandle> handle(new ScraperSearchHandle());
|
2019-01-24 18:00:19 +00:00
|
|
|
|
2020-06-21 12:25:28 +00:00
|
|
|
// Check if the scraper in the settings still exists as a registered scraping source.
|
|
|
|
if (scraper_request_funcs.find(name) == scraper_request_funcs.end())
|
2020-07-26 21:30:45 +00:00
|
|
|
LOG(LogWarning) << "Configured scraper (" << name << ") unavailable, scraping aborted.";
|
2020-06-21 12:25:28 +00:00
|
|
|
else
|
|
|
|
scraper_request_funcs.at(name)(params, handle->mRequestQueue, handle->mResults);
|
2019-01-24 18:00:19 +00:00
|
|
|
|
2020-06-21 12:25:28 +00:00
|
|
|
return handle;
|
2014-06-25 16:29:58 +00:00
|
|
|
}
|
|
|
|
|
2020-06-06 11:10:33 +00:00
|
|
|
std::unique_ptr<ScraperSearchHandle> startMediaURLsFetch(const std::string& gameIDs)
|
|
|
|
{
|
2020-06-21 12:25:28 +00:00
|
|
|
const std::string& name = Settings::getInstance()->getString("Scraper");
|
|
|
|
std::unique_ptr<ScraperSearchHandle> handle(new ScraperSearchHandle());
|
|
|
|
|
|
|
|
ScraperSearchParams params;
|
|
|
|
// Check if the scraper in the settings still exists as a registered scraping source.
|
2020-07-13 18:10:09 +00:00
|
|
|
if (scraper_request_funcs.find(name) == scraper_request_funcs.end()) {
|
2020-07-26 21:30:45 +00:00
|
|
|
LOG(LogWarning) << "Configured scraper (" << name << ") unavailable, scraping aborted.";
|
2020-07-13 18:10:09 +00:00
|
|
|
}
|
|
|
|
else {
|
2020-06-21 12:25:28 +00:00
|
|
|
// Specifically use the TheGamesDB function as this type of request
|
|
|
|
// will never occur for ScreenScraper.
|
2020-07-13 18:10:09 +00:00
|
|
|
thegamesdb_generate_json_scraper_requests(gameIDs, handle->mRequestQueue, handle->mResults);
|
|
|
|
}
|
2020-06-21 12:25:28 +00:00
|
|
|
|
|
|
|
return handle;
|
2020-06-06 11:10:33 +00:00
|
|
|
}
|
|
|
|
|
2014-06-25 16:29:58 +00:00
|
|
|
std::vector<std::string> getScraperList()
|
|
|
|
{
|
2020-06-21 12:25:28 +00:00
|
|
|
std::vector<std::string> list;
|
|
|
|
for (auto it = scraper_request_funcs.cbegin(); it != scraper_request_funcs.cend(); it++)
|
|
|
|
list.push_back(it->first);
|
2014-06-25 16:29:58 +00:00
|
|
|
|
2020-06-21 12:25:28 +00:00
|
|
|
return list;
|
2014-06-25 16:29:58 +00:00
|
|
|
}
|
|
|
|
|
2019-01-24 18:00:19 +00:00
|
|
|
bool isValidConfiguredScraper()
|
|
|
|
{
|
2020-06-21 12:25:28 +00:00
|
|
|
const std::string& name = Settings::getInstance()->getString("Scraper");
|
|
|
|
return scraper_request_funcs.find(name) != scraper_request_funcs.end();
|
2019-01-24 18:00:19 +00:00
|
|
|
}
|
|
|
|
|
2020-05-26 16:34:33 +00:00
|
|
|
// ScraperSearchHandle.
|
2014-06-25 16:29:58 +00:00
|
|
|
ScraperSearchHandle::ScraperSearchHandle()
{
    // A newly created search handle is always pending until update() completes it.
    setStatus(ASYNC_IN_PROGRESS);
}
|
|
|
|
|
|
|
|
// Poll the front request of the queue and propagate its state to this handle:
// errors abort the whole search, completed requests are popped, and the handle
// transitions to ASYNC_DONE once the queue has drained without errors.
// NOTE(review): this function only polls req.status(); the requests appear to be
// driven (updated) elsewhere — confirm against the caller (GuiScraperSearch).
void ScraperSearchHandle::update()
{
    if (mStatus == ASYNC_DONE)
        return;

    if (!mRequestQueue.empty()) {
        // A request can add more requests to the queue while running,
        // so be careful with references into the queue.
        auto& req = *(mRequestQueue.front());
        AsyncHandleStatus status = req.status();

        if (status == ASYNC_ERROR) {
            // Propagate error.
            setError(req.getStatusString());

            // Empty our queue.
            while (!mRequestQueue.empty())
                mRequestQueue.pop();

            return;
        }

        // Finished this one, see if we have any more.
        if (status == ASYNC_DONE)
            mRequestQueue.pop();

        // Status == ASYNC_IN_PROGRESS.
    }

    // Check if we finished without any errors and if so set the status flag accordingly.
    if (mRequestQueue.empty() && mStatus != ASYNC_ERROR) {
        setStatus(ASYNC_DONE);
        return;
    }
}
|
|
|
|
|
2020-05-26 16:34:33 +00:00
|
|
|
// ScraperRequest.
|
|
|
|
// Base class for scraper requests; resultsWrite is the caller-owned vector
// that completed requests append their search results to.
ScraperRequest::ScraperRequest(std::vector<ScraperSearchResult>& resultsWrite)
    : mResults(resultsWrite)
{
}
|
|
|
|
|
2020-05-26 16:34:33 +00:00
|
|
|
// ScraperHttpRequest.
|
2020-06-06 11:10:33 +00:00
|
|
|
// Kick off the HTTP request for the supplied URL immediately; progress is
// then tracked via update().
ScraperHttpRequest::ScraperHttpRequest(std::vector<ScraperSearchResult>& resultsWrite,
        const std::string& url) : ScraperRequest(resultsWrite)
{
    setStatus(ASYNC_IN_PROGRESS);
    mReq = std::unique_ptr<HttpReq>(new HttpReq(url));
}
|
|
|
|
|
|
|
|
// Poll the underlying HTTP request; on success hand the response to the
// scraper-specific process() implementation, otherwise propagate the error.
void ScraperHttpRequest::update()
{
    HttpReq::Status status = mReq->status();
    if (status == HttpReq::REQ_SUCCESS) {
        // If process() has an error, status will be changed to ASYNC_ERROR.
        // (Status is set to done first so process() can override it on failure.)
        setStatus(ASYNC_DONE);
        process(mReq, mResults);
        return;
    }

    // Not ready yet.
    if (status == HttpReq::REQ_IN_PROGRESS)
        return;

    // Everything else is some sort of error.
    LOG(LogError) << "ScraperHttpRequest network error (status: " << status << ") - "
            << mReq->getErrorMsg();
    setError("Network error: " + mReq->getErrorMsg());
}
|
|
|
|
|
2020-06-06 11:10:33 +00:00
|
|
|
// Download and write the media files to disk.
|
2020-05-26 16:34:33 +00:00
|
|
|
std::unique_ptr<MDResolveHandle> resolveMetaDataAssets(const ScraperSearchResult& result,
        const ScraperSearchParams& search)
{
    // All the actual work takes place in the MDResolveHandle constructor and in
    // its update() function, which is polled by the caller until completion.
    return std::unique_ptr<MDResolveHandle>(new MDResolveHandle(result, search));
}
|
|
|
|
|
2020-05-26 16:34:33 +00:00
|
|
|
// Collect the media files to fetch for this game (based on the Scrape* settings and
// on which URLs the scraper returned), then for each one either save it directly to
// disk if it's already cached as the thumbnail image, or queue an asynchronous
// download. Progress is then tracked by the caller via update().
MDResolveHandle::MDResolveHandle(const ScraperSearchResult& result,
        const ScraperSearchParams& search) : mResult(result)
{
    struct mediaFileInfoStruct {
        std::string fileURL;
        std::string fileFormat;
        std::string subDirectory;
        std::string existingMediaFile;
        bool resizeFile;
    };

    std::vector<struct mediaFileInfoStruct> scrapeFiles;

    mResult.savedNewMedia = false;

    // Helper that replaces the previous five copy-pasted field-by-field blocks.
    auto addScrapeFile = [&scrapeFiles](const std::string& fileURL,
            const std::string& fileFormat, const std::string& subDirectory,
            const std::string& existingMediaFile, bool resizeFile) {
        mediaFileInfoStruct mediaFileInfo;
        mediaFileInfo.fileURL = fileURL;
        mediaFileInfo.fileFormat = fileFormat;
        mediaFileInfo.subDirectory = subDirectory;
        mediaFileInfo.existingMediaFile = existingMediaFile;
        mediaFileInfo.resizeFile = resizeFile;
        scrapeFiles.push_back(mediaFileInfo);
    };

    // Videos are the only media type that is not resized after download.
    if (Settings::getInstance()->getBool("Scrape3DBoxes") && result.box3dUrl != "")
        addScrapeFile(result.box3dUrl, result.box3dFormat, "3dboxes",
                search.game->get3DBoxPath(), true);

    if (Settings::getInstance()->getBool("ScrapeCovers") && result.coverUrl != "")
        addScrapeFile(result.coverUrl, result.coverFormat, "covers",
                search.game->getCoverPath(), true);

    if (Settings::getInstance()->getBool("ScrapeMarquees") && result.marqueeUrl != "")
        addScrapeFile(result.marqueeUrl, result.marqueeFormat, "marquees",
                search.game->getMarqueePath(), true);

    if (Settings::getInstance()->getBool("ScrapeScreenshots") && result.screenshotUrl != "")
        addScrapeFile(result.screenshotUrl, result.screenshotFormat, "screenshots",
                search.game->getScreenshotPath(), true);

    if (Settings::getInstance()->getBool("ScrapeVideos") && result.videoUrl != "")
        addScrapeFile(result.videoUrl, result.videoFormat, "videos",
                search.game->getVideoPath(), false);

    for (auto it = scrapeFiles.cbegin(); it != scrapeFiles.cend(); it++) {

        std::string ext;

        // If we have a file extension returned by the scraper, then use it.
        // Otherwise, try to guess it by the name of the URL, which points to a media file.
        if (!it->fileFormat.empty()) {
            ext = it->fileFormat;
        }
        else {
            size_t dot = it->fileURL.find_last_of('.');

            if (dot != std::string::npos)
                ext = it->fileURL.substr(dot, std::string::npos);
        }

        std::string filePath = getSaveAsPath(search, it->subDirectory, ext);

        // If there is an existing media file on disk and the setting to overwrite data
        // has been set to no, then don't proceed with downloading or saving a new file.
        if (it->existingMediaFile != "" &&
                !Settings::getInstance()->getBool("ScraperOverwriteData"))
            continue;

        // If the image is cached already as the thumbnail, then we don't need
        // to download it again, in this case just save it to disk and resize it.
        if (mResult.thumbnailImageUrl == it->fileURL &&
                mResult.thumbnailImageData.size() > 0) {

            // This is just a temporary workaround to avoid saving media files to disk that
            // are actually just containing error messages from the scraper service. The
            // proper solution is to implement file checksum checks to determine if the
            // server response contains valid media. The problem with this temporary
            // solution is of course that any tiny media files of less than 300 bytes
            // will not be saved to disk.
            if (Settings::getInstance()->getBool("ScraperHaltOnInvalidMedia") &&
                    mResult.thumbnailImageData.size() < 350) {
                setError("The file '" + Utils::FileSystem::getFileName(filePath) +
                        "' returned by the scraper seems to be invalid as it's less than " +
                        "350 bytes in size.");
                return;
            }

            // Remove any existing media file before attempting to write a new one.
            // This avoids the problem where there's already a file for this media type
            // with a different format/extension (e.g. game.jpg and we're going to write
            // game.png) which would lead to two media files for this game.
            if (it->existingMediaFile != "")
                Utils::FileSystem::removeFile(it->existingMediaFile);

            // If the media directory does not exist, something is wrong, possibly permission
            // problems or the MediaDirectory setting points to a file instead of a directory.
            if (!Utils::FileSystem::isDirectory(Utils::FileSystem::getParent(filePath))) {
                setError("Media directory does not exist and can't be created. "
                        "Permission problems?");
                return;
            }

#if defined(_WIN64)
            std::ofstream stream(Utils::String::stringToWideString(filePath).c_str(),
                    std::ios_base::out | std::ios_base::binary);
#else
            std::ofstream stream(filePath, std::ios_base::out | std::ios_base::binary);
#endif
            if (!stream || stream.bad()) {
                setError("Failed to open path for writing media file.\nPermission error?");
                return;
            }

            const std::string& content = mResult.thumbnailImageData;
            stream.write(content.data(), content.length());
            stream.close();
            if (stream.bad()) {
                setError("Failed to save media file.\nDisk full?");
                return;
            }

            // Resize it.
            if (it->resizeFile) {
                if (!resizeImage(filePath, Settings::getInstance()->getInt("ScraperResizeMaxWidth"),
                        Settings::getInstance()->getInt("ScraperResizeMaxHeight"))) {
                    setError("Error saving resized image.\nOut of memory? Disk full?");
                    return;
                }
            }

            mResult.savedNewMedia = true;
        }
        // If it's not cached, then initiate the download.
        else {
            mFuncs.push_back(ResolvePair(downloadMediaAsync(it->fileURL, filePath,
                    it->existingMediaFile, it->resizeFile, mResult.savedNewMedia),
                    [this, filePath] {}));
        }
    }
}
|
|
|
|
|
|
|
|
void MDResolveHandle::update()
|
|
|
|
{
|
2020-06-21 12:25:28 +00:00
|
|
|
if (mStatus == ASYNC_DONE || mStatus == ASYNC_ERROR)
|
|
|
|
return;
|
|
|
|
|
|
|
|
auto it = mFuncs.cbegin();
|
|
|
|
while (it != mFuncs.cend()) {
|
|
|
|
|
|
|
|
if (it->first->status() == ASYNC_ERROR) {
|
|
|
|
setError(it->first->getStatusString());
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
else if (it->first->status() == ASYNC_DONE) {
|
|
|
|
it->second();
|
|
|
|
it = mFuncs.erase(it);
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
it++;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (mFuncs.empty())
|
|
|
|
setStatus(ASYNC_DONE);
|
2014-06-25 16:29:58 +00:00
|
|
|
}
|
|
|
|
|
2020-08-05 20:38:44 +00:00
|
|
|
std::unique_ptr<MediaDownloadHandle> downloadMediaAsync(
|
|
|
|
const std::string& url,
|
|
|
|
const std::string& saveAs,
|
|
|
|
const std::string& existingMediaPath,
|
|
|
|
const bool resizeFile,
|
|
|
|
bool& savedNewMedia)
|
2014-06-25 16:29:58 +00:00
|
|
|
{
|
2020-08-05 20:38:44 +00:00
|
|
|
return std::unique_ptr<MediaDownloadHandle>(new MediaDownloadHandle(
|
2020-06-21 12:25:28 +00:00
|
|
|
url,
|
|
|
|
saveAs,
|
2020-08-05 20:38:44 +00:00
|
|
|
existingMediaPath,
|
|
|
|
resizeFile,
|
|
|
|
savedNewMedia,
|
2020-07-09 17:24:20 +00:00
|
|
|
Settings::getInstance()->getInt("ScraperResizeMaxWidth"),
|
|
|
|
Settings::getInstance()->getInt("ScraperResizeMaxHeight")));
|
2014-06-25 16:29:58 +00:00
|
|
|
}
|
|
|
|
|
2020-08-05 20:38:44 +00:00
|
|
|
// Start the HTTP download for a single media file; update() then saves and
// optionally resizes the file once the transfer completes.
MediaDownloadHandle::MediaDownloadHandle(
        const std::string& url,
        const std::string& path,
        const std::string& existingMediaPath,
        const bool resizeFile,
        bool& savedNewMedia,
        int maxWidth,
        int maxHeight)
        : mSavePath(path),
        mExistingMediaFile(existingMediaPath),
        mResizeFile(resizeFile),
        mMaxWidth(maxWidth),
        mMaxHeight(maxHeight),
        mReq(new HttpReq(url))
{
    // NOTE(review): stores a raw pointer to the caller's bool (presumably
    // ScraperSearchResult::savedNewMedia) — the referenced object must outlive
    // this handle; confirm against the caller.
    mSavedNewMediaPtr = &savedNewMedia;
}
|
|
|
|
|
2020-08-05 20:38:44 +00:00
|
|
|
// Poll the download; once the transfer has succeeded, validate the response,
// write it to the media directory (removing any file of a different extension
// first), optionally resize it, and flag that new media was saved.
void MediaDownloadHandle::update()
{
    if (mReq->status() == HttpReq::REQ_IN_PROGRESS)
        return;

    if (mReq->status() != HttpReq::REQ_SUCCESS) {
        std::stringstream ss;
        ss << "Network error: " << mReq->getErrorMsg();
        setError(ss.str());
        return;
    }

    // This seems to take care of a strange race condition where the media saving and
    // resizing would sometimes take place twice.
    if (mStatus == ASYNC_DONE)
        return;

    // Download is done, save it to disk.

    // This is just a temporary workaround to avoid saving media files to disk that
    // are actually just containing error messages from the scraper service. The
    // proper solution is to implement file checksum checks to determine if the
    // server response contains valid media. The problem with this temporary
    // solution is of course that any tiny media files of less than 300 bytes
    // will not be saved to disk.
    if (Settings::getInstance()->getBool("ScraperHaltOnInvalidMedia") &&
            mReq->getContent().size() < 350) {
        setError("The file '" + Utils::FileSystem::getFileName(mSavePath) + "' returned by the " +
                "scraper seems to be invalid as it's less than 350 bytes in size.");
        return;
    }

    // Remove any existing media file before attempting to write a new one.
    // This avoids the problem where there's already a file for this media type
    // with a different format/extension (e.g. game.jpg and we're going to write
    // game.png) which would lead to two media files for this game.
    if (mExistingMediaFile != "")
        Utils::FileSystem::removeFile(mExistingMediaFile);

    // If the media directory does not exist, something is wrong, possibly permission
    // problems or the MediaDirectory setting points to a file instead of a directory.
    if (!Utils::FileSystem::isDirectory(Utils::FileSystem::getParent(mSavePath))) {
        setError("Media directory does not exist and can't be created. Permission problems?");
        return;
    }

#if defined(_WIN64)
    // Windows needs a wide-character path for non-ASCII filenames.
    std::ofstream stream(Utils::String::stringToWideString(mSavePath).c_str(),
            std::ios_base::out | std::ios_base::binary);
#else
    std::ofstream stream(mSavePath, std::ios_base::out | std::ios_base::binary);
#endif
    if (!stream || stream.bad()) {
        setError("Failed to open path for writing media file.\nPermission error?");
        return;
    }

    const std::string& content = mReq->getContent();
    stream.write(content.data(), content.length());
    stream.close();
    if (stream.bad()) {
        setError("Failed to save media file.\nDisk full?");
        return;
    }

    // Resize it.
    if (mResizeFile) {
        if (!resizeImage(mSavePath, mMaxWidth, mMaxHeight)) {
            setError("Error saving resized image.\nOut of memory? Disk full?");
            return;
        }
    }

    // If this media file was successfully saved, update savedNewMedia in ScraperSearchResult.
    *mSavedNewMediaPtr = true;

    setStatus(ASYNC_DONE);
}
|
|
|
|
|
2020-05-26 16:34:33 +00:00
|
|
|
// You can pass 0 for width or height to keep aspect ratio.
|
2014-06-25 16:29:58 +00:00
|
|
|
bool resizeImage(const std::string& path, int maxWidth, int maxHeight)
|
|
|
|
{
|
2020-06-21 12:25:28 +00:00
|
|
|
// Nothing to do.
|
|
|
|
if (maxWidth == 0 && maxHeight == 0)
|
|
|
|
return true;
|
|
|
|
|
|
|
|
FREE_IMAGE_FORMAT format = FIF_UNKNOWN;
|
2020-06-23 18:07:00 +00:00
|
|
|
FIBITMAP* image = nullptr;
|
2020-06-21 12:25:28 +00:00
|
|
|
|
|
|
|
// Detect the filetype.
|
|
|
|
format = FreeImage_GetFileType(path.c_str(), 0);
|
|
|
|
if (format == FIF_UNKNOWN)
|
|
|
|
format = FreeImage_GetFIFFromFilename(path.c_str());
|
|
|
|
if (format == FIF_UNKNOWN) {
|
2020-07-26 21:30:45 +00:00
|
|
|
LOG(LogError) << "Could not detect filetype for image \"" << path << "\"!";
|
2020-06-21 12:25:28 +00:00
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
|
|
|
// Make sure we can read this filetype first, then load it.
|
|
|
|
if (FreeImage_FIFSupportsReading(format)) {
|
|
|
|
image = FreeImage_Load(format, path.c_str());
|
|
|
|
}
|
|
|
|
else {
|
2020-07-26 21:30:45 +00:00
|
|
|
LOG(LogError) << "File format not supported for image \"" << path << "\"!";
|
2020-06-21 12:25:28 +00:00
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
2020-11-17 22:06:54 +00:00
|
|
|
float width = static_cast<float>(FreeImage_GetWidth(image));
|
|
|
|
float height = static_cast<float>(FreeImage_GetHeight(image));
|
2020-06-21 12:25:28 +00:00
|
|
|
|
|
|
|
// If the image is smaller than maxWidth or maxHeight, then don't do any
|
|
|
|
// scaling. It doesn't make sense to upscale the image and waste disk space.
|
|
|
|
if (maxWidth > width || maxHeight > height)
|
|
|
|
return true;
|
|
|
|
|
|
|
|
if (maxWidth == 0)
|
2020-11-17 22:06:54 +00:00
|
|
|
maxWidth = static_cast<int>((maxHeight / height) * width);
|
2020-06-21 12:25:28 +00:00
|
|
|
else if (maxHeight == 0)
|
2020-11-17 22:06:54 +00:00
|
|
|
maxHeight = static_cast<int>((maxWidth / width) * height);
|
2020-06-21 12:25:28 +00:00
|
|
|
|
|
|
|
FIBITMAP* imageRescaled = FreeImage_Rescale(image, maxWidth, maxHeight, FILTER_BILINEAR);
|
|
|
|
FreeImage_Unload(image);
|
|
|
|
|
2020-06-23 18:07:00 +00:00
|
|
|
if (imageRescaled == nullptr) {
|
2020-07-26 21:30:45 +00:00
|
|
|
LOG(LogError) << "Could not resize image. (Not enough memory? Invalid bitdepth?)";
|
2020-06-21 12:25:28 +00:00
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
|
|
|
bool saved = (FreeImage_Save(format, imageRescaled, path.c_str()) != 0);
|
|
|
|
FreeImage_Unload(imageRescaled);
|
|
|
|
|
2020-06-25 17:52:38 +00:00
|
|
|
if (!saved) {
|
2020-07-26 21:30:45 +00:00
|
|
|
LOG(LogError) << "Failed to save resized image.";
|
2020-06-25 17:52:38 +00:00
|
|
|
}
|
2020-06-21 12:25:28 +00:00
|
|
|
|
|
|
|
return saved;
|
2014-06-25 16:29:58 +00:00
|
|
|
}
|
|
|
|
|
2020-05-26 16:34:33 +00:00
|
|
|
std::string getSaveAsPath(const ScraperSearchParams& params,
|
2020-06-21 12:25:28 +00:00
|
|
|
const std::string& filetypeSubdirectory, const std::string& extension)
|
2014-06-25 16:29:58 +00:00
|
|
|
{
|
2020-06-21 12:25:28 +00:00
|
|
|
const std::string systemsubdirectory = params.system->getName();
|
|
|
|
const std::string name = Utils::FileSystem::getStem(params.game->getPath());
|
2014-06-25 16:29:58 +00:00
|
|
|
|
2020-06-21 12:25:28 +00:00
|
|
|
std::string path = FileData::getMediaDirectory();
|
2014-06-25 16:29:58 +00:00
|
|
|
|
2020-06-21 12:25:28 +00:00
|
|
|
if (!Utils::FileSystem::exists(path))
|
|
|
|
Utils::FileSystem::createDirectory(path);
|
2014-06-25 16:29:58 +00:00
|
|
|
|
2020-06-21 12:25:28 +00:00
|
|
|
path += systemsubdirectory + "/" + filetypeSubdirectory + "/";
|
2014-06-25 16:29:58 +00:00
|
|
|
|
2020-06-21 12:25:28 +00:00
|
|
|
if (!Utils::FileSystem::exists(path))
|
|
|
|
Utils::FileSystem::createDirectory(path);
|
2014-06-25 16:29:58 +00:00
|
|
|
|
2020-06-21 12:25:28 +00:00
|
|
|
path += name + extension;
|
|
|
|
return path;
|
2014-06-25 16:29:58 +00:00
|
|
|
}
|