diff --git a/src/HttpReq.cpp b/src/HttpReq.cpp
index e1ac90ec6..77f0a146d 100644
--- a/src/HttpReq.cpp
+++ b/src/HttpReq.cpp
@@ -43,12 +43,17 @@ HttpReq::HttpReq(const std::string& url)
 	else if(url.substr(0, 8) == "https://")
 		startpos = 8;
 
-	if(url.substr(startpos, 4) == "www.")
-		startpos += 4;
-
 	size_t pathStart = url.find('/', startpos);
-	std::string server = url.substr(startpos, pathStart - startpos);
-	std::string path = url.substr(pathStart, std::string::npos);
+
+	std::string server, path;
+	if(pathStart == std::string::npos)
+	{
+		server = url;
+		path = "/";
+	}else{
+		server = url.substr(startpos, pathStart - startpos);
+		path = url.substr(pathStart, std::string::npos);
+	}
 
 	start(server, path);
 }
@@ -57,8 +62,8 @@ HttpReq::~HttpReq()
 {
 	mResolver.cancel();
 	mSocket.close();
-	status(); //poll once
-	//while(status() == REQ_IN_PROGRESS); //otherwise you get really weird heap-allocation-related crashes
+	//status(); //poll once
+	while(status() == REQ_IN_PROGRESS); //otherwise you get really weird heap-allocation-related crashes
 }
 
 void HttpReq::start(const std::string& server, const std::string& path)
diff --git a/src/Settings.cpp b/src/Settings.cpp
index 78f1c96d8..169a640d4 100644
--- a/src/Settings.cpp
+++ b/src/Settings.cpp
@@ -36,6 +36,8 @@ void Settings::setDefaults()
 	mBoolMap["DISABLESOUNDS"] = false;
 
 	mIntMap["DIMTIME"] = 30*1000;
+	mIntMap["ScraperResizeWidth"] = 450;
+	mIntMap["ScraperResizeHeight"] = 0;
 
 	mIntMap["GameListSortIndex"] = 0;
 
diff --git a/src/components/GuiMetaDataEd.cpp b/src/components/GuiMetaDataEd.cpp
index bac78d6e9..ffab7c9ff 100644
--- a/src/components/GuiMetaDataEd.cpp
+++ b/src/components/GuiMetaDataEd.cpp
@@ -132,6 +132,12 @@ void GuiMetaDataEd::fetchDone(MetaDataList result)
 			continue;
 
 		const std::string key = mMetaDataDecl.at(i).key;
+		if(mMetaDataDecl.at(i).type == MD_IMAGE_PATH)
+		{
+			std::string url = result.get(key);
+			result.set(key, downloadImage(url, getSaveAsPath(mScraperParams.system->getName(), mScraperParams.game->getCleanName() + "-" + key, url)));
+		}
+
 		mEditors.at(i)->setValue(result.get(key));
 	}
 }
diff --git a/src/scrapers/Scraper.cpp b/src/scrapers/Scraper.cpp
index 1a8b26086..e6f7f76ab 100644
--- a/src/scrapers/Scraper.cpp
+++ b/src/scrapers/Scraper.cpp
@@ -1,5 +1,10 @@
 #include "Scraper.h"
 #include "../components/AsyncReqComponent.h"
+#include "../Log.h"
+#include "../Settings.h"
+#include <FreeImage.h>
+#include <boost/filesystem.hpp>
+#include <fstream>
 
 std::vector<MetaDataList> Scraper::getResults(ScraperSearchParams params)
 {
@@ -23,3 +28,132 @@ void Scraper::getResultsAsync(ScraperSearchParams params, Window* window, std::f
 
 	window->pushGui(req);
 }
+
+
+
+std::string processFileDownload(std::shared_ptr<HttpReq> r, std::string saveAs)
+{
+	if(r->status() != HttpReq::REQ_SUCCESS)
+	{
+		LOG(LogError) << "Failed to download file - HttpReq error: " << r->getErrorMsg();
+		return "";
+	}
+
+	std::ofstream stream(saveAs, std::ios_base::out | std::ios_base::binary);
+	if(stream.fail())
+	{
+		LOG(LogError) << "Failed to open \"" << saveAs << "\" for writing downloaded file.";
+		return "";
+	}
+
+	std::string content = r->getContent();
+	stream.write(content.data(), content.length());
+	stream.close();
+
+	return saveAs;
+}
+
+//you can pass 0 for width or height to keep aspect ratio
+void resizeImage(const std::string& path, int maxWidth, int maxHeight)
+{
+	if(maxWidth == 0 && maxHeight == 0)
+		return;
+
+	FREE_IMAGE_FORMAT format = FIF_UNKNOWN;
+	FIBITMAP* image = NULL;
+
+	//detect the filetype
+	format = FreeImage_GetFileType(path.c_str(), 0);
+	if(format == FIF_UNKNOWN)
+		format = FreeImage_GetFIFFromFilename(path.c_str());
+	if(format == FIF_UNKNOWN)
+	{
+		LOG(LogError) << "Error - could not detect filetype for image \"" << path << "\"!";
+		return;
+	}
+
+	//make sure we can read this filetype first, then load it
+	if(FreeImage_FIFSupportsReading(format))
+	{
+		image = FreeImage_Load(format, path.c_str());
+	}else{
+		LOG(LogError) << "Error - file format reading not supported for image \"" << path << "\"!";
+		return;
+	}
+
+	float width = (float)FreeImage_GetWidth(image);
+	float height = (float)FreeImage_GetHeight(image);
+
+	if(maxWidth == 0)
+	{
+		maxWidth = (int)((maxHeight / height) * width);
+	}else if(maxHeight == 0)
+	{
+		maxHeight = (int)((maxWidth / width) * height);
+	}
+
+	FIBITMAP* imageRescaled = FreeImage_Rescale(image, maxWidth, maxHeight, FILTER_BILINEAR);
+	FreeImage_Unload(image);
+
+	if(imageRescaled == NULL)
+	{
+		LOG(LogError) << "Could not resize image! (not enough memory? invalid bitdepth?)";
+		return;
+	}
+
+	if(!FreeImage_Save(format, imageRescaled, path.c_str()))
+	{
+		LOG(LogError) << "Failed to save resized image!";
+	}
+
+	FreeImage_Unload(imageRescaled);
+}
+
+void downloadImageAsync(Window* window, const std::string& url, const std::string& saveAs, std::function<void(std::string)> returnFunc)
+{
+	std::shared_ptr<HttpReq> httpreq = std::make_shared<HttpReq>(url);
+	AsyncReqComponent* req = new AsyncReqComponent(window, httpreq,
+		[returnFunc, saveAs] (std::shared_ptr<HttpReq> r)
+	{
+		std::string file = processFileDownload(r, saveAs);
+		if(!file.empty())
+			resizeImage(file, Settings::getInstance()->getInt("ScraperResizeWidth"), Settings::getInstance()->getInt("ScraperResizeHeight"));
+		returnFunc(file);
+	}, NULL);
+
+	window->pushGui(req);
+}
+
+std::string downloadImage(const std::string& url, const std::string& saveAs)
+{
+	std::shared_ptr<HttpReq> httpreq = std::make_shared<HttpReq>(url);
+	while(httpreq->status() == HttpReq::REQ_IN_PROGRESS);
+
+	std::string file = processFileDownload(httpreq, saveAs);
+
+	if(!file.empty())
+		resizeImage(file, Settings::getInstance()->getInt("ScraperResizeWidth"), Settings::getInstance()->getInt("ScraperResizeHeight"));
+
+	return file;
+}
+
+std::string getSaveAsPath(const std::string& subdirectory, const std::string& name, const std::string& url)
+{
+	std::string path = getHomePath() + "/.emulationstation/downloaded_images/";
+
+	if(!boost::filesystem::exists(path))
+		boost::filesystem::create_directory(path);
+
+	path += subdirectory + "/";
+
+	if(!boost::filesystem::exists(path))
+		boost::filesystem::create_directory(path);
+
+	size_t dot = url.find_last_of('.');
+	std::string ext;
+	if(dot != std::string::npos)
+		ext = url.substr(dot, std::string::npos);
+
+	path += name + ext;
+	return path;
+}
diff --git a/src/scrapers/Scraper.h b/src/scrapers/Scraper.h
index f5f9a8ed3..7d11af471 100644
--- a/src/scrapers/Scraper.h
+++ b/src/scrapers/Scraper.h
@@ -30,3 +30,21 @@ private:
 	virtual std::vector<MetaDataList> parseReq(ScraperSearchParams params, std::shared_ptr<HttpReq>) = 0;
 };
 
+//About the same as "~/.emulationstation/downloaded_images/[subdirectory]/[name].[url's extension]".
+//Will create the "downloaded_images" and "subdirectory" directories if they do not exist.
+std::string getSaveAsPath(const std::string& subdirectory, const std::string& name, const std::string& url);
+
+//Returns the path to the downloaded file (saveAs) on completion.
+//Returns empty string if an error occured.
+//Will resize according to Settings::getInt("ScraperResizeWidth") and Settings::getInt("ScraperResizeHeight").
+std::string downloadImage(const std::string& url, const std::string& saveAs);
+
+//Returns (via returnFunc) the path to the downloaded file (saveAs) on completion.
+//Returns empty string if an error occured.
+//Will resize according to Settings::getInt("ScraperResizeWidth") and Settings::getInt("ScraperResizeHeight").
+//Same as downloadImage, just async.
+void downloadImageAsync(Window* window, const std::string& url, const std::string& saveAs, std::function<void(std::string)> returnFunc);
+
+//You can pass 0 for maxWidth or maxHeight to automatically keep the aspect ratio.
+//Will overwrite the image at [path] with the new resized one.
+void resizeImage(const std::string& path, int maxWidth, int maxHeight);
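A minimal sketch of how the new helpers chain together, following the GuiMetaDataEd::fetchDone() hunk above. The saveBoxArt wrapper, the "image" metadata key, and the include path are illustrative assumptions; getSaveAsPath(), downloadImage(), and the ScraperSearchParams system/game accessors are taken from the patch itself:

#include "Scraper.h" //assumed include path, as if built alongside Scraper.cpp

//Hypothetical caller: turn a scraped image URL into a local, resized file and
//write the local path back into the metadata, like the per-key handling in
//GuiMetaDataEd::fetchDone().
void saveBoxArt(const ScraperSearchParams& params, MetaDataList& result)
{
	const std::string url = result.get("image"); //assumes "image" is an MD_IMAGE_PATH key

	//e.g. ~/.emulationstation/downloaded_images/<system>/<cleanname>-image.<url's extension>
	const std::string saveAs = getSaveAsPath(params.system->getName(),
		params.game->getCleanName() + "-image", url);

	//blocking download; resizes to ScraperResizeWidth x ScraperResizeHeight
	//(0 keeps aspect ratio) and returns "" on error
	const std::string localPath = downloadImage(url, saveAs);
	if(!localPath.empty())
		result.set("image", localPath);
}

For non-blocking use inside the UI, downloadImageAsync() takes the same url/saveAs pair plus a Window* and a completion callback, per the declarations added to Scraper.h.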