Synchronous scraper image downloading.

The async code is there, just not hooked up to the UI yet.
Aloshi 2013-10-05 15:28:59 -05:00
parent a6dbaa2dea
commit 63d8908061
5 changed files with 172 additions and 7 deletions

HttpReq.cpp

@@ -43,12 +43,17 @@ HttpReq::HttpReq(const std::string& url)
     else if(url.substr(0, 8) == "https://")
         startpos = 8;
 
+    if(url.substr(startpos, 4) == "www.")
+        startpos += 4;
+
     size_t pathStart = url.find('/', startpos);
-    std::string server = url.substr(startpos, pathStart - startpos);
-    std::string path = url.substr(pathStart, std::string::npos);
+
+    std::string server, path;
+    if(pathStart == std::string::npos)
+    {
+        server = url;
+        path = "/";
+    }else{
+        server = url.substr(startpos, pathStart - startpos);
+        path = url.substr(pathStart, std::string::npos);
+    }
 
     start(server, path);
 }
@@ -57,8 +62,8 @@ HttpReq::~HttpReq()
 {
     mResolver.cancel();
     mSocket.close();
-    status(); //poll once
-    //while(status() == REQ_IN_PROGRESS); //otherwise you get really weird heap-allocation-related crashes
+    //status(); //poll once
+    while(status() == REQ_IN_PROGRESS); //otherwise you get really weird heap-allocation-related crashes
 }
 
 void HttpReq::start(const std::string& server, const std::string& path)
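
For reference, a minimal standalone sketch of the URL parsing after this change. The helper name splitUrl and the example URLs are illustrative and not part of the commit, and the "http://" branch (startpos = 7) is an assumption reconstructed from the context above the hunk:

#include <string>
#include <cassert>

// Mirrors the parsing added above, outside the HttpReq class, for illustration only.
static void splitUrl(const std::string& url, std::string& server, std::string& path)
{
    size_t startpos = 0;
    if(url.substr(0, 7) == "http://")        // assumed branch from context above the hunk
        startpos = 7;
    else if(url.substr(0, 8) == "https://")
        startpos = 8;
    if(url.substr(startpos, 4) == "www.")
        startpos += 4;

    size_t pathStart = url.find('/', startpos);
    if(pathStart == std::string::npos)
    {
        server = url;   // note: this branch keeps the whole URL as the server name, as in the diff
        path = "/";
    }else{
        server = url.substr(startpos, pathStart - startpos);
        path = url.substr(pathStart, std::string::npos);
    }
}

int main()
{
    std::string server, path;

    // A URL with no path component no longer produces an out-of-range substr; it now maps to path "/".
    splitUrl("http://www.example.com", server, path);
    assert(server == "http://www.example.com" && path == "/");

    splitUrl("http://www.example.com/api/game.php?id=1", server, path);
    assert(server == "example.com" && path == "/api/game.php?id=1");
    return 0;
}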

Settings.cpp

@@ -36,6 +36,8 @@ void Settings::setDefaults()
     mBoolMap["DISABLESOUNDS"] = false;
     mIntMap["DIMTIME"] = 30*1000;
+    mIntMap["ScraperResizeWidth"] = 450;
+    mIntMap["ScraperResizeHeight"] = 0;
 
     mIntMap["GameListSortIndex"] = 0;

GuiMetaDataEd.cpp

@@ -132,6 +132,12 @@ void GuiMetaDataEd::fetchDone(MetaDataList result)
             continue;
 
         const std::string key = mMetaDataDecl.at(i).key;
+
+        if(mMetaDataDecl.at(i).type == MD_IMAGE_PATH)
+        {
+            std::string url = result.get(key);
+            result.set(key, downloadImage(url, getSaveAsPath(mScraperParams.system->getName(), mScraperParams.game->getCleanName() + "-" + key, url)));
+        }
         mEditors.at(i)->setValue(result.get(key));
     }
 }
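
To illustrate what this MD_IMAGE_PATH branch produces: the system name, game name, and URL below are hypothetical, "<home>" stands in for getHomePath(), and the path layout follows getSaveAsPath() in Scraper.cpp below:

#include <iostream>
#include <string>

int main()
{
    std::string system = "snes";               // mScraperParams.system->getName()
    std::string game = "Super Example World";  // mScraperParams.game->getCleanName()
    std::string key = "image";                 // the MD_IMAGE_PATH metadata key
    std::string url = "http://example.com/boxart/sew.png";

    // getSaveAsPath(system, game + "-" + key, url) builds roughly this path:
    std::string ext = url.substr(url.find_last_of('.')); // ".png", taken from the URL
    std::string saveAs = std::string("<home>/.emulationstation/downloaded_images/")
        + system + "/" + game + "-" + key + ext;

    std::cout << saveAs << std::endl;
    // -> <home>/.emulationstation/downloaded_images/snes/Super Example World-image.png
    // downloadImage(url, saveAs) then blocks until the file is fetched, resizes it,
    // and result.set(key, ...) stores this local path in place of the remote URL.
    return 0;
}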

Scraper.cpp

@@ -1,5 +1,10 @@
 #include "Scraper.h"
 #include "../components/AsyncReqComponent.h"
+#include "../Log.h"
+#include "../Settings.h"
+#include <FreeImage.h>
+#include <boost/filesystem.hpp>
+#include <boost/regex.hpp>
 
 std::vector<MetaDataList> Scraper::getResults(ScraperSearchParams params)
 {
@@ -23,3 +28,132 @@ void Scraper::getResultsAsync(ScraperSearchParams params, Window* window, std::f
     window->pushGui(req);
 }
 
+std::string processFileDownload(std::shared_ptr<HttpReq> r, std::string saveAs)
+{
+    if(r->status() != HttpReq::REQ_SUCCESS)
+    {
+        LOG(LogError) << "Failed to download file - HttpReq error: " << r->getErrorMsg();
+        return "";
+    }
+
+    std::ofstream stream(saveAs, std::ios_base::out | std::ios_base::binary);
+    if(stream.fail())
+    {
+        LOG(LogError) << "Failed to open \"" << saveAs << "\" for writing downloaded file.";
+        return "";
+    }
+
+    std::string content = r->getContent();
+    stream.write(content.data(), content.length());
+    stream.close();
+
+    return saveAs;
+}
+
+//you can pass 0 for width or height to keep aspect ratio
+void resizeImage(const std::string& path, int maxWidth, int maxHeight)
+{
+    if(maxWidth == 0 && maxHeight == 0)
+        return;
+
+    FREE_IMAGE_FORMAT format = FIF_UNKNOWN;
+    FIBITMAP* image = NULL;
+
+    //detect the filetype
+    format = FreeImage_GetFileType(path.c_str(), 0);
+    if(format == FIF_UNKNOWN)
+        format = FreeImage_GetFIFFromFilename(path.c_str());
+    if(format == FIF_UNKNOWN)
+    {
+        LOG(LogError) << "Error - could not detect filetype for image \"" << path << "\"!";
+        return;
+    }
+
+    //make sure we can read this filetype first, then load it
+    if(FreeImage_FIFSupportsReading(format))
+    {
+        image = FreeImage_Load(format, path.c_str());
+    }else{
+        LOG(LogError) << "Error - file format reading not supported for image \"" << path << "\"!";
+        return;
+    }
+
+    float width = (float)FreeImage_GetWidth(image);
+    float height = (float)FreeImage_GetHeight(image);
+
+    if(maxWidth == 0)
+    {
+        maxWidth = (int)((maxHeight / height) * width);
+    }else if(maxHeight == 0)
+    {
+        maxHeight = (int)((maxWidth / width) * height);
+    }
+
+    FIBITMAP* imageRescaled = FreeImage_Rescale(image, maxWidth, maxHeight, FILTER_BILINEAR);
+    FreeImage_Unload(image);
+
+    if(imageRescaled == NULL)
+    {
+        LOG(LogError) << "Could not resize image! (not enough memory? invalid bitdepth?)";
+        return;
+    }
+
+    if(!FreeImage_Save(format, imageRescaled, path.c_str()))
+    {
+        LOG(LogError) << "Failed to save resized image!";
+    }
+
+    FreeImage_Unload(imageRescaled);
+}
+
+void downloadImageAsync(Window* window, const std::string& url, const std::string& saveAs, std::function<void(std::string)> returnFunc)
+{
+    std::shared_ptr<HttpReq> httpreq = std::make_shared<HttpReq>(url);
+    AsyncReqComponent* req = new AsyncReqComponent(window, httpreq,
+        [returnFunc, saveAs] (std::shared_ptr<HttpReq> r)
+    {
+        std::string file = processFileDownload(r, saveAs);
+        if(!file.empty())
+            resizeImage(file, Settings::getInstance()->getInt("ScraperResizeWidth"), Settings::getInstance()->getInt("ScraperResizeHeight"));
+        returnFunc(file);
+    }, NULL);
+
+    window->pushGui(req);
+}
+
+std::string downloadImage(const std::string& url, const std::string& saveAs)
+{
+    std::shared_ptr<HttpReq> httpreq = std::make_shared<HttpReq>(url);
+    while(httpreq->status() == HttpReq::REQ_IN_PROGRESS);
+
+    std::string file = processFileDownload(httpreq, saveAs);
+    if(!file.empty())
+        resizeImage(file, Settings::getInstance()->getInt("ScraperResizeWidth"), Settings::getInstance()->getInt("ScraperResizeHeight"));
+
+    return file;
+}
+
+std::string getSaveAsPath(const std::string& subdirectory, const std::string& name, const std::string& url)
+{
+    std::string path = getHomePath() + "/.emulationstation/downloaded_images/";
+
+    if(!boost::filesystem::exists(path))
+        boost::filesystem::create_directory(path);
+
+    path += subdirectory + "/";
+
+    if(!boost::filesystem::exists(path))
+        boost::filesystem::create_directory(path);
+
+    size_t dot = url.find_last_of('.');
+    std::string ext;
+    if(dot != std::string::npos)
+        ext = url.substr(dot, std::string::npos);
+
+    path += name + ext;
+    return path;
+}
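
Since the commit message notes the async path is not hooked up to the UI yet, here is a rough sketch of how a caller might use downloadImageAsync(). The function startBoxartDownload and the callback body are hypothetical and not part of this commit; only the downloadImageAsync signature comes from Scraper.h below:

#include "Scraper.h"

// Hypothetical caller; 'window', 'url' and 'saveAs' would come from the surrounding GUI code
// (e.g. something like GuiMetaDataEd), with saveAs typically built via getSaveAsPath().
void startBoxartDownload(Window* window, const std::string& url, const std::string& saveAs)
{
    downloadImageAsync(window, url, saveAs, [](std::string savedPath)
    {
        if(savedPath.empty())
            return; // download or resize failed; the path is "" as documented in Scraper.h

        // hand savedPath to the metadata editor / image component here
    });
}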

Scraper.h

@@ -30,3 +30,21 @@ private:
     virtual std::vector<MetaDataList> parseReq(ScraperSearchParams params, std::shared_ptr<HttpReq>) = 0;
 };
 
+//About the same as "~/.emulationstation/downloaded_images/[subdirectory]/[name].[url's extension]".
+//Will create the "downloaded_images" and "subdirectory" directories if they do not exist.
+std::string getSaveAsPath(const std::string& subdirectory, const std::string& name, const std::string& url);
+
+//Returns the path to the downloaded file (saveAs) on completion.
+//Returns an empty string if an error occurred.
+//Will resize according to Settings::getInt("ScraperResizeWidth") and Settings::getInt("ScraperResizeHeight").
+std::string downloadImage(const std::string& url, const std::string& saveAs);
+
+//Returns (via returnFunc) the path to the downloaded file (saveAs) on completion.
+//Returns an empty string if an error occurred.
+//Will resize according to Settings::getInt("ScraperResizeWidth") and Settings::getInt("ScraperResizeHeight").
+//Same as downloadImage, just async.
+void downloadImageAsync(Window* window, const std::string& url, const std::string& saveAs, std::function<void(std::string)> returnFunc);
+
+//You can pass 0 for maxWidth or maxHeight to automatically keep the aspect ratio.
+//Will overwrite the image at [path] with the new resized one.
+void resizeImage(const std::string& path, int maxWidth, int maxHeight);