Mirror of https://github.com/RetroDECK/ES-DE.git (synced 2024-11-25 15:45:38 +00:00)
Added a download percentage indicator to the application updater together with some other minor improvements
Also cleaned up HttpReq in general and added a progress meter callback
commit f91a87251d
parent cd2181a8b5

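For orientation, the sketch below shows roughly how a caller is expected to use the extended HttpReq interface from this commit: construct the request with the new scraperRequest flag set to false for updater-style downloads, poll status() to drive the transfer, and derive a percentage from the new byte counters. The helper function is illustrative only (not part of the commit); the class name and member functions are taken from the diff that follows.

#include "HttpReq.h" // ES-DE core header extended by this commit.

#include <SDL2/SDL_timer.h>

#include <cmath>
#include <memory>
#include <string>

// Illustrative helper: download a file while tracking completion percentage.
inline int pollDownloadProgress(const std::string& url)
{
    // Passing 'false' selects the non-scraper behavior added by this commit
    // (30 second connection timeout, no transfer timeout).
    auto request {std::make_unique<HttpReq>(url, false)};
    int percentage {0};

    // status() processes any received data, so calling it also drives the transfer.
    while (request->status() == HttpReq::REQ_IN_PROGRESS) {
        SDL_Delay(5); // Don't eat all CPU cycles while waiting.
        const float downloaded {static_cast<float>(request->getDownloadedBytes())};
        const float total {static_cast<float>(request->getTotalBytes())};
        // The server is not guaranteed to report the total size, hence the checks.
        if (downloaded != 0.0f && total != 0.0f)
            percentage = static_cast<int>(std::round((downloaded / total) * 100.0f));
    }
    return percentage;
}

This mirrors what GuiApplicationUpdater::downloadPackage() does in the hunks below.
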
@@ -125,10 +125,11 @@ bool ApplicationUpdater::downloadFile()
     mMaxTime = mTimer + (MAX_DOWNLOAD_TIME * 1000);
 
     mStatus = ASYNC_IN_PROGRESS;
-    mRequest = std::unique_ptr<HttpReq>(std::make_unique<HttpReq>(mUrl));
+    mRequest = std::unique_ptr<HttpReq>(std::make_unique<HttpReq>(mUrl, false));
 
     while (mTimer < mMaxTime || !mAbortDownload) {
-        SDL_Delay(10);
+        // Add a small delay so we don't eat all CPU cycles checking for status updates.
+        SDL_Delay(5);
         try {
             update();
         }

@@ -13,12 +13,15 @@
 #include "guis/GuiTextEditKeyboardPopup.h"
 #include "utils/PlatformUtil.h"
 
+#include <SDL2/SDL_timer.h>
+
 #include <filesystem>
 
 GuiApplicationUpdater::GuiApplicationUpdater()
     : mRenderer {Renderer::getInstance()}
     , mBackground {":/graphics/frame.svg"}
     , mGrid {glm::ivec2 {4, 11}}
+    , mDownloadPercentage {0}
     , mLinuxAppImage {false}
     , mAbortDownload {false}
     , mDownloading {false}

@@ -97,6 +100,7 @@ GuiApplicationUpdater::GuiApplicationUpdater()
         }
         mMessage = "";
         mStatusMessage->setText(mMessage);
+        mDownloadPercentage = 0;
         mDownloading = true;
         if (mThread) {
             mThread->join();

@@ -158,6 +162,10 @@ GuiApplicationUpdater::GuiApplicationUpdater()
 
     mButton3 = std::make_shared<ButtonComponent>("CANCEL", "cancel", [this]() {
         mAbortDownload = true;
+        if (mThread) {
+            mThread->join();
+            mThread.reset();
+        }
         if (mDownloading) {
             mWindow->pushGui(
                 new GuiMsgBox(getHelpStyle(), "DOWNLOAD ABORTED\nNO PACKAGE SAVED TO DISK", "OK",

@@ -166,7 +174,7 @@ GuiApplicationUpdater::GuiApplicationUpdater()
                                   0.70f :
                                   0.45f * (1.778f / mRenderer->getScreenAspectRatio()))));
         }
-        else if (mHasDownloaded && !mHasInstalled) {
+        else if (mHasDownloaded || mReadyToInstall) {
            mWindow->pushGui(new GuiMsgBox(
                getHelpStyle(), "PACKAGE WAS DOWNLOADED AND\nCAN BE MANUALLY INSTALLED", "OK",
                nullptr, "", nullptr, "", nullptr, true, true,

@@ -201,7 +209,7 @@ GuiApplicationUpdater::GuiApplicationUpdater()
                    std::round(mRenderer->getScreenHeight() * 0.13f));
 
     mBusyAnim.setSize(mSize);
-    mBusyAnim.setText("DOWNLOADING");
+    mBusyAnim.setText("DOWNLOADING 100%");
     mBusyAnim.onSizeChanged();
 }
 

@@ -240,11 +248,12 @@ void GuiApplicationUpdater::setDownloadPath()
 bool GuiApplicationUpdater::downloadPackage()
 {
     mStatus = ASYNC_IN_PROGRESS;
-    mRequest = std::unique_ptr<HttpReq>(std::make_unique<HttpReq>(mPackage.url));
-    LOG(LogDebug) << "GuiApplicationUpdater::downloadPackage(): Starting download of \""
-                  << mPackage.filename << "\"";
+    mRequest = std::unique_ptr<HttpReq>(std::make_unique<HttpReq>(mPackage.url, false));
+    LOG(LogInfo) << "Downloading \"" << mPackage.filename << "\"...";
 
     while (!mAbortDownload) {
+        // Add a small delay so we don't eat all CPU cycles checking for status updates.
+        SDL_Delay(5);
         HttpReq::Status reqStatus {mRequest->status()};
         if (reqStatus == HttpReq::REQ_SUCCESS) {
             mStatus = ASYNC_DONE;

@@ -260,6 +269,14 @@ bool GuiApplicationUpdater::downloadPackage()
             mMessage = errorMessage;
             return true;
         }
+        else {
+            // Download progress as reported by curl.
+            const float downloadedBytes {static_cast<float>(mRequest->getDownloadedBytes())};
+            const float totalBytes {static_cast<float>(mRequest->getTotalBytes())};
+            if (downloadedBytes != 0.0f && totalBytes != 0.0f)
+                mDownloadPercentage =
+                    static_cast<int>(std::round((downloadedBytes / totalBytes) * 100.0f));
+        }
     }
 
     if (mAbortDownload) {

@@ -304,10 +321,10 @@ bool GuiApplicationUpdater::downloadPackage()
     writeFile.open(mDownloadPackageFilename.c_str(), std::ofstream::binary);
 
     if (writeFile.fail()) {
-        const std::string errorMessage {"Couldn't write package file, permission problems?"};
-        LOG(LogError) << errorMessage;
+        LOG(LogError) << "Couldn't write package file \"" << mDownloadPackageFilename
+                      << "\", permission problems?";
         std::unique_lock<std::mutex> lock {mMutex};
-        mMessage = "Error: " + errorMessage;
+        mMessage = "Error: Couldn't write package file, permission problems?";
         return true;
     }
 

@@ -440,8 +457,10 @@ void GuiApplicationUpdater::update(int deltaTime)
         }
     }
 
-    if (mDownloading)
+    if (mDownloading) {
+        mBusyAnim.setText("DOWNLOADING " + std::to_string(mDownloadPercentage) + "%");
         mBusyAnim.update(deltaTime);
+    }
     else if (mLinuxAppImage && mReadyToInstall) {
         mProcessStep1->setText(ViewController::TICKMARK_CHAR + " " + mProcessStep1->getValue());
         mProcessStep1->setColor(mMenuColorGreen);

@@ -73,6 +73,7 @@ private:
 
     ApplicationUpdater::Package mPackage;
     std::string mDownloadPackageFilename;
+    std::atomic<int> mDownloadPercentage;
     std::atomic<bool> mLinuxAppImage;
     std::atomic<bool> mAbortDownload;
     std::atomic<bool> mDownloading;

@@ -573,7 +573,7 @@ void GuiScraperSearch::updateInfoPane()
             // through the result list.
             mThumbnailReqMap.insert(std::pair<std::string, std::unique_ptr<HttpReq>>(
                 mScraperResults[i].thumbnailImageUrl,
-                std::unique_ptr<HttpReq>(new HttpReq(thumb))));
+                std::unique_ptr<HttpReq>(new HttpReq(thumb, true))));
         }
     }
 }

@@ -158,7 +158,7 @@ std::unique_ptr<HttpReq> TheGamesDBJSONRequestResources::fetchResource(const std
     std::string path {"https://api.thegamesdb.net/v1"};
     path.append(endpoint).append("?apikey=").append(getApiKey());
 
-    return std::unique_ptr<HttpReq>(new HttpReq(path));
+    return std::unique_ptr<HttpReq>(new HttpReq(path, true));
 }
 
 int TheGamesDBJSONRequestResources::loadResource(std::unordered_map<int, std::string>& resource,

@@ -142,7 +142,7 @@ ScraperHttpRequest::ScraperHttpRequest(std::vector<ScraperSearchResult>& results
     : ScraperRequest(resultsWrite)
 {
     setStatus(ASYNC_IN_PROGRESS);
-    mReq = std::unique_ptr<HttpReq>(new HttpReq(url));
+    mReq = std::unique_ptr<HttpReq>(new HttpReq(url, true));
 }
 
 void ScraperHttpRequest::update()

@@ -426,7 +426,7 @@ MediaDownloadHandle::MediaDownloadHandle(const std::string& url,
                                          const std::string& mediaType,
                                          const bool resizeFile,
                                          bool& savedNewMedia)
-    : mReq(new HttpReq(url))
+    : mReq(new HttpReq(url, true))
     , mSavePath(path)
     , mExistingMediaFile(existingMediaPath)
     , mMediaType(mediaType)

@@ -3,10 +3,8 @@
 // EmulationStation Desktop Edition
 // HttpReq.cpp
 //
-// HTTP request functions.
-// Used by Scraper, GamesDBJSONScraper, GamesDBJSONScraperResources and
-// ScreenScraper to download game information and media files.
-// Also used by ApplicationUpdater to check for application updates.
+// HTTP requests using libcurl.
+// Used by the scraper and application updater.
 //
 
 #include "HttpReq.h"

@@ -38,17 +36,11 @@ std::string HttpReq::urlEncode(const std::string& s)
     return escaped;
 }
 
-bool HttpReq::isUrl(const std::string& str)
-{
-    // The worst guess.
-    return (!str.empty() && !Utils::FileSystem::exists(str) &&
-            (str.find("http://") != std::string::npos ||
-             str.find("https://") != std::string::npos || str.find("www.") != std::string::npos));
-}
-
-HttpReq::HttpReq(const std::string& url)
+HttpReq::HttpReq(const std::string& url, bool scraperRequest)
     : mStatus(REQ_IN_PROGRESS)
     , mHandle(nullptr)
+    , mTotalBytes {0}
+    , mDownloadedBytes {0}
 {
     // The multi-handle is cleaned up via a call from GuiScraperSearch after the scraping
     // has been completed for a game, meaning the handle is valid for all curl requests

@@ -87,12 +79,19 @@ HttpReq::HttpReq(const std::string& url)
         return;
     }
 
-    long connectionTimeout {
-        static_cast<long>(Settings::getInstance()->getInt("ScraperConnectionTimeout"))};
+    long connectionTimeout;
 
-    if (connectionTimeout < 0 || connectionTimeout > 300)
-        connectionTimeout =
-            static_cast<long>(Settings::getInstance()->getDefaultInt("ScraperConnectionTimeout"));
+    if (scraperRequest) {
+        connectionTimeout =
+            static_cast<long>(Settings::getInstance()->getInt("ScraperConnectionTimeout"));
+
+        if (connectionTimeout < 0 || connectionTimeout > 300)
+            connectionTimeout = static_cast<long>(
+                Settings::getInstance()->getDefaultInt("ScraperConnectionTimeout"));
+    }
+    else {
+        connectionTimeout = 30;
+    }
 
     // Set connection timeout (default is 30 seconds).
     err = curl_easy_setopt(mHandle, CURLOPT_CONNECTTIMEOUT, connectionTimeout);

@@ -102,14 +101,21 @@ HttpReq::HttpReq(const std::string& url)
         return;
     }
 
-    long transferTimeout {
-        static_cast<long>(Settings::getInstance()->getInt("ScraperTransferTimeout"))};
+    long transferTimeout;
 
-    // Set transfer timeout (default is 120 seconds).
-    if (transferTimeout < 0 || transferTimeout > 300)
-        transferTimeout =
-            static_cast<long>(Settings::getInstance()->getDefaultInt("ScraperTransferTimeout"));
+    if (scraperRequest) {
+        transferTimeout =
+            static_cast<long>(Settings::getInstance()->getInt("ScraperTransferTimeout"));
+
+        if (transferTimeout < 0 || transferTimeout > 300)
+            transferTimeout =
+                static_cast<long>(Settings::getInstance()->getDefaultInt("ScraperTransferTimeout"));
+    }
+    else {
+        transferTimeout = 0;
+    }
 
+    // Set transfer timeout (default is 120 seconds for the scraper and infinite otherwise).
     err = curl_easy_setopt(mHandle, CURLOPT_TIMEOUT, transferTimeout);
     if (err != CURLE_OK) {
         mStatus = REQ_IO_ERROR;

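Note: libcurl treats a CURLOPT_TIMEOUT value of 0 as "never time out the transfer", which is what the new comment means by "infinite otherwise". Non-scraper requests such as application package downloads are therefore bounded only by the 30-second connection timeout set above and by the caller aborting the request.
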
@@ -134,7 +140,6 @@ HttpReq::HttpReq(const std::string& url)
     }
 
     // Set curl restrict redirect protocols.
-
 #if defined(__APPLE__) || LIBCURL_VERSION_MAJOR < 7 || \
     (LIBCURL_VERSION_MAJOR == 7 && LIBCURL_VERSION_MINOR < 85)
     err = curl_easy_setopt(mHandle, CURLOPT_REDIR_PROTOCOLS, CURLPROTO_HTTP | CURLPROTO_HTTPS);

@@ -156,7 +161,7 @@ HttpReq::HttpReq(const std::string& url)
         return;
     }
 
-    // Give curl a pointer to this HttpReq so we know where to write the data to in our
+    // Pass curl a pointer to this HttpReq so we know where to write the data to in our
     // write function.
     err = curl_easy_setopt(mHandle, CURLOPT_WRITEDATA, this);
     if (err != CURLE_OK) {

@@ -165,8 +170,32 @@ HttpReq::HttpReq(const std::string& url)
         return;
     }
 
+    // Enable the curl progress meter.
+    err = curl_easy_setopt(mHandle, CURLOPT_NOPROGRESS, 0);
+    if (err != CURLE_OK) {
+        mStatus = REQ_IO_ERROR;
+        onError(curl_easy_strerror(err));
+        return;
+    }
+
+    // Pass curl a pointer to HttpReq to provide access to the counter variables.
+    err = curl_easy_setopt(mHandle, CURLOPT_XFERINFODATA, this);
+    if (err != CURLE_OK) {
+        mStatus = REQ_IO_ERROR;
+        onError(curl_easy_strerror(err));
+        return;
+    }
+
+    // Progress meter callback.
+    err = curl_easy_setopt(mHandle, CURLOPT_XFERINFOFUNCTION, HttpReq::transferProgress);
+    if (err != CURLE_OK) {
+        mStatus = REQ_IO_ERROR;
+        onError(curl_easy_strerror(err));
+        return;
+    }
+
     // Add the handle to our multi.
-    CURLMcode merr = curl_multi_add_handle(sMultiHandle, mHandle);
+    CURLMcode merr {curl_multi_add_handle(sMultiHandle, mHandle)};
     if (merr != CURLM_OK) {
         mStatus = REQ_IO_ERROR;
         onError(curl_multi_strerror(merr));

@@ -238,13 +267,24 @@ std::string HttpReq::getContent() const
     return mContent.str();
 }
 
-// Used as a curl callback.
-// size = size of an element, nmemb = number of elements.
-// Return value is number of elements successfully read.
+int HttpReq::transferProgress(
+    void* clientp, curl_off_t dltotal, curl_off_t dlnow, curl_off_t ultotal, curl_off_t ulnow)
+{
+    // Note that it's not guaranteed that the server will actually provide the total size.
+    if (dltotal > 0)
+        static_cast<HttpReq*>(clientp)->mTotalBytes = dltotal;
+    if (dlnow > 0)
+        static_cast<HttpReq*>(clientp)->mDownloadedBytes = dlnow;
+
+    return CURLE_OK;
+}
+
 size_t HttpReq::writeContent(void* buff, size_t size, size_t nmemb, void* req_ptr)
 {
+    // size = size of an element, nmemb = number of elements.
     std::stringstream& ss {static_cast<HttpReq*>(req_ptr)->mContent};
     ss.write(static_cast<char*>(buff), size * nmemb);
 
+    // Return value is number of elements successfully read.
     return nmemb;
 }

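The progress plumbing added to HttpReq.cpp follows libcurl's standard transfer-info mechanism: CURLOPT_NOPROGRESS must be cleared, CURLOPT_XFERINFOFUNCTION points at the callback, and CURLOPT_XFERINFODATA supplies the pointer that curl hands back as clientp. As a standalone reference, a minimal sketch of the same pattern against a bare easy handle (URL and output are illustrative, not part of the commit):

#include <curl/curl.h>

#include <cstdio>

// Same callback shape as HttpReq::transferProgress; returning non-zero would abort the transfer.
static int progressCallback(void* clientp,
                            curl_off_t dltotal,
                            curl_off_t dlnow,
                            curl_off_t ultotal,
                            curl_off_t ulnow)
{
    (void)clientp;
    (void)ultotal;
    (void)ulnow;
    if (dltotal > 0)
        std::printf("downloaded %lld of %lld bytes\n", static_cast<long long>(dlnow),
                    static_cast<long long>(dltotal));
    return 0;
}

int main()
{
    curl_global_init(CURL_GLOBAL_DEFAULT);
    CURL* handle {curl_easy_init()};

    curl_easy_setopt(handle, CURLOPT_URL, "https://example.com/"); // Illustrative URL.
    curl_easy_setopt(handle, CURLOPT_NOPROGRESS, 0L);              // Enable the progress meter.
    curl_easy_setopt(handle, CURLOPT_XFERINFOFUNCTION, progressCallback);
    curl_easy_setopt(handle, CURLOPT_XFERINFODATA, nullptr); // No per-request context needed here.

    curl_easy_perform(handle); // Blocking transfer; the callback fires periodically.

    curl_easy_cleanup(handle);
    curl_global_cleanup();
    return 0;
}

HttpReq instead registers its own instance via CURLOPT_XFERINFODATA so the callback can update the per-request atomic byte counters, as shown in the diff above.
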
@@ -3,44 +3,23 @@
 // EmulationStation Desktop Edition
 // HttpReq.h
 //
-// HTTP request functions.
-// Used by Scraper, GamesDBJSONScraper, GamesDBJSONScraperResources and
-// ScreenScraper to download game information and media files.
-// Also used by ApplicationUpdater to check for application updates.
+// HTTP requests using libcurl.
+// Used by the scraper and application updater.
 //
 
 #ifndef ES_CORE_HTTP_REQ_H
 #define ES_CORE_HTTP_REQ_H
 
 #include <curl/curl.h>
 
+#include <atomic>
 #include <map>
 #include <sstream>
 
-// Usage:
-// HttpReq myRequest("www.duckduckgo.com", "/index.html");
-//
-// For blocking behavior:
-// while (myRequest.status() == HttpReq::REQ_IN_PROGRESS);
-//
-// For non-blocking behavior:
-// Check 'if (myRequest.status() != HttpReq::REQ_IN_PROGRESS)' in some sort of update method.
-//
-// Once one of those calls complete, the request is ready.
-//
-// Do something like this to capture errors:
-// if (myRequest.status() != REQ_SUCCESS) {
-//     // An error occured.
-//     LOG(LogError) << "HTTP request error - " << myRequest.getErrorMessage();
-//     return;
-// }
-//
-// This is how to read the returned content:
-// std::string content = myRequest.getContent();
-
 class HttpReq
 {
 public:
-    HttpReq(const std::string& url);
+    HttpReq(const std::string& url, bool scraperRequest);
     ~HttpReq();
 
     enum Status {

@@ -55,13 +34,15 @@ public:
     // clang-format on
     };
 
-    Status status(); // Process any received data and return the status afterwards.
+    // Process any received data and return the status afterwards.
+    Status status();
 
     std::string getErrorMsg() { return mErrorMsg; }
-    std::string getContent() const; // mStatus must be REQ_SUCCESS.
+    std::string getContent() const;
+    long getTotalBytes() { return mTotalBytes; }
+    long getDownloadedBytes() { return mDownloadedBytes; }
 
     static std::string urlEncode(const std::string& s);
-    static bool isUrl(const std::string& s);
 
     static void cleanupCurlMulti()
     {
         if (sMultiHandle != nullptr) {

@@ -71,7 +52,11 @@ public:
     }
 
 private:
+    // Callbacks.
+    static int transferProgress(
+        void* clientp, curl_off_t dltotal, curl_off_t dlnow, curl_off_t ultotal, curl_off_t ulnow);
     static size_t writeContent(void* buff, size_t size, size_t nmemb, void* req_ptr);
+
     void onError(const std::string& msg) { mErrorMsg = msg; }
 
     static inline std::map<CURL*, HttpReq*> sRequests;

@@ -82,6 +67,8 @@ private:
 
     std::stringstream mContent;
     std::string mErrorMsg;
+    std::atomic<long> mTotalBytes;
+    std::atomic<long> mDownloadedBytes;
 };
 
 #endif // ES_CORE_HTTP_REQ_H