Cleaned up some code in HttpReq.

Leon Styhre 2023-02-16 22:30:32 +01:00
parent daa0fe3a91
commit eff400d6ed
2 changed files with 24 additions and 30 deletions

HttpReq.cpp

@@ -17,16 +17,13 @@
 #include <assert.h>
 
-CURLM* HttpReq::s_multi_handle;
-std::map<CURL*, HttpReq*> HttpReq::s_requests;
-
 std::string HttpReq::urlEncode(const std::string& s)
 {
     const std::string unreserved {
         "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_.~"};
     std::string escaped {""};
-    for (size_t i = 0; i < s.length(); ++i) {
+    for (size_t i {0}; i < s.length(); ++i) {
         if (unreserved.find_first_of(s[i]) != std::string::npos) {
             escaped.push_back(s[i]);
         }
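
Only the pass-through branch for the RFC 3986 unreserved characters is visible in the hunk above; the percent-encoding branch for every other byte falls outside the excerpt. For reference, a minimal standalone sketch of the same approach (hypothetical function name, not the project's exact implementation):

#include <cstdio>
#include <string>

// Copy RFC 3986 unreserved characters through unchanged and percent-encode
// every other byte (sketch of the urlEncode approach, not the actual code).
std::string urlEncodeSketch(const std::string& s)
{
    const std::string unreserved {
        "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_.~"};
    std::string escaped;
    for (const unsigned char c : s) {
        if (unreserved.find(static_cast<char>(c)) != std::string::npos) {
            escaped.push_back(static_cast<char>(c));
        }
        else {
            char buf[4];
            std::snprintf(buf, sizeof(buf), "%%%02X", static_cast<unsigned int>(c));
            escaped.append(buf);
        }
    }
    return escaped;
}

// urlEncodeSketch("super mario/bros") returns "super%20mario%2Fbros"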
@@ -55,8 +52,8 @@ HttpReq::HttpReq(const std::string& url)
     // The multi-handle is cleaned up via a call from GuiScraperSearch after the scraping
     // has been completed for a game, meaning the handle is valid for all curl requests
     // performed for the current game.
-    if (!s_multi_handle)
-        s_multi_handle = curl_multi_init();
+    if (!sMultiHandle)
+        sMultiHandle = curl_multi_init();
 
     mHandle = curl_easy_init();
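
The comment above describes the standard libcurl multi pattern: one shared multi handle drives any number of easy handles and is only torn down once the whole batch is finished. A condensed illustration of that lifetime, with hypothetical names and none of HttpReq's error handling:

#include <curl/curl.h>

// One multi handle shared by a batch of requests, created lazily on first use
// and destroyed once after the batch completes (assumes curl_global_init()
// has already been called elsewhere).
static CURLM* multiHandle {nullptr};

CURL* startRequest(const char* url)
{
    if (!multiHandle)
        multiHandle = curl_multi_init();

    CURL* easy {curl_easy_init()};
    curl_easy_setopt(easy, CURLOPT_URL, url);
    curl_multi_add_handle(multiHandle, easy);
    return easy;
}

void finishBatch()
{
    // Each easy handle must already have been removed with
    // curl_multi_remove_handle() and freed with curl_easy_cleanup(),
    // as the HttpReq destructor does.
    curl_multi_cleanup(multiHandle);
    multiHandle = nullptr;
}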
@@ -96,7 +93,7 @@ HttpReq::HttpReq(const std::string& url)
         connectionTimeout =
             static_cast<long>(Settings::getInstance()->getDefaultInt("ScraperConnectionTimeout"));
 
-    // Set connection timeout (default is 300 seconds).
+    // Set connection timeout (default is 30 seconds).
     err = curl_easy_setopt(mHandle, CURLOPT_CONNECTTIMEOUT, connectionTimeout);
     if (err != CURLE_OK) {
         mStatus = REQ_IO_ERROR;
@@ -111,7 +108,7 @@ HttpReq::HttpReq(const std::string& url)
         transferTimeout =
             static_cast<long>(Settings::getInstance()->getDefaultInt("ScraperTransferTimeout"));
 
-    // Set transfer timeout (default is 0/infinity).
+    // Set transfer timeout (default is 120 seconds).
     err = curl_easy_setopt(mHandle, CURLOPT_TIMEOUT, transferTimeout);
     if (err != CURLE_OK) {
         mStatus = REQ_IO_ERROR;
@@ -151,15 +148,15 @@ HttpReq::HttpReq(const std::string& url)
     }
 
     // Tell curl how to write the data.
-    err = curl_easy_setopt(mHandle, CURLOPT_WRITEFUNCTION, &HttpReq::write_content);
+    err = curl_easy_setopt(mHandle, CURLOPT_WRITEFUNCTION, &HttpReq::writeContent);
     if (err != CURLE_OK) {
         mStatus = REQ_IO_ERROR;
         onError(curl_easy_strerror(err));
         return;
     }
 
-    // Give curl a pointer to this HttpReq so we know where to write the
-    // data *to* in our write function.
+    // Give curl a pointer to this HttpReq so we know where to write the data to in our
+    // write function.
     err = curl_easy_setopt(mHandle, CURLOPT_WRITEDATA, this);
     if (err != CURLE_OK) {
         mStatus = REQ_IO_ERROR;
@@ -168,22 +165,22 @@ HttpReq::HttpReq(const std::string& url)
     }
 
     // Add the handle to our multi.
-    CURLMcode merr = curl_multi_add_handle(s_multi_handle, mHandle);
+    CURLMcode merr = curl_multi_add_handle(sMultiHandle, mHandle);
     if (merr != CURLM_OK) {
         mStatus = REQ_IO_ERROR;
         onError(curl_multi_strerror(merr));
         return;
     }
 
-    s_requests[mHandle] = this;
+    sRequests[mHandle] = this;
 }
 
 HttpReq::~HttpReq()
 {
     if (mHandle) {
-        s_requests.erase(mHandle);
+        sRequests.erase(mHandle);
 
-        CURLMcode merr {curl_multi_remove_handle(s_multi_handle, mHandle)};
+        CURLMcode merr {curl_multi_remove_handle(sMultiHandle, mHandle)};
         if (merr != CURLM_OK) {
             LOG(LogError) << "Error removing curl_easy handle from curl_multi: "
@@ -197,19 +194,19 @@ HttpReq::~HttpReq()
 HttpReq::Status HttpReq::status()
 {
     if (mStatus == REQ_IN_PROGRESS) {
-        int handle_count;
-        CURLMcode merr {curl_multi_perform(s_multi_handle, &handle_count)};
+        int handleCount {0};
+        CURLMcode merr {curl_multi_perform(sMultiHandle, &handleCount)};
         if (merr != CURLM_OK && merr != CURLM_CALL_MULTI_PERFORM) {
             mStatus = REQ_IO_ERROR;
             onError(curl_multi_strerror(merr));
             return mStatus;
         }
 
-        int msgs_left;
+        int msgsLeft;
         CURLMsg* msg;
-        while ((msg = curl_multi_info_read(s_multi_handle, &msgs_left)) != nullptr) {
+        while ((msg = curl_multi_info_read(sMultiHandle, &msgsLeft)) != nullptr) {
             if (msg->msg == CURLMSG_DONE) {
-                HttpReq* req = s_requests[msg->easy_handle];
+                HttpReq* req {sRequests[msg->easy_handle]};
                 if (req == nullptr) {
                     LOG(LogError) << "Cannot find easy handle!";
@@ -243,7 +240,7 @@ std::string HttpReq::getContent() const
 // Used as a curl callback.
 // size = size of an element, nmemb = number of elements.
 // Return value is number of elements successfully read.
-size_t HttpReq::write_content(void* buff, size_t size, size_t nmemb, void* req_ptr)
+size_t HttpReq::writeContent(void* buff, size_t size, size_t nmemb, void* req_ptr)
 {
     std::stringstream& ss {static_cast<HttpReq*>(req_ptr)->mContent};
     ss.write(static_cast<char*>(buff), size * nmemb);
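
The renamed writeContent() callback follows libcurl's CURLOPT_WRITEFUNCTION contract: it receives size * nmemb bytes per call, the CURLOPT_WRITEDATA pointer arrives as the last argument, and returning anything other than the number of bytes handled aborts the transfer. A minimal standalone callback of the same shape, accumulating into a std::string rather than the class's std::stringstream:

#include <cstddef>
#include <string>

#include <curl/curl.h>

// Matches the CURLOPT_WRITEFUNCTION signature; userdata is whatever was
// registered with CURLOPT_WRITEDATA (here a std::string*, not an HttpReq*).
static size_t appendToString(void* buff, size_t size, size_t nmemb, void* userdata)
{
    std::string* out {static_cast<std::string*>(userdata)};
    out->append(static_cast<char*>(buff), size * nmemb);
    return size * nmemb; // any other value makes libcurl abort the transfer
}

// Usage sketch:
//   std::string body;
//   curl_easy_setopt(handle, CURLOPT_WRITEFUNCTION, appendToString);
//   curl_easy_setopt(handle, CURLOPT_WRITEDATA, &body);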

HttpReq.h

@@ -63,21 +63,18 @@ public:
     static void cleanupCurlMulti()
     {
-        if (s_multi_handle != nullptr) {
-            curl_multi_cleanup(s_multi_handle);
-            s_multi_handle = nullptr;
+        if (sMultiHandle != nullptr) {
+            curl_multi_cleanup(sMultiHandle);
+            sMultiHandle = nullptr;
         }
     }
 
 private:
-    static size_t write_content(void* buff, size_t size, size_t nmemb, void* req_ptr);
+    static size_t writeContent(void* buff, size_t size, size_t nmemb, void* req_ptr);
 
     void onError(const std::string& msg) { mErrorMsg = msg; }
 
-    // God dammit libcurl why can't you have some way to check the status of an
-    // individual handle why do I have to handle ALL messages at once.
-    static std::map<CURL*, HttpReq*> s_requests;
-    static CURLM* s_multi_handle;
+    static inline std::map<CURL*, HttpReq*> sRequests;
+    static inline CURLM* sMultiHandle;
 
     Status mStatus;
     CURL* mHandle;
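
Taken together, the two files show the whole public workflow visible in this diff: construct an HttpReq with a URL, poll status() until it leaves REQ_IN_PROGRESS, then read the body with getContent(). A hypothetical caller sketched from that API alone; REQ_SUCCESS is assumed (the full Status enum is not part of this excerpt) and the URL is a placeholder:

#include <string>

#include "HttpReq.h"

std::string fetchExample()
{
    HttpReq request {"https://www.example.com/api/games?name=example"};

    // In ES-DE the GUI loop polls status() between frames; a busy-wait is
    // only acceptable in a throwaway sketch like this one.
    while (request.status() == HttpReq::REQ_IN_PROGRESS) {
    }

    if (request.status() != HttpReq::REQ_SUCCESS) // assumed success enumerator
        return "";

    return request.getContent();
}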