2020-09-21 17:17:34 +00:00
|
|
|
// SPDX-License-Identifier: MIT
|
2020-05-26 16:34:33 +00:00
|
|
|
//
|
2024-04-08 17:26:08 +00:00
|
|
|
// ES-DE Frontend
|
2020-06-21 12:25:28 +00:00
|
|
|
// HttpReq.cpp
|
2020-05-26 16:34:33 +00:00
|
|
|
//
|
2023-08-01 15:36:15 +00:00
|
|
|
// HTTP requests using libcurl.
|
|
|
|
// Used by the scraper and application updater.
|
2020-05-26 16:34:33 +00:00
|
|
|
//
|
|
|
|
|
2013-09-15 17:56:47 +00:00
|
|
|
#include "HttpReq.h"
|
2017-11-01 22:21:10 +00:00
|
|
|
|
2024-04-16 18:59:41 +00:00
|
|
|
#include "ApplicationVersion.h"
|
2021-07-07 18:31:46 +00:00
|
|
|
#include "Log.h"
|
2022-06-29 15:28:39 +00:00
|
|
|
#include "Settings.h"
|
2020-07-03 18:23:51 +00:00
|
|
|
#include "resources/ResourceManager.h"
|
2018-01-09 22:55:09 +00:00
|
|
|
#include "utils/FileSystemUtil.h"
|
2020-07-03 18:23:51 +00:00
|
|
|
|
2024-04-08 17:26:08 +00:00
|
|
|
#include <algorithm>
|
2018-01-09 22:55:09 +00:00
|
|
|
#include <assert.h>
|
2013-09-15 17:56:47 +00:00
|
|
|
|
2021-07-07 18:31:46 +00:00
|
|
|
std::string HttpReq::urlEncode(const std::string& s)
{
    // Percent-encode every character that is not in the RFC 3986 unreserved set.
    const std::string unreserved {
        "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_.~"};

    std::string escaped;
    escaped.reserve(s.length());

    for (const char c : s) {
        if (unreserved.find(c) != std::string::npos) {
            // Unreserved characters pass through unchanged.
            escaped.push_back(c);
        }
        else {
            // Everything else becomes %XX (uppercase hex of the raw byte value).
            char buf[4];
            snprintf(buf, sizeof(buf), "%%%.2X", static_cast<unsigned char>(c));
            escaped.append(buf);
        }
    }

    return escaped;
}
|
|
|
|
|
2023-08-01 15:36:15 +00:00
|
|
|
HttpReq::HttpReq(const std::string& url, bool scraperRequest)
|
2024-04-08 17:26:08 +00:00
|
|
|
: mStatus {REQ_IN_PROGRESS}
|
|
|
|
, mHandle {nullptr}
|
2023-08-01 15:36:15 +00:00
|
|
|
, mTotalBytes {0}
|
|
|
|
, mDownloadedBytes {0}
|
2024-01-10 23:46:11 +00:00
|
|
|
, mScraperRequest {scraperRequest}
|
2013-09-20 23:55:05 +00:00
|
|
|
{
|
2024-04-08 17:26:08 +00:00
|
|
|
// The multi-handle is cleaned up via an explicit call to cleanupCurlMulti() from any object
|
|
|
|
// that uses HttpReq. For example from GuiScraperSearch after scraping has been completed.
|
2023-02-16 21:30:32 +00:00
|
|
|
if (!sMultiHandle)
|
|
|
|
sMultiHandle = curl_multi_init();
|
2020-10-18 09:01:56 +00:00
|
|
|
|
2020-06-21 12:25:28 +00:00
|
|
|
mHandle = curl_easy_init();
|
|
|
|
|
2024-04-08 17:26:08 +00:00
|
|
|
if (mHandle == nullptr) {
|
|
|
|
mStatus = REQ_IO_ERROR;
|
|
|
|
onError("curl_easy_init failed");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (!mPollThread) {
|
|
|
|
sStopPoll = false;
|
|
|
|
mPollThread = std::make_unique<std::thread>(&HttpReq::pollCurl, this);
|
|
|
|
}
|
|
|
|
|
2021-11-25 16:34:34 +00:00
|
|
|
#if defined(USE_BUNDLED_CERTIFICATES)
|
2024-04-08 17:26:08 +00:00
|
|
|
// Use the bundled curl TLS/SSL certificates (which come from the Mozilla project).
|
|
|
|
// This is used on Windows and also on Android as there is no way for curl to access
|
|
|
|
// the system certificates on this OS.
|
2021-07-07 18:31:46 +00:00
|
|
|
curl_easy_setopt(mHandle, CURLOPT_CAINFO,
|
|
|
|
ResourceManager::getInstance()
|
2022-01-04 20:38:46 +00:00
|
|
|
.getResourcePath(":/certificates/curl-ca-bundle.crt")
|
2021-07-07 18:31:46 +00:00
|
|
|
.c_str());
|
|
|
|
#endif
|
2020-07-03 18:23:51 +00:00
|
|
|
|
2024-04-08 17:26:08 +00:00
|
|
|
// Set the URL.
|
2022-09-07 17:59:27 +00:00
|
|
|
CURLcode err {curl_easy_setopt(mHandle, CURLOPT_URL, url.c_str())};
|
2020-06-21 12:25:28 +00:00
|
|
|
if (err != CURLE_OK) {
|
|
|
|
mStatus = REQ_IO_ERROR;
|
|
|
|
onError(curl_easy_strerror(err));
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2024-04-16 18:59:41 +00:00
|
|
|
if (!mScraperRequest) {
|
|
|
|
// Set User-Agent.
|
|
|
|
std::string userAgent {"ES-DE Frontend/"};
|
|
|
|
userAgent.append(PROGRAM_VERSION_STRING).append(" (");
|
|
|
|
#if defined(__ANDROID__)
|
|
|
|
userAgent.append("Android");
|
|
|
|
#elif defined(_WIN64)
|
|
|
|
userAgent.append("Windows");
|
|
|
|
#elif defined(__APPLE__)
|
|
|
|
userAgent.append("macOS");
|
|
|
|
#elif defined(__linux__)
|
|
|
|
userAgent.append("Linux");
|
|
|
|
#elif defined(__unix__)
|
|
|
|
userAgent.append("Unix");
|
|
|
|
#else
|
|
|
|
userAgent.append("Unknown");
|
|
|
|
#endif
|
|
|
|
userAgent.append(")");
|
|
|
|
CURLcode err {curl_easy_setopt(mHandle, CURLOPT_USERAGENT, userAgent.c_str())};
|
|
|
|
if (err != CURLE_OK) {
|
|
|
|
mStatus = REQ_IO_ERROR;
|
|
|
|
onError(curl_easy_strerror(err));
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-08-01 15:36:15 +00:00
|
|
|
long connectionTimeout;
|
2022-06-29 15:28:39 +00:00
|
|
|
|
2024-01-10 23:46:11 +00:00
|
|
|
if (mScraperRequest) {
|
2022-06-29 15:28:39 +00:00
|
|
|
connectionTimeout =
|
2023-08-01 15:36:15 +00:00
|
|
|
static_cast<long>(Settings::getInstance()->getInt("ScraperConnectionTimeout"));
|
|
|
|
|
|
|
|
if (connectionTimeout < 0 || connectionTimeout > 300)
|
|
|
|
connectionTimeout = static_cast<long>(
|
|
|
|
Settings::getInstance()->getDefaultInt("ScraperConnectionTimeout"));
|
|
|
|
}
|
|
|
|
else {
|
|
|
|
connectionTimeout = 30;
|
|
|
|
}
|
2022-06-29 15:28:39 +00:00
|
|
|
|
2023-02-16 21:30:32 +00:00
|
|
|
// Set connection timeout (default is 30 seconds).
|
2022-06-29 15:28:39 +00:00
|
|
|
err = curl_easy_setopt(mHandle, CURLOPT_CONNECTTIMEOUT, connectionTimeout);
|
|
|
|
if (err != CURLE_OK) {
|
|
|
|
mStatus = REQ_IO_ERROR;
|
|
|
|
onError(curl_easy_strerror(err));
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2023-08-01 15:36:15 +00:00
|
|
|
long transferTimeout;
|
2022-06-29 15:28:39 +00:00
|
|
|
|
2024-01-10 23:46:11 +00:00
|
|
|
if (mScraperRequest) {
|
2022-06-29 15:28:39 +00:00
|
|
|
transferTimeout =
|
2023-08-01 15:36:15 +00:00
|
|
|
static_cast<long>(Settings::getInstance()->getInt("ScraperTransferTimeout"));
|
|
|
|
|
|
|
|
if (transferTimeout < 0 || transferTimeout > 300)
|
|
|
|
transferTimeout =
|
|
|
|
static_cast<long>(Settings::getInstance()->getDefaultInt("ScraperTransferTimeout"));
|
|
|
|
}
|
|
|
|
else {
|
|
|
|
transferTimeout = 0;
|
|
|
|
}
|
2022-06-29 15:28:39 +00:00
|
|
|
|
2023-08-01 15:36:15 +00:00
|
|
|
// Set transfer timeout (default is 120 seconds for the scraper and infinite otherwise).
|
2022-06-29 15:28:39 +00:00
|
|
|
err = curl_easy_setopt(mHandle, CURLOPT_TIMEOUT, transferTimeout);
|
|
|
|
if (err != CURLE_OK) {
|
|
|
|
mStatus = REQ_IO_ERROR;
|
|
|
|
onError(curl_easy_strerror(err));
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2020-06-21 12:25:28 +00:00
|
|
|
// Set curl to handle redirects.
|
|
|
|
err = curl_easy_setopt(mHandle, CURLOPT_FOLLOWLOCATION, 1L);
|
|
|
|
if (err != CURLE_OK) {
|
|
|
|
mStatus = REQ_IO_ERROR;
|
|
|
|
onError(curl_easy_strerror(err));
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
// Set curl max redirects.
|
|
|
|
err = curl_easy_setopt(mHandle, CURLOPT_MAXREDIRS, 2L);
|
|
|
|
if (err != CURLE_OK) {
|
|
|
|
mStatus = REQ_IO_ERROR;
|
|
|
|
onError(curl_easy_strerror(err));
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
// Set curl restrict redirect protocols.
|
2023-01-27 17:32:56 +00:00
|
|
|
#if defined(__APPLE__) || LIBCURL_VERSION_MAJOR < 7 || \
|
|
|
|
(LIBCURL_VERSION_MAJOR == 7 && LIBCURL_VERSION_MINOR < 85)
|
2020-06-21 12:25:28 +00:00
|
|
|
err = curl_easy_setopt(mHandle, CURLOPT_REDIR_PROTOCOLS, CURLPROTO_HTTP | CURLPROTO_HTTPS);
|
2023-01-05 18:37:23 +00:00
|
|
|
#else
|
|
|
|
err = curl_easy_setopt(mHandle, CURLOPT_REDIR_PROTOCOLS_STR, "http,https");
|
|
|
|
#endif
|
|
|
|
|
2020-06-21 12:25:28 +00:00
|
|
|
if (err != CURLE_OK) {
|
|
|
|
mStatus = REQ_IO_ERROR;
|
|
|
|
onError(curl_easy_strerror(err));
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
// Tell curl how to write the data.
|
2023-02-16 21:30:32 +00:00
|
|
|
err = curl_easy_setopt(mHandle, CURLOPT_WRITEFUNCTION, &HttpReq::writeContent);
|
2020-06-21 12:25:28 +00:00
|
|
|
if (err != CURLE_OK) {
|
|
|
|
mStatus = REQ_IO_ERROR;
|
|
|
|
onError(curl_easy_strerror(err));
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2023-08-01 15:36:15 +00:00
|
|
|
// Pass curl a pointer to this HttpReq so we know where to write the data to in our
|
2023-02-16 21:30:32 +00:00
|
|
|
// write function.
|
2020-06-21 12:25:28 +00:00
|
|
|
err = curl_easy_setopt(mHandle, CURLOPT_WRITEDATA, this);
|
|
|
|
if (err != CURLE_OK) {
|
|
|
|
mStatus = REQ_IO_ERROR;
|
|
|
|
onError(curl_easy_strerror(err));
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2023-08-01 15:36:15 +00:00
|
|
|
// Enable the curl progress meter.
|
2024-04-08 17:26:08 +00:00
|
|
|
err = curl_easy_setopt(mHandle, CURLOPT_NOPROGRESS, mScraperRequest ? 1 : 0);
|
2023-08-01 15:36:15 +00:00
|
|
|
if (err != CURLE_OK) {
|
|
|
|
mStatus = REQ_IO_ERROR;
|
|
|
|
onError(curl_easy_strerror(err));
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
// Pass curl a pointer to HttpReq to provide access to the counter variables.
|
|
|
|
err = curl_easy_setopt(mHandle, CURLOPT_XFERINFODATA, this);
|
|
|
|
if (err != CURLE_OK) {
|
|
|
|
mStatus = REQ_IO_ERROR;
|
|
|
|
onError(curl_easy_strerror(err));
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
// Progress meter callback.
|
2024-04-08 17:26:08 +00:00
|
|
|
if (!mScraperRequest) {
|
|
|
|
err = curl_easy_setopt(mHandle, CURLOPT_XFERINFOFUNCTION, HttpReq::transferProgress);
|
|
|
|
if (err != CURLE_OK) {
|
|
|
|
mStatus = REQ_IO_ERROR;
|
|
|
|
onError(curl_easy_strerror(err));
|
|
|
|
return;
|
|
|
|
}
|
2023-08-01 15:36:15 +00:00
|
|
|
}
|
|
|
|
|
2023-12-24 10:48:14 +00:00
|
|
|
// Fail on HTTP status codes >= 400.
|
|
|
|
err = curl_easy_setopt(mHandle, CURLOPT_FAILONERROR, 1L);
|
|
|
|
if (err != CURLE_OK) {
|
|
|
|
mStatus = REQ_IO_ERROR;
|
|
|
|
onError(curl_easy_strerror(err));
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2024-04-08 17:26:08 +00:00
|
|
|
// Add the handle to the multi. This is done in pollCurl(), running in a separate thread.
|
|
|
|
std::unique_lock<std::mutex> handleLock {sHandleMutex};
|
|
|
|
sAddHandleQueue.push(mHandle);
|
|
|
|
handleLock.unlock();
|
|
|
|
|
|
|
|
curl_multi_wakeup(sMultiHandle);
|
2020-06-21 12:25:28 +00:00
|
|
|
|
2024-04-08 17:26:08 +00:00
|
|
|
std::unique_lock<std::mutex> requestLock {sRequestMutex};
|
2023-02-16 21:30:32 +00:00
|
|
|
sRequests[mHandle] = this;
|
2024-04-08 17:26:08 +00:00
|
|
|
requestLock.unlock();
|
2013-09-15 17:56:47 +00:00
|
|
|
}
|
|
|
|
|
2013-10-10 18:11:01 +00:00
|
|
|
HttpReq::~HttpReq()
{
    // Unregister this request and hand the easy handle over to the poll thread,
    // which performs the actual curl_multi removal and cleanup.
    if (mHandle != nullptr) {
        {
            const std::lock_guard<std::mutex> requestLock {sRequestMutex};
            sRequests.erase(mHandle);
        }

        {
            const std::lock_guard<std::mutex> handleLock {sHandleMutex};
            sRemoveHandleQueue.push(mHandle);
        }

        // Wake the poll thread so the handle is removed without delay.
        curl_multi_wakeup(sMultiHandle);
    }
}
|
|
|
|
|
2014-03-19 00:55:37 +00:00
|
|
|
std::string HttpReq::getContent() const
{
    // Returns the downloaded response body. Only valid to call once the request
    // has completed successfully (i.e. status() == REQ_SUCCESS).
    assert(mStatus == REQ_SUCCESS);
    return mContent.str();
}
|
|
|
|
|
2023-08-01 15:36:15 +00:00
|
|
|
int HttpReq::transferProgress(
|
|
|
|
void* clientp, curl_off_t dltotal, curl_off_t dlnow, curl_off_t ultotal, curl_off_t ulnow)
|
|
|
|
{
|
2024-04-08 17:26:08 +00:00
|
|
|
if (dltotal == 0 && dlnow == 0)
|
|
|
|
return CURLE_OK;
|
|
|
|
|
|
|
|
// We need all the check logic below to make sure we're not attempting to write into
|
|
|
|
// a request that has just been removed by the main thread.
|
|
|
|
bool validEntry {false};
|
|
|
|
|
|
|
|
std::unique_lock<std::mutex> requestLock {sRequestMutex};
|
|
|
|
if (std::find_if(sRequests.cbegin(), sRequests.cend(), [&clientp](auto&& entry) {
|
|
|
|
return entry.second == clientp;
|
|
|
|
}) != sRequests.cend())
|
|
|
|
validEntry = true;
|
|
|
|
|
|
|
|
if (validEntry) {
|
|
|
|
// Note that it's not guaranteed that the server will actually provide the total size.
|
|
|
|
if (dltotal > 0)
|
|
|
|
static_cast<HttpReq*>(clientp)->mTotalBytes = static_cast<long>(dltotal);
|
|
|
|
if (dlnow > 0)
|
|
|
|
static_cast<HttpReq*>(clientp)->mDownloadedBytes = static_cast<long>(dlnow);
|
|
|
|
}
|
|
|
|
|
|
|
|
requestLock.unlock();
|
2023-08-01 15:36:15 +00:00
|
|
|
|
|
|
|
return CURLE_OK;
|
|
|
|
}
|
|
|
|
|
2023-02-16 21:30:32 +00:00
|
|
|
size_t HttpReq::writeContent(void* buff, size_t size, size_t nmemb, void* req_ptr)
|
2013-10-10 18:11:01 +00:00
|
|
|
{
|
2024-04-08 17:26:08 +00:00
|
|
|
// We need all the check logic below to make sure we're not attempting to write into
|
|
|
|
// a request that has just been removed by the main thread.
|
|
|
|
bool validEntry {false};
|
|
|
|
|
|
|
|
std::unique_lock<std::mutex> requestLock {sRequestMutex};
|
|
|
|
if (std::find_if(sRequests.cbegin(), sRequests.cend(), [&req_ptr](auto&& entry) {
|
|
|
|
return entry.second == req_ptr;
|
|
|
|
}) != sRequests.cend())
|
|
|
|
validEntry = true;
|
|
|
|
|
|
|
|
if (validEntry) {
|
|
|
|
// size = size of an element, nmemb = number of elements.
|
|
|
|
std::stringstream& ss {static_cast<HttpReq*>(req_ptr)->mContent};
|
|
|
|
ss.write(static_cast<char*>(buff), size * nmemb);
|
|
|
|
}
|
|
|
|
|
|
|
|
requestLock.unlock();
|
2013-10-10 18:11:01 +00:00
|
|
|
|
2023-08-01 15:36:15 +00:00
|
|
|
// Return value is number of elements successfully read.
|
2020-06-21 12:25:28 +00:00
|
|
|
return nmemb;
|
2013-10-10 18:11:01 +00:00
|
|
|
}
|
2024-04-08 17:26:08 +00:00
|
|
|
|
|
|
|
// Body of the shared poll thread: services the curl multi handle, applies queued
// easy-handle additions/removals and dispatches completion results back to the
// owning HttpReq objects. Runs until sStopPoll is set and both queues are drained.
void HttpReq::pollCurl()
{
    int numfds {0};

    do {
        // Block (up to 2 seconds) waiting for socket activity; curl_multi_wakeup()
        // from the main thread interrupts this wait early.
        if (!sStopPoll)
            curl_multi_poll(sMultiHandle, nullptr, 0, 2000, &numfds);

        // Check if any easy handles should be added or removed.
        std::unique_lock<std::mutex> handleLock {sHandleMutex};

        if (sAddHandleQueue.size() > 0) {
            // Add the handle to our multi.
            CURLMcode merr {curl_multi_add_handle(sMultiHandle, sAddHandleQueue.front())};

            std::unique_lock<std::mutex> requestLock {sRequestMutex};
            HttpReq* req {sRequests[sAddHandleQueue.front()]};
            if (merr != CURLM_OK) {
                // Propagate the failure to the owning request, if it still exists.
                if (req != nullptr) {
                    req->mStatus = REQ_IO_ERROR;
                    req->onError(curl_multi_strerror(merr));
                    LOG(LogError) << "onError(): " << curl_multi_strerror(merr);
                }
            }
            else {
                if (req != nullptr)
                    req->mStatus = REQ_IN_PROGRESS;
            }
            sAddHandleQueue.pop();
            requestLock.unlock();
        }

        if (sRemoveHandleQueue.size() > 0) {
            // Remove the handle from our multi.
            CURLMcode merr {curl_multi_remove_handle(sMultiHandle, sRemoveHandleQueue.front())};
            if (merr != CURLM_OK) {
                LOG(LogError) << "Error removing curl easy handle from curl multi: "
                              << curl_multi_strerror(merr);
            }
            // The poll thread owns the easy handle cleanup after removal.
            curl_easy_cleanup(sRemoveHandleQueue.front());
            sRemoveHandleQueue.pop();
        }

        handleLock.unlock();

        if (sMultiHandle != nullptr && !sStopPoll) {
            // Drive all pending transfers forward.
            int handleCount {0};
            std::unique_lock<std::mutex> handleLock {sHandleMutex};
            CURLMcode merr {curl_multi_perform(sMultiHandle, &handleCount)};
            handleLock.unlock();
            if (merr != CURLM_OK && merr != CURLM_CALL_MULTI_PERFORM) {
                LOG(LogError) << "Error reading data from multi: " << curl_multi_strerror(merr);
            }

            // Drain completion messages and translate curl results into request status codes.
            int msgsLeft;
            CURLMsg* msg;
            while (!sStopPoll && (msg = curl_multi_info_read(sMultiHandle, &msgsLeft)) != nullptr) {
                if (msg->msg == CURLMSG_DONE) {
                    std::unique_lock<std::mutex> requestLock {sRequestMutex};
                    HttpReq* req {sRequests[msg->easy_handle]};

                    // The request may have been destroyed before its completion message arrived.
                    if (req == nullptr) {
                        LOG(LogError) << "Cannot find easy handle!";
                        requestLock.unlock();
                        continue;
                    }

                    if (msg->data.result == CURLE_OK) {
                        req->mStatus = REQ_SUCCESS;
                    }
                    else if (msg->data.result == CURLE_PEER_FAILED_VERIFICATION) {
                        // TLS certificate verification failure.
                        req->mStatus = REQ_FAILED_VERIFICATION;
                        req->onError(curl_easy_strerror(msg->data.result));
                    }
                    else if (msg->data.result == CURLE_HTTP_RETURNED_ERROR) {
                        // CURLOPT_FAILONERROR was set, so HTTP >= 400 ends up here.
                        long responseCode;
                        curl_easy_getinfo(msg->easy_handle, CURLINFO_RESPONSE_CODE, &responseCode);

                        // ScreenScraper uses HTTP 430 to signal an exhausted daily quota;
                        // treat it as a "successful" response carrying the quota message.
                        if (responseCode == 430 &&
                            Settings::getInstance()->getString("Scraper") == "screenscraper") {
                            req->mContent << "You have exceeded your daily scrape quota";
                            req->mStatus = REQ_SUCCESS;
                        }
                        else if (responseCode == 404 && req->mScraperRequest &&
                                 Settings::getInstance()->getBool("ScraperIgnoreHTTP404Errors")) {
                            req->mStatus = REQ_RESOURCE_NOT_FOUND;
                        }
                        else {
                            req->mStatus = REQ_BAD_STATUS_CODE;
                            req->onError("Server returned HTTP error code " +
                                         std::to_string(responseCode));
                        }
                    }
                    else {
                        // Any other curl failure is reported as a generic I/O error.
                        req->mStatus = REQ_IO_ERROR;
                        req->onError(curl_easy_strerror(msg->data.result));
                    }
                    requestLock.unlock();
                }
            }
        }

    } while (!sStopPoll || !sAddHandleQueue.empty() || !sRemoveHandleQueue.empty());
}
|