X-Git-Url: http://de.git.xonotic.org/?a=blobdiff_plain;f=libcurl.c;h=4eac714a69a677d293a470b24a7ba8549a752cfb;hb=4c7b42b047585a7e609ec241b0e9c7e14ee11103;hp=4061f5516732e1e430ef9943b25c3a16cfea5e23;hpb=c6bde419ba16d6c3f48941ee5cadb717aead3221;p=xonotic%2Fdarkplaces.git

diff --git a/libcurl.c b/libcurl.c
index 4061f551..4eac714a 100644
--- a/libcurl.c
+++ b/libcurl.c
@@ -6,7 +6,7 @@ static cvar_t cl_curl_maxdownloads = {CVAR_SAVE, "cl_curl_maxdownloads","1", "ma
 static cvar_t cl_curl_maxspeed = {CVAR_SAVE, "cl_curl_maxspeed","100", "maximum download speed (KiB/s)"};
 static cvar_t sv_curl_defaulturl = {CVAR_SAVE, "sv_curl_defaulturl","", "default autodownload source URL"};
 static cvar_t sv_curl_serverpackages = {CVAR_SAVE, "sv_curl_serverpackages","", "list of required files for the clients, separated by spaces"};
-static cvar_t cl_curl_enabled = {CVAR_SAVE, "cl_curl_enabled","0", "whether client's download support is enabled"};
+static cvar_t cl_curl_enabled = {CVAR_SAVE, "cl_curl_enabled","1", "whether client's download support is enabled"};
 
 /*
 =================================================================
@@ -170,7 +170,7 @@ static dllhandle_t curl_dll = NULL;
 typedef struct downloadinfo_s
 {
 	char filename[MAX_QPATH];
-	char url[256];
+	char url[1024];
 	char referer[256];
 	qfile_t *stream;
 	fs_offset_t startpos;
@@ -180,6 +180,11 @@ typedef struct downloadinfo_s
 	unsigned long bytes_received;
 	struct downloadinfo_s *next, *prev;
 	qboolean forthismap;
+
+	unsigned char *buffer;
+	size_t buffersize;
+	curl_callback_t callback;
+	void *callback_data;
 }
 downloadinfo;
 static downloadinfo *downloads = NULL;
@@ -281,7 +286,7 @@ static void Curl_CheckCommandWhenDone()
 		return;
 	if(numdownloads_added && (numdownloads_success == numdownloads_added) && *command_when_done)
 	{
-		Con_DPrintf("Map downloads occurred, executing %s\n", command_when_done);
+		Con_DPrintf("cURL downloads occurred, executing %s\n", command_when_done);
 		Cbuf_AddText("\n");
 		Cbuf_AddText(command_when_done);
 		Cbuf_AddText("\n");
@@ -289,7 +294,7 @@ static void Curl_CheckCommandWhenDone()
 	}
 	else if(numdownloads_added && numdownloads_fail && *command_when_error)
 	{
-		Con_DPrintf("Map downloads FAILED, executing %s\n", command_when_error);
+		Con_DPrintf("cURL downloads FAILED, executing %s\n", command_when_error);
 		Cbuf_AddText("\n");
 		Cbuf_AddText(command_when_error);
 		Cbuf_AddText("\n");
@@ -311,12 +316,16 @@ static qboolean CURL_OpenLibrary (void)
 #if defined(WIN64)
 		"libcurl64.dll",
 #elif defined(WIN32)
+		"libcurl-4.dll",
 		"libcurl-3.dll",
 #elif defined(MACOSX)
+		"libcurl.4.dylib", // Mac OS X Notyetreleased
 		"libcurl.3.dylib", // Mac OS X Tiger
 		"libcurl.2.dylib", // Mac OS X Panther
 #else
+		"libcurl.so.4",
 		"libcurl.so.3",
+		"libcurl.so", // FreeBSD
 #endif
 		NULL
 	};
@@ -326,14 +335,7 @@ static qboolean CURL_OpenLibrary (void)
 		return true;
 
 	// Load the DLL
-	if (! Sys_LoadLibrary (dllnames, &curl_dll, curlfuncs))
-	{
-		Con_Printf ("cURL support disabled\n");
-		return false;
-	}
-
-	Con_Printf ("cURL support enabled\n");
-	return true;
+	return Sys_LoadLibrary (dllnames, &curl_dll, curlfuncs);
 }
 
 
@@ -364,15 +366,28 @@ this.
 */
 static size_t CURL_fwrite(void *data, size_t size, size_t nmemb, void *vdi)
 {
-	fs_offset_t ret;
+	fs_offset_t ret = -1;
 	size_t bytes = size * nmemb;
 	downloadinfo *di = (downloadinfo *) vdi;
 
+	if(di->buffer)
+	{
+		if(di->bytes_received + bytes <= di->buffersize)
+		{
+			memcpy(di->buffer + di->bytes_received, data, bytes);
+			ret = bytes;
+		}
+		// otherwise: buffer overrun, ret stays -1
+	}
+
+	if(di->stream)
+	{
+		ret = FS_Write(di->stream, data, bytes);
+	}
+
 	bytes_received += bytes;
 	di->bytes_received += bytes;
 
-	ret = FS_Write(di->stream, data, bytes);
-
 	return ret; // why not ret / nmemb?
 }
 
@@ -385,6 +400,38 @@ typedef enum
 }
 CurlStatus;
 
+static void curl_default_callback(int status, size_t length_received, unsigned char *buffer, void *cbdata)
+{
+	downloadinfo *di = (downloadinfo *) cbdata;
+	switch(status)
+	{
+		case CURLCBSTATUS_OK:
+			Con_Printf("Download of %s: OK\n", di->filename);
+			break;
+		case CURLCBSTATUS_FAILED:
+			Con_Printf("Download of %s: FAILED\n", di->filename);
+			break;
+		case CURLCBSTATUS_ABORTED:
+			Con_Printf("Download of %s: ABORTED\n", di->filename);
+			break;
+		case CURLCBSTATUS_SERVERERROR:
+			Con_Printf("Download of %s: (unknown server error)\n", di->filename);
+			break;
+		case CURLCBSTATUS_UNKNOWN:
+			Con_Printf("Download of %s: (unknown client error)\n", di->filename);
+			break;
+		default:
+			Con_Printf("Download of %s: %d\n", di->filename, status);
+			break;
+	}
+}
+
+static void curl_quiet_callback(int status, size_t length_received, unsigned char *buffer, void *cbdata)
+{
+	if(developer.integer)
+		curl_default_callback(status, length_received, buffer, cbdata);
+}
+
 /*
 ====================
 Curl_EndDownload
@@ -394,6 +441,7 @@ CURL_DOWNLOAD_FAILED or CURL_DOWNLOAD_ABORTED) and in the second case the error
 code from libcurl, or 0, if another error has occurred.
 ====================
 */
+static qboolean Curl_Begin(const char *URL, const char *name, qboolean ispak, qboolean forthismap, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata);
 static void Curl_EndDownload(downloadinfo *di, CurlStatus status, CURLcode error)
 {
 	qboolean ok = false;
@@ -402,24 +450,29 @@ static void Curl_EndDownload(downloadinfo *di, CurlStatus status, CURLcode error
 
 	switch(status)
 	{
 		case CURL_DOWNLOAD_SUCCESS:
-			Con_Printf("Download of %s: OK\n", di->filename);
 			ok = true;
+			di->callback(CURLCBSTATUS_OK, di->bytes_received, di->buffer, di->callback_data);
 			break;
 		case CURL_DOWNLOAD_FAILED:
-			Con_Printf("Download of %s: FAILED\n", di->filename);
-			if(error)
-				Con_Printf("Reason given by libcurl: %s\n", qcurl_easy_strerror(error));
+			di->callback(CURLCBSTATUS_FAILED, di->bytes_received, di->buffer, di->callback_data);
 			break;
 		case CURL_DOWNLOAD_ABORTED:
-			Con_Printf("Download of %s: ABORTED\n", di->filename);
+			di->callback(CURLCBSTATUS_ABORTED, di->bytes_received, di->buffer, di->callback_data);
 			break;
 		case CURL_DOWNLOAD_SERVERERROR:
-			Con_Printf("Download of %s: %d\n", di->filename, (int) error);
-			// reopen to enforce it to have zero bytes again
-			FS_Close(di->stream);
-			di->stream = FS_Open(di->filename, "w", false, false);
+			if(di->stream)
+			{
+				FS_Close(di->stream);
+				di->stream = FS_OpenRealFile(di->filename, "wb", false);
+			}
+			if(di->callback)
+				di->callback(error ? (int) error : CURLCBSTATUS_SERVERERROR, di->bytes_received, di->buffer, di->callback_data);
+			break;
+		default:
+			if(di->callback)
+				di->callback(CURLCBSTATUS_UNKNOWN, di->bytes_received, di->buffer, di->callback_data);
 			break;
 	}
 
@@ -439,7 +492,25 @@ static void Curl_EndDownload(downloadinfo *di, CurlStatus status, CURLcode error
 		FS_Close(di->stream);
 
 	if(ok && di->ispak)
+	{
 		ok = FS_AddPack(di->filename, NULL, true);
+		if(!ok)
+		{
+			// pack loading failed?
+			// this is critical
+			// better clear the file again...
+			di->stream = FS_OpenRealFile(di->filename, "wb", false);
+			FS_Close(di->stream);
+
+			if(di->startpos && !di->callback)
+			{
+				// this was a resume?
+				// then try to redownload it without reporting the error
+				Curl_Begin(di->url, di->filename, di->ispak, di->forthismap, NULL, 0, NULL, NULL);
+				di->forthismap = false; // don't count the error
+			}
+		}
+	}
 
 	if(di->prev)
 		di->prev->next = di->next;
@@ -479,22 +550,30 @@ static void CheckPendingDownloads()
 	{
 		if(!di->started)
 		{
-			Con_Printf("Downloading %s -> %s", di->url, di->filename);
+			if(!di->buffer)
+			{
+				Con_Printf("Downloading %s -> %s", di->url, di->filename);
 
-			di->stream = FS_Open(di->filename, "ab", false, false);
-			if(!di->stream)
+				di->stream = FS_OpenRealFile(di->filename, "ab", false);
+				if(!di->stream)
+				{
+					Con_Printf("\nFAILED: Could not open output file %s\n", di->filename);
+					Curl_EndDownload(di, CURL_DOWNLOAD_FAILED, CURLE_OK);
+					return;
+				}
+				FS_Seek(di->stream, 0, SEEK_END);
+				di->startpos = FS_Tell(di->stream);
+
+				if(di->startpos > 0)
+					Con_Printf(", resuming from position %ld", (long) di->startpos);
+				Con_Print("...\n");
+			}
+			else
 			{
-				Con_Printf("\nFAILED: Could not open output file %s\n", di->filename);
-				Curl_EndDownload(di, CURL_DOWNLOAD_FAILED, CURLE_OK);
-				return;
+				Con_DPrintf("Downloading %s -> memory\n", di->url);
+				di->startpos = 0;
 			}
-			FS_Seek(di->stream, 0, SEEK_END);
-			di->startpos = FS_Tell(di->stream);
-			if(di->startpos > 0)
-				Con_Printf(", resuming from position %ld", (long) di->startpos);
-			Con_Print("...\n");
-
 			di->curle = qcurl_easy_init();
 			qcurl_easy_setopt(di->curle, CURLOPT_URL, di->url);
 			qcurl_easy_setopt(di->curle, CURLOPT_USERAGENT, engineversion);
@@ -569,6 +648,24 @@ static downloadinfo *Curl_Find(const char *filename)
 	return NULL;
 }
 
+void Curl_Cancel_ToMemory(curl_callback_t callback, void *cbdata)
+{
+	downloadinfo *di;
+	if(!curl_dll)
+		return;
+	for(di = downloads; di; )
+	{
+		if(di->callback == callback && di->callback_data == cbdata)
+		{
+			di->callback = curl_quiet_callback; // do NOT call the callback
+			Curl_EndDownload(di, CURL_DOWNLOAD_ABORTED, CURLE_OK);
+			di = downloads;
+		}
+		else
+			di = di->next;
+	}
+}
+
 /*
 ====================
 Curl_Begin
@@ -577,10 +674,12 @@ Starts a download of a given URL to the file name portion of this URL (or name
 if given) in the "dlcache/" folder.
 ====================
 */
-void Curl_Begin(const char *URL, const char *name, qboolean ispak, qboolean forthismap)
+static qboolean Curl_Begin(const char *URL, const char *name, qboolean ispak, qboolean forthismap, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata)
 {
 	if(!curl_dll)
-		return;
+	{
+		return false;
+	}
 	else
 	{
 		char fn[MAX_QPATH];
@@ -616,80 +715,94 @@ void Curl_Begin(const char *URL, const char *name, qboolean ispak, qboolean fort
 
 		if(!name)
 			name = URL;
-		p = strrchr(name, '/');
-		p = p ? (p+1) : name;
-		q = strchr(p, '?');
-		length = q ? (size_t)(q - p) : strlen(p);
-		dpsnprintf(fn, sizeof(fn), "dlcache/%.*s", (int)length, p);
-
-		// already downloading the file?
+		if(!buf)
 		{
-			downloadinfo *di = Curl_Find(fn);
-			if(di)
-			{
-				Con_Printf("Can't download %s, already getting it from %s!\n", fn, di->url);
+			p = strrchr(name, '/');
+			p = p ? (p+1) : name;
+			q = strchr(p, '?');
+			length = q ? (size_t)(q - p) : strlen(p);
+			dpsnprintf(fn, sizeof(fn), "dlcache/%.*s", (int)length, p);
 
-				// however, if it was not for this map yet...
-				if(forthismap && !di->forthismap)
-				{
-					di->forthismap = true;
-					// this "fakes" a download attempt so the client will wait for
-					// the download to finish and then reconnect
-					++numdownloads_added;
-				}
+			name = fn; // make it point back
 
-				return;
-			}
-		}
-
-		if(ispak && FS_FileExists(fn))
-		{
-			qboolean already_loaded;
-			if(FS_AddPack(fn, &already_loaded, true))
+			// already downloading the file?
 			{
-				Con_DPrintf("%s already exists, not downloading!\n", fn);
-				if(already_loaded)
-					Con_DPrintf("(pak was already loaded)\n");
-				else
+				downloadinfo *di = Curl_Find(fn);
+				if(di)
 				{
-					if(forthismap)
+					Con_Printf("Can't download %s, already getting it from %s!\n", fn, di->url);
+
+					// however, if it was not for this map yet...
+					if(forthismap && !di->forthismap)
 					{
+						di->forthismap = true;
+						// this "fakes" a download attempt so the client will wait for
+						// the download to finish and then reconnect
 						++numdownloads_added;
-						++numdownloads_success;
 					}
+
+					return false;
 				}
-				return;
 			}
-			else
+
+			if(ispak && FS_FileExists(fn))
 			{
-				qfile_t *f = FS_Open(fn, "rb", false, false);
-				if(f)
+				qboolean already_loaded;
+				if(FS_AddPack(fn, &already_loaded, true))
 				{
-					char buf[4] = {0};
-					FS_Read(f, buf, sizeof(buf)); // no "-1", I will use memcmp
-
-					if(memcmp(buf, "PK\x03\x04", 4) && memcmp(buf, "PACK", 4))
+					Con_DPrintf("%s already exists, not downloading!\n", fn);
+					if(already_loaded)
+						Con_DPrintf("(pak was already loaded)\n");
+					else
 					{
-						Con_DPrintf("Detected non-PAK %s, clearing and NOT resuming.\n", fn);
-						FS_Close(f);
-						f = FS_Open(fn, "w", false, false);
-						if(f)
-							FS_Close(f);
+						if(forthismap)
+						{
+							++numdownloads_added;
+							++numdownloads_success;
+						}
 					}
-					else
+
+					return false;
+				}
+				else
+				{
+					qfile_t *f = FS_OpenRealFile(fn, "rb", false);
+					if(f)
 					{
-						// OK
-						FS_Close(f);
+						char buf[4] = {0};
+						FS_Read(f, buf, sizeof(buf)); // no "-1", I will use memcmp
+
+						if(memcmp(buf, "PK\x03\x04", 4) && memcmp(buf, "PACK", 4))
+						{
+							Con_DPrintf("Detected non-PAK %s, clearing and NOT resuming.\n", fn);
+							FS_Close(f);
+							f = FS_OpenRealFile(fn, "wb", false);
+							if(f)
+								FS_Close(f);
+						}
+						else
+						{
+							// OK
+							FS_Close(f);
+						}
 					}
 				}
 			}
 		}
 
+		// if we get here, we actually want to download... so first verify the
+		// URL scheme (so one can't read local files using file://)
+		if(strncmp(URL, "http://", 7) && strncmp(URL, "ftp://", 6) && strncmp(URL, "https://", 8))
+		{
+			Con_Printf("Curl_Begin(\"%s\")): nasty URL scheme rejected\n", URL);
+			return false;
+		}
+
 		if(forthismap)
 			++numdownloads_added;
 
 		di = (downloadinfo *) Z_Malloc(sizeof(*di));
-		strlcpy(di->filename, fn, sizeof(di->filename));
+		strlcpy(di->filename, name, sizeof(di->filename));
 		strlcpy(di->url, URL, sizeof(di->url));
 		dpsnprintf(di->referer, sizeof(di->referer), "dp://%s/", cls.netcon ? cls.netcon->address : "notconnected.invalid");
 		di->forthismap = forthismap;
@@ -697,16 +810,39 @@ void Curl_Begin(const char *URL, const char *name, qboolean ispak, qboolean fort
 		di->startpos = 0;
 		di->curle = NULL;
 		di->started = false;
-		di->ispak = ispak;
+		di->ispak = (ispak && !buf);
 		di->bytes_received = 0;
 		di->next = downloads;
 		di->prev = NULL;
 		if(di->next)
 			di->next->prev = di;
+
+		di->buffer = buf;
+		di->buffersize = bufsize;
+		if(callback == NULL)
+		{
+			di->callback = curl_default_callback;
+			di->callback_data = di;
+		}
+		else
+		{
+			di->callback = callback;
+			di->callback_data = cbdata;
+		}
+
 		downloads = di;
+		return true;
 	}
 }
 
+qboolean Curl_Begin_ToFile(const char *URL, const char *name, qboolean ispak, qboolean forthismap)
+{
+	return Curl_Begin(URL, name, ispak, forthismap, NULL, 0, NULL, NULL);
+}
+qboolean Curl_Begin_ToMemory(const char *URL, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata)
+{
+	return Curl_Begin(URL, NULL, false, false, buf, bufsize, callback, cbdata);
+}
 
 /*
 ====================
@@ -737,7 +873,7 @@ void Curl_Run()
 	{
 		int remaining;
 		CURLMcode mc;
-		
+
 		do
 		{
 			mc = qcurl_multi_perform(curlm, &remaining);
@@ -769,11 +905,11 @@ void Curl_Run()
 						case 4: // e.g. 404?
 						case 5: // e.g. 500?
 							failed = CURL_DOWNLOAD_SERVERERROR;
-							result = code;
+							result = (CURLcode) code;
 							break;
 					}
 				}
-				
+
 				Curl_EndDownload(di, failed, result);
 			}
 		}
@@ -845,7 +981,7 @@ static double Curl_GetDownloadAmount(downloadinfo *di)
 		double length;
 		qcurl_easy_getinfo(di->curle, CURLINFO_CONTENT_LENGTH_DOWNLOAD, &length);
 		if(length > 0)
-			return di->bytes_received / length;
+			return (di->startpos + di->bytes_received) / (di->startpos + length);
 		else
 			return 0;
 	}
@@ -1030,7 +1166,7 @@ void Curl_Curl_f(void)
 		char donecommand[256];
 		if(cls.netcon)
 		{
-			if(cls.signon >= 3)
+			if(cl.loadbegun) // curling won't inhibit loading the map any more when at this stage, so bail out and force a reconnect
 			{
 				dpsnprintf(donecommand, sizeof(donecommand), "connect %s", cls.netcon->address);
 				Curl_CommandWhenDone(donecommand);
@@ -1053,8 +1189,24 @@ void Curl_Curl_f(void)
 	}
 
 needthefile:
-	Curl_Begin(url, name, pak, forthismap);
+	Curl_Begin_ToFile(url, name, pak, forthismap);
+}
+
+/*
+static void curl_curlcat_callback(int code, size_t length_received, unsigned char *buffer, void *cbdata)
+{
+	Con_Printf("Received %d bytes (status %d):\n%.*s\n", (int) length_received, code, (int) length_received, buffer);
+	Z_Free(buffer);
+}
+
+void Curl_CurlCat_f(void)
+{
+	unsigned char *buf;
+	const char *url = Cmd_Argv(1);
+	buf = Z_Malloc(16384);
+	Curl_Begin_ToMemory(url, buf, 16384, curl_curlcat_callback, NULL);
 }
+*/
 
 /*
 ====================
@@ -1071,6 +1223,7 @@ void Curl_Init_Commands(void)
 	Cvar_RegisterVariable (&sv_curl_defaulturl);
 	Cvar_RegisterVariable (&sv_curl_serverpackages);
 	Cmd_AddCommand ("curl", Curl_Curl_f, "download data from an URL and add to search path");
+	//Cmd_AddCommand ("curlcat", Curl_CurlCat_f, "display data from an URL (debugging command)");
 }
 
 /*
@@ -1086,7 +1239,7 @@ array must be freed later using Z_Free.
 */
 Curl_downloadinfo_t *Curl_GetDownloadInfo(int *nDownloads, const char **additional_info)
 {
-	int n, i;
+	int i;
 	downloadinfo *di;
 	Curl_downloadinfo_t *downinfo;
 	static char addinfo[128];
@@ -1099,14 +1252,18 @@ Curl_downloadinfo_t *Curl_GetDownloadInfo(int *nDownloads, const char **addition
 		return NULL;
 	}
 
-	n = 0;
+	i = 0;
 	for(di = downloads; di; di = di->next)
-		++n;
+		++i;
 
-	downinfo = (Curl_downloadinfo_t *) Z_Malloc(sizeof(*downinfo) * n);
+	downinfo = (Curl_downloadinfo_t *) Z_Malloc(sizeof(*downinfo) * i);
 	i = 0;
 	for(di = downloads; di; di = di->next)
 	{
+		// do not show infobars for background downloads
+		if(!developer.integer)
+			if(di->buffer)
+				continue;
 		strlcpy(downinfo[i].filename, di->filename, sizeof(downinfo[i].filename));
 		if(di->curle)
 		{
@@ -1120,7 +1277,7 @@ Curl_downloadinfo_t *Curl_GetDownloadInfo(int *nDownloads, const char **addition
 		}
 		++i;
 	}
-	
+
 	if(additional_info)
 	{
 		// TODO: can I clear command_when_done as soon as the first download fails?
@@ -1138,7 +1295,7 @@ Curl_downloadinfo_t *Curl_GetDownloadInfo(int *nDownloads, const char **addition
 			*additional_info = NULL;
 	}
 
-	*nDownloads = n;
+	*nDownloads = i;
 	return downinfo;
 }
 
@@ -1169,7 +1326,7 @@ this file for obvious reasons.
 */
 static const char *Curl_FindPackURL(const char *filename)
 {
-	static char foundurl[256];
+	static char foundurl[1024];
 	fs_offset_t filesize;
 	char *buf = (char *) FS_LoadFile("curl_urls.txt", tempmempool, true, &filesize);
 	if(buf && filesize)
@@ -1178,7 +1335,7 @@ static const char *Curl_FindPackURL(const char *filename)
 		char *p = buf;
 		char *pattern = NULL, *patternend = NULL, *url = NULL, *urlend = NULL;
 		qboolean eof = false;
-		
+
 		pattern = p;
 		while(!eof)
 		{
@@ -1272,7 +1429,7 @@ void Curl_ClearRequirements()
 	}
 	p = sv_curl_serverpackages.string;
 	Con_DPrintf("Require all of: %s\n", p);
-	while(COM_ParseTokenConsole(&p))
+	while(COM_ParseToken_Simple(&p, false, false))
 	{
 		Con_DPrintf("Require: %s\n", com_token);
 		Curl_RequireFile(com_token);
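
Note (not part of the patch): the hunks above add a download-to-memory path alongside the existing download-to-file path. A caller hands Curl_Begin_ToMemory() a preallocated buffer and a curl_callback_t; CURL_fwrite() then copies the received bytes into that buffer, and Curl_EndDownload() reports the result through the callback. The sketch below shows how such a caller might look. It is illustrative only and assumes that the curl_callback_t typedef, the CURLCBSTATUS_* constants and the Curl_Begin_ToMemory()/Curl_Cancel_ToMemory() prototypes are declared in libcurl.h to match the definitions in this patch; Example_FetchDone, Example_StartFetch and EXAMPLE_BUFSIZE are made-up names used only for this example.

#include "quakedef.h"
#include "libcurl.h"

#define EXAMPLE_BUFSIZE 16384 // hypothetical size limit for the in-memory download

// Invoked by Curl_EndDownload() once the transfer finishes, fails or is aborted.
static void Example_FetchDone(int status, size_t length_received, unsigned char *buffer, void *cbdata)
{
	if(status == CURLCBSTATUS_OK)
		Con_Printf("fetched %d bytes into memory\n", (int) length_received);
	else
		Con_Printf("fetch failed with status %d\n", status);
	Z_Free(buffer); // the caller owns the buffer, so release it here
}

static void Example_StartFetch(const char *url)
{
	// Queue the download; the received data lands in buf and Example_FetchDone
	// is called with the number of bytes received. A transfer larger than the
	// buffer makes CURL_fwrite() return -1, so the callback sees a failure.
	unsigned char *buf = (unsigned char *) Z_Malloc(EXAMPLE_BUFSIZE);
	if(!Curl_Begin_ToMemory(url, buf, EXAMPLE_BUFSIZE, Example_FetchDone, NULL))
		Z_Free(buf); // nothing was queued (libcurl missing or URL scheme rejected)
}

If the owner of the buffer goes away before the transfer completes, a matching Curl_Cancel_ToMemory(Example_FetchDone, NULL) call aborts the pending download without invoking the original callback, since it swaps in curl_quiet_callback before ending the transfer.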