X-Git-Url: http://de.git.xonotic.org/?p=xonotic%2Fdarkplaces.git;a=blobdiff_plain;f=libcurl.c;h=2f001d5b5d821d79036f0b6086eb1c398d970a7e;hp=b472d9e9c4033d69ff98a44ea1b3e06da9eb947a;hb=cbf399e8fa61dff35420bdc717e80c2764cef759;hpb=dd03295660a97e009e976b1d3b91d1f42f46678e diff --git a/libcurl.c b/libcurl.c index b472d9e9..2f001d5b 100644 --- a/libcurl.c +++ b/libcurl.c @@ -1,6 +1,11 @@ #include "quakedef.h" #include "fs.h" #include "libcurl.h" +#include "thread.h" + +#include "image.h" +#include "jpeg.h" +#include "image_png.h" static cvar_t cl_curl_maxdownloads = {CVAR_SAVE, "cl_curl_maxdownloads","1", "maximum number of concurrent HTTP/FTP downloads"}; static cvar_t cl_curl_maxspeed = {CVAR_SAVE, "cl_curl_maxspeed","300", "maximum download speed (KiB/s)"}; @@ -8,6 +13,8 @@ static cvar_t sv_curl_defaulturl = {CVAR_SAVE, "sv_curl_defaulturl","", "default static cvar_t sv_curl_serverpackages = {CVAR_SAVE, "sv_curl_serverpackages","", "list of required files for the clients, separated by spaces"}; static cvar_t sv_curl_maxspeed = {CVAR_SAVE, "sv_curl_maxspeed","0", "maximum download speed for clients downloading from sv_curl_defaulturl (KiB/s)"}; static cvar_t cl_curl_enabled = {CVAR_SAVE, "cl_curl_enabled","1", "whether client's download support is enabled"}; +static cvar_t cl_curl_useragent = {0, "cl_curl_useragent","1", "send the User-Agent string (note: turning this off may break stuff)"}; +static cvar_t cl_curl_useragent_append = {0, "cl_curl_useragent_append","", "a string to append to the User-Agent string (useful for name and version number of your mod)"}; /* ================================================================= @@ -182,6 +189,13 @@ static dllfunction_t curlfuncs[] = static dllhandle_t curl_dll = NULL; // will be checked at many places to find out if qcurl calls are allowed +#define LOADTYPE_NONE 0 +#define LOADTYPE_PAK 1 +#define LOADTYPE_CACHEPIC 2 +#define LOADTYPE_SKINFRAME 3 + +void *curl_mutex = NULL; + typedef struct downloadinfo_s { char filename[MAX_OSPATH]; @@ -191,8 +205,8 @@ typedef struct downloadinfo_s fs_offset_t startpos; CURL *curle; qboolean started; - qboolean ispak; - unsigned long bytes_received; // for buffer + int loadtype; + size_t bytes_received; // for buffer double bytes_received_curl; // for throttling double bytes_sent_curl; // for throttling struct downloadinfo_s *next, *prev; @@ -231,7 +245,7 @@ all downloads since last server connect ended with a successful status. Setting the command to NULL clears it. ==================== */ -void Curl_CommandWhenDone(const char *cmd) +static void Curl_CommandWhenDone(const char *cmd) { if(!curl_dll) return; @@ -247,7 +261,7 @@ Do not use yet. Not complete. Problem: what counts as an error? 
*/ -void Curl_CommandWhenError(const char *cmd) +static void Curl_CommandWhenError(const char *cmd) { if(!curl_dll) return; @@ -269,6 +283,7 @@ void Curl_Clear_forthismap(void) downloadinfo *di; if(noclear) return; + if (curl_mutex) Thread_LockMutex(curl_mutex); for(di = downloads; di; di = di->next) di->forthismap = false; Curl_CommandWhenError(NULL); @@ -276,6 +291,7 @@ void Curl_Clear_forthismap(void) numdownloads_fail = 0; numdownloads_success = 0; numdownloads_added = 0; + if (curl_mutex) Thread_UnlockMutex(curl_mutex); } /* @@ -292,8 +308,10 @@ qboolean Curl_Have_forthismap(void) void Curl_Register_predownload(void) { + if (curl_mutex) Thread_LockMutex(curl_mutex); Curl_CommandWhenDone("cl_begindownloads"); Curl_CommandWhenError("cl_begindownloads"); + if (curl_mutex) Thread_UnlockMutex(curl_mutex); } /* @@ -308,20 +326,22 @@ static void Curl_CheckCommandWhenDone(void) { if(!curl_dll) return; - if(numdownloads_added && (numdownloads_success == numdownloads_added) && *command_when_done) - { - Con_DPrintf("cURL downloads occurred, executing %s\n", command_when_done); - Cbuf_AddText("\n"); - Cbuf_AddText(command_when_done); - Cbuf_AddText("\n"); - Curl_Clear_forthismap(); - } - else if(numdownloads_added && numdownloads_fail && *command_when_error) + if(numdownloads_added && ((numdownloads_success + numdownloads_fail) == numdownloads_added)) { - Con_DPrintf("cURL downloads FAILED, executing %s\n", command_when_error); - Cbuf_AddText("\n"); - Cbuf_AddText(command_when_error); - Cbuf_AddText("\n"); + if(numdownloads_fail == 0) + { + Con_DPrintf("cURL downloads occurred, executing %s\n", command_when_done); + Cbuf_AddText("\n"); + Cbuf_AddText(command_when_done); + Cbuf_AddText("\n"); + } + else + { + Con_DPrintf("cURL downloads FAILED, executing %s\n", command_when_error); + Cbuf_AddText("\n"); + Cbuf_AddText(command_when_error); + Cbuf_AddText("\n"); + } Curl_Clear_forthismap(); } } @@ -410,7 +430,10 @@ static size_t CURL_fwrite(void *data, size_t size, size_t nmemb, void *vdi) di->bytes_received += bytes; - return ret; // why not ret / nmemb? + return ret; + // Why not ret / nmemb? + // Because CURLOPT_WRITEFUNCTION docs say to return the number of bytes. + // Yes, this is incompatible to fwrite(2). } typedef enum @@ -453,6 +476,30 @@ static void curl_quiet_callback(int status, size_t length_received, unsigned cha curl_default_callback(status, length_received, buffer, cbdata); } +static unsigned char *decode_image(downloadinfo *di, const char *content_type) +{ + unsigned char *pixels = NULL; + fs_offset_t filesize = 0; + unsigned char *data = FS_LoadFile(di->filename, tempmempool, true, &filesize); + if(data) + { + int mip = 0; + if(!strcmp(content_type, "image/jpeg")) + pixels = JPEG_LoadImage_BGRA(data, filesize, &mip); + else if(!strcmp(content_type, "image/png")) + pixels = PNG_LoadImage_BGRA(data, filesize, &mip); + else if(filesize >= 7 && !strncmp((char *) data, "\xFF\xD8", 7)) + pixels = JPEG_LoadImage_BGRA(data, filesize, &mip); + else if(filesize >= 7 && !strncmp((char *) data, "\x89PNG\x0D\x0A\x1A\x0A", 7)) + pixels = PNG_LoadImage_BGRA(data, filesize, &mip); + else + Con_Printf("Did not detect content type: %s\n", content_type); + Mem_Free(data); + } + // do we call Image_MakeLinearColorsFromsRGB or not? + return pixels; +} + /* ==================== Curl_EndDownload @@ -462,9 +509,10 @@ CURL_DOWNLOAD_FAILED or CURL_DOWNLOAD_ABORTED) and in the second case the error code from libcurl, or 0, if another error has occurred. 
==================== */ -static qboolean Curl_Begin(const char *URL, const char *extraheaders, double maxspeed, const char *name, qboolean ispak, qboolean forthismap, const char *post_content_type, const unsigned char *postbuf, size_t postbufsize, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata); -static void Curl_EndDownload(downloadinfo *di, CurlStatus status, CURLcode error) +static qboolean Curl_Begin(const char *URL, const char *extraheaders, double maxspeed, const char *name, int loadtype, qboolean forthismap, const char *post_content_type, const unsigned char *postbuf, size_t postbufsize, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata); +static void Curl_EndDownload(downloadinfo *di, CurlStatus status, CURLcode error, const char *content_type_) { + char content_type[64]; qboolean ok = false; if(!curl_dll) return; @@ -496,6 +544,10 @@ static void Curl_EndDownload(downloadinfo *di, CurlStatus status, CURLcode error di->callback(CURLCBSTATUS_UNKNOWN, di->bytes_received, di->buffer, di->callback_data); break; } + if(content_type_) + strlcpy(content_type, content_type_, sizeof(content_type)); + else + *content_type = 0; if(di->curle) { @@ -514,25 +566,58 @@ static void Curl_EndDownload(downloadinfo *di, CurlStatus status, CURLcode error if(di->stream) FS_Close(di->stream); - if(ok && di->ispak) +#define CLEAR_AND_RETRY() \ + do \ + { \ + di->stream = FS_OpenRealFile(di->filename, "wb", false); \ + FS_Close(di->stream); \ + if(di->startpos && !di->callback) \ + { \ + Curl_Begin(di->url, di->extraheaders, di->maxspeed, di->filename, di->loadtype, di->forthismap, di->post_content_type, di->postbuf, di->postbufsize, NULL, 0, NULL, NULL); \ + di->forthismap = false; \ + } \ + } \ + while(0) + + if(ok && di->loadtype == LOADTYPE_PAK) { ok = FS_AddPack(di->filename, NULL, true); if(!ok) - { - // pack loading failed? - // this is critical - // better clear the file again... - di->stream = FS_OpenRealFile(di->filename, "wb", false); - FS_Close(di->stream); + CLEAR_AND_RETRY(); + } + else if(ok && di->loadtype == LOADTYPE_CACHEPIC) + { + const char *p; + unsigned char *pixels = NULL; - if(di->startpos && !di->callback) - { - // this was a resume? - // then try to redownload it without reporting the error - Curl_Begin(di->url, di->extraheaders, di->maxspeed, di->filename, di->ispak, di->forthismap, di->post_content_type, di->postbuf, di->postbufsize, NULL, 0, NULL, NULL); - di->forthismap = false; // don't count the error - } - } + p = di->filename; +#ifdef WE_ARE_EVIL + if(!strncmp(p, "dlcache/", 8)) + p += 8; +#endif + + pixels = decode_image(di, content_type); + if(pixels) + Draw_NewPic(p, image_width, image_height, true, pixels); + else + CLEAR_AND_RETRY(); + } + else if(ok && di->loadtype == LOADTYPE_SKINFRAME) + { + const char *p; + unsigned char *pixels = NULL; + + p = di->filename; +#ifdef WE_ARE_EVIL + if(!strncmp(p, "dlcache/", 8)) + p += 8; +#endif + + pixels = decode_image(di, content_type); + if(pixels) + R_SkinFrame_LoadInternalBGRA(p, TEXF_FORCE_RELOAD | TEXF_MIPMAP | TEXF_ALPHA, pixels, image_width, image_height, false); // TODO what sRGB argument to put here? 
+ else + CLEAR_AND_RETRY(); } if(di->prev) @@ -560,9 +645,8 @@ CleanURL Returns a "cleaned up" URL for display (to strip login data) ==================== */ -static const char *CleanURL(const char *url) +static const char *CleanURL(const char *url, char *urlbuf, size_t urlbuflength) { - static char urlbuf[1024]; const char *p, *q, *r; // if URL is of form anything://foo-without-slash@rest, replace by anything://rest @@ -575,7 +659,7 @@ static const char *CleanURL(const char *url) r = strchr(p + 3, '/'); if(!r || q < r) { - dpsnprintf(urlbuf, sizeof(urlbuf), "%.*s%s", (int)(p - url + 3), url, q + 1); + dpsnprintf(urlbuf, urlbuflength, "%.*s%s", (int)(p - url + 3), url, q + 1); return urlbuf; } } @@ -596,6 +680,8 @@ up to a maximum number of cl_curl_maxdownloads are running. static void CheckPendingDownloads(void) { const char *h; + char urlbuf[1024]; + char vabuf[1024]; if(!curl_dll) return; if(numdownloads < cl_curl_maxdownloads.integer) @@ -607,13 +693,13 @@ static void CheckPendingDownloads(void) { if(!di->buffer) { - Con_Printf("Downloading %s -> %s", CleanURL(di->url), di->filename); + Con_Printf("Downloading %s -> %s", CleanURL(di->url, urlbuf, sizeof(urlbuf)), di->filename); di->stream = FS_OpenRealFile(di->filename, "ab", false); if(!di->stream) { Con_Printf("\nFAILED: Could not open output file %s\n", di->filename); - Curl_EndDownload(di, CURL_DOWNLOAD_FAILED, CURLE_OK); + Curl_EndDownload(di, CURL_DOWNLOAD_FAILED, CURLE_OK, NULL); return; } FS_Seek(di->stream, 0, SEEK_END); @@ -625,14 +711,34 @@ static void CheckPendingDownloads(void) } else { - Con_DPrintf("Downloading %s -> memory\n", CleanURL(di->url)); + Con_DPrintf("Downloading %s -> memory\n", CleanURL(di->url, urlbuf, sizeof(urlbuf))); di->startpos = 0; } di->curle = qcurl_easy_init(); di->slist = NULL; qcurl_easy_setopt(di->curle, CURLOPT_URL, di->url); - qcurl_easy_setopt(di->curle, CURLOPT_USERAGENT, engineversion); + if(cl_curl_useragent.integer) + { + const char *ua +#ifdef HTTP_USER_AGENT + = HTTP_USER_AGENT; +#else + = engineversion; +#endif + if(!ua) + ua = ""; + if(*cl_curl_useragent_append.string) + ua = va(vabuf, sizeof(vabuf), "%s%s%s", + ua, + (ua[0] && ua[strlen(ua)-1] != ' ') + ? 
" " + : "", + cl_curl_useragent_append.string); + qcurl_easy_setopt(di->curle, CURLOPT_USERAGENT, ua); + } + else + qcurl_easy_setopt(di->curle, CURLOPT_USERAGENT, ""); qcurl_easy_setopt(di->curle, CURLOPT_REFERER, di->referer); qcurl_easy_setopt(di->curle, CURLOPT_RESUME_FROM, (long) di->startpos); qcurl_easy_setopt(di->curle, CURLOPT_FOLLOWLOCATION, 1); @@ -652,7 +758,7 @@ static void CheckPendingDownloads(void) qcurl_easy_setopt(di->curle, CURLOPT_POST, 1); qcurl_easy_setopt(di->curle, CURLOPT_POSTFIELDS, di->postbuf); qcurl_easy_setopt(di->curle, CURLOPT_POSTFIELDSIZE, di->postbufsize); - di->slist = qcurl_slist_append(di->slist, va("Content-Type: %s", di->post_content_type)); + di->slist = qcurl_slist_append(di->slist, va(vabuf, sizeof(vabuf), "Content-Type: %s", di->post_content_type)); } // parse extra headers into slist @@ -677,7 +783,6 @@ static void CheckPendingDownloads(void) } qcurl_easy_setopt(di->curle, CURLOPT_HTTPHEADER, di->slist); - qcurl_multi_add_handle(curlm, di->curle); di->started = true; @@ -702,6 +807,7 @@ void Curl_Init(void) CURL_OpenLibrary(); if(!curl_dll) return; + if (Thread_HasThreads()) curl_mutex = Thread_CreateMutex(); qcurl_global_init(CURL_GLOBAL_NOTHING); curlm = qcurl_multi_init(); } @@ -720,6 +826,7 @@ void Curl_Shutdown(void) return; Curl_ClearRequirements(); Curl_CancelAll(); + if (curl_mutex) Thread_DestroyMutex(curl_mutex); CURL_CloseLibrary(); curl_dll = NULL; } @@ -752,7 +859,7 @@ void Curl_Cancel_ToMemory(curl_callback_t callback, void *cbdata) if(di->callback == callback && di->callback_data == cbdata) { di->callback = curl_quiet_callback; // do NOT call the callback - Curl_EndDownload(di, CURL_DOWNLOAD_ABORTED, CURLE_OK); + Curl_EndDownload(di, CURL_DOWNLOAD_ABORTED, CURLE_OK, NULL); di = downloads; } else @@ -768,9 +875,13 @@ Starts a download of a given URL to the file name portion of this URL (or name if given) in the "dlcache/" folder. ==================== */ -static qboolean Curl_Begin(const char *URL, const char *extraheaders, double maxspeed, const char *name, qboolean ispak, qboolean forthismap, const char *post_content_type, const unsigned char *postbuf, size_t postbufsize, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata) +static qboolean Curl_Begin(const char *URL, const char *extraheaders, double maxspeed, const char *name, int loadtype, qboolean forthismap, const char *post_content_type, const unsigned char *postbuf, size_t postbufsize, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata) { - if(!curl_dll) + if(buf) + if(loadtype != LOADTYPE_NONE) + Host_Error("Curl_Begin: loadtype and buffer are both set"); + + if(!curl_dll || !cl_curl_enabled.integer) { return false; } @@ -828,16 +939,28 @@ static qboolean Curl_Begin(const char *URL, const char *extraheaders, double max // // 141.2.16.3 - - [17/Mar/2006:22:32:43 +0100] "GET /maps/tznex07.pk3 HTTP/1.1" 200 1077455 "dp://141.2.16.7:26000/" "Nexuiz Linux 22:07:43 Mar 17 2006" - if(!name) - name = CleanURL(URL); + if (curl_mutex) Thread_LockMutex(curl_mutex); - if(!buf) + if(buf) { - p = strrchr(name, '/'); - p = p ? (p+1) : name; - q = strchr(p, '?'); - length = q ? (size_t)(q - p) : strlen(p); - dpsnprintf(fn, sizeof(fn), "dlcache/%.*s", (int)length, p); + if(!name) + name = CleanURL(URL, urlbuf, sizeof(urlbuf)); + } + else + { + if(!name) + { + name = CleanURL(URL, urlbuf, sizeof(urlbuf)); + p = strrchr(name, '/'); + p = p ? (p+1) : name; + q = strchr(p, '?'); + length = q ? 
(size_t)(q - p) : strlen(p); + dpsnprintf(fn, sizeof(fn), "dlcache/%.*s", (int)length, p); + } + else + { + dpsnprintf(fn, sizeof(fn), "dlcache/%s", name); + } name = fn; // make it point back @@ -846,7 +969,7 @@ static qboolean Curl_Begin(const char *URL, const char *extraheaders, double max downloadinfo *di = Curl_Find(fn); if(di) { - Con_Printf("Can't download %s, already getting it from %s!\n", fn, CleanURL(di->url)); + Con_Printf("Can't download %s, already getting it from %s!\n", fn, CleanURL(di->url, urlbuf, sizeof(urlbuf))); // however, if it was not for this map yet... if(forthismap && !di->forthismap) @@ -857,52 +980,64 @@ static qboolean Curl_Begin(const char *URL, const char *extraheaders, double max ++numdownloads_added; } + if (curl_mutex) Thread_UnlockMutex(curl_mutex); return false; } } - if(ispak && FS_FileExists(fn)) + if(FS_FileExists(fn)) { - qboolean already_loaded; - if(FS_AddPack(fn, &already_loaded, true)) + if(loadtype == LOADTYPE_PAK) { - Con_DPrintf("%s already exists, not downloading!\n", fn); - if(already_loaded) - Con_DPrintf("(pak was already loaded)\n"); - else + qboolean already_loaded; + if(FS_AddPack(fn, &already_loaded, true)) { - if(forthismap) + Con_DPrintf("%s already exists, not downloading!\n", fn); + if(already_loaded) + Con_DPrintf("(pak was already loaded)\n"); + else { - ++numdownloads_added; - ++numdownloads_success; + if(forthismap) + { + ++numdownloads_added; + ++numdownloads_success; + } } - } - return false; - } - else - { - qfile_t *f = FS_OpenRealFile(fn, "rb", false); - if(f) + if (curl_mutex) Thread_UnlockMutex(curl_mutex); + return false; + } + else { - char buf[4] = {0}; - FS_Read(f, buf, sizeof(buf)); // no "-1", I will use memcmp - - if(memcmp(buf, "PK\x03\x04", 4) && memcmp(buf, "PACK", 4)) + qfile_t *f = FS_OpenRealFile(fn, "rb", false); + if(f) { - Con_DPrintf("Detected non-PAK %s, clearing and NOT resuming.\n", fn); - FS_Close(f); - f = FS_OpenRealFile(fn, "wb", false); - if(f) + char buf[4] = {0}; + FS_Read(f, buf, sizeof(buf)); // no "-1", I will use memcmp + + if(memcmp(buf, "PK\x03\x04", 4) && memcmp(buf, "PACK", 4)) + { + Con_DPrintf("Detected non-PAK %s, clearing and NOT resuming.\n", fn); FS_Close(f); - } - else - { - // OK - FS_Close(f); + f = FS_OpenRealFile(fn, "wb", false); + if(f) + FS_Close(f); + } + else + { + // OK + FS_Close(f); + } } } } + else + { + // never resume these + qfile_t *f = FS_OpenRealFile(fn, "wb", false); + if(f) + FS_Close(f); + } } } @@ -911,6 +1046,7 @@ static qboolean Curl_Begin(const char *URL, const char *extraheaders, double max if(strncmp(URL, "http://", 7) && strncmp(URL, "ftp://", 6) && strncmp(URL, "https://", 8)) { Con_Printf("Curl_Begin(\"%s\"): nasty URL scheme rejected\n", URL); + if (curl_mutex) Thread_UnlockMutex(curl_mutex); return false; } @@ -925,7 +1061,7 @@ static qboolean Curl_Begin(const char *URL, const char *extraheaders, double max di->startpos = 0; di->curle = NULL; di->started = false; - di->ispak = (ispak && !buf); + di->loadtype = loadtype; di->maxspeed = maxspeed; di->bytes_received = 0; di->bytes_received_curl = 0; @@ -963,13 +1099,14 @@ static qboolean Curl_Begin(const char *URL, const char *extraheaders, double max } downloads = di; + if (curl_mutex) Thread_UnlockMutex(curl_mutex); return true; } } -qboolean Curl_Begin_ToFile(const char *URL, double maxspeed, const char *name, qboolean ispak, qboolean forthismap) +qboolean Curl_Begin_ToFile(const char *URL, double maxspeed, const char *name, int loadtype, qboolean forthismap) { - return Curl_Begin(URL, NULL, 
maxspeed, name, ispak, forthismap, NULL, NULL, 0, NULL, 0, NULL, NULL); + return Curl_Begin(URL, NULL, maxspeed, name, loadtype, forthismap, NULL, NULL, 0, NULL, 0, NULL, NULL); } qboolean Curl_Begin_ToMemory(const char *URL, double maxspeed, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata) { @@ -1001,13 +1138,21 @@ void Curl_Run(void) if(!curl_dll) return; + if (curl_mutex) Thread_LockMutex(curl_mutex); + Curl_CheckCommandWhenDone(); if(!downloads) + { + if (curl_mutex) Thread_UnlockMutex(curl_mutex); return; + } if(realtime < curltime) // throttle + { + if (curl_mutex) Thread_UnlockMutex(curl_mutex); return; + } { int remaining; @@ -1022,12 +1167,15 @@ void Curl_Run(void) for(di = downloads; di; di = di->next) { double b = 0; - qcurl_easy_getinfo(di->curle, CURLINFO_SIZE_UPLOAD, &b); - bytes_sent += (b - di->bytes_sent_curl); - di->bytes_sent_curl = b; - qcurl_easy_getinfo(di->curle, CURLINFO_SIZE_DOWNLOAD, &b); - bytes_sent += (b - di->bytes_received_curl); - di->bytes_received_curl = b; + if(di->curle) + { + qcurl_easy_getinfo(di->curle, CURLINFO_SIZE_UPLOAD, &b); + bytes_sent += (b - di->bytes_sent_curl); + di->bytes_sent_curl = b; + qcurl_easy_getinfo(di->curle, CURLINFO_SIZE_DOWNLOAD, &b); + bytes_sent += (b - di->bytes_received_curl); + di->bytes_received_curl = b; + } } for(;;) @@ -1037,6 +1185,7 @@ void Curl_Run(void) break; if(msg->msg == CURLMSG_DONE) { + const char *ct = NULL; CurlStatus failed = CURL_DOWNLOAD_SUCCESS; CURLcode result; qcurl_easy_getinfo(msg->easy_handle, CURLINFO_PRIVATE, &di); @@ -1057,9 +1206,10 @@ void Curl_Run(void) result = (CURLcode) code; break; } + qcurl_easy_getinfo(msg->easy_handle, CURLINFO_CONTENT_TYPE, &ct); } - Curl_EndDownload(di, failed, result); + Curl_EndDownload(di, failed, result, ct); } } } @@ -1081,12 +1231,14 @@ void Curl_Run(void) if(maxspeed > 0) { double bytes = bytes_sent + bytes_received; // maybe smoothen a bit? - curltime = realtime + bytes / (cl_curl_maxspeed.value * 1024.0); + curltime = realtime + bytes / (maxspeed * 1024.0); bytes_sent = 0; bytes_received = 0; } else curltime = realtime; + + if (curl_mutex) Thread_UnlockMutex(curl_mutex); } /* @@ -1101,11 +1253,15 @@ void Curl_CancelAll(void) if(!curl_dll) return; + if (curl_mutex) Thread_LockMutex(curl_mutex); + while(downloads) { - Curl_EndDownload(downloads, CURL_DOWNLOAD_ABORTED, CURLE_OK); + Curl_EndDownload(downloads, CURL_DOWNLOAD_ABORTED, CURLE_OK, NULL); // INVARIANT: downloads will point to the next download after that! 
} + + if (curl_mutex) Thread_UnlockMutex(curl_mutex); } /* @@ -1180,15 +1336,17 @@ prints the download list static void Curl_Info_f(void) { downloadinfo *di; + char urlbuf[1024]; if(!curl_dll) return; if(Curl_Running()) { + if (curl_mutex) Thread_LockMutex(curl_mutex); Con_Print("Currently running downloads:\n"); for(di = downloads; di; di = di->next) { double speed, percent; - Con_Printf(" %s -> %s ", CleanURL(di->url), di->filename); + Con_Printf(" %s -> %s ", CleanURL(di->url, urlbuf, sizeof(urlbuf)), di->filename); percent = 100.0 * Curl_GetDownloadAmount(di); speed = Curl_GetDownloadSpeed(di); if(percent >= 0) @@ -1196,6 +1354,7 @@ static void Curl_Info_f(void) else Con_Print("(queued)\n"); } + if (curl_mutex) Thread_UnlockMutex(curl_mutex); } else { @@ -1229,12 +1388,12 @@ curl --finish_autodownload once the last download completes successfully, reconnect to the current server ==================== */ -void Curl_Curl_f(void) +static void Curl_Curl_f(void) { double maxspeed = 0; int i; int end; - qboolean pak = false; + int loadtype = LOADTYPE_NONE; qboolean forthismap = false; const char *url; const char *name = 0; @@ -1276,7 +1435,7 @@ void Curl_Curl_f(void) { downloadinfo *di = Curl_Find(url); if(di) - Curl_EndDownload(di, CURL_DOWNLOAD_ABORTED, CURLE_OK); + Curl_EndDownload(di, CURL_DOWNLOAD_ABORTED, CURLE_OK, NULL); else Con_Print("download not found\n"); } @@ -1284,7 +1443,15 @@ void Curl_Curl_f(void) } else if(!strcmp(a, "--pak")) { - pak = true; + loadtype = LOADTYPE_PAK; + } + else if(!strcmp(a, "--cachepic")) + { + loadtype = LOADTYPE_CACHEPIC; + } + else if(!strcmp(a, "--skinframe")) + { + loadtype = LOADTYPE_SKINFRAME; } else if(!strcmp(a, "--for")) // must be last option { @@ -1349,7 +1516,7 @@ void Curl_Curl_f(void) } needthefile: - Curl_Begin_ToFile(url, maxspeed, name, pak, forthismap); + Curl_Begin_ToFile(url, maxspeed, name, loadtype, forthismap); } /* @@ -1383,6 +1550,8 @@ void Curl_Init_Commands(void) Cvar_RegisterVariable (&sv_curl_defaulturl); Cvar_RegisterVariable (&sv_curl_serverpackages); Cvar_RegisterVariable (&sv_curl_maxspeed); + Cvar_RegisterVariable (&cl_curl_useragent); + Cvar_RegisterVariable (&cl_curl_useragent_append); Cmd_AddCommand ("curl", Curl_Curl_f, "download data from an URL and add to search path"); //Cmd_AddCommand ("curlcat", Curl_CurlCat_f, "display data from an URL (debugging command)"); } @@ -1398,12 +1567,11 @@ information, or to NULL if no such display shall occur. The returned array must be freed later using Z_Free. 
==================== */ -Curl_downloadinfo_t *Curl_GetDownloadInfo(int *nDownloads, const char **additional_info) +Curl_downloadinfo_t *Curl_GetDownloadInfo(int *nDownloads, const char **additional_info, char *addinfo, size_t addinfolength) { int i; downloadinfo *di; Curl_downloadinfo_t *downinfo; - static char addinfo[128]; if(!curl_dll) { @@ -1413,6 +1581,8 @@ Curl_downloadinfo_t *Curl_GetDownloadInfo(int *nDownloads, const char **addition return NULL; } + if (curl_mutex) Thread_LockMutex(curl_mutex); + i = 0; for(di = downloads; di; di = di->next) ++i; @@ -1445,11 +1615,11 @@ Curl_downloadinfo_t *Curl_GetDownloadInfo(int *nDownloads, const char **addition if(*command_when_done && !numdownloads_fail && numdownloads_added) { if(!strncmp(command_when_done, "connect ", 8)) - dpsnprintf(addinfo, sizeof(addinfo), "(will join %s when done)", command_when_done + 8); + dpsnprintf(addinfo, addinfolength, "(will join %s when done)", command_when_done + 8); else if(!strcmp(command_when_done, "cl_begindownloads")) - dpsnprintf(addinfo, sizeof(addinfo), "(will enter the game when done)"); + dpsnprintf(addinfo, addinfolength, "(will enter the game when done)"); else - dpsnprintf(addinfo, sizeof(addinfo), "(will do '%s' when done)", command_when_done); + dpsnprintf(addinfo, addinfolength, "(will do '%s' when done)", command_when_done); *additional_info = addinfo; } else @@ -1457,6 +1627,7 @@ Curl_downloadinfo_t *Curl_GetDownloadInfo(int *nDownloads, const char **addition } *nDownloads = i; + if (curl_mutex) Thread_UnlockMutex(curl_mutex); return downinfo; } @@ -1487,7 +1658,7 @@ this file for obvious reasons. */ static const char *Curl_FindPackURL(const char *filename) { - static char foundurl[1024]; + static char foundurl[1024]; // invoked only by server fs_offset_t filesize; char *buf = (char *) FS_LoadFile("curl_urls.txt", tempmempool, true, &filesize); if(buf && filesize) @@ -1607,7 +1778,7 @@ static qboolean Curl_SendRequirement(const char *filename, qboolean foundone, ch const char *thispack = FS_WhichPack(filename); const char *packurl; - if(!thispack) + if(!thispack || !*thispack) return false; p = strrchr(thispack, '/'); @@ -1649,7 +1820,7 @@ void Curl_SendRequirements(void) foundone = Curl_SendRequirement(req->filename, foundone, sendbuffer, sizeof(sendbuffer)) || foundone; p = sv_curl_serverpackages.string; - while(COM_ParseToken_Simple(&p, false, false)) + while(COM_ParseToken_Simple(&p, false, false, true)) foundone = Curl_SendRequirement(com_token, foundone, sendbuffer, sizeof(sendbuffer)) || foundone; if(foundone)
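For reference, a minimal caller-side sketch of the API change this diff introduces: Curl_Begin_ToFile() now takes one of the new LOADTYPE_* constants instead of the old qboolean ispak, and Curl_EndDownload() post-processes the finished file according to that type (FS_AddPack for LOADTYPE_PAK, decode_image plus Draw_NewPic or R_SkinFrame_LoadInternalBGRA for the image types). The URLs, file names, and maxspeed values below are placeholders for illustration only, not part of the patch.

    // Hypothetical usage sketch; URLs and names are placeholders.
    // A pk3 that should be mounted with FS_AddPack() once the download finishes,
    // counted toward the current map's required downloads:
    Curl_Begin_ToFile("http://example.com/data/mymap.pk3", 0, NULL, LOADTYPE_PAK, true);

    // A PNG/JPEG that should become a skinframe texture on completion;
    // decode_image() picks the decoder from the Content-Type header and falls
    // back to the file's magic bytes if the header is missing or unrecognized:
    Curl_Begin_ToFile("http://example.com/skins/player.png", 0, "skins/player.png", LOADTYPE_SKINFRAME, false);

The same requests can be issued from the console through the extended curl command, e.g. "curl --pak <url>", or with the new "--skinframe" and "--cachepic" options added by this diff.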