allow use of cvar gl_vbo_dynamicindex in RENDERPATH_GLES2
index baa7f958d9e50be783684d89d6f302582741ee0e..3baa91b466f6da44b993a826a2f0cc2078bb1501 100644
--- a/libcurl.c
+++ b/libcurl.c
@@ -1,6 +1,7 @@
 #include "quakedef.h"
 #include "fs.h"
 #include "libcurl.h"
+#include "thread.h"
 
 static cvar_t cl_curl_maxdownloads = {CVAR_SAVE, "cl_curl_maxdownloads","1", "maximum number of concurrent HTTP/FTP downloads"};
 static cvar_t cl_curl_maxspeed = {CVAR_SAVE, "cl_curl_maxspeed","300", "maximum download speed (KiB/s)"};
@@ -8,6 +9,8 @@ static cvar_t sv_curl_defaulturl = {CVAR_SAVE, "sv_curl_defaulturl","", "default
 static cvar_t sv_curl_serverpackages = {CVAR_SAVE, "sv_curl_serverpackages","", "list of required files for the clients, separated by spaces"};
 static cvar_t sv_curl_maxspeed = {CVAR_SAVE, "sv_curl_maxspeed","0", "maximum download speed for clients downloading from sv_curl_defaulturl (KiB/s)"};
 static cvar_t cl_curl_enabled = {CVAR_SAVE, "cl_curl_enabled","1", "whether client's download support is enabled"};
+static cvar_t cl_curl_useragent = {0, "cl_curl_useragent","1", "send the User-Agent string (note: turning this off may break stuff)"};
+static cvar_t cl_curl_useragent_append = {0, "cl_curl_useragent_append","", "a string to append to the User-Agent string (useful for name and version number of your mod)"};
 
 /*
 =================================================================
@@ -22,6 +25,7 @@ static cvar_t cl_curl_enabled = {CVAR_SAVE, "cl_curl_enabled","1", "whether clie
 
 typedef struct CURL_s CURL;
 typedef struct CURLM_s CURLM;
+typedef struct curl_slist curl_slist;
 typedef enum
 {
        CURLE_OK = 0
@@ -47,15 +51,19 @@ typedef enum
        CINIT(URL,  OBJECTPOINT, 2),
        CINIT(ERRORBUFFER, OBJECTPOINT, 10),
        CINIT(WRITEFUNCTION, FUNCTIONPOINT, 11),
+       CINIT(POSTFIELDS, OBJECTPOINT, 15),
        CINIT(REFERER, OBJECTPOINT, 16),
        CINIT(USERAGENT, OBJECTPOINT, 18),
+       CINIT(LOW_SPEED_LIMIT, LONG , 19),
+       CINIT(LOW_SPEED_TIME, LONG, 20),
        CINIT(RESUME_FROM, LONG, 21),
+       CINIT(HTTPHEADER, OBJECTPOINT, 23),
+       CINIT(POST, LONG, 47),         /* HTTP POST method */
        CINIT(FOLLOWLOCATION, LONG, 52),  /* use Location: Luke! */
+       CINIT(POSTFIELDSIZE, LONG, 60),
        CINIT(PRIVATE, OBJECTPOINT, 103),
-       CINIT(LOW_SPEED_LIMIT, LONG , 19),
-       CINIT(LOW_SPEED_TIME, LONG, 20),
        CINIT(PROTOCOLS, LONG, 181),
-       CINIT(REDIR_PROTOCOLS, LONG, 182),
+       CINIT(REDIR_PROTOCOLS, LONG, 182)
 }
 CURLoption;
 #define CURLPROTO_HTTP   (1<<0)
@@ -108,7 +116,7 @@ typedef enum
        CURLINFO_PROXYAUTH_AVAIL  = CURLINFO_LONG   + 24,
        CURLINFO_OS_ERRNO         = CURLINFO_LONG   + 25,
        CURLINFO_NUM_CONNECTS     = CURLINFO_LONG   + 26,
-       CURLINFO_SSL_ENGINES      = CURLINFO_SLIST  + 27,
+       CURLINFO_SSL_ENGINES      = CURLINFO_SLIST  + 27
 }
 CURLINFO;
 
@@ -149,6 +157,8 @@ static CURLMcode (*qcurl_multi_remove_handle) (CURLM *multi_handle, CURL *easy_h
 static CURLMsg * (*qcurl_multi_info_read) (CURLM *multi_handle, int *msgs_in_queue);
 static void (*qcurl_multi_cleanup) (CURLM *);
 static const char * (*qcurl_multi_strerror) (CURLcode);
+static curl_slist * (*qcurl_slist_append) (curl_slist *list, const char *string);
+static void (*qcurl_slist_free_all) (curl_slist *list);
 
 static dllfunction_t curlfuncs[] =
 {
@@ -166,6 +176,8 @@ static dllfunction_t curlfuncs[] =
        {"curl_multi_info_read",        (void **) &qcurl_multi_info_read},
        {"curl_multi_cleanup",          (void **) &qcurl_multi_cleanup},
        {"curl_multi_strerror",         (void **) &qcurl_multi_strerror},
+       {"curl_slist_append",           (void **) &qcurl_slist_append},
+       {"curl_slist_free_all",         (void **) &qcurl_slist_free_all},
        {NULL, NULL}
 };
 
@@ -173,6 +185,8 @@ static dllfunction_t curlfuncs[] =
 static dllhandle_t curl_dll = NULL;
 // will be checked at many places to find out if qcurl calls are allowed
 
+void *curl_mutex = NULL;
+
 typedef struct downloadinfo_s
 {
        char filename[MAX_OSPATH];
@@ -183,15 +197,23 @@ typedef struct downloadinfo_s
        CURL *curle;
        qboolean started;
        qboolean ispak;
-       unsigned long bytes_received;
+       unsigned long bytes_received; // for buffer
+       double bytes_received_curl; // for throttling
+       double bytes_sent_curl; // for throttling
        struct downloadinfo_s *next, *prev;
        qboolean forthismap;
        double maxspeed;
+       curl_slist *slist; // http headers
 
        unsigned char *buffer;
        size_t buffersize;
        curl_callback_t callback;
        void *callback_data;
+
+       const unsigned char *postbuf;
+       size_t postbufsize;
+       const char *post_content_type;
+       const char *extraheaders;
 }
 downloadinfo;
 static downloadinfo *downloads = NULL;
@@ -214,7 +236,7 @@ all downloads since last server connect ended with a successful status.
 Setting the command to NULL clears it.
 ====================
 */
-void Curl_CommandWhenDone(const char *cmd)
+static void Curl_CommandWhenDone(const char *cmd)
 {
        if(!curl_dll)
                return;
@@ -230,7 +252,7 @@ Do not use yet. Not complete.
 Problem: what counts as an error?
 */
 
-void Curl_CommandWhenError(const char *cmd)
+static void Curl_CommandWhenError(const char *cmd)
 {
        if(!curl_dll)
                return;
@@ -252,6 +274,7 @@ void Curl_Clear_forthismap(void)
        downloadinfo *di;
        if(noclear)
                return;
+       if (curl_mutex) Thread_LockMutex(curl_mutex);
        for(di = downloads; di; di = di->next)
                di->forthismap = false;
        Curl_CommandWhenError(NULL);
@@ -259,6 +282,7 @@ void Curl_Clear_forthismap(void)
        numdownloads_fail = 0;
        numdownloads_success = 0;
        numdownloads_added = 0;
+       if (curl_mutex) Thread_UnlockMutex(curl_mutex);
 }
 
 /*
@@ -275,8 +299,10 @@ qboolean Curl_Have_forthismap(void)
 
 void Curl_Register_predownload(void)
 {
+       if (curl_mutex) Thread_LockMutex(curl_mutex);
        Curl_CommandWhenDone("cl_begindownloads");
        Curl_CommandWhenError("cl_begindownloads");
+       if (curl_mutex) Thread_UnlockMutex(curl_mutex);
 }
 
 /*
@@ -291,20 +317,22 @@ static void Curl_CheckCommandWhenDone(void)
 {
        if(!curl_dll)
                return;
-       if(numdownloads_added && (numdownloads_success == numdownloads_added) && *command_when_done)
-       {
-               Con_DPrintf("cURL downloads occurred, executing %s\n", command_when_done);
-               Cbuf_AddText("\n");
-               Cbuf_AddText(command_when_done);
-               Cbuf_AddText("\n");
-               Curl_Clear_forthismap();
-       }
-       else if(numdownloads_added && numdownloads_fail && *command_when_error)
+       if(numdownloads_added && ((numdownloads_success + numdownloads_fail) == numdownloads_added))
        {
-               Con_DPrintf("cURL downloads FAILED, executing %s\n", command_when_error);
-               Cbuf_AddText("\n");
-               Cbuf_AddText(command_when_error);
-               Cbuf_AddText("\n");
+               if(numdownloads_fail == 0)
+               {
+                       Con_DPrintf("cURL downloads occurred, executing %s\n", command_when_done);
+                       Cbuf_AddText("\n");
+                       Cbuf_AddText(command_when_done);
+                       Cbuf_AddText("\n");
+               }
+               else
+               {
+                       Con_DPrintf("cURL downloads FAILED, executing %s\n", command_when_error);
+                       Cbuf_AddText("\n");
+                       Cbuf_AddText(command_when_error);
+                       Cbuf_AddText("\n");
+               }
                Curl_Clear_forthismap();
        }
 }
@@ -358,7 +386,8 @@ static void CURL_CloseLibrary (void)
 
 
 static CURLM *curlm = NULL;
-static unsigned long bytes_received = 0; // used for bandwidth throttling
+static double bytes_received = 0; // used for bandwidth throttling
+static double bytes_sent = 0; // used for bandwidth throttling
 static double curltime = 0;
 
 /*
@@ -390,7 +419,6 @@ static size_t CURL_fwrite(void *data, size_t size, size_t nmemb, void *vdi)
                ret = FS_Write(di->stream, data, bytes);
        }
 
-       bytes_received += bytes;
        di->bytes_received += bytes;
 
        return ret; // why not ret / nmemb?
@@ -431,11 +459,6 @@ static void curl_default_callback(int status, size_t length_received, unsigned c
        }
 }
 
-static void curl_quiet_callback(int status, size_t length_received, unsigned char *buffer, void *cbdata)
-{
-       curl_default_callback(status, length_received, buffer, cbdata);
-}
-
 /*
 ====================
 Curl_EndDownload
@@ -445,7 +468,7 @@ CURL_DOWNLOAD_FAILED or CURL_DOWNLOAD_ABORTED) and in the second case the error
 code from libcurl, or 0, if another error has occurred.
 ====================
 */
-static qboolean Curl_Begin(const char *URL, double maxspeed, const char *name, qboolean ispak, qboolean forthismap, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata);
+static qboolean Curl_Begin(const char *URL, const char *extraheaders, double maxspeed, const char *name, qboolean ispak, qboolean forthismap, const char *post_content_type, const unsigned char *postbuf, size_t postbufsize, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata);
 static void Curl_EndDownload(downloadinfo *di, CurlStatus status, CURLcode error)
 {
        qboolean ok = false;
@@ -484,6 +507,8 @@ static void Curl_EndDownload(downloadinfo *di, CurlStatus status, CURLcode error
        {
                qcurl_multi_remove_handle(curlm, di->curle);
                qcurl_easy_cleanup(di->curle);
+               if(di->slist)
+                       qcurl_slist_free_all(di->slist);
        }
 
        if(!di->callback && ok && !di->bytes_received)
@@ -510,7 +535,7 @@ static void Curl_EndDownload(downloadinfo *di, CurlStatus status, CURLcode error
                        {
                                // this was a resume?
                                // then try to redownload it without reporting the error
-                               Curl_Begin(di->url, di->maxspeed, di->filename, di->ispak, di->forthismap, NULL, 0, NULL, NULL);
+                               Curl_Begin(di->url, di->extraheaders, di->maxspeed, di->filename, di->ispak, di->forthismap, di->post_content_type, di->postbuf, di->postbufsize, NULL, 0, NULL, NULL);
                                di->forthismap = false; // don't count the error
                        }
                }
@@ -534,6 +559,36 @@ static void Curl_EndDownload(downloadinfo *di, CurlStatus status, CURLcode error
        Z_Free(di);
 }
 
+/*
+====================
+CleanURL
+
+Returns a "cleaned up" URL for display (to strip login data)
+====================
+*/
+static const char *CleanURL(const char *url, char *urlbuf, size_t urlbuflength)
+{
+       const char *p, *q, *r;
+
+       // if URL is of form anything://foo-without-slash@rest, replace by anything://rest
+       p = strstr(url, "://");
+       if(p)
+       {
+               q = strchr(p + 3, '@');
+               if(q)
+               {
+                       r = strchr(p + 3, '/');
+                       if(!r || q < r)
+                       {
+                               dpsnprintf(urlbuf, urlbuflength, "%.*s%s", (int)(p - url + 3), url, q + 1);
+                               return urlbuf;
+                       }
+               }
+       }
+
+       return url;
+}
+
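
For clarity, a hedged sketch of what CleanURL does with and without embedded credentials (URLs invented for illustration):

        char shownbuf[1024];
        const char *shown;

        // credentials before the first path slash are stripped for display
        shown = CleanURL("http://user:secret@example.com/data.pk3", shownbuf, sizeof(shownbuf));
        // shown == "http://example.com/data.pk3" (written into shownbuf)

        // an '@' that only occurs inside the path is left untouched
        shown = CleanURL("http://example.com/dir@2/data.pk3", shownbuf, sizeof(shownbuf));
        // shown points at the original URL; shownbuf is not used
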
 /*
 ====================
 CheckPendingDownloads
@@ -545,6 +600,9 @@ up to a maximum number of cl_curl_maxdownloads are running.
 */
 static void CheckPendingDownloads(void)
 {
+       const char *h;
+       char urlbuf[1024];
+       char vabuf[1024];
        if(!curl_dll)
                return;
        if(numdownloads < cl_curl_maxdownloads.integer)
@@ -556,7 +614,7 @@ static void CheckPendingDownloads(void)
                        {
                                if(!di->buffer)
                                {
-                                       Con_Printf("Downloading %s -> %s", di->url, di->filename);
+                                       Con_Printf("Downloading %s -> %s", CleanURL(di->url, urlbuf, sizeof(urlbuf)), di->filename);
 
                                        di->stream = FS_OpenRealFile(di->filename, "ab", false);
                                        if(!di->stream)
@@ -574,13 +632,34 @@ static void CheckPendingDownloads(void)
                                }
                                else
                                {
-                                       Con_DPrintf("Downloading %s -> memory\n", di->url);
+                                       Con_DPrintf("Downloading %s -> memory\n", CleanURL(di->url, urlbuf, sizeof(urlbuf)));
                                        di->startpos = 0;
                                }
 
                                di->curle = qcurl_easy_init();
+                               di->slist = NULL;
                                qcurl_easy_setopt(di->curle, CURLOPT_URL, di->url);
-                               qcurl_easy_setopt(di->curle, CURLOPT_USERAGENT, engineversion);
+                               if(cl_curl_useragent.integer)
+                               {
+                                       const char *ua
+#ifdef HTTP_USER_AGENT
+                                               = HTTP_USER_AGENT;
+#else
+                                               = engineversion;
+#endif
+                                       if(!ua)
+                                               ua = "";
+                                       if(*cl_curl_useragent_append.string)
+                                               ua = va(vabuf, sizeof(vabuf), "%s%s%s",
+                                                       ua,
+                                                       (ua[0] && ua[strlen(ua)-1] != ' ')
+                                                               ? " "
+                                                               : "",
+                                                       cl_curl_useragent_append.string);
+                                       qcurl_easy_setopt(di->curle, CURLOPT_USERAGENT, ua);
+                               }
+                               else
+                                       qcurl_easy_setopt(di->curle, CURLOPT_USERAGENT, "");
                                qcurl_easy_setopt(di->curle, CURLOPT_REFERER, di->referer);
                                qcurl_easy_setopt(di->curle, CURLOPT_RESUME_FROM, (long) di->startpos);
                                qcurl_easy_setopt(di->curle, CURLOPT_FOLLOWLOCATION, 1);
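
The User-Agent handling above boils down to the following composition rule; a minimal standalone sketch (function name and example strings are made up here, not part of the patch):

        // base is HTTP_USER_AGENT or engineversion, append is cl_curl_useragent_append.string
        static void compose_useragent(char *out, size_t outsize, const char *base, const char *append)
        {
                if(!base)
                        base = "";
                if(append && *append)
                        dpsnprintf(out, outsize, "%s%s%s",
                                base,
                                (base[0] && base[strlen(base)-1] != ' ') ? " " : "",
                                append);
                else
                        strlcpy(out, base, outsize);
        }
        // compose_useragent(ua, sizeof(ua), "DarkPlaces-Engine", "MyMod/1.0") -> "DarkPlaces-Engine MyMod/1.0"
        // with cl_curl_useragent 0, CURLOPT_USERAGENT is simply set to ""
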
@@ -595,6 +674,36 @@ static void CheckPendingDownloads(void)
                                        Con_Printf("^1WARNING:^7 for security reasons, please upgrade to libcurl 7.19.4 or above. In a later version of DarkPlaces, HTTP redirect support will be disabled for this libcurl version.\n");
                                        //qcurl_easy_setopt(di->curle, CURLOPT_FOLLOWLOCATION, 0);
                                }
+                               if(di->post_content_type)
+                               {
+                                       qcurl_easy_setopt(di->curle, CURLOPT_POST, 1);
+                                       qcurl_easy_setopt(di->curle, CURLOPT_POSTFIELDS, di->postbuf);
+                                       qcurl_easy_setopt(di->curle, CURLOPT_POSTFIELDSIZE, di->postbufsize);
+                                       di->slist = qcurl_slist_append(di->slist, va(vabuf, sizeof(vabuf), "Content-Type: %s", di->post_content_type));
+                               }
+
+                               // parse extra headers into slist
+                               // \n separated list!
+                               h = di->extraheaders;
+                               while(h)
+                               {
+                                       const char *hh = strchr(h, '\n');
+                                       if(hh)
+                                       {
+                                               char *buf = (char *) Mem_Alloc(tempmempool, hh - h + 1);
+                                               memcpy(buf, h, hh - h);
+                                               buf[hh - h] = 0;
+                                               di->slist = qcurl_slist_append(di->slist, buf);
+                                               h = hh + 1;
+                                       }
+                                       else
+                                       {
+                                               di->slist = qcurl_slist_append(di->slist, h);
+                                               h = NULL;
+                                       }
+                               }
+
+                               qcurl_easy_setopt(di->curle, CURLOPT_HTTPHEADER, di->slist);
                                
                                qcurl_multi_add_handle(curlm, di->curle);
                                di->started = true;
@@ -619,6 +728,7 @@ void Curl_Init(void)
        CURL_OpenLibrary();
        if(!curl_dll)
                return;
+       if (Thread_HasThreads()) curl_mutex = Thread_CreateMutex();
        qcurl_global_init(CURL_GLOBAL_NOTHING);
        curlm = qcurl_multi_init();
 }
@@ -637,6 +747,7 @@ void Curl_Shutdown(void)
                return;
        Curl_ClearRequirements();
        Curl_CancelAll();
+       if (curl_mutex) Thread_DestroyMutex(curl_mutex);
        CURL_CloseLibrary();
        curl_dll = NULL;
 }
@@ -659,24 +770,6 @@ static downloadinfo *Curl_Find(const char *filename)
        return NULL;
 }
 
-void Curl_Cancel_ToMemory(curl_callback_t callback, void *cbdata)
-{
-       downloadinfo *di;
-       if(!curl_dll)
-               return;
-       for(di = downloads; di; )
-       {
-               if(di->callback == callback && di->callback_data == cbdata)
-               {
-                       di->callback = curl_quiet_callback; // do NOT call the callback
-                       Curl_EndDownload(di, CURL_DOWNLOAD_ABORTED, CURLE_OK);
-                       di = downloads;
-               }
-               else
-                       di = di->next;
-       }
-}
-
 /*
 ====================
 Curl_Begin
@@ -685,7 +778,7 @@ Starts a download of a given URL to the file name portion of this URL (or name
 if given) in the "dlcache/" folder.
 ====================
 */
-static qboolean Curl_Begin(const char *URL, double maxspeed, const char *name, qboolean ispak, qboolean forthismap, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata)
+static qboolean Curl_Begin(const char *URL, const char *extraheaders, double maxspeed, const char *name, qboolean ispak, qboolean forthismap, const char *post_content_type, const unsigned char *postbuf, size_t postbufsize, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata)
 {
        if(!curl_dll)
        {
@@ -746,7 +839,9 @@ static qboolean Curl_Begin(const char *URL, double maxspeed, const char *name, q
                //   141.2.16.3 - - [17/Mar/2006:22:32:43 +0100] "GET /maps/tznex07.pk3 HTTP/1.1" 200 1077455 "dp://141.2.16.7:26000/" "Nexuiz Linux 22:07:43 Mar 17 2006"
 
                if(!name)
-                       name = URL;
+                       name = CleanURL(URL, urlbuf, sizeof(urlbuf));
+
+               if (curl_mutex) Thread_LockMutex(curl_mutex);
 
                if(!buf)
                {
@@ -763,7 +858,7 @@ static qboolean Curl_Begin(const char *URL, double maxspeed, const char *name, q
                                downloadinfo *di = Curl_Find(fn);
                                if(di)
                                {
-                                       Con_Printf("Can't download %s, already getting it from %s!\n", fn, di->url);
+                                       Con_Printf("Can't download %s, already getting it from %s!\n", fn, CleanURL(di->url, urlbuf, sizeof(urlbuf)));
 
                                        // however, if it was not for this map yet...
                                        if(forthismap && !di->forthismap)
@@ -828,6 +923,7 @@ static qboolean Curl_Begin(const char *URL, double maxspeed, const char *name, q
                if(strncmp(URL, "http://", 7) && strncmp(URL, "ftp://", 6) && strncmp(URL, "https://", 8))
                {
                        Con_Printf("Curl_Begin(\"%s\"): nasty URL scheme rejected\n", URL);
+                       if (curl_mutex) Thread_UnlockMutex(curl_mutex);
                        return false;
                }
 
@@ -845,6 +941,9 @@ static qboolean Curl_Begin(const char *URL, double maxspeed, const char *name, q
                di->ispak = (ispak && !buf);
                di->maxspeed = maxspeed;
                di->bytes_received = 0;
+               di->bytes_received_curl = 0;
+               di->bytes_sent_curl = 0;
+               di->extraheaders = extraheaders;
                di->next = downloads;
                di->prev = NULL;
                if(di->next)
@@ -863,18 +962,36 @@ static qboolean Curl_Begin(const char *URL, double maxspeed, const char *name, q
                        di->callback_data = cbdata;
                }
 
+               if(post_content_type)
+               {
+                       di->post_content_type = post_content_type;
+                       di->postbuf = postbuf;
+                       di->postbufsize = postbufsize;
+               }
+               else
+               {
+                       di->post_content_type = NULL;
+                       di->postbuf = NULL;
+                       di->postbufsize = 0;
+               }
+
                downloads = di;
+               if (curl_mutex) Thread_UnlockMutex(curl_mutex);
                return true;
        }
 }
 
 qboolean Curl_Begin_ToFile(const char *URL, double maxspeed, const char *name, qboolean ispak, qboolean forthismap)
 {
-       return Curl_Begin(URL, maxspeed, name, ispak, forthismap, NULL, 0, NULL, NULL);
+       return Curl_Begin(URL, NULL, maxspeed, name, ispak, forthismap, NULL, NULL, 0, NULL, 0, NULL, NULL);
 }
 qboolean Curl_Begin_ToMemory(const char *URL, double maxspeed, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata)
 {
-       return Curl_Begin(URL, maxspeed, NULL, false, false, buf, bufsize, callback, cbdata);
+       return Curl_Begin(URL, NULL, maxspeed, NULL, false, false, NULL, NULL, 0, buf, bufsize, callback, cbdata);
+}
+qboolean Curl_Begin_ToMemory_POST(const char *URL, const char *extraheaders, double maxspeed, const char *post_content_type, const unsigned char *postbuf, size_t postbufsize, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata)
+{
+       return Curl_Begin(URL, extraheaders, maxspeed, NULL, false, false, post_content_type, postbuf, postbufsize, buf, bufsize, callback, cbdata);
 }
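
A hedged usage sketch of the new POST entry point (URL, header names and buffers below are invented; the real callers live outside this file):

        static unsigned char replybuf[16384];

        static void example_posted(int status, size_t length_received, unsigned char *buffer, void *cbdata)
        {
                // status is one of the CURL_DOWNLOAD_* codes; buffer holds length_received reply bytes
        }

        static void example_submit(void)
        {
                // extra headers are one \n-separated string; each line becomes a curl_slist entry above
                static const char headers[] = "X-Example: 1\nAccept: text/plain";
                static const char postdata[] = "key=value&other=1";
                // note: downloadinfo stores only the pointers, so postdata/headers must stay
                // valid until the transfer finishes (static here for that reason)
                Curl_Begin_ToMemory_POST("http://stats.example.com/submit", headers, 0,
                        "application/x-www-form-urlencoded",
                        (const unsigned char *) postdata, sizeof(postdata) - 1,
                        replybuf, sizeof(replybuf), example_posted, NULL);
        }
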
 
 /*
@@ -898,13 +1015,21 @@ void Curl_Run(void)
        if(!curl_dll)
                return;
 
+       if (curl_mutex) Thread_LockMutex(curl_mutex);
+
        Curl_CheckCommandWhenDone();
 
        if(!downloads)
+       {
+               if (curl_mutex) Thread_UnlockMutex(curl_mutex);
                return;
+       }
 
        if(realtime < curltime) // throttle
+       {
+               if (curl_mutex) Thread_UnlockMutex(curl_mutex);
                return;
+       }
 
        {
                int remaining;
@@ -916,6 +1041,20 @@ void Curl_Run(void)
                }
                while(mc == CURLM_CALL_MULTI_PERFORM);
 
+               for(di = downloads; di; di = di->next)
+               {
+                       double b = 0;
+                       if(di->curle)
+                       {
+                               qcurl_easy_getinfo(di->curle, CURLINFO_SIZE_UPLOAD, &b);
+                               bytes_sent += (b - di->bytes_sent_curl);
+                               di->bytes_sent_curl = b;
+                               qcurl_easy_getinfo(di->curle, CURLINFO_SIZE_DOWNLOAD, &b);
+                               bytes_received += (b - di->bytes_received_curl);
+                               di->bytes_received_curl = b;
+                       }
+               }
+
                for(;;)
                {
                        CURLMsg *msg = qcurl_multi_info_read(curlm, &remaining);
@@ -966,12 +1105,15 @@ void Curl_Run(void)
 
        if(maxspeed > 0)
        {
-               unsigned long bytes = bytes_received; // maybe smoothen a bit?
+               double bytes = bytes_sent + bytes_received; // maybe smoothen a bit?
                curltime = realtime + bytes / (cl_curl_maxspeed.value * 1024.0);
-               bytes_received -= bytes;
+               bytes_sent = 0;
+               bytes_received = 0;
        }
        else
                curltime = realtime;
+
+       if (curl_mutex) Thread_UnlockMutex(curl_mutex);
 }
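
A worked example of the throttle arithmetic above, with invented numbers:

        // cl_curl_maxspeed 300 (KiB/s); since the last pass the transfers moved
        // bytes_sent + bytes_received = 153600 bytes (150 KiB), therefore
        //   curltime = realtime + 153600 / (300 * 1024.0) = realtime + 0.5
        // and qcurl_multi_perform is not driven again for about half a second;
        // upload traffic from POST requests now counts toward the same budget
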
 
 /*
@@ -986,11 +1128,15 @@ void Curl_CancelAll(void)
        if(!curl_dll)
                return;
 
+       if (curl_mutex) Thread_LockMutex(curl_mutex);
+
        while(downloads)
        {
                Curl_EndDownload(downloads, CURL_DOWNLOAD_ABORTED, CURLE_OK);
                // INVARIANT: downloads will point to the next download after that!
        }
+
+       if (curl_mutex) Thread_UnlockMutex(curl_mutex);
 }
 
 /*
@@ -1065,15 +1211,17 @@ prints the download list
 static void Curl_Info_f(void)
 {
        downloadinfo *di;
+       char urlbuf[1024];
        if(!curl_dll)
                return;
        if(Curl_Running())
        {
+               if (curl_mutex) Thread_LockMutex(curl_mutex);
                Con_Print("Currently running downloads:\n");
                for(di = downloads; di; di = di->next)
                {
                        double speed, percent;
-                       Con_Printf("  %s -> %s ",  di->url, di->filename);
+                       Con_Printf("  %s -> %s ",  CleanURL(di->url, urlbuf, sizeof(urlbuf)), di->filename);
                        percent = 100.0 * Curl_GetDownloadAmount(di);
                        speed = Curl_GetDownloadSpeed(di);
                        if(percent >= 0)
@@ -1081,6 +1229,7 @@ static void Curl_Info_f(void)
                        else
                                Con_Print("(queued)\n");
                }
+               if (curl_mutex) Thread_UnlockMutex(curl_mutex);
        }
        else
        {
@@ -1114,7 +1263,7 @@ curl --finish_autodownload
        once the last download completes successfully, reconnect to the current server
 ====================
 */
-void Curl_Curl_f(void)
+static void Curl_Curl_f(void)
 {
        double maxspeed = 0;
        int i;
@@ -1136,10 +1285,6 @@ void Curl_Curl_f(void)
                return;
        }
 
-       for(i = 0; i != Cmd_Argc(); ++i)
-               Con_DPrintf("%s ", Cmd_Argv(i));
-       Con_DPrint("\n");
-
        if(Cmd_Argc() < 2)
        {
                Con_Print("usage:\ncurl --info, curl --cancel [filename], curl url\n");
@@ -1272,6 +1417,8 @@ void Curl_Init_Commands(void)
        Cvar_RegisterVariable (&sv_curl_defaulturl);
        Cvar_RegisterVariable (&sv_curl_serverpackages);
        Cvar_RegisterVariable (&sv_curl_maxspeed);
+       Cvar_RegisterVariable (&cl_curl_useragent);
+       Cvar_RegisterVariable (&cl_curl_useragent_append);
        Cmd_AddCommand ("curl", Curl_Curl_f, "download data from an URL and add to search path");
        //Cmd_AddCommand ("curlcat", Curl_CurlCat_f, "display data from an URL (debugging command)");
 }
@@ -1287,12 +1434,11 @@ information, or to NULL if no such display shall occur. The returned
 array must be freed later using Z_Free.
 ====================
 */
-Curl_downloadinfo_t *Curl_GetDownloadInfo(int *nDownloads, const char **additional_info)
+Curl_downloadinfo_t *Curl_GetDownloadInfo(int *nDownloads, const char **additional_info, char *addinfo, size_t addinfolength)
 {
        int i;
        downloadinfo *di;
        Curl_downloadinfo_t *downinfo;
-       static char addinfo[128];
 
        if(!curl_dll)
        {
@@ -1302,6 +1448,8 @@ Curl_downloadinfo_t *Curl_GetDownloadInfo(int *nDownloads, const char **addition
                return NULL;
        }
 
+       if (curl_mutex) Thread_LockMutex(curl_mutex);
+
        i = 0;
        for(di = downloads; di; di = di->next)
                ++i;
@@ -1311,7 +1459,7 @@ Curl_downloadinfo_t *Curl_GetDownloadInfo(int *nDownloads, const char **addition
        for(di = downloads; di; di = di->next)
        {
                // do not show infobars for background downloads
-               if(!developer.integer)
+               if(developer.integer <= 0)
                        if(di->buffer)
                                continue;
                strlcpy(downinfo[i].filename, di->filename, sizeof(downinfo[i].filename));
@@ -1334,11 +1482,11 @@ Curl_downloadinfo_t *Curl_GetDownloadInfo(int *nDownloads, const char **addition
                if(*command_when_done && !numdownloads_fail && numdownloads_added)
                {
                        if(!strncmp(command_when_done, "connect ", 8))
-                               dpsnprintf(addinfo, sizeof(addinfo), "(will join %s when done)", command_when_done + 8);
+                               dpsnprintf(addinfo, addinfolength, "(will join %s when done)", command_when_done + 8);
                        else if(!strcmp(command_when_done, "cl_begindownloads"))
-                               dpsnprintf(addinfo, sizeof(addinfo), "(will enter the game when done)");
+                               dpsnprintf(addinfo, addinfolength, "(will enter the game when done)");
                        else
-                               dpsnprintf(addinfo, sizeof(addinfo), "(will do '%s' when done)", command_when_done);
+                               dpsnprintf(addinfo, addinfolength, "(will do '%s' when done)", command_when_done);
                        *additional_info = addinfo;
                }
                else
@@ -1346,6 +1494,7 @@ Curl_downloadinfo_t *Curl_GetDownloadInfo(int *nDownloads, const char **addition
        }
 
        *nDownloads = i;
+       if (curl_mutex) Thread_UnlockMutex(curl_mutex);
        return downinfo;
 }
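
With the signature change the addinfo scratch buffer is now owned by the caller; a hedged caller sketch (variable names invented):

        char addinfo[128];
        const char *additional_info = NULL;
        int ndownloads = 0;
        Curl_downloadinfo_t *infos = Curl_GetDownloadInfo(&ndownloads, &additional_info, addinfo, sizeof(addinfo));
        if(infos)
        {
                // ... render ndownloads progress entries, plus additional_info if non-NULL ...
                Z_Free(infos); // per the comment above, the array must be freed with Z_Free
        }
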
 
@@ -1376,7 +1525,7 @@ this file for obvious reasons.
 */
 static const char *Curl_FindPackURL(const char *filename)
 {
-       static char foundurl[1024];
+       static char foundurl[1024]; // invoked only by server
        fs_offset_t filesize;
        char *buf = (char *) FS_LoadFile("curl_urls.txt", tempmempool, true, &filesize);
        if(buf && filesize)
@@ -1470,20 +1619,12 @@ This should be called at every map change.
 */
 void Curl_ClearRequirements(void)
 {
-       const char *p;
        while(requirements)
        {
                requirement *req = requirements;
                requirements = requirements->next;
                Z_Free(req);
        }
-       p = sv_curl_serverpackages.string;
-       Con_DPrintf("Require all of: %s\n", p);
-       while(COM_ParseToken_Simple(&p, false, false))
-       {
-               Con_DPrintf("Require: %s\n", com_token);
-               Curl_RequireFile(com_token);
-       }
 }
 
 /*
@@ -1498,47 +1639,56 @@ This is done by sending him the following console commands:
        curl --finish_autodownload
 ====================
 */
+static qboolean Curl_SendRequirement(const char *filename, qboolean foundone, char *sendbuffer, size_t sendbuffer_len)
+{
+       const char *p;
+       const char *thispack = FS_WhichPack(filename);
+       const char *packurl;
+
+       if(!thispack)
+               return false;
+
+       p = strrchr(thispack, '/');
+       if(p)
+               thispack = p + 1;
+
+       packurl = Curl_FindPackURL(thispack);
+
+       if(packurl && *packurl && strcmp(packurl, "-"))
+       {
+               if(!foundone)
+                       strlcat(sendbuffer, "curl --clear_autodownload\n", sendbuffer_len);
+
+               strlcat(sendbuffer, "curl --pak --forthismap --as ", sendbuffer_len);
+               strlcat(sendbuffer, thispack, sendbuffer_len);
+               if(sv_curl_maxspeed.value > 0)
+                       dpsnprintf(sendbuffer + strlen(sendbuffer), sendbuffer_len - strlen(sendbuffer), " --maxspeed=%.1f", sv_curl_maxspeed.value);
+               strlcat(sendbuffer, " --for ", sendbuffer_len);
+               strlcat(sendbuffer, filename, sendbuffer_len);
+               strlcat(sendbuffer, " ", sendbuffer_len);
+               strlcat(sendbuffer, packurl, sendbuffer_len);
+               strlcat(sendbuffer, thispack, sendbuffer_len);
+               strlcat(sendbuffer, "\n", sendbuffer_len);
+
+               return true;
+       }
+
+       return false;
+}
 void Curl_SendRequirements(void)
 {
        // for each requirement, find the pack name
        char sendbuffer[4096] = "";
        requirement *req;
        qboolean foundone = false;
+       const char *p;
 
        for(req = requirements; req; req = req->next)
-       {
-               const char *p;
-               const char *thispack = FS_WhichPack(req->filename);
-               const char *packurl;
-
-               if(!thispack)
-                       continue;
+               foundone = Curl_SendRequirement(req->filename, foundone, sendbuffer, sizeof(sendbuffer)) || foundone;
 
-               p = strrchr(thispack, '/');
-               if(p)
-                       thispack = p + 1;
-
-               packurl = Curl_FindPackURL(thispack);
-
-               if(packurl && *packurl && strcmp(packurl, "-"))
-               {
-                       if(!foundone)
-                               strlcat(sendbuffer, "curl --clear_autodownload\n", sizeof(sendbuffer));
-
-                       strlcat(sendbuffer, "curl --pak --forthismap --as ", sizeof(sendbuffer));
-                       strlcat(sendbuffer, thispack, sizeof(sendbuffer));
-                       if(sv_curl_maxspeed.value > 0)
-                               dpsnprintf(sendbuffer + strlen(sendbuffer), sizeof(sendbuffer) - strlen(sendbuffer), " --maxspeed=%.1f", sv_curl_maxspeed.value);
-                       strlcat(sendbuffer, " --for ", sizeof(sendbuffer));
-                       strlcat(sendbuffer, req->filename, sizeof(sendbuffer));
-                       strlcat(sendbuffer, " ", sizeof(sendbuffer));
-                       strlcat(sendbuffer, packurl, sizeof(sendbuffer));
-                       strlcat(sendbuffer, thispack, sizeof(sendbuffer));
-                       strlcat(sendbuffer, "\n", sizeof(sendbuffer));
-
-                       foundone = true;
-               }
-       }
+       p = sv_curl_serverpackages.string;
+       while(COM_ParseToken_Simple(&p, false, false, true))
+               foundone = Curl_SendRequirement(com_token, foundone, sendbuffer, sizeof(sendbuffer)) || foundone;
 
        if(foundone)
                strlcat(sendbuffer, "curl --finish_autodownload\n", sizeof(sendbuffer));
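
For reference, the refactor does not change the command block the server builds; with invented pack and map names (and sv_curl_maxspeed left at 0), sendbuffer ends up roughly as:

        // curl --clear_autodownload
        // curl --pak --forthismap --as mypack.pk3 --for maps/mymap.bsp http://dl.example.com/mypack.pk3
        // curl --finish_autodownload
        // ...one "curl --pak" line per requirement and per pack listed in sv_curl_serverpackages
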