evaluate sv_curl_serverpackages changes immediately, not on map restart
index 4eac714a69a677d293a470b24a7ba8549a752cfb..b472d9e9c4033d69ff98a44ea1b3e06da9eb947a 100644
--- a/libcurl.c
+++ b/libcurl.c
@@ -3,9 +3,10 @@
 #include "libcurl.h"
 
 static cvar_t cl_curl_maxdownloads = {CVAR_SAVE, "cl_curl_maxdownloads","1", "maximum number of concurrent HTTP/FTP downloads"};
-static cvar_t cl_curl_maxspeed = {CVAR_SAVE, "cl_curl_maxspeed","100", "maximum download speed (KiB/s)"};
+static cvar_t cl_curl_maxspeed = {CVAR_SAVE, "cl_curl_maxspeed","300", "maximum download speed (KiB/s)"};
 static cvar_t sv_curl_defaulturl = {CVAR_SAVE, "sv_curl_defaulturl","", "default autodownload source URL"};
 static cvar_t sv_curl_serverpackages = {CVAR_SAVE, "sv_curl_serverpackages","", "list of required files for the clients, separated by spaces"};
+static cvar_t sv_curl_maxspeed = {CVAR_SAVE, "sv_curl_maxspeed","0", "maximum download speed for clients downloading from sv_curl_defaulturl (KiB/s)"};
 static cvar_t cl_curl_enabled = {CVAR_SAVE, "cl_curl_enabled","1", "whether client's download support is enabled"};
 
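For context, a minimal server configuration exercising these cvars might look like the lines below (URL and package names are placeholders). With this commit, changes to sv_curl_serverpackages are evaluated when download requirements are sent to a client, rather than only at the next map restart:

    sv_curl_defaulturl "http://packages.example.com/"
    sv_curl_maxspeed 300                              // per-client download cap in KiB/s, 0 = no cap
    sv_curl_serverpackages "mymap.pk3 mytextures.pk3"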
 /*
@@ -21,6 +22,7 @@ static cvar_t cl_curl_enabled = {CVAR_SAVE, "cl_curl_enabled","1", "whether clie
 
 typedef struct CURL_s CURL;
 typedef struct CURLM_s CURLM;
+typedef struct curl_slist curl_slist;
 typedef enum
 {
        CURLE_OK = 0
@@ -46,15 +48,24 @@ typedef enum
        CINIT(URL,  OBJECTPOINT, 2),
        CINIT(ERRORBUFFER, OBJECTPOINT, 10),
        CINIT(WRITEFUNCTION, FUNCTIONPOINT, 11),
+       CINIT(POSTFIELDS, OBJECTPOINT, 15),
        CINIT(REFERER, OBJECTPOINT, 16),
        CINIT(USERAGENT, OBJECTPOINT, 18),
+       CINIT(LOW_SPEED_LIMIT, LONG , 19),
+       CINIT(LOW_SPEED_TIME, LONG, 20),
        CINIT(RESUME_FROM, LONG, 21),
+       CINIT(HTTPHEADER, OBJECTPOINT, 23),
+       CINIT(POST, LONG, 47),         /* HTTP POST method */
        CINIT(FOLLOWLOCATION, LONG, 52),  /* use Location: Luke! */
+       CINIT(POSTFIELDSIZE, LONG, 60),
        CINIT(PRIVATE, OBJECTPOINT, 103),
-       CINIT(LOW_SPEED_LIMIT, LONG , 19),
-       CINIT(LOW_SPEED_TIME, LONG, 20),
+       CINIT(PROTOCOLS, LONG, 181),
+       CINIT(REDIR_PROTOCOLS, LONG, 182)
 }
 CURLoption;
+#define CURLPROTO_HTTP   (1<<0)
+#define CURLPROTO_HTTPS  (1<<1)
+#define CURLPROTO_FTP    (1<<2)
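The CINIT() entries and CURLPROTO_* masks above mirror libcurl's own numbering, because this file declares the handful of options it needs instead of including curl/curl.h. As a reminder of the convention the new entries must stay in sync with, here is a sketch of the macro as it appears in libcurl's headers (an assumption shown for illustration, not copied from this patch):

    /* libcurl numbering: an option's value is its argument-type base plus its id */
    #define CURLOPTTYPE_LONG          0
    #define CURLOPTTYPE_OBJECTPOINT   10000
    #define CURLOPTTYPE_FUNCTIONPOINT 20000
    #define CINIT(name, type, number) CURLOPT_ ## name = CURLOPTTYPE_ ## type + (number)
    /* e.g. CINIT(PROTOCOLS, LONG, 181) -> 181, CINIT(HTTPHEADER, OBJECTPOINT, 23) -> 10023;
       CURLOPT_PROTOCOLS and CURLOPT_REDIR_PROTOCOLS then take a mask such as
       CURLPROTO_HTTP | CURLPROTO_HTTPS | CURLPROTO_FTP to restrict which URL schemes
       a transfer, or a redirect it follows, may use. */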
 typedef enum
 {
        CURLINFO_TEXT = 0,
@@ -102,7 +113,7 @@ typedef enum
        CURLINFO_PROXYAUTH_AVAIL  = CURLINFO_LONG   + 24,
        CURLINFO_OS_ERRNO         = CURLINFO_LONG   + 25,
        CURLINFO_NUM_CONNECTS     = CURLINFO_LONG   + 26,
-       CURLINFO_SSL_ENGINES      = CURLINFO_SLIST  + 27,
+       CURLINFO_SSL_ENGINES      = CURLINFO_SLIST  + 27
 }
 CURLINFO;
 
@@ -128,21 +139,23 @@ typedef struct
 CURLMsg;
 
 static void (*qcurl_global_init) (long flags);
-static void (*qcurl_global_cleanup) ();
+static void (*qcurl_global_cleanup) (void);
 
-static CURL * (*qcurl_easy_init) ();
+static CURL * (*qcurl_easy_init) (void);
 static void (*qcurl_easy_cleanup) (CURL *handle);
 static CURLcode (*qcurl_easy_setopt) (CURL *handle, CURLoption option, ...);
 static CURLcode (*qcurl_easy_getinfo) (CURL *handle, CURLINFO info, ...);
 static const char * (*qcurl_easy_strerror) (CURLcode);
 
-static CURLM * (*qcurl_multi_init) ();
+static CURLM * (*qcurl_multi_init) (void);
 static CURLMcode (*qcurl_multi_perform) (CURLM *multi_handle, int *running_handles);
 static CURLMcode (*qcurl_multi_add_handle) (CURLM *multi_handle, CURL *easy_handle);
 static CURLMcode (*qcurl_multi_remove_handle) (CURLM *multi_handle, CURL *easy_handle);
 static CURLMsg * (*qcurl_multi_info_read) (CURLM *multi_handle, int *msgs_in_queue);
 static void (*qcurl_multi_cleanup) (CURLM *);
 static const char * (*qcurl_multi_strerror) (CURLcode);
+static curl_slist * (*qcurl_slist_append) (curl_slist *list, const char *string);
+static void (*qcurl_slist_free_all) (curl_slist *list);
 
 static dllfunction_t curlfuncs[] =
 {
@@ -160,6 +173,8 @@ static dllfunction_t curlfuncs[] =
        {"curl_multi_info_read",        (void **) &qcurl_multi_info_read},
        {"curl_multi_cleanup",          (void **) &qcurl_multi_cleanup},
        {"curl_multi_strerror",         (void **) &qcurl_multi_strerror},
+       {"curl_slist_append",           (void **) &qcurl_slist_append},
+       {"curl_slist_free_all",         (void **) &qcurl_slist_free_all},
        {NULL, NULL}
 };
 
@@ -169,7 +184,7 @@ static dllhandle_t curl_dll = NULL;
 
 typedef struct downloadinfo_s
 {
-       char filename[MAX_QPATH];
+       char filename[MAX_OSPATH];
        char url[1024];
        char referer[256];
        qfile_t *stream;
@@ -177,14 +192,23 @@ typedef struct downloadinfo_s
        CURL *curle;
        qboolean started;
        qboolean ispak;
-       unsigned long bytes_received;
+       unsigned long bytes_received; // for buffer
+       double bytes_received_curl; // for throttling
+       double bytes_sent_curl; // for throttling
        struct downloadinfo_s *next, *prev;
        qboolean forthismap;
+       double maxspeed;
+       curl_slist *slist; // http headers
 
        unsigned char *buffer;
        size_t buffersize;
        curl_callback_t callback;
        void *callback_data;
+
+       const unsigned char *postbuf;
+       size_t postbufsize;
+       const char *post_content_type;
+       const char *extraheaders;
 }
 downloadinfo;
 static downloadinfo *downloads = NULL;
@@ -240,7 +264,7 @@ Curl_Clear_forthismap
 Clears the "will disconnect on failure" flags.
 ====================
 */
-void Curl_Clear_forthismap()
+void Curl_Clear_forthismap(void)
 {
        downloadinfo *di;
        if(noclear)
@@ -261,12 +285,12 @@ Curl_Have_forthismap
 Returns true if a download needed for the current game is running.
 ====================
 */
-qboolean Curl_Have_forthismap()
+qboolean Curl_Have_forthismap(void)
 {
-       return numdownloads_added;
+       return numdownloads_added != 0;
 }
 
-void Curl_Register_predownload()
+void Curl_Register_predownload(void)
 {
        Curl_CommandWhenDone("cl_begindownloads");
        Curl_CommandWhenError("cl_begindownloads");
@@ -280,7 +304,7 @@ Checks if a "done command" is to be executed.
 All downloads finished, at least one success since connect, no single failure
 -> execute the command.
 */
-static void Curl_CheckCommandWhenDone()
+static void Curl_CheckCommandWhenDone(void)
 {
        if(!curl_dll)
                return;
@@ -313,9 +337,7 @@ static qboolean CURL_OpenLibrary (void)
 {
        const char* dllnames [] =
        {
-#if defined(WIN64)
-               "libcurl64.dll",
-#elif defined(WIN32)
+#if defined(WIN32)
                "libcurl-4.dll",
                "libcurl-3.dll",
 #elif defined(MACOSX)
@@ -353,7 +375,8 @@ static void CURL_CloseLibrary (void)
 
 
 static CURLM *curlm = NULL;
-static unsigned long bytes_received = 0; // used for bandwidth throttling
+static double bytes_received = 0; // used for bandwidth throttling
+static double bytes_sent = 0; // used for bandwidth throttling
 static double curltime = 0;
 
 /*
@@ -385,7 +408,6 @@ static size_t CURL_fwrite(void *data, size_t size, size_t nmemb, void *vdi)
                ret = FS_Write(di->stream, data, bytes);
        }
 
-       bytes_received += bytes;
        di->bytes_received += bytes;
 
        return ret; // why not ret / nmemb?
@@ -406,30 +428,29 @@ static void curl_default_callback(int status, size_t length_received, unsigned c
        switch(status)
        {
                case CURLCBSTATUS_OK:
-                       Con_Printf("Download of %s: OK\n", di->filename);
+                       Con_DPrintf("Download of %s: OK\n", di->filename);
                        break;
                case CURLCBSTATUS_FAILED:
-                       Con_Printf("Download of %s: FAILED\n", di->filename);
+                       Con_DPrintf("Download of %s: FAILED\n", di->filename);
                        break;
                case CURLCBSTATUS_ABORTED:
-                       Con_Printf("Download of %s: ABORTED\n", di->filename);
+                       Con_DPrintf("Download of %s: ABORTED\n", di->filename);
                        break;
                case CURLCBSTATUS_SERVERERROR:
-                       Con_Printf("Download of %s: (unknown server error)\n", di->filename);
+                       Con_DPrintf("Download of %s: (unknown server error)\n", di->filename);
                        break;
                case CURLCBSTATUS_UNKNOWN:
-                       Con_Printf("Download of %s: (unknown client error)\n", di->filename);
+                       Con_DPrintf("Download of %s: (unknown client error)\n", di->filename);
                        break;
                default:
-                       Con_Printf("Download of %s: %d\n", di->filename, status);
+                       Con_DPrintf("Download of %s: %d\n", di->filename, status);
                        break;
        }
 }
 
 static void curl_quiet_callback(int status, size_t length_received, unsigned char *buffer, void *cbdata)
 {
-       if(developer.integer)
-               curl_default_callback(status, length_received, buffer, cbdata);
+       curl_default_callback(status, length_received, buffer, cbdata);
 }
 
 /*
@@ -441,7 +462,7 @@ CURL_DOWNLOAD_FAILED or CURL_DOWNLOAD_ABORTED) and in the second case the error
 code from libcurl, or 0, if another error has occurred.
 ====================
 */
-static qboolean Curl_Begin(const char *URL, const char *name, qboolean ispak, qboolean forthismap, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata);
+static qboolean Curl_Begin(const char *URL, const char *extraheaders, double maxspeed, const char *name, qboolean ispak, qboolean forthismap, const char *post_content_type, const unsigned char *postbuf, size_t postbufsize, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata);
 static void Curl_EndDownload(downloadinfo *di, CurlStatus status, CURLcode error)
 {
        qboolean ok = false;
@@ -480,9 +501,11 @@ static void Curl_EndDownload(downloadinfo *di, CurlStatus status, CURLcode error
        {
                qcurl_multi_remove_handle(curlm, di->curle);
                qcurl_easy_cleanup(di->curle);
+               if(di->slist)
+                       qcurl_slist_free_all(di->slist);
        }
 
-       if(ok && !di->bytes_received)
+       if(!di->callback && ok && !di->bytes_received)
        {
                Con_Printf("ERROR: empty file\n");
                ok = false;
@@ -506,7 +529,7 @@ static void Curl_EndDownload(downloadinfo *di, CurlStatus status, CURLcode error
                        {
                                // this was a resume?
                                // then try to redownload it without reporting the error
-                               Curl_Begin(di->url, di->filename, di->ispak, di->forthismap, NULL, 0, NULL, NULL);
+                               Curl_Begin(di->url, di->extraheaders, di->maxspeed, di->filename, di->ispak, di->forthismap, di->post_content_type, di->postbuf, di->postbufsize, NULL, 0, NULL, NULL);
                                di->forthismap = false; // don't count the error
                        }
                }
@@ -530,6 +553,37 @@ static void Curl_EndDownload(downloadinfo *di, CurlStatus status, CURLcode error
        Z_Free(di);
 }
 
+/*
+====================
+CleanURL
+
+Returns a "cleaned up" URL for display (to strip login data)
+====================
+*/
+static const char *CleanURL(const char *url)
+{
+       static char urlbuf[1024];
+       const char *p, *q, *r;
+
+       // if URL is of form anything://foo-without-slash@rest, replace by anything://rest
+       p = strstr(url, "://");
+       if(p)
+       {
+               q = strchr(p + 3, '@');
+               if(q)
+               {
+                       r = strchr(p + 3, '/');
+                       if(!r || q < r)
+                       {
+                               dpsnprintf(urlbuf, sizeof(urlbuf), "%.*s%s", (int)(p - url + 3), url, q + 1);
+                               return urlbuf;
+                       }
+               }
+       }
+
+       return url;
+}
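A hypothetical illustration of the helper (values invented; the cleaned form lives in a static buffer, so it is only meant for immediate display):

    Con_Printf("%s\n", CleanURL("ftp://player:secret@files.example.com/maps.pk3"));
        // prints "ftp://files.example.com/maps.pk3"
    Con_Printf("%s\n", CleanURL("http://files.example.com/maps.pk3"));
        // prints "http://files.example.com/maps.pk3" (no login part, returned unchanged)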
+
 /*
 ====================
 CheckPendingDownloads
@@ -539,8 +593,9 @@ To not start too many downloads at once, only one download is added at a time,
 up to a maximum number of cl_curl_maxdownloads are running.
 ====================
 */
-static void CheckPendingDownloads()
+static void CheckPendingDownloads(void)
 {
+       const char *h;
        if(!curl_dll)
                return;
        if(numdownloads < cl_curl_maxdownloads.integer)
@@ -552,7 +607,7 @@ static void CheckPendingDownloads()
                        {
                                if(!di->buffer)
                                {
-                                       Con_Printf("Downloading %s -> %s", di->url, di->filename);
+                                       Con_Printf("Downloading %s -> %s", CleanURL(di->url), di->filename);
 
                                        di->stream = FS_OpenRealFile(di->filename, "ab", false);
                                        if(!di->stream)
@@ -570,11 +625,12 @@ static void CheckPendingDownloads()
                                }
                                else
                                {
-                                       Con_DPrintf("Downloading %s -> memory\n", di->url);
+                                       Con_DPrintf("Downloading %s -> memory\n", CleanURL(di->url));
                                        di->startpos = 0;
                                }
 
                                di->curle = qcurl_easy_init();
+                               di->slist = NULL;
                                qcurl_easy_setopt(di->curle, CURLOPT_URL, di->url);
                                qcurl_easy_setopt(di->curle, CURLOPT_USERAGENT, engineversion);
                                qcurl_easy_setopt(di->curle, CURLOPT_REFERER, di->referer);
@@ -585,6 +641,44 @@ static void CheckPendingDownloads()
                                qcurl_easy_setopt(di->curle, CURLOPT_LOW_SPEED_TIME, (long) 45);
                                qcurl_easy_setopt(di->curle, CURLOPT_WRITEDATA, (void *) di);
                                qcurl_easy_setopt(di->curle, CURLOPT_PRIVATE, (void *) di);
+                               qcurl_easy_setopt(di->curle, CURLOPT_PROTOCOLS, CURLPROTO_HTTP | CURLPROTO_HTTPS | CURLPROTO_FTP);
+                               if(qcurl_easy_setopt(di->curle, CURLOPT_REDIR_PROTOCOLS, CURLPROTO_HTTP | CURLPROTO_HTTPS | CURLPROTO_FTP) != CURLE_OK)
+                               {
+                                       Con_Printf("^1WARNING:^7 for security reasons, please upgrade to libcurl 7.19.4 or above. In a later version of DarkPlaces, HTTP redirect support will be disabled for this libcurl version.\n");
+                                       //qcurl_easy_setopt(di->curle, CURLOPT_FOLLOWLOCATION, 0);
+                               }
+                               if(di->post_content_type)
+                               {
+                                       qcurl_easy_setopt(di->curle, CURLOPT_POST, 1);
+                                       qcurl_easy_setopt(di->curle, CURLOPT_POSTFIELDS, di->postbuf);
+                                       qcurl_easy_setopt(di->curle, CURLOPT_POSTFIELDSIZE, di->postbufsize);
+                                       di->slist = qcurl_slist_append(di->slist, va("Content-Type: %s", di->post_content_type));
+                               }
+
+                               // parse extra headers into slist
+                               // \n separated list!
+                               h = di->extraheaders;
+                               while(h)
+                               {
+                                       const char *hh = strchr(h, '\n');
+                                       if(hh)
+                                       {
+                                               char *buf = (char *) Mem_Alloc(tempmempool, hh - h + 1);
+                                               memcpy(buf, h, hh - h);
+                                               buf[hh - h] = 0;
+                                               di->slist = qcurl_slist_append(di->slist, buf);
+                                               h = hh + 1;
+                                       }
+                                       else
+                                       {
+                                               di->slist = qcurl_slist_append(di->slist, h);
+                                               h = NULL;
+                                       }
+                               }
+
+                               qcurl_easy_setopt(di->curle, CURLOPT_HTTPHEADER, di->slist);
+
+                               
                                qcurl_multi_add_handle(curlm, di->curle);
                                di->started = true;
                                ++numdownloads;
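A hypothetical caller-side sketch of the conventions this block implements: extra headers arrive as a single newline-separated string and are split into curl_slist entries, and a POST body is sent whenever post_content_type is non-NULL. The URL, header names, buffer and callback below are invented for illustration:

    static unsigned char statsreply[16384];

    static void MyStatsCallback(int status, size_t length_received, unsigned char *buffer, void *cbdata)
    {
        if(status == CURLCBSTATUS_OK)
            Con_DPrintf("got %u reply bytes\n", (unsigned) length_received);
    }

    static void MyStatsSubmit_Example(void)
    {
        Curl_Begin_ToMemory_POST(
            "http://stats.example.com/submit",            // URL
            "X-DP-Challenge: 12345\nAccept: text/plain",  // two extra request headers
            0,                                            // maxspeed, 0 = no per-download cap
            "application/octet-stream",                   // Content-Type of the POST body
            (const unsigned char *) "score=42", 8,        // POST body and its length
            statsreply, sizeof(statsreply),               // reply is collected here
            MyStatsCallback, NULL);                       // called when the transfer ends
    }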
@@ -603,7 +697,7 @@ this function MUST be called before using anything else in this file.
 On Win32, this must be called AFTER WSAStartup has been done!
 ====================
 */
-void Curl_Init()
+void Curl_Init(void)
 {
        CURL_OpenLibrary();
        if(!curl_dll)
@@ -619,8 +713,8 @@ Curl_Shutdown
 Surprise... closes all the stuff. Please do this BEFORE shutting down LHNET.
 ====================
 */
-void Curl_ClearRequirements();
-void Curl_Shutdown()
+void Curl_ClearRequirements(void);
+void Curl_Shutdown(void)
 {
        if(!curl_dll)
                return;
@@ -674,7 +768,7 @@ Starts a download of a given URL to the file name portion of this URL (or name
 if given) in the "dlcache/" folder.
 ====================
 */
-static qboolean Curl_Begin(const char *URL, const char *name, qboolean ispak, qboolean forthismap, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata)
+static qboolean Curl_Begin(const char *URL, const char *extraheaders, double maxspeed, const char *name, qboolean ispak, qboolean forthismap, const char *post_content_type, const unsigned char *postbuf, size_t postbufsize, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata)
 {
        if(!curl_dll)
        {
@@ -682,11 +776,32 @@ static qboolean Curl_Begin(const char *URL, const char *name, qboolean ispak, qb
        }
        else
        {
-               char fn[MAX_QPATH];
+               char fn[MAX_OSPATH];
+               char urlbuf[1024];
                const char *p, *q;
                size_t length;
                downloadinfo *di;
 
+               // if URL is protocol:///* or protocol://:port/*, insert the IP of the current server
+               p = strchr(URL, ':');
+               if(p)
+               {
+                       if(!strncmp(p, ":///", 4) || !strncmp(p, "://:", 4))
+                       {
+                               char addressstring[128];
+                               *addressstring = 0;
+                               InfoString_GetValue(cls.userinfo, "*ip", addressstring, sizeof(addressstring));
+                               q = strchr(addressstring, ':');
+                               if(!q)
+                                       q = addressstring + strlen(addressstring);
+                               if(*addressstring)
+                               {
+                                       dpsnprintf(urlbuf, sizeof(urlbuf), "%.*s://%.*s%s", (int) (p - URL), URL, (int) (q - addressstring), addressstring, URL + (p - URL) + 3);
+                                       URL = urlbuf;
+                               }
+                       }
+               }
+
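In effect (hypothetical values): if the client's userinfo carries *ip 141.2.16.7:26000, a server-sent URL of the form http:///maps/foo.pk3 expands to http://141.2.16.7/maps/foo.pk3, and http://:8080/maps/foo.pk3 expands to http://141.2.16.7:8080/maps/foo.pk3; URLs that already name a host are left untouched.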
                // Note: This extraction of the file name portion is NOT entirely correct.
                //
                // It does the following:
@@ -714,7 +829,7 @@ static qboolean Curl_Begin(const char *URL, const char *name, qboolean ispak, qb
                //   141.2.16.3 - - [17/Mar/2006:22:32:43 +0100] "GET /maps/tznex07.pk3 HTTP/1.1" 200 1077455 "dp://141.2.16.7:26000/" "Nexuiz Linux 22:07:43 Mar 17 2006"
 
                if(!name)
-                       name = URL;
+                       name = CleanURL(URL);
 
                if(!buf)
                {
@@ -731,7 +846,7 @@ static qboolean Curl_Begin(const char *URL, const char *name, qboolean ispak, qb
                                downloadinfo *di = Curl_Find(fn);
                                if(di)
                                {
-                                       Con_Printf("Can't download %s, already getting it from %s!\n", fn, di->url);
+                                       Con_Printf("Can't download %s, already getting it from %s!\n", fn, CleanURL(di->url));
 
                                        // however, if it was not for this map yet...
                                        if(forthismap && !di->forthismap)
@@ -795,7 +910,7 @@ static qboolean Curl_Begin(const char *URL, const char *name, qboolean ispak, qb
                // URL scheme (so one can't read local files using file://)
                if(strncmp(URL, "http://", 7) && strncmp(URL, "ftp://", 6) && strncmp(URL, "https://", 8))
                {
-                       Con_Printf("Curl_Begin(\"%s\")): nasty URL scheme rejected\n", URL);
+                       Con_Printf("Curl_Begin(\"%s\"): nasty URL scheme rejected\n", URL);
                        return false;
                }
 
@@ -811,7 +926,11 @@ static qboolean Curl_Begin(const char *URL, const char *name, qboolean ispak, qb
                di->curle = NULL;
                di->started = false;
                di->ispak = (ispak && !buf);
+               di->maxspeed = maxspeed;
                di->bytes_received = 0;
+               di->bytes_received_curl = 0;
+               di->bytes_sent_curl = 0;
+               di->extraheaders = extraheaders;
                di->next = downloads;
                di->prev = NULL;
                if(di->next)
@@ -830,18 +949,35 @@ static qboolean Curl_Begin(const char *URL, const char *name, qboolean ispak, qb
                        di->callback_data = cbdata;
                }
 
+               if(post_content_type)
+               {
+                       di->post_content_type = post_content_type;
+                       di->postbuf = postbuf;
+                       di->postbufsize = postbufsize;
+               }
+               else
+               {
+                       di->post_content_type = NULL;
+                       di->postbuf = NULL;
+                       di->postbufsize = 0;
+               }
+
                downloads = di;
                return true;
        }
 }
 
-qboolean Curl_Begin_ToFile(const char *URL, const char *name, qboolean ispak, qboolean forthismap)
+qboolean Curl_Begin_ToFile(const char *URL, double maxspeed, const char *name, qboolean ispak, qboolean forthismap)
+{
+       return Curl_Begin(URL, NULL, maxspeed, name, ispak, forthismap, NULL, NULL, 0, NULL, 0, NULL, NULL);
+}
+qboolean Curl_Begin_ToMemory(const char *URL, double maxspeed, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata)
 {
-       return Curl_Begin(URL, name, ispak, forthismap, NULL, 0, NULL, NULL);
+       return Curl_Begin(URL, NULL, maxspeed, NULL, false, false, NULL, NULL, 0, buf, bufsize, callback, cbdata);
 }
-qboolean Curl_Begin_ToMemory(const char *URL, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata)
+qboolean Curl_Begin_ToMemory_POST(const char *URL, const char *extraheaders, double maxspeed, const char *post_content_type, const unsigned char *postbuf, size_t postbufsize, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata)
 {
-       return Curl_Begin(URL, NULL, false, false, buf, bufsize, callback, cbdata);
+       return Curl_Begin(URL, extraheaders, maxspeed, NULL, false, false, post_content_type, postbuf, postbufsize, buf, bufsize, callback, cbdata);
 }
 
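Hypothetical call sites for the updated public wrappers, with the new maxspeed argument in KiB/s (0 means no per-download cap; URL, buffer and callback names are placeholders):

    // pak needed for the current map, saved under dlcache/mymap.pk3, throttled to 300 KiB/s
    Curl_Begin_ToFile("http://packages.example.com/mymap.pk3", 300, "mymap.pk3", true, true);

    // unthrottled fetch straight into a caller-provided buffer
    Curl_Begin_ToMemory("http://packages.example.com/motd.txt", 0, mybuf, sizeof(mybuf), MyCallback, NULL);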
 /*
@@ -852,8 +988,11 @@ call this regularly as this will always download as much as possible without
 blocking.
 ====================
 */
-void Curl_Run()
+void Curl_Run(void)
 {
+       double maxspeed;
+       downloadinfo *di;
+
        noclear = FALSE;
 
        if(!cl_curl_enabled.integer)
@@ -880,6 +1019,17 @@ void Curl_Run()
                }
                while(mc == CURLM_CALL_MULTI_PERFORM);
 
+               for(di = downloads; di; di = di->next)
+               {
+                       double b = 0;
+                       qcurl_easy_getinfo(di->curle, CURLINFO_SIZE_UPLOAD, &b);
+                       bytes_sent += (b - di->bytes_sent_curl);
+                       di->bytes_sent_curl = b;
+                       qcurl_easy_getinfo(di->curle, CURLINFO_SIZE_DOWNLOAD, &b);
+                       bytes_sent += (b - di->bytes_received_curl);
+                       di->bytes_received_curl = b;
+               }
+
                for(;;)
                {
                        CURLMsg *msg = qcurl_multi_info_read(curlm, &remaining);
@@ -887,7 +1037,6 @@ void Curl_Run()
                                break;
                        if(msg->msg == CURLMSG_DONE)
                        {
-                               downloadinfo *di;
                                CurlStatus failed = CURL_DOWNLOAD_SUCCESS;
                                CURLcode result;
                                qcurl_easy_getinfo(msg->easy_handle, CURLINFO_PRIVATE, &di);
@@ -920,11 +1069,21 @@ void Curl_Run()
        // when will we curl the next time?
        // we will wait a bit to ensure our download rate is kept.
        // we now know that realtime >= curltime... so set up a new curltime
-       if(cl_curl_maxspeed.value > 0)
+
+       // use the slowest allowing download to derive the maxspeed... this CAN
+       // be done better, but maybe later
+       maxspeed = cl_curl_maxspeed.value;
+       for(di = downloads; di; di = di->next)
+               if(di->maxspeed > 0)
+                       if(di->maxspeed < maxspeed || maxspeed <= 0)
+                               maxspeed = di->maxspeed;
+
+       if(maxspeed > 0)
        {
-               unsigned long bytes = bytes_received; // maybe smoothen a bit?
+               double bytes = bytes_sent + bytes_received; // maybe smoothen a bit?
                curltime = realtime + bytes / (cl_curl_maxspeed.value * 1024.0);
-               bytes_received -= bytes;
+               bytes_sent = 0;
+               bytes_received = 0;
        }
        else
                curltime = realtime;
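Worked example of the throttle (illustrative numbers): with a 300 KiB/s cap and 61440 bytes sent plus received since the last pass, the next transfer window is pushed back by 61440 / (300 * 1024) = 0.2 seconds; with no cap in effect, curltime is simply reset to realtime and transfers proceed immediately.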
@@ -937,7 +1096,7 @@ Curl_CancelAll
 Stops ALL downloads.
 ====================
 */
-void Curl_CancelAll()
+void Curl_CancelAll(void)
 {
        if(!curl_dll)
                return;
@@ -956,7 +1115,7 @@ Curl_Running
 returns true iff there is a download running.
 ====================
 */
-qboolean Curl_Running()
+qboolean Curl_Running(void)
 {
        if(!curl_dll)
                return false;
@@ -1018,7 +1177,7 @@ prints the download list
 ====================
 */
 // TODO rewrite using Curl_GetDownloadInfo?
-static void Curl_Info_f()
+static void Curl_Info_f(void)
 {
        downloadinfo *di;
        if(!curl_dll)
@@ -1029,7 +1188,7 @@ static void Curl_Info_f()
                for(di = downloads; di; di = di->next)
                {
                        double speed, percent;
-                       Con_Printf("  %s -> %s ",  di->url, di->filename);
+                       Con_Printf("  %s -> %s ",  CleanURL(di->url), di->filename);
                        percent = 100.0 * Curl_GetDownloadAmount(di);
                        speed = Curl_GetDownloadSpeed(di);
                        if(percent >= 0)
@@ -1072,6 +1231,7 @@ curl --finish_autodownload
 */
 void Curl_Curl_f(void)
 {
+       double maxspeed = 0;
        int i;
        int end;
        qboolean pak = false;
@@ -1091,10 +1251,6 @@ void Curl_Curl_f(void)
                return;
        }
 
-       for(i = 0; i != Cmd_Argc(); ++i)
-               Con_DPrintf("%s ", Cmd_Argv(i));
-       Con_DPrint("\n");
-
        if(Cmd_Argc() < 2)
        {
                Con_Print("usage:\ncurl --info, curl --cancel [filename], curl url\n");
@@ -1130,7 +1286,7 @@ void Curl_Curl_f(void)
                {
                        pak = true;
                }
-               else if(!strcmp(a, "--for"))
+               else if(!strcmp(a, "--for")) // must be last option
                {
                        for(i = i + 1; i != end - 1; ++i)
                        {
@@ -1181,15 +1337,19 @@ void Curl_Curl_f(void)
                        }
                        return;
                }
+               else if(!strncmp(a, "--maxspeed=", 11))
+               {
+                       maxspeed = atof(a + 11);
+               }
                else if(*a == '-')
                {
-                       Con_Printf("invalid option %s\n", a);
-                       return;
+                       Con_Printf("curl: invalid option %s\n", a);
+                       // but we ignore the option
                }
        }
 
 needthefile:
-       Curl_Begin_ToFile(url, name, pak, forthismap);
+       Curl_Begin_ToFile(url, maxspeed, name, pak, forthismap);
 }
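With the new option, a console invocation such as the following becomes possible (URL is a placeholder); note that an unrecognized option is now only reported instead of aborting the whole command:

    curl --pak --maxspeed=150 http://packages.example.com/mymap.pk3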
 
 /*
@@ -1222,6 +1382,7 @@ void Curl_Init_Commands(void)
        Cvar_RegisterVariable (&cl_curl_maxspeed);
        Cvar_RegisterVariable (&sv_curl_defaulturl);
        Cvar_RegisterVariable (&sv_curl_serverpackages);
+       Cvar_RegisterVariable (&sv_curl_maxspeed);
        Cmd_AddCommand ("curl", Curl_Curl_f, "download data from an URL and add to search path");
        //Cmd_AddCommand ("curlcat", Curl_CurlCat_f, "display data from an URL (debugging command)");
 }
@@ -1261,7 +1422,7 @@ Curl_downloadinfo_t *Curl_GetDownloadInfo(int *nDownloads, const char **addition
        for(di = downloads; di; di = di->next)
        {
                // do not show infobars for background downloads
-               if(!developer.integer)
+               if(developer.integer <= 0)
                        if(di->buffer)
                                continue;
                strlcpy(downinfo[i].filename, di->filename, sizeof(downinfo[i].filename));
@@ -1389,7 +1550,7 @@ static const char *Curl_FindPackURL(const char *filename)
 typedef struct requirement_s
 {
        struct requirement_s *next;
-       char filename[MAX_QPATH];
+       char filename[MAX_OSPATH];
 }
 requirement;
 static requirement *requirements = NULL;
@@ -1418,22 +1579,14 @@ Clears the list of required files for playing on the current map.
 This should be called at every map change.
 ====================
 */
-void Curl_ClearRequirements()
+void Curl_ClearRequirements(void)
 {
-       const char *p;
        while(requirements)
        {
                requirement *req = requirements;
                requirements = requirements->next;
                Z_Free(req);
        }
-       p = sv_curl_serverpackages.string;
-       Con_DPrintf("Require all of: %s\n", p);
-       while(COM_ParseToken_Simple(&p, false, false))
-       {
-               Con_DPrintf("Require: %s\n", com_token);
-               Curl_RequireFile(com_token);
-       }
 }
 
 /*
@@ -1448,45 +1601,56 @@ This is done by sending him the following console commands:
        curl --finish_autodownload
 ====================
 */
-void Curl_SendRequirements()
+static qboolean Curl_SendRequirement(const char *filename, qboolean foundone, char *sendbuffer, size_t sendbuffer_len)
+{
+       const char *p;
+       const char *thispack = FS_WhichPack(filename);
+       const char *packurl;
+
+       if(!thispack)
+               return false;
+
+       p = strrchr(thispack, '/');
+       if(p)
+               thispack = p + 1;
+
+       packurl = Curl_FindPackURL(thispack);
+
+       if(packurl && *packurl && strcmp(packurl, "-"))
+       {
+               if(!foundone)
+                       strlcat(sendbuffer, "curl --clear_autodownload\n", sendbuffer_len);
+
+               strlcat(sendbuffer, "curl --pak --forthismap --as ", sendbuffer_len);
+               strlcat(sendbuffer, thispack, sendbuffer_len);
+               if(sv_curl_maxspeed.value > 0)
+                       dpsnprintf(sendbuffer + strlen(sendbuffer), sendbuffer_len - strlen(sendbuffer), " --maxspeed=%.1f", sv_curl_maxspeed.value);
+               strlcat(sendbuffer, " --for ", sendbuffer_len);
+               strlcat(sendbuffer, filename, sendbuffer_len);
+               strlcat(sendbuffer, " ", sendbuffer_len);
+               strlcat(sendbuffer, packurl, sendbuffer_len);
+               strlcat(sendbuffer, thispack, sendbuffer_len);
+               strlcat(sendbuffer, "\n", sendbuffer_len);
+
+               return true;
+       }
+
+       return false;
+}
+void Curl_SendRequirements(void)
 {
        // for each requirement, find the pack name
        char sendbuffer[4096] = "";
        requirement *req;
        qboolean foundone = false;
+       const char *p;
 
        for(req = requirements; req; req = req->next)
-       {
-               const char *p;
-               const char *thispack = FS_WhichPack(req->filename);
-               const char *packurl;
-
-               if(!thispack)
-                       continue;
-
-               p = strrchr(thispack, '/');
-               if(p)
-                       thispack = p + 1;
-
-               packurl = Curl_FindPackURL(thispack);
+               foundone = Curl_SendRequirement(req->filename, foundone, sendbuffer, sizeof(sendbuffer)) || foundone;
 
-               if(packurl && *packurl && strcmp(packurl, "-"))
-               {
-                       if(!foundone)
-                               strlcat(sendbuffer, "curl --clear_autodownload\n", sizeof(sendbuffer));
-
-                       strlcat(sendbuffer, "curl --pak --forthismap --as ", sizeof(sendbuffer));
-                       strlcat(sendbuffer, thispack, sizeof(sendbuffer));
-                       strlcat(sendbuffer, " --for ", sizeof(sendbuffer));
-                       strlcat(sendbuffer, req->filename, sizeof(sendbuffer));
-                       strlcat(sendbuffer, " ", sizeof(sendbuffer));
-                       strlcat(sendbuffer, packurl, sizeof(sendbuffer));
-                       strlcat(sendbuffer, thispack, sizeof(sendbuffer));
-                       strlcat(sendbuffer, "\n", sizeof(sendbuffer));
-
-                       foundone = true;
-               }
-       }
+       p = sv_curl_serverpackages.string;
+       while(COM_ParseToken_Simple(&p, false, false))
+               foundone = Curl_SendRequirement(com_token, foundone, sendbuffer, sizeof(sendbuffer)) || foundone;
 
        if(foundone)
                strlcat(sendbuffer, "curl --finish_autodownload\n", sizeof(sendbuffer));
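Put together, a client whose current map needs one downloadable pack might be sent a command block like the following (pack name, map path and URL are invented), which this commit now derives from the live sv_curl_serverpackages string as well as the per-map requirement list:

    curl --clear_autodownload
    curl --pak --forthismap --as mymap.pk3 --maxspeed=300.0 --for maps/mymap.bsp http://packages.example.com/mymap.pk3
    curl --finish_autodownload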