evaluate sv_curl_serverpackages changes immediately, not on map restart
diff --git a/libcurl.c b/libcurl.c
index deba3b357229a31407dcad409b2a0f44b655f1dd..b472d9e9c4033d69ff98a44ea1b3e06da9eb947a 100644
--- a/libcurl.c
+++ b/libcurl.c
@@ -3,9 +3,10 @@
 #include "libcurl.h"
 
 static cvar_t cl_curl_maxdownloads = {CVAR_SAVE, "cl_curl_maxdownloads","1", "maximum number of concurrent HTTP/FTP downloads"};
-static cvar_t cl_curl_maxspeed = {CVAR_SAVE, "cl_curl_maxspeed","100", "maximum download speed (KiB/s)"};
+static cvar_t cl_curl_maxspeed = {CVAR_SAVE, "cl_curl_maxspeed","300", "maximum download speed (KiB/s)"};
 static cvar_t sv_curl_defaulturl = {CVAR_SAVE, "sv_curl_defaulturl","", "default autodownload source URL"};
 static cvar_t sv_curl_serverpackages = {CVAR_SAVE, "sv_curl_serverpackages","", "list of required files for the clients, separated by spaces"};
+static cvar_t sv_curl_maxspeed = {CVAR_SAVE, "sv_curl_maxspeed","0", "maximum download speed for clients downloading from sv_curl_defaulturl (KiB/s)"};
 static cvar_t cl_curl_enabled = {CVAR_SAVE, "cl_curl_enabled","1", "whether client's download support is enabled"};
 
 /*
@@ -21,6 +22,7 @@ static cvar_t cl_curl_enabled = {CVAR_SAVE, "cl_curl_enabled","1", "whether clie
 
 typedef struct CURL_s CURL;
 typedef struct CURLM_s CURLM;
+typedef struct curl_slist curl_slist;
 typedef enum
 {
        CURLE_OK = 0
@@ -46,15 +48,19 @@ typedef enum
        CINIT(URL,  OBJECTPOINT, 2),
        CINIT(ERRORBUFFER, OBJECTPOINT, 10),
        CINIT(WRITEFUNCTION, FUNCTIONPOINT, 11),
+       CINIT(POSTFIELDS, OBJECTPOINT, 15),
        CINIT(REFERER, OBJECTPOINT, 16),
        CINIT(USERAGENT, OBJECTPOINT, 18),
+       CINIT(LOW_SPEED_LIMIT, LONG , 19),
+       CINIT(LOW_SPEED_TIME, LONG, 20),
        CINIT(RESUME_FROM, LONG, 21),
+       CINIT(HTTPHEADER, OBJECTPOINT, 23),
+       CINIT(POST, LONG, 47),         /* HTTP POST method */
        CINIT(FOLLOWLOCATION, LONG, 52),  /* use Location: Luke! */
+       CINIT(POSTFIELDSIZE, LONG, 60),
        CINIT(PRIVATE, OBJECTPOINT, 103),
-       CINIT(LOW_SPEED_LIMIT, LONG , 19),
-       CINIT(LOW_SPEED_TIME, LONG, 20),
        CINIT(PROTOCOLS, LONG, 181),
-       CINIT(REDIR_PROTOCOLS, LONG, 182),
+       CINIT(REDIR_PROTOCOLS, LONG, 182)
 }
 CURLoption;
 #define CURLPROTO_HTTP   (1<<0)
@@ -107,7 +113,7 @@ typedef enum
        CURLINFO_PROXYAUTH_AVAIL  = CURLINFO_LONG   + 24,
        CURLINFO_OS_ERRNO         = CURLINFO_LONG   + 25,
        CURLINFO_NUM_CONNECTS     = CURLINFO_LONG   + 26,
-       CURLINFO_SSL_ENGINES      = CURLINFO_SLIST  + 27,
+       CURLINFO_SSL_ENGINES      = CURLINFO_SLIST  + 27
 }
 CURLINFO;
 
@@ -148,6 +154,8 @@ static CURLMcode (*qcurl_multi_remove_handle) (CURLM *multi_handle, CURL *easy_h
 static CURLMsg * (*qcurl_multi_info_read) (CURLM *multi_handle, int *msgs_in_queue);
 static void (*qcurl_multi_cleanup) (CURLM *);
 static const char * (*qcurl_multi_strerror) (CURLcode);
+static curl_slist * (*qcurl_slist_append) (curl_slist *list, const char *string);
+static void (*qcurl_slist_free_all) (curl_slist *list);
 
 static dllfunction_t curlfuncs[] =
 {
@@ -165,6 +173,8 @@ static dllfunction_t curlfuncs[] =
        {"curl_multi_info_read",        (void **) &qcurl_multi_info_read},
        {"curl_multi_cleanup",          (void **) &qcurl_multi_cleanup},
        {"curl_multi_strerror",         (void **) &qcurl_multi_strerror},
+       {"curl_slist_append",           (void **) &qcurl_slist_append},
+       {"curl_slist_free_all",         (void **) &qcurl_slist_free_all},
        {NULL, NULL}
 };
 
@@ -182,14 +192,23 @@ typedef struct downloadinfo_s
        CURL *curle;
        qboolean started;
        qboolean ispak;
-       unsigned long bytes_received;
+       unsigned long bytes_received; // for buffer
+       double bytes_received_curl; // for throttling
+       double bytes_sent_curl; // for throttling
        struct downloadinfo_s *next, *prev;
        qboolean forthismap;
+       double maxspeed;
+       curl_slist *slist; // http headers
 
        unsigned char *buffer;
        size_t buffersize;
        curl_callback_t callback;
        void *callback_data;
+
+       const unsigned char *postbuf;
+       size_t postbufsize;
+       const char *post_content_type;
+       const char *extraheaders;
 }
 downloadinfo;
 static downloadinfo *downloads = NULL;
@@ -356,7 +375,8 @@ static void CURL_CloseLibrary (void)
 
 
 static CURLM *curlm = NULL;
-static unsigned long bytes_received = 0; // used for bandwidth throttling
+static double bytes_received = 0; // used for bandwidth throttling
+static double bytes_sent = 0; // used for bandwidth throttling
 static double curltime = 0;
 
 /*
@@ -388,7 +408,6 @@ static size_t CURL_fwrite(void *data, size_t size, size_t nmemb, void *vdi)
                ret = FS_Write(di->stream, data, bytes);
        }
 
-       bytes_received += bytes;
        di->bytes_received += bytes;
 
        return ret; // why not ret / nmemb?
@@ -443,7 +462,7 @@ CURL_DOWNLOAD_FAILED or CURL_DOWNLOAD_ABORTED) and in the second case the error
 code from libcurl, or 0, if another error has occurred.
 ====================
 */
-static qboolean Curl_Begin(const char *URL, const char *name, qboolean ispak, qboolean forthismap, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata);
+static qboolean Curl_Begin(const char *URL, const char *extraheaders, double maxspeed, const char *name, qboolean ispak, qboolean forthismap, const char *post_content_type, const unsigned char *postbuf, size_t postbufsize, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata);
 static void Curl_EndDownload(downloadinfo *di, CurlStatus status, CURLcode error)
 {
        qboolean ok = false;
@@ -482,6 +501,8 @@ static void Curl_EndDownload(downloadinfo *di, CurlStatus status, CURLcode error
        {
                qcurl_multi_remove_handle(curlm, di->curle);
                qcurl_easy_cleanup(di->curle);
+               if(di->slist)
+                       qcurl_slist_free_all(di->slist);
        }
 
        if(!di->callback && ok && !di->bytes_received)
@@ -508,7 +529,7 @@ static void Curl_EndDownload(downloadinfo *di, CurlStatus status, CURLcode error
                        {
                                // this was a resume?
                                // then try to redownload it without reporting the error
-                               Curl_Begin(di->url, di->filename, di->ispak, di->forthismap, NULL, 0, NULL, NULL);
+                               Curl_Begin(di->url, di->extraheaders, di->maxspeed, di->filename, di->ispak, di->forthismap, di->post_content_type, di->postbuf, di->postbufsize, NULL, 0, NULL, NULL);
                                di->forthismap = false; // don't count the error
                        }
                }
@@ -532,6 +553,37 @@ static void Curl_EndDownload(downloadinfo *di, CurlStatus status, CURLcode error
        Z_Free(di);
 }
 
+/*
+====================
+CleanURL
+
+Returns a "cleaned up" URL for display (to strip login data)
+====================
+*/
+static const char *CleanURL(const char *url)
+{
+       static char urlbuf[1024];
+       const char *p, *q, *r;
+
+       // if URL is of form anything://foo-without-slash@rest, replace by anything://rest
+       p = strstr(url, "://");
+       if(p)
+       {
+               q = strchr(p + 3, '@');
+               if(q)
+               {
+                       r = strchr(p + 3, '/');
+                       if(!r || q < r)
+                       {
+                               dpsnprintf(urlbuf, sizeof(urlbuf), "%.*s%s", (int)(p - url + 3), url, q + 1);
+                               return urlbuf;
+                       }
+               }
+       }
+
+       return url;
+}
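+
+// Example with hypothetical URLs: CleanURL("http://user:pass@example.com/data.pk3")
+// returns "http://example.com/data.pk3", while "http://example.com/a@b.pk3" is
+// returned unchanged because its '@' comes after the first path slash.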
+
 /*
 ====================
 CheckPendingDownloads
@@ -543,6 +595,7 @@ up to a maximum number of cl_curl_maxdownloads are running.
 */
 static void CheckPendingDownloads(void)
 {
+       const char *h;
        if(!curl_dll)
                return;
        if(numdownloads < cl_curl_maxdownloads.integer)
@@ -554,7 +607,7 @@ static void CheckPendingDownloads(void)
                        {
                                if(!di->buffer)
                                {
-                                       Con_Printf("Downloading %s -> %s", di->url, di->filename);
+                                       Con_Printf("Downloading %s -> %s", CleanURL(di->url), di->filename);
 
                                        di->stream = FS_OpenRealFile(di->filename, "ab", false);
                                        if(!di->stream)
@@ -572,11 +625,12 @@ static void CheckPendingDownloads(void)
                                }
                                else
                                {
-                                       Con_DPrintf("Downloading %s -> memory\n", di->url);
+                                       Con_DPrintf("Downloading %s -> memory\n", CleanURL(di->url));
                                        di->startpos = 0;
                                }
 
                                di->curle = qcurl_easy_init();
+                               di->slist = NULL;
                                qcurl_easy_setopt(di->curle, CURLOPT_URL, di->url);
                                qcurl_easy_setopt(di->curle, CURLOPT_USERAGENT, engineversion);
                                qcurl_easy_setopt(di->curle, CURLOPT_REFERER, di->referer);
@@ -593,6 +647,37 @@ static void CheckPendingDownloads(void)
                                        Con_Printf("^1WARNING:^7 for security reasons, please upgrade to libcurl 7.19.4 or above. In a later version of DarkPlaces, HTTP redirect support will be disabled for this libcurl version.\n");
                                        //qcurl_easy_setopt(di->curle, CURLOPT_FOLLOWLOCATION, 0);
                                }
+                               if(di->post_content_type)
+                               {
+                                       qcurl_easy_setopt(di->curle, CURLOPT_POST, 1);
+                                       qcurl_easy_setopt(di->curle, CURLOPT_POSTFIELDS, di->postbuf);
+                                       qcurl_easy_setopt(di->curle, CURLOPT_POSTFIELDSIZE, di->postbufsize);
+                                       di->slist = qcurl_slist_append(di->slist, va("Content-Type: %s", di->post_content_type));
+                               }
+
+                               // parse extra headers into slist
+                               // \n separated list!
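+                               // e.g. an extraheaders string of "X-Foo: 1\nX-Bar: 2"
+                               // (hypothetical header names) becomes the two slist
+                               // entries "X-Foo: 1" and "X-Bar: 2"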
+                               h = di->extraheaders;
+                               while(h)
+                               {
+                                       const char *hh = strchr(h, '\n');
+                                       if(hh)
+                                       {
+                                               char *buf = (char *) Mem_Alloc(tempmempool, hh - h + 1);
+                                               memcpy(buf, h, hh - h);
+                                               buf[hh - h] = 0;
+                                               di->slist = qcurl_slist_append(di->slist, buf);
+                                               h = hh + 1;
+                                       }
+                                       else
+                                       {
+                                               di->slist = qcurl_slist_append(di->slist, h);
+                                               h = NULL;
+                                       }
+                               }
+
+                               qcurl_easy_setopt(di->curle, CURLOPT_HTTPHEADER, di->slist);
+
                                
                                qcurl_multi_add_handle(curlm, di->curle);
                                di->started = true;
@@ -683,7 +768,7 @@ Starts a download of a given URL to the file name portion of this URL (or name
 if given) in the "dlcache/" folder.
 ====================
 */
-static qboolean Curl_Begin(const char *URL, const char *name, qboolean ispak, qboolean forthismap, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata)
+static qboolean Curl_Begin(const char *URL, const char *extraheaders, double maxspeed, const char *name, qboolean ispak, qboolean forthismap, const char *post_content_type, const unsigned char *postbuf, size_t postbufsize, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata)
 {
        if(!curl_dll)
        {
@@ -744,7 +829,7 @@ static qboolean Curl_Begin(const char *URL, const char *name, qboolean ispak, qb
                //   141.2.16.3 - - [17/Mar/2006:22:32:43 +0100] "GET /maps/tznex07.pk3 HTTP/1.1" 200 1077455 "dp://141.2.16.7:26000/" "Nexuiz Linux 22:07:43 Mar 17 2006"
 
                if(!name)
-                       name = URL;
+                       name = CleanURL(URL);
 
                if(!buf)
                {
@@ -761,7 +846,7 @@ static qboolean Curl_Begin(const char *URL, const char *name, qboolean ispak, qb
                                downloadinfo *di = Curl_Find(fn);
                                if(di)
                                {
-                                       Con_Printf("Can't download %s, already getting it from %s!\n", fn, di->url);
+                                       Con_Printf("Can't download %s, already getting it from %s!\n", fn, CleanURL(di->url));
 
                                        // however, if it was not for this map yet...
                                        if(forthismap && !di->forthismap)
@@ -841,7 +926,11 @@ static qboolean Curl_Begin(const char *URL, const char *name, qboolean ispak, qb
                di->curle = NULL;
                di->started = false;
                di->ispak = (ispak && !buf);
+               di->maxspeed = maxspeed;
                di->bytes_received = 0;
+               di->bytes_received_curl = 0;
+               di->bytes_sent_curl = 0;
+               di->extraheaders = extraheaders;
                di->next = downloads;
                di->prev = NULL;
                if(di->next)
@@ -860,18 +949,35 @@ static qboolean Curl_Begin(const char *URL, const char *name, qboolean ispak, qb
                        di->callback_data = cbdata;
                }
 
+               if(post_content_type)
+               {
+                       di->post_content_type = post_content_type;
+                       di->postbuf = postbuf;
+                       di->postbufsize = postbufsize;
+               }
+               else
+               {
+                       di->post_content_type = NULL;
+                       di->postbuf = NULL;
+                       di->postbufsize = 0;
+               }
+
                downloads = di;
                return true;
        }
 }
 
-qboolean Curl_Begin_ToFile(const char *URL, const char *name, qboolean ispak, qboolean forthismap)
+qboolean Curl_Begin_ToFile(const char *URL, double maxspeed, const char *name, qboolean ispak, qboolean forthismap)
 {
-       return Curl_Begin(URL, name, ispak, forthismap, NULL, 0, NULL, NULL);
+       return Curl_Begin(URL, NULL, maxspeed, name, ispak, forthismap, NULL, NULL, 0, NULL, 0, NULL, NULL);
 }
-qboolean Curl_Begin_ToMemory(const char *URL, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata)
+qboolean Curl_Begin_ToMemory(const char *URL, double maxspeed, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata)
 {
-       return Curl_Begin(URL, NULL, false, false, buf, bufsize, callback, cbdata);
+       return Curl_Begin(URL, NULL, maxspeed, NULL, false, false, NULL, NULL, 0, buf, bufsize, callback, cbdata);
+}
+qboolean Curl_Begin_ToMemory_POST(const char *URL, const char *extraheaders, double maxspeed, const char *post_content_type, const unsigned char *postbuf, size_t postbufsize, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata)
+{
+       return Curl_Begin(URL, extraheaders, maxspeed, NULL, false, false, post_content_type, postbuf, postbufsize, buf, bufsize, callback, cbdata);
 }
 
 /*
@@ -884,6 +990,9 @@ blocking.
 */
 void Curl_Run(void)
 {
+       double maxspeed;
+       downloadinfo *di;
+
        noclear = FALSE;
 
        if(!cl_curl_enabled.integer)
@@ -910,6 +1019,17 @@ void Curl_Run(void)
                }
                while(mc == CURLM_CALL_MULTI_PERFORM);
 
+               for(di = downloads; di; di = di->next)
+               {
+                       double b = 0;
+                       if(!di->curle) // queued downloads not yet started have no easy handle
+                               continue;
+                       qcurl_easy_getinfo(di->curle, CURLINFO_SIZE_UPLOAD, &b);
+                       bytes_sent += (b - di->bytes_sent_curl);
+                       di->bytes_sent_curl = b;
+                       qcurl_easy_getinfo(di->curle, CURLINFO_SIZE_DOWNLOAD, &b);
+                       bytes_received += (b - di->bytes_received_curl);
+                       di->bytes_received_curl = b;
+               }
+
                for(;;)
                {
                        CURLMsg *msg = qcurl_multi_info_read(curlm, &remaining);
@@ -917,7 +1037,6 @@ void Curl_Run(void)
                                break;
                        if(msg->msg == CURLMSG_DONE)
                        {
-                               downloadinfo *di;
                                CurlStatus failed = CURL_DOWNLOAD_SUCCESS;
                                CURLcode result;
                                qcurl_easy_getinfo(msg->easy_handle, CURLINFO_PRIVATE, &di);
@@ -950,11 +1069,21 @@ void Curl_Run(void)
        // when will we curl the next time?
        // we will wait a bit to ensure our download rate is kept.
        // we now know that realtime >= curltime... so set up a new curltime
-       if(cl_curl_maxspeed.value > 0)
+
+       // use the slowest allowing download to derive the maxspeed... this CAN
+       // be done better, but maybe later
+       maxspeed = cl_curl_maxspeed.value;
+       for(di = downloads; di; di = di->next)
+               if(di->maxspeed > 0)
+                       if(di->maxspeed < maxspeed || maxspeed <= 0)
+                               maxspeed = di->maxspeed;
+
+       if(maxspeed > 0)
        {
-               unsigned long bytes = bytes_received; // maybe smoothen a bit?
+               double bytes = bytes_sent + bytes_received; // maybe smoothen a bit?
-               curltime = realtime + bytes / (cl_curl_maxspeed.value * 1024.0);
+               curltime = realtime + bytes / (maxspeed * 1024.0);
-               bytes_received -= bytes;
+               bytes_sent = 0;
+               bytes_received = 0;
        }
        else
                curltime = realtime;
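+       // Worked example of the throttle (numbers are illustrative): with
+       // maxspeed = 300 KiB/s and 76800 bytes transferred since the last check,
+       // the next curl run is delayed by 76800 / (300 * 1024) = 0.25 seconds.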
@@ -1059,7 +1188,7 @@ static void Curl_Info_f(void)
                for(di = downloads; di; di = di->next)
                {
                        double speed, percent;
-                       Con_Printf("  %s -> %s ",  di->url, di->filename);
+                       Con_Printf("  %s -> %s ",  CleanURL(di->url), di->filename);
                        percent = 100.0 * Curl_GetDownloadAmount(di);
                        speed = Curl_GetDownloadSpeed(di);
                        if(percent >= 0)
@@ -1102,6 +1231,7 @@ curl --finish_autodownload
 */
 void Curl_Curl_f(void)
 {
+       double maxspeed = 0;
        int i;
        int end;
        qboolean pak = false;
@@ -1121,10 +1251,6 @@ void Curl_Curl_f(void)
                return;
        }
 
-       for(i = 0; i != Cmd_Argc(); ++i)
-               Con_DPrintf("%s ", Cmd_Argv(i));
-       Con_DPrint("\n");
-
        if(Cmd_Argc() < 2)
        {
                Con_Print("usage:\ncurl --info, curl --cancel [filename], curl url\n");
@@ -1160,7 +1286,7 @@ void Curl_Curl_f(void)
                {
                        pak = true;
                }
-               else if(!strcmp(a, "--for"))
+               else if(!strcmp(a, "--for")) // must be last option
                {
                        for(i = i + 1; i != end - 1; ++i)
                        {
@@ -1211,15 +1337,19 @@ void Curl_Curl_f(void)
                        }
                        return;
                }
+               else if(!strncmp(a, "--maxspeed=", 11))
+               {
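+                       // e.g. a (hypothetical) "curl --maxspeed=300.5 http://example.com/foo.pk3"
+                       // requests a 300.5 KiB/s cap; Curl_Run derives the effective
+                       // throttle from the slowest-capped active download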
+                       maxspeed = atof(a + 11);
+               }
                else if(*a == '-')
                {
-                       Con_Printf("invalid option %s\n", a);
-                       return;
+                       Con_Printf("curl: invalid option %s\n", a);
+                       // but we ignore the option
                }
        }
 
 needthefile:
-       Curl_Begin_ToFile(url, name, pak, forthismap);
+       Curl_Begin_ToFile(url, maxspeed, name, pak, forthismap);
 }
 
 /*
@@ -1252,6 +1382,7 @@ void Curl_Init_Commands(void)
        Cvar_RegisterVariable (&cl_curl_maxspeed);
        Cvar_RegisterVariable (&sv_curl_defaulturl);
        Cvar_RegisterVariable (&sv_curl_serverpackages);
+       Cvar_RegisterVariable (&sv_curl_maxspeed);
        Cmd_AddCommand ("curl", Curl_Curl_f, "download data from an URL and add to search path");
        //Cmd_AddCommand ("curlcat", Curl_CurlCat_f, "display data from an URL (debugging command)");
 }
@@ -1291,7 +1422,7 @@ Curl_downloadinfo_t *Curl_GetDownloadInfo(int *nDownloads, const char **addition
        for(di = downloads; di; di = di->next)
        {
                // do not show infobars for background downloads
-               if(!developer.integer)
+               if(developer.integer <= 0)
                        if(di->buffer)
                                continue;
                strlcpy(downinfo[i].filename, di->filename, sizeof(downinfo[i].filename));
@@ -1450,20 +1581,12 @@ This should be called at every map change.
 */
 void Curl_ClearRequirements(void)
 {
-       const char *p;
        while(requirements)
        {
                requirement *req = requirements;
                requirements = requirements->next;
                Z_Free(req);
        }
-       p = sv_curl_serverpackages.string;
-       Con_DPrintf("Require all of: %s\n", p);
-       while(COM_ParseToken_Simple(&p, false, false))
-       {
-               Con_DPrintf("Require: %s\n", com_token);
-               Curl_RequireFile(com_token);
-       }
 }
 
 /*
@@ -1478,45 +1601,56 @@ This is done by sending him the following console commands:
        curl --finish_autodownload
 ====================
 */
+static qboolean Curl_SendRequirement(const char *filename, qboolean foundone, char *sendbuffer, size_t sendbuffer_len)
+{
+       const char *p;
+       const char *thispack = FS_WhichPack(filename);
+       const char *packurl;
+
+       if(!thispack)
+               return false;
+
+       p = strrchr(thispack, '/');
+       if(p)
+               thispack = p + 1;
+
+       packurl = Curl_FindPackURL(thispack);
+
+       if(packurl && *packurl && strcmp(packurl, "-"))
+       {
+               if(!foundone)
+                       strlcat(sendbuffer, "curl --clear_autodownload\n", sendbuffer_len);
+
+               strlcat(sendbuffer, "curl --pak --forthismap --as ", sendbuffer_len);
+               strlcat(sendbuffer, thispack, sendbuffer_len);
+               if(sv_curl_maxspeed.value > 0)
+                       dpsnprintf(sendbuffer + strlen(sendbuffer), sendbuffer_len - strlen(sendbuffer), " --maxspeed=%.1f", sv_curl_maxspeed.value);
+               strlcat(sendbuffer, " --for ", sendbuffer_len);
+               strlcat(sendbuffer, filename, sendbuffer_len);
+               strlcat(sendbuffer, " ", sendbuffer_len);
+               strlcat(sendbuffer, packurl, sendbuffer_len);
+               strlcat(sendbuffer, thispack, sendbuffer_len);
+               strlcat(sendbuffer, "\n", sendbuffer_len);
+
+               return true;
+       }
+
+       return false;
+}
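+// For illustration (pack name, map and URL are hypothetical), the command block
+// sent to a client for a single requirement looks like:
+//   curl --clear_autodownload
+//   curl --pak --forthismap --as data.pk3 --maxspeed=300.0 --for maps/foo.bsp http://example.com/data.pk3
+//   curl --finish_autodownload
+// (the --maxspeed part is only added when sv_curl_maxspeed is greater than 0,
+// and --finish_autodownload is appended by Curl_SendRequirements below)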
 void Curl_SendRequirements(void)
 {
        // for each requirement, find the pack name
        char sendbuffer[4096] = "";
        requirement *req;
        qboolean foundone = false;
+       const char *p;
 
        for(req = requirements; req; req = req->next)
-       {
-               const char *p;
-               const char *thispack = FS_WhichPack(req->filename);
-               const char *packurl;
-
-               if(!thispack)
-                       continue;
-
-               p = strrchr(thispack, '/');
-               if(p)
-                       thispack = p + 1;
-
-               packurl = Curl_FindPackURL(thispack);
+               foundone = Curl_SendRequirement(req->filename, foundone, sendbuffer, sizeof(sendbuffer)) || foundone;
 
-               if(packurl && *packurl && strcmp(packurl, "-"))
-               {
-                       if(!foundone)
-                               strlcat(sendbuffer, "curl --clear_autodownload\n", sizeof(sendbuffer));
-
-                       strlcat(sendbuffer, "curl --pak --forthismap --as ", sizeof(sendbuffer));
-                       strlcat(sendbuffer, thispack, sizeof(sendbuffer));
-                       strlcat(sendbuffer, " --for ", sizeof(sendbuffer));
-                       strlcat(sendbuffer, req->filename, sizeof(sendbuffer));
-                       strlcat(sendbuffer, " ", sizeof(sendbuffer));
-                       strlcat(sendbuffer, packurl, sizeof(sendbuffer));
-                       strlcat(sendbuffer, thispack, sizeof(sendbuffer));
-                       strlcat(sendbuffer, "\n", sizeof(sendbuffer));
-
-                       foundone = true;
-               }
-       }
+       p = sv_curl_serverpackages.string;
+       while(COM_ParseToken_Simple(&p, false, false))
+               foundone = Curl_SendRequirement(com_token, foundone, sendbuffer, sizeof(sendbuffer)) || foundone;
 
        if(foundone)
                strlcat(sendbuffer, "curl --finish_autodownload\n", sizeof(sendbuffer));