// Console variables controlling the cURL-based HTTP/FTP download subsystem
// (cvar_t initializer: flags, name, default value string, description).
6 static cvar_t cl_curl_maxdownloads = {CVAR_SAVE, "cl_curl_maxdownloads","1", "maximum number of concurrent HTTP/FTP downloads"};
7 static cvar_t cl_curl_maxspeed = {CVAR_SAVE, "cl_curl_maxspeed","300", "maximum download speed (KiB/s)"};
8 static cvar_t sv_curl_defaulturl = {CVAR_SAVE, "sv_curl_defaulturl","", "default autodownload source URL"};
9 static cvar_t sv_curl_serverpackages = {CVAR_SAVE, "sv_curl_serverpackages","", "list of required files for the clients, separated by spaces"};
10 static cvar_t sv_curl_maxspeed = {CVAR_SAVE, "sv_curl_maxspeed","0", "maximum download speed for clients downloading from sv_curl_defaulturl (KiB/s)"};
11 static cvar_t cl_curl_enabled = {CVAR_SAVE, "cl_curl_enabled","1", "whether client's download support is enabled"};
12 static cvar_t cl_curl_useragent = {0, "cl_curl_useragent","1", "send the User-Agent string (note: turning this off may break stuff)"};
13 static cvar_t cl_curl_useragent_append = {0, "cl_curl_useragent_append","", "a string to append to the User-Agent string (useful for name and version number of your mod)"};
// NOTE(review): throughout this excerpt, the leading numbers on each line look
// like original-file line numbers fused into the text by an extraction step,
// and some delimiter lines (enum/struct braces, #if lines) appear to be
// elided. The content is documented as-is rather than reconstructed.
16 =================================================================
18 Minimal set of definitions from libcurl
20 WARNING: for a matter of simplicity, several pointer types are
21 casted to "void*", and most enumerated values are not included
23 =================================================================
// Opaque libcurl handle types: easy handle, multi handle, and the linked
// string list used for custom HTTP headers.
26 typedef struct CURL_s CURL;
27 typedef struct CURLM_s CURLM;
28 typedef struct curl_slist curl_slist;
// CURLMcode value meaning "call curl_multi_perform() again immediately".
36 CURLM_CALL_MULTI_PERFORM=-1, /* please call curl_multi_perform() soon */
// curl_global_init() flag values.
40 #define CURL_GLOBAL_NOTHING 0
41 #define CURL_GLOBAL_SSL 1
42 #define CURL_GLOBAL_WIN32 2
// CURLoption numbering scheme: each option id = type base + sequence number,
// reproduced here so the CINIT macro matches libcurl's ABI.
43 #define CURLOPTTYPE_LONG 0
44 #define CURLOPTTYPE_OBJECTPOINT 10000
45 #define CURLOPTTYPE_FUNCTIONPOINT 20000
46 #define CURLOPTTYPE_OFF_T 30000
47 #define CINIT(name,type,number) CURLOPT_ ## name = CURLOPTTYPE_ ## type + number
// Subset of CURLoption values actually passed to qcurl_easy_setopt below.
50 CINIT(WRITEDATA, OBJECTPOINT, 1),
51 CINIT(URL, OBJECTPOINT, 2),
52 CINIT(ERRORBUFFER, OBJECTPOINT, 10),
53 CINIT(WRITEFUNCTION, FUNCTIONPOINT, 11),
54 CINIT(POSTFIELDS, OBJECTPOINT, 15),
55 CINIT(REFERER, OBJECTPOINT, 16),
56 CINIT(USERAGENT, OBJECTPOINT, 18),
57 CINIT(LOW_SPEED_LIMIT, LONG , 19),
58 CINIT(LOW_SPEED_TIME, LONG, 20),
59 CINIT(RESUME_FROM, LONG, 21),
60 CINIT(HTTPHEADER, OBJECTPOINT, 23),
61 CINIT(POST, LONG, 47), /* HTTP POST method */
62 CINIT(FOLLOWLOCATION, LONG, 52), /* use Location: Luke! */
63 CINIT(POSTFIELDSIZE, LONG, 60),
64 CINIT(PRIVATE, OBJECTPOINT, 103),
65 CINIT(PROTOCOLS, LONG, 181),
66 CINIT(REDIR_PROTOCOLS, LONG, 182)
// Protocol bitmask values for CURLOPT_PROTOCOLS / CURLOPT_REDIR_PROTOCOLS.
69 #define CURLPROTO_HTTP (1<<0)
70 #define CURLPROTO_HTTPS (1<<1)
71 #define CURLPROTO_FTP (1<<2)
// curl_infotype values (debug callback categories); listed for ABI match only.
75 CURLINFO_HEADER_IN, /* 1 */
76 CURLINFO_HEADER_OUT, /* 2 */
77 CURLINFO_DATA_IN, /* 3 */
78 CURLINFO_DATA_OUT, /* 4 */
79 CURLINFO_SSL_DATA_IN, /* 5 */
80 CURLINFO_SSL_DATA_OUT, /* 6 */
// CURLINFO type tags: the high nibble of a CURLINFO value encodes the result
// type expected by curl_easy_getinfo, the low bits the sequence number.
84 #define CURLINFO_STRING 0x100000
85 #define CURLINFO_LONG 0x200000
86 #define CURLINFO_DOUBLE 0x300000
87 #define CURLINFO_SLIST 0x400000
88 #define CURLINFO_MASK 0x0fffff
89 #define CURLINFO_TYPEMASK 0xf00000
// CURLINFO enumeration (curl_easy_getinfo keys); each value is its type tag
// plus a sequence number, matching libcurl's numbering.
92 CURLINFO_NONE, /* first, never use this */
93 CURLINFO_EFFECTIVE_URL = CURLINFO_STRING + 1,
94 CURLINFO_RESPONSE_CODE = CURLINFO_LONG + 2,
95 CURLINFO_TOTAL_TIME = CURLINFO_DOUBLE + 3,
96 CURLINFO_NAMELOOKUP_TIME = CURLINFO_DOUBLE + 4,
97 CURLINFO_CONNECT_TIME = CURLINFO_DOUBLE + 5,
98 CURLINFO_PRETRANSFER_TIME = CURLINFO_DOUBLE + 6,
99 CURLINFO_SIZE_UPLOAD = CURLINFO_DOUBLE + 7,
100 CURLINFO_SIZE_DOWNLOAD = CURLINFO_DOUBLE + 8,
101 CURLINFO_SPEED_DOWNLOAD = CURLINFO_DOUBLE + 9,
102 CURLINFO_SPEED_UPLOAD = CURLINFO_DOUBLE + 10,
103 CURLINFO_HEADER_SIZE = CURLINFO_LONG + 11,
104 CURLINFO_REQUEST_SIZE = CURLINFO_LONG + 12,
105 CURLINFO_SSL_VERIFYRESULT = CURLINFO_LONG + 13,
106 CURLINFO_FILETIME = CURLINFO_LONG + 14,
107 CURLINFO_CONTENT_LENGTH_DOWNLOAD = CURLINFO_DOUBLE + 15,
108 CURLINFO_CONTENT_LENGTH_UPLOAD = CURLINFO_DOUBLE + 16,
109 CURLINFO_STARTTRANSFER_TIME = CURLINFO_DOUBLE + 17,
110 CURLINFO_CONTENT_TYPE = CURLINFO_STRING + 18,
111 CURLINFO_REDIRECT_TIME = CURLINFO_DOUBLE + 19,
112 CURLINFO_REDIRECT_COUNT = CURLINFO_LONG + 20,
113 CURLINFO_PRIVATE = CURLINFO_STRING + 21,
114 CURLINFO_HTTP_CONNECTCODE = CURLINFO_LONG + 22,
115 CURLINFO_HTTPAUTH_AVAIL = CURLINFO_LONG + 23,
116 CURLINFO_PROXYAUTH_AVAIL = CURLINFO_LONG + 24,
117 CURLINFO_OS_ERRNO = CURLINFO_LONG + 25,
118 CURLINFO_NUM_CONNECTS = CURLINFO_LONG + 26,
119 CURLINFO_SSL_ENGINES = CURLINFO_SLIST + 27
// CURLMSG values and the CURLMsg struct returned by curl_multi_info_read;
// the 'whatever'/'result' members form the union libcurl calls 'data'.
125 CURLMSG_NONE, /* first, not used */
126 CURLMSG_DONE, /* This easy handle has completed. 'result' contains
127 the CURLcode of the transfer */
133 CURLMSG msg; /* what this message means */
134 CURL *easy_handle; /* the handle it concerns */
137 void *whatever; /* message-specific data */
138 CURLcode result; /* return code for transfer */
// Function pointers resolved at runtime from the dynamically loaded cURL
// library; the qcurl_ prefix distinguishes them from real libcurl symbols.
144 static void (*qcurl_global_init) (long flags);
145 static void (*qcurl_global_cleanup) (void);
147 static CURL * (*qcurl_easy_init) (void);
148 static void (*qcurl_easy_cleanup) (CURL *handle);
149 static CURLcode (*qcurl_easy_setopt) (CURL *handle, CURLoption option, ...);
150 static CURLcode (*qcurl_easy_getinfo) (CURL *handle, CURLINFO info, ...);
151 static const char * (*qcurl_easy_strerror) (CURLcode);
153 static CURLM * (*qcurl_multi_init) (void);
154 static CURLMcode (*qcurl_multi_perform) (CURLM *multi_handle, int *running_handles);
155 static CURLMcode (*qcurl_multi_add_handle) (CURLM *multi_handle, CURL *easy_handle);
156 static CURLMcode (*qcurl_multi_remove_handle) (CURLM *multi_handle, CURL *easy_handle);
157 static CURLMsg * (*qcurl_multi_info_read) (CURLM *multi_handle, int *msgs_in_queue);
158 static void (*qcurl_multi_cleanup) (CURLM *);
// NOTE(review): libcurl declares curl_multi_strerror(CURLMcode); the CURLcode
// parameter here may be a deliberate simplification -- verify against the
// elided CURLMcode typedef.
159 static const char * (*qcurl_multi_strerror) (CURLcode);
160 static curl_slist * (*qcurl_slist_append) (curl_slist *list, const char *string);
161 static void (*qcurl_slist_free_all) (curl_slist *list);
// Symbol-name -> pointer table handed to Sys_LoadLibrary; it fills in all
// qcurl_* pointers above (library load fails if any symbol is missing).
163 static dllfunction_t curlfuncs[] =
165 {"curl_global_init", (void **) &qcurl_global_init},
166 {"curl_global_cleanup", (void **) &qcurl_global_cleanup},
167 {"curl_easy_init", (void **) &qcurl_easy_init},
168 {"curl_easy_cleanup", (void **) &qcurl_easy_cleanup},
169 {"curl_easy_setopt", (void **) &qcurl_easy_setopt},
170 {"curl_easy_strerror", (void **) &qcurl_easy_strerror},
171 {"curl_easy_getinfo", (void **) &qcurl_easy_getinfo},
172 {"curl_multi_init", (void **) &qcurl_multi_init},
173 {"curl_multi_perform", (void **) &qcurl_multi_perform},
174 {"curl_multi_add_handle", (void **) &qcurl_multi_add_handle},
175 {"curl_multi_remove_handle",(void **) &qcurl_multi_remove_handle},
176 {"curl_multi_info_read", (void **) &qcurl_multi_info_read},
177 {"curl_multi_cleanup", (void **) &qcurl_multi_cleanup},
178 {"curl_multi_strerror", (void **) &qcurl_multi_strerror},
179 {"curl_slist_append", (void **) &qcurl_slist_append},
180 {"curl_slist_free_all", (void **) &qcurl_slist_free_all},
184 // Handle for CURL DLL
185 static dllhandle_t curl_dll = NULL;
186 // will be checked at many places to find out if qcurl calls are allowed
// Protects the download list/counters when threading is available; stays
// NULL otherwise (every lock/unlock call below is guarded by "if (curl_mutex)").
188 void *curl_mutex = NULL;
// Per-download bookkeeping; entries form a doubly linked list (next/prev)
// rooted at the file-scope 'downloads' pointer.
190 typedef struct downloadinfo_s
192 char filename[MAX_OSPATH];
// resume offset: bytes already present on disk when the transfer started
196 fs_offset_t startpos;
200 unsigned long bytes_received; // for buffer
201 double bytes_received_curl; // for throttling
202 double bytes_sent_curl; // for throttling
203 struct downloadinfo_s *next, *prev;
206 curl_slist *slist; // http headers
// non-NULL when downloading into memory instead of into a file stream
208 unsigned char *buffer;
// completion callback; curl_default_callback is used when none was given
210 curl_callback_t callback;
// HTTP POST support: request body, its Content-Type, and extra headers
213 const unsigned char *postbuf;
215 const char *post_content_type;
216 const char *extraheaders;
// Global download state: list head, active-transfer count, and per-map
// accounting used to decide when to run command_when_done/_error.
219 static downloadinfo *downloads = NULL;
220 static int numdownloads = 0;
222 static qboolean noclear = FALSE;
224 static int numdownloads_fail = 0;
225 static int numdownloads_success = 0;
226 static int numdownloads_added = 0;
227 static char command_when_done[256] = "";
228 static char command_when_error[256] = "";
234 Sets the command which is to be executed when the last download completes AND
235 all downloads since last server connect ended with a successful status.
236 Setting the command to NULL clears it.
239 static void Curl_CommandWhenDone(const char *cmd)
244 strlcpy(command_when_done, cmd, sizeof(command_when_done));
246 *command_when_done = 0;
251 Do not use yet. Not complete.
252 Problem: what counts as an error?
255 static void Curl_CommandWhenError(const char *cmd)
260 strlcpy(command_when_error, cmd, sizeof(command_when_error));
262 *command_when_error = 0;
267 Curl_Clear_forthismap
269 Clears the "will disconnect on failure" flags.
272 void Curl_Clear_forthismap(void)
277 if (curl_mutex) Thread_LockMutex(curl_mutex);
278 for(di = downloads; di; di = di->next)
279 di->forthismap = false;
280 Curl_CommandWhenError(NULL);
281 Curl_CommandWhenDone(NULL);
282 numdownloads_fail = 0;
283 numdownloads_success = 0;
284 numdownloads_added = 0;
285 if (curl_mutex) Thread_UnlockMutex(curl_mutex);
292 Returns true if a download needed for the current game is running.
295 qboolean Curl_Have_forthismap(void)
297 return numdownloads_added != 0;
300 void Curl_Register_predownload(void)
302 if (curl_mutex) Thread_LockMutex(curl_mutex);
303 Curl_CommandWhenDone("cl_begindownloads");
304 Curl_CommandWhenError("cl_begindownloads");
305 if (curl_mutex) Thread_UnlockMutex(curl_mutex);
310 Curl_CheckCommandWhenDone
312 Checks if a "done command" is to be executed.
313 All downloads finished, at least one success since connect, no single failure
314 -> execute the command.
316 static void Curl_CheckCommandWhenDone(void)
// every added download has been accounted for (success + failure == added)
320 if(numdownloads_added && ((numdownloads_success + numdownloads_fail) == numdownloads_added))
322 if(numdownloads_fail == 0)
324 Con_DPrintf("cURL downloads occurred, executing %s\n", command_when_done);
326 Cbuf_AddText(command_when_done);
331 Con_DPrintf("cURL downloads FAILED, executing %s\n", command_when_error);
333 Cbuf_AddText(command_when_error);
// either way, reset the per-map flags and counters afterwards
336 Curl_Clear_forthismap();
// Loads the cURL shared library (trying several platform-specific names) and
// resolves every entry in curlfuncs[]; returns true on success, after which
// curl_dll is non-NULL.
347 static qboolean CURL_OpenLibrary (void)
349 const char* dllnames [] =
354 #elif defined(MACOSX)
355 "libcurl.4.dylib", // Mac OS X Notyetreleased
356 "libcurl.3.dylib", // Mac OS X Tiger
357 "libcurl.2.dylib", // Mac OS X Panther
361 "libcurl.so", // FreeBSD
371 return Sys_LoadLibrary (dllnames, &curl_dll, curlfuncs);
382 static void CURL_CloseLibrary (void)
384 Sys_UnloadLibrary (&curl_dll);
// The single multi handle driving all transfers, plus accumulated byte
// counters and the next-allowed-run timestamp used for bandwidth throttling.
388 static CURLM *curlm = NULL;
389 static double bytes_received = 0; // used for bandwidth throttling
390 static double bytes_sent = 0; // used for bandwidth throttling
391 static double curltime = 0;
397 fwrite-compatible function that writes the data to a file. libcurl can call
// Write callback installed via CURLOPT_WRITEFUNCTION; vdi is the downloadinfo
// passed as CURLOPT_WRITEDATA. Appends either into di->buffer (memory
// download) or into di->stream (file download).
401 static size_t CURL_fwrite(void *data, size_t size, size_t nmemb, void *vdi)
403 fs_offset_t ret = -1;
404 size_t bytes = size * nmemb;
405 downloadinfo *di = (downloadinfo *) vdi;
409 if(di->bytes_received + bytes <= di->buffersize)
411 memcpy(di->buffer + di->bytes_received, data, bytes);
414 // otherwise: buffer overrun, ret stays -1
419 ret = FS_Write(di->stream, data, bytes);
422 di->bytes_received += bytes;
// NOTE: returning fewer bytes than received makes libcurl abort the transfer
424 return ret; // why not ret / nmemb?
// Final transfer status values passed to Curl_EndDownload.
429 CURL_DOWNLOAD_SUCCESS = 0,
430 CURL_DOWNLOAD_FAILED,
431 CURL_DOWNLOAD_ABORTED,
432 CURL_DOWNLOAD_SERVERERROR
// Default completion callback: reports the download outcome on the console.
// 'status' is a CURLCBSTATUS_* value, or a positive raw error code in the
// default branch below.
436 static void curl_default_callback(int status, size_t length_received, unsigned char *buffer, void *cbdata)
438 downloadinfo *di = (downloadinfo *) cbdata;
441 case CURLCBSTATUS_OK:
442 Con_DPrintf("Download of %s: OK\n", di->filename);
444 case CURLCBSTATUS_FAILED:
445 Con_DPrintf("Download of %s: FAILED\n", di->filename);
447 case CURLCBSTATUS_ABORTED:
448 Con_DPrintf("Download of %s: ABORTED\n", di->filename);
450 case CURLCBSTATUS_SERVERERROR:
451 Con_DPrintf("Download of %s: (unknown server error)\n", di->filename);
453 case CURLCBSTATUS_UNKNOWN:
454 Con_DPrintf("Download of %s: (unknown client error)\n", di->filename);
// default: positive status values are raw error codes (HTTP code or CURLcode)
457 Con_DPrintf("Download of %s: %d\n", di->filename, status);
466 stops a download. It receives a status (CURL_DOWNLOAD_SUCCESS,
467 CURL_DOWNLOAD_FAILED or CURL_DOWNLOAD_ABORTED) and in the second case the error
468 code from libcurl, or 0, if another error has occurred.
// Forward declaration: Curl_EndDownload restarts a failed resumed download
// by calling Curl_Begin again (see below).
471 static qboolean Curl_Begin(const char *URL, const char *extraheaders, double maxspeed, const char *name, qboolean ispak, qboolean forthismap, const char *post_content_type, const unsigned char *postbuf, size_t postbufsize, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata);
// Finalizes a download: invokes the completion callback with the mapped
// CURLCBSTATUS_* code, tears down the easy handle and header list, performs
// pak-file post-processing, unlinks 'di' from the download list, and updates
// the success/failure counters.
472 static void Curl_EndDownload(downloadinfo *di, CurlStatus status, CURLcode error)
479 case CURL_DOWNLOAD_SUCCESS:
481 di->callback(CURLCBSTATUS_OK, di->bytes_received, di->buffer, di->callback_data);
483 case CURL_DOWNLOAD_FAILED:
484 di->callback(CURLCBSTATUS_FAILED, di->bytes_received, di->buffer, di->callback_data);
486 case CURL_DOWNLOAD_ABORTED:
487 di->callback(CURLCBSTATUS_ABORTED, di->bytes_received, di->buffer, di->callback_data);
489 case CURL_DOWNLOAD_SERVERERROR:
490 // reopen to enforce it to have zero bytes again
493 FS_Close(di->stream);
494 di->stream = FS_OpenRealFile(di->filename, "wb", false);
// pass the raw error code if there is one, else the generic server error
498 di->callback(error ? (int) error : CURLCBSTATUS_SERVERERROR, di->bytes_received, di->buffer, di->callback_data);
502 di->callback(CURLCBSTATUS_UNKNOWN, di->bytes_received, di->buffer, di->callback_data);
// release the libcurl resources for this transfer
508 qcurl_multi_remove_handle(curlm, di->curle);
509 qcurl_easy_cleanup(di->curle);
511 qcurl_slist_free_all(di->slist);
514 if(!di->callback && ok && !di->bytes_received)
516 Con_Printf("ERROR: empty file\n");
521 FS_Close(di->stream);
// ispak downloads: try to mount the downloaded pack right away
525 ok = FS_AddPack(di->filename, NULL, true);
528 // pack loading failed?
530 // better clear the file again...
531 di->stream = FS_OpenRealFile(di->filename, "wb", false);
532 FS_Close(di->stream);
534 if(di->startpos && !di->callback)
536 // this was a resume?
537 // then try to redownload it without reporting the error
538 Curl_Begin(di->url, di->extraheaders, di->maxspeed, di->filename, di->ispak, di->forthismap, di->post_content_type, di->postbuf, di->postbufsize, NULL, 0, NULL, NULL);
539 di->forthismap = false; // don't count the error
// unlink from the doubly linked download list
545 di->prev->next = di->next;
547 downloads = di->next;
549 di->next->prev = di->prev;
555 ++numdownloads_success;
566 Returns a "cleaned up" URL for display (to strip login data)
// Writes the cleaned URL into urlbuf and returns it; the user:pass@ part is
// only stripped when the '@' occurs before the first '/' after the scheme.
569 static const char *CleanURL(const char *url, char *urlbuf, size_t urlbuflength)
571 const char *p, *q, *r;
573 // if URL is of form anything://foo-without-slash@rest, replace by anything://rest
574 p = strstr(url, "://");
577 q = strchr(p + 3, '@');
580 r = strchr(p + 3, '/');
583 dpsnprintf(urlbuf, urlbuflength, "%.*s%s", (int)(p - url + 3), url, q + 1);
594 CheckPendingDownloads
596 checks if there are free download slots to start new downloads in.
597 To not start too many downloads at once, only one download is added at a time,
598 up to a maximum number of cl_curl_maxdownloads are running.
// For each queued downloadinfo without an easy handle yet: open the output
// (file in append mode for resume, or memory buffer), create and configure
// the easy handle, and add it to the multi handle.
601 static void CheckPendingDownloads(void)
608 if(numdownloads < cl_curl_maxdownloads.integer)
611 for(di = downloads; di; di = di->next)
617 Con_Printf("Downloading %s -> %s", CleanURL(di->url, urlbuf, sizeof(urlbuf)), di->filename);
// "ab" append mode: existing partial files are resumed, not truncated
619 di->stream = FS_OpenRealFile(di->filename, "ab", false);
622 Con_Printf("\nFAILED: Could not open output file %s\n", di->filename);
623 Curl_EndDownload(di, CURL_DOWNLOAD_FAILED, CURLE_OK);
626 FS_Seek(di->stream, 0, SEEK_END);
627 di->startpos = FS_Tell(di->stream);
630 Con_Printf(", resuming from position %ld", (long) di->startpos);
635 Con_DPrintf("Downloading %s -> memory\n", CleanURL(di->url, urlbuf, sizeof(urlbuf)));
639 di->curle = qcurl_easy_init();
641 qcurl_easy_setopt(di->curle, CURLOPT_URL, di->url);
642 if(cl_curl_useragent.integer)
645 #ifdef HTTP_USER_AGENT
// optionally extend the User-Agent with the mod-supplied suffix
652 if(*cl_curl_useragent_append.string)
653 ua = va(vabuf, sizeof(vabuf), "%s%s%s",
655 (ua[0] && ua[strlen(ua)-1] != ' ')
658 cl_curl_useragent_append.string);
659 qcurl_easy_setopt(di->curle, CURLOPT_USERAGENT, ua);
662 qcurl_easy_setopt(di->curle, CURLOPT_USERAGENT, "");
663 qcurl_easy_setopt(di->curle, CURLOPT_REFERER, di->referer);
664 qcurl_easy_setopt(di->curle, CURLOPT_RESUME_FROM, (long) di->startpos);
665 qcurl_easy_setopt(di->curle, CURLOPT_FOLLOWLOCATION, 1);
666 qcurl_easy_setopt(di->curle, CURLOPT_WRITEFUNCTION, CURL_fwrite);
// abort transfers slower than 256 B/s for 45 seconds (stall detection)
667 qcurl_easy_setopt(di->curle, CURLOPT_LOW_SPEED_LIMIT, (long) 256);
668 qcurl_easy_setopt(di->curle, CURLOPT_LOW_SPEED_TIME, (long) 45);
669 qcurl_easy_setopt(di->curle, CURLOPT_WRITEDATA, (void *) di);
670 qcurl_easy_setopt(di->curle, CURLOPT_PRIVATE, (void *) di);
// restrict both direct and redirect targets to http/https/ftp (no file://)
671 qcurl_easy_setopt(di->curle, CURLOPT_PROTOCOLS, CURLPROTO_HTTP | CURLPROTO_HTTPS | CURLPROTO_FTP);
672 if(qcurl_easy_setopt(di->curle, CURLOPT_REDIR_PROTOCOLS, CURLPROTO_HTTP | CURLPROTO_HTTPS | CURLPROTO_FTP) != CURLE_OK)
674 Con_Printf("^1WARNING:^7 for security reasons, please upgrade to libcurl 7.19.4 or above. In a later version of DarkPlaces, HTTP redirect support will be disabled for this libcurl version.\n");
675 //qcurl_easy_setopt(di->curle, CURLOPT_FOLLOWLOCATION, 0);
677 if(di->post_content_type)
679 qcurl_easy_setopt(di->curle, CURLOPT_POST, 1);
680 qcurl_easy_setopt(di->curle, CURLOPT_POSTFIELDS, di->postbuf);
681 qcurl_easy_setopt(di->curle, CURLOPT_POSTFIELDSIZE, di->postbufsize);
682 di->slist = qcurl_slist_append(di->slist, va(vabuf, sizeof(vabuf), "Content-Type: %s", di->post_content_type));
685 // parse extra headers into slist
686 // \n separated list!
687 h = di->extraheaders;
690 const char *hh = strchr(h, '\n');
693 char *buf = (char *) Mem_Alloc(tempmempool, hh - h + 1);
694 memcpy(buf, h, hh - h);
696 di->slist = qcurl_slist_append(di->slist, buf);
// last (or only) header: append the remaining string directly
701 di->slist = qcurl_slist_append(di->slist, h);
706 qcurl_easy_setopt(di->curle, CURLOPT_HTTPHEADER, di->slist);
708 qcurl_multi_add_handle(curlm, di->curle);
711 if(numdownloads >= cl_curl_maxdownloads.integer)
722 this function MUST be called before using anything else in this file.
723 On Win32, this must be called AFTER WSAStartup has been done!
// One-time setup (body fragment; the enclosing function signature -- looks
// like Curl_Init -- is elided here): create the mutex if threads exist,
// initialize libcurl, and create the single multi handle.
731 if (Thread_HasThreads()) curl_mutex = Thread_CreateMutex();
732 qcurl_global_init(CURL_GLOBAL_NOTHING);
733 curlm = qcurl_multi_init();
740 Surprise... closes all the stuff. Please do this BEFORE shutting down LHNET.
// declared here (defined elsewhere) so shutdown can release the requirements
743 void Curl_ClearRequirements(void);
744 void Curl_Shutdown(void)
748 Curl_ClearRequirements();
750 if (curl_mutex) Thread_DestroyMutex(curl_mutex);
759 Finds the internal information block for a download given by file name.
// Case-insensitive linear search of the download list; presumably returns
// NULL when no entry matches (return paths elided) -- TODO confirm.
762 static downloadinfo *Curl_Find(const char *filename)
767 for(di = downloads; di; di = di->next)
768 if(!strcasecmp(di->filename, filename))
777 Starts a download of a given URL to the file name portion of this URL (or name
778 if given) in the "dlcache/" folder.
// Main entry point for all download variants (file, memory, POST). Validates
// the URL scheme, derives the cache file name, rejects/merges duplicates,
// short-circuits already-present paks, then allocates and queues a
// downloadinfo; CheckPendingDownloads later creates the actual easy handle.
781 static qboolean Curl_Begin(const char *URL, const char *extraheaders, double maxspeed, const char *name, qboolean ispak, qboolean forthismap, const char *post_content_type, const unsigned char *postbuf, size_t postbufsize, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata)
795 // if URL is protocol:///* or protocol://:port/*, insert the IP of the current server
796 p = strchr(URL, ':')
799 if(!strncmp(p, ":///", 4) || !strncmp(p, "://:", 4))
801 char addressstring[128];
// current server address is carried in the userinfo "*ip" key
803 InfoString_GetValue(cls.userinfo, "*ip", addressstring, sizeof(addressstring));
804 q = strchr(addressstring, ':');
806 q = addressstring + strlen(addressstring);
809 dpsnprintf(urlbuf, sizeof(urlbuf), "%.*s://%.*s%s", (int) (p - URL), URL, (int) (q - addressstring), addressstring, URL + (p - URL) + 3);
815 // Note: This extraction of the file name portion is NOT entirely correct.
817 // It does the following:
819 // http://host/some/script.cgi/SomeFile.pk3?uid=ABCDE -> SomeFile.pk3
820 // http://host/some/script.php?uid=ABCDE&file=/SomeFile.pk3 -> SomeFile.pk3
821 // http://host/some/script.php?uid=ABCDE&file=SomeFile.pk3 -> script.php
823 // However, I'd like to keep this "buggy" behavior so that PHP script
824 // authors can write download scripts without having to enable
825 // AcceptPathInfo on Apache. They just have to ensure that their script
826 // can be called with such a "fake" path name like
827 // http://host/some/script.php?uid=ABCDE&file=/SomeFile.pk3
829 // By the way, such PHP scripts should either send the file or a
830 // "Location:" redirect; PHP code example:
832 // header("Location: http://www.example.com/");
834 // By the way, this will set User-Agent to something like
835 // "Nexuiz build 22:27:55 Mar 17 2006" (engineversion) and Referer to
836 // dp://serverhost:serverport/ so you can filter on this; an example
837 // httpd log file line might be:
839 // 141.2.16.3 - - [17/Mar/2006:22:32:43 +0100] "GET /maps/tznex07.pk3 HTTP/1.1" 200 1077455 "dp://141.2.16.7:26000/" "Nexuiz Linux 22:07:43 Mar 17 2006"
// no explicit name given: derive one from the (cleaned) URL
842 name = CleanURL(URL, urlbuf, sizeof(urlbuf));
844 if (curl_mutex) Thread_LockMutex(curl_mutex);
// take the last path component, cut at '?', and prefix with dlcache/
848 p = strrchr(name, '/');
849 p = p ? (p+1) : name;
851 length = q ? (size_t)(q - p) : strlen(p);
852 dpsnprintf(fn, sizeof(fn), "dlcache/%.*s", (int)length, p);
854 name = fn; // make it point back
856 // already downloading the file?
858 downloadinfo *di = Curl_Find(fn);
861 Con_Printf("Can't download %s, already getting it from %s!\n", fn, CleanURL(di->url, urlbuf, sizeof(urlbuf)));
863 // however, if it was not for this map yet...
864 if(forthismap && !di->forthismap)
866 di->forthismap = true;
867 // this "fakes" a download attempt so the client will wait for
868 // the download to finish and then reconnect
869 ++numdownloads_added;
876 if(ispak && FS_FileExists(fn))
878 qboolean already_loaded;
879 if(FS_AddPack(fn, &already_loaded, true))
881 Con_DPrintf("%s already exists, not downloading!\n", fn);
883 Con_DPrintf("(pak was already loaded)\n");
// count it as an instant success so the "done command" logic still fires
888 ++numdownloads_added;
889 ++numdownloads_success;
// file exists but is not a loadable pack: check its magic bytes before
// deciding whether resuming is safe
897 qfile_t *f = FS_OpenRealFile(fn, "rb", false);
901 FS_Read(f, buf, sizeof(buf)); // no "-1", I will use memcmp
903 if(memcmp(buf, "PK\x03\x04", 4) && memcmp(buf, "PACK", 4))
905 Con_DPrintf("Detected non-PAK %s, clearing and NOT resuming.\n", fn);
907 f = FS_OpenRealFile(fn, "wb", false);
921 // if we get here, we actually want to download... so first verify the
922 // URL scheme (so one can't read local files using file://)
923 if(strncmp(URL, "http://", 7) && strncmp(URL, "ftp://", 6) && strncmp(URL, "https://", 8))
925 Con_Printf("Curl_Begin(\"%s\"): nasty URL scheme rejected\n", URL);
926 if (curl_mutex) Thread_UnlockMutex(curl_mutex);
// allocate and link the new downloadinfo at the head of the list
931 ++numdownloads_added;
932 di = (downloadinfo *) Z_Malloc(sizeof(*di));
933 strlcpy(di->filename, name, sizeof(di->filename));
934 strlcpy(di->url, URL, sizeof(di->url));
935 dpsnprintf(di->referer, sizeof(di->referer), "dp://%s/", cls.netcon ? cls.netcon->address : "notconnected.invalid");
936 di->forthismap = forthismap;
// a memory-buffer download is never treated as a pak
941 di->ispak = (ispak && !buf);
942 di->maxspeed = maxspeed;
943 di->bytes_received = 0;
944 di->bytes_received_curl = 0;
945 di->bytes_sent_curl = 0;
946 di->extraheaders = extraheaders;
947 di->next = downloads;
953 di->buffersize = bufsize;
// without an explicit callback, fall back to the console-reporting default
956 di->callback = curl_default_callback;
957 di->callback_data = di;
961 di->callback = callback;
962 di->callback_data = cbdata;
965 if(post_content_type)
967 di->post_content_type = post_content_type;
968 di->postbuf = postbuf;
969 di->postbufsize = postbufsize;
973 di->post_content_type = NULL;
979 if (curl_mutex) Thread_UnlockMutex(curl_mutex);
984 qboolean Curl_Begin_ToFile(const char *URL, double maxspeed, const char *name, qboolean ispak, qboolean forthismap)
986 return Curl_Begin(URL, NULL, maxspeed, name, ispak, forthismap, NULL, NULL, 0, NULL, 0, NULL, NULL);
988 qboolean Curl_Begin_ToMemory(const char *URL, double maxspeed, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata)
990 return Curl_Begin(URL, NULL, maxspeed, NULL, false, false, NULL, NULL, 0, buf, bufsize, callback, cbdata);
992 qboolean Curl_Begin_ToMemory_POST(const char *URL, const char *extraheaders, double maxspeed, const char *post_content_type, const unsigned char *postbuf, size_t postbufsize, unsigned char *buf, size_t bufsize, curl_callback_t callback, void *cbdata)
994 return Curl_Begin(URL, extraheaders, maxspeed, NULL, false, false, post_content_type, postbuf, postbufsize, buf, bufsize, callback, cbdata);
1001 call this regularly as this will always download as much as possible without
1003 ====================
// Per-frame download pump (body fragment; the function signature --
// presumably Curl_Frame -- is elided): runs the multi handle, accounts
// transferred bytes, dispatches completion messages, starts pending
// downloads, and computes the next throttled run time.
1012 if(!cl_curl_enabled.integer)
1018 if (curl_mutex) Thread_LockMutex(curl_mutex);
1020 Curl_CheckCommandWhenDone();
1024 if (curl_mutex) Thread_UnlockMutex(curl_mutex);
1028 if(realtime < curltime) // throttle
1030 if (curl_mutex) Thread_UnlockMutex(curl_mutex);
// drive libcurl until it no longer asks to be called again immediately
1040 mc = qcurl_multi_perform(curlm, &remaining);
1042 while(mc == CURLM_CALL_MULTI_PERFORM);
1044 for(di = downloads; di; di = di->next)
// accumulate per-handle deltas into the global throttling counters
1049 qcurl_easy_getinfo(di->curle, CURLINFO_SIZE_UPLOAD, &b);
1050 bytes_sent += (b - di->bytes_sent_curl);
1051 di->bytes_sent_curl = b;
1052 qcurl_easy_getinfo(di->curle, CURLINFO_SIZE_DOWNLOAD, &b);
// NOTE(review): download bytes are added to bytes_sent here, not
// bytes_received -- looks suspicious, verify against original source
1053 bytes_sent += (b - di->bytes_received_curl);
1054 di->bytes_received_curl = b;
// drain completion messages from the multi handle
1060 CURLMsg *msg = qcurl_multi_info_read(curlm, &remaining);
1063 if(msg->msg == CURLMSG_DONE)
1065 CurlStatus failed = CURL_DOWNLOAD_SUCCESS;
1067 qcurl_easy_getinfo(msg->easy_handle, CURLINFO_PRIVATE, &di);
1068 result = msg->data.result;
1071 failed = CURL_DOWNLOAD_FAILED;
// curl succeeded: still inspect the HTTP status class for 4xx/5xx
1076 qcurl_easy_getinfo(msg->easy_handle, CURLINFO_RESPONSE_CODE, &code);
1079 case 4: // e.g. 404?
1080 case 5: // e.g. 500?
1081 failed = CURL_DOWNLOAD_SERVERERROR;
1082 result = (CURLcode) code;
1087 Curl_EndDownload(di, failed, result);
1092 CheckPendingDownloads();
1094 // when will we curl the next time?
1095 // we will wait a bit to ensure our download rate is kept.
1096 // we now know that realtime >= curltime... so set up a new curltime
1098 // use the slowest allowing download to derive the maxspeed... this CAN
1099 // be done better, but maybe later
1100 maxspeed = cl_curl_maxspeed.value;
1101 for(di = downloads; di; di = di->next)
1102 if(di->maxspeed > 0)
1103 if(di->maxspeed < maxspeed || maxspeed <= 0)
1104 maxspeed = di->maxspeed;
1108 double bytes = bytes_sent + bytes_received; // maybe smoothen a bit?
// delay the next pump long enough that the transferred bytes fit the cap
1109 curltime = realtime + bytes / (cl_curl_maxspeed.value * 1024.0);
1114 curltime = realtime;
1116 if (curl_mutex) Thread_UnlockMutex(curl_mutex);
1120 ====================
1123 Stops ALL downloads.
1124 ====================
1126 void Curl_CancelAll(void)
1131 if (curl_mutex) Thread_LockMutex(curl_mutex);
// repeatedly end the list head; Curl_EndDownload unlinks it, advancing
// 'downloads' (see the invariant comment below)
1135 Curl_EndDownload(downloads, CURL_DOWNLOAD_ABORTED, CURLE_OK);
1136 // INVARIANT: downloads will point to the next download after that!
1139 if (curl_mutex) Thread_UnlockMutex(curl_mutex);
1143 ====================
1146 returns true iff there is a download running.
1147 ====================
1149 qboolean Curl_Running(void)
1154 return downloads != NULL;
1158 ====================
1159 Curl_GetDownloadAmount
1161 returns a value from 0.0 to 1.0 which represents the downloaded amount of data
1162 for the given download.
1163 ====================
1165 static double Curl_GetDownloadAmount(downloadinfo *di)
// ask libcurl for the expected remaining content length of this transfer
1172 qcurl_easy_getinfo(di->curle, CURLINFO_CONTENT_LENGTH_DOWNLOAD, &length);
// progress = bytes on disk so far over the total (resume offset included);
// 'length' is presumably a double (declaration elided) -- TODO confirm
1174 return (di->startpos + di->bytes_received) / (di->startpos + length);
1183 ====================
1184 Curl_GetDownloadSpeed
1186 returns the speed of the given download in bytes per second
1187 ====================
1189 static double Curl_GetDownloadSpeed(downloadinfo *di)
// libcurl reports the average transfer speed of this easy handle
1196 qcurl_easy_getinfo(di->curle, CURLINFO_SPEED_DOWNLOAD, &speed);
1204 ====================
1207 prints the download list
1208 ====================
1210 // TODO rewrite using Curl_GetDownloadInfo?
// Console command handler: lists every running/queued download with its
// progress and speed.
1211 static void Curl_Info_f(void)
1219 if (curl_mutex) Thread_LockMutex(curl_mutex);
1220 Con_Print("Currently running downloads:\n");
1221 for(di = downloads; di; di = di->next)
1223 double speed, percent;
1224 Con_Printf(" %s -> %s ", CleanURL(di->url, urlbuf, sizeof(urlbuf)), di->filename);
1225 percent = 100.0 * Curl_GetDownloadAmount(di);
1226 speed = Curl_GetDownloadSpeed(di);
1228 Con_Printf("(%.1f%% @ %.1f KiB/s)\n", percent, speed / 1024.0);
// downloads without an easy handle yet have not been started
1230 Con_Print("(queued)\n");
1232 if (curl_mutex) Thread_UnlockMutex(curl_mutex);
1236 Con_Print("No downloads running.\n");
1241 ====================
1244 implements the "curl" console command
1248 curl --cancel filename
1253 curl [--pak] [--forthismap] [--for filename filename...] url
1254 --pak: after downloading, load the package into the virtual file system
1255 --for filename...: only download if at least one of the named files is missing
1256 --forthismap: don't reconnect on failure
1258 curl --clear_autodownload
1259 clears the download success/failure counters
1261 curl --finish_autodownload
1262 if at least one download has been started, disconnect and drop to the menu
1263 once the last download completes successfully, reconnect to the current server
1264 ====================
// Option parser for the "curl" console command; the URL is always the last
// argument, everything before it is options.
1266 static void Curl_Curl_f(void)
1268 double maxspeed = 0;
1271 qboolean pak = false;
1272 qboolean forthismap = false;
1274 const char *name = 0;
1278 Con_Print("libcurl DLL not found, this command is inactive.\n");
1282 if(!cl_curl_enabled.integer)
1284 Con_Print("curl support not enabled. Set cl_curl_enabled to 1 to enable.\n");
1290 Con_Print("usage:\ncurl --info, curl --cancel [filename], curl url\n");
// the URL is the final argument; options occupy argv[1..end)
1294 url = Cmd_Argv(Cmd_Argc() - 1);
1297 for(i = 1; i != end; ++i)
1299 const char *a = Cmd_Argv(i);
1300 if(!strcmp(a, "--info"))
1305 else if(!strcmp(a, "--cancel"))
// bare "--cancel" (no filename) vs "--cancel filename"
1307 if(i == end - 1) // last argument
1311 downloadinfo *di = Curl_Find(url);
1313 Curl_EndDownload(di, CURL_DOWNLOAD_ABORTED, CURLE_OK);
1315 Con_Print("download not found\n");
1319 else if(!strcmp(a, "--pak"))
1323 else if(!strcmp(a, "--for")) // must be last option
1325 for(i = i + 1; i != end - 1; ++i)
1327 if(!FS_FileExists(Cmd_Argv(i)))
1328 goto needthefile; // why can't I have a "double break"?
1330 // if we get here, we have all the files...
1333 else if(!strcmp(a, "--forthismap"))
1337 else if(!strcmp(a, "--as"))
1345 else if(!strcmp(a, "--clear_autodownload"))
1347 // mark all running downloads as "not for this map", so if they
1348 // fail, it does not matter
1349 Curl_Clear_forthismap();
1352 else if(!strcmp(a, "--finish_autodownload"))
1354 if(numdownloads_added)
1356 char donecommand[256];
1359 if(cl.loadbegun) // curling won't inhibit loading the map any more when at this stage, so bail out and force a reconnect
1361 dpsnprintf(donecommand, sizeof(donecommand), "connect %s", cls.netcon->address);
1362 Curl_CommandWhenDone(donecommand);
1366 Curl_CheckCommandWhenDone();
1369 Curl_Register_predownload();
1374 else if(!strncmp(a, "--maxspeed=", 11))
1376 maxspeed = atof(a + 11);
1380 Con_Printf("curl: invalid option %s\n", a);
1381 // but we ignore the option
// fall through to actually start the download
1386 Curl_Begin_ToFile(url, maxspeed, name, pak, forthismap);
1390 static void curl_curlcat_callback(int code, size_t length_received, unsigned char *buffer, void *cbdata)
1392 Con_Printf("Received %d bytes (status %d):\n%.*s\n", (int) length_received, code, (int) length_received, buffer);
1396 void Curl_CurlCat_f(void)
1399 const char *url = Cmd_Argv(1);
1400 buf = Z_Malloc(16384);
1401 Curl_Begin_ToMemory(url, buf, 16384, curl_curlcat_callback, NULL);
1406 ====================
1409 loads the commands and cvars this library uses
1410 ====================
1412 void Curl_Init_Commands(void)
1414 Cvar_RegisterVariable (&cl_curl_enabled);
1415 Cvar_RegisterVariable (&cl_curl_maxdownloads);
1416 Cvar_RegisterVariable (&cl_curl_maxspeed);
1417 Cvar_RegisterVariable (&sv_curl_defaulturl);
1418 Cvar_RegisterVariable (&sv_curl_serverpackages);
1419 Cvar_RegisterVariable (&sv_curl_maxspeed);
1420 Cvar_RegisterVariable (&cl_curl_useragent);
1421 Cvar_RegisterVariable (&cl_curl_useragent_append);
1422 Cmd_AddCommand ("curl", Curl_Curl_f, "download data from an URL and add to search path");
1423 //Cmd_AddCommand ("curlcat", Curl_CurlCat_f, "display data from an URL (debugging command)");
1427 ====================
1428 Curl_GetDownloadInfo
1430 returns an array of Curl_downloadinfo_t structs for usage by GUIs.
1431 The number of elements in the array is returned in int *nDownloads.
1432 const char **additional_info may be set to a string of additional user
1433 information, or to NULL if no such display shall occur. The returned
1434 array must be freed later using Z_Free.
1435 ====================
// Builds a snapshot array describing all current downloads for GUI display.
// Per the contract documented above, the returned array is Z_Malloc'd and
// must be Z_Free'd by the caller; the element count goes to *nDownloads.
Curl_downloadinfo_t *Curl_GetDownloadInfo(int *nDownloads, const char **additional_info, char *addinfo, size_t addinfolength)
	Curl_downloadinfo_t *downinfo;
	*additional_info = NULL;
	// the downloads list is shared with the download thread; lock around
	// the traversal when threading is available
	if (curl_mutex) Thread_LockMutex(curl_mutex);
	// first pass: count list entries so one allocation suffices
	for(di = downloads; di; di = di->next)
	downinfo = (Curl_downloadinfo_t *) Z_Malloc(sizeof(*downinfo) * i);
	// second pass: fill one slot per (visible) download
	for(di = downloads; di; di = di->next)
		// do not show infobars for background downloads
		if(developer.integer <= 0)
		strlcpy(downinfo[i].filename, di->filename, sizeof(downinfo[i].filename));
		// active transfer: report live progress and speed
		downinfo[i].progress = Curl_GetDownloadAmount(di);
		downinfo[i].speed = Curl_GetDownloadSpeed(di);
		downinfo[i].queued = false;
		// not started yet: only mark as queued
		downinfo[i].queued = true;
	// TODO: can I clear command_when_done as soon as the first download fails?
	if(*command_when_done && !numdownloads_fail && numdownloads_added)
		// a pending "connect" means we will join that server when done
		if(!strncmp(command_when_done, "connect ", 8))
			dpsnprintf(addinfo, addinfolength, "(will join %s when done)", command_when_done + 8);
		else if(!strcmp(command_when_done, "cl_begindownloads"))
			dpsnprintf(addinfo, addinfolength, "(will enter the game when done)");
		// any other pending console command is shown verbatim
			dpsnprintf(addinfo, addinfolength, "(will do '%s' when done)", command_when_done);
		*additional_info = addinfo;
		// no command pending (or a download already failed): no status line
		*additional_info = NULL;
	if (curl_mutex) Thread_UnlockMutex(curl_mutex);
1503 ====================
determines the URL from which a given package can be downloaded.
1508 For this, it reads a file "curl_urls.txt" of the following format:
1511 revdm*.pk3 http://revdm/downloads/are/here/
1512 * http://any/other/stuff/is/here/
1514 The URLs should end in /. If not, downloads will still work, but the cached files
1515 can't be just put into the data directory with the same download configuration
1516 (you might want to do this if you want to tag downloaded files from your
1517 server, but you should not). "-" means "don't download".
1519 If no single pattern matched, the cvar sv_curl_defaulturl is used as download
1522 Note: pak1.pak and data*.pk3 are excluded from autodownload at another point in
1523 this file for obvious reasons.
1524 ====================
// Maps a pack file name to the base URL it should be downloaded from by
// scanning curl_urls.txt; falls back to sv_curl_defaulturl (see the
// format description in the comment above).
static const char *Curl_FindPackURL(const char *filename)
	static char foundurl[1024]; // invoked only by server
	fs_offset_t filesize;
	// optional per-server mapping file; NULL if it does not exist
	char *buf = (char *) FS_LoadFile("curl_urls.txt", tempmempool, true, &filesize);
		// read lines of format "pattern url"
		char *pattern = NULL, *patternend = NULL, *url = NULL, *urlend = NULL;
		qboolean eof = false;
		// a complete "pattern url" pair has been isolated on this line
		if(pattern && url && patternend)
			// first pattern matching the file name wins
			if(matchpattern(filename, pattern, true))
				// copy into static storage so it survives the FS buffer
				strlcpy(foundurl, url, sizeof(foundurl));
		// tokenizer states: still inside the pattern token...
		if(pattern && !patternend)
		// ...still inside the url token...
		else if(url && !urlend)
		// ...or in the whitespace between pattern and url
		else if(pattern && patternend && !url)
	// no pattern matched: use the configured default download location
	return sv_curl_defaulturl.string;
// One file the server requires connecting clients to have (and download
// if missing); see Curl_RequireFile/Curl_SendRequirements below.
typedef struct requirement_s
	// singly linked list; new entries are prepended
	struct requirement_s *next;
	// game path of the required file
	char filename[MAX_OSPATH];
// head of the list of required files for the current map
static requirement *requirements = NULL;
1598 ====================
1601 Adds the given file to the list of requirements.
1602 ====================
1604 void Curl_RequireFile(const char *filename)
1606 requirement *req = (requirement *) Z_Malloc(sizeof(*requirements));
1607 req->next = requirements;
1608 strlcpy(req->filename, filename, sizeof(req->filename));
1613 ====================
1614 Curl_ClearRequirements
1616 Clears the list of required files for playing on the current map.
1617 This should be called at every map change.
1618 ====================
1620 void Curl_ClearRequirements(void)
1624 requirement *req = requirements;
1625 requirements = requirements->next;
1631 ====================
1632 Curl_SendRequirements
Makes the current host_client download all files it needs.
1635 This is done by sending him the following console commands:
1637 curl --clear_autodownload
1638 curl --pak --for maps/pushmoddm1.bsp --forthismap http://where/this/darn/map/is/pushmoddm1.pk3
1639 curl --finish_autodownload
1640 ====================
// Appends the client console command(s) that download one required file
// to sendbuffer (bounded by sendbuffer_len); foundone says whether a
// command was already emitted for this client. Presumably returns whether
// a command was added -- the return statements are elided here, verify.
static qboolean Curl_SendRequirement(const char *filename, qboolean foundone, char *sendbuffer, size_t sendbuffer_len)
	// the pack (pak/pk3) containing the file; that pack is what the
	// client will actually download
	const char *thispack = FS_WhichPack(filename);
	const char *packurl;
	// find the last '/' -- presumably used to strip the directory prefix
	// so only the pack's base name remains; confirm against full source
	p = strrchr(thispack, '/');
	// look up the download URL configured for this pack
	packurl = Curl_FindPackURL(thispack);
	// "-" means "don't download" (see Curl_FindPackURL's comment)
	if(packurl && *packurl && strcmp(packurl, "-"))
		// reset the client's autodownload state before the first command
		strlcat(sendbuffer, "curl --clear_autodownload\n", sendbuffer_len);
		// download as a pak, required for this map, saved under the pack name
		strlcat(sendbuffer, "curl --pak --forthismap --as ", sendbuffer_len);
		strlcat(sendbuffer, thispack, sendbuffer_len);
		// optional server-imposed speed limit for this download
		if(sv_curl_maxspeed.value > 0)
			dpsnprintf(sendbuffer + strlen(sendbuffer), sendbuffer_len - strlen(sendbuffer), " --maxspeed=%.1f", sv_curl_maxspeed.value);
		// --for names the file this download is required for
		strlcat(sendbuffer, " --for ", sendbuffer_len);
		strlcat(sendbuffer, filename, sendbuffer_len);
		strlcat(sendbuffer, " ", sendbuffer_len);
		// final URL is base URL + pack name (URLs should end in '/')
		strlcat(sendbuffer, packurl, sendbuffer_len);
		strlcat(sendbuffer, thispack, sendbuffer_len);
		strlcat(sendbuffer, "\n", sendbuffer_len);
void Curl_SendRequirements(void)
	// for each requirement, find the pack name
	char sendbuffer[4096] = "";
	qboolean foundone = false;
	// requirements registered via Curl_RequireFile for the current map
	for(req = requirements; req; req = req->next)
		foundone = Curl_SendRequirement(req->filename, foundone, sendbuffer, sizeof(sendbuffer)) || foundone;
	// plus the space-separated sv_curl_serverpackages list
	p = sv_curl_serverpackages.string;
	while(COM_ParseToken_Simple(&p, false, false, true))
		foundone = Curl_SendRequirement(com_token, foundone, sendbuffer, sizeof(sendbuffer)) || foundone;
	// terminate the command sequence (see the command list documented above)
	strlcat(sendbuffer, "curl --finish_autodownload\n", sizeof(sendbuffer));
	// only send if nothing was truncated; a cut-off command could make the
	// client execute garbage
	if(strlen(sendbuffer) + 1 < sizeof(sendbuffer))
		Host_ClientCommands("%s", sendbuffer);
		Con_Printf("Could not initiate autodownload due to URL buffer overflow\n");