optimized SHOWLMP code (only used by Nehahra) to not eat any CPU time
diff --git a/cl_screen.c b/cl_screen.c
index 869b893f033501c99a5ec95f6d4f173b1d3d2242..ab5bafc644bc67ae2e466fb8693e9b33db39028c 100644
--- a/cl_screen.c
+++ b/cl_screen.c
@@ -30,6 +30,8 @@ cvar_t scr_screenshot_jpeg_quality = {CVAR_SAVE, "scr_screenshot_jpeg_quality","
 cvar_t scr_screenshot_gammaboost = {CVAR_SAVE, "scr_screenshot_gammaboost","1", "gamma correction on saved screenshots and videos, 1.0 saves unmodified images"};
 // scr_screenshot_name is defined in fs.c
 cvar_t cl_capturevideo = {0, "cl_capturevideo", "0", "enables saving of video to a .avi file using uncompressed I420 colorspace and PCM audio, note that scr_screenshot_gammaboost affects the brightness of the output"};
+cvar_t cl_capturevideo_width = {0, "cl_capturevideo_width", "0", "scales all frames to this resolution before saving the video"};
+cvar_t cl_capturevideo_height = {0, "cl_capturevideo_height", "0", "scales all frames to this resolution before saving the video"};
 cvar_t cl_capturevideo_realtime = {0, "cl_capturevideo_realtime", "0", "causes video saving to operate in realtime (mostly useful while playing, not while capturing demos), this can produce a much lower quality video due to poor sound/video sync and will abort saving if your machine stalls for over 1 second"};
 cvar_t cl_capturevideo_fps = {0, "cl_capturevideo_fps", "30", "how many frames per second to save (29.97 for NTSC, 30 for typical PC video, 15 can be useful)"};
 cvar_t cl_capturevideo_number = {CVAR_SAVE, "cl_capturevideo_number", "1", "number to append to video filename, incremented each time a capture begins"};
@@ -46,8 +48,9 @@ cvar_t scr_zoomwindow_viewsizey = {CVAR_SAVE, "scr_zoomwindow_viewsizey", "20",
 cvar_t scr_zoomwindow_fov = {CVAR_SAVE, "scr_zoomwindow_fov", "20", "fov of zoom window"};
 cvar_t scr_stipple = {0, "scr_stipple", "0", "interlacing-like stippling of the display"};
 cvar_t scr_refresh = {0, "scr_refresh", "1", "allows you to completely shut off rendering for benchmarking purposes"};
-cvar_t shownetgraph = {CVAR_SAVE, "shownetgraph", "0", "shows a graph of packet sizes and other information"};
+cvar_t shownetgraph = {CVAR_SAVE, "shownetgraph", "0", "shows a graph of packet sizes and other information, 0 = off, 1 = show client netgraph, 2 = show client and server netgraphs (when hosting a server)"};
 
+#define AVI_MASTER_INDEX_SIZE 640 // GB ought to be enough for anyone
 
 int jpeg_supported = false;
 
@@ -106,11 +109,14 @@ void SCR_CenterPrint(char *str)
 void SCR_DrawCenterString (void)
 {
        char    *start;
-       int             l;
        int             x, y;
        int             remaining;
        int             color;
 
+       if(cl.intermission == 2) // in finale,
+               if(sb_showscores) // make TAB hide the finale message (sb_showscores overrides finale in sbar.c)
+                       return;
+
 // the finale prints the characters one at a time, except if printspeed is an absurdly high value
        if (cl.intermission && scr_printspeed.value > 0 && scr_printspeed.value < 1000000)
                remaining = (int)(scr_printspeed.value * (cl.time - scr_centertime_start));
@@ -132,36 +138,25 @@ void SCR_DrawCenterString (void)
        do
        {
                // scan the number of characters on the line, not counting color codes
-               int chars = 0;
-               for (l=0 ; l<vid_conwidth.integer/8 ; l++)
-               {
-                       if (start[l] == '\n' || !start[l])
-                               break;
-                       // color codes add no visible characters, so don't count them
-                       if (start[l] == STRING_COLOR_TAG && (start[l+1] >= '0' && start[l+1] <= '9'))
-                               l++;
-                       else
-                               chars++;
-               }
+               char *newline = strchr(start, '\n');
+               int l = newline ? (newline - start) : (int)strlen(start);
+               int chars = COM_StringLengthNoColors(start, l, NULL);
+
                x = (vid_conwidth.integer - chars*8)/2;
                if (l > 0)
                {
                        if (remaining < l)
                                l = remaining;
-                       DrawQ_ColoredString(x, y, start, l, 8, 8, 1, 1, 1, 1, 0, &color);
+                       DrawQ_String(x, y, start, l, 8, 8, 1, 1, 1, 1, 0, &color, false);
                        remaining -= l;
                        if (remaining <= 0)
                                return;
                }
-
                y += 8;
 
-               while (*start && *start != '\n')
-                       start++;
-
-               if (!*start)
+               if (!newline)
                        break;
-               start++;                // skip the \n
+               start = newline + 1; // skip the \n
        } while (1);
 }
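
The rewritten loop above hands the visible-width calculation to COM_StringLengthNoColors instead of scanning byte by byte. Purely as an illustration (not part of the patch), the counting rule the removed loop implemented looks roughly like the sketch below, assuming the usual Quake-style color codes where STRING_COLOR_TAG is '^' followed by a digit:

#include <stddef.h>

// Illustrative sketch: count characters that actually get drawn, treating
// "^0".."^9" pairs as invisible color codes (mirrors the removed loop).
static size_t visible_length(const char *s, size_t maxlen)
{
	size_t i, chars = 0;
	for (i = 0; i < maxlen && s[i]; i++)
	{
		if (s[i] == '^' && s[i + 1] >= '0' && s[i + 1] <= '9')
			i++; // skip the digit too; the pair contributes no glyph
		else
			chars++;
	}
	return chars;
}
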
 
@@ -188,20 +183,19 @@ void SCR_CheckDrawCenterString (void)
 void SCR_DrawNetGraph_DrawGraph (int graphx, int graphy, int barwidth, int barheight, int bardivide, const char *label, float textsize, int packetcounter, int numparameters, const int **parameters, const float parametercolors[][4])
 {
        int j, k, x, y, index, offset, height;
-       // dim background
-       DrawQ_Pic (graphx, graphy, NULL, barwidth * NETGRAPH_PACKETS, barheight + textsize, 0, 0, 0, 0.5, 0);
-       // draw a label
-       DrawQ_String (graphx, graphy + barheight, label, 0, textsize, textsize, 1, 1, 1, 1, 0);
        // draw the bar graph itself
+       // advance the packet counter because it is the latest packet column being
+       // built up and should come last
+       packetcounter = (packetcounter + 1) % NETGRAPH_PACKETS;
        for (j = 0;j < NETGRAPH_PACKETS;j++)
        {
                x = graphx + j * barwidth;
                y = graphy + barheight;
                index = (packetcounter + j) % NETGRAPH_PACKETS;
                if (parameters[0][index] == NETGRAPH_LOSTPACKET)
-                       DrawQ_Pic(x, y - barheight, NULL, barwidth, barheight, 1, 0, 0, 1, 0);
+                       DrawQ_Fill(x, y - barheight, barwidth, barheight, 1, 0, 0, 1, 0);
                else if (parameters[0][index] == NETGRAPH_CHOKEDPACKET)
-                       DrawQ_Pic(x, y - min(2, barheight), NULL, barwidth, min(2, barheight), 1, 1, 0, 1, 0);
+                       DrawQ_Fill(x, y - min(2, barheight), barwidth, min(2, barheight), 1, 1, 0, 1, 0);
                else
                {
                        offset = 0;
@@ -211,7 +205,7 @@ void SCR_DrawNetGraph_DrawGraph (int graphx, int graphy, int barwidth, int barhe
                                height = min(height, barheight - offset);
                                offset += height;
                                if (height)
-                                       DrawQ_Pic(x, y - offset, NULL, barwidth, height, parametercolors[k][0], parametercolors[k][1], parametercolors[k][2], parametercolors[k][3], 0);
+                                       DrawQ_Fill(x, y - offset, barwidth, height, parametercolors[k][0], parametercolors[k][1], parametercolors[k][2], parametercolors[k][3], 0);
                        }
                }
        }
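
The packetcounter adjustment above is ring-buffer bookkeeping: the counter names the slot currently being filled, so starting the walk one past it draws the oldest column first and the in-progress column last (rightmost). A small standalone illustration, with a buffer size of 8 assumed only for the example:

#include <stdio.h>

#define PACKETS 8 // stand-in for NETGRAPH_PACKETS, value assumed for illustration

int main(void)
{
	int sizes[PACKETS] = {100, 200, 300, 400, 500, 600, 700, 800};
	int packetcounter = 3;                     // slot most recently written
	int start = (packetcounter + 1) % PACKETS; // oldest entry comes first
	int j;
	for (j = 0; j < PACKETS; j++)
	{
		int index = (start + j) % PACKETS;
		printf("column %d draws sizes[%d] = %d\n", j, index, sizes[index]);
	}
	return 0; // columns visit 4,5,6,7,0,1,2,3 -> oldest to newest
}
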
@@ -224,10 +218,14 @@ const float netgraphcolors[3][4] =
        {0  , 1  , 0  , 1},
 };
 
-void SCR_DrawNetGraph_DrawConnection (netconn_t *conn, int graphx, int graphy, int barwidth, int barheight, int bardivide, const char *labelincoming, int separator, const char *labeloutgoing, float textsize)
+void SCR_DrawNetGraph_DrawConnection_Client (netconn_t *conn, int graphx, int graphy, int barwidth, int barheight, int bardivide, const char *labelincoming, int separator, const char *labeloutgoing, float textsize)
 {
        int numparameters;
        const int *parameters[3];
+       // dim background
+       DrawQ_Fill(graphx                                          , graphy, barwidth * NETGRAPH_PACKETS, barheight + textsize, 0, 0, 0, 0.5, 0);
+       DrawQ_Fill(graphx + barwidth * NETGRAPH_PACKETS + separator, graphy, barwidth * NETGRAPH_PACKETS, barheight + textsize, 0, 0, 0, 0.5, 0);
+       // draw the bar graphs
        numparameters = 3;
        parameters[0] = conn->incoming_unreliablesize;
        parameters[1] = conn->incoming_reliablesize;
@@ -237,6 +235,31 @@ void SCR_DrawNetGraph_DrawConnection (netconn_t *conn, int graphx, int graphy, i
        parameters[1] = conn->outgoing_reliablesize;
        parameters[2] = conn->outgoing_acksize;
        SCR_DrawNetGraph_DrawGraph(graphx + barwidth * NETGRAPH_PACKETS + separator, graphy, barwidth, barheight, bardivide, labeloutgoing, textsize, conn->outgoing_packetcounter, numparameters, parameters, netgraphcolors);
+       // draw labels
+       DrawQ_String(graphx                                          , graphy + barheight, labelincoming, 0, textsize, textsize, 1, 1, 1, 1, 0, NULL, false);
+       DrawQ_String(graphx + barwidth * NETGRAPH_PACKETS + separator, graphy + barheight, labeloutgoing, 0, textsize, textsize, 1, 1, 1, 1, 0, NULL, false);
+}
+
+void SCR_DrawNetGraph_DrawConnection_Server (netconn_t *conn, int graphx, int graphy, int barwidth, int barheight, int bardivide, const char *labeloutgoing, int separator, const char *labelincoming, float textsize)
+{
+       int numparameters;
+       const int *parameters[3];
+       // dim background
+       DrawQ_Fill(graphx                                          , graphy, barwidth * NETGRAPH_PACKETS, barheight + textsize, 0, 0, 0, 0.5, 0);
+       DrawQ_Fill(graphx + barwidth * NETGRAPH_PACKETS + separator, graphy, barwidth * NETGRAPH_PACKETS, barheight + textsize, 0, 0, 0, 0.5, 0);
+       // draw the bar graphs
+       numparameters = 3;
+       parameters[0] = conn->outgoing_unreliablesize;
+       parameters[1] = conn->outgoing_reliablesize;
+       parameters[2] = conn->outgoing_acksize;
+       SCR_DrawNetGraph_DrawGraph(graphx                                          , graphy, barwidth, barheight, bardivide, labeloutgoing, textsize, conn->outgoing_packetcounter, numparameters, parameters, netgraphcolors);
+       parameters[0] = conn->incoming_unreliablesize;
+       parameters[1] = conn->incoming_reliablesize;
+       parameters[2] = conn->incoming_acksize;
+       SCR_DrawNetGraph_DrawGraph(graphx + barwidth * NETGRAPH_PACKETS + separator, graphy, barwidth, barheight, bardivide, labelincoming, textsize, conn->incoming_packetcounter, numparameters, parameters, netgraphcolors);
+       // draw labels
+       DrawQ_String(graphx                                          , graphy + barheight, labeloutgoing, 0, textsize, textsize, 1, 1, 1, 1, 0, NULL, false);
+       DrawQ_String(graphx + barwidth * NETGRAPH_PACKETS + separator, graphy + barheight, labelincoming, 0, textsize, textsize, 1, 1, 1, 1, 0, NULL, false);
 }
 
 /*
@@ -246,7 +269,7 @@ SCR_DrawNetGraph
 */
 void SCR_DrawNetGraph (void)
 {
-       int separator, barwidth, barheight, bardivide, netgraph_x, netgraph_y, textsize;
+       int i, separator1, separator2, barwidth, barheight, bardivide, netgraph_x, netgraph_y, textsize, index, netgraphsperrow;
 
        if (cls.state != ca_connected)
                return;
@@ -255,14 +278,34 @@ void SCR_DrawNetGraph (void)
        if (!shownetgraph.integer)
                return;
 
-       separator = 4;
+       separator1 = 2;
+       separator2 = 4;
        textsize = 8;
        barwidth = 1;
        barheight = 50;
        bardivide = 20;
-       netgraph_x = 0;
-       netgraph_y = vid_conheight.integer - 48 - barheight - textsize;
-       SCR_DrawNetGraph_DrawConnection(cls.netcon, netgraph_x, netgraph_y, barwidth, barheight, bardivide, "incoming", separator, "outgoing", textsize);
+
+       netgraphsperrow = (vid_conwidth.integer + separator2) / (barwidth * NETGRAPH_PACKETS * 2 + separator1 + separator2);
+       netgraphsperrow = max(netgraphsperrow, 1);
+
+       index = 0;
+       netgraph_x = (vid_conwidth.integer + separator2) - (1 + (index % netgraphsperrow)) * (barwidth * NETGRAPH_PACKETS * 2 + separator1 + separator2);
+       netgraph_y = (vid_conheight.integer - 48 + separator2) - (1 + (index / netgraphsperrow)) * (barheight + textsize + separator2);
+       SCR_DrawNetGraph_DrawConnection_Client(cls.netcon, netgraph_x, netgraph_y, barwidth, barheight, bardivide, "incoming", separator1, "outgoing", textsize);
+       index++;
+
+       if (sv.active && shownetgraph.integer >= 2)
+       {
+               for (i = 0;i < svs.maxclients;i++)
+               {
+                       if (!svs.clients[i].netconnection)
+                               continue;
+                       netgraph_x = (vid_conwidth.integer + separator2) - (1 + (index % netgraphsperrow)) * (barwidth * NETGRAPH_PACKETS * 2 + separator1 + separator2);
+                       netgraph_y = (vid_conheight.integer - 48 + separator2) - (1 + (index / netgraphsperrow)) * (barheight + textsize + separator2);
+                       SCR_DrawNetGraph_DrawConnection_Server(svs.clients[i].netconnection, netgraph_x, netgraph_y, barwidth, barheight, bardivide, va("%s", svs.clients[i].name), separator1, "", textsize);
+                       index++;
+               }
+       }
 }
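
The layout above packs one cell per connection (two bar graphs plus separators) into rows that fill right-to-left, starting 48 units above the bottom of the console area and wrapping upward when a row is full. A standalone sketch of the same arithmetic, with NETGRAPH_PACKETS assumed to be 100 and a 640x480 console assumed purely for illustration:

#include <stdio.h>

int main(void)
{
	const int NETGRAPH_PACKETS = 100; // assumed value, for illustration only
	int conwidth = 640, conheight = 480;
	int separator1 = 2, separator2 = 4, barwidth = 1, barheight = 50, textsize = 8;
	int cellw = barwidth * NETGRAPH_PACKETS * 2 + separator1 + separator2; // 206
	int cellh = barheight + textsize + separator2;                         // 62
	int netgraphsperrow = (conwidth + separator2) / cellw;                 // 644 / 206 = 3
	int index;
	if (netgraphsperrow < 1)
		netgraphsperrow = 1;
	for (index = 0; index < 5; index++)
	{
		int x = (conwidth + separator2) - (1 + (index % netgraphsperrow)) * cellw;
		int y = (conheight - 48 + separator2) - (1 + (index / netgraphsperrow)) * cellh;
		printf("graph %d at (%d, %d)\n", index, x, y); // right-to-left, then one row up
	}
	return 0;
}
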
 
 /*
@@ -419,8 +462,8 @@ static int SCR_DrawQWDownload(int offset)
        len = (int)strlen(temp);
        x = (vid_conwidth.integer - len*size) / 2;
        y = vid_conheight.integer - size - offset;
-       DrawQ_Pic(0, y, NULL, vid_conwidth.integer, size, 0, 0, 0, 0.5, 0);
-       DrawQ_String(x, y, temp, len, size, size, 1, 1, 1, 1, 0);
+       DrawQ_Fill(0, y, vid_conwidth.integer, size, 0, 0, 0, 0.5, 0);
+       DrawQ_String(x, y, temp, len, size, size, 1, 1, 1, 1, 0, NULL, true);
        return 8;
 }
 
@@ -450,8 +493,8 @@ static int SCR_DrawCurlDownload(int offset)
        {
                len = (int)strlen(addinfo);
                x = (vid_conwidth.integer - len*size) / 2;
-               DrawQ_Pic(0, y - size, NULL, vid_conwidth.integer, size, 1, 1, 1, 0.8, 0);
-               DrawQ_String(x, y - size, addinfo, len, size, size, 0, 0, 0, 1, 0);
+               DrawQ_Fill(0, y - size, vid_conwidth.integer, size, 1, 1, 1, 0.8, 0);
+               DrawQ_String(x, y - size, addinfo, len, size, size, 0, 0, 0, 1, 0, NULL, true);
        }
 
        for(i = 0; i != nDownloads; ++i)
@@ -464,8 +507,8 @@ static int SCR_DrawCurlDownload(int offset)
                        dpsnprintf(temp, sizeof(temp), "Downloading %s ...  %5.1f%% @ %.1f KiB/s\n", downinfo[i].filename, 100.0 * downinfo[i].progress, downinfo[i].speed / 1024.0);
                len = (int)strlen(temp);
                x = (vid_conwidth.integer - len*size) / 2;
-               DrawQ_Pic(0, y + i * size, NULL, vid_conwidth.integer, size, 0, 0, 0, 0.8, 0);
-               DrawQ_String(x, y + i * size, temp, len, size, size, 1, 1, 1, 1, 0);
+               DrawQ_Fill(0, y + i * size, vid_conwidth.integer, size, 0, 0, 0, 0.8, 0);
+               DrawQ_String(x, y + i * size, temp, len, size, size, 1, 1, 1, 1, 0, NULL, true);
        }
 
        Z_Free(downinfo);
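
A reading note on the drawing calls changed throughout this diff: solid rectangles move from DrawQ_Pic with a NULL pic to a dedicated DrawQ_Fill, and DrawQ_String gains two trailing arguments. Judging only from the call sites in this patch, the prototypes presumably look roughly like the sketch below; the parameter names are guesses, and the two new DrawQ_String arguments appear to be an in/out color index (used by the centerprint code) and a flag that disables color-code parsing:

// Presumed prototypes, reconstructed only from the call sites in this diff;
// names are illustrative, not copied from the engine headers.
void DrawQ_Fill(float x, float y, float width, float height,
                float red, float green, float blue, float alpha, int flags);
void DrawQ_String(float x, float y, const char *text, int maxlen,
                  float scalex, float scaley,
                  float red, float green, float blue, float alpha, int flags,
                  int *outcolor, qboolean ignorecolorcodes);
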
@@ -558,7 +601,6 @@ void SCR_BeginLoadingPlaque (void)
        Log_Start();
 
        Host_StartVideo();
-       S_StopAllSounds();
        SCR_UpdateLoadingScreen(false);
 }
 
@@ -615,14 +657,14 @@ void R_TimeReport_Frame(void)
                                lines++;
                y = vid_conheight.integer - sb_lines - lines * 8;
                i = j = 0;
-               DrawQ_Pic(0, y, NULL, vid_conwidth.integer, lines * 8, 0, 0, 0, 0.5, 0);
+               DrawQ_Fill(0, y, vid_conwidth.integer, lines * 8, 0, 0, 0, 0.5, 0);
                while (r_speeds_string[i])
                {
                        j = i;
                        while (r_speeds_string[i] && r_speeds_string[i] != '\n')
                                i++;
                        if (i - j > 0)
-                               DrawQ_String(0, y, r_speeds_string + j, i - j, 8, 8, 1, 1, 1, 1, 0);
+                               DrawQ_String(0, y, r_speeds_string + j, i - j, 8, 8, 1, 1, 1, 1, 0, NULL, true);
                        if (r_speeds_string[i] == '\n')
                                i++;
                        y += 8;
@@ -683,6 +725,12 @@ void SCR_SizeDown_f (void)
        Cvar_SetValue ("viewsize",scr_viewsize.value-10);
 }
 
+void SCR_CaptureVideo_EndVideo(void);
+void CL_Screen_Shutdown(void)
+{
+       SCR_CaptureVideo_EndVideo();
+}
+
 void CL_Screen_Init(void)
 {
        Cvar_RegisterVariable (&scr_fov);
@@ -704,6 +752,8 @@ void CL_Screen_Init(void)
        Cvar_RegisterVariable (&scr_screenshot_jpeg_quality);
        Cvar_RegisterVariable (&scr_screenshot_gammaboost);
        Cvar_RegisterVariable (&cl_capturevideo);
+       Cvar_RegisterVariable (&cl_capturevideo_width);
+       Cvar_RegisterVariable (&cl_capturevideo_height);
        Cvar_RegisterVariable (&cl_capturevideo_realtime);
        Cvar_RegisterVariable (&cl_capturevideo_fps);
        Cvar_RegisterVariable (&cl_capturevideo_number);
@@ -871,72 +921,200 @@ static void SCR_CaptureVideo_RIFF_Pop(void)
        }
 }
 
-static void SCR_CaptureVideo_RIFF_IndexEntry(const char *chunkfourcc, int chunksize, int flags)
+static void GrowBuf(sizebuf_t *buf, int extralen)
 {
-       if (cls.capturevideo.riffstacklevel != 2)
-               Sys_Error("SCR_Capturevideo_RIFF_IndexEntry: RIFF stack level is %i (should be 2)\n", cls.capturevideo.riffstacklevel);
-       if (cls.capturevideo.riffindexbuffer.cursize + 16 > cls.capturevideo.riffindexbuffer.maxsize)
+       if(buf->cursize + extralen > buf->maxsize)
        {
-               int oldsize = cls.capturevideo.riffindexbuffer.maxsize;
+               int oldsize = buf->maxsize;
                unsigned char *olddata;
-               olddata = cls.capturevideo.riffindexbuffer.data;
-               cls.capturevideo.riffindexbuffer.maxsize = max(cls.capturevideo.riffindexbuffer.maxsize * 2, 4096);
-               cls.capturevideo.riffindexbuffer.data = Mem_Alloc(tempmempool, cls.capturevideo.riffindexbuffer.maxsize);
-               if (olddata)
+               olddata = buf->data;
+               buf->maxsize = max(buf->maxsize * 2, 4096);
+               buf->data = Mem_Alloc(tempmempool, buf->maxsize);
+               if(olddata)
                {
-                       memcpy(cls.capturevideo.riffindexbuffer.data, olddata, oldsize);
+                       memcpy(buf->data, olddata, oldsize);
                        Mem_Free(olddata);
                }
        }
+}
+
+static void SCR_CaptureVideo_RIFF_IndexEntry(const char *chunkfourcc, int chunksize, int flags)
+{
+       if (cls.capturevideo.riffstacklevel != 2)
+               Sys_Error("SCR_Capturevideo_RIFF_IndexEntry: RIFF stack level is %i (should be 2)\n", cls.capturevideo.riffstacklevel);
+       GrowBuf(&cls.capturevideo.riffindexbuffer, 16);
+       SCR_CaptureVideo_RIFF_Flush();
        MSG_WriteUnterminatedString(&cls.capturevideo.riffindexbuffer, chunkfourcc);
        MSG_WriteLong(&cls.capturevideo.riffindexbuffer, flags);
        MSG_WriteLong(&cls.capturevideo.riffindexbuffer, (int)FS_Tell(cls.capturevideo.videofile) - cls.capturevideo.riffstackstartoffset[1]);
        MSG_WriteLong(&cls.capturevideo.riffindexbuffer, chunksize);
 }
 
-static void SCR_CaptureVideo_RIFF_Finish(void)
+static void SCR_CaptureVideo_RIFF_MakeIxChunk(const char *fcc, const char *dwChunkId, fs_offset_t masteridx_counter, int *masteridx_count, fs_offset_t masteridx_start)
 {
-       // close the "movi" list
+       int nMatching;
+       int i;
+       fs_offset_t ix = SCR_CaptureVideo_RIFF_GetPosition();
+       fs_offset_t pos;
+
+       if(*masteridx_count >= AVI_MASTER_INDEX_SIZE)
+               return;
+
+       nMatching = 0; // go through index and enumerate them
+       for(i = 0; i < cls.capturevideo.riffindexbuffer.cursize; i += 16)
+               if(!memcmp(cls.capturevideo.riffindexbuffer.data + i, dwChunkId, 4))
+                       ++nMatching;
+
+       SCR_CaptureVideo_RIFF_Push(fcc, NULL);
+       SCR_CaptureVideo_RIFF_Write16(2); // wLongsPerEntry
+       SCR_CaptureVideo_RIFF_Write16(0x0100); // bIndexType=1, bIndexSubType=0
+       SCR_CaptureVideo_RIFF_Write32(nMatching); // nEntriesInUse
+       SCR_CaptureVideo_RIFF_WriteFourCC(dwChunkId); // dwChunkId
+       SCR_CaptureVideo_RIFF_Write32(cls.capturevideo.videofile_ix_movistart & (fs_offset_t) 0xFFFFFFFFu);
+       SCR_CaptureVideo_RIFF_Write32(((long long) cls.capturevideo.videofile_ix_movistart) >> 32);
+       SCR_CaptureVideo_RIFF_Write32(0); // dwReserved
+
+       for(i = 0; i < cls.capturevideo.riffindexbuffer.cursize; i += 16)
+               if(!memcmp(cls.capturevideo.riffindexbuffer.data + i, dwChunkId, 4))
+               {
+                       unsigned int *p = (unsigned int *) (cls.capturevideo.riffindexbuffer.data + i);
+                       unsigned int flags = p[1];
+                       unsigned int rpos = p[2];
+                       unsigned int size = p[3];
+                       size &= ~0x80000000;
+                       if(!(flags & 0x10)) // no keyframe?
+                               size |= 0x80000000;
+                       SCR_CaptureVideo_RIFF_Write32(rpos + 8);
+                       SCR_CaptureVideo_RIFF_Write32(size);
+               }
+
        SCR_CaptureVideo_RIFF_Pop();
-       // write the idx1 chunk that we've been building while saving the frames
-       SCR_CaptureVideo_RIFF_Push("idx1", NULL);
-       SCR_CaptureVideo_RIFF_WriteBytes(cls.capturevideo.riffindexbuffer.data, cls.capturevideo.riffindexbuffer.cursize);
+       pos = SCR_CaptureVideo_RIFF_GetPosition();
+       SCR_CaptureVideo_RIFF_Flush();
+
+       FS_Seek(cls.capturevideo.videofile, masteridx_start + 16 * *masteridx_count, SEEK_SET);
+       SCR_CaptureVideo_RIFF_Write32(ix & (fs_offset_t) 0xFFFFFFFFu);
+       SCR_CaptureVideo_RIFF_Write32(((long long) ix) >> 32);
+       SCR_CaptureVideo_RIFF_Write32(pos - ix);
+       SCR_CaptureVideo_RIFF_Write32(nMatching);
+       SCR_CaptureVideo_RIFF_Flush();
+
+       FS_Seek(cls.capturevideo.videofile, masteridx_counter, SEEK_SET);
+       SCR_CaptureVideo_RIFF_Write32(++*masteridx_count);
+       SCR_CaptureVideo_RIFF_Flush();
+
+       FS_Seek(cls.capturevideo.videofile, 0, SEEK_END);
+}
+
+static void SCR_CaptureVideo_RIFF_Finish(qboolean final)
+{
+       // close the "movi" list
        SCR_CaptureVideo_RIFF_Pop();
+       if(cls.capturevideo.videofile_ix_master_video_inuse_offset)
+               SCR_CaptureVideo_RIFF_MakeIxChunk("ix00", "00dc", cls.capturevideo.videofile_ix_master_video_inuse_offset, &cls.capturevideo.videofile_ix_master_video_inuse, cls.capturevideo.videofile_ix_master_video_start_offset);
+       if(cls.capturevideo.videofile_ix_master_audio_inuse_offset)
+               SCR_CaptureVideo_RIFF_MakeIxChunk("ix01", "01wb", cls.capturevideo.videofile_ix_master_audio_inuse_offset, &cls.capturevideo.videofile_ix_master_audio_inuse, cls.capturevideo.videofile_ix_master_audio_start_offset);
+       // write the idx1 chunk that we've been building while saving the frames (for old style players)
+       if(final && cls.capturevideo.videofile_firstchunkframes_offset)
+       // TODO replace index creating by OpenDML ix##/##ix/indx chunk so it works for more than one AVI part too
+       {
+               SCR_CaptureVideo_RIFF_Push("idx1", NULL);
+               SCR_CaptureVideo_RIFF_WriteBytes(cls.capturevideo.riffindexbuffer.data, cls.capturevideo.riffindexbuffer.cursize);
+               SCR_CaptureVideo_RIFF_Pop();
+       }
        cls.capturevideo.riffindexbuffer.cursize = 0;
        // pop the RIFF chunk itself
        while (cls.capturevideo.riffstacklevel > 0)
                SCR_CaptureVideo_RIFF_Pop();
        SCR_CaptureVideo_RIFF_Flush();
+       if(cls.capturevideo.videofile_firstchunkframes_offset)
+       {
+               Con_DPrintf("Finishing first chunk (%d frames)\n", cls.capturevideo.frame);
+               FS_Seek(cls.capturevideo.videofile, cls.capturevideo.videofile_firstchunkframes_offset, SEEK_SET);
+               SCR_CaptureVideo_RIFF_Write32(cls.capturevideo.frame);
+               SCR_CaptureVideo_RIFF_Flush();
+               FS_Seek(cls.capturevideo.videofile, 0, SEEK_END);
+               cls.capturevideo.videofile_firstchunkframes_offset = 0;
+       }
+       else
+               Con_DPrintf("Finishing another chunk (%d frames)\n", cls.capturevideo.frame);
 }
 
 static void SCR_CaptureVideo_RIFF_OverflowCheck(int framesize)
 {
-       fs_offset_t cursize;
+       fs_offset_t cursize, curfilesize;
        if (cls.capturevideo.riffstacklevel != 2)
                Sys_Error("SCR_CaptureVideo_RIFF_OverflowCheck: chunk stack leakage!\n");
        // check where we are in the file
        SCR_CaptureVideo_RIFF_Flush();
        cursize = SCR_CaptureVideo_RIFF_GetPosition() - cls.capturevideo.riffstackstartoffset[0];
+       curfilesize = SCR_CaptureVideo_RIFF_GetPosition();
+
        // if this would overflow the windows limit of 1GB per RIFF chunk, we need
        // to close the current RIFF chunk and open another for future frames
-       if (8 + cursize + framesize + cls.capturevideo.riffindexbuffer.cursize + 8 > 1<<30)
+       if (8 + cursize + framesize + cls.capturevideo.riffindexbuffer.cursize + 8 + cls.capturevideo.riffindexbuffer.cursize + 64 > 1<<30) // note that the Ix buffer takes less space... I just don't dare to / 2 here now... sorry, maybe later
        {
-               SCR_CaptureVideo_RIFF_Finish();
+               SCR_CaptureVideo_RIFF_Finish(false);
                // begin a new 1GB extended section of the AVI
                SCR_CaptureVideo_RIFF_Push("RIFF", "AVIX");
                SCR_CaptureVideo_RIFF_Push("LIST", "movi");
+               cls.capturevideo.videofile_ix_movistart = cls.capturevideo.riffstackstartoffset[1];
+       }
+}
+
+static void FindFraction(double val, int *num, int *denom, int denomMax)
+{
+       int i;
+       double bestdiff;
+       // initialize
+       bestdiff = fabs(val);
+       *num = 0;
+       *denom = 1;
+
+       for(i = 1; i <= denomMax; ++i)
+       {
+               int inum = floor(0.5 + val * i);
+               double diff = fabs(val - inum / (double)i);
+               if(diff < bestdiff)
+               {
+                       bestdiff = diff;
+                       *num = inum;
+                       *denom = i;
+               }
        }
 }
 
 void SCR_CaptureVideo_BeginVideo(void)
 {
-       double gamma, g;
-       int width = vid.width, height = vid.height, x;
+       double gamma, g, aspect;
+       int width = cl_capturevideo_width.integer, height = cl_capturevideo_height.integer;
+       int n, d;
        unsigned int i;
        if (cls.capturevideo.active)
                return;
        memset(&cls.capturevideo, 0, sizeof(cls.capturevideo));
        // soundrate is figured out on the first SoundFrame
+
+       if(width == 0 && height != 0)
+               width = (int) (height * (double)vid.width / ((double)vid.height * vid_pixelheight.value)); // keep aspect
+       if(width != 0 && height == 0)
+               height = (int) (width * ((double)vid.height * vid_pixelheight.value) / (double)vid.width); // keep aspect
+
+       if(width < 2 || width > vid.width) // can't scale up
+               width = vid.width;
+       if(height < 2 || height > vid.height) // can't scale up
+               height = vid.height;
+
+       aspect = vid.width / (vid.height * vid_pixelheight.value);
+
+       // ensure it's all even; if not, scale down a little
+       if(width % 2)
+               --width;
+       if(height % 2)
+               --height;
+
+       cls.capturevideo.width = width;
+       cls.capturevideo.height = height;
        cls.capturevideo.active = true;
        cls.capturevideo.starttime = realtime;
        cls.capturevideo.framerate = bound(1, cl_capturevideo_fps.value, 1000);
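
The sizing logic above means that setting only one of cl_capturevideo_width / cl_capturevideo_height derives the other from the current video mode (honoring vid_pixelheight), the result is clamped so the capture never upscales, and odd sizes are rounded down because the I420 conversion wants even dimensions. A quick worked example, with all values assumed for illustration (1280x1024 mode, square pixels, cl_capturevideo_height 360, width left at 0):

#include <stdio.h>

int main(void)
{
	// Assumed values for illustration only.
	int vidw = 1280, vidh = 1024;
	double pixelheight = 1.0;
	int width = 0, height = 360; // the two capture cvars

	if (width == 0 && height != 0)
		width = (int)(height * (double)vidw / ((double)vidh * pixelheight)); // 360 * 1280/1024 = 450
	if (width != 0 && height == 0)
		height = (int)(width * ((double)vidh * pixelheight) / (double)vidw);

	if (width < 2 || width > vidw)   // never scale up
		width = vidw;
	if (height < 2 || height > vidh)
		height = vidh;

	if (width % 2)  // I420 wants even dimensions
		--width;
	if (height % 2)
		--height;

	printf("capturing at %dx%d\n", width, height); // 450x360
	return 0;
}
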
@@ -944,7 +1122,8 @@ void SCR_CaptureVideo_BeginVideo(void)
        cls.capturevideo.frame = 0;
        cls.capturevideo.soundsampleframe = 0;
        cls.capturevideo.realtime = cl_capturevideo_realtime.integer != 0;
-       cls.capturevideo.buffer = (unsigned char *)Mem_Alloc(tempmempool, vid.width * vid.height * (3+3+3) + 18);
+       cls.capturevideo.screenbuffer = (unsigned char *)Mem_Alloc(tempmempool, vid.width * vid.height * 3);
+       cls.capturevideo.outbuffer = (unsigned char *)Mem_Alloc(tempmempool, width * height * (3+3+3) + 18);
        gamma = 1.0/scr_screenshot_gammaboost.value;
        dpsnprintf(cls.capturevideo.basename, sizeof(cls.capturevideo.basename), "video/dpvideo%03i", cl_capturevideo_number.integer);
        Cvar_SetValueQuick(&cl_capturevideo_number, cl_capturevideo_number.integer + 1);
@@ -1004,7 +1183,7 @@ Cr = R *  .500 + G * -.419 + B * -.0813 + 128.;
                SCR_CaptureVideo_RIFF_Write32(0); // max bytes per second
                SCR_CaptureVideo_RIFF_Write32(0); // padding granularity
                SCR_CaptureVideo_RIFF_Write32(0x910); // flags (AVIF_HASINDEX | AVIF_ISINTERLEAVED | AVIF_TRUSTCKTYPE)
-               cls.capturevideo.videofile_totalframes_offset1 = SCR_CaptureVideo_RIFF_GetPosition();
+               cls.capturevideo.videofile_firstchunkframes_offset = SCR_CaptureVideo_RIFF_GetPosition();
                SCR_CaptureVideo_RIFF_Write32(0); // total frames
                SCR_CaptureVideo_RIFF_Write32(0); // initial frames
                if (cls.capturevideo.soundrate)
@@ -1028,13 +1207,11 @@ Cr = R *  .500 + G * -.419 + B * -.0813 + 128.;
                SCR_CaptureVideo_RIFF_Write16(0); // language
                SCR_CaptureVideo_RIFF_Write32(0); // initial frames
                // find an ideal divisor for the framerate
-               for (x = 1;x < 1000;x++)
-                       if (cls.capturevideo.framerate * x == floor(cls.capturevideo.framerate * x))
-                               break;
-               SCR_CaptureVideo_RIFF_Write32(x); // samples/second divisor
-               SCR_CaptureVideo_RIFF_Write32((int)(cls.capturevideo.framerate * x)); // samples/second multiplied by divisor
+               FindFraction(cls.capturevideo.framerate, &n, &d, 1000);
+               SCR_CaptureVideo_RIFF_Write32(d); // samples/second divisor
+               SCR_CaptureVideo_RIFF_Write32(n); // samples/second multiplied by divisor
                SCR_CaptureVideo_RIFF_Write32(0); // start
-               cls.capturevideo.videofile_totalframes_offset2 = SCR_CaptureVideo_RIFF_GetPosition();
+               cls.capturevideo.videofile_totalframes_offset1 = SCR_CaptureVideo_RIFF_GetPosition();
                SCR_CaptureVideo_RIFF_Write32(0); // length
                SCR_CaptureVideo_RIFF_Write32(width*height+(width/2)*(height/2)*2); // suggested buffer size
                SCR_CaptureVideo_RIFF_Write32(0); // quality
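
FindFraction (added earlier in this patch) replaces the old brute-force divisor loop: the AVI stream header stores the frame rate as the two values written a few lines up (the samples/second divisor, then the rate multiplied by it), and the helper picks the closest fraction with a denominator up to denomMax. A standalone copy of the same search, run on typical capture rates purely as an illustration:

#include <math.h>
#include <stdio.h>

// Standalone copy of the FindFraction search, for illustration only.
static void find_fraction(double val, int *num, int *denom, int denomMax)
{
	int i;
	double bestdiff = fabs(val);
	*num = 0;
	*denom = 1;
	for (i = 1; i <= denomMax; ++i)
	{
		int inum = (int)floor(0.5 + val * i);
		double diff = fabs(val - inum / (double)i);
		if (diff < bestdiff)
		{
			bestdiff = diff;
			*num = inum;
			*denom = i;
		}
	}
}

int main(void)
{
	int n, d;
	find_fraction(30.0, &n, &d, 1000);
	printf("30 fps    -> %d/%d\n", n, d);  // 30/1
	find_fraction(29.97, &n, &d, 1000);
	printf("29.97 fps -> %d/%d\n", n, d);  // 2997/100
	// exact NTSC (30000/1001) would need denomMax >= 1001
	return 0;
}
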
@@ -1058,6 +1235,41 @@ Cr = R *  .500 + G * -.419 + B * -.0813 + 128.;
                SCR_CaptureVideo_RIFF_Write32(0); // color used
                SCR_CaptureVideo_RIFF_Write32(0); // color important
                SCR_CaptureVideo_RIFF_Pop();
+               // master index
+               SCR_CaptureVideo_RIFF_Push("indx", NULL);
+               SCR_CaptureVideo_RIFF_Write16(4); // wLongsPerEntry
+               SCR_CaptureVideo_RIFF_Write16(0); // bIndexSubType=0, bIndexType=0
+               cls.capturevideo.videofile_ix_master_video_inuse_offset = SCR_CaptureVideo_RIFF_GetPosition();
+               SCR_CaptureVideo_RIFF_Write32(0); // nEntriesInUse
+               SCR_CaptureVideo_RIFF_WriteFourCC("00dc"); // dwChunkId
+               SCR_CaptureVideo_RIFF_Write32(0); // dwReserved1
+               SCR_CaptureVideo_RIFF_Write32(0); // dwReserved2
+               SCR_CaptureVideo_RIFF_Write32(0); // dwReserved3
+               cls.capturevideo.videofile_ix_master_video_start_offset = SCR_CaptureVideo_RIFF_GetPosition();
+               for(i = 0; i < AVI_MASTER_INDEX_SIZE * 4; ++i)
+                       SCR_CaptureVideo_RIFF_Write32(0); // fill up later
+               SCR_CaptureVideo_RIFF_Pop();
+               // extended format (aspect!)
+               SCR_CaptureVideo_RIFF_Push("vprp", NULL);
+               SCR_CaptureVideo_RIFF_Write32(0); // VideoFormatToken
+               SCR_CaptureVideo_RIFF_Write32(0); // VideoStandard
+               SCR_CaptureVideo_RIFF_Write32((int)cls.capturevideo.framerate); // dwVerticalRefreshRate (bogus)
+               SCR_CaptureVideo_RIFF_Write32(width); // dwHTotalInT
+               SCR_CaptureVideo_RIFF_Write32(height); // dwVTotalInLines
+               FindFraction(aspect, &n, &d, 1000);
+               SCR_CaptureVideo_RIFF_Write32((n << 16) | d); // dwFrameAspectRatio // TODO a word
+               SCR_CaptureVideo_RIFF_Write32(width); // dwFrameWidthInPixels
+               SCR_CaptureVideo_RIFF_Write32(height); // dwFrameHeightInLines
+               SCR_CaptureVideo_RIFF_Write32(1); // nFieldPerFrame
+               SCR_CaptureVideo_RIFF_Write32(width); // CompressedBMWidth
+               SCR_CaptureVideo_RIFF_Write32(height); // CompressedBMHeight
+               SCR_CaptureVideo_RIFF_Write32(width); // ValidBMWidth
+               SCR_CaptureVideo_RIFF_Write32(height); // ValidBMHeight
+               SCR_CaptureVideo_RIFF_Write32(0); // ValidBMXOffset
+               SCR_CaptureVideo_RIFF_Write32(0); // ValidBMYOffset
+               SCR_CaptureVideo_RIFF_Write32(0); // ValidBMXOffsetInT
+               SCR_CaptureVideo_RIFF_Write32(0); // ValidBMYValidStartLine
+               SCR_CaptureVideo_RIFF_Pop();
                SCR_CaptureVideo_RIFF_Pop();
                if (cls.capturevideo.soundrate)
                {
@@ -1092,8 +1304,33 @@ Cr = R *  .500 + G * -.419 + B * -.0813 + 128.;
                        SCR_CaptureVideo_RIFF_Write16(16); // bits per sample
                        SCR_CaptureVideo_RIFF_Write16(0); // size
                        SCR_CaptureVideo_RIFF_Pop();
+                       // master index
+                       SCR_CaptureVideo_RIFF_Push("indx", NULL);
+                       SCR_CaptureVideo_RIFF_Write16(4); // wLongsPerEntry
+                       SCR_CaptureVideo_RIFF_Write16(0); // bIndexSubType=0, bIndexType=0
+                       cls.capturevideo.videofile_ix_master_audio_inuse_offset = SCR_CaptureVideo_RIFF_GetPosition();
+                       SCR_CaptureVideo_RIFF_Write32(0); // nEntriesInUse
+                       SCR_CaptureVideo_RIFF_WriteFourCC("01wb"); // dwChunkId
+                       SCR_CaptureVideo_RIFF_Write32(0); // dwReserved1
+                       SCR_CaptureVideo_RIFF_Write32(0); // dwReserved2
+                       SCR_CaptureVideo_RIFF_Write32(0); // dwReserved3
+                       cls.capturevideo.videofile_ix_master_audio_start_offset = SCR_CaptureVideo_RIFF_GetPosition();
+                       for(i = 0; i < AVI_MASTER_INDEX_SIZE * 4; ++i)
+                               SCR_CaptureVideo_RIFF_Write32(0); // fill up later
+                       SCR_CaptureVideo_RIFF_Pop();
                        SCR_CaptureVideo_RIFF_Pop();
                }
+
+               cls.capturevideo.videofile_ix_master_audio_inuse = cls.capturevideo.videofile_ix_master_video_inuse = 0;
+
+               // extended header (for total #frames)
+               SCR_CaptureVideo_RIFF_Push("LIST", "odml");
+               SCR_CaptureVideo_RIFF_Push("dmlh", NULL);
+               cls.capturevideo.videofile_totalframes_offset2 = SCR_CaptureVideo_RIFF_GetPosition();
+               SCR_CaptureVideo_RIFF_Write32(0);
+               SCR_CaptureVideo_RIFF_Pop();
+               SCR_CaptureVideo_RIFF_Pop();
+
                // close the AVI header list
                SCR_CaptureVideo_RIFF_Pop();
                // software that produced this AVI video file
@@ -1117,6 +1354,7 @@ Cr = R *  .500 + G * -.419 + B * -.0813 + 128.;
                SCR_CaptureVideo_RIFF_Pop();
                // begin the actual video section now
                SCR_CaptureVideo_RIFF_Push("LIST", "movi");
+               cls.capturevideo.videofile_ix_movistart = cls.capturevideo.riffstackstartoffset[1];
                // we're done with the headers now...
                SCR_CaptureVideo_RIFF_Flush();
                if (cls.capturevideo.riffstacklevel != 2)
@@ -1143,8 +1381,9 @@ void SCR_CaptureVideo_EndVideo(void)
                {
                case CAPTUREVIDEOFORMAT_AVI_I420:
                        // close any open chunks
-                       SCR_CaptureVideo_RIFF_Finish();
+                       SCR_CaptureVideo_RIFF_Finish(true);
                        // go back and fix the video frames and audio samples fields
+                       Con_DPrintf("Finishing capture (%d frames, %d audio frames)\n", cls.capturevideo.frame, cls.capturevideo.soundsampleframe);
                        FS_Seek(cls.capturevideo.videofile, cls.capturevideo.videofile_totalframes_offset1, SEEK_SET);
                        SCR_CaptureVideo_RIFF_Write32(cls.capturevideo.frame);
                        SCR_CaptureVideo_RIFF_Flush();
@@ -1165,10 +1404,16 @@ void SCR_CaptureVideo_EndVideo(void)
                cls.capturevideo.videofile = NULL;
        }
 
-       if (cls.capturevideo.buffer)
+       if (cls.capturevideo.screenbuffer)
+       {
+               Mem_Free (cls.capturevideo.screenbuffer);
+               cls.capturevideo.screenbuffer = NULL;
+       }
+
+       if (cls.capturevideo.outbuffer)
        {
-               Mem_Free (cls.capturevideo.buffer);
-               cls.capturevideo.buffer = NULL;
+               Mem_Free (cls.capturevideo.outbuffer);
+               cls.capturevideo.outbuffer = NULL;
        }
 
        if (cls.capturevideo.riffindexbuffer.data)
@@ -1223,9 +1468,55 @@ void SCR_CaptureVideo_ConvertFrame_RGB_to_I420_flip(int width, int height, unsig
        }
 }
 
+static void SCR_ScaleDown(unsigned char *in, int inw, int inh, unsigned char *out, int outw, int outh)
+{
+       // TODO optimize this function
+
+       int x, y;
+       float area;
+
+       // memcpy is faster than me
+       if(inw == outw && inh == outh)
+       {
+               memcpy(out, in, 3 * inw * inh);
+               return;
+       }
+
+       // otherwise: a box filter
+       area = (float)outw * (float)outh / (float)inw / (float)inh;
+       for(y = 0; y < outh; ++y)
+       {
+               float iny0 =  y    / (float)outh * inh; int iny0_i = floor(iny0);
+               float iny1 = (y+1) / (float)outh * inh; int iny1_i = ceil(iny1);
+               for(x = 0; x < outw; ++x)
+               {
+                       float inx0 =  x    / (float)outw * inw; int inx0_i = floor(inx0);
+                       float inx1 = (x+1) / (float)outw * inw; int inx1_i = ceil(inx1);
+                       float r = 0, g = 0, b = 0;
+                       int xx, yy;
+
+                       for(yy = iny0_i; yy < iny1_i; ++yy)
+                       {
+                               float ya = min(yy+1, iny1) - max(iny0, yy);
+                               for(xx = inx0_i; xx < inx1_i; ++xx)
+                               {
+                                       float a = ya * (min(xx+1, inx1) - max(inx0, xx));
+                                       r += a * in[3*(xx + inw * yy)+0];
+                                       g += a * in[3*(xx + inw * yy)+1];
+                                       b += a * in[3*(xx + inw * yy)+2];
+                               }
+                       }
+
+                       out[3*(x + outw * y)+0] = r * area;
+                       out[3*(x + outw * y)+1] = g * area;
+                       out[3*(x + outw * y)+2] = b * area;
+               }
+       }
+}
+
 qboolean SCR_CaptureVideo_VideoFrame(int newframenum)
 {
-       int x = 0, y = 0, width = vid.width, height = vid.height;
+       int x = 0, y = 0, width = cls.capturevideo.width, height = cls.capturevideo.height;
        unsigned char *in, *out;
        CHECKGLERROR
        //return SCR_ScreenShot(filename, cls.capturevideo.buffer, cls.capturevideo.buffer + vid.width * vid.height * 3, cls.capturevideo.buffer + vid.width * vid.height * 6, 0, 0, vid.width, vid.height, false, false, false, jpeg, true);
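
SCR_ScaleDown above is a plain box filter: every output pixel averages all input pixels it overlaps, each weighted by the overlapped area, so non-integer scaling ratios are handled without dropping rows or columns. A one-dimensional illustration of the weights it produces when shrinking 5 samples to 3 (standalone sketch, not part of the patch):

#include <stdio.h>

// Coverage weights of the box filter, shown for a 1-D shrink of 5 -> 3.
// Output cell x covers input range [x*5/3, (x+1)*5/3); each input sample
// contributes its overlapped fraction.
int main(void)
{
	int inw = 5, outw = 3, x, xx;
	for (x = 0; x < outw; x++)
	{
		double in0 = x * (double)inw / outw;       // left edge in input space
		double in1 = (x + 1) * (double)inw / outw; // right edge
		printf("out[%d] <- ", x);
		for (xx = (int)in0; xx < in1; xx++)
		{
			double hi = (xx + 1 < in1) ? xx + 1 : in1;
			double lo = (xx > in0) ? xx : in0;
			printf("%.3f*in[%d] ", hi - lo, xx);   // e.g. out[1] = 0.333*in[1] + 1*in[2] + 0.333*in[3]
		}
		printf("(sum = %.3f = inw/outw)\n", inw / (double)outw);
	}
	return 0;
}
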
@@ -1237,9 +1528,10 @@ qboolean SCR_CaptureVideo_VideoFrame(int newframenum)
                if (!cls.capturevideo.videofile)
                        return false;
                // FIXME: width/height must be multiple of 2, enforce this?
-               qglReadPixels (x, y, width, height, GL_RGB, GL_UNSIGNED_BYTE, cls.capturevideo.buffer);CHECKGLERROR
-               in = cls.capturevideo.buffer;
-               out = cls.capturevideo.buffer + width*height*3;
+               qglReadPixels (x, y, vid.width, vid.height, GL_RGB, GL_UNSIGNED_BYTE, cls.capturevideo.screenbuffer);CHECKGLERROR
+               SCR_ScaleDown (cls.capturevideo.screenbuffer, vid.width, vid.height, cls.capturevideo.outbuffer, width, height);
+               in = cls.capturevideo.outbuffer;
+               out = cls.capturevideo.outbuffer + width*height*3;
                SCR_CaptureVideo_ConvertFrame_RGB_to_I420_flip(width, height, in, out);
                x = width*height+(width/2)*(height/2)*2;
                SCR_CaptureVideo_RIFF_OverflowCheck(8 + x);
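
The x computed above is the byte size of one I420 frame: a full-resolution 8-bit Y plane plus U and V planes subsampled by two in each axis, i.e. 1.5 bytes per pixel, which comfortably fits in the region of outbuffer past the 3-byte-per-pixel RGB staging area. A quick arithmetic check with illustrative dimensions:

#include <stdio.h>

int main(void)
{
	int width = 640, height = 480;                  // illustrative, must be even
	int y_plane  = width * height;                  // 307200 bytes
	int uv_plane = (width / 2) * (height / 2) * 2;  // 153600 bytes (U + V)
	int total    = y_plane + uv_plane;              // 460800 bytes
	printf("%d bytes, %.2f bytes per pixel\n", total, total / (double)(width * height)); // 1.50
	return 0;
}
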
@@ -1386,6 +1678,7 @@ static void R_Envmap_f (void)
        r_view.width = size;
        r_view.height = size;
        r_view.depth = 1;
+       r_view.useperspective = true;
 
        r_view.frustum_x = tan(90 * M_PI / 360.0);
        r_view.frustum_y = tan(90 * M_PI / 360.0);
@@ -1414,36 +1707,22 @@ static void R_Envmap_f (void)
 
 //=============================================================================
 
-// LordHavoc: SHOWLMP stuff
-#define SHOWLMP_MAXLABELS 256
-typedef struct showlmp_s
-{
-       qboolean        isactive;
-       float           x;
-       float           y;
-       char            label[32];
-       char            pic[128];
-}
-showlmp_t;
-
-showlmp_t showlmp[SHOWLMP_MAXLABELS];
-
 void SHOWLMP_decodehide(void)
 {
        int i;
        char *lmplabel;
        lmplabel = MSG_ReadString();
-       for (i = 0;i < SHOWLMP_MAXLABELS;i++)
-               if (showlmp[i].isactive && strcmp(showlmp[i].label, lmplabel) == 0)
+       for (i = 0;i < cl.num_showlmps;i++)
+               if (cl.showlmps[i].isactive && strcmp(cl.showlmps[i].label, lmplabel) == 0)
                {
-                       showlmp[i].isactive = false;
+                       cl.showlmps[i].isactive = false;
                        return;
                }
 }
 
 void SHOWLMP_decodeshow(void)
 {
-       int i, k;
+       int k;
        char lmplabel[256], picname[256];
        float x, y;
        strlcpy (lmplabel,MSG_ReadString(), sizeof (lmplabel));
@@ -1458,41 +1737,37 @@ void SHOWLMP_decodeshow(void)
                x = MSG_ReadShort();
                y = MSG_ReadShort();
        }
-       k = -1;
-       for (i = 0;i < SHOWLMP_MAXLABELS;i++)
-               if (showlmp[i].isactive)
-               {
-                       if (strcmp(showlmp[i].label, lmplabel) == 0)
-                       {
-                               k = i;
-                               break; // drop out to replace it
-                       }
-               }
-               else if (k < 0) // find first empty one to replace
-                       k = i;
-       if (k < 0)
-               return; // none found to replace
-       // change existing one
-       showlmp[k].isactive = true;
-       strlcpy (showlmp[k].label, lmplabel, sizeof (showlmp[k].label));
-       strlcpy (showlmp[k].pic, picname, sizeof (showlmp[k].pic));
-       showlmp[k].x = x;
-       showlmp[k].y = y;
+       if (!cl.showlmps || cl.num_showlmps >= cl.max_showlmps)
+       {
+               showlmp_t *oldshowlmps = cl.showlmps;
+               cl.max_showlmps += 16;
+               cl.showlmps = Mem_Alloc(cls.levelmempool, cl.max_showlmps * sizeof(showlmp_t));
+               if (cl.num_showlmps)
+                       memcpy(cl.showlmps, oldshowlmps, cl.num_showlmps * sizeof(showlmp_t));
+               if (oldshowlmps)
+                       Mem_Free(oldshowlmps);
+       }
+       for (k = 0;k < cl.max_showlmps;k++)
+               if (cl.showlmps[k].isactive && !strcmp(cl.showlmps[k].label, lmplabel))
+                       break;
+       if (k == cl.max_showlmps)
+               for (k = 0;k < cl.max_showlmps;k++)
+                       if (!cl.showlmps[k].isactive)
+                               break;
+       cl.showlmps[k].isactive = true;
+       strlcpy (cl.showlmps[k].label, lmplabel, sizeof (cl.showlmps[k].label));
+       strlcpy (cl.showlmps[k].pic, picname, sizeof (cl.showlmps[k].pic));
+       cl.showlmps[k].x = x;
+       cl.showlmps[k].y = y;
+       cl.num_showlmps = max(cl.num_showlmps, k + 1);
 }
 
 void SHOWLMP_drawall(void)
 {
        int i;
-       for (i = 0;i < SHOWLMP_MAXLABELS;i++)
-               if (showlmp[i].isactive)
-                       DrawQ_Pic(showlmp[i].x, showlmp[i].y, Draw_CachePic(showlmp[i].pic, true), 0, 0, 1, 1, 1, 1, 0);
-}
-
-void SHOWLMP_clear(void)
-{
-       int i;
-       for (i = 0;i < SHOWLMP_MAXLABELS;i++)
-               showlmp[i].isactive = false;
+       for (i = 0;i < cl.num_showlmps;i++)
+               if (cl.showlmps[i].isactive)
+                       DrawQ_Pic(cl.showlmps[i].x, cl.showlmps[i].y, Draw_CachePic(cl.showlmps[i].pic, true), 0, 0, 1, 1, 1, 1, 0);
 }
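
This is the change the commit message refers to: instead of a fixed 256-entry showlmp[] array scanned every frame even though Nehahra's SHOWLMP messages are rarely used, the labels now live in a per-level cl.showlmps array that starts empty and grows in blocks of 16 as labels arrive. A generic sketch of that grow-and-copy pattern, using plain malloc/free in place of the engine's memory pools (illustration only):

#include <stdlib.h>
#include <string.h>

typedef struct item_s { int isactive; char label[32]; } item_t;

static item_t *items;
static int num_items, max_items;

// Make sure there is room for at least one more item, growing in blocks of 16.
static int ensure_room(void)
{
	item_t *old;
	if (items && num_items < max_items)
		return 1;
	old = items;
	max_items += 16;
	items = (item_t *)malloc(max_items * sizeof(item_t));
	if (!items)
	{
		items = old;
		max_items -= 16;
		return 0;
	}
	if (num_items)
		memcpy(items, old, num_items * sizeof(item_t));
	free(old);
	return 1;
}
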
 
 /*
@@ -1582,10 +1857,14 @@ void SCR_DrawScreen (void)
        R_Mesh_Start();
 
        if (r_timereport_active)
-               R_TimeReport("setup");
+               R_TimeReport("screensetup");
 
        R_UpdateVariables();
 
+       // Quake uses clockwise winding, so these are swapped
+       r_view.cullface_front = GL_BACK;
+       r_view.cullface_back = GL_FRONT;
+
        if (cls.signon == SIGNONS)
        {
                float size;
@@ -1620,6 +1899,7 @@ void SCR_DrawScreen (void)
                // this it simply assumes the requested fov is the vertical fov
                // for a 4x3 display, if the ratio is not 4x3 this makes the fov
                // higher/lower according to the ratio
+               r_view.useperspective = true;
                r_view.frustum_y = tan(scr_fov.value * M_PI / 360.0) * (3.0/4.0) * cl.viewzoom;
                r_view.frustum_x = r_view.frustum_y * (float)r_view.width / (float)r_view.height / vid_pixelheight.value;
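
For reference, the frustum setup above derives everything from the vertical fov as if the display were 4:3 (frustum_y = tan(fov/2) * 0.75 * viewzoom) and then stretches frustum_x by the real aspect ratio, so wider windows simply see more to the sides instead of squashing the image. A worked example with assumed numbers (fov 90, viewzoom 1, a 1280x720 view, square pixels):

#include <math.h>
#include <stdio.h>

#ifndef M_PI
#define M_PI 3.14159265358979323846
#endif

int main(void)
{
	double fov = 90.0, viewzoom = 1.0, pixelheight = 1.0; // assumed values
	double width = 1280.0, height = 720.0;
	double frustum_y = tan(fov * M_PI / 360.0) * (3.0 / 4.0) * viewzoom; // tan(45 deg) * 0.75 = 0.75
	double frustum_x = frustum_y * width / height / pixelheight;         // 0.75 * 16/9 = 1.333
	printf("frustum_x = %.3f, frustum_y = %.3f\n", frustum_x, frustum_y);
	printf("effective horizontal fov = %.1f degrees\n", 2.0 * atan(frustum_x) * 180.0 / M_PI); // about 106.3
	return 0;
}
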
 
@@ -1640,6 +1920,7 @@ void SCR_DrawScreen (void)
                        r_view.y = 0;
                        r_view.z = 0;
 
+                       r_view.useperspective = true;
                        r_view.frustum_y = tan(scr_zoomwindow_fov.value * M_PI / 360.0) * (3.0/4.0) * cl.viewzoom;
                        r_view.frustum_x = r_view.frustum_y * vid_pixelheight.value * (float)r_view.width / (float)r_view.height;
 
@@ -1659,6 +1940,7 @@ void SCR_DrawScreen (void)
                r_view.x = 0;
                r_view.y = 0;
                r_view.z = 0;
+               r_view.useperspective = false;
        }
 
        // draw 2D stuff
@@ -1733,6 +2015,7 @@ void SCR_UpdateLoadingScreen (qboolean clear)
        GL_Color(1,1,1,1);
        GL_BlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
        GL_DepthRange(0, 1);
+       GL_PolygonOffset(0, 0);
        GL_DepthTest(false);
        R_Mesh_VertexPointer(vertex3f, 0, 0);
        R_Mesh_ColorPointer(NULL, 0, 0);
@@ -1838,7 +2121,8 @@ void CL_UpdateScreen(void)
        qglDepthMask(1);CHECKGLERROR
        qglColorMask(1,1,1,1);CHECKGLERROR
        qglClearColor(0,0,0,0);CHECKGLERROR
-       qglClear(GL_COLOR_BUFFER_BIT);CHECKGLERROR
+       R_ClearScreen();
+       r_view.clear = false;
 
        if(scr_stipple.integer)
        {
@@ -1863,7 +2147,7 @@ void CL_UpdateScreen(void)
                qglDisable(GL_POLYGON_STIPPLE);
 
        if (r_timereport_active)
-               R_TimeReport("clear");
+               R_TimeReport("screenclear");
 
        qglDrawBuffer(GL_BACK);
 
@@ -1921,5 +2205,4 @@ void CL_UpdateScreen(void)
 
 void CL_Screen_NewMap(void)
 {
-       SHOWLMP_clear();
 }