optimized SHOWLMP code (only used by Nehahra) to not eat any CPU time

diff --git a/cl_screen.c b/cl_screen.c
index 56fb1ff1f37bbc59836e3f409832f54e9cfc0ff2..ab5bafc644bc67ae2e466fb8693e9b33db39028c 100644
@@ -4,20 +4,24 @@
 #include "image.h"
 #include "jpeg.h"
 #include "cl_collision.h"
+#include "libcurl.h"
 #include "csprogs.h"
 
+// we have to include snd_main.h here only to get access to snd_renderbuffer->format.speed when writing the AVI headers
+#include "snd_main.h"
+
 cvar_t scr_viewsize = {CVAR_SAVE, "viewsize","100", "how large the view should be, 110 disables inventory bar, 120 disables status bar"};
-cvar_t scr_fov = {CVAR_SAVE, "fov","90", "field of vision, 1-170 degrees, default 90, some players use 110-130"};      // 1 - 170
+cvar_t scr_fov = {CVAR_SAVE, "fov","90", "field of vision, 1-170 degrees, default 90, some players use 110-130"};
 cvar_t scr_conalpha = {CVAR_SAVE, "scr_conalpha", "1", "opacity of console background"};
 cvar_t scr_conbrightness = {CVAR_SAVE, "scr_conbrightness", "1", "brightness of console background (0 = black, 1 = image)"};
 cvar_t scr_conforcewhiledisconnected = {0, "scr_conforcewhiledisconnected", "1", "forces fullscreen console while disconnected"};
 cvar_t scr_menuforcewhiledisconnected = {0, "scr_menuforcewhiledisconnected", "0", "forces menu while disconnected"};
 cvar_t scr_centertime = {0, "scr_centertime","2", "how long centerprint messages show"};
 cvar_t scr_showram = {CVAR_SAVE, "showram","1", "show ram icon if low on surface cache memory (not used)"};
-cvar_t scr_showturtle = {CVAR_SAVE, "showturtle","0", "show turtle icon when framerate is too low (not used)"};
+cvar_t scr_showturtle = {CVAR_SAVE, "showturtle","0", "show turtle icon when framerate is too low"};
 cvar_t scr_showpause = {CVAR_SAVE, "showpause","1", "show pause icon when game is paused"};
 cvar_t scr_showbrand = {0, "showbrand","0", "shows gfx/brand.tga in a corner of the screen (different values select different positions, including centered)"};
-cvar_t scr_printspeed = {0, "scr_printspeed","8", "speed of intermission printing (episode end texts)"};
+cvar_t scr_printspeed = {0, "scr_printspeed","0", "speed of intermission printing (episode end texts), a value of 0 disables the slow printing"};
 cvar_t vid_conwidth = {CVAR_SAVE, "vid_conwidth", "640", "virtual width of 2D graphics system"};
 cvar_t vid_conheight = {CVAR_SAVE, "vid_conheight", "480", "virtual height of 2D graphics system"};
 cvar_t vid_pixelheight = {CVAR_SAVE, "vid_pixelheight", "1", "adjusts vertical field of vision to account for non-square pixels (1280x1024 on a CRT monitor for example)"};
@@ -25,22 +29,28 @@ cvar_t scr_screenshot_jpeg = {CVAR_SAVE, "scr_screenshot_jpeg","1", "save jpeg i
 cvar_t scr_screenshot_jpeg_quality = {CVAR_SAVE, "scr_screenshot_jpeg_quality","0.9", "image quality of saved jpeg"};
 cvar_t scr_screenshot_gammaboost = {CVAR_SAVE, "scr_screenshot_gammaboost","1", "gamma correction on saved screenshots and videos, 1.0 saves unmodified images"};
 // scr_screenshot_name is defined in fs.c
-cvar_t cl_capturevideo = {0, "cl_capturevideo", "0", "enables saving of video to a file or files (default is .tga files, if scr_screenshot_jpeg is on it saves .jpg files (VERY SLOW), if any rawrgb or rawyv12 are on it saves those formats instead, note that scr_screenshot_gammaboost affects the brightness of the output)"};
-cvar_t cl_capturevideo_sound = {0, "cl_capturevideo_sound", "0", "enables saving of sound to a .wav file (warning: this requires exact sync, if your hard drive can't keep up it will abort, if your graphics can't keep up it will save duplicate frames to maintain sound sync)"};
+cvar_t cl_capturevideo = {0, "cl_capturevideo", "0", "enables saving of video to a .avi file using uncompressed I420 colorspace and PCM audio, note that scr_screenshot_gammaboost affects the brightness of the output)"};
+cvar_t cl_capturevideo_width = {0, "cl_capturevideo_width", "0", "scales all frames to this width before saving the video (0 = use the current video width, or derive it from cl_capturevideo_height keeping aspect)"};
+cvar_t cl_capturevideo_height = {0, "cl_capturevideo_height", "0", "scales all frames to this height before saving the video (0 = use the current video height, or derive it from cl_capturevideo_width keeping aspect)"};
+cvar_t cl_capturevideo_realtime = {0, "cl_capturevideo_realtime", "0", "causes video saving to operate in realtime (mostly useful while playing, not while capturing demos), this can produce a much lower quality video due to poor sound/video sync and will abort saving if your machine stalls for over 1 second"};
 cvar_t cl_capturevideo_fps = {0, "cl_capturevideo_fps", "30", "how many frames per second to save (29.97 for NTSC, 30 for typical PC video, 15 can be useful)"};
-cvar_t cl_capturevideo_rawrgb = {0, "cl_capturevideo_rawrgb", "0", "saves a single .rgb video file containing raw RGB images (you'll need special processing tools to encode this to something more useful)"};
-cvar_t cl_capturevideo_rawyv12 = {0, "cl_capturevideo_rawyv12", "0", "saves a single .yv12 video file containing raw YV12 (luma plane, then half resolution chroma planes, first chroma blue then chroma red, this is the format used internally by many encoders, some tools can read it directly)"};
+cvar_t cl_capturevideo_number = {CVAR_SAVE, "cl_capturevideo_number", "1", "number to append to video filename, incremented each time a capture begins"};
 cvar_t r_letterbox = {0, "r_letterbox", "0", "reduces vertical height of view to simulate a letterboxed movie effect (can be used by mods for cutscenes)"};
-cvar_t r_stereo_separation = {0, "r_stereo_separation", "4", "separation of eyes in the world (try negative values too)"};
-cvar_t r_stereo_sidebyside = {0, "r_stereo_sidebyside", "0", "side by side views (for those who can't afford glasses but can afford eye strain)"};
+cvar_t r_stereo_separation = {0, "r_stereo_separation", "4", "separation distance of eyes in the world (negative values are only useful for cross-eyed viewing)"};
+cvar_t r_stereo_sidebyside = {0, "r_stereo_sidebyside", "0", "side by side views for those who can't afford glasses but can afford eye strain (note: use a negative r_stereo_separation if you want cross-eyed viewing)"};
 cvar_t r_stereo_redblue = {0, "r_stereo_redblue", "0", "red/blue anaglyph stereo glasses (note: most of these glasses are actually red/cyan, try that one too)"};
 cvar_t r_stereo_redcyan = {0, "r_stereo_redcyan", "0", "red/cyan anaglyph stereo glasses, the kind given away at drive-in movies like Creature From The Black Lagoon In 3D"};
 cvar_t r_stereo_redgreen = {0, "r_stereo_redgreen", "0", "red/green anaglyph stereo glasses (for those who don't mind yellow)"};
+cvar_t r_stereo_angle = {0, "r_stereo_angle", "0", "separation angle of eyes (makes the views look in different directions; for example, 90 gives a 90 degree separation with the views 45 degrees left and 45 degrees right)"};
 cvar_t scr_zoomwindow = {CVAR_SAVE, "scr_zoomwindow", "0", "displays a zoomed in overlay window"};
 cvar_t scr_zoomwindow_viewsizex = {CVAR_SAVE, "scr_zoomwindow_viewsizex", "20", "horizontal viewsize of zoom window"};
 cvar_t scr_zoomwindow_viewsizey = {CVAR_SAVE, "scr_zoomwindow_viewsizey", "20", "vertical viewsize of zoom window"};
 cvar_t scr_zoomwindow_fov = {CVAR_SAVE, "scr_zoomwindow_fov", "20", "fov of zoom window"};
+cvar_t scr_stipple = {0, "scr_stipple", "0", "interlacing-like stippling of the display"};
+cvar_t scr_refresh = {0, "scr_refresh", "1", "allows you to completely shut off rendering for benchmarking purposes"};
+cvar_t shownetgraph = {CVAR_SAVE, "shownetgraph", "0", "shows a graph of packet sizes and other information, 0 = off, 1 = show client netgraph, 2 = show client and server netgraphs (when hosting a server)"};
 
+#define AVI_MASTER_INDEX_SIZE 640 // GB ought to be enough for anyone
 
 int jpeg_supported = false;
 
@@ -50,7 +60,6 @@ float         scr_con_current;
 
 extern int     con_vislines;
 
-void DrawCrosshair(int num);
 static void SCR_ScreenShot_f (void);
 static void R_Envmap_f (void);
 
@@ -100,13 +109,16 @@ void SCR_CenterPrint(char *str)
 void SCR_DrawCenterString (void)
 {
        char    *start;
-       int             l;
        int             x, y;
        int             remaining;
        int             color;
 
-// the finale prints the characters one at a time
-       if (cl.intermission)
+       if(cl.intermission == 2) // in finale,
+               if(sb_showscores) // make TAB hide the finale message (sb_showscores overrides finale in sbar.c)
+                       return;
+
+// the finale prints the characters one at a time, unless printspeed is 0 (disabled) or absurdly high
+       if (cl.intermission && scr_printspeed.value > 0 && scr_printspeed.value < 1000000)
                remaining = (int)(scr_printspeed.value * (cl.time - scr_centertime_start));
        else
                remaining = 9999;
@@ -125,29 +137,26 @@ void SCR_DrawCenterString (void)
        color = -1;
        do
        {
-       // scan the width of the line
-               for (l=0 ; l<vid_conwidth.integer/8 ; l++)
-                       if (start[l] == '\n' || !start[l])
-                               break;
-               x = (vid_conwidth.integer - l*8)/2;
+               // scan the number of characters on the line, not counting color codes
+               char *newline = strchr(start, '\n');
+               int l = newline ? (int)(newline - start) : (int)strlen(start);
+               int chars = COM_StringLengthNoColors(start, l, NULL);
+
+               x = (vid_conwidth.integer - chars*8)/2;
                if (l > 0)
                {
                        if (remaining < l)
                                l = remaining;
-                       DrawQ_ColoredString(x, y, start, l, 8, 8, 1, 1, 1, 1, 0, &color);
+                       DrawQ_String(x, y, start, l, 8, 8, 1, 1, 1, 1, 0, &color, false);
                        remaining -= l;
                        if (remaining <= 0)
                                return;
                }
-
                y += 8;
 
-               while (*start && *start != '\n')
-                       start++;
-
-               if (!*start)
+               if (!newline)
                        break;
-               start++;                // skip the \n
+               start = newline + 1; // skip the \n
        } while (1);
 }
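
For illustration, a minimal sketch of the new centering arithmetic, assuming Quake-style ^N color codes (which COM_StringLengthNoColors is expected to skip):

        const char *line = "^1Game ^7Over";                    // 13 bytes in the buffer
        int l     = (int)strlen(line);                         // 13, what DrawQ_String consumes
        int chars = COM_StringLengthNoColors(line, l, NULL);   // 9 visible characters
        int x     = (vid_conwidth.integer - chars*8)/2;        // centered on the visible width only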
 
@@ -156,7 +165,8 @@ void SCR_CheckDrawCenterString (void)
        if (scr_center_lines > scr_erase_lines)
                scr_erase_lines = scr_center_lines;
 
-       scr_centertime_off -= cl.realframetime;
+       if (cl.time > cl.oldtime)
+               scr_centertime_off -= cl.time - cl.oldtime;
 
        // don't draw if this is a normal stats-screen intermission,
        // only if it is not an intermission, or a finale intermission
@@ -170,6 +180,134 @@ void SCR_CheckDrawCenterString (void)
        SCR_DrawCenterString ();
 }
 
+void SCR_DrawNetGraph_DrawGraph (int graphx, int graphy, int barwidth, int barheight, int bardivide, const char *label, float textsize, int packetcounter, int numparameters, const int **parameters, const float parametercolors[][4])
+{
+       int j, k, x, y, index, offset, height;
+       // draw the bar graph itself
+       // advance the packet counter because it is the latest packet column being
+       // built up and should come last
+       packetcounter = (packetcounter + 1) % NETGRAPH_PACKETS;
+       for (j = 0;j < NETGRAPH_PACKETS;j++)
+       {
+               x = graphx + j * barwidth;
+               y = graphy + barheight;
+               index = (packetcounter + j) % NETGRAPH_PACKETS;
+               if (parameters[0][index] == NETGRAPH_LOSTPACKET)
+                       DrawQ_Fill(x, y - barheight, barwidth, barheight, 1, 0, 0, 1, 0);
+               else if (parameters[0][index] == NETGRAPH_CHOKEDPACKET)
+                       DrawQ_Fill(x, y - min(2, barheight), barwidth, min(2, barheight), 1, 1, 0, 1, 0);
+               else
+               {
+                       offset = 0;
+                       for (k = 0;k < numparameters;k++)
+                       {
+                               height = (parameters[k][index] + bardivide - 1) / bardivide;
+                               height = min(height, barheight - offset);
+                               offset += height;
+                               if (height)
+                                       DrawQ_Fill(x, y - offset, barwidth, height, parametercolors[k][0], parametercolors[k][1], parametercolors[k][2], parametercolors[k][3], 0);
+                       }
+               }
+       }
+}
+
+const float netgraphcolors[3][4] =
+{
+       {1  , 0.5, 0  , 1},
+       {1  , 1  , 1  , 1},
+       {0  , 1  , 0  , 1},
+};
+
+void SCR_DrawNetGraph_DrawConnection_Client (netconn_t *conn, int graphx, int graphy, int barwidth, int barheight, int bardivide, const char *labelincoming, int separator, const char *labeloutgoing, float textsize)
+{
+       int numparameters;
+       const int *parameters[3];
+       // dim background
+       DrawQ_Fill(graphx                                          , graphy, barwidth * NETGRAPH_PACKETS, barheight + textsize, 0, 0, 0, 0.5, 0);
+       DrawQ_Fill(graphx + barwidth * NETGRAPH_PACKETS + separator, graphy, barwidth * NETGRAPH_PACKETS, barheight + textsize, 0, 0, 0, 0.5, 0);
+       // draw the bar graphs
+       numparameters = 3;
+       parameters[0] = conn->incoming_unreliablesize;
+       parameters[1] = conn->incoming_reliablesize;
+       parameters[2] = conn->incoming_acksize;
+       SCR_DrawNetGraph_DrawGraph(graphx, graphy, barwidth, barheight, bardivide, labelincoming, textsize, conn->incoming_packetcounter, numparameters, parameters, netgraphcolors);
+       parameters[0] = conn->outgoing_unreliablesize;
+       parameters[1] = conn->outgoing_reliablesize;
+       parameters[2] = conn->outgoing_acksize;
+       SCR_DrawNetGraph_DrawGraph(graphx + barwidth * NETGRAPH_PACKETS + separator, graphy, barwidth, barheight, bardivide, labeloutgoing, textsize, conn->outgoing_packetcounter, numparameters, parameters, netgraphcolors);
+       // draw labels
+       DrawQ_String(graphx                                          , graphy + barheight, labelincoming, 0, textsize, textsize, 1, 1, 1, 1, 0, NULL, false);
+       DrawQ_String(graphx + barwidth * NETGRAPH_PACKETS + separator, graphy + barheight, labeloutgoing, 0, textsize, textsize, 1, 1, 1, 1, 0, NULL, false);
+}
+
+void SCR_DrawNetGraph_DrawConnection_Server (netconn_t *conn, int graphx, int graphy, int barwidth, int barheight, int bardivide, const char *labeloutgoing, int separator, const char *labelincoming, float textsize)
+{
+       int numparameters;
+       const int *parameters[3];
+       // dim background
+       DrawQ_Fill(graphx                                          , graphy, barwidth * NETGRAPH_PACKETS, barheight + textsize, 0, 0, 0, 0.5, 0);
+       DrawQ_Fill(graphx + barwidth * NETGRAPH_PACKETS + separator, graphy, barwidth * NETGRAPH_PACKETS, barheight + textsize, 0, 0, 0, 0.5, 0);
+       // draw the bar graphs
+       numparameters = 3;
+       parameters[0] = conn->outgoing_unreliablesize;
+       parameters[1] = conn->outgoing_reliablesize;
+       parameters[2] = conn->outgoing_acksize;
+       SCR_DrawNetGraph_DrawGraph(graphx                                          , graphy, barwidth, barheight, bardivide, labeloutgoing, textsize, conn->outgoing_packetcounter, numparameters, parameters, netgraphcolors);
+       parameters[0] = conn->incoming_unreliablesize;
+       parameters[1] = conn->incoming_reliablesize;
+       parameters[2] = conn->incoming_acksize;
+       SCR_DrawNetGraph_DrawGraph(graphx + barwidth * NETGRAPH_PACKETS + separator, graphy, barwidth, barheight, bardivide, labelincoming, textsize, conn->incoming_packetcounter, numparameters, parameters, netgraphcolors);
+       // draw labels
+       DrawQ_String(graphx                                          , graphy + barheight, labeloutgoing, 0, textsize, textsize, 1, 1, 1, 1, 0, NULL, false);
+       DrawQ_String(graphx + barwidth * NETGRAPH_PACKETS + separator, graphy + barheight, labelincoming, 0, textsize, textsize, 1, 1, 1, 1, 0, NULL, false);
+}
+
+/*
+==============
+SCR_DrawNetGraph
+==============
+*/
+void SCR_DrawNetGraph (void)
+{
+       int i, separator1, separator2, barwidth, barheight, bardivide, netgraph_x, netgraph_y, textsize, index, netgraphsperrow;
+
+       if (cls.state != ca_connected)
+               return;
+       if (!cls.netcon)
+               return;
+       if (!shownetgraph.integer)
+               return;
+
+       separator1 = 2;
+       separator2 = 4;
+       textsize = 8;
+       barwidth = 1;
+       barheight = 50;
+       bardivide = 20;
+
+       netgraphsperrow = (vid_conwidth.integer + separator2) / (barwidth * NETGRAPH_PACKETS * 2 + separator1 + separator2);
+       netgraphsperrow = max(netgraphsperrow, 1);
+
+       index = 0;
+       netgraph_x = (vid_conwidth.integer + separator2) - (1 + (index % netgraphsperrow)) * (barwidth * NETGRAPH_PACKETS * 2 + separator1 + separator2);
+       netgraph_y = (vid_conheight.integer - 48 + separator2) - (1 + (index / netgraphsperrow)) * (barheight + textsize + separator2);
+       SCR_DrawNetGraph_DrawConnection_Client(cls.netcon, netgraph_x, netgraph_y, barwidth, barheight, bardivide, "incoming", separator1, "outgoing", textsize);
+       index++;
+
+       if (sv.active && shownetgraph.integer >= 2)
+       {
+               for (i = 0;i < svs.maxclients;i++)
+               {
+                       if (!svs.clients[i].netconnection)
+                               continue;
+                       netgraph_x = (vid_conwidth.integer + separator2) - (1 + (index % netgraphsperrow)) * (barwidth * NETGRAPH_PACKETS * 2 + separator1 + separator2);
+                       netgraph_y = (vid_conheight.integer - 48 + separator2) - (1 + (index / netgraphsperrow)) * (barheight + textsize + separator2);
+                       SCR_DrawNetGraph_DrawConnection_Server(svs.clients[i].netconnection, netgraph_x, netgraph_y, barwidth, barheight, bardivide, va("%s", svs.clients[i].name), separator1, "", textsize);
+                       index++;
+               }
+       }
+}
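
To make the placement math concrete, assuming NETGRAPH_PACKETS is 100 (the netconn.h value is an assumption here): each in/out graph pair is barwidth*NETGRAPH_PACKETS*2 + separator1 = 202 pixels wide plus separator2 = 4 pixels of spacing, so at the default vid_conwidth of 640 netgraphsperrow = (640+4)/206 = 3; the first (client) graph lands at x = 644 - 206 = 438, y = (480 - 48 + 4) - (50 + 8 + 4) = 374, and further per-client graphs fill leftwards, wrapping onto the row above.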
+
 /*
 ==============
 SCR_DrawTurtle
@@ -295,27 +433,102 @@ void SCR_DrawBrand (void)
 
 /*
 ==============
-SCR_DrawDownload
+SCR_DrawQWDownload
 ==============
 */
-static void SCR_DrawDownload(void)
+static int SCR_DrawQWDownload(int offset)
 {
        int len;
        float x, y;
        float size = 8;
        char temp[256];
        if (!cls.qw_downloadname[0])
-               return;
-       dpsnprintf(temp, sizeof(temp), "Downloading %s ...  %3i%%\n", cls.qw_downloadname, cls.qw_downloadpercent);
+       {
+               cls.qw_downloadspeedrate = 0;
+               cls.qw_downloadspeedtime = realtime;
+               cls.qw_downloadspeedcount = 0;
+               return 0;
+       }
+       if (realtime >= cls.qw_downloadspeedtime + 1)
+       {
+               cls.qw_downloadspeedrate = cls.qw_downloadspeedcount;
+               cls.qw_downloadspeedtime = realtime;
+               cls.qw_downloadspeedcount = 0;
+       }
+       if (cls.protocol == PROTOCOL_QUAKEWORLD)
+               dpsnprintf(temp, sizeof(temp), "Downloading %s %3i%% (%i) at %i bytes/s\n", cls.qw_downloadname, cls.qw_downloadpercent, cls.qw_downloadmemorycursize, cls.qw_downloadspeedrate);
+       else
+               dpsnprintf(temp, sizeof(temp), "Downloading %s %3i%% (%i/%i) at %i bytes/s\n", cls.qw_downloadname, cls.qw_downloadpercent, cls.qw_downloadmemorycursize, cls.qw_downloadmemorymaxsize, cls.qw_downloadspeedrate);
        len = (int)strlen(temp);
        x = (vid_conwidth.integer - len*size) / 2;
-       y = vid_conheight.integer - size;
-       DrawQ_Pic(0, y, NULL, vid_conwidth.integer, size, 0, 0, 0, 0.5, 0);
-       DrawQ_String(x, y, temp, len, size, size, 1, 1, 1, 1, 0);
+       y = vid_conheight.integer - size - offset;
+       DrawQ_Fill(0, y, vid_conwidth.integer, size, 0, 0, 0, 0.5, 0);
+       DrawQ_String(x, y, temp, len, size, size, 1, 1, 1, 1, 0, NULL, true);
+       return 8;
 }
 
-//=============================================================================
+/*
+==============
+SCR_DrawCurlDownload
+==============
+*/
+static int SCR_DrawCurlDownload(int offset)
+{
+       int len;
+       int nDownloads;
+       int i;
+       float x, y;
+       float size = 8;
+       Curl_downloadinfo_t *downinfo;
+       char temp[256];
+       const char *addinfo;
+
+       downinfo = Curl_GetDownloadInfo(&nDownloads, &addinfo);
+       if(!downinfo)
+               return 0;
 
+       y = vid_conheight.integer - size * nDownloads - offset;
+
+       if(addinfo)
+       {
+               len = (int)strlen(addinfo);
+               x = (vid_conwidth.integer - len*size) / 2;
+               DrawQ_Fill(0, y - size, vid_conwidth.integer, size, 1, 1, 1, 0.8, 0);
+               DrawQ_String(x, y - size, addinfo, len, size, size, 0, 0, 0, 1, 0, NULL, true);
+       }
+
+       for(i = 0; i != nDownloads; ++i)
+       {
+               if(downinfo[i].queued)
+                       dpsnprintf(temp, sizeof(temp), "Still in queue: %s\n", downinfo[i].filename);
+               else if(downinfo[i].progress <= 0)
+                       dpsnprintf(temp, sizeof(temp), "Downloading %s ...  ???.?%% @ %.1f KiB/s\n", downinfo[i].filename, downinfo[i].speed / 1024.0);
+               else
+                       dpsnprintf(temp, sizeof(temp), "Downloading %s ...  %5.1f%% @ %.1f KiB/s\n", downinfo[i].filename, 100.0 * downinfo[i].progress, downinfo[i].speed / 1024.0);
+               len = (int)strlen(temp);
+               x = (vid_conwidth.integer - len*size) / 2;
+               DrawQ_Fill(0, y + i * size, vid_conwidth.integer, size, 0, 0, 0, 0.8, 0);
+               DrawQ_String(x, y + i * size, temp, len, size, size, 1, 1, 1, 1, 0, NULL, true);
+       }
+
+       Z_Free(downinfo);
+
+       return 8 * (nDownloads + (addinfo ? 1 : 0));
+}
+
+/*
+==============
+SCR_DrawDownload
+==============
+*/
+static void SCR_DrawDownload(void)
+{
+       int offset = 0;
+       offset += SCR_DrawQWDownload(offset);
+       offset += SCR_DrawCurlDownload(offset);
+}
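
The offset parameter is what lets the two displays stack from the bottom of the screen: each drawer returns the number of pixels it used, so the next one is pushed up by that amount. A hypothetical third row (SCR_DrawFooStatus is made up for this sketch) would follow the same contract:

        int offset = 0;
        offset += SCR_DrawQWDownload(offset);   // 8 px when a QW/DP download is active
        offset += SCR_DrawCurlDownload(offset); // 8 px per curl download entry
        offset += SCR_DrawFooStatus(offset);    // hypothetical extra row, drawn above the others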
+
+//=============================================================================
 
 /*
 ==================
@@ -371,7 +584,7 @@ void SCR_DrawConsole (void)
        else
        {
                con_vislines = 0;
-               if (key_dest == key_game || key_dest == key_message)
+               if ((key_dest == key_game || key_dest == key_message) && !r_letterbox.value)
                        Con_DrawNotify ();      // only draw notify in game
        }
 }
@@ -388,8 +601,7 @@ void SCR_BeginLoadingPlaque (void)
        Log_Start();
 
        Host_StartVideo();
-       S_StopAllSounds();
-       SCR_UpdateLoadingScreen();
+       SCR_UpdateLoadingScreen(false);
 }
 
 //=============================================================================
@@ -427,6 +639,7 @@ void R_TimeReport(char *desc)
 void R_TimeReport_Frame(void)
 {
        int i, j, lines, y;
+       cl_locnode_t *loc;
 
        if (r_speeds_string[0])
        {
@@ -444,14 +657,14 @@ void R_TimeReport_Frame(void)
                                lines++;
                y = vid_conheight.integer - sb_lines - lines * 8;
                i = j = 0;
-               DrawQ_Pic(0, y, NULL, vid_conwidth.integer, lines * 8, 0, 0, 0, 0.5, 0);
+               DrawQ_Fill(0, y, vid_conwidth.integer, lines * 8, 0, 0, 0, 0.5, 0);
                while (r_speeds_string[i])
                {
                        j = i;
                        while (r_speeds_string[i] && r_speeds_string[i] != '\n')
                                i++;
                        if (i - j > 0)
-                               DrawQ_String(0, y, r_speeds_string + j, i - j, 8, 8, 1, 1, 1, 1, 0);
+                               DrawQ_String(0, y, r_speeds_string + j, i - j, 8, 8, 1, 1, 1, 1, 0, NULL, true);
                        if (r_speeds_string[i] == '\n')
                                i++;
                        y += 8;
@@ -464,6 +677,11 @@ void R_TimeReport_Frame(void)
                speedstringcount = 0;
                r_speeds_string[0] = 0;
                r_timereport_active = false;
+               // put the location name in the r_speeds display as it greatly helps
+               // when creating loc files
+               loc = CL_Locs_FindNearest(cl.movement_origin);
+               if (loc)
+                       sprintf(r_speeds_string + strlen(r_speeds_string), "Location: %s\n", loc->name);
                sprintf(r_speeds_string + strlen(r_speeds_string), "org:'%+8.2f %+8.2f %+8.2f' dir:'%+2.3f %+2.3f %+2.3f'\n", r_view.origin[0], r_view.origin[1], r_view.origin[2], r_view.forward[0], r_view.forward[1], r_view.forward[2]);
                sprintf(r_speeds_string + strlen(r_speeds_string), "%5i entities%6i surfaces%6i triangles%5i leafs%5i portals%6i particles\n", r_refdef.stats.entities, r_refdef.stats.entities_surfaces, r_refdef.stats.entities_triangles, r_refdef.stats.world_leafs, r_refdef.stats.world_portals, r_refdef.stats.particles);
                sprintf(r_speeds_string + strlen(r_speeds_string), "%4i lights%4i clears%4i scissored%7i light%7i shadow%7i dynamic\n", r_refdef.stats.lights, r_refdef.stats.lights_clears, r_refdef.stats.lights_scissored, r_refdef.stats.lights_lighttriangles, r_refdef.stats.lights_shadowtriangles, r_refdef.stats.lights_dynamicshadowtriangles);
@@ -507,6 +725,12 @@ void SCR_SizeDown_f (void)
        Cvar_SetValue ("viewsize",scr_viewsize.value-10);
 }
 
+void SCR_CaptureVideo_EndVideo(void);
+void CL_Screen_Shutdown(void)
+{
+       SCR_CaptureVideo_EndVideo();
+}
+
 void CL_Screen_Init(void)
 {
        Cvar_RegisterVariable (&scr_fov);
@@ -528,20 +752,25 @@ void CL_Screen_Init(void)
        Cvar_RegisterVariable (&scr_screenshot_jpeg_quality);
        Cvar_RegisterVariable (&scr_screenshot_gammaboost);
        Cvar_RegisterVariable (&cl_capturevideo);
-       Cvar_RegisterVariable (&cl_capturevideo_sound);
+       Cvar_RegisterVariable (&cl_capturevideo_width);
+       Cvar_RegisterVariable (&cl_capturevideo_height);
+       Cvar_RegisterVariable (&cl_capturevideo_realtime);
        Cvar_RegisterVariable (&cl_capturevideo_fps);
-       Cvar_RegisterVariable (&cl_capturevideo_rawrgb);
-       Cvar_RegisterVariable (&cl_capturevideo_rawyv12);
+       Cvar_RegisterVariable (&cl_capturevideo_number);
        Cvar_RegisterVariable (&r_letterbox);
        Cvar_RegisterVariable(&r_stereo_separation);
        Cvar_RegisterVariable(&r_stereo_sidebyside);
        Cvar_RegisterVariable(&r_stereo_redblue);
        Cvar_RegisterVariable(&r_stereo_redcyan);
        Cvar_RegisterVariable(&r_stereo_redgreen);
+       Cvar_RegisterVariable(&r_stereo_angle);
        Cvar_RegisterVariable(&scr_zoomwindow);
        Cvar_RegisterVariable(&scr_zoomwindow_viewsizex);
        Cvar_RegisterVariable(&scr_zoomwindow_viewsizey);
        Cvar_RegisterVariable(&scr_zoomwindow_fov);
+       Cvar_RegisterVariable(&scr_stipple);
+       Cvar_RegisterVariable(&scr_refresh);
+       Cvar_RegisterVariable(&shownetgraph);
 
        Cmd_AddCommand ("sizeup",SCR_SizeUp_f, "increase view size (increases viewsize cvar)");
        Cmd_AddCommand ("sizedown",SCR_SizeDown_f, "decrease view size (decreases viewsize cvar)");
@@ -603,29 +832,309 @@ void SCR_ScreenShot_f (void)
        shotnumber++;
 }
 
+static void SCR_CaptureVideo_RIFF_Start(void)
+{
+       memset(&cls.capturevideo.riffbuffer, 0, sizeof(sizebuf_t));
+       cls.capturevideo.riffbuffer.maxsize = sizeof(cls.capturevideo.riffbufferdata);
+       cls.capturevideo.riffbuffer.data = cls.capturevideo.riffbufferdata;
+}
+
+static void SCR_CaptureVideo_RIFF_Flush(void)
+{
+       if (cls.capturevideo.riffbuffer.cursize > 0)
+       {
+               if (!FS_Write(cls.capturevideo.videofile, cls.capturevideo.riffbuffer.data, cls.capturevideo.riffbuffer.cursize))
+                       cls.capturevideo.error = true;
+               cls.capturevideo.riffbuffer.cursize = 0;
+               cls.capturevideo.riffbuffer.overflowed = false;
+       }
+}
+
+static void SCR_CaptureVideo_RIFF_WriteBytes(const unsigned char *data, size_t size)
+{
+       SCR_CaptureVideo_RIFF_Flush();
+       if (!FS_Write(cls.capturevideo.videofile, data, size))
+               cls.capturevideo.error = true;
+}
+
+static void SCR_CaptureVideo_RIFF_Write32(int n)
+{
+       if (cls.capturevideo.riffbuffer.cursize + 4 > cls.capturevideo.riffbuffer.maxsize)
+               SCR_CaptureVideo_RIFF_Flush();
+       MSG_WriteLong(&cls.capturevideo.riffbuffer, n);
+}
+
+static void SCR_CaptureVideo_RIFF_Write16(int n)
+{
+       if (cls.capturevideo.riffbuffer.cursize + 2 > cls.capturevideo.riffbuffer.maxsize)
+               SCR_CaptureVideo_RIFF_Flush();
+       MSG_WriteShort(&cls.capturevideo.riffbuffer, n);
+}
+
+static void SCR_CaptureVideo_RIFF_WriteFourCC(const char *chunkfourcc)
+{
+       if (cls.capturevideo.riffbuffer.cursize + (int)strlen(chunkfourcc) > cls.capturevideo.riffbuffer.maxsize)
+               SCR_CaptureVideo_RIFF_Flush();
+       MSG_WriteUnterminatedString(&cls.capturevideo.riffbuffer, chunkfourcc);
+}
+
+static void SCR_CaptureVideo_RIFF_WriteTerminatedString(const char *string)
+{
+       if (cls.capturevideo.riffbuffer.cursize + (int)strlen(string) > cls.capturevideo.riffbuffer.maxsize)
+               SCR_CaptureVideo_RIFF_Flush();
+       MSG_WriteString(&cls.capturevideo.riffbuffer, string);
+}
+
+static fs_offset_t SCR_CaptureVideo_RIFF_GetPosition(void)
+{
+       SCR_CaptureVideo_RIFF_Flush();
+       return FS_Tell(cls.capturevideo.videofile);
+}
+
+static void SCR_CaptureVideo_RIFF_Push(const char *chunkfourcc, const char *listtypefourcc)
+{
+       SCR_CaptureVideo_RIFF_WriteFourCC(chunkfourcc);
+       SCR_CaptureVideo_RIFF_Write32(0);
+       SCR_CaptureVideo_RIFF_Flush();
+       cls.capturevideo.riffstackstartoffset[cls.capturevideo.riffstacklevel++] = SCR_CaptureVideo_RIFF_GetPosition();
+       if (listtypefourcc)
+               SCR_CaptureVideo_RIFF_WriteFourCC(listtypefourcc);
+}
+
+static void SCR_CaptureVideo_RIFF_Pop(void)
+{
+       fs_offset_t offset;
+       int x;
+       unsigned char sizebytes[4];
+       // write out the chunk size and then return to the current file position
+       cls.capturevideo.riffstacklevel--;
+       offset = SCR_CaptureVideo_RIFF_GetPosition();
+       x = (int)(offset - (cls.capturevideo.riffstackstartoffset[cls.capturevideo.riffstacklevel]));
+       sizebytes[0] = (x) & 0xff;sizebytes[1] = (x >> 8) & 0xff;sizebytes[2] = (x >> 16) & 0xff;sizebytes[3] = (x >> 24) & 0xff;
+       FS_Seek(cls.capturevideo.videofile, -(x + 4), SEEK_END);
+       FS_Write(cls.capturevideo.videofile, sizebytes, 4);
+       FS_Seek(cls.capturevideo.videofile, 0, SEEK_END);
+       if (offset & 1)
+       {
+               unsigned char c = 0;
+               FS_Write(cls.capturevideo.videofile, &c, 1);
+       }
+}
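
For orientation, a minimal sketch of how these helpers pair up further down: Push writes the chunk fourcc plus a zero size placeholder and records where the payload starts, Pop seeks back to patch in the real size and pads to an even byte boundary, so nested Push/Pop calls build the RIFF/LIST tree.

        SCR_CaptureVideo_RIFF_Push("RIFF", "AVI ");  // RIFF('AVI '
        SCR_CaptureVideo_RIFF_Push("LIST", "hdrl");  //   LIST('hdrl'
        SCR_CaptureVideo_RIFF_Push("avih", NULL);    //     avih(
        SCR_CaptureVideo_RIFF_Write32(0);            //       ...header fields...
        SCR_CaptureVideo_RIFF_Pop();                 //     )
        SCR_CaptureVideo_RIFF_Pop();                 //   )
        SCR_CaptureVideo_RIFF_Pop();                 // )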
+
+static void GrowBuf(sizebuf_t *buf, int extralen)
+{
+       if(buf->cursize + extralen > buf->maxsize)
+       {
+               int oldsize = buf->maxsize;
+               unsigned char *olddata;
+               olddata = buf->data;
+               buf->maxsize = max(buf->maxsize * 2, 4096);
+               buf->data = (unsigned char *)Mem_Alloc(tempmempool, buf->maxsize);
+               if(olddata)
+               {
+                       memcpy(buf->data, olddata, oldsize);
+                       Mem_Free(olddata);
+               }
+       }
+}
+
+static void SCR_CaptureVideo_RIFF_IndexEntry(const char *chunkfourcc, int chunksize, int flags)
+{
+       if (cls.capturevideo.riffstacklevel != 2)
+               Sys_Error("SCR_Capturevideo_RIFF_IndexEntry: RIFF stack level is %i (should be 2)\n", cls.capturevideo.riffstacklevel);
+       GrowBuf(&cls.capturevideo.riffindexbuffer, 16);
+       SCR_CaptureVideo_RIFF_Flush();
+       MSG_WriteUnterminatedString(&cls.capturevideo.riffindexbuffer, chunkfourcc);
+       MSG_WriteLong(&cls.capturevideo.riffindexbuffer, flags);
+       MSG_WriteLong(&cls.capturevideo.riffindexbuffer, (int)FS_Tell(cls.capturevideo.videofile) - cls.capturevideo.riffstackstartoffset[1]);
+       MSG_WriteLong(&cls.capturevideo.riffindexbuffer, chunksize);
+}
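
Each call above appends one 16-byte legacy index entry to riffindexbuffer; the i += 16 loops in SCR_CaptureVideo_RIFF_MakeIxChunk below walk the same stride. The layout of one entry as written here:

        // bytes  0- 3: chunk fourcc ("00dc" video frame, "01wb" audio)
        // bytes  4- 7: flags (0x10 = keyframe, tested below)
        // bytes  8-11: chunk offset relative to the start of the current "movi" list
        // bytes 12-15: chunk size in bytes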
+
+static void SCR_CaptureVideo_RIFF_MakeIxChunk(const char *fcc, const char *dwChunkId, fs_offset_t masteridx_counter, int *masteridx_count, fs_offset_t masteridx_start)
+{
+       int nMatching;
+       int i;
+       fs_offset_t ix = SCR_CaptureVideo_RIFF_GetPosition();
+       fs_offset_t pos;
+
+       if(*masteridx_count >= AVI_MASTER_INDEX_SIZE)
+               return;
+
+       nMatching = 0; // go through index and enumerate them
+       for(i = 0; i < cls.capturevideo.riffindexbuffer.cursize; i += 16)
+               if(!memcmp(cls.capturevideo.riffindexbuffer.data + i, dwChunkId, 4))
+                       ++nMatching;
+
+       SCR_CaptureVideo_RIFF_Push(fcc, NULL);
+       SCR_CaptureVideo_RIFF_Write16(2); // wLongsPerEntry
+       SCR_CaptureVideo_RIFF_Write16(0x0100); // bIndexType=1, bIndexSubType=0
+       SCR_CaptureVideo_RIFF_Write32(nMatching); // nEntriesInUse
+       SCR_CaptureVideo_RIFF_WriteFourCC(dwChunkId); // dwChunkId
+       SCR_CaptureVideo_RIFF_Write32(cls.capturevideo.videofile_ix_movistart & (fs_offset_t) 0xFFFFFFFFu); // qwBaseOffset low dword
+       SCR_CaptureVideo_RIFF_Write32(((long long) cls.capturevideo.videofile_ix_movistart) >> 32); // qwBaseOffset high dword
+       SCR_CaptureVideo_RIFF_Write32(0); // dwReserved
+
+       for(i = 0; i < cls.capturevideo.riffindexbuffer.cursize; i += 16)
+               if(!memcmp(cls.capturevideo.riffindexbuffer.data + i, dwChunkId, 4))
+               {
+                       unsigned int *p = (unsigned int *) (cls.capturevideo.riffindexbuffer.data + i);
+                       unsigned int flags = p[1];
+                       unsigned int rpos = p[2];
+                       unsigned int size = p[3];
+                       size &= ~0x80000000;
+                       if(!(flags & 0x10)) // no keyframe?
+                               size |= 0x80000000;
+                       SCR_CaptureVideo_RIFF_Write32(rpos + 8);
+                       SCR_CaptureVideo_RIFF_Write32(size);
+               }
+
+       SCR_CaptureVideo_RIFF_Pop();
+       pos = SCR_CaptureVideo_RIFF_GetPosition();
+       SCR_CaptureVideo_RIFF_Flush();
+
+       FS_Seek(cls.capturevideo.videofile, masteridx_start + 16 * *masteridx_count, SEEK_SET);
+       SCR_CaptureVideo_RIFF_Write32(ix & (fs_offset_t) 0xFFFFFFFFu); // qwOffset low dword
+       SCR_CaptureVideo_RIFF_Write32(((long long) ix) >> 32); // qwOffset high dword
+       SCR_CaptureVideo_RIFF_Write32(pos - ix); // dwSize of the ix chunk
+       SCR_CaptureVideo_RIFF_Write32(nMatching); // dwDuration (number of indexed chunks)
+       SCR_CaptureVideo_RIFF_Flush();
+
+       FS_Seek(cls.capturevideo.videofile, masteridx_counter, SEEK_SET);
+       SCR_CaptureVideo_RIFF_Write32(++*masteridx_count);
+       SCR_CaptureVideo_RIFF_Flush();
+
+       FS_Seek(cls.capturevideo.videofile, 0, SEEK_END);
+}
+
+static void SCR_CaptureVideo_RIFF_Finish(qboolean final)
+{
+       // close the "movi" list
+       SCR_CaptureVideo_RIFF_Pop();
+       if(cls.capturevideo.videofile_ix_master_video_inuse_offset)
+               SCR_CaptureVideo_RIFF_MakeIxChunk("ix00", "00dc", cls.capturevideo.videofile_ix_master_video_inuse_offset, &cls.capturevideo.videofile_ix_master_video_inuse, cls.capturevideo.videofile_ix_master_video_start_offset);
+       if(cls.capturevideo.videofile_ix_master_audio_inuse_offset)
+               SCR_CaptureVideo_RIFF_MakeIxChunk("ix01", "01wb", cls.capturevideo.videofile_ix_master_audio_inuse_offset, &cls.capturevideo.videofile_ix_master_audio_inuse, cls.capturevideo.videofile_ix_master_audio_start_offset);
+       // write the idx1 chunk that we've been building while saving the frames (for old style players)
+       if(final && cls.capturevideo.videofile_firstchunkframes_offset)
+       // TODO replace index creating by OpenDML ix##/##ix/indx chunk so it works for more than one AVI part too
+       {
+               SCR_CaptureVideo_RIFF_Push("idx1", NULL);
+               SCR_CaptureVideo_RIFF_WriteBytes(cls.capturevideo.riffindexbuffer.data, cls.capturevideo.riffindexbuffer.cursize);
+               SCR_CaptureVideo_RIFF_Pop();
+       }
+       cls.capturevideo.riffindexbuffer.cursize = 0;
+       // pop the RIFF chunk itself
+       while (cls.capturevideo.riffstacklevel > 0)
+               SCR_CaptureVideo_RIFF_Pop();
+       SCR_CaptureVideo_RIFF_Flush();
+       if(cls.capturevideo.videofile_firstchunkframes_offset)
+       {
+               Con_DPrintf("Finishing first chunk (%d frames)\n", cls.capturevideo.frame);
+               FS_Seek(cls.capturevideo.videofile, cls.capturevideo.videofile_firstchunkframes_offset, SEEK_SET);
+               SCR_CaptureVideo_RIFF_Write32(cls.capturevideo.frame);
+               SCR_CaptureVideo_RIFF_Flush();
+               FS_Seek(cls.capturevideo.videofile, 0, SEEK_END);
+               cls.capturevideo.videofile_firstchunkframes_offset = 0;
+       }
+       else
+               Con_DPrintf("Finishing another chunk (%d frames)\n", cls.capturevideo.frame);
+}
+
+static void SCR_CaptureVideo_RIFF_OverflowCheck(int framesize)
+{
+       fs_offset_t cursize, curfilesize;
+       if (cls.capturevideo.riffstacklevel != 2)
+               Sys_Error("SCR_CaptureVideo_RIFF_OverflowCheck: chunk stack leakage!\n");
+       // check where we are in the file
+       SCR_CaptureVideo_RIFF_Flush();
+       cursize = SCR_CaptureVideo_RIFF_GetPosition() - cls.capturevideo.riffstackstartoffset[0];
+       curfilesize = SCR_CaptureVideo_RIFF_GetPosition();
+
+       // if this would overflow the windows limit of 1GB per RIFF chunk, we need
+       // to close the current RIFF chunk and open another for future frames
+       if (8 + cursize + framesize + cls.capturevideo.riffindexbuffer.cursize + 8 + cls.capturevideo.riffindexbuffer.cursize + 64 > 1<<30) // note that the Ix buffer takes less space... I just don't dare to / 2 here now... sorry, maybe later
+       {
+               SCR_CaptureVideo_RIFF_Finish(false);
+               // begin a new 1GB extended section of the AVI
+               SCR_CaptureVideo_RIFF_Push("RIFF", "AVIX");
+               SCR_CaptureVideo_RIFF_Push("LIST", "movi");
+               cls.capturevideo.videofile_ix_movistart = cls.capturevideo.riffstackstartoffset[1];
+       }
+}
+
+static void FindFraction(double val, int *num, int *denom, int denomMax)
+{
+       int i;
+       double bestdiff;
+       // initialize
+       bestdiff = fabs(val);
+       *num = 0;
+       *denom = 1;
+
+       for(i = 1; i <= denomMax; ++i)
+       {
+               int inum = (int) floor(0.5 + val * i);
+               double diff = fabs(val - inum / (double)i);
+               if(diff < bestdiff)
+               {
+                       bestdiff = diff;
+                       *num = inum;
+                       *denom = i;
+               }
+       }
+}
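
A quick worked example of what this brute-force search returns for the values the header code below feeds it (both call sites use denomMax = 1000):

        int n, d;
        FindFraction(30.0,  &n, &d, 1000);  // n = 30,   d = 1   (exact)
        FindFraction(29.97, &n, &d, 1000);  // n = 2997, d = 100 (NTSC-style rate)
        // the strh chunk then stores d as dwScale and n as dwRate, i.e. framerate = n/d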
+
 void SCR_CaptureVideo_BeginVideo(void)
 {
-       double gamma, g;
+       double gamma, g, aspect;
+       int width = cl_capturevideo_width.integer, height = cl_capturevideo_height.integer;
+       int n, d;
        unsigned int i;
-       unsigned char out[44];
-       if (cls.capturevideo_active)
+       if (cls.capturevideo.active)
                return;
+       memset(&cls.capturevideo, 0, sizeof(cls.capturevideo));
        // soundrate is figured out on the first SoundFrame
-       cls.capturevideo_active = true;
-       cls.capturevideo_starttime = Sys_DoubleTime();
-       cls.capturevideo_framerate = bound(1, cl_capturevideo_fps.value, 1000);
-       cls.capturevideo_soundrate = 0;
-       cls.capturevideo_frame = 0;
-       cls.capturevideo_buffer = (unsigned char *)Mem_Alloc(tempmempool, vid.width * vid.height * (3+3+3) + 18);
+
+       if(width == 0 && height != 0)
+               width = (int) (height * (double)vid.width / ((double)vid.height * vid_pixelheight.value)); // keep aspect
+       if(width != 0 && height == 0)
+               height = (int) (width * ((double)vid.height * vid_pixelheight.value) / (double)vid.width); // keep aspect
+
+       if(width < 2 || width > vid.width) // can't scale up
+               width = vid.width;
+       if(height < 2 || height > vid.height) // can't scale up
+               height = vid.height;
+
+       aspect = vid.width / (vid.height * vid_pixelheight.value);
+
+       // ensure it's all even; if not, scale down a little
+       if(width % 2)
+               --width;
+       if(height % 2)
+               --height;
+
+       cls.capturevideo.width = width;
+       cls.capturevideo.height = height;
+       cls.capturevideo.active = true;
+       cls.capturevideo.starttime = realtime;
+       cls.capturevideo.framerate = bound(1, cl_capturevideo_fps.value, 1000);
+       cls.capturevideo.soundrate = S_GetSoundRate();
+       cls.capturevideo.frame = 0;
+       cls.capturevideo.soundsampleframe = 0;
+       cls.capturevideo.realtime = cl_capturevideo_realtime.integer != 0;
+       cls.capturevideo.screenbuffer = (unsigned char *)Mem_Alloc(tempmempool, vid.width * vid.height * 3);
+       cls.capturevideo.outbuffer = (unsigned char *)Mem_Alloc(tempmempool, width * height * (3+3+3) + 18);
        gamma = 1.0/scr_screenshot_gammaboost.value;
+       dpsnprintf(cls.capturevideo.basename, sizeof(cls.capturevideo.basename), "video/dpvideo%03i", cl_capturevideo_number.integer);
+       Cvar_SetValueQuick(&cl_capturevideo_number, cl_capturevideo_number.integer + 1);
 
        /*
        for (i = 0;i < 256;i++)
        {
                unsigned char j = (unsigned char)bound(0, 255*pow(i/255.0, gamma), 255);
-               cls.capturevideo_rgbgammatable[0][i] = j;
-               cls.capturevideo_rgbgammatable[1][i] = j;
-               cls.capturevideo_rgbgammatable[2][i] = j;
+               cls.capturevideo.rgbgammatable[0][i] = j;
+               cls.capturevideo.rgbgammatable[1][i] = j;
+               cls.capturevideo.rgbgammatable[2][i] = j;
        }
        */
 /*
@@ -640,201 +1149,398 @@ Cr = R *  .500 + G * -.419 + B * -.0813 + 128.;
        {
                g = 255*pow(i/255.0, gamma);
                // Y weights from RGB
-               cls.capturevideo_rgbtoyuvscaletable[0][0][i] = (short)(g *  0.299);
-               cls.capturevideo_rgbtoyuvscaletable[0][1][i] = (short)(g *  0.587);
-               cls.capturevideo_rgbtoyuvscaletable[0][2][i] = (short)(g *  0.114);
+               cls.capturevideo.rgbtoyuvscaletable[0][0][i] = (short)(g *  0.299);
+               cls.capturevideo.rgbtoyuvscaletable[0][1][i] = (short)(g *  0.587);
+               cls.capturevideo.rgbtoyuvscaletable[0][2][i] = (short)(g *  0.114);
                // Cb weights from RGB
-               cls.capturevideo_rgbtoyuvscaletable[1][0][i] = (short)(g * -0.169);
-               cls.capturevideo_rgbtoyuvscaletable[1][1][i] = (short)(g * -0.332);
-               cls.capturevideo_rgbtoyuvscaletable[1][2][i] = (short)(g *  0.500);
+               cls.capturevideo.rgbtoyuvscaletable[1][0][i] = (short)(g * -0.169);
+               cls.capturevideo.rgbtoyuvscaletable[1][1][i] = (short)(g * -0.332);
+               cls.capturevideo.rgbtoyuvscaletable[1][2][i] = (short)(g *  0.500);
                // Cr weights from RGB
-               cls.capturevideo_rgbtoyuvscaletable[2][0][i] = (short)(g *  0.500);
-               cls.capturevideo_rgbtoyuvscaletable[2][1][i] = (short)(g * -0.419);
-               cls.capturevideo_rgbtoyuvscaletable[2][2][i] = (short)(g * -0.0813);
+               cls.capturevideo.rgbtoyuvscaletable[2][0][i] = (short)(g *  0.500);
+               cls.capturevideo.rgbtoyuvscaletable[2][1][i] = (short)(g * -0.419);
+               cls.capturevideo.rgbtoyuvscaletable[2][2][i] = (short)(g * -0.0813);
                // range reduction of YCbCr to valid signal range
-               cls.capturevideo_yuvnormalizetable[0][i] = 16 + i * (236-16) / 256;
-               cls.capturevideo_yuvnormalizetable[1][i] = 16 + i * (240-16) / 256;
-               cls.capturevideo_yuvnormalizetable[2][i] = 16 + i * (240-16) / 256;
+               cls.capturevideo.yuvnormalizetable[0][i] = 16 + i * (236-16) / 256;
+               cls.capturevideo.yuvnormalizetable[1][i] = 16 + i * (240-16) / 256;
+               cls.capturevideo.yuvnormalizetable[2][i] = 16 + i * (240-16) / 256;
        }
 
-       if (cl_capturevideo_rawrgb.integer)
-       {
-               cls.capturevideo_format = CAPTUREVIDEOFORMAT_RAWRGB;
-               cls.capturevideo_videofile = FS_Open ("video/dpvideo.rgb", "wb", false, true);
-       }
-       else if (cl_capturevideo_rawyv12.integer)
-       {
-               cls.capturevideo_format = CAPTUREVIDEOFORMAT_RAWYV12;
-               cls.capturevideo_videofile = FS_Open ("video/dpvideo.yv12", "wb", false, true);
-       }
-       else if (scr_screenshot_jpeg.integer)
+       //if (cl_capturevideo_)
+       //{
+       //}
+       //else
        {
-               cls.capturevideo_format = CAPTUREVIDEOFORMAT_JPEG;
-               cls.capturevideo_videofile = NULL;
-       }
-       else
-       {
-               cls.capturevideo_format = CAPTUREVIDEOFORMAT_TARGA;
-               cls.capturevideo_videofile = NULL;
+               cls.capturevideo.format = CAPTUREVIDEOFORMAT_AVI_I420;
+               cls.capturevideo.videofile = FS_Open (va("%s.avi", cls.capturevideo.basename), "wb", false, true);
+               SCR_CaptureVideo_RIFF_Start();
+               // enclosing RIFF chunk (there can be multiple of these in >1GB files, the later ones are "AVIX" instead of "AVI " and have no header/stream info)
+               SCR_CaptureVideo_RIFF_Push("RIFF", "AVI ");
+               // AVI main header
+               SCR_CaptureVideo_RIFF_Push("LIST", "hdrl");
+               SCR_CaptureVideo_RIFF_Push("avih", NULL);
+               SCR_CaptureVideo_RIFF_Write32((int)(1000000.0 / cls.capturevideo.framerate)); // microseconds per frame
+               SCR_CaptureVideo_RIFF_Write32(0); // max bytes per second
+               SCR_CaptureVideo_RIFF_Write32(0); // padding granularity
+               SCR_CaptureVideo_RIFF_Write32(0x910); // flags (AVIF_HASINDEX | AVIF_ISINTERLEAVED | AVIF_TRUSTCKTYPE)
+               cls.capturevideo.videofile_firstchunkframes_offset = SCR_CaptureVideo_RIFF_GetPosition();
+               SCR_CaptureVideo_RIFF_Write32(0); // total frames
+               SCR_CaptureVideo_RIFF_Write32(0); // initial frames
+               if (cls.capturevideo.soundrate)
+                       SCR_CaptureVideo_RIFF_Write32(2); // number of streams
+               else
+                       SCR_CaptureVideo_RIFF_Write32(1); // number of streams
+               SCR_CaptureVideo_RIFF_Write32(0); // suggested buffer size
+               SCR_CaptureVideo_RIFF_Write32(width); // width
+               SCR_CaptureVideo_RIFF_Write32(height); // height
+               SCR_CaptureVideo_RIFF_Write32(0); // reserved[0]
+               SCR_CaptureVideo_RIFF_Write32(0); // reserved[1]
+               SCR_CaptureVideo_RIFF_Write32(0); // reserved[2]
+               SCR_CaptureVideo_RIFF_Write32(0); // reserved[3]
+               SCR_CaptureVideo_RIFF_Pop();
+               // video stream info
+               SCR_CaptureVideo_RIFF_Push("LIST", "strl");
+               SCR_CaptureVideo_RIFF_Push("strh", "vids");
+               SCR_CaptureVideo_RIFF_WriteFourCC("I420"); // stream fourcc (I420 colorspace, uncompressed)
+               SCR_CaptureVideo_RIFF_Write32(0); // flags
+               SCR_CaptureVideo_RIFF_Write16(0); // priority
+               SCR_CaptureVideo_RIFF_Write16(0); // language
+               SCR_CaptureVideo_RIFF_Write32(0); // initial frames
+               // find an ideal divisor for the framerate
+               FindFraction(cls.capturevideo.framerate, &n, &d, 1000);
+               SCR_CaptureVideo_RIFF_Write32(d); // samples/second divisor
+               SCR_CaptureVideo_RIFF_Write32(n); // samples/second multiplied by divisor
+               SCR_CaptureVideo_RIFF_Write32(0); // start
+               cls.capturevideo.videofile_totalframes_offset1 = SCR_CaptureVideo_RIFF_GetPosition();
+               SCR_CaptureVideo_RIFF_Write32(0); // length
+               SCR_CaptureVideo_RIFF_Write32(width*height+(width/2)*(height/2)*2); // suggested buffer size
+               SCR_CaptureVideo_RIFF_Write32(0); // quality
+               SCR_CaptureVideo_RIFF_Write32(0); // sample size
+               SCR_CaptureVideo_RIFF_Write16(0); // frame left
+               SCR_CaptureVideo_RIFF_Write16(0); // frame top
+               SCR_CaptureVideo_RIFF_Write16(width); // frame right
+               SCR_CaptureVideo_RIFF_Write16(height); // frame bottom
+               SCR_CaptureVideo_RIFF_Pop();
+               // video stream format
+               SCR_CaptureVideo_RIFF_Push("strf", NULL);
+               SCR_CaptureVideo_RIFF_Write32(40); // BITMAPINFO struct size
+               SCR_CaptureVideo_RIFF_Write32(width); // width
+               SCR_CaptureVideo_RIFF_Write32(height); // height
+               SCR_CaptureVideo_RIFF_Write16(3); // planes
+               SCR_CaptureVideo_RIFF_Write16(12); // bitcount
+               SCR_CaptureVideo_RIFF_WriteFourCC("I420"); // compression
+               SCR_CaptureVideo_RIFF_Write32(width*height+(width/2)*(height/2)*2); // size of image
+               SCR_CaptureVideo_RIFF_Write32(0); // x pixels per meter
+               SCR_CaptureVideo_RIFF_Write32(0); // y pixels per meter
+               SCR_CaptureVideo_RIFF_Write32(0); // color used
+               SCR_CaptureVideo_RIFF_Write32(0); // color important
+               SCR_CaptureVideo_RIFF_Pop();
+               // master index
+               SCR_CaptureVideo_RIFF_Push("indx", NULL);
+               SCR_CaptureVideo_RIFF_Write16(4); // wLongsPerEntry
+               SCR_CaptureVideo_RIFF_Write16(0); // bIndexSubType=0, bIndexType=0
+               cls.capturevideo.videofile_ix_master_video_inuse_offset = SCR_CaptureVideo_RIFF_GetPosition();
+               SCR_CaptureVideo_RIFF_Write32(0); // nEntriesInUse
+               SCR_CaptureVideo_RIFF_WriteFourCC("00dc"); // dwChunkId
+               SCR_CaptureVideo_RIFF_Write32(0); // dwReserved1
+               SCR_CaptureVideo_RIFF_Write32(0); // dwReserved2
+               SCR_CaptureVideo_RIFF_Write32(0); // dwReserved3
+               cls.capturevideo.videofile_ix_master_video_start_offset = SCR_CaptureVideo_RIFF_GetPosition();
+               for(i = 0; i < AVI_MASTER_INDEX_SIZE * 4; ++i)
+                       SCR_CaptureVideo_RIFF_Write32(0); // fill up later
+               SCR_CaptureVideo_RIFF_Pop();
+               // extended format (aspect!)
+               SCR_CaptureVideo_RIFF_Push("vprp", NULL);
+               SCR_CaptureVideo_RIFF_Write32(0); // VideoFormatToken
+               SCR_CaptureVideo_RIFF_Write32(0); // VideoStandard
+               SCR_CaptureVideo_RIFF_Write32((int)cls.capturevideo.framerate); // dwVerticalRefreshRate (bogus)
+               SCR_CaptureVideo_RIFF_Write32(width); // dwHTotalInT
+               SCR_CaptureVideo_RIFF_Write32(height); // dwVTotalInLines
+               FindFraction(aspect, &n, &d, 1000);
+               SCR_CaptureVideo_RIFF_Write32((n << 16) | d); // dwFrameAspectRatio // TODO a word
+               SCR_CaptureVideo_RIFF_Write32(width); // dwFrameWidthInPixels
+               SCR_CaptureVideo_RIFF_Write32(height); // dwFrameHeightInLines
+               SCR_CaptureVideo_RIFF_Write32(1); // nFieldPerFrame
+               SCR_CaptureVideo_RIFF_Write32(width); // CompressedBMWidth
+               SCR_CaptureVideo_RIFF_Write32(height); // CompressedBMHeight
+               SCR_CaptureVideo_RIFF_Write32(width); // ValidBMHeight
+               SCR_CaptureVideo_RIFF_Write32(height); // ValidBMWidth
+               SCR_CaptureVideo_RIFF_Write32(0); // ValidBMXOffset
+               SCR_CaptureVideo_RIFF_Write32(0); // ValidBMYOffset
+               SCR_CaptureVideo_RIFF_Write32(0); // ValidBMXOffsetInT
+               SCR_CaptureVideo_RIFF_Write32(0); // ValidBMYValidStartLine
+               SCR_CaptureVideo_RIFF_Pop();
+               SCR_CaptureVideo_RIFF_Pop();
+               if (cls.capturevideo.soundrate)
+               {
+                       // audio stream info
+                       SCR_CaptureVideo_RIFF_Push("LIST", "strl");
+                       SCR_CaptureVideo_RIFF_Push("strh", "auds");
+                       SCR_CaptureVideo_RIFF_Write32(1); // stream fourcc (PCM audio, uncompressed)
+                       SCR_CaptureVideo_RIFF_Write32(0); // flags
+                       SCR_CaptureVideo_RIFF_Write16(0); // priority
+                       SCR_CaptureVideo_RIFF_Write16(0); // language
+                       SCR_CaptureVideo_RIFF_Write32(0); // initial frames
+                       SCR_CaptureVideo_RIFF_Write32(1); // samples/second divisor
+                       SCR_CaptureVideo_RIFF_Write32((int)(cls.capturevideo.soundrate)); // samples/second multiplied by divisor
+                       SCR_CaptureVideo_RIFF_Write32(0); // start
+                       cls.capturevideo.videofile_totalsampleframes_offset = SCR_CaptureVideo_RIFF_GetPosition();
+                       SCR_CaptureVideo_RIFF_Write32(0); // length
+                       SCR_CaptureVideo_RIFF_Write32(cls.capturevideo.soundrate * 2); // suggested buffer size (this is a half second)
+                       SCR_CaptureVideo_RIFF_Write32(0); // quality
+                       SCR_CaptureVideo_RIFF_Write32(4); // sample size
+                       SCR_CaptureVideo_RIFF_Write16(0); // frame left
+                       SCR_CaptureVideo_RIFF_Write16(0); // frame top
+                       SCR_CaptureVideo_RIFF_Write16(0); // frame right
+                       SCR_CaptureVideo_RIFF_Write16(0); // frame bottom
+                       SCR_CaptureVideo_RIFF_Pop();
+                       // audio stream format
+                       SCR_CaptureVideo_RIFF_Push("strf", NULL);
+                       SCR_CaptureVideo_RIFF_Write16(1); // format (1 = WAVE_FORMAT_PCM, uncompressed)
+                       SCR_CaptureVideo_RIFF_Write16(2); // channels (stereo)
+                       SCR_CaptureVideo_RIFF_Write32(cls.capturevideo.soundrate); // sampleframes per second
+                       SCR_CaptureVideo_RIFF_Write32(cls.capturevideo.soundrate * 4); // average bytes per second
+                       SCR_CaptureVideo_RIFF_Write16(4); // block align
+                       SCR_CaptureVideo_RIFF_Write16(16); // bits per sample
+                       SCR_CaptureVideo_RIFF_Write16(0); // size
+                       SCR_CaptureVideo_RIFF_Pop();
+                       // master index
+                       SCR_CaptureVideo_RIFF_Push("indx", NULL);
+                       SCR_CaptureVideo_RIFF_Write16(4); // wLongsPerEntry
+                       SCR_CaptureVideo_RIFF_Write16(0); // bIndexSubType=0, bIndexType=0
+                       cls.capturevideo.videofile_ix_master_audio_inuse_offset = SCR_CaptureVideo_RIFF_GetPosition();
+                       SCR_CaptureVideo_RIFF_Write32(0); // nEntriesInUse
+                       SCR_CaptureVideo_RIFF_WriteFourCC("01wb"); // dwChunkId
+                       SCR_CaptureVideo_RIFF_Write32(0); // dwReserved1
+                       SCR_CaptureVideo_RIFF_Write32(0); // dwReserved2
+                       SCR_CaptureVideo_RIFF_Write32(0); // dwReserved3
+                       cls.capturevideo.videofile_ix_master_audio_start_offset = SCR_CaptureVideo_RIFF_GetPosition();
+                       for(i = 0; i < AVI_MASTER_INDEX_SIZE * 4; ++i)
+                               SCR_CaptureVideo_RIFF_Write32(0); // fill up later
+                       SCR_CaptureVideo_RIFF_Pop();
+                       SCR_CaptureVideo_RIFF_Pop();
+               }
+
+               cls.capturevideo.videofile_ix_master_audio_inuse = cls.capturevideo.videofile_ix_master_video_inuse = 0;
+
+               // extended header (for total #frames)
+               SCR_CaptureVideo_RIFF_Push("LIST", "odml");
+               SCR_CaptureVideo_RIFF_Push("dmlh", NULL);
+               cls.capturevideo.videofile_totalframes_offset2 = SCR_CaptureVideo_RIFF_GetPosition();
+               SCR_CaptureVideo_RIFF_Write32(0);
+               SCR_CaptureVideo_RIFF_Pop();
+               SCR_CaptureVideo_RIFF_Pop();
+
+               // close the AVI header list
+               SCR_CaptureVideo_RIFF_Pop();
+               // software that produced this AVI video file
+               SCR_CaptureVideo_RIFF_Push("LIST", "INFO");
+               SCR_CaptureVideo_RIFF_Push("ISFT", NULL);
+               SCR_CaptureVideo_RIFF_WriteTerminatedString(engineversion);
+               SCR_CaptureVideo_RIFF_Pop();
+               // enable this junk filler if you like the LIST movi to always begin at 4KB in the file (why?)
+#if 0
+               SCR_CaptureVideo_RIFF_Push("JUNK", NULL);
+               x = 4096 - SCR_CaptureVideo_RIFF_GetPosition();
+               while (x > 0)
+               {
+                       const char *junkfiller = "[ DarkPlaces junk data ]";
+                       int i = min(x, (int)strlen(junkfiller));
+                       SCR_CaptureVideo_RIFF_WriteBytes((const unsigned char *)junkfiller, i);
+                       x -= i;
+               }
+               SCR_CaptureVideo_RIFF_Pop();
+#endif
+               SCR_CaptureVideo_RIFF_Pop();
+               // begin the actual video section now
+               SCR_CaptureVideo_RIFF_Push("LIST", "movi");
+               cls.capturevideo.videofile_ix_movistart = cls.capturevideo.riffstackstartoffset[1];
+               // we're done with the headers now...
+               SCR_CaptureVideo_RIFF_Flush();
+               if (cls.capturevideo.riffstacklevel != 2)
+                       Sys_Error("SCR_CaptureVideo_BeginVideo: broken AVI writing code (stack level is %i (should be 2) at end of headers)\n", cls.capturevideo.riffstacklevel);
        }
 
-       if (cl_capturevideo_sound.integer)
+       switch(cls.capturevideo.format)
        {
-               cls.capturevideo_soundfile = FS_Open ("video/dpvideo.wav", "wb", false, true);
-               // wave header will be filled out when video ends
-               memset(out, 0, 44);
-               FS_Write (cls.capturevideo_soundfile, out, 44);
+       case CAPTUREVIDEOFORMAT_AVI_I420:
+               break;
+       default:
+               break;
        }
-       else
-               cls.capturevideo_soundfile = NULL;
 }
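
All of the header writing above leans on the SCR_CaptureVideo_RIFF_Push/Pop helpers defined earlier in this file: a RIFF chunk's byte size is not known until the chunk is closed, so Push has to leave a placeholder size field and Pop has to seek back and patch it once the content length is known (the same trick the EndVideo code below uses for the total frame counts). Those helpers are not part of this hunk, so the sketch below is only a simplified illustration of the pattern, with invented names and plain stdio in place of the engine's buffered FS_* writes:

#include <stdio.h>

/* Simplified placeholder-size-then-patch pattern; not the engine's code. */
static long riff_push(FILE *f, const char fourcc[4])
{
	long sizepos;
	fwrite(fourcc, 1, 4, f);
	sizepos = ftell(f);           /* remember where the size field lives */
	fwrite("\0\0\0\0", 1, 4, f);  /* placeholder, patched by riff_pop */
	return sizepos;
}

static void riff_pop(FILE *f, long sizepos)
{
	long end = ftell(f);
	unsigned long size = (unsigned long)(end - (sizepos + 4));
	unsigned char b[4];
	b[0] = size & 0xFF; b[1] = (size >> 8) & 0xFF;
	b[2] = (size >> 16) & 0xFF; b[3] = (size >> 24) & 0xFF;
	fseek(f, sizepos, SEEK_SET);
	fwrite(b, 1, 4, f);           /* RIFF chunk sizes are little-endian */
	fseek(f, end, SEEK_SET);
}
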
 
 void SCR_CaptureVideo_EndVideo(void)
 {
-       int i, n;
-       unsigned char out[44];
-       if (!cls.capturevideo_active)
+       if (!cls.capturevideo.active)
                return;
-       cls.capturevideo_active = false;
+       cls.capturevideo.active = false;
+       if (cls.capturevideo.videofile)
+       {
+               switch(cls.capturevideo.format)
+               {
+               case CAPTUREVIDEOFORMAT_AVI_I420:
+                       // close any open chunks
+                       SCR_CaptureVideo_RIFF_Finish(true);
+                       // go back and patch the total video frame and audio sample count fields
+                       Con_DPrintf("Finishing capture (%d frames, %d audio frames)\n", cls.capturevideo.frame, cls.capturevideo.soundsampleframe);
+                       FS_Seek(cls.capturevideo.videofile, cls.capturevideo.videofile_totalframes_offset1, SEEK_SET);
+                       SCR_CaptureVideo_RIFF_Write32(cls.capturevideo.frame);
+                       SCR_CaptureVideo_RIFF_Flush();
+                       FS_Seek(cls.capturevideo.videofile, cls.capturevideo.videofile_totalframes_offset2, SEEK_SET);
+                       SCR_CaptureVideo_RIFF_Write32(cls.capturevideo.frame);
+                       SCR_CaptureVideo_RIFF_Flush();
+                       if (cls.capturevideo.soundrate)
+                       {
+                               FS_Seek(cls.capturevideo.videofile, cls.capturevideo.videofile_totalsampleframes_offset, SEEK_SET);
+                               SCR_CaptureVideo_RIFF_Write32(cls.capturevideo.soundsampleframe);
+                               SCR_CaptureVideo_RIFF_Flush();
+                       }
+                       break;
+               default:
+                       break;
+               }
+               FS_Close(cls.capturevideo.videofile);
+               cls.capturevideo.videofile = NULL;
+       }
 
-       if (cls.capturevideo_videofile)
+       if (cls.capturevideo.screenbuffer)
        {
-               FS_Close(cls.capturevideo_videofile);
-               cls.capturevideo_videofile = NULL;
+               Mem_Free (cls.capturevideo.screenbuffer);
+               cls.capturevideo.screenbuffer = NULL;
        }
 
-       // finish the wave file
-       if (cls.capturevideo_soundfile)
+       if (cls.capturevideo.outbuffer)
        {
-               i = (int)FS_Tell (cls.capturevideo_soundfile);
-               //"RIFF", (int) unknown (chunk size), "WAVE",
-               //"fmt ", (int) 16 (chunk size), (short) format 1 (uncompressed PCM), (short) 2 channels, (int) unknown rate, (int) unknown bytes per second, (short) 4 bytes per sample (channels * bytes per channel), (short) 16 bits per channel
-               //"data", (int) unknown (chunk size)
-               memcpy (out, "RIFF****WAVEfmt \x10\x00\x00\x00\x01\x00\x02\x00********\x04\x00\x10\0data****", 44);
-               // the length of the whole RIFF chunk
-               n = i - 8;
-               out[4] = (n) & 0xFF;
-               out[5] = (n >> 8) & 0xFF;
-               out[6] = (n >> 16) & 0xFF;
-               out[7] = (n >> 24) & 0xFF;
-               // rate
-               n = cls.capturevideo_soundrate;
-               out[24] = (n) & 0xFF;
-               out[25] = (n >> 8) & 0xFF;
-               out[26] = (n >> 16) & 0xFF;
-               out[27] = (n >> 24) & 0xFF;
-               // bytes per second (rate * channels * bytes per channel)
-               n = cls.capturevideo_soundrate * 2 * 2;
-               out[28] = (n) & 0xFF;
-               out[29] = (n >> 8) & 0xFF;
-               out[30] = (n >> 16) & 0xFF;
-               out[31] = (n >> 24) & 0xFF;
-               // the length of the data chunk
-               n = i - 44;
-               out[40] = (n) & 0xFF;
-               out[41] = (n >> 8) & 0xFF;
-               out[42] = (n >> 16) & 0xFF;
-               out[43] = (n >> 24) & 0xFF;
-               FS_Seek (cls.capturevideo_soundfile, 0, SEEK_SET);
-               FS_Write (cls.capturevideo_soundfile, out, 44);
-               FS_Close (cls.capturevideo_soundfile);
-               cls.capturevideo_soundfile = NULL;
+               Mem_Free (cls.capturevideo.outbuffer);
+               cls.capturevideo.outbuffer = NULL;
        }
 
-       if (cls.capturevideo_buffer)
+       if (cls.capturevideo.riffindexbuffer.data)
        {
-               Mem_Free (cls.capturevideo_buffer);
-               cls.capturevideo_buffer = NULL;
+               Mem_Free(cls.capturevideo.riffindexbuffer.data);
+               cls.capturevideo.riffindexbuffer.data = NULL;
        }
 
-       cls.capturevideo_starttime = 0;
-       cls.capturevideo_framerate = 0;
-       cls.capturevideo_frame = 0;
+       memset(&cls.capturevideo, 0, sizeof(cls.capturevideo));
 }
 
-qboolean SCR_CaptureVideo_VideoFrame(int newframenum)
+// converts from RGB24 to the I420 colorspace (identical to YV12 except the chroma plane order is reversed: Y plane, then Cb, then Cr); this colorspace is handled by the Intel(r) 4:2:0 codec on Windows
+void SCR_CaptureVideo_ConvertFrame_RGB_to_I420_flip(int width, int height, unsigned char *instart, unsigned char *outstart)
 {
-       int x = 0, y = 0, width = vid.width, height = vid.height;
-       unsigned char *b, *out;
-       char filename[32];
+       int x, y;
        int outoffset = (width/2)*(height/2);
-       CHECKGLERROR
-       //return SCR_ScreenShot(filename, cls.capturevideo_buffer, cls.capturevideo_buffer + vid.width * vid.height * 3, cls.capturevideo_buffer + vid.width * vid.height * 6, 0, 0, vid.width, vid.height, false, false, false, jpeg, true);
-       // speed is critical here, so do saving as directly as possible
-       switch (cls.capturevideo_format)
+       unsigned char *b, *out;
+       // process one line at a time, and CbCr every other line at 2 pixel intervals
+       for (y = 0;y < height;y++)
        {
-       case CAPTUREVIDEOFORMAT_RAWYV12:
-               // FIXME: width/height must be multiple of 2, enforce this?
-               qglReadPixels (x, y, width, height, GL_RGB, GL_UNSIGNED_BYTE, cls.capturevideo_buffer);CHECKGLERROR
-               // process one line at a time, and CbCr every other line at 2 pixel intervals
-               for (y = 0;y < height;y++)
+               // 1x1 Y
+               for (b = instart + (height-1-y)*width*3, out = outstart + y*width, x = 0;x < width;x++, b += 3, out++)
+                       *out = cls.capturevideo.yuvnormalizetable[0][cls.capturevideo.rgbtoyuvscaletable[0][0][b[0]] + cls.capturevideo.rgbtoyuvscaletable[0][1][b[1]] + cls.capturevideo.rgbtoyuvscaletable[0][2][b[2]]];
+               if ((y & 1) == 0)
                {
-                       // 1x1 Y
-                       for (b = cls.capturevideo_buffer + (height-1-y)*width*3, out = cls.capturevideo_buffer + width*height*3 + y*width, x = 0;x < width;x++, b += 3, out++)
-                               *out = cls.capturevideo_yuvnormalizetable[0][cls.capturevideo_rgbtoyuvscaletable[0][0][b[0]] + cls.capturevideo_rgbtoyuvscaletable[0][1][b[1]] + cls.capturevideo_rgbtoyuvscaletable[0][2][b[2]]];
-                       if ((y & 1) == 0)
+                       // 2x2 Cb and Cr planes (the Cb plane comes first in I420)
+#if 0
+                       // low quality, no averaging
+                       for (b = instart + (height-2-y)*width*3, out = outstart + width*height + (y/2)*(width/2), x = 0;x < width/2;x++, b += 6, out++)
                        {
-                               // 2x2 Cb and Cr planes
-#if 1
-                               // low quality, no averaging
-                               for (b = cls.capturevideo_buffer + (height-2-y)*width*3, out = cls.capturevideo_buffer + width*height*3 + width*height + (y/2)*(width/2), x = 0;x < width/2;x++, b += 6, out++)
-                               {
-                                       // Cr
-                                       out[0        ] = cls.capturevideo_yuvnormalizetable[2][cls.capturevideo_rgbtoyuvscaletable[2][0][b[0]] + cls.capturevideo_rgbtoyuvscaletable[2][1][b[1]] + cls.capturevideo_rgbtoyuvscaletable[2][2][b[2]] + 128];
-                                       // Cb
-                                       out[outoffset] = cls.capturevideo_yuvnormalizetable[1][cls.capturevideo_rgbtoyuvscaletable[1][0][b[0]] + cls.capturevideo_rgbtoyuvscaletable[1][1][b[1]] + cls.capturevideo_rgbtoyuvscaletable[1][2][b[2]] + 128];
-                               }
+                               // Cb
+                               out[0        ] = cls.capturevideo.yuvnormalizetable[1][cls.capturevideo.rgbtoyuvscaletable[1][0][b[0]] + cls.capturevideo.rgbtoyuvscaletable[1][1][b[1]] + cls.capturevideo.rgbtoyuvscaletable[1][2][b[2]] + 128];
+                               // Cr
+                               out[outoffset] = cls.capturevideo.yuvnormalizetable[2][cls.capturevideo.rgbtoyuvscaletable[2][0][b[0]] + cls.capturevideo.rgbtoyuvscaletable[2][1][b[1]] + cls.capturevideo.rgbtoyuvscaletable[2][2][b[2]] + 128];
+                       }
 #else
-                               // high quality, averaging
-                               int inpitch = width*3;
-                               for (b = cls.capturevideo_buffer + (height-2-y)*width*3, out = cls.capturevideo_buffer + width*height*3 + width*height + (y/2)*(width/2), x = 0;x < width/2;x++, b += 6, out++)
-                               {
-                                       int blockr, blockg, blockb;
-                                       blockr = (b[0] + b[3] + b[inpitch+0] + b[inpitch+3]) >> 2;
-                                       blockg = (b[1] + b[4] + b[inpitch+1] + b[inpitch+4]) >> 2;
-                                       blockb = (b[2] + b[5] + b[inpitch+2] + b[inpitch+5]) >> 2;
-                                       // Cr
-                                       out[0        ] = cls.capturevideo_yuvnormalizetable[2][cls.capturevideo_rgbtoyuvscaletable[2][0][blockr] + cls.capturevideo_rgbtoyuvscaletable[2][1][blockg] + cls.capturevideo_rgbtoyuvscaletable[2][2][blockb] + 128];
-                                       // Cb
-                                       out[outoffset] = cls.capturevideo_yuvnormalizetable[1][cls.capturevideo_rgbtoyuvscaletable[1][0][blockr] + cls.capturevideo_rgbtoyuvscaletable[1][1][blockg] + cls.capturevideo_rgbtoyuvscaletable[1][2][blockb] + 128];
-                               }
-#endif
+                       // high quality, averaging
+                       int inpitch = width*3;
+                       for (b = instart + (height-2-y)*width*3, out = outstart + width*height + (y/2)*(width/2), x = 0;x < width/2;x++, b += 6, out++)
+                       {
+                               int blockr, blockg, blockb;
+                               blockr = (b[0] + b[3] + b[inpitch+0] + b[inpitch+3]) >> 2;
+                               blockg = (b[1] + b[4] + b[inpitch+1] + b[inpitch+4]) >> 2;
+                               blockb = (b[2] + b[5] + b[inpitch+2] + b[inpitch+5]) >> 2;
+                               // Cb
+                               out[0        ] = cls.capturevideo.yuvnormalizetable[1][cls.capturevideo.rgbtoyuvscaletable[1][0][blockr] + cls.capturevideo.rgbtoyuvscaletable[1][1][blockg] + cls.capturevideo.rgbtoyuvscaletable[1][2][blockb] + 128];
+                               // Cr
+                               out[outoffset] = cls.capturevideo.yuvnormalizetable[2][cls.capturevideo.rgbtoyuvscaletable[2][0][blockr] + cls.capturevideo.rgbtoyuvscaletable[2][1][blockg] + cls.capturevideo.rgbtoyuvscaletable[2][2][blockb] + 128];
                        }
+#endif
                }
-               for (;cls.capturevideo_frame < newframenum;cls.capturevideo_frame++)
-                       if (!FS_Write (cls.capturevideo_videofile, cls.capturevideo_buffer + width*height*3, width*height+(width/2)*(height/2)*2))
-                               return false;
-               return true;
-       case CAPTUREVIDEOFORMAT_RAWRGB:
-               qglReadPixels (x, y, width, height, GL_RGB, GL_UNSIGNED_BYTE, cls.capturevideo_buffer);CHECKGLERROR
-               for (;cls.capturevideo_frame < newframenum;cls.capturevideo_frame++)
-                       if (!FS_Write (cls.capturevideo_videofile, cls.capturevideo_buffer, width*height*3))
-                               return false;
-               return true;
-       case CAPTUREVIDEOFORMAT_JPEG:
-               qglReadPixels (x, y, width, height, GL_RGB, GL_UNSIGNED_BYTE, cls.capturevideo_buffer);CHECKGLERROR
-               for (;cls.capturevideo_frame < newframenum;cls.capturevideo_frame++)
+       }
+}
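
The rgbtoyuvscaletable and yuvnormalizetable lookups used above are built elsewhere in this file, so their contents are not visible in this hunk; they presumably bake the usual BT.601 RGB-to-Y'CbCr weights (plus the capture gamma ramp) into per-channel tables so the inner loop is add-and-index only. A table-free sketch of the per-pixel math being approximated, using the standard coefficients:

/* Plain-arithmetic BT.601 RGB -> Y'CbCr conversion (the form the lookup
   tables above are assumed to approximate). */
static void rgb_to_ycbcr(int r, int g, int b,
                         unsigned char *y, unsigned char *cb, unsigned char *cr)
{
	int yy = (int)( 0.299 * r + 0.587 * g + 0.114 * b);
	int u  = (int)(-0.169 * r - 0.331 * g + 0.500 * b) + 128; /* Cb */
	int v  = (int)( 0.500 * r - 0.419 * g - 0.081 * b) + 128; /* Cr */
	*y  = (unsigned char)(yy < 0 ? 0 : yy > 255 ? 255 : yy);
	*cb = (unsigned char)(u  < 0 ? 0 : u  > 255 ? 255 : u);
	*cr = (unsigned char)(v  < 0 ? 0 : v  > 255 ? 255 : v);
}
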
+
+static void SCR_ScaleDown(unsigned char *in, int inw, int inh, unsigned char *out, int outw, int outh)
+{
+       // TODO optimize this function
+
+       int x, y;
+       float area;
+
+       // memcpy is faster than me
+       if(inw == outw && inh == outh)
+       {
+               memcpy(out, in, 3 * inw * inh);
+               return;
+       }
+
+       // otherwise: a box filter
+       area = (float)outw * (float)outh / (float)inw / (float)inh;
+       for(y = 0; y < outh; ++y)
+       {
+               float iny0 =  y    / (float)outh * inh; int iny0_i = floor(iny0);
+               float iny1 = (y+1) / (float)outh * inh; int iny1_i = ceil(iny1);
+               for(x = 0; x < outw; ++x)
                {
-                       sprintf(filename, "video/dp%06d.jpg", cls.capturevideo_frame);
-                       if (!JPEG_SaveImage_preflipped (filename, width, height, cls.capturevideo_buffer))
-                               return false;
+                       float inx0 =  x    / (float)outw * inw; int inx0_i = floor(inx0);
+                       float inx1 = (x+1) / (float)outw * inw; int inx1_i = ceil(inx1);
+                       float r = 0, g = 0, b = 0;
+                       int xx, yy;
+
+                       for(yy = iny0_i; yy < iny1_i; ++yy)
+                       {
+                               float ya = min(yy+1, iny1) - max(iny0, yy);
+                               for(xx = inx0_i; xx < inx1_i; ++xx)
+                               {
+                                       float a = ya * (min(xx+1, inx1) - max(inx0, xx));
+                                       r += a * in[3*(xx + inw * yy)+0];
+                                       g += a * in[3*(xx + inw * yy)+1];
+                                       b += a * in[3*(xx + inw * yy)+2];
+                               }
+                       }
+
+                       out[3*(x + outw * y)+0] = r * area;
+                       out[3*(x + outw * y)+1] = g * area;
+                       out[3*(x + outw * y)+2] = b * area;
                }
-               return true;
-       case CAPTUREVIDEOFORMAT_TARGA:
-               //return Image_WriteTGARGB_preflipped (filename, width, height, cls.capturevideo_buffer, cls.capturevideo_buffer + vid.width * vid.height * 3, );
-               memset (cls.capturevideo_buffer, 0, 18);
-               cls.capturevideo_buffer[2] = 2;         // uncompressed type
-               cls.capturevideo_buffer[12] = (width >> 0) & 0xFF;
-               cls.capturevideo_buffer[13] = (width >> 8) & 0xFF;
-               cls.capturevideo_buffer[14] = (height >> 0) & 0xFF;
-               cls.capturevideo_buffer[15] = (height >> 8) & 0xFF;
-               cls.capturevideo_buffer[16] = 24;       // pixel size
-               qglReadPixels (x, y, width, height, GL_BGR, GL_UNSIGNED_BYTE, cls.capturevideo_buffer + 18);CHECKGLERROR
-               for (;cls.capturevideo_frame < newframenum;cls.capturevideo_frame++)
+       }
+}
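
The area factor above is the reciprocal of how many input pixels (counting fractional coverage at the edges) fall into one output pixel, so multiplying the accumulated r/g/b sums by it turns them back into averages. The standalone snippet below runs the same min/max coverage arithmetic for one row, with a 3-to-2 downscale chosen purely as an example:

#include <math.h>
#include <stdio.h>

/* Coverage weights for scaling a 3-sample row down to 2 samples, using the
   same overlap arithmetic as SCR_ScaleDown (example sizes only). */
int main(void)
{
	int inw = 3, outw = 2, x, xx;
	for (x = 0; x < outw; x++)
	{
		float in0 =  x      / (float)outw * inw;  /* 0.0, then 1.5 */
		float in1 = (x + 1) / (float)outw * inw;  /* 1.5, then 3.0 */
		for (xx = (int)floor(in0); xx < (int)ceil(in1); xx++)
		{
			float a = fmin(xx + 1, in1) - fmax(in0, xx);
			printf("output sample %d takes %.2f of input sample %d\n", x, a, xx);
		}
	}
	/* each output sample's weights sum to inw/outw = 1.5; multiplying the
	   accumulated sum by outw/inw (the 1D equivalent of "area") restores an
	   average in the 0-255 range */
	return 0;
}
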
+
+qboolean SCR_CaptureVideo_VideoFrame(int newframenum)
+{
+       int x = 0, y = 0, width = cls.capturevideo.width, height = cls.capturevideo.height;
+       unsigned char *in, *out;
+       CHECKGLERROR
+       //return SCR_ScreenShot(filename, cls.capturevideo.buffer, cls.capturevideo.buffer + vid.width * vid.height * 3, cls.capturevideo.buffer + vid.width * vid.height * 6, 0, 0, vid.width, vid.height, false, false, false, jpeg, true);
+       // speed is critical here, so do saving as directly as possible
+       switch (cls.capturevideo.format)
+       {
+       case CAPTUREVIDEOFORMAT_AVI_I420:
+               // if there's no videofile we have to give up and abort saving
+               if (!cls.capturevideo.videofile)
+                       return false;
+               // FIXME: width/height must be multiple of 2, enforce this?
+               qglReadPixels (x, y, vid.width, vid.height, GL_RGB, GL_UNSIGNED_BYTE, cls.capturevideo.screenbuffer);CHECKGLERROR
+               SCR_ScaleDown (cls.capturevideo.screenbuffer, vid.width, vid.height, cls.capturevideo.outbuffer, width, height);
+               in = cls.capturevideo.outbuffer;
+               out = cls.capturevideo.outbuffer + width*height*3;
+               SCR_CaptureVideo_ConvertFrame_RGB_to_I420_flip(width, height, in, out);
+               x = width*height+(width/2)*(height/2)*2;
+               SCR_CaptureVideo_RIFF_OverflowCheck(8 + x);
+               for (;cls.capturevideo.frame < newframenum;cls.capturevideo.frame++)
                {
-                       sprintf(filename, "video/dp%06d.tga", cls.capturevideo_frame);
-                       if (!FS_WriteFile (filename, cls.capturevideo_buffer, width*height*3 + 18))
-                               return false;
+                       SCR_CaptureVideo_RIFF_IndexEntry("00dc", x, 0x10); // AVIIF_KEYFRAME
+                       SCR_CaptureVideo_RIFF_Push("00dc", NULL);
+                       SCR_CaptureVideo_RIFF_WriteBytes(out, x);
+                       SCR_CaptureVideo_RIFF_Pop();
                }
                return true;
        default:
@@ -844,14 +1550,21 @@ qboolean SCR_CaptureVideo_VideoFrame(int newframenum)
 
 void SCR_CaptureVideo_SoundFrame(unsigned char *bufstereo16le, size_t length, int rate)
 {
-       if (!cls.capturevideo_soundfile)
-               return;
-       cls.capturevideo_soundrate = rate;
-       if (FS_Write (cls.capturevideo_soundfile, bufstereo16le, 4 * length) < (fs_offset_t)(4 * length))
+       int x;
+       cls.capturevideo.soundrate = rate;
+       cls.capturevideo.soundsampleframe += length;
+       switch (cls.capturevideo.format)
        {
-               Cvar_SetValueQuick(&cl_capturevideo, 0);
-               Con_Printf("video sound saving failed on frame %i, out of disk space? stopping video capture.\n", cls.capturevideo_frame);
-               SCR_CaptureVideo_EndVideo();
+       case CAPTUREVIDEOFORMAT_AVI_I420:
+               x = length*4;
+               SCR_CaptureVideo_RIFF_OverflowCheck(8 + x);
+               SCR_CaptureVideo_RIFF_IndexEntry("01wb", x, 0x10); // AVIIF_KEYFRAME
+               SCR_CaptureVideo_RIFF_Push("01wb", NULL);
+               SCR_CaptureVideo_RIFF_WriteBytes(bufstereo16le, x);
+               SCR_CaptureVideo_RIFF_Pop();
+               break;
+       default:
+               break;
        }
 }
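
For a sense of scale: each "00dc" chunk written by SCR_CaptureVideo_VideoFrame above carries one I420 image (a full-resolution Y plane plus two quarter-resolution chroma planes), and each "01wb" chunk carries length sample frames of 16-bit stereo PCM, which is where the length*4 comes from. The helpers below merely restate that arithmetic; 640x480 and 48000 Hz are arbitrary example figures, not engine defaults:

/* Payload sizes of the two chunk types written above (example numbers only). */
static int i420_frame_bytes(int width, int height)
{
	return width * height + (width / 2) * (height / 2) * 2; /* Y + Cb + Cr */
}

static int pcm16_stereo_bytes(int sampleframes)
{
	return sampleframes * 4; /* 2 channels * 2 bytes per sample */
}

/* i420_frame_bytes(640, 480) == 307200 + 153600 == 460800 bytes per frame;
   pcm16_stereo_bytes(48000)  == 192000 bytes for one second of audio.     */
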
 
@@ -860,37 +1573,41 @@ void SCR_CaptureVideo(void)
        int newframenum;
        if (cl_capturevideo.integer && r_render.integer)
        {
-               if (!cls.capturevideo_active)
+               if (!cls.capturevideo.active)
                        SCR_CaptureVideo_BeginVideo();
-               if (cls.capturevideo_framerate != cl_capturevideo_fps.value)
+               if (cls.capturevideo.framerate != cl_capturevideo_fps.value)
                {
                        Con_Printf("You can not change the video framerate while recording a video.\n");
-                       Cvar_SetValueQuick(&cl_capturevideo_fps, cls.capturevideo_framerate);
+                       Cvar_SetValueQuick(&cl_capturevideo_fps, cls.capturevideo.framerate);
                }
-               if (cls.capturevideo_soundfile)
+               // for AVI saving we have to make sure that sound is saved before video
+               if (cls.capturevideo.soundrate && !cls.capturevideo.soundsampleframe)
+                       return;
+               if (cls.capturevideo.realtime)
                {
                        // preserve sound sync by duplicating frames when running slow
-                       newframenum = (int)((Sys_DoubleTime() - cls.capturevideo_starttime) * cls.capturevideo_framerate);
+                       newframenum = (int)((realtime - cls.capturevideo.starttime) * cls.capturevideo.framerate);
                }
                else
-                       newframenum = cls.capturevideo_frame + 1;
+                       newframenum = cls.capturevideo.frame + 1;
                // if falling behind more than one second, stop
-               if (newframenum - cls.capturevideo_frame > (int)ceil(cls.capturevideo_framerate))
+               if (newframenum - cls.capturevideo.frame > (int)ceil(cls.capturevideo.framerate))
                {
                        Cvar_SetValueQuick(&cl_capturevideo, 0);
-                       Con_Printf("video saving failed on frame %i, your machine is too slow for this capture speed.\n", cls.capturevideo_frame);
+                       Con_Printf("video saving failed on frame %i, your machine is too slow for this capture speed.\n", cls.capturevideo.frame);
                        SCR_CaptureVideo_EndVideo();
                        return;
                }
                // write frames
-               if (!SCR_CaptureVideo_VideoFrame(newframenum))
+               SCR_CaptureVideo_VideoFrame(newframenum);
+               if (cls.capturevideo.error)
                {
                        Cvar_SetValueQuick(&cl_capturevideo, 0);
-                       Con_Printf("video saving failed on frame %i, out of disk space? stopping video capture.\n", cls.capturevideo_frame);
+                       Con_Printf("video saving failed on frame %i, out of disk space? stopping video capture.\n", cls.capturevideo.frame);
                        SCR_CaptureVideo_EndVideo();
                }
        }
-       else if (cls.capturevideo_active)
+       else if (cls.capturevideo.active)
                SCR_CaptureVideo_EndVideo();
 }
 
@@ -961,6 +1678,7 @@ static void R_Envmap_f (void)
        r_view.width = size;
        r_view.height = size;
        r_view.depth = 1;
+       r_view.useperspective = true;
 
        r_view.frustum_x = tan(90 * M_PI / 360.0);
        r_view.frustum_y = tan(90 * M_PI / 360.0);
@@ -989,36 +1707,22 @@ static void R_Envmap_f (void)
 
 //=============================================================================
 
-// LordHavoc: SHOWLMP stuff
-#define SHOWLMP_MAXLABELS 256
-typedef struct showlmp_s
-{
-       qboolean        isactive;
-       float           x;
-       float           y;
-       char            label[32];
-       char            pic[128];
-}
-showlmp_t;
-
-showlmp_t showlmp[SHOWLMP_MAXLABELS];
-
 void SHOWLMP_decodehide(void)
 {
        int i;
        char *lmplabel;
        lmplabel = MSG_ReadString();
-       for (i = 0;i < SHOWLMP_MAXLABELS;i++)
-               if (showlmp[i].isactive && strcmp(showlmp[i].label, lmplabel) == 0)
+       for (i = 0;i < cl.num_showlmps;i++)
+               if (cl.showlmps[i].isactive && strcmp(cl.showlmps[i].label, lmplabel) == 0)
                {
-                       showlmp[i].isactive = false;
+                       cl.showlmps[i].isactive = false;
                        return;
                }
 }
 
 void SHOWLMP_decodeshow(void)
 {
-       int i, k;
+       int k;
        char lmplabel[256], picname[256];
        float x, y;
        strlcpy (lmplabel,MSG_ReadString(), sizeof (lmplabel));
@@ -1033,41 +1737,37 @@ void SHOWLMP_decodeshow(void)
                x = MSG_ReadShort();
                y = MSG_ReadShort();
        }
-       k = -1;
-       for (i = 0;i < SHOWLMP_MAXLABELS;i++)
-               if (showlmp[i].isactive)
-               {
-                       if (strcmp(showlmp[i].label, lmplabel) == 0)
-                       {
-                               k = i;
-                               break; // drop out to replace it
-                       }
-               }
-               else if (k < 0) // find first empty one to replace
-                       k = i;
-       if (k < 0)
-               return; // none found to replace
-       // change existing one
-       showlmp[k].isactive = true;
-       strlcpy (showlmp[k].label, lmplabel, sizeof (showlmp[k].label));
-       strlcpy (showlmp[k].pic, picname, sizeof (showlmp[k].pic));
-       showlmp[k].x = x;
-       showlmp[k].y = y;
+       if (!cl.showlmps || cl.num_showlmps >= cl.max_showlmps)
+       {
+               showlmp_t *oldshowlmps = cl.showlmps;
+               cl.max_showlmps += 16;
+               cl.showlmps = Mem_Alloc(cls.levelmempool, cl.max_showlmps * sizeof(showlmp_t));
+               if (cl.num_showlmps)
+                       memcpy(cl.showlmps, oldshowlmps, cl.num_showlmps * sizeof(showlmp_t));
+               if (oldshowlmps)
+                       Mem_Free(oldshowlmps);
+       }
+       for (k = 0;k < cl.max_showlmps;k++)
+               if (cl.showlmps[k].isactive && !strcmp(cl.showlmps[k].label, lmplabel))
+                       break;
+       if (k == cl.max_showlmps)
+               for (k = 0;k < cl.max_showlmps;k++)
+                       if (!cl.showlmps[k].isactive)
+                               break;
+       cl.showlmps[k].isactive = true;
+       strlcpy (cl.showlmps[k].label, lmplabel, sizeof (cl.showlmps[k].label));
+       strlcpy (cl.showlmps[k].pic, picname, sizeof (cl.showlmps[k].pic));
+       cl.showlmps[k].x = x;
+       cl.showlmps[k].y = y;
+       cl.num_showlmps = max(cl.num_showlmps, k + 1);
 }
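
The rewritten SHOWLMP code above replaces the old fixed array of 256 slots (which had to be scanned in full every frame) with a cl.showlmps array that is grown in steps of 16 as labels arrive and is allocated from cls.levelmempool, so it is released with the level. The sketch below is only the generic grow-and-copy idiom that block uses, with stdlib allocation standing in for Mem_Alloc/Mem_Free:

#include <stdlib.h>
#include <string.h>

/* Generic grow-and-copy idiom (stdlib in place of the engine's allocator). */
static void *grow_array(void *old, size_t usedcount, size_t *maxcount, size_t elemsize)
{
	void *bigger;
	*maxcount += 16;                  /* same growth step as the code above */
	bigger = calloc(*maxcount, elemsize);
	if (!bigger)
		abort();                      /* the engine would Sys_Error instead */
	if (usedcount)
		memcpy(bigger, old, usedcount * elemsize);
	free(old);                        /* free(NULL) is a harmless no-op */
	return bigger;
}
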
 
 void SHOWLMP_drawall(void)
 {
        int i;
-       for (i = 0;i < SHOWLMP_MAXLABELS;i++)
-               if (showlmp[i].isactive)
-                       DrawQ_Pic(showlmp[i].x, showlmp[i].y, Draw_CachePic(showlmp[i].pic, true), 0, 0, 1, 1, 1, 1, 0);
-}
-
-void SHOWLMP_clear(void)
-{
-       int i;
-       for (i = 0;i < SHOWLMP_MAXLABELS;i++)
-               showlmp[i].isactive = false;
+       for (i = 0;i < cl.num_showlmps;i++)
+               if (cl.showlmps[i].isactive)
+                       DrawQ_Pic(cl.showlmps[i].x, cl.showlmps[i].y, Draw_CachePic(cl.showlmps[i].pic, true), 0, 0, 1, 1, 1, 1, 0);
 }
 
 /*
@@ -1157,10 +1857,14 @@ void SCR_DrawScreen (void)
        R_Mesh_Start();
 
        if (r_timereport_active)
-               R_TimeReport("setup");
+               R_TimeReport("screensetup");
 
        R_UpdateVariables();
 
+       // Quake uses clockwise winding, so these are swapped
+       r_view.cullface_front = GL_BACK;
+       r_view.cullface_back = GL_FRONT;
+
        if (cls.signon == SIGNONS)
        {
                float size;
@@ -1195,7 +1899,8 @@ void SCR_DrawScreen (void)
                // this simply assumes the requested fov is the vertical fov
                // for a 4x3 display, if the ratio is not 4x3 this makes the fov
                // higher/lower according to the ratio
-               r_view.frustum_y = tan(scr_fov.value * cl.viewzoom * M_PI / 360.0) * (3.0/4.0);
+               r_view.useperspective = true;
+               r_view.frustum_y = tan(scr_fov.value * M_PI / 360.0) * (3.0/4.0) * cl.viewzoom;
                r_view.frustum_x = r_view.frustum_y * (float)r_view.width / (float)r_view.height / vid_pixelheight.value;
 
                r_view.frustum_x *= r_refdef.frustumscale_x;
@@ -1203,8 +1908,6 @@ void SCR_DrawScreen (void)
 
                if(!CL_VM_UpdateView())
                        R_RenderView();
-               else
-                       SCR_DrawConsole();
 
                if (scr_zoomwindow.integer)
                {
@@ -1217,7 +1920,8 @@ void SCR_DrawScreen (void)
                        r_view.y = 0;
                        r_view.z = 0;
 
-                       r_view.frustum_y = tan(scr_zoomwindow_fov.value * cl.viewzoom * M_PI / 360.0) * (3.0/4.0);
+                       r_view.useperspective = true;
+                       r_view.frustum_y = tan(scr_zoomwindow_fov.value * M_PI / 360.0) * (3.0/4.0) * cl.viewzoom;
                        r_view.frustum_x = r_view.frustum_y * vid_pixelheight.value * (float)r_view.width / (float)r_view.height;
 
                        r_view.frustum_x *= r_refdef.frustumscale_x;
@@ -1236,13 +1940,10 @@ void SCR_DrawScreen (void)
                r_view.x = 0;
                r_view.y = 0;
                r_view.z = 0;
+               r_view.useperspective = false;
        }
 
        // draw 2D stuff
-       DrawQ_Begin();
-
-       //FIXME: force menu if nothing else to look at?
-       //if (key_dest == key_game && cls.signon != SIGNONS && cls.state == ca_disconnected)
 
        if (cls.signon == SIGNONS)
        {
@@ -1254,12 +1955,12 @@ void SCR_DrawScreen (void)
                SHOWLMP_drawall();
                SCR_CheckDrawCenterString();
        }
+       SCR_DrawNetGraph ();
        MR_Draw();
        CL_DrawVideo();
        R_Shadow_EditLights_DrawSelectedLightProperties();
 
-       if(!csqc_loaded)
-               SCR_DrawConsole();
+       SCR_DrawConsole();
 
        SCR_DrawBrand();
 
@@ -1281,23 +1982,26 @@ void SCR_DrawScreen (void)
                R_TimeReport("meshfinish");
 }
 
-void SCR_UpdateLoadingScreen (void)
+void SCR_UpdateLoadingScreen (qboolean clear)
 {
        float x, y;
        cachepic_t *pic;
        float vertex3f[12];
        float texcoord2f[8];
        // don't do anything if not initialized yet
-       if (vid_hidden)
+       if (vid_hidden || !scr_refresh.integer)
                return;
        CHECKGLERROR
        qglViewport(0, 0, vid.width, vid.height);CHECKGLERROR
        //qglDisable(GL_SCISSOR_TEST);CHECKGLERROR
        //qglDepthMask(1);CHECKGLERROR
        qglColorMask(1,1,1,1);CHECKGLERROR
-       //qglClearColor(0,0,0,0);CHECKGLERROR
-       //qglClear(GL_COLOR_BUFFER_BIT);CHECKGLERROR
-       //qglCullFace(GL_FRONT);CHECKGLERROR
+       qglClearColor(0,0,0,0);CHECKGLERROR
+       // when starting up a new video mode, make sure the screen is cleared to black
+       if (clear)
+       {
+               qglClear(GL_COLOR_BUFFER_BIT);CHECKGLERROR
+       }
        //qglDisable(GL_CULL_FACE);CHECKGLERROR
        //R_ClearScreen();
        R_Textures_Frame();
@@ -1310,12 +2014,14 @@ void SCR_UpdateLoadingScreen (void)
        y = (vid_conheight.integer - pic->height)/2;
        GL_Color(1,1,1,1);
        GL_BlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
+       GL_DepthRange(0, 1);
+       GL_PolygonOffset(0, 0);
        GL_DepthTest(false);
-       R_Mesh_VertexPointer(vertex3f);
-       R_Mesh_ColorPointer(NULL);
+       R_Mesh_VertexPointer(vertex3f, 0, 0);
+       R_Mesh_ColorPointer(NULL, 0, 0);
        R_Mesh_ResetTextureState();
        R_Mesh_TexBind(0, R_GetTexture(pic->tex));
-       R_Mesh_TexCoordPointer(0, 2, texcoord2f);
+       R_Mesh_TexCoordPointer(0, 2, texcoord2f, 0, 0);
        vertex3f[2] = vertex3f[5] = vertex3f[8] = vertex3f[11] = 0;
        vertex3f[0] = vertex3f[9] = x;
        vertex3f[1] = vertex3f[4] = y;
@@ -1325,31 +2031,36 @@ void SCR_UpdateLoadingScreen (void)
        texcoord2f[2] = 1;texcoord2f[3] = 0;
        texcoord2f[4] = 1;texcoord2f[5] = 1;
        texcoord2f[6] = 0;texcoord2f[7] = 1;
-       R_Mesh_Draw(0, 4, 2, polygonelements);
+       if (vid.stereobuffer)
+       {
+               qglDrawBuffer(GL_FRONT_LEFT);
+               R_Mesh_Draw(0, 4, 2, polygonelements, 0, 0);
+               qglDrawBuffer(GL_FRONT_RIGHT);
+               R_Mesh_Draw(0, 4, 2, polygonelements, 0, 0);
+       }
+       else
+       {
+               qglDrawBuffer(GL_FRONT);
+               R_Mesh_Draw(0, 4, 2, polygonelements, 0, 0);
+       }
        R_Mesh_Finish();
        // refresh
-       VID_Finish(false);
+       // not necessary when rendering to GL_FRONT buffers
+       //VID_Finish(false);
+       // however this IS necessary on Windows Vista
+       qglFinish();
 }
 
 void CL_UpdateScreen(void)
 {
        float conwidth, conheight;
 
-       if (vid_hidden)
+       if (vid_hidden || !scr_refresh.integer)
                return;
 
-       if (!scr_initialized || !con_initialized || vid_hidden)
+       if (!scr_initialized || !con_initialized)
                return;                         // not initialized yet
 
-       // don't allow cheats in multiplayer
-       if (!cl.islocalgame && cl.worldmodel)
-       {
-               if (r_fullbright.integer != 0)
-                       Cvar_Set ("r_fullbright", "0");
-               if (r_ambient.value != 0)
-                       Cvar_Set ("r_ambient", "0");
-       }
-
        conwidth = bound(320, vid_conwidth.value, 2048);
        conheight = bound(200, vid_conheight.value, 1536);
        if (vid_conwidth.value != conwidth)
@@ -1410,17 +2121,42 @@ void CL_UpdateScreen(void)
        qglDepthMask(1);CHECKGLERROR
        qglColorMask(1,1,1,1);CHECKGLERROR
        qglClearColor(0,0,0,0);CHECKGLERROR
-       qglClear(GL_COLOR_BUFFER_BIT);CHECKGLERROR
+       R_ClearScreen();
+       r_view.clear = false;
+
+       if(scr_stipple.integer)
+       {
+               GLubyte stipple[128];
+               int i, s, width, parts;
+               static int frame = 0;
+               ++frame;
+
+               s = scr_stipple.integer;
+               parts = (s & 007);
+               width = (s & 070) >> 3;
+
+               qglEnable(GL_POLYGON_STIPPLE); // 0x0B42
+               for(i = 0; i < 128; ++i)
+               {
+                       int line = i/4;
+                       stipple[i] = (((line >> width) + frame) & ((1 << parts) - 1)) ? 0x00 : 0xFF;
+               }
+               qglPolygonStipple(stipple);
+       }
+       else
+               qglDisable(GL_POLYGON_STIPPLE);
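
scr_stipple packs two small fields into one value, which is why it reads most naturally in octal: the low digit (parts) means only one out of every 1<<parts groups of stipple rows is drawn, the next digit (width) sets how many adjacent rows form a group (1<<width), and adding frame makes the drawn group scroll by one each frame. A standalone decode of an example value of 2 (parts=2, width=0):

#include <stdio.h>

/* Decode of scr_stipple for an example value; 2 means parts=2, width=0, so
   one of every four stipple rows is drawn and the pattern scrolls per frame. */
int main(void)
{
	int s = 2, frame = 0, line;
	int parts = (s & 007);        /* low octal digit */
	int width = (s & 070) >> 3;   /* next octal digit */
	for (line = 0; line < 8; line++)
	{
		int drawn = ((((line >> width) + frame) & ((1 << parts) - 1)) == 0);
		printf("stipple row %d: %s\n", line, drawn ? "drawn" : "masked");
	}
	return 0;
}
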
 
        if (r_timereport_active)
-               R_TimeReport("clear");
+               R_TimeReport("screenclear");
+
+       qglDrawBuffer(GL_BACK);
 
-       if (r_stereo_redblue.integer || r_stereo_redgreen.integer || r_stereo_redcyan.integer || r_stereo_sidebyside.integer)
+       if (vid.stereobuffer || r_stereo_redblue.integer || r_stereo_redgreen.integer || r_stereo_redcyan.integer || r_stereo_sidebyside.integer)
        {
                matrix4x4_t originalmatrix = r_view.matrix;
-               r_view.matrix.m[0][3] = originalmatrix.m[0][3] + r_stereo_separation.value * -0.5f * r_view.matrix.m[0][1];
-               r_view.matrix.m[1][3] = originalmatrix.m[1][3] + r_stereo_separation.value * -0.5f * r_view.matrix.m[1][1];
-               r_view.matrix.m[2][3] = originalmatrix.m[2][3] + r_stereo_separation.value * -0.5f * r_view.matrix.m[2][1];
+               matrix4x4_t offsetmatrix;
+               Matrix4x4_CreateFromQuakeEntity(&offsetmatrix, 0, r_stereo_separation.value * 0.5f, 0, 0, r_stereo_angle.value * 0.5f, 0, 1);
+               Matrix4x4_Concat(&r_view.matrix, &originalmatrix, &offsetmatrix);
 
                if (r_stereo_sidebyside.integer)
                        r_stereo_side = 0;
@@ -1432,11 +2168,13 @@ void CL_UpdateScreen(void)
                        r_view.colormask[2] = 0;
                }
 
+               if (vid.stereobuffer)
+                       qglDrawBuffer(GL_BACK_RIGHT);
+
                SCR_DrawScreen();
 
-               r_view.matrix.m[0][3] = originalmatrix.m[0][3] + r_stereo_separation.value * 0.5f * r_view.matrix.m[0][1];
-               r_view.matrix.m[1][3] = originalmatrix.m[1][3] + r_stereo_separation.value * 0.5f * r_view.matrix.m[1][1];
-               r_view.matrix.m[2][3] = originalmatrix.m[2][3] + r_stereo_separation.value * 0.5f * r_view.matrix.m[2][1];
+               Matrix4x4_CreateFromQuakeEntity(&offsetmatrix, 0, r_stereo_separation.value * -0.5f, 0, 0, r_stereo_angle.value * -0.5f, 0, 1);
+               Matrix4x4_Concat(&r_view.matrix, &originalmatrix, &offsetmatrix);
 
                if (r_stereo_sidebyside.integer)
                        r_stereo_side = 1;
@@ -1448,6 +2186,9 @@ void CL_UpdateScreen(void)
                        r_view.colormask[2] = r_stereo_redcyan.integer || r_stereo_redblue.integer;
                }
 
+               if (vid.stereobuffer)
+                       qglDrawBuffer(GL_BACK_LEFT);
+
                SCR_DrawScreen();
 
                r_view.matrix = originalmatrix;
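
Where the old code nudged the view matrix translation by hand, the new code builds an offset transform from half of r_stereo_separation (sideways displacement) and half of r_stereo_angle (toe-in), concatenates it onto the original view matrix, and negates both for the second pass, so the two rendered views straddle the original camera. The snippet below only prints those mirrored per-pass parameters; the cvar values are invented examples and the engine's Matrix4x4 helpers are deliberately left out:

#include <stdio.h>

/* Mirrored per-eye parameters used by the two SCR_DrawScreen passes above
   (example cvar values; the actual matrix math lives in the engine). */
int main(void)
{
	double separation = 4.0, angle = 2.0; /* example r_stereo_* values */
	int pass;
	for (pass = 0; pass < 2; pass++)
	{
		double sign = (pass == 0) ? 1.0 : -1.0;
		printf("pass %d: sideways offset %+.1f units, toe-in %+.1f degrees\n",
		       pass, sign * 0.5 * separation, sign * 0.5 * angle);
	}
	return 0;
}
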
@@ -1464,5 +2205,4 @@ void CL_UpdateScreen(void)
 
 void CL_Screen_NewMap(void)
 {
-       SHOWLMP_clear();
 }