+ int nMatching;
+ int i;
+ fs_offset_t ix = SCR_CaptureVideo_RIFF_GetPosition();
+ fs_offset_t pos;
+
+ if(*masteridx_count >= AVI_MASTER_INDEX_SIZE)
+ return;
+
+ nMatching = 0; // go through index and enumerate them
+ for(i = 0; i < cls.capturevideo.riffindexbuffer.cursize; i += 16)
+ if(!memcmp(cls.capturevideo.riffindexbuffer.data + i, dwChunkId, 4))
+ ++nMatching;
+
+ SCR_CaptureVideo_RIFF_Push(fcc, NULL);
+ SCR_CaptureVideo_RIFF_Write16(2); // wLongsPerEntry
+ SCR_CaptureVideo_RIFF_Write16(0x0100); // bIndexType=1, bIndexSubType=0
+ SCR_CaptureVideo_RIFF_Write32(nMatching); // nEntriesInUse
+ SCR_CaptureVideo_RIFF_WriteFourCC(dwChunkId); // dwChunkId
+ SCR_CaptureVideo_RIFF_Write32(cls.capturevideo.videofile_ix_movistart & (fs_offset_t) 0xFFFFFFFFu);
+ SCR_CaptureVideo_RIFF_Write32(((long long) cls.capturevideo.videofile_ix_movistart) >> 32);
+ SCR_CaptureVideo_RIFF_Write32(0); // dwReserved
+
+ for(i = 0; i < cls.capturevideo.riffindexbuffer.cursize; i += 16)
+ if(!memcmp(cls.capturevideo.riffindexbuffer.data + i, dwChunkId, 4))
+ {
+ unsigned int *p = (unsigned int *) (cls.capturevideo.riffindexbuffer.data + i);
+ unsigned int flags = p[1];
+ unsigned int rpos = p[2];
+ unsigned int size = p[3];
+ size &= ~0x80000000;
+ if(!(flags & 0x10)) // no keyframe?
+ size |= 0x80000000;
+ SCR_CaptureVideo_RIFF_Write32(rpos + 8);
+ SCR_CaptureVideo_RIFF_Write32(size);
+ }
+
+ SCR_CaptureVideo_RIFF_Pop();
+ pos = SCR_CaptureVideo_RIFF_GetPosition();
+ SCR_CaptureVideo_RIFF_Flush();
+
+ FS_Seek(cls.capturevideo.videofile, masteridx_start + 16 * *masteridx_count, SEEK_SET);
+ SCR_CaptureVideo_RIFF_Write32(ix & (fs_offset_t) 0xFFFFFFFFu);
+ SCR_CaptureVideo_RIFF_Write32(((long long) ix) >> 32);
+ SCR_CaptureVideo_RIFF_Write32(pos - ix);
+ SCR_CaptureVideo_RIFF_Write32(nMatching);
+ SCR_CaptureVideo_RIFF_Flush();
+
+ FS_Seek(cls.capturevideo.videofile, masteridx_counter, SEEK_SET);
+ SCR_CaptureVideo_RIFF_Write32(++*masteridx_count);
+ SCR_CaptureVideo_RIFF_Flush();
+
+ FS_Seek(cls.capturevideo.videofile, 0, SEEK_END);
+}
+
// Closes out the current RIFF section of the AVI being captured: pops the
// open "movi" list, appends OpenDML ix## standard-index chunks for streams
// that reserved a master index in the header, optionally writes the legacy
// idx1 index, and back-patches the total-frames field in the avih header.
// final: true when the capture ends for good; false when this section is
// only being closed so a new "RIFF AVIX" extension section can follow
// (idx1 is only defined for the first "RIFF AVI " section).
static void SCR_CaptureVideo_RIFF_Finish(qboolean final)
{
	// close the "movi" list
	SCR_CaptureVideo_RIFF_Pop();
	// emit an OpenDML standard index chunk per stream (ix00 video, ix01
	// audio) and register it in the master "indx" reserved in the header;
	// the *_inuse_offset fields are nonzero only if such an indx exists
	if(cls.capturevideo.videofile_ix_master_video_inuse_offset)
		SCR_CaptureVideo_RIFF_MakeIxChunk("ix00", "00dc", cls.capturevideo.videofile_ix_master_video_inuse_offset, &cls.capturevideo.videofile_ix_master_video_inuse, cls.capturevideo.videofile_ix_master_video_start_offset);
	if(cls.capturevideo.videofile_ix_master_audio_inuse_offset)
		SCR_CaptureVideo_RIFF_MakeIxChunk("ix01", "01wb", cls.capturevideo.videofile_ix_master_audio_inuse_offset, &cls.capturevideo.videofile_ix_master_audio_inuse, cls.capturevideo.videofile_ix_master_audio_start_offset);
	// write the idx1 chunk that we've been building while saving the frames (for old style players)
	if(final && cls.capturevideo.videofile_firstchunkframes_offset)
	// TODO replace index creating by OpenDML ix##/##ix/indx chunk so it works for more than one AVI part too
	{
		SCR_CaptureVideo_RIFF_Push("idx1", NULL);
		SCR_CaptureVideo_RIFF_WriteBytes(cls.capturevideo.riffindexbuffer.data, cls.capturevideo.riffindexbuffer.cursize);
		SCR_CaptureVideo_RIFF_Pop();
	}
	// the index entries have been consumed; restart the buffer for the next section
	cls.capturevideo.riffindexbuffer.cursize = 0;
	// pop the RIFF chunk itself
	while (cls.capturevideo.riffstacklevel > 0)
		SCR_CaptureVideo_RIFF_Pop();
	SCR_CaptureVideo_RIFF_Flush();
	// patch the "total frames" in the avih header; only the first RIFF
	// section has one (nonzero offset), later AVIX sections do not
	if(cls.capturevideo.videofile_firstchunkframes_offset)
	{
		Con_DPrintf("Finishing first chunk (%d frames)\n", cls.capturevideo.frame);
		FS_Seek(cls.capturevideo.videofile, cls.capturevideo.videofile_firstchunkframes_offset, SEEK_SET);
		SCR_CaptureVideo_RIFF_Write32(cls.capturevideo.frame);
		SCR_CaptureVideo_RIFF_Flush();
		FS_Seek(cls.capturevideo.videofile, 0, SEEK_END);
		// zero marks the header as patched so later sections skip this
		cls.capturevideo.videofile_firstchunkframes_offset = 0;
	}
	else
		Con_DPrintf("Finishing another chunk (%d frames)\n", cls.capturevideo.frame);
}
+
+static void SCR_CaptureVideo_RIFF_OverflowCheck(int framesize)
+{
+ fs_offset_t cursize, curfilesize;
+ if (cls.capturevideo.riffstacklevel != 2)
+ Sys_Error("SCR_CaptureVideo_RIFF_OverflowCheck: chunk stack leakage!\n");
+ // check where we are in the file
+ SCR_CaptureVideo_RIFF_Flush();
+ cursize = SCR_CaptureVideo_RIFF_GetPosition() - cls.capturevideo.riffstackstartoffset[0];
+ curfilesize = SCR_CaptureVideo_RIFF_GetPosition();
+
+ // if this would overflow the windows limit of 1GB per RIFF chunk, we need
+ // to close the current RIFF chunk and open another for future frames
+ if (8 + cursize + framesize + cls.capturevideo.riffindexbuffer.cursize + 8 + cls.capturevideo.riffindexbuffer.cursize + 64 > 1<<30) // note that the Ix buffer takes less space... I just don't dare to / 2 here now... sorry, maybe later
+ {
+ SCR_CaptureVideo_RIFF_Finish(false);
+ // begin a new 1GB extended section of the AVI
+ SCR_CaptureVideo_RIFF_Push("RIFF", "AVIX");
+ SCR_CaptureVideo_RIFF_Push("LIST", "movi");
+ cls.capturevideo.videofile_ix_movistart = cls.capturevideo.riffstackstartoffset[1];
+ }
+}
+
+static void FindFraction(double val, int *num, int *denom, int denomMax)
+{
+ int i;
+ double bestdiff;
+ // initialize
+ bestdiff = fabs(val);
+ *num = 0;
+ *denom = 1;
+
+ for(i = 1; i <= denomMax; ++i)
+ {
+ int inum = floor(0.5 + val * i);
+ double diff = fabs(val - inum / (double)i);
+ if(diff < bestdiff)
+ {
+ bestdiff = diff;
+ *num = inum;
+ *denom = i;
+ }
+ }
+}
+
+void SCR_CaptureVideo_BeginVideo(void)
+{
+ double gamma, g, aspect;
+ int width = cl_capturevideo_width.integer, height = cl_capturevideo_height.integer;
+ int n, d;
+ unsigned int i;
+ if (cls.capturevideo.active)
+ return;
+ memset(&cls.capturevideo, 0, sizeof(cls.capturevideo));
+ // soundrate is figured out on the first SoundFrame
+
+ if(width == 0 && height != 0)
+ width = (int) (height * (double)vid.width / ((double)vid.height * vid_pixelheight.value)); // keep aspect
+ if(width != 0 && height == 0)
+ height = (int) (width * ((double)vid.height * vid_pixelheight.value) / (double)vid.width); // keep aspect
+
+ if(width < 2 || width > vid.width) // can't scale up
+ width = vid.width;
+ if(height < 2 || height > vid.height) // can't scale up
+ height = vid.height;
+
+ aspect = vid.width / (vid.height * vid_pixelheight.value);
+
+ // ensure it's all even; if not, scale down a little
+ if(width % 1)
+ --width;
+ if(height % 1)
+ --height;
+
+ cls.capturevideo.width = width;
+ cls.capturevideo.height = height;
+ cls.capturevideo.active = true;
+ cls.capturevideo.starttime = realtime;
+ cls.capturevideo.framerate = bound(1, cl_capturevideo_fps.value, 1000);
+ cls.capturevideo.soundrate = S_GetSoundRate();
+ cls.capturevideo.frame = 0;
+ cls.capturevideo.soundsampleframe = 0;
+ cls.capturevideo.realtime = cl_capturevideo_realtime.integer != 0;
+ cls.capturevideo.screenbuffer = (unsigned char *)Mem_Alloc(tempmempool, vid.width * vid.height * 3);
+ cls.capturevideo.outbuffer = (unsigned char *)Mem_Alloc(tempmempool, width * height * (3+3+3) + 18);
+ gamma = 1.0/scr_screenshot_gammaboost.value;
+ dpsnprintf(cls.capturevideo.basename, sizeof(cls.capturevideo.basename), "video/dpvideo%03i", cl_capturevideo_number.integer);
+ Cvar_SetValueQuick(&cl_capturevideo_number, cl_capturevideo_number.integer + 1);
+
+ /*
+ for (i = 0;i < 256;i++)
+ {
+ unsigned char j = (unsigned char)bound(0, 255*pow(i/255.0, gamma), 255);
+ cls.capturevideo.rgbgammatable[0][i] = j;
+ cls.capturevideo.rgbgammatable[1][i] = j;
+ cls.capturevideo.rgbgammatable[2][i] = j;
+ }
+ */
+/*
+R = Y + 1.4075 * (Cr - 128);
+G = Y + -0.3455 * (Cb - 128) + -0.7169 * (Cr - 128);
+B = Y + 1.7790 * (Cb - 128);
+Y = R * .299 + G * .587 + B * .114;
+Cb = R * -.169 + G * -.332 + B * .500 + 128.;
+Cr = R * .500 + G * -.419 + B * -.0813 + 128.;
+*/
+ for (i = 0;i < 256;i++)
+ {
+ g = 255*pow(i/255.0, gamma);
+ // Y weights from RGB
+ cls.capturevideo.rgbtoyuvscaletable[0][0][i] = (short)(g * 0.299);
+ cls.capturevideo.rgbtoyuvscaletable[0][1][i] = (short)(g * 0.587);
+ cls.capturevideo.rgbtoyuvscaletable[0][2][i] = (short)(g * 0.114);
+ // Cb weights from RGB
+ cls.capturevideo.rgbtoyuvscaletable[1][0][i] = (short)(g * -0.169);
+ cls.capturevideo.rgbtoyuvscaletable[1][1][i] = (short)(g * -0.332);
+ cls.capturevideo.rgbtoyuvscaletable[1][2][i] = (short)(g * 0.500);
+ // Cr weights from RGB
+ cls.capturevideo.rgbtoyuvscaletable[2][0][i] = (short)(g * 0.500);
+ cls.capturevideo.rgbtoyuvscaletable[2][1][i] = (short)(g * -0.419);
+ cls.capturevideo.rgbtoyuvscaletable[2][2][i] = (short)(g * -0.0813);
+ // range reduction of YCbCr to valid signal range
+ cls.capturevideo.yuvnormalizetable[0][i] = 16 + i * (236-16) / 256;
+ cls.capturevideo.yuvnormalizetable[1][i] = 16 + i * (240-16) / 256;
+ cls.capturevideo.yuvnormalizetable[2][i] = 16 + i * (240-16) / 256;
+ }
+
+ //if (cl_capturevideo_)
+ //{
+ //}
+ //else
+ {
+ cls.capturevideo.format = CAPTUREVIDEOFORMAT_AVI_I420;
+ cls.capturevideo.videofile = FS_Open (va("%s.avi", cls.capturevideo.basename), "wb", false, true);
+ SCR_CaptureVideo_RIFF_Start();
+ // enclosing RIFF chunk (there can be multiple of these in >1GB files, the later ones are "AVIX" instead of "AVI " and have no header/stream info)
+ SCR_CaptureVideo_RIFF_Push("RIFF", "AVI ");
+ // AVI main header
+ SCR_CaptureVideo_RIFF_Push("LIST", "hdrl");
+ SCR_CaptureVideo_RIFF_Push("avih", NULL);
+ SCR_CaptureVideo_RIFF_Write32((int)(1000000.0 / cls.capturevideo.framerate)); // microseconds per frame
+ SCR_CaptureVideo_RIFF_Write32(0); // max bytes per second
+ SCR_CaptureVideo_RIFF_Write32(0); // padding granularity
+ SCR_CaptureVideo_RIFF_Write32(0x910); // flags (AVIF_HASINDEX | AVIF_ISINTERLEAVED | AVIF_TRUSTCKTYPE)
+ cls.capturevideo.videofile_firstchunkframes_offset = SCR_CaptureVideo_RIFF_GetPosition();
+ SCR_CaptureVideo_RIFF_Write32(0); // total frames
+ SCR_CaptureVideo_RIFF_Write32(0); // initial frames
+ if (cls.capturevideo.soundrate)
+ SCR_CaptureVideo_RIFF_Write32(2); // number of streams
+ else
+ SCR_CaptureVideo_RIFF_Write32(1); // number of streams
+ SCR_CaptureVideo_RIFF_Write32(0); // suggested buffer size
+ SCR_CaptureVideo_RIFF_Write32(width); // width
+ SCR_CaptureVideo_RIFF_Write32(height); // height
+ SCR_CaptureVideo_RIFF_Write32(0); // reserved[0]
+ SCR_CaptureVideo_RIFF_Write32(0); // reserved[1]
+ SCR_CaptureVideo_RIFF_Write32(0); // reserved[2]
+ SCR_CaptureVideo_RIFF_Write32(0); // reserved[3]
+ SCR_CaptureVideo_RIFF_Pop();
+ // video stream info
+ SCR_CaptureVideo_RIFF_Push("LIST", "strl");
+ SCR_CaptureVideo_RIFF_Push("strh", "vids");
+ SCR_CaptureVideo_RIFF_WriteFourCC("I420"); // stream fourcc (I420 colorspace, uncompressed)
+ SCR_CaptureVideo_RIFF_Write32(0); // flags
+ SCR_CaptureVideo_RIFF_Write16(0); // priority
+ SCR_CaptureVideo_RIFF_Write16(0); // language
+ SCR_CaptureVideo_RIFF_Write32(0); // initial frames
+ // find an ideal divisor for the framerate
+ FindFraction(cls.capturevideo.framerate, &n, &d, 1000);
+ SCR_CaptureVideo_RIFF_Write32(d); // samples/second divisor
+ SCR_CaptureVideo_RIFF_Write32(n); // samples/second multiplied by divisor
+ SCR_CaptureVideo_RIFF_Write32(0); // start
+ cls.capturevideo.videofile_totalframes_offset1 = SCR_CaptureVideo_RIFF_GetPosition();
+ SCR_CaptureVideo_RIFF_Write32(0); // length
+ SCR_CaptureVideo_RIFF_Write32(width*height+(width/2)*(height/2)*2); // suggested buffer size
+ SCR_CaptureVideo_RIFF_Write32(0); // quality
+ SCR_CaptureVideo_RIFF_Write32(0); // sample size
+ SCR_CaptureVideo_RIFF_Write16(0); // frame left
+ SCR_CaptureVideo_RIFF_Write16(0); // frame top
+ SCR_CaptureVideo_RIFF_Write16(width); // frame right
+ SCR_CaptureVideo_RIFF_Write16(height); // frame bottom
+ SCR_CaptureVideo_RIFF_Pop();
+ // video stream format
+ SCR_CaptureVideo_RIFF_Push("strf", NULL);
+ SCR_CaptureVideo_RIFF_Write32(40); // BITMAPINFO struct size
+ SCR_CaptureVideo_RIFF_Write32(width); // width
+ SCR_CaptureVideo_RIFF_Write32(height); // height
+ SCR_CaptureVideo_RIFF_Write16(3); // planes
+ SCR_CaptureVideo_RIFF_Write16(12); // bitcount
+ SCR_CaptureVideo_RIFF_WriteFourCC("I420"); // compression
+ SCR_CaptureVideo_RIFF_Write32(width*height+(width/2)*(height/2)*2); // size of image
+ SCR_CaptureVideo_RIFF_Write32(0); // x pixels per meter
+ SCR_CaptureVideo_RIFF_Write32(0); // y pixels per meter
+ SCR_CaptureVideo_RIFF_Write32(0); // color used
+ SCR_CaptureVideo_RIFF_Write32(0); // color important
+ SCR_CaptureVideo_RIFF_Pop();
+ // master index
+ SCR_CaptureVideo_RIFF_Push("indx", NULL);
+ SCR_CaptureVideo_RIFF_Write16(4); // wLongsPerEntry
+ SCR_CaptureVideo_RIFF_Write16(0); // bIndexSubType=0, bIndexType=0
+ cls.capturevideo.videofile_ix_master_video_inuse_offset = SCR_CaptureVideo_RIFF_GetPosition();
+ SCR_CaptureVideo_RIFF_Write32(0); // nEntriesInUse
+ SCR_CaptureVideo_RIFF_WriteFourCC("00dc"); // dwChunkId
+ SCR_CaptureVideo_RIFF_Write32(0); // dwReserved1
+ SCR_CaptureVideo_RIFF_Write32(0); // dwReserved2
+ SCR_CaptureVideo_RIFF_Write32(0); // dwReserved3
+ cls.capturevideo.videofile_ix_master_video_start_offset = SCR_CaptureVideo_RIFF_GetPosition();
+ for(i = 0; i < AVI_MASTER_INDEX_SIZE * 4; ++i)
+ SCR_CaptureVideo_RIFF_Write32(0); // fill up later
+ SCR_CaptureVideo_RIFF_Pop();
+ // extended format (aspect!)
+ SCR_CaptureVideo_RIFF_Push("vprp", NULL);
+ SCR_CaptureVideo_RIFF_Write32(0); // VideoFormatToken
+ SCR_CaptureVideo_RIFF_Write32(0); // VideoStandard
+ SCR_CaptureVideo_RIFF_Write32((int)cls.capturevideo.framerate); // dwVerticalRefreshRate (bogus)
+ SCR_CaptureVideo_RIFF_Write32(width); // dwHTotalInT
+ SCR_CaptureVideo_RIFF_Write32(height); // dwVTotalInLines
+ FindFraction(aspect, &n, &d, 1000);
+ SCR_CaptureVideo_RIFF_Write32((n << 16) | d); // dwFrameAspectRatio // TODO a word
+ SCR_CaptureVideo_RIFF_Write32(width); // dwFrameWidthInPixels
+ SCR_CaptureVideo_RIFF_Write32(height); // dwFrameHeightInLines
+ SCR_CaptureVideo_RIFF_Write32(1); // nFieldPerFrame
+ SCR_CaptureVideo_RIFF_Write32(width); // CompressedBMWidth
+ SCR_CaptureVideo_RIFF_Write32(height); // CompressedBMHeight
+ SCR_CaptureVideo_RIFF_Write32(width); // ValidBMHeight
+ SCR_CaptureVideo_RIFF_Write32(height); // ValidBMWidth
+ SCR_CaptureVideo_RIFF_Write32(0); // ValidBMXOffset
+ SCR_CaptureVideo_RIFF_Write32(0); // ValidBMYOffset
+ SCR_CaptureVideo_RIFF_Write32(0); // ValidBMXOffsetInT
+ SCR_CaptureVideo_RIFF_Write32(0); // ValidBMYValidStartLine
+ SCR_CaptureVideo_RIFF_Pop();
+ SCR_CaptureVideo_RIFF_Pop();
+ if (cls.capturevideo.soundrate)
+ {
+ // audio stream info
+ SCR_CaptureVideo_RIFF_Push("LIST", "strl");
+ SCR_CaptureVideo_RIFF_Push("strh", "auds");
+ SCR_CaptureVideo_RIFF_Write32(1); // stream fourcc (PCM audio, uncompressed)
+ SCR_CaptureVideo_RIFF_Write32(0); // flags
+ SCR_CaptureVideo_RIFF_Write16(0); // priority
+ SCR_CaptureVideo_RIFF_Write16(0); // language
+ SCR_CaptureVideo_RIFF_Write32(0); // initial frames
+ SCR_CaptureVideo_RIFF_Write32(1); // samples/second divisor
+ SCR_CaptureVideo_RIFF_Write32((int)(cls.capturevideo.soundrate)); // samples/second multiplied by divisor
+ SCR_CaptureVideo_RIFF_Write32(0); // start
+ cls.capturevideo.videofile_totalsampleframes_offset = SCR_CaptureVideo_RIFF_GetPosition();
+ SCR_CaptureVideo_RIFF_Write32(0); // length
+ SCR_CaptureVideo_RIFF_Write32(cls.capturevideo.soundrate * 2); // suggested buffer size (this is a half second)
+ SCR_CaptureVideo_RIFF_Write32(0); // quality
+ SCR_CaptureVideo_RIFF_Write32(4); // sample size
+ SCR_CaptureVideo_RIFF_Write16(0); // frame left
+ SCR_CaptureVideo_RIFF_Write16(0); // frame top
+ SCR_CaptureVideo_RIFF_Write16(0); // frame right
+ SCR_CaptureVideo_RIFF_Write16(0); // frame bottom
+ SCR_CaptureVideo_RIFF_Pop();
+ // audio stream format
+ SCR_CaptureVideo_RIFF_Push("strf", NULL);
+ SCR_CaptureVideo_RIFF_Write16(1); // format (uncompressed PCM?)
+ SCR_CaptureVideo_RIFF_Write16(2); // channels (stereo)
+ SCR_CaptureVideo_RIFF_Write32(cls.capturevideo.soundrate); // sampleframes per second
+ SCR_CaptureVideo_RIFF_Write32(cls.capturevideo.soundrate * 4); // average bytes per second
+ SCR_CaptureVideo_RIFF_Write16(4); // block align
+ SCR_CaptureVideo_RIFF_Write16(16); // bits per sample
+ SCR_CaptureVideo_RIFF_Write16(0); // size
+ SCR_CaptureVideo_RIFF_Pop();
+ // master index
+ SCR_CaptureVideo_RIFF_Push("indx", NULL);
+ SCR_CaptureVideo_RIFF_Write16(4); // wLongsPerEntry
+ SCR_CaptureVideo_RIFF_Write16(0); // bIndexSubType=0, bIndexType=0
+ cls.capturevideo.videofile_ix_master_audio_inuse_offset = SCR_CaptureVideo_RIFF_GetPosition();
+ SCR_CaptureVideo_RIFF_Write32(0); // nEntriesInUse
+ SCR_CaptureVideo_RIFF_WriteFourCC("01wb"); // dwChunkId
+ SCR_CaptureVideo_RIFF_Write32(0); // dwReserved1
+ SCR_CaptureVideo_RIFF_Write32(0); // dwReserved2
+ SCR_CaptureVideo_RIFF_Write32(0); // dwReserved3
+ cls.capturevideo.videofile_ix_master_audio_start_offset = SCR_CaptureVideo_RIFF_GetPosition();
+ for(i = 0; i < AVI_MASTER_INDEX_SIZE * 4; ++i)
+ SCR_CaptureVideo_RIFF_Write32(0); // fill up later
+ SCR_CaptureVideo_RIFF_Pop();
+ SCR_CaptureVideo_RIFF_Pop();
+ }
+
+ cls.capturevideo.videofile_ix_master_audio_inuse = cls.capturevideo.videofile_ix_master_video_inuse = 0;
+
+ // extended header (for total #frames)
+ SCR_CaptureVideo_RIFF_Push("LIST", "odml");
+ SCR_CaptureVideo_RIFF_Push("dmlh", NULL);
+ cls.capturevideo.videofile_totalframes_offset2 = SCR_CaptureVideo_RIFF_GetPosition();
+ SCR_CaptureVideo_RIFF_Write32(0);
+ SCR_CaptureVideo_RIFF_Pop();
+ SCR_CaptureVideo_RIFF_Pop();
+
+ // close the AVI header list
+ SCR_CaptureVideo_RIFF_Pop();
+ // software that produced this AVI video file
+ SCR_CaptureVideo_RIFF_Push("LIST", "INFO");
+ SCR_CaptureVideo_RIFF_Push("ISFT", NULL);
+ SCR_CaptureVideo_RIFF_WriteTerminatedString(engineversion);
+ SCR_CaptureVideo_RIFF_Pop();
+ // enable this junk filler if you like the LIST movi to always begin at 4KB in the file (why?)
+#if 0
+ SCR_CaptureVideo_RIFF_Push("JUNK", NULL);
+ x = 4096 - SCR_CaptureVideo_RIFF_GetPosition();
+ while (x > 0)
+ {
+ const char *junkfiller = "[ DarkPlaces junk data ]";
+ int i = min(x, (int)strlen(junkfiller));
+ SCR_CaptureVideo_RIFF_WriteBytes((const unsigned char *)junkfiller, i);
+ x -= i;
+ }
+ SCR_CaptureVideo_RIFF_Pop();
+#endif
+ SCR_CaptureVideo_RIFF_Pop();
+ // begin the actual video section now
+ SCR_CaptureVideo_RIFF_Push("LIST", "movi");
+ cls.capturevideo.videofile_ix_movistart = cls.capturevideo.riffstackstartoffset[1];
+ // we're done with the headers now...
+ SCR_CaptureVideo_RIFF_Flush();
+ if (cls.capturevideo.riffstacklevel != 2)
+ Sys_Error("SCR_CaptureVideo_BeginVideo: broken AVI writing code (stack level is %i (should be 2) at end of headers)\n", cls.capturevideo.riffstacklevel);
+ }
+
+ switch(cls.capturevideo.format)
+ {
+ case CAPTUREVIDEOFORMAT_AVI_I420:
+ break;
+ default:
+ break;
+ }
+}
+
// Ends an active capture: finalizes the AVI (closing all open RIFF chunks
// and writing the indexes), back-patches the frame and audio sample counts
// recorded at header-write time, closes the file, and releases all capture
// buffers. Safe to call when no capture is active (returns immediately).
void SCR_CaptureVideo_EndVideo(void)
{
	if (!cls.capturevideo.active)
		return;
	// mark inactive first so per-frame capture code stops immediately
	cls.capturevideo.active = false;
	if (cls.capturevideo.videofile)
	{
		switch(cls.capturevideo.format)
		{
		case CAPTUREVIDEOFORMAT_AVI_I420:
			// close any open chunks
			SCR_CaptureVideo_RIFF_Finish(true);
			// go back and fix the video frames and audio samples fields
			Con_DPrintf("Finishing capture (%d frames, %d audio frames)\n", cls.capturevideo.frame, cls.capturevideo.soundsampleframe);
			// strh "length" in the video stream header
			FS_Seek(cls.capturevideo.videofile, cls.capturevideo.videofile_totalframes_offset1, SEEK_SET);
			SCR_CaptureVideo_RIFF_Write32(cls.capturevideo.frame);
			SCR_CaptureVideo_RIFF_Flush();
			// dmlh total frames in the odml extended header
			FS_Seek(cls.capturevideo.videofile, cls.capturevideo.videofile_totalframes_offset2, SEEK_SET);
			SCR_CaptureVideo_RIFF_Write32(cls.capturevideo.frame);
			SCR_CaptureVideo_RIFF_Flush();
			if (cls.capturevideo.soundrate)
			{
				// strh "length" in the audio stream header
				FS_Seek(cls.capturevideo.videofile, cls.capturevideo.videofile_totalsampleframes_offset, SEEK_SET);
				SCR_CaptureVideo_RIFF_Write32(cls.capturevideo.soundsampleframe);
				SCR_CaptureVideo_RIFF_Flush();
			}
			break;
		default:
			break;
		}
		FS_Close(cls.capturevideo.videofile);
		cls.capturevideo.videofile = NULL;
	}

	// free all capture-time allocations (guards needed: buffers may not
	// have been allocated if BeginVideo aborted early)
	if (cls.capturevideo.screenbuffer)
	{
		Mem_Free (cls.capturevideo.screenbuffer);
		cls.capturevideo.screenbuffer = NULL;
	}

	if (cls.capturevideo.outbuffer)
	{
		Mem_Free (cls.capturevideo.outbuffer);
		cls.capturevideo.outbuffer = NULL;
	}

	if (cls.capturevideo.riffindexbuffer.data)
	{
		Mem_Free(cls.capturevideo.riffindexbuffer.data);
		cls.capturevideo.riffindexbuffer.data = NULL;
	}

	// reset all capture state for the next BeginVideo
	memset(&cls.capturevideo, 0, sizeof(cls.capturevideo));
}
+
+// converts from RGB24 to I420 colorspace (identical to YV12 except chroma plane order is reversed), this colorspace is handled by the Intel(r) 4:2:0 codec on Windows
+void SCR_CaptureVideo_ConvertFrame_RGB_to_I420_flip(int width, int height, unsigned char *instart, unsigned char *outstart)
+{
+ int x, y;