de.git.xonotic.org Git - xonotic/darkplaces.git/commitdiff
capturevideo refactoring, making AVI also "just a module" for it
author divverent <divverent@d7cf8633-e32d-0410-b094-e92efae38249>
Wed, 18 Feb 2009 14:05:10 +0000 (14:05 +0000)
committer divverent <divverent@d7cf8633-e32d-0410-b094-e92efae38249>
Wed, 18 Feb 2009 14:05:10 +0000 (14:05 +0000)
git-svn-id: svn://svn.icculus.org/twilight/trunk/darkplaces@8725 d7cf8633-e32d-0410-b094-e92efae38249

cap_avi.c [new file with mode: 0644]
cap_avi.h [new file with mode: 0644]
cap_ogg.c
cap_ogg.h
cl_screen.c
client.h
makefile.inc
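
The refactoring below turns each capture format into a plug-in module: a format's BeginVideo function installs that format's endvideo/videoframes/soundframe callbacks on cls.capturevideo and allocates its private formatspecific state (see SCR_CaptureVideo_Avi_BeginVideo in cap_avi.c and SCR_CaptureVideo_Ogg_BeginVideo in cap_ogg.c), and the capture core then drives the format only through those callbacks. The following minimal, self-contained sketch shows that callback pattern; the struct and the Demo_* module are simplified stand-ins, not the engine's real client.h declarations.

#include <stdio.h>
#include <stdlib.h>

/* Simplified stand-in for the engine state: the real fields live in
 * cls.capturevideo (client.h) alongside soundframe, the output file,
 * and allocation via Mem_Alloc rather than calloc. */
typedef struct capturevideostate_s
{
	void *formatspecific;              /* module-private state blob */
	void (*endvideo)(void);
	void (*videoframes)(int num);
}
capturevideostate_t;

static capturevideostate_t capturevideo;

/* A hypothetical "demo" format module; the AVI and Ogg modules install
 * their callbacks the same way in SCR_CaptureVideo_Avi_BeginVideo and
 * SCR_CaptureVideo_Ogg_BeginVideo. */
static void Demo_EndVideo(void)
{
	printf("demo module: finishing stream\n");
	free(capturevideo.formatspecific);
	capturevideo.formatspecific = NULL;
}

static void Demo_VideoFrames(int num)
{
	printf("demo module: writing %d frame(s)\n", num);
}

static void Demo_BeginVideo(void)
{
	capturevideo.formatspecific = calloc(1, 64);
	capturevideo.endvideo = Demo_EndVideo;
	capturevideo.videoframes = Demo_VideoFrames;
}

int main(void)
{
	Demo_BeginVideo();            /* only format-specific call the core makes */
	capturevideo.videoframes(2);  /* everything else goes through callbacks   */
	capturevideo.endvideo();
	return 0;
}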

diff --git a/cap_avi.c b/cap_avi.c
new file mode 100644
index 0000000..df75c7d
--- /dev/null
+++ b/cap_avi.c
@@ -0,0 +1,705 @@
+#include "quakedef.h"
+#include "cap_avi.h"
+
+#define AVI_MASTER_INDEX_SIZE 640 // GB ought to be enough for anyone
+
+typedef struct capturevideostate_avi_formatspecific_s
+{
+       // AVI stuff
+       fs_offset_t videofile_firstchunkframes_offset;
+       fs_offset_t videofile_totalframes_offset1;
+       fs_offset_t videofile_totalframes_offset2;
+       fs_offset_t videofile_totalsampleframes_offset;
+       int videofile_ix_master_audio_inuse;
+       fs_offset_t videofile_ix_master_audio_inuse_offset;
+       fs_offset_t videofile_ix_master_audio_start_offset;
+       int videofile_ix_master_video_inuse;
+       fs_offset_t videofile_ix_master_video_inuse_offset;
+       fs_offset_t videofile_ix_master_video_start_offset;
+       fs_offset_t videofile_ix_movistart;
+       fs_offset_t position;
+       qboolean canseek;
+       sizebuf_t riffbuffer;
+       unsigned char riffbufferdata[128];
+       sizebuf_t riffindexbuffer;
+       int riffstacklevel;
+       fs_offset_t riffstackstartoffset[4];
+       fs_offset_t riffstacksizehint[4];
+       const char *riffstackfourcc[4];
+}
+capturevideostate_avi_formatspecific_t;
+#define LOAD_FORMATSPECIFIC_AVI() capturevideostate_avi_formatspecific_t *format = (capturevideostate_avi_formatspecific_t *) cls.capturevideo.formatspecific
+
+static void SCR_CaptureVideo_RIFF_Start(void)
+{
+       LOAD_FORMATSPECIFIC_AVI();
+       memset(&format->riffbuffer, 0, sizeof(sizebuf_t));
+       format->riffbuffer.maxsize = sizeof(format->riffbufferdata);
+       format->riffbuffer.data = format->riffbufferdata;
+       format->position = 0;
+}
+
+static void SCR_CaptureVideo_RIFF_Flush(void)
+{
+       LOAD_FORMATSPECIFIC_AVI();
+       if (format->riffbuffer.cursize > 0)
+       {
+               if (!FS_Write(cls.capturevideo.videofile, format->riffbuffer.data, format->riffbuffer.cursize))
+                       cls.capturevideo.error = true;
+               format->position += format->riffbuffer.cursize;
+               format->riffbuffer.cursize = 0;
+               format->riffbuffer.overflowed = false;
+       }
+}
+
+static void SCR_CaptureVideo_RIFF_WriteBytes(const unsigned char *data, size_t size)
+{
+       LOAD_FORMATSPECIFIC_AVI();
+       SCR_CaptureVideo_RIFF_Flush();
+       if (!FS_Write(cls.capturevideo.videofile, data, size))
+               cls.capturevideo.error = true;
+       format->position += size;
+}
+
+static void SCR_CaptureVideo_RIFF_Write32(int n)
+{
+       LOAD_FORMATSPECIFIC_AVI();
+       if (format->riffbuffer.cursize + 4 > format->riffbuffer.maxsize)
+               SCR_CaptureVideo_RIFF_Flush();
+       MSG_WriteLong(&format->riffbuffer, n);
+}
+
+static void SCR_CaptureVideo_RIFF_Write16(int n)
+{
+       LOAD_FORMATSPECIFIC_AVI();
+       if (format->riffbuffer.cursize + 2 > format->riffbuffer.maxsize)
+               SCR_CaptureVideo_RIFF_Flush();
+       MSG_WriteShort(&format->riffbuffer, n);
+}
+
+static void SCR_CaptureVideo_RIFF_WriteFourCC(const char *chunkfourcc)
+{
+       LOAD_FORMATSPECIFIC_AVI();
+       if (format->riffbuffer.cursize + (int)strlen(chunkfourcc) > format->riffbuffer.maxsize)
+               SCR_CaptureVideo_RIFF_Flush();
+       MSG_WriteUnterminatedString(&format->riffbuffer, chunkfourcc);
+}
+
+static void SCR_CaptureVideo_RIFF_WriteTerminatedString(const char *string)
+{
+       LOAD_FORMATSPECIFIC_AVI();
+       if (format->riffbuffer.cursize + (int)strlen(string) > format->riffbuffer.maxsize)
+               SCR_CaptureVideo_RIFF_Flush();
+       MSG_WriteString(&format->riffbuffer, string);
+}
+
+static fs_offset_t SCR_CaptureVideo_RIFF_GetPosition(void)
+{
+       LOAD_FORMATSPECIFIC_AVI();
+       SCR_CaptureVideo_RIFF_Flush();
+       //return FS_Tell(cls.capturevideo.videofile);
+       return format->position;
+}
+
+static void SCR_CaptureVideo_RIFF_Push(const char *chunkfourcc, const char *listtypefourcc, fs_offset_t sizeHint)
+{
+       LOAD_FORMATSPECIFIC_AVI();
+       if (listtypefourcc && sizeHint >= 0)
+               sizeHint += 4; // size hint is for INNER size
+       SCR_CaptureVideo_RIFF_WriteFourCC(chunkfourcc);
+       SCR_CaptureVideo_RIFF_Write32(sizeHint);
+       SCR_CaptureVideo_RIFF_Flush();
+       format->riffstacksizehint[format->riffstacklevel] = sizeHint;
+       format->riffstackstartoffset[format->riffstacklevel] = SCR_CaptureVideo_RIFF_GetPosition();
+       format->riffstackfourcc[format->riffstacklevel] = chunkfourcc;
+       ++format->riffstacklevel;
+       if (listtypefourcc)
+               SCR_CaptureVideo_RIFF_WriteFourCC(listtypefourcc);
+}
+
+static void SCR_CaptureVideo_RIFF_Pop(void)
+{
+       LOAD_FORMATSPECIFIC_AVI();
+       fs_offset_t offset, sizehint;
+       int x;
+       unsigned char sizebytes[4];
+       // write out the chunk size and then return to the current file position
+       format->riffstacklevel--;
+       offset = SCR_CaptureVideo_RIFF_GetPosition();
+
+       sizehint = format->riffstacksizehint[format->riffstacklevel];
+       x = (int)(offset - (format->riffstackstartoffset[format->riffstacklevel]));
+
+       if(x != sizehint)
+       {
+               if(sizehint != -1)
+               {
+                       int i;
+                       Con_Printf("WARNING: invalid size hint %d when writing video data (actual size: %d)\n", (int) sizehint, x);
+                       for(i = 0; i <= format->riffstacklevel; ++i)
+                       {
+                               Con_Printf("  RIFF level %d = %s\n", i, format->riffstackfourcc[i]);
+                       }
+               }
+               sizebytes[0] = (x) & 0xff;sizebytes[1] = (x >> 8) & 0xff;sizebytes[2] = (x >> 16) & 0xff;sizebytes[3] = (x >> 24) & 0xff;
+               if(FS_Seek(cls.capturevideo.videofile, -(x + 4), SEEK_END) >= 0)
+               {
+                       FS_Write(cls.capturevideo.videofile, sizebytes, 4);
+               }
+               FS_Seek(cls.capturevideo.videofile, 0, SEEK_END);
+       }
+
+       if (offset & 1)
+       {
+               SCR_CaptureVideo_RIFF_WriteBytes((unsigned char *) "\0", 1);
+       }
+}
+
+static void GrowBuf(sizebuf_t *buf, int extralen)
+{
+       if(buf->cursize + extralen > buf->maxsize)
+       {
+               int oldsize = buf->maxsize;
+               unsigned char *olddata;
+               olddata = buf->data;
+               buf->maxsize = max(buf->maxsize * 2, 4096);
+               buf->data = (unsigned char *) Mem_Alloc(tempmempool, buf->maxsize);
+               if(olddata)
+               {
+                       memcpy(buf->data, olddata, oldsize);
+                       Mem_Free(olddata);
+               }
+       }
+}
+
+static void SCR_CaptureVideo_RIFF_IndexEntry(const char *chunkfourcc, int chunksize, int flags)
+{
+       LOAD_FORMATSPECIFIC_AVI();
+       if(!format->canseek)
+               Host_Error("SCR_CaptureVideo_RIFF_IndexEntry called on non-seekable AVI");
+
+       if (format->riffstacklevel != 2)
+               Sys_Error("SCR_Capturevideo_RIFF_IndexEntry: RIFF stack level is %i (should be 2)\n", format->riffstacklevel);
+       GrowBuf(&format->riffindexbuffer, 16);
+       SCR_CaptureVideo_RIFF_Flush();
+       MSG_WriteUnterminatedString(&format->riffindexbuffer, chunkfourcc);
+       MSG_WriteLong(&format->riffindexbuffer, flags);
+       MSG_WriteLong(&format->riffindexbuffer, (int)FS_Tell(cls.capturevideo.videofile) - format->riffstackstartoffset[1]);
+       MSG_WriteLong(&format->riffindexbuffer, chunksize);
+}
+
+static void SCR_CaptureVideo_RIFF_MakeIxChunk(const char *fcc, const char *dwChunkId, fs_offset_t masteridx_counter, int *masteridx_count, fs_offset_t masteridx_start)
+{
+       LOAD_FORMATSPECIFIC_AVI();
+       int nMatching;
+       int i;
+       fs_offset_t ix = SCR_CaptureVideo_RIFF_GetPosition();
+       fs_offset_t pos, sz;
+       
+       if(!format->canseek)
+               Host_Error("SCR_CaptureVideo_RIFF_MakeIxChunk called on non-seekable AVI");
+
+       if(*masteridx_count >= AVI_MASTER_INDEX_SIZE)
+               return;
+
+       nMatching = 0; // go through index and enumerate them
+       for(i = 0; i < format->riffindexbuffer.cursize; i += 16)
+               if(!memcmp(format->riffindexbuffer.data + i, dwChunkId, 4))
+                       ++nMatching;
+
+       sz = 2+2+4+4+4+4+4;
+       for(i = 0; i < format->riffindexbuffer.cursize; i += 16)
+               if(!memcmp(format->riffindexbuffer.data + i, dwChunkId, 4))
+                       sz += 8;
+
+       SCR_CaptureVideo_RIFF_Push(fcc, NULL, sz);
+       SCR_CaptureVideo_RIFF_Write16(2); // wLongsPerEntry
+       SCR_CaptureVideo_RIFF_Write16(0x0100); // bIndexType=1, bIndexSubType=0
+       SCR_CaptureVideo_RIFF_Write32(nMatching); // nEntriesInUse
+       SCR_CaptureVideo_RIFF_WriteFourCC(dwChunkId); // dwChunkId
+       SCR_CaptureVideo_RIFF_Write32(format->videofile_ix_movistart & (fs_offset_t) 0xFFFFFFFFu);
+       SCR_CaptureVideo_RIFF_Write32(((fs_offset_t) format->videofile_ix_movistart) >> 32);
+       SCR_CaptureVideo_RIFF_Write32(0); // dwReserved
+
+       for(i = 0; i < format->riffindexbuffer.cursize; i += 16)
+               if(!memcmp(format->riffindexbuffer.data + i, dwChunkId, 4))
+               {
+                       unsigned int *p = (unsigned int *) (format->riffindexbuffer.data + i);
+                       unsigned int flags = p[1];
+                       unsigned int rpos = p[2];
+                       unsigned int size = p[3];
+                       size &= ~0x80000000;
+                       if(!(flags & 0x10)) // no keyframe?
+                               size |= 0x80000000;
+                       SCR_CaptureVideo_RIFF_Write32(rpos + 8);
+                       SCR_CaptureVideo_RIFF_Write32(size);
+               }
+
+       SCR_CaptureVideo_RIFF_Flush();
+       SCR_CaptureVideo_RIFF_Pop();
+       pos = SCR_CaptureVideo_RIFF_GetPosition();
+
+       if(FS_Seek(cls.capturevideo.videofile, masteridx_start + 16 * *masteridx_count, SEEK_SET) >= 0)
+       {
+               SCR_CaptureVideo_RIFF_Write32(ix & (fs_offset_t) 0xFFFFFFFFu);
+               SCR_CaptureVideo_RIFF_Write32(((fs_offset_t) ix) >> 32);
+               SCR_CaptureVideo_RIFF_Write32(pos - ix);
+               SCR_CaptureVideo_RIFF_Write32(nMatching);
+               SCR_CaptureVideo_RIFF_Flush();
+       }
+
+       if(FS_Seek(cls.capturevideo.videofile, masteridx_counter, SEEK_SET) >= 0)
+       {
+               SCR_CaptureVideo_RIFF_Write32(++*masteridx_count);
+               SCR_CaptureVideo_RIFF_Flush();
+       }
+
+       FS_Seek(cls.capturevideo.videofile, 0, SEEK_END); // return value doesn't matter here
+}
+
+static void SCR_CaptureVideo_RIFF_Finish(qboolean final)
+{
+       LOAD_FORMATSPECIFIC_AVI();
+       // close the "movi" list
+       SCR_CaptureVideo_RIFF_Pop();
+       if(format->videofile_ix_master_video_inuse_offset)
+               SCR_CaptureVideo_RIFF_MakeIxChunk("ix00", "00dc", format->videofile_ix_master_video_inuse_offset, &format->videofile_ix_master_video_inuse, format->videofile_ix_master_video_start_offset);
+       if(format->videofile_ix_master_audio_inuse_offset)
+               SCR_CaptureVideo_RIFF_MakeIxChunk("ix01", "01wb", format->videofile_ix_master_audio_inuse_offset, &format->videofile_ix_master_audio_inuse, format->videofile_ix_master_audio_start_offset);
+       // write the idx1 chunk that we've been building while saving the frames (for old style players)
+       if(final && format->videofile_firstchunkframes_offset)
+       // TODO replace index creating by OpenDML ix##/##ix/indx chunk so it works for more than one AVI part too
+       {
+               SCR_CaptureVideo_RIFF_Push("idx1", NULL, format->riffindexbuffer.cursize);
+               SCR_CaptureVideo_RIFF_WriteBytes(format->riffindexbuffer.data, format->riffindexbuffer.cursize);
+               SCR_CaptureVideo_RIFF_Pop();
+       }
+       format->riffindexbuffer.cursize = 0;
+       // pop the RIFF chunk itself
+       while (format->riffstacklevel > 0)
+               SCR_CaptureVideo_RIFF_Pop();
+       SCR_CaptureVideo_RIFF_Flush();
+       if(format->videofile_firstchunkframes_offset)
+       {
+               Con_DPrintf("Finishing first chunk (%d frames)\n", cls.capturevideo.frame);
+               if(FS_Seek(cls.capturevideo.videofile, format->videofile_firstchunkframes_offset, SEEK_SET) >= 0)
+               {
+                       SCR_CaptureVideo_RIFF_Write32(cls.capturevideo.frame);
+                       SCR_CaptureVideo_RIFF_Flush();
+               }
+               FS_Seek(cls.capturevideo.videofile, 0, SEEK_END);
+               format->videofile_firstchunkframes_offset = 0;
+       }
+       else
+               Con_DPrintf("Finishing another chunk (%d frames)\n", cls.capturevideo.frame);
+}
+
+static void SCR_CaptureVideo_RIFF_OverflowCheck(int framesize)
+{
+       LOAD_FORMATSPECIFIC_AVI();
+       fs_offset_t cursize, curfilesize;
+       if (format->riffstacklevel != 2)
+               Sys_Error("SCR_CaptureVideo_RIFF_OverflowCheck: chunk stack leakage!\n");
+       
+       if(!format->canseek)
+               return;
+
+       // check where we are in the file
+       SCR_CaptureVideo_RIFF_Flush();
+       cursize = SCR_CaptureVideo_RIFF_GetPosition() - format->riffstackstartoffset[0];
+       curfilesize = SCR_CaptureVideo_RIFF_GetPosition();
+
+       // if this would overflow the windows limit of 1GB per RIFF chunk, we need
+       // to close the current RIFF chunk and open another for future frames
+       if (8 + cursize + framesize + format->riffindexbuffer.cursize + 8 + format->riffindexbuffer.cursize + 64 > 1<<30) // note that the Ix buffer takes less space... I just don't dare to / 2 here now... sorry, maybe later
+       {
+               SCR_CaptureVideo_RIFF_Finish(false);
+               // begin a new 1GB extended section of the AVI
+               SCR_CaptureVideo_RIFF_Push("RIFF", "AVIX", -1);
+               SCR_CaptureVideo_RIFF_Push("LIST", "movi", -1);
+               format->videofile_ix_movistart = format->riffstackstartoffset[1];
+       }
+}
+
+// converts from BGRA32 to I420 colorspace (identical to YV12 except chroma plane order is reversed), this colorspace is handled by the Intel(r) 4:2:0 codec on Windows
+static void SCR_CaptureVideo_ConvertFrame_BGRA_to_I420_flip(int width, int height, unsigned char *instart, unsigned char *outstart)
+{
+       int x, y;
+       int blockr, blockg, blockb;
+       int outoffset = (width/2)*(height/2);
+       unsigned char *b, *out;
+       // process one line at a time, and CbCr every other line at 2 pixel intervals
+       for (y = 0;y < height;y++)
+       {
+               // 1x1 Y
+               for (b = instart + (height-1-y)*width*4, out = outstart + y*width, x = 0;x < width;x++, b += 4, out++)
+               {
+                       blockr = b[2];
+                       blockg = b[1];
+                       blockb = b[0];
+                       *out = cls.capturevideo.yuvnormalizetable[0][cls.capturevideo.rgbtoyuvscaletable[0][0][blockr] + cls.capturevideo.rgbtoyuvscaletable[0][1][blockg] + cls.capturevideo.rgbtoyuvscaletable[0][2][blockb]];
+               }
+               if ((y & 1) == 0)
+               {
+                       // 2x2 Cr and Cb planes
+                       int inpitch = width*4;
+                       for (b = instart + (height-2-y)*width*4, out = outstart + width*height + (y/2)*(width/2), x = 0;x < width/2;x++, b += 8, out++)
+                       {
+                               blockr = (b[2] + b[6] + b[inpitch+2] + b[inpitch+6]) >> 2;
+                               blockg = (b[1] + b[5] + b[inpitch+1] + b[inpitch+5]) >> 2;
+                               blockb = (b[0] + b[4] + b[inpitch+0] + b[inpitch+4]) >> 2;
+                               // Cr
+                               out[0        ] = cls.capturevideo.yuvnormalizetable[1][cls.capturevideo.rgbtoyuvscaletable[1][0][blockr] + cls.capturevideo.rgbtoyuvscaletable[1][1][blockg] + cls.capturevideo.rgbtoyuvscaletable[1][2][blockb] + 128];
+                               // Cb
+                               out[outoffset] = cls.capturevideo.yuvnormalizetable[2][cls.capturevideo.rgbtoyuvscaletable[2][0][blockr] + cls.capturevideo.rgbtoyuvscaletable[2][1][blockg] + cls.capturevideo.rgbtoyuvscaletable[2][2][blockb] + 128];
+                       }
+               }
+       }
+}
+
+static void SCR_CaptureVideo_Avi_VideoFrames(int num)
+{
+       LOAD_FORMATSPECIFIC_AVI();
+       int x = 0, width = cls.capturevideo.width, height = cls.capturevideo.height;
+       unsigned char *in, *out;
+       // FIXME: width/height must be multiple of 2, enforce this?
+       in = cls.capturevideo.outbuffer;
+       out = cls.capturevideo.outbuffer + width*height*4;
+       SCR_CaptureVideo_ConvertFrame_BGRA_to_I420_flip(width, height, in, out);
+       x = width*height+(width/2)*(height/2)*2;
+       while(num-- > 0)
+       {
+               if(format->canseek)
+               {
+                       SCR_CaptureVideo_RIFF_OverflowCheck(8 + x);
+                       SCR_CaptureVideo_RIFF_IndexEntry("00dc", x, 0x10); // AVIIF_KEYFRAME
+               }
+
+               if(!format->canseek)
+               {
+                       SCR_CaptureVideo_RIFF_Push("RIFF", "AVIX", 12+8+x);
+                       SCR_CaptureVideo_RIFF_Push("LIST", "movi", 8+x);
+               }
+               SCR_CaptureVideo_RIFF_Push("00dc", NULL, x);
+               SCR_CaptureVideo_RIFF_WriteBytes(out, x);
+               SCR_CaptureVideo_RIFF_Pop();
+               if(!format->canseek)
+               {
+                       SCR_CaptureVideo_RIFF_Pop();
+                       SCR_CaptureVideo_RIFF_Pop();
+               }
+       }
+}
+
+void SCR_CaptureVideo_Avi_EndVideo()
+{
+       LOAD_FORMATSPECIFIC_AVI();
+
+       if(format->canseek)
+       {
+               // close any open chunks
+               SCR_CaptureVideo_RIFF_Finish(true);
+
+               // go back and fix the video frames and audio samples fields
+               if(format->videofile_totalframes_offset1)
+                       if(FS_Seek(cls.capturevideo.videofile, format->videofile_totalframes_offset1, SEEK_SET) >= 0)
+                       {
+                               SCR_CaptureVideo_RIFF_Write32(cls.capturevideo.frame);
+                               SCR_CaptureVideo_RIFF_Flush();
+                       }
+               if(format->videofile_totalframes_offset2)
+                       if(FS_Seek(cls.capturevideo.videofile, format->videofile_totalframes_offset2, SEEK_SET) >= 0)
+                       {
+                               SCR_CaptureVideo_RIFF_Write32(cls.capturevideo.frame);
+                               SCR_CaptureVideo_RIFF_Flush();
+                       }
+               if (cls.capturevideo.soundrate)
+               {
+                       if(format->videofile_totalsampleframes_offset)
+                               if(FS_Seek(cls.capturevideo.videofile, format->videofile_totalsampleframes_offset, SEEK_SET) >= 0)
+                               {
+                                       SCR_CaptureVideo_RIFF_Write32(cls.capturevideo.soundsampleframe);
+                                       SCR_CaptureVideo_RIFF_Flush();
+                               }
+               }
+       }
+
+       if (format->riffindexbuffer.data)
+       {
+               Mem_Free(format->riffindexbuffer.data);
+               format->riffindexbuffer.data = NULL;
+       }
+
+       FS_Close(cls.capturevideo.videofile);
+       cls.capturevideo.videofile = NULL;
+
+       Mem_Free(format);
+}
+
+void SCR_CaptureVideo_Avi_SoundFrame(const portable_sampleframe_t *paintbuffer, size_t length)
+{
+       LOAD_FORMATSPECIFIC_AVI();
+       int x;
+       unsigned char bufstereo16le[PAINTBUFFER_SIZE * 4];
+       unsigned char* out_ptr;
+       size_t i;
+
+       // write the sound buffer as little endian 16bit interleaved stereo
+       for(i = 0, out_ptr = bufstereo16le; i < length; i++, out_ptr += 4)
+       {
+               int n0, n1;
+
+               n0 = paintbuffer[i].sample[0];
+               n0 = bound(-32768, n0, 32767);
+               out_ptr[0] = (unsigned char)n0;
+               out_ptr[1] = (unsigned char)(n0 >> 8);
+
+               n1 = paintbuffer[i].sample[1];
+               n1 = bound(-32768, n1, 32767);
+               out_ptr[2] = (unsigned char)n1;
+               out_ptr[3] = (unsigned char)(n1 >> 8);
+       }
+
+       x = length*4;
+       if(format->canseek)
+       {
+               SCR_CaptureVideo_RIFF_OverflowCheck(8 + x);
+               SCR_CaptureVideo_RIFF_IndexEntry("01wb", x, 0x10); // AVIIF_KEYFRAME
+       }
+
+       if(!format->canseek)
+       {
+               SCR_CaptureVideo_RIFF_Push("RIFF", "AVIX", 12+8+x);
+               SCR_CaptureVideo_RIFF_Push("LIST", "movi", 8+x);
+       }
+       SCR_CaptureVideo_RIFF_Push("01wb", NULL, x);
+       SCR_CaptureVideo_RIFF_WriteBytes(bufstereo16le, x);
+       SCR_CaptureVideo_RIFF_Pop();
+       if(!format->canseek)
+       {
+               SCR_CaptureVideo_RIFF_Pop();
+               SCR_CaptureVideo_RIFF_Pop();
+       }
+}
+
+void SCR_CaptureVideo_Avi_BeginVideo()
+{
+       int width = cls.capturevideo.width;
+       int height = cls.capturevideo.height;
+       int n, d;
+       unsigned int i;
+       double aspect;
+
+       aspect = vid.width / (vid.height * vid_pixelheight.value);
+
+       cls.capturevideo.format = CAPTUREVIDEOFORMAT_AVI_I420;
+       cls.capturevideo.videofile = FS_OpenRealFile(va("%s.avi", cls.capturevideo.basename), "wb", false);
+       cls.capturevideo.endvideo = SCR_CaptureVideo_Avi_EndVideo;
+       cls.capturevideo.videoframes = SCR_CaptureVideo_Avi_VideoFrames;
+       cls.capturevideo.soundframe = SCR_CaptureVideo_Avi_SoundFrame;
+       cls.capturevideo.formatspecific = Mem_Alloc(tempmempool, sizeof(capturevideostate_avi_formatspecific_t));
+       {
+               LOAD_FORMATSPECIFIC_AVI();
+               format->canseek = (FS_Seek(cls.capturevideo.videofile, 0, SEEK_SET) == 0);
+               SCR_CaptureVideo_RIFF_Start();
+               // enclosing RIFF chunk (there can be multiple of these in >1GB files, the later ones are "AVIX" instead of "AVI " and have no header/stream info)
+               SCR_CaptureVideo_RIFF_Push("RIFF", "AVI ", format->canseek ? -1 : 12+(8+56+12+(12+52+8+40+8+68)+(cls.capturevideo.soundrate?(12+12+52+8+18):0)+12+(8+4))+12+(8+(((int) strlen(engineversion) | 1) + 1))+12);
+               // AVI main header
+               SCR_CaptureVideo_RIFF_Push("LIST", "hdrl", format->canseek ? -1 : 8+56+12+(12+52+8+40+8+68)+(cls.capturevideo.soundrate?(12+12+52+8+18):0)+12+(8+4));
+               SCR_CaptureVideo_RIFF_Push("avih", NULL, 56);
+               SCR_CaptureVideo_RIFF_Write32((int)(1000000.0 / cls.capturevideo.framerate)); // microseconds per frame
+               SCR_CaptureVideo_RIFF_Write32(0); // max bytes per second
+               SCR_CaptureVideo_RIFF_Write32(0); // padding granularity
+               SCR_CaptureVideo_RIFF_Write32(0x910); // flags (AVIF_HASINDEX | AVIF_ISINTERLEAVED | AVIF_TRUSTCKTYPE)
+               format->videofile_firstchunkframes_offset = SCR_CaptureVideo_RIFF_GetPosition();
+               SCR_CaptureVideo_RIFF_Write32(0); // total frames
+               SCR_CaptureVideo_RIFF_Write32(0); // initial frames
+               if (cls.capturevideo.soundrate)
+                       SCR_CaptureVideo_RIFF_Write32(2); // number of streams
+               else
+                       SCR_CaptureVideo_RIFF_Write32(1); // number of streams
+               SCR_CaptureVideo_RIFF_Write32(0); // suggested buffer size
+               SCR_CaptureVideo_RIFF_Write32(width); // width
+               SCR_CaptureVideo_RIFF_Write32(height); // height
+               SCR_CaptureVideo_RIFF_Write32(0); // reserved[0]
+               SCR_CaptureVideo_RIFF_Write32(0); // reserved[1]
+               SCR_CaptureVideo_RIFF_Write32(0); // reserved[2]
+               SCR_CaptureVideo_RIFF_Write32(0); // reserved[3]
+               SCR_CaptureVideo_RIFF_Pop();
+               // video stream info
+               SCR_CaptureVideo_RIFF_Push("LIST", "strl", format->canseek ? -1 : 12+52+8+40+8+68);
+               SCR_CaptureVideo_RIFF_Push("strh", "vids", 52);
+               SCR_CaptureVideo_RIFF_WriteFourCC("I420"); // stream fourcc (I420 colorspace, uncompressed)
+               SCR_CaptureVideo_RIFF_Write32(0); // flags
+               SCR_CaptureVideo_RIFF_Write16(0); // priority
+               SCR_CaptureVideo_RIFF_Write16(0); // language
+               SCR_CaptureVideo_RIFF_Write32(0); // initial frames
+               // find an ideal divisor for the framerate
+               FindFraction(cls.capturevideo.framerate, &n, &d, 1000);
+               SCR_CaptureVideo_RIFF_Write32(d); // samples/second divisor
+               SCR_CaptureVideo_RIFF_Write32(n); // samples/second multiplied by divisor
+               SCR_CaptureVideo_RIFF_Write32(0); // start
+               format->videofile_totalframes_offset1 = SCR_CaptureVideo_RIFF_GetPosition();
+               SCR_CaptureVideo_RIFF_Write32(0xFFFFFFFF); // length
+               SCR_CaptureVideo_RIFF_Write32(width*height+(width/2)*(height/2)*2); // suggested buffer size
+               SCR_CaptureVideo_RIFF_Write32(0); // quality
+               SCR_CaptureVideo_RIFF_Write32(0); // sample size
+               SCR_CaptureVideo_RIFF_Write16(0); // frame left
+               SCR_CaptureVideo_RIFF_Write16(0); // frame top
+               SCR_CaptureVideo_RIFF_Write16(width); // frame right
+               SCR_CaptureVideo_RIFF_Write16(height); // frame bottom
+               SCR_CaptureVideo_RIFF_Pop();
+               // video stream format
+               SCR_CaptureVideo_RIFF_Push("strf", NULL, 40);
+               SCR_CaptureVideo_RIFF_Write32(40); // BITMAPINFO struct size
+               SCR_CaptureVideo_RIFF_Write32(width); // width
+               SCR_CaptureVideo_RIFF_Write32(height); // height
+               SCR_CaptureVideo_RIFF_Write16(3); // planes
+               SCR_CaptureVideo_RIFF_Write16(12); // bitcount
+               SCR_CaptureVideo_RIFF_WriteFourCC("I420"); // compression
+               SCR_CaptureVideo_RIFF_Write32(width*height+(width/2)*(height/2)*2); // size of image
+               SCR_CaptureVideo_RIFF_Write32(0); // x pixels per meter
+               SCR_CaptureVideo_RIFF_Write32(0); // y pixels per meter
+               SCR_CaptureVideo_RIFF_Write32(0); // color used
+               SCR_CaptureVideo_RIFF_Write32(0); // color important
+               SCR_CaptureVideo_RIFF_Pop();
+               // master index
+               if(format->canseek)
+               {
+                       SCR_CaptureVideo_RIFF_Push("indx", NULL, -1);
+                       SCR_CaptureVideo_RIFF_Write16(4); // wLongsPerEntry
+                       SCR_CaptureVideo_RIFF_Write16(0); // bIndexSubType=0, bIndexType=0
+                       format->videofile_ix_master_video_inuse_offset = SCR_CaptureVideo_RIFF_GetPosition();
+                       SCR_CaptureVideo_RIFF_Write32(0); // nEntriesInUse
+                       SCR_CaptureVideo_RIFF_WriteFourCC("00dc"); // dwChunkId
+                       SCR_CaptureVideo_RIFF_Write32(0); // dwReserved1
+                       SCR_CaptureVideo_RIFF_Write32(0); // dwReserved2
+                       SCR_CaptureVideo_RIFF_Write32(0); // dwReserved3
+                       format->videofile_ix_master_video_start_offset = SCR_CaptureVideo_RIFF_GetPosition();
+                       for(i = 0; i < AVI_MASTER_INDEX_SIZE * 4; ++i)
+                               SCR_CaptureVideo_RIFF_Write32(0); // fill up later
+                       SCR_CaptureVideo_RIFF_Pop();
+               }
+               // extended format (aspect!)
+               SCR_CaptureVideo_RIFF_Push("vprp", NULL, 68);
+               SCR_CaptureVideo_RIFF_Write32(0); // VideoFormatToken
+               SCR_CaptureVideo_RIFF_Write32(0); // VideoStandard
+               SCR_CaptureVideo_RIFF_Write32((int)cls.capturevideo.framerate); // dwVerticalRefreshRate (bogus)
+               SCR_CaptureVideo_RIFF_Write32(width); // dwHTotalInT
+               SCR_CaptureVideo_RIFF_Write32(height); // dwVTotalInLines
+               FindFraction(aspect, &n, &d, 1000);
+               SCR_CaptureVideo_RIFF_Write32((n << 16) | d); // dwFrameAspectRatio // TODO a word
+               SCR_CaptureVideo_RIFF_Write32(width); // dwFrameWidthInPixels
+               SCR_CaptureVideo_RIFF_Write32(height); // dwFrameHeightInLines
+               SCR_CaptureVideo_RIFF_Write32(1); // nFieldPerFrame
+               SCR_CaptureVideo_RIFF_Write32(width); // CompressedBMWidth
+               SCR_CaptureVideo_RIFF_Write32(height); // CompressedBMHeight
+               SCR_CaptureVideo_RIFF_Write32(width); // ValidBMHeight
+               SCR_CaptureVideo_RIFF_Write32(height); // ValidBMWidth
+               SCR_CaptureVideo_RIFF_Write32(0); // ValidBMXOffset
+               SCR_CaptureVideo_RIFF_Write32(0); // ValidBMYOffset
+               SCR_CaptureVideo_RIFF_Write32(0); // ValidBMXOffsetInT
+               SCR_CaptureVideo_RIFF_Write32(0); // ValidBMYValidStartLine
+               SCR_CaptureVideo_RIFF_Pop();
+               SCR_CaptureVideo_RIFF_Pop();
+               if (cls.capturevideo.soundrate)
+               {
+                       // audio stream info
+                       SCR_CaptureVideo_RIFF_Push("LIST", "strl", format->canseek ? -1 : 12+52+8+18);
+                       SCR_CaptureVideo_RIFF_Push("strh", "auds", 52);
+                       SCR_CaptureVideo_RIFF_Write32(1); // stream fourcc (PCM audio, uncompressed)
+                       SCR_CaptureVideo_RIFF_Write32(0); // flags
+                       SCR_CaptureVideo_RIFF_Write16(0); // priority
+                       SCR_CaptureVideo_RIFF_Write16(0); // language
+                       SCR_CaptureVideo_RIFF_Write32(0); // initial frames
+                       SCR_CaptureVideo_RIFF_Write32(1); // samples/second divisor
+                       SCR_CaptureVideo_RIFF_Write32((int)(cls.capturevideo.soundrate)); // samples/second multiplied by divisor
+                       SCR_CaptureVideo_RIFF_Write32(0); // start
+                       format->videofile_totalsampleframes_offset = SCR_CaptureVideo_RIFF_GetPosition();
+                       SCR_CaptureVideo_RIFF_Write32(0xFFFFFFFF); // length
+                       SCR_CaptureVideo_RIFF_Write32(cls.capturevideo.soundrate * 2); // suggested buffer size (this is a half second)
+                       SCR_CaptureVideo_RIFF_Write32(0); // quality
+                       SCR_CaptureVideo_RIFF_Write32(4); // sample size
+                       SCR_CaptureVideo_RIFF_Write16(0); // frame left
+                       SCR_CaptureVideo_RIFF_Write16(0); // frame top
+                       SCR_CaptureVideo_RIFF_Write16(0); // frame right
+                       SCR_CaptureVideo_RIFF_Write16(0); // frame bottom
+                       SCR_CaptureVideo_RIFF_Pop();
+                       // audio stream format
+                       SCR_CaptureVideo_RIFF_Push("strf", NULL, 18);
+                       SCR_CaptureVideo_RIFF_Write16(1); // format (uncompressed PCM?)
+                       SCR_CaptureVideo_RIFF_Write16(2); // channels (stereo)
+                       SCR_CaptureVideo_RIFF_Write32(cls.capturevideo.soundrate); // sampleframes per second
+                       SCR_CaptureVideo_RIFF_Write32(cls.capturevideo.soundrate * 4); // average bytes per second
+                       SCR_CaptureVideo_RIFF_Write16(4); // block align
+                       SCR_CaptureVideo_RIFF_Write16(16); // bits per sample
+                       SCR_CaptureVideo_RIFF_Write16(0); // size
+                       SCR_CaptureVideo_RIFF_Pop();
+                       // master index
+                       if(format->canseek)
+                       {
+                               SCR_CaptureVideo_RIFF_Push("indx", NULL, -1);
+                               SCR_CaptureVideo_RIFF_Write16(4); // wLongsPerEntry
+                               SCR_CaptureVideo_RIFF_Write16(0); // bIndexSubType=0, bIndexType=0
+                               format->videofile_ix_master_audio_inuse_offset = SCR_CaptureVideo_RIFF_GetPosition();
+                               SCR_CaptureVideo_RIFF_Write32(0); // nEntriesInUse
+                               SCR_CaptureVideo_RIFF_WriteFourCC("01wb"); // dwChunkId
+                               SCR_CaptureVideo_RIFF_Write32(0); // dwReserved1
+                               SCR_CaptureVideo_RIFF_Write32(0); // dwReserved2
+                               SCR_CaptureVideo_RIFF_Write32(0); // dwReserved3
+                               format->videofile_ix_master_audio_start_offset = SCR_CaptureVideo_RIFF_GetPosition();
+                               for(i = 0; i < AVI_MASTER_INDEX_SIZE * 4; ++i)
+                                       SCR_CaptureVideo_RIFF_Write32(0); // fill up later
+                               SCR_CaptureVideo_RIFF_Pop();
+                       }
+                       SCR_CaptureVideo_RIFF_Pop();
+               }
+
+               format->videofile_ix_master_audio_inuse = format->videofile_ix_master_video_inuse = 0;
+
+               // extended header (for total #frames)
+               SCR_CaptureVideo_RIFF_Push("LIST", "odml", 8+4);
+               SCR_CaptureVideo_RIFF_Push("dmlh", NULL, 4);
+               format->videofile_totalframes_offset2 = SCR_CaptureVideo_RIFF_GetPosition();
+               SCR_CaptureVideo_RIFF_Write32(0xFFFFFFFF);
+               SCR_CaptureVideo_RIFF_Pop();
+               SCR_CaptureVideo_RIFF_Pop();
+
+               // close the AVI header list
+               SCR_CaptureVideo_RIFF_Pop();
+               // software that produced this AVI video file
+               SCR_CaptureVideo_RIFF_Push("LIST", "INFO", 8+((strlen(engineversion) | 1) + 1));
+               SCR_CaptureVideo_RIFF_Push("ISFT", NULL, strlen(engineversion) + 1);
+               SCR_CaptureVideo_RIFF_WriteTerminatedString(engineversion);
+               SCR_CaptureVideo_RIFF_Pop();
+               // enable this junk filler if you like the LIST movi to always begin at 4KB in the file (why?)
+#if 0
+               SCR_CaptureVideo_RIFF_Push("JUNK", NULL);
+               x = 4096 - SCR_CaptureVideo_RIFF_GetPosition();
+               while (x > 0)
+               {
+                       const char *junkfiller = "[ DarkPlaces junk data ]";
+                       int i = min(x, (int)strlen(junkfiller));
+                       SCR_CaptureVideo_RIFF_WriteBytes((const unsigned char *)junkfiller, i);
+                       x -= i;
+               }
+               SCR_CaptureVideo_RIFF_Pop();
+#endif
+               SCR_CaptureVideo_RIFF_Pop();
+               // begin the actual video section now
+               SCR_CaptureVideo_RIFF_Push("LIST", "movi", format->canseek ? -1 : 0);
+               format->videofile_ix_movistart = format->riffstackstartoffset[1];
+               // we're done with the headers now...
+               SCR_CaptureVideo_RIFF_Flush();
+               if (format->riffstacklevel != 2)
+                       Sys_Error("SCR_CaptureVideo_BeginVideo: broken AVI writing code (stack level is %i (should be 2) at end of headers)\n", format->riffstacklevel);
+
+               if(!format->canseek)
+               {
+                       // close the movi immediately
+                       SCR_CaptureVideo_RIFF_Pop();
+                       // close the AVI immediately (we'll put all frames into AVIX)
+                       SCR_CaptureVideo_RIFF_Pop();
+               }
+       }
+}
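
Most of cap_avi.c above revolves around the RIFF Push/Pop pair: Push writes a chunk fourcc followed by a size placeholder (or a precomputed size hint when the output is not seekable), and Pop seeks back to patch in the real chunk size once the payload length is known, padding chunks to even length as RIFF requires. Below is a tiny standalone illustration of that seek-and-backpatch idea, using plain stdio instead of the engine's FS_* layer; file name and structure are made up for the example.

#include <stdio.h>
#include <stdint.h>

/* Write a little-endian 32-bit value at the current file position. */
static void write_le32(FILE *f, uint32_t v)
{
	unsigned char b[4] = { (unsigned char)(v & 0xff), (unsigned char)((v >> 8) & 0xff),
	                       (unsigned char)((v >> 16) & 0xff), (unsigned char)((v >> 24) & 0xff) };
	fwrite(b, 1, 4, f);
}

int main(void)
{
	FILE *f = fopen("demo.riff", "wb");
	long sizepos, end;
	uint32_t innersize;

	if (!f)
		return 1;

	/* "Push": fourcc plus a size placeholder we cannot know yet */
	fwrite("LIST", 1, 4, f);
	sizepos = ftell(f);
	write_le32(f, 0);
	fwrite("movi", 1, 4, f);      /* list type, counted as part of the inner size */
	fwrite("payload", 1, 7, f);   /* chunk contents */

	/* "Pop": seek back, patch the real inner size, return to the end */
	end = ftell(f);
	innersize = (uint32_t)(end - (sizepos + 4));
	fseek(f, sizepos, SEEK_SET);
	write_le32(f, innersize);
	fseek(f, end, SEEK_SET);
	if (innersize & 1)
		fputc(0, f);              /* RIFF chunks are padded to even size */

	fclose(f);
	return 0;
}
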
diff --git a/cap_avi.h b/cap_avi.h
new file mode 100644
index 0000000..29b294f
--- /dev/null
+++ b/cap_avi.h
@@ -0,0 +1 @@
+void SCR_CaptureVideo_Avi_BeginVideo();
diff --git a/cap_ogg.c b/cap_ogg.c
index bed8630c05c919ed6114d71e328d04694e75aab3..c1030a02597dec24b98a39c4566bfcd5455f62fa 100644
--- a/cap_ogg.c
+++ b/cap_ogg.c
@@ -6,14 +6,14 @@
 
 // video capture cvars
 static cvar_t cl_capturevideo_ogg_theora_quality = {CVAR_SAVE, "cl_capturevideo_ogg_theora_quality", "16", "video quality factor (0 to 63), or -1 to use bitrate only; higher is better"};
-static cvar_t cl_capturevideo_ogg_theora_bitrate = {CVAR_SAVE, "cl_capturevideo_ogg_theora_quality", "-1", "video bitrate (45000 to 2000000 kbps), or -1 to use quality only; higher is better"};
+static cvar_t cl_capturevideo_ogg_theora_bitrate = {CVAR_SAVE, "cl_capturevideo_ogg_theora_bitrate", "-1", "video bitrate (45 to 2000 kbps), or -1 to use quality only; higher is better"};
 static cvar_t cl_capturevideo_ogg_theora_keyframe_bitrate_multiplier = {CVAR_SAVE, "cl_capturevideo_ogg_theora_keyframe_bitrate_multiplier", "1.5", "how much more bit rate to use for keyframes, specified as a factor of at least 1"};
 static cvar_t cl_capturevideo_ogg_theora_keyframe_frequency = {CVAR_SAVE, "cl_capturevideo_ogg_theora_keyframe_frequency", "64", "maximum number of frames between two key frames (1 to 1000)"};
 static cvar_t cl_capturevideo_ogg_theora_keyframe_mindistance = {CVAR_SAVE, "cl_capturevideo_ogg_theora_keyframe_mindistance", "8", "minimum number of frames between two key frames (1 to 1000)"};
 static cvar_t cl_capturevideo_ogg_theora_keyframe_auto_threshold = {CVAR_SAVE, "cl_capturevideo_ogg_theora_keyframe_auto_threshold", "80", "threshold for key frame decision (0 to 100)"};
 static cvar_t cl_capturevideo_ogg_theora_noise_sensitivity = {CVAR_SAVE, "cl_capturevideo_ogg_theora_noise_sensitivity", "1", "video noise sensitivity (0 to 6); lower is better"};
 static cvar_t cl_capturevideo_ogg_theora_sharpness = {CVAR_SAVE, "cl_capturevideo_ogg_theora_sharpness", "0", "sharpness (0 to 2); lower is sharper"};
-static cvar_t cl_capturevideo_ogg_vorbis_quality = {CVAR_SAVE, "cl_capturevideo_ogg_vorbis_quality", "5", "audio quality (-1 to 10); higher is better"};
+static cvar_t cl_capturevideo_ogg_vorbis_quality = {CVAR_SAVE, "cl_capturevideo_ogg_vorbis_quality", "1", "audio quality (-1 to 10); higher is better"};
 
 // ogg.h stuff
 typedef int16_t ogg_int16_t;
@@ -606,25 +606,203 @@ typedef struct capturevideostate_ogg_formatspecific_s
        theora_state ts;
        vorbis_dsp_state vd;
        vorbis_block vb;
+       vorbis_info vi;
        yuv_buffer yuv;
        int channels;
 }
 capturevideostate_ogg_formatspecific_t;
-#define LOAD_FORMATSPECIFIC() capturevideostate_ogg_formatspecific_t *format = (capturevideostate_ogg_formatspecific_t *) cls.capturevideo.formatspecific
+#define LOAD_FORMATSPECIFIC_OGG() capturevideostate_ogg_formatspecific_t *format = (capturevideostate_ogg_formatspecific_t *) cls.capturevideo.formatspecific
 
-void SCR_CaptureVideo_Ogg_Begin()
+static void SCR_CaptureVideo_Ogg_EndVideo()
 {
+       LOAD_FORMATSPECIFIC_OGG();
+       ogg_page pg;
+       ogg_packet pt;
+
+       // repeat the last frame so we can set the end-of-stream flag
+       qtheora_encode_YUVin(&format->ts, &format->yuv);
+       qtheora_encode_packetout(&format->ts, true, &pt);
+       qogg_stream_packetin(&format->to, &pt);
+
+       if(cls.capturevideo.soundrate)
+       {
+               qvorbis_analysis_wrote(&format->vd, 0);
+               while(qvorbis_analysis_blockout(&format->vd, &format->vb) == 1)
+               {
+                       qvorbis_analysis(&format->vb, NULL);
+                       qvorbis_bitrate_addblock(&format->vb);
+                       while(qvorbis_bitrate_flushpacket(&format->vd, &pt))
+                               qogg_stream_packetin(&format->vo, &pt);
+               }
+       }
+
+       while(qogg_stream_pageout(&format->to, &pg) > 0)
+       {
+               FS_Write(cls.capturevideo.videofile, pg.header, pg.header_len);
+               FS_Write(cls.capturevideo.videofile, pg.body, pg.body_len);
+       }
+
+       if(cls.capturevideo.soundrate)
+       {
+               while(qogg_stream_pageout(&format->vo, &pg) > 0)
+               {
+                       FS_Write(cls.capturevideo.videofile, pg.header, pg.header_len);
+                       FS_Write(cls.capturevideo.videofile, pg.body, pg.body_len);
+               }
+       }
+               
+       while (1) {
+               int result = qogg_stream_flush (&format->to, &pg);
+               if (result < 0)
+                       fprintf (stderr, "Internal Ogg library error.\n"); // TODO Host_Error
+               if (result <= 0)
+                       break;
+               FS_Write(cls.capturevideo.videofile, pg.header, pg.header_len);
+               FS_Write(cls.capturevideo.videofile, pg.body, pg.body_len);
+       }
+
+       if(cls.capturevideo.soundrate)
+       {
+               while (1) {
+                       int result = qogg_stream_flush (&format->vo, &pg);
+                       if (result < 0)
+                               fprintf (stderr, "Internal Ogg library error.\n"); // TODO Host_Error
+                       if (result <= 0)
+                               break;
+                       FS_Write(cls.capturevideo.videofile, pg.header, pg.header_len);
+                       FS_Write(cls.capturevideo.videofile, pg.body, pg.body_len);
+               }
+
+               qogg_stream_clear(&format->vo);
+               qvorbis_block_clear(&format->vb);
+               qvorbis_dsp_clear(&format->vd);
+       }
+
+       qogg_stream_clear(&format->to);
+       qtheora_clear(&format->ts);
+       qvorbis_info_clear(&format->vi);
+
+       Mem_Free(format->yuv.y);
+       Mem_Free(format->yuv.u);
+       Mem_Free(format->yuv.v);
+       Mem_Free(format);
+
+       FS_Close(cls.capturevideo.videofile);
+       cls.capturevideo.videofile = NULL;
+}
+
+static void SCR_CaptureVideo_Ogg_ConvertFrame_BGRA_to_YUV()
+{
+       LOAD_FORMATSPECIFIC_OGG();
+       int x, y;
+       int blockr, blockg, blockb;
+       unsigned char *b = cls.capturevideo.outbuffer;
+       int w = cls.capturevideo.width;
+       int h = cls.capturevideo.height;
+       int inpitch = w*4;
+
+       for(y = 0; y < h; ++y)
+       {
+               for(b = cls.capturevideo.outbuffer + (h-1-y)*w*4, x = 0; x < w; ++x)
+               {
+                       blockr = b[2];
+                       blockg = b[1];
+                       blockb = b[0];
+                       format->yuv.y[x + format->yuv.y_stride * y] =
+                               cls.capturevideo.yuvnormalizetable[0][cls.capturevideo.rgbtoyuvscaletable[0][0][blockr] + cls.capturevideo.rgbtoyuvscaletable[0][1][blockg] + cls.capturevideo.rgbtoyuvscaletable[0][2][blockb]];
+                       b += 4;
+               }
+
+               if((y & 1) == 0)
+               {
+                       for(b = cls.capturevideo.outbuffer + (h-2-y)*w*4, x = 0; x < w/2; ++x)
+                       {
+                               blockr = (b[2] + b[6] + b[inpitch+2] + b[inpitch+6]) >> 2;
+                               blockg = (b[1] + b[5] + b[inpitch+1] + b[inpitch+5]) >> 2;
+                               blockb = (b[0] + b[4] + b[inpitch+0] + b[inpitch+4]) >> 2;
+                               format->yuv.u[x + format->yuv.uv_stride * (y/2)] =
+                                       cls.capturevideo.yuvnormalizetable[1][cls.capturevideo.rgbtoyuvscaletable[1][0][blockr] + cls.capturevideo.rgbtoyuvscaletable[1][1][blockg] + cls.capturevideo.rgbtoyuvscaletable[1][2][blockb] + 128];
+                               format->yuv.v[x + format->yuv.uv_stride * (y/2)] =
+                                       cls.capturevideo.yuvnormalizetable[2][cls.capturevideo.rgbtoyuvscaletable[2][0][blockr] + cls.capturevideo.rgbtoyuvscaletable[2][1][blockg] + cls.capturevideo.rgbtoyuvscaletable[2][2][blockb] + 128];
+                               b += 8;
+                       }
+               }
+       }
+}
+
+static void SCR_CaptureVideo_Ogg_VideoFrames(int num)
+{
+       LOAD_FORMATSPECIFIC_OGG();
+       ogg_page pg;
+       ogg_packet pt;
+
+       // data is in cls.capturevideo.outbuffer as BGRA and has size width*height
+
+       SCR_CaptureVideo_Ogg_ConvertFrame_BGRA_to_YUV();
+
+       while(num-- > 0)
+       {
+               qtheora_encode_YUVin(&format->ts, &format->yuv);
+               qtheora_encode_packetout(&format->ts, false, &pt);
+               qogg_stream_packetin(&format->to, &pt);
+
+               while(qogg_stream_pageout(&format->to, &pg) > 0)
+               {
+                       FS_Write(cls.capturevideo.videofile, pg.header, pg.header_len);
+                       FS_Write(cls.capturevideo.videofile, pg.body, pg.body_len);
+               }
+       }
+}
+
+static void SCR_CaptureVideo_Ogg_SoundFrame(const portable_sampleframe_t *paintbuffer, size_t length)
+{
+       LOAD_FORMATSPECIFIC_OGG();
+       float **vorbis_buffer;
+       size_t i;
+       int j;
+       ogg_page pg;
+       ogg_packet pt;
+
+       vorbis_buffer = qvorbis_analysis_buffer(&format->vd, length);
+       for(i = 0; i < length; ++i)
+       {
+               for(j = 0; j < cls.capturevideo.soundchannels; ++j)
+                       vorbis_buffer[j][i] = paintbuffer[i].sample[j] / 32768.0f;
+       }
+       qvorbis_analysis_wrote(&format->vd, length);
+
+       while(qvorbis_analysis_blockout(&format->vd, &format->vb) == 1)
+       {
+               qvorbis_analysis(&format->vb, NULL);
+               qvorbis_bitrate_addblock(&format->vb);
+
+               while(qvorbis_bitrate_flushpacket(&format->vd, &pt))
+                       qogg_stream_packetin(&format->vo, &pt);
+       }
+
+       while(qogg_stream_pageout(&format->vo, &pg) > 0)
+       {
+               FS_Write(cls.capturevideo.videofile, pg.header, pg.header_len);
+               FS_Write(cls.capturevideo.videofile, pg.body, pg.body_len);
+       }
+}
+
+void SCR_CaptureVideo_Ogg_BeginVideo()
+{
+       cls.capturevideo.format = CAPTUREVIDEOFORMAT_OGG_VORBIS_THEORA;
        cls.capturevideo.videofile = FS_OpenRealFile(va("%s.ogv", cls.capturevideo.basename), "wb", false);
+       cls.capturevideo.endvideo = SCR_CaptureVideo_Ogg_EndVideo;
+       cls.capturevideo.videoframes = SCR_CaptureVideo_Ogg_VideoFrames;
+       cls.capturevideo.soundframe = SCR_CaptureVideo_Ogg_SoundFrame;
        cls.capturevideo.formatspecific = Mem_Alloc(tempmempool, sizeof(capturevideostate_ogg_formatspecific_t));
        {
-               LOAD_FORMATSPECIFIC();
+               LOAD_FORMATSPECIFIC_OGG();
                int num, denom;
                ogg_page pg;
                ogg_packet pt, pt2, pt3;
                theora_comment tc;
                vorbis_comment vc;
                theora_info ti;
-               vorbis_info vi;
 
                format->serial1 = rand();
                qogg_stream_init(&format->to, format->serial1);
@@ -673,7 +851,7 @@ void SCR_CaptureVideo_Ogg_Begin()
                ti.quick_p = true; // http://mlblog.osdir.com/multimedia.ogg.theora.general/2004-07/index.shtml
                ti.dropframes_p = false;
 
-               ti.target_bitrate = cl_capturevideo_ogg_theora_bitrate.integer;
+               ti.target_bitrate = cl_capturevideo_ogg_theora_bitrate.integer * 1000;
                ti.quality = cl_capturevideo_ogg_theora_quality.integer;
 
                if(ti.target_bitrate <= 0)
@@ -722,10 +900,10 @@ void SCR_CaptureVideo_Ogg_Begin()
                // vorbis?
                if(cls.capturevideo.soundrate)
                {
-                       qvorbis_info_init(&vi);
-                       qvorbis_encode_init_vbr(&vi, cls.capturevideo.soundchannels, cls.capturevideo.soundrate, bound(-1, cl_capturevideo_ogg_vorbis_quality.value, 10) * 0.1);
+                       qvorbis_info_init(&format->vi);
+                       qvorbis_encode_init_vbr(&format->vi, cls.capturevideo.soundchannels, cls.capturevideo.soundrate, bound(-1, cl_capturevideo_ogg_vorbis_quality.value, 10) * 0.099);
                        qvorbis_comment_init(&vc);
-                       qvorbis_analysis_init(&format->vd, &vi);
+                       qvorbis_analysis_init(&format->vd, &format->vi);
                        qvorbis_block_init(&format->vd, &format->vb);
                }
 
@@ -759,7 +937,6 @@ void SCR_CaptureVideo_Ogg_Begin()
                        qogg_stream_packetin(&format->vo, &pt3);
 
                        qvorbis_comment_clear(&vc);
-                       qvorbis_info_clear(&vi);
                }
 
                for(;;)
@@ -786,173 +963,3 @@ void SCR_CaptureVideo_Ogg_Begin()
                }
        }
 }
-
-void SCR_CaptureVideo_Ogg_EndVideo()
-{
-       LOAD_FORMATSPECIFIC();
-       ogg_page pg;
-       ogg_packet pt;
-
-       // repeat the last frame so we can set the end-of-stream flag
-       qtheora_encode_YUVin(&format->ts, &format->yuv);
-       qtheora_encode_packetout(&format->ts, true, &pt);
-       qogg_stream_packetin(&format->to, &pt);
-
-       if(cls.capturevideo.soundrate)
-       {
-               qvorbis_analysis_wrote(&format->vd, 0);
-               while(qvorbis_analysis_blockout(&format->vd, &format->vb) == 1)
-               {
-                       qvorbis_analysis(&format->vb, NULL);
-                       qvorbis_bitrate_addblock(&format->vb);
-                       while(qvorbis_bitrate_flushpacket(&format->vd, &pt))
-                               qogg_stream_packetin(&format->vo, &pt);
-               }
-       }
-
-       if(qogg_stream_pageout(&format->to, &pg) > 0)
-       {
-               FS_Write(cls.capturevideo.videofile, pg.header, pg.header_len);
-               FS_Write(cls.capturevideo.videofile, pg.body, pg.body_len);
-       }
-
-       if(cls.capturevideo.soundrate)
-       {
-               if(qogg_stream_pageout(&format->vo, &pg) > 0)
-               {
-                       FS_Write(cls.capturevideo.videofile, pg.header, pg.header_len);
-                       FS_Write(cls.capturevideo.videofile, pg.body, pg.body_len);
-               }
-       }
-               
-       while (1) {
-               int result = qogg_stream_flush (&format->to, &pg);
-               if (result < 0)
-                       fprintf (stderr, "Internal Ogg library error.\n"); // TODO Host_Error
-               if (result <= 0)
-                       break;
-               FS_Write(cls.capturevideo.videofile, pg.header, pg.header_len);
-               FS_Write(cls.capturevideo.videofile, pg.body, pg.body_len);
-       }
-
-       if(cls.capturevideo.soundrate)
-       {
-               while (1) {
-                       int result = qogg_stream_flush (&format->vo, &pg);
-                       if (result < 0)
-                               fprintf (stderr, "Internal Ogg library error.\n"); // TODO Host_Error
-                       if (result <= 0)
-                               break;
-                       FS_Write(cls.capturevideo.videofile, pg.header, pg.header_len);
-                       FS_Write(cls.capturevideo.videofile, pg.body, pg.body_len);
-               }
-
-               qogg_stream_clear(&format->vo);
-               qvorbis_block_clear(&format->vb);
-               qvorbis_dsp_clear(&format->vd);
-       }
-
-       qogg_stream_clear(&format->to);
-       qtheora_clear(&format->ts);
-
-       Mem_Free(format->yuv.y);
-       Mem_Free(format->yuv.u);
-       Mem_Free(format->yuv.v);
-       Mem_Free(format);
-
-       // cl_screen.c does this
-       // FS_Close(cls.capturevideo.videofile);
-       // cls.capturevideo.videofile = NULL;
-}
-
-void SCR_CaptureVideo_Ogg_ConvertFrame_BGRA_to_YUV()
-{
-       LOAD_FORMATSPECIFIC();
-       int x, y;
-       int blockr, blockg, blockb;
-       unsigned char *b = cls.capturevideo.outbuffer;
-       int w = cls.capturevideo.width;
-       int h = cls.capturevideo.height;
-       int inpitch = w*4;
-
-       for(y = 0; y < h; ++y)
-       {
-               for(b = cls.capturevideo.outbuffer + (h-1-y)*w*4, x = 0; x < w; ++x)
-               {
-                       blockr = b[2];
-                       blockg = b[1];
-                       blockb = b[0];
-                       format->yuv.y[x + format->yuv.y_stride * y] =
-                               cls.capturevideo.yuvnormalizetable[0][cls.capturevideo.rgbtoyuvscaletable[0][0][blockr] + cls.capturevideo.rgbtoyuvscaletable[0][1][blockg] + cls.capturevideo.rgbtoyuvscaletable[0][2][blockb]];
-                       b += 4;
-               }
-
-               if((y & 1) == 0)
-               {
-                       for(b = cls.capturevideo.outbuffer + (h-2-y)*w*4, x = 0; x < w/2; ++x)
-                       {
-                               blockr = (b[2] + b[6] + b[inpitch+2] + b[inpitch+6]) >> 2;
-                               blockg = (b[1] + b[5] + b[inpitch+1] + b[inpitch+5]) >> 2;
-                               blockb = (b[0] + b[4] + b[inpitch+0] + b[inpitch+4]) >> 2;
-                               format->yuv.u[x + format->yuv.uv_stride * (y/2)] =
-                                       cls.capturevideo.yuvnormalizetable[1][cls.capturevideo.rgbtoyuvscaletable[1][0][blockr] + cls.capturevideo.rgbtoyuvscaletable[1][1][blockg] + cls.capturevideo.rgbtoyuvscaletable[1][2][blockb] + 128];
-                               format->yuv.v[x + format->yuv.uv_stride * (y/2)] =
-                                       cls.capturevideo.yuvnormalizetable[2][cls.capturevideo.rgbtoyuvscaletable[2][0][blockr] + cls.capturevideo.rgbtoyuvscaletable[2][1][blockg] + cls.capturevideo.rgbtoyuvscaletable[2][2][blockb] + 128];
-                               b += 8;
-                       }
-               }
-       }
-}
-
-void SCR_CaptureVideo_Ogg_VideoFrame()
-{
-       LOAD_FORMATSPECIFIC();
-       ogg_page pg;
-       ogg_packet pt;
-
-       // data is in cls.capturevideo.outbuffer as BGRA and has size width*height
-
-       SCR_CaptureVideo_Ogg_ConvertFrame_BGRA_to_YUV();
-       qtheora_encode_YUVin(&format->ts, &format->yuv);
-       qtheora_encode_packetout(&format->ts, false, &pt);
-       qogg_stream_packetin(&format->to, &pt);
-
-       while(qogg_stream_pageout(&format->to, &pg) > 0)
-       {
-               FS_Write(cls.capturevideo.videofile, pg.header, pg.header_len);
-               FS_Write(cls.capturevideo.videofile, pg.body, pg.body_len);
-       }
-}
-
-void SCR_CaptureVideo_Ogg_SoundFrame(const portable_sampleframe_t *paintbuffer, size_t length)
-{
-       LOAD_FORMATSPECIFIC();
-       float **vorbis_buffer;
-       size_t i;
-       int j;
-       ogg_page pg;
-       ogg_packet pt;
-
-       vorbis_buffer = qvorbis_analysis_buffer(&format->vd, length);
-       for(i = 0; i < length; ++i)
-       {
-               for(j = 0; j < cls.capturevideo.soundchannels; ++j)
-                       vorbis_buffer[j][i] = paintbuffer[i].sample[j] / 32768.0f;
-       }
-       qvorbis_analysis_wrote(&format->vd, length);
-
-       while(qvorbis_analysis_blockout(&format->vd, &format->vb) == 1)
-       {
-               qvorbis_analysis(&format->vb, NULL);
-               qvorbis_bitrate_addblock(&format->vb);
-
-               while(qvorbis_bitrate_flushpacket(&format->vd, &pt))
-                       qogg_stream_packetin(&format->vo, &pt);
-       }
-
-       while(qogg_stream_pageout(&format->vo, &pg) > 0)
-       {
-               FS_Write(cls.capturevideo.videofile, pg.header, pg.header_len);
-               FS_Write(cls.capturevideo.videofile, pg.body, pg.body_len);
-       }
-}
index 287e96a0e0e029c041dc8a23b15ecf863fd58d46..a6f4bd087c475f92662169daaaa37e3975e61262 100644 (file)
--- a/cap_ogg.h
+++ b/cap_ogg.h
@@ -1,8 +1,4 @@
-#include "snd_main.h"
 void SCR_CaptureVideo_Ogg_Init();
 qboolean SCR_CaptureVideo_Ogg_Available();
-void SCR_CaptureVideo_Ogg_Begin();
-void SCR_CaptureVideo_Ogg_EndVideo();
-void SCR_CaptureVideo_Ogg_VideoFrame();
-void SCR_CaptureVideo_Ogg_SoundFrame(const portable_sampleframe_t *paintbuffer, size_t length);
+void SCR_CaptureVideo_Ogg_BeginVideo();
 void SCR_CaptureVideo_Ogg_CloseDLL();
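
The Ogg entry points removed from cap_ogg.h above no longer need to be visible to cl_screen.c; the capture core now reaches them through the callback fields added to capturevideostate_t in the client.h hunk further below. The actual wiring lives in the updated cap_ogg.c, which is not part of this excerpt, so the following is only a rough, hypothetical sketch of what SCR_CaptureVideo_Ogg_BeginVideo presumably does:

	/* hypothetical sketch, not code from this commit: the Ogg writer keeps its
	   EndVideo/VideoFrame/SoundFrame functions internal and registers them as
	   the generic capture callbacks */
	static void SCR_CaptureVideo_Ogg_VideoFrames(int num)
	{
		// frames are duplicated when capture runs behind the target framerate
		while (num-- > 0)
			SCR_CaptureVideo_Ogg_VideoFrame();
	}

	void SCR_CaptureVideo_Ogg_BeginVideo(void)
	{
		cls.capturevideo.format = CAPTUREVIDEOFORMAT_OGG_VORBIS_THEORA;
		cls.capturevideo.videofile = FS_OpenRealFile(va("%s.ogv", cls.capturevideo.basename), "wb", false); // file extension assumed
		cls.capturevideo.endvideo = SCR_CaptureVideo_Ogg_EndVideo;
		cls.capturevideo.videoframes = SCR_CaptureVideo_Ogg_VideoFrames;
		cls.capturevideo.soundframe = SCR_CaptureVideo_Ogg_SoundFrame;
		// ... allocate cls.capturevideo.formatspecific and write the Theora/Vorbis stream headers here ...
	}
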
index 05032cc115265796b242e044737f94530f3397c6..b3eb2a67297f16ea6a221cfe197052ff6c04a148 100644 (file)
--- a/cl_screen.c
+++ b/cl_screen.c
@@ -6,6 +6,7 @@
 #include "cl_collision.h"
 #include "libcurl.h"
 #include "csprogs.h"
 #include "cl_collision.h"
 #include "libcurl.h"
 #include "csprogs.h"
+#include "cap_avi.h"
 #include "cap_ogg.h"
 
 // we have to include snd_main.h here only to get access to snd_renderbuffer->format.speed when writing the AVI headers
 #include "cap_ogg.h"
 
 // we have to include snd_main.h here only to get access to snd_renderbuffer->format.speed when writing the AVI headers
@@ -31,6 +32,7 @@ cvar_t scr_screenshot_jpeg_quality = {CVAR_SAVE, "scr_screenshot_jpeg_quality","
 cvar_t scr_screenshot_gammaboost = {CVAR_SAVE, "scr_screenshot_gammaboost","1", "gamma correction on saved screenshots and videos, 1.0 saves unmodified images"};
 // scr_screenshot_name is defined in fs.c
 cvar_t cl_capturevideo = {0, "cl_capturevideo", "0", "enables saving of video to a .avi file using uncompressed I420 colorspace and PCM audio, note that scr_screenshot_gammaboost affects the brightness of the output)"};
+cvar_t cl_capturevideo_printfps = {0, "cl_capturevideo_printfps", "1", "prints the frames per second captured in capturevideo (is only written to the log file, not to the console, as that would be visible on the video)"};
 cvar_t cl_capturevideo_width = {0, "cl_capturevideo_width", "0", "scales all frames to this resolution before saving the video"};
 cvar_t cl_capturevideo_height = {0, "cl_capturevideo_height", "0", "scales all frames to this resolution before saving the video"};
 cvar_t cl_capturevideo_realtime = {0, "cl_capturevideo_realtime", "0", "causes video saving to operate in realtime (mostly useful while playing, not while capturing demos), this can produce a much lower quality video due to poor sound/video sync and will abort saving if your machine stalls for over a minute"};
@@ -53,8 +55,6 @@ cvar_t scr_refresh = {0, "scr_refresh", "1", "allows you to completely shut off
 cvar_t shownetgraph = {CVAR_SAVE, "shownetgraph", "0", "shows a graph of packet sizes and other information, 0 = off, 1 = show client netgraph, 2 = show client and server netgraphs (when hosting a server)"};
 cvar_t cl_demo_mousegrab = {0, "cl_demo_mousegrab", "0", "Allows reading the mouse input while playing demos. Useful for camera mods developed in csqc. (0: never, 1: always)"};
 
-#define AVI_MASTER_INDEX_SIZE 640 // GB ought to be enough for anyone
-
 int jpeg_supported = false;
 
 qboolean       scr_initialized;                // ready to draw
@@ -852,6 +852,7 @@ void CL_Screen_Init(void)
        Cvar_RegisterVariable (&scr_screenshot_jpeg_quality);
        Cvar_RegisterVariable (&scr_screenshot_gammaboost);
        Cvar_RegisterVariable (&cl_capturevideo);
        Cvar_RegisterVariable (&scr_screenshot_jpeg_quality);
        Cvar_RegisterVariable (&scr_screenshot_gammaboost);
        Cvar_RegisterVariable (&cl_capturevideo);
+       Cvar_RegisterVariable (&cl_capturevideo_printfps);
        Cvar_RegisterVariable (&cl_capturevideo_width);
        Cvar_RegisterVariable (&cl_capturevideo_height);
        Cvar_RegisterVariable (&cl_capturevideo_realtime);
        Cvar_RegisterVariable (&cl_capturevideo_width);
        Cvar_RegisterVariable (&cl_capturevideo_height);
        Cvar_RegisterVariable (&cl_capturevideo_realtime);
@@ -937,289 +938,11 @@ void SCR_ScreenShot_f (void)
        shotnumber++;
 }
 
-static void SCR_CaptureVideo_RIFF_Start(void)
-{
-       memset(&cls.capturevideo.riffbuffer, 0, sizeof(sizebuf_t));
-       cls.capturevideo.riffbuffer.maxsize = sizeof(cls.capturevideo.riffbufferdata);
-       cls.capturevideo.riffbuffer.data = cls.capturevideo.riffbufferdata;
-       cls.capturevideo.position = 0;
-}
-
-static void SCR_CaptureVideo_RIFF_Flush(void)
-{
-       if (cls.capturevideo.riffbuffer.cursize > 0)
-       {
-               if (!FS_Write(cls.capturevideo.videofile, cls.capturevideo.riffbuffer.data, cls.capturevideo.riffbuffer.cursize))
-                       cls.capturevideo.error = true;
-               cls.capturevideo.position += cls.capturevideo.riffbuffer.cursize;
-               cls.capturevideo.riffbuffer.cursize = 0;
-               cls.capturevideo.riffbuffer.overflowed = false;
-       }
-}
-
-static void SCR_CaptureVideo_RIFF_WriteBytes(const unsigned char *data, size_t size)
-{
-       SCR_CaptureVideo_RIFF_Flush();
-       if (!FS_Write(cls.capturevideo.videofile, data, size))
-               cls.capturevideo.error = true;
-       cls.capturevideo.position += size;
-}
-
-static void SCR_CaptureVideo_RIFF_Write32(int n)
-{
-       if (cls.capturevideo.riffbuffer.cursize + 4 > cls.capturevideo.riffbuffer.maxsize)
-               SCR_CaptureVideo_RIFF_Flush();
-       MSG_WriteLong(&cls.capturevideo.riffbuffer, n);
-}
-
-static void SCR_CaptureVideo_RIFF_Write16(int n)
-{
-       if (cls.capturevideo.riffbuffer.cursize + 2 > cls.capturevideo.riffbuffer.maxsize)
-               SCR_CaptureVideo_RIFF_Flush();
-       MSG_WriteShort(&cls.capturevideo.riffbuffer, n);
-}
-
-static void SCR_CaptureVideo_RIFF_WriteFourCC(const char *chunkfourcc)
-{
-       if (cls.capturevideo.riffbuffer.cursize + (int)strlen(chunkfourcc) > cls.capturevideo.riffbuffer.maxsize)
-               SCR_CaptureVideo_RIFF_Flush();
-       MSG_WriteUnterminatedString(&cls.capturevideo.riffbuffer, chunkfourcc);
-}
-
-static void SCR_CaptureVideo_RIFF_WriteTerminatedString(const char *string)
-{
-       if (cls.capturevideo.riffbuffer.cursize + (int)strlen(string) > cls.capturevideo.riffbuffer.maxsize)
-               SCR_CaptureVideo_RIFF_Flush();
-       MSG_WriteString(&cls.capturevideo.riffbuffer, string);
-}
-
-static fs_offset_t SCR_CaptureVideo_RIFF_GetPosition(void)
-{
-       SCR_CaptureVideo_RIFF_Flush();
-       //return FS_Tell(cls.capturevideo.videofile);
-       return cls.capturevideo.position;
-}
-
-static void SCR_CaptureVideo_RIFF_Push(const char *chunkfourcc, const char *listtypefourcc, fs_offset_t sizeHint)
-{
-       if (listtypefourcc && sizeHint >= 0)
-               sizeHint += 4; // size hint is for INNER size
-       SCR_CaptureVideo_RIFF_WriteFourCC(chunkfourcc);
-       SCR_CaptureVideo_RIFF_Write32(sizeHint);
-       SCR_CaptureVideo_RIFF_Flush();
-       cls.capturevideo.riffstacksizehint[cls.capturevideo.riffstacklevel] = sizeHint;
-       cls.capturevideo.riffstackstartoffset[cls.capturevideo.riffstacklevel] = SCR_CaptureVideo_RIFF_GetPosition();
-       cls.capturevideo.riffstackfourcc[cls.capturevideo.riffstacklevel] = chunkfourcc;
-       ++cls.capturevideo.riffstacklevel;
-       if (listtypefourcc)
-               SCR_CaptureVideo_RIFF_WriteFourCC(listtypefourcc);
-}
-
-static void SCR_CaptureVideo_RIFF_Pop(void)
-{
-       fs_offset_t offset, sizehint;
-       int x;
-       unsigned char sizebytes[4];
-       // write out the chunk size and then return to the current file position
-       cls.capturevideo.riffstacklevel--;
-       offset = SCR_CaptureVideo_RIFF_GetPosition();
-
-       sizehint = cls.capturevideo.riffstacksizehint[cls.capturevideo.riffstacklevel];
-       x = (int)(offset - (cls.capturevideo.riffstackstartoffset[cls.capturevideo.riffstacklevel]));
-
-       if(x != sizehint)
-       {
-               if(sizehint != -1)
-               {
-                       int i;
-                       Con_Printf("WARNING: invalid size hint %d when writing video data (actual size: %d)\n", (int) sizehint, x);
-                       for(i = 0; i <= cls.capturevideo.riffstacklevel; ++i)
-                       {
-                               Con_Printf("  RIFF level %d = %s\n", i, cls.capturevideo.riffstackfourcc[i]);
-                       }
-               }
-               sizebytes[0] = (x) & 0xff;sizebytes[1] = (x >> 8) & 0xff;sizebytes[2] = (x >> 16) & 0xff;sizebytes[3] = (x >> 24) & 0xff;
-               if(FS_Seek(cls.capturevideo.videofile, -(x + 4), SEEK_END) >= 0)
-               {
-                       FS_Write(cls.capturevideo.videofile, sizebytes, 4);
-               }
-               FS_Seek(cls.capturevideo.videofile, 0, SEEK_END);
-       }
-
-       if (offset & 1)
-       {
-               SCR_CaptureVideo_RIFF_WriteBytes((unsigned char *) "\0", 1);
-       }
-}
-
-static void GrowBuf(sizebuf_t *buf, int extralen)
-{
-       if(buf->cursize + extralen > buf->maxsize)
-       {
-               int oldsize = buf->maxsize;
-               unsigned char *olddata;
-               olddata = buf->data;
-               buf->maxsize = max(buf->maxsize * 2, 4096);
-               buf->data = (unsigned char *) Mem_Alloc(tempmempool, buf->maxsize);
-               if(olddata)
-               {
-                       memcpy(buf->data, olddata, oldsize);
-                       Mem_Free(olddata);
-               }
-       }
-}
-
-static void SCR_CaptureVideo_RIFF_IndexEntry(const char *chunkfourcc, int chunksize, int flags)
-{
-       if(!cls.capturevideo.canseek)
-               Host_Error("SCR_CaptureVideo_RIFF_IndexEntry called on non-seekable AVI");
-
-       if (cls.capturevideo.riffstacklevel != 2)
-               Sys_Error("SCR_Capturevideo_RIFF_IndexEntry: RIFF stack level is %i (should be 2)\n", cls.capturevideo.riffstacklevel);
-       GrowBuf(&cls.capturevideo.riffindexbuffer, 16);
-       SCR_CaptureVideo_RIFF_Flush();
-       MSG_WriteUnterminatedString(&cls.capturevideo.riffindexbuffer, chunkfourcc);
-       MSG_WriteLong(&cls.capturevideo.riffindexbuffer, flags);
-       MSG_WriteLong(&cls.capturevideo.riffindexbuffer, (int)FS_Tell(cls.capturevideo.videofile) - cls.capturevideo.riffstackstartoffset[1]);
-       MSG_WriteLong(&cls.capturevideo.riffindexbuffer, chunksize);
-}
-
-static void SCR_CaptureVideo_RIFF_MakeIxChunk(const char *fcc, const char *dwChunkId, fs_offset_t masteridx_counter, int *masteridx_count, fs_offset_t masteridx_start)
-{
-       int nMatching;
-       int i;
-       fs_offset_t ix = SCR_CaptureVideo_RIFF_GetPosition();
-       fs_offset_t pos, sz;
-       
-       if(!cls.capturevideo.canseek)
-               Host_Error("SCR_CaptureVideo_RIFF_MakeIxChunk called on non-seekable AVI");
-
-       if(*masteridx_count >= AVI_MASTER_INDEX_SIZE)
-               return;
-
-       nMatching = 0; // go through index and enumerate them
-       for(i = 0; i < cls.capturevideo.riffindexbuffer.cursize; i += 16)
-               if(!memcmp(cls.capturevideo.riffindexbuffer.data + i, dwChunkId, 4))
-                       ++nMatching;
-
-       sz = 2+2+4+4+4+4+4;
-       for(i = 0; i < cls.capturevideo.riffindexbuffer.cursize; i += 16)
-               if(!memcmp(cls.capturevideo.riffindexbuffer.data + i, dwChunkId, 4))
-                       sz += 8;
-
-       SCR_CaptureVideo_RIFF_Push(fcc, NULL, sz);
-       SCR_CaptureVideo_RIFF_Write16(2); // wLongsPerEntry
-       SCR_CaptureVideo_RIFF_Write16(0x0100); // bIndexType=1, bIndexSubType=0
-       SCR_CaptureVideo_RIFF_Write32(nMatching); // nEntriesInUse
-       SCR_CaptureVideo_RIFF_WriteFourCC(dwChunkId); // dwChunkId
-       SCR_CaptureVideo_RIFF_Write32(cls.capturevideo.videofile_ix_movistart & (fs_offset_t) 0xFFFFFFFFu);
-       SCR_CaptureVideo_RIFF_Write32(((fs_offset_t) cls.capturevideo.videofile_ix_movistart) >> 32);
-       SCR_CaptureVideo_RIFF_Write32(0); // dwReserved
-
-       for(i = 0; i < cls.capturevideo.riffindexbuffer.cursize; i += 16)
-               if(!memcmp(cls.capturevideo.riffindexbuffer.data + i, dwChunkId, 4))
-               {
-                       unsigned int *p = (unsigned int *) (cls.capturevideo.riffindexbuffer.data + i);
-                       unsigned int flags = p[1];
-                       unsigned int rpos = p[2];
-                       unsigned int size = p[3];
-                       size &= ~0x80000000;
-                       if(!(flags & 0x10)) // no keyframe?
-                               size |= 0x80000000;
-                       SCR_CaptureVideo_RIFF_Write32(rpos + 8);
-                       SCR_CaptureVideo_RIFF_Write32(size);
-               }
-
-       SCR_CaptureVideo_RIFF_Flush();
-       SCR_CaptureVideo_RIFF_Pop();
-       pos = SCR_CaptureVideo_RIFF_GetPosition();
-
-       if(FS_Seek(cls.capturevideo.videofile, masteridx_start + 16 * *masteridx_count, SEEK_SET) >= 0)
-       {
-               SCR_CaptureVideo_RIFF_Write32(ix & (fs_offset_t) 0xFFFFFFFFu);
-               SCR_CaptureVideo_RIFF_Write32(((fs_offset_t) ix) >> 32);
-               SCR_CaptureVideo_RIFF_Write32(pos - ix);
-               SCR_CaptureVideo_RIFF_Write32(nMatching);
-               SCR_CaptureVideo_RIFF_Flush();
-       }
-
-       if(FS_Seek(cls.capturevideo.videofile, masteridx_counter, SEEK_SET) >= 0)
-       {
-               SCR_CaptureVideo_RIFF_Write32(++*masteridx_count);
-               SCR_CaptureVideo_RIFF_Flush();
-       }
-
-       FS_Seek(cls.capturevideo.videofile, 0, SEEK_END); // return value doesn't matter here
-}
-
-static void SCR_CaptureVideo_RIFF_Finish(qboolean final)
-{
-       // close the "movi" list
-       SCR_CaptureVideo_RIFF_Pop();
-       if(cls.capturevideo.videofile_ix_master_video_inuse_offset)
-               SCR_CaptureVideo_RIFF_MakeIxChunk("ix00", "00dc", cls.capturevideo.videofile_ix_master_video_inuse_offset, &cls.capturevideo.videofile_ix_master_video_inuse, cls.capturevideo.videofile_ix_master_video_start_offset);
-       if(cls.capturevideo.videofile_ix_master_audio_inuse_offset)
-               SCR_CaptureVideo_RIFF_MakeIxChunk("ix01", "01wb", cls.capturevideo.videofile_ix_master_audio_inuse_offset, &cls.capturevideo.videofile_ix_master_audio_inuse, cls.capturevideo.videofile_ix_master_audio_start_offset);
-       // write the idx1 chunk that we've been building while saving the frames (for old style players)
-       if(final && cls.capturevideo.videofile_firstchunkframes_offset)
-       // TODO replace index creating by OpenDML ix##/##ix/indx chunk so it works for more than one AVI part too
-       {
-               SCR_CaptureVideo_RIFF_Push("idx1", NULL, cls.capturevideo.riffindexbuffer.cursize);
-               SCR_CaptureVideo_RIFF_WriteBytes(cls.capturevideo.riffindexbuffer.data, cls.capturevideo.riffindexbuffer.cursize);
-               SCR_CaptureVideo_RIFF_Pop();
-       }
-       cls.capturevideo.riffindexbuffer.cursize = 0;
-       // pop the RIFF chunk itself
-       while (cls.capturevideo.riffstacklevel > 0)
-               SCR_CaptureVideo_RIFF_Pop();
-       SCR_CaptureVideo_RIFF_Flush();
-       if(cls.capturevideo.videofile_firstchunkframes_offset)
-       {
-               Con_DPrintf("Finishing first chunk (%d frames)\n", cls.capturevideo.frame);
-               if(FS_Seek(cls.capturevideo.videofile, cls.capturevideo.videofile_firstchunkframes_offset, SEEK_SET) >= 0)
-               {
-                       SCR_CaptureVideo_RIFF_Write32(cls.capturevideo.frame);
-                       SCR_CaptureVideo_RIFF_Flush();
-               }
-               FS_Seek(cls.capturevideo.videofile, 0, SEEK_END);
-               cls.capturevideo.videofile_firstchunkframes_offset = 0;
-       }
-       else
-               Con_DPrintf("Finishing another chunk (%d frames)\n", cls.capturevideo.frame);
-}
-
-static void SCR_CaptureVideo_RIFF_OverflowCheck(int framesize)
-{
-       fs_offset_t cursize, curfilesize;
-       if (cls.capturevideo.riffstacklevel != 2)
-               Sys_Error("SCR_CaptureVideo_RIFF_OverflowCheck: chunk stack leakage!\n");
-       
-       if(!cls.capturevideo.canseek)
-               return;
-
-       // check where we are in the file
-       SCR_CaptureVideo_RIFF_Flush();
-       cursize = SCR_CaptureVideo_RIFF_GetPosition() - cls.capturevideo.riffstackstartoffset[0];
-       curfilesize = SCR_CaptureVideo_RIFF_GetPosition();
-
-       // if this would overflow the windows limit of 1GB per RIFF chunk, we need
-       // to close the current RIFF chunk and open another for future frames
-       if (8 + cursize + framesize + cls.capturevideo.riffindexbuffer.cursize + 8 + cls.capturevideo.riffindexbuffer.cursize + 64 > 1<<30) // note that the Ix buffer takes less space... I just don't dare to / 2 here now... sorry, maybe later
-       {
-               SCR_CaptureVideo_RIFF_Finish(false);
-               // begin a new 1GB extended section of the AVI
-               SCR_CaptureVideo_RIFF_Push("RIFF", "AVIX", -1);
-               SCR_CaptureVideo_RIFF_Push("LIST", "movi", -1);
-               cls.capturevideo.videofile_ix_movistart = cls.capturevideo.riffstackstartoffset[1];
-       }
-}
-
 void SCR_CaptureVideo_BeginVideo(void)
 {
-       double gamma, g, aspect;
-       int width = cl_capturevideo_width.integer, height = cl_capturevideo_height.integer;
-       int n, d;
+       double gamma, g;
        unsigned int i;
+       int width = cl_capturevideo_width.integer, height = cl_capturevideo_height.integer;
        if (cls.capturevideo.active)
                return;
        memset(&cls.capturevideo, 0, sizeof(cls.capturevideo));
@@ -1235,8 +958,6 @@ void SCR_CaptureVideo_BeginVideo(void)
        if(height < 2 || height > vid.height) // can't scale up
                height = vid.height;
 
        if(height < 2 || height > vid.height) // can't scale up
                height = vid.height;
 
-       aspect = vid.width / (vid.height * vid_pixelheight.value);
-
        // ensure it's all even; if not, scale down a little
        if(width % 1)
                --width;
@@ -1246,11 +967,12 @@ void SCR_CaptureVideo_BeginVideo(void)
        cls.capturevideo.width = width;
        cls.capturevideo.height = height;
        cls.capturevideo.active = true;
-       cls.capturevideo.starttime = realtime;
        cls.capturevideo.framerate = bound(1, cl_capturevideo_fps.value, 1001);
        cls.capturevideo.soundrate = S_GetSoundRate();
        cls.capturevideo.soundchannels = S_GetSoundChannels();
-       cls.capturevideo.frame = 0;
+       cls.capturevideo.startrealtime = realtime;
+       cls.capturevideo.frame = cls.capturevideo.lastfpsframe = 0;
+       cls.capturevideo.starttime = cls.capturevideo.lastfpstime = Sys_DoubleTime();
        cls.capturevideo.soundsampleframe = 0;
        cls.capturevideo.realtime = cl_capturevideo_realtime.integer != 0;
        cls.capturevideo.screenbuffer = (unsigned char *)Mem_Alloc(tempmempool, vid.width * vid.height * 4);
@@ -1299,224 +1021,11 @@ Cr = R *  .500 + G * -.419 + B * -.0813 + 128.;
 
        if (cl_capturevideo_ogg.integer && SCR_CaptureVideo_Ogg_Available())
        {
-               cls.capturevideo.format = CAPTUREVIDEOFORMAT_OGG_VORBIS_THEORA;
-               SCR_CaptureVideo_Ogg_Begin();
+               SCR_CaptureVideo_Ogg_BeginVideo();
        }
        else
        {
-               cls.capturevideo.format = CAPTUREVIDEOFORMAT_AVI_I420;
-               cls.capturevideo.videofile = FS_OpenRealFile(va("%s.avi", cls.capturevideo.basename), "wb", false);
-               cls.capturevideo.canseek = (FS_Seek(cls.capturevideo.videofile, 0, SEEK_SET) == 0);
-               SCR_CaptureVideo_RIFF_Start();
-               // enclosing RIFF chunk (there can be multiple of these in >1GB files, the later ones are "AVIX" instead of "AVI " and have no header/stream info)
-               SCR_CaptureVideo_RIFF_Push("RIFF", "AVI ", cls.capturevideo.canseek ? -1 : 12+(8+56+12+(12+52+8+40+8+68)+(cls.capturevideo.soundrate?(12+12+52+8+18):0)+12+(8+4))+12+(8+(((int) strlen(engineversion) | 1) + 1))+12);
-               // AVI main header
-               SCR_CaptureVideo_RIFF_Push("LIST", "hdrl", cls.capturevideo.canseek ? -1 : 8+56+12+(12+52+8+40+8+68)+(cls.capturevideo.soundrate?(12+12+52+8+18):0)+12+(8+4));
-               SCR_CaptureVideo_RIFF_Push("avih", NULL, 56);
-               SCR_CaptureVideo_RIFF_Write32((int)(1000000.0 / cls.capturevideo.framerate)); // microseconds per frame
-               SCR_CaptureVideo_RIFF_Write32(0); // max bytes per second
-               SCR_CaptureVideo_RIFF_Write32(0); // padding granularity
-               SCR_CaptureVideo_RIFF_Write32(0x910); // flags (AVIF_HASINDEX | AVIF_ISINTERLEAVED | AVIF_TRUSTCKTYPE)
-               cls.capturevideo.videofile_firstchunkframes_offset = SCR_CaptureVideo_RIFF_GetPosition();
-               SCR_CaptureVideo_RIFF_Write32(0); // total frames
-               SCR_CaptureVideo_RIFF_Write32(0); // initial frames
-               if (cls.capturevideo.soundrate)
-                       SCR_CaptureVideo_RIFF_Write32(2); // number of streams
-               else
-                       SCR_CaptureVideo_RIFF_Write32(1); // number of streams
-               SCR_CaptureVideo_RIFF_Write32(0); // suggested buffer size
-               SCR_CaptureVideo_RIFF_Write32(width); // width
-               SCR_CaptureVideo_RIFF_Write32(height); // height
-               SCR_CaptureVideo_RIFF_Write32(0); // reserved[0]
-               SCR_CaptureVideo_RIFF_Write32(0); // reserved[1]
-               SCR_CaptureVideo_RIFF_Write32(0); // reserved[2]
-               SCR_CaptureVideo_RIFF_Write32(0); // reserved[3]
-               SCR_CaptureVideo_RIFF_Pop();
-               // video stream info
-               SCR_CaptureVideo_RIFF_Push("LIST", "strl", cls.capturevideo.canseek ? -1 : 12+52+8+40+8+68);
-               SCR_CaptureVideo_RIFF_Push("strh", "vids", 52);
-               SCR_CaptureVideo_RIFF_WriteFourCC("I420"); // stream fourcc (I420 colorspace, uncompressed)
-               SCR_CaptureVideo_RIFF_Write32(0); // flags
-               SCR_CaptureVideo_RIFF_Write16(0); // priority
-               SCR_CaptureVideo_RIFF_Write16(0); // language
-               SCR_CaptureVideo_RIFF_Write32(0); // initial frames
-               // find an ideal divisor for the framerate
-               FindFraction(cls.capturevideo.framerate, &n, &d, 1000);
-               SCR_CaptureVideo_RIFF_Write32(d); // samples/second divisor
-               SCR_CaptureVideo_RIFF_Write32(n); // samples/second multiplied by divisor
-               SCR_CaptureVideo_RIFF_Write32(0); // start
-               cls.capturevideo.videofile_totalframes_offset1 = SCR_CaptureVideo_RIFF_GetPosition();
-               SCR_CaptureVideo_RIFF_Write32(0xFFFFFFFF); // length
-               SCR_CaptureVideo_RIFF_Write32(width*height+(width/2)*(height/2)*2); // suggested buffer size
-               SCR_CaptureVideo_RIFF_Write32(0); // quality
-               SCR_CaptureVideo_RIFF_Write32(0); // sample size
-               SCR_CaptureVideo_RIFF_Write16(0); // frame left
-               SCR_CaptureVideo_RIFF_Write16(0); // frame top
-               SCR_CaptureVideo_RIFF_Write16(width); // frame right
-               SCR_CaptureVideo_RIFF_Write16(height); // frame bottom
-               SCR_CaptureVideo_RIFF_Pop();
-               // video stream format
-               SCR_CaptureVideo_RIFF_Push("strf", NULL, 40);
-               SCR_CaptureVideo_RIFF_Write32(40); // BITMAPINFO struct size
-               SCR_CaptureVideo_RIFF_Write32(width); // width
-               SCR_CaptureVideo_RIFF_Write32(height); // height
-               SCR_CaptureVideo_RIFF_Write16(3); // planes
-               SCR_CaptureVideo_RIFF_Write16(12); // bitcount
-               SCR_CaptureVideo_RIFF_WriteFourCC("I420"); // compression
-               SCR_CaptureVideo_RIFF_Write32(width*height+(width/2)*(height/2)*2); // size of image
-               SCR_CaptureVideo_RIFF_Write32(0); // x pixels per meter
-               SCR_CaptureVideo_RIFF_Write32(0); // y pixels per meter
-               SCR_CaptureVideo_RIFF_Write32(0); // color used
-               SCR_CaptureVideo_RIFF_Write32(0); // color important
-               SCR_CaptureVideo_RIFF_Pop();
-               // master index
-               if(cls.capturevideo.canseek)
-               {
-                       SCR_CaptureVideo_RIFF_Push("indx", NULL, -1);
-                       SCR_CaptureVideo_RIFF_Write16(4); // wLongsPerEntry
-                       SCR_CaptureVideo_RIFF_Write16(0); // bIndexSubType=0, bIndexType=0
-                       cls.capturevideo.videofile_ix_master_video_inuse_offset = SCR_CaptureVideo_RIFF_GetPosition();
-                       SCR_CaptureVideo_RIFF_Write32(0); // nEntriesInUse
-                       SCR_CaptureVideo_RIFF_WriteFourCC("00dc"); // dwChunkId
-                       SCR_CaptureVideo_RIFF_Write32(0); // dwReserved1
-                       SCR_CaptureVideo_RIFF_Write32(0); // dwReserved2
-                       SCR_CaptureVideo_RIFF_Write32(0); // dwReserved3
-                       cls.capturevideo.videofile_ix_master_video_start_offset = SCR_CaptureVideo_RIFF_GetPosition();
-                       for(i = 0; i < AVI_MASTER_INDEX_SIZE * 4; ++i)
-                               SCR_CaptureVideo_RIFF_Write32(0); // fill up later
-                       SCR_CaptureVideo_RIFF_Pop();
-               }
-               // extended format (aspect!)
-               SCR_CaptureVideo_RIFF_Push("vprp", NULL, 68);
-               SCR_CaptureVideo_RIFF_Write32(0); // VideoFormatToken
-               SCR_CaptureVideo_RIFF_Write32(0); // VideoStandard
-               SCR_CaptureVideo_RIFF_Write32((int)cls.capturevideo.framerate); // dwVerticalRefreshRate (bogus)
-               SCR_CaptureVideo_RIFF_Write32(width); // dwHTotalInT
-               SCR_CaptureVideo_RIFF_Write32(height); // dwVTotalInLines
-               FindFraction(aspect, &n, &d, 1000);
-               SCR_CaptureVideo_RIFF_Write32((n << 16) | d); // dwFrameAspectRatio // TODO a word
-               SCR_CaptureVideo_RIFF_Write32(width); // dwFrameWidthInPixels
-               SCR_CaptureVideo_RIFF_Write32(height); // dwFrameHeightInLines
-               SCR_CaptureVideo_RIFF_Write32(1); // nFieldPerFrame
-               SCR_CaptureVideo_RIFF_Write32(width); // CompressedBMWidth
-               SCR_CaptureVideo_RIFF_Write32(height); // CompressedBMHeight
-               SCR_CaptureVideo_RIFF_Write32(width); // ValidBMHeight
-               SCR_CaptureVideo_RIFF_Write32(height); // ValidBMWidth
-               SCR_CaptureVideo_RIFF_Write32(0); // ValidBMXOffset
-               SCR_CaptureVideo_RIFF_Write32(0); // ValidBMYOffset
-               SCR_CaptureVideo_RIFF_Write32(0); // ValidBMXOffsetInT
-               SCR_CaptureVideo_RIFF_Write32(0); // ValidBMYValidStartLine
-               SCR_CaptureVideo_RIFF_Pop();
-               SCR_CaptureVideo_RIFF_Pop();
-               if (cls.capturevideo.soundrate)
-               {
-                       // audio stream info
-                       SCR_CaptureVideo_RIFF_Push("LIST", "strl", cls.capturevideo.canseek ? -1 : 12+52+8+18);
-                       SCR_CaptureVideo_RIFF_Push("strh", "auds", 52);
-                       SCR_CaptureVideo_RIFF_Write32(1); // stream fourcc (PCM audio, uncompressed)
-                       SCR_CaptureVideo_RIFF_Write32(0); // flags
-                       SCR_CaptureVideo_RIFF_Write16(0); // priority
-                       SCR_CaptureVideo_RIFF_Write16(0); // language
-                       SCR_CaptureVideo_RIFF_Write32(0); // initial frames
-                       SCR_CaptureVideo_RIFF_Write32(1); // samples/second divisor
-                       SCR_CaptureVideo_RIFF_Write32((int)(cls.capturevideo.soundrate)); // samples/second multiplied by divisor
-                       SCR_CaptureVideo_RIFF_Write32(0); // start
-                       cls.capturevideo.videofile_totalsampleframes_offset = SCR_CaptureVideo_RIFF_GetPosition();
-                       SCR_CaptureVideo_RIFF_Write32(0xFFFFFFFF); // length
-                       SCR_CaptureVideo_RIFF_Write32(cls.capturevideo.soundrate * 2); // suggested buffer size (this is a half second)
-                       SCR_CaptureVideo_RIFF_Write32(0); // quality
-                       SCR_CaptureVideo_RIFF_Write32(4); // sample size
-                       SCR_CaptureVideo_RIFF_Write16(0); // frame left
-                       SCR_CaptureVideo_RIFF_Write16(0); // frame top
-                       SCR_CaptureVideo_RIFF_Write16(0); // frame right
-                       SCR_CaptureVideo_RIFF_Write16(0); // frame bottom
-                       SCR_CaptureVideo_RIFF_Pop();
-                       // audio stream format
-                       SCR_CaptureVideo_RIFF_Push("strf", NULL, 18);
-                       SCR_CaptureVideo_RIFF_Write16(1); // format (uncompressed PCM?)
-                       SCR_CaptureVideo_RIFF_Write16(2); // channels (stereo)
-                       SCR_CaptureVideo_RIFF_Write32(cls.capturevideo.soundrate); // sampleframes per second
-                       SCR_CaptureVideo_RIFF_Write32(cls.capturevideo.soundrate * 4); // average bytes per second
-                       SCR_CaptureVideo_RIFF_Write16(4); // block align
-                       SCR_CaptureVideo_RIFF_Write16(16); // bits per sample
-                       SCR_CaptureVideo_RIFF_Write16(0); // size
-                       SCR_CaptureVideo_RIFF_Pop();
-                       // master index
-                       if(cls.capturevideo.canseek)
-                       {
-                               SCR_CaptureVideo_RIFF_Push("indx", NULL, -1);
-                               SCR_CaptureVideo_RIFF_Write16(4); // wLongsPerEntry
-                               SCR_CaptureVideo_RIFF_Write16(0); // bIndexSubType=0, bIndexType=0
-                               cls.capturevideo.videofile_ix_master_audio_inuse_offset = SCR_CaptureVideo_RIFF_GetPosition();
-                               SCR_CaptureVideo_RIFF_Write32(0); // nEntriesInUse
-                               SCR_CaptureVideo_RIFF_WriteFourCC("01wb"); // dwChunkId
-                               SCR_CaptureVideo_RIFF_Write32(0); // dwReserved1
-                               SCR_CaptureVideo_RIFF_Write32(0); // dwReserved2
-                               SCR_CaptureVideo_RIFF_Write32(0); // dwReserved3
-                               cls.capturevideo.videofile_ix_master_audio_start_offset = SCR_CaptureVideo_RIFF_GetPosition();
-                               for(i = 0; i < AVI_MASTER_INDEX_SIZE * 4; ++i)
-                                       SCR_CaptureVideo_RIFF_Write32(0); // fill up later
-                               SCR_CaptureVideo_RIFF_Pop();
-                       }
-                       SCR_CaptureVideo_RIFF_Pop();
-               }
-
-               cls.capturevideo.videofile_ix_master_audio_inuse = cls.capturevideo.videofile_ix_master_video_inuse = 0;
-
-               // extended header (for total #frames)
-               SCR_CaptureVideo_RIFF_Push("LIST", "odml", 8+4);
-               SCR_CaptureVideo_RIFF_Push("dmlh", NULL, 4);
-               cls.capturevideo.videofile_totalframes_offset2 = SCR_CaptureVideo_RIFF_GetPosition();
-               SCR_CaptureVideo_RIFF_Write32(0xFFFFFFFF);
-               SCR_CaptureVideo_RIFF_Pop();
-               SCR_CaptureVideo_RIFF_Pop();
-
-               // close the AVI header list
-               SCR_CaptureVideo_RIFF_Pop();
-               // software that produced this AVI video file
-               SCR_CaptureVideo_RIFF_Push("LIST", "INFO", 8+((strlen(engineversion) | 1) + 1));
-               SCR_CaptureVideo_RIFF_Push("ISFT", NULL, strlen(engineversion) + 1);
-               SCR_CaptureVideo_RIFF_WriteTerminatedString(engineversion);
-               SCR_CaptureVideo_RIFF_Pop();
-               // enable this junk filler if you like the LIST movi to always begin at 4KB in the file (why?)
-#if 0
-               SCR_CaptureVideo_RIFF_Push("JUNK", NULL);
-               x = 4096 - SCR_CaptureVideo_RIFF_GetPosition();
-               while (x > 0)
-               {
-                       const char *junkfiller = "[ DarkPlaces junk data ]";
-                       int i = min(x, (int)strlen(junkfiller));
-                       SCR_CaptureVideo_RIFF_WriteBytes((const unsigned char *)junkfiller, i);
-                       x -= i;
-               }
-               SCR_CaptureVideo_RIFF_Pop();
-#endif
-               SCR_CaptureVideo_RIFF_Pop();
-               // begin the actual video section now
-               SCR_CaptureVideo_RIFF_Push("LIST", "movi", cls.capturevideo.canseek ? -1 : 0);
-               cls.capturevideo.videofile_ix_movistart = cls.capturevideo.riffstackstartoffset[1];
-               // we're done with the headers now...
-               SCR_CaptureVideo_RIFF_Flush();
-               if (cls.capturevideo.riffstacklevel != 2)
-                       Sys_Error("SCR_CaptureVideo_BeginVideo: broken AVI writing code (stack level is %i (should be 2) at end of headers)\n", cls.capturevideo.riffstacklevel);
-
-               if(!cls.capturevideo.canseek)
-               {
-                       // close the movi immediately
-                       SCR_CaptureVideo_RIFF_Pop();
-                       // close the AVI immediately (we'll put all frames into AVIX)
-                       SCR_CaptureVideo_RIFF_Pop();
-               }
-       }
-
-       switch(cls.capturevideo.format)
-       {
-       case CAPTUREVIDEOFORMAT_AVI_I420:
-               break;
-       case CAPTUREVIDEOFORMAT_OGG_VORBIS_THEORA:
-               break;
-       default:
-               break;
+               SCR_CaptureVideo_Avi_BeginVideo();
        }
 }
 
@@ -1525,50 +1034,12 @@ void SCR_CaptureVideo_EndVideo(void)
        if (!cls.capturevideo.active)
                return;
        cls.capturevideo.active = false;
-       if (cls.capturevideo.videofile)
-       {
-               switch(cls.capturevideo.format)
-               {
-               case CAPTUREVIDEOFORMAT_AVI_I420:
-                       Con_DPrintf("Finishing capture (%d frames, %d audio frames)\n", cls.capturevideo.frame, cls.capturevideo.soundsampleframe);
 
-                       if(cls.capturevideo.canseek)
-                       {
-                               // close any open chunks
-                               SCR_CaptureVideo_RIFF_Finish(true);
+       Con_DPrintf("Finishing capture (%d frames, %d audio frames)\n", cls.capturevideo.frame, cls.capturevideo.soundsampleframe);
 
-                               // go back and fix the video frames and audio samples fields
-                               if(cls.capturevideo.videofile_totalframes_offset1)
-                               if(FS_Seek(cls.capturevideo.videofile, cls.capturevideo.videofile_totalframes_offset1, SEEK_SET) >= 0)
-                               {
-                                       SCR_CaptureVideo_RIFF_Write32(cls.capturevideo.frame);
-                                       SCR_CaptureVideo_RIFF_Flush();
-                               }
-                               if(cls.capturevideo.videofile_totalframes_offset2)
-                               if(FS_Seek(cls.capturevideo.videofile, cls.capturevideo.videofile_totalframes_offset2, SEEK_SET) >= 0)
-                               {
-                                       SCR_CaptureVideo_RIFF_Write32(cls.capturevideo.frame);
-                                       SCR_CaptureVideo_RIFF_Flush();
-                               }
-                               if (cls.capturevideo.soundrate)
-                               {
-                                       if(cls.capturevideo.videofile_totalsampleframes_offset)
-                                       if(FS_Seek(cls.capturevideo.videofile, cls.capturevideo.videofile_totalsampleframes_offset, SEEK_SET) >= 0)
-                                       {
-                                               SCR_CaptureVideo_RIFF_Write32(cls.capturevideo.soundsampleframe);
-                                               SCR_CaptureVideo_RIFF_Flush();
-                                       }
-                               }
-                       }
-                       break;
-               case CAPTUREVIDEOFORMAT_OGG_VORBIS_THEORA:
-                       SCR_CaptureVideo_Ogg_EndVideo();
-                       break;
-               default:
-                       break;
-               }
-               FS_Close(cls.capturevideo.videofile);
-               cls.capturevideo.videofile = NULL;
+       if (cls.capturevideo.videofile)
+       {
+               cls.capturevideo.endvideo();
        }
 
        if (cls.capturevideo.screenbuffer)
@@ -1583,51 +1054,9 @@ void SCR_CaptureVideo_EndVideo(void)
                cls.capturevideo.outbuffer = NULL;
        }
 
-       if (cls.capturevideo.riffindexbuffer.data)
-       {
-               Mem_Free(cls.capturevideo.riffindexbuffer.data);
-               cls.capturevideo.riffindexbuffer.data = NULL;
-       }
-
        memset(&cls.capturevideo, 0, sizeof(cls.capturevideo));
 }
 
        memset(&cls.capturevideo, 0, sizeof(cls.capturevideo));
 }
 
-// converts from BGRA32 to I420 colorspace (identical to YV12 except chroma plane order is reversed), this colorspace is handled by the Intel(r) 4:2:0 codec on Windows
-void SCR_CaptureVideo_ConvertFrame_BGRA_to_I420_flip(int width, int height, unsigned char *instart, unsigned char *outstart)
-{
-       int x, y;
-       int blockr, blockg, blockb;
-       int outoffset = (width/2)*(height/2);
-       unsigned char *b, *out;
-       // process one line at a time, and CbCr every other line at 2 pixel intervals
-       for (y = 0;y < height;y++)
-       {
-               // 1x1 Y
-               for (b = instart + (height-1-y)*width*4, out = outstart + y*width, x = 0;x < width;x++, b += 4, out++)
-               {
-                       blockr = b[2];
-                       blockg = b[1];
-                       blockb = b[0];
-                       *out = cls.capturevideo.yuvnormalizetable[0][cls.capturevideo.rgbtoyuvscaletable[0][0][blockr] + cls.capturevideo.rgbtoyuvscaletable[0][1][blockg] + cls.capturevideo.rgbtoyuvscaletable[0][2][blockb]];
-               }
-               if ((y & 1) == 0)
-               {
-                       // 2x2 Cr and Cb planes
-                       int inpitch = width*4;
-                       for (b = instart + (height-2-y)*width*4, out = outstart + width*height + (y/2)*(width/2), x = 0;x < width/2;x++, b += 8, out++)
-                       {
-                               blockr = (b[2] + b[6] + b[inpitch+2] + b[inpitch+6]) >> 2;
-                               blockg = (b[1] + b[5] + b[inpitch+1] + b[inpitch+5]) >> 2;
-                               blockb = (b[0] + b[4] + b[inpitch+0] + b[inpitch+4]) >> 2;
-                               // Cr
-                               out[0        ] = cls.capturevideo.yuvnormalizetable[1][cls.capturevideo.rgbtoyuvscaletable[1][0][blockr] + cls.capturevideo.rgbtoyuvscaletable[1][1][blockg] + cls.capturevideo.rgbtoyuvscaletable[1][2][blockb] + 128];
-                               // Cb
-                               out[outoffset] = cls.capturevideo.yuvnormalizetable[2][cls.capturevideo.rgbtoyuvscaletable[2][0][blockr] + cls.capturevideo.rgbtoyuvscaletable[2][1][blockg] + cls.capturevideo.rgbtoyuvscaletable[2][2][blockb] + 128];
-                       }
-               }
-       }
-}
-
 static void SCR_ScaleDownBGRA(unsigned char *in, int inw, int inh, unsigned char *out, int outw, int outh)
 {
        // TODO optimize this function
@@ -1676,10 +1105,11 @@ static void SCR_ScaleDownBGRA(unsigned char *in, int inw, int inh, unsigned char
        }
 }
 
-qboolean SCR_CaptureVideo_VideoFrame(int newframenum)
+void SCR_CaptureVideo_VideoFrame(int newframenum)
 {
-       int x = 0, y = 0, width = cls.capturevideo.width, height = cls.capturevideo.height;
-       unsigned char *in, *out;
+       int x = 0, y = 0;
+       int width = cls.capturevideo.width, height = cls.capturevideo.height;
+
        CHECKGLERROR
        //return SCR_ScreenShot(filename, cls.capturevideo.buffer, cls.capturevideo.buffer + vid.width * vid.height * 3, cls.capturevideo.buffer + vid.width * vid.height * 6, 0, 0, vid.width, vid.height, false, false, false, jpeg, true);
        // speed is critical here, so do saving as directly as possible
@@ -1687,105 +1117,29 @@ qboolean SCR_CaptureVideo_VideoFrame(int newframenum)
        qglReadPixels (x, y, vid.width, vid.height, GL_BGRA, GL_UNSIGNED_BYTE, cls.capturevideo.screenbuffer);CHECKGLERROR
        SCR_ScaleDownBGRA (cls.capturevideo.screenbuffer, vid.width, vid.height, cls.capturevideo.outbuffer, width, height);
 
-       switch (cls.capturevideo.format)
+       cls.capturevideo.videoframes(newframenum - cls.capturevideo.frame);
+       cls.capturevideo.frame = newframenum;
+
+       if(cl_capturevideo_printfps.integer)
        {
-       case CAPTUREVIDEOFORMAT_AVI_I420:
-               // if there's no videofile we have to just give up, and abort saving
-               if (!cls.capturevideo.videofile)
-                       return false;
-               // FIXME: width/height must be multiple of 2, enforce this?
-               in = cls.capturevideo.outbuffer;
-               out = cls.capturevideo.outbuffer + width*height*4;
-               SCR_CaptureVideo_ConvertFrame_BGRA_to_I420_flip(width, height, in, out);
-               x = width*height+(width/2)*(height/2)*2;
-               for (;cls.capturevideo.frame < newframenum;cls.capturevideo.frame++)
+               char buf[80];
+               double t = Sys_DoubleTime();
+               if(t > cls.capturevideo.lastfpstime + 1)
                {
-                       if(cls.capturevideo.canseek)
-                       {
-                               SCR_CaptureVideo_RIFF_OverflowCheck(8 + x);
-                               SCR_CaptureVideo_RIFF_IndexEntry("00dc", x, 0x10); // AVIIF_KEYFRAME
-                       }
-
-                       if(!cls.capturevideo.canseek)
-                       {
-                               SCR_CaptureVideo_RIFF_Push("RIFF", "AVIX", 12+8+x);
-                               SCR_CaptureVideo_RIFF_Push("LIST", "movi", 8+x);
-                       }
-                       SCR_CaptureVideo_RIFF_Push("00dc", NULL, x);
-                       SCR_CaptureVideo_RIFF_WriteBytes(out, x);
-                       SCR_CaptureVideo_RIFF_Pop();
-                       if(!cls.capturevideo.canseek)
-                       {
-                               SCR_CaptureVideo_RIFF_Pop();
-                               SCR_CaptureVideo_RIFF_Pop();
-                       }
+                       double fps1 = (cls.capturevideo.frame - cls.capturevideo.lastfpsframe) / (t - cls.capturevideo.lastfpstime + 0.0000001);
+                       double fps  = (cls.capturevideo.frame                                ) / (t - cls.capturevideo.starttime   + 0.0000001);
+                       dpsnprintf(buf, sizeof(buf), "capturevideo: (%.1fs) last second %.3ffps, total %.3ffps\n", cls.capturevideo.frame / cls.capturevideo.framerate, fps1, fps);
+                       Sys_PrintToTerminal(buf);
+                       cls.capturevideo.lastfpstime = t;
+                       cls.capturevideo.lastfpsframe = cls.capturevideo.frame;
                }
-               return true;
-       case CAPTUREVIDEOFORMAT_OGG_VORBIS_THEORA:
-               for (;cls.capturevideo.frame < newframenum;cls.capturevideo.frame++)
-                       SCR_CaptureVideo_Ogg_VideoFrame();
-               return true;
-       default:
-               return false;
        }
 }
 
 void SCR_CaptureVideo_SoundFrame(const portable_sampleframe_t *paintbuffer, size_t length)
 {
-       int x;
-       unsigned char bufstereo16le[PAINTBUFFER_SIZE * 4];
-       unsigned char* out_ptr;
-       size_t i;
-
        cls.capturevideo.soundsampleframe += length;
-
-       switch (cls.capturevideo.format)
-       {
-       case CAPTUREVIDEOFORMAT_AVI_I420:
-
-               // write the sound buffer as little endian 16bit interleaved stereo
-               for(i = 0, out_ptr = bufstereo16le; i < length; i++, out_ptr += 4)
-               {
-                       int n0, n1;
-
-                       n0 = paintbuffer[i].sample[0];
-                       n0 = bound(-32768, n0, 32767);
-                       out_ptr[0] = (unsigned char)n0;
-                       out_ptr[1] = (unsigned char)(n0 >> 8);
-
-                       n1 = paintbuffer[i].sample[1];
-                       n1 = bound(-32768, n1, 32767);
-                       out_ptr[2] = (unsigned char)n1;
-                       out_ptr[3] = (unsigned char)(n1 >> 8);
-               }
-
-               x = length*4;
-               if(cls.capturevideo.canseek)
-               {
-                       SCR_CaptureVideo_RIFF_OverflowCheck(8 + x);
-                       SCR_CaptureVideo_RIFF_IndexEntry("01wb", x, 0x10); // AVIIF_KEYFRAME
-               }
-
-               if(!cls.capturevideo.canseek)
-               {
-                       SCR_CaptureVideo_RIFF_Push("RIFF", "AVIX", 12+8+x);
-                       SCR_CaptureVideo_RIFF_Push("LIST", "movi", 8+x);
-               }
-               SCR_CaptureVideo_RIFF_Push("01wb", NULL, x);
-               SCR_CaptureVideo_RIFF_WriteBytes(bufstereo16le, x);
-               SCR_CaptureVideo_RIFF_Pop();
-               if(!cls.capturevideo.canseek)
-               {
-                       SCR_CaptureVideo_RIFF_Pop();
-                       SCR_CaptureVideo_RIFF_Pop();
-               }
-               break;
-       case CAPTUREVIDEOFORMAT_OGG_VORBIS_THEORA:
-               SCR_CaptureVideo_Ogg_SoundFrame(paintbuffer, length);
-               break;
-       default:
-               break;
-       }
+       cls.capturevideo.soundframe(paintbuffer, length);
 }
 
 void SCR_CaptureVideo(void)
@@ -1806,7 +1160,7 @@ void SCR_CaptureVideo(void)
                if (cls.capturevideo.realtime)
                {
                        // preserve sound sync by duplicating frames when running slow
-                       newframenum = (int)((realtime - cls.capturevideo.starttime) * cls.capturevideo.framerate);
+                       newframenum = (int)((realtime - cls.capturevideo.startrealtime) * cls.capturevideo.framerate);
                }
                else
                        newframenum = cls.capturevideo.frame + 1;
index d7b8c36773163f4b0d09cb0ca0d0a8331e6516f8..2372df941ca30c943239d2b9b734d9c4bf35703f 100644 (file)
--- a/client.h
+++ b/client.h
@@ -23,6 +23,7 @@ Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
 #define CLIENT_H
 
 #include "matrixlib.h"
+#include "snd_main.h"
 
 // LordHavoc: 256 dynamic lights
 #define MAX_DLIGHTS 256
@@ -455,45 +456,33 @@ capturevideoformat_t;
 
 typedef struct capturevideostate_s
 {
-       double starttime;
+       double startrealtime;
        double framerate;
-       // for AVI saving some values have to be written after capture ends
-       fs_offset_t videofile_firstchunkframes_offset;
-       fs_offset_t videofile_totalframes_offset1;
-       fs_offset_t videofile_totalframes_offset2;
-       fs_offset_t videofile_totalsampleframes_offset;
-       int videofile_ix_master_audio_inuse;
-       fs_offset_t videofile_ix_master_audio_inuse_offset;
-       fs_offset_t videofile_ix_master_audio_start_offset;
-       int videofile_ix_master_video_inuse;
-       fs_offset_t videofile_ix_master_video_inuse_offset;
-       fs_offset_t videofile_ix_master_video_start_offset;
-       fs_offset_t videofile_ix_movistart;
-       fs_offset_t position;
        qfile_t *videofile;
        qboolean active;
        qboolean realtime;
        qboolean error;
-       qboolean canseek;
        capturevideoformat_t format;
        int soundrate;
        int soundchannels;
        int frame;
-       int soundsampleframe; // for AVI saving
+       double starttime;
+       double lastfpstime;
+       int lastfpsframe;
+       int soundsampleframe;
        unsigned char *screenbuffer;
        unsigned char *outbuffer;
-       sizebuf_t riffbuffer;
-       unsigned char riffbufferdata[128];
-       // note: riffindex buffer has an allocated ->data member, not static like most!
-       sizebuf_t riffindexbuffer;
-       int riffstacklevel;
-       fs_offset_t riffstackstartoffset[4];
-       fs_offset_t riffstacksizehint[4];
-       const char *riffstackfourcc[4];
-       short rgbtoyuvscaletable[3][3][256];
-       unsigned char yuvnormalizetable[3][256];
        char basename[64];
        int width, height;
+       short rgbtoyuvscaletable[3][3][256];
+       unsigned char yuvnormalizetable[3][256];
+
+       // format specific functions
+       void (*endvideo) ();
+       void (*videoframes) (int num);
+       void (*soundframe) (const portable_sampleframe_t *paintbuffer, size_t length);
+
+       // format specific data
        void *formatspecific;
 }
 capturevideostate_t;
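
The struct above is now the whole contract between the capture core and a format module: three callbacks plus an opaque formatspecific pointer that each module allocates for its own state. As a hedged illustration only (none of these "Null" names exist in the commit), a minimal additional module would follow the same pattern:

	/* hypothetical example module, illustrating the new interface only */
	typedef struct capturevideostate_null_formatspecific_s
	{
		int framecount; // per-module private state goes here
	}
	capturevideostate_null_formatspecific_t;

	static void SCR_CaptureVideo_Null_EndVideo(void)
	{
		Mem_Free(cls.capturevideo.formatspecific);
		cls.capturevideo.formatspecific = NULL;
	}

	static void SCR_CaptureVideo_Null_VideoFrames(int num)
	{
		capturevideostate_null_formatspecific_t *format = (capturevideostate_null_formatspecific_t *) cls.capturevideo.formatspecific;
		// cls.capturevideo.outbuffer holds the scaled BGRA frame; a real module
		// encodes it num times (the core duplicates frames to keep sound sync)
		format->framecount += num;
	}

	static void SCR_CaptureVideo_Null_SoundFrame(const portable_sampleframe_t *paintbuffer, size_t length)
	{
		// paintbuffer holds 'length' interleaved sample frames from the mixer
	}

	void SCR_CaptureVideo_Null_BeginVideo(void)
	{
		cls.capturevideo.formatspecific = Mem_Alloc(tempmempool, sizeof(capturevideostate_null_formatspecific_t));
		cls.capturevideo.endvideo = SCR_CaptureVideo_Null_EndVideo;
		cls.capturevideo.videoframes = SCR_CaptureVideo_Null_VideoFrames;
		cls.capturevideo.soundframe = SCR_CaptureVideo_Null_SoundFrame;
	}
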
index 3195204bc5e8d8ffc1d465c1475725ea148a60f5..88d324d05ed3667932bea9831519cfe938e08ec2 100644 (file)
--- a/makefile.inc
+++ b/makefile.inc
@@ -77,6 +77,7 @@ OBJ_NOCD=cd_null.o
 
 # Common objects
 OBJ_COMMON= \
+       cap_avi.o \
        cap_ogg.o \
        cd_shared.o \
        cl_collision.o \