#include "cl_collision.h"
#include "libcurl.h"
#include "csprogs.h"
+#include "cap_ogg.h"
// we have to include snd_main.h here only to get access to snd_renderbuffer->format.speed when writing the AVI headers
#include "snd_main.h"
// cvars controlling video capture, stereo rendering and misc screen behavior
// (stray unified-diff '+' markers removed; declarations otherwise unchanged)
cvar_t cl_capturevideo_realtime = {0, "cl_capturevideo_realtime", "0", "causes video saving to operate in realtime (mostly useful while playing, not while capturing demos), this can produce a much lower quality video due to poor sound/video sync and will abort saving if your machine stalls for over a minute"};
cvar_t cl_capturevideo_fps = {0, "cl_capturevideo_fps", "30", "how many frames per second to save (29.97 for NTSC, 30 for typical PC video, 15 can be useful)"};
cvar_t cl_capturevideo_number = {CVAR_SAVE, "cl_capturevideo_number", "1", "number to append to video filename, incremented each time a capture begins"};
cvar_t cl_capturevideo_ogg = {0, "cl_capturevideo_ogg", "0", "save captured video data as Ogg/Vorbis/Theora streams"};
cvar_t r_letterbox = {0, "r_letterbox", "0", "reduces vertical height of view to simulate a letterboxed movie effect (can be used by mods for cutscenes)"};
cvar_t r_stereo_separation = {0, "r_stereo_separation", "4", "separation distance of eyes in the world (negative values are only useful for cross-eyed viewing)"};
cvar_t r_stereo_sidebyside = {0, "r_stereo_sidebyside", "0", "side by side views for those who can't afford glasses but can afford eye strain (note: use a negative r_stereo_separation if you want cross-eyed viewing)"};
cvar_t scr_stipple = {0, "scr_stipple", "0", "interlacing-like stippling of the display"};
cvar_t scr_refresh = {0, "scr_refresh", "1", "allows you to completely shut off rendering for benchmarking purposes"};
cvar_t shownetgraph = {CVAR_SAVE, "shownetgraph", "0", "shows a graph of packet sizes and other information, 0 = off, 1 = show client netgraph, 2 = show client and server netgraphs (when hosting a server)"};
cvar_t cl_demo_mousegrab = {0, "cl_demo_mousegrab", "0", "Allows reading the mouse input while playing demos. Useful for camera mods developed in csqc. (0: never, 1: always)"};
#define AVI_MASTER_INDEX_SIZE 640 // GB ought to be enough for anyone
int l = newline ? (newline - start) : (int)strlen(start);
float width = DrawQ_TextWidth_Font(start, l, false, FONT_CENTERPRINT) * 8;
- x = (vid_conwidth.integer - width)/2;
+ x = (int) (vid_conwidth.integer - width)/2;
if (l > 0)
{
if (remaining < l)
SCR_InfoBar_f
==============
*/
-void SCR_InfoBar_f()
+void SCR_InfoBar_f(void)
{
if(Cmd_Argc() == 3)
{
Cvar_RegisterVariable (&cl_capturevideo_realtime);
Cvar_RegisterVariable (&cl_capturevideo_fps);
Cvar_RegisterVariable (&cl_capturevideo_number);
+ Cvar_RegisterVariable (&cl_capturevideo_ogg);
Cvar_RegisterVariable (&r_letterbox);
Cvar_RegisterVariable(&r_stereo_separation);
Cvar_RegisterVariable(&r_stereo_sidebyside);
Cvar_RegisterVariable(&scr_stipple);
Cvar_RegisterVariable(&scr_refresh);
Cvar_RegisterVariable(&shownetgraph);
+ Cvar_RegisterVariable(&cl_demo_mousegrab);
Cmd_AddCommand ("sizeup",SCR_SizeUp_f, "increase view size (increases viewsize cvar)");
Cmd_AddCommand ("sizedown",SCR_SizeDown_f, "decrease view size (decreases viewsize cvar)");
Cmd_AddCommand ("envmap", R_Envmap_f, "render a cubemap (skybox) of the current scene");
Cmd_AddCommand ("infobar", SCR_InfoBar_f, "display a text in the infobar (usage: infobar expiretime string)");
+ SCR_CaptureVideo_Ogg_Init();
+
scr_initialized = true;
}
memset(&cls.capturevideo.riffbuffer, 0, sizeof(sizebuf_t));
cls.capturevideo.riffbuffer.maxsize = sizeof(cls.capturevideo.riffbufferdata);
cls.capturevideo.riffbuffer.data = cls.capturevideo.riffbufferdata;
+ cls.capturevideo.position = 0;
}
// Writes the buffered RIFF bytes to the capture file and resets the buffer.
// Also advances the manually tracked write position (cls.capturevideo.position)
// so chunk offsets can be computed without FS_Tell — needed when the output
// stream is not seekable.  On write failure the capture error flag is set.
static void SCR_CaptureVideo_RIFF_Flush(void)
{
	if (!FS_Write(cls.capturevideo.videofile, cls.capturevideo.riffbuffer.data, cls.capturevideo.riffbuffer.cursize))
		cls.capturevideo.error = true;
	cls.capturevideo.position += cls.capturevideo.riffbuffer.cursize;
	cls.capturevideo.riffbuffer.cursize = 0;
	cls.capturevideo.riffbuffer.overflowed = false;
}
SCR_CaptureVideo_RIFF_Flush();
if (!FS_Write(cls.capturevideo.videofile, data, size))
cls.capturevideo.error = true;
+ cls.capturevideo.position += size;
}
static void SCR_CaptureVideo_RIFF_Write32(int n)
static fs_offset_t SCR_CaptureVideo_RIFF_GetPosition(void)
{
SCR_CaptureVideo_RIFF_Flush();
- return FS_Tell(cls.capturevideo.videofile);
+ //return FS_Tell(cls.capturevideo.videofile);
+ return cls.capturevideo.position;
}
-static void SCR_CaptureVideo_RIFF_Push(const char *chunkfourcc, const char *listtypefourcc)
+static void SCR_CaptureVideo_RIFF_Push(const char *chunkfourcc, const char *listtypefourcc, fs_offset_t sizeHint)
{
+ if (listtypefourcc && sizeHint >= 0)
+ sizeHint += 4; // size hint is for INNER size
SCR_CaptureVideo_RIFF_WriteFourCC(chunkfourcc);
- SCR_CaptureVideo_RIFF_Write32(0);
+ SCR_CaptureVideo_RIFF_Write32(sizeHint);
SCR_CaptureVideo_RIFF_Flush();
- cls.capturevideo.riffstackstartoffset[cls.capturevideo.riffstacklevel++] = SCR_CaptureVideo_RIFF_GetPosition();
+ cls.capturevideo.riffstacksizehint[cls.capturevideo.riffstacklevel] = sizeHint;
+ cls.capturevideo.riffstackstartoffset[cls.capturevideo.riffstacklevel] = SCR_CaptureVideo_RIFF_GetPosition();
+ cls.capturevideo.riffstackfourcc[cls.capturevideo.riffstacklevel] = chunkfourcc;
+ ++cls.capturevideo.riffstacklevel;
if (listtypefourcc)
SCR_CaptureVideo_RIFF_WriteFourCC(listtypefourcc);
}
static void SCR_CaptureVideo_RIFF_Pop(void)
{
- fs_offset_t offset;
+ fs_offset_t offset, sizehint;
int x;
unsigned char sizebytes[4];
// write out the chunk size and then return to the current file position
cls.capturevideo.riffstacklevel--;
offset = SCR_CaptureVideo_RIFF_GetPosition();
+
+ sizehint = cls.capturevideo.riffstacksizehint[cls.capturevideo.riffstacklevel];
x = (int)(offset - (cls.capturevideo.riffstackstartoffset[cls.capturevideo.riffstacklevel]));
- sizebytes[0] = (x) & 0xff;sizebytes[1] = (x >> 8) & 0xff;sizebytes[2] = (x >> 16) & 0xff;sizebytes[3] = (x >> 24) & 0xff;
- FS_Seek(cls.capturevideo.videofile, -(x + 4), SEEK_END);
- FS_Write(cls.capturevideo.videofile, sizebytes, 4);
- FS_Seek(cls.capturevideo.videofile, 0, SEEK_END);
+
+ if(x != sizehint)
+ {
+ if(sizehint != -1)
+ {
+ int i;
+ Con_Printf("WARNING: invalid size hint %d when writing video data (actual size: %d)\n", (int) sizehint, x);
+ for(i = 0; i <= cls.capturevideo.riffstacklevel; ++i)
+ {
+ Con_Printf(" RIFF level %d = %s\n", i, cls.capturevideo.riffstackfourcc[i]);
+ }
+ }
+ sizebytes[0] = (x) & 0xff;sizebytes[1] = (x >> 8) & 0xff;sizebytes[2] = (x >> 16) & 0xff;sizebytes[3] = (x >> 24) & 0xff;
+ if(FS_Seek(cls.capturevideo.videofile, -(x + 4), SEEK_END) >= 0)
+ {
+ FS_Write(cls.capturevideo.videofile, sizebytes, 4);
+ }
+ FS_Seek(cls.capturevideo.videofile, 0, SEEK_END);
+ }
+
if (offset & 1)
{
- unsigned char c = 0;
- FS_Write(cls.capturevideo.videofile, &c, 1);
+ SCR_CaptureVideo_RIFF_WriteBytes((unsigned char *) "\0", 1);
}
}
unsigned char *olddata;
olddata = buf->data;
buf->maxsize = max(buf->maxsize * 2, 4096);
- buf->data = Mem_Alloc(tempmempool, buf->maxsize);
+ buf->data = (unsigned char *) Mem_Alloc(tempmempool, buf->maxsize);
if(olddata)
{
memcpy(buf->data, olddata, oldsize);
static void SCR_CaptureVideo_RIFF_IndexEntry(const char *chunkfourcc, int chunksize, int flags)
{
+ if(!cls.capturevideo.canseek)
+ Host_Error("SCR_CaptureVideo_RIFF_IndexEntry called on non-seekable AVI");
+
if (cls.capturevideo.riffstacklevel != 2)
Sys_Error("SCR_Capturevideo_RIFF_IndexEntry: RIFF stack level is %i (should be 2)\n", cls.capturevideo.riffstacklevel);
GrowBuf(&cls.capturevideo.riffindexbuffer, 16);
int nMatching;
int i;
fs_offset_t ix = SCR_CaptureVideo_RIFF_GetPosition();
- fs_offset_t pos;
+ fs_offset_t pos, sz;
+
+ if(!cls.capturevideo.canseek)
+ Host_Error("SCR_CaptureVideo_RIFF_MakeIxChunk called on non-seekable AVI");
if(*masteridx_count >= AVI_MASTER_INDEX_SIZE)
return;
if(!memcmp(cls.capturevideo.riffindexbuffer.data + i, dwChunkId, 4))
++nMatching;
- SCR_CaptureVideo_RIFF_Push(fcc, NULL);
+ sz = 2+2+4+4+4+4+4;
+ for(i = 0; i < cls.capturevideo.riffindexbuffer.cursize; i += 16)
+ if(!memcmp(cls.capturevideo.riffindexbuffer.data + i, dwChunkId, 4))
+ sz += 8;
+
+ SCR_CaptureVideo_RIFF_Push(fcc, NULL, sz);
SCR_CaptureVideo_RIFF_Write16(2); // wLongsPerEntry
SCR_CaptureVideo_RIFF_Write16(0x0100); // bIndexType=1, bIndexSubType=0
SCR_CaptureVideo_RIFF_Write32(nMatching); // nEntriesInUse
SCR_CaptureVideo_RIFF_Write32(size);
}
+ SCR_CaptureVideo_RIFF_Flush();
SCR_CaptureVideo_RIFF_Pop();
pos = SCR_CaptureVideo_RIFF_GetPosition();
- SCR_CaptureVideo_RIFF_Flush();
- FS_Seek(cls.capturevideo.videofile, masteridx_start + 16 * *masteridx_count, SEEK_SET);
- SCR_CaptureVideo_RIFF_Write32(ix & (fs_offset_t) 0xFFFFFFFFu);
- SCR_CaptureVideo_RIFF_Write32(((fs_offset_t) ix) >> 32);
- SCR_CaptureVideo_RIFF_Write32(pos - ix);
- SCR_CaptureVideo_RIFF_Write32(nMatching);
- SCR_CaptureVideo_RIFF_Flush();
+ if(FS_Seek(cls.capturevideo.videofile, masteridx_start + 16 * *masteridx_count, SEEK_SET) >= 0)
+ {
+ SCR_CaptureVideo_RIFF_Write32(ix & (fs_offset_t) 0xFFFFFFFFu);
+ SCR_CaptureVideo_RIFF_Write32(((fs_offset_t) ix) >> 32);
+ SCR_CaptureVideo_RIFF_Write32(pos - ix);
+ SCR_CaptureVideo_RIFF_Write32(nMatching);
+ SCR_CaptureVideo_RIFF_Flush();
+ }
- FS_Seek(cls.capturevideo.videofile, masteridx_counter, SEEK_SET);
- SCR_CaptureVideo_RIFF_Write32(++*masteridx_count);
- SCR_CaptureVideo_RIFF_Flush();
+ if(FS_Seek(cls.capturevideo.videofile, masteridx_counter, SEEK_SET) >= 0)
+ {
+ SCR_CaptureVideo_RIFF_Write32(++*masteridx_count);
+ SCR_CaptureVideo_RIFF_Flush();
+ }
- FS_Seek(cls.capturevideo.videofile, 0, SEEK_END);
+ FS_Seek(cls.capturevideo.videofile, 0, SEEK_END); // return value doesn't matter here
}
static void SCR_CaptureVideo_RIFF_Finish(qboolean final)
if(final && cls.capturevideo.videofile_firstchunkframes_offset)
// TODO replace index creating by OpenDML ix##/##ix/indx chunk so it works for more than one AVI part too
{
- SCR_CaptureVideo_RIFF_Push("idx1", NULL);
+ SCR_CaptureVideo_RIFF_Push("idx1", NULL, cls.capturevideo.riffindexbuffer.cursize);
SCR_CaptureVideo_RIFF_WriteBytes(cls.capturevideo.riffindexbuffer.data, cls.capturevideo.riffindexbuffer.cursize);
SCR_CaptureVideo_RIFF_Pop();
}
if(cls.capturevideo.videofile_firstchunkframes_offset)
{
Con_DPrintf("Finishing first chunk (%d frames)\n", cls.capturevideo.frame);
- FS_Seek(cls.capturevideo.videofile, cls.capturevideo.videofile_firstchunkframes_offset, SEEK_SET);
- SCR_CaptureVideo_RIFF_Write32(cls.capturevideo.frame);
- SCR_CaptureVideo_RIFF_Flush();
+ if(FS_Seek(cls.capturevideo.videofile, cls.capturevideo.videofile_firstchunkframes_offset, SEEK_SET) >= 0)
+ {
+ SCR_CaptureVideo_RIFF_Write32(cls.capturevideo.frame);
+ SCR_CaptureVideo_RIFF_Flush();
+ }
FS_Seek(cls.capturevideo.videofile, 0, SEEK_END);
cls.capturevideo.videofile_firstchunkframes_offset = 0;
}
fs_offset_t cursize, curfilesize;
if (cls.capturevideo.riffstacklevel != 2)
Sys_Error("SCR_CaptureVideo_RIFF_OverflowCheck: chunk stack leakage!\n");
+
+ if(!cls.capturevideo.canseek)
+ return;
+
// check where we are in the file
SCR_CaptureVideo_RIFF_Flush();
cursize = SCR_CaptureVideo_RIFF_GetPosition() - cls.capturevideo.riffstackstartoffset[0];
{
SCR_CaptureVideo_RIFF_Finish(false);
// begin a new 1GB extended section of the AVI
- SCR_CaptureVideo_RIFF_Push("RIFF", "AVIX");
- SCR_CaptureVideo_RIFF_Push("LIST", "movi");
+ SCR_CaptureVideo_RIFF_Push("RIFF", "AVIX", -1);
+ SCR_CaptureVideo_RIFF_Push("LIST", "movi", -1);
cls.capturevideo.videofile_ix_movistart = cls.capturevideo.riffstackstartoffset[1];
}
}
-static void FindFraction(double val, int *num, int *denom, int denomMax)
-{
- int i;
- double bestdiff;
- // initialize
- bestdiff = fabs(val);
- *num = 0;
- *denom = 1;
-
- for(i = 1; i <= denomMax; ++i)
- {
- int inum = floor(0.5 + val * i);
- double diff = fabs(val - inum / (double)i);
- if(diff < bestdiff)
- {
- bestdiff = diff;
- *num = inum;
- *denom = i;
- }
- }
-}
-
void SCR_CaptureVideo_BeginVideo(void)
{
double gamma, g, aspect;
cls.capturevideo.height = height;
cls.capturevideo.active = true;
cls.capturevideo.starttime = realtime;
- cls.capturevideo.framerate = bound(1, cl_capturevideo_fps.value, 1000);
+ cls.capturevideo.framerate = bound(1, cl_capturevideo_fps.value, 1001);
cls.capturevideo.soundrate = S_GetSoundRate();
+ cls.capturevideo.soundchannels = S_GetSoundChannels();
cls.capturevideo.frame = 0;
cls.capturevideo.soundsampleframe = 0;
cls.capturevideo.realtime = cl_capturevideo_realtime.integer != 0;
cls.capturevideo.yuvnormalizetable[2][i] = 16 + i * (240-16) / 256;
}
- //if (cl_capturevideo_)
- //{
- //}
- //else
+ if (cl_capturevideo_ogg.integer && SCR_CaptureVideo_Ogg_Available())
+ {
+ cls.capturevideo.format = CAPTUREVIDEOFORMAT_OGG_VORBIS_THEORA;
+ SCR_CaptureVideo_Ogg_Begin();
+ }
+ else
{
cls.capturevideo.format = CAPTUREVIDEOFORMAT_AVI_I420;
- cls.capturevideo.videofile = FS_Open (va("%s.avi", cls.capturevideo.basename), "wb", false, true);
+ cls.capturevideo.videofile = FS_OpenRealFile(va("%s.avi", cls.capturevideo.basename), "wb", false);
+ cls.capturevideo.canseek = (FS_Seek(cls.capturevideo.videofile, 0, SEEK_SET) == 0);
SCR_CaptureVideo_RIFF_Start();
// enclosing RIFF chunk (there can be multiple of these in >1GB files, the later ones are "AVIX" instead of "AVI " and have no header/stream info)
- SCR_CaptureVideo_RIFF_Push("RIFF", "AVI ");
+ SCR_CaptureVideo_RIFF_Push("RIFF", "AVI ", cls.capturevideo.canseek ? -1 : 12+(8+56+12+(12+52+8+40+8+68)+(cls.capturevideo.soundrate?(12+12+52+8+18):0)+12+(8+4))+12+(8+(((int) strlen(engineversion) | 1) + 1))+12);
// AVI main header
- SCR_CaptureVideo_RIFF_Push("LIST", "hdrl");
- SCR_CaptureVideo_RIFF_Push("avih", NULL);
+ SCR_CaptureVideo_RIFF_Push("LIST", "hdrl", cls.capturevideo.canseek ? -1 : 8+56+12+(12+52+8+40+8+68)+(cls.capturevideo.soundrate?(12+12+52+8+18):0)+12+(8+4));
+ SCR_CaptureVideo_RIFF_Push("avih", NULL, 56);
SCR_CaptureVideo_RIFF_Write32((int)(1000000.0 / cls.capturevideo.framerate)); // microseconds per frame
SCR_CaptureVideo_RIFF_Write32(0); // max bytes per second
SCR_CaptureVideo_RIFF_Write32(0); // padding granularity
SCR_CaptureVideo_RIFF_Write32(0); // reserved[3]
SCR_CaptureVideo_RIFF_Pop();
// video stream info
- SCR_CaptureVideo_RIFF_Push("LIST", "strl");
- SCR_CaptureVideo_RIFF_Push("strh", "vids");
+ SCR_CaptureVideo_RIFF_Push("LIST", "strl", cls.capturevideo.canseek ? -1 : 12+52+8+40+8+68);
+ SCR_CaptureVideo_RIFF_Push("strh", "vids", 52);
SCR_CaptureVideo_RIFF_WriteFourCC("I420"); // stream fourcc (I420 colorspace, uncompressed)
SCR_CaptureVideo_RIFF_Write32(0); // flags
SCR_CaptureVideo_RIFF_Write16(0); // priority
SCR_CaptureVideo_RIFF_Write32(n); // samples/second multiplied by divisor
SCR_CaptureVideo_RIFF_Write32(0); // start
cls.capturevideo.videofile_totalframes_offset1 = SCR_CaptureVideo_RIFF_GetPosition();
- SCR_CaptureVideo_RIFF_Write32(0); // length
+ SCR_CaptureVideo_RIFF_Write32(0xFFFFFFFF); // length
SCR_CaptureVideo_RIFF_Write32(width*height+(width/2)*(height/2)*2); // suggested buffer size
SCR_CaptureVideo_RIFF_Write32(0); // quality
SCR_CaptureVideo_RIFF_Write32(0); // sample size
SCR_CaptureVideo_RIFF_Write16(height); // frame bottom
SCR_CaptureVideo_RIFF_Pop();
// video stream format
- SCR_CaptureVideo_RIFF_Push("strf", NULL);
+ SCR_CaptureVideo_RIFF_Push("strf", NULL, 40);
SCR_CaptureVideo_RIFF_Write32(40); // BITMAPINFO struct size
SCR_CaptureVideo_RIFF_Write32(width); // width
SCR_CaptureVideo_RIFF_Write32(height); // height
SCR_CaptureVideo_RIFF_Write32(0); // color important
SCR_CaptureVideo_RIFF_Pop();
// master index
- SCR_CaptureVideo_RIFF_Push("indx", NULL);
- SCR_CaptureVideo_RIFF_Write16(4); // wLongsPerEntry
- SCR_CaptureVideo_RIFF_Write16(0); // bIndexSubType=0, bIndexType=0
- cls.capturevideo.videofile_ix_master_video_inuse_offset = SCR_CaptureVideo_RIFF_GetPosition();
- SCR_CaptureVideo_RIFF_Write32(0); // nEntriesInUse
- SCR_CaptureVideo_RIFF_WriteFourCC("00dc"); // dwChunkId
- SCR_CaptureVideo_RIFF_Write32(0); // dwReserved1
- SCR_CaptureVideo_RIFF_Write32(0); // dwReserved2
- SCR_CaptureVideo_RIFF_Write32(0); // dwReserved3
- cls.capturevideo.videofile_ix_master_video_start_offset = SCR_CaptureVideo_RIFF_GetPosition();
- for(i = 0; i < AVI_MASTER_INDEX_SIZE * 4; ++i)
- SCR_CaptureVideo_RIFF_Write32(0); // fill up later
- SCR_CaptureVideo_RIFF_Pop();
+ if(cls.capturevideo.canseek)
+ {
+ SCR_CaptureVideo_RIFF_Push("indx", NULL, -1);
+ SCR_CaptureVideo_RIFF_Write16(4); // wLongsPerEntry
+ SCR_CaptureVideo_RIFF_Write16(0); // bIndexSubType=0, bIndexType=0
+ cls.capturevideo.videofile_ix_master_video_inuse_offset = SCR_CaptureVideo_RIFF_GetPosition();
+ SCR_CaptureVideo_RIFF_Write32(0); // nEntriesInUse
+ SCR_CaptureVideo_RIFF_WriteFourCC("00dc"); // dwChunkId
+ SCR_CaptureVideo_RIFF_Write32(0); // dwReserved1
+ SCR_CaptureVideo_RIFF_Write32(0); // dwReserved2
+ SCR_CaptureVideo_RIFF_Write32(0); // dwReserved3
+ cls.capturevideo.videofile_ix_master_video_start_offset = SCR_CaptureVideo_RIFF_GetPosition();
+ for(i = 0; i < AVI_MASTER_INDEX_SIZE * 4; ++i)
+ SCR_CaptureVideo_RIFF_Write32(0); // fill up later
+ SCR_CaptureVideo_RIFF_Pop();
+ }
// extended format (aspect!)
- SCR_CaptureVideo_RIFF_Push("vprp", NULL);
+ SCR_CaptureVideo_RIFF_Push("vprp", NULL, 68);
SCR_CaptureVideo_RIFF_Write32(0); // VideoFormatToken
SCR_CaptureVideo_RIFF_Write32(0); // VideoStandard
SCR_CaptureVideo_RIFF_Write32((int)cls.capturevideo.framerate); // dwVerticalRefreshRate (bogus)
if (cls.capturevideo.soundrate)
{
// audio stream info
- SCR_CaptureVideo_RIFF_Push("LIST", "strl");
- SCR_CaptureVideo_RIFF_Push("strh", "auds");
+ SCR_CaptureVideo_RIFF_Push("LIST", "strl", cls.capturevideo.canseek ? -1 : 12+52+8+18);
+ SCR_CaptureVideo_RIFF_Push("strh", "auds", 52);
SCR_CaptureVideo_RIFF_Write32(1); // stream fourcc (PCM audio, uncompressed)
SCR_CaptureVideo_RIFF_Write32(0); // flags
SCR_CaptureVideo_RIFF_Write16(0); // priority
SCR_CaptureVideo_RIFF_Write32((int)(cls.capturevideo.soundrate)); // samples/second multiplied by divisor
SCR_CaptureVideo_RIFF_Write32(0); // start
cls.capturevideo.videofile_totalsampleframes_offset = SCR_CaptureVideo_RIFF_GetPosition();
- SCR_CaptureVideo_RIFF_Write32(0); // length
+ SCR_CaptureVideo_RIFF_Write32(0xFFFFFFFF); // length
SCR_CaptureVideo_RIFF_Write32(cls.capturevideo.soundrate * 2); // suggested buffer size (this is a half second)
SCR_CaptureVideo_RIFF_Write32(0); // quality
SCR_CaptureVideo_RIFF_Write32(4); // sample size
SCR_CaptureVideo_RIFF_Write16(0); // frame bottom
SCR_CaptureVideo_RIFF_Pop();
// audio stream format
- SCR_CaptureVideo_RIFF_Push("strf", NULL);
+ SCR_CaptureVideo_RIFF_Push("strf", NULL, 18);
SCR_CaptureVideo_RIFF_Write16(1); // format (uncompressed PCM?)
SCR_CaptureVideo_RIFF_Write16(2); // channels (stereo)
SCR_CaptureVideo_RIFF_Write32(cls.capturevideo.soundrate); // sampleframes per second
SCR_CaptureVideo_RIFF_Write16(0); // size
SCR_CaptureVideo_RIFF_Pop();
// master index
- SCR_CaptureVideo_RIFF_Push("indx", NULL);
- SCR_CaptureVideo_RIFF_Write16(4); // wLongsPerEntry
- SCR_CaptureVideo_RIFF_Write16(0); // bIndexSubType=0, bIndexType=0
- cls.capturevideo.videofile_ix_master_audio_inuse_offset = SCR_CaptureVideo_RIFF_GetPosition();
- SCR_CaptureVideo_RIFF_Write32(0); // nEntriesInUse
- SCR_CaptureVideo_RIFF_WriteFourCC("01wb"); // dwChunkId
- SCR_CaptureVideo_RIFF_Write32(0); // dwReserved1
- SCR_CaptureVideo_RIFF_Write32(0); // dwReserved2
- SCR_CaptureVideo_RIFF_Write32(0); // dwReserved3
- cls.capturevideo.videofile_ix_master_audio_start_offset = SCR_CaptureVideo_RIFF_GetPosition();
- for(i = 0; i < AVI_MASTER_INDEX_SIZE * 4; ++i)
- SCR_CaptureVideo_RIFF_Write32(0); // fill up later
- SCR_CaptureVideo_RIFF_Pop();
+ if(cls.capturevideo.canseek)
+ {
+ SCR_CaptureVideo_RIFF_Push("indx", NULL, -1);
+ SCR_CaptureVideo_RIFF_Write16(4); // wLongsPerEntry
+ SCR_CaptureVideo_RIFF_Write16(0); // bIndexSubType=0, bIndexType=0
+ cls.capturevideo.videofile_ix_master_audio_inuse_offset = SCR_CaptureVideo_RIFF_GetPosition();
+ SCR_CaptureVideo_RIFF_Write32(0); // nEntriesInUse
+ SCR_CaptureVideo_RIFF_WriteFourCC("01wb"); // dwChunkId
+ SCR_CaptureVideo_RIFF_Write32(0); // dwReserved1
+ SCR_CaptureVideo_RIFF_Write32(0); // dwReserved2
+ SCR_CaptureVideo_RIFF_Write32(0); // dwReserved3
+ cls.capturevideo.videofile_ix_master_audio_start_offset = SCR_CaptureVideo_RIFF_GetPosition();
+ for(i = 0; i < AVI_MASTER_INDEX_SIZE * 4; ++i)
+ SCR_CaptureVideo_RIFF_Write32(0); // fill up later
+ SCR_CaptureVideo_RIFF_Pop();
+ }
SCR_CaptureVideo_RIFF_Pop();
}
cls.capturevideo.videofile_ix_master_audio_inuse = cls.capturevideo.videofile_ix_master_video_inuse = 0;
// extended header (for total #frames)
- SCR_CaptureVideo_RIFF_Push("LIST", "odml");
- SCR_CaptureVideo_RIFF_Push("dmlh", NULL);
+ SCR_CaptureVideo_RIFF_Push("LIST", "odml", 8+4);
+ SCR_CaptureVideo_RIFF_Push("dmlh", NULL, 4);
cls.capturevideo.videofile_totalframes_offset2 = SCR_CaptureVideo_RIFF_GetPosition();
- SCR_CaptureVideo_RIFF_Write32(0);
+ SCR_CaptureVideo_RIFF_Write32(0xFFFFFFFF);
SCR_CaptureVideo_RIFF_Pop();
SCR_CaptureVideo_RIFF_Pop();
// close the AVI header list
SCR_CaptureVideo_RIFF_Pop();
// software that produced this AVI video file
- SCR_CaptureVideo_RIFF_Push("LIST", "INFO");
- SCR_CaptureVideo_RIFF_Push("ISFT", NULL);
+ SCR_CaptureVideo_RIFF_Push("LIST", "INFO", 8+((strlen(engineversion) | 1) + 1));
+ SCR_CaptureVideo_RIFF_Push("ISFT", NULL, strlen(engineversion) + 1);
SCR_CaptureVideo_RIFF_WriteTerminatedString(engineversion);
SCR_CaptureVideo_RIFF_Pop();
// enable this junk filler if you like the LIST movi to always begin at 4KB in the file (why?)
#endif
SCR_CaptureVideo_RIFF_Pop();
// begin the actual video section now
- SCR_CaptureVideo_RIFF_Push("LIST", "movi");
+ SCR_CaptureVideo_RIFF_Push("LIST", "movi", cls.capturevideo.canseek ? -1 : 0);
cls.capturevideo.videofile_ix_movistart = cls.capturevideo.riffstackstartoffset[1];
// we're done with the headers now...
SCR_CaptureVideo_RIFF_Flush();
if (cls.capturevideo.riffstacklevel != 2)
Sys_Error("SCR_CaptureVideo_BeginVideo: broken AVI writing code (stack level is %i (should be 2) at end of headers)\n", cls.capturevideo.riffstacklevel);
+
+ if(!cls.capturevideo.canseek)
+ {
+ // close the movi immediately
+ SCR_CaptureVideo_RIFF_Pop();
+ // close the AVI immediately (we'll put all frames into AVIX)
+ SCR_CaptureVideo_RIFF_Pop();
+ }
}
switch(cls.capturevideo.format)
{
case CAPTUREVIDEOFORMAT_AVI_I420:
break;
+ case CAPTUREVIDEOFORMAT_OGG_VORBIS_THEORA:
+ break;
default:
break;
}
switch(cls.capturevideo.format)
{
case CAPTUREVIDEOFORMAT_AVI_I420:
- // close any open chunks
- SCR_CaptureVideo_RIFF_Finish(true);
- // go back and fix the video frames and audio samples fields
Con_DPrintf("Finishing capture (%d frames, %d audio frames)\n", cls.capturevideo.frame, cls.capturevideo.soundsampleframe);
- FS_Seek(cls.capturevideo.videofile, cls.capturevideo.videofile_totalframes_offset1, SEEK_SET);
- SCR_CaptureVideo_RIFF_Write32(cls.capturevideo.frame);
- SCR_CaptureVideo_RIFF_Flush();
- FS_Seek(cls.capturevideo.videofile, cls.capturevideo.videofile_totalframes_offset2, SEEK_SET);
- SCR_CaptureVideo_RIFF_Write32(cls.capturevideo.frame);
- SCR_CaptureVideo_RIFF_Flush();
- if (cls.capturevideo.soundrate)
+
+ if(cls.capturevideo.canseek)
{
- FS_Seek(cls.capturevideo.videofile, cls.capturevideo.videofile_totalsampleframes_offset, SEEK_SET);
- SCR_CaptureVideo_RIFF_Write32(cls.capturevideo.soundsampleframe);
- SCR_CaptureVideo_RIFF_Flush();
+ // close any open chunks
+ SCR_CaptureVideo_RIFF_Finish(true);
+
+ // go back and fix the video frames and audio samples fields
+ if(cls.capturevideo.videofile_totalframes_offset1)
+ if(FS_Seek(cls.capturevideo.videofile, cls.capturevideo.videofile_totalframes_offset1, SEEK_SET) >= 0)
+ {
+ SCR_CaptureVideo_RIFF_Write32(cls.capturevideo.frame);
+ SCR_CaptureVideo_RIFF_Flush();
+ }
+ if(cls.capturevideo.videofile_totalframes_offset2)
+ if(FS_Seek(cls.capturevideo.videofile, cls.capturevideo.videofile_totalframes_offset2, SEEK_SET) >= 0)
+ {
+ SCR_CaptureVideo_RIFF_Write32(cls.capturevideo.frame);
+ SCR_CaptureVideo_RIFF_Flush();
+ }
+ if (cls.capturevideo.soundrate)
+ {
+ if(cls.capturevideo.videofile_totalsampleframes_offset)
+ if(FS_Seek(cls.capturevideo.videofile, cls.capturevideo.videofile_totalsampleframes_offset, SEEK_SET) >= 0)
+ {
+ SCR_CaptureVideo_RIFF_Write32(cls.capturevideo.soundsampleframe);
+ SCR_CaptureVideo_RIFF_Flush();
+ }
+ }
}
break;
+ case CAPTUREVIDEOFORMAT_OGG_VORBIS_THEORA:
+ SCR_CaptureVideo_Ogg_EndVideo();
+ break;
default:
break;
}
area = (float)outw * (float)outh / (float)inw / (float)inh;
for(y = 0; y < outh; ++y)
{
- float iny0 = y / (float)outh * inh; int iny0_i = floor(iny0);
- float iny1 = (y+1) / (float)outh * inh; int iny1_i = ceil(iny1);
+ float iny0 = y / (float)outh * inh; int iny0_i = (int) floor(iny0);
+ float iny1 = (y+1) / (float)outh * inh; int iny1_i = (int) ceil(iny1);
for(x = 0; x < outw; ++x)
{
- float inx0 = x / (float)outw * inw; int inx0_i = floor(inx0);
- float inx1 = (x+1) / (float)outw * inw; int inx1_i = ceil(inx1);
+ float inx0 = x / (float)outw * inw; int inx0_i = (int) floor(inx0);
+ float inx1 = (x+1) / (float)outw * inw; int inx1_i = (int) ceil(inx1);
float r = 0, g = 0, b = 0, alpha = 0;
int xx, yy;
}
}
- out[4*(x + outw * y)+0] = r * area;
- out[4*(x + outw * y)+1] = g * area;
- out[4*(x + outw * y)+2] = b * area;
- out[4*(x + outw * y)+3] = alpha * area;
+ out[4*(x + outw * y)+0] = (unsigned char) (r * area);
+ out[4*(x + outw * y)+1] = (unsigned char) (g * area);
+ out[4*(x + outw * y)+2] = (unsigned char) (b * area);
+ out[4*(x + outw * y)+3] = (unsigned char) (alpha * area);
}
}
}
CHECKGLERROR
//return SCR_ScreenShot(filename, cls.capturevideo.buffer, cls.capturevideo.buffer + vid.width * vid.height * 3, cls.capturevideo.buffer + vid.width * vid.height * 6, 0, 0, vid.width, vid.height, false, false, false, jpeg, true);
// speed is critical here, so do saving as directly as possible
+
+ qglReadPixels (x, y, vid.width, vid.height, GL_BGRA, GL_UNSIGNED_BYTE, cls.capturevideo.screenbuffer);CHECKGLERROR
+ SCR_ScaleDownBGRA (cls.capturevideo.screenbuffer, vid.width, vid.height, cls.capturevideo.outbuffer, width, height);
+
switch (cls.capturevideo.format)
{
case CAPTUREVIDEOFORMAT_AVI_I420:
if (!cls.capturevideo.videofile)
return false;
// FIXME: width/height must be multiple of 2, enforce this?
- qglReadPixels (x, y, vid.width, vid.height, GL_BGRA, GL_UNSIGNED_BYTE, cls.capturevideo.screenbuffer);CHECKGLERROR
- SCR_ScaleDownBGRA (cls.capturevideo.screenbuffer, vid.width, vid.height, cls.capturevideo.outbuffer, width, height);
in = cls.capturevideo.outbuffer;
out = cls.capturevideo.outbuffer + width*height*4;
SCR_CaptureVideo_ConvertFrame_BGRA_to_I420_flip(width, height, in, out);
x = width*height+(width/2)*(height/2)*2;
- SCR_CaptureVideo_RIFF_OverflowCheck(8 + x);
for (;cls.capturevideo.frame < newframenum;cls.capturevideo.frame++)
{
- SCR_CaptureVideo_RIFF_IndexEntry("00dc", x, 0x10); // AVIIF_KEYFRAME
- SCR_CaptureVideo_RIFF_Push("00dc", NULL);
+ if(cls.capturevideo.canseek)
+ {
+ SCR_CaptureVideo_RIFF_OverflowCheck(8 + x);
+ SCR_CaptureVideo_RIFF_IndexEntry("00dc", x, 0x10); // AVIIF_KEYFRAME
+ }
+
+ if(!cls.capturevideo.canseek)
+ {
+ SCR_CaptureVideo_RIFF_Push("RIFF", "AVIX", 12+8+x);
+ SCR_CaptureVideo_RIFF_Push("LIST", "movi", 8+x);
+ }
+ SCR_CaptureVideo_RIFF_Push("00dc", NULL, x);
SCR_CaptureVideo_RIFF_WriteBytes(out, x);
SCR_CaptureVideo_RIFF_Pop();
+ if(!cls.capturevideo.canseek)
+ {
+ SCR_CaptureVideo_RIFF_Pop();
+ SCR_CaptureVideo_RIFF_Pop();
+ }
}
return true;
+ case CAPTUREVIDEOFORMAT_OGG_VORBIS_THEORA:
+ for (;cls.capturevideo.frame < newframenum;cls.capturevideo.frame++)
+ SCR_CaptureVideo_Ogg_VideoFrame();
+ return true;
default:
return false;
}
}
-void SCR_CaptureVideo_SoundFrame(unsigned char *bufstereo16le, size_t length, int rate)
+void SCR_CaptureVideo_SoundFrame(const portable_sampleframe_t *paintbuffer, size_t length)
{
int x;
- cls.capturevideo.soundrate = rate;
+ unsigned char bufstereo16le[PAINTBUFFER_SIZE * 4];
+ unsigned char* out_ptr;
+ size_t i;
+
cls.capturevideo.soundsampleframe += length;
+
switch (cls.capturevideo.format)
{
case CAPTUREVIDEOFORMAT_AVI_I420:
+
+ // write the sound buffer as little endian 16bit interleaved stereo
+ for(i = 0, out_ptr = bufstereo16le; i < length; i++, out_ptr += 4)
+ {
+ int n0, n1;
+
+ n0 = paintbuffer[i].sample[0];
+ n0 = bound(-32768, n0, 32767);
+ out_ptr[0] = (unsigned char)n0;
+ out_ptr[1] = (unsigned char)(n0 >> 8);
+
+ n1 = paintbuffer[i].sample[1];
+ n1 = bound(-32768, n1, 32767);
+ out_ptr[2] = (unsigned char)n1;
+ out_ptr[3] = (unsigned char)(n1 >> 8);
+ }
+
x = length*4;
- SCR_CaptureVideo_RIFF_OverflowCheck(8 + x);
- SCR_CaptureVideo_RIFF_IndexEntry("01wb", x, 0x10); // AVIIF_KEYFRAME
- SCR_CaptureVideo_RIFF_Push("01wb", NULL);
+ if(cls.capturevideo.canseek)
+ {
+ SCR_CaptureVideo_RIFF_OverflowCheck(8 + x);
+ SCR_CaptureVideo_RIFF_IndexEntry("01wb", x, 0x10); // AVIIF_KEYFRAME
+ }
+
+ if(!cls.capturevideo.canseek)
+ {
+ SCR_CaptureVideo_RIFF_Push("RIFF", "AVIX", 12+8+x);
+ SCR_CaptureVideo_RIFF_Push("LIST", "movi", 8+x);
+ }
+ SCR_CaptureVideo_RIFF_Push("01wb", NULL, x);
SCR_CaptureVideo_RIFF_WriteBytes(bufstereo16le, x);
SCR_CaptureVideo_RIFF_Pop();
+ if(!cls.capturevideo.canseek)
+ {
+ SCR_CaptureVideo_RIFF_Pop();
+ SCR_CaptureVideo_RIFF_Pop();
+ }
+ break;
+ case CAPTUREVIDEOFORMAT_OGG_VORBIS_THEORA:
+ SCR_CaptureVideo_Ogg_SoundFrame(paintbuffer, length);
break;
default:
break;
{
showlmp_t *oldshowlmps = cl.showlmps;
cl.max_showlmps += 16;
- cl.showlmps = Mem_Alloc(cls.levelmempool, cl.max_showlmps * sizeof(showlmp_t));
+ cl.showlmps = (showlmp_t *) Mem_Alloc(cls.levelmempool, cl.max_showlmps * sizeof(showlmp_t));
if (cl.num_showlmps)
memcpy(cl.showlmps, oldshowlmps, cl.num_showlmps * sizeof(showlmp_t));
if (oldshowlmps)
if(gamemode == GAME_NEXUIZ)
{
// play a bit with the palette (experimental)
- palette_rgb_pantscolormap[15][0] = 128 + 127 * sin(cl.time / exp(1) + 0*M_PI/3);
- palette_rgb_pantscolormap[15][1] = 128 + 127 * sin(cl.time / exp(1) + 2*M_PI/3);
- palette_rgb_pantscolormap[15][2] = 128 + 127 * sin(cl.time / exp(1) + 4*M_PI/3);
- palette_rgb_shirtcolormap[15][0] = 128 + 127 * sin(cl.time / M_PI + 5*M_PI/3);
- palette_rgb_shirtcolormap[15][1] = 128 + 127 * sin(cl.time / M_PI + 3*M_PI/3);
- palette_rgb_shirtcolormap[15][2] = 128 + 127 * sin(cl.time / M_PI + 1*M_PI/3);
+ palette_rgb_pantscolormap[15][0] = (unsigned char) (128 + 127 * sin(cl.time / exp(1) + 0*M_PI/3));
+ palette_rgb_pantscolormap[15][1] = (unsigned char) (128 + 127 * sin(cl.time / exp(1) + 2*M_PI/3));
+ palette_rgb_pantscolormap[15][2] = (unsigned char) (128 + 127 * sin(cl.time / exp(1) + 4*M_PI/3));
+ palette_rgb_shirtcolormap[15][0] = (unsigned char) (128 + 127 * sin(cl.time / M_PI + 5*M_PI/3));
+ palette_rgb_shirtcolormap[15][1] = (unsigned char) (128 + 127 * sin(cl.time / M_PI + 3*M_PI/3));
+ palette_rgb_shirtcolormap[15][2] = (unsigned char) (128 + 127 * sin(cl.time / M_PI + 1*M_PI/3));
memcpy(palette_rgb_pantsscoreboard[15], palette_rgb_pantscolormap[15], sizeof(*palette_rgb_pantscolormap));
memcpy(palette_rgb_shirtscoreboard[15], palette_rgb_shirtcolormap[15], sizeof(*palette_rgb_shirtcolormap));
}
else if (key_consoleactive)
VID_SetMouse(vid.fullscreen, false, false);
else if (key_dest == key_menu_grabbed)
- VID_SetMouse(true, !in_client_mouse, true);
+ VID_SetMouse(true, vid_mouse.integer && !in_client_mouse, true);
else if (key_dest == key_menu)
- VID_SetMouse(vid.fullscreen, !in_client_mouse, true);
+ VID_SetMouse(vid.fullscreen, vid_mouse.integer && !in_client_mouse, true);
else
- VID_SetMouse(vid.fullscreen, vid_mouse.integer && !cls.demoplayback && !cl.csqc_wantsmousemove, true);
+ VID_SetMouse(vid.fullscreen, vid_mouse.integer && !cl.csqc_wantsmousemove && (!cls.demoplayback || cl_demo_mousegrab.integer), true);
VID_Finish();
}