#include "quakedef.h" #include "cl_dyntexture.h" #include "cl_video.h" #include "dpvsimpledecode.h" // cvars cvar_t cl_video_subtitles = {CVAR_SAVE, "cl_video_subtitles", "0", "show subtitles for videos (if they are presented)"}; cvar_t cl_video_subtitles_lines = {CVAR_SAVE, "cl_video_subtitles_lines", "4", "how many lines to occupy for subtitles"}; cvar_t cl_video_subtitles_textsize = {CVAR_SAVE, "cl_video_subtitles_textsize", "16", "textsize for subtitles"}; cvar_t cl_video_scale = {CVAR_SAVE, "cl_video_scale", "1", "scale of video, 1 = fullscreen, 0.75 - 3/4 of screen etc."}; cvar_t cl_video_brightness = {CVAR_SAVE, "cl_video_brightness", "1", "brightness of video, 1 = fullbright, 0.75 - 3/4 etc."}; // constants (and semi-constants) static int cl_videormask; static int cl_videobmask; static int cl_videogmask; static int cl_videobytesperpixel; static int cl_num_videos; static clvideo_t cl_videos[ MAXCLVIDEOS ]; static rtexturepool_t *cl_videotexturepool; static clvideo_t *FindUnusedVid( void ) { int i; for( i = 1 ; i < MAXCLVIDEOS ; i++ ) if( cl_videos[ i ].state == CLVIDEO_UNUSED ) return &cl_videos[ i ]; return NULL; } static qboolean OpenStream( clvideo_t * video ) { char *errorstring; video->stream = dpvsimpledecode_open( video->filename, &errorstring); if (!video->stream ) { Con_Printf("unable to open \"%s\", error: %s\n", video->filename, errorstring); return false; } return true; } static void VideoUpdateCallback(rtexture_t *rt, void *data) { clvideo_t *video = (clvideo_t *) data; R_UpdateTexture( video->cpif.tex, (unsigned char *)video->imagedata, 0, 0, video->cpif.width, video->cpif.height ); } static void LinkVideoTexture( clvideo_t *video ) { video->cpif.tex = R_LoadTexture2D( cl_videotexturepool, video->cpif.name, video->cpif.width, video->cpif.height, NULL, TEXTYPE_BGRA, TEXF_PERSISTENT | TEXF_ALLOWUPDATES, NULL ); R_MakeTextureDynamic( video->cpif.tex, VideoUpdateCallback, video ); CL_LinkDynTexture( video->cpif.name, video->cpif.tex ); } static void UnlinkVideoTexture( clvideo_t *video ) { CL_UnlinkDynTexture( video->cpif.name ); // free the texture R_FreeTexture( video->cpif.tex ); // free the image data Mem_Free( video->imagedata ); } static void SuspendVideo( clvideo_t * video ) { if( video->suspended ) return; video->suspended = true; UnlinkVideoTexture( video ); // if we are in firstframe mode, also close the stream if( video->state == CLVIDEO_FIRSTFRAME ) dpvsimpledecode_close( video->stream ); } static qboolean WakeVideo( clvideo_t * video ) { if( !video->suspended ) return true; video->suspended = false; if( video->state == CLVIDEO_FIRSTFRAME ) if( !OpenStream( video ) ) { video->state = CLVIDEO_UNUSED; return false; } video->imagedata = Mem_Alloc( cls.permanentmempool, video->cpif.width * video->cpif.height * cl_videobytesperpixel ); LinkVideoTexture( video ); // update starttime video->starttime += realtime - video->lasttime; return true; } static void LoadSubtitles( clvideo_t *video, const char *subtitlesfile ) { char *subtitle_text, *data; float subtime, sublen; int numsubs = 0; subtitle_text = (char *)FS_LoadFile(subtitlesfile, cls.permanentmempool, false, NULL); if (!subtitle_text) { Con_DPrintf( "LoadSubtitles: can't open subtitle file '%s'!\n", subtitlesfile ); return; } // parse subtitle_text // line is: x y "text" where // x - start time // y - seconds last (if 0 - last thru next sub, if negative - last to next sub - this amount of seconds) data = subtitle_text; for (;;) { if (!COM_ParseToken_QuakeC(&data, false)) break; subtime = atof( com_token ); if 
static void LoadSubtitles( clvideo_t *video, const char *subtitlesfile )
{
    char *subtitle_text, *data;
    float subtime, sublen;
    int numsubs = 0;

    subtitle_text = (char *)FS_LoadFile(subtitlesfile, cls.permanentmempool, false, NULL);
    if (!subtitle_text)
    {
        Con_DPrintf( "LoadSubtitles: can't open subtitle file '%s'!\n", subtitlesfile );
        return;
    }

    // parse subtitle_text
    // each entry is: x y "text" where
    // x - start time
    // y - duration in seconds (if 0 - lasts until the next sub, if negative - adjusted relative to the next sub's start by this many seconds)
    data = subtitle_text;
    for (;;)
    {
        if (!COM_ParseToken_QuakeC(&data, false))
            break;
        subtime = atof( com_token );
        if (!COM_ParseToken_QuakeC(&data, false))
            break;
        sublen = atof( com_token );
        if (!COM_ParseToken_QuakeC(&data, false))
            break;
        if (!com_token[0])
            continue;
        // check limits (numsubs counts the entries parsed so far)
        if (numsubs == CLVIDEO_MAX_SUBTITLES)
        {
            Con_Printf("WARNING: CLVIDEO_MAX_SUBTITLES = %i reached when reading subtitles from '%s'\n", CLVIDEO_MAX_SUBTITLES, subtitlesfile);
            break;
        }
        // add a sub
        video->subtitle_text[numsubs] = (char *) Mem_Alloc(cls.permanentmempool, strlen(com_token) + 1);
        memcpy(video->subtitle_text[numsubs], com_token, strlen(com_token) + 1);
        video->subtitle_start[numsubs] = subtime;
        video->subtitle_end[numsubs] = sublen;
        if (numsubs > 0) // compute the true end time for the previous sub, autofix overlapping subtitles
        {
            if (video->subtitle_end[numsubs-1] <= 0)
                video->subtitle_end[numsubs-1] = max(video->subtitle_start[numsubs-1], video->subtitle_start[numsubs] - video->subtitle_end[numsubs-1]);
            else
                video->subtitle_end[numsubs-1] = min(video->subtitle_start[numsubs-1] + video->subtitle_end[numsubs-1], video->subtitle_start[numsubs]);
        }
        numsubs++;
        // todo: check timing for consistency?
    }
    if (numsubs > 0) // compute the true end time for the last sub
    {
        if (video->subtitle_end[numsubs-1] <= 0)
            video->subtitle_end[numsubs-1] = 99999999; // fixme: make it end when video ends?
        else // there is no next sub to clamp against, so just add the duration
            video->subtitle_end[numsubs-1] = video->subtitle_start[numsubs-1] + video->subtitle_end[numsubs-1];
    }
    Z_Free( subtitle_text );
    video->subtitles = numsubs;
/*
    Con_Printf( "video->subtitles: %i\n", video->subtitles );
    for (numsubs = 0; numsubs < video->subtitles; numsubs++)
        Con_Printf( "  %03.2f %03.2f : %s\n", video->subtitle_start[numsubs], video->subtitle_end[numsubs], video->subtitle_text[numsubs] );
*/
}

static clvideo_t* OpenVideo( clvideo_t *video, const char *filename, const char *name, int owner, const char *subtitlesfile )
{
    strlcpy( video->filename, filename, sizeof(video->filename) );
    video->ownertag = owner;
    if( strncmp( name, CLVIDEOPREFIX, sizeof( CLVIDEOPREFIX ) - 1 ) )
        return NULL;
    strlcpy( video->cpif.name, name, sizeof(video->cpif.name) );

    if( !OpenStream( video ) )
        return NULL;

    video->state = CLVIDEO_FIRSTFRAME;
    video->framenum = -1;
    video->framerate = dpvsimpledecode_getframerate( video->stream );
    video->lasttime = realtime;
    video->subtitles = 0;

    video->cpif.width = dpvsimpledecode_getwidth( video->stream );
    video->cpif.height = dpvsimpledecode_getheight( video->stream );
    video->imagedata = Mem_Alloc( cls.permanentmempool, video->cpif.width * video->cpif.height * cl_videobytesperpixel );
    LinkVideoTexture( video );

    // VorteX: load simple subtitle_text file
    if (subtitlesfile[0])
        LoadSubtitles( video, subtitlesfile );

    return video;
}

clvideo_t* CL_OpenVideo( const char *filename, const char *name, int owner, const char *subtitlesfile )
{
    clvideo_t *video;

    // sanity check
    if( !name || !*name || strncmp( name, CLVIDEOPREFIX, sizeof( CLVIDEOPREFIX ) - 1 ) != 0 )
    {
        Con_DPrintf( "CL_OpenVideo: Bad video texture name '%s'!\n", name );
        return NULL;
    }

    video = FindUnusedVid();
    if( !video )
    {
        Con_Printf( "CL_OpenVideo: unable to open video \"%s\" - video limit reached\n", filename );
        return NULL;
    }
    video = OpenVideo( video, filename, name, owner, subtitlesfile );
    // expand the active range to include the new entry
    if (video)
    {
        cl_num_videos = max(cl_num_videos, (int)(video - cl_videos) + 1);
    }
    return video;
}
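/*
 * Lifecycle note (derived from the surrounding code): a video that has not been
 * requested through CL_GetVideoBySlot()/CL_GetVideoByName() for more than
 * CLTHRESHOLD seconds is suspended by CL_Video_Frame() - its texture and image
 * buffer are released, and a CLVIDEO_FIRSTFRAME stream is closed as well.
 * CL_GetVideoBySlot() transparently re-wakes a suspended video via WakeVideo(),
 * so callers always receive a video that is ready to be decoded and drawn.
 */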
static clvideo_t* CL_GetVideoBySlot( int slot )
{
    clvideo_t *video = &cl_videos[ slot ];

    if( video->suspended )
    {
        if( !WakeVideo( video ) )
            return NULL;
        else if( video->state == CLVIDEO_RESETONWAKEUP )
            video->framenum = -1;
    }

    video->lasttime = realtime;

    return video;
}

clvideo_t *CL_GetVideoByName( const char *name )
{
    int i;

    for( i = 0 ; i < cl_num_videos ; i++ )
        if( cl_videos[ i ].state != CLVIDEO_UNUSED
            && !strcmp( cl_videos[ i ].cpif.name , name ) )
            break;
    if( i != cl_num_videos )
        return CL_GetVideoBySlot( i );
    else
        return NULL;
}

void CL_SetVideoState( clvideo_t *video, clvideostate_t state )
{
    if( !video )
        return;

    video->lasttime = realtime;
    video->state = state;
    if( state == CLVIDEO_FIRSTFRAME )
        CL_RestartVideo( video );
}

void CL_RestartVideo( clvideo_t *video )
{
    if( !video )
        return;

    video->starttime = video->lasttime = realtime;
    video->framenum = -1;

    dpvsimpledecode_close( video->stream );
    if( !OpenStream( video ) )
        video->state = CLVIDEO_UNUSED;
}

void CL_CloseVideo( clvideo_t * video )
{
    int i;

    if( !video || video->state == CLVIDEO_UNUSED )
        return;

    if( !video->suspended || video->state != CLVIDEO_FIRSTFRAME )
        dpvsimpledecode_close( video->stream );
    if( !video->suspended )
        UnlinkVideoTexture( video );

    if (video->subtitles)
    {
        for (i = 0; i < video->subtitles; i++)
            Z_Free( video->subtitle_text[i] );
        video->subtitles = 0;
    }

    video->state = CLVIDEO_UNUSED;
}

static void VideoFrame( clvideo_t *video )
{
    int destframe;

    if( video->state == CLVIDEO_FIRSTFRAME )
        destframe = 0;
    else
        destframe = (int)((realtime - video->starttime) * video->framerate);
    if( destframe < 0 )
        destframe = 0;
    if( video->framenum < destframe )
    {
        do {
            video->framenum++;
            if( dpvsimpledecode_video( video->stream, video->imagedata, cl_videormask,
                cl_videogmask, cl_videobmask, cl_videobytesperpixel,
                cl_videobytesperpixel * video->cpif.width ) )
            { // finished?
                CL_RestartVideo( video );
                if( video->state == CLVIDEO_PLAY )
                    video->state = CLVIDEO_FIRSTFRAME;
                return;
            }
        } while( video->framenum < destframe );
        R_MarkDirtyTexture( video->cpif.tex );
    }
}

void CL_Video_Frame( void ) // update all videos
{
    int i;
    clvideo_t *video;

    if (!cl_num_videos)
        return;

    for( video = cl_videos, i = 0 ; i < cl_num_videos ; video++, i++ )
        if( video->state != CLVIDEO_UNUSED && !video->suspended )
        {
            if( realtime - video->lasttime > CLTHRESHOLD )
                SuspendVideo( video );
            else if( video->state == CLVIDEO_PAUSE )
                video->starttime = realtime - video->framenum / video->framerate; // hold the current frame while paused
            else
                VideoFrame( video );
        }

    if( cl_videos->state == CLVIDEO_FIRSTFRAME )
        CL_VideoStop();

    // reduce range to exclude unnecessary entries
    while (cl_num_videos > 0 && cl_videos[cl_num_videos-1].state == CLVIDEO_UNUSED)
        cl_num_videos--;
}

void CL_Video_Shutdown( void )
{
    int i;

    for( i = 0 ; i < cl_num_videos ; i++ )
        CL_CloseVideo( &cl_videos[ i ] );
}

void CL_PurgeOwner( int owner )
{
    int i;

    for( i = 0 ; i < cl_num_videos ; i++ )
        if( cl_videos[ i ].ownertag == owner )
            CL_CloseVideo( &cl_videos[ i ] );
}
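/*
 * Subtitle drawing goes through COM_Wordwrap(), which wraps the subtitle text
 * by calling back into the two helpers defined next (a sketch of the flow as
 * it is used in CL_DrawVideo, not a full description of COM_Wordwrap):
 *   - CL_DrawVideo_WordWidthFunc measures a word (or, with w == NULL, the
 *     widest glyph) so the wrapper knows where lines have to break;
 *   - CL_DrawVideo_DisplaySubtitleLine draws one wrapped line aligned inside
 *     si->width and advances si->y by one font line.
 * Both callbacks share layout state through the cl_video_subtitle_info_t
 * passthrough pointer.
 */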
typedef struct
{
    dp_font_t *font;
    float x;
    float y;
    float width;
    float height;
    float alignment; // 0 = left, 0.5 = center, 1 = right
    float fontsize;
    float textalpha;
}
cl_video_subtitle_info_t;

float CL_DrawVideo_WordWidthFunc(void *passthrough, const char *w, size_t *length, float maxWidth)
{
    cl_video_subtitle_info_t *si = (cl_video_subtitle_info_t *) passthrough;

    if(w == NULL)
        return si->fontsize * si->font->maxwidth;
    if(maxWidth >= 0)
        return DrawQ_TextWidth_UntilWidth(w, length, si->fontsize, si->fontsize, false, si->font, -maxWidth); // -maxWidth: we want at least one char
    else if(maxWidth == -1)
        return DrawQ_TextWidth(w, *length, si->fontsize, si->fontsize, false, si->font);
    else
        return 0;
}

int CL_DrawVideo_DisplaySubtitleLine(void *passthrough, const char *line, size_t length, float width, qboolean isContinuation)
{
    cl_video_subtitle_info_t *si = (cl_video_subtitle_info_t *) passthrough;

    int x = (int) (si->x + (si->width - width) * si->alignment);
    if (length > 0)
        DrawQ_String(x, si->y, line, length, si->fontsize, si->fontsize, 1.0, 1.0, 1.0, si->textalpha, 0, NULL, false, si->font);
    si->y += si->fontsize;
    return 1;
}

int cl_videoplaying = false; // old, but still supported

void CL_DrawVideo(void)
{
    clvideo_t *video;
    float videotime;
    cl_video_subtitle_info_t si;
    int i;

    if (!cl_videoplaying)
        return;

    video = CL_GetVideoBySlot( 0 );

    // fix cvars
    if (cl_video_scale.value <= 0 || cl_video_scale.value > 1)
        Cvar_SetValueQuick( &cl_video_scale, 1);
    if (cl_video_brightness.value <= 0 || cl_video_brightness.value > 10)
        Cvar_SetValueQuick( &cl_video_brightness, 1);

#if 0
    // enable video-only polygon stipple (if the global stipple is not active)
    if (qglPolygonStipple && !scr_stipple.integer)
    {
        GLubyte stipple[128];
        int i, s, width, parts;
        s = 1;
        parts = (s & 007);
        width = (s & 070) >> 3;

        qglEnable(GL_POLYGON_STIPPLE);CHECKGLERROR // 0x0B42
        for(i = 0; i < 128; ++i)
        {
            int line = i/4;
            stipple[i] = ((line >> width) & ((1 << parts) - 1)) ? 0x00 : 0xFF;
        }
        qglPolygonStipple(stipple);CHECKGLERROR
    }
#endif

    // draw video
    if (cl_video_scale.value == 1)
        DrawQ_Pic(0, 0, &video->cpif, vid_conwidth.integer, vid_conheight.integer, cl_video_brightness.value, cl_video_brightness.value, cl_video_brightness.value, 1, 0);
    else
    {
        DrawQ_Fill(0, 0, vid_conwidth.integer, vid_conheight.integer, 0, 0, 0, 1, 0);
        DrawQ_Pic((int)(vid_conwidth.integer * (1 - cl_video_scale.value) * 0.5), (int)(vid_conheight.integer * (1 - cl_video_scale.value) * 0.5), &video->cpif, (int)(vid_conwidth.integer * cl_video_scale.value), (int)(vid_conheight.integer * cl_video_scale.value), cl_video_brightness.value, cl_video_brightness.value, cl_video_brightness.value, 1, 0);
    }

#if 0
    // disable video-only stipple
    if (qglPolygonStipple && !scr_stipple.integer)
    {
        qglDisable(GL_POLYGON_STIPPLE);CHECKGLERROR
    }
#endif

    // VorteX: draw subtitle_text
    if (!video->subtitles || !cl_video_subtitles.integer)
        return;

    // find the current subtitle
    videotime = realtime - video->starttime;
    for (i = 0; i < video->subtitles; i++)
    {
        if (videotime >= video->subtitle_start[i] && videotime <= video->subtitle_end[i])
        {
            // found, draw it
            si.font = FONT_NOTIFY;
            si.x = vid_conwidth.integer * 0.1;
            si.y = vid_conheight.integer - (max(1, cl_video_subtitles_lines.integer) * cl_video_subtitles_textsize.value);
            si.width = vid_conwidth.integer * 0.8;
            si.height = max(1, cl_video_subtitles_lines.integer) * cl_video_subtitles_textsize.value;
            si.alignment = 0.5;
            si.fontsize = cl_video_subtitles_textsize.value;
            si.textalpha = min(1, (videotime - video->subtitle_start[i])/0.5) * min(1, ((video->subtitle_end[i] - videotime)/0.3)); // fade in and fade out
            COM_Wordwrap(video->subtitle_text[i], strlen(video->subtitle_text[i]), 0, si.width, CL_DrawVideo_WordWidthFunc, &si, CL_DrawVideo_DisplaySubtitleLine, &si);
            break;
        }
    }
}

void CL_VideoStart(char *filename, const char *subtitlesfile)
{
    Host_StartVideo();

    if( cl_videos->state != CLVIDEO_UNUSED )
        CL_CloseVideo( cl_videos );
    // already contains video/
    if( !OpenVideo( cl_videos, filename, va( CLDYNTEXTUREPREFIX "%s", filename ), 0, subtitlesfile ) )
        return;
    // expand the active range to include the new entry
    cl_num_videos = max(cl_num_videos, 1);

    cl_videoplaying = true;

    CL_SetVideoState( cl_videos, CLVIDEO_PLAY );
    CL_RestartVideo( cl_videos );
}

void CL_Video_KeyEvent( int key, int ascii, qboolean down )
{
    // only react to up events, to allow the user to delay the abort point if it suddenly becomes interesting..
    if( !down )
    {
        if( key == K_ESCAPE || key == K_ENTER || key == K_SPACE )
        {
            CL_VideoStop();
        }
    }
}

void CL_VideoStop(void)
{
    cl_videoplaying = false;

    CL_CloseVideo( cl_videos );
}
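/*
 * Example console usage of the commands registered in CL_Video_Init() (the
 * video name "intro" is hypothetical):
 *
 *   playvideo intro              plays video/intro.dpv, loading subtitles from
 *                                video/intro.dpsubs if that file exists
 *   playvideo intro my_subs.txt  plays video/intro.dpv with a custom subtitles file
 *   stopvideo                    stops the currently playing fullscreen video
 */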
static void CL_PlayVideo_f(void)
{
    char name[MAX_QPATH], subtitlesfile[MAX_QPATH];

    Host_StartVideo();

    if (Cmd_Argc() < 2)
    {
        Con_Print("usage: playvideo <videoname> [custom_subtitles_file]\nplays video named video/<videoname>.dpv\nif a custom subtitles file is not given\nit tries video/<videoname>.dpsubs");
        return;
    }

    dpsnprintf(name, sizeof(name), "video/%s.dpv", Cmd_Argv(1));
    if ( Cmd_Argc() > 2)
        CL_VideoStart(name, Cmd_Argv(2));
    else
    {
        dpsnprintf(subtitlesfile, sizeof(subtitlesfile), "video/%s.dpsubs", Cmd_Argv(1));
        CL_VideoStart(name, subtitlesfile);
    }
}

static void CL_StopVideo_f(void)
{
    CL_VideoStop();
}

static void cl_video_start( void )
{
    int i;
    clvideo_t *video;

    cl_videotexturepool = R_AllocTexturePool();

    for( video = cl_videos, i = 0 ; i < cl_num_videos ; i++, video++ )
        if( video->state != CLVIDEO_UNUSED && !video->suspended )
            LinkVideoTexture( video );
}

static void cl_video_shutdown( void )
{
    // TODO: unlink video textures?
    R_FreeTexturePool( &cl_videotexturepool );
}

static void cl_video_newmap( void )
{
}

void CL_Video_Init( void )
{
    union
    {
        unsigned char b[4];
        unsigned int i;
    }
    bgra;

    cl_num_videos = 0;
    cl_videobytesperpixel = 4;

    // set masks in an endian-independent way (as they really represent bytes)
    bgra.i = 0;bgra.b[0] = 0xFF;cl_videobmask = bgra.i;
    bgra.i = 0;bgra.b[1] = 0xFF;cl_videogmask = bgra.i;
    bgra.i = 0;bgra.b[2] = 0xFF;cl_videormask = bgra.i;

    Cmd_AddCommand( "playvideo", CL_PlayVideo_f, "play a .dpv video file" );
    Cmd_AddCommand( "stopvideo", CL_StopVideo_f, "stop playing a .dpv video file" );

    Cvar_RegisterVariable(&cl_video_subtitles);
    Cvar_RegisterVariable(&cl_video_subtitles_lines);
    Cvar_RegisterVariable(&cl_video_subtitles_textsize);
    Cvar_RegisterVariable(&cl_video_scale);
    Cvar_RegisterVariable(&cl_video_brightness);

    R_RegisterModule( "CL_Video", cl_video_start, cl_video_shutdown, cl_video_newmap );
}