2012-04-23 64 views
1

事实上,这是一种技术的混搭,但我的问题(我认为)的答案最接近于 Direct3D 9。我挂钩到一个任意的 D3D9 应用程序(在大多数情况下它是一个游戏),并注入我自己的代码来模仿 EndScene 函数的行为。后台缓冲区被复制到一个表面中,该表面被设置为指向推送源 DirectShow 过滤器中的位图。过滤器以 25 fps 采样位图并将视频流写入 .avi 文件。游戏屏幕上显示的文字覆盖图告诉用户停止游戏捕捉的热键组合,但此覆盖图不应出现在录制的视频中。除了一个令人讨厌的问题外,一切都快速而美观:在随机的场合,带有文字的帧会意外地混入录制的视频。这不是想要的效果——最终用户只想在视频中看到他的游戏画面,而没有其他任何东西。我很想听听有没有人能解释为什么会发生这种情况。这里是 EndScene 钩子的源代码。(问题标题:挂钩 IDirect3DDevice9::EndScene 方法捕捉游戏视频:无法摆脱录制的视频中的文本覆盖)

using System; 
using SlimDX; 
using SlimDX.Direct3D9; 
using System.Diagnostics; 
using DirectShowLib; 
using System.Runtime.InteropServices; 

/// <summary>
/// COM interface exposed by the push-source DirectShow filter. The EndScene
/// hook uses it to hand the filter the BMP headers once, and then a raw
/// pointer to the frame's pixel memory that the filter samples repeatedly.
/// NOTE(review): the GUID and IUnknown-based marshaling must match the
/// filter's native declaration exactly — do not change them.
/// </summary>
[InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
[System.Security.SuppressUnmanagedCodeSecurity]
[Guid("EA2829B9-F644-4341-B3CF-82FF92FD7C20")]

public interface IScene
{
    /// <summary>
    /// Passes a raw pointer to the image bytes the filter should sample.
    /// <paramref name="noheaders"/> = true means the memory holds pixel data
    /// only, without BITMAPFILEHEADER/BITMAPINFOHEADER.
    /// Returns an HRESULT-style status code.
    /// </summary>
    unsafe int PassMemoryPtr(void* ptr, bool noheaders);
    /// <summary>
    /// Supplies a marshaled byte array containing the bitmap headers (and,
    /// when <paramref name="noheaders"/> is false, header + image bytes) so
    /// the filter knows the frame format. Returns an HRESULT-style status code.
    /// </summary>
    int SetBITMAPINFO([MarshalAs(UnmanagedType.LPArray, SizeParamIndex = 1)]byte[] ptr, bool noheaders);
}

public class Class1 
{ 
    // Guards all render-target access from the EndScene hook.
    object _lockRenderTarget = new object();
    // Overlay text drawn on the game screen (hotkey hint / capture status).
    public string StatusMess { get; set; }
    // System-memory copy of the back buffer; the DirectShow filter reads its pixels.
    Surface _renderTarget;
    //points to image bytes (pixel data of _renderTarget, no headers)
    unsafe void* bytesptr;
    //used to store headers AND image bytes (one-time BMP snapshot for SetBITMAPINFO)
    byte[] bytes;
    // DirectShow graph plumbing: graph, builder, source/compressor/mux filters, sink.
    IFilterGraph2 ifg2;
    ICaptureGraphBuilder2 icgb2;
    IBaseFilter push;
    IBaseFilter compressor;
    // COM view of the push-source filter used to pass headers and the pixel pointer.
    IScene scene;
    IBaseFilter mux;
    IFileSinkFilter sink;
    IMediaControl media;
    // One-shot flags: build the graph on the first grabbed frame, clean it on stop.
    bool NeedRunGraphInit = true;
    bool NeedRunGraphClean = true;
    // Stream over the locked surface data; dr holds its pitch/data rectangle.
    DataStream s;
    DataRectangle dr;

    /// <summary>
    /// Hooked replacement for IDirect3DDevice9::EndScene. At a throttled rate
    /// it copies the back buffer into a system-memory surface whose pixel
    /// memory a DirectShow push-source filter samples to produce an .avi,
    /// draws the status overlay, then delegates to the device's real EndScene.
    /// </summary>
    /// <param name="devicePtr">Raw IDirect3DDevice9 pointer supplied by the hook.</param>
    /// <returns>HRESULT code of the real EndScene call.</returns>
    unsafe int EndSceneHook(IntPtr devicePtr)
    {
        int hr;

        using (Device device = Device.FromPointer(devicePtr))
        {
            try
            {
                lock (_lockRenderTarget)
                {
                    bool TimeToGrabFrame = false;

                    //....
                    //logic based on elapsed milliseconds deciding if it is time to grab another frame

                    if (TimeToGrabFrame)
                    {
                        // Lazily create the copy target once, sized/formatted like the
                        // swap chain's back buffer. Att: created in system memory, not
                        // video memory, so the CPU-side DirectShow filter can read it.
                        if (_renderTarget == null)
                        {
                            using (SwapChain sc = device.GetSwapChain(0))
                            {
                                _renderTarget = Surface.CreateOffscreenPlain(device, sc.PresentParameters.BackBufferWidth, sc.PresentParameters.BackBufferHeight, sc.PresentParameters.BackBufferFormat, Pool.SystemMemory);
                            }
                        }

                        using (Surface backBuffer = device.GetBackBuffer(0, 0))
                        {
                            // Main action: copy the D3D9 back buffer into _renderTarget,
                            // whose pixel memory the push-source filter periodically scans
                            // to build the video stream.
                            // NOTE(review): some D3D9 apps do not refresh the back buffer
                            // on every EndScene, so this copy can still hold the previous
                            // frame — including the overlay drawn below. That is the
                            // reported "text leaks into the video" artifact.
                            device.GetRenderTargetData(backBuffer, _renderTarget);

                            if (NeedRunGraphInit) //ran only once
                            {
                                ifg2 = (IFilterGraph2)new FilterGraph();
                                icgb2 = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();
                                icgb2.SetFiltergraph(ifg2);
                                push = (IBaseFilter)new PushSourceFilter();
                                scene = (IScene)push;

                                // One-time BMP snapshot to obtain the bitmapfile/bitmapinfo
                                // headers. ToStream is slow, but it runs only once.
                                s = Surface.ToStream(_renderTarget, ImageFileFormat.Bmp);
                                bytes = new byte[s.Length];
                                s.Read(bytes, 0, (int)s.Length);
                                hr = scene.SetBITMAPINFO(bytes, false);

                                // Headers delivered; now hand the filter a pointer to just
                                // the pixel bytes obtained via LockRectangle.
                                // NOTE(review): the rectangle is unlocked before the pointer
                                // is used — this assumes the system-memory surface's data
                                // pointer stays valid after UnlockRectangle. Confirm.
                                dr = _renderTarget.LockRectangle(LockFlags.None);
                                s = dr.Data;
                                Result r = _renderTarget.UnlockRectangle();
                                bytesptr = s.DataPointer.ToPointer();
                                hr = scene.PassMemoryPtr(bytesptr, true);

                                // Continue building the graph: source -> mux -> file sink.
                                ifg2.AddFilter(push, "MyPushSource");

                                // BUGFIX: the original literal "C:\foo.avi" contained the
                                // escape sequence \f (form feed); use a verbatim string so
                                // the path really is C:\foo.avi.
                                icgb2.SetOutputFileName(MediaSubType.Avi, @"C:\foo.avi", out mux, out sink);

                                icgb2.RenderStream(null, null, push, null, mux);

                                media = (IMediaControl)ifg2;
                                media.Run();

                                NeedRunGraphInit = false;
                                NeedRunGraphClean = true;

                                StatusMess = "now capturing, press shift-F11 to stop";
                            }
                        } // end using backBuffer
                    } // end if TimeToGrabFrame
                } // end lock
            }

            // Usually thrown when the user makes the game window inactive, or
            // deliberately when time is up / the user pressed shift-F11 to stop
            // capture. For any other cause it is still a good idea to stop
            // recording and free the graph.
            catch (Exception ex)
            {
                //..
                //stop the DirectShow graph and cleanup
            }

            // Draw the status overlay onto the game's render target.
            // BUGFIX: the GDI System.Drawing.Font was never disposed, leaking a
            // GDI handle every hooked frame; both fonts are now disposed.
            // NOTE(review): constructing fonts each frame is costly — consider
            // caching them if capture performance matters.
            using (var gdiFont = new System.Drawing.Font("Times New Roman", 26.0f, System.Drawing.FontStyle.Bold))
            using (SlimDX.Direct3D9.Font font = new SlimDX.Direct3D9.Font(device, gdiFont))
            {
                font.DrawString(null, StatusMess, 20, 100, System.Drawing.Color.FromArgb(255, 255, 255, 255));
            }

            return device.EndScene().Code;
        } // end using device
    } //end EndSceneHook

回答

0

最后,我自己找到了这个问题的答案,在此分享给感兴趣的人。事实证明,在某些 Direct3D9 应用程序中,并不是每次调用被挂钩的 EndScene 时后台缓冲区都会刷新。因此,有时带有上一次 EndScene 挂钩调用所绘制文字覆盖的后台缓冲区,会被传递给负责收集输入帧的 DirectShow 源过滤器。我的解决办法是:在每一帧上打上一个由已知 RGB 值组成的 3 像素小标记,并在把帧传给 DirectShow 过滤器之前检查这个虚拟标记是否仍然存在。如果标记还在,就传递先前缓存的帧而不是当前帧。该方法有效地从 DirectShow 图表录制的视频中去除了文本叠加。