
I'm streaming H264 NALs from a server, wrapping them as FLV tags, and passing them into a NetStream with appendBytes (data generation mode). The video plays fine, but the stream is delayed by roughly a second. Is there a way to prevent buffering when using NetStream.appendBytes with H264 NALs?
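For context, I set the NetStream up for data generation mode and append an FLV file header before any tags, roughly like this (a simplified sketch; the connection, stream and video names are illustrative, not the exact project code):

// Enter "data generation mode": play(null), reset, then append an FLV header.
var connection:NetConnection = new NetConnection();
connection.connect(null);

var stream:NetStream = new NetStream(connection);
stream.client = {};
video.attachNetStream(stream); // 'video' is an existing flash.media.Video on the stage

stream.play(null);
stream.appendBytesAction(NetStreamAppendBytesAction.RESET_BEGIN);

// Standard FLV file header: 'F','L','V', version 1, audio+video flags,
// 9-byte header length, then PreviousTagSize0 = 0.
var flvHeader:ByteArray = new ByteArray();
flvHeader.writeByte(0x46); flvHeader.writeByte(0x4C); flvHeader.writeByte(0x56);
flvHeader.writeByte(0x01);     // version
flvHeader.writeByte(0x05);     // flags: audio + video present
flvHeader.writeUnsignedInt(9); // header size
flvHeader.writeUnsignedInt(0); // PreviousTagSize0
stream.appendBytes(flvHeader);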

I've tried setting bufferTime and bufferTimeMax, but had no luck preventing the buffering.

I've also tried various combinations of NetStream.seek() and NetStream.appendBytesAction() with RESET_SEEK and END_SEQUENCE, again to no avail.
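For reference, the attempts looked roughly along these lines (a sketch of the kinds of calls I tried, not the exact code):

// Buffer settings that made no difference to the delay:
stream.bufferTime = 0;
stream.bufferTimeMax = 0;

// One of the seek/append-action combinations tried (also no luck):
stream.appendBytesAction(NetStreamAppendBytesAction.END_SEQUENCE);
stream.seek(0);
stream.appendBytesAction(NetStreamAppendBytesAction.RESET_SEEK);
// ...then continue appending FLV tags, starting from a keyframe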

Is there a trick I'm missing here; is there any way to prevent this delay?

Interestingly, I don't see the delay on the audio (PCMU) I'm passing in, so I end up with a lip-sync problem.

Update: still stuck, so posting the code I'm using:

     var timestamp : uint = networkPayload.readUnsignedInt(); 
     if (videoTimestampBase == 0) { 
      videoTimestampBase = timestamp; 
     } 
     timestamp = timestamp - videoTimestampBase; 
     timestamp = timestamp/90.0; 

     // skip 7 bytes of marker 
     networkPayload.position = 7; 
     var nalType : int = networkPayload.readByte(); 
     nalType &= 0x1F; 
     networkPayload.position = 7; 

     // reformat Annex B bitstream encoding, to Mp4 - remove timestamp and bitstream marker (3 bytes) 
     var mp4Payload : ByteArray = new ByteArray(); 
     var mp4PayloadLength : int = networkPayload.bytesAvailable; 
     mp4Payload.writeUnsignedInt(mp4PayloadLength); 
     mp4Payload.writeBytes(networkPayload, 7, mp4PayloadLength); 
     mp4Payload.position = 0; 

     if (nalType == 8) { 
      // PPS 
      ppsNAL = new ByteArray(); 
      // special case for PPS/SPS - don't length encode 
      ppsLength = mp4Payload.bytesAvailable - 4; 
      ppsNAL.writeBytes(mp4Payload, 4, mp4Payload.bytesAvailable - 4); 
      if (spsNAL == null) { 
       return; 
      } 
     } else if (nalType == 7) { 
      // SPS 
      spsNAL = new ByteArray(); 
      // special case for PPS/SPS - don't length encode 
      spsLength = mp4Payload.bytesAvailable - 4; 
      spsNAL.writeBytes(mp4Payload, 4, mp4Payload.bytesAvailable - 4); 

      if (ppsNAL == null) { 
       return; 
      } 
     } 

     if ((spsNAL != null) && (ppsNAL != null)) { 
      Log.debug(TAG, "Writing sequence header: " + spsLength + "," + ppsLength + "," + timestamp); 

      var sequenceHeaderTag : FLVTagVideo = new FLVTagVideo(); 
      sequenceHeaderTag.codecID = FLVTagVideo.CODEC_ID_AVC; 
      sequenceHeaderTag.frameType = FLVTagVideo.FRAME_TYPE_KEYFRAME; 
      sequenceHeaderTag.timestamp = timestamp; 
      sequenceHeaderTag.avcPacketType = FLVTagVideo.AVC_PACKET_TYPE_SEQUENCE_HEADER; 

      spsNAL.position = 1; 
      var profile : int = spsNAL.readByte(); 
      var compatibility : int = spsNAL.readByte(); 
      var level : int = spsNAL.readByte(); 
      Log.debug(TAG, profile + "," + compatibility + "," + level + "," + spsLength); 

      var avcc : ByteArray = new ByteArray(); 
      avcc.writeByte(0x01); // avcC version 1 
      // profile, compatibility, level 
      avcc.writeByte(profile); 
      avcc.writeByte(compatibility); 
      avcc.writeByte(0x20); //level); 
      avcc.writeByte(0xff); // 111111 + 2 bit NAL size - 1 
      avcc.writeByte(0xe1); // number of SPS 
      avcc.writeByte(spsLength >> 8); // 16-bit SPS byte count 
      avcc.writeByte(spsLength); 
      avcc.writeBytes(spsNAL, 0, spsLength); // the SPS 
      avcc.writeByte(0x01); // number of PPS 
      avcc.writeByte(ppsLength >> 8); // 16-bit PPS byte count 
      avcc.writeByte(ppsLength); 
      avcc.writeBytes(ppsNAL, 0, ppsLength); 
      sequenceHeaderTag.data = avcc; 

      // clear the pps/sps til next buffer 
      var bytes : ByteArray = new ByteArray(); 
      sequenceHeaderTag.write(bytes); 

      stream.appendBytes(bytes); 

      ppsNAL = null; 
      spsNAL = null; 
     } else { 
      if ((timestamp != currentTimestamp) || (currentVideoTag == null)) { 
       if (currentVideoTag != null) { 
        currentVideoTag.data = currentSegment; 

        var tagData : ByteArray = new ByteArray(); 
        currentVideoTag.write(tagData); 

        stream.appendBytes(tagData); 
       } 

       currentVideoTag = new FLVTagVideo(); 
       currentVideoTag.codecID = FLVTagVideo.CODEC_ID_AVC; 
       currentVideoTag.frameType = FLVTagVideo.FRAME_TYPE_INTER; 
       if (nalType == 5) { 
        currentVideoTag.frameType = FLVTagVideo.FRAME_TYPE_KEYFRAME; 
       } 
       lastNalType = nalType; 
       currentVideoTag.avcPacketType = FLVTagVideo.AVC_PACKET_TYPE_NALU; 
       currentVideoTag.timestamp = timestamp; 
       currentVideoTag.avcCompositionTimeOffset = 0; 

       currentSegment = new ByteArray(); 
       currentTimestamp = timestamp; 
      } 

      mp4Payload.position = 0; 
      currentSegment.writeBytes(mp4Payload); 
     } 

Update: some more detail; here are the timestamps being passed in:

DEBUG: StreamPlayback: 66,-32,20,19 
DEBUG: StreamPlayback: Timestamp: 0 
DEBUG: StreamPlayback: Timestamp: 63 
DEBUG: StreamPlayback: stream status update: netStatus NetStream.Buffer.Full 
DEBUG: StreamPlayback: Timestamp: 137 
DEBUG: StreamPlayback: Timestamp: 200 
DEBUG: StreamPlayback: Timestamp: 264 
DEBUG: StreamPlayback: Timestamp: 328 
DEBUG: StreamPlayback: Timestamp: 403 
DEBUG: StreamPlayback: Timestamp: 467 
DEBUG: StreamPlayback: Timestamp: 531 
DEBUG: StreamPlayback: Timestamp: 595 
DEBUG: StreamPlayback: Timestamp: 659 
DEBUG: StreamPlayback: Timestamp: 723 
DEBUG: StreamPlayback: Timestamp: 830 
DEBUG: StreamPlayback: Timestamp: 894 
DEBUG: StreamPlayback: Timestamp: 958 
DEBUG: StreamPlayback: Timestamp: 1021 
DEBUG: StreamPlayback: Timestamp: 1086 
DEBUG: StreamPlayback: Timestamp: 1161 
DEBUG: StreamPlayback: Timestamp: 1225 
DEBUG: StreamPlayback: Timestamp: 1289 
DEBUG: StreamPlayback: Timestamp: 1353 
DEBUG: StreamPlayback: Timestamp: 1418 
DEBUG: StreamPlayback: Timestamp: 1491 
DEBUG: StreamPlayback: Timestamp: 1556 
DEBUG: StreamPlayback: Timestamp: 1633 
DEBUG: StreamPlayback: Timestamp: 1684 
DEBUG: StreamPlayback: Timestamp: 1747 
DEBUG: StreamPlayback: stream status update: netStatus NetStream.Video.DimensionChange 
DEBUG: StreamPlayback: Timestamp: 1811 

Cheers,

Kev


Could it be a timestamp issue? First two audio tags go in (appended back to back), then one video tag (frame) is appended, in that order... 'bufferTime' etc. is only responsible for any "ahead of time" decoding, so that content is ready by the time the playhead reaches it. With **H.264** it cannot be stopped, because the decoder needs a group of "pictures" (for reference) before the current frame's image can be displayed. –


I've actually got audio turned off atm, so it's just the video stream. The timestamp is generated from the RTP timestamp divided by 90, to bring it down to milliseconds. I'll take a look and see whether something is getting mixed up in there. Net result, though: you're saying I shouldn't need to flush the stream, and it should play straight away anyway. –


Yes, don't flush after each append. Just keep appending and the Flash decoder takes care of things. If you use 'RESET_SEEK', the decoder then expects a **keyframe** video tag next. All audio tags are audio keyframes. –

Answer


Solution one:

Media doesn't actually begin playing until NetStream.Video.DimensionChange is fired, so why not pause the NetStream before beginning any appends? Then keep appending tags until the NetStream confirms the "dimension change". In the NetStatus handler for that status you then un-pause the NetStream. Hopefully it will then play in sync, since while paused the playhead advances for neither audio nor video.

stream.addEventListener(NetStatusEvent.NET_STATUS, stream_StatusHandler); 

stream.play(null); 
stream.appendBytesAction(NetStreamAppendBytesAction.RESET_BEGIN); 
stream.pause(); //# pause before beginning FLV A/V Tag appends 

public function stream_StatusHandler (evt:NetStatusEvent) : void 
{ 
    trace("DEBUG: StreamPlayback : NEW evt.info.code : " + evt.info.code); 

    switch (evt.info.code) 
    { 
     //# in case its "NetStream.Buffer.Full" 
     case "NetStream.Buffer.Full" :  
     trace("DEBUG: StreamPlayback : NetStream.Buffer.Full..."); 
     break; 

     //# in case its "NetStream.Video.DimensionChange" : 
     case "NetStream.Video.DimensionChange" : 
     trace("DEBUG: StreamPlayback : #### Video Dimensions have changed...");   
     trace("DEBUG: StreamPlayback : #### NEW Detected video.videoHeight : " + video.videoHeight); 
     stream.resume(); //# resume playback 
     //# or use : stream.togglePause(); 
     break; 

    } 
} 

If that doesn't work, then you can try...

Solution two:

I'm not sure how to make the dimension-change event fire any sooner... Is there a way to fake it?

Create a video frame dynamically from BitmapData, consisting of nothing but a simple block of colour. Give the block a resolution different from your video stream. Append that block first, and the size difference from your own video frames will trigger the dimension change.

Note: If your video triggers too late (ie: A/V is out of sync), it means you are initially sending too many audio tags (possibly with incorrect timestamps that run ahead of the video time?)... Try checking the timestamps: audio always comes before video, but must not exceed the timestamp of the related video tag.
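As a rough illustration of that timestamp check (just a sketch; pendingAudioTags, audioTag and nextVideoTimestamp are made-up names, not part of the code below):

// Hold audio tags back so they never run past the next video tag's timestamp.
while (pendingAudioTags.length > 0 &&
       pendingAudioTags[0].timestamp <= nextVideoTimestamp)
{
    var audioTag:FLVTagAudio = pendingAudioTags.shift();
    var audioBytes:ByteArray = new ByteArray();
    audioTag.write(audioBytes);
    stream.appendBytes(audioBytes);
}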

The sample code below makes a 100 × 50 video frame (the BitmapData is encoded in the Screen Video format and appended as a video tag).

// 1) ## Setup Video Object + Append FLV header + Append Metadata etc 
// 2) ## Run function below before appending your first Video tag... 

force_Dimension_Adjust(); 

// 3) ## Do your usual appends... 

Below is the related code for force_Dimension_Adjust():

public var BA_BMD_Frame : ByteArray = new ByteArray(); //# pre-allocated so .clear() below is safe 
public var BA_Temp : ByteArray = new ByteArray(); 

public function force_Dimension_Adjust() : void 
{ 
    trace("DEBUG: #### doing function : force_Dimension_Adjust"); 

    //create BMD frame for dimension change 
    generate_Frame_BMPdata(); //# Puts result video tag into BA_BMD_Frame 

    BA_BMD_Frame.position = 0; 
    stream.appendBytes(BA_BMD_Frame); //should trigger "dimension change" for video picture size 
    trace("DEBUG: StreamPlayback : #### APPENDED :::: BA_BMD_Frame : "); 

} 

public function generate_Frame_BMPdata() : void 
{ 
    //## Simple colour block as video frame content 
    //## (pW = Picture Width, pH = Picture Height) 

    var pW : int = 100; var pH : int = 50; 
    var temp_BMD : BitmapData = new BitmapData(pW, pH, false, 0x5500AA); //R-G-B 5500AA = purple 
    var temp_BMP : Bitmap = new Bitmap(temp_BMD); 

    // 1) #### encode BitmapData to codec Screen Video 
    BA_BMD_Frame.clear(); BA_Temp.clear(); BA_Temp.position = 0; //# Resets 
    encode_SCREENVIDEO (BA_Temp, temp_BMD); //# Put encoded BMD into a temp ByteArray 

    // 2) #### Create Video Frame TAG to hold encoded frame 
    BA_BMD_Frame.writeByte(0x09); //# is video TAG 

    writeUInt24(BA_BMD_Frame, BA_Temp.length); //# Write 3 bytes : size of BMD bytes length 

    BA_BMD_Frame.writeUnsignedInt(0x00); //# Write 4 byte timestamp : 0x00 0x00 0x00 0x00 

    writeUInt24(BA_BMD_Frame, 0x00); //# Write 3 bytes (stream ID etc) : 0x00 0x00 0x00 

    BA_BMD_Frame.position = BA_BMD_Frame.length; 
    BA_BMD_Frame.writeBytes(BA_Temp); //# Write encoded BMD bytes here 

    BA_BMD_Frame.position = BA_BMD_Frame.length; 
    BA_BMD_Frame.writeUnsignedInt(BA_BMD_Frame.length - 4); //# Close : total size of this byteArray (TAG) length minus 4 

    BA_BMD_Frame.position = 0; //# Reset position 

} 


public function encode_SCREENVIDEO (input_BA : ByteArray , input_BMD : BitmapData) : void //ByteArray 
{ 
    var w:int = input_BMD.width; var h:int = input_BMD.height; 

    //# Video Type = 1 (Keyframe) |&&| Codec ID = 3 (Screen-Video) 
    input_BA.writeByte(0x13); 

    //# Block sizes must be set before the header is written 
    const BLOCK_WIDTH:int = input_BMD.width; //# is 100; 
    const BLOCK_HEIGHT:int = input_BMD.height; //# is 50; 

    //# SCREENVIDEOPACKET 'header' 
    writeUI4_12(input_BA, int((BLOCK_WIDTH /16) - 1), w); //4-bit block width + 12 bits for width 
    writeUI4_12(input_BA, int((BLOCK_HEIGHT /16) - 1), h); //4-bit block height + 12 bits for height 

    //# Create IMAGEBLOCKS 

    var rowMax:int = int(h/BLOCK_HEIGHT); 
    var rowRemainder:int = h % BLOCK_HEIGHT; 
    if (rowRemainder > 0) rowMax += 1; 

    var colMax:int = int(w/BLOCK_WIDTH); 
    var colRemainder:int = w % BLOCK_WIDTH;    
    if (colRemainder > 0) colMax += 1; 

    var block:ByteArray = new ByteArray(); 
    block.endian = Endian.LITTLE_ENDIAN; 

    for (var row:int = 0; row < rowMax; row++) 
    { 
     for (var col:int = 0; col < colMax; col++) 
     { 
      var xStart:uint = col * BLOCK_WIDTH; 
      var xLimit:int = (colRemainder > 0 && col + 1 == colMax) ? colRemainder : BLOCK_WIDTH; 
      var xEnd:int = xStart + xLimit; 

      var yStart:uint = h - (row * BLOCK_HEIGHT); //# Read BMP Data from bottom to top 
      var yLimit:int = (rowRemainder > 0 && row + 1 == rowMax) ? rowRemainder : BLOCK_HEIGHT; 
      var yEnd:int = yStart - yLimit; 

      block.clear(); //# re-use ByteArray 

      for (var y:int = yStart-1; y >= yEnd; y--) //# FLV stores Bitmap Data from bottom to top) 
      { 
       for (var x:int = xStart; x < xEnd; x++) 
       { 
        var p:uint = input_BMD.getPixel(x, y); 
        writeUInt24(block, p); //# write B-G-R pixel values 
       } 
      } 

      block.compress(); 

      input_BA.writeShort(block.length); // write block length (2 bytes == 16 bits) 
      input_BA.writeBytes(block); // write block 
     } 
    } 

    block.length = 0; block = null; 
    input_BA.position = input_BA.length; 
} 

//// Supporting functions 

public function writeUInt24(input_BA:ByteArray, val:uint) : void 
{ 
    //# write a 24-bit (3-byte) big-endian value 
    var temp_Int_1:int = val >> 16; 
    var temp_Int_2:int = val >> 8 & 0xff; 
    var temp_Int_3:int = val & 0xff; 

    input_BA.writeByte(temp_Int_1); input_BA.writeByte(temp_Int_2); 
    input_BA.writeByte(temp_Int_3); 
} 

public function writeUI4_12(input_BA:ByteArray, p1:uint, p2:uint):void 
{ 
    // writes a 4-bit value followed by a 12-bit value in a total of 16 bits (2 bytes) 

    var byte1a:int = p1 << 4; 
    var byte1b:int = p2 >> 8; 
    var byte1:int = byte1a + byte1b; 
    var byte2:int = p2 & 0xff; 

    input_BA.writeByte(byte1); input_BA.writeByte(byte2); 
} 

Finally got back around to looking at this; neither of these approaches worked, unfortunately. –
