DeliverEndOfStream Method

Summary
Informs the objects "downstream" of the MultiStreamSource object that the source has reached the end of the data stream.
Syntax
C#
public void DeliverEndOfStream(
   int Stream,
   int TimeOut
)

C++/CLI
public:
void DeliverEndOfStream(
   int Stream,
   int TimeOut
)

Parameters

Stream
The zero-based index of the stream for which the end-of-stream (EOS) notification is delivered.

TimeOut
The maximum amount of time, in milliseconds, to allow for the end-of-stream notification to be delivered.

Remarks

In some cases (for example, when writing AVI files), stopping the object without calling this method can indicate a stop caused by an error. Such a stop can have unpredictable results (for example, the AVI file might not be indexed properly), so it is best to call this method before stopping the object.

If the method fails, an error is raised. For more information, refer to the Error Codes. Most time-out situations are the result of an error condition. The error returned for a time-out condition is E_ABORT. Receiving an E_ABORT error when the time-out is set sufficiently high indicates that a problem has occurred downstream. In that case, call the main object's Stop method and report an error.
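The following is a minimal sketch of that pattern. The names pMSSource and pConvert are hypothetical and only mirror the example below, and the sketch assumes a failure surfaces as a COMException, as it does for WaitForSample in the example:

C#
try
{
   // allow a generous time-out so a failure can reasonably be treated as a downstream problem
   pMSSource.DeliverEndOfStream(0, 5000);
}
catch (COMException cex)
{
   // E_ABORT (0x80004004) with a high time-out suggests a problem downstream;
   // stop the main object and report the error to the caller
   if (cex.ErrorCode == unchecked((int)0x80004004))
      pConvert.StopConvert();
   throw;
}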

Example
C#
using System; 
using System.IO; 
using System.Runtime.InteropServices; 
using Leadtools; 
using Leadtools.Multimedia; 
using LeadtoolsMultimediaExamples.Fixtures; 
 
 
public bool _result = false; 
public ConvertCtrlForm _form = new ConvertCtrlForm(); 
public ConvertCtrl _convertctrl; 
 
// input files and output file 
string inFile1 = Path.Combine(LEAD_VARS.MediaDir, "ConvertCtrl_Source1.avi"); 
string inFile2 = Path.Combine(LEAD_VARS.MediaDir, "ConvertCtrl_Source2.avi"); 
string outFile = Path.Combine(LEAD_VARS.MediaDir, "ConvertCtrl_ConcatAVIFilesExample.avi"); 
 
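// This example concatenates two AVI files: pConvert1 reads each source file and pushes 
// its samples into a MultiStreamTarget, ConcateFile copies every sample over to a 
// MultiStreamSource (adjusting its time stamps), and pConvert2 pulls from that source 
// and writes the combined video and audio streams to the output file. 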
public void ConcatAVIFilesExample() 
{ 
   ConvertCtrl pConvert1; 
   ConvertCtrl pConvert2; 
   MultiStreamSource pMSSource; 
   MultiStreamTarget pMSTarget; 
   Int64 lStart, lVideoMediaStart, lAudioMediaStart; 
 
   MediaType pmt; 
   MediaType pInsertedMediaType; 
 
   pConvert1 = new ConvertCtrl(true); 
   pConvert2 = new ConvertCtrl(true); 
 
   pMSSource = new MultiStreamSource(); 
   pMSTarget = new MultiStreamTarget(); 
 
   // set the start time to be 0 
   lStart = 0; 
   lVideoMediaStart = 0; 
   lAudioMediaStart = 0; 
 
   // set the input filename 
   pConvert1.SourceFile = inFile1; 
 
   // the target object will expose two streams (video and audio) 
   pMSTarget.StreamCount = 2; 
 
   // set the target media types for video and audio streams 
   pmt = new MediaType(); 
   pmt.Type = Constants.MEDIATYPE_Video; 
   pMSTarget.SetAcceptedMediaType(0, pmt); 
 
   pmt.Type = Constants.MEDIATYPE_Audio; 
   pMSTarget.SetAcceptedMediaType(1, pmt); 
 
   pmt = null; 
 
   // get the accepted media type for the first stream (for demonstration only) 
   pInsertedMediaType = pMSTarget.GetAcceptedMediaType(0); 
   pInsertedMediaType = null; 
 
   // set convert 1 target object 
   pConvert1.TargetObject = pMSTarget; 
 
   // start the source conversion, so we can get the media sample format 
   pConvert1.StartConvert(); 
 
   // initialize convert 2 
   // get the output media sample format and put it into the source object 
   pMSSource.StreamCount = 2; 
   pmt = pMSTarget.GetConnectedMediaType(0); 
   pMSSource.SetMediaType(0, pmt); 
   pmt = null; 
 
   pmt = pMSTarget.GetConnectedMediaType(1); 
   pMSSource.SetMediaType(1, pmt); 
   pmt = null; 
 
   // get the media type set for the first stream (for demonstration only) 
   pInsertedMediaType = pMSSource.GetMediaType(0); 
   pInsertedMediaType = null; 
 
   // set the output filename 
   pConvert2.TargetFile = outFile; 
 
   // set the source for convert 2 
   pConvert2.SourceObject = pMSSource; 
 
   // start the dest conversion 
   pConvert2.StartConvert(); 
 
   // convert first file 
   ConcateFile(pConvert1, pMSTarget, pMSSource, ref lStart, ref lVideoMediaStart, ref lAudioMediaStart); 
 
   /* 
   Restrict the output format to the media type negotiated for the first file, 
   because the two files must have the same media type for both video and audio. 
   For video, make sure the files have the same frame rate; minor differences in 
   the frame rate can make the connection fail. The control will tolerate 
   differences in frame rate if you comment out the following lines. 
   */ 
 
   pmt = pMSTarget.GetConnectedMediaType(0); 
   pMSTarget.SetAcceptedMediaType(0, pmt); 
   pmt = null; 
 
   pmt = pMSTarget.GetConnectedMediaType(1); 
   pMSTarget.SetAcceptedMediaType(1, pmt); 
   pmt = null; 
 
   // change the source file to second file 
   pConvert1.SourceFile = inFile2; 
 
   // start converting again 
   pConvert1.StartConvert(); 
 
   // convert second file 
   ConcateFile(pConvert1, pMSTarget, pMSSource, ref lStart, ref lVideoMediaStart, ref lAudioMediaStart); 
 
   // deliver end of stream on both streams to stop the conversion 
   pMSSource.DeliverEndOfStream(0, 1000); 
   pMSSource.DeliverEndOfStream(1, 1000); 
 
   if (pConvert2.State == ConvertState.Running) 
      pConvert2.StopConvert(); 
 
   // free the source and target objects 
   pConvert2.ResetSource(); 
   pConvert1.ResetTarget(); 
 
   pConvert1.Dispose(); 
   pConvert2.Dispose(); 
   pMSSource.Dispose(); 
   pMSTarget.Dispose(); 
 
   _result = File.Exists(outFile); 
} 
 
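// Copies every sample delivered to the MultiStreamTarget over to the MultiStreamSource, 
// shifting sample times and media times by the running offsets so the current file's 
// samples continue where the previous file's samples ended. Returns when WaitForSample 
// times out, which indicates that the current source file has been fully converted. 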
void ConcateFile(ConvertCtrl pConvert1, 
                 MultiStreamTarget pMSTarget, 
                 MultiStreamSource pMSSource, 
                 ref long lStart, 
                 ref long lVideoMediaStart, 
                 ref long lAudioMediaStart) 
{ 
   MediaSample pmsSrc = null; 
   MediaSample pmsDst = null; 
   long MediaTimeStart; 
   long MediaTimeStop; 
   long LastVideoMediaStop; 
   long LastAudioMediaStop; 
   long LastStart; 
   long LastStop; 
   int lSampleStream; 
   int lActualDataLength; 
 
   LastStop = 0; 
   LastVideoMediaStop = 0; 
   LastAudioMediaStop = 0; 
 
   do 
   { 
      // get the next sample, allowing 1 second for the operation to complete 
      try 
      { 
         lSampleStream = pMSTarget.WaitForSample(1000); 
      } 
      catch (COMException cex) 
      { 
         if (cex.ErrorCode == (int)ErrorCode.VFW_E_TIMEOUT) 
         { 
            // end of the stream 
            break; 
         } 
         _result = false; 
         break; 
      } 
 
      try 
      { 
         // get the sample delivered to the target object (the copy source) 
         pmsSrc = pMSTarget.GetSample(lSampleStream, 0); 
         // get a free buffer from the source object (the copy destination) 
         pmsDst = pMSSource.GetSampleBuffer(lSampleStream, 2000); 
      } 
      catch (Exception) 
      { 
         _result = false; 
         break; 
      } 
 
 
      try 
      { 
         // get the source media time 
         pmsSrc.GetMediaTime(out MediaTimeStart, out MediaTimeStop); 
 
         // check sample media type 
         MediaType sampleMediaType = pMSTarget.GetConnectedMediaType(lSampleStream); 
         if (sampleMediaType.Type == Constants.MEDIATYPE_Video) // video stream media time 
         { 
            pmsDst.SetMediaTime(MediaTimeStart + lVideoMediaStart, MediaTimeStop + lVideoMediaStart); 
            LastVideoMediaStop = MediaTimeStop; 
         } 
         else // audio stream media time 
         { 
            pmsDst.SetMediaTime(MediaTimeStart + lAudioMediaStart, MediaTimeStop + lAudioMediaStart); 
            LastAudioMediaStop = MediaTimeStop; 
         } 
      } 
      catch (Exception) 
      { 
         pmsDst.ResetMediaTime(); 
      } 
 
      try 
      { 
         // get the source sample time 
         pmsSrc.GetTime(out LastStart, out LastStop); 
 
         // set the destination sample time 
         pmsDst.SetTime(lStart + LastStart, lStart + LastStop); 
      } 
      catch(Exception)  
      { 
         pmsDst.ResetTime(); 
      } 
 
      // copy the data 
      lActualDataLength = pmsSrc.ActualDataLength; 
 
      // set the destination buffer 
      // we could marshal the unmanaged buffer here, but there is no need since we are merely 
      // setting the destination to the source buffer contents (unaltered data) 
      pmsDst.SetData(lActualDataLength, pmsSrc.GetData(lActualDataLength)); 
 
      // copy the other flags 
      pmsDst.Discontinuity = pmsSrc.Discontinuity; 
      pmsDst.Preroll = pmsSrc.Preroll; 
      pmsDst.SyncPoint = pmsSrc.SyncPoint; 
 
      // dispose the source sample, since we don't need it anymore. This makes the buffer available for reuse 
      pmsSrc.Dispose(); 
 
      // deliver the destination sample 
      pMSSource.DeliverSample(lSampleStream, 1000, pmsDst); 
 
      // dispose the destination sample, since we don't need it anymore. This makes the buffer available for reuse 
      pmsDst.Dispose(); 
   } 
   while (true); 
 
   pConvert1.StopConvert(); 
   lStart += LastStop; 
   lVideoMediaStart += LastVideoMediaStop; 
   lAudioMediaStart += LastAudioMediaStop; 
} 
 
static class LEAD_VARS 
{ 
   public const string MediaDir = @"C:\LEADTOOLS23\Media"; 
} 
Requirements

Target Platforms

Help Version 23.0.2024.2.29
© 1991-2024 LEAD Technologies, Inc. All Rights Reserved.