RtspSourceOutputPin.cpp

Go to the documentation of this file.
00001 
00034 #include "RtspSourceOutputPin.h"
00035 #include "RtspSourceFilter.h"
00036 
00037 // DS // For MP3 header
00038 #include <mmreg.h>
00039 
00040 // STL
00041 #include <iostream>
00042 
00043 // LiveMedia
00044 #include <liveMedia.hh>
00045 
00046 // RTVC
00047 #include <Shared/MediaSample.h>
00048 
/// Constructs an output pin for one RTSP media subsession.
/// @param phr               [in/out] HRESULT from the caller; checked after the base
///                          CSourceStream constructor runs and updated by
///                          initialiseMediaType() on failure.
/// @param pFilter           Owning RtspSourceFilter (also used for its state lock and
///                          RTP packet manager later on).
/// @param pMediaSubsession  liveMedia subsession describing the stream; used to build
///                          the DirectShow media type.
/// @param nID               Pin/subsession index used to fetch samples from the
///                          filter's packet manager.
RtspSourceOutputPin::RtspSourceOutputPin( HRESULT* phr, RtspSourceFilter* pFilter, MediaSubsession* pMediaSubsession, int nID )
: CSourceStream(NAME("Audio Source Filter Output Pin"), phr, pFilter, L"Out"),
        m_pFilter(pFilter), // Parent filter
        m_pMediaType(NULL), // Created lazily in initialiseMediaType() for supported codecs
        m_nBitsPerSample(0),
        m_nChannels(0),
        m_nBytesPerSecond(0),
        m_nSamplesPerSecond(0),
        m_nID(nID),
        m_bOffsetSet(false),     // true once the filter has been told a base start time
        m_bHasBeenSynced(false), // true once RTCP synchronisation has occurred
        m_dGlobalStartTime(-1),  // -1 == not yet set (see setOffset)
        m_dLocalStartTime(-1)    // -1 == no sample received yet (see FillBuffer)
{
        // Make sure the CSourceStream constructor was successful
        if (!SUCCEEDED(*phr))
        {
                return;
        }

        // Builds m_pMediaType from the subsession's medium/codec; sets *phr on failure
        initialiseMediaType(pMediaSubsession, phr);
}
00071 
00072 RtspSourceOutputPin::~RtspSourceOutputPin(void)
00073 {
00074         // Free media type
00075         if (m_pMediaType)
00076                 FreeMediaType(*m_pMediaType);
00077 }
00078 
00079 void RtspSourceOutputPin::initialiseMediaType(MediaSubsession* pSubsession, HRESULT* phr)
00080 {
00081         // Now iterate over all media subsessions and create our RTVC media attributes
00082         // Get medium name
00083         const char* szMedium = pSubsession->mediumName();
00084         const char* szCodec = pSubsession->codecName();
00085 
00086         if (pSubsession)
00087         {
00088                 // Audio
00089                 if (strcmp(szMedium, "audio")==0)
00090                 {
00091                         m_pMediaType = new CMediaType();
00092 
00093                         // Setup media type for 8 bit PCM
00094                         if (strcmp(szCodec, "L8")==0)
00095                         {
00096                                 m_pMediaType->SetType(&MEDIATYPE_Audio);
00097 
00098                                 // For PCM
00099                                 m_pMediaType->SetFormatType(&FORMAT_WaveFormatEx);
00100                                 m_pMediaType->SetSubtype(&MEDIASUBTYPE_PCM);
00101                                 // 8 bit audio
00102                                 m_nBitsPerSample = 8;
00103                                 // Get sample rate
00104                                 m_nSamplesPerSecond = pSubsession->rtpTimestampFrequency();
00105 
00106                                 // Get channels
00107                                 m_nChannels = pSubsession->numChannels();
00108 
00109                                 m_nBytesPerSecond = m_nSamplesPerSecond * (m_nBitsPerSample >> 3) * m_nChannels;
00110 
00111                                 WAVEFORMATEX *pWavHeader = (WAVEFORMATEX*) m_pMediaType->AllocFormatBuffer(sizeof(WAVEFORMATEX));
00112                                 if (pWavHeader == 0)
00113                                         *phr = (E_OUTOFMEMORY);
00114 
00115                                 // Initialize the video info header
00116                                 ZeroMemory(pWavHeader, m_mt.cbFormat);   
00117 
00118                                 pWavHeader->wFormatTag = WAVE_FORMAT_PCM;
00119                                 pWavHeader->nChannels = m_nChannels;
00120                                 pWavHeader->nSamplesPerSec = m_nSamplesPerSecond;
00121                                 pWavHeader->wBitsPerSample = m_nBitsPerSample;
00122 
00123                                 // From MSDN: http://msdn.microsoft.com/en-us/library/ms713497(VS.85).aspx
00124                                 // Block alignment, in bytes. The block alignment is the minimum atomic unit of data for the wFormatTag format type. If wFormatTag is WAVE_FORMAT_PCM or WAVE_FORMAT_EXTENSIBLE, nBlockAlign must be equal to the product of nChannels and wBitsPerSample divided by 8 (bits per byte).
00125                                 //OLD pWavHeader->nBlockAlign = 1;
00126                                 pWavHeader->nBlockAlign = m_nChannels * (m_nBitsPerSample >> 3);
00127 
00128                                 // From MSDN: Required average data-transfer rate, in bytes per second, for the format tag. If wFormatTag is WAVE_FORMAT_PCM, nAvgBytesPerSec should be equal to the product of nSamplesPerSec and nBlockAlign. For non-PCM formats, this member must be computed according to the manufacturer's specification of the format tag. 
00129                                 // OLD pWavHeader->nAvgBytesPerSec = m_nChannels*m_nSamplesPerSecond;
00130                                 pWavHeader->nAvgBytesPerSec =  m_nSamplesPerSecond * pWavHeader->nBlockAlign;
00131 
00132                                 pWavHeader->cbSize = 0;
00133 
00134                                 m_pMediaType->SetTemporalCompression(FALSE);
00135                                 // From using graph studio to look at the pins media types
00136                                 //m_pMediaType->SetSampleSize(1);
00137                                 m_pMediaType->SetSampleSize(4);
00138                         }
00139                         // Setup media type for 16 bit PCM
00140                         else if (strcmp(szCodec, "L16")==0)
00141                         {
00142                                 m_pMediaType->SetType(&MEDIATYPE_Audio);
00143                                 // For PCM
00144                                 m_pMediaType->SetFormatType(&FORMAT_WaveFormatEx);
00145                                 m_pMediaType->SetSubtype(&MEDIASUBTYPE_PCM);
00146                                 // 16-bit audio
00147                                 m_nBitsPerSample = 16;
00148                                 // Get sample rate
00149                                 m_nSamplesPerSecond = pSubsession->rtpTimestampFrequency();
00150 
00151                                 // Get channels
00152                                 m_nChannels = pSubsession->numChannels();
00153 
00154                                 m_nBytesPerSecond = m_nSamplesPerSecond * (m_nBitsPerSample >> 3) * m_nChannels;
00155 
00156                                 WAVEFORMATEX *pWavHeader = (WAVEFORMATEX*) m_pMediaType->AllocFormatBuffer(sizeof(WAVEFORMATEX));
00157                                 if (pWavHeader == 0)
00158                                         *phr = (E_OUTOFMEMORY);
00159 
00160                                 // Initialize the video info header
00161                                 ZeroMemory(pWavHeader, m_mt.cbFormat);   
00162 
00163                                 pWavHeader->wFormatTag = WAVE_FORMAT_PCM;
00164                                 pWavHeader->nChannels = m_nChannels;
00165                                 pWavHeader->nSamplesPerSec = m_nSamplesPerSecond;
00166                                 pWavHeader->wBitsPerSample = m_nBitsPerSample;
00167 
00168                                 // From MSDN: http://msdn.microsoft.com/en-us/library/ms713497(VS.85).aspx
00169                                 // Block alignment, in bytes. The block alignment is the minimum atomic unit of data for the wFormatTag format type. If wFormatTag is WAVE_FORMAT_PCM or WAVE_FORMAT_EXTENSIBLE, nBlockAlign must be equal to the product of nChannels and wBitsPerSample divided by 8 (bits per byte).
00170                                 //OLD pWavHeader->nBlockAlign = 1;
00171                                 pWavHeader->nBlockAlign = m_nChannels * (m_nBitsPerSample >> 3);
00172 
00173                                 // From MSDN: Required average data-transfer rate, in bytes per second, for the format tag. If wFormatTag is WAVE_FORMAT_PCM, nAvgBytesPerSec should be equal to the product of nSamplesPerSec and nBlockAlign. For non-PCM formats, this member must be computed according to the manufacturer's specification of the format tag. 
00174                                 // OLD pWavHeader->nAvgBytesPerSec = m_nChannels*m_nSamplesPerSecond;
00175                                 pWavHeader->nAvgBytesPerSec =  m_nSamplesPerSecond * pWavHeader->nBlockAlign;
00176 
00177                                 pWavHeader->cbSize = 0;
00178 
00179                                 m_pMediaType->SetTemporalCompression(FALSE);
00180                                 // From using graph studio to look at the pins media types
00181                                 //m_pMediaType->SetSampleSize(1);
00182                                 m_pMediaType->SetSampleSize(4);
00183                         }
00184                         // This section caters for MP3 audio but does NOT work yet
00193 
00205 
00214 
00219 
00220 
00229 
00238 
00242 
00246 
00247                         else
00248                         {
00249                                 // Unsupported
00250                                 //m_sLastError = "Unsupported audio codec: " + std::string(szMedium) + std::string(szCodec);
00251                                 *phr = VFW_E_INVALIDSUBTYPE;
00252                         }       
00253                 }
00254         }
00255         else
00256         {
00257                 // Invalid pointer
00258                 *phr = E_POINTER;
00259         }
00260 }
00261 
00262 HRESULT RtspSourceOutputPin::GetMediaType( CMediaType* pMediaType )
00263 {
00264         CAutoLock cAutoLock(m_pFilter->pStateLock());
00265         CheckPointer(pMediaType, E_POINTER);
00266         if (!m_pMediaType)
00267         {
00268                 return E_UNEXPECTED;
00269         }
00270         // First try to free the format block just in case it has already been allocated according to MSDN doc
00271         FreeMediaType(*pMediaType);
00272 
00273         // Copy media type
00274         HRESULT hr = CopyMediaType(pMediaType, m_pMediaType);
00275         if (FAILED(hr))
00276         {
00277                 return E_UNEXPECTED;
00278         }
00279         else
00280         {
00281                 return S_OK;
00282         }
00283 }
00284 
00285 HRESULT RtspSourceOutputPin::DecideBufferSize( IMemAllocator* pAlloc, ALLOCATOR_PROPERTIES* pRequest )
00286 {
00287         HRESULT hr;
00288         CAutoLock cAutoLock(m_pFilter->pStateLock());
00289 
00290         CheckPointer(pAlloc, E_POINTER);
00291         CheckPointer(pRequest, E_POINTER);
00292 
00293         // Ensure a minimum number of buffers
00294         if (pRequest->cBuffers == 0)
00295         {
00296                 pRequest->cBuffers = 2;
00297         }
00298 
00299         if (m_pMediaType->formattype == FORMAT_VideoInfo)
00300         {
00301                 VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER*)m_pMediaType->pbFormat;
00302                 // Now get size of video
00303                 pRequest->cbBuffer = pVih->bmiHeader.biSizeImage;
00304         }
00305         else if (m_pMediaType->formattype == FORMAT_WaveFormatEx)
00306         {
00307                 // Audio buffer size
00308                 //Buffer size calculation for PCM
00309                 pRequest->cbBuffer = m_nSamplesPerSecond * m_nChannels * (m_nBitsPerSample >> 3);
00310                 // Not sure what buffer size to use for MP3
00311                 //pRequest->cbBuffer = 200000;
00312         }
00313 
00314         ALLOCATOR_PROPERTIES Actual;
00315         hr = pAlloc->SetProperties(pRequest, &Actual);
00316         if (FAILED(hr)) 
00317         {
00318                 return hr;
00319         }
00320 
00321         // Is this allocator unsuitable?
00322         if (Actual.cbBuffer < pRequest->cbBuffer) 
00323         {
00324                 return E_FAIL;
00325         }
00326 
00327         return S_OK;
00328 }
00329 
00330 HRESULT RtspSourceOutputPin::FillBuffer( IMediaSample* pSample )
00331 {
00332         BYTE *pData;
00333         long cbData;
00334 
00335         CheckPointer(pSample, E_POINTER);
00336         CAutoLock cAutoLockShared(&m_cSharedState);
00337 
00338         // Access the sample's data buffer
00339         pSample->GetPointer(&pData);
00340 
00341         MediaSample* pSampleData = m_pFilter->m_rtpPacketManager.getNextSample(m_nID);
00342         if (!pSampleData) return S_FALSE;
00343 
00344         // Set the output pin's local start time: i.e. the time of the first sample that this pin has received
00345         if ( m_dLocalStartTime == -1)
00346         {
00347                 m_dLocalStartTime = pSampleData->StartTime();
00348         }
00349 
00350         // Set the global start time: this occurs when the first sample is received for a pin
00351         // This sets the global base starting time before RTCP sync kicks in
00352         if (!m_bOffsetSet)
00353         {
00354                 m_pFilter->notifyFilterAboutOffset(pSampleData->StartTime());
00355         }
00356         else
00357         {
00358                 // If this is the first sample time has already been synchronised using RTCP, we update the base time globally
00359                 if (pSampleData->isSynchronisationPoint())
00360                 {
00361                         m_bHasBeenSynced = true;
00362                         // Notify the filter about the "new" offset since the base starting time usually jumps per media stream once RTCP synchronisation kicks in
00363                         m_pFilter->notifyFilterAboutOffset(pSampleData->StartTime());
00364                 }
00365         }
00366 
00367         // Get buffer size
00368         cbData = pSampleData->getSize();
00369 
00370         // Copy the data from the sample into the buffer
00371         if (m_nBitsPerSample == 16)
00372         {
00373                 // 16 bit audio is always deliver in network byte order by the liveMedia library, we need to swap the byte order first
00374                 // before delivering the media downstream
00375                 
00376                 // Swap the byte order of the 16-bit values that we have just read:
00377                 unsigned numValues = cbData >> 1;
00378                 
00379                 short* pDest = (short*)pData;
00380                 short* pSrc = (short*)pSampleData->getData();
00381 
00382                 for (unsigned i = 0; i < numValues; ++i) 
00383                 {
00384                         short const orig = pSrc[i];
00385                         pDest[i] = ((orig&0xFF)<<8) | ((orig&0xFF00)>>8);
00386                 }
00387         }
00388         else
00389         {
00390                 // 8 bit PCM audio: simply copy the media buffer
00391                 if (pSampleData)
00392                 {
00393                         memcpy(pData, pSampleData->getData(), (DWORD) cbData);
00394                 }
00395                 else
00396                 {
00397                         // Error!!!! What to do?
00398                         return S_FALSE;
00399                 }
00400         }
00401 
00402         // Calculate adjusted times: 
00403         // 1)   Subtract the first timestamp to make our time zero-based (the time on the RTSP server is gettimeofday-based) 
00404         //              And the DirectShow time is zero-based. Pick the first timestamp according to the following criteria:
00405         //              If this pin has been offset but is unsynced: use the local offset
00406         //              Once this pin has been syned:                            use the global offset
00407         //
00408         // 2) Add the last stream offset time (Since sync occurs after DS time zero, we need to remember when sync last occurred so that we can offset the time by at least that amount into the future
00409         //    If this is not done, the samples would arrive at the renderer late 
00410 
00411         double dOffset = (m_bHasBeenSynced) ? m_dGlobalStartTime : m_dLocalStartTime;
00412         double dAdjustedTime = pSampleData->StartTime() - dOffset + m_pFilter->m_dStreamTimeOffset;
00413         REFERENCE_TIME rtStart = dAdjustedTime * 1000000 * 10;
00414 
00415         // Set stop timestamp to be slightly in the future
00416         // The commented out NULL time stamp for the stop time seemed to work the same
00417         REFERENCE_TIME rtStop = rtStart + 1;
00418         pSample->SetTime(&rtStart, &rtStop);
00419 
00420         // USE THIS IN ORDER TO GET THE RENDERER TO RENDER SAMPLES IMMEDIATELY AS IT RECEIVES THEM
00421 //#define RTVC_DEBUG_SET_NULL_TIMESTAMPS
00422 #ifdef RTVC_DEBUG_SET_NULL_TIMESTAMPS
00423         pSample->SetTime(NULL, NULL);
00424 #endif
00425 
00426         if (m_pMediaType->subtype == MEDIASUBTYPE_PCM)
00427         {
00428                 // All samples are sync points
00429                 pSample->SetSyncPoint(TRUE);
00430         }
00431 
00432         // Set length of media sample
00433         pSample->SetActualDataLength(cbData);
00434 
00435         // Free memory used by sample
00436         delete pSampleData;
00437 
00438         return S_OK;
00439 }
00440 
/// Custom buffer-processing loop replacing CSourceStream's default: polls the
/// filter's RTP packet manager and only requests a delivery buffer once a
/// sample is queued for this pin (m_nID), sleeping briefly otherwise.
/// Runs on the pin's worker thread; exits when CMD_STOP is received.
/// Returns S_FALSE on normal stop, or the failing HRESULT if FillBuffer
/// reports a fatal error (after delivering end-of-stream and EC_ERRORABORT).
HRESULT RtspSourceOutputPin::DoBufferProcessingLoop( void )
{
        Command com;

        OnThreadStartPlay();

        // Reset so the base start time is re-established from the first sample
        // each time streaming (re)starts
        m_bOffsetSet = false;
        do {
                // Inner loop: push samples until a thread command arrives
                while (!CheckRequest(&com)) 
                {
                        if (m_pFilter->m_rtpPacketManager.hasSamples(m_nID))
                        {
                                // Get buffer
                                IMediaSample *pSample = NULL;
                                HRESULT hr = GetDeliveryBuffer(&pSample,NULL,NULL,0);
                                if (FAILED(hr)) {
                                        Sleep(1);
                                        continue;       // go round again. Perhaps the error will go away
                                        // or the allocator is decommited & we will be asked to
                                        // exit soon.
                                }

                                // Virtual function user will override.
                                hr = FillBuffer(pSample);

                                if (hr == S_OK) {
                                        hr = Deliver(pSample);
                                        pSample->Release();

                                        // downstream filter returns S_FALSE if it wants us to
                                        // stop or an error if it's reporting an error.
                                        // NOTE(review): unlike the stock CSourceStream loop this
                                        // deliberately does NOT return here — it only logs and
                                        // keeps pushing.
                                        if(hr != S_OK)
                                        {
                                                DbgLog((LOG_TRACE, 2, TEXT("Deliver() returned %08x; stopping"), hr));
                                                //return S_OK;
                                        }

                                } else if (hr == S_FALSE) {
                                        // derived class wants us to stop pushing data
                                        // (here: FillBuffer had no sample) — release and retry
                                        pSample->Release();

                                        // Should I use this to signal when there is no data
                                        continue;
                                } else {
                                        // derived class encountered an error
                                        pSample->Release();
                                        DbgLog((LOG_ERROR, 1, TEXT("Error %08lX from FillBuffer!!!"), hr));
                                        DeliverEndOfStream();
                                        m_pFilter->NotifyEvent(EC_ERRORABORT, hr, 0);
                                        return hr;
                                }
                        }
                        else
                        {
                                // No queued data yet: back off to avoid a busy spin
                                Sleep(10);
                        }
                }

                // For all commands sent to us there must be a Reply call!
                if (com == CMD_RUN || com == CMD_PAUSE) 
                {
                        Reply(NOERROR);
                } else if (com != CMD_STOP) 
                {
                        Reply((DWORD) E_UNEXPECTED);
                        DbgLog((LOG_ERROR, 1, TEXT("Unexpected command!!!")));
                }
        } while (com != CMD_STOP);

        return S_FALSE;
}
00512 
00513 void RtspSourceOutputPin::setOffset( double dOffset)
00514 {
00515         m_dGlobalStartTime = dOffset;
00516         m_bOffsetSet = true;
00517 }

Generated on Fri Mar 13 14:12:38 2009 for RTVC by  doxygen 1.5.3