MJPEGVideoSource.cpp

/*
 *  Copyright (C) Massimo Cora' 2006 <maxcvs@email.it>
 *
 *  This program is free software; you can redistribute it and/or modify
 *  it under the terms of the GNU General Public License as published by
 *  the Free Software Foundation; either version 2 of the License, or
 *  (at your option) any later version.
 *
 *  This program is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *  GNU General Public License for more details.
 *
 *  You should have received a copy of the GNU General Public License
 *  along with this program; if not, write to the Free Software
 *  Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
 */


#include "MJPEGVideoSource.hh"

// livemedia
#include <GroupsockHelper.hh>
#include <MediaSink.hh>

// opencv
#include <cv.h>
#include <highgui.h>

//--------------------------------------------------------------------------
// constructor

MJPEGVideoSource::MJPEGVideoSource( UsageEnvironment& env, BaseInputVideo* input_video,
                                    int stream_id, int frame_rate, int encoder_buf_size )
    : JPEGVideoSource( env ), _input_video( input_video ),
      _encoder_internal_buf_size( encoder_buf_size ),
      _fps( frame_rate ),
      _stream_id( stream_id )
{
    _initialized = false;

    // set the max output packet buffer size
    OutPacketBuffer::maxSize = OUTPUT_PACKET_BUFFER_MAXSIZE;
    _video_encoder = NULL;

    qTable = NULL;

    // milliseconds between two consecutive frames at the requested frame rate
    _frame_rate_millis = 1000 / frame_rate;

    // initialize device
    if ( _input_video->init_device () == true )
        _initialized = true;
}

//--------------------------------------------------------------------------
//

MJPEGVideoSource* MJPEGVideoSource::createNew( UsageEnvironment &env, BaseInputVideo* input_video,
                                               int stream_id /* = 0 */, int frame_rate /* = 25 */,
                                               int encoder_buf_size /* = 1000000 */ )
{
    return new MJPEGVideoSource( env, input_video, stream_id, frame_rate, encoder_buf_size );
}
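
// Usage sketch (illustrative only, kept out of the build with #if 0): how a
// caller would typically plug this source into a live555 JPEG RTP sink.  The
// concrete BaseInputVideo implementation passed in, the multicast address and
// the port below are placeholders, not part of this class.
#if 0
#include <BasicUsageEnvironment.hh>
#include <liveMedia.hh>

void stream_mjpeg( BaseInputVideo* my_input_video )
{
    TaskScheduler*    scheduler = BasicTaskScheduler::createNew();
    UsageEnvironment* env       = BasicUsageEnvironment::createNew( *scheduler );

    // RTP destination (placeholder address/port)
    struct in_addr dest_addr;
    dest_addr.s_addr = our_inet_addr( "239.255.42.42" );
    Groupsock rtp_groupsock( *env, dest_addr, Port( 18888 ), 255 /* ttl */ );

    // the MJPEG source wrapping the capture device, and a standard JPEG RTP sink
    MJPEGVideoSource* source = MJPEGVideoSource::createNew( *env, my_input_video );
    RTPSink*          sink   = JPEGVideoRTPSink::createNew( *env, &rtp_groupsock );

    sink->startPlaying( *source, NULL, NULL );
    env->taskScheduler().doEventLoop();
}
#endif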

//--------------------------------------------------------------------------
//

void MJPEGVideoSource::startCapture()
{
    IplImage* current_frame = NULL;
    fPreferredFrameSize = 0;

    if ( _input_video->is_multi_stream () ) {
        if ( (current_frame = _input_video->get_next_frame_by_stream_id ( _stream_id )) == NULL ) {
            DEBUG_PRINT ("id #%d : _input_video->get_next_frame_by_stream_id = NULL\n", _stream_id );
        }
    }
    else if ( (current_frame = _input_video->get_next_frame ()) == NULL ) {
        DEBUG_PRINT ("_input_video->get_next_frame = NULL\n");
    }

    // no frame was captured, so there is nothing to encode: just bail out
    if ( current_frame == NULL ) {
        fFrameSize = fPreferredFrameSize = 0;
        _encoded_frame = NULL;
        return;
    }

    // remember when this frame was grabbed (the timezone argument is unused)
    gettimeofday( &fLastCaptureTime, &Idunno );

    // proceed with encoding: lazily create the encoder on the first frame
    if ( _video_encoder == NULL ) {
        _video_encoder = new OStreamVideoEncoder( _encoder_internal_buf_size, OSTREAM_ENCODING_MJPEG,
                                                  current_frame->width, current_frame->height, _fps );
    }

    int size_taken;
    _encoded_frame = _video_encoder->encode_video_frame( current_frame, &size_taken );

    if ( size_taken > _encoder_internal_buf_size || _encoded_frame == NULL ) {
        DEBUG_PRINT ("Error: encoding failed or the jpeg frame does not fit the encoder buffer.\n");
        fPreferredFrameSize = 0;
        _encoded_frame = NULL;
        cvReleaseImage( &current_frame );
        return;
    }

    int jpeg_buf_size = size_taken;

    if ( jpeg_buf_size <= 0 || (unsigned int)jpeg_buf_size > fMaxSize ) {
        DEBUG_PRINT ("Error: the jpeg frame to stream is too big for the output buffer.\n");
        fPreferredFrameSize = 0;
        _encoded_frame = NULL;
        cvReleaseImage( &current_frame );
        return;
    }

    fPreferredFrameSize = jpeg_buf_size;
    cvReleaseImage( &current_frame );
}


MJPEGVideoSource::~MJPEGVideoSource()
{
    // we don't need the encoder anymore
    if ( _video_encoder )
        delete _video_encoder;
    _video_encoder = NULL;

    // not strictly necessary, but keeps the pointer from dangling
    _encoded_frame = NULL;

    if ( qTable != NULL )
        free( qTable );

    _initialized = false;
}

u_int8_t const* MJPEGVideoSource::quantizationTables( u_int8_t& precision,
                                                      u_int16_t& length )
{
    length = 0;
    precision = 0;

    if ( qTable == NULL )
        return NULL;

    precision = 8;
    length = 64 * 2;    // two 64-byte quantization tables (luminance + chrominance)

    return qTable;
}

Boolean MJPEGVideoSource::isJPEGVideoSource() const
{
    return true;
}



//--------------------------------------------------------------------------
//

void MJPEGVideoSource::doGetNextFrame()
{
    // let the CPU breathe: wait roughly one frame interval before grabbing
    // and delivering the next frame
    CV_WAIT_KEY( _frame_rate_millis );

    deliverFrame();
}
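
// Note: blocking inside doGetNextFrame() stalls the whole live555 event loop.
// A non-blocking alternative (sketch only, not wired into this class) is to let
// the task scheduler pace the frames instead of CV_WAIT_KEY:
#if 0
void MJPEGVideoSource::doGetNextFrame()
{
    // deliver after one frame interval; the delay is handled by the scheduler
    envir().taskScheduler().scheduleDelayedTask( _frame_rate_millis * 1000,
                                                 (TaskFunc*)deliverFrameStub, this );
}

// static trampoline (hypothetical helper, would need a declaration in the header)
void MJPEGVideoSource::deliverFrameStub( void* clientData )
{
    ((MJPEGVideoSource*)clientData)->deliverFrame();
}
#endif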

//--------------------------------------------------------------------------
//

void MJPEGVideoSource::deliverFrame()
{
    //
    // This is called when new frame data is available from the device.
    // This function should deliver the next frame of data from the device,
    // using the following parameters (class members):
    // 'in' parameters (these should *not* be modified by this function):
    //     fTo: The frame data is copied to this address.
    //         (Note that the variable "fTo" is *not* modified.  Instead,
    //          the frame data is copied to the address pointed to by "fTo".)
    //     fMaxSize: This is the maximum number of bytes that can be copied
    //         (If the actual frame is larger than this, then it should
    //          be truncated, and "fNumTruncatedBytes" set accordingly.)
    // 'out' parameters (these are modified by this function):
    //     fFrameSize: Should be set to the delivered frame size (<= fMaxSize).
    //     fNumTruncatedBytes: Should be set iff the delivered frame would have been
    //         bigger than "fMaxSize", in which case it's set to the number of bytes
    //         that have been omitted.
    //     fPresentationTime: Should be set to the frame's presentation time
    //         (seconds, microseconds).
    //     fDurationInMicroseconds: Should be set to the frame's duration, if known.

    // Start capturing the next frame:
    startCapture();

    // Set the 'presentation time': the time that this frame was captured
    fPresentationTime = fLastCaptureTime;

    // Now, read the previously captured frame, starting with the JPEG header:
    int jpeg_header_size = 0;
    if ( _encoded_frame != NULL )
        jpeg_header_size = setParamsFromHeader( _encoded_frame );

    // if no frame was captured/encoded, or no valid JPEG header was found,
    // deliver nothing and just schedule the next task
    if ( jpeg_header_size <= 0 || fPreferredFrameSize == 0 ) {
        fFrameSize = 0;
        nextTask() = envir().taskScheduler().scheduleDelayedTask( 0, (TaskFunc*)afterGetting,
                                                                  this );
        return;
    }

    // Then, the JPEG payload: fTo must *not* include the JPEG header, because
    // the sink prepends its own (smaller) RTP/JPEG header instead.

    fFrameSize = fPreferredFrameSize - jpeg_header_size;
    memcpy( fTo, _encoded_frame + jpeg_header_size, fFrameSize );

    if ( fFrameSize == fMaxSize ) {
        DEBUG_PRINT( "MJPEGVideoSource::deliverFrame(): "
            "read maximum buffer size: %d bytes.  Frame may be truncated\n", fMaxSize );
    }

    // After delivering the data, switch to another task, and inform
    // the reader that data is available:
    nextTask() = envir().taskScheduler().scheduleDelayedTask( 0, (TaskFunc*)afterGetting,
                                                              this );
}
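
// The comment block above describes the full live555 delivery contract.  This
// class avoids truncation by refusing oversized frames in startCapture(); for
// reference, the conventional pattern when a frame *may* exceed fMaxSize looks
// like the following sketch (illustrative only; 'payload' and 'encoded_size'
// are placeholders for the encoded JPEG data, minus its header, and its length):
#if 0
    if ( encoded_size > fMaxSize ) {
        fFrameSize = fMaxSize;
        fNumTruncatedBytes = encoded_size - fMaxSize;   // tell the downstream sink what was dropped
    }
    else {
        fFrameSize = encoded_size;
        fNumTruncatedBytes = 0;
    }
    memcpy( fTo, payload, fFrameSize );
#endif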


//--------------------------------------------------------------------------
//

u_int8_t MJPEGVideoSource::type() {
    return 1;       // RFC 2435 type 1: baseline JPEG with 4:2:0 chroma subsampling
}

//--------------------------------------------------------------------------
//

u_int8_t MJPEGVideoSource::width() {
    return fLastWidth;      // in 8-pixel blocks, as carried in the RTP/JPEG header
}

//--------------------------------------------------------------------------
//

u_int8_t MJPEGVideoSource::height() {
    return fLastHeight;     // in 8-pixel blocks, as carried in the RTP/JPEG header
}

//--------------------------------------------------------------------------
//

u_int8_t MJPEGVideoSource::qFactor() {
    // Q values >= 128 mean the quantization tables are sent in-band
    // (see quantizationTables() above); ffmpeg needs this to be returned.
    return 195;
}


//--------------------------------------------------------------------------
// try to find the SOF0 and DQT markers in the JPEG header and grab the frame
// dimensions and quantization tables; returns the header size in bytes, or
// -1 on error

int MJPEGVideoSource::setParamsFromHeader( const unsigned char *fJPEGHeader )
{
    // Look for the "SOF0" marker (0xFF 0xC0), to get the frame
    // width and height:
    bool found_sof0 = false;
    bool found_hdr_size = false;
    int jpeg_header_size = 0;

    // stop a few bytes early so the marker look-ahead and the SOF0 field
    // reads below stay within the scanned header region
    for (int i = 0; i < JPEG_HEADER_MAX_SIZE - 8; ++i) {
        if ( !found_sof0 && fJPEGHeader[i] == 0xFF && fJPEGHeader[i+1] == 0xC0 ) {
            // SOF0 payload: length (2 bytes), precision (1 byte), then the
            // big-endian height and width; store them in 8-pixel blocks, as
            // carried in the RTP/JPEG header (dimension/8 == (hi<<5)|(lo>>3))
            fLastHeight = (fJPEGHeader[i+5]<<5)|(fJPEGHeader[i+6]>>3);
            fLastWidth  = (fJPEGHeader[i+7]<<5)|(fJPEGHeader[i+8]>>3);
            found_sof0 = true;
        }

        // DQT marker (0xFF 0xDB): duplicate the 64-byte table that follows
        // into both halves of qTable (the luminance and chrominance tables
        // are assumed to be identical)
        if (fJPEGHeader[i] == 0xFF && fJPEGHeader[i+1] == 0xDB) {
            if (qTable != NULL)
                free (qTable);

            qTable = (u_int8_t *) malloc(sizeof(u_int8_t) * 128);
            memcpy(qTable, fJPEGHeader + i + 5, 64);
            memcpy(qTable + 64, fJPEGHeader + i + 5, 64);
        }

        // look for the final bytes of the SOS (0xFF 0xDA) header: the
        // 0x3F 0x00 (Se = 63, Ah/Al = 0) pair that terminates it and marks
        // the start of the entropy-coded scan data
        if ( fJPEGHeader[i] == 0x3F && fJPEGHeader[i+1] == 0x00 ) {
            found_hdr_size = true;
            jpeg_header_size = i + 2;
            break;
        }
    }

    if ( !found_sof0 )
        DEBUG_PRINT( "MJPEGVideoSource: Failed to find SOF0 marker in header!\n");

    if ( !found_hdr_size ) {
        DEBUG_PRINT( "MJPEGVideoSource: Failed to find header size!\n");
        return -1;
    }

    return jpeg_header_size;
}
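
// Worked example (illustrative helper, not used by the class): the RTP/JPEG
// header carries width and height in 8-pixel blocks, which is why the two
// big-endian SOF0 bytes are collapsed with shifts above instead of being
// assembled into a 16-bit value and divided by 8:
//
//     ((hi << 8) | lo) / 8  ==  (hi << 5) | (lo >> 3)
//
// e.g. width 640 is stored as hi = 0x02, lo = 0x80, and
//     (0x02 << 5) | (0x80 >> 3) = 64 | 16 = 80 = 640 / 8
#if 0
static inline u_int8_t dimension_in_blocks( u_int8_t hi, u_int8_t lo )
{
    // equivalent to the inline expressions used in setParamsFromHeader();
    // dimensions larger than 2040 pixels would overflow the 8-bit result,
    // a limit inherited from the RTP/JPEG header fields
    return (u_int8_t)( (hi << 5) | (lo >> 3) );
}
#endif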