source: trunk/src/testing/app/hd-video-player/GrabberProc.cpp @ 4

Revision 4, 24.1 KB checked in by ajaworski, 13 years ago

Added modified SAGE sources

/******************************************************************************
 * SAGE - Scalable Adaptive Graphics Environment
 *
 * Copyright (C) 2004 Electronic Visualization Laboratory,
 * University of Illinois at Chicago
 *
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 *  * Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *  * Redistributions in binary form must reproduce the above
 *    copyright notice, this list of conditions and the following disclaimer
 *    in the documentation and/or other materials provided with the distribution.
 *  * Neither the name of the University of Illinois at Chicago nor
 *    the names of its contributors may be used to endorse or promote
 *    products derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
 * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
 * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * Direct questions, comments etc about SAGE to http://www.evl.uic.edu/cavern/forum/
 *
 *****************************************************************************/

#include "StdAfx.h"
#include <aviriff.h>  // defines 'FCC' macro

#include "math.h"
#include "YUV_RGB.h"

#include "GrabberProc.h"
#include "Utils2.h"

//------------------------------------
// If you want to test with saving BMP files, delete this define.
#define _SAGE_
//------------------------------------

//-------------------------------------------------------------------------------------------
//=================================[ class GrabberCB ]=======================================
//-------------------------------------------------------------------------------------------

//-------------------------------------------------------------------------------------------
GrabberCB::GrabberCB() : m_fFirstSample(true)
{
}

//-------------------------------------------------------------------------------------------
GrabberCB::~GrabberCB()
{
}

//-------------------------------------------------------------------------------------------
// Support querying for ISampleGrabberCB interface
HRESULT GrabberCB::QueryInterface(REFIID iid, void **ppv)
{
    if (!ppv) { return E_POINTER; }
    if (iid == IID_IUnknown)
    {
        *ppv = static_cast<IUnknown*>(this);
    }
    else if (iid == IID_ISampleGrabberCB)
    {
        *ppv = static_cast<ISampleGrabberCB*>(this);
    }
    else
    {
        return E_NOINTERFACE;
    }
    AddRef();  // We don't actually ref count, but in case we change the implementation later.
    return S_OK;
}

//-------------------------------------------------------------------------------------------
// SampleCB: This is where we process each sample
HRESULT GrabberCB::SampleCB(double SampleTime, IMediaSample *pSample)
{
    static int init_flag = 0;
/*
    FILE *fp;

    fp = fopen("./log.dat", "w");
    fprintf(fp, "\r\nin sampleCB");
    fclose(fp);
*/

    HRESULT hr;

    // Get the pointer to the sample buffer.
    BYTE *pBuffer;
    hr = pSample->GetPointer(&pBuffer);
    if (FAILED(hr))
    {
        OutputDebugString(TEXT("SampleCB: GetPointer FAILED\n"));
        return hr;
    }

    // This function must be called for every sample so the processor always sees the current buffer.
    m_GrabProc.SetImageBuffer(pBuffer);

    // Scan the image on the first sample. Re-scan if there is a discontinuity.
    // (This will produce horrible results if there are big scene changes in the
    // video that are not associated with discontinuities. Might be safer to re-scan
    // each image, at a higher perf cost.)

#ifdef _SAGE_
    // RGB -> Send
    //if(m_GrabProc.GetSageOn() == true) {
        hr = m_GrabProc.SendToSage();
    //}
#else
    // Convert the image
    // RGB -> Save
    hr = m_GrabProc.SendToBmp();
    // YUV -> RGB -> Send or Save
//  hr = m_GrabProc.ConvertImage();
#endif

    if(FAILED(hr)) {
        exit(1);
    }

    return hr;
}
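
// Note (not from the original source): ISampleGrabberCB::SampleCB runs on the graph's streaming
// thread, and the buffer returned by IMediaSample::GetPointer is only guaranteed to remain valid
// for the duration of the call, so long-running work here (such as a blocking network send)
// stalls the capture graph.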

//-------------------------------------------------------------------------------------------
//=================================[ class AudioGrabberCB ]==================================
//-------------------------------------------------------------------------------------------

//-------------------------------------------------------------------------------------------
AudioGrabberCB::AudioGrabberCB() : m_fFirstSample(true), m_index(0)
{
    m_GrabProc = NULL;
}

//-------------------------------------------------------------------------------------------
AudioGrabberCB::~AudioGrabberCB()
{
}

//-------------------------------------------------------------------------------------------
// Support querying for ISampleGrabberCB interface
HRESULT AudioGrabberCB::QueryInterface(REFIID iid, void **ppv)
{
    if (!ppv) { return E_POINTER; }
    if (iid == IID_IUnknown)
    {
        *ppv = static_cast<IUnknown*>(this);
    }
    else if (iid == IID_ISampleGrabberCB)
    {
        *ppv = static_cast<ISampleGrabberCB*>(this);
    }
    else
    {
        return E_NOINTERFACE;
    }
    AddRef();  // We don't actually ref count, but in case we change the implementation later.
    return S_OK;
}

//-------------------------------------------------------------------------------------------
// SampleCB: This is where we process each sample
HRESULT AudioGrabberCB::SampleCB(double SampleTime, IMediaSample *pSample)
{

    HRESULT hr = S_OK;

    // Get the pointer to the sample buffer.
    BYTE *pBuffer;
    hr = pSample->GetPointer(&pBuffer);
    if (FAILED(hr))
    {
        OutputDebugString(TEXT("SampleCB: GetPointer FAILED\n"));
        return hr;
    }

    // Actual number of valid bytes in this audio sample.
    int lenBuf = pSample->GetActualDataLength();

    // This function must be called for every sample so the processor always sees the current buffer.
    m_GrabProc->SetAudioBuffer(pBuffer);

#ifdef _SAGE_
    // PCM audio -> Send
    //if(m_GrabProc->GetSageOn() == true) {
        hr = m_GrabProc->SendAudioToSage(lenBuf);
    //}
#else

#endif

    if(FAILED(hr)) {
        exit(1);
    }

    return hr;
}

//-------------------------------------------------------------------------------------------
//=================================[ class GrabProc ]========================================
//-------------------------------------------------------------------------------------------

// #define _YUVtransF_

#define MAX_SUB_TYPE    4
GUID media_type_ary[] = {MEDIASUBTYPE_RGB24, MEDIASUBTYPE_UYVY, MEDIASUBTYPE_YUY2, MEDIASUBTYPE_YUYV};

// {A6512C9F-A47B-45ba-A054-0DB0D4BB87F7}
//static const GUID CLSID_YuvGray =
//{ 0xa6512c9f, 0xa47b, 0x45ba, { 0xa0, 0x54, 0xd, 0xb0, 0xd4, 0xbb, 0x87, 0xf7 } };
//{ 0xB179A682, 0x641B, 0x11D2, { 0xA4, 0xD9, 0x00, 0x60, 0x08, 0x0B, 0xA6, 0x34} };

//{B179A682-641B-11D2-A4D9-0060080BA634}
#ifdef _YUVtransF_
static const GUID CLSID_YUVxfm =
{ 0xB179A682, 0x641B, 0x11D2, { 0xA4, 0xD9, 0x00, 0x60, 0x08, 0x0B, 0xA6, 0x34} };
#endif

//{ 0xa6512c9f, 0xa47b, 0x45ba, { 0xa0, 0x54, 0xd, 0xb0, 0xd4, 0xbb, 0x87, 0xf7 } };
// {B179A682-641B-11D2-A4D9-0060080BA634}
//DEFINE_GUID(CLSID_YUVxfm,
//0xB179A682, 0x641B, 0x11D2, 0xA4, 0xD9, 0x00, 0x60, 0x08, 0x0B, 0xA6, 0x34);
// 0x2fa4f053, 0x6d60, 0x4cb0, 0x95, 0x3, 0x8e, 0x89, 0x23, 0x4f, 0x3f, 0x73);

//-------------------------------------------------------------------------------------------
C_GrabProc::C_GrabProc(void) {
    m_pGrabF = NULL;
    m_pAudioGrabF = NULL;
    m_pYUVtransF = NULL;

    m_dwWidth = m_dwHeight = m_lStride = m_iBitDepth = 0;
    m_pImg = NULL;
    m_pAudioSamples = NULL;
    m_pbRGB_buff = NULL;
    m_sageOn = false;
}


//-------------------------------------------------------------------------------------------
void C_GrabProc::FreeFilter(void) {
//  m_pGrabF->Release();

///*
    if(m_pGrabF) {
        grabberCallback.Release();

#ifdef _YUVtransF_
        delete m_pYUVtransF;
        m_pYUVtransF = NULL;
#endif

// Don't call delete on a filter; deleting it directly is not a graceful way to free it.
//      delete m_pGrabF;
        m_pGrabF = NULL;
    }
// */
    if(m_pbRGB_buff) {
        free(m_pbRGB_buff);
        m_pbRGB_buff = NULL;
    }
}

HRESULT C_GrabProc::SetFilter(IGraphBuilder* pGraph, IBaseFilter* pUpstream, wchar_t* pUpstreamPinName, int out_color_mode, bool sage) {
    HRESULT hr;

    EXECUTE_ASSERT(ERROR_SUCCESS == SaveGraphFile(pGraph, L"Before Add filter.GRF"));

    m_sageOn = sage;
#ifdef _YUVtransF_
    hr = CDSUtils::AddFilter(pGraph, CLSID_YUVxfm, L"YUV Transform", &m_pYUVtransF);
#endif

    if(m_pGrabF == NULL) {
        hr = CDSUtils::AddFilter(pGraph, CLSID_SampleGrabber, L"Grabber", &m_pGrabF);
        if(hr < 0) {
            //SaveLog.ToLog("\n Can not Add filter : Grabber.");
            return hr;
        }
        //SaveLog.ToLog("\n Grabber is added ");
        //SaveLog.ToLog((int)&m_pGrabF);
    }
    EXECUTE_ASSERT(ERROR_SUCCESS == SaveGraphFile(pGraph, L"Before connect filter.GRF"));

    CComQIPtr<ISampleGrabber> pGrabber(m_pGrabF);

//  if(++m_sub_type > MAX_SUB_TYPE) return -1;

    // Configure the sample grabber
    ZeroMemory(&m_mt, sizeof(AM_MEDIA_TYPE));
    m_mt.majortype = MEDIATYPE_Video;
    m_mt.formattype = FORMAT_VideoInfo;

    if(out_color_mode == RGB24)      m_mt.subtype = MEDIASUBTYPE_RGB24;
    else if(out_color_mode == RGB16) m_mt.subtype = MEDIASUBTYPE_RGB565;
    else if(out_color_mode == YUV)   m_mt.subtype = MEDIASUBTYPE_UYVY;
    else {
        //SaveLog.ToLog("\n Color mode is not correct");
        return -1;
    }

    // Note: I don't expect the next few methods to fail ....
    hr = pGrabber->SetMediaType(&m_mt);  // Set the media type we want for the connection.
    hr = pGrabber->SetOneShot(FALSE);  // Disable "one-shot" mode
    hr = pGrabber->SetBufferSamples(FALSE); // Disable sample buffering
    hr = pGrabber->SetCallback(&grabberCallback, 0); // Set our callback. '0' means 'use the SampleCB callback'

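    // Note (not from the original source): the second argument of ISampleGrabber::SetCallback
    // selects the callback style: 0 delivers the whole IMediaSample to ISampleGrabberCB::SampleCB
    // (used here), while 1 delivers a copy of the buffer to ISampleGrabberCB::BufferCB.
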
#ifdef _YUVtransF_

    hr = CDSUtils::ConnectFilters(pGraph, pUpstream, pUpstreamPinName, m_pYUVtransF, NULL);
    //if(hr < 0) SaveLog.ToLog("\n Can not ConnectFilter : YUV Transform.");
    //SaveGraphFile(pGraph, L"ConnectYUVtransF.GRF");
    hr = CDSUtils::ConnectFilters(pGraph, m_pYUVtransF, L"XForm Out", m_pGrabF, NULL);
//  hr = CDSUtils::ConnectFilters(pGraph, m_pYUVtransF, pUpstreamPinName, m_pGrabF, NULL);
    //if(hr < 0) SaveLog.ToLog("\n Can not ConnectFilter : Grabber.");
    //SaveGraphFile(pGraph, L"ConnectGrabber.GRF");
#else
    hr = CDSUtils::ConnectFilters(pGraph, pUpstream, pUpstreamPinName, m_pGrabF, NULL);
    if(hr < 0) {
        //SaveLog.ToLog("\n Can not ConnectFilter : Grabber.");
        return hr;
    }
    EXECUTE_ASSERT(ERROR_SUCCESS == SaveGraphFile(pGraph, L"ConnectGrabber.GRF"));
#endif
    // If input is YUV, add YUVtransform filter.
    //SaveLog.ToLog("\n In SetFilter m_sub_type");
    //SaveLog.ToLog(m_sub_type);
/*
    if(m_sub_type == 0) {

    } else {
        hr = CDSUtils::ConnectFilters(pGraph, pUpstream, NULL, m_pGrabF, NULL);
        if(FAILED(hr)) continue;
    }
*/
    // Find out the exact video format.
    hr = pGrabber->GetConnectedMediaType(&m_mt);
    if (FAILED(hr))
    {
        //SaveLog.ToLog("\nCould not get the video format.");
        return -1;
    }

    VIDEOINFOHEADER *pVih;
    pVih = reinterpret_cast<VIDEOINFOHEADER*>(m_mt.pbFormat);

    if(SetVideoFormat(*pVih, out_color_mode) < 0) return -1;
    CoTaskMemFree(m_mt.pbFormat);

    EXECUTE_ASSERT(ERROR_SUCCESS == SaveGraphFile(pGraph, L"InSetFilter.GRF"));

//  if(pGrabber) delete pGrabber;
//  SAFE_RELEASE(pGrabber);

    return hr;
}
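
// Note on SetFilter above (not from the original source): SetMediaType on the sample grabber only
// constrains the connection (major type, subtype, format type); the full negotiated
// VIDEOINFOHEADER, including width, height and stride, is only known after the pins connect,
// which is why GetConnectedMediaType / SetVideoFormat run at the end of SetFilter rather than up front.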

HRESULT C_GrabProc::SetAudioFilter(IGraphBuilder* pGraph, IBaseFilter* pUpstream, wchar_t* pUpstreamPinName)
{
    HRESULT hr;
    AudioGrabCallback.m_GrabProc = this;

    EXECUTE_ASSERT(ERROR_SUCCESS == SaveGraphFile(pGraph, L"Before Add filter.GRF"));

    hr = CDSUtils::AddFilter(pGraph, CLSID_SampleGrabber, L"Audio Grabber", &m_pAudioGrabF);
    if(hr < 0) {
        //SaveLog.ToLog("\n Can not Add filter : Audio Grabber.");
        return hr;
    }
    //SaveLog.ToLog("\n Grabber is added ");
    //SaveLog.ToLog((int)&m_pAudioGrabF);
    EXECUTE_ASSERT(ERROR_SUCCESS == SaveGraphFile(pGraph, L"Before connect filter.GRF"));

    CComQIPtr<ISampleGrabber> pGrabber(m_pAudioGrabF);

    // mediatype definition
    /*typedef struct _MediaType {
        GUID     majortype;
        GUID     subtype;
        BOOL     bFixedSizeSamples;
        BOOL     bTemporalCompression;
        ULONG    lSampleSize;
        GUID     formattype;
        IUnknown *pUnk;
        ULONG    cbFormat;
        BYTE     *pbFormat;
    } AM_MEDIA_TYPE;*/

    // Configure the sample grabber
    ZeroMemory(&m_audiomt, sizeof(AM_MEDIA_TYPE));
    m_audiomt.majortype = MEDIATYPE_Audio;
    m_audiomt.subtype = MEDIASUBTYPE_PCM;
    m_audiomt.lSampleSize = 1024 * 2 * 2;   // 1024 frames * 2 channels * 2 bytes (16-bit). Size of the sample in bytes; for compressed data the value can be zero.
    m_audiomt.bFixedSizeSamples = TRUE;     // 1024 1536
    m_audiomt.bTemporalCompression = FALSE;
    m_audiomt.formattype = FORMAT_WaveFormatEx;

    // Allocate the format block to hold the WAVEFORMATEX structure.
    // waveformat definition
    /*typedef struct
    {
      WORD  wFormatTag;
      WORD  nChannels;
      DWORD nSamplesPerSec;
      DWORD nAvgBytesPerSec;
      WORD  nBlockAlign;
      WORD  wBitsPerSample;
      WORD  cbSize;
    } WAVEFORMATEX, *PWAVEFORMATEX;*/

    WAVEFORMATEX *pwav = new WAVEFORMATEX;
    m_audiomt.pbFormat = (BYTE *)pwav;
    m_audiomt.cbFormat = sizeof(WAVEFORMATEX);  // size of the format block pointed to by pbFormat
    //WAVEFORMATEX *pwav = (WAVEFORMATEX*)m_audiomt.pbFormat;

    //WAVE_FORMAT_PCM               PCM (pulse-code modulated) data in integer format.
    //WAVE_FORMAT_IEEE_FLOAT        PCM data in IEEE floating-point format.
    //WAVE_FORMAT_DRM               DRM-encoded format (for digital-audio content protected by Microsoft Digital Rights Management).
    //WAVE_FORMAT_EXTENSIBLE        Extended WAVEFORMATEX structure (see WAVEFORMATEXTENSIBLE).
    //WAVE_FORMAT_ALAW              A-law-encoded format.
    //WAVE_FORMAT_MULAW             Mu-law-encoded format.
    //WAVE_FORMAT_ADPCM             ADPCM (adaptive differential pulse-code modulated) data.
    //WAVE_FORMAT_MPEG              MPEG-1 data format (stream conforms to ISO 11172-3 Audio specification).
    //WAVE_FORMAT_DOLBY_AC3_SPDIF   AC3 (aka Dolby Digital) over S/PDIF.
    pwav->wFormatTag = WAVE_FORMAT_PCM;

    pwav->nChannels = 2;
    pwav->nSamplesPerSec = 48000;

    // Specifies the number of bits per sample for the format type specified by wFormatTag.
    // If wFormatTag = WAVE_FORMAT_PCM, then wBitsPerSample should be set to either 8 or 16.
    pwav->wBitsPerSample = 16;

    // Specifies the block alignment in bytes. The block alignment is the size of the minimum
    // atomic unit of data for the wFormatTag format type. If wFormatTag = WAVE_FORMAT_PCM,
    // set nBlockAlign to (nChannels*wBitsPerSample)/8, which is the size of a single audio frame.
    pwav->nBlockAlign = (pwav->nChannels * pwav->wBitsPerSample)/8;

    pwav->nAvgBytesPerSec = pwav->nSamplesPerSec * pwav->nBlockAlign;

    // Specifies the size, in bytes, of extra format information appended to the end of the WAVEFORMATEX structure.
    // This information can be used by non-PCM formats to store extra attributes for the wFormatTag.
    pwav->cbSize = 0;

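    // Illustrative note (not from the original source): with the values chosen above, the derived
    // fields work out to
    //   nBlockAlign     = (2 channels * 16 bits) / 8       = 4 bytes per audio frame
    //   nAvgBytesPerSec = 48000 frames/s * 4 bytes/frame   = 192000 bytes/s
    //   lSampleSize     = 1024 frames * 4 bytes/frame      = 4096 bytes per grabbed sample
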
    // Note: I don't expect the next few methods to fail ....
    hr = pGrabber->SetMediaType(&m_audiomt);  // Set the media type we want for the connection.
    hr = pGrabber->SetOneShot(FALSE);  // Disable "one-shot" mode
    hr = pGrabber->SetBufferSamples(FALSE); // Disable sample buffering
    hr = pGrabber->SetCallback(&AudioGrabCallback, 0); // Set our callback. '0' means 'use the SampleCB callback'

    hr = CDSUtils::ConnectFilters(pGraph, pUpstream, pUpstreamPinName, m_pAudioGrabF, NULL);
    if(hr < 0) {
        //SaveLog.ToLog("\n Can not ConnectFilter : Audio Grabber.");
        return hr;
    }
    EXECUTE_ASSERT(ERROR_SUCCESS == SaveGraphFile(pGraph, L"ConnectGrabber.GRF"));

    // Find out the exact audio format.
    hr = pGrabber->GetConnectedMediaType(&m_audiomt);
    if (FAILED(hr))
    {
        //SaveLog.ToLog("\nCould not get the audio format.");
        return -1;
    }

#ifdef _SAGE_
    //if(m_sageOn == true) {
        if(Out_sail.init(pwav->nAvgBytesPerSec) < 0)
            return -1;

    //} //Out_sail.setSailEnv(NULL, 1);
#endif

    //CoTaskMemFree(m_mt.pbFormat);

    EXECUTE_ASSERT(ERROR_SUCCESS == SaveGraphFile(pGraph, L"InSetFilter.GRF"));

    return hr;
}
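
// Note on SetAudioFilter above (not from the original source): Out_sail.init is given only
// nAvgBytesPerSec here because the audio connection is forced to the fixed 48 kHz / 16-bit /
// stereo PCM format built above, whereas the video path initializes SAGE with the negotiated
// frame size in SetVideoFormat once the grabber pin is connected.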

void C_GrabProc::Shutdown()
{
#ifdef _SAGE_
    //if(m_sageOn == true) {
        Out_sail.shutdown();
    //}
    //Out_sail.setSailEnv(NULL, 1);
#endif
}

//-------------------------------------------------------------------------------------------
IBaseFilter* C_GrabProc::GrabF(void) {
    return m_pGrabF;
}

void C_GrabProc::setIP(CString ip)
{
    m_ip = ip;
}

// SetVideoFormat: Sets the image format (height, width, etc).

// Use UYVY only!
/*
typedef struct tagVIDEOINFOHEADER {

    RECT            rcSource;          // The bit we really want to use
    RECT            rcTarget;          // Where the video should go
    DWORD           dwBitRate;         // Approximate bit data rate
    DWORD           dwBitErrorRate;    // Bit error rate for this stream
    REFERENCE_TIME  AvgTimePerFrame;   // Average time per frame (100ns units)

    BITMAPINFOHEADER bmiHeader;

} VIDEOINFOHEADER;
*/
//-------------------------------------------------------------------------------------------
HRESULT C_GrabProc::SetVideoFormat(const VIDEOINFOHEADER& vih, int color_mode)
{

// /*
    // Check if UYVY
    char tmp[125];
    //SaveLog.ToLog("\n In SetFormat ");
//  SaveLog.ToLog((char*)&(vih.bmiHeader.biCompression), sizeof(DWORD));
//  SaveLog.ToLog16((char*)&(vih.bmiHeader.biCompression), sizeof(DWORD));

    if (vih.bmiHeader.biCompression == BI_RGB)
    {
        //SaveLog.ToLog("\r\n It is RGB24");
    }

    if (vih.bmiHeader.biCompression == FCC('UYVY'))
    {
        //SaveLog.ToLog("\r\n It's UYVY");
    }

    if (vih.bmiHeader.biCompression == FCC('YUYV'))
    {
        //SaveLog.ToLog("\r\n It's YUYV");
    }

    if (vih.bmiHeader.biCompression == FCC('2YUV'))
    {
        //SaveLog.ToLog("\r\n It's 2YUV");
    }

    if (vih.bmiHeader.biCompression == FCC('YUV2'))
    {
        //SaveLog.ToLog("\r\n It's YUV2");
    }
/*
    if (vih.bmiHeader.biCompression != FCC('UYVY'))
    {
        //SaveLog.ToLog("\r\n In SetFormat : It's not UYVY");
        return E_INVALIDARG;
    }
// */
    int BytesPerPixel = vih.bmiHeader.biBitCount / 8;

    // If the target rectangle (rcTarget) is empty, the image width and the stride are both biWidth.
    // Otherwise, image width is given by rcTarget and the stride is given by biWidth.

    if (IsRectEmpty(&vih.rcTarget))
    {
        m_dwWidth = vih.bmiHeader.biWidth;
        m_lStride = m_dwWidth;
    }
    else
    {
        m_dwWidth = vih.rcTarget.right;
        m_lStride = vih.bmiHeader.biWidth;
    }

    m_lStride = (m_lStride * BytesPerPixel + 3) & ~3; // stride for UYVY is rounded to the nearest DWORD
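    // Illustrative note (not from the original source): for an assumed 640-pixel-wide RGB24 frame,
    // BytesPerPixel = 3 and the stride becomes (640*3 + 3) & ~3 = 1920 bytes; for an assumed
    // 639-pixel-wide frame, (639*3 + 3) & ~3 = 1920 as well, i.e. the 1917-byte row is padded up
    // to the next multiple of 4 (DWORD) bytes.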

    m_dwHeight = abs(vih.bmiHeader.biHeight);  // biHeight can be < 0, but YUV is always top-down
    m_iBitDepth = vih.bmiHeader.biBitCount;
    m_out_color_mode = color_mode;

    if(m_pbRGB_buff) {
        free(m_pbRGB_buff);
        m_pbRGB_buff = NULL;
    }
    m_bufLen = m_lStride * m_dwHeight;

#ifdef _SAGE_
    //if(m_sageOn == true) {
        //int sail_init(int width, int height, int color_mode, char* output_addr, int output_port);
        //if(Out_sail.init(m_dwWidth, m_dwHeight, m_out_color_mode, "192.168.81.120", 0) < 0)
        char sip[25];
        sprintf(sip, "%s", m_ip.GetBuffer());

        if(Out_sail.init(m_dwWidth, m_dwHeight, m_out_color_mode, sip, 22000) < 0)
            return -1;
        //void setSailEnv(char* execConfigName, int nwID);
        Out_sail.setSailEnv(NULL, 1);
    //}
#endif
// /*
    //SaveLog.ToLog("\nLog test");

    //SaveLog.ToLog("\n width : ");
    //SaveLog.ToLog(m_dwWidth);
    //SaveLog.ToLog("height : ");
    //SaveLog.ToLog(m_dwHeight);
// */
    return S_OK;
}

//-------------------------------------------------------------------------------------------
HRESULT C_GrabProc::SetImageBuffer(BYTE *pBuffer)
{
    m_pImg = pBuffer;
    return S_OK;
}

//-------------------------------------------------------------------------------------------
HRESULT C_GrabProc::SetAudioBuffer(BYTE *pBuffer)
{
    /*m_startPoint
    m_avgBytesPerSec

    int m_startPoint;
    int m_avgBytesPerSec;
    int m_bitsPerSample;
    int m_channels;
*/
    // seek first of pointer
    /*if(m_bitsPerSample == 16) {
        short* data = (short*) pBuffer;
        int index = m_startPoint/2;
        m_pAudioSamples = (BYTE *) &data[index];
        // 2 bytes
    }
    else if(m_bitsPerSample == 8){
        char* data = (char*) pBuffer;
        m_pAudioSamples = (BYTE *) &data[m_startPoint];
        // 1 byte
    }

    m_startPoint += m_fixedSampleSize;
    if(m_startPoint >= m_avgBytesPerSec) {
        m_startPoint = 0;
    }*/

    m_pAudioSamples = pBuffer;
    return S_OK;
}


//-------------------------------------------------------------------------------------------
// RGB -> Bmp file
int C_GrabProc::SendToBmp(void) {
    //SaveLog.ToLog("\n SendToBmp ");
    char fileName[125];
    static int index = 0;

    sprintf(fileName, "outBmp_%0d.bmp", ++index);
    SaveToBmp(fileName, m_pImg, m_dwWidth, m_dwHeight, 24);

    return 1;
}

//-------------------------------------------------------------------------------------------
// RGB -> sail
int C_GrabProc::SendToSage(void) {
    //SaveLog.ToLog("\n SendToSage ");
    //void push_data(unsigned char* buf , int buf_len);
    if(!m_pImg) return -1;
    return Out_sail.push_data(m_pImg , m_bufLen);
}

//-------------------------------------------------------------------------------------------
// AUDIOSAMPLES -> sail
int C_GrabProc::SendAudioToSage(int buflen) {
    //SaveLog.ToLog("\n SendAudioToSage ");
    if(!m_pAudioSamples) return -1;
    return Out_sail.push_audiodata(m_pAudioSamples, buflen);
}

//-------------------------------------------------------------------------------------------
// /*
// YUV -> Bmp file or sail.
HRESULT C_GrabProc::ConvertImage()
{
    //SaveLog.ToLog("\n ConvertImage ");
    if (!m_pImg)
    {
        return E_UNEXPECTED;
    }

    if(!m_pbRGB_buff) {
        m_pbRGB_buff = (unsigned char*)malloc(m_dwWidth * m_dwHeight * 3);
    }

    DWORD iRow, iPixel;  // looping variables
    BYTE *pRow = m_pImg; // pointer to the first row in the buffer (don't care about image orientation)
    unsigned char *in_buff, *out_buff;

    UYVY_ARRAY uyvy;

    in_buff = m_pImg;
    out_buff = m_pbRGB_buff;

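    // Illustrative note (not from the original source): UYVY is a packed 4:2:2 format, so each
    // 4-byte group (U, Y0, V, Y1) describes two horizontally adjacent pixels that share one U/V
    // chroma pair. The loop below therefore steps two pixels at a time, reading 4 bytes of UYVY
    // and writing 6 bytes (two RGB triplets) per iteration.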
    for (iRow = 0; iRow < m_dwHeight; iRow++)
    {
        for (iPixel = 0; iPixel < m_dwWidth; iPixel += 2)
        {
            memcpy(&uyvy, in_buff, 4);

            // for saving in a file : BGR
            // for sending to sail : RGB
            *(out_buff) = YUV2Red(uyvy.y0, uyvy.u, uyvy.v);
            *(++out_buff) = YUV2Green(uyvy.y0, uyvy.u, uyvy.v);
            *(++out_buff) = YUV2Blue(uyvy.y0, uyvy.u, uyvy.v);
            out_buff++;

            *(out_buff) = YUV2Red(uyvy.y1, uyvy.u, uyvy.v);
            *(++out_buff) = YUV2Green(uyvy.y1, uyvy.u, uyvy.v);
            *(++out_buff) = YUV2Blue(uyvy.y1, uyvy.u, uyvy.v);
            out_buff++;

            in_buff += 4;

//          //SaveLog.ToLog("\n iPixel : "); SaveLog.ToLog(iPixel);
        }

//      SaveLog.ToLog("\n----iRow : "); SaveLog.ToLog(iRow);
    }

#ifdef _SAGE_
    //void push_data(unsigned char* buf , int buf_len);
    Out_sail.push_data(m_pbRGB_buff , 0);
#else
    char fileName[125];
    static int index = 0;
    sprintf(fileName, "outBmp_%03d.bmp", ++index);
    SaveToBmp(fileName, m_pbRGB_buff, m_dwWidth, m_dwHeight, 24);
#endif

    return S_OK;
}

void C_GrabProc::SetAudioOn(bool on)
{
#ifdef _SAGE_
    Out_sail.setAudioOn(on);
#endif
}

bool C_GrabProc::GetSageOn()
{
    return m_sageOn;
}

void C_GrabProc::SetAudioCaptureMode(bool on)
{
    if(on == true)
    {
        Out_sail.initAudioCaptureMode();
    }
}
// */