• blackmagic Capture.cpp


    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>
    #include <pthread.h>
    #include <unistd.h>
    #include <fcntl.h>
    
    #include "DeckLinkAPI.h"
    #include "Capture.h"
    
    pthread_mutex_t					sleepMutex;
    pthread_cond_t					sleepCond;
    int								videoOutputFile = -1;
    int								audioOutputFile = -1;
    
    IDeckLink 						*deckLink;
    IDeckLinkInput					*deckLinkInput;
    IDeckLinkDisplayModeIterator	*displayModeIterator;
    
    static BMDTimecodeFormat		g_timecodeFormat = 0;
    static int						g_videoModeIndex = -1;
    static int						g_audioChannels = 2;
    static int						g_audioSampleDepth = 16;
    const char *					g_videoOutputFile = NULL;
    const char *					g_audioOutputFile = NULL;
    static int						g_maxFrames = -1;
    
    static unsigned long 			frameCount = 0;
    
    DeckLinkCaptureDelegate::DeckLinkCaptureDelegate() : m_refCount(0)
    {
    	pthread_mutex_init(&m_mutex, NULL);
    }
    
    DeckLinkCaptureDelegate::~DeckLinkCaptureDelegate()
    {
    	pthread_mutex_destroy(&m_mutex);
    }
    
    ULONG DeckLinkCaptureDelegate::AddRef(void)
    {
    	pthread_mutex_lock(&m_mutex);
    	int		newRefValue = ++m_refCount;
    	pthread_mutex_unlock(&m_mutex);
    
    	return (ULONG)newRefValue;
    }
    
    ULONG DeckLinkCaptureDelegate::Release(void)
    {
    	pthread_mutex_lock(&m_mutex);
    	int		newRefValue = --m_refCount;
    	pthread_mutex_unlock(&m_mutex);
    
    	// Act on the value captured while the lock was held; re-reading m_refCount here would race with other threads.
    	if (newRefValue == 0)
    	{
    		delete this;
    		return 0;
    	}
    
    	return (ULONG)newRefValue;
    }
    
    HRESULT DeckLinkCaptureDelegate::VideoInputFrameArrived(IDeckLinkVideoInputFrame* videoFrame, IDeckLinkAudioInputPacket* audioFrame)
    {
    	IDeckLinkVideoFrame*	                rightEyeFrame = NULL;
    	IDeckLinkVideoFrame3DExtensions*        threeDExtensions = NULL;
    	void*					frameBytes;
    	void*					audioFrameBytes;
    	
    	// Handle Video Frame
    	if(videoFrame)
    	{	
    		// If 3D mode is enabled we retrieve the 3D extensions interface, which gives
    		// us access to the right eye frame by calling GetFrameForRightEye().
    		if ( (videoFrame->QueryInterface(IID_IDeckLinkVideoFrame3DExtensions, (void **) &threeDExtensions) != S_OK) ||
    			(threeDExtensions->GetFrameForRightEye(&rightEyeFrame) != S_OK))
    		{
    			rightEyeFrame = NULL;
    		}
    		
    		if (threeDExtensions)
    			threeDExtensions->Release();
    
    		if (videoFrame->GetFlags() & bmdFrameHasNoInputSource)
    		{
    			fprintf(stderr, "Frame received (#%lu) - No input signal detected
    ", frameCount);
    		}
    		else
    		{
    			const char *timecodeString = NULL;
    			if (g_timecodeFormat != 0)
    			{
    				IDeckLinkTimecode *timecode;
    				if (videoFrame->GetTimecode(g_timecodeFormat, &timecode) == S_OK)
    				{
    					timecode->GetString(&timecodeString);
    				}
    			}
    
    			fprintf(stderr, "Frame received (#%lu) [%s] - %s - Size: %li bytes
    ", 
    				frameCount,
    				timecodeString != NULL ? timecodeString : "No timecode",
    				rightEyeFrame != NULL ? "Valid Frame (3D left/right)" : "Valid Frame", 
    				videoFrame->GetRowBytes() * videoFrame->GetHeight());
    
    			if (timecodeString)
    				free((void*)timecodeString);
    			
    			if (videoOutputFile != -1)
    			{
    				videoFrame->GetBytes(&frameBytes);
    				write(videoOutputFile, frameBytes, videoFrame->GetRowBytes() * videoFrame->GetHeight());
    				
    				if (rightEyeFrame)
    				{
    					rightEyeFrame->GetBytes(&frameBytes);
    					write(videoOutputFile, frameBytes, videoFrame->GetRowBytes() * videoFrame->GetHeight());
    				}
    			}
    		}
    		
    		if (rightEyeFrame)
    			rightEyeFrame->Release();
    
    		frameCount++;
    
    		if (g_maxFrames > 0 && frameCount >= (unsigned long)g_maxFrames)
    		{
    			pthread_cond_signal(&sleepCond);
    		}
    	}
    
    	// Handle Audio Frame
    	if (audioFrame)
    	{
    		if (audioOutputFile != -1)
    		{
    			audioFrame->GetBytes(&audioFrameBytes);
    			write(audioOutputFile, audioFrameBytes, audioFrame->GetSampleFrameCount() * g_audioChannels * (g_audioSampleDepth / 8));
    		}
    	}
        return S_OK;
    }
    
    HRESULT DeckLinkCaptureDelegate::VideoInputFormatChanged(BMDVideoInputFormatChangedEvents events, IDeckLinkDisplayMode *mode, BMDDetectedVideoInputFormatFlags)
    {
        return S_OK;
    }
    
    int usage(int status)
    {
    	HRESULT result;
    	IDeckLinkDisplayMode *displayMode;
    	int displayModeCount = 0;
    
    	fprintf(stderr, 
    		"Usage: Capture -m <mode id> [OPTIONS]
    "
    		"
    "
    		"    -m <mode id>:
    "
    	);
    
        while (displayModeIterator->Next(&displayMode) == S_OK)
        {
            char *          displayModeString = NULL;
    
            result = displayMode->GetName((const char **) &displayModeString);
            if (result == S_OK)
            {
    			BMDTimeValue frameRateDuration, frameRateScale;
                displayMode->GetFrameRate(&frameRateDuration, &frameRateScale);
    
    			fprintf(stderr, "        %2d:  %-20s 	 %li x %li 	 %g FPS
    ", 
    				displayModeCount, displayModeString, displayMode->GetWidth(), displayMode->GetHeight(), (double)frameRateScale / (double)frameRateDuration);
    			
                free(displayModeString);
    			displayModeCount++;
            }
    
            // Release the IDeckLinkDisplayMode object to prevent a leak
            displayMode->Release();
        }
    
    	fprintf(stderr, 
    		"    -p <pixelformat>
    " 
    		"         0:  8 bit YUV (4:2:2) (default)
    "
    		"         1:  10 bit YUV (4:2:2)
    "
    		"         2:  10 bit RGB (4:4:4)
    "
    		"    -t <format>          Print timecode
    "
    		"     rp188:  RP 188
    "
    		"      vitc:  VITC
    "
    		"    serial:  Serial Timecode
    "
    		"    -f <filename>        Filename raw video will be written to
    "
    		"    -a <filename>        Filename raw audio will be written to
    "
    		"    -c <channels>        Audio Channels (2, 8 or 16 - default is 2)
    "
    		"    -s <depth>           Audio Sample Depth (16 or 32 - default is 16)
    "
    		"    -n <frames>          Number of frames to capture (default is unlimited)
    "
    		"    -3                   Capture Stereoscopic 3D (Requires 3D Hardware support)
    "
    		"
    "
    		"Capture video and/or audio to a file. Raw video and/or audio can be viewed with mplayer eg:
    "
    		"
    "
    		"    Capture -m2 -n 50 -f video.raw -a audio.raw
    "
    		"    mplayer video.raw -demuxer rawvideo -rawvideo pal:uyvy -audiofile audio.raw -audio-demuxer 20 -rawaudio rate=48000
    "
    	);
    
    	exit(status);
    }
    
    int main(int argc, char *argv[])
    {
    	IDeckLinkIterator			*deckLinkIterator = CreateDeckLinkIteratorInstance();
    	DeckLinkCaptureDelegate 	*delegate;
    	IDeckLinkDisplayMode		*displayMode;
    	BMDVideoInputFlags			inputFlags = 0;
    	BMDDisplayMode				selectedDisplayMode = bmdModeNTSC;
    	BMDPixelFormat				pixelFormat = bmdFormat8BitYUV;
    	int							displayModeCount = 0;
    	int							exitStatus = 1;
    	int							ch;
    	bool 						foundDisplayMode = false;
    	HRESULT						result;
    	
    	pthread_mutex_init(&sleepMutex, NULL);
    	pthread_cond_init(&sleepCond, NULL);
    	
    	if (!deckLinkIterator)
    	{
    		fprintf(stderr, "This application requires the DeckLink drivers installed.
    ");
    		goto bail;
    	}
    	
    	/* Connect to the first DeckLink instance */
    	result = deckLinkIterator->Next(&deckLink);
    	if (result != S_OK)
    	{
    		fprintf(stderr, "No DeckLink PCI cards found.
    ");
    		goto bail;
    	}
        
    	if (deckLink->QueryInterface(IID_IDeckLinkInput, (void**)&deckLinkInput) != S_OK)
    		goto bail;
    
    	delegate = new DeckLinkCaptureDelegate();
    	deckLinkInput->SetCallback(delegate);
       
    	// Obtain an IDeckLinkDisplayModeIterator to enumerate the display modes supported on output
    	result = deckLinkInput->GetDisplayModeIterator(&displayModeIterator);
    	if (result != S_OK)
    	{
    		fprintf(stderr, "Could not obtain the video output display mode iterator - result = %08x
    ", result);
    		goto bail;
    	}
    	
    	// Parse command line options
    	while ((ch = getopt(argc, argv, "?h3c:s:f:a:m:n:p:t:")) != -1) 
    	{
    		switch (ch) 
    		{
    			case 'm':
    				g_videoModeIndex = atoi(optarg);
    				break;
    			case 'c':
    				g_audioChannels = atoi(optarg);
    				if (g_audioChannels != 2 &&
    				    g_audioChannels != 8 &&
    					g_audioChannels != 16)
    				{
    					fprintf(stderr, "Invalid argument: Audio Channels must be either 2, 8 or 16
    ");
    					goto bail;
    				}
    				break;
    			case 's':
    				g_audioSampleDepth = atoi(optarg);
    				if (g_audioSampleDepth != 16 && g_audioSampleDepth != 32)
    				{
    					fprintf(stderr, "Invalid argument: Audio Sample Depth must be either 16 bits or 32 bits
    ");
    					goto bail;
    				}
    				break;
    			case 'f':
    				g_videoOutputFile = optarg;
    				break;
    			case 'a':
    				g_audioOutputFile = optarg;
    				break;
    			case 'n':
    				g_maxFrames = atoi(optarg);
    				break;
    			case '3':
    				inputFlags |= bmdVideoInputDualStream3D;
    				break;
    			case 'p':
    				switch(atoi(optarg))
    				{
    					case 0: pixelFormat = bmdFormat8BitYUV; break;
    					case 1: pixelFormat = bmdFormat10BitYUV; break;
    					case 2: pixelFormat = bmdFormat10BitRGB; break;
    					default:
    						fprintf(stderr, "Invalid argument: Pixel format %d is not valid", atoi(optarg));
    						goto bail;
    				}
    				break;
    			case 't':
    				if (!strcmp(optarg, "rp188"))
    					g_timecodeFormat = bmdTimecodeRP188Any;
        			else if (!strcmp(optarg, "vitc"))
    					g_timecodeFormat = bmdTimecodeVITC;
        			else if (!strcmp(optarg, "serial"))
    					g_timecodeFormat = bmdTimecodeSerial;
    				else
    				{
    					fprintf(stderr, "Invalid argument: Timecode format "%s" is invalid
    ", optarg);
    					goto bail;
    				}
    				break;
    			case '?':
    			case 'h':
    				usage(0);
    		}
    	}
    
    	if (g_videoModeIndex < 0)
    	{
    		fprintf(stderr, "No video mode specified
    ");
    		usage(0);
    	}
    
    	if (g_videoOutputFile != NULL)
    	{
    		videoOutputFile = open(g_videoOutputFile, O_WRONLY|O_CREAT|O_TRUNC, 0664);
    		if (videoOutputFile < 0)
    		{
    			fprintf(stderr, "Could not open video output file "%s"
    ", g_videoOutputFile);
    			goto bail;
    		}
    	}
    	if (g_audioOutputFile != NULL)
    	{
    		audioOutputFile = open(g_audioOutputFile, O_WRONLY|O_CREAT|O_TRUNC, 0664);
    		if (audioOutputFile < 0)
    		{
    			fprintf(stderr, "Could not open audio output file "%s"
    ", g_audioOutputFile);
    			goto bail;
    		}
    	}
    	
    	while (displayModeIterator->Next(&displayMode) == S_OK)
    	{
    		if (g_videoModeIndex == displayModeCount)
    		{
    			BMDDisplayModeSupport	displayModeSupported;
    			const char *displayModeName;
    			
    			foundDisplayMode = true;
    			displayMode->GetName(&displayModeName);
    			selectedDisplayMode = displayMode->GetDisplayMode();
    			
    			deckLinkInput->DoesSupportVideoMode(selectedDisplayMode, pixelFormat, bmdVideoInputFlagDefault, &displayModeSupported, NULL);
    
    			if (displayModeSupported == bmdDisplayModeNotSupported)
    			{
    				fprintf(stderr, "The display mode %s is not supported with the selected pixel format
    ", displayModeName);
    				goto bail;
    			}
    
    			if (inputFlags & bmdVideoInputDualStream3D)
    			{
    				if (!(displayMode->GetFlags() & bmdDisplayModeSupports3D))
    				{
    					fprintf(stderr, "The display mode %s is not supported with 3D
    ", displayModeName);
    					goto bail;
    				}
    			}
    			
    			break;
    		}
    		displayModeCount++;
    		displayMode->Release();
    	}
    
    	if (!foundDisplayMode)
    	{
    		fprintf(stderr, "Invalid mode %d specified
    ", g_videoModeIndex);
    		goto bail;
    	}
    
        result = deckLinkInput->EnableVideoInput(selectedDisplayMode, pixelFormat, inputFlags);
        if(result != S_OK)
        {
    		fprintf(stderr, "Failed to enable video input. Is another application using the card?
    ");
            goto bail;
        }
    
        result = deckLinkInput->EnableAudioInput(bmdAudioSampleRate48kHz, g_audioSampleDepth, g_audioChannels);
        if(result != S_OK)
        {
            goto bail;
        }
    
    	result = deckLinkInput->StartStreams();
        if(result != S_OK)
        {
            goto bail;
        }
    
    	// All Okay.
    	exitStatus = 0;
    	
    	// Block main thread until signal occurs
    	pthread_mutex_lock(&sleepMutex);
    	pthread_cond_wait(&sleepCond, &sleepMutex);
    	pthread_mutex_unlock(&sleepMutex);
    	fprintf(stderr, "Stopping Capture
    ");
    
    bail:
       	
    	if (videoOutputFile != -1)
    		close(videoOutputFile);
    	if (audioOutputFile != -1)
    		close(audioOutputFile);
    	
    	if (displayModeIterator != NULL)
    	{
    		displayModeIterator->Release();
    		displayModeIterator = NULL;
    	}
    
        if (deckLinkInput != NULL)
        {
            deckLinkInput->Release();
            deckLinkInput = NULL;
        }
    
        if (deckLink != NULL)
        {
            deckLink->Release();
            deckLink = NULL;
        }
    
    	if (deckLinkIterator != NULL)
    		deckLinkIterator->Release();
    
        return exitStatus;
    }
    

    Code notes:

    1. On Linux, verify that the device and driver are actually present and working before making any further calls (see the sketch after these notes). This covers the code from

    IDeckLinkIterator *deckLinkIterator = CreateDeckLinkIteratorInstance();

    ...

    through to delegate = new DeckLinkCaptureDelegate();

    2. The IDeckLinkIterator class is used to enumerate the available devices.

    3. Next() can be understood as "find the card"; the steps before it deal with the driver.
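
    For notes 1–3, here is a minimal standalone sketch of that driver/device check. It is illustrative only, assuming the same Linux DeckLinkAPI.h as the sample above (where name strings come back as const char * that the caller frees); the messages and exit codes are not taken from the original program.

    #include <stdio.h>
    #include <stdlib.h>
    
    #include "DeckLinkAPI.h"
    
    int main()
    {
    	// CreateDeckLinkIteratorInstance() returns NULL when the DeckLink drivers are not installed.
    	IDeckLinkIterator *deckLinkIterator = CreateDeckLinkIteratorInstance();
    	if (deckLinkIterator == NULL)
    	{
    		fprintf(stderr, "This application requires the DeckLink drivers installed.\n");
    		return 1;
    	}
    
    	// Each successful Next() hands back one physical card; the driver itself was verified above.
    	IDeckLink	*deckLink = NULL;
    	int			numDevices = 0;
    	while (deckLinkIterator->Next(&deckLink) == S_OK)
    	{
    		const char *modelName = NULL;
    		if (deckLink->GetModelName(&modelName) == S_OK)
    		{
    			printf("Device %d: %s\n", numDevices, modelName);
    			free((void *)modelName);	// on Linux the API returns a string the caller must free
    		}
    		numDevices++;
    		deckLink->Release();
    	}
    
    	deckLinkIterator->Release();
    
    	if (numDevices == 0)
    	{
    		fprintf(stderr, "No DeckLink PCI cards found.\n");
    		return 1;
    	}
    	return 0;
    }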

  • Original article: https://www.cnblogs.com/vintion/p/4116945.html