The camera used here is a Daheng MER-series industrial camera. It cannot be opened with OpenCV's built-in capture functions; the vendor-provided API (GxIAPI, used in the code below) has to be used instead.
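Before the full listings, here is a minimal sketch of the vendor-API flow that the MERCamera class at the end of this post follows: initialize the library, open the first device, register a frame callback, start and stop acquisition, then close everything. It only uses GxIAPI calls that appear later in this post; the header name GxIAPI.h is an assumption, so adjust it to your SDK installation.

#include "GxIAPI.h"   // assumed SDK header name; adjust to your installation
#include <iostream>

// Frame callback: the SDK calls this for every captured frame.
static void __stdcall OnFrame(GX_FRAME_CALLBACK_PARAM* pFrame)
{
    if (pFrame->status == 0)
    {
        // pFrame->pImgBuf holds the raw image data, pFrame->nImgSize its size
    }
}

int main()
{
    GX_DEV_HANDLE hDevice = nullptr;
    GXInitLib();                                                    // initialize the device library
    GX_STATUS status = GXOpenDeviceByIndex(1, &hDevice);            // open the first enumerated camera
    if (status != GX_STATUS_SUCCESS)
    {
        std::cout << "open error" << std::endl;
        return -1;
    }
    status = GXRegisterCaptureCallback(hDevice, nullptr, OnFrame);  // register the frame callback
    status = GXSendCommand(hDevice, GX_COMMAND_ACQUISITION_START);  // start acquisition
    // ... frames arrive in OnFrame while acquisition runs ...
    status = GXSendCommand(hDevice, GX_COMMAND_ACQUISITION_STOP);   // stop acquisition
    GXUnregisterCaptureCallback(hDevice);
    GXCloseDevice(hDevice);
    GXCloseLib();
    return 0;
}

The listings below expand on this: first two plain OpenCV capture/record examples, then the Qt frame callback with undistortion, and finally the MERCamera wrapper built on GxIAPI.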
// Defines the entry point for the console application.

#include "stdafx.h"
#include "core/core.hpp"  
#include <opencv2/imgproc/imgproc.hpp&gt;  
#include <opencv2/highgui/highgui.hpp>  
#include <set>
#include<stdio.h>

#include <opencv2/core.hpp>
#include <opencv2/opencv.hpp>
#include <opencv2/highgui.hpp>
#include <opencv2/videoio.hpp>//for camera
#include <opencv2/video.hpp>
#include <opencv2/imgPRoc//imgproc.hpp>
#include <opencv2/ml/ml.hpp>

#include "opencv2/imgproc/imgproc.hpp"
#include "opencv2/ml/ml.hpp"
#include <time.h>
#include <ctime>
#include <iostream>
#include <string>
using namespace std;
using namespace cv;

//Note: pulling in whole namespaces like this is generally not recommended

int main(int argc, char ** argv)
{
    char filename[1024];
    if (argc == 1)
        sprintf(filename, "%s", "camera1.avi");
    if (argc == 2)
        sprintf(filename, "%s", "123");


    VideoCapture capture;
    capture.open(0);
    if (!capture.isOpened())
    {
        cout << "Could not initialize capturing...\n" << endl;
        return -1;
    }

    // Name the output file after the current time
    time_t now = time(nullptr);                        // seconds since 1970-01-01 (time_t)
    struct tm *timeinfo = localtime(&now);             // convert to local time
    //struct tm *timeinfo = gmtime(&now);              // or convert to GMT instead
    char path[60];
    strftime(path, 60, "%Y_%m_%d_%H_%M_%S", timeinfo);
    char strPath[100];
    sprintf(strPath, "%s.avi", path);                  // full output file name, e.g. 2018_01_01_12_00_00.avi

    // Record the stream as an AVI video. Size(640, 480) should match the actual
    // frame size delivered by the camera, otherwise the writer records nothing.
    // (See the note after this listing for the OpenCV 3/4 fourcc equivalent.)
    Mat frame;
    VideoWriter writer;
    writer.open(strPath, CV_FOURCC('X', 'V', 'I', 'D'), 25, Size(640, 480), true);

    int n = 1;
    while (true)
    {
        capture >> frame;
        if (frame.empty())          // check before using the frame
        {
            break;
        }

        // Also dump every frame as a numbered JPEG
        char cstr[120];
        //sprintf(cstr, "%s%d%s", "/home/caros/Documents/../", n++, ".jpg");
        sprintf(cstr, "%s%d%s", "/home/baidu/Doc/shipin_capture", n++, ".jpg");
        imwrite(cstr, frame);

        imshow("Video_Capture", frame);
        writer << frame;
        waitKey(3);
    }

    return 0;
}
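A quick note on the fourcc call above: CV_FOURCC is the legacy OpenCV 2.x macro. If you build against OpenCV 3 or 4, the same recorder can be opened with the static cv::VideoWriter::fourcc helper instead. A self-contained sketch (not taken from the original post):

// OpenCV 3/4 variant of the recorder setup above (sketch).
#include <opencv2/opencv.hpp>
#include <iostream>

int main()
{
    cv::VideoWriter writer;
    writer.open("cam.avi", cv::VideoWriter::fourcc('X', 'V', 'I', 'D'), 25, cv::Size(640, 480), true);
    if (!writer.isOpened())
    {
        std::cout << "Could not open the output video for writing" << std::endl;
        return -1;
    }
    return 0;
}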
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <cstdio>
#include <cstdlib>

using namespace cv;

int main()
{
    VideoCapture capture(0);
    VideoWriter writer("VideoTest.avi", CV_FOURCC('M', 'J', 'P', 'G'), 25.0, Size(640, 480));

    while (capture.isOpened())
    {
        Mat frame;
        capture >> frame;
        if ((frame.rows == 0) || (frame.cols == 0))   // grab failed
        {
            printf("frame capture failed\n");
            system("pause");
            exit(0);
        }
        writer << frame;
        imshow("video", frame);
        if (waitKey(20) == 27)                        // ESC quits
        {
            break;
        }
    }
    return 0;
}
void MainWindow::OnFrameCallbackFun1(GX_FRAME_CALLBACK_PARAM* frame)
{
    //qDebug() << "OnFrameCallbackFun1";
    if (frame->status != 0)          // skip incomplete/bad frames
    {
        return;
    }

    MainWindow *pf1 = (MainWindow*)(frame->pUserParam);
    pf1->m_frame_first = frame;

    //QImage image_circle;
    uchar   *pSrc = nullptr;
    int      id = 0;                          // camera ID
    int      image_width = 0;                 // image width
    int      image_height = 0;                // image height
    int64_t  bayer_layout = 0;                // Bayer layout
    void    *result_image = nullptr;          // pointer to the image used for display
    QImage  *show_image = nullptr;
    QString  device_name = "";                // camera name
    QString  display_fps = "";                // display frame rate

    image_width  = (int)(pf1->m_struct_camera[id].image_width);
    image_height = (int)(pf1->m_struct_camera[id].image_height);
    bayer_layout = pf1->m_struct_camera[id].bayer_layout;
    result_image = pf1->m_struct_camera[id].result_image;
    show_image   = pf1->m_struct_camera[id].show_image;

    /***** Undistort the raw image: load the calibration parameters and build the rectification maps *****/
    // (A sketch of load_camera_params follows after this function. Note that building the
    // maps on every frame is expensive; ideally this block runs once at start-up.)
    pf1->frame_size[id] = cv::Size(2048, 1536);
    std::string params_file = "/home/caros/baidu/adu/valetparking/modules/conf/camera_params.yml";
    load_camera_params(params_file, pf1->params[id]);
    pf1->mapx[id] = cv::Mat(pf1->frame_size[id], CV_32FC1);
    pf1->mapy[id] = cv::Mat(pf1->frame_size[id], CV_32FC1);
    cv::Mat R = cv::Mat::eye(3, 3, CV_32F);
    initUndistortRectifyMap(pf1->params[id].cameraMatrix, pf1->params[id].distCoeffs, R, pf1->params[id].cameraMatrix,
                            pf1->frame_size[id], CV_32FC1, pf1->mapx[id], pf1->mapy[id]);

    // If the sensor has a color filter, convert to RGB for display
    if (pf1->m_struct_camera[id].color_filter_flag)
    {
        // Convert the Raw8 image to RGB24 for display
        //DxRaw8toRGB24((char*)frame->pImgBuf, result_image, image_width, image_height, RAW2RGB_NEIGHBOUR,
        //    DX_PIXEL_COLOR_FILTER(bayer_layout), false);
        DxRaw8toRGB24((char*)frame->pImgBuf, pf1->m_circle_image[id].data, image_width, image_height,
                      RAW2RGB_NEIGHBOUR, DX_PIXEL_COLOR_FILTER(bayer_layout), false);
        // remap(src, dst, map1, map2, interpolation): apply the undistortion maps
        cv::remap(pf1->m_circle_image[id], pf1->m_remap_image[id], pf1->mapx[id], pf1->mapy[id], CV_INTER_LINEAR);

        if (calibrate_flag == true)
        {
            // Draw reference circles along the vertical center line to aid calibration
            for (int i = 0; i < 13; i++)
            {
                cv::circle(pf1->m_remap_image[id],
                           cv::Point(pf1->m_remap_image[id].cols / 2, pf1->m_remap_image[id].rows * i / 12),
                           10, cv::Scalar(255, 255, 0), 3);
            }
            cv::circle(pf1->m_remap_image[id],
                       cv::Point(pf1->m_remap_image[id].cols / 2, pf1->m_remap_image[id].rows / 2),
                       10, cv::Scalar(255, 0, 0), 3);
        }

        if (record_flag[id] == true)
        {
            qDebug() << "camera1 frameindex: " << frame_index[id]++;
            cv::imwrite(writer1, pf1->m_remap_image[id]);
            record_flag[id] = false;
        }

        pSrc = (uchar*)(pf1->m_remap_image[id].data);
        pf1->m_qimage[id] = QImage(pSrc, pf1->m_remap_image[id].cols, pf1->m_remap_image[id].rows,
                                   pf1->m_remap_image[id].step, QImage::Format_RGB888);
        pf1->image[id] = &(pf1->m_qimage[id]);
    }
    else
    {
        // Mono sensor: convert without a Bayer color filter
        DxRaw8toRGB24((char*)frame->pImgBuf, result_image, image_width, image_height, RAW2RGB_NEIGHBOUR,
                      DX_PIXEL_COLOR_FILTER(NONE), false);
    }

    device_name.sprintf("Camera: %s", pf1->m_baseinfo[id].szDisplayName);
    display_fps.sprintf("SN: %s  display frame rate: %.2f FPS", pf1->m_baseinfo[id].szSN, pf1->m_struct_camera[id].fps);

    {
        pf1->m_child_window[id]->ShowImage(pf1->image[id], device_name, display_fps, pf1->m_view_flag);
    }

    if (pf1->m_display_flag[id] == true)
    {
        pf1->m_child_window[id]->setWindowTitle(device_name);
        pf1->m_child_window[id]->update();
        pf1->m_display_flag[id] = false;
    }
    pf1->UpdateUI();

}
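The callback above relies on a helper, load_camera_params, that is not shown in the post. Under the assumption that camera_params.yml stores an intrinsic matrix and distortion coefficients written by cv::FileStorage, a minimal version could look like the sketch below. The struct name CameraParams and the YAML key names camera_matrix and distortion_coefficients are my guesses, not taken from the original project; only the two members actually used above (cameraMatrix and distCoeffs) are required.

#include <opencv2/opencv.hpp>
#include <string>

struct CameraParams             // hypothetical bundle; the real project's type only needs
{                               // the two members used above
    cv::Mat cameraMatrix;       // 3x3 intrinsic matrix
    cv::Mat distCoeffs;         // distortion coefficients
};

// Minimal sketch of load_camera_params: read both matrices from a YAML file
// written by cv::FileStorage. The key names are assumptions.
bool load_camera_params(const std::string &params_file, CameraParams &params)
{
    cv::FileStorage fs(params_file, cv::FileStorage::READ);
    if (!fs.isOpened())
    {
        return false;
    }
    fs["camera_matrix"] >> params.cameraMatrix;
    fs["distortion_coefficients"] >> params.distCoeffs;
    fs.release();
    return !params.cameraMatrix.empty() && !params.distCoeffs.empty();
}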


#include "stdafx.h"
#include "camera.h"
#include <iostream>
#include <opencv2/opencv.hpp>
using namespace cv;
 
 
GX_DEV_HANDLE hDevice = nullptr; 
 static int keycode;
 
MERCamera::MERCamera()
{
	GX_STATUS status = GX_STATUS_SUCCESS;

	GXInitLib();                                  // initialize the GxIAPI device library
	status = GXOpenDeviceByIndex(1, &hDevice);    // open the first enumerated camera
	if (status != GX_STATUS_SUCCESS)
	{
		std::cout << "open error" << std::endl;
		return;
	}
}
 
MERCamera::~MERCamera()
{
	GX_STATUS emStatus = GX_STATUS_SUCCESS;
	if(m_bIsSnaping)
	{
		EndContinuesMode();
	}
 
	emStatus = GXCloseDevice(hDevice);
	if(emStatus != GX_STATUS_SUCCESS)
	{ 
		std::cout<<"close error"<<std::endl;
	} 
	
	// Close the device library
	emStatus = GXCloseLib();
	if(emStatus != GX_STATUS_SUCCESS)
	{ 
		std::cout<<"close lib error"<<std::endl;
	} 
}
 
void MERCamera::InitCamera()
{
	GX_STATUS status = GX_STATUS_SUCCESS;
	int64_t nValue = 0;

	m_bIsSnaping = false;
	status = GXGetEnum(hDevice, GX_ENUM_PIXEL_FORMAT, &m_nPixelFomat);

	// Get the image width (must be read/set while acquisition is stopped)
	status = GXGetInt(hDevice, GX_INT_WIDTH, &nValue);
	m_nImageWidth = (int)nValue;

	// Get the image height (must be read/set while acquisition is stopped)
	status = GXGetInt(hDevice, GX_INT_HEIGHT, &nValue);
	m_nImageHeight = (int)nValue;

	// Get the payload (image data) size
	status = GXGetInt(hDevice, GX_INT_PAYLOAD_SIZE, &nValue);
	m_nPayLoadSize = (int)nValue;

	// Set the acquisition mode; most cameras default to continuous acquisition.
	int64_t nAcqMode = GX_ACQ_MODE_CONTINUOUS;
	status = GXSetEnum(hDevice, GX_ENUM_ACQUISITION_MODE, nAcqMode);

	do
	{
		// Allocate space for the 8-bit image data
		m_pBufferRaw8 = new BYTE[m_nImageWidth * m_nImageHeight];
		if (m_pBufferRaw8 == nullptr)
		{
			status = GX_STATUS_ERROR;
			break;
		}
		
		// Allocate space for the RGB image data
		m_pBufferRGB = new BYTE[m_nImageWidth * m_nImageHeight * 3];
		if (m_pBufferRGB == nullptr)
		{
			status = GX_STATUS_ERROR;
			break;
		}
		
		// Allocate space for the raw image data
		m_pBufferRaw = new BYTE[m_nPayLoadSize];
		if (m_pBufferRaw == nullptr)
		{
			status = GX_STATUS_ERROR;
			break;
		}
		
	} while (0);
	if (status != GX_STATUS_SUCCESS)
	{
		if (m_pBufferRaw8 != nullptr)
		{
			delete[]m_pBufferRaw8;
			m_pBufferRaw8 = nullptr;
		}
		if (m_pBufferRaw != nullptr)
		{
			delete[]m_pBufferRaw;
			m_pBufferRaw = nullptr;
		}
		if (m_pBufferRGB != nullptr)
		{
			delete[]m_pBufferRGB;
			m_pBufferRGB = nullptr;
		}
	}
 
}
 
void MERCamera::StartContinuesMode()
{
	GX_STATUS status = GX_STATUS_SUCCESS;
	InitCamera();
	// Register the image-processing callback; pass `this` so the callback can
	// recover the MERCamera instance from pFrame->pUserParam.
	status = GXRegisterCaptureCallback(hDevice, this, OnFrameCallbackFun);
	if (!m_bIsSnaping)
	{
		// Send the "start acquisition" command
		status = GXSendCommand(hDevice, GX_COMMAND_ACQUISITION_START);
		m_bIsSnaping = true;
	}
	while (1)
	{
		// keycode is updated in DrawImage() from cv::waitKey(); pressing 'q' stops acquisition
		if (keycode == 'q')
		{
			EndContinuesMode();
			break;
		}
	}
}
 
void MERCamera::EndContinuesMode()
{  
	GX_STATUS status = GX_STATUS_SUCCESS;
	if(m_bIsSnaping)
	{
		// Send the "stop acquisition" command
		status = GXSendCommand(hDevice, GX_COMMAND_ACQUISITION_STOP);
		// Unregister the capture callback
		status = GXUnregisterCaptureCallback(hDevice);
		m_bIsSnaping = false;
	}
}
//Function: ProcessImage(BYTE* pImageBuf)
//Purpose: convert the raw-format image into an RGB image
//(a display sketch for the resulting RGB buffer follows this function)
void MERCamera::ProcessImage(BYTE *pImageBuf)
{
	//m_objDrawImg.Enter();
	memcpy(m_pBufferRaw, pImageBuf, m_nPayLoadSize);
		
	switch(m_nPixelFomat)
	{
	// For 12-bit data, keep bits 4-11
	case GX_PIXEL_FORMAT_MONO12:
		// Convert the 12-bit image to 8-bit
		DxRaw16toRaw8(m_pBufferRaw, m_pBufferRaw8, m_nImageWidth, m_nImageHeight, DX_BIT_4_11);

		// Convert the resulting 8-bit image to RGB for display
		DxRaw8toRGB24(m_pBufferRaw8, m_pBufferRGB, m_nImageWidth, m_nImageHeight, RAW2RGB_NEIGHBOUR, (DX_PIXEL_COLOR_FILTER)NONE, TRUE);
		break;

	// For 10-bit data, keep bits 2-9
	case GX_PIXEL_FORMAT_MONO10:
		// Convert the 10-bit image to 8-bit
		DxRaw16toRaw8(m_pBufferRaw, m_pBufferRaw8, m_nImageWidth, m_nImageHeight, DX_BIT_2_9);

		// Convert the resulting 8-bit image to RGB for display
		DxRaw8toRGB24(m_pBufferRaw8, m_pBufferRGB, m_nImageWidth, m_nImageHeight, RAW2RGB_NEIGHBOUR, (DX_PIXEL_COLOR_FILTER)NONE, TRUE);
		break;

	// For 8-bit data, convert directly to RGB for display
	case GX_PIXEL_FORMAT_MONO8:
		DxRaw8toRGB24(m_pBufferRaw, m_pBufferRGB, m_nImageWidth, m_nImageHeight, RAW2RGB_NEIGHBOUR, (DX_PIXEL_COLOR_FILTER)NONE, TRUE);
		break;
		
	default:
		//m_objDrawImg.Leave();
		return;
	}
 
	//m_objDrawImg.Leave();   // the matching Enter() above is commented out, so keep this commented too
}
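ProcessImage fills m_pBufferRGB, but nothing in this post actually displays that buffer (DrawImage below shows the raw mono frame instead). As a rough sketch, the RGB buffer could be wrapped in a cv::Mat and shown like this. ShowProcessedImage is a hypothetical helper of my own (it would also need a declaration in camera.h), and the RGB-to-BGR swap assumes the buffer is RGB-ordered as the DxRaw8toRGB24 name suggests.

// Sketch (not from the original post): display the RGB buffer produced by ProcessImage().
// Assumes m_pBufferRGB / m_nImageWidth / m_nImageHeight are filled in as above.
void MERCamera::ShowProcessedImage()
{
	// 3-channel, 8-bit image backed by the existing buffer (no copy)
	cv::Mat rgb(m_nImageHeight, m_nImageWidth, CV_8UC3, m_pBufferRGB);

	// cv::imshow expects BGR ordering, so swap the channels before displaying
	// (skip this step if the colors already look right).
	cv::Mat bgr;
	cv::cvtColor(rgb, bgr, cv::COLOR_RGB2BGR);

	cv::imshow("processed", bgr);
	cv::waitKey(1);
}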
 
// Image-processing callback, invoked by the SDK for every captured frame
void __stdcall MERCamera::OnFrameCallbackFun(GX_FRAME_CALLBACK_PARAM* pFrame)
{
	MERCamera *pDlg = (MERCamera*)(pFrame->pUserParam);
	if (pFrame->status == 0)
	{
		// Frame acquired successfully: process/display it
		//cout << "successful" << endl;
		pDlg->DrawImage((BYTE*)pFrame->pImgBuf, pFrame->nImgSize);
	}
	return;
}
 
void MERCamera::DrawImage(BYTE *pImageBuf, int nImageSize)
{
	cv::namedWindow("window", 0);
	// Wrap the raw buffer in a Mat; the resolution is hard-coded for this camera model
	cv::Mat img(Size(2592, 1944), CV_8U, pImageBuf);
	flip(img, img, -1);
	cv::imshow("window", img);
	keycode = cv::waitKey(30);   // feed the key code back to the acquisition loop
}
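For completeness, a minimal usage sketch of the class above (my own example, not part of the original code): construct the wrapper, which opens the camera, then start continuous acquisition; pressing 'q' in the display window stops it, and the destructor closes the device and the library.

#include "camera.h"

int main()
{
	MERCamera camera;              // opens the device via GXOpenDeviceByIndex(1, ...)
	camera.StartContinuesMode();   // grabs frames until 'q' is pressed in the display window
	return 0;                      // ~MERCamera() stops acquisition and closes the device/library
}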

Reference:
https://blog.csdn.net/pockyym/article/details/13016839
