emgu cv CvInvoke.cvRemap hangs when trying to undistort from stereo calibration data
I am trying to implement a stereo camera calibration application using Emgu CV.
My problem is that when I try to undistort an image with CvInvoke.cvRemap, the function simply hangs. There is no error and no crash; it just sits there, and I left it running for two hours in case it was merely slow. Here is what I am doing (a condensed sketch of the corresponding API calls follows the list):
1. Capture 10 pairs of chessboard samples (left and right), making sure FindChessboardCorners succeeds on every one. I am not doing anything special to synchronize the cameras, just capturing both at the same time.
2. Generate a set of object points based on the chessboard being used.
3. Run CalibrateCamera separately on the left and right images of each sample, using the object points from 2 and the image points from 1.
4. Run StereoCalibrate using the IntrinsicCameraParameters produced by CalibrateCamera in 3, the object points from 2, and the chessboard image points captured in 1.
5. Run StereoRectify using the IntrinsicCameraParameters from 3/4.
6. Use the output of 5 with cvInitUndistortRectifyMap to generate mapx and mapy for the left and right cameras.
7. Attempt cvRemap using mapx and mapy from 6 and fresh images captured from the cameras.
NEXT: use StereoBM.FindStereoCorrespondence and PointCollection.ReprojectImageTo3D to generate a point cloud from the (hopefully) calibrated stereo data.
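In Emgu CV 2.x terms, steps 3 through 7 condense to roughly the following. This is only a sketch mirroring the full code further down; the method and variable names here are placeholders, not part of my project.
// Condensed sketch of steps 3-7. objPoints / cornersLeft / cornersRight are the per-sample
// object points and chessboard image points from steps 1-2; imageSize is the capture size.
static void CalibrateAndRemapSketch(MCvPoint3D32f[][] objPoints, PointF[][] cornersLeft, PointF[][] cornersRight, Size imageSize, Image<Gray, Byte> freshLeft)
{
IntrinsicCameraParameters inLeft = new IntrinsicCameraParameters();
IntrinsicCameraParameters inRight = new IntrinsicCameraParameters();
ExtrinsicCameraParameters[] extLeft, extRight;
ExtrinsicCameraParameters extStereo;
Matrix<double> fundamental, essential;
// Step 3: per-camera calibration
CameraCalibration.CalibrateCamera(objPoints, cornersLeft, imageSize, inLeft, CALIB_TYPE.DEFAULT, out extLeft);
CameraCalibration.CalibrateCamera(objPoints, cornersRight, imageSize, inRight, CALIB_TYPE.DEFAULT, out extRight);
// Step 4: stereo calibration
CameraCalibration.StereoCalibrate(objPoints, cornersLeft, cornersRight, inLeft, inRight, imageSize, CALIB_TYPE.DEFAULT, new MCvTermCriteria(100, 0.001), out extStereo, out fundamental, out essential);
// Step 5: stereo rectification (only the left side is carried forward below)
Matrix<double> R1 = new Matrix<double>(3, 3), R2 = new Matrix<double>(3, 3);
Matrix<double> P1 = new Matrix<double>(3, 4), P2 = new Matrix<double>(3, 4);
Matrix<double> Q = new Matrix<double>(4, 4);
Rectangle roi1 = new Rectangle(), roi2 = new Rectangle();
CvInvoke.cvStereoRectify(inLeft.IntrinsicMatrix.Ptr, inRight.IntrinsicMatrix.Ptr, inLeft.DistortionCoeffs.Ptr, inRight.DistortionCoeffs.Ptr, imageSize, extStereo.RotationVector.Ptr, extStereo.TranslationVector.Ptr, R1.Ptr, R2.Ptr, P1.Ptr, P2.Ptr, Q.Ptr, STEREO_RECTIFY_TYPE.DEFAULT, 0, imageSize, ref roi1, ref roi2);
// Step 6: undistort/rectify maps for the left camera
Matrix<float> mapx = new Matrix<float>(imageSize), mapy = new Matrix<float>(imageSize);
CvInvoke.cvInitUndistortRectifyMap(inLeft.IntrinsicMatrix.Ptr, inLeft.DistortionCoeffs.Ptr, R1.Ptr, P1.Ptr, mapx.Ptr, mapy.Ptr);
// Step 7: remap a fresh frame with the new maps
Image<Gray, Byte> remapped = freshLeft.Clone();
CvInvoke.cvRemap(freshLeft.Ptr, remapped.Ptr, mapx.Ptr, mapy.Ptr, (int)INTER.CV_INTER_LINEAR | (int)WARP.CV_WARP_FILL_OUTLIERS, new MCvScalar(0));
}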
So when I get to 7, cvRemap just hangs. I have already used cvRemap to undistort images captured from a single camera, so I know the function works with my setup at least in that case.
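For reference, the single-camera path that does work for me is essentially the following. Again only a minimal sketch; the intrinsics parameter is assumed to come from an earlier single-camera CalibrateCamera run.
// Minimal single-camera undistort sketch; this style of cvRemap call works on my setup.
static Image<Gray, Byte> UndistortSingle(Capture cap, IntrinsicCameraParameters intrinsics, int width, int height)
{
Matrix<float> mapx, mapy;
intrinsics.InitUndistortMap(width, height, out mapx, out mapy); // plain undistort maps, no rectification
Image<Gray, Byte> src = cap.QueryFrame().Convert<Gray, Byte>();
Image<Gray, Byte> dst = src.Clone();
CvInvoke.cvRemap(src.Ptr, dst.Ptr, mapx.Ptr, mapy.Ptr, (int)INTER.CV_INTER_LINEAR | (int)WARP.CV_WARP_FILL_OUTLIERS, new MCvScalar(0));
return dst;
}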
I wrote a class to manage multiple cameras:
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Drawing;
using System.Drawing.Drawing2D;
using System.Windows.Forms;
using Emgu.CV;
using Emgu.CV.UI;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using Emgu.CV.VideoSurveillance;
namespace Capture2Cams
{
class camData
{
public Capture capture;
public Image<Bgr, Byte> lastFrame;
public Image<Gray, Byte> lastFrameGray;
public bool lastChessboardFound;
public PointF[] lastChessboardCorners;
public Image<Gray, Byte>[] samplesGray;
public PointF[][] samplesChessboardCorners;
public Size cbDimensions;
public Size imageDimensions;
public int cursampleIndex = 0;
public ImageList sampleIcons;
private Image<Gray, Byte> _chessBoardDisplay;
private int _iconWidth = 160;
private int _icnonHeight = 90;
private int _numSamples = 0;
public int numSamples()
{
return _numSamples;
}
public void numSamples(int val)
{
_numSamples = val;
this.samplesGray = new Image<Gray, Byte>[val];
this.samplesChessboardCorners = new PointF[val][];
this.sampleIcons.ImageSize = new Size(_iconWidth, _icnonHeight);
Bitmap tmp = new Bitmap(_iconWidth, _icnonHeight);
this.sampleIcons.Images.Clear();
for (int c = 0; c < _numSamples; c++) this.sampleIcons.Images.Add(tmp);
}
public camData(int camIndex, int capWidth, int capHeight, int pcbWidth, int pcbHeight, int pNumSamples)
{
this.sampleIcons = new ImageList();
try
{
this.capture = new Capture(camIndex);
this.capture.SetCaptureProperty(CAP_PROP.CV_CAP_PROP_FRAME_WIDTH, capWidth);
this.capture.SetCaptureProperty(CAP_PROP.CV_CAP_PROP_FRAME_HEIGHT, capHeight);
}
catch (Exception e)
{
MessageBox.Show(e.Message);
return;
}
this.imageDimensions = new Size(capWidth, capHeight);
this.cbDimensions = new Size(pcbWidth, pcbHeight);
this.numSamples(pNumSamples);
}
public Image<Gray, Byte> captureFrame()
{
this.lastFrame = this.capture.QueryFrame();
this.lastFrameGray = this.lastFrame.Convert<Gray, Byte>();
return this.lastFrameGray;
}
public int captureSample()
{
this.detectChessboard(true); // detectChessboard calls -> captureFrame
if (lastChessboardFound)
{
this.samplesGray[cursampleIndex] = this.lastFrameGray;
this.samplesChessboardCorners[cursampleIndex] = this.lastChessboardCorners;
this.sampleIcons.Images[this.cursampleIndex] = this.lastFrameGray.ToBitmap(_iconWidth, _icnonHeight);
this.cursampleIndex++;
if (this.cursampleIndex >= _numSamples) this.cursampleIndex = 0;
}
return cursampleIndex;
}
public void clearSamples()
{
this.cursampleIndex = 0;
this.numSamples(_numSamples);
}
public Image<Gray, Byte> detectChessboard(bool pDoCapture)
{
if (pDoCapture) this.captureFrame();
this.lastChessboardFound = CameraCalibration.FindChessboardCorners(this.lastFrameGray, this.cbDimensions, CALIB_CB_TYPE.ADAPTIVE_THRESH | CALIB_CB_TYPE.FILTER_QUADS, out this.lastChessboardCorners);
_chessBoardDisplay = this.lastFrameGray.Clone();
CameraCalibration.DrawChessboardCorners(this._chessBoardDisplay, this.cbDimensions, this.lastChessboardCorners, this.lastChessboardFound);
return this._chessBoardDisplay;
}
public void saveSampleImages(string pPath, string pID)
{
for(int ic = 0; ic < this._numSamples; ic++)
{
this.samplesGray[ic].Save(pPath + pID + ic.ToString() + ".bmp");
}
}
public void loadSampleImages(string pPath, string pID)
{
clearSamples();
for (int ic = 0; ic < this._numSamples; ic++)
{
this.lastFrameGray = new Image<Gray, byte>(new Bitmap(pPath + pID + ic.ToString() + ".bmp"));
this.detectChessboard(false);
this.samplesChessboardCorners[ic] = this.lastChessboardCorners;
this.sampleIcons.Images[ic] = this.lastFrameGray.ToBitmap(_iconWidth, _icnonHeight);
this.samplesGray[ic] = this.lastFrameGray;
}
}
}
}
Here is my form code with the rest of the calibration logic:
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Windows.Forms;
using System.Runtime.InteropServices;
using Emgu.CV.Util;
using Emgu.CV;
using Emgu.CV.UI;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using Emgu.CV.VideoSurveillance;
namespace Capture2Cams
{
public partial class CaptureForm : Form
{
private static camData camLeft;
private static camData camRight;
private int _numSamples = 10; // Number of calibration samples
private int _imageWidth = 1280; // web cam resolution
private int _imageHeight = 720; // web cam resolution
private int _cbWidth = 9; // chessboard corner count
private int _cbHeight = 5; // chessboard corner count
// TODO: Test post calibration values, these will need to be loaded and saved
private static Matrix<double> _foundamentalMatrix;
private static Matrix<double> _essentialMatrix;
private static IntrinsicCameraParameters _inPramsLeft;
private static IntrinsicCameraParameters _inPramsRight;
private static ExtrinsicCameraParameters _outExtParamsStereo;
private Matrix<float> _mapxLeft;
private Matrix<float> _mapyLeft;
private Matrix<float> _mapxRight;
private Matrix<float> _mapyRight;
public CaptureForm()
{
InitializeComponent();
Run();
}
void Run()
{
camLeft = new camData(0, _imageWidth, _imageHeight, _cbWidth, _cbHeight, _numSamples);
camRight = new camData(1, _imageWidth, _imageHeight, _cbWidth, _cbHeight, _numSamples);
this.listViewLeft.LargeImageList = camLeft.sampleIcons;
for (int c = 0; c < _numSamples; c++)
{
ListViewItem curItem = new ListViewItem();
curItem.ImageIndex = c;
curItem.Text = \"Sample\" + c.ToString();
this.listViewLeft.Items.Add(curItem);
}
this.listViewRight.LargeImageList = camRight.sampleIcons;
for (int c = 0; c < _numSamples; c++)
{
ListViewItem curItem = new ListViewItem();
curItem.ImageIndex = c;
curItem.Text = \"Sample\" + c.ToString();
this.listViewRight.Items.Add(curItem);
}
Application.Idle += ProcessFrame;
}
void ProcessFrame(object sender, EventArgs e)
{
if (!checkBoxRectify.Checked)
{
if (this.checkBoxCapCB.Checked)
{
imageBoxLeft.Image = camLeft.detectChessboard(true);
imageBoxRight.Image = camRight.detectChessboard(true);
}
else
{
imageBoxLeft.Image = camLeft.captureFrame();
imageBoxRight.Image = camRight.captureFrame();
}
}
else
{
camLeft.captureFrame();
camRight.captureFrame();
Image<Gray, byte> imgLeft = camLeft.lastFrameGray.Clone();
Image<Gray, byte> imgRight = camRight.lastFrameGray.Clone();
CvInvoke.cvRemap(camLeft.lastFrameGray.Ptr, imgLeft.Ptr, _mapxLeft.Ptr, _mapyLeft.Ptr, (int)INTER.CV_INTER_LINEAR | (int)WARP.CV_WARP_FILL_OUTLIERS, new MCvScalar(0));
CvInvoke.cvRemap(camRight.lastFrameGray.Ptr, imgRight.Ptr, _mapxRight.Ptr, _mapyRight.Ptr, (int)INTER.CV_INTER_LINEAR | (int)WARP.CV_WARP_FILL_OUTLIERS, new MCvScalar(0));
imageBoxLeft.Image = imgLeft;
imageBoxRight.Image = imgRight;
}
//checkBoxRectify
}
private void buttonCaptureSample_Click(object sender, EventArgs e)
{
camLeft.captureSample();
camRight.captureSample();
this.listViewLeft.Refresh();
this.listViewRight.Refresh();
}
private void buttonStereoCalibrate_Click(object sender, EventArgs e)
{
// We should have most of the data needed from the sampling with the camData objects
int numCorners = _cbWidth * _cbHeight;
// Calc intrinsics per camera
_inPramsLeft = new IntrinsicCameraParameters();
_inPramsRight = new IntrinsicCameraParameters();
ExtrinsicCameraParameters[] outExtParamsLeft;
ExtrinsicCameraParameters[] outExtParamsRight;
//Matrix<double> foundamentalMatrix;
//Matrix<double> essentialMatrix;
outExtParamsLeft = new ExtrinsicCameraParameters[_numSamples];
outExtParamsRight = new ExtrinsicCameraParameters[_numSamples];
_outExtParamsStereo = new ExtrinsicCameraParameters();
// Building object points
// These are the points on the chessboard in local 3D coordinates
// Requires one set per sample; if the same calibration object (chessboard) is used for every sample, just reuse the same set of points for each sample
// Also doing sub-pixel refinement on the samples
MCvPoint3D32f[][] objectPoints = new MCvPoint3D32f[_numSamples][];
for (int sc = 0; sc < _numSamples; sc++) // Samples count
{
// individual cam setup
outExtParamsLeft[sc] = new ExtrinsicCameraParameters();
outExtParamsRight[sc] = new ExtrinsicCameraParameters();
// Sub-pixel refinement
camLeft.samplesGray[sc].FindCornerSubPix(new PointF[][] { camLeft.samplesChessboardCorners[sc] }, new Size(10, 10), new Size(-1, -1), new MCvTermCriteria(300, 0.01));
camRight.samplesGray[sc].FindCornerSubPix(new PointF[][] { camRight.samplesChessboardCorners[sc] }, new Size(10, 10), new Size(-1, -1), new MCvTermCriteria(300, 0.01));
// Object points
objectPoints[sc] = new MCvPoint3D32f[numCorners];
for (int cc = 0; cc < numCorners; cc++) // chessboard corners count
{
objectPoints[sc][cc].x = cc / _cbWidth;
objectPoints[sc][cc].y = cc % _cbWidth;
objectPoints[sc][cc].z = 0.0f;
}
}
Size imageSize = new Size(_imageWidth, _imageHeight);
// Individual camera calibration
CameraCalibration.CalibrateCamera(objectPoints, camLeft.samplesChessboardCorners, imageSize, _inPramsLeft, CALIB_TYPE.DEFAULT, out outExtParamsLeft);
CameraCalibration.CalibrateCamera(objectPoints, camRight.samplesChessboardCorners, imageSize, _inPramsRight, CALIB_TYPE.DEFAULT, out outExtParamsRight);
// Stereo Cam calibration
CameraCalibration.StereoCalibrate(
objectPoints,
camLeft.samplesChessboardCorners,
camRight.samplesChessboardCorners,
_inPramsLeft,
_inPramsRight,
imageSize,
CALIB_TYPE.CV_CALIB_FIX_ASPECT_RATIO | CALIB_TYPE.CV_CALIB_ZERO_TANGENT_DIST | CALIB_TYPE.CV_CALIB_FIX_FOCAL_LENGTH,
new MCvTermCriteria(100, 0.001),
out _outExtParamsStereo,
out _foundamentalMatrix,
out _essentialMatrix
);
PrintIntrinsic(_inPramsLeft);
PrintIntrinsic(_inPramsRight);
}
private void listViewLeft_ItemSelectionChanged(object sender, ListViewItemSelectionChangedEventArgs e)
{
}
private void listViewRight_ItemSelectionChanged(object sender, ListViewItemSelectionChangedEventArgs e)
{
}
private void buttonSaveSamples_Click(object sender, EventArgs e)
{
camLeft.saveSampleImages(textBoxSavePath.Text, "left");
camRight.saveSampleImages(textBoxSavePath.Text, "right");
}
private void buttonLoadSamples_Click(object sender, EventArgs e)
{
camLeft.loadSampleImages(textBoxSavePath.Text, "left");
camRight.loadSampleImages(textBoxSavePath.Text, "right");
this.listViewLeft.Refresh();
this.listViewRight.Refresh();
}
private void buttonCapture_Click(object sender, EventArgs e)
{
}
private void buttonCaptureCurframe_Click(object sender, EventArgs e)
{
camLeft.captureFrame();
camRight.captureFrame();
camLeft.lastFrame.Save(textBoxSavePath.Text + "frameLeft" + ".bmp");
camLeft.lastFrameGray.Save(textBoxSavePath.Text + "frameLeftGray" + ".bmp");
camRight.lastFrame.Save(textBoxSavePath.Text + "frameRight" + ".bmp");
camRight.lastFrameGray.Save(textBoxSavePath.Text + "frameRightGray" + ".bmp");
}
public void StereoRectify(
IntrinsicCameraParameters intrinsicParam1,
IntrinsicCameraParameters intrinsicParam2,
Size imageSize,
ExtrinsicCameraParameters extrinsicParams,
out Matrix<double> R1,
out Matrix<double> R2,
out Matrix<double> P1,
out Matrix<double> P2,
out Matrix<double> Q,
STEREO_RECTIFY_TYPE flags,
double alpha,
Size newImageSize,
ref Rectangle validPixROI1,
ref Rectangle validPixROI2
)
{
R1 = new Matrix<double>(3, 3);
R2 = new Matrix<double>(3, 3);
P1 = new Matrix<double>(3, 4);
P2 = new Matrix<double>(3, 4);
Q = new Matrix<double>(4, 4);
CvInvoke.cvStereoRectify(
_inPramsLeft.IntrinsicMatrix.Ptr,
_inPramsRight.IntrinsicMatrix.Ptr,
_inPramsLeft.DistortionCoeffs.Ptr,
_inPramsRight.DistortionCoeffs.Ptr,
imageSize,
extrinsicParams.RotationVector.Ptr,
extrinsicParams.TranslationVector.Ptr,
R1.Ptr,
R2.Ptr,
P1.Ptr,
P2.Ptr,
Q.Ptr,
STEREO_RECTIFY_TYPE.DEFAULT,
alpha,
newImageSize,
ref validPixROI1,
ref validPixROI2);
}
public void InitUndistortRectifyMap(
IntrinsicCameraParameters intrinsicParam,
Matrix<double> R,
Matrix<double> newCameraMatrix,
out Matrix<float> mapx,
out Matrix<float> mapy
)
{
mapx = new Matrix<float>(new Size(_imageWidth, _imageHeight));
mapy = new Matrix<float>(new Size(_imageWidth, _imageHeight));
CvInvoke.cvInitUndistortRectifyMap(intrinsicParam.IntrinsicMatrix.Ptr, intrinsicParam.DistortionCoeffs.Ptr, R.Ptr, newCameraMatrix.Ptr, mapx.Ptr, mapy.Ptr);
}
private void buttonTestCalc_Click(object sender, EventArgs e)
{
// Stereo Rectify images
Matrix<double> R1;
Matrix<double> R2;
Matrix<double> P1;
Matrix<double> P2;
Matrix<double> Q;
Rectangle validPixROI1, validPixROI2;
validPixROI1 = new Rectangle();
validPixROI2 = new Rectangle();
StereoRectify(_inPramsLeft, _inPramsRight, new Size(_imageWidth, _imageHeight), _outExtParamsStereo, out R1, out R2, out P1, out P2, out Q, 0, 0, new Size(_imageWidth, _imageHeight), ref validPixROI1, ref validPixROI2);
//InitUndistortRectifyMap(_inPramsLeft, R1, P1, out _mapxLeft, out _mapyLeft);
//InitUndistortRectifyMap(_inPramsRight, R2, P2, out _mapxRight, out _mapyRight);
_inPramsLeft.InitUndistortMap(_imageWidth, _imageHeight, out _mapxLeft, out _mapyLeft);
_inPramsRight.InitUndistortMap(_imageWidth, _imageHeight, out _mapxRight, out _mapyRight);
Image<Gray, byte> imgLeft = camLeft.lastFrameGray.Clone();
Image<Gray, byte> imgRight = camRight.lastFrameGray.Clone();
// **** THIS IS WHERE I'M UP TO, no errors, it just hangs ****
CvInvoke.cvRemap(camLeft.lastFrameGray.Ptr, imgLeft.Ptr, _mapxLeft.Ptr, _mapyLeft.Ptr, (int)INTER.CV_INTER_LINEAR | (int)WARP.CV_WARP_FILL_OUTLIERS, new MCvScalar(0));
// StereoBM stereoSolver = new StereoBM(Emgu.CV.CvEnum.STEREO_BM_TYPE.BASIC, 0);
//stereoSolver.FindStereoCorrespondence(
}
public void PrintIntrinsic(IntrinsicCameraParameters CamIntrinsic)
{
// Prints the Intrinsic camera parameters to the command line
Console.WriteLine(\"Intrinsic Matrix:\");
string outStr = \"\";
int i = 0;
int j = 0;
for (i = 0; i < CamIntrinsic.IntrinsicMatrix.Height; i++)
{
for (j = 0; j < CamIntrinsic.IntrinsicMatrix.Width; j++)
{
outStr = outStr + CamIntrinsic.IntrinsicMatrix.Data[i, j].ToString();
outStr = outStr + " ";
}
Console.WriteLine(outStr);
outStr = "";
}
Console.WriteLine(\"Distortion Coefficients: \");
outStr = \"\";
for (j = 0; j < CamIntrinsic.DistortionCoeffs.Height; j++)
{
outStr = outStr + CamIntrinsic.DistortionCoeffs.Data[j, 0].ToString();
outStr = outStr + \" \";
}
Console.WriteLine(outStr);
}
public void PrintExtrinsic(ExtrinsicCameraParameters CamExtrinsic)
{
// Prints the Extrinsic camera parameters to the command line
Console.WriteLine(\"Extrinsic Matrix:\");
string outStr = \"\";
int i = 0;
int j = 0;
for (i = 0; i < CamExtrinsic.ExtrinsicMatrix.Height; i++)
{
for (j = 0; j < CamExtrinsic.ExtrinsicMatrix.Width; j++)
{
outStr = outStr + CamExtrinsic.ExtrinsicMatrix.Data[i, j].ToString();
outStr = outStr + " ";
}
Console.WriteLine(outStr);
outStr = "";
}
Console.WriteLine(\"Rotation Vector: \");
outStr = \"\";
for (i = 0; i < CamExtrinsic.RotationVector.Height; i++)
{
for (j = 0; j < CamExtrinsic.RotationVector.Width; j++)
{
outStr = outStr + CamExtrinsic.RotationVector.Data[i, j].ToString();
outStr = outStr + " ";
}
Console.WriteLine(outStr);
outStr = "";
}
Console.WriteLine(\"Translation Vector: \");
outStr = \"\";
for (i = 0; i < CamExtrinsic.TranslationVector.Height; i++)
{
for (j = 0; j < CamExtrinsic.TranslationVector.Width; j++)
{
outStr = outStr + CamExtrinsic.TranslationVector.Data[i, j].ToString();
outStr = outStr + " ";
}
Console.WriteLine(outStr);
outStr = "";
}
}
}
}
Thanks!