• SURF Feature Method
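
The Emgu CV (C# OpenCV wrapper) code below matches a model image against an observed image: it detects SURF keypoints and descriptors in both images, brute-force k-NN matches the descriptors, filters the matches by a uniqueness (ratio) test and by keypoint scale/orientation consistency, and finally estimates a homography from the surviving correspondences. FindMatch does the matching; Draw (further down) visualizes the result.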


     public static void FindMatch(Mat modelImage, Mat observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography)
            {
                int k = 2;                        // number of nearest neighbours per query descriptor
                double uniquenessThreshold = 0.8; // max ratio of best to second-best match distance
                double hessianThresh = 300;       // SURF Hessian threshold: higher -> fewer, stronger keypoints
    
                Stopwatch watch;
                homography = null;
    
                modelKeyPoints = new VectorOfKeyPoint();
                observedKeyPoints = new VectorOfKeyPoint();
                using (UMat uModelImage = modelImage.ToUMat(AccessType.Read))
                using (UMat uObservedImage = observedImage.ToUMat(AccessType.Read))
                {
                    SURF surfCPU = new SURF(hessianThresh);
                    // extract SURF keypoints and descriptors from the model image
                    UMat modelDescriptors = new UMat();
                    surfCPU.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);
                    
                    // time only the per-image matching work; the model features above are computed once
                    watch = Stopwatch.StartNew();
    
                    // extract features from the observed image
                    UMat observedDescriptors = new UMat();
                    surfCPU.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);
                    // brute-force matcher with L2 distance (SURF descriptors are float vectors)
                    BFMatcher matcher = new BFMatcher(DistanceType.L2);
                    matcher.Add(modelDescriptors);

                    // for each observed descriptor, find its k nearest neighbours among the model descriptors
                    matcher.KnnMatch(observedDescriptors, matches, k, null);
                    // mask records which matches survive each filtering stage (255 = keep)
                    mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                    mask.SetTo(new MCvScalar(255));
                    // reject ambiguous matches whose best distance is not clearly better than the second best
                    Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);
    
                    int nonZeroCount = CvInvoke.CountNonZero(mask);
                    if (nonZeroCount >= 4) // at least 4 correspondences are required to estimate a homography
                    {
                        // further filter matches by consistency of keypoint scale and rotation
                        nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                           matches, mask, 1.5, 20);
                        if (nonZeroCount >= 4)
                            homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                               observedKeyPoints, matches, mask, 2);
                    }
    
                    watch.Stop();
    
                }
                matchTime = watch.ElapsedMilliseconds;
            }
    
            /// <summary>
            /// Draws the model image and observed image side by side, together with the matched features
            /// and the homography projection of the model onto the observed image.
            /// </summary>
            /// <param name="modelImage">The model image</param>
            /// <param name="observedImage">The observed image</param>
            /// <param name="matchTime">The total time spent matching and computing the homography, in milliseconds.</param>
            /// <returns>A single image containing the model and observed images, the matched features and the homography projection.</returns>
            public static Mat Draw(Mat modelImage, Mat observedImage, out long matchTime)
            {
                Mat homography;
                VectorOfKeyPoint modelKeyPoints;
                VectorOfKeyPoint observedKeyPoints;
                using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
                {
                    Mat mask;
                    FindMatch(modelImage, observedImage, out matchTime, out modelKeyPoints, out observedKeyPoints, matches,
                       out mask, out homography);
    
                    //Draw the matched keypoints
                    Mat result = new Mat();
                    Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                       matches, result, new MCvScalar(255, 255, 255), new MCvScalar(255, 255, 255), mask);
    
                    #region draw the projected region on the image
    
                    if (homography != null)
                    {
                        //draw a rectangle along the projected model
                        Rectangle rect = new Rectangle(Point.Empty, modelImage.Size);
                        PointF[] pts = new PointF[]
                        {
                      new PointF(rect.Left, rect.Bottom),
                      new PointF(rect.Right, rect.Bottom),
                      new PointF(rect.Right, rect.Top),
                      new PointF(rect.Left, rect.Top)
                        };
                        // project the model image corners into the observed image
                        pts = CvInvoke.PerspectiveTransform(pts, homography);
    
                        Point[] points = Array.ConvertAll<PointF, Point>(pts, Point.Round);
                        using (VectorOfPoint vp = new VectorOfPoint(points))
                        {
                            // outline the detected region (blue in BGR order, thickness 5)
                            CvInvoke.Polylines(result, vp, true, new MCvScalar(255, 0, 0, 255), 5);
                        }
    
                    }
    
                    #endregion
    
                    return result;
    
                }
            }
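
For completeness, here is a minimal usage sketch. The file names are placeholders (any model/scene image pair will do), Draw is assumed to be reachable from the call site, and the ImreadModes enum name can differ slightly between Emgu CV versions:

        // Minimal usage sketch; "box.png" / "box_in_scene.png" are hypothetical file names.
        long matchTime;
        using (Mat modelImage = CvInvoke.Imread("box.png", ImreadModes.Grayscale))
        using (Mat observedImage = CvInvoke.Imread("box_in_scene.png", ImreadModes.Grayscale))
        using (Mat result = Draw(modelImage, observedImage, out matchTime))
        {
            // show the side-by-side match visualization until a key is pressed
            CvInvoke.Imshow(String.Format("Matched in {0} ms", matchTime), result);
            CvInvoke.WaitKey(0);
        }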
• Original source: https://www.cnblogs.com/daxiongblog/p/5729957.html