C# + ONNX Runtime: object detection with YOLO v11
The demo supports both still-image and live-camera (video) detection.
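The code below assumes the usual NuGet packages for this stack: Microsoft.ML.OnnxRuntime (or Microsoft.ML.OnnxRuntime.Gpu if the CUDA provider is used), OpenCvSharp4, OpenCvSharp4.Extensions (for BitmapConverter) and OpenCvSharp4.runtime.win; adjust these to whatever your project actually references.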
FormYoloV11.cs
using Microsoft.ML.OnnxRuntime;
using Microsoft.ML.OnnxRuntime.Tensors;
using OpenCvSharp;
using OpenCvSharp.Dnn;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Drawing;
using System.IO;
using System.Linq;
using System.Text;
using System.Windows.Forms;
namespace WindowsFormsApp12
{
public partial class FormYoloV11 : Form
{
public FormYoloV11()
{
InitializeComponent();
InitializeCameraSelector();
LoadModel();
InitializeProcessingTimeLabel();
timerUpdateLog = new System.Threading.Timer(UpdateTextBox, null, 1000, 1000);
}
private void FormYoloV11_Load(object sender, EventArgs e)
{
}
private VideoCapture capture;
private Timer timer;
private InferenceSession onnx_session;
private int input_height = 640; // model input size expected by yolo11n.onnx (matches the 640x640 tensor and box_num = 8400)
private int input_width = 640;
private float ratio_height;
private float ratio_width;
private int box_num = 8400;
private float conf_threshold = 0.25f;
private float nms_threshold = 0.5f;
private string[] class_names;
private int class_num;
private Label processingTimeLabel;
private void InitializeCamera(int cameraId)
{
// Initialize video capture for the selected camera
capture = new VideoCapture(cameraId);
if (!capture.IsOpened())
{
add_log("Unable to access the camera.");
return;
}
// Reduce the camera resolution to speed up processing
capture.Set(VideoCaptureProperties.FrameWidth, capture.FrameWidth / 2);
capture.Set(VideoCaptureProperties.FrameHeight, capture.FrameHeight / 2);
add_log("视频信息" + capture.FrameWidth + "," + capture.FrameHeight);
// Set up a timer to fetch frames at regular intervals
timer = new Timer { Interval = 40 }; // 40 ms interval, roughly 25 frames per second
timer.Tick += Timer_Tick;
timer.Start();
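// Note: this is a System.Windows.Forms.Timer, so Timer_Tick (and the inference inside ProcessFrame)
// runs on the UI thread; for heavier models, consider moving inference to a background Task.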
}
private void LoadModel()
{
// Get the base directory of the current application
string baseDirectory = AppDomain.CurrentDomain.BaseDirectory;
// Set the model path and class label path
string model_path = Path.Combine(baseDirectory, "model", "yolo11n.onnx");
string classer_path = Path.Combine(baseDirectory, "model", "label_chinese.txt");
class_names = File.ReadAllLines(classer_path, Encoding.UTF8);
class_num = class_names.Length;
// Create an inference session for the ONNX model
SessionOptions options = new SessionOptions();
options.LogSeverityLevel = OrtLoggingLevel.ORT_LOGGING_LEVEL_INFO;
try
{
options.AppendExecutionProvider_CUDA(0);
}
catch (Exception ex)
{
add_log($"Unable to use GPU for inference: {ex.Message}. Falling back to CPU.");
options.AppendExecutionProvider_CPU(0);
}
options.IntraOpNumThreads = 1; // limit intra-op parallelism to a single CPU thread
onnx_session = new InferenceSession(model_path, options);
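// Optional sanity check (a sketch, not part of the original code): the input name and shape can be
// read from the session instead of hard-coding "images" and 640x640, e.g.:
//   var input = onnx_session.InputMetadata.First();
//   add_log($"Model input: {input.Key} [{string.Join(",", input.Value.Dimensions)}]");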
}
private void InitializeProcessingTimeLabel()
{
processingTimeLabel = new Label
{
Location = new System.Drawing.Point(10, 10),
AutoSize = true,
ForeColor = System.Drawing.Color.Black,
BackColor = System.Drawing.Color.White,
Font = new Font("Arial", 10, FontStyle.Bold)
};
Controls.Add(processingTimeLabel);
}
private void Timer_Tick(object sender, EventArgs e)
{
Mat frame = new Mat();
capture.Read(frame);
if (!frame.Empty())
{
Stopwatch stopwatch = Stopwatch.StartNew();
ProcessFrame(frame);
stopwatch.Stop();
processingTimeLabel.Text = $"Processing Time: {stopwatch.ElapsedMilliseconds} ms";
}
}
public static class BitmapConverterExtension
{
public static Bitmap ToBitmap(Mat mat)
{
return OpenCvSharp.Extensions.BitmapConverter.ToBitmap(mat);
}
public static Mat ToMat(Bitmap bitmap)
{
return OpenCvSharp.Extensions.BitmapConverter.ToMat(bitmap);
}
}
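// PutTextZH draws text via GDI+ because Cv2.PutText's Hershey fonts cannot render Chinese characters:
// the Mat is converted to a Bitmap, drawn on, and copied back, which is cheap enough for a few labels per frame.
// The calls below pass an empty font name, which falls back to a default GDI+ font; a CJK-capable family
// such as "SimHei" renders the Chinese labels more reliably.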
public static void PutTextZH(Mat image, string text, OpenCvSharp.Point org, Scalar color, int fontSize, string fontName, bool italic, bool underline)
{
if (image.Empty())
{
throw new ArgumentException("The input image cannot be empty.");
}
// Create a new bitmap with the same size as the input Mat
Bitmap bitmap = BitmapConverterExtension.ToBitmap(image);
// Create graphics from the bitmap
using (Graphics graphics = Graphics.FromImage(bitmap))
{
// Set text rendering properties
graphics.TextRenderingHint = System.Drawing.Text.TextRenderingHint.AntiAlias;
// Create font with specified properties
FontStyle fontStyle = FontStyle.Regular;
if (italic) fontStyle |= FontStyle.Italic;
if (underline) fontStyle |= FontStyle.Underline;
using (Font font = new Font(fontName, fontSize, fontStyle))
using (Brush brush = new SolidBrush(System.Drawing.Color.FromArgb((int)color.Val2, (int)color.Val1, (int)color.Val0)))
{
// Draw the text at the specified location
graphics.DrawString(text, font, brush, org.X, org.Y);
}
}
// Convert the bitmap back to Mat
var result = BitmapConverterExtension.ToMat(bitmap);
result.CopyTo(image);
}
private System.Threading.Timer timerUpdateLog;
public string bufferedLogs = "";
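// Note: bufferedLogs is written by add_log (UI thread) and read/cleared by the timer's thread-pool
// callback; for heavier logging, guard it with a lock or switch to a ConcurrentQueue<string>.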
private void UpdateTextBox(object state)
{
if (!string.IsNullOrEmpty(bufferedLogs))
{
textBox1.Invoke
(
// Delegate for the UI-thread invoke; wraps a parameterless method
new MethodInvoker
(
delegate
{
textBox1.AppendText(bufferedLogs);
textBox1.ScrollToCaret();
if (textBox1.Text.Length > 1000000)
textBox1.Text = textBox1.Text.Substring(900000);
}
)
);
bufferedLogs = "";
}
}
public static string strlastLog_show = "";
public void add_log(string log, bool bRemoveLast = false)
{
bufferedLogs += $"{DateTime.Now.ToString("HH:mm:ss ")} {log}\r\n";
strlastLog_show = log;
}
private void ProcessFrame(Mat frame)
{
int height = frame.Rows;
int width = frame.Cols;
Mat temp_image = frame.Clone();
// Resize the image if necessary
if (height > input_height || width > input_width)
{
float scale = Math.Min((float)input_height / height, (float)input_width / width);
OpenCvSharp.Size new_size = new OpenCvSharp.Size((int)(width * scale), (int)(height * scale));
Cv2.Resize(frame, temp_image, new_size);
}
ratio_height = (float)height / temp_image.Rows;
ratio_width = (float)width / temp_image.Cols;
Mat input_img = new Mat();
Cv2.CopyMakeBorder(temp_image, input_img, 0, input_height - temp_image.Rows, 0, input_width - temp_image.Cols, BorderTypes.Constant, null);
// Prepare input tensor
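// The tensor layout is NCHW ([1, 3, height, width]) with pixel values normalized to the 0..1 range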
Tensor<float> input_tensor = new DenseTensor<float>(new[] { 1, 3, input_height, input_width });
for (int y = 0; y < input_img.Height; y++)
{
for (int x = 0; x < input_img.Width; x++)
{
// OpenCV Mats are BGR while the Ultralytics YOLO export expects RGB, so the channels are swapped here
input_tensor[0, 0, y, x] = input_img.At<Vec3b>(y, x)[2] / 255f; // R
input_tensor[0, 1, y, x] = input_img.At<Vec3b>(y, x)[1] / 255f; // G
input_tensor[0, 2, y, x] = input_img.At<Vec3b>(y, x)[0] / 255f; // B
}
}
List<NamedOnnxValue> input_container = new List<NamedOnnxValue>
{
NamedOnnxValue.CreateFromTensor("images", input_tensor)
};
// Perform inference
var ort_outputs = onnx_session.Run(input_container).ToArray();
// Post-process the output
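// The raw output has shape [1, 4 + class_num, box_num]: cx, cy, w, h followed by one score per class.
// Transposing yields box_num consecutive rows of length 4 + class_num, which is easier to scan per box.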
float[] data = Transpose(ort_outputs[0].AsTensor<float>().ToArray(), 4 + class_num, box_num);
float[] confidenceInfo = new float[class_num];
float[] rectData = new float[4];
List<DetectionResult> detResults = new List<DetectionResult>();
for (int i = 0; i < box_num; i++)
{
Array.Copy(data, i * (class_num + 4), rectData, 0, 4);
Array.Copy(data, i * (class_num + 4) + 4, confidenceInfo, 0, class_num);
float score = confidenceInfo.Max();
int maxIndex = Array.IndexOf(confidenceInfo, score);
if (score > conf_threshold)
{
int _centerX = (int)(rectData[0] * ratio_width);
int _centerY = (int)(rectData[1] * ratio_height);
int _width = (int)(rectData[2] * ratio_width);
int _height = (int)(rectData[3] * ratio_height);
detResults.Add(new DetectionResult(
maxIndex,
class_names[maxIndex],
new Rect(_centerX - _width / 2, _centerY - _height / 2, _width, _height),
score));
}
}
// Apply Non-Maximum Suppression
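// NMSBoxes keeps only the indices of boxes that pass the score threshold and survive IoU suppression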
CvDnn.NMSBoxes(detResults.Select(x => x.Rect), detResults.Select(x => x.Confidence), conf_threshold, nms_threshold, out int[] indices);
detResults = detResults.Where((x, index) => indices.Contains(index)).ToList();
// Draw the detection results on the frame
Mat processedFrame = frame.Clone();
foreach (DetectionResult r in detResults)
{
PutTextZH(processedFrame, string.Format("{0}:{1:P0}", r.Class, r.Confidence), new OpenCvSharp.Point(r.Rect.TopLeft.X, r.Rect.TopLeft.Y + 30), Scalar.Red, 15, "", false, false);
Cv2.Rectangle(processedFrame, r.Rect, Scalar.Red, thickness: 2);
}
// Display the original frame in pictureBox1 and the processed frame in pictureBox2;
// dispose the previous Bitmaps first to avoid leaking GDI handles at ~25 fps
pictureBox1.Image?.Dispose();
pictureBox1.Image = new Bitmap(frame.ToMemoryStream());
pictureBox2.Image?.Dispose();
pictureBox2.Image = new Bitmap(processedFrame.ToMemoryStream());
}
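// Transpose converts a row-major (rows x cols) float buffer into (cols x rows). The unsafe pointer
// access only avoids array bounds checks in the hot loop and requires "Allow unsafe code" in the project.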
public unsafe float[] Transpose(float[] tensorData, int rows, int cols)
{
float[] transposedTensorData = new float[tensorData.Length];
fixed (float* pTensorData = tensorData)
{
fixed (float* pTransposedData = transposedTensorData)
{
for (int i = 0; i < rows; i++)
{
for (int j = 0; j < cols; j++)
{
int index = i * cols + j;
int transposedIndex = j * rows + i;
pTransposedData[transposedIndex] = pTensorData[index];
}
}
}
}
return transposedTensorData;
}
private void button1_Click(object sender, EventArgs e)
{
int selectedCameraIndex = cameraSelector.SelectedIndex;
if (selectedCameraIndex >= 0)
{
// Stop the current capture if it's running
timer?.Stop();
capture?.Release();
// Initialize the selected camera
InitializeCamera(selectedCameraIndex);
}
}
private void InitializeCameraSelector()
{
// Populate the ComboBox with camera indices (0, 1, 2, ...)
for (int i = 0; i < 5; i++)
{
using (VideoCapture tempCapture = new VideoCapture(i))
{
if (tempCapture.IsOpened())
{
cameraSelector.Items.Add("Camera " + i);
}
}
}
if (cameraSelector.Items.Count >= 1)
cameraSelector.SelectedIndex = 0;
cameraSelector.SelectedIndexChanged += CameraSelector_SelectedIndexChanged;
}
private void CameraSelector_SelectedIndexChanged(object sender, EventArgs e)
{
}
string fileFilter = "Image files|*.bmp;*.jpg;*.jpeg;*.tif;*.tiff;*.png";
string image_path = "";
DateTime dt1 = DateTime.Now;
DateTime dt2 = DateTime.Now;
private void button2_Click(object sender, EventArgs e)
{
OpenFileDialog ofd = new OpenFileDialog();
ofd.Filter = fileFilter;
if (ofd.ShowDialog() != DialogResult.OK) return;
pictureBox1.Image = null;
image_path = ofd.FileName;
pictureBox1.Image = new Bitmap(image_path);
textBox1.Text = "";
pictureBox2.Image = null;
}
private void SaveAndOpenResultImage(Mat resultImage)
{
string resultImagePath = Path.Combine(Path.GetTempPath(), "result_image.png");
resultImage.SaveImage(resultImagePath);
System.Diagnostics.Process.Start(new System.Diagnostics.ProcessStartInfo(resultImagePath) { UseShellExecute = true });
}
private void button3_Click(object sender, EventArgs e)
{
if (image_path == "")
{
return;
}
button2.Enabled = false;
pictureBox2.Image = null;
textBox1.Text = "";
Application.DoEvents();
Mat image = new Mat(image_path);
// Scale the image down to fit the model input if it is larger
int height = image.Rows;
int width = image.Cols;
Mat temp_image = image.Clone();
if (height > input_height || width > input_width)
{
float scale = Math.Min((float)input_height / height, (float)input_width / width);
OpenCvSharp.Size new_size = new OpenCvSharp.Size((int)(width * scale), (int)(height * scale));
Cv2.Resize(image, temp_image, new_size);
}
ratio_height = (float)height / temp_image.Rows;
ratio_width = (float)width / temp_image.Cols;
Mat input_img = new Mat();
Cv2.CopyMakeBorder(temp_image, input_img, 0, input_height - temp_image.Rows, 0, input_width - temp_image.Cols, BorderTypes.Constant, null);
//Cv2.ImShow("input_img", input_img);
// Input tensor (NCHW, values normalized to 0..1)
Tensor<float> input_tensor = new DenseTensor<float>(new[] { 1, 3, input_height, input_width });
for (int y = 0; y < input_img.Height; y++)
{
for (int x = 0; x < input_img.Width; x++)
{
// Swap BGR (OpenCV) to RGB (expected by the YOLO export), same as in ProcessFrame
input_tensor[0, 0, y, x] = input_img.At<Vec3b>(y, x)[2] / 255f; // R
input_tensor[0, 1, y, x] = input_img.At<Vec3b>(y, x)[1] / 255f; // G
input_tensor[0, 2, y, x] = input_img.At<Vec3b>(y, x)[0] / 255f; // B
}
}
List<NamedOnnxValue> input_container = new List<NamedOnnxValue>
{
NamedOnnxValue.CreateFromTensor("images", input_tensor)
};
// Inference
dt1 = DateTime.Now;
var ort_outputs = onnx_session.Run(input_container).ToArray();
dt2 = DateTime.Now;
float[] data = Transpose(ort_outputs[0].AsTensor<float>().ToArray(), 4 + class_num, box_num);
float[] confidenceInfo = new float[class_num];
float[] rectData = new float[4];
List<DetectionResult> detResults = new List<DetectionResult>();
for (int i = 0; i < box_num; i++)
{
Array.Copy(data, i * (class_num + 4), rectData, 0, 4);
Array.Copy(data, i * (class_num + 4) + 4, confidenceInfo, 0, class_num);
float score = confidenceInfo.Max(); // highest class score
int maxIndex = Array.IndexOf(confidenceInfo, score); // index of that class
int _centerX = (int)(rectData[0] * ratio_width);
int _centerY = (int)(rectData[1] * ratio_height);
int _width = (int)(rectData[2] * ratio_width);
int _height = (int)(rectData[3] * ratio_height);
detResults.Add(new DetectionResult(
maxIndex,
class_names[maxIndex],
new Rect(_centerX - _width / 2, _centerY - _height / 2, _width, _height),
score));
}
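// Unlike ProcessFrame, every candidate box is added here; NMSBoxes' score threshold (conf_threshold) drops the low-confidence ones.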
//NMS
CvDnn.NMSBoxes(detResults.Select(x => x.Rect), detResults.Select(x => x.Confidence), conf_threshold, nms_threshold, out int[] indices);
detResults = detResults.Where((x, index) => indices.Contains(index)).ToList();
// Draw the detection results
Mat result_image = image.Clone();
foreach (DetectionResult r in detResults)
{
//Cv2.PutText(result_image, $"{r.Class}:{r.Confidence:P0}", new OpenCvSharp.Point(r.Rect.TopLeft.X, r.Rect.TopLeft.Y - 10), HersheyFonts.HersheySimplex, 1, Scalar.Red, 2);
PutTextZH(result_image, string.Format("{0}:{1:P0}", r.Class, r.Confidence), new OpenCvSharp.Point(r.Rect.TopLeft.X, r.Rect.TopLeft.Y + 30), Scalar.Red, 15, "", false, false);
Cv2.Rectangle(result_image, r.Rect, Scalar.Red, thickness: 2);
}
pictureBox2.Image = new Bitmap(result_image.ToMemoryStream());
textBox1.Text = "推理耗时:" + (dt2 - dt1).TotalMilliseconds + "ms";
// 保存并打开结果图片
SaveAndOpenResultImage(result_image);
button2.Enabled = true;
}
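// Note: Form2_Resize is not wired up in the Designer code below; subscribe it to the form's Resize
// event (e.g. this.Resize += Form2_Resize;) if the side-by-side PictureBox layout should track resizing.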
private void Form2_Resize(object sender, EventArgs e)
{
int pictureBoxWidth = (this.ClientSize.Width - 30) / 2;
pictureBox1.Width = pictureBoxWidth;
pictureBox2.Width = pictureBoxWidth;
pictureBox2.Left = pictureBox1.Right + 10;
}
private void FormYoloV11_FormClosing(object sender, FormClosingEventArgs e)
{
// Release the camera, stop the frame timer, and dispose the background log timer and the ONNX session
timer?.Stop();
capture?.Release();
timerUpdateLog?.Dispose();
onnx_session?.Dispose();
}
}
}
DetectResult.cs
using OpenCvSharp;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace WindowsFormsApp12
{
public class DetectionResult
{
public DetectionResult(int ClassId, string Class, Rect Rect, float Confidence)
{
this.ClassId = ClassId;
this.Confidence = Confidence;
this.Rect = Rect;
this.Class = Class;
}
public string Class { get; set; }
public int ClassId { get; set; }
public float Confidence { get; set; }
public Rect Rect { get; set; }
}
}
FormYoloV11.Designer.cs
namespace WindowsFormsApp12
{
partial class FormYoloV11
{
/// <summary>
/// Required designer variable.
/// </summary>
private System.ComponentModel.IContainer components = null;
/// <summary>
/// Clean up any resources being used.
/// </summary>
/// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
protected override void Dispose(bool disposing)
{
if (disposing && (components != null))
{
components.Dispose();
}
base.Dispose(disposing);
}
#region Windows Form Designer generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InitializeComponent()
{
this.pictureBox1 = new System.Windows.Forms.PictureBox();
this.pictureBox2 = new System.Windows.Forms.PictureBox();
this.button1 = new System.Windows.Forms.Button();
this.cameraSelector = new System.Windows.Forms.ComboBox();
this.textBox1 = new System.Windows.Forms.TextBox();
this.button2 = new System.Windows.Forms.Button();
this.button3 = new System.Windows.Forms.Button();
((System.ComponentModel.ISupportInitialize)(this.pictureBox1)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.pictureBox2)).BeginInit();
this.SuspendLayout();
//
// pictureBox1
//
this.pictureBox1.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Bottom)
| System.Windows.Forms.AnchorStyles.Left)));
this.pictureBox1.BackColor = System.Drawing.SystemColors.ActiveBorder;
this.pictureBox1.Location = new System.Drawing.Point(1, 49);
this.pictureBox1.Name = "pictureBox1";
this.pictureBox1.Size = new System.Drawing.Size(387, 309);
this.pictureBox1.SizeMode = System.Windows.Forms.PictureBoxSizeMode.Zoom;
this.pictureBox1.TabIndex = 0;
this.pictureBox1.TabStop = false;
//
// pictureBox2
//
this.pictureBox2.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Bottom)
| System.Windows.Forms.AnchorStyles.Right)));
this.pictureBox2.BackColor = System.Drawing.SystemColors.ActiveBorder;
this.pictureBox2.BorderStyle = System.Windows.Forms.BorderStyle.FixedSingle;
this.pictureBox2.Location = new System.Drawing.Point(411, 49);
this.pictureBox2.Name = "pictureBox2";
this.pictureBox2.Size = new System.Drawing.Size(387, 309);
this.pictureBox2.SizeMode = System.Windows.Forms.PictureBoxSizeMode.Zoom;
this.pictureBox2.TabIndex = 1;
this.pictureBox2.TabStop = false;
//
// button1
//
this.button1.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Left)));
this.button1.Location = new System.Drawing.Point(148, 408);
this.button1.Name = "button1";
this.button1.Size = new System.Drawing.Size(75, 23);
this.button1.TabIndex = 2;
this.button1.Text = "打开摄像头";
this.button1.UseVisualStyleBackColor = true;
this.button1.Click += new System.EventHandler(this.button1_Click);
//
// cameraSelector
//
this.cameraSelector.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Left)));
this.cameraSelector.FormattingEnabled = true;
this.cameraSelector.Location = new System.Drawing.Point(1, 411);
this.cameraSelector.Name = "cameraSelector";
this.cameraSelector.Size = new System.Drawing.Size(121, 20);
this.cameraSelector.TabIndex = 3;
//
// textBox1
//
this.textBox1.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Left)
| System.Windows.Forms.AnchorStyles.Right)));
this.textBox1.Location = new System.Drawing.Point(1, 437);
this.textBox1.Multiline = true;
this.textBox1.Name = "textBox1";
this.textBox1.Size = new System.Drawing.Size(798, 77);
this.textBox1.TabIndex = 4;
//
// button2
//
this.button2.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Left)));
this.button2.Location = new System.Drawing.Point(236, 408);
this.button2.Margin = new System.Windows.Forms.Padding(2);
this.button2.Name = "button2";
this.button2.Size = new System.Drawing.Size(75, 23);
this.button2.TabIndex = 6;
this.button2.Text = "打开图像";
this.button2.UseVisualStyleBackColor = true;
this.button2.Click += new System.EventHandler(this.button2_Click);
//
// button3
//
this.button3.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Left)));
this.button3.Location = new System.Drawing.Point(309, 408);
this.button3.Margin = new System.Windows.Forms.Padding(2);
this.button3.Name = "button3";
this.button3.Size = new System.Drawing.Size(75, 23);
this.button3.TabIndex = 5;
this.button3.Text = "检测图像";
this.button3.UseVisualStyleBackColor = true;
this.button3.Click += new System.EventHandler(this.button3_Click);
//
// FormYoloV11
//
this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 12F);
this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
this.ClientSize = new System.Drawing.Size(800, 511);
this.Controls.Add(this.button2);
this.Controls.Add(this.button3);
this.Controls.Add(this.textBox1);
this.Controls.Add(this.cameraSelector);
this.Controls.Add(this.button1);
this.Controls.Add(this.pictureBox2);
this.Controls.Add(this.pictureBox1);
this.Name = "FormYoloV11";
this.Text = "FormYoloV11";
this.FormClosing += new System.Windows.Forms.FormClosingEventHandler(this.FormYoloV11_FormClosing);
this.Load += new System.EventHandler(this.FormYoloV11_Load);
((System.ComponentModel.ISupportInitialize)(this.pictureBox1)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.pictureBox2)).EndInit();
this.ResumeLayout(false);
this.PerformLayout();
}
#endregion
private System.Windows.Forms.PictureBox pictureBox1;
private System.Windows.Forms.PictureBox pictureBox2;
private System.Windows.Forms.Button button1;
private System.Windows.Forms.ComboBox cameraSelector;
private System.Windows.Forms.TextBox textBox1;
private System.Windows.Forms.Button button2;
private System.Windows.Forms.Button button3;
}
}
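The yolo11n.onnx model is not included with the code; it can be exported from the Ultralytics package (for example with the CLI command yolo export model=yolo11n.pt format=onnx) and placed, together with label_chinese.txt, in a model folder next to the executable, which is where LoadModel expects to find both files.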
Original article: https://blog.csdn.net/babytiger/article/details/143686955