Unity Physical Camera如何使用真实摄像头内参

通过OpenCV等工具提供的标定方法1,获得真实摄像头的针孔模型内参矩阵\(K\)

\[K = \begin{bmatrix} f_x & 0 & c_x \\ 0 & f_y & c_y \\ 0 & 0 & 1 \end{bmatrix}\]

Unity的Physical Camera2仿真了真实世界的摄像头,可以将标定后的内参矩阵信息输入到Unity的Physical Camera中,方便两者配合使用。

Unity中的转换方法:

using System;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using System.Runtime.InteropServices;
using UnityEngine;
using UnityEngine.UI;

/// <summary>
/// Opens a webcam via WebCamTexture, streams its frames onto a RawImage
/// background, and configures the scene's main Camera as a Unity Physical
/// Camera whose sensor size / lens shift are derived from the real camera's
/// calibrated pinhole intrinsics (fx, fy, cx, cy).
/// </summary>
public class MyCamera : MonoBehaviour
{
    public string camera_name = "Integrated Camera"; // USB Video Device //Integrated Camera //Virtual Camera
    public int width = 1280;
    public int height = 720;
    public int fps = 120;
    // Focal length in mm is a free parameter: calibration only fixes the
    // ratios fx = focal * width / sensorWidth (likewise fy), so the sensor
    // size below is derived from whatever focal is chosen here.
    public float focal = 28F;
    public float fx = 1087.4199F;   // intrinsic: focal length in pixels (x)
    public float fy = 1087.4199F;   // intrinsic: focal length in pixels (y)
    public float cx = 640F;         // intrinsic: principal point x (pixels)
    public float cy = 360F;         // intrinsic: principal point y (pixels)
    public float skew = 0F;         // intrinsic skew; not representable in Unity's Physical Camera, kept for reference
    public AspectRatioFitter fit;
    private RawImage cambackground = null;
    private Camera cam_obj = null;
    private WebCamTexture cam = null;
    private volatile Color32[] cam_data = null; // reusable pixel buffer, filled in place each frame
    private Texture2D tex = null;

    /// <summary>
    /// Coroutine: requests webcam permission, opens the named device (or the
    /// first available one), then maps the calibrated intrinsics onto the
    /// main Camera's physical-camera properties.
    /// </summary>
    IEnumerator Start()
    {
        cambackground = GameObject.FindWithTag("CamBackground").GetComponent<RawImage>();
        cam_obj = GameObject.FindWithTag("MainCamera").GetComponent<Camera>();

        yield return Application.RequestUserAuthorization(UserAuthorization.WebCam);

        if (Application.HasUserAuthorization(UserAuthorization.WebCam))
        {
            WebCamDevice[] devices = WebCamTexture.devices;

            if (devices.Length == 0)
            {
                Debug.Log("No camera detected!");
                yield break;
            }

            bool find_camera = false;

            for (int i = 0; i < devices.Length; i++)
            {
                Debug.Log("Found camera:" + devices[i].name);
                if (camera_name.Equals(devices[i].name)) {
                    find_camera = true;
                }
            }

            if (!find_camera)
            {
                Debug.Log("Unable to find camera: " + camera_name + "\n");
                Debug.Log("Open the first camera" + devices[0].name + "\n");
                camera_name = devices[0].name;
            }

            cam = new WebCamTexture(camera_name, width, height, fps);

            WebCamTexture.allowThreadedTextureCreation = true;
            WebCamTexture.streamingTextureDiscardUnusedMips = true;

            cam.Play();

            // The device may not honor the requested resolution; read back
            // the actual stream size before deriving the intrinsics mapping.
            width = cam.width;
            height = cam.height;

            // ******************************************************
            // Transfer Real Camera Intrinsic Parameters to Unity Physical Camera Parameters
            // sensorSize (mm) is chosen so that focal/sensor reproduces fx, fy
            // (pixels); lensShift is the principal-point offset from the image
            // center, normalized by the image dimensions.
            cam_obj.usePhysicalProperties = true;
            cam_obj.focalLength = focal;
            cam_obj.sensorSize = new Vector2(focal * width / fx,
                                             focal * height / fy);
            cam_obj.lensShift = new Vector2(-(cx - width * 0.5f) / width,
                                             (cy - height * 0.5f) / height);
            // ******************************************************

            Debug.Log(camera_name + " Running...\n");
        }
    }

    void Update()
    {
        if (cam == null || !cam.isPlaying) return;

        // The webcam typically runs slower than the engine; skip the
        // copy/upload entirely when no new frame arrived this tick.
        if (!cam.didUpdateThisFrame) return;

        if (cam_data == null)
        {
            cam_data = new Color32[width * height];
            tex = new Texture2D(width, height);
        }

        // Fill the preallocated buffer in place. The parameterless
        // GetPixels32() overload allocates a new array every call, which
        // defeats the buffer above and churns the GC each frame.
        cam.GetPixels32(cam_data);
        tex.SetPixels32(cam_data);
        tex.Apply();
        cambackground.texture = tex;

        fit.aspectRatio = (float)cam.width / (float)cam.height;
    }
}

其中 float focal 为任意设定的参数:标定只能确定以像素为单位的 fx、fy,而物理焦距与传感器尺寸是按比例耦合的(代码中 sensorSize 由 focal 反推得到),因此 focal 无法单独标定,一般选择一个接近真实镜头焦距的值即可。


  1. https://docs.opencv.org/master/dc/dbb/tutorial_py_calibration.html↩︎

  2. https://docs.unity3d.com/Manual/PhysicalCameras.html↩︎