Web-based AR Learning

Approach

  • Get access to the mobile device's camera
  • Capture the video stream and display it on the page (a page skeleton is sketched after this list)
  • Render graphics on top of the video feed
  • Analyze the video source and recognize a specific marker
  • Display a model on top of the marker
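
As a rough sketch of the first three steps, the page typically layers a canvas over a hidden video element. The element ids and inline style below are illustrative assumptions, not from the original demo.

<!-- Illustrative page skeleton: the raw camera feed lands in a hidden
     <video>, and frames are drawn / rendered onto the <canvas>. -->
<video id="camera" autoplay playsinline style="display: none;"></video>
<canvas id="output"></canvas>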

Getting the Camera

WebRTC

WebRTC consists of three main parts:

  • MediaStream
  • RTCPeerConnection
  • RTCDataChannel

Here we use getUserMedia to obtain the MediaStream.

MDN: https://developer.mozilla.org/zh-CN/docs/Web/API/WebRTC_API
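
A minimal sketch of pulling the camera stream into the page, using the modern promise-based navigator.mediaDevices.getUserMedia (the 'camera' element id is an assumption):

// Request the rear camera and attach the resulting MediaStream to a <video>.
navigator.mediaDevices.getUserMedia({ video: { facingMode: 'environment' }, audio: false })
  .then(function (stream) {
    var video = document.getElementById('camera'); // assumed id
    video.srcObject = stream;
    return video.play();
  })
  .catch(function (err) {
    console.error('getUserMedia failed: ' + err.name);
  });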

Example

Capturing a still image on a webpage: https://github.com/mdn/samples-server/tree/master/s/webrtc-capturestill
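
The heart of that sample is drawing the current video frame onto a canvas and reading it back, condensed below (element ids assumed):

// Draw the current video frame onto a canvas and read it back as a data URL.
function takeSnapshot() {
  var video = document.getElementById('camera');  // assumed id
  var canvas = document.getElementById('output'); // assumed id
  canvas.width = video.videoWidth;
  canvas.height = video.videoHeight;
  canvas.getContext('2d').drawImage(video, 0, 0, canvas.width, canvas.height);
  return canvas.toDataURL('image/png'); // e.g. set as an <img> src
}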

Handling Compatibility

getUserMedia

* Since Chrome 47, the camera can only be opened from HTTPS pages.

// Normalize the vendor-prefixed getUserMedia implementations.
navigator.getMedia = (navigator.getUserMedia ||
                      navigator.webkitGetUserMedia ||
                      navigator.mozGetUserMedia ||
                      navigator.msGetUserMedia);
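
The normalized function keeps the legacy callback signature; a usage sketch (the element id is an assumption):

// Usage sketch for the legacy callback-style API normalized above.
navigator.getMedia(
  { video: true, audio: false },
  function (stream) {
    var video = document.getElementById('camera'); // assumed id
    if ('srcObject' in video) {
      video.srcObject = stream;
    } else {
      video.src = window.URL.createObjectURL(stream); // older browsers
    }
    video.play();
  },
  function (err) {
    console.error('Camera error: ' + err.name); // e.g. permission denied
  }
);
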
Required Libraries

This demo uses artoolkit, a relatively mature augmented reality library.

Model Formats
  • MMD (.pmd/.pmx): MMDLoader (a loading sketch follows this list)
  • .obj: OBJLoader
  • .mtl: MTLLoader
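
The demo below covers an OBJ/MTL pair; for an MMD model, a minimal sketch might look like the following, assuming a three.js build whose MMDLoader passes the constructed mesh straight to the callback (the file name and scale are placeholders):

// Minimal MMD loading sketch; 'model.pmx' is a placeholder file name.
var mmdLoader = new THREE.MMDLoader();
mmdLoader.load('model.pmx', function (mesh) {
  mesh.scale.multiplyScalar(0.1); // MMD models tend to be large; scale to taste
  scene.add(mesh);                // 'scene' is your THREE.Scene
});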
Demo
window.ARThreeOnLoad = function() {
  ARController.getUserMediaThreeScene({
    maxARVideoSize: 320,
    cameraParam: './camera_para-iPhone.dat',
    facing: { exact: 'environment' },
    onSuccess: function(arScene, arController, arCamera) {
      document.body.className = arController.orientation;

      // Size the renderer to match the video feed, compensating for orientation.
      var renderer = new THREE.WebGLRenderer({ antialias: true });
      if (arController.orientation === 'portrait') {
        var w = (window.innerWidth / arController.videoHeight) * arController.videoWidth;
        var h = window.innerWidth;
        renderer.setSize(w, h);
        renderer.domElement.style.paddingBottom = (w - h) + 'px';
      } else {
        if (/Android|mobile|iPad|iPhone/i.test(navigator.userAgent)) {
          renderer.setSize(window.innerWidth, (window.innerWidth / arController.videoWidth) * arController.videoHeight);
        } else {
          renderer.setSize(arController.videoWidth, arController.videoHeight);
          document.body.className += ' desktop';
        }
      }
      document.body.insertBefore(renderer.domElement, document.body.firstChild);

      // Create a couple of lights for our AR scene.
      var light = new THREE.PointLight(0xffffff);
      light.position.set(40, 40, 40);
      arScene.scene.add(light);
      var light2 = new THREE.PointLight(0xff8800);
      light2.position.set(-40, -20, -30);
      arScene.scene.add(light2);

      // Container node; the loaded model hangs off this, and it is later
      // re-parented under the marker root once the marker is registered.
      var markerObject3D = new THREE.Object3D();
      arScene.scene.add(markerObject3D);

      // Load the MTL materials first, then the OBJ geometry that uses them.
      var mtlLoader = new THREE.MTLLoader();
      mtlLoader.setPath('/');
      mtlLoader.load('final-diandiandian.mtl', function(materials) {
        materials.preload();
        var objLoader = new THREE.OBJLoader();
        objLoader.setMaterials(materials);
        objLoader.setPath('/');
        objLoader.load('final-diandiandian.obj', function(object) {
          object.scale.set(2, 2, 2).multiplyScalar(1 / 20);
          object.rotation.x = Math.PI / 2;
          markerObject3D.add(object);
        }, onProgress, onError);
      });

      function onProgress(xhr) {
        console.log((xhr.loaded / xhr.total * 100) + '% loaded');
      }
      function onError() {
        console.log('An error happened');
      }

      // On each tap, rotate the model a quarter turn around its z-axis;
      // after a full turn, show the red-packet image.
      var flag = 1;
      renderer.domElement.addEventListener('click', function(ev) {
        markerObject3D.rotation.z = flag * Math.PI / 2;
        if (flag > 4) {
          var imgNode = document.createElement('img');
          imgNode.src = './red.png';
          imgNode.className = 'redbag';
          document.body.appendChild(imgNode);
        } else {
          flag++;
        }
      }, false);

      // Register the marker pattern and hang the model off its root node.
      arController.loadMarker('meituan.patt', function(markerId) {
        var markerRoot = arController.createThreeMarker(markerId);
        markerRoot.add(markerObject3D);
        arScene.scene.add(markerRoot);
      });

      // Per-frame loop: run marker detection, then render the scene.
      var tick = function() {
        arScene.process();
        arScene.renderOn(renderer);
        requestAnimationFrame(tick);
      };
      tick();
    }
  });
  delete window.ARThreeOnLoad;
};

if (window.ARController && ARController.getUserMediaThreeScene) {
  ARThreeOnLoad();
}
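
For this to run, ARController and getUserMediaThreeScene must already be on the page; they come from JSARToolKit's three.js bridge, which calls window.ARThreeOnLoad once the library is ready. A hypothetical include order (paths are assumptions):

<!-- Hypothetical script includes; paths are assumptions. -->
<script src="js/three.min.js"></script>
<script src="js/MTLLoader.js"></script>
<script src="js/OBJLoader.js"></script>
<script src="js/artoolkit.min.js"></script>
<script src="js/artoolkit.three.js"></script>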
References