Video Streams and RTCPeerConnection

WebRTC uses RTCPeerConnection to stream audio and video between peers.

What you'll learn

  • Differences in WebRTC support between browsers, and the adapter.js shim
  • Streaming video with the RTCPeerConnection API
  • Controlling and capturing streams

Introduction to RTCPeerConnection

RTCPeerConnection is the API that enables WebRTC clients to exchange video and audio data. In this example, RTCPeerConnection sets up a peer-to-peer connection between two endpoints on the same page. That isn't very useful in practice, but it makes the mechanism easy to understand.

Add video elements and control buttons to the page

One video element will display the stream from getUserMedia(), and the other will show the same video streamed via RTCPeerConnection. (In a real application, one video element would display the local stream and the other the remote stream.)

<video id="localVideo" autoplay playsinline></video>
<video id="remoteVideo" autoplay playsinline></video>

<div>
  <button id="startButton">Start</button>
  <button id="callButton">Call</button>
  <button id="hangupButton">Hang Up</button>
</div>

Smoothing over browser differences

Add adapter.js before any other JavaScript on the page:

<script src="https://webrtc.github.io/adapter/adapter-latest.js"></script>
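
adapter.js papers over prefix and API differences between browsers, so the same code runs everywhere. As a rough illustration of what it saves you from writing, here is the kind of feature detection old code needed before the standard promise-based API was universal. This is a sketch for illustration only (the prefixed names are legacy APIs of old Chrome and Firefox); with adapter.js loaded you simply call navigator.mediaDevices.getUserMedia():

// Hand-rolled fallback you would need WITHOUT adapter.js (illustrative only):
const getUserMedia = (navigator.mediaDevices && navigator.mediaDevices.getUserMedia)
  ? (constraints) => navigator.mediaDevices.getUserMedia(constraints)
  : (constraints) => new Promise((resolve, reject) =>
      // Legacy, callback-based, vendor-prefixed API (old Chrome/Firefox).
      (navigator.webkitGetUserMedia || navigator.mozGetUserMedia)
        .call(navigator, constraints, resolve, reject));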

Add the RTCPeerConnection code

<!DOCTYPE html>
<html>
<head>
<title>demo2</title>
<style>
</style>
</head>
<body>

<video id="localVideo" autoplay playsinline></video>
<video id="remoteVideo" autoplay playsinline></video>

<div>
  <button id="startButton">Start</button>
  <button id="callButton">Call</button>
  <button id="hangupButton">Hang Up</button>
</div>

<div class="box">
  <span>SDP Semantics:</span>
  <select id="sdpSemantics">
    <option selected value="">Default</option>
    <option value="unified-plan">Unified Plan</option>
    <option value="plan-b">Plan B</option>
  </select>
</div>

<script src="https://webrtc.github.io/adapter/adapter-latest.js"></script>
<script type="text/javascript">
'use strict';

const startButton = document.getElementById('startButton');
const callButton = document.getElementById('callButton');
const hangupButton = document.getElementById('hangupButton');
callButton.disabled = true;
hangupButton.disabled = true;
startButton.addEventListener('click', start);
callButton.addEventListener('click', call);
hangupButton.addEventListener('click', hangup);

let startTime;
const localVideo = document.getElementById('localVideo');
const remoteVideo = document.getElementById('remoteVideo');

localVideo.addEventListener('loadedmetadata', function() {
  console.log(`Local video videoWidth: ${this.videoWidth}px, videoHeight: ${this.videoHeight}px`);
});

remoteVideo.addEventListener('loadedmetadata', function() {
  console.log(`Remote video videoWidth: ${this.videoWidth}px, videoHeight: ${this.videoHeight}px`);
});

remoteVideo.addEventListener('resize', () => {
  console.log(`Remote video size changed to ${remoteVideo.videoWidth}x${remoteVideo.videoHeight}`);
  // We'll use the first resize callback as an indication that video has
  // started playing out.
  if (startTime) {
    const elapsedTime = window.performance.now() - startTime;
    console.log('Setup time: ' + elapsedTime.toFixed(3) + 'ms');
    startTime = null;
  }
});

let localStream;
let pc1; // the "caller" peer connection
let pc2; // the "callee" peer connection
const offerOptions = {
  offerToReceiveAudio: 1,
  offerToReceiveVideo: 1
};

function getName(pc) {
  return (pc === pc1) ? 'pc1' : 'pc2';
}

function getOtherPc(pc) {
  return (pc === pc1) ? pc2 : pc1;
}

async function start() {
  console.log('Requesting local stream');
  startButton.disabled = true;
  try {
    const stream = await navigator.mediaDevices.getUserMedia({audio: true, video: true});
    console.log('Received local stream');
    localVideo.srcObject = stream;
    localStream = stream;
    callButton.disabled = false;
  } catch (e) {
    alert(`getUserMedia() error: ${e.name}`);
  }
}

function getSelectedSdpSemantics() {
  const sdpSemanticsSelect = document.querySelector('#sdpSemantics');
  const option = sdpSemanticsSelect.options[sdpSemanticsSelect.selectedIndex];
  return option.value === '' ? {} : {sdpSemantics: option.value};
}

async function call() {
  callButton.disabled = true;
  hangupButton.disabled = false;
  console.log('Starting call');
  startTime = window.performance.now();
  const videoTracks = localStream.getVideoTracks();
  const audioTracks = localStream.getAudioTracks();
  if (videoTracks.length > 0) {
    console.log(`Using video device: ${videoTracks[0].label}`);
  }
  if (audioTracks.length > 0) {
    console.log(`Using audio device: ${audioTracks[0].label}`);
  }
  const configuration = getSelectedSdpSemantics();
  console.log('RTCPeerConnection configuration:', configuration);
  pc1 = new RTCPeerConnection(configuration);
  console.log('Created local peer connection object pc1');
  pc1.addEventListener('icecandidate', e => onIceCandidate(pc1, e));
  pc2 = new RTCPeerConnection(configuration);
  console.log('Created remote peer connection object pc2');
  pc2.addEventListener('icecandidate', e => onIceCandidate(pc2, e));
  pc1.addEventListener('iceconnectionstatechange', e => onIceStateChange(pc1, e));
  pc2.addEventListener('iceconnectionstatechange', e => onIceStateChange(pc2, e));
  pc2.addEventListener('track', gotRemoteStream);

  localStream.getTracks().forEach(track => pc1.addTrack(track, localStream));
  console.log('Added local stream to pc1');

  try {
    console.log('pc1 createOffer start');
    const offer = await pc1.createOffer(offerOptions);
    await onCreateOfferSuccess(offer);
  } catch (e) {
    onCreateSessionDescriptionError(e);
  }
}

function onCreateSessionDescriptionError(error) {
  console.log(`Failed to create session description: ${error.toString()}`);
}

async function onCreateOfferSuccess(desc) {
  console.log(`Offer from pc1\n${desc.sdp}`);
  console.log('pc1 setLocalDescription start');
  try {
    await pc1.setLocalDescription(desc);
    onSetLocalSuccess(pc1);
  } catch (e) {
    onSetSessionDescriptionError(e);
  }

  console.log('pc2 setRemoteDescription start');
  try {
    await pc2.setRemoteDescription(desc);
    onSetRemoteSuccess(pc2);
  } catch (e) {
    onSetSessionDescriptionError(e);
  }

  console.log('pc2 createAnswer start');
  // Since the 'remote' side has no media stream, we need to pass in the
  // right constraints in order for it to accept the incoming offer of
  // audio and video.
  try {
    const answer = await pc2.createAnswer();
    await onCreateAnswerSuccess(answer);
  } catch (e) {
    onCreateSessionDescriptionError(e);
  }
}

function onSetLocalSuccess(pc) {
  console.log(`${getName(pc)} setLocalDescription complete`);
}

function onSetRemoteSuccess(pc) {
  console.log(`${getName(pc)} setRemoteDescription complete`);
}

function onSetSessionDescriptionError(error) {
  console.log(`Failed to set session description: ${error.toString()}`);
}

function gotRemoteStream(e) {
  if (remoteVideo.srcObject !== e.streams[0]) {
    remoteVideo.srcObject = e.streams[0];
    console.log('pc2 received remote stream');
  }
}

async function onCreateAnswerSuccess(desc) {
  console.log(`Answer from pc2:\n${desc.sdp}`);
  console.log('pc2 setLocalDescription start');
  try {
    await pc2.setLocalDescription(desc);
    onSetLocalSuccess(pc2);
  } catch (e) {
    onSetSessionDescriptionError(e);
  }
  console.log('pc1 setRemoteDescription start');
  try {
    await pc1.setRemoteDescription(desc);
    onSetRemoteSuccess(pc1);
  } catch (e) {
    onSetSessionDescriptionError(e);
  }
}

async function onIceCandidate(pc, event) {
  try {
    // In this single-page demo, candidates are handed directly to the other
    // peer connection instead of going through a signaling channel.
    await (getOtherPc(pc).addIceCandidate(event.candidate));
    onAddIceCandidateSuccess(pc);
  } catch (e) {
    onAddIceCandidateError(pc, e);
  }
  console.log(`${getName(pc)} ICE candidate:\n${event.candidate ? event.candidate.candidate : '(null)'}`);
}

function onAddIceCandidateSuccess(pc) {
  console.log(`${getName(pc)} addIceCandidate success`);
}

function onAddIceCandidateError(pc, error) {
  console.log(`${getName(pc)} failed to add ICE candidate: ${error.toString()}`);
}

function onIceStateChange(pc, event) {
  if (pc) {
    console.log(`${getName(pc)} ICE state: ${pc.iceConnectionState}`);
    console.log('ICE state change event: ', event);
  }
}

function hangup() {
  console.log('Ending call');
  pc1.close();
  pc2.close();
  pc1 = null;
  pc2 = null;
  hangupButton.disabled = true;
  callButton.disabled = false;
}
</script>

</body>
</html>

How it works

WebRTC uses the RTCPeerConnection API to set up a connection between RTC clients and stream video between them. This is peer-to-peer communication.

A WebRTC peer-to-peer call is set up in three steps:

  • Add an RTCPeerConnection object at each end of the session, and obtain the local video stream with getUserMedia().
  • Get and share network information: potential connection endpoints are known as ICE candidates.
  • Get and share local and remote descriptions: metadata about the local media, in SDP format.

Imagine that Alice and Bob want to use RTCPeerConnection to set up a video chat.
First, Alice and Bob exchange network information. The phrase 'finding candidates' refers to the process of discovering network interfaces and ports with the ICE framework.

1. Alice creates an RTCPeerConnection object with an onicecandidate handler (addEventListener('icecandidate')). This corresponds to the following code from main.js:
let localPeerConnection;
// As part of this process, the WebRTC APIs use STUN servers to get the
// computer's IP address, and TURN servers to act as relay servers in case
// peer-to-peer communication fails. The servers argument holds that
// STUN/TURN configuration; see the sketch below.
localPeerConnection = new RTCPeerConnection(servers);
localPeerConnection.addEventListener('icecandidate', handleConnection);
localPeerConnection.addEventListener('iceconnectionstatechange', handleConnectionChange);
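
The servers argument is not defined in this excerpt. A minimal sketch of what it might contain (the STUN URL below is Google's public test server, used here purely for illustration):

const servers = {
  iceServers: [
    {urls: 'stun:stun.l.google.com:19302'}
  ]
};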

2. Alice calls getUserMedia(), then adds the local stream that's passed to it:

navigator.mediaDevices.getUserMedia(mediaStreamConstraints)
  .then(gotLocalMediaStream)
  .catch(handleLocalMediaStreamError);

function gotLocalMediaStream(mediaStream) {
  localVideo.srcObject = mediaStream;
  localStream = mediaStream;
  trace('Received local stream.');
  callButton.disabled = false; // Enable call button.
}

// Note: addStream() is deprecated; current code uses addTrack() instead,
// as in the full example above.
localPeerConnection.addStream(localStream);
trace('Added local stream to localPeerConnection.');
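
The mediaStreamConstraints object isn't shown in this excerpt either. A minimal sketch, assuming the demo requests both audio and video:

// Hypothetical constraints object (not defined in the excerpt above).
const mediaStreamConstraints = {
  video: true,
  audio: true
};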

3. When network candidates become available, the onicecandidate handler from step 1 is called.

4. Alice sends serialized candidate data to Bob. In a real application this process, known as signaling, takes place via a messaging service; you'll learn how to do that in a later step. In this step, of course, the two RTCPeerConnection objects are on the same page and can communicate directly, so no external messaging is needed.

5. When Bob gets a candidate message from Alice, he calls addIceCandidate() to add the candidate to the remote peer description:

function handleConnection(event) {
  const peerConnection = event.target;
  const iceCandidate = event.candidate;

  if (iceCandidate) {
    const newIceCandidate = new RTCIceCandidate(iceCandidate);
    const otherPeer = getOtherPeer(peerConnection);

    otherPeer.addIceCandidate(newIceCandidate)
      .then(() => {
        handleConnectionSuccess(peerConnection);
      }).catch((error) => {
        handleConnectionFailure(peerConnection, error);
      });

    trace(`${getPeerName(peerConnection)} ICE candidate:\n` +
          `${event.candidate.candidate}.`);
  }
}
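
In this demo the candidate is handed straight to the other RTCPeerConnection on the same page. In a real application it would cross a signaling channel instead. A minimal sketch, assuming a hypothetical signaling object with a send() method and an onmessage callback (both names invented for illustration):

// Sender: serialize each candidate and push it over the signaling channel.
pcLocal.addEventListener('icecandidate', (event) => {
  if (event.candidate) {
    signaling.send(JSON.stringify({ice: event.candidate}));
  }
});

// Receiver: deserialize the message and hand the candidate to the connection.
signaling.onmessage = async (message) => {
  const data = JSON.parse(message.data);
  if (data.ice) {
    await pcRemote.addIceCandidate(data.ice);
  }
};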

WebRTC peers also need to find out and exchange local and remote audio and video media information, such as resolution and codec capabilities. Signaling to exchange media configuration information proceeds by exchanging blobs of metadata, known as an offer and an answer, using the Session Description Protocol (SDP) format:

1. Alice runs the RTCPeerConnection createOffer() method. The promise it returns provides an RTCSessionDescription: Alice's local session description:

trace('localPeerConnection createOffer start.');
localPeerConnection.createOffer(offerOptions)
  .then(createdOffer)
  .catch(setSessionDescriptionError);

2. If that's successful, Alice sets the local description using setLocalDescription() and then sends this session description to Bob via their signaling channel.

3. Bob sets the description Alice sent him as the remote description, using setRemoteDescription().

4. Bob runs the RTCPeerConnection createAnswer() method, passing it the remote description he got from Alice, so a local session can be generated that is compatible with hers. The createAnswer() promise passes on an RTCSessionDescription, which Bob sets as the local description and sends to Alice.

5. When Alice gets Bob's session description, she sets it as the remote description with setRemoteDescription():

// Logs offer creation and sets peer connection session descriptions.
function createdOffer(description) {
  trace(`Offer from localPeerConnection:\n${description.sdp}`);

  trace('localPeerConnection setLocalDescription start.');
  localPeerConnection.setLocalDescription(description)
    .then(() => {
      setLocalDescriptionSuccess(localPeerConnection);
    }).catch(setSessionDescriptionError);

  trace('remotePeerConnection setRemoteDescription start.');
  remotePeerConnection.setRemoteDescription(description)
    .then(() => {
      setRemoteDescriptionSuccess(remotePeerConnection);
    }).catch(setSessionDescriptionError);

  trace('remotePeerConnection createAnswer start.');
  remotePeerConnection.createAnswer()
    .then(createdAnswer)
    .catch(setSessionDescriptionError);
}

// Logs answer to offer creation and sets peer connection session descriptions.
function createdAnswer(description) {
  trace(`Answer from remotePeerConnection:\n${description.sdp}.`);

  trace('remotePeerConnection setLocalDescription start.');
  remotePeerConnection.setLocalDescription(description)
    .then(() => {
      setLocalDescriptionSuccess(remotePeerConnection);
    }).catch(setSessionDescriptionError);

  trace('localPeerConnection setRemoteDescription start.');
  localPeerConnection.setRemoteDescription(description)
    .then(() => {
      setRemoteDescriptionSuccess(localPeerConnection);
    }).catch(setSessionDescriptionError);
}

6. Ping!
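
The same caveat applies to the offer and answer: here they pass between two objects on one page, but in a real application each description would travel over signaling. A sketch using the same hypothetical signaling object as in the ICE example above:

// Caller: create an offer, set it locally, and send it to the callee.
async function startCall(pcLocal, signaling) {
  const offer = await pcLocal.createOffer();
  await pcLocal.setLocalDescription(offer);
  signaling.send(JSON.stringify({description: pcLocal.localDescription}));
}

// Callee: set the offer as the remote description, then answer it.
async function handleOffer(pcRemote, description, signaling) {
  await pcRemote.setRemoteDescription(description);
  const answer = await pcRemote.createAnswer();
  await pcRemote.setLocalDescription(answer);
  signaling.send(JSON.stringify({description: pcRemote.localDescription}));
}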

Key points

1. Take a look at chrome://webrtc-internals, which provides WebRTC statistics and debugging data. (A full list of Chrome URLs is at chrome://about.) The same statistics can also be read in code; see the sketch after this list.

2. Style the page with CSS:

  • Put the videos side by side.
  • Make the buttons the same width, with bigger text.
  • Make sure the layout works on mobile devices.

3. In the Chrome DevTools console, inspect localStream, localPeerConnection, and remotePeerConnection.

4. In the console, take a look at pc1.localDescription. What does the SDP format look like?
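
As promised in point 1, the statistics shown by chrome://webrtc-internals are also available programmatically through getStats(). A minimal sketch against the pc1 object from the full example above:

// Dump the type and id of every stats report for pc1.
async function dumpStats() {
  const stats = await pc1.getStats();
  stats.forEach(report => console.log(report.type, report.id));
}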

Tips

  • There's a lot to learn in this step! To find other resources that explain RTCPeerConnection in more detail, take a look at webrtc.org/start. That page also suggests JavaScript frameworks, in case you'd like to use WebRTC but would rather not wrangle the APIs directly.
  • Find out more about the adapter.js shim from the adapter.js GitHub repo.
  • Want to see what the world's best video chat app looks like? Take a look at AppRTC, the WebRTC project's canonical app for WebRTC calls: app, code. Call setup time is less than 500 ms.