gpt4 book ai didi

javascript - 如何发送和接收通过 getUsermedia() 生成的桌面捕获流

转载 作者:行者123 更新时间:2023-12-03 00:34:00 24 4
gpt4 key购买 nike

我正在使用 WebRTC + Socket.io 制作一个屏幕共享应用程序,但卡在一个地方。我已经通过 WebRTC + Socket.io 连接了两个浏览器,并且可以互相发送文本消息。

我参考了 codelab 的示例,但该方法不适用于媒体流。(如果解决方案基于该链接,将非常有帮助)

如何发送 getUserMedia() 流:

// BROKEN: a MediaStream cannot be serialized — the receiver only gets the
// string "[object MediaStream]". Media must be attached with addTrack (RTP).
dataChannel.send(stream);

并在 channel.onmessage() 中接收该流:但我得到的 event.data 只是字符串 “[object MediaStream]”,而不是流本身。

// Receiving side of the (broken) approach above: the data channel delivers
// only the stringified object, never the media itself.
channel.onmessage = function(event){
// unable to get correct stream:
// event.data is "[object MediaStream]" in string
// (MediaStream objects are not transferable over an SCTP data channel)
}

/**
 * Create the RTCPeerConnection and, for the initiator, the data channel
 * and the SDP offer. Relies on file-level globals: peerConn, dataChannel,
 * sendMessage, onDataChannelCreated, onLocalSessionCreated, logError.
 *
 * @param {boolean} isInitiator - true when this peer opens the channel/offer.
 * @param {RTCConfiguration} config - ICE server configuration.
 */
function createPeerConnection(isInitiator, config) {
  console.log('Creating Peer connection as initiator?', isInitiator, 'config:', config);
  peerConn = new RTCPeerConnection(config);

  // Trickle ICE: forward each local candidate to the remote peer via signaling.
  peerConn.onicecandidate = function (event) {
    console.log('onIceCandidate event:', event);
    if (event.candidate) {
      sendMessage({
        type: 'candidate',
        label: event.candidate.sdpMLineIndex,
        id: event.candidate.sdpMid,
        candidate: event.candidate.candidate
      });
    } else {
      console.log('End of candidates.');
    }
  };

  if (isInitiator) {
    console.log('Creating Data Channel');
    dataChannel = peerConn.createDataChannel("screen");
    onDataChannelCreated(dataChannel);

    console.log('Creating an offer');
    // Promise-based form; the two-callback createOffer(success, error)
    // overload is deprecated. Same callbacks, same failure path.
    peerConn.createOffer().then(onLocalSessionCreated).catch(logError);
  } else {
    // The non-initiator waits for the remote side to open the channel.
    peerConn.ondatachannel = function (event) {
      console.log('ondatachannel:', event.channel);
      dataChannel = event.channel;
      onDataChannelCreated(dataChannel);
    };
  }
}

它对于字符串或 json 工作正常,即 dataChannel.send('Hello');

我已经创建了一个相同的维基页面:wiki

请帮忙。

最佳答案

请尝试这样的操作:(代码末尾有说明)

// DOM handles used throughout the demo.
var btnShareYourCamera = document.querySelector('#share-your-camera');
var localVideo = document.querySelector('#local-video');
var remoteVideo = document.querySelector('#remote-video');

// Signaling channel: SDP offers/answers and ICE candidates travel as JSON.
var websocket = new WebSocket('wss://path-to-server:port/');
websocket.onmessage = function(event) {
  var message = JSON.parse(event.data);

  if (message.sdp) {
    switch (message.sdp.type) {
      case 'offer':
        // Remote peer wants to connect: capture our media, then answer.
        getUserMedia(function(video_stream) {
          localVideo.srcObject = video_stream;
          answererPeer(new RTCSessionDescription(message.sdp), video_stream);
        });
        break;
      case 'answer':
        // Our offer was accepted; complete the handshake.
        offerer.setRemoteDescription(new RTCSessionDescription(message.sdp));
        break;
    }
  }

  if (message.candidate) {
    addIceCandidate((offerer || answerer), new RTCIceCandidate(message.candidate));
  }
};

// ICE knobs: iceTransportPolicy feeds the RTCConfiguration below;
// iceTransportLimitation filters candidates by transport ('udp' or 'tcp').
var iceTransportPolicy = 'all';
var iceTransportLimitation = 'udp';

/**
 * Add a remote ICE candidate to a peer connection, honoring the transport
 * limitation above (when 'tcp', candidates without "tcp" in their SDP
 * string are dropped).
 *
 * @param {RTCPeerConnection} peer - connection receiving the candidate.
 * @param {RTCIceCandidate} candidate - remote candidate from signaling.
 */
function addIceCandidate(peer, candidate) {
  if (iceTransportLimitation === 'tcp') {
    if (candidate.candidate.toLowerCase().indexOf('tcp') === -1) {
      return; // ignore UDP (non-TCP) candidates
    }
  }

  // addIceCandidate returns a promise; report failures instead of leaving
  // an unhandled rejection.
  peer.addIceCandidate(candidate).catch(function(err) {
    console.error('addIceCandidate failed:', err);
  });
}

// Global peer-connection handles; only one of the two exists per page role.
var offerer, answerer;

// RTCPeerConnection configuration: public Google STUN servers plus the
// policy knobs defined earlier in this file.
var iceServers = {
iceServers: [{
'urls': [
'stun:stun.l.google.com:19302',
'stun:stun1.l.google.com:19302',
'stun:stun2.l.google.com:19302',
'stun:stun.l.google.com:19302?transport=udp',
]
}],
iceTransportPolicy: iceTransportPolicy,
rtcpMuxPolicy: 'require',
bundlePolicy: 'max-bundle'
};

// https://cdn.webrtc-experiment.com/IceServersHandler.js
// Optional override: when IceServersHandler is loaded, use its server list.
if (typeof IceServersHandler !== 'undefined') {
iceServers.iceServers = IceServersHandler.getIceServers();
}

// NOTE(review): these are legacy-cased option names; the standardized
// (and itself deprecated) createOffer option spelling is
// offerToReceiveAudio/offerToReceiveVideo — confirm target browsers
// still honor this form. Tracks are added via addTrack anyway.
var mediaConstraints = {
OfferToReceiveAudio: true,
OfferToReceiveVideo: true
};

/* offerer */

/**
 * Create the offering RTCPeerConnection: attach the local stream's tracks,
 * wire remote-track and ICE handlers, then create the SDP offer and send it
 * over the signaling websocket.
 *
 * @param {MediaStream} video_stream - local camera/screen stream to share.
 */
function offererPeer(video_stream) {
  offerer = new RTCPeerConnection(iceServers);
  offerer.idx = 1;

  // Media must travel over RTP: attach each track with addTrack
  // (never dataChannel.send).
  video_stream.getTracks().forEach(function(track) {
    offerer.addTrack(track, video_stream);
  });

  // Remote media arrives here once the answerer adds its own tracks.
  offerer.ontrack = function(event) {
    remoteVideo.srcObject = event.streams[0];
  };

  // Trickle ICE: relay each local candidate through the signaling channel.
  offerer.onicecandidate = function(event) {
    if (!event || !event.candidate) return;
    websocket.send(JSON.stringify({
      candidate: event.candidate
    }));
  };

  // Flattened chain with a terminal catch so negotiation failures are
  // logged instead of becoming unhandled promise rejections.
  offerer.createOffer(mediaConstraints)
    .then(function(offer) {
      return offerer.setLocalDescription(offer).then(function() {
        websocket.send(JSON.stringify({
          sdp: offer
        }));
      });
    })
    .catch(function(err) {
      console.error('offererPeer negotiation failed:', err);
    });
}

/* answerer */

/**
 * Create the answering RTCPeerConnection: attach the local stream's tracks,
 * wire remote-track and ICE handlers, apply the remote offer, then create
 * and send the SDP answer over the signaling websocket.
 *
 * @param {RTCSessionDescription} offer - remote SDP offer.
 * @param {MediaStream} video_stream - local stream to share back.
 */
function answererPeer(offer, video_stream) {
  answerer = new RTCPeerConnection(iceServers);
  answerer.idx = 2;

  // Attach local media via addTrack so it is carried over RTP.
  video_stream.getTracks().forEach(function(track) {
    answerer.addTrack(track, video_stream);
  });

  // Remote media from the offerer lands here.
  answerer.ontrack = function(event) {
    remoteVideo.srcObject = event.streams[0];
  };

  // Trickle ICE: relay each local candidate through the signaling channel.
  answerer.onicecandidate = function(event) {
    if (!event || !event.candidate) return;
    websocket.send(JSON.stringify({
      candidate: event.candidate
    }));
  };

  // setRemoteDescription -> createAnswer -> setLocalDescription -> send,
  // flattened with a terminal catch so failures are not silently dropped.
  answerer.setRemoteDescription(offer)
    .then(function() {
      return answerer.createAnswer(mediaConstraints);
    })
    .then(function(answer) {
      return answerer.setLocalDescription(answer).then(function() {
        websocket.send(JSON.stringify({
          sdp: answer
        }));
      });
    })
    .catch(function(err) {
      console.error('answererPeer negotiation failed:', err);
    });
}

// Legacy constraints shape; the modern call below uses the plain
// { video, audio } form instead. Kept for backward compatibility.
var video_constraints = {
mandatory: {},
optional: []
};

/**
 * Request camera + microphone via the promise-based mediaDevices API.
 *
 * @param {function(MediaStream)} successCallback - invoked with the stream.
 */
function getUserMedia(successCallback) {
  function errorCallback(e) {
    // JSON.stringify on a DOMException yields "{}" (its properties are
    // non-enumerable), so report name and message explicitly.
    alert(e && e.name ? e.name + ': ' + e.message : String(e));
  }

  var mediaConstraints = {
    video: true,
    audio: true
  };

  navigator.mediaDevices.getUserMedia(mediaConstraints).then(successCallback).catch(errorCallback);
}

// Entry point: clicking the button captures local media, previews it in the
// local <video> element, and starts the offering side of the connection.
btnShareYourCamera.onclick = function() {
  getUserMedia(function(stream) {
    localVideo.srcObject = stream;
    offererPeer(stream);
  });
};
  1. 您必须使用 peer.addTrack 附加流,如上面的示例所示
  2. 您必须使用 peer.ontrack 接收远程流,如上例所示

即使用 addTrack 连接相机并使用 ontrack 接收远程相机。

您绝不能使用 dataChannel.send 发送媒体流,两者使用完全不同的协议:MediaStream 必须通过 RTP 共享,而数据通道走的是 SCTP。只有调用 peer.addTrack 方法附加摄像头流时,媒体才会通过 RTP 传输。

此过程发生在您打开或加入房间之前。

在此处查看单页演示:https://www.webrtc-experiment.com/getStats/

上述代码片段的 HTML:

<button id="share-your-camera"></button>
<video id="local-video" controls autoplay playsinline></video>
<video id="remote-video" controls autoplay playsinline></video>

关于javascript - 如何发送和接收通过 getUsermedia() 生成的桌面捕获流,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/53731348/

24 4 0
Copyright 2021 - 2024 cfsdn All Rights Reserved 蜀ICP备2022000587号
广告合作:1813099741@qq.com 6ren.com