gpt4 book ai didi

ffmpeg - 将 Webrtc 轨道流转换为 Video 标签中的 URL (RTSP/UDP/RTP/Http)

转载 作者:行者123 更新时间:2023-12-04 23:12:33 26 4
gpt4 key购买 nike

我是 WebRTC 的新手,我已经完成了客户端/服务器连接,从客户端我选择 WebCam 并使用 Track 将流发布到服务器,在服务器端我正在获取该轨道并将轨道流分配给视频源。到目前为止一切都很好,但问题是现在我包括 AI(人工智能),现在我想将我的跟踪流转换为 URL,可能是 UDP/RTSP/RTP 等。所以 AI 将使用该 URL 进行对象检测。我不知道我们如何将轨道流转换为 URL。
虽然有几个像 https://ffmpeg.org/ 这样的包和 RTP 到 Webrtc 等,我正在使用 Nodejs、Socket.io 和 Webrtc,您可以在下面查看我的客户端和服务器端代码以获取和发布流,我正在关注 github 代码 https://github.com/Basscord/webrtc-video-broadcast .
现在我主要关心的是跟踪作为视频标签的 URL,是否可能或请建议,任何帮助将不胜感激。
服务器.js
这是nodejs服务器代码

const express = require("express");
const app = express();

// Socket id of the current broadcaster; undefined until one registers.
let broadcaster;
const port = 4000;

const http = require("http");
const server = http.createServer(app);

const io = require("socket.io")(server);
app.use(express.static(__dirname + "/public"));

io.sockets.on("error", (e) => console.log(e));
io.sockets.on("connection", (socket) => {
  // A client announces itself as the (single) broadcaster; every other
  // connected client is told so it can (re)issue a "watcher" request.
  socket.on("broadcaster", () => {
    broadcaster = socket.id;
    socket.broadcast.emit("broadcaster");
  });
  // A viewer asks to watch: forward its id to the broadcaster so the
  // broadcaster can open a dedicated RTCPeerConnection for it.
  socket.on("watcher", () => {
    // Guard: a watcher may connect before any broadcaster has registered.
    if (broadcaster) {
      socket.to(broadcaster).emit("watcher", socket.id);
    }
  });
  // The next three handlers just relay SDP offers/answers and ICE
  // candidates between the two peers identified by socket id.
  socket.on("offer", (id, message) => {
    socket.to(id).emit("offer", socket.id, message);
  });
  socket.on("answer", (id, message) => {
    socket.to(id).emit("answer", socket.id, message);
  });
  socket.on("candidate", (id, message) => {
    socket.to(id).emit("candidate", socket.id, message);
  });
  socket.on("disconnect", () => {
    // Tell the broadcaster (if any) that this peer went away.
    if (broadcaster) {
      socket.to(broadcaster).emit("disconnectPeer", socket.id);
    }
    // Forget the broadcaster when it is the one that disconnected,
    // so stale ids are never used for routing.
    if (socket.id === broadcaster) {
      broadcaster = undefined;
    }
  });
});
server.listen(port, () => console.log(`Server is running on port ${port}`));

广播.js
这是发射流(轨道)的代码

// One RTCPeerConnection per connected watcher, keyed by its socket id.
const peerConnections = {};
const config = {
  iceServers: [
    {
      urls: ["stun:stun.l.google.com:19302"]
    }
  ]
};

const socket = io.connect(window.location.origin);

// A watcher answered our offer: complete the handshake for that peer.
// Guarded with ?. in case the peer was already removed, and .catch so a
// failed setRemoteDescription is logged instead of silently rejected.
socket.on("answer", (id, description) => {
  peerConnections[id]
    ?.setRemoteDescription(description)
    .catch((e) => console.error(e));
});

// A new watcher appeared: create a connection, attach every local track
// of the currently previewed stream, and send it an SDP offer.
socket.on("watcher", (id) => {
  const peerConnection = new RTCPeerConnection(config);
  peerConnections[id] = peerConnection;

  const stream = videoElement.srcObject;
  stream.getTracks().forEach((track) => peerConnection.addTrack(track, stream));

  peerConnection.onicecandidate = (event) => {
    if (event.candidate) {
      socket.emit("candidate", id, event.candidate);
    }
  };

  peerConnection
    .createOffer()
    .then((sdp) => peerConnection.setLocalDescription(sdp))
    .then(() => {
      socket.emit("offer", id, peerConnection.localDescription);
    })
    .catch((e) => console.error(e)); // don't leave the offer chain floating
});

socket.on("candidate", (id, candidate) => {
  // Ignore candidates for peers we no longer (or don't yet) track.
  peerConnections[id]
    ?.addIceCandidate(new RTCIceCandidate(candidate))
    .catch((e) => console.error(e));
});

socket.on("disconnectPeer", (id) => {
  // Guard against duplicate disconnect notifications for the same id.
  if (peerConnections[id]) {
    peerConnections[id].close();
    delete peerConnections[id];
  }
});

window.onunload = window.onbeforeunload = () => {
  socket.close();
};

// Get camera and microphone
const videoElement = document.querySelector("video");
const audioSelect = document.querySelector("select#audioSource");
const videoSelect = document.querySelector("select#videoSource");

// Re-acquire the stream whenever the user picks a different device.
audioSelect.onchange = getStream;
videoSelect.onchange = getStream;

// Initial setup: grab a default stream first (so device labels become
// readable), then enumerate devices and fill the <select> menus.
// .catch added so a rejection in getDevices/gotDevices is not silently
// dropped as an unhandled promise rejection.
getStream()
  .then(getDevices)
  .then(gotDevices)
  .catch(handleError);

/**
 * List every media device the browser knows about.
 * @returns {Promise<MediaDeviceInfo[]>}
 */
function getDevices() {
  const { mediaDevices } = navigator;
  return mediaDevices.enumerateDevices();
}

/**
 * Populate the audio/video <select> menus from an enumerateDevices() result.
 * Falls back to generic "Microphone N" / "Camera N" labels when the browser
 * withholds device labels.
 * @param {MediaDeviceInfo[]} deviceInfos
 */
function gotDevices(deviceInfos) {
  window.deviceInfos = deviceInfos; // kept on window for later inspection
  deviceInfos.forEach((deviceInfo) => {
    const option = document.createElement("option");
    option.value = deviceInfo.deviceId;
    switch (deviceInfo.kind) {
      case "audioinput":
        option.text = deviceInfo.label || `Microphone ${audioSelect.length + 1}`;
        audioSelect.appendChild(option);
        break;
      case "videoinput":
        option.text = deviceInfo.label || `Camera ${videoSelect.length + 1}`;
        videoSelect.appendChild(option);
        break;
      default:
        // Other kinds (e.g. audiooutput) are not listed.
        break;
    }
  });
}

/**
 * Stop any active capture, then request a fresh stream for the devices
 * currently selected in the audio/video menus.
 * @returns {Promise} resolves through gotStream (or handleError on failure)
 */
function getStream() {
  const active = window.stream;
  if (active) {
    for (const track of active.getTracks()) {
      track.stop();
    }
  }
  // Ask for an exact device only when one is actually selected.
  const toConstraint = (deviceId) => ({
    deviceId: deviceId ? { exact: deviceId } : undefined
  });
  const constraints = {
    audio: toConstraint(audioSelect.value),
    video: toConstraint(videoSelect.value)
  };
  return navigator.mediaDevices
    .getUserMedia(constraints)
    .then(gotStream)
    .catch(handleError);
}

/**
 * Remember the new stream, sync the device menus with the tracks actually
 * in use, preview the stream locally, and announce ourselves as broadcaster.
 * @param {MediaStream} stream
 */
function gotStream(stream) {
  window.stream = stream;
  // A stream may lack an audio or video track (e.g. no microphone present);
  // the original indexed [0].label unconditionally and threw a TypeError.
  const audioLabel = stream.getAudioTracks()[0]?.label;
  const videoLabel = stream.getVideoTracks()[0]?.label;
  if (audioLabel) {
    audioSelect.selectedIndex = [...audioSelect.options].findIndex(
      (option) => option.text === audioLabel
    );
  }
  if (videoLabel) {
    videoSelect.selectedIndex = [...videoSelect.options].findIndex(
      (option) => option.text === videoLabel
    );
  }
  videoElement.srcObject = stream;
  socket.emit("broadcaster");
}

/**
 * Last-resort logger for getUserMedia / signalling failures.
 * @param {Error} error - the failure to report
 */
function handleError(error) {
  const prefix = "Error: ";
  console.error(prefix, error);
}

远程服务器.js
此代码正在跟踪并分配给视频标签

// Single connection back to the broadcaster (one per watcher page).
let peerConnection;
const config = {
  iceServers: [
    {
      urls: ["stun:stun.l.google.com:19302"]
    }
  ]
};

const socket = io.connect(window.location.origin);
const video = document.querySelector("video");

// The broadcaster sent us an offer: answer it and wire up media/ICE.
socket.on("offer", (id, description) => {
  peerConnection = new RTCPeerConnection(config);
  peerConnection
    .setRemoteDescription(description)
    .then(() => peerConnection.createAnswer())
    .then((sdp) => peerConnection.setLocalDescription(sdp))
    .then(() => {
      socket.emit("answer", id, peerConnection.localDescription);
    })
    .catch((e) => console.error(e)); // don't leave the answer chain floating
  // Incoming remote track(s): render them in the <video> element.
  peerConnection.ontrack = (event) => {
    video.srcObject = event.streams[0];
  };
  peerConnection.onicecandidate = (event) => {
    if (event.candidate) {
      socket.emit("candidate", id, event.candidate);
    }
  };
});

socket.on("candidate", (id, candidate) => {
  // A candidate can arrive before the offer; the original dereferenced an
  // undefined peerConnection and threw. ?. drops such early candidates.
  peerConnection
    ?.addIceCandidate(new RTCIceCandidate(candidate))
    .catch((e) => console.error(e));
});

socket.on("connect", () => {
  socket.emit("watcher");
});

socket.on("broadcaster", () => {
  socket.emit("watcher");
});

socket.on("disconnectPeer", () => {
  // Guard against duplicate notifications / disconnect before any offer.
  if (peerConnection) {
    peerConnection.close();
    peerConnection = undefined;
  }
});

window.onunload = window.onbeforeunload = () => {
  socket.close();
};

最佳答案

rtp-to-webrtc 可以实现你想要的功能。
不幸的是,您需要运行某种服务器来实现这一点,无法全部在浏览器中完成。如果您不想使用 WebRTC,也可以通过其他协议上传(例如先用 MediaRecorder 捕获,再上传)。

关于ffmpeg - 将 Webrtc 轨道流转换为 Video 标签中的 URL (RTSP/UDP/RTP/Http),我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/62852542/

26 4 0
Copyright 2021 - 2024 cfsdn All Rights Reserved 蜀ICP备2022000587号
广告合作:1813099741@qq.com 6ren.com