gpt4 book ai didi

java - 如何使用 sdp 详细信息在 "offer"端点上调用 POST 方法?

转载 作者:行者123 更新时间:2023-12-01 17:30:57 36 4
gpt4 key购买 nike

我是 WebRTC 新手,对此我有很多困惑。我不知道我在这里问的是可能还是不可能?

我想要从 Android 到网络进行视频直播。我进行了基本设置并达到了我们可以创建报价的步骤。

可以通过 RTCDataChannel 以 SDP 形式传输要约和应答。

如何使用 WebRTC 将实时视频流数据发送到后端?

到目前为止我所做的都在这里。

 public class MediaTestNew extends AppCompatActivity implements View.OnClickListener{

PeerConnectionFactory peerConnectionFactory;
MediaConstraints audioConstraints;
MediaConstraints videoConstraints;
MediaConstraints sdpConstraints;
VideoSource videoSource;
VideoTrack localVideoTrack;
AudioSource audioSource;
AudioTrack localAudioTrack;
SurfaceTextureHelper surfaceTextureHelper;

SurfaceViewRenderer localVideoView;
SurfaceViewRenderer remoteVideoView;

Button hangup;
PeerConnection localPeer,remotePeer;
List<IceServer> iceServers;
EglBase rootEglBase;

private static final String TAG = "MediaTestNew";

boolean gotUserMedia;
List<PeerConnection.IceServer> peerIceServers = new ArrayList<>();

final int ALL_PERMISSIONS_CODE = 1;

@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);

if (ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED
|| ContextCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED) {
ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.CAMERA, Manifest.permission.RECORD_AUDIO}, ALL_PERMISSIONS_CODE);
} else {
// all permissions already granted
start();
}
}

@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);

if (requestCode == ALL_PERMISSIONS_CODE
&& grantResults.length == 2
&& grantResults[0] == PackageManager.PERMISSION_GRANTED
&& grantResults[1] == PackageManager.PERMISSION_GRANTED) {
// all permissions granted
start();
} else {
finish();
}
}

public void start() {
// keep screen on
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

initViews();
initVideos();
getIceServers();
initPeerConnectionFactory();
}

private void initViews() {
hangup = findViewById(R.id.end_call);
localVideoView = findViewById(R.id.local_gl_surface_view);
remoteVideoView = findViewById(R.id.remote_gl_surface_view);
hangup.setOnClickListener(this);
}

private void initVideos() {
rootEglBase = EglBase.create();
localVideoView.init(rootEglBase.getEglBaseContext(), null);
remoteVideoView.init(rootEglBase.getEglBaseContext(), null);
localVideoView.setZOrderMediaOverlay(true);
remoteVideoView.setZOrderMediaOverlay(true);
}

private void initPeerConnectionFactory(){
//Initialize PeerConnectionFactory globals.
PeerConnectionFactory.InitializationOptions initializationOptions =
PeerConnectionFactory.InitializationOptions.builder(this)
.createInitializationOptions();
PeerConnectionFactory.initialize(initializationOptions);

//Create a new PeerConnectionFactory instance - using Hardware encoder and decoder.
PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
DefaultVideoEncoderFactory defaultVideoEncoderFactory = new DefaultVideoEncoderFactory(
rootEglBase.getEglBaseContext(), /* enableIntelVp8Encoder */true, /* enableH264HighProfile */true);
DefaultVideoDecoderFactory defaultVideoDecoderFactory = new DefaultVideoDecoderFactory(rootEglBase.getEglBaseContext());
peerConnectionFactory = PeerConnectionFactory.builder()
.setOptions(options)
.setVideoEncoderFactory(defaultVideoEncoderFactory)
.setVideoDecoderFactory(defaultVideoDecoderFactory)
.createPeerConnectionFactory();

//Now create a VideoCapturer instance.
VideoCapturer videoCapturerAndroid;
videoCapturerAndroid = createCameraCapturer(new Camera1Enumerator(false));

//Create MediaConstraints - Will be useful for specifying video and audio constraints.
audioConstraints = new MediaConstraints();
videoConstraints = new MediaConstraints();

//Create a VideoSource instance
if (videoCapturerAndroid != null) {
surfaceTextureHelper = SurfaceTextureHelper.create("CaptureThread", rootEglBase.getEglBaseContext());
videoSource = peerConnectionFactory.createVideoSource(videoCapturerAndroid.isScreencast());
videoCapturerAndroid.initialize(surfaceTextureHelper, this, videoSource.getCapturerObserver());
}
localVideoTrack = peerConnectionFactory.createVideoTrack("100", videoSource);

//create an AudioSource instance
audioSource = peerConnectionFactory.createAudioSource(audioConstraints);
localAudioTrack = peerConnectionFactory.createAudioTrack("101", audioSource);

if (videoCapturerAndroid != null) {
videoCapturerAndroid.startCapture(1024, 720, 30);
}

localVideoView.setVisibility(View.VISIBLE);
// And finally, with our VideoRenderer ready, we
// can add our renderer to the VideoTrack.
localVideoTrack.addSink(localVideoView);

localVideoView.setMirror(true);
remoteVideoView.setMirror(true);

gotUserMedia = true;

createPeerConnection();
doCall();
}

@Override
public void onClick(View v) {

}

private VideoCapturer createCameraCapturer(CameraEnumerator enumerator) {
final String[] deviceNames = enumerator.getDeviceNames();

// First, try to find front facing camera
Logging.d(TAG, "Looking for front facing cameras.");
for (String deviceName : deviceNames) {
if (enumerator.isFrontFacing(deviceName)) {
Logging.d(TAG, "Creating front facing camera capturer.");
VideoCapturer videoCapturer = enumerator.createCapturer(deviceName, null);

if (videoCapturer != null) {
return videoCapturer;
}
}
}

// Front facing camera not found, try something else
Logging.d(TAG, "Looking for other cameras.");
for (String deviceName : deviceNames) {
if (!enumerator.isFrontFacing(deviceName)) {
Logging.d(TAG, "Creating other camera capturer.");
VideoCapturer videoCapturer = enumerator.createCapturer(deviceName, null);

if (videoCapturer != null) {
return videoCapturer;
}
}
}

return null;
}

private void getIceServers() {
PeerConnection.IceServer peerIceServer = PeerConnection.IceServer.builder("stun:stun.l.google.com:19302").createIceServer();
peerIceServers.add(peerIceServer);
}

/**
* Creating the local peerconnection instance
*/
private void createPeerConnection() {
PeerConnection.RTCConfiguration rtcConfig = new PeerConnection.RTCConfiguration(peerIceServers);
// TCP candidates are only useful when connecting to a server that supports
// ICE-TCP.
rtcConfig.tcpCandidatePolicy = PeerConnection.TcpCandidatePolicy.DISABLED;
rtcConfig.bundlePolicy = PeerConnection.BundlePolicy.MAXBUNDLE;
rtcConfig.rtcpMuxPolicy = PeerConnection.RtcpMuxPolicy.REQUIRE;
rtcConfig.continualGatheringPolicy = PeerConnection.ContinualGatheringPolicy.GATHER_CONTINUALLY;
// Use ECDSA encryption.
rtcConfig.keyType = PeerConnection.KeyType.ECDSA;

//creating localPeer
localPeer = peerConnectionFactory.createPeerConnection(rtcConfig, new CustomPeerConnectionObserver("localPeerCreation") {
@Override
public void onIceCandidate(IceCandidate iceCandidate) {
super.onIceCandidate(iceCandidate);
onIceCandidateReceived(iceCandidate);
}

@Override
public void onAddStream(MediaStream mediaStream) {
showToast("Received Remote stream");
super.onAddStream(mediaStream);
//gotRemoteStream(mediaStream);
}
});



addStreamToLocalPeer();
}

/**
* Adding the stream to the localpeer
*/
private void addStreamToLocalPeer() {
//creating local mediastream
MediaStream stream = peerConnectionFactory.createLocalMediaStream("102");
stream.addTrack(localAudioTrack);
stream.addTrack(localVideoTrack);
localPeer.addStream(stream);
}

/**
* This method is called when the app is the initiator - We generate the offer and send it over through socket
* to remote peer
*/
private void doCall() {
sdpConstraints = new MediaConstraints();
sdpConstraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "false"));
sdpConstraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "false"));
sdpConstraints.optional.add(new MediaConstraints.KeyValuePair("Testing", "local"));

//creating Offer
localPeer.createOffer(new CustomSdpObserver("localCreateOffer") {
@Override
public void onCreateSuccess(SessionDescription sessionDescription) {
super.onCreateSuccess(sessionDescription);
localPeer.setLocalDescription(new CustomSdpObserver("localSetLocalDesc"), sessionDescription);
Log.d("onCreateSuccess", "SignallingClient emit ");
}
}, sdpConstraints);
}

/**
* Received local ice candidate. Send it to remote peer through signalling for negotiation
*/
public void onIceCandidateReceived(IceCandidate iceCandidate) {
//we have received ice candidate. We can set it to the other peer.
remotePeer.addIceCandidate(iceCandidate);
}

public void showToast(final String msg) {
runOnUiThread(() -> Toast.makeText(MediaTestNew.this, msg, Toast.LENGTH_SHORT).show());
}

@Override
protected void onDestroy() {
//SignallingClient.getInstance().close();
super.onDestroy();

if (surfaceTextureHelper != null) {
surfaceTextureHelper.dispose();
surfaceTextureHelper = null;
}
}

}

任何帮助将不胜感激。

最佳答案

For Android there are fewer tutorials available — or perhaps my googling skills are just not good enough.

Here, http://myhexaville.com/?s=webrtc has detailed WebRTC articles along with complete demo code. It is a really good resource for understanding all the basics.

关于java - 如何使用 sdp 详细信息在 "offer"端点上调用 POST 方法?,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/61120456/

36 4 0
Copyright 2021 - 2024 cfsdn All Rights Reserved 蜀ICP备2022000587号
广告合作:1813099741@qq.com 6ren.com