Android WebRTC: Recording Video from the Other Peer's Stream

Reposted · Author: 塔克拉玛干 · Updated: 2023-11-02 08:36:37

I am developing a WebRTC video-call Android application, and it works very well. I need to record the video of both the other peer (remoteVideoStream) and my own stream (localVideoStream), and convert it to some saveable format such as mp4 or any other format. I have searched hard for this but could not figure out how to do the job.

I have read about VideoFileRenderer and tried to add it to my code to save the video, but I could not use it either: it has no method call such as record() or save(), although it does have a method called release() that is used to end saving the video. In case anyone has an idea, here is the class:

// From the org.webrtc package of the WebRTC Android library (decompiled).
@JNINamespace("webrtc::jni")
public class VideoFileRenderer implements Callbacks, VideoSink {
  private static final String TAG = "VideoFileRenderer";
  private final HandlerThread renderThread;
  private final Handler renderThreadHandler;
  private final FileOutputStream videoOutFile;
  private final String outputFileName;
  private final int outputFileWidth;
  private final int outputFileHeight;
  private final int outputFrameSize;
  private final ByteBuffer outputFrameBuffer;
  private EglBase eglBase;
  private YuvConverter yuvConverter;
  // All captured frames are buffered in memory and only written to disk in release().
  private ArrayList<ByteBuffer> rawFrames = new ArrayList<>();

  public VideoFileRenderer(String outputFile, int outputFileWidth, int outputFileHeight, final Context sharedContext) throws IOException {
    if (outputFileWidth % 2 != 1 && outputFileHeight % 2 != 1) {
      this.outputFileName = outputFile;
      this.outputFileWidth = outputFileWidth;
      this.outputFileHeight = outputFileHeight;
      this.outputFrameSize = outputFileWidth * outputFileHeight * 3 / 2;
      this.outputFrameBuffer = ByteBuffer.allocateDirect(this.outputFrameSize);
      this.videoOutFile = new FileOutputStream(outputFile);
      // The output is a raw YUV4MPEG2 (.y4m) stream, not an mp4.
      this.videoOutFile.write(("YUV4MPEG2 C420 W" + outputFileWidth + " H" + outputFileHeight + " Ip F30:1 A1:1\n").getBytes(Charset.forName("US-ASCII")));
      this.renderThread = new HandlerThread("VideoFileRenderer");
      this.renderThread.start();
      this.renderThreadHandler = new Handler(this.renderThread.getLooper());
      ThreadUtils.invokeAtFrontUninterruptibly(this.renderThreadHandler, new Runnable() {
        public void run() {
          VideoFileRenderer.this.eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PIXEL_BUFFER);
          VideoFileRenderer.this.eglBase.createDummyPbufferSurface();
          VideoFileRenderer.this.eglBase.makeCurrent();
          VideoFileRenderer.this.yuvConverter = new YuvConverter();
        }
      });
    } else {
      throw new IllegalArgumentException("Does not support uneven width or height");
    }
  }

  public void renderFrame(I420Frame i420Frame) {
    VideoFrame frame = i420Frame.toVideoFrame();
    this.onFrame(frame);
    frame.release();
  }

  // Called for every frame delivered by the video track this renderer is attached to.
  public void onFrame(VideoFrame frame) {
    frame.retain();
    this.renderThreadHandler.post(() -> {
      this.renderFrameOnRenderThread(frame);
    });
  }

  private void renderFrameOnRenderThread(VideoFrame frame) {
    Buffer buffer = frame.getBuffer();
    // Crop and scale the frame to the requested output size, preserving aspect ratio.
    int targetWidth = frame.getRotation() % 180 == 0 ? this.outputFileWidth : this.outputFileHeight;
    int targetHeight = frame.getRotation() % 180 == 0 ? this.outputFileHeight : this.outputFileWidth;
    float frameAspectRatio = (float) buffer.getWidth() / (float) buffer.getHeight();
    float fileAspectRatio = (float) targetWidth / (float) targetHeight;
    int cropWidth = buffer.getWidth();
    int cropHeight = buffer.getHeight();
    if (fileAspectRatio > frameAspectRatio) {
      cropHeight = (int) ((float) cropHeight * (frameAspectRatio / fileAspectRatio));
    } else {
      cropWidth = (int) ((float) cropWidth * (fileAspectRatio / frameAspectRatio));
    }

    int cropX = (buffer.getWidth() - cropWidth) / 2;
    int cropY = (buffer.getHeight() - cropHeight) / 2;
    Buffer scaledBuffer = buffer.cropAndScale(cropX, cropY, cropWidth, cropHeight, targetWidth, targetHeight);
    frame.release();
    I420Buffer i420 = scaledBuffer.toI420();
    scaledBuffer.release();
    ByteBuffer byteBuffer = JniCommon.nativeAllocateByteBuffer(this.outputFrameSize);
    YuvHelper.I420Rotate(i420.getDataY(), i420.getStrideY(), i420.getDataU(), i420.getStrideU(), i420.getDataV(), i420.getStrideV(), byteBuffer, i420.getWidth(), i420.getHeight(), frame.getRotation());
    i420.release();
    byteBuffer.rewind();
    this.rawFrames.add(byteBuffer);
  }

  // release() is the only "save" entry point: it stops the render thread and
  // flushes all buffered frames to the output file.
  public void release() {
    CountDownLatch cleanupBarrier = new CountDownLatch(1);
    this.renderThreadHandler.post(() -> {
      this.yuvConverter.release();
      this.eglBase.release();
      this.renderThread.quit();
      cleanupBarrier.countDown();
    });
    ThreadUtils.awaitUninterruptibly(cleanupBarrier);

    try {
      for (ByteBuffer buffer : this.rawFrames) {
        this.videoOutFile.write("FRAME\n".getBytes(Charset.forName("US-ASCII")));
        byte[] data = new byte[this.outputFrameSize];
        buffer.get(data);
        this.videoOutFile.write(data);
        JniCommon.nativeFreeByteBuffer(buffer);
      }

      this.videoOutFile.close();
      Logging.d("VideoFileRenderer", "Video written to disk as " + this.outputFileName + ". Number frames are " + this.rawFrames.size() + " and the dimension of the frames are " + this.outputFileWidth + "x" + this.outputFileHeight + ".");
    } catch (IOException var5) {
      Logging.e("VideoFileRenderer", "Error writing video to disk", var5);
    }
  }
}
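
For context, the only way I found to drive this class is to attach it as a sink to a video track and then call release(), which just dumps a raw YUV4MPEG2 (.y4m) file rather than an mp4. A minimal sketch of that usage, assuming remoteVideoTrack is the remote peer's VideoTrack and rootEglBase is my app's shared EglBase (both names are placeholders; in the real org.webrtc class the sharedContext parameter is an EglBase.Context):

// Sketch only: attach the renderer as a sink so frames start flowing,
// then detach it and call release() to flush the buffered frames to disk.
VideoFileRenderer remoteRecorder = new VideoFileRenderer(
    getFilesDir().getAbsolutePath() + "/remote.y4m",
    640, 480, rootEglBase.getEglBaseContext());
remoteVideoTrack.addSink(remoteRecorder);

// ... during the call, frames accumulate in memory ...

remoteVideoTrack.removeSink(remoteRecorder);
remoteRecorder.release(); // writes the .y4m file; there is no record()/save()

The resulting .y4m file is uncompressed, so it grows quickly; a tool such as ffmpeg can transcode it to mp4 afterwards.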

Beyond that, I cannot find any useful method that would help.

Best Answer

The VideoFileRenderer class only demonstrates how you can get access to the decoded raw video frames of the remote/local peer. It does not record a valid video file.
You have to implement the logic of encoding and muxing the raw video frames into a container, such as mp4, manually.

The main flow looks like this:

  • Switch to the latest WebRTC version (currently v.1.0.25331)
  • Create a video container; see, for example, the MediaMuxer class from the Android SDK
  • Implement the VideoSink interface to get raw frames from a particular video source; see, for example, the ProxyVideoSink class in apprtc/CallActivity.java
  • Encode every frame using MediaCodec and write it into the video container (a sketch of this encode-and-mux step follows this list)
  • Finalize the muxer
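
Putting the container and encoding steps together, below is a minimal sketch of feeding raw I420 frames into a MediaCodec H.264 encoder and muxing the output into an mp4 with MediaMuxer. The class name FrameEncoder, the byte[] I420 input, and the bitrate/frame-rate values are illustrative assumptions, not part of the WebRTC API; a real implementation must also copy each org.webrtc VideoFrame into the YUV layout the chosen codec expects, which varies across devices:

import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaMuxer;

import java.io.IOException;
import java.nio.ByteBuffer;

// Hypothetical sketch: raw I420 frames in, H.264-in-mp4 out.
public class FrameEncoder {
  private static final long TIMEOUT_US = 10000L;

  private final MediaCodec codec;
  private final MediaMuxer muxer;
  private final MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
  private int trackIndex = -1;
  private boolean muxerStarted = false;

  public FrameEncoder(String outputPath, int width, int height, int fps) throws IOException {
    MediaFormat format = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, width, height);
    // COLOR_FormatYUV420Flexible is the most portable choice for raw YUV input,
    // but the exact plane layout still differs between devices.
    format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
        MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible);
    format.setInteger(MediaFormat.KEY_BIT_RATE, 2000000); // assumed bitrate
    format.setInteger(MediaFormat.KEY_FRAME_RATE, fps);
    format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);

    codec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
    codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    codec.start();

    muxer = new MediaMuxer(outputPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
  }

  // Queue one raw I420 frame; presentationTimeUs must increase monotonically.
  public void encodeFrame(byte[] i420, long presentationTimeUs) {
    int inIndex = codec.dequeueInputBuffer(TIMEOUT_US);
    if (inIndex >= 0) {
      ByteBuffer input = codec.getInputBuffer(inIndex);
      input.clear();
      input.put(i420);
      codec.queueInputBuffer(inIndex, 0, i420.length, presentationTimeUs, 0);
    }
    drainEncoder(false);
  }

  // Signal end of stream, drain the remaining output, and finalize the mp4.
  public void finish() {
    int inIndex = codec.dequeueInputBuffer(TIMEOUT_US);
    if (inIndex >= 0) {
      codec.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
    }
    drainEncoder(true);
    codec.stop();
    codec.release();
    if (muxerStarted) {
      muxer.stop();
    }
    muxer.release();
  }

  private void drainEncoder(boolean endOfStream) {
    while (true) {
      int outIndex = codec.dequeueOutputBuffer(bufferInfo, TIMEOUT_US);
      if (outIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
        // The muxer must be started with the encoder's actual output format,
        // which is only known once the first buffers have been processed.
        trackIndex = muxer.addTrack(codec.getOutputFormat());
        muxer.start();
        muxerStarted = true;
      } else if (outIndex >= 0) {
        ByteBuffer encoded = codec.getOutputBuffer(outIndex);
        if (bufferInfo.size > 0 && muxerStarted) {
          muxer.writeSampleData(trackIndex, encoded, bufferInfo);
        }
        codec.releaseOutputBuffer(outIndex, false);
        if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
          return;
        }
      } else if (!endOfStream) {
        return; // no output available yet; try again with the next frame
      }
    }
  }
}

The key ordering constraint is that MediaMuxer.start() may only be called after the encoder reports INFO_OUTPUT_FORMAT_CHANGED, because the actual output format carries the codec-specific data (SPS/PPS) that the mp4 container needs.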

Regarding recording video in Android WebRTC from the other peer's stream, we found a similar question on Stack Overflow: https://stackoverflow.com/questions/53031340/
