
android - FFmpegFrameRecorder video broadcasting: audio arrives faster than video frames on a 3G network


I am using FFmpegFrameRecorder for video broadcasting. The problem is that the audio arrives faster than the video frames. I am using the code below, but I cannot produce a complete video because the audio and video timestamps drift apart.

Java code:

import static com.googlecode.javacv.cpp.opencv_core.IPL_DEPTH_8U;

import java.io.IOException;
import java.nio.ShortBuffer;

import android.app.Activity;
import android.content.Context;
import android.content.pm.ActivityInfo;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.PowerManager;
import android.util.Log;
import android.view.KeyEvent;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup.LayoutParams;
import android.widget.Button;
import android.widget.LinearLayout;

import com.googlecode.javacv.FFmpegFrameRecorder;
import com.googlecode.javacv.cpp.opencv_core.IplImage;

public class MainActivity extends Activity implements OnClickListener {

    private final static String LOG_TAG = "MainActivity";

    private PowerManager.WakeLock mWakeLock;

    private String ffmpeg_link = "";

    private volatile FFmpegFrameRecorder recorder;
    boolean recording = false;
    long startTime = 0;

    private int sampleAudioRateInHz = 16000;
    private int imageWidth = 320;
    private int imageHeight = 240;
    private int frameRate = 24;

    private Thread audioThread;
    volatile boolean runAudioThread = true;
    private AudioRecord audioRecord;
    private AudioRecordRunnable audioRecordRunnable;

    private CameraView cameraView;
    private IplImage yuvIplimage = null;

    private Button recordButton;
    private LinearLayout mainLayout;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
        setContentView(R.layout.activity_main);

        initLayout();
        initRecorder();
    }

    @Override
    protected void onResume() {
        super.onResume();

        if (mWakeLock == null) {
            PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
            mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, LOG_TAG);
            mWakeLock.acquire();
        }
    }

    @Override
    protected void onPause() {
        super.onPause();

        if (mWakeLock != null) {
            mWakeLock.release();
            mWakeLock = null;
        }
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();

        recording = false;
    }

    private void initLayout() {
        mainLayout = (LinearLayout) this.findViewById(R.id.record_layout);

        recordButton = (Button) findViewById(R.id.recorder_control);
        recordButton.setText("Start");
        recordButton.setOnClickListener(this);

        cameraView = new CameraView(this);

        LinearLayout.LayoutParams layoutParam = new LinearLayout.LayoutParams(
                LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT);
        mainLayout.addView(cameraView, layoutParam);
        Log.v(LOG_TAG, "added cameraView to mainLayout");
    }

    private void initRecorder() {
        Log.w(LOG_TAG, "initRecorder");

        if (yuvIplimage == null) {
            // Recreated after the frame size is set in surfaceChanged()
            yuvIplimage = IplImage.create(imageWidth, imageHeight, IPL_DEPTH_8U, 2);
            // yuvIplimage = IplImage.create(imageWidth, imageHeight, IPL_DEPTH_32S, 2);

            Log.v(LOG_TAG, "IplImage.create");
        }

        recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, imageHeight, 1);
        Log.v(LOG_TAG, "FFmpegFrameRecorder: " + ffmpeg_link + " imageWidth: "
                + imageWidth + " imageHeight " + imageHeight);

        recorder.setFormat("flv");
        Log.v(LOG_TAG, "recorder.setFormat(\"flv\")");

        recorder.setSampleRate(sampleAudioRateInHz);
        Log.v(LOG_TAG, "recorder.setSampleRate(sampleAudioRateInHz)");

        // Re-set in surfaceChanged() as well
        recorder.setFrameRate(frameRate);
        Log.v(LOG_TAG, "recorder.setFrameRate(frameRate)");

        // Create the audio recording thread
        audioRecordRunnable = new AudioRecordRunnable();
        audioThread = new Thread(audioRecordRunnable);
    }

    // Start the capture
    public void startRecording() {
        try {
            recorder.start();
            startTime = System.currentTimeMillis();
            recording = true;
            audioThread.start();
        } catch (FFmpegFrameRecorder.Exception e) {
            e.printStackTrace();
        }
    }

    public void stopRecording() {
        // This should stop the audio thread from running
        runAudioThread = false;

        if (recorder != null && recording) {
            recording = false;
            Log.v(LOG_TAG, "Finishing recording, calling stop and release on recorder");
            try {
                recorder.stop();
                recorder.release();
            } catch (FFmpegFrameRecorder.Exception e) {
                e.printStackTrace();
            }
            recorder = null;
        }
    }

    @Override
    public boolean onKeyDown(int keyCode, KeyEvent event) {
        // Quit when the back button is pressed
        if (keyCode == KeyEvent.KEYCODE_BACK) {
            if (recording) {
                stopRecording();
            }
            finish();
            return true;
        }
        return super.onKeyDown(keyCode, event);
    }

    @Override
    public void onClick(View v) {
        if (!recording) {
            startRecording();
            Log.w(LOG_TAG, "Start Button Pushed");
            recordButton.setText("Stop");
        } else {
            stopRecording();
            Log.w(LOG_TAG, "Stop Button Pushed");
            recordButton.setText("Start");
        }
    }

    // ---------------------------------------------
    // Audio thread: captures and encodes audio data
    // ---------------------------------------------
    class AudioRecordRunnable implements Runnable {

        @Override
        public void run() {
            // Set the thread priority
            android.os.Process.setThreadPriority(
                    android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);

            // Audio
            int bufferSize;
            short[] audioData;
            int bufferReadResult;

            bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
                    AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
            audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC,
                    sampleAudioRateInHz, AudioFormat.CHANNEL_IN_MONO,
                    AudioFormat.ENCODING_PCM_16BIT, bufferSize);

            audioData = new short[bufferSize];

            Log.d(LOG_TAG, "audioRecord.startRecording()");
            audioRecord.startRecording();

            // Audio capture/encoding loop
            while (runAudioThread) {
                // Read from audioRecord
                bufferReadResult = audioRecord.read(audioData, 0, audioData.length);
                if (bufferReadResult > 0) {
                    // Log.v(LOG_TAG, "audioRecord bufferReadResult: " + bufferReadResult);

                    // Changes in this variable may not be picked up despite it
                    // being "volatile"
                    if (recording) {
                        try {
                            // Write the audio samples to FFmpegFrameRecorder
                            recorder.record(ShortBuffer.wrap(audioData, 0, bufferReadResult));
                        } catch (FFmpegFrameRecorder.Exception e) {
                            Log.v(LOG_TAG, e.getMessage());
                            e.printStackTrace();
                        }
                    }
                }
            }
            Log.v(LOG_TAG, "AudioThread Finished");

            /* Capture/encoding finished, release the recorder */
            if (audioRecord != null) {
                audioRecord.stop();
                audioRecord.release();
                audioRecord = null;
                Log.v(LOG_TAG, "audioRecord released");
            }
        }
    }

    class CameraView extends SurfaceView implements SurfaceHolder.Callback,
            PreviewCallback {

        private boolean previewRunning = false;

        private SurfaceHolder holder;
        private Camera camera;

        private byte[] previewBuffer;

        long videoTimestamp = 0;

        Bitmap bitmap;
        Canvas canvas;

        public CameraView(Context _context) {
            super(_context);

            holder = this.getHolder();
            holder.addCallback(this);
            holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
        }

        @Override
        public void surfaceCreated(SurfaceHolder holder) {
            camera = Camera.open();

            try {
                camera.setPreviewDisplay(holder);
                camera.setPreviewCallback(this);

                Camera.Parameters currentParams = camera.getParameters();
                Log.v(LOG_TAG, "Preview Framerate: " + currentParams.getPreviewFrameRate());
                Log.v(LOG_TAG, "Preview imageWidth: " + currentParams.getPreviewSize().width
                        + " imageHeight: " + currentParams.getPreviewSize().height);

                // Use these values
                imageWidth = currentParams.getPreviewSize().width;
                imageHeight = currentParams.getPreviewSize().height;
                frameRate = currentParams.getPreviewFrameRate();

                bitmap = Bitmap.createBitmap(imageWidth, imageHeight, Bitmap.Config.ALPHA_8);

                /*
                 * Log.v(LOG_TAG, "Creating previewBuffer size: " + imageWidth * imageHeight
                 *         * ImageFormat.getBitsPerPixel(currentParams.getPreviewFormat()) / 8);
                 * previewBuffer = new byte[imageWidth * imageHeight
                 *         * ImageFormat.getBitsPerPixel(currentParams.getPreviewFormat()) / 8];
                 * camera.addCallbackBuffer(previewBuffer);
                 * camera.setPreviewCallbackWithBuffer(this);
                 */

                camera.startPreview();
                previewRunning = true;
            } catch (IOException e) {
                Log.v(LOG_TAG, e.getMessage());
                e.printStackTrace();
            }
        }

        public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
            Log.v(LOG_TAG, "Surface Changed: width " + width + " height: " + height);

            // We would do this if we wanted to reset the camera parameters
            /*
             * if (!recording) {
             *     if (previewRunning) {
             *         camera.stopPreview();
             *     }
             *     try {
             *         // Camera.Parameters cameraParameters = camera.getParameters();
             *         // p.setPreviewSize(imageWidth, imageHeight);
             *         // p.setPreviewFrameRate(frameRate);
             *         // camera.setParameters(cameraParameters);
             *
             *         camera.setPreviewDisplay(holder);
             *         camera.startPreview();
             *         previewRunning = true;
             *     } catch (IOException e) {
             *         Log.e(LOG_TAG, e.getMessage());
             *         e.printStackTrace();
             *     }
             * }
             */

            // Get the current parameters
            Camera.Parameters currentParams = camera.getParameters();
            Log.v(LOG_TAG, "Preview Framerate: " + currentParams.getPreviewFrameRate());
            Log.v(LOG_TAG, "Preview imageWidth: " + currentParams.getPreviewSize().width
                    + " imageHeight: " + currentParams.getPreviewSize().height);

            // Use these values
            imageWidth = currentParams.getPreviewSize().width;
            imageHeight = currentParams.getPreviewSize().height;
            frameRate = currentParams.getPreviewFrameRate();

            // Recreate the yuvIplimage for the actual preview size. Two 8-bit
            // channels (width * height * 2 bytes) leave enough room for the
            // NV21 preview data (width * height * 3/2 bytes); a single-channel
            // image would overflow in onPreviewFrame().
            yuvIplimage = IplImage.create(imageWidth, imageHeight, IPL_DEPTH_8U, 2);
        }

        @Override
        public void surfaceDestroyed(SurfaceHolder holder) {
            try {
                camera.setPreviewCallback(null);

                previewRunning = false;
                camera.release();
            } catch (RuntimeException e) {
                Log.v(LOG_TAG, e.getMessage());
                e.printStackTrace();
            }
        }

        @Override
        public void onPreviewFrame(byte[] data, Camera camera) {
            if (yuvIplimage != null && recording) {
                // Microseconds since recording started
                videoTimestamp = 1000 * (System.currentTimeMillis() - startTime);

                // Put the camera preview frame right into the yuvIplimage object
                Log.v(LOG_TAG, "preview frame size: " + data.length + " bytes");
                yuvIplimage.getByteBuffer().put(data);

                // FAQ about IplImage:
                // - For custom raw processing of data, getByteBuffer() returns an NIO
                //   direct buffer wrapped around the memory pointed to by imageData,
                //   and under Android we can also use that Buffer with
                //   Bitmap.copyPixelsFromBuffer() and copyPixelsToBuffer().
                // - To get a BufferedImage from an IplImage, we may call
                //   getBufferedImage().
                // - The createFrom() factory method can construct an IplImage from a
                //   BufferedImage.
                // - There are also a few copy*() methods for BufferedImage<->IplImage
                //   data transfers.

                // Let's try it..
                // This works but only on transparency
                // Need to find the right Bitmap and IplImage matching types
                /*
                 * bitmap.copyPixelsFromBuffer(yuvIplimage.getByteBuffer());
                 * // bitmap.setPixel(10, 10, Color.MAGENTA);
                 *
                 * canvas = new Canvas(bitmap);
                 * Paint paint = new Paint();
                 * paint.setColor(Color.GREEN);
                 * float leftx = 20;
                 * float topy = 20;
                 * float rightx = 50;
                 * float bottomy = 100;
                 * RectF rectangle = new RectF(leftx, topy, rightx, bottomy);
                 * canvas.drawRect(rectangle, paint);
                 *
                 * bitmap.copyPixelsToBuffer(yuvIplimage.getByteBuffer());
                 */
                // Log.v(LOG_TAG, "Writing Frame");

                try {
                    // Stamp the frame with its wall-clock capture time
                    recorder.setTimestamp(videoTimestamp);

                    // Record the image into FFmpegFrameRecorder
                    recorder.record(yuvIplimage);
                } catch (FFmpegFrameRecorder.Exception e) {
                    Log.v(LOG_TAG, e.getMessage());
                    e.printStackTrace();
                }
            }
        }
    }
}
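
A side note on the timestamp handling in onPreviewFrame() above: FFmpegFrameRecorder keeps an internal clock, and setting a timestamp that lies behind data the recorder has already written is a common source of broken audio/video interleaving. The javacv RecordActivity sample guards against this by only ever moving the clock forward. A minimal sketch of that guard, reusing the recorder and startTime fields from the code above (the if check is the only change):

// Inside onPreviewFrame(), replacing the unconditional setTimestamp() call:
long t = 1000 * (System.currentTimeMillis() - startTime); // microseconds since start
if (t > recorder.getTimestamp()) {
    // Only advance the recorder clock; never move it backwards behind
    // frames the audio thread may already have written.
    recorder.setTimestamp(t);
}
recorder.record(yuvIplimage);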

AndroidManifest.xml:

<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="com.example.javacv.stream.test2"
    android:versionCode="1"
    android:versionName="1.0" >

    <uses-sdk
        android:minSdkVersion="8"
        android:targetSdkVersion="15" />

    <uses-permission android:name="android.permission.INTERNET" />
    <uses-permission android:name="android.permission.CAMERA" />
    <uses-permission android:name="android.permission.WAKE_LOCK" />
    <uses-permission android:name="android.permission.RECORD_AUDIO" />
    <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />

    <application
        android:icon="@drawable/ic_launcher"
        android:label="@string/app_name"
        android:theme="@style/AppTheme" >
        <activity
            android:name="com.example.javacv.stream.test2.MainActivity"
            android:label="@string/title_activity_main" >
            <intent-filter>
                <action android:name="android.intent.action.MAIN" />

                <category android:name="android.intent.category.LAUNCHER" />
            </intent-filter>
        </activity>
    </application>

</manifest>

Best Answer

This is a known issue that we have also hit on the iOS side. Basically, video packets get dropped while the audio keeps going, and everything falls apart. Some hardware copes poorly at low bandwidth and drifts out of sync. I don't believe there is a reliable fix; we had to hack around it by building our own audio/video buffers on top and re-syncing them using the timestamps, frame sizes, and packet counts.

I'm afraid I can't post that code (it isn't mine to post), but if you know the protocol it shouldn't be hard to recreate.
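
Since the answer describes the approach only in prose, here is a minimal, hypothetical sketch of what "our own buffers with re-syncing on timestamps" could look like. Every class and constant below is invented for illustration (the real implementation depends on the transport protocol), and the 50 ms skew threshold is an assumption to be tuned: both streams are queued with their capture timestamps, and video frames that fall too far behind the audio clock are dropped rather than allowed to stall the stream.

import java.util.concurrent.PriorityBlockingQueue;

// Hypothetical packet type: a media payload tagged with its capture time.
class AvPacket implements Comparable<AvPacket> {
    enum Type { AUDIO, VIDEO }

    final Type type;
    final long timestampUs;   // capture time in microseconds
    final byte[] payload;

    AvPacket(Type type, long timestampUs, byte[] payload) {
        this.type = type;
        this.timestampUs = timestampUs;
        this.payload = payload;
    }

    @Override
    public int compareTo(AvPacket other) {
        // Order packets by capture time so both streams interleave correctly
        return Long.compare(timestampUs, other.timestampUs);
    }
}

// Hypothetical re-sync buffer: audio drives the clock, late video is dropped.
class AvResyncBuffer {
    // Tolerated audio/video skew before video frames are dropped (assumed 50 ms)
    private static final long MAX_SKEW_US = 50_000;

    private final PriorityBlockingQueue<AvPacket> queue = new PriorityBlockingQueue<>();
    private volatile long lastAudioTsUs = 0;

    void push(AvPacket packet) {
        if (packet.type == AvPacket.Type.AUDIO) {
            lastAudioTsUs = packet.timestampUs;
        }
        queue.offer(packet);
    }

    // Returns the next packet to send, discarding video frames that have
    // fallen too far behind the audio clock.
    AvPacket poll() {
        AvPacket packet;
        while ((packet = queue.poll()) != null) {
            boolean staleVideo = packet.type == AvPacket.Type.VIDEO
                    && lastAudioTsUs - packet.timestampUs > MAX_SKEW_US;
            if (!staleVideo) {
                return packet;
            }
            // Video frame is older than the audio clock by more than the
            // tolerated skew: drop it to catch up instead of stalling audio.
        }
        return null;
    }
}

Treating audio as the authoritative clock and video as droppable mirrors the failure mode the answer describes: a skipped frame is far less noticeable to viewers than stuttering or desynchronized audio.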

Regarding android - FFmpegFrameRecorder video broadcasting: audio arrives faster than video frames on a 3G network, we found a similar question on Stack Overflow: https://stackoverflow.com/questions/22342732/
