android - JavaCV recorder orientation is not correct in portrait mode

Hi, I am using https://github.com/bytedeco/javacv/ to record video. The orientation is fine in landscape mode, but when I change the orientation to portrait mode, the video comes out rotated -90 degrees. Does anyone know what I might be doing wrong? Here is the code.

package org.bytedeco.javacv_android_example.record;

import android.app.Activity;
import android.content.Context;
import android.content.pm.ActivityInfo;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.Environment;
import android.os.PowerManager;
import android.util.Log;
import android.view.Display;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;

import org.bytedeco.javacpp.opencv_core;
import org.bytedeco.javacpp.opencv_imgproc;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.OpenCVFrameConverter;
import org.bytedeco.javacv_android_example.R;

import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ShortBuffer;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

public class RecordActivity extends Activity implements OnClickListener {

private final static String CLASS_LABEL = "RecordActivity";
private final static String LOG_TAG = CLASS_LABEL;
/* The number of seconds in the continuous record loop (or 0 to disable loop). */
final int RECORD_LENGTH = 10;
/* layout setting */
private final int bg_screen_bx = 232;
private final int bg_screen_by = 128;
private final int bg_screen_width = 700;
private final int bg_screen_height = 500;
private final int bg_width = 1123;
private final int bg_height = 715;
private final int live_width = 640;
private final int live_height = 480;
long startTime = 0;
boolean recording = false;
volatile boolean runAudioThread = true;
Frame[] images;
long[] timestamps;
ShortBuffer[] samples;
int imagesIndex, samplesIndex;
private PowerManager.WakeLock mWakeLock;
private File ffmpeg_link = new File(Environment.getExternalStorageDirectory(), "stream.mp4");
private FFmpegFrameRecorder recorder;
private boolean isPreviewOn = false;
private int sampleAudioRateInHz = 44100;
private int imageWidth = 320;
private int imageHeight = 240;
private int frameRate = 30;
/* audio data getting thread */
private AudioRecord audioRecord;
private AudioRecordRunnable audioRecordRunnable;
private Thread audioThread;
/* video data getting thread */
private Camera cameraDevice;
private CameraView cameraView;
private Frame yuvImage = null;
private int screenWidth, screenHeight;
private Button btnRecorderControl;
OpenCVFrameConverter.ToIplImage converter = new OpenCVFrameConverter.ToIplImage();


@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);

setContentView(R.layout.activity_record);

PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, CLASS_LABEL);
mWakeLock.acquire();

initLayout();
}

@Override
protected void onResume() {
super.onResume();

if(mWakeLock == null) {
PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, CLASS_LABEL);
mWakeLock.acquire();
}
}

@Override
protected void onPause() {
super.onPause();

if(mWakeLock != null) {
mWakeLock.release();
mWakeLock = null;
}
}

@Override
protected void onDestroy() {
super.onDestroy();

recording = false;

if(cameraView != null) {
cameraView.stopPreview();
}

if(cameraDevice != null) {
cameraDevice.stopPreview();
cameraDevice.release();
cameraDevice = null;
}

if(mWakeLock != null) {
mWakeLock.release();
mWakeLock = null;
}
}

@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {

if(keyCode == KeyEvent.KEYCODE_BACK) {
if(recording) {
stopRecording();
}

finish();

return true;
}

return super.onKeyDown(keyCode, event);
}

private void initLayout() {

/* get size of screen */
Display display = ((WindowManager) getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
screenWidth = display.getWidth();
screenHeight = display.getHeight();
RelativeLayout.LayoutParams layoutParam = null;
LayoutInflater myInflate = null;
myInflate = (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE);
RelativeLayout topLayout = new RelativeLayout(this);
setContentView(topLayout);
LinearLayout preViewLayout = (LinearLayout) myInflate.inflate(R.layout.activity_record, null);
layoutParam = new RelativeLayout.LayoutParams(screenWidth, screenHeight);
topLayout.addView(preViewLayout, layoutParam);

/* add control button: start and stop */
btnRecorderControl = (Button) findViewById(R.id.recorder_control);
btnRecorderControl.setText("Start");
btnRecorderControl.setOnClickListener(this);

/* add camera view */
int display_width_d = (int) (1.0 * bg_screen_width * screenWidth / bg_width);
int display_height_d = (int) (1.0 * bg_screen_height * screenHeight / bg_height);
int prev_rw, prev_rh;
if(1.0 * display_width_d / display_height_d > 1.0 * live_width / live_height) {
prev_rh = display_height_d;
prev_rw = (int) (1.0 * display_height_d * live_width / live_height);
} else {
prev_rw = display_width_d;
prev_rh = (int) (1.0 * display_width_d * live_height / live_width);
}
layoutParam = new RelativeLayout.LayoutParams(prev_rw, prev_rh);
layoutParam.topMargin = (int) (1.0 * bg_screen_by * screenHeight / bg_height);
layoutParam.leftMargin = (int) (1.0 * bg_screen_bx * screenWidth / bg_width);

cameraDevice = Camera.open();
Log.i(LOG_TAG, "cameara open");
cameraView = new CameraView(this, cameraDevice);
topLayout.addView(cameraView, layoutParam);
Log.i(LOG_TAG, "cameara preview start: OK");
}

//---------------------------------------
// initialize ffmpeg_recorder
//---------------------------------------
private void initRecorder() {

Log.w(LOG_TAG, "init recorder");

if(RECORD_LENGTH > 0) {
imagesIndex = 0;
images = new Frame[RECORD_LENGTH * frameRate];
timestamps = new long[images.length];
for(int i = 0; i < images.length; i++) {
images[i] = new Frame(imageWidth, imageHeight, Frame.DEPTH_UBYTE, 2);
timestamps[i] = -1;
}
} else if(yuvImage == null) {
yuvImage = new Frame(imageWidth, imageHeight, Frame.DEPTH_UBYTE, 2);
Log.i(LOG_TAG, "create yuvImage");
}

Log.i(LOG_TAG, "ffmpeg_url: " + ffmpeg_link.getAbsolutePath());
recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, imageHeight, 1);
recorder.setFormat("mp4");
recorder.setSampleRate(sampleAudioRateInHz);
// Set in the surface changed method
recorder.setFrameRate(frameRate);

Log.i(LOG_TAG, "recorder initialize success");

audioRecordRunnable = new AudioRecordRunnable();
audioThread = new Thread(audioRecordRunnable);
runAudioThread = true;
}

public void startRecording() {
initRecorder();

try {
recorder.start();
startTime = System.currentTimeMillis();
recording = true;
audioThread.start();
} catch(FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
}

public void stopRecording() {

runAudioThread = false;
try {
audioThread.join();
} catch(InterruptedException e) {
e.printStackTrace();
}
audioRecordRunnable = null;
audioThread = null;

if(recorder != null && recording) {
if(RECORD_LENGTH > 0) {
Log.v(LOG_TAG, "Writing frames");
try {
int firstIndex = imagesIndex % images.length;
int lastIndex = (imagesIndex - 1) % images.length;
if(imagesIndex <= images.length) {
firstIndex = 0;
lastIndex = imagesIndex - 1;
}
if((startTime = timestamps[lastIndex] - RECORD_LENGTH * 1000000L) < 0) {
startTime = 0;
}
if(lastIndex < firstIndex) {
lastIndex += images.length;
}
for(int i = firstIndex; i <= lastIndex; i++) {
long t = timestamps[i % timestamps.length] - startTime;
if(t >= 0) {
if(t > recorder.getTimestamp()) {
recorder.setTimestamp(t);
}
recorder.record(images[i % images.length]);
}
}

firstIndex = samplesIndex % samples.length;
lastIndex = (samplesIndex - 1) % samples.length;
if(samplesIndex <= samples.length) {
firstIndex = 0;
lastIndex = samplesIndex - 1;
}
if(lastIndex < firstIndex) {
lastIndex += samples.length;
}
for(int i = firstIndex; i <= lastIndex; i++) {
recorder.recordSamples(samples[i % samples.length]);
}
} catch(FFmpegFrameRecorder.Exception e) {
Log.v(LOG_TAG, e.getMessage());
e.printStackTrace();
}
}

recording = false;
Log.v(LOG_TAG, "Finishing recording, calling stop and release on recorder");
try {
recorder.stop();
recorder.release();
} catch(FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
recorder = null;
}
}

@Override
public void onClick(View v) {
if(!recording) {
startRecording();
Log.w(LOG_TAG, "Start Button Pushed");
btnRecorderControl.setText("Stop");
} else {
// This will trigger the audio recording loop to stop and then set recording = false.
stopRecording();
Log.w(LOG_TAG, "Stop Button Pushed");
btnRecorderControl.setText("Start");
}
}

//---------------------------------------------
// audio thread, gets and encodes audio data
//---------------------------------------------
class AudioRecordRunnable implements Runnable {

@Override
public void run() {
android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);

// Audio
int bufferSize;
ShortBuffer audioData;
int bufferReadResult;

bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);

if(RECORD_LENGTH > 0) {
samplesIndex = 0;
samples = new ShortBuffer[RECORD_LENGTH * sampleAudioRateInHz * 2 / bufferSize + 1];
for(int i = 0; i < samples.length; i++) {
samples[i] = ShortBuffer.allocate(bufferSize);
}
} else {
audioData = ShortBuffer.allocate(bufferSize);
}

Log.d(LOG_TAG, "audioRecord.startRecording()");
audioRecord.startRecording();

/* ffmpeg_audio encoding loop */
while(runAudioThread) {
if(RECORD_LENGTH > 0) {
audioData = samples[samplesIndex++ % samples.length];
audioData.position(0).limit(0);
}
//Log.v(LOG_TAG,"recording? " + recording);
bufferReadResult = audioRecord.read(audioData.array(), 0, audioData.capacity());
audioData.limit(bufferReadResult);
if(bufferReadResult > 0) {
Log.v(LOG_TAG, "bufferReadResult: " + bufferReadResult);
// If "recording" isn't true when start this thread, it never get's set according to this if statement...!!!
// Why? Good question...
if(recording) {
if(RECORD_LENGTH <= 0) {
try {
recorder.recordSamples(audioData);
//Log.v(LOG_TAG,"recording " + 1024*i + " to " + 1024*i+1024);
} catch(FFmpegFrameRecorder.Exception e) {
Log.v(LOG_TAG, e.getMessage());
e.printStackTrace();
}
}
}
}
}
Log.v(LOG_TAG, "AudioThread Finished, release audioRecord");

/* encoding finish, release recorder */
if(audioRecord != null) {
audioRecord.stop();
audioRecord.release();
audioRecord = null;
Log.v(LOG_TAG, "audioRecord released");
}
}
}

//---------------------------------------------
// camera thread, gets and encodes video data
//---------------------------------------------
class CameraView extends SurfaceView implements SurfaceHolder.Callback, PreviewCallback {

private SurfaceHolder mHolder;
private Camera mCamera;

public CameraView(Context context, Camera camera) {
super(context);
Log.w("camera", "camera view");
mCamera = camera;
mHolder = getHolder();
mHolder.addCallback(CameraView.this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
mCamera.setPreviewCallback(CameraView.this);
}

@Override
public void surfaceCreated(SurfaceHolder holder) {
try {
stopPreview();
mCamera.setPreviewDisplay(holder);
} catch(IOException exception) {
mCamera.release();
mCamera = null;
}
}

public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
Camera.Parameters camParams = mCamera.getParameters();
List<Camera.Size> sizes = camParams.getSupportedPreviewSizes();
// Sort the list in ascending order
Collections.sort(sizes, new Comparator<Camera.Size>() {

public int compare(final Camera.Size a, final Camera.Size b) {
return a.width * a.height - b.width * b.height;
}
});

// Pick the first preview size that is equal or bigger, or pick the last (biggest) option if we cannot
// reach the initial settings of imageWidth/imageHeight.
for(int i = 0; i < sizes.size(); i++) {
if((sizes.get(i).width >= imageWidth && sizes.get(i).height >= imageHeight) || i == sizes.size() - 1) {
imageWidth = sizes.get(i).width;
imageHeight = sizes.get(i).height;
Log.v(LOG_TAG, "Changed to supported resolution: " + imageWidth + "x" + imageHeight);
break;
}
}
camParams.setPreviewSize(imageWidth, imageHeight);

Log.v(LOG_TAG, "Setting imageWidth: " + imageWidth + " imageHeight: " + imageHeight + " frameRate: " + frameRate);

camParams.setPreviewFrameRate(frameRate);
Log.v(LOG_TAG, "Preview Framerate: " + camParams.getPreviewFrameRate());

mCamera.setParameters(camParams);
startPreview();
}

@Override
public void surfaceDestroyed(SurfaceHolder holder) {
try {
mHolder.addCallback(null);
mCamera.setPreviewCallback(null);
} catch(RuntimeException e) {
// The camera has probably just been released, ignore.
}
}

public void startPreview() {
if(!isPreviewOn && mCamera != null) {
isPreviewOn = true;
mCamera.startPreview();
}
}

public void stopPreview() {
if(isPreviewOn && mCamera != null) {
isPreviewOn = false;
mCamera.stopPreview();
}
}

@Override
public void onPreviewFrame(byte[] data, Camera camera) {
if(audioRecord == null || audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
startTime = System.currentTimeMillis();
return;
}
if(RECORD_LENGTH > 0) {
int i = imagesIndex++ % images.length;
yuvImage = images[i];
timestamps[i] = 1000 * (System.currentTimeMillis() - startTime);
}
/* get video data */
if(yuvImage != null && recording) {
((ByteBuffer) yuvImage.image[0].position(0)).put(data);

if(RECORD_LENGTH <= 0) {
try {
Log.v(LOG_TAG, "Writing Frame");
long t = 1000 * (System.currentTimeMillis() - startTime);
if(t > recorder.getTimestamp()) {
recorder.setTimestamp(t);
}
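// Asker's workaround: convert the Frame to an IplImage, rotate it -90 degrees with OpenCV, and record the rotated result.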
opencv_core.IplImage toBeRotated = converter.convert(yuvImage);
toBeRotated = rotate(toBeRotated, -90);
Frame newFrame = converter.convert(toBeRotated);
recorder.record(newFrame);
opencv_core.cvReleaseImage(toBeRotated);
// recorder.record(yuvImage);
} catch(FFmpegFrameRecorder.Exception e) {
Log.v(LOG_TAG, e.getMessage());
e.printStackTrace();
}
}
}
}
}

public static opencv_core.IplImage rotate(opencv_core.IplImage image, double angle) {
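// Rotates the image by the given angle (in degrees) around its center with an affine warp and returns a newly allocated image; the caller releases it.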
opencv_core.IplImage copy = opencv_core.cvCloneImage(image);

double radians = Math.toRadians(angle);
double sin = Math.abs(Math.sin(radians));
double cos = Math.abs(Math.cos(radians));

int newWidth = (int) (copy.width() * cos + copy.height() * sin);
int newHeight = (int) (copy.width() * sin + copy.height() * cos);

int[] newWidthHeight = {newWidth, newHeight};

opencv_core.CvSize size = new opencv_core.CvSize(copy.height(), copy.width());

opencv_core.IplImage rotatedImage = opencv_core.cvCreateImage(size, copy.depth(), copy.nChannels());
opencv_core.CvMat mapMatrix = opencv_core.cvCreateMat( 2, 3, opencv_core.CV_32FC1 );

//Define Mid Point
opencv_core.CvPoint2D32f centerPoint = new opencv_core.CvPoint2D32f();
centerPoint.x(copy.width()/2);
centerPoint.y(copy.height()/2);

//Get Rotational Matrix
opencv_imgproc.cv2DRotationMatrix(centerPoint, angle, 1, mapMatrix);

opencv_imgproc.cvWarpAffine(copy, rotatedImage, mapMatrix, opencv_imgproc.CV_INTER_CUBIC + opencv_imgproc.CV_WARP_FILL_OUTLIERS, opencv_core.cvScalarAll(170));
opencv_core.cvReleaseImage(copy);
opencv_core.cvReleaseMat(mapMatrix);
return rotatedImage;
}
}

Best Answer

You can do something like recorder.setVideoMetadata("rotate", "90"), or you can transform the images with something like this:

FFmpegFrameFilter filter = new FFmpegFrameFilter("transpose=cclock_flip", imageWidth, imageHeight);
filter.start(); // the filter must be started before frames are pushed
filter.push(frame);
Frame frame2;
while ((frame2 = filter.pull()) != null) {
    recorder.record(frame2);
}
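
For reference, here is a minimal sketch of how that filter could be wired into the activity above. It assumes the camera delivers NV21 preview data and that the filter is created and started once; the initFilter()/recordRotated() helpers, the portraitFilter field and the setPixelFormat() call are illustrative assumptions rather than part of the original answer, and the exact transpose argument (clock, cclock, clock_flip or cclock_flip) may need adjusting for a given device.

import org.bytedeco.javacpp.avutil;
import org.bytedeco.javacv.FFmpegFrameFilter;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.FrameFilter;

// Extra field next to "recorder" in RecordActivity.
private FFmpegFrameFilter portraitFilter;

// Call once, e.g. at the end of initRecorder().
private void initFilter() throws FrameFilter.Exception {
    portraitFilter = new FFmpegFrameFilter("transpose=cclock_flip", imageWidth, imageHeight);
    portraitFilter.setPixelFormat(avutil.AV_PIX_FMT_NV21); // raw camera preview format (assumption)
    portraitFilter.start();
}

// Call from onPreviewFrame() with the filled yuvImage, instead of the IplImage rotate() path.
private void recordRotated(Frame yuvFrame) throws FrameFilter.Exception, FFmpegFrameRecorder.Exception {
    portraitFilter.push(yuvFrame);
    Frame rotated;
    while ((rotated = portraitFilter.pull()) != null) { // drain every frame the filter produces
        recorder.record(rotated);
    }
}

The recorder.setVideoMetadata("rotate", "90") route mentioned above does not touch the pixels at all; it would typically be called before recorder.start() and relies on the video player honoring the rotation tag.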

Regarding "android - JavaCV recorder orientation is not correct in portrait mode", we found a similar question on Stack Overflow: https://stackoverflow.com/questions/33315787/
