android - MediaCodec video stream from camera has wrong orientation and colors

I am trying to stream video captured directly from the camera of an Android device. So far I can grab each frame in the camera's onPreviewFrame(byte[] data, Camera camera) callback, encode it, then successfully decode it and render it to a surface, using Android's MediaCodec for both encoding and decoding. But the video's colors and orientation are wrong (rotated 90 degrees). After some searching I found this YV12toYUV420PackedSemiPlanar function — if I run the raw camera data through it before handing it to the encoder, the colors come out right, but the picture is still rotated 90 degrees:

public static byte[] YV12toYUV420PackedSemiPlanar(final byte[] input, final int width, final int height) {
    /*
     * YV12 is fully planar (Y plane, then V plane, then U plane);
     * the semi-planar output is the Y plane followed by interleaved U/V pairs.
     */
    final int frameSize = width * height;
    final int qFrameSize = frameSize / 4; // each chroma plane is (width/2) x (height/2)
    byte[] output = new byte[input.length];

    System.arraycopy(input, 0, output, 0, frameSize); // Y plane is copied unchanged
    for (int i = 0; i < qFrameSize; i++) {
        output[frameSize + i*2]     = input[frameSize + qFrameSize + i]; // Cb (U)
        output[frameSize + i*2 + 1] = input[frameSize + i];              // Cr (V)
    }
    return output;
}
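For context (an addition of mine based on the standard format definitions, not something stated in the original post): YV12 stores the whole Y plane, then the whole V plane, then the whole U plane, while the encoder's COLOR_FormatYUV420SemiPlanar input is the Y plane followed by interleaved U/V pairs (NV12). Interleaving the two chroma planes is what fixes the colors. For a width x height frame the assumed offsets are:

final int frameSize = width * height;   // Y plane: bytes [0, frameSize)
final int qFrameSize = frameSize / 4;   // each chroma plane is (width/2) x (height/2)
// YV12 : V plane at [frameSize, frameSize + qFrameSize), U plane right after it
// NV12 : interleaved U,V byte pairs at [frameSize, frameSize + 2*qFrameSize)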

I then used this rotateYUV420Degree90 function, calling it after YV12toYUV420PackedSemiPlanar. The orientation and colors now look right, but the output video is badly distorted:

private byte[] rotateYUV420Degree90(byte[] data, int imageWidth, int imageHeight)
{
    // Rotates a semi-planar YUV420 frame (Y plane + interleaved chroma) by 90 degrees.
    // Note: the result has swapped dimensions, i.e. it is imageHeight wide and imageWidth tall.
    byte[] yuv = new byte[imageWidth*imageHeight*3/2];
    // Rotate the Y luma
    int i = 0;
    for (int x = 0; x < imageWidth; x++)
    {
        for (int y = imageHeight-1; y >= 0; y--)
        {
            yuv[i] = data[y*imageWidth + x];
            i++;
        }
    }
    // Rotate the interleaved U and V chroma pairs, filling the output from the end
    i = imageWidth*imageHeight*3/2 - 1;
    for (int x = imageWidth-1; x > 0; x = x-2)
    {
        for (int y = 0; y < imageHeight/2; y++)
        {
            yuv[i] = data[(imageWidth*imageHeight) + (y*imageWidth) + x];
            i--;
            yuv[i] = data[(imageWidth*imageHeight) + (y*imageWidth) + (x-1)];
            i--;
        }
    }
    return yuv;
}
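A likely cause of the distortion (my reading of the code, not something confirmed in the original post): a 90-degree rotation turns a 320x240 frame into a 240x320 one, but the encoder is still configured for 320x240, so every row is read at the wrong stride. A minimal sketch of what would have to change, assuming the h264 path:

// Hypothetical sketch: after rotating, feed the encoder with swapped dimensions.
byte[] nv12 = YV12toYUV420PackedSemiPlanar(data, 320, 240);
byte[] rotated = rotateYUV420Degree90(nv12, 320, 240); // result is 240 wide, 320 tall
// The encoder (and the decoder at the other end) would then need the rotated size:
MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", 240, 320);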

Since I know very little about color formats and camera data, I can't figure out what I'm doing wrong. Here is my complete code — please take a look and help me find the mistake.

Thanks in advance.

public class MainActivity extends Activity implements SurfaceHolder.Callback  {

Camera mCamera;
FileOutputStream fos;
File mVideoFile;
MediaCodec mMediaCodec;
ByteBuffer[] inputBuffers;
ByteBuffer[] outputBuffers;
MySurfaceView cameraSurfaceView ;
SurfaceView decodedSurfaceView ;
LinearLayout ll;
RelativeLayout rl;
Button btn;
boolean mPreviewRunning = false;
boolean firstTime = true;
boolean isRunning = false;
public static final String ENCODING = "h264";

private PlayerThread mPlayer = null;
Handler handler = null;
public static byte[] SPS = null;
public static byte[] PPS = null;
public static int frameID = 0;
BlockingQueue<Frame> queue = new ArrayBlockingQueue<Frame>(100);

private static class Frame
{
public int id;
public byte[] frameData;

public Frame(int id)
{
this.id = id;
}
}

@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);

ll = new LinearLayout(getApplicationContext());
ll.setOrientation(LinearLayout.VERTICAL);

cameraSurfaceView = new MySurfaceView(getApplicationContext());
if(ENCODING.equalsIgnoreCase("h264"))
{
cameraSurfaceView.setLayoutParams(new android.widget.FrameLayout.LayoutParams(320, 240));
}
else if(ENCODING.equalsIgnoreCase("h263"))
{
cameraSurfaceView.setLayoutParams(new android.widget.FrameLayout.LayoutParams(352, 288));
}
ll.addView(cameraSurfaceView);

initCodec();
setContentView(ll);

}

@Override
protected void onPause() {

super.onPause();
mPreviewRunning = false;

if(cameraSurfaceView !=null && cameraSurfaceView.isEnabled())
cameraSurfaceView.setEnabled(false);
cameraSurfaceView = null;

if(mCamera != null)
{
mCamera.stopPreview();
mCamera.release();
}

if(mMediaCodec != null)
{
mMediaCodec.stop();
mMediaCodec.release();
mMediaCodec = null;
}

// exit only after the codec has been released; code placed after System.exit() never runs
System.exit(0);

};


private void initCodec() {

MediaFormat mediaFormat = null;

if(mMediaCodec != null)
{
mMediaCodec.stop();
mMediaCodec.release();
mMediaCodec = null;
}

if(ENCODING.equalsIgnoreCase("h264"))
{
mMediaCodec = MediaCodec.createEncoderByType("video/avc");
mediaFormat = MediaFormat.createVideoFormat("video/avc",
320,
240);
}
else if(ENCODING.equalsIgnoreCase("h263"))
{
mMediaCodec = MediaCodec.createEncoderByType("video/3gpp");
mediaFormat = MediaFormat.createVideoFormat("video/3gpp",
352,
288);
}

mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 125000);
mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 15);
mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 5);
// sample rate and channel count are audio-format keys; they have no effect on a video format
//mediaFormat.setInteger(MediaFormat.KEY_SAMPLE_RATE, 8000);
//mediaFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);

try
{
mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar);

mMediaCodec.configure(mediaFormat,
null,
null,
MediaCodec.CONFIGURE_FLAG_ENCODE);
frameID = 0;
mMediaCodec.start();
}
catch(Exception e)
{
Toast.makeText(getApplicationContext(), "mediaformat error", Toast.LENGTH_LONG).show();
e.printStackTrace();
}

}

/**========================================================================*/
/** Returns the starting index of the first occurrence of the match array within the source array, searching from startIndex. */
public static int find(byte[] source, byte[] match, int startIndex)
{
if(source == null || match == null)
{
Log.d("EncodeDecode", "ERROR in find : null");
return -1;
}
if(source.length == 0 || match.length == 0)
{
Log.d("EncodeDecode", "ERROR in find : length 0");
return -1;
}

// simple forward scan that re-checks from every starting position, so that
// overlapping false starts (e.g. runs of zero bytes before a start code) are not skipped
for (int spos = startIndex; spos <= source.length - match.length; spos++)
{
int mpos = 0;
while (mpos < match.length && source[spos + mpos] == match[mpos])
mpos++;
if (mpos == match.length)
return spos;
}
return -1;
}


/**========================================================================*/
/** For H264 encoding, retrieves the SPS & PPS from the given data and stores them in the global SPS & PPS arrays. */
public static void getSPS_PPS(byte[] data, int startingIndex)
{
byte[] spsHeader = {0x00, 0x00, 0x00, 0x01, 0x67};
byte[] ppsHeader = {0x00, 0x00, 0x00, 0x01, 0x68};
byte[] frameHeader = {0x00, 0x00, 0x00, 0x01};

int spsStartingIndex = -1;
int nextFrameStartingIndex = -1;
int ppsStartingIndex = -1;

spsStartingIndex = find(data, spsHeader, startingIndex);
Log.d("EncodeDecode", "spsStartingIndex: " + spsStartingIndex);
if(spsStartingIndex >= 0)
{
nextFrameStartingIndex = find(data, frameHeader, spsStartingIndex+1);
int spsLength = 0;
if(nextFrameStartingIndex>=0)
spsLength = nextFrameStartingIndex - spsStartingIndex;
else
spsLength = data.length - spsStartingIndex;
if(spsLength > 0)
{
SPS = new byte[spsLength];
System.arraycopy(data, spsStartingIndex, SPS, 0, spsLength);
}
}

ppsStartingIndex = find(data, ppsHeader, startingIndex);
Log.d("EncodeDecode", "ppsStartingIndex: " + ppsStartingIndex);
if(ppsStartingIndex >= 0)
{
nextFrameStartingIndex = find(data, frameHeader, ppsStartingIndex+1);
int ppsLength = 0;
if(nextFrameStartingIndex>=0)
ppsLength = nextFrameStartingIndex - ppsStartingIndex;
else
ppsLength = data.length - ppsStartingIndex;
if(ppsLength > 0)
{
PPS = new byte[ppsLength];
System.arraycopy(data, ppsStartingIndex, PPS, 0, ppsLength);
}
}
}
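
/* Note (an addition, not part of the original post): scanning the byte stream for
 * SPS/PPS works, but MediaCodec also hands them over directly - the encoder's
 * first output buffer is flagged with MediaCodec.BUFFER_FLAG_CODEC_CONFIG, and
 * after INFO_OUTPUT_FORMAT_CHANGED the output format carries them as
 * "csd-0" / "csd-1". */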


/**========================================================================*/
/** Prints the byte array in hex */
private void printByteArray(byte[] array)
{
StringBuilder sb1 = new StringBuilder();
for (byte b : array)
{
sb1.append(String.format("%02X ", b));
}
Log.d("EncodeDecode", sb1.toString());
}

public static byte[] YV12toYUV420PackedSemiPlanar(final byte[] input, final int width, final int height) {
/*
 * COLOR_TI_FormatYUV420PackedSemiPlanar is NV12.
 * We convert by putting the corresponding U and V bytes together (interleaved).
 */
final int frameSize = width * height;
final int qFrameSize = frameSize/4;
byte[] output = new byte[input.length];

System.arraycopy(input, 0, output, 0, frameSize); // Y plane is copied unchanged
for (int i = 0; i < qFrameSize; i++) {
output[frameSize + i*2] = input[frameSize + i + qFrameSize]; // Cb (U)
output[frameSize + i*2 + 1] = input[frameSize + i]; // Cr (V)
}
return output;
}

private byte[] rotateYUV420Degree90(byte[] data, int imageWidth, int imageHeight)
{
// Rotates a semi-planar YUV420 frame by 90 degrees; the result has swapped
// dimensions (imageHeight wide, imageWidth tall).
byte [] yuv = new byte[imageWidth*imageHeight*3/2];
// Rotate the Y luma
int i = 0;
for(int x = 0;x < imageWidth;x++)
{
for(int y = imageHeight-1;y >= 0;y--)
{
yuv[i] = data[y*imageWidth+x];
i++;
}
}
// Rotate the interleaved U and V chroma pairs, filling the output from the end
i = imageWidth*imageHeight*3/2-1;
for(int x = imageWidth-1;x > 0;x=x-2)
{
for(int y = 0;y < imageHeight/2;y++)
{
yuv[i] = data[(imageWidth*imageHeight)+(y*imageWidth)+x];
i--;
yuv[i] = data[(imageWidth*imageHeight)+(y*imageWidth)+(x-1)];
i--;
}
}
return yuv;
}

/**========================================================================*/
/** Called with the frame data whenever the camera delivers a preview frame. Encodes the data and stores the resulting frame in the queue. */
private void encode(byte[] data)
{
Log.d("EncodeDecode", "ENCODE FUNCTION CALLED");
inputBuffers = mMediaCodec.getInputBuffers();
outputBuffers = mMediaCodec.getOutputBuffers();

int inputBufferIndex = mMediaCodec.dequeueInputBuffer(0);
if (inputBufferIndex >= 0)
{
ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
inputBuffer.clear();

// color right, but rotated
byte[] output = YV12toYUV420PackedSemiPlanar(data,320,240);
inputBuffer.put(output);

// color almost right, orientation ok but distorted
/*byte[] output = YV12toYUV420PackedSemiPlanar(data,320,240);
output = rotateYUV420Degree90(output,320,240);
inputBuffer.put(output);*/

// queue the number of bytes actually written, not the ByteBuffer's limit
mMediaCodec.queueInputBuffer(inputBufferIndex, 0 /* offset */, output.length, 0 /* timeUs */, 0);
Log.d("EncodeDecode", "InputBuffer queued");
}
else
{
Log.d("EncodeDecode", "inputBufferIndex < 0, returning null");
return ;
}

MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
int outputBufferIndex = mMediaCodec.dequeueOutputBuffer(bufferInfo, 0);
Log.d("EncodeDecode", "outputBufferIndex = " + outputBufferIndex);
do
{
if (outputBufferIndex >= 0)
{
Frame frame = new Frame(frameID);
ByteBuffer outBuffer = outputBuffers[outputBufferIndex];
byte[] outData = new byte[bufferInfo.size];
byte idrFrameType = 0x65;
int dataLength = 0;

outBuffer.get(outData);

// If SPS & PPS is not ready then
if(ENCODING.equalsIgnoreCase("h264") && ( (SPS == null || SPS.length ==0) || (PPS == null || PPS.length == 0) ) )
getSPS_PPS(outData, 0);

dataLength = outData.length;

// If the frame is an IDR Frame then adding SPS & PPS in front of the actual frame data
if(ENCODING.equalsIgnoreCase("h264") && outData[4] == idrFrameType)
{
int totalDataLength = dataLength + SPS.length + PPS.length;

frame.frameData = new byte[totalDataLength];

System.arraycopy(SPS, 0, frame.frameData, 0, SPS.length);
System.arraycopy(PPS, 0, frame.frameData, SPS.length, PPS.length);
System.arraycopy(outData, 0 , frame.frameData, SPS.length+PPS.length, dataLength);
}
else
{
frame.frameData = new byte[dataLength];
System.arraycopy(outData, 0 , frame.frameData, 0, dataLength);
}

// for testing
Log.d("EncodeDecode" , "Frame no :: " + frameID + " :: frameSize:: " + frame.frameData.length + " :: ");
printByteArray(frame.frameData);

// if encoding type is h264 and sps & pps is ready then, enqueueing the frame in the queue
// if encoding type is h263 then, enqueueing the frame in the queue
if( (ENCODING.equalsIgnoreCase("h264") && SPS != null && PPS != null && SPS.length != 0 && PPS.length != 0) || ENCODING.equalsIgnoreCase("h263") )
{
Log.d("EncodeDecode", "enqueueing frame no: " + (frameID));

try
{
queue.put(frame);
}
catch(InterruptedException e)
{
Log.e("EncodeDecode", "interrupted while waiting");
e.printStackTrace();
}
catch(NullPointerException e)
{
Log.e("EncodeDecode", "frame is null");
e.printStackTrace();
}
catch(IllegalArgumentException e)
{
Log.e("EncodeDecode", "problem inserting in the queue");
e.printStackTrace();
}

Log.d("EncodeDecode", "frame enqueued. queue size now: " + queue.size());

if(firstTime)
{
Log.d("EncodeDecode", "adding a surface to layout for decoder");
SurfaceView sv = new SurfaceView(getApplicationContext());
handler = new Handler();
sv.getHolder().addCallback(MainActivity.this);
sv.setLayoutParams(new android.widget.FrameLayout.LayoutParams(320, 240));
ll.addView(sv,1);
MainActivity.this.setContentView(ll);
firstTime = false;
}
}

frameID++;
mMediaCodec.releaseOutputBuffer(outputBufferIndex, false);
outputBufferIndex = mMediaCodec.dequeueOutputBuffer(bufferInfo, 0);

}
else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED)
{
outputBuffers = mMediaCodec.getOutputBuffers();
Log.e("EncodeDecode","output buffer of encoder : info changed");
}
else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED)
{
Log.e("EncodeDecode","output buffer of encoder : format changed");
}
else
{
Log.e("EncodeDecode", "unknown value of outputBufferIndex : " + outputBufferIndex);
//printByteArray(data);
}
} while (outputBufferIndex >= 0);
}
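
/* Note (an addition, not in the original post): every input buffer above is queued
 * with presentationTimeUs = 0, but encoders generally expect monotonically
 * increasing timestamps. A minimal sketch, assuming the configured 15 fps:
 *
 *     long presentationTimeUs = frameID * 1000000L / 15;
 *     mMediaCodec.queueInputBuffer(inputBufferIndex, 0, output.length, presentationTimeUs, 0);
 */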

private class MySurfaceView extends SurfaceView implements SurfaceHolder.Callback
{
SurfaceHolder holder;
public MySurfaceView(Context context) {
super(context);
holder = this.getHolder();
holder.addCallback(this);
}

public MySurfaceView(Context context, AttributeSet attrs) {
super(context,attrs);
holder = this.getHolder();
holder.addCallback(this);
}

public void surfaceCreated(SurfaceHolder holder) {
try
{
try
{
if(mCamera == null)
mCamera = Camera.open();
mCamera.setDisplayOrientation(90);
Log.d("EncodeDecode","Camera opened");
}
catch (Exception e)
{
Log.d("EncodeDecode","Camera open failed");
e.printStackTrace();
}

Camera.Parameters p = mCamera.getParameters();

if(ENCODING.equalsIgnoreCase("h264"))
p.setPreviewSize(320, 240);
else if(ENCODING.equalsIgnoreCase("h263"))
p.setPreviewSize(352, 288);

mCamera.setParameters(p);
mCamera.setPreviewDisplay(holder);

mCamera.setPreviewCallback(new PreviewCallback()
{
@Override
public void onPreviewFrame(byte[] data, Camera camera)
{
Log.d("EncodeDecode", "onPreviewFrame, calling encode function");
encode(data);
}
});
mCamera.startPreview();
mPreviewRunning = true;
}
catch (IOException e)
{
Log.e("EncodeDecode","surfaceCreated():: in setPreviewDisplay(holder) function");
e.printStackTrace();
}
catch (NullPointerException e)
{
Log.e("EncodeDecode","surfaceCreated Nullpointer");
e.printStackTrace();
}
}

public void surfaceChanged(SurfaceHolder holder, int format, int width, int height)
{
if (mPreviewRunning)
{
mCamera.stopPreview();
Log.e("EncodeDecode","preview stopped");
}
try
{
if(mCamera == null)
{
mCamera = Camera.open();
mCamera.setDisplayOrientation(90);
}

Camera.Parameters p = mCamera.getParameters();
if(ENCODING.equalsIgnoreCase("h264"))
p.setPreviewSize(320, 240);
else if(ENCODING.equalsIgnoreCase("h263"))
p.setPreviewSize(352, 288);

p.setPreviewFormat(ImageFormat.YV12);
mCamera.setParameters(p);
mCamera.setPreviewDisplay(holder);
mCamera.unlock();
mCamera.reconnect();
mCamera.setPreviewCallback(new PreviewCallback()
{
@Override
public void onPreviewFrame(byte[] data, Camera camera)
{
Log.d("EncodeDecode", "onPreviewFrame, calling encode function");
encode(data);
}
});
Log.d("EncodeDecode", "previewCallBack set");
mCamera.startPreview();
mPreviewRunning = true;
}
catch (Exception e)
{
Log.e("EncodeDecode","surface changed:set preview display failed");
e.printStackTrace();
}

}

public void surfaceDestroyed(SurfaceHolder holder)
{

}
}


@Override
public void surfaceCreated(SurfaceHolder holder)
{
Log.d("EncodeDecode", "mainActivity surfaceCreated");
}

@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height)
{
Log.d("EncodeDecode", "mainActivity surfaceChanged.");
if (mPlayer == null)
{
mPlayer = new PlayerThread(holder.getSurface());
mPlayer.start();
Log.d("EncodeDecode", "PlayerThread started");
}
}

@Override
public void surfaceDestroyed(SurfaceHolder holder)
{
if (mPlayer != null)
{
mPlayer.interrupt();
}
}

private class PlayerThread extends Thread
{
//private MediaExtractor extractor;
private MediaCodec decoder;
private Surface surface;

public PlayerThread(Surface surface)
{
this.surface = surface;
}

@Override
public void run()
{
while(SPS == null || PPS == null || SPS.length == 0 || PPS.length == 0)
{
try
{
Log.d("EncodeDecode", "DECODER_THREAD:: sps,pps not ready yet");
sleep(1000);
} catch (InterruptedException e) {
e.printStackTrace();

}
}

Log.d("EncodeDecode", "DECODER_THREAD:: sps,pps READY");

if(ENCODING.equalsIgnoreCase("h264"))
{
decoder = MediaCodec.createDecoderByType("video/avc");
MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", 320, 240);
mediaFormat.setByteBuffer("csd-0", ByteBuffer.wrap(SPS));
mediaFormat.setByteBuffer("csd-1", ByteBuffer.wrap(PPS));
decoder.configure(mediaFormat, surface /* surface */, null /* crypto */, 0 /* flags */);
}
else if(ENCODING.equalsIgnoreCase("h263"))
{
decoder = MediaCodec.createDecoderByType("video/3gpp");
MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/3gpp", 352, 288);
decoder.configure(mediaFormat, surface /* surface */, null /* crypto */, 0 /* flags */);
}

if (decoder == null)
{
Log.e("DecodeActivity", "DECODER_THREAD:: Can't find video info!");
return;
}

decoder.start();
Log.d("EncodeDecode", "DECODER_THREAD:: decoder.start() called");

ByteBuffer[] inputBuffers = decoder.getInputBuffers();
ByteBuffer[] outputBuffers = decoder.getOutputBuffers();


int i = 0;
while(!Thread.interrupted())
{
Frame currentFrame = null;
try
{
Log.d("EncodeDecode", "DECODER_THREAD:: calling queue.take(), if there is no frame in the queue it will wait");
currentFrame = queue.take();
}
catch (InterruptedException e)
{
Log.e("EncodeDecode","DECODER_THREAD:: interrupted while PlayerThread was waiting for the next frame");
e.printStackTrace();
}

if(currentFrame == null)
Log.e("EncodeDecode","DECODER_THREAD:: null frame dequeued");
else
Log.d("EncodeDecode","DECODER_THREAD:: frame no " + currentFrame.id + " dequeued");

if(currentFrame != null && currentFrame.frameData != null && currentFrame.frameData.length != 0)
{
Log.d("EncodeDecode", "DECODER_THREAD:: decoding frame no: " + i + " , dataLength = " + currentFrame.frameData.length);

int inIndex = 0;
while ((inIndex = decoder.dequeueInputBuffer(1)) < 0)
;

if (inIndex >= 0)
{
Log.d("EncodeDecode", "DECODER_THREAD:: sample size: " + currentFrame.frameData.length);

ByteBuffer buffer = inputBuffers[inIndex];
buffer.clear();
buffer.put(currentFrame.frameData);
decoder.queueInputBuffer(inIndex, 0, currentFrame.frameData.length, 0, 0);

BufferInfo info = new BufferInfo();
int outIndex = decoder.dequeueOutputBuffer(info, 100000);

switch (outIndex)
{
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
Log.e("EncodeDecode", "DECODER_THREAD:: INFO_OUTPUT_BUFFERS_CHANGED");
outputBuffers = decoder.getOutputBuffers();
break;
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
Log.e("EncodeDecode", "DECODER_THREAD:: New format " + decoder.getOutputFormat());

break;
case MediaCodec.INFO_TRY_AGAIN_LATER:
Log.e("EncodeDecode", "DECODER_THREAD:: dequeueOutputBuffer timed out!");
break;
default:
Log.d("EncodeDecode", "DECODER_THREAD:: decoded SUCCESSFULLY!!!");
ByteBuffer outbuffer = outputBuffers[outIndex];
decoder.releaseOutputBuffer(outIndex, true);
break;
}
i++;
}
}
}

decoder.stop();
decoder.release();

}
}
}

Best Answer

I ran into the same problem in portrait mode while building an app that broadcasts camera frames live over RTMP, and I was able to solve it with a TextureView. I don't rotate the frames on the sending side at all; instead, on the receiving side I rotate the TextureView that the media player renders into and resize it.

My code is as follows.

<?xml version="1.0" encoding="utf-8"?>
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent">

<TextureView
android:id="@+id/videoView"
android:layout_width="match_parent"
android:layout_height="match_parent" />
</FrameLayout>

 private void updateTextureViewSize(int viewWidth, int viewHeight) {
int pivotPointX = viewWidth / 2;
int pivotPointY = viewHeight / 2;

Matrix matrix = new Matrix();

if(isLandscapeOrientation) {
matrix.preRotate(0);
matrix.setScale(1.0f, 1.0f, pivotPointX, pivotPointY);
videoView.setTransform(matrix);
videoView.setLayoutParams(new FrameLayout.LayoutParams(viewWidth, viewHeight));
} else {
matrix.preRotate(0);
matrix.setScale(1.0f, 1.0f, pivotPointX, pivotPointY);
videoView.setRotation(90);
videoView.setTranslationX(-viewWidth / 2);
videoView.setTranslationY(-viewHeight / 2);
videoView.setLayoutParams(new FrameLayout.LayoutParams(viewWidth * 2, viewHeight * 2));
}

}

private TextureView.SurfaceTextureListener surfaceTextureListener = new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
Surface s = new Surface(surface);
if(mMediaPlayer != null) {
mMediaPlayer.setSurface(s);

DisplayMetrics displaymetrics = new DisplayMetrics();
getWindowManager().getDefaultDisplay().getMetrics(displaymetrics);
int sh = displaymetrics.heightPixels;
int sw = displaymetrics.widthPixels;

updateTextureViewSize(sw, sh);

}

}

@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {

}

@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
return false;
}

@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
}
};
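
For completeness, here is a minimal sketch (mine, not from the original answer) of how the listener above would be wired up; videoView and mMediaPlayer are the answerer's fields, and the layout resource name is hypothetical:

private TextureView videoView;

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_player); // the FrameLayout/TextureView XML above
    videoView = (TextureView) findViewById(R.id.videoView);
    videoView.setSurfaceTextureListener(surfaceTextureListener);
}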

Regarding "android - MediaCodec video stream from camera has wrong orientation and colors", we found a similar question on Stack Overflow: https://stackoverflow.com/questions/21131661/
