
android - Apply effects on a video being played


I am new to the OpenGL world and have googled a lot, but I could not find a way to apply effects to a video while it is playing. After some research I finally found a class that can be used to play video on a GLSurfaceView, and I know from the Google documentation that effects can be applied to a video.

By following this post I was able to successfully apply effects to a bitmap. Now I want to do the same for my video, so any help or pointers are much appreciated.

Here is the VideoSurfaceView I use to render the playing video:

package me.crossle.demo.surfacetexture;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

import android.annotation.SuppressLint;
import android.content.Context;
import android.graphics.SurfaceTexture;
import android.media.MediaPlayer;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.opengl.Matrix;
import android.util.Log;
import android.view.Surface;

@SuppressLint("ViewConstructor")
class VideoSurfaceView extends GLSurfaceView {

VideoRender mRenderer;
private MediaPlayer mMediaPlayer = null;

public VideoSurfaceView(Context context, MediaPlayer mp) {
super(context);

// Request an OpenGL ES 2.0 context; must be called before setRenderer().
setEGLContextClientVersion(2);
mMediaPlayer = mp;
mRenderer = new VideoRender(context);
setRenderer(mRenderer);
}

@Override
public void onResume() {
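// Hand the MediaPlayer to the renderer on the GL thread before rendering resumes.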
queueEvent(new Runnable(){
public void run() {
mRenderer.setMediaPlayer(mMediaPlayer);
}});

super.onResume();
}

private static class VideoRender
implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {
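// Renders MediaPlayer output: decoded frames arrive in a SurfaceTexture and
// are drawn onto a full-screen quad through an external OES texture.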
private static String TAG = "VideoRender";

private static final int FLOAT_SIZE_BYTES = 4;
private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
private final float[] mTriangleVerticesData = {
// X, Y, Z, U, V
-1.0f, -1.0f, 0, 0.f, 0.f,
1.0f, -1.0f, 0, 1.f, 0.f,
-1.0f, 1.0f, 0, 0.f, 1.f,
1.0f, 1.0f, 0, 1.f, 1.f,
};

private FloatBuffer mTriangleVertices;

private final String mVertexShader =
"uniform mat4 uMVPMatrix;\n" +
"uniform mat4 uSTMatrix;\n" +
"attribute vec4 aPosition;\n" +
"attribute vec4 aTextureCoord;\n" +
"varying vec2 vTextureCoord;\n" +
"void main() {\n" +
" gl_Position = uMVPMatrix * aPosition;\n" +
" vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
"}\n";

private final String mFragmentShader =
"#extension GL_OES_EGL_image_external : require\n" +
"precision mediump float;\n" +
"varying vec2 vTextureCoord;\n" +
"uniform samplerExternalOES sTexture;\n" +
"void main() {\n" +
" gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
"}\n";

private float[] mMVPMatrix = new float[16];
private float[] mSTMatrix = new float[16];

private int mProgram;
private int mTextureID;
private int muMVPMatrixHandle;
private int muSTMatrixHandle;
private int maPositionHandle;
private int maTextureHandle;

private SurfaceTexture mSurface;
private boolean updateSurface = false;

// Not defined in GLES20; same value as GLES11Ext.GL_TEXTURE_EXTERNAL_OES.
private static final int GL_TEXTURE_EXTERNAL_OES = 0x8D65;

private MediaPlayer mMediaPlayer;

public VideoRender(Context context) {
mTriangleVertices = ByteBuffer.allocateDirect(
mTriangleVerticesData.length * FLOAT_SIZE_BYTES)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
mTriangleVertices.put(mTriangleVerticesData).position(0);

Matrix.setIdentityM(mSTMatrix, 0);
}

public void setMediaPlayer(MediaPlayer player) {
mMediaPlayer = player;
}

@Override
public void onDrawFrame(GL10 glUnused) {
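// Latch the most recent decoded frame. updateTexImage() may only be called
// on the thread that owns the GL context, so the frame-available callback
// just sets a flag and the actual update happens here.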
synchronized(this) {
if (updateSurface) {
mSurface.updateTexImage();
mSurface.getTransformMatrix(mSTMatrix);
updateSurface = false;
}
}

GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
GLES20.glClear( GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);

GLES20.glUseProgram(mProgram);
checkGlError("glUseProgram");

GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mTextureID);

mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false,
TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
checkGlError("glVertexAttribPointer maPosition");
GLES20.glEnableVertexAttribArray(maPositionHandle);
checkGlError("glEnableVertexAttribArray maPositionHandle");

mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
// The UV data has 2 components per vertex; the original sample passed 3 here,
// which reads past the end of the buffer on the last vertex.
GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false,
TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
checkGlError("glVertexAttribPointer maTextureHandle");
GLES20.glEnableVertexAttribArray(maTextureHandle);
checkGlError("glEnableVertexAttribArray maTextureHandle");

Matrix.setIdentityM(mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);

GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
checkGlError("glDrawArrays");
GLES20.glFinish();

}

@Override
public void onSurfaceChanged(GL10 glUnused, int width, int height) {

}

@Override
public void onSurfaceCreated(GL10 glUnused, EGLConfig config) {
mProgram = createProgram(mVertexShader, mFragmentShader);
if (mProgram == 0) {
return;
}
maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
checkGlError("glGetAttribLocation aPosition");
if (maPositionHandle == -1) {
throw new RuntimeException("Could not get attrib location for aPosition");
}
maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
checkGlError("glGetAttribLocation aTextureCoord");
if (maTextureHandle == -1) {
throw new RuntimeException("Could not get attrib location for aTextureCoord");
}

muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
checkGlError("glGetUniformLocation uMVPMatrix");
if (muMVPMatrixHandle == -1) {
throw new RuntimeException("Could not get attrib location for uMVPMatrix");
}

muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
checkGlError("glGetUniformLocation uSTMatrix");
if (muSTMatrixHandle == -1) {
throw new RuntimeException("Could not get attrib location for uSTMatrix");
}


// MediaPlayer output can only be sampled through an external OES texture,
// which is why the fragment shader declares samplerExternalOES.
int[] textures = new int[1];
GLES20.glGenTextures(1, textures, 0);

mTextureID = textures[0];
GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mTextureID);
checkGlError("glBindTexture mTextureID");

GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
GLES20.GL_NEAREST);
GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
GLES20.GL_LINEAR);

/*
* Create the SurfaceTexture that will feed this textureID,
* and pass it to the MediaPlayer
*/
mSurface = new SurfaceTexture(mTextureID);
mSurface.setOnFrameAvailableListener(this);

Surface surface = new Surface(mSurface);
mMediaPlayer.setSurface(surface);
mMediaPlayer.setScreenOnWhilePlaying(true);
// MediaPlayer keeps its own reference, so our wrapper can be released now.
surface.release();

try {
mMediaPlayer.prepare();
} catch (IOException t) {
Log.e(TAG, "media player prepare failed", t);
}

synchronized(this) {
updateSurface = false;
}

mMediaPlayer.start();
}

// Called on an arbitrary thread, so only a flag is set here; the actual
// updateTexImage() happens on the GL thread in onDrawFrame().
synchronized public void onFrameAvailable(SurfaceTexture surface) {
updateSurface = true;
}

private int loadShader(int shaderType, String source) {
int shader = GLES20.glCreateShader(shaderType);
if (shader != 0) {
GLES20.glShaderSource(shader, source);
GLES20.glCompileShader(shader);
int[] compiled = new int[1];
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
if (compiled[0] == 0) {
Log.e(TAG, "Could not compile shader " + shaderType + ":");
Log.e(TAG, GLES20.glGetShaderInfoLog(shader));
GLES20.glDeleteShader(shader);
shader = 0;
}
}
return shader;
}

private int createProgram(String vertexSource, String fragmentSource) {
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
if (vertexShader == 0) {
return 0;
}
int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
if (pixelShader == 0) {
return 0;
}

int program = GLES20.glCreateProgram();
if (program != 0) {
GLES20.glAttachShader(program, vertexShader);
checkGlError("glAttachShader");
GLES20.glAttachShader(program, pixelShader);
checkGlError("glAttachShader");
GLES20.glLinkProgram(program);
int[] linkStatus = new int[1];
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
if (linkStatus[0] != GLES20.GL_TRUE) {
Log.e(TAG, "Could not link program: ");
Log.e(TAG, GLES20.glGetProgramInfoLog(program));
GLES20.glDeleteProgram(program);
program = 0;
}
}
return program;
}

private void checkGlError(String op) {
int error;
while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
Log.e(TAG, op + ": glError " + error);
throw new RuntimeException(op + ": glError " + error);
}
}

} // End of class VideoRender.

} // End of class VideoSurfaceView.

Here is my MainActivity:

package me.crossle.demo.surfacetexture;

import java.io.File;

import android.app.Activity;
import android.content.res.AssetFileDescriptor;
import android.content.res.Resources;
import android.media.MediaPlayer;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;

public class MainActivity extends Activity {

private static final String TAG = "MainActivity";

protected Resources mResources;

private VideoSurfaceView mVideoView = null;
private MediaPlayer mMediaPlayer = null;

@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);

mResources = getResources();
mMediaPlayer = new MediaPlayer();

try {
// Reading from the public Downloads directory requires the
// READ_EXTERNAL_STORAGE permission in the manifest.
File dir = Environment
.getExternalStoragePublicDirectory(Environment.DIRECTORY_DOWNLOADS);

File file = new File(dir, "video.mp4");
mMediaPlayer.setDataSource(file.getAbsolutePath());

} catch (Exception e) {
Log.e(TAG, e.getMessage(), e);
}

mVideoView = new VideoSurfaceView(this, mMediaPlayer);
setContentView(mVideoView);

}

@Override
protected void onResume() {
super.onResume();
mVideoView.onResume();
}
}

Best Answer

I have solved this problem and am posting the answer in case someone else is also looking for a way to apply different filters to their videos.

After Lunero and Fadden pointed me in the right direction, I can now apply almost all of the EffectFactory effects to a playing video. Note that these effects are for preview purposes only and do not change the original video, but they still do the job for me.

What I did was change the fragment shader code applied to the video being rendered; by swapping in different shaders I was able to achieve different effects.

Here is the code for some of the fragment shaders; a sketch of how to swap them in at runtime follows after the screenshots.

Black and White Effect

String fragmentShader = "#extension GL_OES_EGL_image_external : require\n"
+ "precision mediump float;\n"
+ "varying vec2 vTextureCoord;\n"
+ "uniform samplerExternalOES sTexture;\n"
+ "void main() {\n"
+ " vec4 color = texture2D(sTexture, vTextureCoord);\n"
+ " float colorR = (color.r + color.g + color.b) / 3.0;\n"
+ " float colorG = (color.r + color.g + color.b) / 3.0;\n"
+ " float colorB = (color.r + color.g + color.b) / 3.0;\n"
+ " gl_FragColor = vec4(colorR, colorG, colorB, color.a);\n"
+ "}\n";

Negative Effect

String fragmentShader = "#extension GL_OES_EGL_image_external : require\n"
+ "precision mediump float;\n"
+ "varying vec2 vTextureCoord;\n"
+ "uniform samplerExternalOES sTexture;\n"
+ "void main() {\n"
+ " vec4 color = texture2D(sTexture, vTextureCoord);\n"
+ " float colorR = (1.0 - color.r) / 1.0;\n"
+ " float colorG = (1.0 - color.g) / 1.0;\n"
+ " float colorB = (1.0 - color.b) / 1.0;\n"
+ " gl_FragColor = vec4(colorR, colorG, colorB, color.a);\n"
+ "}\n";

Original video without any effect:

[screenshot]

Video with the Black and White effect:

[screenshot]

Video with the Negative effect:

[screenshot]
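
The answer swaps effects by editing the shader source in the code. If you want to switch shaders at runtime instead, one option is to rebuild the GL program on the GL thread. This is only a sketch of how the code above could be extended; the method names setFragmentShader and changeFragmentShader are my own and not part of the original sample:

// In VideoSurfaceView: forward the new shader source to the renderer on the GL thread.
public void setFragmentShader(final String fragmentSource) {
queueEvent(new Runnable() {
public void run() {
mRenderer.changeFragmentShader(fragmentSource);
}
});
}

// In VideoRender: delete the old program and link a new one with the same
// vertex shader. Attribute and uniform locations belong to the program, so
// they must be queried again afterwards.
void changeFragmentShader(String fragmentSource) {
GLES20.glDeleteProgram(mProgram);
mProgram = createProgram(mVertexShader, fragmentSource);
maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
}

With that in place, applying any of the shader strings above is a single call such as mVideoView.setFragmentShader(fragmentShader).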

If you want to apply more effects, then I suggest you check out VidEffects on GitHub. It will help you apply many different effects to your videos.

Regarding android - apply effects on a video being played, a similar question can be found on Stack Overflow: https://stackoverflow.com/questions/31805837/
