
android - GLSurfaceView shows a black screen when taking a screenshot


I have spent about seven hours trying to solve this problem. I created a project in which I display an image on a GLSurfaceView and then apply effects to it with the EffectFactory class.

My problem is that when I take a screenshot of the view with the effect applied, it always comes out as a black screen instead of the image. I know this is probably because of the SurfaceView, but any suggestions would help.

Thanks in advance.

Here is my code.

public class EffectsFilterActivity extends Activity implements
GLSurfaceView.Renderer {

private GLSurfaceView mEffectView;
private int[] mTextures = new int[2];
private EffectContext mEffectContext;
private Effect mEffect;
private TextureRenderer mTexRenderer = new TextureRenderer();
private int mImageWidth;
private int mImageHeight;
private boolean mInitialized = false;
int mCurrentEffect;

public void setCurrentEffect(int effect) {
mCurrentEffect = effect;
}

@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
setContentView(R.layout.main);

Button b = (Button) findViewById(R.id.button1);
b.setOnClickListener(new OnClickListener() {

@Override
public void onClick(View v) {
// TODO Auto-generated method stub
Bitmap b = takeScreenshot();
saveBitmap(b);

}
});

mEffectView = (GLSurfaceView) findViewById(R.id.effectsview);
mEffectView.setEGLContextClientVersion(2);
mEffectView.setRenderer(this);
mEffectView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
mCurrentEffect = R.id.none;
}

private void loadTextures() {
// Generate textures
GLES20.glGenTextures(2, mTextures, 0);

// Load input bitmap
Bitmap bitmap = BitmapFactory.decodeResource(getResources(),
R.drawable.puppy);
mImageWidth = bitmap.getWidth();
mImageHeight = bitmap.getHeight();
mTexRenderer.updateTextureSize(mImageWidth, mImageHeight);

// Upload to texture
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextures[0]);
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);

// Set texture parameters
GLToolbox.initTexParams();
}

private void initEffect() {
EffectFactory effectFactory = mEffectContext.getFactory();
if (mEffect != null) {
mEffect.release();
}
/**
* Initialize the correct effect based on the selected menu/action item
*/
switch (mCurrentEffect) {

case R.id.none:
break;

case R.id.autofix:
mEffect = effectFactory.createEffect(EffectFactory.EFFECT_AUTOFIX);
mEffect.setParameter("scale", 0.5f);
break;

case R.id.bw:
mEffect = effectFactory
.createEffect(EffectFactory.EFFECT_BLACKWHITE);
mEffect.setParameter("black", .1f);
mEffect.setParameter("white", .7f);
break;

case R.id.brightness:
mEffect = effectFactory
.createEffect(EffectFactory.EFFECT_BRIGHTNESS);
mEffect.setParameter("brightness", 2.0f);
break;

case R.id.contrast:
mEffect = effectFactory.createEffect(EffectFactory.EFFECT_CONTRAST);
mEffect.setParameter("contrast", 1.4f);
break;

case R.id.crossprocess:
mEffect = effectFactory
.createEffect(EffectFactory.EFFECT_CROSSPROCESS);
break;

case R.id.documentary:
mEffect = effectFactory
.createEffect(EffectFactory.EFFECT_DOCUMENTARY);
break;

case R.id.duotone:
mEffect = effectFactory.createEffect(EffectFactory.EFFECT_DUOTONE);
mEffect.setParameter("first_color", Color.YELLOW);
mEffect.setParameter("second_color", Color.DKGRAY);
break;

case R.id.filllight:
mEffect = effectFactory
.createEffect(EffectFactory.EFFECT_FILLLIGHT);
mEffect.setParameter("strength", .8f);
break;

case R.id.fisheye:
mEffect = effectFactory.createEffect(EffectFactory.EFFECT_FISHEYE);
mEffect.setParameter("scale", .5f);
break;

case R.id.flipvert:
mEffect = effectFactory.createEffect(EffectFactory.EFFECT_FLIP);
mEffect.setParameter("vertical", true);
break;

case R.id.fliphor:
mEffect = effectFactory.createEffect(EffectFactory.EFFECT_FLIP);
mEffect.setParameter("horizontal", true);
break;

case R.id.grain:
mEffect = effectFactory.createEffect(EffectFactory.EFFECT_GRAIN);
mEffect.setParameter("strength", 1.0f);
break;

case R.id.grayscale:
mEffect = effectFactory
.createEffect(EffectFactory.EFFECT_GRAYSCALE);
break;

case R.id.lomoish:
mEffect = effectFactory.createEffect(EffectFactory.EFFECT_LOMOISH);
break;

case R.id.negative:
mEffect = effectFactory.createEffect(EffectFactory.EFFECT_NEGATIVE);
break;

case R.id.posterize:
mEffect = effectFactory
.createEffect(EffectFactory.EFFECT_POSTERIZE);
break;

case R.id.rotate:
mEffect = effectFactory.createEffect(EffectFactory.EFFECT_ROTATE);
mEffect.setParameter("angle", 180);
break;

case R.id.saturate:
mEffect = effectFactory.createEffect(EffectFactory.EFFECT_SATURATE);
mEffect.setParameter("scale", .5f);
break;

case R.id.sepia:
mEffect = effectFactory.createEffect(EffectFactory.EFFECT_SEPIA);
break;

case R.id.sharpen:
mEffect = effectFactory.createEffect(EffectFactory.EFFECT_SHARPEN);
break;

case R.id.temperature:
mEffect = effectFactory
.createEffect(EffectFactory.EFFECT_TEMPERATURE);
mEffect.setParameter("scale", .9f);
break;

case R.id.tint:
mEffect = effectFactory.createEffect(EffectFactory.EFFECT_TINT);
mEffect.setParameter("tint", Color.MAGENTA);
break;

case R.id.vignette:
mEffect = effectFactory.createEffect(EffectFactory.EFFECT_VIGNETTE);
mEffect.setParameter("scale", .5f);
break;

default:
break;

}
}

private void applyEffect() {
mEffect.apply(mTextures[0], mImageWidth, mImageHeight, mTextures[1]);
}

private void renderResult() {
if (mCurrentEffect != R.id.none) {
// render the result of applyEffect()
mTexRenderer.renderTexture(mTextures[1]);
} else {
// if no effect is chosen, just render the original bitmap
mTexRenderer.renderTexture(mTextures[0]);
}
}

@Override
public void onDrawFrame(GL10 gl) {
if (!mInitialized) {
// Only need to do this once
mEffectContext = EffectContext.createWithCurrentGlContext();
mTexRenderer.init();
loadTextures();
mInitialized = true;
}
if (mCurrentEffect != R.id.none) {
// if an effect is chosen initialize it and apply it to the texture
initEffect();
applyEffect();
}
renderResult();
}

@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
if (mTexRenderer != null) {
mTexRenderer.updateViewSize(width, height);
}
}

@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
}

@Override
public boolean onCreateOptionsMenu(Menu menu) {
MenuInflater inflater = getMenuInflater();
inflater.inflate(R.menu.main, menu);
return true;
}

@Override
public boolean onOptionsItemSelected(MenuItem item) {
setCurrentEffect(item.getItemId());
mEffectView.requestRender();
return true;
}

public Bitmap takeScreenshot() {

mEffectView.getRootView();
mEffectView.setDrawingCacheEnabled(true);
mEffectView.buildDrawingCache();
mEffectView.requestRender();
return mEffectView.getDrawingCache();
}

public void saveBitmap(Bitmap bitmap) {
File imagePath = new File(Environment.getExternalStorageDirectory()
+ "/Piyush.png");
FileOutputStream fos;
try {
fos = new FileOutputStream(imagePath);
bitmap.compress(CompressFormat.PNG, 100, fos); // PNG to match the .png file name
fos.flush();
fos.close();
} catch (FileNotFoundException e) {
Log.e("GREC", e.getMessage(), e);
} catch (IOException e) {
Log.e("GREC", e.getMessage(), e);
}
}
}

GLToolbox.java

 public class GLToolbox {

public static int loadShader(int shaderType, String source) {
int shader = GLES20.glCreateShader(shaderType);
if (shader != 0) {
GLES20.glShaderSource(shader, source);
GLES20.glCompileShader(shader);
int[] compiled = new int[1];
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
if (compiled[0] == 0) {
String info = GLES20.glGetShaderInfoLog(shader);
GLES20.glDeleteShader(shader);
shader = 0;
throw new RuntimeException("Could not compile shader "
+ shaderType + ":" + info);
}
}
return shader;
}

public static int createProgram(String vertexSource, String fragmentSource) {
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
if (vertexShader == 0) {
return 0;
}
int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
if (pixelShader == 0) {
return 0;
}

int program = GLES20.glCreateProgram();
if (program != 0) {
GLES20.glAttachShader(program, vertexShader);
checkGlError("glAttachShader");
GLES20.glAttachShader(program, pixelShader);
checkGlError("glAttachShader");
GLES20.glLinkProgram(program);
int[] linkStatus = new int[1];
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
if (linkStatus[0] != GLES20.GL_TRUE) {
String info = GLES20.glGetProgramInfoLog(program);
GLES20.glDeleteProgram(program);
program = 0;
throw new RuntimeException("Could not link program: " + info);
}
}
return program;
}

public static void checkGlError(String op) {
int error;
while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
throw new RuntimeException(op + ": glError " + error);
}
}

public static void initTexParams() {
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S,
GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T,
GLES20.GL_CLAMP_TO_EDGE);
}

}

TextureRenderer.java

public class TextureRenderer {

private int mProgram;
private int mTexSamplerHandle;
private int mTexCoordHandle;
private int mPosCoordHandle;

private FloatBuffer mTexVertices;
private FloatBuffer mPosVertices;

private int mViewWidth;
private int mViewHeight;

private int mTexWidth;
private int mTexHeight;

private static final String VERTEX_SHADER = "attribute vec4 a_position;\n"
+ "attribute vec2 a_texcoord;\n" + "varying vec2 v_texcoord;\n"
+ "void main() {\n" + " gl_Position = a_position;\n"
+ " v_texcoord = a_texcoord;\n" + "}\n";

private static final String FRAGMENT_SHADER = "precision mediump float;\n"
+ "uniform sampler2D tex_sampler;\n" + "varying vec2 v_texcoord;\n"
+ "void main() {\n"
+ " gl_FragColor = texture2D(tex_sampler, v_texcoord);\n" + "}\n";

private static final float[] TEX_VERTICES = { 0.0f, 1.0f, 1.0f, 1.0f, 0.0f,
0.0f, 1.0f, 0.0f };

private static final float[] POS_VERTICES = { -1.0f, -1.0f, 1.0f, -1.0f,
-1.0f, 1.0f, 1.0f, 1.0f };

private static final int FLOAT_SIZE_BYTES = 4;

public void init() {
// Create program
mProgram = GLToolbox.createProgram(VERTEX_SHADER, FRAGMENT_SHADER);

// Bind attributes and uniforms
mTexSamplerHandle = GLES20
.glGetUniformLocation(mProgram, "tex_sampler");
mTexCoordHandle = GLES20.glGetAttribLocation(mProgram, "a_texcoord");
mPosCoordHandle = GLES20.glGetAttribLocation(mProgram, "a_position");

// Setup coordinate buffers
mTexVertices = ByteBuffer
.allocateDirect(TEX_VERTICES.length * FLOAT_SIZE_BYTES)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
mTexVertices.put(TEX_VERTICES).position(0);
mPosVertices = ByteBuffer
.allocateDirect(POS_VERTICES.length * FLOAT_SIZE_BYTES)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
mPosVertices.put(POS_VERTICES).position(0);
}

public void tearDown() {
GLES20.glDeleteProgram(mProgram);
}

public void updateTextureSize(int texWidth, int texHeight) {
mTexWidth = texWidth;
mTexHeight = texHeight;
computeOutputVertices();
}

public void updateViewSize(int viewWidth, int viewHeight) {
mViewWidth = viewWidth;
mViewHeight = viewHeight;
computeOutputVertices();
}

public void renderTexture(int texId) {
// Bind default FBO
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);

// Use our shader program
GLES20.glUseProgram(mProgram);
GLToolbox.checkGlError("glUseProgram");

// Set viewport
GLES20.glViewport(0, 0, mViewWidth, mViewHeight);
GLToolbox.checkGlError("glViewport");

// Disable blending
GLES20.glDisable(GLES20.GL_BLEND);

// Set the vertex attributes
GLES20.glVertexAttribPointer(mTexCoordHandle, 2, GLES20.GL_FLOAT,
false, 0, mTexVertices);
GLES20.glEnableVertexAttribArray(mTexCoordHandle);
GLES20.glVertexAttribPointer(mPosCoordHandle, 2, GLES20.GL_FLOAT,
false, 0, mPosVertices);
GLES20.glEnableVertexAttribArray(mPosCoordHandle);
GLToolbox.checkGlError("vertex attribute setup");

// Set the input texture
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLToolbox.checkGlError("glActiveTexture");
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texId);
GLToolbox.checkGlError("glBindTexture");
GLES20.glUniform1i(mTexSamplerHandle, 0);

// Draw
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
}

private void computeOutputVertices() {
if (mPosVertices != null) {
float imgAspectRatio = mTexWidth / (float) mTexHeight;
float viewAspectRatio = mViewWidth / (float) mViewHeight;
float relativeAspectRatio = viewAspectRatio / imgAspectRatio;
float x0, y0, x1, y1;
if (relativeAspectRatio > 1.0f) {
x0 = -1.0f / relativeAspectRatio;
y0 = -1.0f;
x1 = 1.0f / relativeAspectRatio;
y1 = 1.0f;
} else {
x0 = -1.0f;
y0 = -relativeAspectRatio;
x1 = 1.0f;
y1 = relativeAspectRatio;
}
float[] coords = new float[] { x0, y0, x1, y0, x0, y1, x1, y1 };
mPosVertices.put(coords).position(0);
}
}
}

My XML file:

<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:orientation="vertical"
tools:ignore="HardcodedText" >

<Button
android:id="@+id/button1"
android:layout_width="wrap_content"
android:layout_height="0dp"
android:layout_weight="0.1"
android:text="Button" />

<android.opengl.GLSurfaceView
android:id="@+id/effectsview"
android:layout_width="fill_parent"
android:layout_height="0dp"
android:layout_weight="0.9" />

</LinearLayout>

Best Answer

Try this. The View drawing cache only captures what the normal UI toolkit draws, while a GLSurfaceView renders with OpenGL into its own surface, so getDrawingCache() comes back black. Read the pixels back with glReadPixels() instead, from inside onDrawFrame() on the renderer thread (where the GL context is current), and trigger it from the button with a flag plus requestRender():

private volatile boolean saveFrame;

// called when Capture button is clicked.
public void onClick(View view) {
saveFrame = true;
mEffectView.requestRender();
}

@Override
public void onDrawFrame(GL10 gl) {
if (!mInitialized) {
// Only need to do this once
mEffectContext = EffectContext.createWithCurrentGlContext();
mTexRenderer.init();
loadTextures();
mInitialized = true;
}
if (mCurrentEffect != R.id.none) {
// if an effect is chosen initialize it and apply it to the texture
initEffect();
applyEffect();
}
renderResult();
if (saveFrame) {
saveBitmap(takeScreenshot(gl));
saveFrame = false;
}
}

private void saveBitmap(Bitmap bitmap) {
try {
FileOutputStream stream = openFileOutput("image.png", MODE_PRIVATE);
bitmap.compress(CompressFormat.PNG, 100, stream);
stream.flush();
stream.close();
Log.i("TAG", "SAVED");
} catch (Exception e) {
Log.e("TAG", e.toString(), e);
}
}
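Note that openFileOutput("image.png", MODE_PRIVATE) saves to the app's private internal storage (the directory returned by getFilesDir()), not to the external storage path used by the original saveBitmap(), so look for the file there or swap the stream back if you still want it on external storage.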

public Bitmap takeScreenshot(GL10 mGL) {
final int mWidth = mEffectView.getWidth();
final int mHeight = mEffectView.getHeight();
IntBuffer ib = IntBuffer.allocate(mWidth * mHeight);
IntBuffer ibt = IntBuffer.allocate(mWidth * mHeight);
mGL.glReadPixels(0, 0, mWidth, mHeight, GL10.GL_RGBA, GL10.GL_UNSIGNED_BYTE, ib);

// glReadPixels returns the image bottom-up; flip the rows to get a normal top-down bitmap.
for (int i = 0; i < mHeight; i++) {
for (int j = 0; j < mWidth; j++) {
ibt.put((mHeight - i - 1) * mWidth + j, ib.get(i * mWidth + j));
}
}

Bitmap mBitmap = Bitmap.createBitmap(mWidth, mHeight,Bitmap.Config.ARGB_8888);
mBitmap.copyPixelsFromBuffer(ibt);
return mBitmap;
}
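As a side note, the per-pixel flip loop can be replaced by mirroring the bitmap with a Matrix. The sketch below is only an alternative under the same assumptions as the answer (same mEffectView field, called from onDrawFrame() with the GL10 passed in, android.graphics.Matrix imported); the method name takeScreenshotWithMatrix is made up for illustration. It relies on ARGB_8888 bitmaps being laid out as RGBA bytes in memory, which is also why copyPixelsFromBuffer() accepts the raw glReadPixels() output directly.

public Bitmap takeScreenshotWithMatrix(GL10 mGL) {
    final int mWidth = mEffectView.getWidth();
    final int mHeight = mEffectView.getHeight();

    // Read the framebuffer; the rows come back bottom-up.
    IntBuffer ib = IntBuffer.allocate(mWidth * mHeight);
    mGL.glReadPixels(0, 0, mWidth, mHeight, GL10.GL_RGBA, GL10.GL_UNSIGNED_BYTE, ib);

    // Copy the raw RGBA pixels straight into an ARGB_8888 bitmap.
    Bitmap raw = Bitmap.createBitmap(mWidth, mHeight, Bitmap.Config.ARGB_8888);
    raw.copyPixelsFromBuffer(ib);

    // Mirror vertically to turn the bottom-up GL image into a normal bitmap.
    Matrix flip = new Matrix();
    flip.preScale(1.0f, -1.0f);
    Bitmap upright = Bitmap.createBitmap(raw, 0, 0, mWidth, mHeight, flip, false);
    raw.recycle();
    return upright;
}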

Regarding "android - GLSurfaceView shows a black screen when taking a screenshot", we found a similar question on Stack Overflow: https://stackoverflow.com/questions/21753519/
