I want to create a new filter for GPUImage that takes more than two input textures, along the lines of GPUImageTwoInputFilter.
Here is my code. The base class, named IFFourInputFilter, is closely modeled on GPUImageTwoInputFilter.
#import "IFFourInputFilter.h"
NSString *const kIFFourInputTextureVertexShaderString = SHADER_STRING
(
attribute vec4 position;
attribute vec4 inputTextureCoordinate;
attribute vec4 inputTextureCoordinate2;
attribute vec4 inputTextureCoordinate3;
attribute vec4 inputTextureCoordinate4;
varying vec2 textureCoordinate;
varying vec2 textureCoordinate2;
varying vec2 textureCoordinate3;
varying vec2 textureCoordinate4;
void main()
{
gl_Position = position;
textureCoordinate = inputTextureCoordinate.xy;
textureCoordinate2 = inputTextureCoordinate2.xy;
textureCoordinate3 = inputTextureCoordinate3.xy;
textureCoordinate4 = inputTextureCoordinate4.xy;
}
);
@implementation IFFourInputFilter
#pragma mark -
#pragma mark Initialization and teardown
- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;
{
if (!(self = [self initWithVertexShaderFromString:kIFFourInputTextureVertexShaderString fragmentShaderFromString:fragmentShaderString]))
{
return nil;
}
return self;
}
- (id)initWithVertexShaderFromString:(NSString *)vertexShaderString fragmentShaderFromString:(NSString *)fragmentShaderString;
{
if (!(self = [super initWithVertexShaderFromString:vertexShaderString fragmentShaderFromString:fragmentShaderString]))
{
return nil;
}
inputRotation2 = kGPUImageNoRotation;
inputRotation3 = kGPUImageNoRotation;
inputRotation4 = kGPUImageNoRotation;
hasSetTexture1 = NO;
hasSetTexture2 = NO;
hasSetTexture3 = NO;
hasReceivedFrame1 = NO;
hasReceivedFrame2 = NO;
hasReceivedFrame3 = NO;
hasReceivedFrame4 = NO;
frameWasVideo1 = NO;
frameWasVideo2 = NO;
frameWasVideo3 = NO;
frameWasVideo4 = NO;
frameCheckDisabled1 = NO;
frameCheckDisabled2 = NO;
frameCheckDisabled3 = NO;
frameCheckDisabled4 = NO;
frameTime1 = kCMTimeInvalid;
frameTime2 = kCMTimeInvalid;
frameTime3 = kCMTimeInvalid;
frameTime4 = kCMTimeInvalid;
runSynchronouslyOnVideoProcessingQueue(^{
[GPUImageOpenGLESContext useImageProcessingContext];
filterTextureCoordinateAttribute2 = [filterProgram attributeIndex:@"inputTextureCoordinate2"];
filterInputTextureUniform2 = [filterProgram uniformIndex:@"inputImageTexture2"]; // This does assume a name of "inputImageTexture2" for second input texture in the fragment shader
glEnableVertexAttribArray(filterTextureCoordinateAttribute2);
filterTextureCoordinateAttribute3 = [filterProgram attributeIndex:@"inputTextureCoordinate3"];
filterInputTextureUniform3 = [filterProgram uniformIndex:@"inputImageTexture3"]; // This does assume a name of "inputImageTexture2" for second input texture in the fragment shader
glEnableVertexAttribArray(filterTextureCoordinateAttribute3);
filterTextureCoordinateAttribute4 = [filterProgram attributeIndex:@"inputTextureCoordinate4"];
filterInputTextureUniform4 = [filterProgram uniformIndex:@"inputImageTexture4"]; // This does assume a name of "inputImageTexture2" for second input texture in the fragment shader
glEnableVertexAttribArray(filterTextureCoordinateAttribute4);
});
return self;
}
- (void)initializeAttributes;
{
[super initializeAttributes];
[filterProgram addAttribute:@"inputTextureCoordinate2"];
[filterProgram addAttribute:@"inputTextureCoordinate3"];
[filterProgram addAttribute:@"inputTextureCoordinate4"];
}
- (void)disableFrameCheck1;
{
frameCheckDisabled1 = YES;
}
- (void)disableFrameCheck2;
{
frameCheckDisabled2 = YES;
}
- (void)disableFrameCheck3;
{
frameCheckDisabled3 = YES;
}
- (void)disableFrameCheck4;
{
frameCheckDisabled4 = YES;
}
#pragma mark -
#pragma mark Rendering
- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates sourceTexture:(GLuint)sourceTexture;
{
if (self.preventRendering)
{
return;
}
[GPUImageOpenGLESContext setActiveShaderProgram:filterProgram];
[self setUniformsForProgramAtIndex:0];
[self setFilterFBO];
glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha);
glClear(GL_COLOR_BUFFER_BIT);
glActiveTexture(GL_TEXTURE2);
glBindTexture(GL_TEXTURE_2D, sourceTexture);
glUniform1i(filterInputTextureUniform, 2);
glActiveTexture(GL_TEXTURE3);
glBindTexture(GL_TEXTURE_2D, filterSourceTexture2);
glUniform1i(filterInputTextureUniform2, 3);
glActiveTexture(GL_TEXTURE4);
glBindTexture(GL_TEXTURE_2D, filterSourceTexture3);
glUniform1i(filterInputTextureUniform3, 4);
glActiveTexture(GL_TEXTURE5);
glBindTexture(GL_TEXTURE_2D, filterSourceTexture4);
glUniform1i(filterInputTextureUniform4, 5);
glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);
glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
glVertexAttribPointer(filterTextureCoordinateAttribute2, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:inputRotation2]);
glVertexAttribPointer(filterTextureCoordinateAttribute3, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:inputRotation3]);
glVertexAttribPointer(filterTextureCoordinateAttribute4, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:inputRotation4]);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}
- (void)releaseInputTexturesIfNeeded;
{
if (shouldConserveMemoryForNextFrame)
{
[firstTextureDelegate textureNoLongerNeededForTarget:self];
[textureDelegate2 textureNoLongerNeededForTarget:self];
[textureDelegate3 textureNoLongerNeededForTarget:self];
[textureDelegate4 textureNoLongerNeededForTarget:self];
shouldConserveMemoryForNextFrame = NO;
}
}
#pragma mark -
#pragma mark GPUImageInput
- (NSInteger)nextAvailableTextureIndex;
{
if (!hasSetTexture1){
return 0;
}else if (!hasSetTexture2) {
return 1;
}else if (!hasSetTexture3) {
return 2;
}else{
return 3;
}
}
- (void)setInputTexture:(GLuint)newInputTexture atIndex:(NSInteger)textureIndex;
{
switch (textureIndex) {
case 0:
filterSourceTexture = newInputTexture;
hasSetTexture1 = YES;
break;
case 1:
filterSourceTexture2 = newInputTexture;
hasSetTexture2 = YES;
break;
case 2:
filterSourceTexture3 = newInputTexture;
hasSetTexture3 = YES;
break;
case 3:
filterSourceTexture4 = newInputTexture;
break;
default:
break;
}
}
- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
{
if (textureIndex == 0)
{
[super setInputSize:newSize atIndex:textureIndex];
if (CGSizeEqualToSize(newSize, CGSizeZero))
{
hasSetTexture1 = NO;
}
}
}
- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
{
switch (textureIndex) {
case 0:
inputRotation = newInputRotation;
break;
case 1:
inputRotation2 = newInputRotation;
break;
case 2:
inputRotation3 = newInputRotation;
break;
case 3:
inputRotation4 = newInputRotation;
break;
default:
break;
}
}
- (CGSize)rotatedSize:(CGSize)sizeToRotate forIndex:(NSInteger)textureIndex;
{
CGSize rotatedSize = sizeToRotate;
GPUImageRotationMode rotationToCheck;
switch (textureIndex) {
case 0:
rotationToCheck = inputRotation;
break;
case 1:
rotationToCheck = inputRotation2;
break;
case 2:
rotationToCheck = inputRotation3;
break;
case 3:
rotationToCheck = inputRotation4;
break;
default:
break;
}
if (GPUImageRotationSwapsWidthAndHeight(rotationToCheck))
{
rotatedSize.width = sizeToRotate.height;
rotatedSize.height = sizeToRotate.width;
}
return rotatedSize;
}
- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
{
outputTextureRetainCount = [targets count];
// You can set up infinite update loops, so this helps to short circuit them
if (hasReceivedFrame1 && hasReceivedFrame2 && hasReceivedFrame3 && hasReceivedFrame4)
{
return;
}
BOOL updatedMovieFrameOppositeStillImage = NO;
switch (textureIndex) {
case 0:
hasReceivedFrame1 = YES;
frameTime1 = frameTime;
if (frameCheckDisabled2)
{
hasReceivedFrame2 = YES;
}
if (frameCheckDisabled3)
{
hasReceivedFrame3 = YES;
}
if (frameCheckDisabled4)
{
hasReceivedFrame4 = YES;
}
if (!CMTIME_IS_INDEFINITE(frameTime))
{
if (CMTIME_IS_INDEFINITE(frameTime2) && CMTIME_IS_INDEFINITE(frameTime3) && CMTIME_IS_INDEFINITE(frameTime4))
{
updatedMovieFrameOppositeStillImage = YES;
}
}
break;
case 1:
hasReceivedFrame2 = YES;
frameTime2 = frameTime;
if (frameCheckDisabled1)
{
hasReceivedFrame1 = YES;
}
if (frameCheckDisabled3)
{
hasReceivedFrame3 = YES;
}
if (frameCheckDisabled4)
{
hasReceivedFrame4 = YES;
}
if (!CMTIME_IS_INDEFINITE(frameTime))
{
if (CMTIME_IS_INDEFINITE(frameTime1) && CMTIME_IS_INDEFINITE(frameTime3) && CMTIME_IS_INDEFINITE(frameTime4))
{
updatedMovieFrameOppositeStillImage = YES;
}
}
break;
case 2:
hasReceivedFrame3 = YES;
frameTime3 = frameTime;
if (frameCheckDisabled1)
{
hasReceivedFrame1 = YES;
}
if (frameCheckDisabled2)
{
hasReceivedFrame2 = YES;
}
if (frameCheckDisabled4)
{
hasReceivedFrame4 = YES;
}
if (!CMTIME_IS_INDEFINITE(frameTime))
{
if (CMTIME_IS_INDEFINITE(frameTime1) && CMTIME_IS_INDEFINITE(frameTime2) && CMTIME_IS_INDEFINITE(frameTime4))
{
updatedMovieFrameOppositeStillImage = YES;
}
}
break;
case 3:
hasReceivedFrame4 = YES;
frameTime4 = frameTime;
if (frameCheckDisabled1)
{
hasReceivedFrame1 = YES;
}
if (frameCheckDisabled3)
{
hasReceivedFrame3 = YES;
}
if (frameCheckDisabled2)
{
hasReceivedFrame2 = YES;
}
if (!CMTIME_IS_INDEFINITE(frameTime))
{
if (CMTIME_IS_INDEFINITE(frameTime1) && CMTIME_IS_INDEFINITE(frameTime3) && CMTIME_IS_INDEFINITE(frameTime2))
{
updatedMovieFrameOppositeStillImage = YES;
}
}
break;
default:
break;
}
// || (hasReceivedFirstFrame && secondFrameCheckDisabled) || (hasReceivedSecondFrame && firstFrameCheckDisabled)
if ((hasReceivedFrame1 && hasReceivedFrame2 && hasReceivedFrame3 && hasReceivedFrame4) || updatedMovieFrameOppositeStillImage)
{
[super newFrameReadyAtTime:frameTime atIndex:0];
hasReceivedFrame1 = NO;
hasReceivedFrame2 = NO;
hasReceivedFrame3 = NO;
hasReceivedFrame4 = NO;
}
}
- (void)setTextureDelegate:(id<GPUImageTextureDelegate>)newTextureDelegate atIndex:(NSInteger)textureIndex;
{
switch (textureIndex) {
case 0:
firstTextureDelegate = newTextureDelegate;
break;
case 1:
textureDelegate2 = newTextureDelegate;
break;
case 2:
textureDelegate3 = newTextureDelegate;
break;
case 3:
textureDelegate4 = newTextureDelegate;
break;
default:
break;
}
}
@end
The class IFAmaroFilter extends IFFourInputFilter.
#import "IFAmaroFilter.h"
NSString *const kIFAmaroFilterFragmentShaderString = SHADER_STRING
(
precision lowp float;
varying highp vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2; //blowout;
uniform sampler2D inputImageTexture3; //overlay;
uniform sampler2D inputImageTexture4; //map
void main()
{
vec4 texel = texture2D(inputImageTexture, textureCoordinate);
vec3 bbTexel = texture2D(inputImageTexture2, textureCoordinate).rgb;
texel.r = texture2D(inputImageTexture3, vec2(bbTexel.r, texel.r)).r;
texel.g = texture2D(inputImageTexture3, vec2(bbTexel.g, texel.g)).g;
texel.b = texture2D(inputImageTexture3, vec2(bbTexel.b, texel.b)).b;
vec4 mapped;
mapped.r = texture2D(inputImageTexture4, vec2(texel.r, 0.16666)).r;
mapped.g = texture2D(inputImageTexture4, vec2(texel.g, .5)).g;
mapped.b = texture2D(inputImageTexture4, vec2(texel.b, .83333)).b;
mapped.a = 1.0;
gl_FragColor = texel;
}
);
@implementation IFAmaroFilter
- (id)init;
{
if (!(self = [super initWithFragmentShaderFromString:kIFAmaroFilterFragmentShaderString]))
{
return nil;
}
return self;
}
@end
When I use the filter I get a black output. The code is as follows:
filter = [[IFAmaroFilter alloc] init];
GPUImagePicture *gp1 = [[GPUImagePicture alloc] initWithImage:[UIImage imageWithContentsOfFile:[[NSBundle mainBundle] pathForResource:@"blackboard1024" ofType:@"png"]]];
GPUImagePicture *gp2 = [[GPUImagePicture alloc] initWithImage:[UIImage imageWithContentsOfFile:[[NSBundle mainBundle] pathForResource:@"overlayMap" ofType:@"png"]]];
GPUImagePicture *gp3 = [[GPUImagePicture alloc] initWithImage:[UIImage imageWithContentsOfFile:[[NSBundle mainBundle] pathForResource:@"amaroMap" ofType:@"png"]]];
[stillCamera addTarget:filter atTextureLocation:0];
[gp1 addTarget:filter atTextureLocation:1];
[gp1 processImage];
[gp2 addTarget:filter atTextureLocation:2];
[gp2 processImage];
[gp3 addTarget:filter atTextureLocation:3];
[gp3 processImage];
[filter addTarget:(GPUImageView *)self.view];
Best answer
I found that the GPUImagePicture instances were being released automatically, so the filter never received their textures. If you run into the same problem, double-check the lifetime of your texture sources and watch when they get deallocated.
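For illustration, here is a minimal sketch of that fix, assuming the setup lives in a view controller that already owns stillCamera. The ivar names are invented; the rest mirrors the setup code from the question. The key point is that the GPUImagePicture objects are stored in instance variables instead of locals, so ARC keeps them alive until the filter has received their textures.

// Sketch only: keep strong references to the GPUImagePicture sources so they
// are not deallocated before the filter has consumed their textures.
@implementation ViewController
{
    IFAmaroFilter *filter;
    GPUImagePicture *blowoutSource;   // was the local gp1 in the question
    GPUImagePicture *overlaySource;   // was the local gp2
    GPUImagePicture *mapSource;       // was the local gp3
}

- (void)setupAmaroFilter
{
    filter = [[IFAmaroFilter alloc] init];

    // Held as ivars, these pictures outlive this method instead of being
    // released at the end of the autorelease pool / scope.
    blowoutSource = [[GPUImagePicture alloc] initWithImage:[UIImage imageNamed:@"blackboard1024.png"]];
    overlaySource = [[GPUImagePicture alloc] initWithImage:[UIImage imageNamed:@"overlayMap.png"]];
    mapSource     = [[GPUImagePicture alloc] initWithImage:[UIImage imageNamed:@"amaroMap.png"]];

    // Texture 0 comes from the camera, textures 1-3 from the still pictures.
    [stillCamera addTarget:filter atTextureLocation:0];
    [blowoutSource addTarget:filter atTextureLocation:1];
    [overlaySource addTarget:filter atTextureLocation:2];
    [mapSource addTarget:filter atTextureLocation:3];

    [blowoutSource processImage];
    [overlaySource processImage];
    [mapSource processImage];

    [filter addTarget:(GPUImageView *)self.view];
}
@end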
Regarding "ios - I want to make a filter with more than two input textures using GPUImage, but I get a black output", we found a similar question on Stack Overflow: https://stackoverflow.com/questions/14514949/