- android - RelativeLayout 背景可绘制重叠内容
- android - 如何链接 cpufeatures lib 以获取 native android 库?
- java - OnItemClickListener 不起作用,但 OnLongItemClickListener 在自定义 ListView 中起作用
- java - Android 文件转字符串
我想制作一个新的过滤器,例如 GPUImage 的 GPUImageTwoInputFilter。
这是我的代码。一个名为 IFFourInputFilter 的基类,它与 GPUImageTwoInputFilter 非常相似。
#import "IFFourInputFilter.h"
// Vertex shader shared by every four-input filter built on IFFourInputFilter.
// It passes the quad position through unchanged and forwards one texture
// coordinate per input (textureCoordinate .. textureCoordinate4) to the
// fragment shader, mirroring GPUImageTwoInputFilter's two-input version.
NSString *const kIFFourInputTextureVertexShaderString = SHADER_STRING
(
attribute vec4 position;
attribute vec4 inputTextureCoordinate;
attribute vec4 inputTextureCoordinate2;
attribute vec4 inputTextureCoordinate3;
attribute vec4 inputTextureCoordinate4;
varying vec2 textureCoordinate;
varying vec2 textureCoordinate2;
varying vec2 textureCoordinate3;
varying vec2 textureCoordinate4;
void main()
{
gl_Position = position;
textureCoordinate = inputTextureCoordinate.xy;
textureCoordinate2 = inputTextureCoordinate2.xy;
textureCoordinate3 = inputTextureCoordinate3.xy;
textureCoordinate4 = inputTextureCoordinate4.xy;
}
);
@implementation IFFourInputFilter

#pragma mark -
#pragma mark Initialization and teardown

/// Convenience initializer: pairs the caller-supplied fragment shader with the
/// stock four-input vertex shader declared above.
- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;
{
    if (!(self = [self initWithVertexShaderFromString:kIFFourInputTextureVertexShaderString fragmentShaderFromString:fragmentShaderString]))
    {
        return nil;
    }

    return self;
}

/// Designated initializer. Resets all per-input bookkeeping, then looks up the
/// attribute and uniform locations for inputs 2-4 on the GL queue (input 1 is
/// handled by the GPUImageFilter superclass).
- (id)initWithVertexShaderFromString:(NSString *)vertexShaderString fragmentShaderFromString:(NSString *)fragmentShaderString;
{
    if (!(self = [super initWithVertexShaderFromString:vertexShaderString fragmentShaderFromString:fragmentShaderString]))
    {
        return nil;
    }

    inputRotation2 = kGPUImageNoRotation;
    inputRotation3 = kGPUImageNoRotation;
    inputRotation4 = kGPUImageNoRotation;

    // NOTE(review): there is no hasSetTexture4 ivar — the fourth input is
    // assumed set once the first three are (see nextAvailableTextureIndex).
    hasSetTexture1 = NO;
    hasSetTexture2 = NO;
    hasSetTexture3 = NO;

    hasReceivedFrame1 = NO;
    hasReceivedFrame2 = NO;
    hasReceivedFrame3 = NO;
    hasReceivedFrame4 = NO;

    frameWasVideo1 = NO;
    frameWasVideo2 = NO;
    frameWasVideo3 = NO;
    frameWasVideo4 = NO;

    frameCheckDisabled1 = NO;
    frameCheckDisabled2 = NO;
    frameCheckDisabled3 = NO;
    frameCheckDisabled4 = NO;

    frameTime1 = kCMTimeInvalid;
    frameTime2 = kCMTimeInvalid;
    frameTime3 = kCMTimeInvalid;
    frameTime4 = kCMTimeInvalid;

    runSynchronouslyOnVideoProcessingQueue(^{
        [GPUImageOpenGLESContext useImageProcessingContext];

        filterTextureCoordinateAttribute2 = [filterProgram attributeIndex:@"inputTextureCoordinate2"];
        filterInputTextureUniform2 = [filterProgram uniformIndex:@"inputImageTexture2"]; // Assumes the fragment shader names its second input texture "inputImageTexture2"
        glEnableVertexAttribArray(filterTextureCoordinateAttribute2);

        filterTextureCoordinateAttribute3 = [filterProgram attributeIndex:@"inputTextureCoordinate3"];
        filterInputTextureUniform3 = [filterProgram uniformIndex:@"inputImageTexture3"]; // Assumes the fragment shader names its third input texture "inputImageTexture3"
        glEnableVertexAttribArray(filterTextureCoordinateAttribute3);

        filterTextureCoordinateAttribute4 = [filterProgram attributeIndex:@"inputTextureCoordinate4"];
        filterInputTextureUniform4 = [filterProgram uniformIndex:@"inputImageTexture4"]; // Assumes the fragment shader names its fourth input texture "inputImageTexture4"
        glEnableVertexAttribArray(filterTextureCoordinateAttribute4);
    });

    return self;
}

/// Registers the extra texture-coordinate attributes before the program is
/// linked (attribute locations must be bound pre-link).
- (void)initializeAttributes;
{
    [super initializeAttributes];
    [filterProgram addAttribute:@"inputTextureCoordinate2"];
    [filterProgram addAttribute:@"inputTextureCoordinate3"];
    [filterProgram addAttribute:@"inputTextureCoordinate4"];
}

/// Marks input 1 as not requiring a new frame before rendering (e.g. when it
/// is fed by a still image that only delivers one frame).
- (void)disableFrameCheck1;
{
    frameCheckDisabled1 = YES;
}

/// Marks input 2 as not requiring a new frame before rendering.
- (void)disableFrameCheck2;
{
    frameCheckDisabled2 = YES;
}

/// Marks input 3 as not requiring a new frame before rendering.
- (void)disableFrameCheck3;
{
    frameCheckDisabled3 = YES;
}

/// Marks input 4 as not requiring a new frame before rendering.
- (void)disableFrameCheck4;
{
    frameCheckDisabled4 = YES;
}

#pragma mark -
#pragma mark Rendering

/// Draws the filtered quad, binding the four source textures to texture units
/// 2-5 and feeding each input its own (possibly rotated) texture coordinates.
- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates sourceTexture:(GLuint)sourceTexture;
{
    if (self.preventRendering)
    {
        return;
    }

    [GPUImageOpenGLESContext setActiveShaderProgram:filterProgram];
    [self setUniformsForProgramAtIndex:0];
    [self setFilterFBO];

    glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha);
    glClear(GL_COLOR_BUFFER_BIT);

    // Each input gets its own texture unit; the uniform value is the unit index.
    glActiveTexture(GL_TEXTURE2);
    glBindTexture(GL_TEXTURE_2D, sourceTexture);
    glUniform1i(filterInputTextureUniform, 2);

    glActiveTexture(GL_TEXTURE3);
    glBindTexture(GL_TEXTURE_2D, filterSourceTexture2);
    glUniform1i(filterInputTextureUniform2, 3);

    glActiveTexture(GL_TEXTURE4);
    glBindTexture(GL_TEXTURE_2D, filterSourceTexture3);
    glUniform1i(filterInputTextureUniform3, 4);

    glActiveTexture(GL_TEXTURE5);
    glBindTexture(GL_TEXTURE_2D, filterSourceTexture4);
    glUniform1i(filterInputTextureUniform4, 5);

    glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);
    glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
    glVertexAttribPointer(filterTextureCoordinateAttribute2, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:inputRotation2]);
    glVertexAttribPointer(filterTextureCoordinateAttribute3, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:inputRotation3]);
    glVertexAttribPointer(filterTextureCoordinateAttribute4, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:inputRotation4]);

    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}

/// Tells every upstream source its texture is no longer needed, when
/// memory-conservation mode was requested for this frame.
- (void)releaseInputTexturesIfNeeded;
{
    if (shouldConserveMemoryForNextFrame)
    {
        [firstTextureDelegate textureNoLongerNeededForTarget:self];
        [textureDelegate2 textureNoLongerNeededForTarget:self];
        [textureDelegate3 textureNoLongerNeededForTarget:self];
        [textureDelegate4 textureNoLongerNeededForTarget:self];

        shouldConserveMemoryForNextFrame = NO;
    }
}

#pragma mark -
#pragma mark GPUImageInput

/// Returns the first input slot (0-3) that has not yet been given a texture;
/// once slots 0-2 are filled, slot 3 is always reported.
- (NSInteger)nextAvailableTextureIndex;
{
    if (!hasSetTexture1)
    {
        return 0;
    }
    else if (!hasSetTexture2)
    {
        return 1;
    }
    else if (!hasSetTexture3)
    {
        return 2;
    }
    else
    {
        return 3;
    }
}

/// Stores the incoming texture name in the slot for the given input index.
- (void)setInputTexture:(GLuint)newInputTexture atIndex:(NSInteger)textureIndex;
{
    switch (textureIndex) {
        case 0:
            filterSourceTexture = newInputTexture;
            hasSetTexture1 = YES;
            break;
        case 1:
            filterSourceTexture2 = newInputTexture;
            hasSetTexture2 = YES;
            break;
        case 2:
            filterSourceTexture3 = newInputTexture;
            hasSetTexture3 = YES;
            break;
        case 3:
            // No hasSetTexture4 flag exists; nextAvailableTextureIndex treats
            // slot 3 as the fall-through once the first three are set.
            filterSourceTexture4 = newInputTexture;
            break;
        default:
            break;
    }
}

/// Only the primary input's size drives the framebuffer size; a zero size on
/// input 0 additionally releases its "texture set" flag.
- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
{
    if (textureIndex == 0)
    {
        [super setInputSize:newSize atIndex:textureIndex];

        if (CGSizeEqualToSize(newSize, CGSizeZero))
        {
            hasSetTexture1 = NO;
        }
    }
}

/// Records the rotation to apply to the given input's texture coordinates.
- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
{
    switch (textureIndex) {
        case 0:
            inputRotation = newInputRotation;
            break;
        case 1:
            inputRotation2 = newInputRotation;
            break;
        case 2:
            inputRotation3 = newInputRotation;
            break;
        case 3:
            inputRotation4 = newInputRotation;
            break;
        default:
            break;
    }
}

/// Returns sizeToRotate with width/height swapped when the rotation recorded
/// for the given input transposes the image.
- (CGSize)rotatedSize:(CGSize)sizeToRotate forIndex:(NSInteger)textureIndex;
{
    CGSize rotatedSize = sizeToRotate;
    // BUG FIX: rotationToCheck was previously left uninitialized when
    // textureIndex fell into the default case, so the swap test below read
    // garbage. Default to no rotation instead.
    GPUImageRotationMode rotationToCheck = kGPUImageNoRotation;

    switch (textureIndex) {
        case 0:
            rotationToCheck = inputRotation;
            break;
        case 1:
            rotationToCheck = inputRotation2;
            break;
        case 2:
            rotationToCheck = inputRotation3;
            break;
        case 3:
            rotationToCheck = inputRotation4;
            break;
        default:
            break;
    }

    if (GPUImageRotationSwapsWidthAndHeight(rotationToCheck))
    {
        rotatedSize.width = sizeToRotate.height;
        rotatedSize.height = sizeToRotate.width;
    }

    return rotatedSize;
}

/// Waits until all four inputs (or their disabled stand-ins) have delivered a
/// frame, then propagates one combined frame downstream. A movie frame
/// arriving while every other input is a still image also triggers a render,
/// so video keeps flowing against static overlays.
- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
{
    outputTextureRetainCount = [targets count];

    // You can set up infinite update loops, so this helps to short circuit them
    if (hasReceivedFrame1 && hasReceivedFrame2 && hasReceivedFrame3 && hasReceivedFrame4)
    {
        return;
    }

    BOOL updatedMovieFrameOppositeStillImage = NO;

    switch (textureIndex) {
        case 0:
            hasReceivedFrame1 = YES;
            frameTime1 = frameTime;
            // Inputs with disabled frame checks are treated as always ready.
            if (frameCheckDisabled2)
            {
                hasReceivedFrame2 = YES;
            }
            if (frameCheckDisabled3)
            {
                hasReceivedFrame3 = YES;
            }
            if (frameCheckDisabled4)
            {
                hasReceivedFrame4 = YES;
            }
            // A definite timestamp here with indefinite times everywhere else
            // means video-vs-still-images: render on every movie frame.
            if (!CMTIME_IS_INDEFINITE(frameTime))
            {
                if (CMTIME_IS_INDEFINITE(frameTime2) && CMTIME_IS_INDEFINITE(frameTime3) && CMTIME_IS_INDEFINITE(frameTime4))
                {
                    updatedMovieFrameOppositeStillImage = YES;
                }
            }
            break;
        case 1:
            hasReceivedFrame2 = YES;
            frameTime2 = frameTime;
            if (frameCheckDisabled1)
            {
                hasReceivedFrame1 = YES;
            }
            if (frameCheckDisabled3)
            {
                hasReceivedFrame3 = YES;
            }
            if (frameCheckDisabled4)
            {
                hasReceivedFrame4 = YES;
            }
            if (!CMTIME_IS_INDEFINITE(frameTime))
            {
                if (CMTIME_IS_INDEFINITE(frameTime1) && CMTIME_IS_INDEFINITE(frameTime3) && CMTIME_IS_INDEFINITE(frameTime4))
                {
                    updatedMovieFrameOppositeStillImage = YES;
                }
            }
            break;
        case 2:
            hasReceivedFrame3 = YES;
            frameTime3 = frameTime;
            if (frameCheckDisabled1)
            {
                hasReceivedFrame1 = YES;
            }
            if (frameCheckDisabled2)
            {
                hasReceivedFrame2 = YES;
            }
            if (frameCheckDisabled4)
            {
                hasReceivedFrame4 = YES;
            }
            if (!CMTIME_IS_INDEFINITE(frameTime))
            {
                if (CMTIME_IS_INDEFINITE(frameTime1) && CMTIME_IS_INDEFINITE(frameTime2) && CMTIME_IS_INDEFINITE(frameTime4))
                {
                    updatedMovieFrameOppositeStillImage = YES;
                }
            }
            break;
        case 3:
            hasReceivedFrame4 = YES;
            frameTime4 = frameTime;
            if (frameCheckDisabled1)
            {
                hasReceivedFrame1 = YES;
            }
            if (frameCheckDisabled3)
            {
                hasReceivedFrame3 = YES;
            }
            if (frameCheckDisabled2)
            {
                hasReceivedFrame2 = YES;
            }
            if (!CMTIME_IS_INDEFINITE(frameTime))
            {
                if (CMTIME_IS_INDEFINITE(frameTime1) && CMTIME_IS_INDEFINITE(frameTime3) && CMTIME_IS_INDEFINITE(frameTime2))
                {
                    updatedMovieFrameOppositeStillImage = YES;
                }
            }
            break;
        default:
            break;
    }

    if ((hasReceivedFrame1 && hasReceivedFrame2 && hasReceivedFrame3 && hasReceivedFrame4) || updatedMovieFrameOppositeStillImage)
    {
        [super newFrameReadyAtTime:frameTime atIndex:0];

        // Re-arm for the next frame set.
        hasReceivedFrame1 = NO;
        hasReceivedFrame2 = NO;
        hasReceivedFrame3 = NO;
        hasReceivedFrame4 = NO;
    }
}

/// Records the delegate to notify when the texture for the given input can be
/// released (used by releaseInputTexturesIfNeeded).
- (void)setTextureDelegate:(id<GPUImageTextureDelegate>)newTextureDelegate atIndex:(NSInteger)textureIndex;
{
    switch (textureIndex) {
        case 0:
            firstTextureDelegate = newTextureDelegate;
            break;
        case 1:
            textureDelegate2 = newTextureDelegate;
            break;
        case 2:
            textureDelegate3 = newTextureDelegate;
            break;
        case 3:
            textureDelegate4 = newTextureDelegate;
            break;
        default:
            break;
    }
}

@end
名为 IFAmaroFilter 的类扩展了 IFFourInputFilter。
#import "IFAmaroFilter.h"
// Amaro fragment shader: blows out the base texel through the blowout map
// (texture 2), curve-adjusts each channel through the overlay LUT (texture 3),
// then tone-maps each channel through a horizontal strip LUT (texture 4) using
// fixed row centers (0.16666 / 0.5 / 0.83333 for r / g / b).
NSString *const kIFAmaroFilterFragmentShaderString = SHADER_STRING
(
 precision lowp float;

 varying highp vec2 textureCoordinate;

 uniform sampler2D inputImageTexture;
 uniform sampler2D inputImageTexture2; //blowout;
 uniform sampler2D inputImageTexture3; //overlay;
 uniform sampler2D inputImageTexture4; //map

 void main()
 {
     vec4 texel = texture2D(inputImageTexture, textureCoordinate);
     vec3 bbTexel = texture2D(inputImageTexture2, textureCoordinate).rgb;

     texel.r = texture2D(inputImageTexture3, vec2(bbTexel.r, texel.r)).r;
     texel.g = texture2D(inputImageTexture3, vec2(bbTexel.g, texel.g)).g;
     texel.b = texture2D(inputImageTexture3, vec2(bbTexel.b, texel.b)).b;

     vec4 mapped;
     mapped.r = texture2D(inputImageTexture4, vec2(texel.r, 0.16666)).r;
     mapped.g = texture2D(inputImageTexture4, vec2(texel.g, .5)).g;
     mapped.b = texture2D(inputImageTexture4, vec2(texel.b, .83333)).b;
     mapped.a = 1.0;

     // BUG FIX: "mapped" was computed and then discarded — the shader wrote
     // the un-mapped texel, leaving inputImageTexture4 with no effect.
     gl_FragColor = mapped;
 }
);
@implementation IFAmaroFilter

/// Builds the filter on top of IFFourInputFilter using the Amaro fragment
/// shader (and, via the superclass, the shared four-input vertex shader).
- (id)init;
{
    self = [super initWithFragmentShaderFromString:kIFAmaroFilterFragmentShaderString];
    if (self == nil)
    {
        return nil;
    }

    return self;
}

@end
当我使用过滤器时,我得到了黑色输出。代码如下:
filter = [[IFAmaroFilter alloc] init];
// NOTE(review): these GPUImagePicture sources are only referenced by locals.
// Under ARC they can be deallocated as soon as this scope ends, so the filter
// never receives their textures and renders black — keep strong references
// (e.g. properties) to gp1/gp2/gp3 for as long as the filter chain is live.
GPUImagePicture *gp1 = [[GPUImagePicture alloc] initWithImage:[UIImage imageWithContentsOfFile:[[NSBundle mainBundle] pathForResource:@"blackboard1024" ofType:@"png"]]];
GPUImagePicture *gp2 = [[GPUImagePicture alloc] initWithImage:[UIImage imageWithContentsOfFile:[[NSBundle mainBundle] pathForResource:@"overlayMap" ofType:@"png"]]];
GPUImagePicture *gp3 = [[GPUImagePicture alloc] initWithImage:[UIImage imageWithContentsOfFile:[[NSBundle mainBundle] pathForResource:@"amaroMap" ofType:@"png"]]];
// Texture slot 0 is the live camera; slots 1-3 are the three lookup images.
[stillCamera addTarget:filter atTextureLocation:0];
[gp1 addTarget:filter atTextureLocation:1];
[gp1 processImage];
[gp2 addTarget:filter atTextureLocation:2];
[gp2 processImage];
[gp3 addTarget:filter atTextureLocation:3];
[gp3 processImage];
[filter addTarget:(GPUImageView *)self.view];
最佳答案
我发现 GPUImagePicture 被自动释放了,所以过滤器接收不到纹理。如果你遇到同样的问题,请仔细检查纹理来源对象的生命周期管理,确认它们何时被释放。
关于ios - 我想用 GPUImage 制作一个多于两个的输入纹理过滤器。但我得到一个黑色输出,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/14514949/
IO 设备如何知道属于它的内存中的值在memory mapped IO 中发生了变化? ? 例如,假设内存地址 0 专用于保存 VGA 设备的背景颜色。当我们更改 memory[0] 中的值时,VGA
我目前正在开发一个使用Facebook sdk登录(通过FBLoginView)的iOS应用。 一切正常,除了那些拥有较旧版本的facebook的人。 当他们按下“使用Facebook登录”按钮时,他
假设我有: this - is an - example - with some - dashesNSRange将使用`rangeOfString:@“-”拾取“-”的第一个实例,但是如果我只想要最后
Card.io SDK提供以下详细信息: 卡号,有效期,月份,年份,CVV和邮政编码。 如何从此SDK获取国家名称。 - (void)userDidProvideCreditCardInfo:(Car
iOS 应用程序如何从网络服务下载图片并在安装过程中将它们安装到用户的 iOS 设备上?可能吗? 最佳答案 您无法控制应用在用户设备上的安装,因此无法在安装过程中下载其他数据。 只需在安装后首次启动应
我曾经开发过一款企业版 iOS 产品,我们公司曾将其出售给大型企业,供他们的员工使用。 该应用程序通过 AppStore 提供,企业用户获得了公司特定的配置文件(包含应用程序配置文件)以启用他们有权使
我正在尝试将 Card.io SDK 集成到我的 iOS 应用程序中。我想为 CardIO ui 做一个简单的本地化,如更改取消按钮标题或“在此保留信用卡”提示文本。 我在 github 上找到了这个
我正在使用 CardIOView 和 CardIOViewDelegate 类,没有可以设置为 YES 的 BOOL 来扫描 collectCardholderName。我可以看到它在 CardIOP
我有一个集成了通话工具包的 voip 应用程序。每次我从我的 voip 应用程序调用时,都会在 native 电话应用程序中创建一个新的最近通话记录。我在 voip 应用程序中也有自定义联系人(电话应
iOS 应用程序如何知道应用程序打开时屏幕上是否已经有键盘?应用程序运行后,它可以接收键盘显示/隐藏通知。但是,如果应用程序在分屏模式下作为辅助应用程序打开,而主应用程序已经显示键盘,则辅助应用程序不
我在模拟器中收到以下错误: ImageIO: CGImageReadSessionGetCachedImageBlockData *** CGImageReadSessionGetCachedIm
如 Apple 文档所示,可以通过 EAAccessory Framework 与经过认证的配件(由 Apple 认证)进行通信。但是我有点困惑,因为一些帖子告诉我它也可以通过 CoreBluetoo
尽管现在的调试器已经很不错了,但有时找出应用程序中正在发生的事情的最好方法仍然是古老的 NSLog。当您连接到计算机时,这样做很容易; Xcode 会帮助弹出日志查看器面板,然后就可以了。当您不在办公
在我的 iOS 应用程序中,我定义了一些兴趣点。其中一些有一个 Kontakt.io 信标的名称,它绑定(bind)到一个特定的 PoI(我的意思是通常贴在信标标签上的名称)。现在我想在附近发现信标,
我正在为警报提示创建一个 trigger.io 插件。尝试从警报提示返回数据。这是我的代码: // Prompt + (void)show_prompt:(ForgeTask*)task{
您好,我是 Apple iOS 的新手。我阅读并搜索了很多关于推送通知的文章,但我没有发现任何关于 APNS 从 io4 到 ios 6 的新更新的信息。任何人都可以向我提供 APNS 如何在 ios
UITabBar 的高度似乎在 iOS 7 和 8/9/10/11 之间发生了变化。我发布这个问题是为了让其他人轻松找到答案。 那么:在 iPhone 和 iPad 上的 iOS 8/9/10/11
我想我可以针对不同的 iOS 版本使用不同的 Storyboard。 由于 UI 的差异,我将创建下一个 Storyboard: Main_iPhone.storyboard Main_iPad.st
我正在写一些东西,我将使用设备的 iTunes 库中的一部分音轨来覆盖 2 个视频的组合,例如: AVMutableComposition* mixComposition = [[AVMutableC
我创建了一个简单的 iOS 程序,可以顺利编译并在 iPad 模拟器上运行良好。当我告诉 XCode 4 使用我连接的 iPad 设备时,无法编译相同的程序。问题似乎是当我尝试使用附加的 iPad 时
我是一名优秀的程序员,十分优秀!