2015-05-14 69 views
5

我正在使用OpenGL并使用GLPaint的示例。我创建了一个应用程序,在用户触摸图像时在图像上应用旋转(纹理)。我有一个问题,我需要保存第一个触摸,然后应用下一个阶段等。但在下一次触摸时,先前的应用效果将被删除。
我试图使用深度缓冲区&帧缓冲区,但都没有能够得到所需的结果。我附上我的所有代码和着色器Opengl Renderbuffer纹理touchmove ios问题

#import <QuartzCore/QuartzCore.h> 
#import <OpenGLES/EAGLDrawable.h> 
#import <GLKit/GLKit.h> 

#import "PaintingView.h" 

// Client-side vertex attribute indices used by -renderLineFromPoint:toPoint:.
enum { 
    ATTRIB_VERTEX,   // position attribute, assumed to be at location 0
    NUM_ATTRIBS      // count of attributes (not an attribute itself)
}; 

// Class extension: private state for the OpenGL painting view.
@interface PaintingView() 
{ 
    // The pixel dimensions of the backbuffer 
    GLint backingWidth; 
    GLint backingHeight; 

    EAGLContext *context; 

    // OpenGL names for the renderbuffer and framebuffers used to render to this view 
    GLuint viewRenderbuffer, viewFramebuffer, texture; 

    // OpenGL name for the depth buffer that is attached to viewFramebuffer, if it exists (0 if it does not exist) 
    GLuint depthRenderbuffer; 

    // YES until the first frame has been cleared (set in -initWithCoder:).
    // Was Carbon `Boolean`; BOOL is the Cocoa convention and what the rest
    // of the file expects (YES/NO assignments).
    BOOL needsErase; 

    // Buffer object holding the current stroke's vertices.
    GLuint vboId; 

    // Set once -initGL has completed successfully (see -layoutSubviews).
    BOOL initialized; 

    // Cached shader uniform/attribute locations (queried after linking).
    GLint inputImageTexture2Uniform, filterPositionAttribute, filterTextureCoordinateAttribute; 

    // Base names (without extension) of the shader files loaded in -setupShaders.
    NSString *vertexShader, *fragmentShader; 

    // Was Carbon `Boolean`; see note on needsErase above.
    BOOL firstTouch; 

    // Touch location normalized to the shader's texture space ([0,1] each axis);
    // uploaded as the "center" uniform by -applyEffect.
    CGPoint twirlCenter; 
} 

// Program Handle 
@property (assign, nonatomic, readonly) GLuint program; 

// Attribute Handles
// NOTE(review): aPosition appears unused in this file — candidate for removal.
@property (assign, nonatomic, readonly) GLuint aPosition; 

// Current and previous touch locations (y flipped to GL's bottom-left origin).
@property(nonatomic, readwrite) CGPoint location; 
@property(nonatomic, readwrite) CGPoint previousLocation; 


@end 

@implementation PaintingView 

// Implement this to override the default layer class (which is [CALayer class]). 
// We do this so that our view will be backed by a layer that is capable of OpenGL ES rendering. 
+ (Class)layerClass 
{ 
    // CAEAGLLayer provides the drawable the EAGLContext renders into.
    return [CAEAGLLayer class]; 
} 

// The GL view is stored in the nib file; unarchiving sends -initWithCoder:.
// Configures the backing CAEAGLLayer, creates the ES 2.0 context, and sets the
// native screen scale. Returns nil if the GL context cannot be created.
- (id)initWithCoder:(NSCoder*)coder { 
    self = [super initWithCoder:coder]; 
    if (!self) { 
        return nil; 
    } 

    CAEAGLLayer *glLayer = (CAEAGLLayer *)self.layer; 
    glLayer.opaque = NO; 
    // Retain the drawable's contents across -presentRenderbuffer: calls so
    // earlier strokes remain visible between frames.
    glLayer.drawableProperties = @{ 
        kEAGLDrawablePropertyRetainedBacking : @YES, 
        kEAGLDrawablePropertyColorFormat     : kEAGLColorFormatRGBA8, 
    }; 

    context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2]; 
    if (!context || ![EAGLContext setCurrentContext:context]) { 
        return nil; 
    } 

    // Render at the device's native scale.
    self.contentScaleFactor = [[UIScreen mainScreen] scale]; 

    // Make sure to start with a cleared buffer.
    needsErase = YES; 

    return self; 
} 

// If our view is resized, we'll be asked to layout subviews. 
// This is the perfect opportunity to also update the framebuffer so that it is 
// the same size as our display area. 
-(void)layoutSubviews 
{ 
    // was missing: UIView lifecycle overrides must call super.
    [super layoutSubviews]; 

    [EAGLContext setCurrentContext:context]; 

    if (!initialized) { 
        // First layout pass: create FBO, shaders, texture, VBO.
        initialized = [self initGL]; 
    } 
    else { 
        // Subsequent passes: just re-allocate storage for the new layer size.
        [self resizeFromLayer:(CAEAGLLayer*)self.layer]; 
    } 
} 

// One-time GL setup: creates the framebuffer + color renderbuffer backed by
// the view's CAEAGLLayer, sizes the viewport, creates the stroke VBO, compiles
// the shaders, and uploads the default image texture.
// Returns NO if the framebuffer is incomplete.
- (BOOL)initGL 
{ 
    // Generate IDs for a framebuffer object and a color renderbuffer 
    glGenFramebuffers(1, &viewFramebuffer); 
    glGenRenderbuffers(1, &viewRenderbuffer); 

    glBindFramebuffer(GL_FRAMEBUFFER, viewFramebuffer); 
    glBindRenderbuffer(GL_RENDERBUFFER, viewRenderbuffer); 
    // This call associates the storage for the current render buffer with the EAGLDrawable (our CAEAGLLayer) 
    // allowing us to draw into a buffer that will later be rendered to screen wherever the layer is (which corresponds with our view). 
    [context renderbufferStorage:GL_RENDERBUFFER fromDrawable:(id<EAGLDrawable>)self.layer]; 
    glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, viewRenderbuffer); 

    // Cache the drawable's pixel dimensions for the viewport below.
    glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &backingWidth); 
    glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &backingHeight); 

    // For this sample, we do not need a depth buffer. If you do, this is how you can create one and attach it to the framebuffer: 
    // glGenRenderbuffers(1, &depthRenderbuffer); 
    // glBindRenderbuffer(GL_RENDERBUFFER, depthRenderbuffer); 
    // glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT16, backingWidth, backingHeight); 
    // glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, depthRenderbuffer); 

    if(glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) 
    { 
     NSLog(@"failed to make complete framebuffer object %x", glCheckFramebufferStatus(GL_FRAMEBUFFER)); 
     return NO; 
    } 

    // Setup the view port in Pixels 
    glViewport(0, 0, backingWidth, backingHeight); 

    // Create a Vertex Buffer Object to hold our data 
    glGenBuffers(1, &vboId); 

    // Load the brush texture 
    // brushTexture = [self textureFromName:@"transprnt.png"]; 

    // Load shaders 
    [self setupShaders]; 

    // NOTE(review): the next two steps rely on -setupShaders leaving _program
    // active, because -generateDefaultTextureWithWidth: (via -setImage:) calls
    // glUniform1i against the current program.
    inputImageTexture2Uniform = [self uniformIndex:@"inputImageTexture"]; 
    UIImage *uyet = [UIImage imageNamed:@"kerala.jpg"]; 
    [self setImage:uyet]; 




    // Enable blending and set a blending function appropriate for premultiplied alpha pixel data 
    glEnable(GL_BLEND); 
    glBlendFunc(GL_ONE, GL_ONE_MINUS_SRC_ALPHA); 


    return YES; 
} 

// Loads, compiles and links the twirl shader program, caches the attribute
// locations the linker assigned, makes the program current, and points the
// attribute arrays at the full-screen quad used by -drawEffects.
- (void)setupShaders 
{ 
    vertexShader = @"RWTBase"; 
    fragmentShader = @"TwirlShader"; 
    // Program 
    _program = [self programWithVertexShader:vertexShader fragmentShader:fragmentShader]; 

    // Query the locations the linker chose for our attributes.
    // NOTE(review): the original also called glBindAttribLocation() *after*
    // glLinkProgram(); per the OpenGL ES 2.0 spec, bindings only take effect
    // at the next link, so those calls were no-ops and have been removed.
    filterPositionAttribute = glGetAttribLocation(_program, "aPosition"); 
    filterTextureCoordinateAttribute = glGetAttribLocation(_program, "inputTextureCoordinate"); 

    glEnableVertexAttribArray(filterPositionAttribute); 
    glEnableVertexAttribArray(filterTextureCoordinateAttribute); 

    // Make the program current; later glUniform* calls target it.
    glUseProgram(_program); 

    // Full-screen quad (triangle strip) and its texture coordinates.
    static const GLfloat textureCoordinates[] = { 
     0.0f, 0.0f, 
     1.0f, 0.0f, 
     0.0f, 1.0f, 
     1.0f, 1.0f, 
    }; 
    static const GLfloat vertices[] = { 
     -1.0f, -1.0f, 
     1.0f, -1.0f, 
     -1.0f, 1.0f, 
     1.0f, 1.0f, 
    }; 

    // Client-side vertex arrays (no VBO bound at this point).
    glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices); 
    glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates); 

} 

// Re-allocates the color renderbuffer's storage from the (possibly resized)
// layer and refreshes the cached backing size and the viewport.
// Returns NO if the framebuffer ends up incomplete.
- (BOOL)resizeFromLayer:(CAEAGLLayer *)layer 
{ 
    // Allocate color buffer backing based on the current layer size 
    glBindRenderbuffer(GL_RENDERBUFFER, viewRenderbuffer); 
    [context renderbufferStorage:GL_RENDERBUFFER fromDrawable:layer]; 
    glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &backingWidth); 
    glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &backingHeight); 

    // For this sample, we do not need a depth buffer. If you do, this is how you can allocate depth buffer backing: 
    // glBindRenderbuffer(GL_RENDERBUFFER, depthRenderbuffer); 
    // glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT16, backingWidth, backingHeight); 
    // glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, depthRenderbuffer); 

    if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) 
    { 
     NSLog(@"Failed to make complete framebuffer objectz %x", glCheckFramebufferStatus(GL_FRAMEBUFFER)); 
     return NO; 
    } 


    // Update viewport 
    glViewport(0, 0, backingWidth, backingHeight); 

    return YES; 
} 

// Releases resources when they are not longer needed. 
- (void)dealloc 
{ 
    // was missing: glDelete* operates on the *current* context, so make ours
    // current first — otherwise the names below could be deleted from (or
    // silently ignored by) whichever context happens to be current.
    if ([EAGLContext currentContext] != context) { 
        [EAGLContext setCurrentContext:context]; 
    } 

    // Destroy framebuffers and renderbuffers 
    if (viewFramebuffer) { 
        glDeleteFramebuffers(1, &viewFramebuffer); 
        viewFramebuffer = 0; 
    } 
    if (viewRenderbuffer) { 
        glDeleteRenderbuffers(1, &viewRenderbuffer); 
        viewRenderbuffer = 0; 
    } 
    if (depthRenderbuffer) 
    { 
        glDeleteRenderbuffers(1, &depthRenderbuffer); 
        depthRenderbuffer = 0; 
    } 
    // was missing: the image texture created in -generateDefaultTexture...
    // was never released, leaking a GL texture per view instance.
    if (texture) { 
        glDeleteTextures(1, &texture); 
        texture = 0; 
    } 
    // vbo 
    if (vboId) { 
        glDeleteBuffers(1, &vboId); 
        vboId = 0; 
    } 

    // tear down context 
    if ([EAGLContext currentContext] == context) 
        [EAGLContext setCurrentContext:nil]; 
} 

// Custom Methods.... 
/* 
*********** 
*   * 
* ATG * 
*   * 
*********** 
*/ 
#pragma mark - Compile & Link 
// Compiles the two named shader files, links them into a program, and returns
// the program name. Returns 0 (and logs the info log) on link failure.
- (GLuint)programWithVertexShader:(NSString*)vsh fragmentShader:(NSString*)fsh { 
    // Build shaders 
    GLuint vertexShader1 = [self shaderWithName:vsh type:GL_VERTEX_SHADER]; 
    GLuint fragmentShader1 = [self shaderWithName:fsh type:GL_FRAGMENT_SHADER]; 

    // Create program 
    GLuint programHandle = glCreateProgram(); 

    // Attach shaders 
    glAttachShader(programHandle, vertexShader1); 
    glAttachShader(programHandle, fragmentShader1); 

    // Link program 
    glLinkProgram(programHandle); 

    // Check for errors 
    GLint linkSuccess; 
    glGetProgramiv(programHandle, GL_LINK_STATUS, &linkSuccess); 
    if (linkSuccess == GL_FALSE) { 
        GLchar messages[1024]; 
        glGetProgramInfoLog(programHandle, sizeof(messages), 0, &messages[0]); 
        NSLog(@"%@:- GLSL Program Error: %s", [self class], messages); 
        // was missing: the failed program was leaked AND returned to the
        // caller as if it were usable. Delete it and return 0 instead.
        glDeleteProgram(programHandle); 
        programHandle = 0; 
    } 

    // Shader objects are no longer needed once the program is linked (or failed).
    glDeleteShader(vertexShader1); 
    glDeleteShader(fragmentShader1); 

    return programHandle; 
} 

// Loads a shader source file ("<name>.vsh" or "<name>.fsh") from the bundle
// and compiles it. Returns the shader name, or 0 if the file is missing or
// compilation fails (the info log is printed).
- (GLuint)shaderWithName:(NSString*)name type:(GLenum)type { 
    // Load the shader file 
    NSString* file; 
    if (type == GL_VERTEX_SHADER) { 
        file = [[NSBundle mainBundle] pathForResource:name ofType:@"vsh"]; 
    } else if (type == GL_FRAGMENT_SHADER) { 
        file = [[NSBundle mainBundle] pathForResource:name ofType:@"fsh"]; 
    } 

    // Create the shader source 
    NSError *readError = nil; 
    NSString *sourceString = [NSString stringWithContentsOfFile:file encoding:NSUTF8StringEncoding error:&readError]; 
    if (!sourceString) { 
        // was missing: a nil file path (resource not in bundle) produced a
        // NULL source pointer, which glShaderSource does not accept.
        NSLog(@"%@:- could not load shader %@ (type %u): %@", [self class], name, type, readError); 
        return 0; 
    } 
    const GLchar* source = (GLchar*)[sourceString UTF8String]; 

    // Create the shader object 
    GLuint shaderHandle = glCreateShader(type); 

    // Load the shader source 
    glShaderSource(shaderHandle, 1, &source, 0); 

    // Compile the shader 
    glCompileShader(shaderHandle); 

    // Check for errors 
    GLint compileSuccess; 
    glGetShaderiv(shaderHandle, GL_COMPILE_STATUS, &compileSuccess); 
    if (compileSuccess == GL_FALSE) { 
        GLchar messages[1024]; 
        glGetShaderInfoLog(shaderHandle, sizeof(messages), 0, &messages[0]); 
        NSLog(@"%@:- GLSL Shader Error: %s", [self class], messages); 
        // was missing: delete the failed shader instead of handing it back.
        glDeleteShader(shaderHandle); 
        return 0; 
    } 

    return shaderHandle; 
} 

// Touch Methiods.. 
#pragma mark - Touches 

// Handles the start of a touch: records the starting location and flags it so
// the first -touchesMoved: seeds the stroke from the touch-down point.
- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event { 
    // The previous body was commented out and queried self.view, which does
    // not exist on a UIView subclass — presumably why it was disabled. Using
    // self restores the intended GLPaint behaviour (firstTouch was otherwise
    // never set to YES, so -touchesMoved: always took its else branch).
    CGRect bounds = [self bounds]; 
    UITouch *touch = [[event touchesForView:self] anyObject]; 
    firstTouch = YES; 
    _location = [touch locationInView:self]; 
    // Flip y to GL's bottom-left origin.
    _location.y = bounds.size.height - _location.y; 
} 

// Handles the continuation of a touch. 
// Computes the twirl center from the touch, uploads the effect uniforms, and
// renders the stroke segment from the previous to the current location.
- (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event 
{ 

    CGRect    bounds = [self bounds]; 
    UITouch*   touch = [[event touchesForView:self] anyObject]; 


    // Normalize the touch into the shader's texture space for the "center" uniform.
    UITouch* touchEvent = [touches anyObject]; 
    CGPoint locationInView = [touchEvent locationInView:self]; 
    twirlCenter = getNormalizedPoint(self, locationInView); 


// [EAGLContext setCurrentContext:context]; 
// glBindFramebuffer(GL_FRAMEBUFFER, viewFramebuffer); 

    // Upload the twirl uniforms (radius/angle/center) for this touch.
    [self applyEffect]; 

// [self drawEffects]; 
    // NOTE(review): unless -touchesBegan: sets firstTouch to YES, this first
    // branch never runs and the else branch handles every move event.
    if (firstTouch) { 
     firstTouch = NO; 
     _previousLocation = [touch previousLocationInView:self]; 
     _previousLocation.y = bounds.size.height - _previousLocation.y; 
    } else { 
     _location = [touch locationInView:self]; 
     // Flip y to GL's bottom-left origin.
     _location.y = bounds.size.height - _location.y; 
     _previousLocation = [touch previousLocationInView:self]; 
     _previousLocation.y = bounds.size.height - _previousLocation.y; 
    } 

    // Render the stroke 
    [self renderLineFromPoint:_previousLocation toPoint:_location]; 
} 

// Redraws the full-screen quad through the twirl program and presents it.
// Invoked via -performSelector:afterDelay: (see -setPaintLine12), i.e. outside
// the touch path, so it must establish its own GL context/framebuffer state.
-(void)drawEffects { 
    // was commented out: without these, the draw targets whatever context and
    // framebuffer happen to be current when the delayed perform fires.
    [EAGLContext setCurrentContext:context]; 
    glBindFramebuffer(GL_FRAMEBUFFER, viewFramebuffer); 

    // Draw 
    glBindBuffer(GL_ARRAY_BUFFER, vboId); 

    glUseProgram(_program); 
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); 


    // Display the buffer 
    glBindRenderbuffer(GL_RENDERBUFFER, viewRenderbuffer); 
    [context presentRenderbuffer:GL_RENDERBUFFER]; 
} 

// Uploads the twirl parameters (radius, angle, center) to the shader.
// radius/angle are fixed; center comes from the latest touch (twirlCenter).
-(void)applyEffect { 

    // was missing: glUniform* writes to the *current* program. Make ours
    // current explicitly instead of relying on -setupShaders having left it
    // active since startup.
    glUseProgram(_program); 

    GLint radiusIndex = [self uniformIndex:@"radius"]; 
    glUniform1f(radiusIndex, 0.1); 

    GLint angleIndex = [self uniformIndex:@"angle"]; 
    glUniform1f(angleIndex, -0.5); 

    GLint centerIndex = [self uniformIndex:@"center"]; 

    GLfloat positionArray[2]; 
    positionArray[0] = twirlCenter.x; 
    positionArray[1] = twirlCenter.y; 
    glUniform2fv(centerIndex, 1, positionArray); 

} 

// Drawings a line onscreen based on where the user touches 
// Interpolates points roughly every 3 px between start and end, uploads them
// to the stroke VBO, draws them as GL_POINTS, and presents the renderbuffer.
- (void)renderLineFromPoint:(CGPoint)start toPoint:(CGPoint)end 
{ 
    // Reusable, growable scratch buffer for the interpolated points.
    // (static: intentionally kept alive for the lifetime of the process)
    static GLfloat*  vertexBuffer = NULL; 
    static NSUInteger vertexMax = 64; 
    NSUInteger   vertexCount = 0, 
    count, 
    i; 

    [EAGLContext setCurrentContext:context]; 
    glBindFramebuffer(GL_FRAMEBUFFER, viewFramebuffer); 

    // Convert locations from Points to Pixels 
    CGFloat scale = self.contentScaleFactor; 
    start.x *= scale; 
    start.y *= scale; 
    end.x *= scale; 
    end.y *= scale; 

    // Allocate vertex array buffer 
    if(vertexBuffer == NULL) 
     vertexBuffer = malloc(vertexMax * 2 * sizeof(GLfloat)); 

    // Add points to the buffer so there are drawing points every X pixels 
    count = MAX(ceilf(sqrtf((end.x - start.x) * (end.x - start.x) + (end.y - start.y) * (end.y - start.y))/3), 1); 
    for(i = 0; i < count; ++i) { 
     if(vertexCount == vertexMax) { 
      vertexMax = 2 * vertexMax; 
      vertexBuffer = realloc(vertexBuffer, vertexMax * 2 * sizeof(GLfloat)); 
     } 

     // Linear interpolation between start and end.
     vertexBuffer[2 * vertexCount + 0] = start.x + (end.x - start.x) * ((GLfloat)i/(GLfloat)count); 
     vertexBuffer[2 * vertexCount + 1] = start.y + (end.y - start.y) * ((GLfloat)i/(GLfloat)count); 
     vertexCount += 1; 
    } 

    // Load data to the Vertex Buffer Object 
    glBindBuffer(GL_ARRAY_BUFFER, vboId); 
    glBufferData(GL_ARRAY_BUFFER, vertexCount*2*sizeof(GLfloat), vertexBuffer, GL_DYNAMIC_DRAW); 

    // NOTE(review): ATTRIB_VERTEX is 0 — this assumes the program's position
    // attribute was assigned location 0 by the linker, which is not guaranteed;
    // the cached filterPositionAttribute would be the safe index. TODO confirm.
    glEnableVertexAttribArray(ATTRIB_VERTEX); 
    glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, GL_FALSE, 0, 0); 

    // Draw 
    glBindTexture(GL_TEXTURE_2D, texture); 

    glUseProgram(_program); 


    glDrawArrays(GL_POINTS, 0, (int)vertexCount); 

    // Display the buffer 
    glBindRenderbuffer(GL_RENDERBUFFER, viewRenderbuffer); 
    [context presentRenderbuffer:GL_RENDERBUFFER]; 
} 

///fsdffdf 


// Converts a view-space point into the texture space the twirl shader expects
// for its "center" uniform: x in [0,1] left-to-right, y in [0,1] with the
// origin at the bottom (matching GL texture orientation).
static CGPoint getNormalizedPoint(UIView* view, CGPoint locationInView) 
{ 
    // NOTE(review): the original multiplied by (2.f - 1.f), i.e. by 1.0 — it
    // read like a half-written NDC mapping (* 2 - 1), but the shader compares
    // "center" against texture coordinates in [0,1], so the plain ratios are
    // what is actually wanted. Behaviour is unchanged; the dead math is gone.
    const float normalizedX = locationInView.x / view.bounds.size.width; 
    const float normalizedY = (view.bounds.size.height - locationInView.y) / view.bounds.size.height; 

    return CGPointMake(normalizedX, normalizedY); 
} 

// set Img... 

// Decodes the UIImage into a premultiplied BGRA bitmap (flipped vertically for
// OpenGL) and uploads it as the source texture for the twirl shader.
- (void)setImage:(UIImage *)image 
{ 
    CGImageRef cgImage = image.CGImage; 
    if (!cgImage) { 
        // was missing: nil image (or a CIImage-backed UIImage) would produce
        // a 0x0 bitmap context and a crash downstream.
        return; 
    } 

    // Create an RGBA bitmap context 
    GLint width = (GLint)CGImageGetWidth(cgImage); 
    GLint height = (GLint)CGImageGetHeight(cgImage); 
    size_t bitsPerComponent = 8; 
    size_t bytesPerRow = width * 4; 
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB(); 
    CGContextRef context1 = CGBitmapContextCreate(NULL, width, height, bitsPerComponent, bytesPerRow, colorSpace, kCGImageAlphaPremultipliedFirst|kCGBitmapByteOrder32Little); 
    if (!context1) { 
        // was missing: CGBitmapContextGetData on a NULL context is invalid.
        CGColorSpaceRelease(colorSpace); 
        return; 
    } 

    // Invert vertically for OpenGL 
    CGContextTranslateCTM(context1, 0, height); 
    CGContextScaleCTM(context1, 1, -1); 
    CGContextDrawImage(context1, CGRectMake(0, 0, width, height), cgImage); 
    GLubyte *textureData = (GLubyte *)CGBitmapContextGetData(context1); 

    [self generateDefaultTextureWithWidth:width height:height data:textureData]; 

    CGContextRelease(context1); 
    CGColorSpaceRelease(colorSpace); 
} 

// Creates (or re-creates) the GL texture holding the source image on texture
// unit 0 and points the sampler uniform at it.
// NOTE(review): glUniform1i writes to the *current* program, so this assumes
// _program is active when called (true when invoked from -initGL).
- (GLuint)generateDefaultTextureWithWidth:(GLint)width height:(GLint)height data:(GLvoid *)data 
{ 
    // was missing: each call generated a fresh texture name without deleting
    // the previous one, leaking a GL texture every time the image changed.
    if (texture) { 
        glDeleteTextures(1, &texture); 
        texture = 0; 
    } 

    glActiveTexture(GL_TEXTURE0); 
    glGenTextures(1, &texture); 
    glBindTexture(GL_TEXTURE_2D, texture); 
    glUniform1i(inputImageTexture2Uniform, 0); 

    // Non-mipmapped, clamped sampling — required for NPOT textures in ES 2.0.
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); 
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); 
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); 
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); 
    // data is the BGRA/little-endian bitmap produced by -setImage:.
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_BGRA, GL_UNSIGNED_BYTE, data); 
    return texture; 
} 

// Looks up a uniform's location in the twirl program.
// NOTE(review): glGetUniformLocation returns GLint and yields -1 for unknown
// names; the GLuint return type silently wraps that value. Callers store the
// result back into GLint so it round-trips, but the signature is misleading.
- (GLuint)uniformIndex:(NSString *)uniformName 
{ 
    return glGetUniformLocation(_program, [uniformName UTF8String]); 
} 

// Schedules a deferred redraw of the full-screen effect quad via -drawEffects.
// NOTE(review): 0.5 s is a magic delay, and queued performs are never
// cancelled (e.g. with +cancelPreviousPerformRequestsWithTarget:) — confirm
// whether repeated calls stacking up is intended.
- (void)setPaintLine12 
{ 
    [self performSelector:@selector(drawEffects) withObject:nil afterDelay:0.5]; 
} 

// Erases the screen 
// NOTE(review): this is a stub — it only logs; no buffer is actually cleared.
- (void)erase 
{ 
    NSLog(@"erase"); 
} 

// Allow this view to become first responder so it can receive events directly.
- (BOOL)canBecomeFirstResponder { 
    return YES; 
} 

@end 

而且附加着色如下:

// Twirl fragment shader: rotates texture lookups around `center` within
// `radius`, with rotation strength falling off quadratically toward the edge.
precision highp float; 

varying vec2 textureCoordinate; 

uniform sampler2D inputImageTexture; 

uniform vec2 center;   // twirl center, in texture coordinates [0,1]
uniform float radius;  // twirl radius, in texture-coordinate units
uniform float angle;   // base rotation amount (scaled by 8.0 below)

void main() 
{ 
    vec2 textureCoordinateToUse = textureCoordinate; 
    float dist = distance(center, textureCoordinate); 
    if (dist < radius) 
    { 
     // Work relative to the twirl center.
     textureCoordinateToUse -= center; 
     // Falloff: 1.0 at the center, 0.0 at the radius edge (squared below).
     float percent = (radius - dist)/radius; 
     float theta = percent * percent * angle * 8.0; 
     float s = sin(theta); 
     float c = cos(theta); 
     // 2x2 rotation of the offset by theta.
     textureCoordinateToUse = vec2(dot(textureCoordinateToUse, vec2(c, -s)), dot(textureCoordinateToUse, vec2(s, c))); 
     textureCoordinateToUse += center; 
    } 

    gl_FragColor = texture2D(inputImageTexture, textureCoordinateToUse); 
} 

请帮我调试什么是错的,如何解决这个问题。

您也可以从这里Twirl on Touch-github

回答

0

运行完整的项目后可以看到:你似乎在每次触摸时都重绘整个屏幕,从而用原始图像覆盖了之前修改过的像素。你还有其他问题,例如效果的比例似乎跟随屏幕宽高比变化,在不同设备之间表现不一致。

无论如何要保持效果,您将需要继续在同一图像(纹理)上工作而不覆盖整个屏幕。这个具体的效果可能有点棘手,但它通常有3个选项:

  1. 您只能更新触摸屏幕的一部分。这意味着创建一个顶点数据,其中位置表示触摸点周围的矩形,纹理坐标表示您需要重绘的纹理部分。这个过程的缺点是,如果位置重叠,它将覆盖前面的效果。
  2. 通过将纹理绑定到FBO(帧缓冲区对象),您可以不断在相同纹理上添加效果。现在FBO是你吸引的目标,也是它的纹理来源。一旦重绘,您需要将纹理绘制到主框架缓冲区。这可能是你也需要双缓冲才能正确实现这一点(有两个相同的纹理,其中第一个是源,第二个是目的地)。缺点是如果放置在相同的位置,效果将叠加。
  3. 保存所有的触摸并创建一个系统,它将同时在所有触摸位置上创建效果。这意味着你将需要重构你的着色器输入来处理这个问题。可能最好的解决方案是创建一个FBO,它将保存您的映射数据,然后您可以控制这些效果如何堆叠。然后映射纹理可以包含每个像素的角度和半径等内容。在每次刷新时,您都会将贴图纹理和原始纹理传递到纹理,纹理将使用映射来查找正确的像素,并在1绘制调用中重绘整个场景。缺点是这是很多工作。
+0

您有任何示例或示例代码,我可以将它们用作参考。由于我不是opengl的专家,所以很难理解这一点。 –

+0

什么部分?首先是非常简单的,你需要做的只是将视图坐标转换成GL坐标。第二个需要一个FBO,它仍然非常简单,并且可以在Web上找到吨,然后将其与主缓冲区相同。第三个需要大量的数学和逻辑来创建和处理映射,否则几乎与第二个相同。 –

+0

第二个。我无法找到FBO的例子。 –