• Learning OpenGL: Note 10


    Building a Mirror

    The basic principle is the mirror-reflection physics from secondary school. The image shown in a mirror can be thought of as the scene seen by a viewer mirrored to the other side of the glass. In OpenGL terms: reflect the camera across the plane the mirror lies in, and whatever that mirrored camera sees is exactly what the mirror surface should display. The principle is simple, but the implementation raises several questions, such as how to paste the mirrored camera's render onto the mirror surface, and how to handle objects that block the mirrored camera's view.
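    To make that concrete, here is a rough per-frame outline of the approach developed in the rest of this note. It is only a sketch: mirrorTo:plane: and the offscreen framebuffer are built below, while mainCamera, mirrorCamera, mirrorPlane, renderScene:intoFramebuffer:, renderSceneToScreen: and drawMirror are placeholder names, not code from the original post.

    // Sketch of the per-frame flow; the helper names are hypothetical.
    - (void)renderFrame {
        // 1. Mirror the main camera across the mirror's plane.
        [self.mainCamera mirrorTo:self.mirrorCamera plane:self.mirrorPlane];
        
        // 2. Render the scene (everything except the mirror itself) from the
        //    mirrored camera into an offscreen texture. Depth testing in this
        //    pass takes care of objects occluding each other in the mirror image.
        [self renderScene:self.mirrorCamera intoFramebuffer:mirrorFramebuffer];
        
        // 3. Render the scene normally from the main camera, then draw the
        //    mirror quad textured with the result of step 2.
        [self renderSceneToScreen:self.mainCamera];
        [self drawMirror];
    }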

    The Camera

    I wrapped the camera in a small class, mainly to make mirroring more convenient. Its job is to produce both the main camera and its mirrored counterpart. A camera tracks its own state with a forward vector forward, an up vector up, and a position position; mirroring just means computing the mirrored value of each of these three. Reflecting a vector uses the standard reflection formula r = v - 2(v·n)n, where n is the plane's unit normal. The class, followed by a usage sketch, is shown below.

    @interface Camera : NSObject
    @property (assign, nonatomic) GLKVector3 forward;
    @property (assign, nonatomic) GLKVector3 up;
    @property (assign, nonatomic) GLKVector3 position;
    
    - (void)setupCameraWithEye:(GLKVector3)eye lookAt:(GLKVector3)lookAt up:(GLKVector3)up;
    - (void)mirrorTo:(Camera *)targetCamera plane:(GLKVector4)plane;
    - (GLKMatrix4)cameraMatrix;
    @end
    
    // Reflect sourceVector across a plane whose unit normal is normalVector:
    // r = v - 2 * dot(v, n) * n
    - (GLKVector3)reflect:(GLKVector3)sourceVector normalVector:(GLKVector3)normalVector {
        CGFloat normalScalar = 2 * GLKVector3DotProduct(sourceVector, normalVector);
        GLKVector3 scaledNormalVector = GLKVector3MultiplyScalar(normalVector, normalScalar);
        GLKVector3 reflectVector = GLKVector3Subtract(sourceVector, scaledNormalVector);
        return reflectVector;
    }
    
    // Mirror this camera into targetCamera across a plane. plane.xyz is the
    // plane normal and plane.w is the distance from the origin along that normal.
    - (void)mirrorTo:(Camera *)targetCamera plane:(GLKVector4)plane {
        GLKVector3 planeNormal = GLKVector3Normalize(GLKVector3Make(plane.x, plane.y, plane.z));
        
        // The direction vectors only need to be reflected.
        GLKVector3 mirrorForward = GLKVector3Normalize([self reflect:self.forward normalVector:planeNormal]);
        GLKVector3 mirrorUp = GLKVector3Normalize([self reflect:self.up normalVector:planeNormal]);
        
        // For the position, reflect the vector from the eye to a point on the
        // plane, then step back from that point along the reflected vector.
        GLKVector3 planeCenter = GLKVector3MultiplyScalar(planeNormal, plane.w);
        GLKVector3 eyeVector = GLKVector3Subtract(planeCenter, self.position);
        CGFloat eyeVectorLength = GLKVector3Length(eyeVector);
        eyeVector = GLKVector3Normalize(eyeVector);
        GLKVector3 mirrorEyeVector = GLKVector3Normalize([self reflect:eyeVector normalVector:planeNormal]);
        mirrorEyeVector = GLKVector3MultiplyScalar(mirrorEyeVector, eyeVectorLength);
        GLKVector3 mirrorPosition = GLKVector3Subtract(planeCenter, mirrorEyeVector);
        targetCamera.position = mirrorPosition;
        targetCamera.up = mirrorUp;
        targetCamera.forward = mirrorForward;
    }
    
    // Build a view matrix from position, forward and up.
    - (GLKMatrix4)cameraMatrix {
        GLKVector3 eye = self.position;
        GLKVector3 lookAt = GLKVector3Add(eye, self.forward);
        return GLKMatrix4MakeLookAt(eye.x, eye.y, eye.z, lookAt.x, lookAt.y, lookAt.z, self.up.x, self.up.y, self.up.z);
    }
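
    A quick usage sketch (not from the original post): set up a main camera and mirror it across the plane z = 0, which is where the mirror quad in this scene will sit. Note how the plane is encoded: xyz is the unit normal and w is the distance from the origin along that normal, matching how mirrorTo:plane: uses plane.w.

    // Hypothetical usage of the Camera class above.
    Camera *mainCamera = [[Camera alloc] init];
    [mainCamera setupCameraWithEye:GLKVector3Make(0, 1, 6.5)
                            lookAt:GLKVector3Make(0, 1, 0)
                                up:GLKVector3Make(0, 1, 0)];
    
    // Mirror plane z = 0: normal (0, 0, 1), distance 0 along the normal.
    Camera *mirrorCamera = [[Camera alloc] init];
    [mainCamera mirrorTo:mirrorCamera plane:GLKVector4Make(0, 0, 1, 0)];
    
    // With the usual look-at setup, the mirrored camera ends up at (0, 1, -6.5),
    // looking back toward +z. Use this matrix when rendering the mirrored scene.
    GLKMatrix4 mirrorViewMatrix = [mirrorCamera cameraMatrix];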
    

    Rendering the Mirrored Camera's View

    To get the mirrored camera's view onto the mirror's surface, we create a new framebuffer and attach a texture to its color attachment. The mirrored camera's view can then be rendered into that texture; a sketch of that render pass follows the setup code below.

    - (void)createTextureFramebuffer:(CGSize)framebufferSize {
        glGenFramebuffers(1, &mirrorFramebuffer);
        glBindFramebuffer(GL_FRAMEBUFFER, mirrorFramebuffer);
        // Create the texture object for the color buffer and attach it to the framebuffer
        glGenTextures(1, &mirrorTexture);
        glBindTexture(GL_TEXTURE_2D, mirrorTexture);
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, framebufferSize.width, framebufferSize.height, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, mirrorTexture, 0);
        
        // The code below uses a renderbuffer, not a texture, for the depth buffer.
        GLuint depthBufferID;
        glGenRenderbuffers(1, &depthBufferID);
        glBindRenderbuffer(GL_RENDERBUFFER, depthBufferID);
        glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT16, framebufferSize.width, framebufferSize.height);
        glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, depthBufferID);
    
        GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
        if (status != GL_FRAMEBUFFER_COMPLETE) {
            // Framebuffer creation failed
        }
        glBindFramebuffer(GL_FRAMEBUFFER, 0);
    }  
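
    The post does not show the pass that actually fills this texture, so here is a minimal sketch of it. Assumptions: self.mirrorCamera has already been mirrored as shown earlier, and drawObjects: is a placeholder that renders every object in the scene (except the mirror) with the given view matrix. The important details are matching the viewport to the texture size and restoring the previous framebuffer and viewport afterwards.

    // Sketch: render the mirrored camera's view into mirrorTexture.
    // self.mirrorCamera and drawObjects: are hypothetical helpers.
    GLint oldFramebuffer = 0;
    GLint oldViewport[4];
    glGetIntegerv(GL_FRAMEBUFFER_BINDING, &oldFramebuffer);
    glGetIntegerv(GL_VIEWPORT, oldViewport);
    
    glBindFramebuffer(GL_FRAMEBUFFER, mirrorFramebuffer);
    glViewport(0, 0, 1024, 1024);   // must match the size passed to createTextureFramebuffer:
    glClearColor(0.0, 0.0, 0.0, 1.0);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    
    [self drawObjects:[self.mirrorCamera cameraMatrix]];   // everything except the mirror quad
    
    glBindFramebuffer(GL_FRAMEBUFFER, (GLuint)oldFramebuffer);
    glViewport(oldViewport[0], oldViewport[1], oldViewport[2], oldViewport[3]);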

    Building the Scene

    - (void)createWall {
        UIImage *normalImage = [UIImage imageNamed:@"stoneFloor_NRM.png"];
        GLKTextureInfo *normalMap = [GLKTextureLoader textureWithCGImage:normalImage.CGImage options:nil error:nil];
        UIImage *diffuseImage = [UIImage imageNamed:@"stoneFloor.jpg"];
        GLKTextureInfo *diffuseMap = [GLKTextureLoader textureWithCGImage:diffuseImage.CGImage options:nil error:nil];
        
        NSString *objFile = [[NSBundle mainBundle] pathForResource:@"cube" ofType:@"obj"];
        WavefrontOBJ *wall1 = [WavefrontOBJ objWithGLContext:self.glContext objFile:objFile diffuseMap:diffuseMap normalMap:normalMap];
        wall1.modelMatrix = GLKMatrix4Multiply(GLKMatrix4MakeTranslation(0, 0, -16), GLKMatrix4MakeScale(15,15,1));
        [self.objects addObject:wall1];
        
        WavefrontOBJ *wall2 = [WavefrontOBJ objWithGLContext:self.glContext objFile:objFile diffuseMap:diffuseMap normalMap:normalMap];
        wall2.modelMatrix = GLKMatrix4Multiply(GLKMatrix4MakeTranslation(0, 0, 16), GLKMatrix4MakeScale(15,15,1));
        [self.objects addObject:wall2];
        
        WavefrontOBJ *wall3 = [WavefrontOBJ objWithGLContext:self.glContext objFile:objFile diffuseMap:diffuseMap normalMap:normalMap];
        wall3.modelMatrix = GLKMatrix4Multiply(GLKMatrix4MakeTranslation(16, 0, 0), GLKMatrix4MakeScale(1,15,15));
        [self.objects addObject:wall3];
        
        WavefrontOBJ *wall4 = [WavefrontOBJ objWithGLContext:self.glContext objFile:objFile diffuseMap:diffuseMap normalMap:normalMap];
        wall4.modelMatrix = GLKMatrix4Multiply(GLKMatrix4MakeTranslation(-16, 0, 0), GLKMatrix4MakeScale(1,15,15));
        [self.objects addObject:wall4];
    }
    
    - (void)createFloor {
        UIImage *normalImage = [UIImage imageNamed:@"stoneFloor_NRM.png"];
        GLKTextureInfo *normalMap = [GLKTextureLoader textureWithCGImage:normalImage.CGImage options:nil error:nil];
        UIImage *diffuseImage = [UIImage imageNamed:@"stoneFloor.jpg"];
        GLKTextureInfo *diffuseMap = [GLKTextureLoader textureWithCGImage:diffuseImage.CGImage options:nil error:nil];
        
        NSString *objFile = [[NSBundle mainBundle] pathForResource:@"cube" ofType:@"obj"];
        WavefrontOBJ *floor = [WavefrontOBJ objWithGLContext:self.glContext objFile:objFile diffuseMap:diffuseMap normalMap:normalMap];
        floor.modelMatrix = GLKMatrix4Multiply(GLKMatrix4MakeTranslation(0, -1, 0), GLKMatrix4MakeScale(15,1,15));
        [self.objects addObject:floor];
    }  

    That gives us four walls and a floor.

    Creating the Mirror

    - (void)createMirror {
        CGSize framebufferSize = CGSizeMake(1024, 1024);
        [self createTextureFramebuffer: framebufferSize];
        
        NSString *vertexShaderPath = [[NSBundle mainBundle] pathForResource:@"vertex" ofType:@".glsl"];
        NSString *fragmentShaderPath = [[NSBundle mainBundle] pathForResource:@"frag_mirror" ofType:@".glsl"];
        GLContext *mirrorGLContext = [GLContext contextWithVertexShaderPath:vertexShaderPath fragmentShaderPath:fragmentShaderPath];
        
        self.mirror = [[Mirror alloc] initWithGLContext:mirrorGLContext texture:mirrorTexture];
        self.mirror.modelMatrix = GLKMatrix4Multiply(GLKMatrix4MakeTranslation(0, 3.5, 0), GLKMatrix4MakeScale(8, 7, 1));
    }  

    Result

      

  • Original article: https://www.cnblogs.com/neverMore-face/p/10171642.html