• Twelve: HDR Environment Map Convolution


    In the previous chapters, once we knew a direction vector and looked up a color, we knew what color to output. So why do we still need a convolution?
    The reason is simple: what we want is indirect light, not a single color. Indirect light, not a color.

    I try to avoid writing formulas, but sooner or later we have to face this scary one ($k_d$ is the diffuse fraction and $c$ the surface albedo):

    $$L_o(p, \omega_o) \;=\; k_d\,\frac{c}{\pi} \int_{\Omega} L_i(p, \omega_i)\,(n \cdot \omega_i)\, d\omega_i$$

    Every direction vector on the hemisphere plays the role of $\omega_i$, and the color we used to sample directly before now plays the role of $L_i(p, \omega_i)$; the formula integrates all of that incoming light over the hemisphere $\Omega$ to give the indirect diffuse lighting.

    From past experience we know that the integral has to be turned into a discrete sum; in other words, we replace the integral by the sum of N samples, as sketched below.
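    To make that concrete, here is the generic form of the replacement (nothing here is specific to this shader yet; the hemisphere-specific weights are worked out below):

    $$\int_{\Omega} f(\omega)\, d\omega \;\approx\; \frac{|\Omega|}{N} \sum_{k=1}^{N} f(\omega_k)$$

    where $|\Omega|$ is the size of the integration domain and the $\omega_k$ are the sample directions.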

    So where do we start sampling, and what do we sample over? The answer is: centered on the surface normal, sweeping outward from the middle to both sides, over the hemisphere above the surface.
    Refining this, treat the hemisphere in spherical coordinates, like longitude and latitude: the azimuth angle $\phi$ runs from 0 to 360 degrees around the normal, and the zenith angle $\theta$ runs from 0 to 90 degrees away from it.
    Because of the sphere's geometry, uniform steps in $\phi$ and $\theta$ do not cover the hemisphere evenly (samples bunch up near the top), so each sample is weighted by $\sin\theta$ to account for the area it represents. And because light arriving at larger angles strikes the surface at a grazing angle and contributes less, each sample is also weighted by $\cos\theta$.
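    Putting those weights into the Riemann sum gives the discretized form that the fragment shader below implements; this is the standard formulation, with $n_1$ and $n_2$ the number of steps in $\phi$ and $\theta$:

    $$L_o(p, \omega_o) \;\approx\; k_d\,\frac{c\,\pi}{n_1 n_2} \sum_{i=1}^{n_1} \sum_{j=1}^{n_2} L_i(p, \phi_i, \theta_j)\,\cos\theta_j\,\sin\theta_j$$

    The shader stores only the environment part: the loop accumulates texture(...) * cos(theta) * sin(theta) and the result is scaled by PI / nrSamples; in the usual IBL setup the $k_d\,c$ factor is applied later, when the irradiance map is used for lighting.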

    When sampling, we convert the spherical coordinates to a 3D Cartesian vector, transform that vector from tangent space into world space, and then use it to look up the HDR environment cube map.
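    If it helps to see that conversion outside the shader, here is a minimal CPU-side C++ sketch of the same math; the helper name sphericalToWorld and its signature are mine, purely for illustration (the actual conversion happens in the fragment shader below):

    #include <osg/Vec3>
    #include <cmath>

    // Convert a hemisphere sample given as spherical angles (phi around the
    // normal, theta away from it) into a world-space direction, using the
    // same tangent basis the fragment shader builds around N.
    osg::Vec3 sphericalToWorld(float phi, float theta, const osg::Vec3& N)
    {
        // Build an orthonormal basis (right, up, N) around the normal.
        osg::Vec3 up(0.0f, 1.0f, 0.0f);
        osg::Vec3 right = up ^ N;      // '^' is the cross product in OSG
        right.normalize();
        up = N ^ right;
        up.normalize();

        // Spherical -> Cartesian in tangent space (z axis along the normal).
        osg::Vec3 tangentSample(std::sin(theta) * std::cos(phi),
                                std::sin(theta) * std::sin(phi),
                                std::cos(theta));

        // Tangent space -> world space.
        return right * tangentSample.x() + up * tangentSample.y() + N * tangentSample.z();
    }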
    It may look complicated, but the really complicated part, specular IBL, is still to come, haha.

    A run of the program shows the convolved result (the screenshot is not reproduced here). The code is as follows:
    #include <osg/TextureCubeMap>
    #include <osg/ShapeDrawable>
    #include <osg/Geode>
    #include <osg/Geometry>
    #include <osg/NodeVisitor>
    #include <osg/Program>
    #include <osg/Shader>
    #include <osg/Uniform>
    #include <osgDB/ReadFile>
    #include <osgViewer/Viewer>

    static const char * vertexShader =
    {
        // "aPos" is a generic vertex attribute, bound to location 1 in main().
        "in vec3 aPos;\n"
        "varying vec3 WorldPos;\n"
        "void main(void)\n"
        "{\n"
        "    WorldPos = aPos;\n"
        "    gl_Position = ftransform();\n"
        "}\n"
    };

    static const char *psShader =
    {
        "varying vec3 WorldPos; "
        "uniform samplerCube environmentMap; "
        "const float PI = 3.14159265359; "
        "void main() "
        "{ "
        // The interpolated position doubles as the normal of the hemisphere we integrate over.
        "    vec3 N = normalize(WorldPos); "
        "    vec3 irradiance = vec3(0.0); "
        // Build a tangent basis (right, up, N) around the normal.
        "    vec3 up = vec3(0.0, 1.0, 0.0); "
        "    vec3 right = normalize(cross(up, N)); "
        "    up = normalize(cross(N, right)); "
        "    float sampleDelta = 0.025; "
        "    float nrSamples = 0.0; "
        // Walk the hemisphere: phi around the normal, theta away from it.
        "    for (float phi = 0.0; phi < 2.0 * PI; phi += sampleDelta) "
        "    { "
        "        for (float theta = 0.0; theta < 0.5 * PI; theta += sampleDelta) "
        "        { "
        // Spherical -> Cartesian in tangent space, then tangent -> world space.
        "            vec3 tangentSample = vec3(sin(theta) * cos(phi), sin(theta) * sin(phi), cos(theta)); "
        "            vec3 sampleVec = tangentSample.x * right + tangentSample.y * up + tangentSample.z * N; "
        // Weight each sample by cos(theta) (grazing light contributes less) and sin(theta) (sample area).
        "            irradiance += texture(environmentMap, sampleVec).rgb * cos(theta) * sin(theta); "
        "            nrSamples++; "
        "        } "
        "    } "
        // Average the samples and scale by PI (the normalization of the Riemann sum).
        "    irradiance = PI * irradiance * (1.0 / float(nrSamples)); "
        "    gl_FragColor = vec4(irradiance, 1.0); "
        "}"
    };
    class MyNodeVisitor : public osg::NodeVisitor
    {
    public:
        MyNodeVisitor() : osg::NodeVisitor(osg::NodeVisitor::TRAVERSE_ALL_CHILDREN)
        {
        }
        // Copy each geometry's vertex array into generic attribute slot 1,
        // which the shader program binds to "aPos" below.
        virtual void apply(osg::Geode& geode)
        {
            int count = geode.getNumDrawables();
            for (int i = 0; i < count; i++)
            {
                osg::ref_ptr<osg::Geometry> geometry = geode.getDrawable(i)->asGeometry();
                if (!geometry.valid())
                {
                    continue;
                }
                osg::Array* vertexArray = geometry->getVertexArray();
                geometry->setVertexAttribArray(1, vertexArray);
            }
            traverse(geode);
        }
    
    };

    int main()
    {
        // Cube map holding the (HDR) environment to be convolved.
        osg::ref_ptr<osg::TextureCubeMap> tcm = new osg::TextureCubeMap;
        tcm->setTextureSize(512, 512);
        tcm->setFilter(osg::Texture::MIN_FILTER, osg::Texture::LINEAR);
        tcm->setFilter(osg::Texture::MAG_FILTER, osg::Texture::LINEAR);
        tcm->setWrap(osg::Texture::WRAP_S, osg::Texture::CLAMP_TO_EDGE);
        tcm->setWrap(osg::Texture::WRAP_T, osg::Texture::CLAMP_TO_EDGE);
        tcm->setWrap(osg::Texture::WRAP_R, osg::Texture::CLAMP_TO_EDGE);

        // Load the six cube map faces (local files on the author's machine).
        std::string strImagePosX = "D:/delete/Right face camera.bmp";
        osg::ref_ptr<osg::Image> imagePosX = osgDB::readImageFile(strImagePosX);
        tcm->setImage(osg::TextureCubeMap::POSITIVE_X, imagePosX);
        std::string strImageNegX = "D:/delete/Left face camera.bmp";
        osg::ref_ptr<osg::Image> imageNegX = osgDB::readImageFile(strImageNegX);
        tcm->setImage(osg::TextureCubeMap::NEGATIVE_X, imageNegX);

        std::string strImagePosY = "D:/delete/Front face camera.bmp";
        osg::ref_ptr<osg::Image> imagePosY = osgDB::readImageFile(strImagePosY);
        tcm->setImage(osg::TextureCubeMap::POSITIVE_Y, imagePosY);
        std::string strImageNegY = "D:/delete/Back face camera.bmp";
        osg::ref_ptr<osg::Image> imageNegY = osgDB::readImageFile(strImageNegY);
        tcm->setImage(osg::TextureCubeMap::NEGATIVE_Y, imageNegY);

        std::string strImagePosZ = "D:/delete/Top face camera.bmp";
        osg::ref_ptr<osg::Image> imagePosZ = osgDB::readImageFile(strImagePosZ);
        tcm->setImage(osg::TextureCubeMap::POSITIVE_Z, imagePosZ);
        std::string strImageNegZ = "D:/delete/Bottom face camera.bmp";
        osg::ref_ptr<osg::Image> imageNegZ = osgDB::readImageFile(strImageNegZ);
        tcm->setImage(osg::TextureCubeMap::NEGATIVE_Z, imageNegZ);

        // A unit box to display the convolution; the visitor copies its vertex
        // array into attribute slot 1 so the shader's aPos receives the positions.
        osg::ref_ptr<osg::Box> box = new osg::Box(osg::Vec3(0, 0, 0), 1);
        osg::ref_ptr<osg::ShapeDrawable> drawable = new osg::ShapeDrawable(box);
        osg::ref_ptr<osg::Geode> geode = new osg::Geode;
        geode->addDrawable(drawable);
        MyNodeVisitor nv;
        geode->accept(nv);
        osg::ref_ptr<osg::StateSet> stateset = geode->getOrCreateStateSet();
        stateset->setTextureAttributeAndModes(0, tcm, osg::StateAttribute::OVERRIDE | osg::StateAttribute::ON);

        // Shader program: aPos is bound to attribute location 1 to match the visitor.
        osg::ref_ptr<osg::Shader> vs1 = new osg::Shader(osg::Shader::VERTEX, vertexShader);
        osg::ref_ptr<osg::Shader> ps1 = new osg::Shader(osg::Shader::FRAGMENT, psShader);
        osg::ref_ptr<osg::Program> program1 = new osg::Program;
        program1->addShader(vs1);
        program1->addShader(ps1);
        program1->addBindAttribLocation("aPos", 1);

        // The environmentMap sampler reads from texture unit 0.
        osg::ref_ptr<osg::Uniform> tex0Uniform = new osg::Uniform("environmentMap", 0);
        stateset->addUniform(tex0Uniform);
        stateset->setAttribute(program1, osg::StateAttribute::ON);

        osg::ref_ptr<osgViewer::Viewer> viewer = new osgViewer::Viewer;
        viewer->setSceneData(geode);
        viewer->realize();
        return viewer->run();
    

    }

  • Original article: https://blog.csdn.net/directx3d_beginner/article/details/133350494