自学内容网 自学内容网

OpenGL Texture C++ Camera Filter滤镜

       基于OpenGL Texture纹理的强大功能,在片段着色器(Shader)中编写GLSL代码,对YUV的数据进行数据转换从而实现视频编辑软件中的相机滤镜功能。

        接上一篇OpenGL Texture C++ 预览Camera视频的功能实现,本篇来实现Camera滤镜效果并实现各种滤镜的切换。

        项目github地址:GitHub - wangyongyao1989/WyFFmpeg: 音视频相关基础实现

       效果展示:

         

filter_switch_show1

一.着色器程序创建及切换:

        着色器的编译/链接/使用基于上一篇OpenGL Texture C++ 预览Camera视频的OpenGLTextureVideoRender类进行createProgram() -> createTextures() -> draw() -> render()扩展应用,代码如下:

//  Author : wangyongyao https://github.com/wangyongyao1989
// Created by MMM on 2024/9/5.
//

#include "OpenglesTexureVideoRender.h"
#include "OpenGLShader.h"

// Records the backing surface dimensions used later by the renderer.
// `window` and `assetManager` are accepted but not used here (kept for the
// caller-facing interface).
// Fix: the original log printed `width` twice and used %d for size_t.
void
OpenglesTexureVideoRender::init(ANativeWindow *window, AAssetManager *assetManager, size_t width,
                                size_t height) {
    LOGI("OpenglesTexureVideoRender init==%zu, %zu", width, height);
    m_backingWidth = width;
    m_backingHeight = height;
}

// Per-frame entry point: clears the framebuffer, uploads any pending YUV
// frame, activates the shader program and draws a full-screen quad.
// Fix: glClearColor only affects SUBSEQUENT glClear calls, so the original
// order (clear, then set color) cleared the first frame with whatever color
// was previously set. Set the color before clearing.
void OpenglesTexureVideoRender::render() {
//    LOGI("OpenglesTexureVideoRender render");

    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    // Skip the draw when there is no fresh frame data or no usable program.
    if (!updateTextures() || !useProgram()) return;

    // Full-screen quad: 4 vertices as a triangle strip.
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}

// Copies one decoded I420 (YUV 4:2:0 planar) frame into a renderer-owned
// buffer. All three planes live in ONE allocation — Y first, then U, then V —
// so updateTextures() can upload them directly. Handles source rows with
// stride padding by falling back to row-by-row copies.
void OpenglesTexureVideoRender::updateFrame(const video_frame &frame) {
    // Plane byte sizes for 4:2:0: U and V are each a quarter of Y.
    m_sizeY = frame.width * frame.height;
    m_sizeU = frame.width * frame.height / 4;
    m_sizeV = frame.width * frame.height / 4;

    // (Re)allocate the packed buffer on first use or when the frame size
    // changes; m_pDataU/m_pDataV are raw pointers into the Y allocation.
    if (m_pDataY == nullptr || m_width != frame.width || m_height != frame.height) {
        m_pDataY = std::make_unique<uint8_t[]>(m_sizeY + m_sizeU + m_sizeV);
        m_pDataU = m_pDataY.get() + m_sizeY;
        m_pDataV = m_pDataU + m_sizeU;
        // Force attribute/sampler state to be re-sent on the next draw.
        isProgramChanged = true;
    }

    m_width = frame.width;
    m_height = frame.height;

    // Fast path: a tightly packed source plane is one memcpy; otherwise copy
    // row by row, skipping the per-row stride padding.
    if (m_width == frame.stride_y) {
        memcpy(m_pDataY.get(), frame.y, m_sizeY);
    } else {
        uint8_t *pSrcY = frame.y;
        uint8_t *pDstY = m_pDataY.get();

        for (int h = 0; h < m_height; h++) {
            memcpy(pDstY, pSrcY, m_width);

            pSrcY += frame.stride_y;
            pDstY += m_width;
        }
    }

    // Same fast/slow split for the half-resolution chroma planes.
    if (m_width / 2 == frame.stride_uv) {
        memcpy(m_pDataU, frame.u, m_sizeU);
        memcpy(m_pDataV, frame.v, m_sizeV);
    } else {
        uint8_t *pSrcU = frame.u;
        uint8_t *pSrcV = frame.v;
        uint8_t *pDstU = m_pDataU;
        uint8_t *pDstV = m_pDataV;

        for (int h = 0; h < m_height / 2; h++) {
            memcpy(pDstU, pSrcU, m_width / 2);
            memcpy(pDstV, pSrcV, m_width / 2);

            pDstU += m_width / 2;
            pDstV += m_width / 2;

            pSrcU += frame.stride_uv;
            pSrcV += frame.stride_uv;
        }
    }

    // Mark the textures stale so the next updateTextures() re-uploads.
    isDirty = true;
}

// Wraps a raw, tightly packed I420 buffer in a video_frame descriptor and
// hands it to updateFrame(). Plane layout inside `buffer`:
// Y (width*height bytes), then U (width*height/4), then V (width*height/4).
void OpenglesTexureVideoRender::draw(uint8_t *buffer, size_t length, size_t width, size_t height,
                                     float rotation) {
    m_length = length;
    m_rotation = rotation;

    const size_t lumaBytes = width * height;

    video_frame yuvFrame{};
    yuvFrame.width = width;
    yuvFrame.height = height;
    yuvFrame.stride_y = width;        // tightly packed source rows
    yuvFrame.stride_uv = width / 2;
    yuvFrame.y = buffer;
    yuvFrame.u = buffer + lumaBytes;
    yuvFrame.v = buffer + lumaBytes + lumaBytes / 4;

    updateFrame(yuvFrame);
}

// Stores an opaque, caller-defined parameter word; read back via
// getParameters(). No other behavior is attached to it in this class.
void OpenglesTexureVideoRender::setParameters(uint32_t params) {
    m_params = params;
}

// Returns the parameter word previously stored with setParameters().
uint32_t OpenglesTexureVideoRender::getParameters() {
    return m_params;
}

bool OpenglesTexureVideoRender::createTextures() {
    auto widthY = (GLsizei) m_width;
    auto heightY = (GLsizei) m_height;

    glActiveTexture(GL_TEXTURE0);
    glGenTextures(1, &m_textureIdY);
    glBindTexture(GL_TEXTURE_2D, m_textureIdY);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, widthY, heightY, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE,
                 nullptr);

    if (!m_textureIdY) {
        LOGE("OpenGL Error Create Y texture");
        return false;
    }

    GLsizei widthU = (GLsizei) m_width / 2;
    GLsizei heightU = (GLsizei) m_height / 2;

    glActiveTexture(GL_TEXTURE1);
    glGenTextures(1, &m_textureIdU);
    glBindTexture(GL_TEXTURE_2D, m_textureIdU);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, widthU, heightU, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE,
                 nullptr);

    if (!m_textureIdU) {
        LOGE("OpenGL Error Create U texture");
        return false;
    }

    GLsizei widthV = (GLsizei) m_width / 2;
    GLsizei heightV = (GLsizei) m_height / 2;

    glActiveTexture(GL_TEXTURE2);
    glGenTextures(1, &m_textureIdV);
    glBindTexture(GL_TEXTURE_2D, m_textureIdV);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, widthV, heightV, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE,
                 nullptr);

    if (!m_textureIdV) {
        LOGE("OpenGL Error Create V texture");
        return false;
    }

    return true;
}

// Uploads the pending YUV planes into the three luminance textures.
// Returns true only when fresh data was uploaded; render() skips the draw
// otherwise.
// Fix: the original emitted an INFO log on EVERY call, i.e. once per
// rendered frame — log spam removed from the hot path.
bool OpenglesTexureVideoRender::updateTextures() {
    // Lazily create the textures on first use.
    // NOTE(review): this only attempts creation when ALL three ids are zero;
    // a partially failed createTextures() is never retried — confirm that is
    // acceptable, or track creation success explicitly.
    if (!m_textureIdY && !m_textureIdU && !m_textureIdV && !createTextures()) return false;

    if (isDirty) {
        // Y plane at full resolution on unit 0.
        glActiveTexture(GL_TEXTURE0);
        glBindTexture(GL_TEXTURE_2D, m_textureIdY);
        glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, (GLsizei) m_width, (GLsizei) m_height, 0,
                     GL_LUMINANCE, GL_UNSIGNED_BYTE, m_pDataY.get());

        // U and V planes at half resolution on units 1 and 2.
        glActiveTexture(GL_TEXTURE1);
        glBindTexture(GL_TEXTURE_2D, m_textureIdU);
        glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, (GLsizei) m_width / 2, (GLsizei) m_height / 2,
                     0,
                     GL_LUMINANCE, GL_UNSIGNED_BYTE, m_pDataU);

        glActiveTexture(GL_TEXTURE2);
        glBindTexture(GL_TEXTURE_2D, m_textureIdV);
        glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, (GLsizei) m_width / 2, (GLsizei) m_height / 2,
                     0,
                     GL_LUMINANCE, GL_UNSIGNED_BYTE, m_pDataV);

        isDirty = false;

        return true;
    }

    return false;
}

// Compiles and links the current vertex/fragment shader pair (sources were
// handed to openGlShader earlier via setSharderStringPath and friends) and
// caches the attribute/uniform locations used every frame.
// Returns the program id, or 0 on failure.
int
OpenglesTexureVideoRender::createProgram() {

    m_program = openGlShader->createProgram();
    m_vertexShader = openGlShader->vertexShader;
    m_pixelShader = openGlShader->fraShader;
    LOGI("OpenglesTexureVideoRender createProgram m_program:%d", m_program);

    if (!m_program) {
        LOGE("Could not create program.");
        return 0;
    }

    //Get Uniform Variables Location
    // "position"/"texcoord" are vertex attributes; the three s_texture*
    // samplers are uniforms bound to texture units 0/1/2 in useProgram().
    m_vertexPos = (GLuint) glGetAttribLocation(m_program, "position");
    m_textureYLoc = glGetUniformLocation(m_program, "s_textureY");
    m_textureULoc = glGetUniformLocation(m_program, "s_textureU");
    m_textureVLoc = glGetUniformLocation(m_program, "s_textureV");
    m_textureLoc = (GLuint) glGetAttribLocation(m_program, "texcoord");

    return m_program;
}

// Activates the shader program, creating it first if necessary, and re-sends
// the static vertex/texcoord attribute arrays plus sampler bindings whenever
// the program changed (first use, new frame size, or a filter switch).
// Returns the program id, or 0 if no program could be created.
GLuint OpenglesTexureVideoRender::useProgram() {
    if (!m_program && !createProgram()) {
        LOGE("Could not use program.");
        return 0;
    }

    if (isProgramChanged) {
        glUseProgram(m_program);
        // Full-screen quad positions: 2 floats per vertex, tightly packed.
        glVertexAttribPointer(m_vertexPos, 2, GL_FLOAT, GL_FALSE, 0, kVerticek);
        glEnableVertexAttribArray(m_vertexPos);

        // Bind the Y/U/V samplers to texture units 0/1/2 (see createTextures).
        glUniform1i(m_textureYLoc, 0);
        glUniform1i(m_textureULoc, 1);
        glUniform1i(m_textureVLoc, 2);
        glVertexAttribPointer(m_textureLoc, 2, GL_FLOAT, GL_FALSE, 0, kTextureCoordk);
        glEnableVertexAttribArray(m_textureLoc);
        isProgramChanged = false;
    }

    return m_program;
}

// Loads the vertex/fragment shader sources from file paths via OpenGLShader.
// Fix: the original returned 0 (false) unconditionally from a bool function,
// signalling failure to callers even on success.
bool OpenglesTexureVideoRender::setSharderPath(const char *vertexPath, const char *fragmentPath) {
    openGlShader->getSharderPath(vertexPath, fragmentPath);
    return true;
}

// Hands in-memory vertex/fragment shader sources to OpenGLShader.
// Fix: the original returned 0 (false) unconditionally from a bool function,
// signalling failure to callers even on success.
bool OpenglesTexureVideoRender::setSharderStringPath(string vertexPath, string fragmentPath) {
    openGlShader->getSharderStringPath(vertexPath, fragmentPath);
    return true;
}

// Creates the shader wrapper used for compiling/linking GL programs.
// NOTE(review): raw owning `new` with no visible matching delete in this
// file's original destructor — confirm ownership or switch to unique_ptr.
OpenglesTexureVideoRender::OpenglesTexureVideoRender()
        : openGlShader(new OpenGLShader()) {
}

// Releases GL objects (textures and program) and the owned shader wrapper.
// Fix: the original leaked `openGlShader`, which is allocated with `new` in
// the constructor and was never deleted.
OpenglesTexureVideoRender::~OpenglesTexureVideoRender() {
    deleteTextures();
    delete_program(m_program);
    delete openGlShader;
    openGlShader = nullptr;
}

// Unbinds and destroys `program` if it refers to a live GL program object,
// then zeroes the caller's handle so it cannot be reused by accident.
void OpenglesTexureVideoRender::delete_program(GLuint &program) {
    if (!program) {
        return;
    }
    glUseProgram(0);
    glDeleteProgram(program);
    program = 0;
}

void OpenglesTexureVideoRender::deleteTextures() {
    if (m_textureIdY) {
        glActiveTexture(GL_TEXTURE0);
        glBindTexture(GL_TEXTURE_2D, 0);
        glDeleteTextures(1, &m_textureIdY);

        m_textureIdY = 0;
    }

    if (m_textureIdU) {
        glActiveTexture(GL_TEXTURE1);
        glBindTexture(GL_TEXTURE_2D, 0);
        glDeleteTextures(1, &m_textureIdU);

        m_textureIdU = 0;
    }

    if (m_textureIdV) {
        glActiveTexture(GL_TEXTURE2);
        glBindTexture(GL_TEXTURE_2D, 0);
        glDeleteTextures(1, &m_textureIdV);

        m_textureIdV = 0;
    }
}

// Logs a GL string property (vendor, renderer, version, ...) under `name`.
void OpenglesTexureVideoRender::printGLString(const char *name, GLenum s) {
    const auto *value = reinterpret_cast<const char *>(glGetString(s));
    LOGI("OpenGL %s = %s\n", name, value);
}

void OpenglesTexureVideoRender::checkGlError(const char *op) {
    for (GLint error = glGetError(); error; error = glGetError()) {
        LOGI("after %s() glError (0x%x)\n", op, error);
    }
}

        纹理的使用:

        在Filter滤镜的片段着色器中沿用s_textureY/s_textureU/s_textureV三个关于YUV的uniform参数,用于创建纹理时的数据传递。

     //Get Uniform Variables Location
    m_vertexPos = (GLuint) glGetAttribLocation(m_program, "position");
    m_textureYLoc = glGetUniformLocation(m_program, "s_textureY");
    m_textureULoc = glGetUniformLocation(m_program, "s_textureU");
    m_textureVLoc = glGetUniformLocation(m_program, "s_textureV");
    m_textureLoc = (GLuint) glGetAttribLocation(m_program, "texcoord");

       多片段着色器程序的传入:

        要实现滤镜效果,会在片段着色器程序(Fragment Shader)中对YUV数据进行各种有意思的转换。这样的话必须传入多个片段着色器程序代码,以供滤镜切换时使用。

// Stores the vertex shader source and the full list of fragment shader
// sources (one per filter), then installs the FIRST fragment shader as the
// initial program source pair via the base class.
bool OpenglesTextureFilterRender::setSharderStringPathes(string vertexPath,
                                                         vector<string> fragmentPathes) {
    m_fragmentStringPathes = fragmentPathes;
    m_vertexStringPath = vertexPath;
    return OpenglesTexureVideoRender::setSharderStringPath(vertexPath,
                                                           m_fragmentStringPathes.front());
}

        片段着色器的切换:

        对滤镜来说,顶点数据及纹理坐标数据是不变的,所以本篇只讨论顶点程序(Vertex Shader)不变的情况;当然顶点程序也可以同时变化,以实现一些动态的效果。等以后想出更多的idea再实现动态滤镜的效果,到时再写几篇博客分享。

        在render函数中切换片段着色器,切换时要把之前的着色器程序删除,然后再传入需要的片段着色器重新走一遍着色器的编译/链接/使用过程。 

// Per-frame entry point for the filter renderer: when the requested filter
// index changed since the last frame, delete the old GL program, install the
// matching fragment shader source, and rebuild the program — then delegate
// to the base-class render().
void OpenglesTextureFilterRender::render() {
    if (m_filter != m_prevFilter) {
        m_prevFilter = m_filter;
        // Out-of-range indices are remembered but leave the program as-is.
        if (m_filter >= 0 && m_filter < m_fragmentStringPathes.size()) {
            // Force useProgram() to re-send attribute/sampler state.
            isProgramChanged = true;
            delete_program(m_program);
            setSharderStringPath(m_vertexStringPath
                                 , m_fragmentStringPathes.at(m_filter));
            createProgram();
        }
    }

    OpenglesTexureVideoRender::render();
}

        

二.片段着色器程序GLSL:

        1.YUV数据的真实显示:

        无添加任何滤镜的状态,显示Camera的真实画面:       

#version 320 es

precision mediump float;

in vec2 v_texcoord;

uniform lowp sampler2D s_textureY;
uniform lowp sampler2D s_textureU;
uniform lowp sampler2D s_textureV;

// https://stackoverflow.com/questions/26695253/when-switching-to-glsl-300-met-the-following-error
//The predefined variable gl_FragColor does not exist anymore in GLSL ES 3.00.
//out vec4 gl_FragColor;
out vec4 FragColor;

// Pass-through preview: converts BT.601 full-range YUV to RGB, no filtering.
void main() {
     float y, u, v, r, g, b;
     // Each plane is a single-channel (luminance) texture; sample from .r.
     y = texture(s_textureY, v_texcoord).r;
     u = texture(s_textureU, v_texcoord).r;
     v = texture(s_textureV, v_texcoord).r;
     // Re-center chroma from [0,1] to [-0.5,0.5].
     u = u - 0.5;
     v = v - 0.5;
     r = y + 1.403 * v;
     g = y - 0.344 * u - 0.714 * v;
     b = y + 1.770 * u;
     FragColor = vec4(r, g, b, 1.0f);
//    gl_FragColor = vec4(r, g, b, 1.0);

}

        2.模糊处理滤镜:        

#version 320 es

precision mediump float;

in vec2 v_texcoord;

uniform lowp sampler2D s_textureY;
uniform lowp sampler2D s_textureU;
uniform lowp sampler2D s_textureV;

out vec4 FragColor;

// Converts one BT.601 full-range YUV sample (chroma centered at 0.5) to an
// opaque RGB color.
vec4 YuvToRgb(vec2 uv) {
        float y, u, v, r, g, b;
        y = texture(s_textureY, uv).r;
        u = texture(s_textureU, uv).r;
        v = texture(s_textureV, uv).r;
        u = u - 0.5;
        v = v - 0.5;
        r = y + 1.403 * v;
        g = y - 0.344 * u - 0.714 * v;
        b = y + 1.770 * u;
        return vec4(r, g, b, 1.0);
    }

// Blur filter: averages four diagonal taps around the fragment.
void main() {
     vec4 sample0, sample1, sample2, sample3;
     float blurStep = 0.5;
     // Tap offset in texture coordinates (0.5% of the [0,1] range).
     float step = blurStep / 100.0f;
     sample0 = YuvToRgb(vec2(v_texcoord.x - step, v_texcoord.y - step));
     sample1 = YuvToRgb(vec2(v_texcoord.x + step, v_texcoord.y + step));
     sample2 = YuvToRgb(vec2(v_texcoord.x + step, v_texcoord.y - step));
     sample3 = YuvToRgb(vec2(v_texcoord.x - step, v_texcoord.y + step));
     FragColor = (sample0 + sample1 + sample2 + sample3) / 4.0;
}

        3.鱼眼滤镜:        

#version 320 es

precision mediump float;
const float PI = 3.1415926535;
in vec2 v_texcoord;

uniform lowp sampler2D s_textureY;
uniform lowp sampler2D s_textureU;
uniform lowp sampler2D s_textureV;

out vec4 FragColor;

// Fisheye filter: remaps texture coordinates through a spherical lens model
// (158-degree aperture), then converts the sampled YUV to RGB.
void main() {

        float aperture = 158.0;
        float apertureHalf = 0.5 * aperture * (PI / 180.0);
        float maxFactor = sin(apertureHalf);
        vec2 uv;
        // Map texcoords from [0,1] to [-1,1] around the image center.
        vec2 xy = 2.0 * v_texcoord.xy - 1.0;
        float d = length(xy);
        if (d < (2.0 - maxFactor)) {
            // Project onto a unit sphere and back to polar texture coords.
            d = length(xy * maxFactor);
            float z = sqrt(1.0 - d * d);
            float r = atan(d, z) / PI;
            float phi = atan(xy.y, xy.x);
            uv.x = r * cos(phi) + 0.5;
            uv.y = r * sin(phi) + 0.5;
        } else {
            // Outside the lens circle: sample unmodified.
            uv = v_texcoord.xy;
        }
        float y, u, v, r, g, b;
        y = texture(s_textureY, uv).r;
        u = texture(s_textureU, uv).r;
        v = texture(s_textureV, uv).r;
        u = u - 0.5;
        v = v - 0.5;
        r = y + 1.403 * v;
        g = y - 0.344 * u - 0.714 * v;
        b = y + 1.770 * u;
        FragColor = vec4(r, g, b, 1.0);

}

        4. 旋流过滤器:          

        

#version 320 es

precision mediump float;
const float PI = 3.1415926535;
in vec2 v_texcoord;

uniform lowp sampler2D s_textureY;
uniform lowp sampler2D s_textureU;
uniform lowp sampler2D s_textureV;
uniform vec2 texSize;

out vec4 FragColor;

// Swirl filter: rotates pixels within `radius` (in pixels) of the image
// center by an angle that grows toward the center, then converts YUV to RGB.
void main() {

        float radius = 200.0;
        float angle = 0.8;
        vec2 center = vec2(texSize.x / 2.0, texSize.y / 2.0);
        // Work in pixel coordinates relative to the center.
        vec2 tc = v_texcoord * texSize;
        tc -= center;
        float dist = length(tc);
        if (dist < radius) {
              // Rotation strength falls off quadratically toward the edge.
              float percent = (radius - dist) / radius;
              float theta = percent * percent * angle * 8.0;
              float s = sin(theta);
              float c = cos(theta);
              tc = vec2(dot(tc, vec2(c, -s)), dot(tc, vec2(s, c)));
        }
        tc += center;
        float y, u, v, r, g, b;
        // Sample back in normalized texture coordinates.
        y = texture(s_textureY, tc / texSize).r;
        u = texture(s_textureU, tc / texSize).r;
        v = texture(s_textureV, tc / texSize).r;
        u = u - 0.5;
        v = v - 0.5;
        r = y + 1.403 * v;
        g = y - 0.344 * u - 0.714 * v;
        b = y + 1.770 * u;
        FragColor = vec4(r, g, b, 1.0);

}

5.放大镜滤光片:

#version 320 es

precision mediump float;
const float PI = 3.1415926535;
in vec2 v_texcoord;

uniform lowp sampler2D s_textureY;
uniform lowp sampler2D s_textureU;
uniform lowp sampler2D s_textureV;
uniform vec2 texSize;

out vec4 FragColor;

// Converts one BT.601 full-range YUV sample (chroma centered at 0.5) to an
// opaque RGB color.
vec4 YuvToRgb(vec2 uv) {
        float y, u, v, r, g, b;
        y = texture(s_textureY, uv).r;
        u = texture(s_textureU, uv).r;
        v = texture(s_textureV, uv).r;
        u = u - 0.5;
        v = v - 0.5;
        r = y + 1.403 * v;
        g = y - 0.344 * u - 0.714 * v;
        b = y + 1.770 * u;
        return vec4(r, g, b, 1.0);
    }

// Magnifier filter: inside a circle around the image center, remaps the
// sampling radius to zoom the content; everywhere else samples unmodified.
// Coordinates are aspect-corrected so the magnified region stays circular.
void main() {

       float circleRadius = float(0.5);
       float minZoom = 0.4;
       float maxZoom = 0.6;
       vec2 center = vec2(texSize.x / 2.0, texSize.y / 2.0);
       vec2 uv = v_texcoord;
       // Correct x for the aspect ratio so distances are isotropic.
       uv.x *= (texSize.x / texSize.y);
       vec2 realCenter = vec2(0.0, 0.0);
       realCenter.x = (center.x / texSize.x) * (texSize.x / texSize.y);
       realCenter.y = center.y / texSize.y;
       // Cheap bounding-box test before the exact circle test below.
       float maxX = realCenter.x + circleRadius;
       float minX = realCenter.x - circleRadius;
       float maxY = realCenter.y + circleRadius;
       float minY = realCenter.y - circleRadius;
       if (uv.x > minX && uv.x < maxX && uv.y > minY && uv.y < maxY) {
         float relX = uv.x - realCenter.x;
         float relY = uv.y - realCenter.y;
         float ang =  atan(relY, relX);
         float dist = sqrt(relX * relX + relY * relY);
         if (dist <= circleRadius) {
           // Remap the radius: zoom grows from minZoom to minZoom+maxZoom.
           float newRad = dist * ((maxZoom * dist / circleRadius) + minZoom);
           float newX = realCenter.x + cos(ang) * newRad;
           // Undo the aspect correction before sampling.
           newX *= (texSize.y / texSize.x);
           float newY = realCenter.y + sin(ang) * newRad;
           FragColor = YuvToRgb(vec2(newX, newY));
         } else {
           FragColor = YuvToRgb(v_texcoord);
         }
       } else {
          FragColor = YuvToRgb(v_texcoord);
       }


}

6.利希滕斯坦过滤器

#version 320 es

precision mediump float;
const float PI = 3.1415926535;
in vec2 v_texcoord;

uniform lowp sampler2D s_textureY;
uniform lowp sampler2D s_textureU;
uniform lowp sampler2D s_textureV;
uniform vec2 texSize;

out vec4 FragColor;



// Lichtenstein-style filter: divides the image into square cells and draws a
// colored dot (sampled at the cell corner) in each; outside the dot radius
// the fragment is a flat grey (vec4(0.25) — note this also sets alpha 0.25).
void main() {

        float size = texSize.x / 75.0;
        float radius = size * 0.5;
        // Work in pixel coordinates.
        vec2 fragCoord = v_texcoord * texSize.xy;
        vec2 quadPos = floor(fragCoord.xy / size) * size;
        vec2 quad = quadPos/texSize.xy;
        vec2 quadCenter = (quadPos + size/2.0);
        float dist = length(quadCenter - fragCoord.xy);
        float y, u, v, r, g, b;
        // One color per cell, sampled at the cell's top-left corner.
        y = texture(s_textureY, quad).r;
        u = texture(s_textureU, quad).r;
        v = texture(s_textureV, quad).r;
        u = u - 0.5;
        v = v - 0.5;
        r = y + 1.403 * v;
        g = y - 0.344 * u - 0.714 * v;
        b = y + 1.770 * u;
        if (dist > radius) {
          FragColor = vec4(0.25);
        } else {
          FragColor = vec4(r, g, b, 1.0);
        }

}

        7.三角形马赛克滤镜:

#version 320 es

precision mediump float;
const float PI = 3.1415926535;
in vec2 v_texcoord;

uniform lowp sampler2D s_textureY;
uniform lowp sampler2D s_textureU;
uniform lowp sampler2D s_textureV;
uniform vec2 texSize;

out vec4 FragColor;

 // Converts one BT.601 full-range YUV sample (chroma centered at 0.5) to an
 // opaque RGB color.
 vec4 YuvToRgb(vec2 uv) {
        float y, u, v, r, g, b;
        y = texture(s_textureY, uv).r;
        u = texture(s_textureU, uv).r;
        v = texture(s_textureV, uv).r;
        u = u - 0.5;
        v = v - 0.5;
        r = y + 1.403 * v;
        g = y - 0.344 * u - 0.714 * v;
        b = y + 1.770 * u;
        return vec4(r, g, b, 1.0);
    }

// Triangle-mosaic filter: quantizes the image into a 40x20 grid of tiles and
// splits each tile into two triangles along the diagonal, each sampled once.
void main() {

     vec2 tileNum = vec2(40.0, 20.0);
     vec2 uv = v_texcoord;
     // uv2 = tile origin; uv becomes the position inside the tile in [0,1).
     vec2 uv2 = floor(uv * tileNum) / tileNum;
     uv -= uv2;
     uv *= tileNum;
     // step() picks which triangle of the tile this fragment falls into.
     vec3 color = YuvToRgb(uv2 + vec2(step(1.0 - uv.y, uv.x) / (2.0 * tileNum.x),
     step(uv.x, uv.y) / (2.0 * tileNum.y))).rgb;
     FragColor = vec4(color, 1.0);

}

8.像素过滤器:

#version 320 es

precision mediump float;
const float PI = 3.1415926535;
in vec2 v_texcoord;

uniform lowp sampler2D s_textureY;
uniform lowp sampler2D s_textureU;
uniform lowp sampler2D s_textureV;
uniform vec2 texSize;

out vec4 FragColor;


// Pixelate filter: snaps texture coordinates to a 100x100 grid of blocks so
// every fragment inside a block samples the same texel.
void main() {

      vec2 pixelSize = vec2(texSize.x/100.0, texSize.y/100.0);
      vec2 uv = v_texcoord.xy;
      // Block size expressed in normalized texture coordinates.
      float dx = pixelSize.x*(1./texSize.x);
      float dy = pixelSize.y*(1./texSize.y);
      vec2 coord = vec2(dx*floor(uv.x/dx),
      dy*floor(uv.y/dy));
      float y, u, v, r, g, b;
      y = texture(s_textureY, coord).r;
      u = texture(s_textureU, coord).r;
      v = texture(s_textureV, coord).r;
      u = u - 0.5;
      v = v - 0.5;
      r = y + 1.403 * v;
      g = y - 0.344 * u - 0.714 * v;
      b = y + 1.770 * u;

     FragColor = vec4(r, g, b, 1.0);

}

9.交叉缝合过滤器:

#version 320 es

precision mediump float;
const float PI = 3.1415926535;
in vec2 v_texcoord;

uniform lowp sampler2D s_textureY;
uniform lowp sampler2D s_textureU;
uniform lowp sampler2D s_textureV;
uniform vec2 texSize;

out vec4 FragColor;


    // Converts one BT.601 full-range YUV sample (chroma centered at 0.5) to
    // an opaque RGB color.
    vec4 YuvToRgb(vec2 uv) {
        float y, u, v, r, g, b;
        y = texture(s_textureY, uv).r;
        u = texture(s_textureU, uv).r;
        v = texture(s_textureV, uv).r;
        u = u - 0.5;
        v = v - 0.5;
        r = y + 1.403 * v;
        g = y - 0.344 * u - 0.714 * v;
        b = y + 1.770 * u;
        return vec4(r, g, b, 1.0);
    }

    // Cross-stitch effect: tiles the image into `stitchSize` squares and
    // draws an X (the two diagonals) in each, colored from the tile's
    // top-left sample (brightened 1.4x); the rest of the tile is black.
    // `invert` swaps the X and background treatment (0 here = normal).
    vec4 CrossStitching(vec2 uv) {
            float stitchSize = texSize.x / 35.0;
            int invert = 0;
            vec4 color = vec4(0.0);
            float size = stitchSize;
            // Pixel position and the containing tile's top-left corner.
            vec2 cPos = uv * texSize.xy;
            vec2 tlPos = floor(cPos / vec2(size, size));
            tlPos *= size;
            int remX = int(mod(cPos.x, size));
            int remY = int(mod(cPos.y, size));
            if (remX == 0 && remY == 0)
                tlPos = cPos;
            vec2 blPos = tlPos;
            blPos.y += (size - 1.0);
            // On either diagonal of the tile?
            if ((remX == remY) || (((int(cPos.x) - int(blPos.x)) == (int(blPos.y) - int(cPos.y))))) {
                if (invert == 1)
                    color = vec4(0.2, 0.15, 0.05, 1.0);
                else
                    color = YuvToRgb(tlPos * vec2(1.0 / texSize.x, 1.0 / texSize.y)) * 1.4;
            } else {
                if (invert == 1)
                    color = YuvToRgb(tlPos * vec2(1.0 / texSize.x, 1.0 / texSize.y)) * 1.4;
                else
                    color = vec4(0.0, 0.0, 0.0, 1.0);
            }
            return color;
        }

// Cross-stitch filter entry point: delegates per-fragment to CrossStitching.
void main() {
     FragColor = CrossStitching(v_texcoord);

}

10.Toonify过滤器:

#version 320 es

precision mediump float;
const float PI = 3.1415926535;
in vec2 v_texcoord;

uniform lowp sampler2D s_textureY;
uniform lowp sampler2D s_textureU;
uniform lowp sampler2D s_textureV;
uniform vec2 texSize;
const int kHueLevCount = 6;
const int kSatLevCount = 7;
const int kValLevCount = 4;
float hueLevels[kHueLevCount];
float satLevels[kSatLevCount];
float valLevels[kValLevCount];
float edge_thres = 0.2;
float edge_thres2 = 5.0;


out vec4 FragColor;


    // Converts one BT.601 full-range YUV sample (chroma centered at 0.5) to
    // an opaque RGB color.
    vec4 YuvToRgb(vec2 uv) {
        float y, u, v, r, g, b;
        y = texture(s_textureY, uv).r;
        u = texture(s_textureU, uv).r;
        v = texture(s_textureV, uv).r;
        u = u - 0.5;
        v = v - 0.5;
        r = y + 1.403 * v;
        g = y - 0.344 * u - 0.714 * v;
        b = y + 1.770 * u;
        return vec4(r, g, b, 1.0);
    }

   // Converts RGB (each in [0,1]) to HSV: x = hue in degrees [0,360),
   // y = saturation, z = value. For pure grey (max == 0) hue is set to -1.
   vec3 RGBtoHSV(float r, float g, float b) {
           float minv, maxv, delta;
           vec3 res;
           minv = min(min(r, g), b);
           maxv = max(max(r, g), b);
           res.z = maxv;
           delta = maxv - minv;
           if (maxv != 0.0)
               res.y = delta / maxv;
           else {
               // Black: saturation 0, hue undefined (-1 sentinel).
               res.y = 0.0;
               res.x = -1.0;
               return res;
           }
           // Hue sector depends on which channel is the maximum.
           if (r == maxv)
               res.x = ( g - b ) / delta;
           else if (g == maxv)
               res.x = 2.0 + ( b - r ) / delta;
           else
               res.x = 4.0 + ( r - g ) / delta;
           res.x = res.x * 60.0;
           if(res.x < 0.0)
               res.x = res.x + 360.0;
           return res;
       }
       // Converts HSV back to RGB: h in degrees [0,360), s and v in [0,1].
       // Standard six-sector hue wheel reconstruction.
       vec3 HSVtoRGB(float h, float s, float v ) {
           int i;
           float f, p, q, t;
           vec3 res;
           if(s == 0.0) {
               // Achromatic: all channels equal the value.
               res.x = v;
               res.y = v;
               res.z = v;
               return res;
           }
           h /= 60.0;
           i = int(floor( h ));
           f = h - float(i);
           p = v * ( 1.0 - s );
           q = v * ( 1.0 - s * f );
           t = v * ( 1.0 - s * ( 1.0 - f ) );
           if (i == 0) {
                   res.x = v;
                   res.y = t;
                   res.z = p;
           } else if (i == 1) {
                   res.x = q;
                   res.y = v;
                   res.z = p;
           } else if (i == 2) {
                   res.x = p;
                   res.y = v;
                   res.z = t;
           } else if (i == 3) {
                   res.x = p;
                   res.y = q;
                   res.z = v;
           } else if (i == 4) {
                   res.x = t;
                   res.y = p;
                   res.z = v;
           } else if (i == 5) {
                   res.x = v;
                   res.y = p;
                   res.z = q;
           }
           return res;
       }

       // Quantizes `col` to the upper bound of the level bracket it falls in,
       // using the table selected by `mode` (0 = hue, 1 = saturation,
       // 2 = value). Tables are filled in main() before any call.
       // Fix: the original had no return when `col` fell outside every
       // [level_i, level_i+1] bracket — a non-void GLSL function reaching the
       // end without `return` yields an undefined value. Fall back to
       // returning the input unchanged.
       float nearestLevel(float col, int mode) {
           int levCount;
           if (mode==0) levCount = kHueLevCount;
           if (mode==1) levCount = kSatLevCount;
           if (mode==2) levCount = kValLevCount;
           for (int i=0; i<levCount-1; i++ ) {
               if (mode==0) {
                   if (col >= hueLevels[i] && col <= hueLevels[i+1]) {
                       return hueLevels[i+1];
                   }
               }
               if (mode==1) {
                   if (col >= satLevels[i] && col <= satLevels[i+1]) {
                       return satLevels[i+1];
                   }
               }
               if (mode==2) {
                   if (col >= valLevels[i] && col <= valLevels[i+1]) {
                       return valLevels[i+1];
                   }
               }
           }
           return col;
       }
       // Mean of the RGB channels, used as a grey-scale intensity.
       float avgIntensity(vec4 pix) {
           return (pix.r + pix.g + pix.b)/3.;
       }
       // Samples the frame at `coords` offset by (dx, dy) in texture space.
       vec4 getPixel(vec2 coords, float dx, float dy) {
           return YuvToRgb(coords + vec2(dx, dy));
       }
       // Edge strength at `coords`: reads the 3x3 intensity neighborhood and
       // averages the absolute differences of the four opposing pixel pairs,
       // scaled by edge_thres2 and clamped to [0,1].
       float IsEdge(in vec2 coords) {
           float dxtex = 1.0 / float(texSize.x);
           float dytex = 1.0 / float(texSize.y);
           float pix[9];
           int k = -1;
           float delta;
           // Fill pix[0..8] row-major over the 3x3 neighborhood.
           for (int i=-1; i<2; i++) {
               for(int j=-1; j<2; j++) {
                   k++;
                   pix[k] = avgIntensity(getPixel(coords,float(i)*dxtex, float(j)*dytex));
               }
           }
           delta = (abs(pix[1]-pix[7]) + abs(pix[5]-pix[3]) + abs(pix[0]-pix[8])+ abs(pix[2]-pix[6]))/4.;
           return clamp(edge_thres2*delta,0.0,1.0);
       }

// Toonify filter: posterizes the image by quantizing hue/saturation/value to
// fixed levels, and paints strong edges black for a cartoon outline.
void main() {
        // Quantization tables (filled at runtime because GLSL ES global
        // arrays cannot be initialized with non-constant expressions here).
        hueLevels[0] = 0.0;
        hueLevels[1] = 140.0;
        hueLevels[2] = 160.0;
        hueLevels[3] = 240.0;
        // NOTE(review): hueLevels[3] and [4] are both 240.0 — possibly an
        // intentional degenerate bracket, but verify against the source.
        hueLevels[4] = 240.0;
        hueLevels[5] = 360.0;
        satLevels[0] = 0.0;
        satLevels[1] = 0.15;
        satLevels[2] = 0.3;
        satLevels[3] = 0.45;
        satLevels[4] = 0.6;
        satLevels[5] = 0.8;
        satLevels[6] = 1.0;
        valLevels[0] = 0.0;
        valLevels[1] = 0.3;
        valLevels[2] = 0.6;
        valLevels[3] = 1.0;
        vec2 uv = v_texcoord;
        vec3 color = YuvToRgb(uv).rgb;
        // Quantize in HSV space, then convert back.
        vec3 vHSV =  RGBtoHSV(color.r, color.g, color.b);
        vHSV.x = nearestLevel(vHSV.x, 0);
        vHSV.y = nearestLevel(vHSV.y, 1);
        vHSV.z = nearestLevel(vHSV.z, 2);
        // Black outline where the edge response exceeds the threshold.
        float edg = IsEdge(uv);
        vec3 vRGB = (edg >= edge_thres) ? vec3(0.0,0.0,0.0) : HSVtoRGB(vHSV.x,vHSV.y,vHSV.z);

        FragColor = vec4(vRGB.x, vRGB.y, vRGB.z, 1.0);

}

11.捕食者热视觉滤镜:

#version 320 es

precision mediump float;
const float PI = 3.1415926535;
in vec2 v_texcoord;

uniform lowp sampler2D s_textureY;
uniform lowp sampler2D s_textureU;
uniform lowp sampler2D s_textureV;
uniform vec2 texSize;

out vec4 FragColor;

// Predator-style thermal-vision filter: converts YUV to RGB, reduces it to a
// mean intensity, then maps that through a blue -> yellow -> red gradient.
void main() {
               float y, u, v, r, g, b;
               y = texture(s_textureY, v_texcoord).r;
               u = texture(s_textureU, v_texcoord).r;
               v = texture(s_textureV, v_texcoord).r;
               u = u - 0.5;
               v = v - 0.5;
               r = y + 1.403 * v;
               g = y - 0.344 * u - 0.714 * v;
               b = y + 1.770 * u;
               vec3 color = vec3(r, g, b);
               vec2 uv = v_texcoord.xy;
               // Three gradient stops: cold blue, warm yellow, hot red.
               vec3 colors[3];
               colors[0] = vec3(0.,0.,1.);
               colors[1] = vec3(1.,1.,0.);
               colors[2] = vec3(1.,0.,0.);
               float lum = (color.r + color.g + color.b)/3.;
               // Pick the gradient segment, then interpolate inside it.
               int idx = (lum < 0.5) ? 0 : 1;
               vec3 rgb = mix(colors[idx],colors[idx+1],(lum-float(idx)*0.5)/0.5);
               FragColor = vec4(rgb, 1.0);

}

12.压花过滤器:

#version 320 es

precision mediump float;
const float PI = 3.1415926535;
in vec2 v_texcoord;

uniform lowp sampler2D s_textureY;
uniform lowp sampler2D s_textureU;
uniform lowp sampler2D s_textureV;
uniform vec2 texSize;

out vec4 FragColor;

 // Converts one BT.601 full-range YUV sample (chroma centered at 0.5) to an
 // opaque RGB color.
 vec4 YuvToRgb(vec2 uv) {
        float y, u, v, r, g, b;
        y = texture(s_textureY, uv).r;
        u = texture(s_textureU, uv).r;
        v = texture(s_textureV, uv).r;
        u = u - 0.5;
        v = v - 0.5;
        r = y + 1.403 * v;
        g = y - 0.344 * u - 0.714 * v;
        b = y + 1.770 * u;
        return vec4(r, g, b, 1.0);
    }

// Emboss filter: difference of the two diagonally opposite one-pixel
// neighbors (scaled by 5) around a mid-grey base, collapsed to grey-scale.
void main() {
               vec4 color;
               color.rgb = vec3(0.5);
               vec2 onePixel = vec2(1.0 / texSize.x, 1.0 / texSize.y);
               color -= YuvToRgb(v_texcoord - onePixel) * 5.0;
               color += YuvToRgb(v_texcoord + onePixel) * 5.0;
               // Collapse to grey so only the relief remains.
               color.rgb = vec3((color.r + color.g + color.b) / 3.0);
               FragColor = vec4(color.rgb, 1.0);

}

13.边缘检测滤波器:

#version 320 es

precision mediump float;
const float PI = 3.1415926535;
in vec2 v_texcoord;

uniform lowp sampler2D s_textureY;
uniform lowp sampler2D s_textureU;
uniform lowp sampler2D s_textureV;
uniform vec2 texSize;

out vec4 FragColor;

 // Converts one BT.601 full-range YUV sample (chroma centered at 0.5) to an
 // opaque RGB color.
 vec4 YuvToRgb(vec2 uv) {
        float y, u, v, r, g, b;
        y = texture(s_textureY, uv).r;
        u = texture(s_textureU, uv).r;
        v = texture(s_textureV, uv).r;
        u = u - 0.5;
        v = v - 0.5;
        r = y + 1.403 * v;
        g = y - 0.344 * u - 0.714 * v;
        b = y + 1.770 * u;
        return vec4(r, g, b, 1.0);
    }

// Edge-detection filter: 3x3 Laplacian-style convolution (center 8, all
// neighbors -1) applied to the RGB-converted image; flat areas cancel to
// black, edges remain bright.
void main() {
               vec2 pos = v_texcoord.xy;
               vec2 onePixel = vec2(1, 1) / texSize;
               vec4 color = vec4(0);
               // Kernel sums to zero; symmetric, so indexing order is moot.
               mat3 edgeDetectionKernel = mat3(
                    -1, -1, -1,
                    -1, 8, -1,
                     -1, -1, -1
               );
               for(int i = 0; i < 3; i++) {
                  for(int j = 0; j < 3; j++) {
                    vec2 samplePos = pos + vec2(i - 1 , j - 1) * onePixel;
                    vec4 sampleColor = YuvToRgb(samplePos);
                    sampleColor *= edgeDetectionKernel[i][j];
                    color += sampleColor;
                  }
               }
               FragColor = vec4(color.rgb, 1.0);

}


原文地址:https://blog.csdn.net/wangyongyao1989/article/details/142280571

免责声明:本站文章内容转载自网络资源,如本站内容侵犯了原著者的合法权益,可联系本站删除。更多内容请关注自学内容网(zxcms.com)!