OpenGL ES series
- GLKit que
- Introduction and rendering process
- GLKit app loads images
- Filter 1- Split screen filter
- Filter 2- Grayscale, Reverse, Vortex, Mosaic
- Filter 3- Zoom, out-of-body, flash white, burr
1. Zoom filter
Principle: The basic idea is to modify the correspondence between vertex coordinates and texture coordinates over time, so that the sampled region of the texture periodically shrinks and grows around an anchor point (a worked numeric example follows the shader code below).
Fragment shader code implementation:
precision highp float;

uniform sampler2D Texture;
varying vec2 TextureCoordsVarying;
// current time progress, updated every frame
uniform float Time;

const float PI = 3.1415926;

void main (void) {
    // duration of one zoom cycle
    float duration = 0.6;
    // maximum scale increment
    float maxAmplitude = 0.3;
    // anchor (center) point of the zoom
    vec2 anchorPoint = vec2(0.5, 0.5);

    // compute the current scale from the time progress
    float time = mod(Time, duration);
    float amplitude = 1.0 + maxAmplitude * abs(sin(time * (PI / duration)));

    // remap the texture coordinates toward the anchor point
    vec2 textCoords = TextureCoordsVarying;
    textCoords = vec2(anchorPoint.x + (textCoords.x - anchorPoint.x) / amplitude,
                      anchorPoint.y + (textCoords.y - anchorPoint.y) / amplitude);

    // sample the texture at the remapped coordinates
    vec4 mask = texture2D(Texture, textCoords);
    gl_FragColor = vec4(mask.rgb, 1.0);
}
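Since amplitude is always at least 1.0, dividing the distance from the anchor point by it samples a smaller region around the center: for example, at amplitude = 1.3 the texture coordinate (1.0, 1.0) is remapped to roughly (0.885, 0.885), so the sampled area shrinks and the image appears zoomed in, then relaxes back as amplitude falls toward 1.0.

This shader (and every shader that follows) also depends on the Time uniform being updated on the CPU side every frame. Purely as an illustrative sketch, not code from this article: the class, property, and method names below are hypothetical, and it assumes a compiled shader program whose time uniform is literally named Time, driven by a CADisplayLink.

import GLKit
import OpenGLES
import QuartzCore

final class FilterRenderer: NSObject {
    var program: GLuint = 0          // hypothetical: the linked filter shader program
    weak var glkView: GLKView?       // hypothetical: the GLKView being redrawn

    private var startTime: CFTimeInterval = 0
    private var displayLink: CADisplayLink?

    func startAnimating() {
        startTime = CACurrentMediaTime()
        let link = CADisplayLink(target: self, selector: #selector(tick))
        link.add(to: .main, forMode: .common)
        displayLink = link
    }

    @objc private func tick() {
        // elapsed seconds since the animation started; mod(Time, duration) is handled in the shader
        let elapsed = GLfloat(CACurrentMediaTime() - startTime)
        glUseProgram(program)
        glUniform1f(glGetUniformLocation(program, "Time"), elapsed)
        glkView?.display()           // redraw with the updated uniform
    }
}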
2. Out-of-body filter
Principle: Two layers are superimposed; as time passes, the top ("soul") layer gradually enlarges while its opacity decreases. The same zoom technique as above is reused here.
Fragment shader code implementation:
precision highp float;

uniform sampler2D Texture;
varying vec2 TextureCoordsVarying;
// current time progress, updated every frame
uniform float Time;

void main (void) {
    // duration of one cycle
    float duration = 0.7;
    // maximum opacity of the soul layer
    float maxAlpha = 0.4;
    // maximum scale of the soul layer
    float maxScale = 1.8;
    // anchor point around which the soul layer scales
    vec2 anchorPoint = vec2(0.5, 0.5);

    // progress within the current cycle, 0 ~ 1
    float progress = mod(Time, duration) / duration;
    // opacity of the soul layer decreases with progress
    float alpha = maxAlpha * (1.0 - progress);
    // scale of the soul layer increases with progress
    float scale = 1.0 + (maxScale - 1.0) * progress;

    // texture coordinates of the soul layer, remapped toward the anchor point
    float weakX = anchorPoint.x + (TextureCoordsVarying.x - anchorPoint.x) / scale;
    float weakY = anchorPoint.y + (TextureCoordsVarying.y - anchorPoint.y) / scale;
    vec2 weakTextureCoords = vec2(weakX, weakY);

    // soul layer
    vec4 weakMask = texture2D(Texture, weakTextureCoords);
    // original layer
    vec4 mask = texture2D(Texture, TextureCoordsVarying);

    // blend the two layers
    gl_FragColor = mask * (1.0 - alpha) + weakMask * alpha;
}
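To put concrete numbers on it: with duration = 0.7, maxAlpha = 0.4 and maxScale = 1.8, at the middle of a cycle (progress = 0.5) the soul layer is drawn at scale 1.4 with opacity 0.2; as progress approaches 1.0 it reaches scale 1.8 with opacity 0. The enlarged copy therefore fades out completely just as it becomes largest, and the next cycle starts again from the un-scaled image.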
3. Flash white filter
Principle: Simply blend in a white layer whose opacity changes over time.
Fragment shader code implementation:
precision highp float;

uniform sampler2D Texture;
varying vec2 TextureCoordsVarying;
// current time progress, updated every frame
uniform float Time;

const float PI = 3.1415926;

void main (void) {
    // duration of one cycle
    float duration = 0.6;
    // progress within the current cycle
    float progress = mod(Time, duration);

    // pure white layer
    vec4 whiteMask = vec4(1.0, 1.0, 1.0, 1.0);
    // blend factor, goes 0 -> 1 -> 0 over one cycle
    float amplitude = abs(sin(progress * (PI / duration)));

    // original layer
    vec4 mask = texture2D(Texture, TextureCoordsVarying);

    // blend the original layer with the white layer
    gl_FragColor = mask * (1.0 - amplitude) + whiteMask * amplitude;
}
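Because progress runs over [0, duration), the blend factor amplitude = abs(sin(progress * (PI / duration))) rises from 0 to 1 and falls back to 0 within each 0.6-second cycle: the frame looks normal at the start and end of the cycle and turns pure white at the midpoint, which is what produces the periodic flash.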
4. Burr filter
Principle: Tearing plus a weak color offset. Each row of pixels is given a random offset between -1 and 1 (in texture coordinates), but if every row were offset by such a large value, the original image would no longer be recognizable. So the logic is to set a threshold: a jitter below the threshold is applied as-is, while a jitter beyond the threshold is multiplied by a reduction factor. The end result is that most rows are only slightly offset, and only a small number of rows are offset significantly, as the worked numbers after the shader code show.
Fragment shader code implementation:
precision highp float;
uniform sampler2D Texture;
varying vec2 TextureCoordsVarying;
uniform float Time;
const float PI = 3.1415926;

// pseudo-random number derived from the input value
float rand(float n) {
    return fract(sin(n) * 43758.5453123);
}

void main (void) {
    // maximum tearing offset
    float maxJitter = 0.06;
    // duration of one cycle
    float duration = 0.3;
    // red channel color offset
    float colorROffset = 0.01;
    // blue channel color offset
    float colorBOffset = 0.025;

    // compute the amplitude
    float time = mod(Time, duration * 2.0);
    float amplitude = max(sin(time * (PI / duration)), 0.0);

    // random per-row pixel offset in [-1, 1]
    float jitter = rand(TextureCoordsVarying.y) * 2.0 - 1.0;
    bool needOffset = abs(jitter) < maxJitter * amplitude;

    // x coordinate after tearing:
    // needOffset == true  -> apply the full jitter
    // needOffset == false -> apply the jitter scaled down by the reduction factor
    float textureX = TextureCoordsVarying.x + (needOffset ? jitter : (jitter * amplitude * 0.006));

    // texture coordinates after tearing
    vec2 textureCoords = vec2(textureX, TextureCoordsVarying.y);

    // torn layer, plus red and blue samples shifted for the weak color offset
    vec4 mask = texture2D(Texture, textureCoords);
    vec4 maskR = texture2D(Texture, textureCoords + vec2(colorROffset * amplitude, 0.0));
    vec4 maskB = texture2D(Texture, textureCoords + vec2(colorBOffset * amplitude, 0.0));

    gl_FragColor = vec4(maskR.r, mask.g, maskB.b, mask.a);
}
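To see why only a few rows tear visibly: jitter is roughly uniform in [-1, 1], so with maxJitter = 0.06 and amplitude = 1.0 only about 6% of the rows satisfy abs(jitter) < maxJitter * amplitude. Those rows are offset by their full jitter, up to 6% of the texture width, while every other row is offset by jitter * amplitude * 0.006, at most 0.6%. On top of the tearing, maskR and maskB sample the texture shifted by colorROffset and colorBOffset, and the final color takes its red and blue channels from those shifted samples, which produces the weak color fringing.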