Shader value conversion error when passing a value between vertex and fragment shader

I have the following vertex and fragment shaders.
Vertex:
#version 450
layout(location = 0) in vec2 Position;
layout(location = 1) in vec4 Color;
layout(location = 0) out vec2 fPosition;
void main()
{
gl_Position = vec4(Position, 0, 1);
fPosition = Position;
}
Fragment:
#version 450
layout(location = 0) in vec2 fPosition;
layout(location = 0) out vec4 fColor;
void main() {
vec4 colors[4] = vec4[](
vec4(1.0, 0.0, 0.0, 1.0),
vec4(0.0, 1.0, 0.0, 1.0),
vec4(0.0, 0.0, 1.0, 1.0),
vec4(0.0, 0.0, 0.0, 1.0)
);
fColor = vec4(1.0);
for(int row = 0; row < 2; row++) {
for(int col = 0; col < 2; col++) {
float dist = distance(fPosition, vec2(-0.50 + col, 0.50 - row));
float delta = fwidth(dist);
float alpha = smoothstep(0.45-delta, 0.45, dist);
fColor = mix(colors[row*2+col], fColor, alpha);
}
}
}
But when compiling this I am getting the following error:
cannot convert from ' gl_Position 4-component vector of float Position' to 'layout( location=0) smooth out highp 2-component vector of float'
And I have no clue how to fix it (this is my first time doing shader programming).
If additional information is needed please let me know.

1.
You do not need to specify layouts when transferring variables between the vertex shader and the fragment shader. Remove the layout(location = 0) qualifier from the fPosition variable in both the vertex and the fragment shader.
2.
You only need to specify a layout if you are passing the variables (your position buffers) to the vertex shader through buffers. To add on, variables like positions, normals and textureCoords must always pass through the vertex shader first and then to the fragment shader.
3.
When exporting your final colour (fColor in your case) from the fragment shader, you do not need to give a location; just declare it as out vec4 fColor; and OpenGL detects it automatically.
4.
The error you actually got was telling you that you were assigning a vec4 variable (fColor) to a location already holding your vec2 variable (fPosition). Note: in your vertex shader you had accessed the loaded vertices at attribute (location) "0", but you then tried to attach a vec4 to that same location in the fragment shader. OpenGL does not automatically overwrite data like that.
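Putting points 1–3 together, a minimal sketch of the suggested fix (only the interface declarations change; the function bodies stay exactly as in the question):
Vertex:
#version 450
layout(location = 0) in vec2 Position;
layout(location = 1) in vec4 Color;
out vec2 fPosition; // no layout qualifier on the interface variable
void main()
{
gl_Position = vec4(Position, 0, 1);
fPosition = Position;
}
Fragment:
#version 450
in vec2 fPosition; // matched to the vertex output by name
out vec4 fColor; // single output; no location needed
void main() {
// ...body unchanged from the question...
}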

Related

Optimization for scene with custom shader

I have a three.js scene made with Rogue Engine, which I'm using to make a VR experience.
In that scene I'm using a fairly complex shader. It takes the world-space locations of two locators for transitioning between the normal shading and a flat color; the transition uses noise for effect (see video below, it shows the effect of the first locator, but the second one is similar, going bottom to top).
The locations of the objects are passed as Vector3 uniforms. I'm injecting the shader into a MeshStandardMaterial using onBeforeCompile.
The performance is already bad and really tanks when I'm using textures. I'm using three texture sets for the scene, with diffuse, roughness, metalness, emission and AO maps, so each is sampled three times and then masked using vertex colors (not present in the code below).
varying vec3 W_Pos; //world position vector
varying vec3 F_Nrml; //normal vector
varying vec3 camDir; // cam facing
varying vec3 vertexColor;
uniform vec3 astral_locator; // First locator
uniform vec3 astral_spread; // i pass the locator's scale here and scale it up for the transition
uniform vec3 starScatter_starScale_nScale; //three float parameters im passing as vector for easier control in rogue engine
uniform vec3 breakPoints;
uniform vec3 c1;
uniform vec3 c2;
uniform vec3 c3;
uniform vec3 noise_locator; //Second locator
uniform vec3 nStretch_nScale_emSharp;// same as above, three floats passed as a vector
uniform vec3 emCol;
vec4 mod289(vec4 x){return x - floor(x * (1.0 / 289.0)) * 289.0;}
vec4 perm(vec4 x){return mod289(((x * 34.0) + 1.0) * x);}
vec3 rand2( vec3 p ) {
return fract(
sin(
vec3(dot(p,vec3(127.1,310.7,143.54)),dot(p,vec3(269.5,183.3,217.42)),dot(p,vec3(2459.5,133.3,17.42))))*43758.5453);
}
float mapping(float number, float inMin, float inMax, float outMin, float outMax){return (number - inMin) * (outMax - outMin) / (inMax - inMin) + outMin;}
vec4 vertexMask(vec4 map1, vec4 map2, vec4 map3, vec3 vertMask){vec4 me1 = mix(vec4(0.0), map1,vertMask.r); vec4 me2 = mix(me1, map2,vertMask.g); vec4 me3 = mix(me2, map3,vertMask.b); return me3;}
//Noises
float noise(vec3 p){
vec3 a = floor(p);
vec3 d = p - a;
d = d * d * (3.0 - 2.0 * d);
vec4 b = a.xxyy + vec4(0.0, 1.0, 0.0, 1.0);
vec4 k1 = perm(b.xyxy);
vec4 k2 = perm(k1.xyxy + b.zzww);
vec4 c = k2 + a.zzzz;
vec4 k3 = perm(c);
vec4 k4 = perm(c + 1.0);
vec4 o1 = fract(k3 * (1.0 / 41.0));
vec4 o2 = fract(k4 * (1.0 / 41.0));
vec4 o3 = o2 * d.z + o1 * (1.0 - d.z);
vec2 o4 = o3.yw * d.x + o3.xz * (1.0 - d.x);
return o4.y * d.y + o4.x * (1.0 - d.y);
}
float facing(){
vec3 nrml = F_Nrml;
vec3 cam = camDir;
vec3 normal = normalize(nrml.xyz);
vec3 eye = normalize(-cam);
float rim = smoothstep(-0.75, 1.0, 1.0 - dot(normal, eye));
return clamp(rim, 0.0, 1.0);
}
//Function for the second locator
vec2 noiseMove(vec3 loc,vec3 noiseDat){
float noise_stretch = noiseDat.x;
float noise_scale = noiseDat.y;
float emission_sharp = noiseDat.z;
float noise_move = -loc.y;
float gen_Pattern;
float gen_Pattern_invert;
float emi_sharp_fac;
float transparency;
float emission;
gen_Pattern = ((W_Pos.y+noise_move)*noise_stretch) + noise(W_Pos.xyz*noise_scale);
gen_Pattern_invert = 1.0 - gen_Pattern;
emi_sharp_fac = clamp(emission_sharp*1000.0,1.0,1000.0)*gen_Pattern;
emission = emission_sharp*gen_Pattern;
emission = 1.0 - emission;
emission = emission * emi_sharp_fac;
emission = clamp(emission,0.0,1.0);
transparency = clamp(gen_Pattern_invert,0.0,1.0);
return vec2(emission,transparency);
}
//Function for the first locator
vec4 astral(vec3 loc, vec3 spr,vec3 cee1,vec3 cee2,vec3 cee3, vec3 breakks, vec3 star){//star is WIP
float f = facing();
float re1 = mapping(f,breakks.x,1.0,0.0,1.0);
float re2 = mapping(f,breakks.y,1.0,0.0,1.0);
float re3 = mapping(f,breakks.z,1.0,0.0,1.0);
vec3 me1 = mix(vec3(0.,0.,0.),cee1,re1);
vec3 me2 = mix(me1,cee2,re2);
vec3 me3 = mix(me2,cee3,re3);
float dist = distance(W_Pos.xyz + (noise(W_Pos.xyz*star.z)-0.5),loc);
float val = step(dist,spr.x);
return vec4(me3,val);
}
void main(){
vec4 ast = astral(astral_locator,astral_spread,c1,c2,c3,breakPoints,starScatter_starScale_nScale);
vec2 noice = noiseMove(noise_locator,nStretch_nScale_emSharp);
vec3 outp = mix(mix(outgoingLight,ast.xyz,ast.w),emCol,noice.x); //Take output light from the three.js shader and mix it with the custom shader
float t = noice.y;
#ifdef NONSCIFI
t = 1.0 - noice.y;
#endif
t *= diffuseColor.a;
gl_FragColor = vec4(outp*t,t);
}
Is there a way to optimize it better? A couple of things I can think of: storing the noise and then using it instead of calculating it every frame (sketched below), and figuring out occlusion culling (a render pass doesn't work well in VR, so I can't store the depth pass; I've got to figure out another way). Objects in the scene are already instanced to reduce draw calls. I'm assuming making some objects static might help, including the locators, but I don't know if that will stop the uniforms from updating every frame.
Is there anything else that can be done?
Also, I apologize for the structure of the question; I rarely post questions, thanks to Stack Overflow :p
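As a starting point for the noise idea, a sketch of what baking could look like on the shader side (bakedNoise is a hypothetical texture you would fill once, e.g. by rendering the existing noise() to a render target; it is not part of the code above):
uniform sampler2D bakedNoise; // hypothetical: noise baked once instead of recomputed per fragment
float noiseBaked(vec3 p){
// one texture fetch replaces the floor/perm/fract chain in noise();
// fract() tiles the lookup, assuming the baked texture is tileable
return texture2D(bakedNoise, fract(p.xz)).r;
}
This trades the ALU cost of noise() for one extra sample, which only helps if you are ALU-bound rather than texture-bound, so it is worth profiling both ways.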

Vulkan compute shader: smooth uv coordinates

I have this shader:
#version 450
layout(binding = 0) buffer b0 {
vec2 src[ ];
};
layout(binding = 1) buffer b1 {
vec2 dst[ ];
};
layout(binding = 2) buffer b2 {
int index[ ];
};
layout (local_size_x = 1, local_size_y = 1, local_size_z = 1) in;
void main()
{
int ind =int(gl_GlobalInvocationID.x);
vec2 norm;
norm=src[index[ind*3+2]]-src[index[ind*3]]+src[index[ind*3+1]]-src[index[ind*3]];
norm/=2.0;
dst[index[ind*3]] +=norm;
norm=src[index[ind*3+0]]-src[index[ind*3+1]]+src[index[ind*3+2]]-src[index[ind*3+1]];
norm/=2.0;
dst[index[ind*3+1]] +=norm;
norm=src[index[ind*3+1]]-src[index[ind*3+2]]+src[index[ind*3+0]]-src[index[ind*3+2]];
norm/=2.0;
dst[index[ind*3+2]] +=norm;
}
Because the dst buffer is not "atomic", the summation is incorrect.
Is there any way to solve this problem? My answer is no, but maybe I missed something.
For each vertex in a polygon I am calculating a vector from the vertex to the center of the polygon. Different polygons share the same vertices.
dst is a vertex buffer holding the result of the summation of those shifts from vertex to polygon center.
Each time I get different computation results.
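There is a workaround, though it changes the buffer format: core GLSL only provides atomicAdd for int and uint buffer members, so the sums can be accumulated in fixed point and converted back afterwards. A sketch, assuming 16.16 fixed point is precise enough for your UVs (dstFixed replaces the vec2 dst buffer):
#version 450
layout(binding = 0) buffer b0 { vec2 src[ ]; };
layout(binding = 1) buffer b1 { int dstFixed[ ]; }; // 2 ints per vertex: x at 2*v, y at 2*v+1
layout(binding = 2) buffer b2 { int index[ ]; };
layout (local_size_x = 1, local_size_y = 1, local_size_z = 1) in;
const float SCALE = 65536.0; // 16.16 fixed point
void addToVertex(int v, vec2 n)
{
atomicAdd(dstFixed[2*v+0], int(round(n.x*SCALE)));
atomicAdd(dstFixed[2*v+1], int(round(n.y*SCALE)));
}
void main()
{
int ind = int(gl_GlobalInvocationID.x);
vec2 a = src[index[ind*3+0]];
vec2 b = src[index[ind*3+1]];
vec2 c = src[index[ind*3+2]];
addToVertex(index[ind*3+0], ((c-a)+(b-a))/2.0);
addToVertex(index[ind*3+1], ((a-b)+(c-b))/2.0);
addToVertex(index[ind*3+2], ((b-c)+(a-c))/2.0);
}
A final pass (or the CPU) divides dstFixed by SCALE to recover the floats. True float atomics also exist through the VK_EXT_shader_atomic_float extension, if your target hardware supports it.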

GLSL variables not storing?

I am learning GLSL through Unity and I recently came across a problem involving the storing of variables.
Shader "Shader" {
Properties{
_Hole_Position("hole_Position", Vector) = (0., 0., 0., 1.0)
_Hole_EventHorizonDistance("hole_EventHorizonDistance", Float) = 1.0
_DebugValue("debugValue", Float) = 0.0
}
SubShader{
Pass{
GLSLPROGRAM
uniform mat4 _Object2World;
//Variables
varying float debugValue;
varying vec4 pos;
varying vec4 hole_Position;
varying float hole_EventHorizonDistance = 1;
#ifdef VERTEX
void main()
{
pos = _Object2World * gl_Vertex;
gl_Position = gl_ModelViewProjectionMatrix * gl_Vertex;
}
#endif
#ifdef FRAGMENT
void main()
{
float dist = distance(vec4(pos.x, 0.0,pos.z, 1.0), vec4(hole_Position.x, 0.0, hole_Position.z, 1.0));
debugValue = dist;
if (dist < hole_EventHorizonDistance)
{
gl_FragColor = vec4(0.3, 0.3, 0.3, 1.0);
}
else
{
gl_FragColor = vec4(0.4, 0.6, 1.0, 1.0);
}
//gl_FragColor = vec4(hole_EventHorizonDistance, 0, 0, 1.0);
}
#endif
ENDGLSL
}
}
}
Now _Hole_Position and _Hole_EventHorizonDistance are changed from an external C# script with:
g.GetComponent<Renderer>().sharedMaterial.SetVector("_Hole_Position", new Vector4(transform.position.x, transform.position.y, transform.position.z, 1));
g.GetComponent<Renderer>().sharedMaterial.SetFloat("_Hole_EventHorizonDistance", 2);
This does not work as I intend it to (changing the fragment's color if its position is within 2 units of the hole position). However, debugging with:
gl_FragColor = vec4(hole_EventHorizonDistance, 0, 0, 1.0);
seemingly suggests that hole_EventHorizonDistance is 0 at all times (the mesh being tested remains completely black). However, debugging by getting and printing the variable from outside, via
print(g.GetComponent<Renderer>().sharedMaterial.GetFloat("_Hole_EventHorizonDistance"));
tells me _Hole_EventHorizonDistance = 2. I cannot wrap my head around why this is the case. Why is it so?
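One observation (not a verified fix, but worth checking): the shader declares these values as varying, and a varying is never written from the CPU side; Unity binds material properties to uniforms whose names match the Properties block. A sketch of declarations that would actually receive the C# values:
uniform vec4 _Hole_Position; // name must match the property, underscore included
uniform float _Hole_EventHorizonDistance;
varying vec4 pos; // still a varying: written per vertex, interpolated for the fragment stage
The fragment shader would then compare the distance against _Hole_EventHorizonDistance directly.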

Shader not showing up properly

I've been playing with ShaderToy, trying to create a top-down water effect for a 2D game based on the shader code from Jonas Wagner. You can easily copy/paste this code into ShaderToy and see the effect.
The shader looks cool in ShaderToy, but when I try to replicate it in my own code, something goes wrong; see the image below:
http://ivan.org.es/temp/shader_problems.png
My vertex shader (I don't know which one ShaderToy uses):
uniform mat4 Projection;
attribute vec2 Position;
void main(){
gl_Position = Projection*vec4(Position, 0.0, 1.0);
}
The Fragment shader:
precision lowp float;
uniform float iGlobalTime; // ShaderToy built-ins: outside ShaderToy these have to be declared and fed by the engine
uniform vec3 iResolution;
uniform sampler2D iChannel0;
vec3 sunDirection = normalize(vec3(0.0, -1.0, 0.0));
vec3 sunColor = vec3(1.0, 0.8, 0.7);
vec3 eye = vec3(0.0, 1.0, 0.0);
vec4 getNoise(vec2 uv){
vec2 uv0 = (uv/103.0)+vec2(iGlobalTime/17.0, iGlobalTime/29.0);
vec2 uv1 = uv/107.0-vec2(iGlobalTime/-19.0, iGlobalTime/31.0);
vec2 uv2 = uv/vec2(897.0, 983.0)+vec2(iGlobalTime/101.0, iGlobalTime/97.0);
vec2 uv3 = uv/vec2(991.0, 877.0)-vec2(iGlobalTime/109.0, iGlobalTime/-113.0);
vec4 noise = (texture2D(iChannel0, uv0)) +
(texture2D(iChannel0, uv1)) +
(texture2D(iChannel0, uv2)) +
(texture2D(iChannel0, uv3));
return noise*0.5-1.0;
}
void sunLight(const vec3 surfaceNormal, const vec3 eyeDirection, float shiny, float spec, float diffuse, inout vec3 diffuseColor, inout vec3 specularColor){
vec3 reflection = normalize(reflect(-sunDirection, surfaceNormal));
float direction = max(0.0, dot(eyeDirection, reflection));
specularColor += pow(direction, shiny)*sunColor*spec;
diffuseColor += max(dot(sunDirection, surfaceNormal),0.0)*sunColor*diffuse;
}
void main(){
vec2 uv = gl_FragCoord.xy / iResolution.xy;
uv *= 100.0;
vec4 noise = getNoise(uv);
vec3 surfaceNormal = normalize(noise.xzy*vec3(2.0, 1.0, 2.0));
vec3 diffuse = vec3(0.3);
vec3 specular = vec3(0.0);
vec3 worldToEye = vec3(0.0, 1.0, 0.0);//eye-worldPosition;
vec3 eyeDirection = normalize(worldToEye);
sunLight(surfaceNormal, eyeDirection, 100.0, 1.5, 0.5, diffuse, specular);
gl_FragColor = vec4((diffuse+specular+vec3(0.1))*vec3(0.3, 0.5, 0.9), 1.0);
}
Please notice that the fragment shader code is exactly the same in ShaderToy and in my engine. It seems to me that the uv coords from gl_FragCoord are somehow wrong, or that it's a precision problem, because after a while the effect gets worse and worse. I'm using an orthographic projection, but that shouldn't have much to do with this, since I'm getting the uv coordinates directly from the screen.
Any insight into what's going on?
It turns out that I was loading my textures with
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
The shader noise function was expecting GL_REPEAT instead.
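If changing the texture state isn't convenient, the wrap can also be emulated in the shader (a sketch; note that linear filtering across the fract() seam can still produce a thin artifact, unlike true GL_REPEAT):
vec4 sampleRepeat(sampler2D tex, vec2 uv){
return texture2D(tex, fract(uv)); // emulates GL_REPEAT while the texture stays GL_CLAMP_TO_EDGE
}
getNoise would then call sampleRepeat(iChannel0, uv0) and so on instead of texture2D directly.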

Why not vec3 for OpenGL ES 2.0 gl_Position?

I am new to OpenGL ES 2.0 and cannot understand the following simple shader:
attribute vec4 vPosition;
void main()
{
gl_Position = vPosition;
}
My question is, since a position would be a vector of (x, y, z), why is gl_Position a vec4 instead of vec3?
The w in vec4(x, y, z, w) is used for clipping and for the perspective divide: after the vertex stage the GPU divides the clip-space position by w, giving normalized device coordinates (x/w, y/w, z/w). It is also what lets a single 4x4 matrix express translations as well as rotations and scales.
For a point (as opposed to a direction), w should be set to 1.0.
See here for some more info: http://web.archive.org/web/20160408103910/http://iphonedevelopment.blogspot.com/2010/11/opengl-es-20-for-iOS-chapter-4.html
If you provide your vertices to the shader directly in clip space, you could just pass x,y,z and add 1 as the w component in that shader.
attribute vec3 vPosition; // vec3 instead of vec4
void main()
{
gl_Position = vec4 (vPosition, 1.0);
}
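As a small illustration of how w interacts with transformations (a sketch; GLSL mat4 is indexed by column):
attribute vec3 vPosition;
void main()
{
mat4 T = mat4(1.0); // identity
T[3] = vec4(2.0, 0.0, 0.0, 1.0); // fourth column: translate by +2 on x
vec4 p = T * vec4(vPosition, 1.0); // w = 1: a point, the translation applies
vec4 d = T * vec4(vPosition, 0.0); // w = 0: a direction, the translation is ignored
gl_Position = p;
}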