How to draw a helix in 3D using a fragment shader (Shadertoy) - background

I am relatively new to GLSL.
I want to create a solar system model and use it as a wallpaper (using Shadertoy), something like this. While I have the planets moving correctly, I can't figure out how to draw the helix paths that follow those planets.
Here is my code so far:
uniform vec2 iResolution;
uniform float iTime;

#define pi 3.141592653589

// Draws a soft-edged circle at 'pos' with radius 'rad'.
float circ(vec2 uv, vec2 pos, float rad, float blur) {
    return smoothstep(blur, 0., length(-uv + pos) - rad);
}

// Segment test: 1. if uv is within 'width' of the segment start..end (xy only).
float line(vec2 uv, vec3 start, vec3 end, float width) {
    vec2 p = uv - start.xy;
    vec2 d = end.xy - start.xy;
    float l = length(d);
    d = normalize(d); // direction
    float t = clamp(dot(p, d), 0., l);
    return length(p - d*t) < width ? 1. : 0.;
}
// WIP: currently just a modified copy of line(). The 'length' parameter was
// renamed to 'len' because it would otherwise hide the built-in length().
float helix(vec2 uv, vec3 start, vec3 direction, float width, float len, float angle) {
    float delta = iTime / angle; // currently unused
    vec2 p = uv - start.xy;
    vec2 d = (normalize(direction) * len).xy;
    float l = length(d);
    d /= l;
    float t = clamp(dot(p, d), 0., l);
    return length(p - d*t) < width ? 1. : 0.;
}
// Rotation by Euler angles (XYZ order).
vec3 rotate(vec3 point, vec3 angle) {
    mat3 rot = mat3(
        cos(angle.y)*cos(angle.z), cos(angle.z)*sin(angle.x)*sin(angle.y)-cos(angle.x)*sin(angle.z), cos(angle.x)*cos(angle.z)*sin(angle.y)+sin(angle.x)*sin(angle.z),
        cos(angle.y)*sin(angle.z), cos(angle.x)*cos(angle.z)+sin(angle.x)*sin(angle.y)*sin(angle.z), -cos(angle.z)*sin(angle.x)+cos(angle.x)*sin(angle.y)*sin(angle.z),
        -sin(angle.y), cos(angle.y)*sin(angle.x), cos(angle.x)*cos(angle.y));
    return rot * point;
}
void main() {
    vec2 uv = fragCoord / iResolution.xy;
    float ratio = iResolution.x / iResolution.y;
    uv -= .5;            // center origin
    uv.x = uv.x * ratio; // make screen square
    uv /= .3;            // zoom

    float planetA[5] = float[](0., iTime / 0.241, iTime / 0.6152, iTime, iTime / 1.8809);
    vec3 planets[5] = vec3[](
        vec3(0.),                                               // sun
        vec3(cos(planetA[1]) * .4, sin(planetA[1]) * .4, 0.),   // mercury
        vec3(cos(planetA[2]) * .7, sin(planetA[2]) * .7, 0.),   // venus
        vec3(cos(planetA[3]), sin(planetA[3]), 0.),             // earth
        vec3(cos(planetA[4]) * 1.5, sin(planetA[4]) * 1.5, 0.)  // mars
    );
    vec3 planetsC[5] = vec3[](
        vec3(0.89, 0.9, 0.45),  // sun
        vec3(0.54, 0.57, 0.63), // mercury
        vec3(0.9, 0.5, 0.2),    // venus
        vec3(0.2, 0.3, 0.8),    // earth
        vec3(0.8, 0.3, 0.2)     // mars
    );
    vec3 rotVec = vec3(-pi/4., pi/4., 0.);

    fragColor = vec4(0.);
    fragColor = mix(fragColor, vec4(1.), line(uv, vec3(0.), rotate(vec3(0., 0., 2.), rotVec), 0.01)); // sun trail

    for (int i = 1; i < planets.length(); i++) {
        planets[i] = rotate(planets[i], rotVec); // rotate the planet
        fragColor = mix(fragColor, vec4(planetsC[i], 1.), helix(uv, planets[i], rotate(vec3(0., 0., 2.), rotVec), 0.01, 2., planetA[i])); // planet trail
    }
    for (int i = 0; i < planets.length(); i++) { // draws the planets
        fragColor = mix(fragColor, vec4(planetsC[i], 1.), circ(uv, planets[i].xy, 0.05, 0.01));
    }
}
The helix function is currently only a modified version of the line method, but I want it to curve around the sun's trail.
Any advice and/or help would be appreciated, as I am still learning.
I have tried to convert the helix equation x = r * cos(t), y = r * sin(t), z = t, but I haven't gotten it to work.
Here's the method currently, although it only displays a straight line:
float helix(vec2 uv, vec3 start, vec3 direction, float width, float len, float angle) {
    float delta = iTime / angle; // currently unused
    vec2 p = uv - start.xy;
    vec2 d = (normalize(direction) * len).xy;
    float l = length(d);
    d /= l;
    float t = clamp(dot(p, d), 0., l);
    return length(p - d*t) < width ? 1. : 0.;
}

Related

Optimization for scene with custom shader

I have a three.js scene made with Rogue Engine, which I'm using to make a VR experience.
In that scene I'm using a fairly complex shader. It takes the world-space locations of two locators and transitions between the material's normal shading and a plain color; the transition uses noise for effect (see the video below: it shows the effect of the first locator, but the second one is similar, going bottom to top).
The locations of the objects are passed as Vector3 uniforms, and I'm injecting the shader into a MeshStandardMaterial using onBeforeCompile.
The performance is already bad and really tanks when I'm using textures. I'm using three texture sets for the scene, each with diffuse, roughness, metalness, emission, and AO, so each map is sampled three times and then masked using vertex colors (not present in the code below).
varying vec3 W_Pos; //world position vector
varying vec3 F_Nrml; //normal vector
varying vec3 camDir; // cam facing
varying vec3 vertexColor;
uniform vec3 astral_locator; // First locator
uniform vec3 astral_spread; // i pass the locator's scale here and scale it up for the transition
uniform vec3 starScatter_starScale_nScale; //three float parameters im passing as vector for easier control in rogue engine
uniform vec3 breakPoints;
uniform vec3 c1;
uniform vec3 c2;
uniform vec3 c3;
uniform vec3 noise_locator; //Second locator
uniform vec3 nStretch_nScale_emSharp;// same as above, three floats passed as a vector
uniform vec3 emCol;
vec4 mod289(vec4 x){return x - floor(x * (1.0 / 289.0)) * 289.0;}
vec4 perm(vec4 x){return mod289(((x * 34.0) + 1.0) * x);}
vec3 rand2(vec3 p) {
    return fract(sin(vec3(
        dot(p, vec3(127.1, 310.7, 143.54)),
        dot(p, vec3(269.5, 183.3, 217.42)),
        dot(p, vec3(2459.5, 133.3, 17.42)))) * 43758.5453);
}
float mapping(float number, float inMin, float inMax, float outMin, float outMax) {
    return (number - inMin) * (outMax - outMin) / (inMax - inMin) + outMin;
}
vec4 vertexMask(vec4 map1, vec4 map2, vec4 map3, vec3 vertMask) {
    vec4 me1 = mix(vec4(0.0), map1, vertMask.r);
    vec4 me2 = mix(me1, map2, vertMask.g);
    vec4 me3 = mix(me2, map3, vertMask.b);
    return me3;
}
//Noises
float noise(vec3 p) {
    vec3 a = floor(p);
    vec3 d = p - a;
    d = d * d * (3.0 - 2.0 * d);
    vec4 b = a.xxyy + vec4(0.0, 1.0, 0.0, 1.0);
    vec4 k1 = perm(b.xyxy);
    vec4 k2 = perm(k1.xyxy + b.zzww);
    vec4 c = k2 + a.zzzz;
    vec4 k3 = perm(c);
    vec4 k4 = perm(c + 1.0);
    vec4 o1 = fract(k3 * (1.0 / 41.0));
    vec4 o2 = fract(k4 * (1.0 / 41.0));
    vec4 o3 = o2 * d.z + o1 * (1.0 - d.z);
    vec2 o4 = o3.yw * d.x + o3.xz * (1.0 - d.x);
    return o4.y * d.y + o4.x * (1.0 - d.y);
}
float facing() {
    vec3 nrml = F_Nrml;
    vec3 cam = camDir;
    vec3 normal = normalize(nrml.xyz);
    vec3 eye = normalize(-cam);
    float rim = smoothstep(-0.75, 1.0, 1.0 - dot(normal, eye));
    return clamp(rim, 0.0, 1.0);
}
// Function for the second locator
vec2 noiseMove(vec3 loc, vec3 noiseDat) {
    float noise_stretch = noiseDat.x;
    float noise_scale = noiseDat.y;
    float emission_sharp = noiseDat.z;
    float noise_move = -loc.y;
    float gen_Pattern;
    float gen_Pattern_invert;
    float emi_sharp_fac;
    float transparency;
    float emission;
    gen_Pattern = ((W_Pos.y + noise_move) * noise_stretch) + noise(W_Pos.xyz * noise_scale);
    gen_Pattern_invert = 1.0 - gen_Pattern;
    emi_sharp_fac = clamp(emission_sharp * 1000.0, 1.0, 1000.0) * gen_Pattern;
    emission = emission_sharp * gen_Pattern;
    emission = 1.0 - emission;
    emission = emission * emi_sharp_fac;
    emission = clamp(emission, 0.0, 1.0);
    transparency = clamp(gen_Pattern_invert, 0.0, 1.0);
    return vec2(emission, transparency);
}
// Function for the first locator
vec4 astral(vec3 loc, vec3 spr, vec3 cee1, vec3 cee2, vec3 cee3, vec3 breakks, vec3 star) { // star is WIP
    float f = facing();
    float re1 = mapping(f, breakks.x, 1.0, 0.0, 1.0);
    float re2 = mapping(f, breakks.y, 1.0, 0.0, 1.0);
    float re3 = mapping(f, breakks.z, 1.0, 0.0, 1.0);
    vec3 me1 = mix(vec3(0., 0., 0.), cee1, re1);
    vec3 me2 = mix(me1, cee2, re2);
    vec3 me3 = mix(me2, cee3, re3);
    float dist = distance(W_Pos.xyz + (noise(W_Pos.xyz * star.z) - 0.5), loc);
    float val = step(dist, spr.x);
    return vec4(me3, val);
}
void main() {
    vec4 ast = astral(astral_locator, astral_spread, c1, c2, c3, breakPoints, starScatter_starScale_nScale);
    vec2 noice = noiseMove(noise_locator, nStretch_nScale_emSharp);
    // Take the output light from the three.js shader and mix it with the custom shader
    vec3 outp = mix(mix(outgoingLight, ast.xyz, ast.w), emCol, noice.x);
    float t = noice.y;
    #ifdef NONSCIFI
    t = 1.0 - noice.y;
    #endif
    t *= diffuseColor.a;
    gl_FragColor = vec4(outp * t, t);
}
Is there a way to optimize it better? A couple of things I can think of: storing the noise and using that instead of calculating it every frame, and figuring out occlusion culling (a render pass doesn't work well in VR, so I can't store the depth pass; I've got to figure out another way). Objects in the scene are already instanced to reduce draw calls. I'm assuming making some objects static might help, including the locators, but I don't know if that will stop the uniforms from updating every frame.
Is there anything else that can be done?
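For example, a minimal sketch of the noise-baking idea; the noiseTex uniform here is hypothetical (a small tiling noise texture generated once at startup and bound from JavaScript), and folding the 3D position onto a 2D tile loses true 3D variation, so this is only the shape of the idea, not a drop-in replacement:

uniform sampler2D noiseTex; // hypothetical: tiling noise baked once at startup

// Stand-in for noise(): one texture fetch instead of per-fragment hashing.
float noiseBaked(vec3 p) {
    vec2 uv = fract(p.xy * 0.05 + p.z * 0.37); // fold 3D position onto the 2D tile
    return texture2D(noiseTex, uv).r;
}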
Also, I apologize for the structure of the question; I rarely post questions, thanks to Stack Overflow. :P

Change Shape of Heatmap Element from Circle to Square?

I'm a novice with OpenLayers 6. Can anyone tell me how to change the rendered elements of the Heatmap layer from a circle to a square? Thanks a lot!
This is currently not configurable, though you can override the createRenderer method of the Heatmap layer to do this (not supported by the API, so it may break in the future).
Here is a working example: https://codesandbox.io/s/heatmap-earthquakes-squares-hdrbs?file=/main.js
These are the needed changes from the original function:
diff --git a/src/ol/layer/Heatmap.js b/src/ol/layer/Heatmap.js
index c3e3306c8..2873bf184 100644
--- a/src/ol/layer/Heatmap.js
+++ b/src/ol/layer/Heatmap.js
@@ -222,8 +222,8 @@ class Heatmap extends VectorLayer {
void main(void) {
vec2 texCoord = v_texCoord * 2.0 - vec2(1.0, 1.0);
- float sqRadius = texCoord.x * texCoord.x + texCoord.y * texCoord.y;
- float value = (1.0 - sqrt(sqRadius)) * u_blurSlope;
+ float distance = max(abs(texCoord.x), abs(texCoord.y));
+ float value = (1.0 - distance) * u_blurSlope;
float alpha = smoothstep(0.0, 1.0, value) * v_weight;
gl_FragColor = vec4(alpha, alpha, alpha, alpha);
}`,
@@ -263,8 +263,8 @@ class Heatmap extends VectorLayer {
void main(void) {
vec2 texCoord = v_texCoord * 2.0 - vec2(1.0, 1.0);
- float sqRadius = texCoord.x * texCoord.x + texCoord.y * texCoord.y;
- float value = (1.0 - sqrt(sqRadius)) * u_blurSlope;
+ float distance = max(abs(texCoord.x), abs(texCoord.y));
+ float value = (1.0 - distance) * u_blurSlope;
float alpha = smoothstep(0.0, 1.0, value) * v_weight;
if (alpha < 0.05) {
discard;
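The change works because max(abs(x), abs(y)) is the Chebyshev distance, whose iso-contours are squares, while sqrt(x*x + y*y) is the Euclidean distance, whose iso-contours are circles. A small standalone sketch (Shadertoy-style GLSL, not OpenLayers code) comparing the two falloffs side by side:

// Left half: Euclidean distance (circular falloff).
// Right half: Chebyshev distance (square falloff). Aspect ratio ignored.
void mainImage(out vec4 fragColor, in vec2 fragCoord) {
    vec2 uv = fragCoord / iResolution.xy * 2.0 - 1.0; // center at origin
    float dCircle = length(uv);
    float dSquare = max(abs(uv.x), abs(uv.y));
    float d = fragCoord.x < iResolution.x * 0.5 ? dCircle : dSquare;
    fragColor = vec4(vec3(1.0 - smoothstep(0.0, 1.0, d)), 1.0);
}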

move lat long by meters in direction and distance

Any idea how to make it? My code looks like this, but it doesn't work. In the attached photo, point 0 is the center of the circle and the rest are the circle points (every 45 degrees). As the example shows, the points don't form a circle, even though point 0 is the center.
I'm pasting my code here:
static float[] RotateVector(float[] v, float degrees)
{
    // pi / 180 ~= 0.0174532925 (the original constant 0.0174553294 was slightly off)
    float sin = (float)Math.Sin(degrees * 0.0174532925f);
    float cos = (float)Math.Cos(degrees * 0.0174532925f);
    float tx = v[0];
    float ty = v[1];
    return new float[] { (cos * tx) - (sin * ty), (sin * tx) + (cos * ty) };
}

static void Main(string[] args)
{
    // note: float carries ~7 significant digits, which is roughly meter-level
    // precision at these magnitudes; double would be safer here
    float lat = 53.1324886f;
    float lon = 23.1688403f;
    float R = 6378137; // Earth radius in meters
    float distance = 100;
    float dn = 0;
    float de = 1;
    float[] rotation = RotateVector(new float[] { dn, de }, 180);
    rotation[0] = rotation[0] * distance;
    rotation[1] = rotation[1] * distance;
    float dLat = rotation[0] / R;
    float dLon = rotation[1] / (R * (float)Math.Cos(Math.PI * lat / 180));
    float latO = lat + dLat * 180 / (float)Math.PI;
    float lonO = lon + dLon * 180 / (float)Math.PI;
    Console.WriteLine(latO + " " + lonO);
    Console.ReadKey();
}
The website with the map doesn't work correctly; on Google Maps the code works fine.
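For reference, the flat-earth approximation the code implements is (with $d_n$, $d_e$ the north/east offsets in meters, $R$ the Earth radius, and latitude/longitude in degrees):

\varphi' = \varphi + \frac{d_n}{R} \cdot \frac{180}{\pi},
\qquad
\lambda' = \lambda + \frac{d_e}{R \cos(\varphi \, \pi / 180)} \cdot \frac{180}{\pi}

At a 100 m offset this approximation is accurate to well under a meter, so the distorted ring is more likely to come from single precision (about 7 significant digits, roughly sub-meter resolution at these coordinate magnitudes) or from how the target map projects the points, than from the formula itself.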

GLSL variables not storing?

I am learning GLSL through Unity and I recently came across a problem involving the storing of variables.
Shader "Shader" {
Properties{
_Hole_Position("hole_Position", Vector) = (0., 0., 0., 1.0)
_Hole_EventHorizonDistance("hole_EventHorizonDistance", Float) = 1.0
_DebugValue("debugValue", Float) = 0.0
}
SubShader{
Pass{
GLSLPROGRAM
uniform mat4 _Object2World;
//Variables
varying float debugValue;
varying vec4 pos;
varying vec4 hole_Position;
varying float hole_EventHorizonDistance = 1;
#ifdef VERTEX
void main()
{
pos = _Object2World * gl_Vertex;
gl_Position = gl_ModelViewProjectionMatrix * gl_Vertex;
}
#endif
#ifdef FRAGMENT
void main()
{
float dist = distance(vec4(pos.x, 0.0,pos.z, 1.0), vec4(hole_Position.x, 0.0, hole_Position.z, 1.0));
debugValue = dist;
if (dist < hole_EventHorizonDistance)
{
gl_FragColor = vec4(0.3, 0.3, 0.3, 1.0);
}
else
{
gl_FragColor = vec4(0.4, 0.6, 1.0, 1.0);
}
//gl_FragColor = vec4(hole_EventHorizonDistance, 0, 0, 1.0);
}
#endif
ENDGLSL
}
}
}
Now Hole_Position and EventHorizonDistance are changed from an outside C# script with:
g.GetComponent<Renderer>().sharedMaterial.SetVector("_Hole_Position", new Vector4(transform.position.x, transform.position.y, transform.position.z, 1));
g.GetComponent<Renderer>().sharedMaterial.SetFloat("_Hole_EventHorizonDistance", 2);
This does not work as I intend it to (changing the fragment's color if its position is within 2 units of Hole_Position). However, debugging with:
gl_FragColor = vec4(hole_EventHorizonDistance, 0, 0, 1.0);
seemingly suggests that hole_EventHorizonDistance is 0 at all times (the mesh tested on remains completely black). However, debugging by getting and printing the variable from outside (via
print(g.GetComponent<Renderer>().sharedMaterial.GetFloat("_Hole_EventHorizonDistance"));
) tells me EventHorizonDistance = 2. I cannot wrap my head around why this is the case. Why is it so?
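For what it's worth, a likely culprit (a sketch of the usual fix, not verified against this exact project): in Unity's GLSLPROGRAM blocks, material properties reach the shader through uniform variables whose names match the property names, whereas varying variables only carry interpolated data from the vertex to the fragment stage; they cannot be initialized at declaration or written from the fragment shader. Declaring the two values like this would let the SetVector/SetFloat calls above take effect:

// Uniforms are bound to the material properties with the same names.
uniform vec4 _Hole_Position;
uniform float _Hole_EventHorizonDistance;
// 'varying' stays only for data the vertex shader writes for the fragment shader:
varying vec4 pos; // world-space position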

What does dirAtten value mean in Kajiya-Kay Model?

In the hair rendering slides by Scheuermann of ATI from GDC 2004, I found code like this:
float StrandSpecular(float3 T, float3 V, float3 L, float exponent)
{
    float3 H = normalize(L + V);  // half-vector
    float dotTH = dot(T, H);      // tangent . half-vector
    float sinTH = sqrt(1.0 - dotTH * dotTH);
    float dirAtten = smoothstep(-1.0, 0.0, dot(T, H));
    return dirAtten * pow(sinTH, exponent);
}
I truly have no idea what the dirAtten value means. What exactly does it mean in this shading model?
I regard dirAtten as an attenuation coefficient that controls the range of the lighting you can see.
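For what it's worth: smoothstep(-1.0, 0.0, x) is 0 for x <= -1, 1 for x >= 0, and a smooth Hermite ramp in between, so dirAtten fades the strand specular out as the half-vector tilts backward relative to the tangent instead of cutting it off abruptly. A sketch of the equivalent expansion (my reading of the term, not from the slides):

// Equivalent expansion of smoothstep(-1.0, 0.0, dotTH):
float dirAttenuation(float dotTH)
{
    float t = clamp((dotTH + 1.0) / 1.0, 0.0, 1.0); // remap [-1, 0] onto [0, 1]
    return t * t * (3.0 - 2.0 * t); // Hermite: 0 at dotTH <= -1, 1 at dotTH >= 0
}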