I am relatively new to GLSL.
I want to create a solar system model and use it as a wallpaper (using Shadertoy), something like this. While I have the planets moving correctly, I can't figure out how to draw the helix paths that follow those planets.
Here is my code so far:
uniform vec2 iResolution;
uniform float iTime;
#define pi 3.141592653589
float circ(vec2 uv, vec2 pos, float rad, float blur) {
    return smoothstep(blur, 0., length(-uv + pos) - rad); // draws a soft-edged circle
}

float line(vec2 uv, vec3 start, vec3 end, float width) {
    vec2 p = uv - start.xy;
    vec2 d = end.xy - start.xy;
    float l = length(d);
    d = normalize(d); // direction
    float t = clamp(dot(p, d), 0., l); // closest point on the segment
    return (length(p - d*t)) < width ? 1. : 0.; // both ternary branches must be float
}
// "len" instead of "length": a parameter named length would hide the
// built-in length() that is called below.
float helix(vec2 uv, vec3 start, vec3 direction, float width, float len, float angle) {
    float delta = iTime / angle; // currently unused
    vec2 p = uv - start.xy;
    vec2 d = (normalize(direction) * len).xy;
    float l = length(d);
    d /= l;
    float t = clamp(dot(p, d), 0., l);
    return (length(p - d*t)) < width ? 1. : 0.;
}
vec3 rotate(vec3 point, vec3 angle) {
    mat3 rot = mat3(
        cos(angle.y)*cos(angle.z), cos(angle.z)*sin(angle.x)*sin(angle.y)-cos(angle.x)*sin(angle.z), cos(angle.x)*cos(angle.z)*sin(angle.y)+sin(angle.x)*sin(angle.z),
        cos(angle.y)*sin(angle.z), cos(angle.x)*cos(angle.z)+sin(angle.x)*sin(angle.y)*sin(angle.z), -cos(angle.z)*sin(angle.x)+cos(angle.x)*sin(angle.y)*sin(angle.z),
        -sin(angle.y), cos(angle.y)*sin(angle.x), cos(angle.x)*cos(angle.y));
    return rot * point;
}
void main() {
    vec2 uv = fragCoord / iResolution.xy;
    float ratio = iResolution.x / iResolution.y;
    uv -= .5;            // center origin
    uv.x = uv.x * ratio; // make screen square
    uv /= .3;            // zoom
    float planetA[5] = float[](0., iTime / 0.241, iTime / 0.6152, iTime, iTime / 1.8809);
    vec3 planets[5] = vec3[](
        vec3(0.),                                              // sun
        vec3(cos(planetA[1]) * .4, sin(planetA[1]) * .4, 0.),  // mercury
        vec3(cos(planetA[2]) * .7, sin(planetA[2]) * .7, 0.),  // venus
        vec3(cos(planetA[3]), sin(planetA[3]), 0.),            // earth
        vec3(cos(planetA[4])*1.5, sin(planetA[4])*1.5, 0.)     // mars
    );
    vec3 planetsC[5] = vec3[](
        vec3(0.89, 0.9, 0.45),  // sun
        vec3(0.54, 0.57, 0.63), // mercury
        vec3(0.9, 0.5, 0.2),    // venus
        vec3(0.2, 0.3, 0.8),    // earth
        vec3(0.8, 0.3, 0.2)     // mars
    );
    vec3 rotVec = vec3(-pi/4, pi/4, 0.);
    fragColor = vec4(0.);
    fragColor = mix(fragColor, vec4(1.), line(uv, vec3(0.), rotate(vec3(0., 0., 2.), rotVec), 0.01)); // sun trail
    for (int i = 1; i < planets.length(); i++) {
        planets[i] = rotate(planets[i], vec3(-pi/4., pi/4., 0.)); // rotate the planet
        fragColor = mix(fragColor, vec4(planetsC[i], 1.), helix(uv, planets[i], rotate(vec3(0., 0., 2.), rotVec), 0.01, 2., planetA[i])); // planet trail
    }
    for (int i = 0; i < planets.length(); i++) { // draws the planets
        fragColor = mix(fragColor, vec4(planetsC[i], 1.), circ(uv, planets[i].xy, 0.05, 0.01));
    }
}
The helix function is currently only a modified version of the line method, but I want it to curve around the sun's trail.
Any advice and/or help would be appreciated, as I am still learning.
I have tried to convert the helix equation
x = r * cos(t), y = r * sin(t), z = t
but haven't gotten it to work. Here is the method currently; it only displays a straight line:
// "len" instead of "length": a parameter named length would hide the
// built-in length() that is called below.
float helix(vec2 uv, vec3 start, vec3 direction, float width, float len, float angle) {
    float delta = iTime / angle; // currently unused
    vec2 p = uv - start.xy;
    vec2 d = (normalize(direction) * len).xy;
    float l = length(d);
    d /= l;
    float t = clamp(dot(p, d), 0., l);
    return (length(p - d*t)) < width ? 1. : 0.;
}
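On paper, the projection I am after seems to reduce to a sine wave: viewed side-on, the helix x = r * cos(t), y = r * sin(t), z = t keeps t as the distance along its axis and shows an offset of r * cos(t) from that axis. Written out as a rough C++-style sketch of just that math (every name below is made up for illustration, it is not working shader code):

#include <cmath>
#include <algorithm>

// Tiny stand-in for GLSL's vec2, purely for illustration.
struct Vec2 { float x, y; };

static float dot2(Vec2 a, Vec2 b) { return a.x * b.x + a.y * b.y; }

// p    : pixel position relative to the helix start (uv - start.xy)
// dir  : unit direction of the trail on screen
// len  : length of the trail
// radius, frequency, phase : helix radius, winding speed, planet angle
float helixMask(Vec2 p, Vec2 dir, float len, float radius,
                float frequency, float phase, float width) {
    Vec2 perp{ -dir.y, dir.x };                             // 90-degree rotation of dir
    float t = std::clamp(dot2(p, dir), 0.0f, len);          // distance along the axis
    float side = dot2(p, perp);                             // sideways offset from the axis
    float wave = radius * std::cos(frequency * t + phase);  // projected helix offset
    return std::fabs(side - wave) < width ? 1.0f : 0.0f;
}

If that is right, the GLSL version would compute perp as vec2(-d.y, d.x) and use planetA[i] as the phase so each spiral starts at its planet's current position, but I have not managed to get that working yet.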
In my game I want to make a scrolling background with moving stars. I am using a ParallaxBackground node with a ParallaxLayer as a child, and the latter has a TextureRect child that displays a 2D shader for the stars.
Node hierarchy:
ParallaxBackground -> StarsLayer -> Stars
Stars is the TextureRect, and its rect_size equals the project window size.
Here is the 2D shader that it uses:
shader_type canvas_item;

uniform vec4 bg_color: hint_color;

float rand(vec2 st) {
    return fract(sin(dot(st.xy, vec2(12.9898,78.233))) * 43758.5453123);
}

void fragment() {
    float size = 100.0;
    float prob = 0.9;
    vec2 pos = floor(1.0 / size * FRAGCOORD.xy);
    float color = 0.0;
    float starValue = rand(pos);
    if (starValue > prob)
    {
        vec2 center = size * pos + vec2(size, size) * 0.5;
        float t = 0.9 + 0.2 * sin(TIME * 8.0 + (starValue - prob) / (1.0 - prob) * 45.0);
        color = 1.0 - distance(FRAGCOORD.xy, center) / (0.5 * size);
        color = color * t / (abs(FRAGCOORD.y - center.y)) * t / (abs(FRAGCOORD.x - center.x));
    }
    else if (rand(SCREEN_UV.xy / 20.0) > 0.996)
    {
        float r = rand(SCREEN_UV.xy);
        color = r * (0.85 * sin(TIME * (r * 5.0) + 720.0 * r) + 0.95);
    }
    COLOR = vec4(vec3(color), 1.0) + bg_color;
}
Here is the ParallaxBackground script:
extends ParallaxBackground

onready var stars_layer = $StarsLayer

var bg_offset = 0.0

func _ready():
    stars_layer.motion_mirroring = Vector2(0, Helpers.WINDOW_SIZE.y)

func _process(delta):
    bg_offset += 30 * delta
    scroll_offset = Vector2(0, bg_offset)
The problem is that the stars are shown but are not moving at all.
Use motion_offset instead of scroll_offset:
func _process(delta):
    motion_offset += 30 * delta
Any idea how to make this work? My code looks like this, but it doesn't work. In the attached photo, point 0 is the center of the circle and the rest are circle points (every 45 degrees); as you can see, the points don't form a circle.
Again, point 0 is the center of the circle.
I am pasting my code here:
static float[] RotateVector(float[] v, float degrees)
{
    // pi / 180 for the degrees-to-radians conversion
    float sin = (float)Math.Sin(degrees * 0.0174532925f);
    float cos = (float)Math.Cos(degrees * 0.0174532925f);

    float tx = v[0];
    float ty = v[1];

    return new float[] { (cos * tx) - (sin * ty), (sin * tx) + (cos * ty) };
}

static void Main(string[] args)
{
    float lat = 53.1324886f;
    float lon = 23.1688403f;

    float R = 6378137;    // Earth radius in meters
    float distance = 100; // offset distance in meters
    float dn = 0;
    float de = 1;

    float[] rotation = RotateVector(new float[] { dn, de }, 180);
    rotation[0] = rotation[0] * distance;
    rotation[1] = rotation[1] * distance;

    // convert the metric offset to degrees of latitude/longitude
    float dLat = rotation[0] / R;
    float dLon = rotation[1] / (R * (float)Math.Cos(Math.PI * lat / 180));

    float latO = lat + dLat * 180 / (float)Math.PI;
    float lonO = lon + dLon * 180 / (float)Math.PI;

    Console.WriteLine(latO + " " + lonO);
    Console.ReadKey();
}
The website with the map doesn't plot it correctly; on Google Maps the code works fine.
I've been having some issues implementing a camera for my renderer. As the question states, I would like to know the necessary steps to generate such a camera, with field of view and aspect ratio included. It's important that the coordinate system be left-handed, such that -z pushes the camera away from the screen (as I understand it). I have tried looking online, but most of the implementations are incomplete or have failed me. Any help is appreciated. Thank you.
I had trouble with this too and it took me a long time to figure out. Here is the code for my camera class.
#ifndef CAMERA_H_
#define CAMERA_H_

#include "common.h"

struct Camera {
    Vec3fa position, direction;
    float fovDist, aspectRatio;
    double imgWidth, imgHeight;
    Mat4 camMatrix;

    Camera(Vec3fa pos, Vec3fa cRot, Vec3fa cDir, float cfov, int width, int height) {
        position = pos;
        aspectRatio = width / (float)height;
        imgWidth = width;
        imgHeight = height;

        Vec3fa angle = Vec3fa(cRot.x, cRot.y, -cRot.z);
        camMatrix.setRotationRadians(angle * M_PI / 180.0f);

        direction = Vec3fa(0.0f, 0.0f, -1.0f);
        camMatrix.rotateVect(direction);

        fovDist = 2.0f * tan(M_PI * 0.5f * cfov / 180.0);
    }

    Vec3fa getRayDirection(float x, float y) {
        Vec3fa delta = Vec3fa((x - 0.5f) * fovDist * aspectRatio, (y - 0.5f) * fovDist, 0.0f);
        camMatrix.rotateVect(delta);
        return (direction + delta);
    }
};

#endif
In case you need it, here is the rotateVect() code from the Mat4 class:
void Mat4::rotateVect(Vector3& vect) const
{
    Vector3 tmp = vect;
    vect.x = tmp.x * (*this)[0] + tmp.y * (*this)[4] + tmp.z * (*this)[8];
    vect.y = tmp.x * (*this)[1] + tmp.y * (*this)[5] + tmp.z * (*this)[9];
    vect.z = tmp.x * (*this)[2] + tmp.y * (*this)[6] + tmp.z * (*this)[10];
}
Here is our setRotationRadians code
void Mat4::setRotationRadians(Vector3 rotation)
{
    const float cr = cos(rotation.x);
    const float sr = sin(rotation.x);
    const float cp = cos(rotation.y);
    const float sp = sin(rotation.y);
    const float cy = cos(rotation.z);
    const float sy = sin(rotation.z);

    (*this)[0] = (cp * cy);
    (*this)[1] = (cp * sy);
    (*this)[2] = (-sp);

    const float srsp = sr * sp;
    const float crsp = cr * sp;

    (*this)[4] = (srsp * cy - cr * sy);
    (*this)[5] = (srsp * sy + cr * cy);
    (*this)[6] = (sr * cp);

    (*this)[8] = (crsp * cy + sr * sy);
    (*this)[9] = (crsp * sy - sr * cy);
    (*this)[10] = (cr * cp);
}
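To show how this is meant to be driven, here is a minimal usage sketch. This part is not from my renderer: the free normalize() function and the trace() call are assumptions standing in for whatever your intersection code looks like.

// Hypothetical driver loop for the Camera above. x and y are normalized to
// [0, 1] across the image, which is what getRayDirection(x, y) expects given
// the (x - 0.5f) and (y - 0.5f) terms in its body.
Camera cam(Vec3fa(0.0f, 0.0f, 0.0f),   // position
           Vec3fa(0.0f, 0.0f, 0.0f),   // rotation in degrees
           Vec3fa(0.0f, 0.0f, -1.0f),  // cDir (unused by the constructor above)
           60.0f,                      // field of view in degrees
           640, 480);                  // image width and height

for (int py = 0; py < 480; ++py) {
    for (int px = 0; px < 640; ++px) {
        float x = (px + 0.5f) / 640.0f;
        float y = (py + 0.5f) / 480.0f;
        Vec3fa dir = normalize(cam.getRayDirection(x, y)); // assumed helper
        // Color c = trace(cam.position, dir);             // renderer-specific
        // image.setPixel(px, py, c);
    }
}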
Currently, I am working with a ray tracer that takes an iterative approach towards developing the scenes. My goal is to turn it into a recursive ray tracer.
At the moment, the ray tracer does the following to create the bitmap the image is stored in:
int WIDTH = 640;
int HEIGHT = 640;

BMP Image(WIDTH, HEIGHT); // create new bitmap

// Shoot rays slightly left or right of the camera direction
double xAMT, yAMT;

Color blue(0.1, 0.61, 0.76, 0);

for (int x = 0; x < WIDTH; x++) {
    for (int y = 0; y < HEIGHT; y++) {
        if (WIDTH > HEIGHT) {
            xAMT = ((x + 0.5) / WIDTH) * aspectRatio - (((WIDTH - HEIGHT) / (double)HEIGHT) / 2);
            yAMT = ((HEIGHT - y) + 0.5) / HEIGHT;
        }
        else if (HEIGHT > WIDTH) {
            xAMT = (x + 0.5) / WIDTH;
            yAMT = (((HEIGHT - y) + 0.5) / HEIGHT) / aspectRatio - (((HEIGHT - WIDTH) / (double)WIDTH) / 2);
        }
        else {
            xAMT = (x + 0.5) / WIDTH;
            yAMT = ((HEIGHT - y) + 0.5) / HEIGHT;
        }

        ..... // calculate intersections, shading, reflectiveness.... etc

        Image.setPixel(x, y, blue); // this is here just as an example
    }
}
Is there another approach to calculating the reflective and refractive child rays outside the double for-loop?
Are the for-loops necessary? (I assume yes, because of the bitmap.)
What approaches can be taken to minimize/optimize an iterative ray tracer?
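For context on what "recursive" means here, this is roughly the shape such a tracer usually takes: the pixel for-loops stay, because every pixel still needs a primary ray, but the reflective and refractive child rays move into a trace() function that calls itself. Everything below (Ray, Hit, Scene, shade(), reflect(), refract(), the material fields) is a placeholder sketch, not an actual renderer's API.

// Hypothetical recursive trace: local shading plus recursively traced
// reflection and refraction contributions, cut off at a maximum depth.
Color trace(const Scene& scene, const Ray& ray, int depth) {
    if (depth <= 0)
        return Color(0, 0, 0, 0);                 // stop bouncing

    Hit hit;
    if (!scene.intersect(ray, hit))
        return Color(0.1, 0.61, 0.76, 0);         // background color

    Color result = shade(scene, ray, hit);        // direct lighting at the hit

    if (hit.material.reflectivity > 0) {
        Ray reflected(hit.point, reflect(ray.direction, hit.normal));
        result = result + trace(scene, reflected, depth - 1) * hit.material.reflectivity;
    }
    if (hit.material.transparency > 0) {
        Ray refracted(hit.point, refract(ray.direction, hit.normal, hit.material.ior));
        result = result + trace(scene, refracted, depth - 1) * hit.material.transparency;
    }
    return result;
}

With something like this, the body of the existing double for-loop collapses to building a primary ray from xAMT and yAMT and writing trace(scene, primaryRay, MAX_DEPTH) into the bitmap, so the loops themselves stay iterative while the bounce logic becomes recursive.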