webgl texture on expo gives black screen - react-native

I'm trying WebGL for the first time. I'm working in Expo with the expo-gl package, aiming to build a filter component for photo editing. So far I have been able to create a context successfully, and the shaders work fine (I can render a triangle, two triangles, etc.).
The problem comes when I try to load an image as a texture: I get no errors, but all I see is a black screen on both the emulator and the phone (both Android).
I have added checks for power-of-two images and image.onload, and I logged the return value of gl.texImage2D but got undefined. I would really appreciate any insight or help with this issue.
I have divided what I think are the relevant pieces of code into four parts.
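For reference, gl.texImage2D always returns undefined in WebGL, so that log result is expected; a more reliable way to catch a silently failing call is gl.getError(). Below is a minimal, hedged debugging sketch (the helper name checkGL is my own, not from the original code):

```javascript
// Hypothetical debugging helper: logs any pending GL error after a labelled call.
// gl.getError() is standard WebGL and returns gl.NO_ERROR (0) when nothing failed.
function checkGL(gl, label) {
  const err = gl.getError();
  if (err !== gl.NO_ERROR) {
    console.log(`GL error after ${label}: 0x${err.toString(16)}`);
  }
}

// Usage inside onContextCreate, e.g.:
// gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, asset);
// checkGL(gl, 'texImage2D');
```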
1. Shaders (template literals):
const vertexShaderSource = `
attribute vec2 a_texCoord;
attribute vec4 a_position;
varying vec2 v_texCoord;
void main() {
gl_Position = a_position;
v_texCoord = a_texCoord;
}
`;
const fragmentShaderSource = `
precision mediump float;
uniform sampler2D u_image;
varying vec2 v_texCoord;
void main() {
gl_FragColor = texture2D(u_image, v_texCoord);
}
`;
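Since the stated goal is a photo-filter component, here is an illustrative, hedged example of what a simple filter shader could look like once texturing works: a grayscale fragment shader written as a JavaScript template literal. This is not part of the original code; the uniform and varying names just mirror the shaders above.

```javascript
// Hypothetical grayscale filter shader for later use; same u_image / v_texCoord as above.
const grayscaleFragmentShaderSource = `
precision mediump float;
uniform sampler2D u_image;
varying vec2 v_texCoord;
void main() {
  vec4 color = texture2D(u_image, v_texCoord);
  // Rec. 601 luma weights
  float luma = dot(color.rgb, vec3(0.299, 0.587, 0.114));
  gl_FragColor = vec4(vec3(luma), color.a);
}
`;
```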
2. React Native (Expo) state and lifecycle:
export default class App extends React.Component {
state = {
ready: false,
image: null,
};
componentDidMount() {
(async () => {
const image = Asset.fromModule(require('./bw.jpg'));
await image.downloadAsync();
this.setState({
ready: true,
image,
});
})();
}
render() {
return (
<View style={styles.container}>
<Image source={require('./back.jpg')} style={{ width: '100%' }} />
<GLView
style={{ width: '100%', height: '100%', position: 'absolute' }}
onContextCreate={this._onContextCreate}
/>
</View>
);
}
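One timing detail worth illustrating: onContextCreate can fire before the asset has finished downloading, in which case this.state.image is still null inside the GL code. A minimal sketch of one way to guard against that (the _tryDraw name and structure are my own, not from the original code):

```javascript
// Hypothetical guard: only start GL work once both the context and the asset exist.
_onContextCreate = gl => {
  this._gl = gl;
  this._tryDraw();
};

_tryDraw = () => {
  const { image } = this.state;
  if (!this._gl || !image || !image.downloaded) {
    return; // wait until both the GL context and the downloaded asset are available
  }
  // ...shader/buffer/texture setup and draw calls go here...
};

// componentDidMount would then call _tryDraw again once the asset is stored,
// e.g. this.setState({ ready: true, image }, this._tryDraw);
```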
3. GL context:
_onContextCreate = gl => {
if (_initialized) { return }
function createShader(gl, type, source) {
var shader = gl.createShader(type);
gl.shaderSource(shader, source);
gl.compileShader(shader);
var success = gl.getShaderParameter(shader, gl.COMPILE_STATUS);
if (success) {
return shader;
}
//on error
console.log(gl.getShaderInfoLog(shader));
gl.deleteShader(shader);
}
//get shaders
var vertexShader = createShader(gl, gl.VERTEX_SHADER, vertexShaderSource);
var fragmentShader = createShader(gl, gl.FRAGMENT_SHADER, fragmentShaderSource);
function createProgram(gl, vertexShader, fragmentShader) {
var program = gl.createProgram();
gl.attachShader(program, vertexShader);
gl.attachShader(program, fragmentShader);
gl.linkProgram(program);
//on error
var success = gl.getProgramParameter(program, gl.LINK_STATUS);
if (success) {
return program;
}
console.log(gl.getProgramInfoLog(program));
gl.deleteProgram(program);
}
//create program
var program = createProgram(gl, vertexShader, fragmentShader);
//get attributes
var positionAttributeLocation = gl.getAttribLocation(program, "a_position");
//a_position buffer for fragment shader
var positionBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
// six 2D points (two triangles covering the clip-space quad)
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
-1, -1,
-1, 1,
1, -1,
1, 1,
1, -1,
-1, 1,
]), gl.STATIC_DRAW);
//create the viewport
gl.viewport(0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight);
// Clear the canvas
gl.clearColor(0.0, 0.0, 0.0, 0.0);
gl.clear(gl.COLOR_BUFFER_BIT);
// use program
gl.useProgram(program);
gl.enableVertexAttribArray(positionAttributeLocation);
gl.vertexAttribPointer(
positionAttributeLocation, 2, gl.FLOAT, false, 0, 0)
gl.drawArrays(gl.TRIANGLES, 0, 6);
4. Code for the texture, and the end of _onContextCreate:
//get image from state
const image = this.state.image
var texCoordLocation = gl.getAttribLocation(program, "a_texCoord");
var uSampler = gl.getUniformLocation(program, 'u_image');
// provide texture coordinates for the rectangle.
var texCoordBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, texCoordBuffer);
var positions = [
-1,-1,
-1, 1,
1,-1,
1, 1,
1,-1,
-1, 1,
];
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(positions), gl.STATIC_DRAW);
gl.enableVertexAttribArray(texCoordLocation);
gl.vertexAttribPointer(texCoordLocation, 2, gl.FLOAT, false, 0, 0);
//create texture
var texture = gl.createTexture();
//check if image is ready
if (image.downloaded) loadTexture(texture, image)
//get image width & height for pow2 check.
const { width, height } = Image.resolveAssetSource(image);
function loadTexture(texture, img) {
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.uniform1i(uSampler, 0);
//pow2 check
if (isPowerOf2(width) && isPowerOf2(height)) {
gl.generateMipmap(gl.TEXTURE_2D);
} else {
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, img);
}
return texture
}
function isPowerOf2(value) {
return (value & (value - 1)) == 0;
}
gl.flush();
gl.endFrameEXP();
_initialized = true;
};
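For comparison, here is a minimal sketch of the texture path, under the assumption that expo-gl's texImage2D accepts the downloaded Expo Asset itself as the pixel source in the six-argument overload (check the expo-gl docs for the overloads supported by your SDK version). It also draws after the texture has been uploaded and finishes the frame with gl.endFrameEXP(); the parameter choices (CLAMP_TO_EDGE, LINEAR) mirror the original code, and it assumes the attribute/buffer setup from sections 3 and 4 has already run.

```javascript
// Sketch only: `gl` is the expo-gl context, `program` the linked program,
// and `asset` a downloaded Expo Asset (Asset.fromModule(...).downloadAsync()).
function drawImageTexture(gl, program, asset) {
  const texture = gl.createTexture();
  gl.activeTexture(gl.TEXTURE0);
  gl.bindTexture(gl.TEXTURE_2D, texture);

  // Non-power-of-two-safe parameters, as in the original code.
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);

  // Upload the asset's pixels (assumption: expo-gl accepts the Asset here).
  gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, asset);

  // Point the sampler at texture unit 0 and draw *after* the texture is ready.
  gl.uniform1i(gl.getUniformLocation(program, 'u_image'), 0);
  gl.drawArrays(gl.TRIANGLES, 0, 6);

  gl.flush();
  gl.endFrameEXP();
}
```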
Thanks in advance!

Related

html canvas - multiple svg images which all have different fillStyle colors

For a digital artwork I'm generating a canvas element in Vue which draws from an array of multiple images.
The images can be split into two categories:
SVG (comes with a fill-color)
PNG (just needs to be drawn as a regular image)
I came up with this:
const depict = (options) => {
ctx.clearRect(0, 0, canvas.width, canvas.height);
const myOptions = Object.assign({}, options);
if (myOptions.ext == "svg") {
return loadImage(myOptions.uri).then((img) => {
ctx.drawImage(img, 0, 0, 100, 100);
ctx.globalCompositeOperation = "source-in";
ctx.fillStyle = myOptions.clr;
ctx.fillRect(0, 0, 100, 100);
ctx.globalCompositeOperation = "source-over";
});
} else {
return loadImage(myOptions.uri).then((img) => {
ctx.fillStyle = myOptions.clr;
ctx.drawImage(img, 0, 0, 100, 100);
});
}
};
this.inputs.forEach(depict);
for context:
myOptions.clr = the color
myOptions.uri = the url of the image
myOptions.ext = the extension of the image
While all images are drawn correctly, I can't figure out why the last fillStyle overlays the whole image. I just want each SVG to get the fillStyle that is attached to it.
I tried multiple globalCompositeOperation values in different orders. I also tried drawing the SVG between ctx.save and ctx.restore. No success… I might be missing some logic here.
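For anyone hitting the same thing, the likely reason is that globalCompositeOperation = "source-in" composites against every pixel already on the canvas, not just the image drawn immediately before it, so a full-canvas fillRect recolors everything drawn so far. A small hedged illustration (ctx, canvas, svgA and pngB are hypothetical, already-loaded variables):

```javascript
// Illustration only: "source-in" keeps new pixels only where they overlap existing
// canvas content, so a full-canvas fillRect tints *all* previously drawn images,
// not just the most recent one. save()/restore() does not help because it restores
// drawing state, not pixels.
ctx.drawImage(svgA, 0, 0, 100, 100);    // existing content
ctx.drawImage(pngB, 100, 0, 100, 100);  // more existing content
ctx.globalCompositeOperation = "source-in";
ctx.fillStyle = "red";
ctx.fillRect(0, 0, canvas.width, canvas.height); // recolors BOTH svgA and pngB
ctx.globalCompositeOperation = "source-over";
```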
So! I figured it out myself in the meantime :)
I created an async loop with a promise. Inside this I created a temporary canvas per image which I then drew to one canvas. I took inspiration from this solution: https://stackoverflow.com/a/6687218/15289586
Here is the final code:
// create the parent canvas
let parentCanv = document.createElement("canvas");
const getContext = () => parentCanv.getContext("2d");
const parentCtx = getContext();
parentCanv.classList.add("grid");
// get the wrapper from the DOM
let wrapper = document.getElementById("wrapper");
// this function loops through the array
async function drawShapes(files) {
for (const file of files) {
await depict(file);
}
// after the loop, append the parent canvas to the wrapper
wrapper.appendChild(parentCanv);
}
// async image loading worked best
const loadImage = (url) => {
return new Promise((resolve, reject) => {
const img = new Image();
img.onload = () => resolve(img);
img.onerror = () => reject(new Error(`load ${url} fail`));
img.src = url;
});
};
// depict the file
const depict = (options) => {
// make a promise
return new Promise((accept, reject) => {
const myOptions = Object.assign({}, options);
var childCanv = document.createElement("canvas");
const getContext = () => childCanv.getContext("2d");
const childCtx = getContext();
if (myOptions.ext == "svg") {
loadImage(myOptions.uri).then((img) => {
childCtx.drawImage(img, 0, 0, 100, parentCanv.height);
childCtx.globalCompositeOperation = "source-in";
childCtx.fillStyle = myOptions.clr;
childCtx.fillRect(0, 0, parentCanv.width, parentCanv.height);
parentCtx.drawImage(childCanv, 0, 0);
accept();
});
} else {
loadImage(myOptions.uri).then((img) => {
// ctx.fillStyle = myOptions.clr;
childCtx.drawImage(img, 0, 0, 100, parentCanv.height);
parentCtx.drawImage(childCanv, 0, 0);
accept();
});
}
});
};
drawShapes(this.inputs);

Three.js load a gltf model and set color for it but when zooming out it is all black

Hello everyone, I have run into a strange problem: I loaded a glTF model in three.js and set vertex colors for it. When zooming in it has colors, but when zooming out it is all black. When I directly set a color on its material, it works fine.
Thank you.
Here are the sample screenshots and code.
loadGlbModel() {
const loader = new GLTFLoader();
loader.load(
`/three/eiffel-tower.gltf`,
(gltf) => {
const geometry = gltf.scene.children[0].geometry;
const positions = geometry.attributes.position;
const count = positions.count;
geometry.setAttribute(
"color",
new THREE.BufferAttribute(new Float32Array(count * 3), 3)
);
const color = new THREE.Color();
const colors = geometry.attributes.color;
const radius = 200;
debugger;
for (let i = 0; i < count; i++) {
color.setHSL(positions.getY(i) / radius / 2, 1.0, 0.5);
colors.setXYZ(i, 1, 0, 0);
}
const material = new THREE.MeshPhongMaterial({
color: 0xffffff,
flatShading: true,
vertexColors: true,
shininess: 0,
});
const wireframeMaterial = new THREE.MeshBasicMaterial({
color: 0x000000,
wireframe: true,
transparent: true,
});
let mesh = new THREE.Mesh(geometry, material);
let wireframe = new THREE.Mesh(geometry, wireframeMaterial);
mesh.add(wireframe);
mesh.scale.set(0.1, 0.1, 0.1);
const redColor = new THREE.Color(1, 0, 0);
console.log(mesh);
// mesh.children[0].material.color = redColor;
const light = new THREE.DirectionalLight(0xffffff);
light.position.set(0, 0, 1);
this.scene.add(light);
this.scene.add(mesh);
},
(xhr) => {
console.log((xhr.loaded / xhr.total) * 100 + "% loaded");
},
(error) => {
console.error(error);
}
);
},
You are rendering the wireframe version too, which consists of lines in screen-space. As you zoom out, these lines maintain the same width in pixels, thus covering everything else.
Do you wish to render the wireframe too? If not, don't. If you do, then consider hiding it as the user zooms out.
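A hedged sketch of that suggestion, assuming the camera, the mesh, and the wireframe mesh from the question are accessible in the render loop (the distance threshold is arbitrary and would need tuning):

```javascript
// Toggle the wireframe based on camera distance; tune the threshold for your scene.
const WIREFRAME_MAX_DISTANCE = 50;

function animate() {
  requestAnimationFrame(animate);
  const distance = camera.position.distanceTo(mesh.position);
  wireframe.visible = distance < WIREFRAME_MAX_DISTANCE;
  renderer.render(scene, camera);
}
```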

Print value of an array using three.js TextGeomtery

How can I print the values of an array using three.js TextGeometry? I'm trying the following code but get no output.
for(let i=0;i<=4;i++)
{
let arr = [1,2,3,4,5,6,7,8];
let char = arr[i];
let loader = new THREE.FontLoader();
let font = loader.parse(fontJSON);
let geometry = new THREE.TextBufferGeometry(char ,{font : font , size : 1 , height : 0.1 });
let material = new THREE.MeshBasicMaterial({ color : 0xffffff });
let text = new THREE.Mesh(geometry , material);
text.position.set(i,0,0);
scene.add(text);
}
You have to make sure to provide a string to TextBufferGeometry, not a number. You can easily ensure this by calling toString() on your char variable. I've refactored your code a bit to show you a complete example.
let camera, scene, renderer;
init();
animate();
function init() {
camera = new THREE.PerspectiveCamera(70, window.innerWidth / window.innerHeight, 0.01, 10);
camera.position.z = 8;
scene = new THREE.Scene();
const material = new THREE.MeshBasicMaterial({
color: 0xffffff
});
const arr = [1, 2, 3, 4, 5, 6, 7, 8];
const loader = new THREE.FontLoader();
loader.load('https://threejs.org/examples/fonts/helvetiker_regular.typeface.json', (font) => {
for (let i = 0; i <= 4; i++) {
const char = arr[i];
const geometry = new THREE.TextBufferGeometry(char.toString(), {
font: font,
size: 1,
height: 0.1
});
const text = new THREE.Mesh(geometry, material);
text.position.set(i, 0, 0);
scene.add(text);
}
});
renderer = new THREE.WebGLRenderer({
antialias: true
});
renderer.setSize(window.innerWidth, window.innerHeight);
document.body.appendChild(renderer.domElement);
}
function animate() {
requestAnimationFrame(animate);
renderer.render(scene, camera);
}
body {
margin: 0;
}
canvas {
display: block;
}
<script src="https://cdn.jsdelivr.net/npm/three@0.117.1/build/three.js"></script>

OrbitControls are not working react-native

I am trying to implement OrbitControls in my React Native application. I have loaded a 3D model in the GraphicsView of expo-graphics. The model loads perfectly; now I need to rotate the 3D model with a screen drag. For this I added OrbitControls, which is not working properly: the model doesn't rotate with a screen drag. Please help with what I need to do to rotate the 3D model in my app.
Here is my model class.
```
import React from 'react';
import ExpoTHREE, { THREE } from 'expo-three';
import { GraphicsView } from 'expo-graphics';
import OrbitControls from 'three-orbitcontrols';
class Model extends React.Component {
componentDidMount() {
THREE.suppressExpoWarnings();
}
// When our context is built we can start coding 3D things.
onContextCreate = async ({ gl, pixelRatio, width, height }) => {
// Create a 3D renderer
this.renderer = new ExpoTHREE.Renderer({
gl,
pixelRatio,
width,
height,
});
// We will add all of our meshes to this scene.
this.scene = new THREE.Scene();
this.scene.background = new THREE.Color(0x3d392f);
//this.scene.fog = new THREE.Fog( 0xa0a0a0, 10, 50 );
this.camera = new THREE.PerspectiveCamera(12, width/height, 1, 1000);
this.camera.position.set(3, 3, 3);
this.camera.lookAt(0, 0, 0);
this.controls = new OrbitControls(this.camera, this.renderer.domElement);
this.controls.rotateSpeed = 2.0;
this.controls.zoomSpeed = 1.2;
this.controls.panSpeed = 0.8;
this.controls.noPan = false;
this.controls.staticMoving = false;
this.controls.dynamicDampingFactor = 0.3;
this.controls.keys = [ 65, 83, 68 ];
this.controls.addEventListener( 'change', this.onRender );
//clock = new THREE.Clock();
//this.scene.add(new THREE.AmbientLight(0xffffff));
var hemiLight = new THREE.HemisphereLight( 0xffffff, 0x444444 );
hemiLight.position.set( 0, 20, 0 );
this.scene.add( hemiLight );
var dirLight = new THREE.DirectionalLight( 0xffffff );
dirLight.position.set( - 3, 10, - 10 );
dirLight.castShadow = true;
dirLight.shadow.camera.top = 2;
dirLight.shadow.camera.bottom = - 2;
dirLight.shadow.camera.left = - 2;
dirLight.shadow.camera.right = 2;
dirLight.shadow.camera.near = 0.1;
dirLight.shadow.camera.far = 40;
this.scene.add( dirLight );
await this.loadModel();
};
loadModel = async () => {
const obj = {
"f.obj": require('../assets/models/f.obj')
}
const model = await ExpoTHREE.loadAsync(
obj['f.obj'],
null,
obj
);
// this ensures the model will be small enough to be viewed properly
ExpoTHREE.utils.scaleLongestSideToSize(model, 1);
this.scene.add(model)
};
// When the phone rotates, or the view changes size, this method will be called.
onResize = ({ x, y, scale, width, height }) => {
// Let's stop the function if we haven't setup our scene yet
if (!this.renderer) {
return;
}
this.camera.aspect = width / height;
this.camera.updateProjectionMatrix();
this.renderer.setPixelRatio(scale);
this.renderer.setSize(width, height);
};
// Called every frame.
onRender = delta => {
// Finally render the scene with the Camera
this.renderer.render(this.scene, this.camera);
};
render() {
return (
<GraphicsView
onContextCreate={this.onContextCreate}
onRender={this.onRender}
onResize={this.onResize}
/>
);
}
}
export default Model;
```
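For what it's worth, three-orbitcontrols listens for DOM mouse/touch events on renderer.domElement, which does not exist in React Native, so the controls never receive any input there. One hedged alternative is to drive the rotation from React Native's PanResponder instead; a sketch is below. Names like this.model, this._lastDx and the ROTATION_SPEED constant are illustrative assumptions, not part of the original code, and the snippet would live in the component's constructor with the pan handlers spread onto a View wrapping the GraphicsView.

```javascript
import { PanResponder } from 'react-native';

// Sketch: rotate the loaded model from touch drags instead of relying on DOM events.
// Assumes loadModel() stores the model as `this.model`, and that the GraphicsView is
// wrapped in a <View {...this.panResponder.panHandlers}>.
const ROTATION_SPEED = 0.01; // radians per pixel of drag; tune to taste

this.panResponder = PanResponder.create({
  onStartShouldSetPanResponder: () => true,
  onPanResponderGrant: () => {
    this._lastDx = 0;
    this._lastDy = 0;
  },
  onPanResponderMove: (evt, gestureState) => {
    if (!this.model) return;
    // dx/dy are cumulative for the gesture, so apply only the delta since last move.
    this.model.rotation.y += (gestureState.dx - this._lastDx) * ROTATION_SPEED;
    this.model.rotation.x += (gestureState.dy - this._lastDy) * ROTATION_SPEED;
    this._lastDx = gestureState.dx;
    this._lastDy = gestureState.dy;
  },
});
```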

react-native requestanimationframe - flicker - freeze - app crash

I am learning to build an app using react-native, and below is the component code I am using. The problem is that when I simulate this app, it crashes within a few seconds. I couldn't figure out what is causing this, so any suggestions would be appreciated.
I am trying to animate a canvas circle using requestAnimationFrame in this component.
Thank you ..
import React, { Component } from 'react';
import {
Platform,
StyleSheet,
Text,
View,
Dimensions
} from 'react-native';
import Canvas from 'react-native-canvas';
export default class ThreeDiscs extends Component{
constructor(props){
super(props);
this.state = {
context: null,
screen: {
width: Dimensions.get('window').width,
height: Dimensions.get('window').height
},
a: 0,
b: 0,
c: 100,
d: 100
};
this.coordinates = {
a: 0,
b: 0,
c: 100,
d: 100
};
this.directions = {
increment: 5,
direction: 0
};
this.discs = [
{
x: 150,
y: 250,
lastX: 150,
lastY: 250,
velocityX: -3.2,
velocityY: 3.5,
radius: 10,
innerColor: 'rgba(255,255,0,1)',
middleColor: 'rgba(255,255,0,0.7)',
outerColor: 'rgba(255,255,0,0.5)',
strokeStyle: 'gray',
}
];
this.numDiscs = this.discs.length;
}
rendercanvas = (canvas) => {
canvas.width = this.state.screen.width;
canvas.height = this.state.screen.height;
const context = canvas.getContext('2d');
this.setState({context: context});
context.fillStyle = 'purple';
context.fillRect(0,0,100,100);
this.drawBackground(context);
this.update(context);
this.draw(context);
this.animatecanvas(context);
}
drawBackground(context) {
var STEP_Y = 12,
i = this.state.screen.height;
context.strokeStyle = 'lightgray';
context.lineWidth = 0.5;
while(i > STEP_Y*4) {
context.beginPath();
context.moveTo(0, i);
context.lineTo(context.canvas.width, i);
context.stroke();
i -= STEP_Y;
}
context.save();
context.strokeStyle = 'rgba(100,0,0,0.3)';
context.lineWidth = 1;
context.beginPath();
context.moveTo(35,0);
context.lineTo(35,context.canvas.height);
context.stroke();
context.restore();
}
update(context) {
var disc = null;
for(var i=0; i < this.numDiscs; ++i) {
disc = this.discs[i];
if (disc.x + disc.velocityX + disc.radius > context.canvas.width ||
disc.x + disc.velocityX - disc.radius < 0)
disc.velocityX = -disc.velocityX;
if (disc.y + disc.velocityY + disc.radius > context.canvas.height ||
disc.y + disc.velocityY - disc.radius < 0)
disc.velocityY= -disc.velocityY;
disc.x += disc.velocityX;
disc.y += disc.velocityY;
}
}
draw(context) {
var disc = null;
for(var i=0; i < this.numDiscs; ++i) {
disc = this.discs[i];
context.save();
context.beginPath();
context.arc(disc.x, disc.y, disc.radius, 0, Math.PI*2, false);
context.fillStyle = '#000';
context.strokeStyle = disc.strokeStyle;
context.fill();
context.stroke();
context.restore();
}
}
animatecanvas(context){
context.clearRect(0,0,this.state.screen.width,this.state.screen.height);
this.drawBackground(context);
this.update(context);
this.draw(context);
requestAnimationFrame(() => {this.animatecanvas(context)});
}
render(){
return(
<Canvas ref={this.rendercanvas} />
);
}
}
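One hedged observation on the crash: react-native-canvas proxies each 2D call across a bridge (it renders in a WebView), so an unthrottled requestAnimationFrame loop that is never cancelled can keep flooding that bridge even after the component is gone, which may contribute to the freeze/crash. A minimal sketch of keeping the frame handle so the loop can be stopped on unmount (the _rafId name is my own):

```javascript
// Sketch: store the requestAnimationFrame handle and cancel it on unmount, so the
// draw loop cannot keep posting work to the Canvas bridge after the component is gone.
animatecanvas(context) {
  context.clearRect(0, 0, this.state.screen.width, this.state.screen.height);
  this.drawBackground(context);
  this.update(context);
  this.draw(context);
  this._rafId = requestAnimationFrame(() => this.animatecanvas(context));
}

componentWillUnmount() {
  if (this._rafId != null) {
    cancelAnimationFrame(this._rafId);
  }
}
```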