Expo Camera: taking picture doesn't react on first onPress - React Native

I am not able to make the camera work on the first click, only on the second.
I am getting this error: LogBox.js:154 Possible Unhandled Promise Rejection (id: 0):
ReferenceError: Camera is not defined
My code:
const takePicture = async () => {
  let options = {
    quality: 1,
    base64: true,
    exif: false,
  };
  setTimerOn(true);
  setTimeout(async function () {
    if (cameraRef.current) {
      try {
        const data = await cameraRef.current.takePictureAsync(options);
        setImage(data);
        setTimerOn(false);
        if (image) {
          try {
            MediaLibrary.saveToLibraryAsync(image.uri).then(() => {
              setImage(undefined);
            });
            navigation.navigate("VerifyPictureScreen", {
              photo: image,
            });
          } catch (error) {
            console.log(error);
          }
        }
      } catch (error) {
        console.log(error);
      }
    }
  }, timer * 1000);
};
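A likely reason it only works on the second press is that image is read from state immediately after setImage(data), so it is still undefined on the first press and only holds the previous photo afterwards. Below is a minimal sketch (not a definitive fix) that uses the fresh data object directly, assuming the same cameraRef, MediaLibrary and navigation objects as above:

const takePicture = async () => {
  const options = { quality: 1, base64: true, exif: false };
  setTimerOn(true);
  setTimeout(async () => {
    if (!cameraRef.current) return;
    try {
      // Use the photo object returned by the camera instead of the not-yet-updated state
      const data = await cameraRef.current.takePictureAsync(options);
      setImage(data);
      setTimerOn(false);
      await MediaLibrary.saveToLibraryAsync(data.uri);
      navigation.navigate("VerifyPictureScreen", { photo: data });
      setImage(undefined);
    } catch (error) {
      console.log(error);
    }
  }, timer * 1000);
};

The ReferenceError: Camera is not defined message looks like a separate issue; it usually means the Camera component import (import { Camera } from 'expo-camera') is missing in the file that renders the camera view.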

Related

How to implement an Agora SDK video call in a react-native project

I successfully implemented the Agora SDK video call module with virtual background in my react.js web app, but when I try to implement it in the react-native mobile version I keep getting errors I don't know how to solve. I'm a bit new to react-native, so this might be an easy fix, but I can't find it.
Basically, after submitting a form with the uid, channel, role, and token (I have a token service), the video call component is rendered.
These are my dependencies:
"agora-access-token": "^2.0.4",
"agora-react-native-rtm": "^1.5.0",
"agora-extension-virtual-background": "^1.1.1",
"agora-rtc-sdk-ng": "^4.14.0",
"agora-rn-uikit": "^4.0.0",
"axios": "^0.27.2",
"react": "18.0.0",
"react-native": "0.69.4",
"react-native-agora": "^3.7.1",
"react-native-dotenv": "^3.3.1"
This is the main videocall component.
import React, { useEffect, useState } from "react";
import axios from "axios";
import { Call } from "./components/Call";

const VideoCallApp = ({ videoCallData }) => {
  const [token, setToken] = useState("");
  const [virtualBackgroundData, setVirtualBackgroundData] = useState({
    type: "img",
    // example
    // type: 'img',
    // value: ''
    //
    // type: 'blur',
    // value: integer // blurring degree, low (1), medium (2), or high (3).
    //
    // type: 'color',
    // value: string // color in hex or string
  });

  useEffect(() => {
    const getToken = async () => {
      const url = `${process.env.REACT_APP_AGORA_TOKEN_SERVICE}/rtc/${videoCallData.channel}/${videoCallData.role}/uid/${videoCallData.uid}`;
      try {
        const response = await axios.get(url);
        const token = response.data.rtcToken;
        setToken(token);
      } catch (err) {
        alert(err);
      }
    };
    getToken();
  }, []);

  return (
    token && (
      <Call
        rtcProps={{
          appId: process.env.REACT_APP_AGORA_APP_ID,
          channel: videoCallData.channel,
          token: token,
          uid: videoCallData.uid,
        }}
        virtualBackground={virtualBackgroundData}
      />
    )
  );
};

export default VideoCallApp;
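One thing worth double-checking (an assumption on my part, since the Babel config isn't shown): React Native does not inject process.env.REACT_APP_* values the way Create React App does. With react-native-dotenv 3.x, which is in the dependency list, values normally come from a .env file and are imported from the '@env' module once the Babel plugin is enabled. A small sketch with hypothetical variable names:

// babel.config.js: plugins: [["module:react-native-dotenv"]]
// .env (hypothetical names): AGORA_APP_ID=..., AGORA_TOKEN_SERVICE=https://...

import { AGORA_APP_ID, AGORA_TOKEN_SERVICE } from "@env";

// Used the same way the CRA-style variables are used above
const url = `${AGORA_TOKEN_SERVICE}/rtc/${videoCallData.channel}/${videoCallData.role}/uid/${videoCallData.uid}`;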
The VideoCallApp component renders the Call component, which contains the additional functionality for the virtual background.
import { useEffect, useState } from 'react';
import AgoraRTC from 'agora-rtc-sdk-ng';
import VirtualBackgroundExtension from 'agora-extension-virtual-background';
import { LocalVideo } from './LocalVideo';
import { RemoteVideo } from './RemoteVideo';
import { VideoControllers } from './VideoButtons';
import { View } from 'react-native';

const client = AgoraRTC.createClient({ mode: "rtc", codec: "vp8" });
const extension = new VirtualBackgroundExtension();
AgoraRTC.registerExtensions([extension]);

export const Call = ({ rtcProps = {}, virtualBackground = {} }) => {
  const [localTracks, setLocalTracks] = useState({
    audioTrack: null,
    videoTrack: null
  });
  const [processor, setProcessor] = useState(null);

  useEffect(() => {
    if (processor) {
      try {
        const initProcessor = async () => {
          // Initialize the extension and pass in the URL of the Wasm file
          await processor.init(process.env.PUBLIC_URL + "/assets/wasms");
          // Inject the extension into the video processing pipeline in the SDK
          localTracks.videoTrack.pipe(processor).pipe(localTracks.videoTrack.processorDestination);
          playVirtualBackground();
        }
        initProcessor()
      } catch (e) {
        console.log("Fail to load WASM resource!"); return null;
      }
    }
  }, [processor]);

  useEffect(() => {
    if (localTracks.videoTrack && processor) {
      setProcessor(null);
    }
  }, [localTracks]);

  const playVirtualBackground = async () => {
    try {
      switch (virtualBackground.type) {
        case 'color':
          processor.setOptions({ type: 'color', color: virtualBackground.value });
          break;
        case 'blur':
          processor.setOptions({ type: 'blur', blurDegree: Number(virtualBackground.value) });
          break;
        case 'img':
          const imgElement = document.createElement('img');
          imgElement.onload = async () => {
            try {
              processor.setOptions({ type: 'img', source: imgElement });
              await processor.enable();
            } catch (error) {
              console.log(error)
            }
          }
          imgElement.src = process.env.PUBLIC_URL + '/assets/backgrounds/background-7.jpg';
          imgElement.crossOrigin = "anonymous";
          break;
        default:
          break;
      }
      await processor.enable();
    } catch (error) {
      console.log(error)
    }
  }

  const join = async () => {
    await client.join(rtcProps.appId, rtcProps.channel, rtcProps.token, Number(rtcProps.uid));
  }

  const startVideo = () => {
    AgoraRTC.createCameraVideoTrack()
      .then(videoTrack => {
        setLocalTracks(tracks => ({
          ...tracks,
          videoTrack
        }));
        client.publish(videoTrack);
        videoTrack.play('local');
      })
  }

  const startAudio = () => {
    AgoraRTC.createMicrophoneAudioTrack()
      .then(audioTrack => {
        setLocalTracks(tracks => ({
          ...tracks,
          audioTrack
        }));
        client.publish(audioTrack);
      });
  }

  const stopVideo = () => {
    localTracks.videoTrack.close();
    localTracks.videoTrack.stop();
    client.unpublish(localTracks.videoTrack);
  }

  const stopAudio = () => {
    localTracks.audioTrack.close();
    localTracks.audioTrack.stop();
    client.unpublish(localTracks.audioTrack);
  }

  const leaveVideoCall = () => {
    stopVideo();
    stopAudio();
    client.leave();
  }

  async function startOneToOneVideoCall() {
    join()
      .then(() => {
        startVideo();
        startAudio();
        client.on('user-published', async (user, mediaType) => {
          if (client._users.length > 1) {
            client.leave();
            alert('Please Wait Room is Full');
            return;
          }
          await client.subscribe(user, mediaType);
          if (mediaType === 'video') {
            const remoteVideoTrack = user.videoTrack;
            remoteVideoTrack.play('remote');
          }
          if (mediaType === 'audio') {
            user.audioTrack.play();
          }
        });
      });
  }

  // Initialization
  function setProcessorInstance() {
    if (!processor && localTracks.videoTrack) {
      // Create a VirtualBackgroundProcessor instance
      setProcessor(extension.createProcessor());
    }
  }

  async function setBackground() {
    if (localTracks.videoTrack) {
      setProcessorInstance()
    }
  }

  useEffect(() => {
    startOneToOneVideoCall();
  }, []);

  return (
    <View>
      <View>
        <LocalVideo />
        <RemoteVideo />
        <VideoControllers
          actions={{
            startAudio,
            stopAudio,
            startVideo,
            stopVideo,
            leaveVideoCall,
            startOneToOneVideoCall,
            setBackground
          }}
        />
      </View>
    </View>
  )
}
The LocalVideo and RemoteVideo components are empty Views where the videos are displayed, and the VideoControllers are buttons that manage the video call.
When I run the app the form works fine, but as soon as I submit it the app crashes with these errors:
WARN `new NativeEventEmitter()` was called with a non-null argument without the required `addListener` method.
WARN `new NativeEventEmitter()` was called with a non-null argument without the required `removeListeners` method.
LOG Running "videocall" with {"rootTag":1}
ERROR TypeError: window.addEventListener is not a function. (In 'window.addEventListener("online", function () {
_this32.networkState = EB.ONLINE;
})', 'window.addEventListener' is undefined)
VideoCallApp
Form#http://localhost:8081/index.bundle?platform=android&dev=true&minify=false&app=com.videocall&modulesOnly=false&runModule=true:121056:41
RCTView
View
RCTView
View
AppContainer#http://localhost:8081/index.bundle?platform=android&dev=true&minify=false&app=com.videocall&modulesOnly=false&runModule=true:78626:36
videocall(RootComponent)
ERROR TypeError: undefined is not an object (evaluating '_$$_REQUIRE(_dependencyMap[5], "./components/Call").Call')
Something is happening in the Call component, and I think it might be the DOM manipulation for the videos, but I can't find an example of a react-native project with the Agora SDK.
I don't want to use the UIKit because, even though it works, I can't use the virtual background, which I need for this project. Can anyone help me?
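The stack trace is consistent with the fact that agora-rtc-sdk-ng and agora-extension-virtual-background are Web SDKs: they rely on window, document.createElement and DOM element ids to render video, none of which exist in React Native, which is why the bundle crashes as soon as the Call component loads. One possible direction (a rough sketch only, assuming react-native-agora 3.x from the dependency list, with appId/channel/token/uid coming from the same rtcProps) is to drive the call through react-native-agora's RtcEngine and render video with its RtcLocalView/RtcRemoteView components instead of DOM ids:

import React, { useEffect, useRef, useState } from "react";
import { View } from "react-native";
import RtcEngine, { RtcLocalView, RtcRemoteView } from "react-native-agora";

// Rough native counterpart of the Call component (no virtual background yet)
export const NativeCall = ({ rtcProps }) => {
  const engineRef = useRef(null);
  const [joined, setJoined] = useState(false);
  const [remoteUid, setRemoteUid] = useState(null);

  useEffect(() => {
    const init = async () => {
      // Native engine instead of AgoraRTC.createClient (web-only)
      const engine = await RtcEngine.create(rtcProps.appId);
      engineRef.current = engine;
      await engine.enableVideo();

      // Remote users are tracked by uid; no DOM element ids are involved
      engine.addListener("UserJoined", (uid) => setRemoteUid(uid));
      engine.addListener("UserOffline", () => setRemoteUid(null));
      engine.addListener("JoinChannelSuccess", () => setJoined(true));

      await engine.joinChannel(
        rtcProps.token,
        rtcProps.channel,
        null,
        Number(rtcProps.uid)
      );
    };
    init();
    return () => {
      engineRef.current && engineRef.current.destroy();
    };
  }, []);

  return (
    <View style={{ flex: 1 }}>
      {joined && (
        <RtcLocalView.SurfaceView
          style={{ flex: 1 }}
          channelId={rtcProps.channel}
        />
      )}
      {remoteUid != null && (
        <RtcRemoteView.SurfaceView
          style={{ flex: 1 }}
          uid={remoteUid}
          channelId={rtcProps.channel}
        />
      )}
    </View>
  );
};

The virtual background part would need to be checked against what the installed react-native-agora version actually exposes; the agora-extension-virtual-background web package cannot be reused on the native side.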

Pedometer value is blank when using foreground services in react native

I am getting the pedometer value when I first start the app. But after I close and restart the app, the value is blank and there is no exception/error. Please review and advise; if you have any other working code, please share it with me. I was using https://github.com/voximplant/react-native-foreground-service for the foreground service and the Expo Pedometer for step counting. When using the pedometer on its own, it works fine.
export default class HomeScreen extends React.Component {
  state = {
    pastStepCount: 0,
  };

  componentDidMount() {
    this._subscribe();
  }

  componentWillUnmount() {
    this._unsubscribe();
  }

  _subscribe = () => {
    this._subscription = Pedometer.watchStepCount(result => {
      this.setState({
        currentStepCount: result.steps,
      });
    });
    try {
      this.setState();
      console.log("steps counting");
    } catch (error) {
      Alert.alert('error')
    }
  };

  _unsubscribe = () => {
    this._subscription && this._subscription.remove();
    this._subscription = null;
  };

  //start
  foregroundService = VIForegroundService.getInstance();

  state = {
    isRunningService: false,
  };

  componentDidMount() {
    this.startService();
  }

  componentWillUnmount() {
    this.stopService();
  }

  async startService() {
    if (this.state.isRunningService) return;
    if (Platform.Version >= 26) {
      const channelConfig = {
        id: 'ForegroundServiceChannel',
        name: 'Notification Channel',
        description: 'Notification Channel for Foreground Service',
        enableVibration: false,
        importance: 2
      };
      await this.foregroundService.createNotificationChannel(channelConfig);
    }
    const notificationConfig = {
      channelId: 'ForegroundServiceChannel',
      id: 3456,
      title: 'Foreground Service',
      text: 'Foreground service is running',
      icon: 'ic_notification',
      priority: 0,
    };
    try {
      this.subscribeForegroundButtonPressedEvent();
      await this.foregroundService.startService(notificationConfig);
      this.setState({ isRunningService: true });
    } catch (error) {
      this.foregroundService.off();
    }
  }

  async stopService() {
    if (!this.state.isRunningService) return;
    this.setState({ isRunningService: false });
    await this.foregroundService.stopService();
    this.foregroundService.off();
  }

  subscribeForegroundButtonPressedEvent() {
    this.foregroundService.on('VIForegroundServiceButtonPressed', async () => {
      await this.stopService();
    });
  }
  //ends
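One thing that stands out in the snippet as pasted: between the //start and //ends markers the class gains a second state initializer and a second componentDidMount/componentWillUnmount pair, and in a JavaScript class the later definition silently replaces the earlier one, so the pedometer subscription may never be set up once the service code is added. A rough sketch of merging them into a single lifecycle (assuming the same Expo Pedometer and VIForegroundService APIs already used above; startService, stopService and subscribeForegroundButtonPressedEvent stay as in the original):

import React from 'react';
import { Pedometer } from 'expo-sensors';
// import path assumed from the linked repository
import VIForegroundService from '@voximplant/react-native-foreground-service';

export default class HomeScreen extends React.Component {
  foregroundService = VIForegroundService.getInstance();

  state = {
    currentStepCount: 0,
    pastStepCount: 0,
    isRunningService: false,
  };

  componentDidMount() {
    // Start the foreground service and the pedometer subscription together
    this.startService();
    this._subscribe();
  }

  componentWillUnmount() {
    this._unsubscribe();
    this.stopService();
  }

  _subscribe = () => {
    this._subscription = Pedometer.watchStepCount(result => {
      this.setState({ currentStepCount: result.steps });
    });
  };

  _unsubscribe = () => {
    this._subscription && this._subscription.remove();
    this._subscription = null;
  };

  // startService / stopService / subscribeForegroundButtonPressedEvent
  // remain exactly as in the original snippet
}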

React-native: How to change the audio speed in expo-av

I'm having trouble changing the 'rate' prop to change the speed of the audio being played.
I'm using expo-av (https://docs.expo.dev/versions/latest/sdk/av/).
Here's my code:
import React, { Component, createContext } from 'react';
import { Text, View, Alert } from 'react-native';
import * as MediaLibrary from 'expo-media-library';
import { DataProvider } from 'recyclerlistview';
import { Audio } from 'expo-av';
import AsyncStorageLib from '@react-native-async-storage/async-storage';
import { play, pause, resume, playNext } from "../misc/AudioController";

export const AudioContext = createContext();

export class AudioProvider extends Component {
  constructor(props) {
    super(props);
    this.state = {
      audioFiles: [],
      permissionError: false,
      dataProvider: new DataProvider((r1, r2) => r1 !== r2),
      playbackObj: null,
      soundObj: null,
      currentAudio: {},
      isPlaying: false,
      currentAudioIndex: null,
      playbackPosition: null,
      playbackDuration: null,
      rate: 2.0,
    };
    this.totalAudioCount = 0;
  }

  permissionAlert = () => {
    Alert.alert("Permission Required", "This app needs to read audio files", [
      { text: "I am ready", onPress: () => this.getPermission() },
      {
        text: "cancel",
        onPress: () => this.permissionAlert(),
      },
    ]);
  };

  getAudioFiles = async () => {
    const { dataProvider, audioFiles } = this.state;
    let media = await MediaLibrary.getAssetsAsync({
      mediaType: "audio",
    });
    media = await MediaLibrary.getAssetsAsync({
      mediaType: "audio",
      first: media.totalCount,
    });
    this.totalAudioCount = media.totalCount;
    this.setState({
      ...this.state,
      dataProvider: dataProvider.cloneWithRows([
        ...audioFiles,
        ...media.assets,
      ]),
      audioFiles: [...audioFiles, ...media.assets],
    });
  };

  loadPreviousAudio = async () => {
    let previousAudio = await AsyncStorageLib.getItem("previousAudio");
    let currentAudio;
    let currentAudioIndex;
    if (previousAudio === null) {
      currentAudio = this.state.audioFiles[0];
      currentAudioIndex = 0;
    } else {
      previousAudio = JSON.parse(previousAudio);
      currentAudio = previousAudio.audio;
      currentAudioIndex = previousAudio.index;
    }
    this.setState({ ...this.state, currentAudio, currentAudioIndex });
  };

  getPermission = async () => {
    // {
    //   "canAskAgain": true,
    //   "expires": "never",
    //   "granted": false,
    //   "status": "undetermined",
    // }
    const permission = await MediaLibrary.getPermissionsAsync();
    if (permission.granted) {
      this.getAudioFiles();
    }
    if (!permission.canAskAgain && !permission.granted) {
      this.setState({ ...this.state, permissionError: true });
    }
    if (!permission.granted && permission.canAskAgain) {
      const { status, canAskAgain } =
        await MediaLibrary.requestPermissionsAsync();
      if (status === "denied" && canAskAgain) {
        this.permissionAlert();
      }
      if (status === "granted") {
        this.getAudioFiles();
      }
      if (status === "denied" && !canAskAgain) {
        this.setState({ ...this.state, permissionError: true });
      }
    }
  };

  onPlaybackStatusUpdate = async (playbackStatus) => {
    console.log("hier");
    if (playbackStatus.isLoaded && playbackStatus.isPlaying) {
      this.updateState(this, {
        playbackPosition: playbackStatus.positionMillis,
        playbackDuration: playbackStatus.durationMillis,
      });
    }
    if (playbackStatus.didJustFinish) {
      const nextAudioIndex = this.state.currentAudioIndex + 1;
      if (nextAudioIndex >= this.totalAudioCount) {
        this.state.playbackObj.unloadAsync();
        this.updateState(this, {
          soundObj: null,
          currentAudio: this.state.audioFiles[0],
          isPlaying: false,
          currentAudioIndex: 0,
          playbackPosition: null,
          playbackDuration: null,
        });
      }
      const audio = this.state.audioFiles[nextAudioIndex];
      const status = await playNext(this.state.playbackObj, audio.uri);
      this.updateState(this, {
        soundObj: status,
        currentAudio: audio,
        isPlaying: true,
        currentAudioIndex: nextAudioIndex,
      });
    }
  };

  componentDidMount() {
    this.getPermission();
    if (this.state.playbackObj === null) {
      this.setState({ ...this.state, playbackObj: new Audio.Sound() });
    }
  }

  updateState = (prevState, newState = {}) => {
    this.setState({ ...prevState, ...newState });
  };

  render() {
    const {
      audioFiles,
      dataProvider,
      permissionError,
      playbackObj,
      soundObj,
      currentAudio,
      isPlaying,
      currentAudioIndex,
      playbackPosition,
      playbackDuration,
      rate,
    } = this.state;
    if (permissionError)
      return (
        <View
          style={{
            flex: 1,
            justifyContent: "center",
            alignItems: "center",
          }}
        >
          <Text>It looks like you haven't accepted the permission</Text>
        </View>
      );
    return (
      <AudioContext.Provider
        value={{
          audioFiles,
          dataProvider,
          playbackObj,
          soundObj,
          currentAudio,
          isPlaying,
          currentAudioIndex,
          totalAudioCount: this.totalAudioCount,
          playbackPosition,
          playbackDuration,
          rate,
          updateState: this.updateState,
          loadPreviousAudio: this.loadPreviousAudio,
          onPlaybackStatusUpdate: this.onPlaybackStatusUpdate,
        }}
      >
        {this.props.children}
      </AudioContext.Provider>
    );
  }
}

export default AudioProvider;
and here's some more:
// play audio
// Import the expo-av AV types
import { PitchCorrectionQuality, shouldCorrectPitch, rate } from "expo-av/build/AV.types";

export const play = async (playbackObj, uri) => {
  try {
    return await playbackObj.loadAsync(
      { uri },
      { shouldPlay: true },
    );
  } catch (error) {
    console.log('error inside play helper method', error.message)
  }
};

//pause
export const pause = async playbackObj => {
  try {
    // playbackObj.setRateAsync(rate = 2.0, shouldCorrectPitch = true, PitchCorrectionQuality = High);
    return await playbackObj.setStatusAsync({
      shouldPlay: false,
    });
  } catch (error) {
    console.log('error inside pause helper method', error.message)
  }
};

//resume
export const resume = async playbackObj => {
  try {
    return await playbackObj.playAsync();
  } catch (error) {
    console.log('error inside pause resume method', error.message)
  }
};

//select next
export const playNext = async (playbackObj, uri) => {
  try {
    await playbackObj.stopAsync()
    await playbackObj.unloadAsync();
    return await play(playbackObj, uri);
  } catch (error) {
    console.log('error inside playNext helper method')
  }
}
I've tried including rate: 2.0 inside this.state ({ audioFiles: [], permissionError: false, ... }) but it didn't work.
I've also tried calling await playbackObj.setRateAsync() in the 2nd code snippet.
Any suggestions?
Nvm, I found the solution. Here's my updated code:
// play audio
// Import the expo-av AV types
import { PitchCorrectionQuality, shouldCorrectPitch, rate } from "expo-av/build/AV.types";

export const play = async (playbackObj, uri) => {
  try {
    await playbackObj.loadAsync(
      { uri },
      { shouldPlay: true },
    );
    return await playbackObj.setStatusAsync({ rate: 0.9749090909 });
  } catch (error) {
    console.log('error inside play helper method', error.message)
  }
};

//pause
export const pause = async playbackObj => {
  try {
    return await playbackObj.setStatusAsync({
      shouldPlay: false,
      rate: 0.9749090909,
    });
  } catch (error) {
    console.log('error inside pause helper method', error.message)
  }
};

//resume
export const resume = async playbackObj => {
  try {
    return await playbackObj.playAsync();
  } catch (error) {
    console.log('error inside pause resume method', error.message)
  }
};

//select next
export const playNext = async (playbackObj, uri) => {
  try {
    await playbackObj.stopAsync()
    await playbackObj.unloadAsync();
    return await play(playbackObj, uri);
  } catch (error) {
    console.log('error inside playNext helper method')
  }
}
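For reference, the playback object also exposes setRateAsync(rate, shouldCorrectPitch), so the same effect can be achieved without re-sending the whole status. A small sketch assuming a loaded expo-av Audio.Sound instance (the 1.5 value is just an example):

export const play = async (playbackObj, uri) => {
  try {
    const status = await playbackObj.loadAsync({ uri }, { shouldPlay: true });
    // Play at 1.5x speed and keep the original pitch
    await playbackObj.setRateAsync(1.5, true);
    return status;
  } catch (error) {
    console.log('error inside play helper method', error.message);
  }
};

The rate and shouldCorrectPitch fields can also be passed in the initial status of loadAsync, e.g. loadAsync({ uri }, { shouldPlay: true, rate: 1.5, shouldCorrectPitch: true }).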

Slider does not keep moving with onProgress method in react native

I am playing some audio files in react native. To show the progress (duration) of the audio file, I display a slider that reflects the playback status and lets the user seek forward and backward.
But the slider position does not keep moving as the audio plays (like a timer).
https://www.npmjs.com/package/react-native-slider
getInfo = async () => {
  try {
    const info = await SoundPlayer.getInfo();
    // console.log('getInfo', info); // {duration: 12.416, currentTime: 7.691}
    const currentTime = get(info, 'currentTime');
    this.setState({ currentDuration: currentTime });
  } catch (e) {
    console.log('There is no song playing', e);
  }
}

getProgress = (e) => {
  console.log('getProgress calling');
  this.getInfo();
  this.setState({
    currentTime: this.state.currentDuration,
  });
}

<Slider
  maximumValue={parseFloat(totalLength)}
  minimumTrackTintColor="color"
  maximumTrackTintColor="color"
  step={1}
  value={currentTime}
  onSlidingComplete={value => this.onValueChange(value)}
  style={styles.slider}
  thumbTintColor="red"
  trackLength={parseFloat(totalLength)}
  // onSlidingStart={() => this.setState({ paused: true })}
  currentPosition={currentTime}
  onProgress={e => this.getProgress(e)}
/>
The slider value should move automatically according to the audio's progress.
Any suggestions?
You'll need a timer that updates the progress bar every second:
timer = null;

durationCounter = () => {
  this.timer = setInterval(async () => {
    const info = await SoundPlayer.getInfo();
    this.setState({
      currentTime: info.currentTime
    });
  }, 1000);
};

componentDidMount = () => {
  SoundPlayer.onFinishedLoading(async success => {
    this.durationCounter();
  });
}

componentWillUnmount = () => {
  // Clear the interval when the component goes away
  this.timer && clearInterval(this.timer);
};
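As a follow-up, for the forward/rewind part, the value from onSlidingComplete can be passed to the player's seek method; react-native-sound-player (which getInfo/onFinishedLoading above appear to come from) exposes SoundPlayer.seek(seconds). A small sketch under that assumption, using the onValueChange handler already wired into the Slider:

onValueChange = (value) => {
  // Jump to the selected position (in seconds) and keep local state in sync
  SoundPlayer.seek(value);
  this.setState({ currentTime: value });
};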

react native AsyncStorage: getting data when running the app for the first time

I have two components: in the first component I store data in AsyncStorage, and in the second component I display it. When I install the app and save data, the second component does not get the data from AsyncStorage; only when I open the app a second time is the data displayed.
storeData = async (item, messave, messrem) => {
  const checkarary = this.state.favorite;
  if (checkarary.some(e => e.name === item.name)) {
    const value = this.state.favorite;
    const position = value.filter((lists) => lists.id !== item.id);
    this.setState({
      favorite: position
    }, () => {
      try {
        AsyncStorage.setItem('favoriti', JSON.stringify(this.state.favorite), () => {
          Toast.show({
            text: messrem,
            buttonText: "Okay",
            duration: 3000,
            type: "danger"
          });
        });
      } catch (error) {
      }
    });
  } else {
    this.setState({
      favorite: [...this.state.favorite, item]
    }, () => {
      try {
        AsyncStorage.setItem('favoriti', JSON.stringify(this.state.favorite), () => {
          // AsyncStorage.getItem('favoriti', (err, result) => {
          //   console.log(result);
          // });
          Toast.show({
            text: messave,
            buttonText: "Okay",
            duration: 3000,
            type: "success"
          });
        });
      } catch (error) {
      }
    });
  }
};
Getting the data in the second component:
_retrieveData = async () => {
  try {
    AsyncStorage.getItem('favoriti').then((value) => {
      const parsed = JSON.parse(value);
      this.setState({ favorite: parsed })
    })
  } catch (error) {
  }
};

componentDidMount() {
  this._retrieveData();
  setTimeout(() => {
    this.setState({
      loading: false,
    })
  }, 2000)
};

componentDidUpdate() {
  this._retrieveData();
};
How can I fix this issue? Is there a solution, such as setting the item and reloading the app when it is first installed, or something else?
Use this
componentWillMount() {
  this._retrieveData();
  setTimeout(() => {
    this.setState({
      loading: false,
    })
  }, 2000)
};
instead of
componentDidMount() {
  this._retrieveData();
  setTimeout(() => {
    this.setState({
      loading: false,
    })
  }, 2000)
};
componentWillMount is called right after the class constructor, before the first render, while componentDidMount is called only after the screen has been rendered once.
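If it helps to see the ordering this explanation relies on, a small throwaway component with console.log calls makes it visible (componentWillMount is deprecated in newer React versions, so this is only for illustrating the sequence):

class LifecycleDemo extends React.Component {
  constructor(props) {
    super(props);
    console.log('1. constructor');
  }
  componentWillMount() {
    console.log('2. componentWillMount (before the first render)');
  }
  componentDidMount() {
    console.log('4. componentDidMount (after the first render)');
  }
  render() {
    console.log('3. render');
    return null;
  }
}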