expo camera eas build crashes when video recording starts - react-native

I have been working on a React Native camera app using expo-camera.
In Expo Go, launching the app automatically starts video recording via the onCameraReady prop, but when I build with EAS the app crashes.
I use onCameraReady to call a record function, which calls recordAsync to record the video.
<Camera
  ref={(ref) => {
    setcamera(ref)
  }}
  style={{ flex: 1, aspectRatio: 9 / 16 }}
  ratio={'16:9'}
  type={type}
  onCameraReady={record}
>
On the initial launch the video is not recorded; I have to manually press the record button to record the video.
const record = async () => {
  setstartRecording(true)
  await camera.recordAsync()
    .then(video => {
      MediaLibrary.saveToLibraryAsync(video.uri)
    })
}
Kindly help me; I have not been able to solve this. Here is my full code:
import { Camera, CameraType } from 'expo-camera';
import { setStatusBarTranslucent } from 'expo-status-bar';
import { useEffect, useState } from 'react';
import { Alert, Dimensions, StyleSheet, Text, TouchableOpacity, View } from 'react-native';
import { MaterialIcons } from '@expo/vector-icons';
import { Fontisto } from '@expo/vector-icons';
import * as MediaLibrary from 'expo-media-library';

export default function App() {
  const [type, settype] = useState(CameraType.back)
  const [camera, setcamera] = useState()
  const [cameraStatus, setcameraStatus] = useState({})
  const [micStatus, setmicStatus] = useState({})
  const [mediaStatus, setmediaStatus] = useState({})
  const [startRecording, setstartRecording] = useState(false)
  const [timer, settimer] = useState(0)

  async function permission() {
    await Camera.requestCameraPermissionsAsync().then(setcameraStatus)
    await Camera.requestMicrophonePermissionsAsync().then(setmicStatus)
    await MediaLibrary.requestPermissionsAsync().then(setmediaStatus)
  }

  useEffect(() => {
    permission()
    setStatusBarTranslucent(true)
  }, [])

  useEffect(() => {
    let interval = null
    if (startRecording) {
      interval = setInterval(() => {
        settimer(prev => prev + 1)
      }, 1000);
    } else {
      clearInterval(interval)
      settimer(0)
    }
    return () => {
      clearInterval(interval)
    }
  }, [startRecording])

  const record = async () => {
    setstartRecording(true)
    await camera.recordAsync()
      .then(video => {
        MediaLibrary.saveToLibraryAsync(video.uri)
      })
  }

  const toggleCameraType = () => {
    setstartRecording(false)
    settype(current => (current === CameraType.back ? CameraType.front : CameraType.back))
  }

  const stopRecording = () => {
    camera.stopRecording()
    setstartRecording(false)
  }

  return (
    <Camera
      ref={(ref) => {
        setcamera(ref)
      }}
      style={{ flex: 1, aspectRatio: 9 / 16 }}
      ratio={'16:9'}
      type={type}
      onCameraReady={record}
    >
      <View style={{ position: 'absolute', flexDirection: 'row', bottom: 0, padding: 48, justifyContent: 'space-between', width: Dimensions.get('window').width }}>
        <View
          style={{
            width: 61,
            height: 61,
            borderWidth: 3,
            borderColor: 'white',
            borderRadius: 100,
            backgroundColor: 'black',
            alignItems: 'center',
            justifyContent: 'center'
          }}>
          <Text
            style={{
              color: 'white',
              fontSize: 34,
              fontWeight: 'bold'
            }}>{timer}</Text>
        </View>
        <TouchableOpacity onPress={startRecording ? () => stopRecording() : () => setstartRecording(true)}>
          <Fontisto name="record" size={64} color={startRecording ? "red" : "white"} />
        </TouchableOpacity>
        <TouchableOpacity onPress={toggleCameraType}>
          <MaterialIcons name="flip-camera-android" size={64} color="white" />
        </TouchableOpacity>
      </View>
    </Camera>
  );
}

const styles = StyleSheet.create({
  camera: {
    flex: 1,
  }
});
I am making a camera app that automatically starts recording video using the expo-camera library.
On launching the application, recording must start immediately after the camera component mounts.
It works well in the Expo Go app, but after building with EAS and installing the APK on a physical device, the app either crashes or does not record video at all.
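A minimal sketch of one way to make the auto-start safer, assuming the crash comes from recordAsync firing before permissions are granted and before the camera ref is populated (both timings differ between Expo Go, where permissions are often already granted, and a fresh EAS build). The cameraRef and hasPermissions names are introduced here for illustration:

import { useEffect, useRef, useState } from 'react';
import { Camera, CameraType } from 'expo-camera';
import * as MediaLibrary from 'expo-media-library';

export default function AutoRecordCamera() {
  // A ref avoids the stale-state problem of keeping the camera in useState
  const cameraRef = useRef(null);
  const [hasPermissions, setHasPermissions] = useState(false);

  useEffect(() => {
    (async () => {
      // recordAsync needs camera AND microphone; saving needs the media library
      const cam = await Camera.requestCameraPermissionsAsync();
      const mic = await Camera.requestMicrophonePermissionsAsync();
      const lib = await MediaLibrary.requestPermissionsAsync();
      setHasPermissions(cam.granted && mic.granted && lib.granted);
    })();
  }, []);

  const record = async () => {
    if (!cameraRef.current) return; // guard: ref not attached yet
    const video = await cameraRef.current.recordAsync();
    await MediaLibrary.saveToLibraryAsync(video.uri);
  };

  // Mount the camera only after permissions exist, so onCameraReady
  // cannot fire before they are granted
  if (!hasPermissions) return null;

  return (
    <Camera
      ref={cameraRef}
      style={{ flex: 1 }}
      type={CameraType.back}
      onCameraReady={record}
    />
  );
}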

Related

How to get all the pdf files available in internal storage of android in react native

I am using react-native-fs to read files from the device's external storage. I want to get all the PDF books stored on an Android device and list them on the screen. I searched Google and read the react-native-fs docs, but did not succeed in getting all the PDF books. Please help if something is wrong with my code.
What am I doing wrong?
Here is my code.
import React, { useState, useEffect } from 'react';
import {
  Alert,
  StyleSheet,
  Text,
  View,
  Dimensions,
  ImageBackground,
  ScrollView,
  PermissionsAndroid,
  ActivityIndicator,
} from 'react-native';
import RNFS from 'react-native-fs';
import BookOffline from '../components/BookOffline';

const MyBooks = ({ navigation }) => {
  // collecting data from device
  const [books, setBooks] = useState([])
  const [bookList, setBookList] = useState([]);

  useEffect(() => {
    getPermission();
  }, []);

  const getPermission = async () => {
    try {
      PermissionsAndroid.request(
        PermissionsAndroid.PERMISSIONS.READ_EXTERNAL_STORAGE
      ).then(granted => {
        if (granted === PermissionsAndroid.RESULTS.GRANTED) {
          readStorage();
        } else {
          // If permission is denied, show an alert
          Alert.alert('Not Granted storage_permission');
          navigation.goBack();
        }
      });
    } catch (err) {
      // To handle permission-related issues
      console.log('error', err);
    }
  };

  const readStorage = async () => {
    let list2 = [];
    await RNFS.readDir(RNFS.ExternalStorageDirectoryPath) // On Android, use "RNFS.DocumentDirectoryPath" (MainBundlePath is not defined)
      .then(result => {
        result.forEach((item, index) => {
          // console.log(index, item)
          if (item.name.endsWith('.pdf')) {
            setBooks([...books, item])
          } else if (item.isDirectory()) {
            RNFS.readDir(item.path)
              .then(result => {
                list2 = result.filter((item) => item.name.endsWith('.pdf'))
                setBooks([...books, ...list2])
              }).catch((error) => {
                console.log(error)
              })
          }
        });
        setBookList(books)
        console.log("bookList", bookList)
      })
      .catch(error => {
        console.log(error);
      });
  };

  return bookList.length == 0 ? (
    <View style={{ flex: 1, justifyContent: 'center', alignItems: 'center' }}>
      <Text>Loading...</Text>
      <ActivityIndicator size="large" />
    </View>
  ) : (
    <View style={{ justifyContent: 'center', alignItems: 'center' }}>
      <ImageBackground
        source={require('../assets/images/tech-dark-design.jpg')}
        resizeMode="cover"
        style={{ width: '100%' }}>
        <ScrollView style={styles.images}>
          {bookList.map((item, index) => {
            return <BookOffline data={item} key={index} />;
          })}
        </ScrollView>
      </ImageBackground>
    </View>
  );
};

export default MyBooks;

const styles = StyleSheet.create({
  container: {
    height: Dimensions.get('window').height - 110,
    // padding: 5,
  },
  list: {
    width: '100%',
    shadowColor: '#000',
    shadowOffset: {
      width: 0,
      height: 1,
    },
    shadowOpacity: 0.2,
    shadowRadius: 1.41,
    elevation: 2,
    marginBottom: 1,
  },
});
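For what it's worth, one likely culprit in the code above: books is captured by the closure, so every setBooks([...books, item]) inside the loop spreads the same stale (initially empty) array, and setBookList(books) runs before the nested readDir promises have settled. A sketch of readStorage that collects everything into a local array and sets state once, under those assumptions:

const readStorage = async () => {
  try {
    const found = [];
    const entries = await RNFS.readDir(RNFS.ExternalStorageDirectoryPath);
    for (const item of entries) {
      if (item.name.endsWith('.pdf')) {
        found.push(item);
      } else if (item.isDirectory()) {
        // Await the nested read so no directory is skipped before setting state
        const inner = await RNFS.readDir(item.path);
        found.push(...inner.filter((f) => f.name.endsWith('.pdf')));
      }
    }
    // Single state update once every directory has been scanned
    setBooks(found);
    setBookList(found);
  } catch (error) {
    console.log(error);
  }
};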

React Native: navigation

I'm new to React Native. This is an application that scans QR codes. I want two buttons: one that scans again, and one that redirects to a new screen called 'List' with the QR code data. However, after scanning the QR code, every time I press the scan-again or result button the Expo application crashes. This is my code:
import React, { useState, useEffect } from 'react';
import { Text, View, StyleSheet, Button } from 'react-native';
import { BarCodeScanner } from 'expo-barcode-scanner';
import { Link } from "react-router-dom"
import CryptoJS from 'crypto-js';
import { decode } from 'base-64';
import { useNavigation } from '@react-navigation/native';

export default function Scan({ navigation }) {
  const [hasPermission, setHasPermission] = useState(null);
  const [scanned, setScanned] = useState(false);
  const [text, setText] = useState('Not yet scanned')

  const askForCameraPermission = () => {
    (async () => {
      const { status } = await BarCodeScanner.requestPermissionsAsync();
      setHasPermission(status === 'granted');
    })()
  }

  // Request Camera Permission
  useEffect(() => {
    askForCameraPermission();
  }, []);

  // What happens when we scan the bar code
  const handleBarCodeScanned = ({ type, data }) => {
    setScanned(true);
    var master_key = '5486231058798416'
    // Decode the base64 data so we can separate iv and crypt text.
    var rawData = decode(data);
    // Split by 16 because my IV size
    var iv = rawData.substring(0, 16);
    var crypttext = rawData.substring(16);
    // Parsers
    crypttext = CryptoJS.enc.Latin1.parse(crypttext);
    iv = CryptoJS.enc.Latin1.parse(iv);
    var key = CryptoJS.enc.Utf8.parse(master_key);
    // Decrypt
    var plaintextArray = CryptoJS.AES.decrypt(
      { ciphertext: crypttext },
      key,
      { iv: iv, mode: CryptoJS.mode.CBC, padding: CryptoJS.pad.Pkcs7 }
    );
    // Can be Utf8 too
    var output_plaintext = CryptoJS.enc.Latin1.stringify(plaintextArray).json();
    console.log("plain text : " + output_plaintext);
    setText(output_plaintext.json())
    console.log('Type: ' + type + '\nData: ' + data)
  };

  // Check permissions and return the screens
  if (hasPermission === null) {
    return (
      <View style={styles.container}>
        <Text>Requesting for camera permission</Text>
      </View>)
  }
  if (hasPermission === false) {
    return (
      <View style={styles.container}>
        <Text style={{ margin: 10 }}>No access to camera</Text>
        <Button title={'Allow Camera'} onPress={() => askForCameraPermission()} />
      </View>)
  }

  // Navigation
  function MyBackButton() {
    const navigation = useNavigation();
    return (
      <Button
        title="Result"
        onPress={() => {
          navigation.navigate('List');
        }}
      />
    );
  }

  // Return the View
  return (
    <View style={styles.container}>
      <View style={styles.barcodebox}>
        <BarCodeScanner
          onBarCodeScanned={scanned ? undefined : handleBarCodeScanned}
          style={{ height: 400, width: 400 }} />
      </View>
      {scanned && <Button title={'Result'} onPress={() => MyBackButton()} color='black' />}
      {scanned && <Button title={'Scan again?'} onPress={() => setScanned(false)} color='black' />}
    </View>
  );
}

const styles = StyleSheet.create({
  container: {
    flex: 1,
    backgroundColor: '#fff',
    alignItems: 'center',
    justifyContent: 'center',
  },
  maintext: {
    fontSize: 16,
    margin: 20,
  },
  barcodebox: {
    alignItems: 'center',
    justifyContent: 'center',
    height: 300,
    width: 300,
    overflow: 'hidden',
    borderRadius: 30,
    backgroundColor: 'black'
  }
});
In your code, MyBackButton is not just a function - it returns a view, and calling it like a plain function from onPress crashes.
Fix your MyBackButton like this:
// Navigation
function myBackAction() {
  const navigation = useNavigation();
  navigation.navigate('List');
}
That is, rename MyBackButton -> myBackAction, since an uppercase name makes React treat it as a component.
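A caveat on the fix above: useNavigation is a hook, so it may only be called while a component is rendering, not inside a plain function invoked from onPress. Since Scan already receives the navigation prop, a minimal sketch of button wiring that avoids both crashes (passing the decoded text as a route param is my own assumption about what "with the QR code data" means):

{scanned && (
  <Button
    title={'Result'}
    color='black'
    // Navigate directly; never call a component like a plain function
    onPress={() => navigation.navigate('List', { text })}
  />
)}
{scanned && (
  <Button
    title={'Scan again?'}
    color='black'
    onPress={() => setScanned(false)}
  />
)}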

Unhandled promise rejection: TypeError: undefined is not an object (evaluating 'camera.takePictureAsync') React-Native expo-camera

I am trying to create a function that will access my device's camera and allow me to take a picture, but I get the above error. I modeled this on requesting access to the camera roll, which works fine, but I cannot get it to work for the camera.
What may be causing this? Below is some of my code:
import * as ImagePicker from 'expo-image-picker' // I am using expo
import { Camera } from 'expo-camera'

export default function Photo() {
  // Image Picker function start
  useEffect(() => {
    (async () => {
      if (Platform.OS != 'web') {
        const { status } = await ImagePicker.requestMediaLibraryPermissionsAsync();
        if (status !== 'granted') {
          alert('Camera roll required to upload photo from your library');
        }
      }
    })();
  }, []);
  // Image Picker function end

  const camera = useRef(null) // added this

  const takePicture = async () => { // added this
    useEffect(() => {
      (async () => {
        if (Platform.OS !== 'web') {
          const { status: status1 } = await Camera.requestPermissionsAsync();
          if (status1 !== 'granted') {
            alert('Camera required to take a photo');
          }
        } // added this
      })();
    }, [])
  }

  <Camera // added this
    ref={camera}
    onGoogleVisionBarcodesDetected={({ barcodes }) => {
      console.log(barcodes)
    }}
  /> // added this
  <View style={[styles.button, { justifyContent: 'center', borderRadius: 20, backgroundColor: '#fff', paddingTop: 10, width: width * 0.5, alignItems: 'center' }]}>
    <TouchableOpacity
      color='#fff'
      onPress={() => takePicture()}
    >
      <Text style={[styles.button, {}]}>Take Photo </Text>
    </TouchableOpacity>
  </View>
This might help
import React, { useRef } from 'react'
import { View, Text, StyleSheet, TouchableOpacity } from 'react-native'
import { RNCamera } from 'react-native-camera'

function PlayWithCamera() {
  const camera = useRef(null);

  const takePicture = async () => {
    // camera is a ref, so the camera instance lives on camera.current
    const result1 = await camera.current.takePictureAsync();
    ....
  };

  return (
    <View style={styles.container}>
      <RNCamera
        ref={camera}
        .....
        onGoogleVisionBarcodesDetected={({ barcodes }) => {
          console.log(barcodes)
        }}
      />
      <View ... >
        <TouchableOpacity
          onPress={() => takePicture()} // change here
        >
          ......
        </TouchableOpacity>
      </View>
    </View>
  )
}

const styles = StyleSheet.create({
  container: {
    flex: 1,
    flexDirection: 'column',
    backgroundColor: 'black',
  },
  preview: {
    flex: 1,
    justifyContent: 'flex-end',
    alignItems: 'center',
  },
  capture: {
    flex: 0,
    backgroundColor: '#fff',
    borderRadius: 5,
    padding: 15,
    paddingHorizontal: 20,
    alignSelf: 'center',
    margin: 20,
  },
})

export default PlayWithCamera
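Since the question actually uses expo-camera rather than react-native-camera, here is a minimal sketch of the same idea with expo-camera; note the .current on the ref, whose absence is exactly what produces "undefined is not an object (evaluating 'camera.takePictureAsync')". Component and variable names are illustrative:

import React, { useRef } from 'react';
import { View, Text, TouchableOpacity } from 'react-native';
import { Camera } from 'expo-camera';

export default function PhotoScreen() {
  const cameraRef = useRef(null);

  const takePicture = async () => {
    if (!cameraRef.current) return; // guard against the ref not being attached yet
    const photo = await cameraRef.current.takePictureAsync();
    console.log('Path to image: ' + photo.uri);
  };

  return (
    <View style={{ flex: 1 }}>
      <Camera ref={cameraRef} style={{ flex: 1 }} />
      <TouchableOpacity onPress={takePicture}>
        <Text>Take Photo</Text>
      </TouchableOpacity>
    </View>
  );
}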

How to play a list of videos sequentially by default, without clicking a next button, accessed from the device's local files using React Native CLI

const filePath = `${dirs.DownloadDir}/samplevideos/1.mp4`;
const filePath1 = `${dirs.DownloadDir}/samplevideos/2.mp4`;
const paths = [{ path: filePath }, { path: filePath1 }]

{paths && paths.map((data, inx) => {
  return <Video key={inx} source={{ uri: data.path }}
    volume={50}
    resizeMode="cover"
    style={styles.videoStyle}
  />
})}
I have tried this, but it plays only the last video. Any help will be appreciated.
I found a solution to this question, so I am posting it here; it may help someone in the future.
There is an onEnd callback available in react-native-video; based on it, I increase the index to the next video.
import React, { useEffect, useState } from 'react';
import {
  StyleSheet,
  View,
  Image,
  Text,
  Dimensions,
  PermissionsAndroid
} from 'react-native';
import Video from 'react-native-video';
import RNFetchBlob from 'rn-fetch-blob';

const FILE_PATHS = `${RNFetchBlob.fs.dirs.DownloadDir}/samplevideos/`;

const App = () => {
  const [videoPaths, setVideosPath] = useState([]);
  const [inxofCurrentVideo, setVideoIndex] = useState(0);

  useEffect(() => {
    PermissionsAndroid.RESULTS.GRANTED; // note: this is a no-op, not a permission request; storage permission must already be granted
    getVideoPaths();
  }, [])

  const getVideoPaths = () => {
    RNFetchBlob.fs.ls(FILE_PATHS).then(files => {
      setVideosPath(files);
    }).catch(error => console.log(error))
  };

  const onEnd = () => {
    // Advance only while there is a next video, so the index never
    // runs past the end of the list
    if (inxofCurrentVideo < videoPaths.length - 1) {
      setVideoIndex(inxofCurrentVideo + 1);
    }
  }

  return (
    <View style={styles.videoContainer}>
      <View style={styles.row}>
        {videoPaths.length > 0 && <Video onEnd={onEnd}
          source={{ uri: (FILE_PATHS + videoPaths[inxofCurrentVideo]) }}
          volume={50}
          resizeMode="cover"
          style={styles.videoStyle}
        />}
      </View>
    </View>
  );
};

const styles = StyleSheet.create({
  videoContainer: {
    flex: 1,
    // backgroundColor: 'black',
  },
  row: {
    flex: 1,
    flexDirection: "row",
    width: '100%'
  },
  col6: {
    width: "50%"
  },
  videoStyle: {
    position: 'absolute',
    top: 0,
    bottom: 5,
    left: 0,
    right: 0,
  },
});

export default App;
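If the playlist should loop back to the first video instead of stopping after the last one, a small variation on onEnd (my own tweak, not part of the original answer) wraps the index with a modulo:

const onEnd = () => {
  // Wrap around to the first video after the last one finishes
  setVideoIndex(prev => (prev + 1) % videoPaths.length);
};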

How to open the camera and taking the picture in react native?

I want to open the device camera from my app when the user clicks a button, and when the user presses the back button it should return to my application from the device camera. I am able to open the camera and take a photo by running the React Native project, but I want it to work the way the camera works in WhatsApp: clicking a button -> opening the camera -> send button.
I am a beginner in React Native. I have tried many ways, but I am not getting how it can be done.
Can anybody assist me with this?
My App.js code is:
'use strict';
import React, { Component } from 'react';
import {
  AppRegistry,
  Dimensions,
  StyleSheet,
  Text,
  TouchableHighlight,
  View
} from 'react-native';
import Camera from 'react-native-camera';

class BadInstagramCloneApp extends Component {
  render() {
    return (
      <View style={styles.container}>
        <Camera
          ref={(cam) => {
            this.camera = cam;
          }}
          style={styles.preview}
          aspect={Camera.constants.Aspect.fill}>
          <Text style={styles.capture} onPress={this.takePicture.bind(this)}>[CAPTURE]</Text>
        </Camera>
      </View>
    );
  }
  takePicture() {
    const options = {};
    //options.location = ...
    this.camera.capture({ metadata: options })
      .then((data) => console.log(data))
      .catch(err => console.error(err));
  }
}

const styles = StyleSheet.create({
  container: {
    flex: 1,
    flexDirection: 'row',
  },
  preview: {
    flex: 1,
    justifyContent: 'flex-end',
    alignItems: 'center'
  },
  capture: {
    flex: 0,
    backgroundColor: '#fff',
    borderRadius: 5,
    color: '#000',
    padding: 10,
    margin: 40
  }
});

AppRegistry.registerComponent('BadInstagramCloneApp', () => BadInstagramCloneApp);
You can use the state to show/hide the camera view/component.
Please check the following code:
...
class BadInstagramCloneApp extends Component {
  constructor(props) {
    super(props);
    this.state = {
      isCameraVisiable: false
    }
  }
  showCameraView = () => {
    this.setState({ isCameraVisible: true });
  }
  render() {
    const { isCameraVisible } = this.state;
    return (
      <View style={styles.container}>
        {!isCameraVisible && <Button title="Show me Camera" onPress={this.showCameraView} />}
        {isCameraVisible &&
          <Camera
            ref={(cam) => {
              this.camera = cam;
            }}
            style={styles.preview}
            aspect={Camera.constants.Aspect.fill}>
            <Text style={styles.capture} onPress={this.takePicture.bind(this)}>[CAPTURE]</Text>
          </Camera>}
      </View>
    );
  }
  takePicture() {
    const options = {};
    //options.location = ...
    this.camera.capture({ metadata: options })
      .then((data) => {
        console.log(data);
        this.setState({ isCameraVisible: false });
      }
      .catch(err => console.error(err));
  }
}
...
You can use https://github.com/ivpusic/react-native-image-crop-picker for this. This component helps you take a photo and also crop it if required. Follow the documentation carefully; here is the code for the camera option:
ImagePicker.openCamera({
  cropping: true,
  width: 500,
  height: 500,
  cropperCircleOverlay: true,
  compressImageMaxWidth: 640,
  compressImageMaxHeight: 480,
  freeStyleCropEnabled: true,
}).then(image => {
  this.setState({ imageModalVisible: false })
})
.catch(e => {
  console.log(e), this.setState({ imageModalVisible: false })
});
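As a side note, the image object that react-native-image-crop-picker resolves with carries the captured file's details (path, width, height, mime), so the then callback would typically store it in state as well; a small variation (the photo state key is my own naming):

.then(image => {
  // image.path points at the captured (and cropped) file on disk
  this.setState({ photo: image.path, imageModalVisible: false });
})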
A correction of the best answer, needed because Camera was deprecated in favor of RNCamera, plus the missing closing bracket ")" right before the .catch and a spelling mistake in the declaration of state:
Basically there are two routes, depending on whether you're using Expo or plain React Native. You have to have Pods/Ruby/CocoaPods or manually link and all that if you're using traditional React Native, but just go with expo-camera if you have an Expo setup and don't listen to this.
This is a React Native with Pods/Ruby/CocoaPods solution, whereas going with expo-camera might be much faster and better if you're not set up like this.
import React, { Component } from 'react';
import {
  Text,
  View,
  StyleSheet,
  Button,
  TouchableOpacity
} from 'react-native';
import { RNCamera } from 'react-native-camera';

export default class Camera2 extends Component {
  constructor(props) {
    super(props);
    this.state = {
      isCameraVisible: false
    }
  }
  showCameraView = () => {
    this.setState({ isCameraVisible: true });
  }
  takePicture = async () => {
    try {
      const data = await this.camera.takePictureAsync();
      console.log('Path to image: ' + data.uri);
    } catch (err) {
      // console.log('err: ', err);
    }
  };
  render() {
    const { isCameraVisible } = this.state;
    return (
      <View style={styles.container}>
        {!isCameraVisible && <Button title="Show me Camera" onPress={this.showCameraView} />}
        {isCameraVisible &&
          <RNCamera
            ref={cam => {
              this.camera = cam;
            }}
            style={styles.preview}
          >
            <View style={styles.captureContainer}>
              <TouchableOpacity style={styles.capture} onPress={this.takePicture}>
                <Text>Take Photo</Text>
              </TouchableOpacity>
            </View>
          </RNCamera>}
      </View>
    );
  }
}

const styles = StyleSheet.create({
  container: {
    flex: 1,
    flexDirection: 'row',
  },
  preview: {
    flex: 1,
    justifyContent: 'flex-end',
    alignItems: 'center'
  },
  captureContainer: {
    flex: 0,
    alignItems: 'center',
    marginBottom: 20,
  },
  capture: {
    flex: 0,
    backgroundColor: '#fff',
    borderRadius: 5,
    color: '#000',
    padding: 10,
    margin: 40
  }
});