How do I upload an image taken with react-native-camera to Firebase storage? - react-native

I want to upload an image taken with react-native-camera to https://github.com/invertase/react-native-firebase storage in React Native, but the upload never happens.
The same flow worked when I used an image-picker library instead of the camera.
import React, { Component } from 'react';
import {Image, View, Text, StyleSheet, Dimensions,TouchableOpacity} from 'react-native'
import { RNCamera } from 'react-native-camera';
import {strings} from '../Lang/Strings';
import { Actions } from 'react-native-router-flux';
const { width, height } = Dimensions.get('window');
export default class ScanPage extends Component {
constructor(props) {
super(props);
// initial values below are assumptions; the originals were cut off in the post
this.state = {
selectedIndex: 0,
images: [],
shapes: [],
};
}
takePicture = async () => {
if (this.camera) {
const options = { quality: 0.5, base64: true };
const data = await this.camera.takePictureAsync(options);
Actions.ProfilePage({
imagePath: data.uri,
selectedIndex: this.state.selectedIndex,
shapes: this.state.shapes,
});
this.uploadPhoto(data); // uploadPhoto is never defined in the post -- see the sketch below
}
};
render() {
const {selectedIndex, images, shapes} = this.state;
return(
<View style={styles.container}>
<RNCamera
ref={ref => {
this.camera = ref;
}}
style={styles.preview}
type={RNCamera.Constants.Type.front}
permissionDialogTitle={'Permission to use camera'}
permissionDialogMessage={'We need your permission to use your camera phone'} />
<View style={{flex:1,justifyContent:'center' ,alignItems:'center'}}>
<View style={{marginTop:120}}>
<Image source={images[selectedIndex]} >
</Image>
</View>
</View>
<View style={styles.buttonSection}>
<TouchableOpacity onPress={this._TogglePrev}>
<View style={styles.buttons}>
<Text style={{textAlign:'center',color: 'white'}}>
{strings.back}
</Text>
</View>
</TouchableOpacity>
<View style={{alignItems:'center', justifyContent:'center',
height:height*0.04}}>
<Text style ={{color:'white',textAlign:'center'}}>{shapes[selectedIndex]} </Text>
</View>
<TouchableOpacity onPress={this._ToggleNext}>
<View style={styles.buttons}>
<Text style={{textAlign:'center', color: 'white'}}>
{strings.next}
</Text>
</View>
</TouchableOpacity>
</View>
<View style={{alignItems:'center', justifyContent:'center',
backgroundColor:'#D9E6FF',height:height*0.001,width:width*1}}>
<Text style ={{color:'white',textAlign:'center'}}> </Text>
</View>
<View style={{ flex: 0, flexDirection: 'row', justifyContent: 'center'}}>
<TouchableOpacity onPress={this.takePicture} style={styles.capture}>
<View style={{
backgroundColor: 'white',
borderRadius: (height*0.16)/2,
padding: 15,
alignSelf: 'center',
margin: 25,
height:height*0.085,
width:width*0.16,
justifyContent:'center',
alignItems:'center',
borderWidth:0.9,
borderColor:'#D9E6FF',}}></View>
</TouchableOpacity>
</View>
</View>
);
}
}
The upload to Firebase never happens. Versions:
react: 16.4.1
react-native: 0.56.0
react-native-camera: 1.12.0
react-native-firebase: 5.2.3
react-native-router-flux: 4.0.1
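For reference, a minimal sketch of the missing uploadPhoto, using the react-native-firebase v5 storage API (the storage path and filename are assumptions, pick your own):
import firebase from 'react-native-firebase'; // at the top of the file

// hypothetical implementation of the uploadPhoto(data) called from takePicture
uploadPhoto = async (data) => {
try {
// 'photos/profile.jpg' is an arbitrary example path in the storage bucket
const ref = firebase.storage().ref('photos/profile.jpg');
await ref.putFile(data.uri); // putFile takes a local file URI
console.log('upload finished');
} catch (error) {
console.log('upload failed', error);
}
};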

Can't believe I've figured it out >.<
Assuming you've set up your project correctly to include Firebase:
import firebase from 'react-native-firebase'; // default export of react-native-firebase v5
takePicture = async () => {
if (this.camera) {
// this code takes the picture
const options = { quality: 0.5, base64: true };
const data = await this.camera.takePictureAsync(options);
// open debug to see the uri of image
console.log(data.uri);
// send your data.uri off to firebase -- here, ML Kit image labeling as an example :D
const processed = await firebase.vision().imageLabelerProcessImage(data.uri, {
confidenceThreshold: 0.8,
});
//Look at your debugger again
console.log('Label: ', processed);
}
};
I hope this helps!

_publish = async () => {
const imageuri = this.state.imagePath;
// this is where + how you want your image to be stored
const refFile = firebase.storage().ref().child('profile_pic');
refFile.putFile(imageuri)
.catch(error => {
console.log(error);
// Alert.alert('Hey', error);
});
}
Hope it helps!
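If you also need the file's remote URL afterwards, with react-native-firebase v5 you can await the upload and then ask the same ref for a download URL (a sketch; error handling omitted):
const refFile = firebase.storage().ref().child('profile_pic');
await refFile.putFile(this.state.imagePath);
const url = await refFile.getDownloadURL(); // public download URL of the uploaded file
console.log(url);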

Related

React Native: Camera from "expo-camera" stop running when face is not ever detected

I am new to react-native and want to build a camera with filters. I'm stuck at the face-recognition step: I can draw a rectangle when a face is detected, but once the face leaves the frame the camera stops running, as if frozen on the last real-time capture.
Here is my code:
import { useState, useEffect, useRef } from 'react'
import { Camera } from 'expo-camera'
import * as MediaLibrary from 'expo-media-library'
import { Text, StyleSheet, View, TouchableOpacity } from 'react-native'
import Button from './Button'
import { Ionicons } from '@expo/vector-icons'
import * as FaceDetector from 'expo-face-detector'
export default function PCamera() {
const cameraRef = useRef(undefined)
const [faceDetected, setFaceDetected] = useState([])
const [lastImage, setImage] = useState(undefined)
const [hasUsePermission, setUsePermission] = useState(null) // null = still asking
const [type, switchToType] = useState(Camera.Constants.Type.front)
const takePicture = async () => {
if (cameraRef.current) {
try {
const options = {
quality: 1,
base64: true,
exif: false,
}
const data = await cameraRef.current.takePictureAsync(options)
setImage(data.uri)
console.log(data)
} catch (err) {
console.error(err)
}
}
}
const switchMode = () => {
switchToType(
type === Camera.Constants.Type.front
? Camera.Constants.Type.back
: Camera.Constants.Type.front
)
}
const handleFacesDetected = ({ faces }) => {
setFaceDetected(faces)
}
useEffect(() => {
;(async () => {
const { status } = await Camera.requestCameraPermissionsAsync()
setUsePermission(status === 'granted')
})()
}, [])
if (hasUsePermission === null) {
return <View />
}
if (hasUsePermission === false) {
return <Text>No access to camera</Text>
}
return (
<View style={styles.cameraContainer}>
<View style={styles.overlay}>
<Camera
ref={cameraRef}
style={styles.camera}
type={type}
onFacesDetected={handleFacesDetected}
faceDetectorSettings={{
mode: FaceDetector.FaceDetectorMode.fast,
detectLandmarks: FaceDetector.FaceDetectorLandmarks.all,
runClassifications:
FaceDetector.FaceDetectorClassifications.none,
minDetectionInterval: 100,
tracking: true,
}}
>
{faceDetected.length > 0 &&
faceDetected.map((face) => (
<View
key={face.faceID}
style={{
position: 'absolute',
borderWidth: 2,
borderColor: 'red',
left: face.bounds.origin.x,
top: face.bounds.origin.y,
width: face.bounds.size.width,
height: face.bounds.size.height,
}}
/>
))}
</Camera>
</View>
<View style={styles.optionsContainer}>
<View>
<TouchableOpacity onPress={switchMode}>
<Text>
<Ionicons
name="camera-reverse-outline"
size={24}
color="black"
/>
</Text>
</TouchableOpacity>
</View>
<Button
icon="camera"
title="Take Photo"
onPress={takePicture}
style={styles.button}
/>
<View>
<Text>...</Text>
</View>
</View>
</View>
)}
const styles = StyleSheet.create({
cameraContainer: {flex: 1,
},
overlay: {
flex: 6,
borderBottomStartRadius: 75,
borderBottomEndRadius: 75,
overflow: 'hidden',
},
camera: {
flex: 1,
},
optionsContainer: {
flex: 1,
flexDirection: 'row',
justifyContent: 'space-around',
alignItems: 'center',
},
})
N.B.: Don't worry about the Button; it's a custom component and works fine.

How do you get one specific value from a prop in Expo?

I've been trying to pass a prop up from CameraButton.js, which holds the UI of the image that was taken. When I read the prop in AddPost.js it gives me all the values, but when I try to get the single image value with console.log(props.route.params.image) I get the error "undefined is not an object". It works perfectly when I console.log(props.route.params) from the exported default function.
AddPost.JS
import { useNavigation } from "#react-navigation/core";
import React from 'react'
import {useState} from "react";
import { View, TextInput, Button } from 'react-native'
export default function AddPost(props) {
console.log(props);
const navigation = useNavigation();
const [caption, setCaption] = useState("")
const uploadImage = async () => {
const response = await fetch(uri) // bug: 'uri' is undefined here -- see the answer below
}
return (
<View style={{flex: 1}}>
<TextInput
placeholder="Whats on your mind Edgers navars"
onChangeText={(caption) => setCaption(caption)}
/>
<Button title = "Take A Photo" onPress={() => navigation.navigate("CameraButton")}
/>
<Button title = "Save" onPress={() => uploadImage()}
/>
</View>
)
}
CameraButton.Js
import { Camera, CameraType } from 'expo-camera';
import { useNavigation } from "#react-navigation/core";
import { useState } from 'react';
import { Button, StyleSheet, Text, TouchableOpacity, View, Image } from 'react-native';
import * as ImagePicker from 'expo-image-picker';
export default function App() {
const navigation = useNavigation();
const [type, setType] = useState(Camera.Constants.Type.back)
const [permission, requestPermission] = Camera.useCameraPermissions();
const [image, setImage] = useState(null);
const [camera, setCamera] = useState(null);
const takePicture = async () => {
if(camera){
const data = await camera.takePictureAsync(null);
setImage(data.uri);
}
}
if (!permission) {
// Camera permissions are still loading
return <View />;
}
if (!permission.granted) {
// Camera permissions are not granted yet
return (
<View style={styles.container}>
<Text style={{ textAlign: 'center' }}>
We need your permission to show the camera
</Text>
<Button onPress={requestPermission} title="grant permission" />
</View>
);
}
function toggleCameraType() {
setType((current) => (
current === Camera.Constants.Type.back ? Camera.Constants.Type.front : Camera.Constants.Type.back
));
}
// No permissions request is necessary for launching the image library
let openImagePickerAsync = async () => {
let permissionResult = await ImagePicker.requestMediaLibraryPermissionsAsync();
if (permissionResult.granted === false) {
alert("Permission to access camera roll is required!");
return;
}
let result = await ImagePicker.launchImageLibraryAsync({
mediaTypes: ImagePicker.MediaTypeOptions.All,
allowsEditing: true,
aspect: [4, 3],
quality: 1,
});
if (!result.cancelled) {
setImage(result.uri);
}
}
return (
<View style={styles.container}>
<Camera ref={ref => setCamera(ref)} style={styles.camera} type={type}>
<View style={styles.buttonContainer}>
<TouchableOpacity
style={styles.button}
onPress={toggleCameraType}>
<Text style={styles.text}>Flip Camera</Text>
</TouchableOpacity>
<TouchableOpacity
style={styles.button}
onPress={() => takePicture()}>
<Text style={styles.text}>Take Picture</Text>
</TouchableOpacity>
<TouchableOpacity
style={styles.button}
onPress={openImagePickerAsync}>
<Text style={styles.text}>Choose Picture</Text>
</TouchableOpacity>
<TouchableOpacity
style={styles.button}
onPress={() => navigation.navigate('AddPost', {image})}>
<Text style={styles.text}>Save Picture</Text>
</TouchableOpacity>
</View>
</Camera>
{image &&<Image source={{uri: image}}style={styles.camera}/>}
</View>
);
}
const styles = StyleSheet.create({
container: {
flex: 1,
justifyContent: 'center',
},
camera: {
flex: 1,
},
buttonContainer: {
flex: 1,
flexDirection: 'row',
backgroundColor: 'transparent',
margin: 64,
},
button: {
flex: 1,
alignSelf: 'flex-end',
alignItems: 'center',
},
text: {
fontSize: 24,
fontWeight: 'bold',
color: 'white',
},
});
You have to get the uri from the route object:
const response = await fetch(props.route.params?.image)
In your CameraButton.js, set up the navigation call like this:
<TouchableOpacity
style={styles.button}
onPress={() => navigation.navigate('AddPost', {
image: image
})}>
<Text style={styles.text}>Save Picture</Text>
</TouchableOpacity>
Be sure that the state image contains only the uri and not an object.
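Putting it together in AddPost.js, uploadImage can read the param before fetching (a sketch; the guard for a missing param is an addition):
const uploadImage = async () => {
const uri = props.route.params?.image;
if (!uri) return; // nothing has been captured yet
const response = await fetch(uri);
const blob = await response.blob(); // most upload APIs expect a blob
// ...hand the blob to your upload of choice
}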
Try props[0].route.params.image.

I am creating a React native web view

My project folder is talesrunner23 and splash.png is in assets, but when I run it, it says the file can't be found:
<Image source={require('/talesrunner23/assets/splash.png/')} />
The error points here. Please help me; I'm a beginner, so I've written out all the code below.
import React, { useRef, useState, useCallback, useEffect } from 'react';
import {
View,
Image, // needed by CenterLoader below
BackHandler,
Platform,
StyleSheet,
ActivityIndicator,
} from 'react-native';
import { WebView } from 'react-native-webview';
const DELAY_BEFORE_WEBVIEW = 10; // <--- multiplied by 30ms below, so ~300ms before the loader hides
export default function App() {
// states (declared before handleBack so its dependency array can read canGoBack)
const [canGoBack, setCanGoBack] = useState(false);
const [isLoading, setIsLoading] = useState(true);
// ref
const webView = useRef();
// callbacks
const handleBack = useCallback(() => {
if (canGoBack && webView.current) {
webView.current.goBack();
return true;
}
return false;
}, [canGoBack]);
// effects
useEffect(() => {
BackHandler.addEventListener('hardwareBackPress', handleBack);
return () => {
BackHandler.removeEventListener('hardwareBackPress', handleBack);
};
}, [handleBack]);
useEffect(() => {
setTimeout(() => {
setIsLoading(false);
}, 30 * DELAY_BEFORE_WEBVIEW);
}, []);
return (
<View style={styles.container}>
<WebView
ref={webView}
source={{ uri: 'https://www.talesrunnerbestguild.co.kr/' }}
style={styles.webView}
onLoadProgress={(event) => setCanGoBack(event.nativeEvent.canGoBack)}
/>
{isLoading && <CenterLoader />}
</View>
);
}
const CenterLoader = () => (
<View style={styles.loaderContainer}>
<Image source={require('/talesrunner23/assets/splash.png/')}
style={{height:100,width:100}}/>
</View>
);
const styles = StyleSheet.create({
container: { flex: 1 },
loaderContainer: {
flex: 1,
justifyContent: 'center',
alignItems: 'center',
position: 'absolute',
width: '100%',
height: '100%',
backgroundColor:'white' // <-- comment this to show webview while loading
},
webView:
Platform.OS === 'ios'
? { marginTop: 30, marginBottom: 40 }
: { marginTop: 30 },
});
Rather than require, you can import the image source at the top. Here is a quick example I made in a sandbox: https://codesandbox.io/s/image-example-c4irqo?file=/src/App.js
import Cat from "./cat.jpeg";
function App() {
return (
<View style={styles.app}>
<View style={styles.header}>
<Image
accessibilityLabel="Cat"
source={Cat}
resizeMode="contain"
style={styles.logo}
/>
<Text style={styles.title}>Image Example</Text>
</View>
</View>
);
}
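Alternatively, require also works as long as the path is relative to the file and has no trailing slash (assuming App.js sits next to the assets folder):
<Image
source={require('./assets/splash.png')}
style={{ height: 100, width: 100 }}
/>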

How to play recording through the speaker in react native expo av

I'm trying to make a voice memo component for my app using expo-av. I've figured out how to create the recording, but when playing it back it only comes through the earpiece speaker. Is there a way to play the recording back through the main phone speaker? I haven't tested on Android, but on iPhone the audio only plays through the earpiece. Thank you.
import React from 'react';
import { Button, StyleSheet, Text, View, TouchableOpacity } from 'react-native';
import { Audio } from 'expo-av';
import * as Sharing from 'expo-sharing';
import { MaterialCommunityIcons } from '@expo/vector-icons';
import AppText from './AppText';
import Screen from './Screen';
export default function AppVoice() {
const [recording, setRecording] = React.useState();
const [recordings, setRecordings] = React.useState([]);
const [message, setMessage] = React.useState("");
async function startRecording() {
try {
const permission = await Audio.requestPermissionsAsync();
if (permission.status === "granted") {
await Audio.setAudioModeAsync({
allowsRecordingIOS: true,
playsInSilentModeIOS: true,
});
const { recording } = await Audio.Recording.createAsync(
Audio.RECORDING_OPTIONS_PRESET_HIGH_QUALITY
);
setRecording(recording);
} else {
setMessage("Please grant permission to app to access microphone");
}
} catch (err) {
console.error('Failed to start recording', err);
}
}
async function stopRecording() {
setRecording(undefined);
await recording.stopAndUnloadAsync();
let updatedRecordings = [...recordings];
const { sound, status } = await recording.createNewLoadedSoundAsync();
updatedRecordings.push({
sound: sound,
duration: getDurationFormatted(status.durationMillis),
file: recording.getURI()
});
setRecordings(updatedRecordings);
}
function getDurationFormatted(millis) {
const minutes = millis / 1000 / 60;
const minutesDisplay = Math.floor(minutes);
const seconds = Math.round((minutes - minutesDisplay) * 60);
const secondsDisplay = seconds < 10 ? `0${seconds}` : seconds;
return `${minutesDisplay}:${secondsDisplay}`;
}
function getRecordingLines() {
return recordings.map((recordingLine, index) => {
return (
<View key={index} style={styles.row}>
<Text style={styles.fill}>Recording {index + 1} - {recordingLine.duration}</Text>
<Button style={styles.button} onPress={() => recordingLine.sound.replayAsync()} title="Play"></Button>
<Button style={styles.button} onPress={() => Sharing.shareAsync(recordingLine.file)} title="Share"></Button>
</View>
);
});
}
return (
<Screen style={{flex:1, backgroundColor:'black'}}>
<View style={styles.container}>
<View style={styles.recorder}>
<TouchableOpacity style={{position:'absolute', left:10}}>
<MaterialCommunityIcons name="microphone" size={24} color="black" />
</TouchableOpacity>
<AppText style={{color:'black', textAlign:'center'}}>Voice Memo</AppText>
<TouchableOpacity onPress={recording ? stopRecording : startRecording} style={{position:'absolute', right:10}}>
{recording ? <MaterialCommunityIcons name="pause" size={28} color="black" /> : <MaterialCommunityIcons name="record-circle-outline" size={28} color="red" />}
</TouchableOpacity>
</View>
<View style={{flex:1}}>
{getRecordingLines()}
</View>
</View>
</Screen>
);
}
const styles = StyleSheet.create({
recorder: {
width:300,
backgroundColor:'white',
height:50,
borderRadius: 100,
justifyContent:'center'
},
container:{
flex:1,
},
row: {
flexDirection: 'row',
alignItems: 'center',
justifyContent: 'center',
},
fill: {
flex: 1,
margin: 16,
color:'white'
},
button: {
margin: 16
}
});
I don't know how I figured it out, but you need allowsRecordingIOS set to true while recording and you have to set it back to false when stopping the recording; iOS then routes playback to the main speaker. Here's the updated stopRecording (the rest of the component is unchanged):
async function stopRecording() {
setRecording(undefined);
await recording.stopAndUnloadAsync();
// switch the audio session out of recording mode so playback
// is routed to the main speaker instead of the earpiece
await Audio.setAudioModeAsync({
allowsRecordingIOS: false,
playsInSilentModeIOS: true,
});
let updatedRecordings = [...recordings];
const { sound, status } = await recording.createNewLoadedSoundAsync();
updatedRecordings.push({
sound: sound,
duration: getDurationFormatted(status.durationMillis),
file: recording.getURI()
});
setRecordings(updatedRecordings);
}
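On Android the earpiece/speaker routing is a separate switch. If playback still comes out of the earpiece there, adding expo-av's playThroughEarpieceAndroid flag to the same call should help (a sketch; I haven't verified this on every SDK version):
await Audio.setAudioModeAsync({
allowsRecordingIOS: false,
playsInSilentModeIOS: true,
playThroughEarpieceAndroid: false, // route playback to the main speaker on Android
});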

Possible Unhandled Promise Rejection (id: 0): TypeError: undefined is not an object (evaluating '_this.props.navigation.navigate')

I'm trying to display an image I captured with expo-camera. From the camera component I navigate to a new screen that should display the image, but after taking the picture it won't navigate to the new page.
I tried importing the file and then navigating to it, but it still doesn't work and gives me the warning instead.
This is the code where I try to navigate to the new screen.
export default class CameraExample extends React.Component {
state = {
hasCameraPermission: null,
type: Camera.Constants.Type.back,
};
async componentDidMount() {
const { status } = await Permissions.askAsync(Permissions.CAMERA);
this.setState({ hasCameraPermission: status === 'granted' });
}
snap = async() => {
if(this.camera) {
console.log('Taking photo');
const options = {quality: 1, base64: true, fixOrientation: true, exif: true};
const photo = await this.camera.takePictureAsync(options);
this.props.navigation.navigate("Show", {photouri: photo.uri})
}
}
render() {
const { hasCameraPermission } = this.state;
if (hasCameraPermission === null) {
return <View />;
} else if (hasCameraPermission === false) {
return <Text>No access to camera</Text>;
} else {
return (
<View style={{ flex: 1 }}>
<Camera style={{ flex: 1 }} type={this.state.type}
ref = {ref => {
this.camera = ref;
}}
>
<View
style={{
flex: 1,
backgroundColor: 'transparent',
flexDirection: 'row',
}}>
<TouchableOpacity onPress={this.snap.bind(this)}>
<Ionicons
name = "md-camera"
color = "white"
size = {30}
/>
</TouchableOpacity>
<TouchableOpacity
style={{
flex: 0.1,
alignSelf: 'flex-end',
alignItems: 'center',
}}
onPress={() => {
this.setState({
type:
this.state.type === Camera.Constants.Type.back
? Camera.Constants.Type.front
: Camera.Constants.Type.back,
});
}}>
<Ionicons
name = "md-reverse-camera"
color = "white"
size = {30}
/>
</TouchableOpacity>
</View>
</Camera>
</View>
);
}
}
}
And this is the code where I try to display the image.
export default class ShowImages extends React.Component{
render(){
console.log('OK')
const { navigation } = this.props;
const paramm = navigation.getParam('photouri');
return(
<Content>
<View>
<Text>
paramm: {JSON.stringify(paramm)}
</Text>
<Image style={{height: 700, width: 850, alignSelf: "center"}}
source={{uri: this.props.navigation.state.paramm.photouri}}
resizeMode="contain"/>
</View>
</Content>
)
}
}
I expect it to navigate to the new page and display the captured image, but it gives me the warning instead. I can't find what is wrong with my code. Can anyone suggest what I should do? Thank you.
Change this:
<TouchableOpacity onPress={this.snap.bind(this)}> => <TouchableOpacity onPress={this.snap}>
(snap is already an arrow-function class property, so it doesn't need bind.)
Then put the param into state and read it from there:
export default class ShowImages extends React.Component{
constructor(props) {
super(props);
this.state = {
paramm: this.props.navigation.state.params.photouri
};
}
...
<Image style={{height: 700, width: 850, alignSelf: "center"}}
source={{uri: this.state.paramm }}
resizeMode="contain"/>
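Note that getParam and navigation.state.params are the react-navigation v4 style. If you are on v5 or later, params live on the route prop instead (a sketch):
constructor(props) {
super(props);
this.state = {
paramm: this.props.route.params?.photouri
};
}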