How do you get one specific value from a prop in Expo (react-native)?

I've been trying to pass a prop up from my CameraButton.js file that carries the image that was taken, but when I use the prop in AddPost.js I can log all of the values at once; when I try to get the single image value with console.log(props.route.params.image), it gives the error "undefined is not an object".
But it works perfectly when I console.log(props.route.params) inside the exported function, which shows the whole params object.
AddPost.js
import { useNavigation } from "@react-navigation/core";
import React from 'react';
import { useState } from "react";
import { View, TextInput, Button } from 'react-native';

export default function AddPost(props) {
    console.log(props);
    const navigation = useNavigation();
    const [caption, setCaption] = useState("");

    const uploadImage = async () => {
        const response = await fetch(uri);
    };

    return (
        <View style={{ flex: 1 }}>
            <TextInput
                placeholder="Whats on your mind Edgers navars"
                onChangeText={(caption) => setCaption(caption)}
            />
            <Button title="Take A Photo" onPress={() => navigation.navigate("CameraButton")} />
            <Button title="Save" onPress={() => uploadImage()} />
        </View>
    );
}
CameraButton.js
import { Camera, CameraType } from 'expo-camera';
import { useNavigation } from "@react-navigation/core";
import { useState } from 'react';
import { Button, StyleSheet, Text, TouchableOpacity, View, Image } from 'react-native';
import * as ImagePicker from 'expo-image-picker';

export default function App() {
    const navigation = useNavigation();
    const [type, setType] = useState(Camera.Constants.Type.back);
    const [permission, requestPermission] = Camera.useCameraPermissions();
    const [image, setImage] = useState(null);
    const [camera, setCamera] = useState(null);

    const takePicture = async () => {
        if (camera) {
            const data = await camera.takePictureAsync(null);
            setImage(data.uri);
        }
    };

    if (!permission) {
        // Camera permissions are still loading
        return <View />;
    }

    if (!permission.granted) {
        // Camera permissions are not granted yet
        return (
            <View style={styles.container}>
                <Text style={{ textAlign: 'center' }}>
                    We need your permission to show the camera
                </Text>
                <Button onPress={requestPermission} title="grant permission" />
            </View>
        );
    }

    function toggleCameraType() {
        setType((current) => (
            current === Camera.Constants.Type.back ? Camera.Constants.Type.front : Camera.Constants.Type.back
        ));
    }

    // No permissions request is necessary for launching the image library
    let openImagePickerAsync = async () => {
        let permissionResult = await ImagePicker.requestMediaLibraryPermissionsAsync();
        if (permissionResult.granted === false) {
            alert("Permission to access camera roll is required!");
            return;
        }
        let result = await ImagePicker.launchImageLibraryAsync({
            mediaTypes: ImagePicker.MediaTypeOptions.All,
            allowsEditing: true,
            aspect: [4, 3],
            quality: 1,
        });
        if (!result.cancelled) {
            setImage(result.uri);
        }
    };

    return (
        <View style={styles.container}>
            <Camera ref={ref => setCamera(ref)} style={styles.camera} type={type}>
                <View style={styles.buttonContainer}>
                    <TouchableOpacity
                        style={styles.button}
                        onPress={toggleCameraType}>
                        <Text style={styles.text}>Flip Camera</Text>
                    </TouchableOpacity>
                    <TouchableOpacity
                        style={styles.button}
                        onPress={() => takePicture()}>
                        <Text style={styles.text}>Take Picture</Text>
                    </TouchableOpacity>
                    <TouchableOpacity
                        style={styles.button}
                        onPress={openImagePickerAsync}>
                        <Text style={styles.text}>Choose Picture</Text>
                    </TouchableOpacity>
                    <TouchableOpacity
                        style={styles.button}
                        onPress={() => navigation.navigate('AddPost', { image })}>
                        <Text style={styles.text}>Save Picture</Text>
                    </TouchableOpacity>
                </View>
            </Camera>
            {image && <Image source={{ uri: image }} style={styles.camera} />}
        </View>
    );
}

const styles = StyleSheet.create({
    container: {
        flex: 1,
        justifyContent: 'center',
    },
    camera: {
        flex: 1,
    },
    buttonContainer: {
        flex: 1,
        flexDirection: 'row',
        backgroundColor: 'transparent',
        margin: 64,
    },
    button: {
        flex: 1,
        alignSelf: 'flex-end',
        alignItems: 'center',
    },
    text: {
        fontSize: 24,
        fontWeight: 'bold',
        color: 'white',
    },
});

You have to get the uri from the route object.
const response = await fetch(props.route.params?.image)

In your CameraButton.js file, set up the navigation like this:

<TouchableOpacity
    style={styles.button}
    onPress={() => navigation.navigate('AddPost', {
        image: image
    })}>
    <Text style={styles.text}>Save Picture</Text>
</TouchableOpacity>

Be sure that the image state contains only the uri and not an object.
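For reference, a minimal sketch (not the asker's exact code) of how AddPost.js could read that param safely, assuming the param keeps the name image used in the navigate call above:

export default function AddPost(props) {
    // image is the uri string passed via navigation.navigate('AddPost', { image })
    const imageUri = props.route.params?.image;

    const uploadImage = async () => {
        if (!imageUri) return;                  // nothing has been passed yet
        const response = await fetch(imageUri); // fetch the local file
        const blob = await response.blob();     // blob ready for upload
        // ...send the blob wherever it needs to go...
    };
    // ...
}

The optional chaining matters because route.params is undefined until the screen has actually been navigated to with params, which is exactly when console.log(props.route.params.image) throws "undefined is not an object".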

Try props[0].route.params.image.
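(Note: props here is a plain object, not an array, so props[0] will be undefined; the route.params approach in the answer above is the reliable one.)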

Related

React Native: Camera from "expo-camera" stops running when a face is no longer detected

I am new to using react-native and want to create a camera with a filter. I'm blocked at the face-recognition step: I can successfully draw a rectangle when a face is detected, but once the face goes out of detection, the camera stops running, as if frozen on the last real-time capture.
Here is my code:
import { useState, useEffect, useRef } from 'react'
import { Camera } from 'expo-camera'
import * as MediaLibrary from 'expo-media-library'
import { Text, StyleSheet, View, TouchableOpacity } from 'react-native'
import Button from './Button'
import { Ionicons } from '@expo/vector-icons'
import * as FaceDetector from 'expo-face-detector'

export default function PCamera() {
    const cameraRef = useRef(undefined)
    const [faceDetected, setFaceDetected] = useState([])
    const [lastImage, setImage] = useState(undefined)
    const [hasUsePermssion, setUsePermission] = useState(false)
    const [type, switchToType] = useState(Camera.Constants.Type.front)

    const takePicture = async () => {
        if (cameraRef) {
            try {
                const options = {
                    quality: 1,
                    base64: true,
                    exif: false,
                }
                const data = await cameraRef.current.takePictureAsync(options)
                setImage(data.uri)
                console.log(data)
            } catch (err) {
                console.error(err)
            }
        }
    }

    const swithMode = () => {
        switchToType(
            type === Camera.Constants.Type.front
                ? Camera.Constants.Type.back
                : Camera.Constants.Type.front
        )
    }

    const handleFacesDetected = ({ faces }) => {
        setFaceDetected(faces)
    }

    useEffect(() => {
        ;(async () => {
            const { status } = await Camera.requestCameraPermissionsAsync()
            if (status === 'granted') {
                setUsePermission(true)
            }
        })()
    }, [])

    if (hasUsePermssion === null) {
        return <View />
    }
    if (hasUsePermssion === false) {
        return <Text>No access to camera</Text>
    }

    return (
        <View style={styles.cameraContainer}>
            <View style={styles.overlay}>
                <Camera
                    ref={cameraRef}
                    style={styles.camera}
                    type={type}
                    onFacesDetected={handleFacesDetected}
                    faceDetectorSettings={{
                        mode: FaceDetector.FaceDetectorMode.fast,
                        detectLandmarks: FaceDetector.FaceDetectorLandmarks.all,
                        runClassifications: FaceDetector.FaceDetectorClassifications.none,
                        minDetectionInterval: 100,
                        tracking: true,
                    }}
                >
                    {faceDetected.length > 0 &&
                        faceDetected.map((face) => (
                            <View
                                key={face.faceID}
                                style={{
                                    position: 'absolute',
                                    borderWidth: 2,
                                    borderColor: 'red',
                                    left: face.bounds.origin.x,
                                    top: face.bounds.origin.y,
                                    width: face.bounds.size.width,
                                    height: face.bounds.size.height,
                                }}
                            />
                        ))}
                </Camera>
            </View>
            <View style={styles.optionsContainer}>
                <View>
                    <TouchableOpacity onPress={swithMode}>
                        <Text>
                            <Ionicons
                                name="camera-reverse-outline"
                                size={24}
                                color="black"
                            />
                        </Text>
                    </TouchableOpacity>
                </View>
                <Button
                    icon="camera"
                    title="Take Photo"
                    onPress={takePicture}
                    style={styles.button}
                />
                <View>
                    <Text>...</Text>
                </View>
            </View>
        </View>
    )
}

const styles = StyleSheet.create({
    cameraContainer: {
        flex: 1,
    },
    overlay: {
        flex: 6,
        borderBottomStartRadius: 75,
        borderBottomEndRadius: 75,
        overflow: 'hidden',
    },
    camera: {
        flex: 1,
    },
    optionsContainer: {
        flex: 1,
        flexDirection: 'row',
        justifyContent: 'space-around',
        alignItems: 'center',
    },
})
N.B.: Don't worry about the Button; it's a custom component and works fine.

How to put the bottom sheet to the front of the screen?

In React Native, the bottom sheet is displayed overlaid on the fragment. Is there a way to bring the bottom sheet to the front of the screen? The bottom sheet looks opaque, as in the picture, so it cannot be touched. Please help.
The code below is a shortened version.
import React, { FC, Component, useState, useEffect, Fragment, useCallback, useMemo, useRef } from "react"
import { FlatList, ViewStyle, StyleSheet, View, Platform, TextInput, TouchableOpacity } from "react-native"
import {
    BottomSheetModal,
    BottomSheetModalProvider,
    BottomSheetBackdrop,
} from '@gorhom/bottom-sheet';

const ROOT: ViewStyle = {
    backgroundColor: DefaultTheme.colors.background,
    flex: 1,
}

export const ChecklookupScreen: FC<StackScreenProps<NavigatorParamList, "checklookup">> = observer(function ChecklookupScreen() {
    const bottomSheetModalRef = useRef<BottomSheetModal>(null);

    // variables
    const snapPoints = useMemo(() => ['25%', '50%'], []);

    // callbacks
    const handlePresentModalPress = useCallback((index: string) => {
        LOG.info('handlePresentModalPress', index);
        bottomSheetModalRef.current?.present();
    }, []);
    const handleSheetChanges = useCallback((index: number) => {
        console.log('handleSheetChanges', index);
    }, []);

    const renderItem = ({ item, index }) => (
        <TouchableOpacity
            key={index + item.inspNo + item.spvsNo}
            //style={listContainer}
            onPress={throttle(() => {
                onClickItem(item.inspNo, item.spvsNo);
            })}
        >
            <View>
                <Fragment>
                    {/* ...row content shortened in the post... */}
                    <Button icon="magnify-expand"
                        mode="elevated"
                        style={styles.detailButton}
                        onPress={throttle(() => {
                            onClickItem(item.inspNo, item.spvsNo);
                        })}
                        // onPress={() => navigation.navigate("checkdetail")}
                    >
                    </Button>
                </Fragment>
            </View>
        </TouchableOpacity>
    );

    const fetchChecklookups = async (offset: number) => {
        LOG.debug('fetchChecklookups:' + offset);
        setRefreshing(true);
        await checklookupStore.getChecklookups(offset)
        setRefreshing(false);
    };

    const onEndReached = () => {
        if (checklookupStore?.checklookupsTotalRecord <= checklookups?.length) {
            LOG.debug('onEndReached555555555');
        } else {
            setPage(page + 1)
            fetchChecklookups(page + 1);
        }
    };

    const [searchQuery, setSearchQuery] = React.useState('');
    const onChangeSearch = query => setSearchQuery(query);

    return (
        <Screen preset="fixed" style={{ backgroundColor: colors.background, flex: 1, padding: 10 }}>
            <View style={{ flex: 1 }}>
                <View style={{ flex: 1 }}>
                    {/* placeholder text: "Please enter search criteria" */}
                    <Searchbar
                        placeholder="조회조건을 입력해주세요"
                        onChangeText={onChangeSearch}
                        value={searchQuery}
                        onPressIn={() => handlePresentModalPress('touch on')}
                    />
                    <BottomSheetModalProvider>
                        <BottomSheetModal
                            backgroundStyle={{ backgroundColor: "gray" }}
                            style={styles.bottomSheet}
                            ref={bottomSheetModalRef}
                            index={1}
                            snapPoints={snapPoints}
                            onChange={handleSheetChanges}
                        >
                            <View style={{ marginTop: 10, marginLeft: 50, marginRight: 50, flexDirection: "row" }}>
                                <View style={{ flex: 1 }}>
                                    <Button
                                        mode="outlined"
                                    >소속을 입력하세요{/* "Please enter your affiliation" */}
                                    </Button>
                                </View>
                            </View>
                        </BottomSheetModal>
                    </BottomSheetModalProvider>
                </View>
            </View>
        </Screen>
    )
})
You can try a portal: wrap your bottom sheet in a Portal from another package so it renders above the rest of the screen.
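A minimal sketch of that idea, assuming the @gorhom/portal package (Portal and PortalProvider are that library's exports; everything else is illustrative):

import { Portal, PortalProvider } from '@gorhom/portal';

// 1) Mount a PortalProvider near the root of the app, above the navigator,
//    so portalled content renders on top of every screen:
// <PortalProvider>
//     <AppNavigator />
// </PortalProvider>

// 2) In the screen, wrap the sheet in a Portal so it escapes the
//    screen's own stacking context:
<Portal>
    <BottomSheetModalProvider>
        <BottomSheetModal
            ref={bottomSheetModalRef}
            index={1}
            snapPoints={snapPoints}
            onChange={handleSheetChanges}
        >
            {/* sheet content */}
        </BottomSheetModal>
    </BottomSheetModalProvider>
</Portal>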

How to play a recording through the speaker in react native expo-av

I'm trying to make a voice-memo component for my app using expo-av. I've figured out how to create the recording, but when playing it back, it only plays through the earpiece speaker. Is there a way I can play the recording back through the main phone speakers? I haven't tried testing this on Android, but on iPhone the audio only plays back through the earpiece. Thank you.
import React from 'react';
import { Button, StyleSheet, Text, View, TouchableOpacity } from 'react-native';
import { Audio } from 'expo-av';
import * as Sharing from 'expo-sharing';
import { MaterialCommunityIcons } from '@expo/vector-icons';
import AppText from './AppText';
import Screen from './Screen';

export default function AppVoice() {
    const [recording, setRecording] = React.useState();
    const [recordings, setRecordings] = React.useState([]);
    const [message, setMessage] = React.useState("");

    async function startRecording() {
        try {
            const permission = await Audio.requestPermissionsAsync();
            if (permission.status === "granted") {
                await Audio.setAudioModeAsync({
                    allowsRecordingIOS: true,
                    playsInSilentModeIOS: true,
                });
                const { recording } = await Audio.Recording.createAsync(
                    Audio.RECORDING_OPTIONS_PRESET_HIGH_QUALITY
                );
                setRecording(recording);
            } else {
                setMessage("Please grant permission to app to access microphone");
            }
        } catch (err) {
            console.error('Failed to start recording', err);
        }
    }

    async function stopRecording() {
        setRecording(undefined);
        await recording.stopAndUnloadAsync();
        let updatedRecordings = [...recordings];
        const { sound, status } = await recording.createNewLoadedSoundAsync();
        updatedRecordings.push({
            sound: sound,
            duration: getDurationFormatted(status.durationMillis),
            file: recording.getURI()
        });
        setRecordings(updatedRecordings);
    }

    function getDurationFormatted(millis) {
        const minutes = millis / 1000 / 60;
        const minutesDisplay = Math.floor(minutes);
        const seconds = Math.round((minutes - minutesDisplay) * 60);
        const secondsDisplay = seconds < 10 ? `0${seconds}` : seconds;
        return `${minutesDisplay}:${secondsDisplay}`;
    }

    function getRecordingLines() {
        return recordings.map((recordingLine, index) => {
            return (
                <View key={index} style={styles.row}>
                    <Text style={styles.fill}>Recording {index + 1} - {recordingLine.duration}</Text>
                    <Button style={styles.button} onPress={() => recordingLine.sound.replayAsync()} title="Play"></Button>
                    <Button style={styles.button} onPress={() => Sharing.shareAsync(recordingLine.file)} title="Share"></Button>
                </View>
            );
        });
    }

    return (
        <Screen style={{ flex: 1, backgroundColor: 'black' }}>
            <View style={styles.container}>
                <View style={styles.recorder}>
                    <TouchableOpacity style={{ position: 'absolute', left: 10 }}>
                        <MaterialCommunityIcons name="microphone" size={24} color="black" />
                    </TouchableOpacity>
                    <AppText style={{ color: 'black', textAlign: 'center' }}>Voice Memo</AppText>
                    <TouchableOpacity onPress={recording ? stopRecording : startRecording} style={{ position: 'absolute', right: 10 }}>
                        {recording ? <MaterialCommunityIcons name="pause" size={28} color="black" /> : <MaterialCommunityIcons name="record-circle-outline" size={28} color="red" />}
                    </TouchableOpacity>
                </View>
                <View style={{ flex: 1 }}>
                    {getRecordingLines()}
                </View>
            </View>
        </Screen>
    );
}

const styles = StyleSheet.create({
    recorder: {
        width: 300,
        backgroundColor: 'white',
        height: 50,
        borderRadius: 100,
        justifyContent: 'center'
    },
    container: {
        flex: 1,
    },
    row: {
        flexDirection: 'row',
        alignItems: 'center',
        justifyContent: 'center',
    },
    fill: {
        flex: 1,
        margin: 16,
        color: 'white'
    },
    button: {
        margin: 16
    }
});
I don't know how I figured it out, but what you need to do is set allowsRecordingIOS to true while recording, and set it back to false when stopping the recording. Here's the updated code:
import React from 'react';
import { Button, StyleSheet, Text, View, TouchableOpacity } from 'react-native';
import { Audio } from 'expo-av';
import * as Sharing from 'expo-sharing';
import { MaterialCommunityIcons } from '@expo/vector-icons';
import AppText from './AppText';
import Screen from './Screen';

export default function AppVoice() {
    const [recording, setRecording] = React.useState();
    const [recordings, setRecordings] = React.useState([]);
    const [message, setMessage] = React.useState("");

    async function startRecording() {
        try {
            const permission = await Audio.requestPermissionsAsync();
            if (permission.status === "granted") {
                await Audio.setAudioModeAsync({
                    allowsRecordingIOS: true,
                    playsInSilentModeIOS: true,
                });
                const { recording } = await Audio.Recording.createAsync(
                    Audio.RECORDING_OPTIONS_PRESET_HIGH_QUALITY
                );
                setRecording(recording);
            } else {
                setMessage("Please grant permission to app to access microphone");
            }
        } catch (err) {
            console.error('Failed to start recording', err);
        }
    }

    async function stopRecording() {
        setRecording(undefined);
        await recording.stopAndUnloadAsync();
        // key change: leave record mode so playback routes to the main speaker
        await Audio.setAudioModeAsync({
            allowsRecordingIOS: false,
            playsInSilentModeIOS: true,
        });
        let updatedRecordings = [...recordings];
        const { sound, status } = await recording.createNewLoadedSoundAsync();
        updatedRecordings.push({
            sound: sound,
            duration: getDurationFormatted(status.durationMillis),
            file: recording.getURI()
        });
        setRecordings(updatedRecordings);
    }

    function getDurationFormatted(millis) {
        const minutes = millis / 1000 / 60;
        const minutesDisplay = Math.floor(minutes);
        const seconds = Math.round((minutes - minutesDisplay) * 60);
        const secondsDisplay = seconds < 10 ? `0${seconds}` : seconds;
        return `${minutesDisplay}:${secondsDisplay}`;
    }

    function getRecordingLines() {
        return recordings.map((recordingLine, index) => {
            return (
                <View key={index} style={styles.row}>
                    <Text style={styles.fill}>Recording {index + 1} - {recordingLine.duration}</Text>
                    <Button style={styles.button} onPress={() => recordingLine.sound.replayAsync()} title="Play"></Button>
                    <Button style={styles.button} onPress={() => Sharing.shareAsync(recordingLine.file)} title="Share"></Button>
                </View>
            );
        });
    }

    return (
        <Screen style={{ flex: 1, backgroundColor: 'black' }}>
            <View style={styles.container}>
                <View style={styles.recorder}>
                    <TouchableOpacity style={{ position: 'absolute', left: 10 }}>
                        <MaterialCommunityIcons name="microphone" size={24} color="black" />
                    </TouchableOpacity>
                    <AppText style={{ color: 'black', textAlign: 'center' }}>Voice Memo</AppText>
                    <TouchableOpacity onPress={recording ? stopRecording : startRecording} style={{ position: 'absolute', right: 10 }}>
                        {recording ? <MaterialCommunityIcons name="pause" size={28} color="black" /> : <MaterialCommunityIcons name="record-circle-outline" size={28} color="red" />}
                    </TouchableOpacity>
                </View>
                <View style={{ flex: 1 }}>
                    {getRecordingLines()}
                </View>
            </View>
        </Screen>
    );
}

const styles = StyleSheet.create({
    recorder: {
        width: 300,
        backgroundColor: 'white',
        height: 50,
        borderRadius: 100,
        justifyContent: 'center'
    },
    container: {
        flex: 1,
    },
    row: {
        flexDirection: 'row',
        alignItems: 'center',
        justifyContent: 'center',
    },
    fill: {
        flex: 1,
        margin: 16,
        color: 'white'
    },
    button: {
        margin: 16
    }
});
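For what it's worth, the likely reason this works (an assumption based on how the iOS audio session behaves, not something the expo-av docs spell out for this case): while allowsRecordingIOS is true, the session stays in a record-capable mode that routes playback to the earpiece, and switching it back to false after stopAndUnloadAsync lets playback route to the main speaker again.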

How to capture an image using expo-camera in react native

I am new to react-native. I have created an app, but now I want to capture an image from it. I got the code below from the Expo website, but it only opens the camera; I want to capture an image through that camera, so please help if possible.
Here is my code:
import React, { useState, useEffect } from 'react';
import { StyleSheet, Text, View, TouchableOpacity } from 'react-native';
import { Camera } from 'expo-camera';
import { Ionicons } from '@expo/vector-icons';

export default function App({ navigation }) {
    const [hasPermission, setHasPermission] = useState(null);
    const [type, setType] = useState(Camera.Constants.Type.back);

    useEffect(() => {
        (async () => {
            const { status } = await Camera.requestPermissionsAsync();
            setHasPermission(status === 'granted');
        })();
    }, []);

    if (hasPermission === null) {
        return <View />;
    }
    if (hasPermission === false) {
        return <Text>No access to camera</Text>;
    }

    return (
        <View style={styles.container}>
            <View style={styles.header}>
                <Ionicons style={{ paddingLeft: 20 }} name="arrow-back" size={40}
                    color="black" onPress={() => navigation.navigate("OtherInfo")} />
                <Text style={{ fontSize: 20, paddingLeft: 70, paddingTop: 10 }}>Get Image</Text>
            </View>
            <Camera style={styles.camera} type={type}>
                <View style={styles.buttonContainer}>
                    <TouchableOpacity
                        style={styles.button}
                        onPress={() => {
                            setType(
                                type === Camera.Constants.Type.back
                                    ? Camera.Constants.Type.front
                                    : Camera.Constants.Type.back
                            );
                        }}>
                        <Text style={styles.text}> Flip </Text>
                    </TouchableOpacity>
                </View>
            </Camera>
        </View>
    );
}

const styles = StyleSheet.create({
    camera: {
        height: 500
    },
    header: {
        flexDirection: 'row'
    }
});
Here is a working example of a small app that takes a picture with the camera or picks one from the gallery, and shows it after it has been taken or selected.
Working App: Expo Snack
import React, { useState, useEffect } from 'react';
import { StyleSheet, Text, View, Button, Image } from 'react-native';
import { Camera } from 'expo-camera';
import * as ImagePicker from 'expo-image-picker';

export default function Add({ navigation }) {
    const [cameraPermission, setCameraPermission] = useState(null);
    const [galleryPermission, setGalleryPermission] = useState(null);
    const [camera, setCamera] = useState(null);
    const [imageUri, setImageUri] = useState(null);
    const [type, setType] = useState(Camera.Constants.Type.back);

    const permisionFunction = async () => {
        // here is how you can get the camera permission
        const cameraPermission = await Camera.requestCameraPermissionsAsync();
        setCameraPermission(cameraPermission.status === 'granted');

        const imagePermission = await ImagePicker.getMediaLibraryPermissionsAsync();
        console.log(imagePermission.status);
        setGalleryPermission(imagePermission.status === 'granted');

        if (
            imagePermission.status !== 'granted' &&
            cameraPermission.status !== 'granted'
        ) {
            alert('Permission for media access needed.');
        }
    };

    useEffect(() => {
        permisionFunction();
    }, []);

    const takePicture = async () => {
        if (camera) {
            const data = await camera.takePictureAsync(null);
            console.log(data.uri);
            setImageUri(data.uri);
        }
    };

    const pickImage = async () => {
        let result = await ImagePicker.launchImageLibraryAsync({
            mediaTypes: ImagePicker.MediaTypeOptions.All,
            allowsEditing: true,
            aspect: [1, 1],
            quality: 1,
            presentationStyle: 0
        });
        console.log(result);
        if (!result.canceled) {
            setImageUri(result.assets[0].uri);
        }
    };

    return (
        <View style={styles.container}>
            <View style={styles.cameraContainer}>
                <Camera
                    ref={(ref) => setCamera(ref)}
                    style={styles.fixedRatio}
                    type={type}
                    ratio={'1:1'}
                />
            </View>
            <Button title={'Take Picture'} onPress={takePicture} />
            <Button title={'Gallery'} onPress={pickImage} />
            {imageUri && <Image source={{ uri: imageUri }} style={{ flex: 1 }} />}
        </View>
    );
}

const styles = StyleSheet.create({
    container: {
        flex: 1,
    },
    cameraContainer: {
        flex: 1,
        flexDirection: 'row',
    },
    fixedRatio: {
        flex: 1,
        aspectRatio: 1,
    },
    button: {
        flex: 0.1,
        padding: 10,
        alignSelf: 'flex-end',
        alignItems: 'center',
    },
});
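Note the pattern used for capturing: the Camera component itself has no capture button. The code keeps a ref to it (ref={(ref) => setCamera(ref)}) and calls takePictureAsync on that ref, which resolves with an object whose uri points at the captured photo; displaying it is then just ordinary state plus an Image.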

How do I upload an image taken with react-native-camera to Firebase storage?

I want to upload an image taken with react-native-camera to react-native-firebase storage (https://github.com/invertase/react-native-firebase) in RN, but I can't upload the image. I tried the image-picker library and it did work.
import React, { Component } from 'react';
import { Image, View, Text, StyleSheet, Dimensions, TouchableOpacity } from 'react-native';
import { RNCamera } from 'react-native-camera';
import { strings } from '../Lang/Strings';
import { Actions } from 'react-native-router-flux';

const { width, height } = Dimensions.get('window');

export default class ScanPage extends Component {
    constructor(props) {
        super(props);
        this.state = {
            // selectedIndex, images and shapes are initialised here (elided in the post)
        };
    }

    takePicture = async () => {
        if (this.camera) {
            const options = { quality: 0.5, base64: true };
            const data = await this.camera.takePictureAsync(options);
            Actions.ProfilePage({
                imagePath: data.uri,
                selectedIndex: this.state.selectedIndex,
                shapes: this.state.shapes,
            });
            this.uploadPhoto(data);
        }
    };

    render() {
        const { selectedIndex, images, shapes } = this.state;
        return (
            <View style={styles.container}>
                <RNCamera
                    ref={ref => {
                        this.camera = ref;
                    }}
                    style={styles.preview}
                    type={RNCamera.Constants.Type.front}
                    permissionDialogTitle={'Permission to use camera'}
                    permissionDialogMessage={'We need your permission to use your camera phone'} />
                <View style={{ flex: 1, justifyContent: 'center', alignItems: 'center' }}>
                    <View style={{ marginTop: 120 }}>
                        <Image source={images[selectedIndex]} />
                    </View>
                </View>
                <View style={styles.buttonSection}>
                    <TouchableOpacity onPress={this._TogglePrev}>
                        <View style={styles.buttons}>
                            <Text style={{ textAlign: 'center', color: 'white' }}>
                                {strings.back}
                            </Text>
                        </View>
                    </TouchableOpacity>
                    <View style={{ alignItems: 'center', justifyContent: 'center', height: height * 0.04 }}>
                        <Text style={{ color: 'white', textAlign: 'center' }}>{shapes[selectedIndex]} </Text>
                    </View>
                    <TouchableOpacity onPress={this._ToggleNext}>
                        <View style={styles.buttons}>
                            <Text style={{ textAlign: 'center', color: 'white' }}>
                                {strings.next}
                            </Text>
                        </View>
                    </TouchableOpacity>
                </View>
                <View style={{ alignItems: 'center', justifyContent: 'center', backgroundColor: '#D9E6FF', height: height * 0.001, width: width * 1 }}>
                    <Text style={{ color: 'white', textAlign: 'center' }}> </Text>
                </View>
                <View style={{ flex: 0, flexDirection: 'row', justifyContent: 'center' }}>
                    <TouchableOpacity onPress={this.takePicture} style={styles.capture}>
                        <View style={{
                            backgroundColor: 'white',
                            borderRadius: (height * 0.16) / 2,
                            padding: 15,
                            alignSelf: 'center',
                            margin: 25,
                            height: height * 0.085,
                            width: width * 0.16,
                            justifyContent: 'center',
                            alignItems: 'center',
                            borderWidth: 0.9,
                            borderColor: '#D9E6FF',
                        }}></View>
                    </TouchableOpacity>
                </View>
            </View>
        );
    }
}
I couldn't upload to firebase.
Versions:
react: 16.4.1
react-native: 0.56.0
react-native-camera: 1.12.0
react-native-firebase: 5.2.3
react-native-router-flux: 4.0.1
Can't believe I've figured it out >.<
If you've set up your project correctly to include firebase:
takePicture = async () => {
    if (this.camera) {
        // this code takes the picture
        const options = { quality: 0.5, base64: true };
        const data = await this.camera.takePictureAsync(options);
        // open debug to see the uri of image
        console.log(data.uri);
        // send your data.uri off to firebase! :D
        const processed = await firebase.vision().imageLabelerProcessImage(data.uri, {
            confidenceThreshold: 0.8,
        });
        // Look at your debugger again
        console.log('Label: ', processed);
    }
};
I hope this helps!
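(One caveat worth flagging: the snippet above passes data.uri to ML Kit's image labeler rather than to Storage; for the actual Storage upload, see the putFile approach in the next answer.)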
_publish = async () => {
    const imageuri = this.state.imagePath;
    // this is where + how you want your image to be stored
    const refFile = firebase.storage().ref().child('profile_pic');
    refFile.putFile(imageuri)
        .catch(error => {
            console.log(error);
            // Alert.alert('Hey', error);
        });
}
hope it helps!
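For completeness, a slightly fuller sketch under the same react-native-firebase v5 storage API, assuming you also want the download URL afterwards (the 'profile_pic' child path is taken from the answer above):

_publish = async () => {
    const imageuri = this.state.imagePath;
    const refFile = firebase.storage().ref().child('profile_pic');
    try {
        await refFile.putFile(imageuri);            // upload the local file
        const url = await refFile.getDownloadURL(); // URL you can save or display
        console.log('uploaded to', url);
    } catch (error) {
        console.log(error);
    }
};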