I use electron-builder to package my Vue projects. The project has a video page that plays an RTMP video stream.
The video plays fine with electron:serve and in the browser, but it does not play after electron:build.
My versions are as follows:
os: Windows 10
"vue": "2.6.10",
"electron": "^13.6.9",
"electron-builder": "^22.14.13",
"vue-aliplayer-v2": "^1.3.0",
My configuration is as follows.
'use strict'

import { app, protocol, BrowserWindow } from 'electron'
import { createProtocol } from 'vue-cli-plugin-electron-builder/lib'
import installExtension, { VUEJS_DEVTOOLS } from 'electron-devtools-installer'

const isDevelopment = process.env.NODE_ENV !== 'production'

protocol.registerSchemesAsPrivileged([
  { scheme: 'app', privileges: { secure: true, standard: true, stream: true } }
])

async function createWindow() {
  const win = new BrowserWindow({
    width: 800,
    height: 600,
    webPreferences: {
      enableRemoteModule: !!process.env.IS_TEST,
      webSecurity: false,
      nodeIntegration: process.env.ELECTRON_NODE_INTEGRATION,
      contextIsolation: !process.env.ELECTRON_NODE_INTEGRATION
    }
  })

  if (process.env.WEBPACK_DEV_SERVER_URL) {
    await win.loadURL(process.env.WEBPACK_DEV_SERVER_URL)
    if (!process.env.IS_TEST) win.webContents.openDevTools()
  } else {
    createProtocol('app')
    win.loadURL('app://./index.html')
  }
}

app.on('window-all-closed', () => {
  if (process.platform !== 'darwin') {
    app.quit()
  }
})

app.on('activate', () => {
  if (BrowserWindow.getAllWindows().length === 0) createWindow()
})

app.on('ready', async () => {
  if (isDevelopment && !process.env.IS_TEST) {
    // Install Vue Devtools
    try {
      await installExtension(VUEJS_DEVTOOLS)
    } catch (e) {
      console.error('Vue Devtools failed to install:', e.toString())
    }
  }
  createWindow()
})

if (isDevelopment) {
  if (process.platform === 'win32') {
    process.on('message', (data) => {
      if (data === 'graceful-exit') {
        app.quit()
      }
    })
  } else {
    process.on('SIGTERM', () => {
      app.quit()
    })
  }
}
video page:
components: {
  VueAliplayerV2
},
data() {
  return {
    options: {
      isLive: true,  // required when switching to a live stream
      format: 'm3u8' // required when switching to a live stream
    },
    source: '//localhost:9600/live/uav0.m3u8',
    show: true,
    isShowMultiple: false,
  };
},
I want to know how to solve the problem of the video not playing in the packaged Electron app.
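One detail that may matter (my assumption, not something confirmed above): source is a protocol-relative URL, so it inherits the protocol of the page that loads it. Under electron:serve the page is served over http://, but the packaged app is loaded via app://./index.html, so //localhost:9600/... would resolve against the app:// scheme. A minimal sketch with the scheme spelled out explicitly:

// Sketch only: make the stream URL absolute so it does not inherit the app:// scheme
// of the packaged page. The host/port are taken from the source value in the question.
data() {
  return {
    options: {
      isLive: true,
      format: 'm3u8'
    },
    source: 'http://localhost:9600/live/uav0.m3u8', // was '//localhost:9600/live/uav0.m3u8'
    show: true,
    isShowMultiple: false,
  };
},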
Related
I successfully implemented the Agora SDK videocall module with virtual background in my react.js web app, but when I try to implement it in the react-native mobile version I keep getting errors I don't know how to solve. I'm a bit new to react-native, so this might be an easy fix, but I can't find it.
Basically, after submitting a form with the uid, channel, role, and token (I have a token service), the videocall component is rendered.
These are my dependencies
"agora-access-token": "^2.0.4",
"agora-react-native-rtm": "^1.5.0",
"agora-extension-virtual-background": "^1.1.1",
"agora-rtc-sdk-ng": "^4.14.0",
"agora-rn-uikit": "^4.0.0",
"axios": "^0.27.2",
"react": "18.0.0",
"react-native": "0.69.4",
"react-native-agora": "^3.7.1",
"react-native-dotenv": "^3.3.1"
This is the main videocall component.
import React, { useEffect } from "react";
import { useState } from "react";
import axios from "axios";
import { Call } from "./components/Call";

const VideoCallApp = ({ videoCallData }) => {
  const [token, setToken] = useState("");
  const [virtualBackgroundData, setVirtualBackgroundData] = useState({
    type: "img",
    // example
    // type: 'img',
    // value: ''
    //
    // type: 'blur',
    // value: integer // blurring degree, low (1), medium (2), or high (3).
    //
    // type: 'color',
    // value: string // color on hex or string
  });

  useEffect(() => {
    const getToken = async () => {
      const url = `${process.env.REACT_APP_AGORA_TOKEN_SERVICE}/rtc/${videoCallData.channel}/${videoCallData.role}/uid/${videoCallData.uid}`;
      try {
        const response = await axios.get(url);
        const token = response.data.rtcToken;
        setToken(token);
      } catch (err) {
        alert(err);
      }
    };
    getToken();
  }, []);

  return (
    token && (
      <Call
        rtcProps={{
          appId: process.env.REACT_APP_AGORA_APP_ID,
          channel: videoCallData.channel,
          token: token,
          uid: videoCallData.uid,
        }}
        virtualBackground={virtualBackgroundData}
      />
    )
  );
};

export default VideoCallApp;
This renders the Call component, which has more of the functionality for the virtual background.
import { useEffect, useState } from 'react'
import AgoraRTC from 'agora-rtc-sdk-ng';
import VirtualBackgroundExtension from 'agora-extension-virtual-background';
import { LocalVideo } from './LocalVideo';
import { RemoteVideo } from './RemoteVideo';
import { VideoControllers } from './VideoButtons';
import { View } from 'react-native';

const client = AgoraRTC.createClient({ mode: "rtc", codec: "vp8" });
const extension = new VirtualBackgroundExtension();
AgoraRTC.registerExtensions([extension]);

export const Call = ({ rtcProps = {}, virtualBackground = {} }) => {
  const [localTracks, setLocalTracks] = useState({
    audioTrack: null,
    videoTrack: null
  });
  const [processor, setProcessor] = useState(null);

  useEffect(() => {
    if (processor) {
      try {
        const initProcessor = async () => {
          // Initialize the extension and pass in the URL of the Wasm file
          await processor.init(process.env.PUBLIC_URL + "/assets/wasms");
          // Inject the extension into the video processing pipeline in the SDK
          localTracks.videoTrack.pipe(processor).pipe(localTracks.videoTrack.processorDestination);
          playVirtualBackground();
        }
        initProcessor()
      } catch (e) {
        console.log("Fail to load WASM resource!"); return null;
      }
    }
  }, [processor]);

  useEffect(() => {
    if (localTracks.videoTrack && processor) {
      setProcessor(null);
    }
  }, [localTracks]);

  const playVirtualBackground = async () => {
    try {
      switch (virtualBackground.type) {
        case 'color':
          processor.setOptions({ type: 'color', color: virtualBackground.value });
          break;
        case 'blur':
          processor.setOptions({ type: 'blur', blurDegree: Number(virtualBackground.value) });
          break;
        case 'img':
          const imgElement = document.createElement('img');
          imgElement.onload = async () => {
            try {
              processor.setOptions({ type: 'img', source: imgElement });
              await processor.enable();
            } catch (error) {
              console.log(error)
            }
          }
          imgElement.src = process.env.PUBLIC_URL + '/assets/backgrounds/background-7.jpg';
          imgElement.crossOrigin = "anonymous";
          break;
        default:
          break;
      }
      await processor.enable();
    } catch (error) {
      console.log(error)
    }
  }

  const join = async () => {
    await client.join(rtcProps.appId, rtcProps.channel, rtcProps.token, Number(rtcProps.uid));
  }

  const startVideo = () => {
    AgoraRTC.createCameraVideoTrack()
      .then(videoTrack => {
        setLocalTracks(tracks => ({
          ...tracks,
          videoTrack
        }));
        client.publish(videoTrack);
        videoTrack.play('local');
      })
  }

  const startAudio = () => {
    AgoraRTC.createMicrophoneAudioTrack()
      .then(audioTrack => {
        setLocalTracks(tracks => ({
          ...tracks,
          audioTrack
        }));
        client.publish(audioTrack);
      });
  }

  const stopVideo = () => {
    localTracks.videoTrack.close();
    localTracks.videoTrack.stop();
    client.unpublish(localTracks.videoTrack);
  }

  const stopAudio = () => {
    localTracks.audioTrack.close();
    localTracks.audioTrack.stop();
    client.unpublish(localTracks.audioTrack);
  }

  const leaveVideoCall = () => {
    stopVideo();
    stopAudio();
    client.leave();
  }

  async function startOneToOneVideoCall() {
    join()
      .then(() => {
        startVideo();
        startAudio();
        client.on('user-published', async (user, mediaType) => {
          if (client._users.length > 1) {
            client.leave();
            alert('Please Wait Room is Full');
            return;
          }
          await client.subscribe(user, mediaType);
          if (mediaType === 'video') {
            const remoteVideoTrack = user.videoTrack;
            remoteVideoTrack.play('remote');
          }
          if (mediaType === 'audio') {
            user.audioTrack.play();
          }
        });
      });
  }

  // Initialization
  function setProcessorInstance() {
    if (!processor && localTracks.videoTrack) {
      // Create a VirtualBackgroundProcessor instance
      setProcessor(extension.createProcessor());
    }
  }

  async function setBackground() {
    if (localTracks.videoTrack) {
      setProcessorInstance()
    }
  }

  useEffect(() => {
    startOneToOneVideoCall();
  }, []);

  return (
    <View>
      <View>
        <LocalVideo />
        <RemoteVideo />
        <VideoControllers
          actions={{
            startAudio,
            stopAudio,
            startVideo,
            stopVideo,
            leaveVideoCall,
            startOneToOneVideoCall,
            setBackground
          }}
        />
      </View>
    </View>
  )
}
The local and remote video components are empty Views where the videos are displayed, and the VideoControllers are buttons that manage the videocall.
When I run the app the form works fine, but as soon as I submit it the app crashes with these errors:
WARN `new NativeEventEmitter()` was called with a non-null argument without the required `addListener` method.
WARN `new NativeEventEmitter()` was called with a non-null argument without the required `removeListeners` method.
LOG Running "videocall" with {"rootTag":1}
ERROR TypeError: window.addEventListener is not a function. (In 'window.addEventListener("online", function () {
_this32.networkState = EB.ONLINE;
})', 'window.addEventListener' is undefined)
VideoCallApp
Form#http://localhost:8081/index.bundle?platform=android&dev=true&minify=false&app=com.videocall&modulesOnly=false&runModule=true:121056:41
RCTView
View
RCTView
View
AppContainer#http://localhost:8081/index.bundle?platform=android&dev=true&minify=false&app=com.videocall&modulesOnly=false&runModule=true:78626:36
videocall(RootComponent)
ERROR TypeError: undefined is not an object (evaluating '_$$_REQUIRE(_dependencyMap[5], "./components/Call").Call')
Something is happening at the Call component, and I think it might be the DOM manipulation for the videos, but I can't find an example of a react-native project with the Agora SDK.
I don't want to use the UIKit because, even though it works, I can't use the virtual background, which I need for this project. Can anyone help me?
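For what it's worth, the stack trace points in that direction: agora-rtc-sdk-ng is the web SDK and expects browser globals such as window and document, which React Native does not provide, whereas react-native-agora (already in the dependency list) wraps the native engine. A rough sketch under that assumption, using the react-native-agora 3.x API and leaving the virtual background aside:

// Sketch only (assumes react-native-agora 3.x): join a channel with the native engine
// instead of the DOM-based agora-rtc-sdk-ng client. appId/channel/token/uid would come
// from the same form and token service as above.
import React, { useEffect } from 'react';
import RtcEngine, { RtcLocalView } from 'react-native-agora';

export const NativeCall = ({ appId, channel, token, uid }) => {
  useEffect(() => {
    let engine;
    const start = async () => {
      engine = await RtcEngine.create(appId); // no window/document involved
      await engine.enableVideo();
      await engine.joinChannel(token, channel, null, uid);
    };
    start();
    return () => {
      engine && engine.destroy();
    };
  }, []);

  // Native views replace the videoTrack.play('local') / document.createElement calls.
  return <RtcLocalView.SurfaceView style={{ flex: 1 }} channelId={channel} />;
};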
I'm bad at understanding what's happening in the metro.config.js file.
I already added the SVG transformer. Now I need to add a CSS transformer.
I have this metro.config.js file:
const { getDefaultConfig } = require("metro-config");

module.exports = (async () => {
  const {
    resolver: { sourceExts, assetExts }
  } = await getDefaultConfig();
  return {
    transformer: {
      getTransformOptions: async () => ({
        transform: {
          experimentalImportSupport: false,
          inlineRequires: false
        }
      }),
      babelTransformerPath: require.resolve("react-native-svg-transformer")
    },
    resolver: {
      assetExts: assetExts.filter(ext => ext !== "svg"),
      sourceExts: [...sourceExts, "svg"]
    }
  };
})();
In order to use react-native-css-modules I need to put this content in metro.config.js:
const { getDefaultConfig } = require("metro-config");

module.exports = (async () => {
  const {
    resolver: { sourceExts },
  } = await getDefaultConfig();
  return {
    transformer: {
      babelTransformerPath: require.resolve("react-native-css-transformer"),
    },
    resolver: {
      sourceExts: [...sourceExts, "css"],
    },
  };
})();
But if I just replace the content, the SVG transformer will no longer work.
How can I combine the two metro.config.js files?
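Metro only accepts a single babelTransformerPath, so one common way to combine them (a sketch, not verified against this exact project) is to point it at a small custom transformer, say ./combined-transformer.js (a hypothetical file name), that dispatches by file extension and falls back to the default Babel transformer, while both extensions are added to sourceExts:

// combined-transformer.js (hypothetical file name): dispatch per extension.
const svgTransformer = require("react-native-svg-transformer");
const cssTransformer = require("react-native-css-transformer");
const upstreamTransformer = require("metro-react-native-babel-transformer");

module.exports.transform = function ({ src, filename, options }) {
  if (filename.endsWith(".svg")) {
    return svgTransformer.transform({ src, filename, options });
  }
  if (filename.endsWith(".css")) {
    return cssTransformer.transform({ src, filename, options });
  }
  return upstreamTransformer.transform({ src, filename, options });
};

metro.config.js then keeps the same structure as the first config above, but references the combined transformer and lists both extensions:

// metro.config.js: same shape as before, combined transformer, svg + css in sourceExts.
const { getDefaultConfig } = require("metro-config");

module.exports = (async () => {
  const {
    resolver: { sourceExts, assetExts }
  } = await getDefaultConfig();
  return {
    transformer: {
      babelTransformerPath: require.resolve("./combined-transformer.js")
    },
    resolver: {
      assetExts: assetExts.filter(ext => ext !== "svg"),
      sourceExts: [...sourceExts, "svg", "css"]
    }
  };
})();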
I'm having trouble changing the 'rate' prop to change the speed of the audio being played.
I'm using expo-av (https://docs.expo.dev/versions/latest/sdk/av/).
Here's my code:
import { createContext, Component } from 'react';
import { Text, View, Alert } from 'react-native';
import * as MediaLibrary from 'expo-media-library';
import { DataProvider } from 'recyclerlistview';
import { Audio } from 'expo-av';
import AsyncStorageLib from '@react-native-async-storage/async-storage';
import { play, pause, resume, playNext } from "../misc/AudioController";

export const AudioContext = createContext()
export class AudioProvider extends Component {
  constructor(props) {
    super(props);
    this.state = {
      audioFiles: [],
      permissionError: false,
      dataProvider: new DataProvider((r1, r2) => r1 !== r2),
      playbackObj: null,
      soundObj: null,
      currentAudio: {},
      isPlaying: false,
      currentAudioIndex: null,
      playbackPosition: null,
      playbackDuration: null,
      rate: 2.0,
    };
    this.totalAudioCount = 0;
  }

  permissionAlert = () => {
    Alert.alert("Permission Required", "This app needs to read audio files", [
      { text: "I am ready", onPress: () => this.getPermission() },
      {
        text: "cancel",
        onPress: () => this.permissionAlert(),
      },
    ]);
  };

  getAudioFiles = async () => {
    const { dataProvider, audioFiles } = this.state;
    let media = await MediaLibrary.getAssetsAsync({
      mediaType: "audio",
    });
    media = await MediaLibrary.getAssetsAsync({
      mediaType: "audio",
      first: media.totalCount,
    });
    this.totalAudioCount = media.totalCount;
    this.setState({
      ...this.state,
      dataProvider: dataProvider.cloneWithRows([
        ...audioFiles,
        ...media.assets,
      ]),
      audioFiles: [...audioFiles, ...media.assets],
    });
  };

  loadPreviousAudio = async () => {
    let previousAudio = await AsyncStorageLib.getItem("previousAudio");
    let currentAudio;
    let currentAudioIndex;
    if (previousAudio === null) {
      currentAudio = this.state.audioFiles[0];
      currentAudioIndex = 0;
    } else {
      previousAudio = JSON.parse(previousAudio);
      currentAudio = previousAudio.audio;
      currentAudioIndex = previousAudio.index;
    }
    this.setState({ ...this.state, currentAudio, currentAudioIndex });
  };

  getPermission = async () => {
    // {
    //   "canAskAgain": true,
    //   "expires": "never",
    //   "granted": false,
    //   "status": "undetermined",
    // }
    const permission = await MediaLibrary.getPermissionsAsync();
    if (permission.granted) {
      this.getAudioFiles();
    }
    if (!permission.canAskAgain && !permission.granted) {
      this.setState({ ...this.state, permissionError: true });
    }
    if (!permission.granted && permission.canAskAgain) {
      const { status, canAskAgain } =
        await MediaLibrary.requestPermissionsAsync();
      if (status === "denied" && canAskAgain) {
        this.permissionAlert();
      }
      if (status === "granted") {
        this.getAudioFiles();
      }
      if (status === "denied" && !canAskAgain) {
        this.setState({ ...this.state, permissionError: true });
      }
    }
  };

  onPlaybackStatusUpdate = async (playbackStatus) => {
    console.log("hier");
    if (playbackStatus.isLoaded && playbackStatus.isPlaying) {
      this.updateState(this, {
        playbackPosition: playbackStatus.positionMillis,
        playbackDuration: playbackStatus.durationMillis,
      });
    }
    if (playbackStatus.didJustFinish) {
      const nextAudioIndex = this.state.currentAudioIndex + 1;
      if (nextAudioIndex >= this.totalAudioCount) {
        this.state.playbackObj.unloadAsync();
        this.updateState(this, {
          soundObj: null,
          currentAudio: this.state.audioFiles[0],
          isPlaying: false,
          currentAudioIndex: 0,
          playbackPosition: null,
          playbackDuration: null,
        });
      }
      const audio = this.state.audioFiles[nextAudioIndex];
      const status = await playNext(this.state.playbackObj, audio.uri);
      this.updateState(this, {
        soundObj: status,
        currentAudio: audio,
        isPlaying: true,
        currentAudioIndex: nextAudioIndex,
      });
    }
  };

  componentDidMount() {
    this.getPermission();
    if (this.state.playbackObj === null) {
      this.setState({ ...this.state, playbackObj: new Audio.Sound() });
    }
  }

  updateState = (prevState, newState = {}) => {
    this.setState({ ...prevState, ...newState });
  };

  render() {
    const {
      audioFiles,
      dataProvider,
      permissionError,
      playbackObj,
      soundObj,
      currentAudio,
      isPlaying,
      currentAudioIndex,
      playbackPosition,
      playbackDuration,
      rate,
    } = this.state;
    if (permissionError)
      return (
        <View
          style={{
            flex: 1,
            justifyContent: "center",
            alignItems: "center",
          }}
        >
          <Text>It looks like you haven't accepted the permission</Text>
        </View>
      );
    return (
      <AudioContext.Provider
        value={{
          audioFiles,
          dataProvider,
          playbackObj,
          soundObj,
          currentAudio,
          isPlaying,
          currentAudioIndex,
          totalAudioCount: this.totalAudioCount,
          playbackPosition,
          playbackDuration,
          rate,
          updateState: this.updateState,
          loadPreviousAudio: this.loadPreviousAudio,
          onPlaybackStatusUpdate: this.onPlaybackStatusUpdate
        }}
      >
        {this.props.children}
      </AudioContext.Provider>
    );
  }
}
export default AudioProvider;
and here's some more:
// play audio
// Import the expo-av AV types
import { PitchCorrectionQuality, shouldCorrectPitch, rate } from "expo-av/build/AV.types";

export const play = async (playbackObj, uri) => {
  try {
    return await playbackObj.loadAsync(
      { uri },
      { shouldPlay: true },
    );
  } catch (error) {
    console.log('error inside play helper method', error.message);
  }
};

// pause
export const pause = async playbackObj => {
  try {
    // playbackObj.setRateAsync(rate = 2.0, shouldCorrectPitch = true, PitchCorrectionQuality = High);
    return await playbackObj.setStatusAsync({
      shouldPlay: false,
    });
  } catch (error) {
    console.log('error inside pause helper method', error.message);
  }
};

// resume
export const resume = async playbackObj => {
  try {
    return await playbackObj.playAsync();
  } catch (error) {
    console.log('error inside resume helper method', error.message);
  }
};

// select next
export const playNext = async (playbackObj, uri) => {
  try {
    await playbackObj.stopAsync();
    await playbackObj.unloadAsync();
    return await play(playbackObj, uri);
  } catch (error) {
    console.log('error inside playNext helper method');
  }
};
I've tried including rate: 2.0 inside this.state { audioFiles: [], permissionError: false, ... } but it didn't work.
I've also tried calling await playbackObj.setRateAsync() in the second code snippet.
Any suggestions?
Nvm, I found the solution. Here's my updated code:
// play audio
// Import the expo-av AV types
import { PitchCorrectionQuality, shouldCorrectPitch, rate } from "expo-av/build/AV.types";

export const play = async (playbackObj, uri) => {
  try {
    await playbackObj.loadAsync(
      { uri },
      { shouldPlay: true },
    );
    return await playbackObj.setStatusAsync({ rate: 0.9749090909 });
  } catch (error) {
    console.log('error inside play helper method', error.message);
  }
};

// pause
export const pause = async playbackObj => {
  try {
    return await playbackObj.setStatusAsync({
      shouldPlay: false,
      rate: 0.9749090909,
    });
  } catch (error) {
    console.log('error inside pause helper method', error.message);
  }
};

// resume
export const resume = async playbackObj => {
  try {
    return await playbackObj.playAsync();
  } catch (error) {
    console.log('error inside resume helper method', error.message);
  }
};

// select next
export const playNext = async (playbackObj, uri) => {
  try {
    await playbackObj.stopAsync();
    await playbackObj.unloadAsync();
    return await play(playbackObj, uri);
  } catch (error) {
    console.log('error inside playNext helper method');
  }
};
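As a side note (my addition, not part of the original answer): expo-av's Audio.Sound also exposes setRateAsync, and the same rate key in setStatusAsync can be combined with shouldCorrectPitch so that changing the speed does not change the pitch. A small sketch of that variant:

// Sketch: set playback speed with pitch correction (expo-av).
// 1.5 is an arbitrary example rate, not a value from the question.
export const setSpeed = async (playbackObj, rate = 1.5) => {
  try {
    // equivalent alternative: await playbackObj.setRateAsync(rate, true);
    return await playbackObj.setStatusAsync({ rate, shouldCorrectPitch: true });
  } catch (error) {
    console.log('error inside setSpeed helper method', error.message);
  }
};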
With Vue 3, I have global state actions split across different files, but they do not perform the necessary checks when connected to Vue. I don't understand where I am going wrong, and I would be happy if you could help.
Please help me see the mistakes I have made; I am open to any comments.
app.js
require('./bootstrap');

import { createApp } from 'vue';
import App from './App.vue'
import WebsiteRouter from './Website/router';
import AdminRouter from './Admin/router';
import ApplicationRouter from './Application/router';
import store from './store';
import axios from 'axios';

axios.defaults.withCredentials = true;

store.dispatch('getUser').then(() => {
  createApp(App)
    .use(chanceRoute())
    .use(store)
    .use(DashboardPlugin)
    .mount("#app");
})
store\index.js

import { createStore } from 'vuex'
import app from './app'
import appConfig from './app-config'

export default new createStore({
  modules: {
    app,
    appConfig,
  }
})
app\index.js
import axios from 'axios';
import sharedMutations from 'vuex-shared-mutations';

export default {
  state: {
    user: null
  },
  getters: {
    user(state) {
      return state.user;
    },
    verified(state) {
      if (state.user) return state.user.email_verified_at
      return null
    },
    id(state) {
      if (state.user) return state.user.id
      return null
    }
  },
  mutations: {
    setUser(state, payload) {
      state.user = payload;
    }
  },
  actions: {
    async login({dispatch}, payload) {
      try {
        await axios.get('/sanctum/csrf-cookie');
        await axios.post('/api/login', payload).then((res) => {
          return dispatch('getUser');
        }).catch((err) => {
          throw err.response
        });
      } catch (e) {
        throw e
      }
    },
    async register({dispatch}, payload) {
      try {
        await axios.post('/api/register', payload).then((res) => {
          return dispatch('login', {'email': payload.email, 'password': payload.password})
        }).catch((err) => {
          throw (err.response)
        })
      } catch (e) {
        throw (e)
      }
    },
    async logout({commit}) {
      await axios.post('/api/logout').then((res) => {
        commit('setUser', null);
      }).catch((err) => {
      })
    },
    async getUser({commit}) {
      await axios.get('/api/user').then((res) => {
        commit('setUser', res.data);
      }).catch((err) => {
      })
    },
    async profile({commit}, payload) {
      await axios.patch('/api/profile', payload).then((res) => {
        commit('setUser', res.data.user);
      }).catch((err) => {
        throw err.response
      })
    },
    async password({commit}, payload) {
      await axios.patch('/api/password', payload).then((res) => {
      }).catch((err) => {
        throw err.response
      })
    },
    async verifyResend({dispatch}, payload) {
      let res = await axios.post('/api/verify-resend', payload)
      if (res.status != 200) throw res
      return res
    },
    async verifyEmail({dispatch}, payload) {
      let res = await axios.post('/api/verify-email/' + payload.id + '/' + payload.hash)
      if (res.status != 200) throw res
      dispatch('getUser')
      return res
    },
  },
  plugins: [sharedMutations({predicate: ['setUser']})],
}
app-config\index.js
import { $themeConfig } from '../../themeConfig'

export default {
  namespaced: true,
  state: {
    layout: {
      isRTL: $themeConfig.layout.isRTL,
      skin: localStorage.getItem('skin') || $themeConfig.layout.skin,
      routerTransition: $themeConfig.layout.routerTransition,
      type: $themeConfig.layout.type,
      contentWidth: $themeConfig.layout.contentWidth,
      menu: {
        hidden: $themeConfig.layout.menu.hidden,
      },
      navbar: {
        type: $themeConfig.layout.navbar.type,
        backgroundColor: $themeConfig.layout.navbar.backgroundColor,
      },
      footer: {
        type: $themeConfig.layout.footer.type,
      },
    },
  },
  getters: {},
  mutations: {
    TOGGLE_RTL(state) {
      state.layout.isRTL = !state.layout.isRTL
      document.documentElement.setAttribute('dir', state.layout.isRTL ? 'rtl' : 'ltr')
    },
    UPDATE_SKIN(state, skin) {
      state.layout.skin = skin
      // Update value in localStorage
      localStorage.setItem('skin', skin)
      // Update DOM for dark-layout
      if (skin === 'dark') document.body.classList.add('dark-layout')
      else if (document.body.className.match('dark-layout')) document.body.classList.remove('dark-layout')
    },
    UPDATE_ROUTER_TRANSITION(state, val) {
      state.layout.routerTransition = val
    },
    UPDATE_LAYOUT_TYPE(state, val) {
      state.layout.type = val
    },
    UPDATE_CONTENT_WIDTH(state, val) {
      state.layout.contentWidth = val
    },
    UPDATE_NAV_MENU_HIDDEN(state, val) {
      state.layout.menu.hidden = val
    },
    UPDATE_NAVBAR_CONFIG(state, obj) {
      Object.assign(state.layout.navbar, obj)
    },
    UPDATE_FOOTER_CONFIG(state, obj) {
      Object.assign(state.layout.footer, obj)
    },
  },
  actions: {},
}
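For reference, and as an assumption about what "the necessary checks" means rather than a confirmed diagnosis: the app module above is not namespaced, so its actions and getters are addressed by their bare names, while appConfig has namespaced: true and needs the appConfig/ prefix. Also, Vuex only reads the plugins option on the root store, not on a module, so vuex-shared-mutations would normally be registered on createStore itself. A small sketch of both points:

// Sketch only (my assumption, not a confirmed fix): register vuex-shared-mutations
// on the root store, since Vuex ignores a `plugins` key defined inside a module.
import { createStore } from 'vuex';
import sharedMutations from 'vuex-shared-mutations';
import app from './app';
import appConfig from './app-config';

export default createStore({
  modules: { app, appConfig },
  plugins: [sharedMutations({ predicate: ['setUser'] })],
});

// Example usage from a component:
//   this.$store.dispatch('login', { email, password });   // 'app' is not namespaced: bare name
//   this.$store.commit('appConfig/UPDATE_SKIN', 'dark');  // 'appConfig' is namespaced: prefixed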
I am building a react-native app with Expo, using react-native-svg-transformer and also Amplify (where I have created functions). The problem is that the functions in the amplify directory have their own package.json files and do not work. That is why I'm trying to put the amplify directory in the blockList of the Metro configuration, but I keep getting the error:
Failed to construct transformer: TypeError: Transformer is not a constructor
This is my metro config:
const { getDefaultConfig } = require("expo/metro-config")

// extra config is needed to enable `react-native-svg-transformer`
module.exports = (async () => {
  const {
    resolver: { sourceExts, assetExts },
  } = await getDefaultConfig(__dirname)
  return {
    transformer: {
      babelTransformerPath: require.resolve("react-native-svg-transformer"),
      assetPlugins: ["expo-asset/tools/hashAssetFiles"],
    },
    resolver: {
      assetExts: assetExts.filter((ext) => ext !== "svg"),
      sourceExts: [...sourceExts, "svg"],
      blockList: [/amplify.*/]
    },
  }
})()
I also tried this but it didn't work either:
const { getDefaultConfig } = require("expo/metro-config")

// extra config is needed to enable `react-native-svg-transformer`
module.exports = (async () => {
  const {
    resolver: { sourceExts, assetExts },
  } = await getDefaultConfig(__dirname)
  return {
    transformer: {
      babelTransformerPath: require.resolve("react-native-svg-transformer"),
      experimentalImportSupport: false,
      inlineRequires: true,
    },
    resolver: {
      assetExts: assetExts.filter((ext) => ext !== "svg"),
      sourceExts: [...sourceExts, "svg"],
      blockList: [/amplify.*/]
    },
  }
})()
It turned out that some packages used metro-config@0.59 while I had npm-installed metro-config@0.61, so they were incompatible.