useEffect(() => {
initEngine();
return () => {
engine?.destroy();
};
}, []);
const initEngine = async () => {
if (engine) return;
engine = await RtcEngine.createWithContext(new RtcEngineContext(appId));
addListeners();
await engine?.enableVideo();
await engine?.startPreview();
await engine?.setChannelProfile(ChannelProfile.LiveBroadcasting);
await engine?.setClientRole(ClientRole.Broadcaster);
}
const addListeners = () => {
engine?.addListener('Warning', warningCode => {
console.info('Warning', warningCode);
});
engine?.addListener('Error', errorCode => {
console.info('Error', errorCode);
});
engine?.addListener('JoinChannelSuccess', (channel, uid, elapsed) => {
console.info('JoinChannelSuccess', channel, uid, elapsed);
setIsJoined(true);
});
engine?.addListener('LeaveChannel', stats => {
console.info('LeaveChannel', stats);
setIsJoined(false);
setRemoteUid([]);
});
engine?.addListener('UserJoined', (uid, elapsed) => {
console.info('UserJoined', uid, elapsed);
setRemoteUid([...remoteUid, uid]);
});
engine?.addListener('UserOffline', (uid, reason) => {
console.info('UserOffline', uid, reason);
setRemoteUid(remoteUid.filter(value => value !== uid));
});
};
This is my code example.
I take Warning 8 and Warning 16.
My camera doesn't come out.
But the other person can see my face.
I don't know how to solve the error code even if I look at it.
Related
this is my function
const generatedDalleImage = async () => {
await openai.createImageEdit(
selectedImage,
selectedMaskImage,
"human face",
1,
"1024x1024"
).then((response) => {
console.log(response);
setGeneratedImage(response.data.data[0].url)
}).catch(err => {
console.log(err);
});
}
i am getting this error
localVarFormParams.getHeaders is not function when using openai.createImageEdit()
i am really stuck in this one so any help is appreciated
I have made a function that checks for internet availability. whenever I call this function it gives me true every time whether the internet is ON or OFF. I want to have one function that contains code to check the internet and I can call it before fetching data from the internet . my code is below.
const [campusList, setCampusList]= React.useState([{label:'Select Campus', value:'select campus'}]);
const isConnected =()=>{
NetInfo.fetch().then(state => {
console.log("Connection type", state.type);
console.log("Is connected?", state.isConnected);
if(state.isConnected)
return true;
else
return false;
});
}
const loadCampuses = async()=>{
if(isConnected)
{
await fetch(url)
.then((respons)=>respons.json())
.then((jsonResponse)=>{
jsonResponse.map((data)=>
setCampusList(campusList=> [...campusList, {label:data.Text, value:data.Value}])
);
})
.catch((error)=>console.log(error))
//.finally(()=>setLoading(false))
}
}
fetch Returns a Promise that resolves to a NetInfoState object. you need to wait promise to resolve
try this
const isConnected = sendRequest => {
NetInfo.fetch().then(state => {
if (state.isConnected) {
sendRequest();
}
});
};
const loadCampuses = () => {
isConnected(async () => {
await fetch(url)
.then(respons => respons.json())
.then(jsonResponse => {
jsonResponse.map(data =>
setCampusList(campusList => [
...campusList,
{ label: data.Text, value: data.Value },
]),
);
})
.catch(error => console.log(error));
});
};
oh right, it's a promise, not just a straight return. you need to await for it. You don't need a separate function:
if(await NetInfo.fetch().isConnected)
When a csv file is uploaded on my s3 bucket, my lambda will be triggered to insert my data into DynamoDB.
I need a stream because the file is too large to be downloaded as full object.
const batchWrite = async (clientDynamoDB, itemsToProcess) => {
const ri = {};
ri[TABLE_DYNAMO] = itemsToProcess.map((itm) => toPutRequest(itm));
const params = { RequestItems: ri };
await clientDynamoDB.batchWriteItem(params).promise();
};
function runStreamPromiseAsync(stream, clientDynamoDB) {
return new Promise((resolve, reject) => {
const sizeChunk = 25;
let itemsToProcess = [];
stream
.pipe(fastCsv.parse({headers: Object.keys(schemaGeData), trim: true}))
.on("data", (row) => {
stream.pause();
itemsToProcess.push(row);
if (itemsToProcess.length === sizeChunk) {
batchWrite(clientDynamoDB, itemsToProcess).finally(() => {
stream.resume();
});
itemsToProcess = [];
}
})
.on("error", (err) => {
console.log(err);
reject("Error");
})
.on("end", () => {
stream.pause();
console.log("end");
batchWrite(clientDynamoDB, itemsToProcess).finally(() => {
resolve("OK");
});
});
});
}
module.exports.main = async (event, context, callback) => {
context.callbackWaitsForEmptyEventLoop = false;
const AWS = require('aws-sdk');
const s3 = new AWS.S3();
const object = event.Records[0].s3;
const bucket = object.bucket.name;
const file = object.object.key;
const agent = new https.Agent({
keepAlive: true
});
const client = new AWS.DynamoDB({
httpOptions: {
agent
}
});
try {
//get Stream csv data
const stream = s3
.getObject({
Bucket: bucket,
Key: file
})
.createReadStream()
.on('error', (e) => {
console.log(e);
});
await runStreamPromiseAsync(stream, client);
} catch (e) {
console.log(e);
}
};
When my file is 1000 lines everything is inserted but when I have 5000 lines, my function insert only around 3000 lines and this number is random... Sometimes more sometimes less..
So I'd like to understand what am I missing here ?
I also read this article but to be honest even if you pause the second stream, the first one is still running.. So if someone have any ideas on how to do this, it would be greatly appreciated !
Thanks
I found out why It was not fully processed, it's because the callback of batchWriteItem can return unprocess Items. So I change the function batchWrite and also the runPromiseStreamAsync a little bit because i might not have all the items processed from itemsToProcess.
Anyway here is the full code :
const batchWrite = (client, itemsToProcess) => {
const ri = {};
ri[TABLE_DYNAMO] = itemsToProcess.map((itm) => toPutRequest(itm));
const items = { RequestItems: ri };
const processItemsCallback = function(err, data) {
return new Promise((resolve, reject) => {
if(!data || data.length === 0){
return resolve();
}
if(err){
return reject(err);
}
let params = {};
params.RequestItems = data.UnprocessedItems;
return client.batchWriteItem(params, processItemsCallback);
});
};
return client.batchWriteItem(items, processItemsCallback );
};
function runStreamPromiseAsync(stream, clientDynamoDB) {
return new Promise((resolve, reject) => {
const sizeChunk = 25;
let itemsToProcess = [];
let arrayPromise = [];
stream
.pipe(fastCsv.parse({headers: Object.keys(schemaGeData), trim: true}))
.on("error", (err) => {
console.log(err);
reject("Error");
})
.on('data', data => {
itemsToProcess.push(data);
if(itemsToProcess.length === sizeChunk){
arrayPromise.push(batchWrite(clientDynamoDB, itemsToProcess));
itemsToProcess = [];
}
})
.on('end', () => {
if(itemsToProcess.length !== 0){
arrayPromise.push(batchWrite(clientDynamoDB, itemsToProcess));
}
resolve(Promise.all(arrayPromise).catch(e => {
reject(e)
}));
});
});
}
My effect is:
#Effect({dispatch: false}) /* sends request to httpService with params as login credentials on instance of loginAction. */
login$: Observable<Action> = this.actions$
.instanceOf(LoginActions.LoginAction)
.switchMap(
action => {
return this.loginHttpService.login(action.payload)
.map( (res: any) => {
if (res && res.message !== 'Invalid Login') {
const firstName = res.firstName;
const lastName = res.lastName;
this.tokenService.setToken(res.jwt);
this.tokenService.setFirstName(firstName.charAt(0).toUpperCase() + firstName.slice(1));
this.tokenService.setLastName(lastName.charAt(0).toUpperCase() + lastName .slice(1));
this.tokenService.setId(res.id);
this.tokenService.setAvatar(firstName.charAt(0).toUpperCase() + lastName.charAt(0).toUpperCase());
const permissions = res.roles
this.tokenService.setUserRoles(permissions)
return Observable.create(observer => {
console.log('in observable')
this.permissionsService.loadPermissions(permissions, () => {
observer.next({
type: 'string'
});
console.log('here we go')
this.store.dispatch(new LoginActions.LoginSuccessAction({user: res}))
return observer.complete();
})
})
}
})
.catch( (e:any) => {
this.store.dispatch(new LoginActions.LoginFailureAction(true));
return Observable.create(observer => {
return observer.complete();
})
});
});
The in observable log never fires. What am I doing incorrectly?
Sorry but I don't understand why you need to create a new Observable.
In this kind of situation, here is what I'm used to do :
#Effect()
login$ = this.actions$
.ofType<LoginAction.LoginAction>(LoginActions.LOGIN_ACTION)
.pipe(
map(action => action.payload),
switchMap(payload => {
return this.loginHttpService.login(payload)
.map(userLogged => {
return new LoginActions.LoginSuccessAction({user: userLogged});
})
.catch(error => {
return new LoginActions.LoginFailureAction(true);
});
})
);
#Effect()
loginSuccess$ = this.actions$
.ofType<LoginAction.LoginSuccess>(LoginActions.LOGIN_SUCCESS)
.pipe(
map(action => action.payload),
switchMap(payload => {
return this.permissionsService.loadPermissions(payload.user)
.map(permissions => {
return new LoginActions.PermissionsLoaded(permissions);
})
.catch(error => {
return new LoginActions.PermissionsLoadingFailed();
})
})
);
If existing loadPermissions method doesn't return an Observable, a new method as below can do the job :
loadPermissions(user): Observable<Permissions> {
return Observable.create(observer => {
loadPermissionsWithCallback(user, (response) => {
observer.next(response);
return observer.complete();
});
});
}
It's a chain of actions. LOGIN_ACTION -> LOGIN_SUCCESS -> PERMISSIONS_LOADED.
User is fully logged when action PERMISSIONS_LOADED is dispatched.
Each service method (login and loadPermissions) should return an Observable, and it's the case with new HttpClientModule.
Of course, it's just a simplified and incomplete example... Hope this will help you a little bit.
I have an array of items that I am passing to an API endpoint (using Sequelize as my ORM). I'm trying to iterate over each item and update it, however I'm getting a Unhandled rejection Error: Can't set headers after they are sent.
stepsController.put = (req, res) => {
const { steps } = req.body;
// Steps is an array of objects that I want to update...
steps.map(step => {
Step.findOne({ where: { id: step.id } })
.then(savedStep =>
savedStep
.update({
order: step.order,
})
.then(success => res.status(200).send(success))
.catch(error => res.send(error))
)
.then(ok => res.status(200).send(ok))
.catch(err => res.send(err));
});
};
I believe this is because it's sending the response for each item. Sequelize's update method is a promise. How can I iterate over all of the items and make sure all of the items are updated before sending a single successful response?
There are three ways you can do
Promise.all
Co
Async Await
1) Here it is , you can use Promise.all :
stepsController.put = (req, res) => {
const { steps } = req.body;
// Steps is an array of objects that I want to update...
Promise.all(steps.map(step => {
return Step.findOne({ where: { id: step.id } }).then(savedStep =>
return savedStep.update({
order: step.order,
})
.catch(error => error)
).catch(err => err)
}))
.then(ok => res.status(200).send(ok))
.catch(err => res.send(err));
};
2) Another way is to use co :
const co = require('co');
stepsController.put = co.wrap(function* (req, res) => {
try {
const { steps } = req.body;
// Steps is an array of objects that I want to update...
for(let i=0;i<steps.length ; i++) {
let savedStep = yield Step.findOne({ where: { id: steps[i].id } });
if(savedStep)
yield savedStep.update({ order: steps[i].order});
}
res.status(200).send();
}
catch(err){
res.send(err);
}
});
3) If you’re using Node 8.0+ , there is no need of any package you can directly use async await :
stepsController.put = async(req, res) => {
try {
const { steps } = req.body;
// Steps is an array of objects that I want to update...
for(let i=0;i<steps.length ; i++) {
let savedStep = await Step.findOne({ where: { id: steps[i].id } });
if(savedStep)
await savedStep.update({ order: steps[i].order});
}
res.status(200).send();
}
catch(err){
res.send(err);
}
};