I'm using EasyRTC to develop a video chat app. The ICE configuration is set up following the guide on the XirSys site:
// server-side handler; assumes the `request` HTTP client: var request = require('request');
easyrtc.on("getIceConfig", function(connectionObj, callback) {
    var iceConfig = [];
    request.post('https://api.xirsys.com/getIceServers', {
        form: {
            ident: '***',
            secret: '***',
            domain: '***',
            application: 'default',
            room: 'default',
            secure: 1
        },
    },
    function(error, response, body) {
        console.log(arguments);
        if (!error && response.statusCode == 200) {
            iceConfig = JSON.parse(body).d.iceServers;
            console.log(iceConfig);
            callback(null, iceConfig);
        } else {
            console.log(error);
            callback(error); // still invoke the callback so EasyRTC isn't left waiting
        }
    });
});
It works: I can run the EasyRTC demos, but there are no STUN/TURN hits in the XirSys console. I suspect this is because the app is still using the public signaling server from Priologic.
The documentation on XirSys's site mentions a "later tutorial" on how to change the signaling server, but I couldn't find one.
Does anybody know how to do it?
Thanks.
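For what it's worth, the EasyRTC client can be pointed at a self-hosted signaling server before connecting. Here is a minimal sketch, assuming the standard easyrtc client API and a placeholder server URL (not the actual deployment):

// client-side sketch: call setSocketUrl before easyApp/connect so the client talks to
// your own signaling server instead of a public one (URL below is a placeholder)
easyrtc.setSocketUrl("https://your-signaling-server.example.com:8443");

easyrtc.easyApp("easyrtc.videoChat", "selfVideo", ["callerVideo"], function(easyrtcid) {
    console.log("Connected to our own signaling server with id " + easyrtcid);
});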
UPDATE
The problem persists after migrating to the new platform version and replacing the request above with:
request({
    url: 'https://service.xirsys.com/ice',
    qs: {
        ident: '***',
        secret: '***',
        domain: '***',
        application: "default",
        room: "default",
        secure: 1
    },
    json: true
},
function(error, response, body) {
    if (!error && response.statusCode == 200) {
        iceConfig = body.d.iceServers;
        callback(null, iceConfig);
    } else {
        console.log(error);
    }
});
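A quick way to check whether the TURN servers returned above are actually being used is to force relay-only candidates in the browser console. This is a sketch using the standard WebRTC API; iceConfig stands for the array passed to the callback:

// browser-console sketch: with iceTransportPolicy 'relay', the only candidates that
// appear are ones relayed through the TURN servers in iceConfig, so the XirSys
// meters should register the traffic
var pc = new RTCPeerConnection({ iceServers: iceConfig, iceTransportPolicy: 'relay' });
pc.createDataChannel('probe');
pc.onicecandidate = function(e) {
    if (e.candidate) console.log(e.candidate.candidate); // should only show "typ relay" candidates
};
pc.createOffer().then(function(offer) { return pc.setLocalDescription(offer); });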
I answered this question directly via email, but for consistency I will update here as well.
The new XirSys platform went live yesterday, with full usage monitoring capabilities in the dashboard. Please give this a go, but remember that the meters will not update until 10 minutes after a disconnection from the TURN server. It may also help to use Wireshark or another network sniffer to see exactly how your connections are failing or working.
Many thanks,
Lee
CTO @ XirSys
Related
I have implemented Cloudflare on a live website. The website has a socket server set up with Socket.IO and Express, and everything was working fine before implementing Cloudflare.
Currently I'm using port 2053, which I've allowed access to through Laravel Forge.
socket.js
var app = require('express')();
const fs = require('fs');

var server = require('https').createServer({
    key: fs.readFileSync('/etc/nginx/ssl/mywebsite.com/1234/server.key'),
    cert: fs.readFileSync('/etc/nginx/ssl/mywebsite.com/1234/server.crt'),
}, app);

var io = require('socket.io')(server, {
    cors: {
        origin: function(origin, fn) {
            if (origin === "http://mywebsite.test" || origin === "https://mywebsite.com") {
                return fn(null, origin);
            }
            return fn('Error Invalid domain');
        },
        methods: ['GET', 'POST'],
        'reconnect': true
    },
});

var Redis = require('ioredis');
var redis = new Redis();

redis.subscribe('asset-channel', () => {
    console.log('asset-channel: started');
});

redis.on('message', function(channel, message) {
    message = JSON.parse(message);
    io.to(message.data.id).emit(channel + ':' + message.event + ':' + message.data.id, message.data);
});

io.on("connection", (socket) => {
    socket.on("join:", (data) => {
        socket.join(data.id);
    });
    socket.on("leave:", (data) => {
        socket.leave(data.id);
    });
});

server.listen(2053, () => {
    console.log('Server is running!');
});
app.js
if (!window.hasOwnProperty('io')) {
    // if (
    //     window.origin === "http://mywebsite.test" ||
    //     window.origin === "https://mywebsite.com" ||
    //     window.origin == "https://mywebsite.test"
    // ) {
    window.io = io.connect(`${window.origin}:2053`);
    window.io.on('connection');
    // }
}
As mentioned before, everything was working fine before implementing Cloudflare, and I have tried reading some documentation, for example:
https://developers.cloudflare.com/cloudflare-one/policies/zero-trust/cors
https://socket.io/docs/v4/handling-cors/
I found many similar problems online and tried several solutions, but nothing seems to make the socket connection work.
I tried allowing all CORS origins like so:
var io = require('socket.io')(server, {
    cors: {
        origin: "*",
        methods: ['GET', 'POST'],
        'reconnect': true
    },
});
That didn't work either, and I also tried configuring a few things in nginx, with no luck.
Error
Access to XMLHttpRequest at 'https://mywebsite.com:2053/socket.io/?EIO=4&transport=polling&t=NurmHmi' from origin 'https://mywebsite.com' has been blocked by CORS policy: No 'Access-Control-Allow-Origin' header is present on the requested resource.
I think I might have to configure something in the Cloudflare dashboard; I just don't know what, and my googling skills could not take me to the finish line this time around.
I'm not too experienced with sockets, so it would be awesome if a skilled socket expert who has had this issue before could guide me in the right direction. :)
I made it run by adding this to app.js:
window.io = io.connect(`${window.origin}:2053`, { transports: ["websocket"] });
Apparently Socket.IO tries to use long polling instead of WebSocket by default, and forcing the WebSocket transport avoids that.
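For anyone mirroring this on the server side, the Socket.IO server accepts the same option, so polling is never offered in the first place. A sketch of socket.js with that change (server is the HTTPS server created above; origins are the same placeholders):

// sketch: restrict the server to the WebSocket transport as well, so no client
// falls back to HTTP long-polling through Cloudflare
var io = require('socket.io')(server, {
    transports: ['websocket'],
    cors: {
        origin: ['https://mywebsite.com', 'http://mywebsite.test'],
        methods: ['GET', 'POST']
    }
});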
I'm running an Angular Universal application that is talking to an API. Now I'm trying to set up a proxy in the Universal server that proxies API requests to the actual API server:
server.use(['/api', '/sitemap.txt'], createProxyMiddleware({
    target: process.env.API_URL,
    onProxyReq: req => {
        console.log('Using origin: ' + getOrigin(req.getHeaders()));
        req.setHeader('origin', getOrigin(req.getHeaders()));
    },
    pathRewrite: {'^/api': ''}
}));
This works perfectly when running locally, but when running on the server (an Azure Web App) it doesn't. I can see the console.log output in the Web App logs, but the resulting document is the Angular application showing a "page not found" message.
I'm totally out of ideas on where to look for solutions.
Edit:
I tried another proxy middleware and it does do the trick. This code works both locally and on Azure.
import * as proxy from 'express-http-proxy';

// ...

server.use(['/api', '/sitemap.txt'], proxy(process.env.API_URL, {
    proxyPathResolver: req => {
        let url: string = req.url;
        if (url.startsWith('/api')) {
            url = url.substr(4);
        }
        return url;
    },
    proxyReqOptDecorator(proxyReqOpts, srcReq) {
        proxyReqOpts.headers['origin'] = getOrigin(proxyReqOpts.headers);
        return proxyReqOpts;
    }
}));
But it has some other limitations that make it unusable for our project, so I still need this resolved.
I have it working correctly now. This is the current setup:
server.use(
    '/api',
    createProxyMiddleware({
        target: process.env.API_URL,
        changeOrigin: true,
        headers: {
            Connection: 'keep-alive',
        },
        onProxyReq: (proxyReq, req, _res) => {
            proxyReq.setHeader('origin', getOrigin(req.headers));
        },
        pathRewrite: {
            '^/api': '',
        },
    })
);
So I added changeOrigin and the keep-alive header. I'm not sure which of the two resolved the issue; once I got it working I never bothered to check. I suspect it's the header, though.
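For what it's worth, changeOrigin makes http-proxy-middleware rewrite the outgoing Host header to the target's host; if the API behind process.env.API_URL is routed by host name (common on Azure), keeping the original Host could explain the "page not found" page coming back. A sketch that logs the outgoing Host header to check this (same getOrigin helper as above):

// sketch: log what Host header actually leaves the proxy, with changeOrigin on/off
server.use('/api', createProxyMiddleware({
    target: process.env.API_URL,
    changeOrigin: true, // toggle this and compare the logged Host header
    onProxyReq: (proxyReq, req, _res) => {
        console.log('outgoing Host header:', proxyReq.getHeader('host'));
        proxyReq.setHeader('origin', getOrigin(req.headers));
    },
    pathRewrite: { '^/api': '' },
}));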
Edit: It definitely isn't actually CORS. It's as if it's just ignoring my attempts to write the tokens into cookies. I am also having trouble getting it to throw an error that I can find useful, so I will keep throwing darts at the wall until one makes sense.
I have a graphql-yoga server with an Apollo Client frontend. I am using cookie-parser on the server to store Microsoft Graph authentication tokens in the browser. I am getting an error that shows up as CORS, but I can't figure out why.
Access to fetch at 'https://api.mydomain.store/' from origin 'https://mydomain.store' has been blocked by CORS policy: No 'Access-Control-Allow-Origin' header is present on the requested resource. If an opaque response serves your needs, set the request's mode to 'no-cors' to fetch the resource with CORS disabled.
Error: Network error: Failed to fetch
This is strange because I can make other queries to the server with no CORS issues. I thought it had to do with the way I was setting the options on the cookies, but I think that I am doing that correctly:
// saveCookies
function saveValuesToCookie(token: any, res: Response) {
    // Parse the identity token
    const user: any = jwt.decode(token.token.id_token);

    const isDev = process.env.NODE_ENV === 'development';
    console.log('isDev: ', isDev);

    const cookieOpts: CookieOptions = {
        domain: isDev ? 'localhost' : 'mydomain.store',
        maxAge: 1000 * 60 * 60 * 24 * 365,
        httpOnly: true,
        sameSite: isDev ? 'lax' : true,
        secure: isDev ? false : true,
    };

    // Save the access token in a cookie
    res.cookie('graph_access_token', token.token.access_token, cookieOpts);
    // Save the user's email in a cookie
    res.cookie('graph_user_email', user.preferred_username, cookieOpts);
    // Save the refresh token in a cookie
    res.cookie('graph_refresh_token', token.token.refresh_token, cookieOpts);
    res.cookie('graph_user_id', user.oid, cookieOpts);
    // Save the token expiration time in a cookie
    res.cookie(
        'graph_token_expires',
        token.token.expires_at.getTime(),
        cookieOpts
    );
}
Based on the resources I have seen so far, this seems correct with current browser rules.
So I look at where I am calling the mutation:
// apollo react authorize mutation
const AUTHORIZE_MUTATION = gql`
    mutation($code: String!) {
        authorize(code: $code) {
            id
            email
            displayName
            studentFaculty
        }
    }
`;

function AuthorizePage() {
    const router = useRouter();
    const { code } = router.query; // grab the code from the query params

    const [authorize, { loading }] = useMutation(AUTHORIZE_MUTATION, {
        variables: { code },
        // refetchQueries: [{ query: ME_QUERY }],
        onError: (error) => {
            console.error(error); // **this is where the CORS error originates**
            return Router.push('/');
        },
        update: (_, { data }) => {
            console.log(data); // ** never gets this far **
        },
    });

    useEffect(() => {
        if (router.query.code) {
            authorize();
        }
    }, [router.query]);

    if (loading) return <Loading />;
    return <Loading />;
}
I am at a loss. Can anyone spot what is wrong or point me towards other places to look? The hardest part for me is that the code works perfectly on localhost, with the client served on port 3000 and the server on port 4000.
I don't think this error is a genuine CORS issue, because I can make unauthenticated queries with no problem.
Thanks for your time!
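One thing that may be worth checking (a sketch, not the project's actual config): when cookies are involved, the browser only accepts them cross-origin if the server answers with an explicit origin and Access-Control-Allow-Credentials, and the Apollo link sends credentials. Roughly, assuming an Express-compatible server and the cors middleware package:

// server-side sketch (assumes the `cors` package and an Express-style `app`)
const cors = require('cors');
app.use(cors({
    origin: 'https://mydomain.store', // must be the exact origin, not '*', when credentials are used
    credentials: true                 // adds Access-Control-Allow-Credentials: true
}));
// on the client, the Apollo HTTP link must also send cookies explicitly,
// e.g. createHttpLink({ uri: 'https://api.mydomain.store/', credentials: 'include' })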
I am building a Vue.js client that needs to be authenticated through GitHub OAuth using an Express server. This is easy to do with server-side rendering, but doing it through a REST API has been troublesome for me.
I have set the homepage URL to "http://localhost:3000", where the server runs, and I want the authorization callback URL to be "http://localhost:8080" (which hosts the client). Instead, I am redirecting to "http://localhost:3000/auth/github/redirect", and in its callback redirecting to "http://localhost:8080". The problem I am facing is that I am unable to send user data to the Vue.js client through res.redirect. I am not sure if I am doing it the right way.
router.get("/github", passport.authenticate("github"));
router.get(
"/github/redirect",
passport.authenticate("github", { failureRedirect: "/login" }),
(req, res) => {
// res.send(req.user);
res.redirect("http://localhost:8080/"); // req.user should be sent with this
}
);
I have implemented the following approach as a workaround:
A route that returns the user details in a GET request:
router.get("/check", (req, res) => {
if (req.user === undefined) {
res.json({});
} else {
res.json({
user: req.user
});
}
});
The client app hits this API right after redirection, along with the necessary headers and the withCredentials option:
checkIfLoggedIn() {
    const url = `${API_ROOT}auth/check/`;
    return axios(url, {
        headers: { "Content-Type": "application/json" },
        withCredentials: true
    });
}
To enable credentials, we have to pass the following options while configuring CORS:
var corsOption = {
    origin: true,
    credentials: true
};
app.use(cors(corsOption));
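Note that the /check route above only returns a user if the session cookie set during the GitHub callback survives the redirect. A minimal sketch of the session and Passport wiring this relies on (illustrative names, not the actual project code):

// sketch: session middleware plus passport serialization, so req.user is available
// on later requests such as GET /auth/check
const session = require('express-session');
const passport = require('passport');

app.use(session({
    secret: 'replace-with-a-real-secret', // placeholder
    resave: false,
    saveUninitialized: false,
    cookie: { sameSite: 'lax', secure: false } // adjust for production HTTPS
}));
app.use(passport.initialize());
app.use(passport.session());

passport.serializeUser((user, done) => done(null, user));
passport.deserializeUser((user, done) => done(null, user));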
I am trying to send an email using Nodemailer and a custom email address configured through GoDaddy, following the "custom configurations" page in cPanel (screenshot not included here). Here is my code:
const nodemailer = require('nodemailer');

var transporter = nodemailer.createTransport({
    service: 'Godaddy',
    secureConnection: false,
    auth: {
        user: 'info@mywebsite.com',
        pass: 'mypassword'
    }
});

var mailOptions = {
    from: 'info@mywebsite.com',
    to: 'otheremail@gmail.com',
    subject: 'Sending Email using Node.js',
    text: 'That was easy!',
    html: '<h1>Welcome</h1><p>That was easy!</p>'
};

transporter.sendMail(mailOptions, function(error, info) {
    if (error) {
        console.log(error);
    } else {
        console.log('Email sent: ' + info.response);
    }
});
and my error log:
{ Error: connect EHOSTUNREACH 173.201.192.101:25
at Object.exports._errnoException (util.js:1012:11)
at exports._exceptionWithHostPort (util.js:1035:20)
at TCPConnectWrap.afterConnect [as oncomplete] (net.js:1080:14)
code: 'ECONNECTION',
errno: 'EHOSTUNREACH',
syscall: 'connect',
address: '173.201.192.101',
port: 25,
command: 'CONN' }
I've tried changing the port number, switching between secure and non-SSL, using my website address as the host, and pretty much everything else I can think of. I have successfully sent an email from the GoDaddy address using one of the webmail clients. Has anyone else encountered this, or does anyone have recommendations on things to try?
I am trying to send emails using Nodemailer from a Google Cloud Function using GoDaddy SMTP settings. I do not have Office 365 enabled on my GoDaddy hosting. None of the above options worked for me today (12 November 2019); TLS needs to be enabled.
I had to use the following configuration:
const mailTransport = nodemailer.createTransport({
    host: "smtpout.secureserver.net",
    secure: true,
    secureConnection: false, // TLS requires secureConnection to be false
    tls: {
        ciphers: 'SSLv3'
    },
    requireTLS: true,
    port: 465,
    debug: true,
    auth: {
        user: "put your godaddy hosted email here",
        pass: "put your email password here"
    }
});
Then, I could send a test email as follows:
const mailOptions = {
    from: `put your godaddy hosted email here`,
    to: `bharat.biswal@gmail.com`,
    subject: `This is a Test Subject`,
    text: `Hi Bharat
Happy Halloween!
If you need any help, please contact us.
Thank You. And Welcome!
Support Team
`,
};
mailTransport.sendMail(mailOptions).then(() => {
    console.log('Email sent successfully');
}).catch((err) => {
    console.log('Failed to send email');
    console.error(err);
});
You should make some changes in your transporter:
var smtpTrans = nodeMailer.createTransport({
    service: 'Godaddy',
    host: "smtpout.secureserver.net",
    secureConnection: true,
    port: 465,
    auth: {
        user: "username",
        pass: "password"
    }
});
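A minimal send with that transporter might look like this (addresses are placeholders):

// sketch: basic send using the transporter above (placeholder addresses)
smtpTrans.sendMail({
    from: 'username@yourdomain.com', // should match the authenticated GoDaddy account
    to: 'recipient@example.com',
    subject: 'Test via GoDaddy SMTP',
    text: 'Hello from nodemailer'
}, function (err, info) {
    if (err) return console.error(err);
    console.log('Sent: ' + info.response);
});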
I realize this is an old post, but I just wanted to add to this since the GoDaddy SMTP server has changed, in case someone else comes across this and has the same problem I had. The answer by @tirmey did not work for me, but this did.
let nodemailer = require('nodemailer');

let mailerConfig = {
    host: "smtp.office365.com",
    secureConnection: true,
    port: 587,
    auth: {
        user: "username@email.com",
        pass: "password"
    }
};
let transporter = nodemailer.createTransport(mailerConfig);

let mailOptions = {
    from: mailerConfig.auth.user,
    to: 'SomePerson@email.com',
    subject: 'Some Subject',
    html: `<body>` +
        `<p>Hey Dude</p>` +
        `</body>`
};

transporter.sendMail(mailOptions, function (error) {
    if (error) {
        console.log('error:', error);
    } else {
        console.log('good');
    }
});
The solutions proposed above seem to be no longer valid; none of them worked for me. The following works for me:
const nodemailer = require('nodemailer');
const os = require('os');

let mailerConfig = {
    host: os.hostname(),
    port: 25,
};

let transporter = nodemailer.createTransport(mailerConfig);

transporter.sendMail({
    from: '<from>',
    to: '<to>',
    subject: '<subject>',
    text: '<text>'
}, (err, info) => {
    console.log(info);
    console.log(err);
});
I was able to solve the problem by using this code, along with some points I have listed after it:
const smtpTransport = nodemailer.createTransport({
    host: "smtp.office365.com",
    secure: false,
    port: 587,
    auth: {
        user: 'info@my-domain.com',
        pass: 'Password'
    }
});

const mailOptions = {
    to: 'target-mail@',
    subject: 'Test 01',
    html: 'Body',
    from: 'info@resoluship.com'
};

await smtpTransport.sendMail(mailOptions);
Don't forget to set the 'from' attribute in mailOptions.
Don't use ',' in your 'from' attribute.
For me, the solution for a production shared-hosting server was completely different from the one for testing.
It seems no authentication or credentials are required for it to work.
I created this code from this document describing how to use an SMTP relay server, and you can use it with Nodemailer. GoDaddy support told me I couldn't, but I don't think they know about third-party tools.
https://au.godaddy.com/help/send-form-mail-using-an-smtp-relay-server-953
// note: `name`, `res`, and `path` are assumed to come from the enclosing Express route handler
async function main() {
    // create reusable transporter object using the default SMTP transport
    let transporter = nodemailer.createTransport({
        host: 'localhost', // use localhost for linux cPanel hosting
        port: 25,
        secure: false,
        // no need for authentication
        tls: {
            rejectUnauthorized: false
        }
    });

    // send mail with defined transport object
    let info = await transporter.sendMail({
        to: "you@youremail.com", // list of receivers
        subject: `New Message from ${name}`, // Subject line
        text: `yourtext`, // plain text body
        html: `your text in html`, // html body
        headers: {
            priority: 'high'
        },
        from: "you@youremail.com" // sender address
    });

    // send success page if successful
    if (res.statusCode === 200) {
        res.sendFile(path.join(__dirname, 'views/success.ejs'));
    }

    console.log("Message sent: %s", info.messageId, res.statusCode);
}

main().catch(console.error);
main().catch(console.error);
A common cause of this error is antivirus software, so disable it for 10 minutes if you are testing locally.
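When testing locally, Nodemailer's verify() is also a quick way to see whether the SMTP connection itself is being blocked (for example by antivirus or a firewall) before trying to send anything. A sketch with placeholder settings:

// sketch: check connectivity and credentials without sending a message
// (host/port/credentials are placeholders)
const nodemailer = require('nodemailer');
const transporter = nodemailer.createTransport({
    host: 'smtpout.secureserver.net',
    port: 465,
    secure: true,
    auth: { user: 'you@yourdomain.com', pass: 'your-password' }
});

transporter.verify(function (error, success) {
    if (error) {
        console.error('SMTP connection failed:', error);
    } else {
        console.log('SMTP server is reachable and credentials were accepted');
    }
});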